Skip to content

Commit

Permalink
Merge pull request #25 from Webperf-se/features
Browse files Browse the repository at this point in the history
X-mas release (2.0)
  • Loading branch information
flowertwig-org authored Dec 21, 2020
2 parents d2cadf5 + 47a6a5b commit d4c8c92
Show file tree
Hide file tree
Showing 70 changed files with 2,248 additions and 376 deletions.
3 changes: 2 additions & 1 deletion SAMPLE-config.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,10 @@
#### NOTE: Rename this file to 'config.py' and fill in the missing info below

# useragent for HTTP requests
#useragent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:81.0) Gecko/20100101 Firefox/81.0'
useragent = 'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)'

# enter your API key for Google Pagespeed API
googlePageSpeedApiKey = ""

# timeout (in seconds) for HTTP requests
# (a shadowed duplicate assignment of 5 was removed; 60 is the effective value)
http_request_timeout = 60
198 changes: 141 additions & 57 deletions default.py

Large diffs are not rendered by default.

76 changes: 74 additions & 2 deletions engines/csv.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,82 @@
#-*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
from models import Sites, SiteTests
from engines.utils import use_website
import csv


def write_tests(output_filename, siteTests):
    """Write site test results to a CSV file, one row per test.

    output_filename -- path of the CSV file to (over)write.
    siteTests -- iterable of dicts keyed by SiteTests.fieldnames().
    """
    with open(output_filename, 'w', newline='') as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames=SiteTests.fieldnames())

        writer.writeheader()
        # BUG FIX: this call was duplicated, which emitted every row twice.
        writer.writerows(siteTests)


def add_site(input_filename, url):
    """Append *url* as a new site to the CSV file and return the full list.

    The new site's id is the current number of sites (0-based append).
    """
    existing = read_sites(input_filename, 0, -1)
    new_id = len(existing)
    existing.append([new_id, url])
    write_sites(input_filename, existing)
    print(_('TEXT_WEBSITE_URL_ADDED').format(url))
    return existing


def delete_site(input_filename, url):
    """Remove every site whose URL equals *url* from the CSV file.

    Writes the remaining sites back via write_sites() and returns them
    as [id, url] pairs.
    """
    sites = read_sites(input_filename, 0, -1)
    remaining = list()
    for site in sites:
        site_id = site[0]
        site_url = site[1]
        if (url != site_url):
            remaining.append([site_id, site_url])

    write_sites(input_filename, remaining)

    # BUG FIX: previously this printed the loop variable `site_url` (the URL
    # of the last site iterated — or raised NameError on an empty list)
    # instead of the URL actually requested for deletion.
    print(_('TEXT_WEBSITE_URL_DELETED').format(url))

    return remaining


def read_sites(input_filename, input_skip, input_take):
    """Read sites from a CSV file and return them as [id, url] pairs.

    Two layouts are supported:
      * Rows with the full Sites field count: the first row is treated as a
        header and skipped; columns 0/1 are used as id/url.
      * Single-column rows: the column is the website URL and the row index
        is used as the id.

    input_skip/input_take are forwarded to use_website() for paging
    (take == -1 means "no limit").
    """
    # NOTE: a previous pass that opened the file just to run csv.Sniffer was
    # removed — its `dialect` and `reader` results were never used.
    sites = list()

    with open(input_filename, newline='') as csvfile:
        csv_reader = csv.reader(csvfile, delimiter=',', quotechar='|')
        current_index = 0
        number_of_fields = len(Sites.fieldnames())  # loop-invariant, hoisted
        for row in csv_reader:
            current_number_of_fields = len(row)
            if number_of_fields == current_number_of_fields:
                # ignore first row as that is our header info
                # NOTE(review): header files page with current_index + 1 while
                # single-column files use current_index — presumably to offset
                # for the header row; confirm this asymmetry is intended.
                if current_index != 0 and use_website(current_index + 1, input_skip, input_take):
                    sites.append([row[0], row[1]])
            elif current_number_of_fields == 1:
                # we have no header and only one column, use column as website url
                if use_website(current_index, input_skip, input_take):
                    sites.append([current_index, "".join(row)])
            current_index += 1

    return sites


def write_sites(output_filename, sites):
    # Serialize [id, url] pairs through the Sites model and write them as CSV.
    sites_output = list()
    for site in sites:
        site_id = site[0]
        site_url = site[1]
        # Sites(...).todata() presumably yields row data matching
        # Sites.fieldnames() — TODO confirm against the models module.
        site_object = Sites(id=site_id, website=site_url).todata()
        sites_output.append(site_object)

    # NOTE(review): output goes to "output-" + output_filename, not to
    # output_filename itself — so add_site/delete_site never modify the file
    # they read from. Looks unintended; confirm whether the prefix is wanted.
    with open("output-" + output_filename, 'w', newline='') as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames=Sites.fieldnames())

        writer.writeheader()
        writer.writerows(sites_output)
41 changes: 27 additions & 14 deletions engines/json.py
Original file line number Diff line number Diff line change
@@ -1,40 +1,51 @@
#-*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
from engines.utils import use_website
import json


def add_site(input_filename, url):
    """Append *url* to the JSON sites file and return the updated site list.

    The new site's id is the current number of sites.
    """
    # Diff residue removed: the old and new revisions of this function were
    # interleaved, which called read_sites twice and printed twice.
    sites = read_sites(input_filename, 0, -1)
    id = len(sites)
    sites.append([id, url])
    write_sites(input_filename, sites)

    print(_('TEXT_WEBSITE_URL_ADDED').format(url))

    return sites


def delete_site(input_filename, url):
    """Remove every site whose URL equals *url* from the JSON sites file.

    Writes the remaining sites back via write_sites() and returns them
    as [id, url] pairs.
    """
    # Diff residue removed: duplicated read_sites call and duplicated print.
    sites = read_sites(input_filename, 0, -1)
    remaining = list()
    for site in sites:
        site_id = site[0]
        site_url = site[1]
        if (url != site_url):
            remaining.append([site_id, site_url])

    write_sites(input_filename, remaining)

    # BUG FIX: print the URL requested for deletion, not the loop variable
    # (which held the last site's URL, or was unbound for an empty list).
    print(_('TEXT_WEBSITE_URL_DELETED').format(url))

    return remaining

def read_sites(input_filename, input_skip, input_take):
    """Read sites from a JSON file shaped {"sites": [{"id": ..., "url": ...}]}.

    Returns [id, url] pairs, paged through use_website() with
    input_skip/input_take (take == -1 means "no limit").
    """
    # Removed: a leftover old signature line (`def read_sites(input_filename):`)
    # and a stray debug statement `print('A')`.
    sites = list()
    with open(input_filename) as json_input_file:
        data = json.load(json_input_file)
        current_index = 0
        for site in data["sites"]:
            if use_website(current_index, input_skip, input_take):
                sites.append([site["id"], site["url"]])
            current_index += 1
    return sites


def write_tests(output_filename, siteTests):
with open(output_filename, 'w') as outfile:
# json require us to have an object as root element
Expand All @@ -43,18 +54,20 @@ def write_tests(output_filename, siteTests):
}
json.dump(testsContainerObject, outfile)


def write_sites(output_filename, sites):
    """Write [id, url] pairs to *output_filename* as {"sites": [...]} JSON."""
    with open(output_filename, 'w') as outfile:
        # json requires an object as the root element
        # (duplicated diff-residue dict lines and the unused current_siteid
        # counter were removed)
        jsonSites = list()
        for site in sites:
            jsonSites.append({
                'id': site[0],
                'url': site[1]
            })

        sitesContainerObject = {
            "sites": jsonSites
        }
        json.dump(sitesContainerObject, outfile)

41 changes: 41 additions & 0 deletions engines/sitemap.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
# -*- coding: utf-8 -*-
from models import Sites, SiteTests
from engines.utils import use_website
import config
from tests.utils import *
import re


def read_sites(input_sitemap_url, input_skip, input_take):
    """Fetch a sitemap.xml and return [index, url] pairs for its <loc> entries.

    input_skip/input_take page the result via use_website()
    (take == -1 means "no limit").
    """
    sites = list()

    sitemap_content = httpRequestGetContent(input_sitemap_url)

    # Pull every <loc>...</loc> URL out of the sitemap XML.
    regex = r"<loc>(?P<itemurl>[^<]+)<"

    current_index = 0
    # Cleanup: the enumerate() wrapper and its unused matchNum were removed.
    for match in re.finditer(regex, sitemap_content, re.MULTILINE):
        item_url = match.group('itemurl')

        if use_website(current_index, input_skip, input_take):
            sites.append([current_index, item_url])
        current_index += 1
    return sites


def add_site(input_filename, url):
    """Read-only engine: warn, then return the sitemap's sites unchanged."""
    print("WARNING: sitemap engine is a read only method for testing all pages in a sitemap.xml, NO changes will be made")
    return read_sites(input_filename, 0, -1)


def delete_site(input_filename, url):
    """Read-only engine: warn, then return the sitemap's sites unchanged."""
    print("WARNING: sitemap engine is a read only method for testing all pages in a sitemap.xml, NO changes will be made")
    return read_sites(input_filename, 0, -1)
20 changes: 13 additions & 7 deletions engines/sql.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,16 @@
#-*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
from engines.utils import use_website

def write_tests(output_filename, siteTests, input_skip, input_take):
    """Write site test results as INSERT statements to a .sql file.

    Only tests whose index passes use_website(input_skip, input_take) are
    emitted (take == -1 means "no limit").
    """
    with open(output_filename, 'w') as outfile:
        current_index = 0
        for test in siteTests:
            if use_website(current_index, input_skip, input_take):
                format_str = """INSERT INTO sitetests (site_id, test_date, type_of_test, check_report, json_check_data, most_recent, rating)
VALUES ("{siteid}", "{testdate}", "{testtype}", "{report}", "{json}", "{recent}", "{rating}");\n"""
                # NOTE(review): values are interpolated unescaped — a report
                # containing a double quote yields broken SQL; consider
                # escaping before widening use of this engine.
                sql_command = format_str.format(siteid=test["site_id"], testdate=test["date"], testtype=test["type_of_test"],
                                                report=test["report"], json=test["data"], recent=1, rating=test["rating"])

                # BUG FIX: the write now happens inside the filter branch.
                # Previously it ran for every test, re-emitting the last
                # passing command for filtered-out tests and raising
                # NameError when the very first test was filtered out.
                outfile.write(sql_command)
            current_index += 1
31 changes: 21 additions & 10 deletions engines/sqlite.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
#-*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
from engines.utils import use_website
import sqlite3


def db_tables(output_filename):
conn = sqlite3.connect(output_filename)
c = conn.cursor()
Expand All @@ -12,6 +14,7 @@ def db_tables(output_filename):

conn.close()


def add_site(input_filename, url):
conn = sqlite3.connect(input_filename)
c = conn.cursor()
Expand All @@ -24,10 +27,11 @@ def add_site(input_filename, url):

conn.close()

print("website with url: " + url + " has been added\n")
print(_('TEXT_WEBSITE_URL_ADDED').format(url))

return read_sites(input_filename)


def delete_site(input_filename, url):
conn = sqlite3.connect(input_filename)
c = conn.cursor()
Expand All @@ -39,40 +43,47 @@ def delete_site(input_filename, url):

conn.close()

print("website with url: " + url + " has been deleted\n")
print(_('TEXT_WEBSITE_URL_DELETED').format(url))

return read_sites(input_filename)
return read_sites(input_filename, 0, -1)

def read_sites(input_filename, input_skip, input_take):
    """Read active sites from the SQLite database as [id, website] pairs.

    input_skip/input_take page the rows via use_website()
    (take == -1 means "no limit").
    """
    # Diff residue removed: a leftover old signature line and a duplicated
    # order_by assignment.
    sites = list()
    order_by = 'title ASC'

    conn = sqlite3.connect(input_filename)
    c = conn.cursor()

    current_index = 0
    for row in c.execute('SELECT id, website FROM sites WHERE active=1 ORDER BY {0}'.format(order_by)):
        if use_website(current_index, input_skip, input_take):
            sites.append([row[0], row[1]])
        current_index += 1
    conn.close()
    return sites


def write_tests(output_filename, siteTests):
    """Persist test results to the SQLite database.

    For each test, the previous result of the same (site, test type) is
    demoted (most_recent=0), then the new result is inserted with
    most_recent=1.
    """
    conn = sqlite3.connect(output_filename)
    c = conn.cursor()

    for test in siteTests:
        # set previous testresult as not latest
        # FIX: parameterized queries replace string-formatted SQL, which
        # produced broken/injectable statements when values contained quotes.
        c.execute(
            'UPDATE sitetests SET most_recent=0 WHERE site_id=? AND type_of_test=? AND most_recent=1',
            (test["site_id"], test["type_of_test"]))
        conn.commit()

        # insert the new testresult as the most recent one
        c.execute(
            'INSERT INTO sitetests (site_id, test_date, type_of_test, check_report, json_check_data, most_recent, rating)'
            ' VALUES (?, ?, ?, ?, ?, ?, ?)',
            (test["site_id"], test["date"], test["type_of_test"],
             test["report"], test["data"], 1, test["rating"]))
        conn.commit()

    conn.close()
11 changes: 11 additions & 0 deletions engines/utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# -*- coding: utf-8 -*-


def use_website(current_index, skip, take):
    """Return True when the item at *current_index* falls inside the page.

    skip -- number of leading items to drop (0 disables skipping).
    take -- maximum number of items after the skip; -1 means "no limit".
    """
    below_window = skip > 0 and current_index < skip
    beyond_window = take != -1 and current_index >= skip + take
    return not (below_window or beyond_window)
Binary file added locales/en/LC_MESSAGES/a11y_lighthouse.mo
Binary file not shown.
36 changes: 36 additions & 0 deletions locales/en/LC_MESSAGES/a11y_lighthouse.po
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
# English
# Copyright (C) 2020 WebPerf
# FIRST AUTHOR <[email protected]>, 2020.
#
msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"POT-Creation-Date: 2020-12-12 10:45+0200\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: Marcus <[email protected]>\n"
"Language-Team: English <[email protected]>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: pygettext.py 1.5\n"

msgid "TEXT_RUNNING_TEST"
msgstr "###############################\nRunning test: 10 - Accessibility (Google Lighthouse)"

msgid "TEXT_REVIEW_A11Y_VERY_GOOD"
msgstr "* The website does not have any apparent issues with accessibility!\n"

msgid "TEXT_REVIEW_A11Y_IS_GOOD"
msgstr "* The website can be more accessible, but is rather good!\n"

msgid "TEXT_REVIEW_A11Y_IS_OK"
msgstr "* The accessibility is average, but needs to get better.\n"

msgid "TEXT_REVIEW_A11Y_IS_BAD"
msgstr "* The website is quite bad at accessibility and sucks for disabled people!\n"

msgid "TEXT_REVIEW_A11Y_IS_VERY_BAD"
msgstr "* The accessibility is apparently really bad!\n"

msgid "TEXT_REVIEW_A11Y_NUMBER_OF_PROBLEMS"
msgstr "* Number of problems with accessibility: {}\n"
Binary file not shown.
Loading

0 comments on commit d4c8c92

Please sign in to comment.