Integrated IPython Widget in MainWindow #56

Merged · 6 commits · Dec 22, 2023
1 change: 1 addition & 0 deletions .gitignore
@@ -124,6 +124,7 @@ celerybeat.pid
# Environments
.env
.venv
.venv10
env/
venv/
ENV/
4 changes: 1 addition & 3 deletions setup.py
@@ -73,9 +73,7 @@
"mkdocstrings[python]",
"pymdown-extensions",
],
"gui": [
"pyqt5",
],
"gui": ["pyqt5", "qtconsole"],
}
extras_require["all"] = list(
{rq for target in extras_require.keys() for rq in extras_require[target]}
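
The added "qtconsole" dependency backs the integrated IPython widget named in the PR title; the GUI wiring itself is not part of this diff. As a minimal sketch only (class and object names below are illustrative, not taken from this repository), an in-process qtconsole widget is typically embedded in a PyQt5 main window like this:

import sys

from PyQt5.QtWidgets import QApplication, QMainWindow
from qtconsole.inprocess import QtInProcessKernelManager
from qtconsole.rich_jupyter_widget import RichJupyterWidget


class ConsoleWindow(QMainWindow):
    def __init__(self) -> None:
        super().__init__()
        # In-process kernel: the console shares the GUI process and its objects.
        self.kernel_manager = QtInProcessKernelManager()
        self.kernel_manager.start_kernel()
        self.kernel_client = self.kernel_manager.client()
        self.kernel_client.start_channels()

        console = RichJupyterWidget()
        console.kernel_manager = self.kernel_manager
        console.kernel_client = self.kernel_client
        self.setCentralWidget(console)


if __name__ == "__main__":
    app = QApplication(sys.argv)
    window = ConsoleWindow()
    window.show()
    sys.exit(app.exec_())
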
4 changes: 3 additions & 1 deletion src/staticwordpress/core/constants.py
@@ -38,7 +38,7 @@

VERSION_MAJOR = 0
VERSION_MINOR = 0
VERSION_REVISION = 4
VERSION_REVISION = 5
VERISON = f"{VERSION_MAJOR}.{VERSION_MINOR}.{VERSION_REVISION}"

SHARE_FOLDER_PATH = Path(
@@ -92,6 +92,8 @@ class HOST(ExtendedEnum):
NETLIFY = "NETLIFY"
# CLOUDFLARE = "CLOUDFLARE"
# LOCALHOST = "LOCALHOST"
# GITHUB = "GITHUB"
# GITLAB = "GITLAB"


class URL(ExtendedEnum):
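
With this bump the assembled version string evaluates to "0.0.5":

VERISON = f"{VERSION_MAJOR}.{VERSION_MINOR}.{VERSION_REVISION}"  # == "0.0.5" after this change
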
4 changes: 2 additions & 2 deletions src/staticwordpress/core/crawler.py
@@ -45,8 +45,8 @@
# INTERNAL IMPORTS
# +++++++++++++++++++++++++++++++++++++++++++++++++++++

from .utils import get_mock_response, get_remote_content, get_clean_url
from .constants import CONFIGS, URL, LINK_REGEX
from ..core.utils import get_mock_response, get_remote_content, get_clean_url
from ..core.constants import CONFIGS, URL, LINK_REGEX

# +++++++++++++++++++++++++++++++++++++++++++++++++++++
# IMPLEMENATIONS
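
This file (and several of the files below) switches from sibling-relative imports to package-relative ones; both forms resolve to the same module, for example:

# Inside staticwordpress/core/crawler.py both of these resolve to staticwordpress.core.utils:
from .utils import get_clean_url        # sibling-relative (before)
from ..core.utils import get_clean_url  # package-relative (after, used throughout this PR)
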
2 changes: 1 addition & 1 deletion src/staticwordpress/core/errors.py
@@ -29,5 +29,5 @@
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++


class SerpWingsResponseNotValid(Exception):
class ResponseNotValid(Exception):
pass
6 changes: 3 additions & 3 deletions src/staticwordpress/core/github.py
@@ -27,9 +27,9 @@
# STANDARD LIBARY IMPORTS
# +++++++++++++++++++++++++++++++++++++++++++++++++++++

import logging
from datetime import datetime
from pathlib import Path
import logging

# +++++++++++++++++++++++++++++++++++++++++++++++++++++
# 3rd PARTY LIBRARY IMPORTS
@@ -81,12 +81,12 @@ def inner(self):
return inner

@check_gh_token
def is_token_valid(self):
def is_token_valid(self) -> bool:
logging.info(f"Verifying Github Token.")
return self._gh_object.get_user().name != ""

@check_gh_token
def is_repo_valid(self):
def is_repo_valid(self) -> bool:
logging.info(f"Verifying Github Repository.")
return self._gh_object.get_user().get_repo(self._gh_repo) is not None

4 changes: 2 additions & 2 deletions src/staticwordpress/core/i18n.py
@@ -34,7 +34,7 @@
# INTERNAL IMPORTS
# +++++++++++++++++++++++++++++++++++++++++++++++++++++

from .constants import LANGUAGES, CONFIGS
from ..core.constants import LANGUAGES, CONFIGS

# +++++++++++++++++++++++++++++++++++++++++++++++++++++
# IMPLEMENATIONS
@@ -65,7 +65,7 @@ def language(self) -> LANGUAGES:
return self._lang

@language.setter
def language(self, language_) -> None:
def language(self, language_: LANGUAGES) -> None:
self._lang = language_


38 changes: 19 additions & 19 deletions src/staticwordpress/core/project.py
@@ -39,14 +39,14 @@
# INTERNAL IMPORTS
# +++++++++++++++++++++++++++++++++++++++++++++++++++++

from .constants import (
from ..core.constants import (
PROJECT,
REDIRECTS,
HOST,
SOURCE,
USER_AGENT,
CONFIGS,
VERISON,
USER_AGENT,
LINK_REGEX,
)

@@ -218,16 +218,16 @@ def save_as(self, path_: str = None) -> None:
def save(self) -> None:
if self.is_open() and self["path"]:
with self["path"].open("w") as f:
_self_copy = deepcopy(self)
_self_copy["path"] = str(self["path"])
_self_copy["user-agent"] = self["user-agent"].value
_self_copy["source"]["type"] = self["source"]["type"].value
_self_copy["destination"]["host"] = self["destination"]["host"].value
_self_copy["redirects"] = self["redirects"].value
_self_copy["destination"]["output"] = str(self["destination"]["output"])
_self_copy["status"] = PROJECT.SAVED.value
self_copy = deepcopy(self)
self_copy["path"] = str(self["path"])
self_copy["user-agent"] = self["user-agent"].value
self_copy["source"]["type"] = self["source"]["type"].value
self_copy["destination"]["host"] = self["destination"]["host"].value
self_copy["redirects"] = self["redirects"].value
self_copy["destination"]["output"] = str(self["destination"]["output"])
self_copy["status"] = PROJECT.SAVED.value

json.dump(_self_copy, f, indent=4)
json.dump(self_copy, f, indent=4)

self["status"] = PROJECT.SAVED

@@ -328,8 +328,8 @@ def src_type(self) -> SOURCE:
return self["source"]["type"]

@src_type.setter
def src_type(self, source_type_: str) -> None:
self["source"]["type"] = source_type_
def src_type(self, src_type_: str) -> None:
self["source"]["type"] = src_type_

@property
def src_url(self) -> str:
@@ -344,16 +344,16 @@ def ss_archive(self) -> str:
return self["source"]["simply-static"]["archive"]

@ss_archive.setter
def ss_archive(self, archive_name_: str) -> None:
self["source"]["simply-static"]["archive"] = archive_name_
def ss_archive(self, ss_archive_name_: str) -> None:
self["source"]["simply-static"]["archive"] = ss_archive_name_

@property
def ss_folder(self) -> str:
return self["source"]["simply-static"]["folder"]

@ss_folder.setter
def ss_folder(self, folder_: str) -> None:
self["source"]["simply-static"]["folder"] = folder_
def ss_folder(self, ss_folder_: str) -> None:
self["source"]["simply-static"]["folder"] = ss_folder_

@property
def zip_file_url(self) -> str:
@@ -450,8 +450,8 @@ def search(self) -> str:
return self["search"]

@search.setter
def search(self, search: str) -> None:
self["search"] = search
def search(self, search_: str) -> None:
self["search"] = search_

@property
def search_path(self) -> Path:
33 changes: 21 additions & 12 deletions src/staticwordpress/core/redirects.py
@@ -36,20 +36,28 @@
# INTERNAL IMPORTS
# +++++++++++++++++++++++++++++++++++++++++++++++++++++

from .constants import HOST, REDIRECTS
from .errors import SerpWingsResponseNotValid
from ..core.constants import HOST, REDIRECTS
from ..core.errors import ResponseNotValid

# +++++++++++++++++++++++++++++++++++++++++++++++++++++
# IMPLEMENATIONS
# +++++++++++++++++++++++++++++++++++++++++++++++++++++


class Redirect:
def __init__(self, from_, to_, query_, status, force_, source_) -> None:
def __init__(
self,
from_: str,
to_: str,
query_: str,
status_: int,
force_: bool,
source_: str,
) -> None: # source should be REDIRECTS
self._from = from_
self._to = to_
self._query = query_
self._status = status
self._status = status_
self._force = force_
self._source = source_
self._hash = hashlib.sha256(from_.encode("utf-8")).hexdigest()
@@ -100,14 +108,15 @@ def save(self, output_file_, host_: HOST) -> None:
else:
f.write(redirect.as_line(True))

def get_from_plugin(self, redirects_api_path: str, wp_auth_token_: str) -> None:
def get_from_plugin(self, redirects_api_path_: str, wp_auth_token_: str) -> None:
try:
wp_api_response = requests.get(
redirects_api_path, headers={"Authorization": "Basic " + wp_auth_token_}
redirects_api_path_,
headers={"Authorization": "Basic " + wp_auth_token_},
)

if wp_api_response.status_code >= 400:
raise SerpWingsResponseNotValid
raise ResponseNotValid

redirects_as_dict = json.loads(wp_api_response.content)

@@ -116,23 +125,23 @@ def get_from_plugin(self, redirects_api_path: str, wp_auth_token_: str) -> None:
redirect_=Redirect(
from_=redirect_["url"],
to_=redirect_["action_data"]["url"],
status=redirect_["action_code"],
status_=redirect_["action_code"],
query_=None,
force_=True,
source_=REDIRECTS.REDIRECTION.value,
)
)
except SerpWingsResponseNotValid:
except ResponseNotValid:
logging.info(
f"Redirects are not valid. Make sure that redirection plug is properly configured."
)

def add_search(self, search_page: str) -> None:
def add_search(self, search_page_: str) -> None:
self.add_redirect(
Redirect(
from_="/*",
to_=f"/{search_page}/",
status=301,
to_=f"/{search_page_}/",
status_=301,
query_='{s = ":s"}',
force_=True,
source_=REDIRECTS.NONE.value,
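
For reference, a minimal construction of a Redirect with the updated signature, using the same keyword names as add_search above (the URLs are placeholders):

redirect = Redirect(
    from_="/old-post/",
    to_="/new-post/",
    query_=None,
    status_=301,
    force_=True,
    source_=REDIRECTS.NONE.value,
)
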
2 changes: 1 addition & 1 deletion src/staticwordpress/core/search.py
@@ -129,7 +129,7 @@ def add(self, soup_: BeautifulSoup, url_path_: str) -> None:
}
)

def copy_scripts(self):
def copy_scripts(self) -> None:
"""Copy Search.js into search folder"""
src = Path(f'{SHARE_FOLDER_PATH}/{CONFIGS["SEARCH"]["INDEX"]["src"]}')
if src.exists():
54 changes: 28 additions & 26 deletions src/staticwordpress/core/sitemaps.py
@@ -40,15 +40,15 @@
# INTERNAL IMPORTS
# +++++++++++++++++++++++++++++++++++++++++++++++++++++

from .utils import get_clean_url, get_remote_content
from .constants import CONFIGS
from ..core.constants import CONFIGS
from ..core.utils import get_clean_url, get_remote_content, is_url_valid

# +++++++++++++++++++++++++++++++++++++++++++++++++++++
# IMPLEMENATIONS
# +++++++++++++++++++++++++++++++++++++++++++++++++++++


def find_sitemap_location(home_url: str) -> str:
def find_sitemap_location(home_url_: str) -> str:
"""Finding Sitemap Location Using Home Url

Args:
@@ -57,31 +57,33 @@ def find_sitemap_location(home_url: str) -> str:
Returns:
str: Location of Sitemap
"""
for sitemap_path in CONFIGS["SITEMAP"]["SEARCH_PATHS"]:
sitemap_url = get_clean_url(home_url, sitemap_path)
response = get_remote_content(sitemap_url)
if response.status_code < 400:
return parse.urlparse(response.url).path

# robots.txt
robots_txt = get_clean_url(home_url, "robots.txt")
response = get_remote_content(robots_txt)
if response:
for item in response.text.split("\n"):
if item.startswith("Sitemap:"):
return item.split("Sitemap:")[-1].strip()

# check home page for link rel=sitemap
response = get_remote_content(home_url)
if response:
soup = BeautifulSoup(response.text, features="xml")
for link in soup.find_all("link"):
if link.has_attr("sitemap"):
return link["href"]
if is_url_valid(home_url_):
for sitemap_path in CONFIGS["SITEMAP"]["SEARCH_PATHS"]:
sitemap_url = get_clean_url(home_url_, sitemap_path)
response = get_remote_content(sitemap_url)
if response.status_code < 400:
return parse.urlparse(response.url).path

# robots.txt
robots_txt = get_clean_url(home_url_, "robots.txt")
response = get_remote_content(robots_txt)
if response:
for item in response.text.split("\n"):
if item.startswith("Sitemap:"):
return item.split("Sitemap:")[-1].strip()

# check home page for link rel=sitemap
response = get_remote_content(home_url_)
if response:
soup = BeautifulSoup(response.text, features="xml")
for link in soup.find_all("link"):
if link.has_attr("sitemap"):
return link["href"]
return ""
return ""


def extract_sitemap_paths(sitemap_url: str) -> list:
def extract_sitemap_paths(sitemap_url_: str) -> list:
"""Extract Sub-Sitemap from Index Sitemap

Args:
@@ -91,7 +93,7 @@ def extract_sitemap_paths(sitemap_url: str) -> list:
list: List of Sub-Sitemaps
"""
sitemap_paths = []
response = get_remote_content(sitemap_url)
response = get_remote_content(sitemap_url_)
for item in response.text.split("\n"):
if ".xsl" in item:
st = item.find("//")
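
A minimal usage sketch of the refactored sitemap helpers, with return values inferred from the code above and a placeholder URL:

home_url = "https://example.com/"
sitemap_path = find_sitemap_location(home_url)  # "" for an invalid URL or when no sitemap is found
if sitemap_path:
    sub_sitemap_urls = extract_sitemap_paths(get_clean_url(home_url, sitemap_path))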