Skip to content

Commit

Permalink
**CI** Formatted code + Updated version number and documentation. [skip ci]
Browse files Browse the repository at this point in the history
  • Loading branch information
thorwhalen committed Jan 24, 2024
1 parent 0ee6951 commit 5cb9407
Show file tree
Hide file tree
Showing 3 changed files with 32 additions and 12 deletions.
2 changes: 1 addition & 1 deletion setup.cfg
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[metadata]
name = tabled
version = 0.1.4
version = 0.1.5
url = https://github.com/i2mint/tabled
platforms = any
description_file = README.md
Expand Down
2 changes: 2 additions & 0 deletions tabled/multi.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,7 @@ class Join:
class Remove:
    """Pipeline command naming the field(s) to remove from the table.

    Interpreted by a matching entry in ``dflt_tables_interpreter_map``
    (the remove handler itself is not visible in this hunk — confirm).
    """
    fields: Union[str, Iterable[str]]


@dataclass
class Rename:
    """Pipeline command: rename columns of the cumulative table.

    ``rename_mapping`` maps old column name -> new column name; applied
    by ``rename_func`` via ``DataFrame.rename(columns=...)``.
    """
    rename_mapping: Dict[str, str]
Expand Down Expand Up @@ -110,6 +111,7 @@ def rename_func(scope, command):
for old_col, new_col in command.rename_mapping.items():
scope['cumul'] = scope['cumul'].rename(columns={old_col: new_col})


dflt_tables_interpreter_map = {
Load: load_func,
Join: join_func,
Expand Down
40 changes: 29 additions & 11 deletions tabled/tests/join_tables.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,12 @@ def test_execute_commands_simply():

tables = {'table1': table1, 'table2': table2, 'table3': table3}

commands = [Load('table1'), Rename({'Name': 'First Name'}), Remove(['First Name']), Join('table3')]
commands = [
Load('table1'),
Rename({'Name': 'First Name'}),
Remove(['First Name']),
Join('table3'),
]

scope = tables
extra_scope = dict()
Expand All @@ -181,35 +186,48 @@ def are_equal(a, b):

# Test wiki table
from tabled.html import *


def test_extract_wikipedia_tables():
    """Smoke-test ``get_tables_from_url`` against a live Wikipedia page.

    NOTE(review): performs a real network request — presumably intended to
    run only in environments with network access; confirm CI gating.
    """
    # Deduplicated: the diff residue carried both the old (double-quoted) and
    # new (single-quoted) versions of these two assignments; keep one each.
    wikiurl = 'https://fr.wikipedia.org/wiki/Liste_des_communes_de_France_les_plus_peupl%C3%A9es'
    converter = url_to_html_func('requests')
    tables = get_tables_from_url(wikiurl, url_to_html=converter)
    assert tables is not None
    assert len(tables) > 0
    # Every extracted table must be a non-empty DataFrame.
    # (`enumerate` index in the original was unused — dropped.)
    for df in tables:
        assert isinstance(df, pd.DataFrame)
        assert not df.empty


@pytest.fixture
def extracted_dataframes():
    """Fetch tables from two Wikipedia airport pages (network-bound fixture).

    Returns a 2-tuple: (tables from the busiest-airports page,
    tables from the largest-airports page).
    """
    # Deduplicated: the diff residue doubled the URL/converter assignments AND
    # the two get_tables_from_url calls, so each page was fetched twice.
    converter = url_to_html_func('requests')
    url_aeroports_frequentes = 'https://fr.wikipedia.org/wiki/Liste_des_a%C3%A9roports_les_plus_fr%C3%A9quent%C3%A9s_en_France'
    url_aeroports_vastes = 'https://fr.wikipedia.org/wiki/Liste_des_a%C3%A9roports_les_plus_vastes_au_monde'

    dfs_aeroports_frequentes = get_tables_from_url(
        url_aeroports_frequentes, url_to_html=converter
    )
    dfs_aeroports_vastes = get_tables_from_url(
        url_aeroports_vastes, url_to_html=converter
    )

    return dfs_aeroports_frequentes, dfs_aeroports_vastes


def test_execute_commands_wiki(extracted_dataframes):
from tabled.multi import Join, Remove, Load, Rename

table1_wiki = extracted_dataframes[0][0]
table2_wiki = extracted_dataframes[1][0]
table2_wiki = extracted_dataframes[1][0]

tables = {'table1_wiki': table1_wiki, 'table2_wiki': table2_wiki}
commands = [Load('table2_wiki'), Remove('Aéroport'), Rename({'Code':'Code IATA'}), Join('table1_wiki')]
commands = [
Load('table2_wiki'),
Remove('Aéroport'),
Rename({'Code': 'Code IATA'}),
Join('table1_wiki'),
]

scope = tables
extra_scope = dict()
Expand All @@ -218,4 +236,4 @@ def test_execute_commands_wiki(extracted_dataframes):
next(it)
next(it)
next(it)
assert(extra_scope['cumul'].shape[0] == 1)
assert extra_scope['cumul'].shape[0] == 1

0 comments on commit 5cb9407

Please sign in to comment.