diff --git a/.gitignore b/.gitignore index d465d2d..e9d8e44 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,4 @@ htmlcov/ .idea *.mo *.pot +.tox diff --git a/Pipfile b/Pipfile index 6f823ce..295e827 100644 --- a/Pipfile +++ b/Pipfile @@ -7,7 +7,7 @@ name = "pypi" [packages] -gitpython = "*" +GitPython = "*" stdlib-list = "*" mccabe = "*" "pep8" = "*" @@ -18,5 +18,5 @@ mccabe = "*" pytest = "*" pytest-cov = "*" codecov = "*" -babel = "*" +Babel = "*" tox = "*" diff --git a/Pipfile.lock b/Pipfile.lock index 500eeea..e5f01a6 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "b8a04e5ba30e8fe82687d28c46b4e333057c85feb9fe531000e80675546bb2c6" + "sha256": "1be7ceaeda387a78eef0e3b43b1ec7069a02e73a097dea26dd146f7cdc4d9fe4" }, "host-environment-markers": { "implementation_name": "cpython", @@ -9,9 +9,9 @@ "os_name": "posix", "platform_machine": "x86_64", "platform_python_implementation": "CPython", - "platform_release": "17.4.0", + "platform_release": "17.5.0", "platform_system": "Darwin", - "platform_version": "Darwin Kernel Version 17.4.0: Sun Dec 17 09:19:54 PST 2017; root:xnu-4570.41.2~1/RELEASE_X86_64", + "platform_version": "Darwin Kernel Version 17.5.0: Mon Mar 5 22:24:32 PST 2018; root:xnu-4570.51.1~1/RELEASE_X86_64", "python_full_version": "3.6.1", "python_version": "3.6", "sys_platform": "darwin" @@ -164,6 +164,8 @@ }, "pluggy": { "hashes": [ + "sha256:d345c8fe681115900d6da8d048ba67c25df42973bda370783cd58826442dcd7c", + "sha256:e160a7fcf25762bb60efc7e171d4497ff1d8d2d75a3d0df7a21b76821ecbf5c5", "sha256:7f8ae7f5bdf75671a718d2daf0a64b7885f74510bcd98b1a0bb420eb9a9d0cff" ], "version": "==0.6.0" @@ -191,17 +193,10 @@ }, "pytz": { "hashes": [ - "sha256:ed6509d9af298b7995d69a440e2822288f2eca1681b8cce37673dbb10091e5fe", - "sha256:f93ddcdd6342f94cea379c73cddb5724e0d6d0a1c91c9bdef364dc0368ba4fda", - "sha256:61242a9abc626379574a166dc0e96a66cd7c3b27fc10868003fa210be4bff1c9", - 
"sha256:ba18e6a243b3625513d85239b3e49055a2f0318466e0b8a92b8fb8ca7ccdf55f", - "sha256:07edfc3d4d2705a20a6e99d97f0c4b61c800b8232dc1c04d87e8554f130148dd", - "sha256:3a47ff71597f821cd84a162e71593004286e5be07a340fd462f0d33a760782b5", - "sha256:5bd55c744e6feaa4d599a6cbd8228b4f8f9ba96de2c38d56f08e534b3c9edf0d", - "sha256:887ab5e5b32e4d0c86efddd3d055c1f363cbaa583beb8da5e22d2fa2f64d51ef", - "sha256:410bcd1d6409026fbaa65d9ed33bf6dd8b1e94a499e32168acfc7b332e4095c0" - ], - "version": "==2018.3" + "sha256:65ae0c8101309c45772196b21b74c46b2e5d11b6275c45d251b150d5da334555", + "sha256:c06425302f2cf668f1bba7a0a03f3c1d34d4ebeef2c72003da308b3947c7f749" + ], + "version": "==2018.4" }, "requests": { "hashes": [ diff --git a/README.md b/README.md deleted file mode 100644 index 7a4684a..0000000 --- a/README.md +++ /dev/null @@ -1,76 +0,0 @@ -# Fiasko Bro - -> When flake8 is not enought. - -[![Build Status](https://travis-ci.org/devmanorg/fiasko_bro.svg?branch=master)](https://travis-ci.org/devmanorg/fiasko_bro) -[![codecov](https://codecov.io/gh/devmanorg/fiasko_bro/branch/master/graph/badge.svg)](https://codecov.io/gh/devmanorg/fiasko_bro) -[![Documentation Status](https://readthedocs.org/projects/fiasko-bro/badge/?version=latest)](http://fiasko-bro.readthedocs.io/en/latest/?badge=latest) -[![Maintainability](https://api.codeclimate.com/v1/badges/4f26aec50f07294b37e3/maintainability)](https://codeclimate.com/github/devmanorg/fiasko_bro/maintainability) -[![PyPI version](https://badge.fury.io/py/Fiasko-Bro.svg)](https://badge.fury.io/py/Fiasko-Bro) - -Fiasko is a static analysis tool for python code that catches common style errors. 
- -![](http://melevir.com/static/fiasko.jpg) - -### Example - -From command line: -```bash -$ LANGUAGE=en fiasko -p ~/projects/fiasko_bro/ --skip_check_repo_size -data_in_repo -pep8 28 PEP8 violations -mccabe_failure has_changed_readme,has_no_libs_from_stdlib_in_requirements -has_star_import -has_local_import -bad_titles value, name -bad_titles n, l, t, i -compare_response_status_to_200 -return_with_parenthesis for example, the line number 16 -file_too_long ast_helpers.py -too_nested duplicates_test.py:83 -================================================== -Total 11 violations -``` -See `fiasko --help` for more CLI arguments. - -From python code: -```python ->>> import fiasko_bro ->>> fiasko_bro.validate_repo('/path/to/repo/') -[('camel_case_vars', 'for example, rename the following: WorkBook'), ('file_too_long', 'coursera.py')] -``` -The `validate_repo` method returns list of tuples which consist of an error slug and an error message. - - -### Installation - -With pip: -```bash -pip install git+https://github.com/devmanorg/fiasko_bro.git -``` - -Or just clone the project and install the requirements: -```bash -$ git clone https://github.com/devmanorg/fiasko_bro.git -$ cd fiasko_bro -$ pip install -r requirements.txt -``` - -### Docs -[fiasko-bro.readthedocs.io](http://fiasko-bro.readthedocs.io/) - - -### Contributing - -To contribute, [pick an issue](https://github.com/devmanorg/fiasko_bro/issues) to work on and leave a comment saying -that you've taken the issue. Don't forget to mention when you want to submit the pull request. - -You can read more about contribution guidelines [in the docs](http://fiasko-bro.readthedocs.io/en/latest/contributing.html) - - -### Launch tests -`python -m pytest` - - -### Versioning -We follow [semantic versioning](https://github.com/dbrock/semver-howto/blob/master/README.md). 
diff --git a/README.rst b/README.rst new file mode 100644 index 0000000..e82acea --- /dev/null +++ b/README.rst @@ -0,0 +1,114 @@ +Fiasko Bro +========== + + When flake8 is not enough. + +.. image:: https://travis-ci.org/devmanorg/fiasko_bro.svg?branch=master + :target: https://travis-ci.org/devmanorg/fiasko_bro + :alt: Build Status + +.. image:: https://codecov.io/gh/devmanorg/fiasko_bro/branch/master/graph/badge.svg + :target: https://codecov.io/gh/devmanorg/fiasko_bro + :alt: codecov + +.. image:: https://readthedocs.org/projects/fiasko-bro/badge/?version=latest + :target: http://fiasko-bro.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + +.. image:: https://api.codeclimate.com/v1/badges/4f26aec50f07294b37e3/maintainability + :target: https://codeclimate.com/github/devmanorg/fiasko_bro/maintainability + :alt: Maintainability + +.. image:: https://badge.fury.io/py/Fiasko-Bro.svg + :target: https://badge.fury.io/py/Fiasko-Bro + :alt: PyPI version + +Fiasko is a static analysis tool for Python code that catches common style errors. + +.. image:: http://melevir.com/static/fiasko.jpg + +Example +~~~~~~~ + +From command line: + +.. code-block:: bash + + $ fiasko -p ~/projects/fiasko_bro + git_history_warning add files via upload + pep8 33 PEP8 violations + mccabe_failure has_changed_readme + has_star_import __init__.py + has_local_import setup.py + bad_titles name, n + bad_titles i, r, n, t, l + file_too_long ast_helpers.py + too_nested code_validator.py:54 + indent_not_four_spaces ast_helpers.py:130 + title_shadows slice + ================================================== + Total 11 violations + +See ``fiasko --help`` for more CLI arguments. + +From Python code: + +.. 
code-block:: python + + >>> from fiasko_bro import validate + >>> validate('/user/projects/fiasko_bro/') + [('git_history_warning', 'add files via upload'), ('pep8', '33 PEP8 violations'), ('mccabe_failure', 'has_changed_readme'), ('has_star_import', '__init__.py'), ('has_local_import', 'setup.py'), ('bad_titles', 'name, n'), ('bad_titles', 'n, r, l, t, i'), ('file_too_long', 'ast_helpers.py'), ('too_nested', 'code_validator.py:54'), ('indent_not_four_spaces', 'ast_helpers.py:130'), ('title_shadows', '_, slice')] + +The ``validate`` method returns list of tuples which consist of an error slug and an error message. + +Fiasko has a flexible Python API which you can read more about `in the docs `_. + +Installation +~~~~~~~~~~~~ + +With pip: + +.. code-block:: bash + + pip install fiasko_bro + +With Pipenv: + +.. code-block:: bash + + pipenv install fiasko_bro + +Or just clone the project and install the requirements: + +.. code-block:: bash + + $ git clone https://github.com/devmanorg/fiasko_bro.git + $ cd fiasko_bro + $ pip install -r requirements.txt + +Docs +~~~~ + +`fiasko-bro.readthedocs.io `_ + + +Contributing +~~~~~~~~~~~~ + +To contribute, `pick an issue `_ to work on and leave a comment saying +that you've taken the issue. Don't forget to mention when you want to submit the pull request. + +You can read more about contribution guidelines in `the docs `_ + +If your suggestion (or bug report) is new, be sure to `create an issue `_ first. + +Launch tests +~~~~~~~~~~~~ + +``python -m pytest`` + + +Versioning +~~~~~~~~~~ + +We follow `semantic versioning `_. 
diff --git a/bin/fiasko.py b/bin/fiasko.py index 3da7baf..3ec2120 100644 --- a/bin/fiasko.py +++ b/bin/fiasko.py @@ -1,7 +1,7 @@ import os import argparse -import fiasko_bro +from fiasko_bro import validate from fiasko_bro.configparser_helpers import extract_fiasko_config_from_cfg_file @@ -16,8 +16,7 @@ def main(): args = parse_args() config_path = args.config_path or os.path.join(args.path, 'setup.cfg') updated_config = extract_fiasko_config_from_cfg_file(config_path) - fiasko_bro.config.VALIDATOR_SETTINGS.update(updated_config) - violations = fiasko_bro.validate_repo(args.path) + violations = validate(args.path, **updated_config) for violation_slug, violation_message in violations: print('%-40s\t%s' % (violation_slug, violation_message)) print('=' * 50) diff --git a/docs/source/add_validators.rst b/docs/source/add_validators.rst new file mode 100644 index 0000000..39c191c --- /dev/null +++ b/docs/source/add_validators.rst @@ -0,0 +1,249 @@ +Write you own validators +======================== + +Let's pretend we only want to check if the folder contains any Python files with a syntax error. +All the code you need to write in order to implement the behavior is these 12 lines: + +.. code-block:: python + + from fiasko_bro import validate + + + def has_no_syntax_errors(project_folder, *args, **kwargs): + for parsed_file in project_folder.get_parsed_py_files(): + if not parsed_file.is_syntax_correct: + return 'syntax_error', parsed_file.name + + + validator_groups = { + 'general': [has_no_syntax_errors] + } + print(validate('/Users/project', error_validator_groups=validator_groups)) + +For the rest of the document we will discuss the things in this example. + +Validator arguments +^^^^^^^^^^^^^^^^^^^ + +A validator receives three kinds of arguments: + - ``ProjectFolder`` objects + - validation parameters + - ``whitelists`` and ``blacklists`` dictionaries (`this is going to change soon `_). 
+ +ProjectFolder +~~~~~~~~~~~~~ + +``ProjectFolder`` objects contain all the information about the project: + - Its Git repository. It's stored in ``repo`` attribute, which is either a ``LocalRepository`` object (if the repository is actually present) or ``None``. + - All of the Python files. They can be accessed through ``get_parsed_py_files`` method. It returns ``ParsedPyFile`` objects which store store path, name, contents and an ast tree for the associated files. + +``ProjectFolder`` class also allows the access to non-Python project files. + +The only argument that's guaranteed to be ``ProjectFolder`` is ``project_folder``. +If ``original_project_folder`` is not ``None``, it's a ``ProjectFolder`` object too. + +To illustrate the usage of ``original_project_folder``, let's consider a validator that naively counts commits to see if any new code was committed: + +.. code-block:: python + + def has_more_commits_than_origin(project_folder, original_project_folder=None, *args, **kwargs): + if not original_project_folder: + return + if not project_folder.repo or not original_project_folder.repo: + return + if project_folder.repo.count_commits() <= original_project_folder.repo.count_commits(): + return 'no_new_code', None + +Notice we made our validator succeed in case there's no ``original_project_folder`` or no repositories are attached to the folders. +We consider it a sensible solution for our case, but you can choose any other behavior. + + +Validation parameters +~~~~~~~~~~~~~~~~~~~~~ + +Validation parameters are simply keyword arguments passed to ``validate`` method. Let's parameterize our syntax validator so +that it could tolerate some number of files with a syntax error: + +.. 
code-block:: python + + from fiasko_bro import validate + + + def has_almost_no_syntax_errors(project_folder, max_syntax_error_files_amount, *args, **kwargs): + syntax_error_files_amount = 0 + for parsed_file in project_folder.get_parsed_py_files(): + if not parsed_file.is_syntax_correct: + syntax_error_files_amount += 1 + if syntax_error_files_amount > max_syntax_error_files_amount: + return 'too_many_syntax_errors', syntax_error_files_amount + + + validator_groups = { + 'general': [has_almost_no_syntax_errors] + } + print(validate('/Users/project', max_syntax_error_files_amount=2, error_validator_groups=validator_groups)) + +Ignored paths +~~~~~~~~~~~~~~~~~~~ + +Suppose we want to ignore some of the files and directories while we validating for syntax errors. +This is how it can be done: + +.. code-block:: python + + from fiasko_bro import validate + + + def has_almost_no_syntax_errors(project_folder, syntax_files_to_ignore, *args, **kwargs): + for parsed_file in project_folder.get_parsed_py_files(whitelist=syntax_files_to_ignore): + if not parsed_file.is_syntax_correct: + return 'syntax_error', parsed_file.name + + + validator_groups = { + 'general': [has_almost_no_syntax_errors] + } + ignore_list = ['trash.py', 'garbage.py'] + print(validate('/Users/project', syntax_files_to_ignore=ignore_list, error_validator_groups=validator_groups)) + +Now, if ``trash.py`` is a part of a file's path, the file is not going to be returned by ``get_parsed_py_files`` method. + +Validator return values +^^^^^^^^^^^^^^^^^^^^^^^ + +A validator is expected to return either ``None`` (if the validation was successful) or a tuple. + +The tuple has to consist of an error slug (which is used as an error identifier) and some info that will clarify the error. +In the examples above we either return a file name with a syntax error or the number of syntax errors if it's more relevant. +In case there's no helpful information to return, just return ``error_slug, None``. 
+ +Conditionally execute a validator +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +If you want the validator to be executed only for certain types of repositories, add ``tokenized_validator`` to it:: + + from fiasko_bro import tokenized_validator + + @tokenized_validator(token='min_max_challenge') + def has_min_max_functions(solution_repo, *args, **kwargs): + for parsed_file in project_folder.get_parsed_py_files(): + names = get_all_names_from_tree(parsed_file.ast_tree) + if 'min' in names and 'max' in names: + return + return 'builtins', 'no min or max is used' + +then add the validator to the appropriate group + + code_validator.error_validator_groups['general'].append(has_min_max_functions) + +and when calling ``validate`` for certain folder, pass the token: + + code_validator.validate(project_folder, validator_token='min_max_challenge') + +The validator won't be executed for any folder without ``validator_token='min_max_challenge'``. + +Adding your validators to the default ones +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +A quick example +~~~~~~~~~~~~~~~ + +Consider the example: + +.. code-block:: python + + from fiasko_bro import validate, defaults + + + def my_fancy_validator(project_folder, *args, **kwargs): + pass + + + validator_groups = defaults.ERROR_VALIDATOR_GROUPS.copy() + validator_groups['general'] += (my_fancy_validator,) + print( + validate( + '/Users/project', + error_validator_groups=validator_groups, + warning_validators_groups=defaults.WARNING_VALIDATOR_GROUPS + ) + ) + +As you can see, we simply copy the default validators structure, modify it to suit our needs and pass to the ``validate`` method. + +The minor issue is that since we pass our own error validators, the default warning validators have to be restored by hand. +We did so by passing them as an argument too. + +The intricacies +~~~~~~~~~~~~~~~ + +The are two kinds of validators: error validators and warning validators. 
+The difference between them is that warning validators don't halt the validation process, while the error validators do. +The error validators are expected to be grouped according to their purpose, like so:: + + ERROR_VALIDATOR_GROUPS = OrderedDict( + [ + ( + 'commits', + [validators.has_more_commits_than_origin], + ), + ( + 'syntax', + [validators.has_no_syntax_errors], + ), + ... + ( + 'general', + [ + validators.is_pep8_fine, + ... + ], + ), + ] + ) + +Here, for example, you have the group ``general`` that consists of a list of validators. We used ``OrderedDict`` +because the order in which the validator groups run matters. + +In each group, every single validator is executed. +If one of the validators in the group fails, the ``validate`` method executes the rest of the group and then +returns the error list without proceeding to the next group. +If all the validators in the error group succeed, the warning validators for this group are executed. +Here's the structure of the warnings validators:: + + WARNING_VALIDATOR_GROUPS = { + 'commits': [ + validators.has_no_commit_messages_from_blacklist, + ], + 'syntax': [ + validators.has_indents_of_spaces, + validators.has_no_variables_that_shadow_default_names, + ] + } + +The ``commits`` warning validator group is executed only if the ``commits`` error validator group passes successfully. + +Warning validators are not executed if none of the error validators are failed. +They just add more error messages in case the validation fails. + +Adding pre-validation checks +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Pre-validator checks have the same structure as ``error_validator_groups`` and their usage is the same too: + +.. 
code-block:: python + + from fiasko_bro import validate + + + def my_pre_validation_check(project_path, *args, **kwargs): + pass + + + pre_validation_checks = { + 'general': [my_pre_validation_check] + } + print(validate('/Users/project', pre_validation_checks=pre_validation_checks)) + +Note that the pre-valdation check receives ``project_path`` (a string), not ``project_folder`` (a ``ProjectFolder`` object) +because the the whole point of the check is to ensure it's OK to parse the files into ASTs. diff --git a/docs/source/advanced_usage.rst b/docs/source/advanced_usage.rst index 6a95a30..bafd78b 100644 --- a/docs/source/advanced_usage.rst +++ b/docs/source/advanced_usage.rst @@ -1,157 +1,78 @@ Advanced usage ============== -Write you own validators ------------------------- - -How validators work -^^^^^^^^^^^^^^^^^^^ - -Of course, the standard suit of validators can be modified in a way that best suits your needs. - -The are two kinds of validators: error validators and warning validators. -The difference between them is that warning validators don't halt the validation process, while the error validators do. -Error validators are grouped according to their purpose, like `in this code `_ :: - - error_validator_groups = OrderedDict( - [ - ( - 'commits', - [validators.has_more_commits_than_origin], - ), - ( - 'readme', - [validators.has_readme_file], - ), - ... - ] - ) - -Here, for example, you have the group ``commits`` that consists of the only ``has_more_commits_than_origin`` validator. - -In each group, every validator is executed. -If some of the validators in the group fail, the ``validate`` method returns the error list without proceeding to the next group. -If all the validators in the error group succeed, the warning validators for this group are executed. 
-They are stored in ``warning_validator_groups``:: - - warning_validator_groups = { - 'commits': [ - validators.has_no_commit_messages_from_blacklist, - ], - 'syntax': [ - validators.has_indents_of_spaces, - validators.has_no_variables_that_shadow_default_names, - ] - } - -The ``commits`` warning validator group is executed only if the ``commits`` error validator group passes successfully. - -Warning validators just add some more errors in case the validation failed. -They are not executed if none of the error validators failed. - -Add a simple validator -^^^^^^^^^^^^^^^^^^^^^^ - -A simple validator is a validator that only takes the repository (more precisely, ``LocalRepositoryInfo`` object) to validate. It returns ``None`` is case of success -and a tuple of an error slug and an error message in case of a problem. Here's an example of existing validator:: - - def has_no_syntax_errors(solution_repo, *args, **kwargs): - for filename, tree in solution_repo.get_ast_trees(with_filenames=True): - if tree is None: - return 'syntax_error', 'в %s' % filename +Validation parameters +^^^^^^^^^^^^^^^^^^^^^ -Note the ``*args, **kwargs`` part. The validator actually gets a lot of arguments, but doesn't care about them. +The default validation parameters can be found in ``defaults.VALIDATION_PARAMETERS`` dictionary. -Now you can add validator to one of the existing validator groups or create your own: +The correct way to use the dictionary is to treat it as a read-only object. +If you want to override the default values, just pass the parameters to ``validate`` function directly: - code_validator.error_validator_groups['general'].append(has_no_syntax_errors) +.. 
code-block:: python -Compare against some "original" repo -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + >>> from fiasko_bro import validate, defaults + >>> default_directories_to_skip = defaults.VALIDATION_PARAMETERS['directories_to_skip'] + >>> directories_to_skip = default_directories_to_skip.union({'test_fixtures', '.pytest_cache', 'venv'}) + >>> validate('/user/projects/fiasko_bro/', directories_to_skip=directories_to_skip) -If you want your validator to compare against some other repository, add the ``original_repo`` argument. -:: +The names of the parameters tend to be self-explanatory. +They also have sensible defaults so you didn't have to worry about them until absolutely have to. +The list of validation parameters may change as you add your own validators. - def has_more_commits_than_origin(solution_repo, original_repo=None, *args, **kwargs): - if not original_repo: - return - if solution_repo.count_commits() <= original_repo.count_commits(): - return 'no_new_code', None - - -Notice we made our validator succeed in case there's no ``original_repo``. -We consider it a sensible solution for our case, but you can choose any other behavior. +Command line interface +^^^^^^^^^^^^^^^^^^^^^^ -Parameterize your validator -^^^^^^^^^^^^^^^^^^^^^^^^^^^ +When you run -To add a parameter to your validator, just add it to the arguments. -:: +.. code-block:: bash - def has_no_long_files(solution_repo, max_number_of_lines, *args, **kwargs): - for file_path, file_content, _ in solution_repo.get_ast_trees(with_filenames=True, with_file_content=True): - number_of_lines = file_content.count('\n') - if number_of_lines > max_number_of_lines: - file_name = url_helpers.get_filename_from_path(file_path) - return 'file_too_long', file_name + $ fiasko -and then don't forget to pass it: +Fiasko starts to validate the current directory, taking its validation parameters from ``fiasko_bro`` section +of the local ``setup.cfg`` if it's present. 
- code_validator.validate(repo, max_number_of_lines=200) +The project path and config file location can be modified: -Of course, built-in validators have their own defaults in `_default_settings` property of `CodeValidator` class. +.. code-block:: bash -Conditionally execute a validator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + $ fiasko -p ~/projects/fiasko_bro --config ~/projects/fiasko_bro/setup.cfg -If you want the validator to be executed only for certain types of repositories, add ``tokenized_validator`` to it:: +Right now, the CLI is not as flexible as the Python interface: it lets you use the default validators only +and doesn't let you modify their whitelists and blacklists. - from fiasko_bro import tokenized_validator +The config file +^^^^^^^^^^^^^^^ - @tokenized_validator(token='min_max_challenge') - def has_min_max_functions(solution_repo, *args, **kwargs): - for tree in solution_repo.get_ast_trees(): - names = get_all_names_from_tree(tree) - if 'min' in names and 'max' in names: - return - return 'builtins', 'no min or max is used' +The config file allows you to override validation parameters. -then add the validator to the appropriate group +Here's a part of Fiasko's own ``setup.cfg`` file:: - code_validator.error_validator_groups['general'].append(has_min_max_functions) + [fiasko_bro] + directories_to_skip=build,dist,test_fixtures,.pytest_cache -and when calling ``validate`` for certain repo, pass the token: +(the lack of the whitespace between the directories here `is important `_ for now) - code_validator.validate(solution_repo=solution_repo, validator_token='min_max_challenge') +Python API doesn't take into consideration the ``setup.cfg`` parameters. +This is a `subject to discussion `_. -The validator won't be executed for any other repository. 
+"Original" repo +^^^^^^^^^^^^^^^ -Blacklist/whitelists for validators -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +If you want to validate how the project deviated from some "original" repository you can do so +by passing ``original_project_path`` argument: -For every rule there's an exception. Exceptions are easy to take into account using blacklists or whitelists. + >>> from fiasko_bro import validate + >>> code_validator.validate(project_path='/path/to/folder/', original_project_path='/path/to/different/folder/') + [('need_readme', None)] -First, add the blacklist and whitelist to the ``code_validator`` instance:: +In this example, the original readme was not modified, even though we expected it to. - code_validator.whitelists['has_no_calls_with_constants'] = ['pow', 'exit'] +Pre-validation checks +^^^^^^^^^^^^^^^^^^^^^ -Then create and add the validator with the same name as the dictionary key:: +Pre-validation checks are here to ensure it's safe to parse the files in the folder into ASTs. For example, they check +file encodings and and the size of the folder under validation so that other validators did not error out. +From the client's perspective, they work exactly like validators. 
- def has_no_calls_with_constants(solution_repo, whitelists, *args, **kwargs): - whitelist = whitelists.get('has_no_calls_with_constants', []) - for filepath, tree in solution_repo.get_ast_trees(with_filenames=True): - if 'tests' in filepath: # tests can have constants in asserts - continue - calls = [n for n in ast.walk(tree) if isinstance(n, ast.Call)] - for call in calls: - if isinstance(ast_helpers.get_closest_definition(call), ast.ClassDef): # for case of id = db.String(256) - continue - attr_to_get_name = 'id' if hasattr(call.func, 'id') else 'attr' - function_name = getattr(call.func, attr_to_get_name, None) - if not function_name or function_name in whitelist: - continue - for arg in call.args: - if isinstance(arg, ast.Num): - return 'magic_numbers', 'например, %s' % arg.n -Notice in the first line we pull the whitelist from the dictionary and incorporate it in our validation logic. diff --git a/docs/source/index.rst b/docs/source/index.rst index 7b47caa..d9a3ced 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -10,49 +10,86 @@ Fiasko Bro .. image:: https://readthedocs.org/projects/fiasko-bro/badge/?version=latest :target: http://fiasko-bro.readthedocs.io/en/latest/?badge=latest -Fiasko Bro enables you to automatically review Python code in a git repo. +Fiasko is a static analysis tool for Python code that catches common style errors. +It enables you to comprehensively analyze Python projects by looking not only at the Python code, +but also commit messages, file encodings, non-Python files, etc. Installation -============ +~~~~~~~~~~~~ -With pip:: +With pip: - pip install git+https://github.com/devmanorg/fiasko_bro.git +.. code-block:: bash + pip install fiasko_bro -Or just clone the project and install the requirements:: +With Pipenv: + +.. code-block:: bash + + pipenv install fiasko_bro + +Or just clone the project and install the requirements: + +.. 
code-block:: bash $ git clone https://github.com/devmanorg/fiasko_bro.git $ cd fiasko_bro $ pip install -r requirements.txt - Usage -===== +~~~~~ + +Fiasko was conceived as a tool used through Python interface. Here's the simplest usage example: -Here's the simplest usage example:: +.. code-block:: python - >>> import fiasko_bro - >>> fiasko_bro.validate_repo('/path/to/repo/') - [('camel_case_vars', 'for example, rename the following: WorkBook'), ('file_too_long', 'source_file.py')] + >>> from fiasko_bro import validate + >>> validate('/user/projects/fiasko_bro/') + [('git_history_warning', 'add files via upload'), ('pep8', '33 PEP8 violations'), ('mccabe_failure', 'has_changed_readme'), ('has_star_import', '__init__.py'), ('has_local_import', 'setup.py'), ('bad_titles', 'name, n'), ('bad_titles', 'n, r, l, t, i'), ('file_too_long', 'ast_helpers.py'), ('too_nested', 'code_validator.py:54'), ('indent_not_four_spaces', 'ast_helpers.py:130'), ('title_shadows', '_, slice')] -Launch tests -============ +Then CLI was added: + +.. code-block:: bash + + $ fiasko -p ~/projects/fiasko_bro + git_history_warning add files via upload + pep8 33 PEP8 violations + mccabe_failure has_changed_readme + has_star_import __init__.py + has_local_import setup.py + bad_titles name, n + bad_titles i, r, n, t, l + file_too_long ast_helpers.py + too_nested code_validator.py:54 + indent_not_four_spaces ast_helpers.py:130 + title_shadows slice + ================================================== + Total 11 violations + +In this example, the folder ``~/projects/fiasko_bro`` contains a git repository which allowed Fiasko to find +a questionable commit message "add files via upload". + +Tests +~~~~~ ``python -m pytest`` +Versioning +~~~~~~~~~~ +We follow `semantic versioning `_. -Whats next -========== +What's next +~~~~~~~~~~~ .. 
toctree:: :maxdepth: 2 - usage advanced_usage + add_validators validators_info contributing roadmap diff --git a/docs/source/internationalization.rst b/docs/source/internationalization.rst index 86ef98e..eac226b 100644 --- a/docs/source/internationalization.rst +++ b/docs/source/internationalization.rst @@ -10,14 +10,14 @@ The choice of the language depends on environment variables ``LANGUAGE``, ``LC_A For example:: $ python - >>> from fiasko_bro import validate_repo - >>> validate_repo('../10_coursera_temp') + >>> from fiasko_bro import validate + >>> validate('../10_coursera_temp') [('camel_case_vars', 'for example, rename the following: WorkBook'), ('file_too_long', 'coursera.py')] >>> $ export LANGUAGE=ru $ python - >>> from fiasko_bro import validate_repo - >>> validate_repo('../10_coursera_temp') + >>> from fiasko_bro import validate + >>> validate('../10_coursera_temp') [('camel_case_vars', 'переименуй, например, WorkBook'), ('file_too_long', 'coursera.py')] >>> @@ -54,14 +54,14 @@ each of the steps: Now change the locale make sure Fiasko produces the right output:: $ python - >>> from fiasko_bro import validate_repo - >>> validate_repo('../10_coursera_temp') + >>> from fiasko_bro import validate + >>> validate('../10_coursera_temp') [('camel_case_vars', 'for example, rename the following: WorkBook'), ('file_too_long', 'coursera.py')] >>> $ export LANGUAGE= $ python - >>> from fiasko_bro import validate_repo - >>> validate_repo('../10_coursera_temp') + >>> from fiasko_bro import validate + >>> validate('../10_coursera_temp') [('camel_case_vars', 'переименуй, например, WorkBook'), ('file_too_long', 'coursera.py')] >>> diff --git a/docs/source/usage.rst b/docs/source/usage.rst deleted file mode 100644 index e5fe2d4..0000000 --- a/docs/source/usage.rst +++ /dev/null @@ -1,20 +0,0 @@ -How to use Fiasko -================= - - -Here's the simplest usage example: - - >>> import fiasko_bro - >>> fiasko_bro.validate_repo('/path/to/repo/') - [('camel_case_vars', 
'переименуй, например, WorkBook.')] - -The ``validate`` method returns list of tuples which consist of an error slug and an error message. - -You might also want to compare it against some "original" repo: - - >>> from fiasko_bro import CodeValidator - >>> code_validator = CodeValidator() - >>> code_validator.validate(solution_repo='/path/to/repo/', original_repo='/path/to/different/repo/') - [('no_new_code', None)] - -In this example, no new code was added to the original repo, so the validation has stopped. diff --git a/fiasko_bro/__init__.py b/fiasko_bro/__init__.py index 2bd05ef..549a66d 100644 --- a/fiasko_bro/__init__.py +++ b/fiasko_bro/__init__.py @@ -1,3 +1,3 @@ -from .code_validator import CodeValidator, validate_repo +from .code_validator import validate from .validator_helpers import tokenized_validator -from .repository_info import LocalRepositoryInfo +from .repository_info import ProjectFolder diff --git a/fiasko_bro/code_helpers.py b/fiasko_bro/code_helpers.py index ea39c1a..33b1dad 100644 --- a/fiasko_bro/code_helpers.py +++ b/fiasko_bro/code_helpers.py @@ -11,7 +11,7 @@ def count_pep8_violations(repository_info, max_line_length=79, path_whitelist=No paths=['--max-line-length', str(max_line_length)], quiet=True ) - python_file_paths = repository_info.get_python_file_filenames() + python_file_paths = [parsed_file.path for parsed_file in repository_info.get_parsed_py_files()] validatable_paths = [] for python_file_path in python_file_paths: for whitelisted_path_part in path_whitelist: @@ -44,8 +44,8 @@ def count_indentation_spaces(line, tab_size=4): return len(line) - len(expanded_line.lstrip()) -def is_repo_too_large(path_to_repo, max_py_files_count): - num_of_py_files = count_py_files(path_to_repo) +def is_repo_too_large(path_to_repo, directories_to_skip, max_py_files_count): + num_of_py_files = count_py_files(path_to_repo, directories_to_skip) if num_of_py_files > max_py_files_count: return True return False diff --git 
a/fiasko_bro/code_validator.py b/fiasko_bro/code_validator.py index 10ffa3c..a288b4b 100644 --- a/fiasko_bro/code_validator.py +++ b/fiasko_bro/code_validator.py @@ -1,161 +1,86 @@ -from collections import OrderedDict -import logging - -from . import validators -from . import pre_validation_checks -from .repository_info import LocalRepositoryInfo -from . import config - - -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) - - -def validate_repo(path_to_repo, path_to_original_repo=None, **kwargs): - code_validator = CodeValidator() - return code_validator.validate(path_to_repo, path_to_original_repo, **kwargs) - - -class CodeValidator: - blacklists = config.DEFAULT_BLACKLISTS - - whitelists = config.DEFAULT_WHITELISTS - - _default_settings = config.VALIDATOR_SETTINGS - - pre_validation_checks = { - 'encoding': [ - pre_validation_checks.are_sources_in_utf - ], - 'size': [ - pre_validation_checks.are_repos_too_large - ], - 'bom': [ - pre_validation_checks.has_no_bom - ] +from . 
import defaults +from .repository_info import ProjectFolder + + +def _is_successful_validation(validation_result): + return not isinstance(validation_result, tuple) + + +def _run_validator_group(group, arguments): + errors = [] + for validator in group: + validation_result = validator(**arguments) + if not _is_successful_validation(validation_result): + errors.append(validation_result) + return errors + + +def _run_validators_with_group_names(validator_groups, group_names, validator_arguments): + errors = [] + for group_name in group_names: + errors += _run_validator_group( + validator_groups.get(group_name, []), + validator_arguments + ) + return errors + + +def run_validator_group(validator_group, validator_arguments, post_error_validator_group=None): + successful_group_names = [] + for group_name, group in validator_group.items(): + errors = _run_validator_group(group, validator_arguments) + if errors: + if post_error_validator_group: + errors += _run_validators_with_group_names( + post_error_validator_group, + group_names=successful_group_names, + validator_arguments=validator_arguments + ) + return errors + successful_group_names.append(group_name) + return [] + + +def _construct_validator_arguments(project_path, **kwargs): + validator_arguments = { + 'project_path': project_path, } - - error_validator_groups = OrderedDict( - [ - ( - 'commits', - [validators.has_more_commits_than_origin], - ), - ( - 'readme', - [validators.has_readme_file], - ), - ( - 'syntax', - [validators.has_no_syntax_errors], - ), - ( - 'general', - [ - validators.has_no_directories_from_blacklist, - validators.is_pep8_fine, - validators.has_changed_readme, - validators.is_snake_case, - validators.is_mccabe_difficulty_ok, - validators.has_no_encoding_declaration, - validators.has_no_star_imports, - validators.has_no_local_imports, - validators.has_local_var_named_as_global, - validators.has_variables_from_blacklist, - validators.has_no_short_variable_names, - 
validators.has_no_range_from_zero, - validators.are_tabs_used_for_indentation, - validators.has_no_try_without_exception, - validators.has_frozen_requirements, - validators.has_no_vars_with_lambda, - validators.has_no_calls_with_constants, - validators.has_readme_in_single_language, - validators.has_no_urls_with_hardcoded_arguments, - validators.has_no_nonpythonic_empty_list_validations, - validators.has_no_extra_dockstrings, - validators.has_no_exit_calls_in_functions, - validators.has_no_libs_from_stdlib_in_requirements, - validators.has_no_lines_ends_with_semicolon, - validators.not_validates_response_status_by_comparing_to_200, - validators.has_no_mutable_default_arguments, - validators.has_no_slices_starts_from_zero, - validators.has_no_cast_input_result_to_str, - validators.has_no_return_with_parenthesis, - validators.has_no_long_files, - validators.is_nesting_too_deep, - validators.has_no_string_literal_sums, - ], - ), - ] + validator_arguments.update(defaults.VALIDATION_PARAMETERS) + validator_arguments.update(kwargs) + return validator_arguments + + +def validate(project_path, original_project_path=None, **kwargs): + pre_validation_checks = kwargs.pop('pre_validation_checks', None) or defaults.PRE_VALIDATION_CHECKS + error_validator_groups = kwargs.pop('error_validator_groups', None) + warning_validator_groups = kwargs.pop('warning_validator_groups', None) + if not error_validator_groups: + error_validator_groups = defaults.ERROR_VALIDATOR_GROUPS + # use default warning groups only with default error groups + if not warning_validator_groups: + warning_validator_groups = defaults.WARNING_VALIDATOR_GROUPS + validator_arguments = _construct_validator_arguments( + project_path, + original_project_path=original_project_path, + **kwargs ) - warning_validator_groups = { - 'commits': [ - validators.has_no_commit_messages_from_blacklist, - ], - 'syntax': [ - validators.has_indents_of_spaces, - validators.has_no_variables_that_shadow_default_names, - ] - } - - for 
name in warning_validator_groups: - assert name in error_validator_groups.keys() - - def __init__(self, **kwargs): - self.validator_arguments = dict(self._default_settings) - self.validator_arguments.update(kwargs) - - @staticmethod - def _is_successful_validation(validation_result): - return not isinstance(validation_result, tuple) + pre_validation_errors = run_validator_group(pre_validation_checks, validator_arguments) + if pre_validation_errors: + return pre_validation_errors - def _run_validator_group(self, group, arguments): - errors = [] - for validator in group: - validation_result = validator(**arguments) - if not self._is_successful_validation(validation_result): - errors.append(validation_result) - return errors - - def _run_warning_validators_until(self, failed_error_group_name, arguments): - """Gets warnings up until but not including the failed group""" - warnings = [] - for error_group_name in self.error_validator_groups.keys(): - if error_group_name == failed_error_group_name: - return warnings - warnings += self._run_validator_group( - self.warning_validator_groups.get(error_group_name, []), - arguments - ) - return warnings - - def run_validator_group(self, group, add_warnings=False, *args, **kwargs): - errors = [] - for error_group_name, error_group in group.items(): - errors += self._run_validator_group( - error_group, - self.validator_arguments - ) - if errors: - if add_warnings: - errors += self._run_warning_validators_until( - error_group_name, - self.validator_arguments - ) - return errors - return errors + validator_arguments['project_folder'] = ProjectFolder( + project_path, + directories_to_skip=validator_arguments['directories_to_skip'] + ) + if original_project_path: + validator_arguments['original_project_folder'] = ProjectFolder( + original_project_path, + directories_to_skip=validator_arguments['directories_to_skip'] + ) + return run_validator_group( + validator_group=error_validator_groups, + validator_arguments=validator_arguments, 
+ post_error_validator_group=warning_validator_groups + ) - def validate(self, repo_path, original_repo_path=None, **kwargs): - self.validator_arguments.update(kwargs) - self.validator_arguments['path_to_repo'] = repo_path - self.validator_arguments['original_repo_path'] = original_repo_path - self.validator_arguments['whitelists'] = self.whitelists - self.validator_arguments['blacklists'] = self.blacklists - pre_validation_errors = self.run_validator_group(self.pre_validation_checks) - if pre_validation_errors: - return pre_validation_errors - self.validator_arguments['solution_repo'] = LocalRepositoryInfo(repo_path) - if original_repo_path: - self.validator_arguments['original_repo'] = LocalRepositoryInfo(original_repo_path) - return self.run_validator_group(self.error_validator_groups, add_warnings=True) diff --git a/fiasko_bro/config.py b/fiasko_bro/config.py deleted file mode 100644 index 1ff046f..0000000 --- a/fiasko_bro/config.py +++ /dev/null @@ -1,141 +0,0 @@ -import os.path - - -VALIDATOR_SETTINGS = { - 'readme_filename': 'README.md', - 'allowed_max_pep8_violations': 5, - 'max_complexity': 7, - 'minimum_name_length': 2, - 'min_percent_of_another_language': 30, - 'last_commits_to_check_amount': 5, - 'tab_size': 4, - 'functions_with_docstrings_percent_limit': 80, - 'max_pep8_line_length': 100, - 'max_number_of_lines': 200, - 'max_indentation_level': 4, - 'max_num_of_py_files': 100, - 'directories_to_skip': [ - 'build', - 'dist', - ] -} - -DEFAULT_BLACKLISTS = { - 'has_variables_from_blacklist': [ - 'list', - 'lists', - 'input', - 'cnt', - 'data', - 'name', - 'load', - 'value', - 'object', - 'file', - 'result', - 'item', - 'num', - 'info', - 'n', - ], - 'has_no_commit_messages_from_blacklist': [ - 'win', - 'commit', - 'commit#1', - 'fix', - 'minor edits', - 'update', - 'done', - 'first commit', - 'start', - 'refactor', - '!', - 'bug fix', - 'corrected', - 'add files via upload', - 'test', - 'fixed', - 'minor bugfix', - 'minor bugfixes', - 'finished', - 
'first commit', - 'fixes', - '', - ], - 'has_no_directories_from_blacklist': [ - '.idea', - '__pycache__', - '.vscode', - ], -} - -DEFAULT_WHITELISTS = { - 'has_no_short_variable_names': [ - 'a', - 'b', - 'c', - 'x', - 'y', - 'x1', - 'x2', - 'y1', - 'y2', - '_', - ], - 'has_no_calls_with_constants': [ - 'pow', - 'exit', - 'round', - 'range', - 'enumerate', - 'time', - 'itemgetter', - 'get', - 'group', - 'replace', - 'combinations', - 'seek', - ], - 'is_snake_case': [ - # from sqlalchemy.sqlalchemy.orm.sessionmaker - 'Session', - # from sqlalchemy.ext.automap - 'Base', - 'User', - 'Order', - 'Address', - ], - 'right_assignment_for_snake_case': [ - 'Base', - ], - 'has_no_exit_calls_in_functions': [ - 'main', - ], - 'is_pep8_fine': [ - '{sep}migrations{sep}'.format(sep=os.path.sep), - '{sep}alembic{sep}'.format(sep=os.path.sep), - 'manage.py', - ], - 'has_no_encoding_declaration': [ - '{sep}migrations{sep}'.format(sep=os.path.sep), - ], - 'has_no_local_imports': [ - 'manage.py', - ], - 'has_local_var_named_as_global': [ - 'settings.py', - ], - 'has_variables_from_blacklist': [ - 'apps.py', - ], - 'has_no_extra_dockstrings_whitelist': [ - '{sep}migrations{sep}'.format(sep=os.path.sep), - '{sep}alembic{sep}'.format(sep=os.path.sep), - ], - 'is_nesting_too_deep': [ - '{sep}migrations{sep}'.format(sep=os.path.sep), - '{sep}alembic{sep}'.format(sep=os.path.sep), - 'manage.py', - 'settings.py', - ], -} diff --git a/fiasko_bro/defaults.py b/fiasko_bro/defaults.py new file mode 100644 index 0000000..caf8738 --- /dev/null +++ b/fiasko_bro/defaults.py @@ -0,0 +1,256 @@ +import os.path +from collections import OrderedDict +from types import MappingProxyType + +from . import pre_validation_checks +from . 
import validators + + +VALIDATION_PARAMETERS = MappingProxyType( + { + 'readme_filename': 'README.md', + 'allowed_max_pep8_violations': 5, + 'max_complexity': 7, + 'minimum_name_length': 2, + 'min_percent_of_another_language': 30, + 'last_commits_to_check_amount': 5, + 'tab_size': 4, + 'functions_with_docstrings_percent_limit': 80, + 'max_pep8_line_length': 100, + 'max_number_of_lines': 200, + 'max_indentation_level': 4, + 'max_num_of_py_files': 100, + 'directories_to_skip': frozenset( + [ + 'build', + 'dist', + '.git', + ] + ), + 'bad_variable_names': frozenset( + [ + 'list', + 'lists', + 'input', + 'cnt', + 'data', + 'name', + 'load', + 'value', + 'object', + 'file', + 'result', + 'item', + 'num', + 'info', + 'n', + ] + ), + 'bad_commit_messages': frozenset( + [ + 'win', + 'commit', + 'commit#1', + 'fix', + 'minor edits', + 'update', + 'done', + 'first commit', + 'start', + 'refactor', + '!', + 'bug fix', + 'corrected', + 'add files via upload', + 'test', + 'fixed', + 'minor bugfix', + 'minor bugfixes', + 'finished', + 'first commit', + 'fixes', + '', + ] + ), + 'data_directories': frozenset( + [ + '.idea', + '__pycache__', + '.vscode', + ] + ), + 'valid_short_variable_names': frozenset( + [ + 'a', + 'b', + 'c', + 'x', + 'y', + 'x1', + 'x2', + 'y1', + 'y2', + '_', + ] + ), + 'valid_calls_with_constants': frozenset( + [ + 'pow', + 'exit', + 'round', + 'range', + 'enumerate', + 'time', + 'itemgetter', + 'get', + 'group', + 'replace', + 'combinations', + 'seek', + ] + ), + 'valid_non_snake_case_left_hand_values': frozenset( + [ + # from sqlalchemy.sqlalchemy.orm.sessionmaker + 'Session', + # from sqlalchemy.ext.automap + 'Base', + 'User', + 'Order', + 'Address', + ] + ), + 'valid_non_snake_case_right_hand_values': frozenset( + [ + 'Base', + ] + ), + 'functions_allowed_to_have_exit_calls': frozenset( + [ + 'main', + ] + ), + 'pep8_paths_to_ignore': frozenset( + [ + '{sep}migrations{sep}'.format(sep=os.path.sep), + '{sep}alembic{sep}'.format(sep=os.path.sep), + 
'manage.py', + ] + ), + 'encoding_declarations_paths_to_ignore': frozenset( + [ + '{sep}migrations{sep}'.format(sep=os.path.sep), + ] + ), + 'local_imports_paths_to_ignore': frozenset( + [ + 'manage.py', + ] + ), + 'local_var_named_as_global_paths_to_ignore': frozenset( + [ + 'settings.py', + ] + ), + 'bad_variables_paths_to_ignore': frozenset( + [ + 'apps.py', + ] + ), + 'extra_dockstrings_paths_to_ignore': frozenset( + [ + '{sep}migrations{sep}'.format(sep=os.path.sep), + '{sep}alembic{sep}'.format(sep=os.path.sep), + ] + ), + 'deep_nesting_paths_to_ignore': frozenset( + [ + '{sep}migrations{sep}'.format(sep=os.path.sep), + '{sep}alembic{sep}'.format(sep=os.path.sep), + 'manage.py', + 'settings.py', + ] + ), + } +) + +PRE_VALIDATION_CHECKS = MappingProxyType( + OrderedDict( + { + 'encoding': ( + pre_validation_checks.are_sources_in_utf, + ), + 'size': ( + pre_validation_checks.are_repos_too_large, + ), + 'bom': ( + pre_validation_checks.has_no_bom, + ), + } + ) +) + +ERROR_VALIDATOR_GROUPS = MappingProxyType( + OrderedDict( + { + 'commits': ( + validators.has_more_commits_than_origin, + ), + 'readme': ( + validators.has_readme_file, + ), + 'syntax': ( + validators.has_no_syntax_errors, + ), + 'general': ( + validators.has_no_directories_from_blacklist, + validators.is_pep8_fine, + validators.has_changed_readme, + validators.is_snake_case, + validators.is_mccabe_difficulty_ok, + validators.has_no_encoding_declaration, + validators.has_no_star_imports, + validators.has_no_local_imports, + validators.has_local_var_named_as_global, + validators.has_variables_from_blacklist, + validators.has_no_short_variable_names, + validators.has_no_range_from_zero, + validators.are_tabs_used_for_indentation, + validators.has_no_try_without_exception, + validators.has_frozen_requirements, + validators.has_no_vars_with_lambda, + validators.has_no_calls_with_constants, + validators.has_readme_in_single_language, + validators.has_no_urls_with_hardcoded_arguments, + 
validators.has_no_nonpythonic_empty_list_validations, + validators.has_no_extra_dockstrings, + validators.has_no_exit_calls_in_functions, + validators.has_no_libs_from_stdlib_in_requirements, + validators.has_no_lines_ends_with_semicolon, + validators.not_validates_response_status_by_comparing_to_200, + validators.has_no_mutable_default_arguments, + validators.has_no_slices_starts_from_zero, + validators.has_no_cast_input_result_to_str, + validators.has_no_return_with_parenthesis, + validators.has_no_long_files, + validators.is_nesting_too_deep, + validators.has_no_string_literal_sums, + ), + } + ) +) + +WARNING_VALIDATOR_GROUPS = MappingProxyType( + { + 'commits': ( + validators.has_no_commit_messages_from_blacklist, + ), + 'syntax': ( + validators.has_indents_of_spaces, + validators.has_no_variables_that_shadow_default_names, + ), + } +) + +for name in WARNING_VALIDATOR_GROUPS: + assert name in ERROR_VALIDATOR_GROUPS.keys() diff --git a/fiasko_bro/file_helpers.py b/fiasko_bro/file_helpers.py index 71d2205..f294b62 100644 --- a/fiasko_bro/file_helpers.py +++ b/fiasko_bro/file_helpers.py @@ -1,14 +1,12 @@ import os -from fiasko_bro.config import VALIDATOR_SETTINGS - -def count_py_files(directory): +def count_py_files(directory, directories_to_skip): all_files = [] for directory, dirs, files in os.walk(directory, topdown=True): dirs[:] = [ d for d in dirs - if d not in VALIDATOR_SETTINGS['directories_to_skip'] + if d not in directories_to_skip ] all_files += files return len([f for f in all_files if f.endswith('.py')]) @@ -30,10 +28,3 @@ def is_in_utf8(name): except UnicodeDecodeError: return False return True - - -def is_filename_in_whitelist(file_name, whitelist): - for whitelisted_part in whitelist: - if whitelisted_part in file_name: - return True - return False diff --git a/fiasko_bro/pre_validation_checks/__init__.py b/fiasko_bro/pre_validation_checks/__init__.py index 686b4e7..b5bc0bd 100644 --- a/fiasko_bro/pre_validation_checks/__init__.py +++ 
b/fiasko_bro/pre_validation_checks/__init__.py @@ -1,3 +1,3 @@ from .encoding import * from .repo_size import * -from .bom import * \ No newline at end of file +from .bom import * diff --git a/fiasko_bro/pre_validation_checks/bom.py b/fiasko_bro/pre_validation_checks/bom.py index cc2768b..5acf037 100644 --- a/fiasko_bro/pre_validation_checks/bom.py +++ b/fiasko_bro/pre_validation_checks/bom.py @@ -1,13 +1,12 @@ import os import codecs -from fiasko_bro.config import VALIDATOR_SETTINGS -def has_no_bom(path_to_repo, *args, **kwargs): - for root, dirs, filenames in os.walk(path_to_repo): +def has_no_bom(project_path, directories_to_skip, *args, **kwargs): + for root, dirs, filenames in os.walk(project_path): dirs[:] = [ d for d in dirs - if d not in VALIDATOR_SETTINGS['directories_to_skip'] + if d not in directories_to_skip ] for name in filenames: with open(os.path.join(root, name), 'rb') as file_handle: diff --git a/fiasko_bro/pre_validation_checks/encoding.py b/fiasko_bro/pre_validation_checks/encoding.py index bac3965..7b93d60 100644 --- a/fiasko_bro/pre_validation_checks/encoding.py +++ b/fiasko_bro/pre_validation_checks/encoding.py @@ -1,14 +1,13 @@ import os -from fiasko_bro.config import VALIDATOR_SETTINGS from ..file_helpers import is_in_utf8 -def are_sources_in_utf(path_to_repo, *args, **kwargs): - for root, dirs, filenames in os.walk(path_to_repo): +def are_sources_in_utf(project_path, directories_to_skip, *args, **kwargs): + for root, dirs, filenames in os.walk(project_path): dirs[:] = [ d for d in dirs - if d not in VALIDATOR_SETTINGS['directories_to_skip'] + if d not in directories_to_skip ] for name in filenames: if not is_in_utf8(os.path.join(root, name)): diff --git a/fiasko_bro/pre_validation_checks/repo_size.py b/fiasko_bro/pre_validation_checks/repo_size.py index b99f8ab..d74850d 100644 --- a/fiasko_bro/pre_validation_checks/repo_size.py +++ b/fiasko_bro/pre_validation_checks/repo_size.py @@ -1,9 +1,16 @@ from .. 
import code_helpers -def are_repos_too_large(path_to_repo, max_num_of_py_files, path_to_original_repo=None, *args, **kwargs): - if code_helpers.is_repo_too_large(path_to_repo, max_num_of_py_files): +def are_repos_too_large( + project_path, + directories_to_skip, + max_num_of_py_files, + original_project_path=None, + *args, + **kwargs +): + if code_helpers.is_repo_too_large(project_path, directories_to_skip, max_num_of_py_files): return 'Repo is too large', '' - if path_to_original_repo: - if code_helpers.is_repo_too_large(path_to_original_repo, max_num_of_py_files): + if original_project_path: + if code_helpers.is_repo_too_large(original_project_path, directories_to_skip, max_num_of_py_files): return 'Repo is too large', '' diff --git a/fiasko_bro/repository_info.py b/fiasko_bro/repository_info.py index 6d81fb1..1ffa55e 100644 --- a/fiasko_bro/repository_info.py +++ b/fiasko_bro/repository_info.py @@ -1,34 +1,110 @@ import os import ast -import copy +from itertools import filterfalse import git -from fiasko_bro.config import VALIDATOR_SETTINGS -from . 
import file_helpers +from .url_helpers import get_filename_from_path -class LocalRepositoryInfo: +class ParsedPyFile: + + def __init__(self, path, content): + self.path = path + self.content = content + self.name = get_filename_from_path(path) + self.ast_tree = self._generate_ast_tree(content) + + @staticmethod + def _generate_ast_tree(content): + try: + tree = ast.parse(content) + except SyntaxError: + tree = None + else: + ParsedPyFile._set_parents(tree) + return tree + + @staticmethod + def _set_parents(tree): + for node in ast.walk(tree): + for child in ast.iter_child_nodes(node): + child.parent = node + + def is_in_whitelist(self, whitelist): + for whitelisted_part in whitelist: + if whitelisted_part in self.path: + return True + return False + + @property + def is_syntax_correct(self): + return self.ast_tree is not None + + def get_name_with_line(self, line_number): + return '{}:{}'.format(self.name, line_number) + + def __str__(self): + return self.name + + def __repr__(self): + return 'ParsedPyFile object for the file {}'.format(self.name) + + +class LocalRepository: + def __init__(self, repository_path): - self.path = repository_path - self._repo = git.Repo(self.path) - self._python_filenames, self._main_file_contents, self._ast_trees = ( - self._get_ast_trees() - ) + self._repo = git.Repo(repository_path) def count_commits(self): return len(list(self._repo.iter_commits())) + def iter_commits(self, *args, **kwargs): + return self._repo.iter_commits(*args, **kwargs) + + def is_tracked_directory(self, directory): + # https://stackoverflow.com/a/34329915/3694363 + return bool(self._repo.git.ls_files(directory)) + + +class ProjectFolder: + + def __init__(self, path, directories_to_skip=None): + if not os.path.isdir(path): + raise FileNotFoundError('Path "{}" not found or is not a directory.'.format(path)) + self.path = path + self._parsed_py_files = self._get_parsed_py_files(directories_to_skip) + try: + self.repo = LocalRepository(path) + except 
git.InvalidGitRepositoryError: + self.repo = None + def does_file_exist(self, filename): return os.path.isfile(os.path.join(self.path, filename)) - def get_source_file_contents(self, extension_list): + @staticmethod + def _make_root_relative_to_path(root, path): + return root[len(path) + 1:] # +1 is for the slash + + def enumerate_directories(self): + for root, folders, _ in os.walk(self.path): + relative_root = self._make_root_relative_to_path(root, self.path) or '.' + for folder in folders: + directory = '{root}{sep}{folder}'.format( + root=relative_root, + sep=os.path.sep, + folder=folder + ) + yield directory + + def get_source_file_contents(self, extension_list, directories_to_skip=None): file_paths = [] file_contents = [] + directories_to_skip = directories_to_skip or [] for dirname, directories_list, filenames in os.walk(self.path, topdown=True): directories_list[:] = [ d for d in directories_list - if d not in VALIDATOR_SETTINGS['directories_to_skip'] + if d not in directories_to_skip ] for filename in filenames: extension = os.path.splitext(filename)[1] @@ -40,41 +116,19 @@ def get_source_file_contents(self, extension_list): source_file_contents = list(zip(file_paths, file_contents)) return source_file_contents - def _get_ast_trees(self): - py_files = list(zip(*self.get_source_file_contents(['.py']))) or [(), ()] - filenames, main_file_contents = py_files - ast_trees = [] - for file_content in main_file_contents: - try: - tree = ast.parse(file_content) - except SyntaxError as e: - tree = None - if tree: - self._set_parents(tree) - ast_trees.append(tree) - return filenames, main_file_contents, ast_trees - - def get_ast_trees(self, with_filenames=False, with_file_content=False, whitelist=None, - with_syntax_error_trees=False): - ast_trees_copy = copy.deepcopy(self._ast_trees) - all_items = zip(self._python_filenames, self._main_file_contents, ast_trees_copy) - filtered_items = self.filter_file_paths(all_items, whitelist) - - if with_filenames: - if not 
with_syntax_error_trees: - filtered_items = [r for r in filtered_items if r[2] is not None] - if with_file_content: - return filtered_items - else: - return [(f, t) for (f, c, t) in filtered_items] - else: - if with_syntax_error_trees: - return ast_trees_copy - else: - return [t for t in ast_trees_copy if t is not None] + def _get_parsed_py_files(self, directories_to_skip=None): + py_files = self.get_source_file_contents(['.py'], directories_to_skip) or [(), ()] + parsed_py_files = [ParsedPyFile(path, content) for path, content in py_files] + return parsed_py_files - def get_python_file_filenames(self): - return self._python_filenames + def get_parsed_py_files(self, whitelist=None): + parsed_py_files = self._parsed_py_files + if whitelist: + parsed_py_files = filterfalse( + lambda parsed_file: parsed_file.is_in_whitelist(whitelist), + parsed_py_files + ) + return parsed_py_files def get_file(self, filename): for dirname, _, files in os.walk(self.path, topdown=True): @@ -88,24 +142,3 @@ def does_directory_exist(self, dirname_to_find): if dirname == dirname_to_find or dirname_to_find in dirs: return True return False - - def iter_commits(self, *args, **kwargs): - return self._repo.iter_commits(*args, **kwargs) - - @staticmethod - def filter_file_paths(all_items, whitelist): - if not whitelist: - return all_items - filtered_items = [] - for file_name, file_content, ast_tree in all_items: - if not file_helpers.is_filename_in_whitelist(file_name, whitelist): - filtered_items.append( - (file_name, file_content, ast_tree) - ) - return filtered_items - - @staticmethod - def _set_parents(tree): - for node in ast.walk(tree): - for child in ast.iter_child_nodes(node): - child.parent = node diff --git a/fiasko_bro/validators/code_inclusion.py b/fiasko_bro/validators/code_inclusion.py index 4f2a460..5fbb96e 100644 --- a/fiasko_bro/validators/code_inclusion.py +++ b/fiasko_bro/validators/code_inclusion.py @@ -1,30 +1,23 @@ from .. import code_helpers -from .. 
import url_helpers -from .. import file_helpers -def is_mccabe_difficulty_ok(solution_repo, max_complexity, *args, **kwargs): +def is_mccabe_difficulty_ok(project_folder, max_complexity, *args, **kwargs): violations = [] - for filename, _ in solution_repo.get_ast_trees(with_filenames=True): - violations += code_helpers.get_mccabe_violations_for_file(filename, max_complexity) + for parsed_file in project_folder.get_parsed_py_files(): + violations += code_helpers.get_mccabe_violations_for_file(parsed_file.path, max_complexity) if violations: return 'mccabe_failure', ','.join(violations) -def is_nesting_too_deep(solution_repo, tab_size, max_indentation_level, whitelists, *args, **kwargs): +def is_nesting_too_deep(project_folder, tab_size, max_indentation_level, deep_nesting_paths_to_ignore, *args, **kwargs): """ Looks at the number of spaces in the beginning and decides if the code is too nested. As a precondition, the code has to pass has_indents_of_spaces. """ - whitelist = whitelists.get('is_nesting_too_deep', []) - for file_path, file_content, _ in solution_repo.get_ast_trees( - with_filenames=True, - with_file_content=True, - whitelist=whitelist - ): - lines = file_content.split('\n') + for parsed_file in project_folder.get_parsed_py_files(whitelist=deep_nesting_paths_to_ignore): + lines = parsed_file.content.split('\n') previous_line_indent = 0 for line_number, line in enumerate(lines): indentation_spaces_amount = code_helpers.count_indentation_spaces(line, tab_size) @@ -33,6 +26,5 @@ def is_nesting_too_deep(solution_repo, tab_size, max_indentation_level, whitelis # make sure it's not a line continuation and indentation_spaces_amount - previous_line_indent == tab_size ): - file_name = url_helpers.get_filename_from_path(file_path) - return 'too_nested', '{}:{}'.format(file_name, line_number) + return 'too_nested', parsed_file.get_name_with_line(line_number) previous_line_indent = indentation_spaces_amount diff --git a/fiasko_bro/validators/comments.py 
b/fiasko_bro/validators/comments.py index 7bd9413..b423c06 100644 --- a/fiasko_bro/validators/comments.py +++ b/fiasko_bro/validators/comments.py @@ -1,17 +1,20 @@ import ast from .. import ast_helpers -from .. import url_helpers -def has_no_extra_dockstrings(solution_repo, whitelists, functions_with_docstrings_percent_limit, *args, **kwargs): - whitelist = whitelists.get('has_no_extra_dockstrings_whitelist', []) - for filepath, tree in solution_repo.get_ast_trees(with_filenames=True, whitelist=whitelist): - defs = ast_helpers.get_nodes_of_type(tree, ast.FunctionDef) +def has_no_extra_dockstrings( + project_folder, + extra_dockstrings_paths_to_ignore, + functions_with_docstrings_percent_limit, + *args, + **kwargs +): + for parsed_file in project_folder.get_parsed_py_files(whitelist=extra_dockstrings_paths_to_ignore): + defs = ast_helpers.get_nodes_of_type(parsed_file.ast_tree, ast.FunctionDef) if not defs: continue docstrings = [ast.get_docstring(d) for d in defs if ast.get_docstring(d) is not None] if len(docstrings) / len(defs) * 100 > functions_with_docstrings_percent_limit: - filename = url_helpers.get_filename_from_path(filepath) - return 'extra_comments', filename + return 'extra_comments', parsed_file.name diff --git a/fiasko_bro/validators/commits.py b/fiasko_bro/validators/commits.py index 712ed03..97b278e 100644 --- a/fiasko_bro/validators/commits.py +++ b/fiasko_bro/validators/commits.py @@ -1,17 +1,20 @@ -def has_more_commits_than_origin(solution_repo, original_repo=None, *args, **kwargs): - if not original_repo: +def has_more_commits_than_origin(project_folder, original_project_folder=None, *args, **kwargs): + if not original_project_folder: + return + if not project_folder.repo or not original_project_folder.repo: return # FIXME this check works incorrectly in case of # new commit in original repo after student forked it - if solution_repo.count_commits() <= original_repo.count_commits(): + if project_folder.repo.count_commits() <= 
original_project_folder.repo.count_commits(): return 'no_new_code', None -def has_no_commit_messages_from_blacklist(solution_repo, blacklists, last_commits_to_check_amount, *args, **kwargs): - blacklist = blacklists.get('has_no_commit_messages_from_blacklist', []) - for commit in solution_repo.iter_commits('master', max_count=last_commits_to_check_amount): +def has_no_commit_messages_from_blacklist(project_folder, bad_commit_messages, last_commits_to_check_amount, *args, **kwargs): + if not project_folder.repo: + return + for commit in project_folder.repo.iter_commits('master', max_count=last_commits_to_check_amount): message = commit.message.lower().strip().strip('.\'"') - if message in blacklist: - return 'git_history_warning', '' + if message in bad_commit_messages: + return 'git_history_warning', message diff --git a/fiasko_bro/validators/files.py b/fiasko_bro/validators/files.py index 2615767..537de97 100644 --- a/fiasko_bro/validators/files.py +++ b/fiasko_bro/validators/files.py @@ -3,21 +3,17 @@ from .. 
import url_helpers -def has_no_long_files(solution_repo, max_number_of_lines, *args, **kwargs): - for file_path, file_content, _ in solution_repo.get_ast_trees( - with_filenames=True, - with_file_content=True - ): - number_of_lines = file_content.count('\n') +def has_no_long_files(project_folder, max_number_of_lines, *args, **kwargs): + for parsed_file in project_folder.get_parsed_py_files(): + number_of_lines = parsed_file.content.count('\n') if number_of_lines > max_number_of_lines: - file_name = url_helpers.get_filename_from_path(file_path) - return 'file_too_long', file_name + return 'file_too_long', parsed_file.name -def are_tabs_used_for_indentation(solution_repo, *args, **kwargs): +def are_tabs_used_for_indentation(project_folder, directories_to_skip, *args, **kwargs): frontend_extensions = ['.html', '.css', '.js'] relevant_extensions = frontend_extensions + ['.py'] - files_info = solution_repo.get_source_file_contents(relevant_extensions) + files_info = project_folder.get_source_file_contents(relevant_extensions, directories_to_skip) if not files_info: return for filepath, file_content in files_info: @@ -34,21 +30,18 @@ def are_tabs_used_for_indentation(solution_repo, *args, **kwargs): return 'tabs_used_for_indents', filename -def has_no_encoding_declaration(solution_repo, whitelists, *args, **kwargs): - whitelist = whitelists.get('has_no_encoding_declaration', []) - for filepath, file_content, _ in solution_repo.get_ast_trees( - with_filenames=True, - with_file_content=True, - whitelist=whitelist, - ): - first_line = file_content.strip('\n').split('\n')[0].strip().replace(' ', '') +def has_no_encoding_declaration(project_folder, encoding_declarations_paths_to_ignore, *args, **kwargs): + for parsed_file in project_folder.get_parsed_py_files(whitelist=encoding_declarations_paths_to_ignore): + first_line = parsed_file.content.strip('\n').split('\n')[0].strip().replace(' ', '') if first_line.startswith('#') and 'coding:utf-8' in first_line: - filename = 
url_helpers.get_filename_from_path(filepath) - return 'has_encoding_declarations', filename + return 'has_encoding_declarations', parsed_file.name -def has_no_directories_from_blacklist(solution_repo, blacklists, *args, **kwargs): - blacklist = blacklists.get('has_no_directories_from_blacklist', []) - for dirname in blacklist: - if solution_repo.does_directory_exist(dirname): - return 'data_in_repo', dirname +def has_no_directories_from_blacklist(project_folder, data_directories, *args, **kwargs): + if not project_folder.repo: + return + for directory in project_folder.enumerate_directories(): + for data_directory in data_directories: + if data_directory in directory: + if project_folder.repo.is_tracked_directory(directory): + return 'data_in_repo', data_directory diff --git a/fiasko_bro/validators/imports.py b/fiasko_bro/validators/imports.py index ca140d8..7ab6b55 100644 --- a/fiasko_bro/validators/imports.py +++ b/fiasko_bro/validators/imports.py @@ -1,17 +1,13 @@ from .. import ast_helpers -from .. 
import url_helpers -def has_no_star_imports(solution_repo, *args, **kwargs): - for filepath, tree in solution_repo.get_ast_trees(with_filenames=True): - if ast_helpers.is_tree_has_star_imports(tree): - filename = url_helpers.get_filename_from_path(filepath) - return 'has_star_import', filename +def has_no_star_imports(project_folder, *args, **kwargs): + for parsed_file in project_folder.get_parsed_py_files(): + if ast_helpers.is_tree_has_star_imports(parsed_file.ast_tree): + return 'has_star_import', parsed_file.name -def has_no_local_imports(solution_repo, whitelists, *args, **kwargs): - whitelist = whitelists.get('has_no_local_imports', []) - for filepath, tree in solution_repo.get_ast_trees(with_filenames=True, whitelist=whitelist): - if ast_helpers.is_has_local_imports(tree): - filename = url_helpers.get_filename_from_path(filepath) - return 'has_local_import', filename +def has_no_local_imports(project_folder, local_imports_paths_to_ignore, *args, **kwargs): + for parsed_file in project_folder.get_parsed_py_files(whitelist=local_imports_paths_to_ignore): + if ast_helpers.is_has_local_imports(parsed_file.ast_tree): + return 'has_local_import', parsed_file.name diff --git a/fiasko_bro/validators/naming.py b/fiasko_bro/validators/naming.py index 7e4c259..c4531d5 100644 --- a/fiasko_bro/validators/naming.py +++ b/fiasko_bro/validators/naming.py @@ -4,56 +4,50 @@ from ..i18n import _ -def has_variables_from_blacklist(solution_repo, whitelists, blacklists, *args, **kwargs): - whitelist = whitelists.get('has_variables_from_blacklist', []) - blacklist = blacklists.get('has_variables_from_blacklist', []) - for filename, tree in solution_repo.get_ast_trees(with_filenames=True, whitelist=whitelist): - names = ast_helpers.get_all_defined_names(tree) - bad_names = names.intersection(blacklist) +def has_variables_from_blacklist(project_folder, bad_variables_paths_to_ignore, bad_variable_names, *args, **kwargs): + for parsed_file in 
project_folder.get_parsed_py_files(whitelist=bad_variables_paths_to_ignore): + names = ast_helpers.get_all_defined_names(parsed_file.ast_tree) + bad_names = names.intersection(bad_variable_names) if bad_names: return 'bad_titles', ', '.join(bad_names) -def has_local_var_named_as_global(solution_repo, whitelists, max_indentation_level, *args, **kwargs): - whitelist = whitelists.get('has_local_var_named_as_global', []) - for filename, tree in solution_repo.get_ast_trees(with_filenames=True, whitelist=whitelist): - bad_names = ast_helpers.get_local_vars_named_as_globals(tree, max_indentation_level) +def has_local_var_named_as_global(project_folder, local_var_named_as_global_paths_to_ignore, max_indentation_level, *args, **kwargs): + for parsed_file in project_folder.get_parsed_py_files(whitelist=local_var_named_as_global_paths_to_ignore): + bad_names = ast_helpers.get_local_vars_named_as_globals(parsed_file.ast_tree, max_indentation_level) if bad_names: message = _('for example, %s') % (', '.join(bad_names)) return 'has_locals_named_as_globals', message -def has_no_short_variable_names(solution_repo, minimum_name_length, whitelists, *args, **kwargs): - whitelist = whitelists.get('has_no_short_variable_names', []) +def has_no_short_variable_names(project_folder, minimum_name_length, valid_short_variable_names, *args, **kwargs): short_names = [] - for tree in solution_repo.get_ast_trees(): - names = ast_helpers.get_all_defined_names(tree) + for parsed_file in project_folder.get_parsed_py_files(): + names = ast_helpers.get_all_defined_names(parsed_file.ast_tree) short_names += [n for n in names - if len(n) < minimum_name_length and n not in whitelist] + if len(n) < minimum_name_length and n not in valid_short_variable_names] if short_names: return 'bad_titles', ', '.join(list(set(short_names))) -def is_snake_case(solution_repo, whitelists, *args, **kwargs): - whitelist = whitelists.get('is_snake_case', []) - right_assignment_whitelist = 
whitelists.get('right_assignment_for_snake_case', []) +def is_snake_case(project_folder, valid_non_snake_case_left_hand_values, valid_non_snake_case_right_hand_values, *args, **kwargs): buildins_ = dir(builtins) - for tree in solution_repo.get_ast_trees(): - names = ast_helpers.get_all_names_from_tree(tree) + for parsed_file in project_folder.get_parsed_py_files(): + names = ast_helpers.get_all_names_from_tree(parsed_file.ast_tree) whitelisted_names = ast_helpers.get_names_from_assignment_with( - tree, - right_assignment_whitelist + parsed_file.ast_tree, + valid_non_snake_case_right_hand_values ) - imported_names = ast_helpers.get_all_imported_names_from_tree(tree) - defined_class_names = ast_helpers.get_all_class_definitions_from_tree(tree) - namedtuples = ast_helpers.get_all_namedtuple_names(tree) + imported_names = ast_helpers.get_all_imported_names_from_tree(parsed_file.ast_tree) + defined_class_names = ast_helpers.get_all_class_definitions_from_tree(parsed_file.ast_tree) + namedtuples = ast_helpers.get_all_namedtuple_names(parsed_file.ast_tree) names_with_uppercase = [n for n in names if n.lower() != n and n.upper() != n and n not in imported_names and n not in defined_class_names and n not in namedtuples and n not in buildins_ - and n not in whitelist + and n not in valid_non_snake_case_left_hand_values and n not in whitelisted_names] if names_with_uppercase: message = _( @@ -62,10 +56,10 @@ def is_snake_case(solution_repo, whitelists, *args, **kwargs): return 'camel_case_vars', message -def has_no_variables_that_shadow_default_names(solution_repo, *args, **kwargs): +def has_no_variables_that_shadow_default_names(project_folder, *args, **kwargs): buildins_ = dir(builtins) - for tree in solution_repo.get_ast_trees(): - names = ast_helpers.get_all_defined_names(tree, with_static_class_properties=False) + for parsed_file in project_folder.get_parsed_py_files(): + names = ast_helpers.get_all_defined_names(parsed_file.ast_tree, with_static_class_properties=False) 
bad_names = names.intersection(buildins_) if bad_names: return 'title_shadows', ', '.join(bad_names) diff --git a/fiasko_bro/validators/other_languages.py b/fiasko_bro/validators/other_languages.py index 67f0e81..cc88a69 100644 --- a/fiasko_bro/validators/other_languages.py +++ b/fiasko_bro/validators/other_languages.py @@ -1,16 +1,12 @@ import ast from .. import ast_helpers -from .. import url_helpers -def has_no_return_with_parenthesis(solution_repo, *args, **kwargs): - for filepath, file_content, tree in solution_repo.get_ast_trees( - with_filenames=True, - with_file_content=True - ): - file_content = file_content.split('\n') - return_lines = [n.lineno for n in ast.walk(tree) if isinstance(n, ast.Return)] +def has_no_return_with_parenthesis(project_folder, *args, **kwargs): + for parsed_file in project_folder.get_parsed_py_files(): + file_content = parsed_file.content.split('\n') + return_lines = [n.lineno for n in ast.walk(parsed_file.ast_tree) if isinstance(n, ast.Return)] for line_num in return_lines: line = file_content[line_num - 1] if ( @@ -21,21 +17,16 @@ def has_no_return_with_parenthesis(solution_repo, *args, **kwargs): ) and line.strip().endswith(')') ): - filename = url_helpers.get_filename_from_path(filepath) - return 'return_with_parenthesis', '{}:{}'.format(filename, line_num) + return 'return_with_parenthesis', parsed_file.get_name_with_line(line_num) -def has_no_lines_ends_with_semicolon(solution_repo, *args, **kwargs): - for filepath, file_content, tree in solution_repo.get_ast_trees( - with_filenames=True, - with_file_content=True - ): +def has_no_lines_ends_with_semicolon(project_folder, *args, **kwargs): + for parsed_file in project_folder.get_parsed_py_files(): total_lines_with_semicolons = len( - [1 for l in file_content.split('\n') if l.endswith(';') and not l.startswith('#')] + [1 for l in parsed_file.content.split('\n') if l.endswith(';') and not l.startswith('#')] ) # TODO: check docstrings for semicolons - string_nodes = 
ast_helpers.get_nodes_of_type(tree, ast.Str) + string_nodes = ast_helpers.get_nodes_of_type(parsed_file.ast_tree, ast.Str) semicolons_in_string_constants_amount = sum([n.s.count(';') for n in string_nodes]) if total_lines_with_semicolons > semicolons_in_string_constants_amount: - filename = url_helpers.get_filename_from_path(filepath) - return 'has_semicolons', filename + return 'has_semicolons', parsed_file.name diff --git a/fiasko_bro/validators/pythonic.py b/fiasko_bro/validators/pythonic.py index c1dadc8..6eb2352 100644 --- a/fiasko_bro/validators/pythonic.py +++ b/fiasko_bro/validators/pythonic.py @@ -7,35 +7,33 @@ from ..i18n import _ -def is_pep8_fine(solution_repo, allowed_max_pep8_violations, - max_pep8_line_length, whitelists, *args, **kwargs): - whitelist = whitelists.get('is_pep8_fine', []) +def is_pep8_fine(project_folder, allowed_max_pep8_violations, + max_pep8_line_length, pep8_paths_to_ignore, *args, **kwargs): violations_amount = code_helpers.count_pep8_violations( - solution_repo, + project_folder, max_line_length=max_pep8_line_length, - path_whitelist=whitelist + path_whitelist=pep8_paths_to_ignore ) if violations_amount > allowed_max_pep8_violations: return 'pep8', _('%s PEP8 violations') % violations_amount -def has_no_range_from_zero(solution_repo, *args, **kwargs): - for filepath, tree in solution_repo.get_ast_trees(with_filenames=True): - calls = ast_helpers.get_nodes_of_type(tree, ast.Call) +def has_no_range_from_zero(project_folder, *args, **kwargs): + for parsed_file in project_folder.get_parsed_py_files(): + calls = ast_helpers.get_nodes_of_type(parsed_file.ast_tree, ast.Call) for call in calls: if ( getattr(call.func, 'id', None) == 'range' and call.args and len(call.args) == 2 and isinstance(call.args[0], ast.Num) and call.args[0].n == 0 ): - filename = url_helpers.get_filename_from_path(filepath) - return 'manual_zero_in_range', '{}:{}'.format(filename, call.lineno) + return 'manual_zero_in_range', 
parsed_file.get_name_with_line(call.lineno) -def has_no_try_without_exception(solution_repo, *args, **kwargs): +def has_no_try_without_exception(project_folder, *args, **kwargs): exception_type_to_catch = 'Exception' - for tree in solution_repo.get_ast_trees(): - tryes = [node for node in ast.walk(tree) if isinstance(node, ast.ExceptHandler)] + for parsed_file in project_folder.get_parsed_py_files(): + tryes = [node for node in ast.walk(parsed_file.ast_tree) if isinstance(node, ast.ExceptHandler)] for try_except in tryes: if try_except.type is None: return 'broad_except', '' @@ -49,99 +47,88 @@ def has_no_try_without_exception(solution_repo, *args, **kwargs): return 'broad_except', message -def has_no_vars_with_lambda(solution_repo, *args, **kwargs): - for filepath, tree in solution_repo.get_ast_trees(with_filenames=True): - assigns = ast_helpers.get_nodes_of_type(tree, ast.Assign) +def has_no_vars_with_lambda(project_folder, *args, **kwargs): + for parsed_file in project_folder.get_parsed_py_files(): + assigns = ast_helpers.get_nodes_of_type(parsed_file.ast_tree, ast.Assign) for assign in assigns: if isinstance(assign.value, ast.Lambda): - filename = url_helpers.get_filename_from_path(filepath) - return 'named_lambda', '{}:{}'.format(filename, assign.lineno) + return 'named_lambda', '{}:{}'.format(parsed_file.name, assign.lineno) -def has_no_urls_with_hardcoded_arguments(solution_repo, *args, **kwargs): - for filepath, tree in solution_repo.get_ast_trees(with_filenames=True): - string_nodes = [n for n in ast.walk(tree) if isinstance(n, ast.Str)] +def has_no_urls_with_hardcoded_arguments(project_folder, *args, **kwargs): + for parsed_file in project_folder.get_parsed_py_files(): + string_nodes = [n for n in ast.walk(parsed_file.ast_tree) if isinstance(n, ast.Str)] for string_node in string_nodes: if url_helpers.is_url_with_params(string_node.s): - filename = url_helpers.get_filename_from_path(filepath) - return 'hardcoded_get_params', '{}:{}'.format(filename, 
string_node.lineno) + return 'hardcoded_get_params', '{}:{}'.format(parsed_file.name, string_node.lineno) -def has_no_nonpythonic_empty_list_validations(solution_repo, *args, **kwargs): - for filepath, tree in solution_repo.get_ast_trees(with_filenames=True): - ifs_compare_tests = [n.test for n in ast.walk(tree) if +def has_no_nonpythonic_empty_list_validations(project_folder, *args, **kwargs): + for parsed_file in project_folder.get_parsed_py_files(): + ifs_compare_tests = [n.test for n in ast.walk(parsed_file.ast_tree) if isinstance(n, ast.If) and isinstance(n.test, ast.Compare)] for compare in ifs_compare_tests: if ast_nodes_validators.is_len_compared_to_zero(compare): - filename = url_helpers.get_filename_from_path(filepath) - return 'nonpythonic_empty_list_validation', '{}:{}'.format(filename, compare.lineno) + return 'nonpythonic_empty_list_validation', parsed_file.get_name_with_line(compare.lineno) -def has_no_exit_calls_in_functions(solution_repo, whitelists, *args, **kwargs): - whitelist = whitelists.get('has_no_exit_calls_in_functions', []) - for tree in solution_repo.get_ast_trees(): - defs = ast_helpers.get_nodes_of_type(tree, ast.FunctionDef) +def has_no_exit_calls_in_functions(project_folder, functions_allowed_to_have_exit_calls, *args, **kwargs): + for parsed_file in project_folder.get_parsed_py_files(): + defs = ast_helpers.get_nodes_of_type(parsed_file.ast_tree, ast.FunctionDef) for function_definition in defs: - if function_definition.name in whitelist: + if function_definition.name in functions_allowed_to_have_exit_calls: continue if ast_helpers.has_exit_calls(function_definition): return 'has_exit_calls_in_function', function_definition.name -def not_validates_response_status_by_comparing_to_200(solution_repo, *args, **kwargs): - for filepath, tree in solution_repo.get_ast_trees(with_filenames=True): - for compare in ast_helpers.get_nodes_of_type(tree, ast.Compare): +def not_validates_response_status_by_comparing_to_200(project_folder, *args, 
**kwargs): + for parsed_file in project_folder.get_parsed_py_files(): + for compare in ast_helpers.get_nodes_of_type(parsed_file.ast_tree, ast.Compare): if ast_nodes_validators.is_status_code_compared_to_200(compare): - filename = url_helpers.get_filename_from_path(filepath) - return 'compare_response_status_to_200', '{}:{}'.format(filename, compare.lineno) + return 'compare_response_status_to_200', parsed_file.get_name_with_line(compare.lineno) -def has_no_mutable_default_arguments(solution_repo, *args, **kwargs): +def has_no_mutable_default_arguments(project_folder, *args, **kwargs): funcdef_types = (ast.FunctionDef, ) mutable_types = (ast.List, ast.Dict) - for filepath, tree in solution_repo.get_ast_trees(with_filenames=True): - for funcdef in ast_helpers.get_nodes_of_type(tree, funcdef_types): + for parsed_file in project_folder.get_parsed_py_files(): + for funcdef in ast_helpers.get_nodes_of_type(parsed_file.ast_tree, funcdef_types): if ast_helpers.is_funcdef_has_arguments_of_types(funcdef, mutable_types): - filename = url_helpers.get_filename_from_path(filepath) - return 'mutable_default_arguments', '{}:{}'.format(filename, funcdef.lineno) + return 'mutable_default_arguments', parsed_file.get_name_with_line(funcdef.lineno) -def has_no_slices_starts_from_zero(solution_repo, *args, **kwargs): - for filepath, tree in solution_repo.get_ast_trees(with_filenames=True): - if ast_helpers.is_tree_has_slices_from_zero(tree): - filename = url_helpers.get_filename_from_path(filepath) - return 'slice_starts_from_zero', filename +def has_no_slices_starts_from_zero(project_folder, *args, **kwargs): + for parsed_file in project_folder.get_parsed_py_files(): + if ast_helpers.is_tree_has_slices_from_zero(parsed_file.ast_tree): + return 'slice_starts_from_zero', parsed_file.name -def has_no_cast_input_result_to_str(solution_repo, *args, **kwargs): - for filepath, tree in solution_repo.get_ast_trees(with_filenames=True): - calls = ast_helpers.get_nodes_of_type(tree, ast.Call) 
+def has_no_cast_input_result_to_str(project_folder, *args, **kwargs): + for parsed_file in project_folder.get_parsed_py_files(): + calls = ast_helpers.get_nodes_of_type(parsed_file.ast_tree, ast.Call) for call in calls: if ast_helpers.is_str_call_of_input(call): - filename = url_helpers.get_filename_from_path(filepath) - return 'str_conversion_of_input_result', '{}:{}'.format(filename, call.lineno) + return 'str_conversion_of_input_result', parsed_file.get_name_with_line(call.lineno) -def has_no_string_literal_sums(solution_repo, *args, **kwargs): - for filepath, tree in solution_repo.get_ast_trees(with_filenames=True): - for node in ast.walk(tree): +def has_no_string_literal_sums(project_folder, *args, **kwargs): + for parsed_file in project_folder.get_parsed_py_files(): + for node in ast.walk(parsed_file.ast_tree): if ( isinstance(node, ast.BinOp) and isinstance(node.op, ast.Add) and isinstance(node.left, ast.Str) and isinstance(node.right, ast.Str) ): - filename = url_helpers.get_filename_from_path(filepath) - return 'has_string_sum', '{}: {}'.format(filename, node.lineno) + return 'has_string_sum', parsed_file.get_name_with_line(node.lineno) -def has_no_calls_with_constants(solution_repo, whitelists, *args, **kwargs): - whitelist = whitelists.get('has_no_calls_with_constants') - for filepath, tree in solution_repo.get_ast_trees(with_filenames=True): - if 'tests' in filepath: # tests can have constants in asserts +def has_no_calls_with_constants(project_folder, valid_calls_with_constants, *args, **kwargs): + for parsed_file in project_folder.get_parsed_py_files(): + if 'tests' in parsed_file.path: # tests can have constants in asserts continue - calls = ast_helpers.get_nodes_of_type(tree, ast.Call) + calls = ast_helpers.get_nodes_of_type(parsed_file.ast_tree, ast.Call) for call in calls: - if ast_helpers.is_call_has_constants(call, whitelist): - filename = url_helpers.get_filename_from_path(filepath) - return 'magic_numbers', '{}:{}'.format(filename, 
call.lineno) + if ast_helpers.is_call_has_constants(call, valid_calls_with_constants): + return 'magic_numbers', '{}:{}'.format(parsed_file.name, call.lineno) diff --git a/fiasko_bro/validators/readme.py b/fiasko_bro/validators/readme.py index 4cb1280..d1da489 100644 --- a/fiasko_bro/validators/readme.py +++ b/fiasko_bro/validators/readme.py @@ -4,16 +4,16 @@ from ..i18n import _ -def has_readme_file(solution_repo, readme_filename, *args, **kwargs): - if not solution_repo.does_file_exist(readme_filename): +def has_readme_file(project_folder, readme_filename, *args, **kwargs): + if not project_folder.does_file_exist(readme_filename): return 'need_readme', _('there is no %s') % readme_filename -def has_changed_readme(solution_repo, readme_filename, original_repo=None, *args, **kwargs): - if not original_repo: +def has_changed_readme(project_folder, readme_filename, original_project_folder=None, *args, **kwargs): + if not original_project_folder: return - original_readme_path = os.path.join(original_repo.path, readme_filename) - solution_readme_path = os.path.join(solution_repo.path, readme_filename) + original_readme_path = os.path.join(original_project_folder.path, readme_filename) + solution_readme_path = os.path.join(project_folder.path, readme_filename) try: with open(original_readme_path, encoding='utf-8') as original_handler: original_readme = original_handler.read() @@ -28,8 +28,8 @@ def has_changed_readme(solution_repo, readme_filename, original_repo=None, *args return 'readme_not_utf_8', None -def has_readme_in_single_language(solution_repo, readme_filename, min_percent_of_another_language, *args, **kwargs): - raw_readme = solution_repo.get_file(readme_filename) +def has_readme_in_single_language(project_folder, readme_filename, min_percent_of_another_language, *args, **kwargs): + raw_readme = project_folder.get_file(readme_filename) readme_no_code = re.sub("\s```[#!A-Za-z]*\n[\s\S]*?\n```\s", '', raw_readme) clean_readme = re.sub("\[([^\]]+)\]\(([^)]+)\)", 
'', readme_no_code) ru_letters_amount = len(re.findall('[а-яА-Я]', clean_readme)) diff --git a/fiasko_bro/validators/requirements.py b/fiasko_bro/validators/requirements.py index 31b18ee..bfb3b74 100644 --- a/fiasko_bro/validators/requirements.py +++ b/fiasko_bro/validators/requirements.py @@ -2,8 +2,8 @@ from ..i18n import _ -def has_frozen_requirements(solution_repo, *args, **kwargs): - requirements = solution_repo.get_file('requirements.txt') +def has_frozen_requirements(project_folder, *args, **kwargs): + requirements = project_folder.get_file('requirements.txt') if not requirements: return for requirement_line in requirements.split('\n'): @@ -11,8 +11,8 @@ def has_frozen_requirements(solution_repo, *args, **kwargs): return 'unfrozen_requirements', _('for example, %s') % requirement_line -def has_no_libs_from_stdlib_in_requirements(solution_repo, *args, **kwargs): - raw_requirements = solution_repo.get_file('requirements.txt') +def has_no_libs_from_stdlib_in_requirements(project_folder, *args, **kwargs): + raw_requirements = project_folder.get_file('requirements.txt') if not raw_requirements: return diff --git a/fiasko_bro/validators/syntax.py b/fiasko_bro/validators/syntax.py index 200c95a..b01f9b0 100644 --- a/fiasko_bro/validators/syntax.py +++ b/fiasko_bro/validators/syntax.py @@ -2,33 +2,27 @@ from .. import ast_helpers from .. import file_helpers -from .. 
import url_helpers -def has_no_syntax_errors(solution_repo, *args, **kwargs): - for filepath, tree in solution_repo.get_ast_trees(with_filenames=True, with_syntax_error_trees=True): - if tree is None: - filename = url_helpers.get_filename_from_path(filepath) - return 'syntax_error', filename +def has_no_syntax_errors(project_folder, *args, **kwargs): + for parsed_file in project_folder.get_parsed_py_files(): + if not parsed_file.is_syntax_correct: + return 'syntax_error', parsed_file.name -def has_indents_of_spaces(solution_repo, tab_size, *args, **kwargs): +def has_indents_of_spaces(project_folder, tab_size, *args, **kwargs): """ Since there are cases for which col_offset is computed incorrectly, this validator must be nothing more than a simple warning. """ node_types_to_validate = (ast.For, ast.If, ast.FunctionDef, ast.With) - for filepath, file_content, tree in solution_repo.get_ast_trees( - with_filenames=True, - with_file_content=True - ): - lines_offsets = file_helpers.get_line_offsets(file_content) - for node in ast.walk(tree): + for parsed_file in project_folder.get_parsed_py_files(): + lines_offsets = file_helpers.get_line_offsets(parsed_file.content) + for node in ast.walk(parsed_file.ast_tree): if not ast_helpers.is_node_offset_fine( node, lines_offsets, node_types_to_validate, tab_size, ): - filename = url_helpers.get_filename_from_path(filepath) - return 'indent_not_four_spaces', '{}:{}'.format(filename, node.lineno) + return 'indent_not_four_spaces', parsed_file.get_name_with_line(node.lineno) diff --git a/requirements-dev.txt b/requirements-dev.txt index f9cd32b..9368082 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -6,11 +6,11 @@ codecov==2.0.15 --hash=sha256:ae00d68e18d8a20e9c3288ba3875ae03db3a8e892115bf9b83 coverage==4.5.1 --hash=sha256:7608a3dd5d73cb06c531b8925e0ef8d3de31fed2544a7de6c63960a1e73ea4bc --hash=sha256:3a2184c6d797a125dca8367878d3b9a178b6fdd05fdc2d35d758c3006a1cd694 
--hash=sha256:f3f501f345f24383c0000395b26b726e46758b71393267aeae0bd36f8b3ade80 --hash=sha256:0b136648de27201056c1869a6c0d4e23f464750fd9a9ba9750b8336a244429ed --hash=sha256:337ded681dd2ef9ca04ef5d93cfc87e52e09db2594c296b4a0a3662cb1b41249 --hash=sha256:3eb42bf89a6be7deb64116dd1cc4b08171734d721e7a7e57ad64cc4ef29ed2f1 --hash=sha256:be6cfcd8053d13f5f5eeb284aa8a814220c3da1b0078fa859011c7fffd86dab9 --hash=sha256:69bf008a06b76619d3c3f3b1983f5145c75a305a0fea513aca094cae5c40a8f5 --hash=sha256:2eb564bbf7816a9d68dd3369a510be3327f1c618d2357fa6b1216994c2e3d508 --hash=sha256:9d6dd10d49e01571bf6e147d3b505141ffc093a06756c60b053a859cb2128b1f --hash=sha256:701cd6093d63e6b8ad7009d8a92425428bc4d6e7ab8d75efbb665c806c1d79ba --hash=sha256:5a13ea7911ff5e1796b6d5e4fbbf6952381a611209b736d48e675c2756f3f74e --hash=sha256:c1bb572fab8208c400adaf06a8133ac0712179a334c09224fb11393e920abcdd --hash=sha256:03481e81d558d30d230bc12999e3edffe392d244349a90f4ef9b88425fac74ba --hash=sha256:28b2191e7283f4f3568962e373b47ef7f0392993bb6660d079c62bd50fe9d162 --hash=sha256:de4418dadaa1c01d497e539210cb6baa015965526ff5afc078c57ca69160108d --hash=sha256:8c3cb8c35ec4d9506979b4cf90ee9918bc2e49f84189d9bf5c36c0c1119c6558 --hash=sha256:7e1fe19bd6dce69d9fd159d8e4a80a8f52101380d5d3a4d374b6d3eae0e5de9c --hash=sha256:6bc583dc18d5979dc0f6cec26a8603129de0304d5ae1f17e57a12834e7235062 --hash=sha256:198626739a79b09fa0a2f06e083ffd12eb55449b5f8bfdbeed1df4910b2ca640 --hash=sha256:7aa36d2b844a3e4a4b356708d79fd2c260281a7390d678a10b91ca595ddc9e99 --hash=sha256:3d72c20bd105022d29b14a7d628462ebdc61de2f303322c0212a054352f3b287 --hash=sha256:4635a184d0bbe537aa185a34193898eee409332a8ccb27eea36f262566585000 --hash=sha256:e05cb4d9aad6233d67e0541caa7e511fa4047ed7750ec2510d466e806e0255d6 --hash=sha256:76ecd006d1d8f739430ec50cc872889af1f9c1b6b8f48e29941814b09b0fd3cc --hash=sha256:7d3f553904b0c5c016d1dad058a7554c7ac4c91a789fca496e7d8347ad040653 --hash=sha256:3c79a6f7b95751cdebcd9037e4d06f8d5a9b60e4ed0cd231342aa8ad7124882a 
--hash=sha256:56e448f051a201c5ebbaa86a5efd0ca90d327204d8b059ab25ad0f35fbfd79f1 --hash=sha256:ac4fef68da01116a5c117eba4dd46f2e06847a497de5ed1d64bb99a5fda1ef91 --hash=sha256:1c383d2ef13ade2acc636556fd544dba6e14fa30755f26812f54300e401f98f2 --hash=sha256:b8815995e050764c8610dbc82641807d196927c3dbed207f0a079833ffcf588d --hash=sha256:104ab3934abaf5be871a583541e8829d6c19ce7bde2923b2751e0d3ca44db60a --hash=sha256:9e112fcbe0148a6fa4f0a02e8d58e94470fc6cb82a5481618fea901699bf34c4 --hash=sha256:15b111b6a0f46ee1a485414a52a7ad1d703bdf984e9ed3c288a4414d3871dcbd --hash=sha256:e4d96c07229f58cb686120f168276e434660e4358cc9cf3b0464210b04913e77 --hash=sha256:f8a923a85cb099422ad5a2e345fe877bbc89a8a8b23235824a93488150e45f6e idna==2.6 --hash=sha256:8c7309c718f94b3a625cb648ace320157ad16ff131ae0af362c9f21b80ef6ec4 --hash=sha256:2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f more-itertools==4.1.0 --hash=sha256:11a625025954c20145b37ff6309cd54e39ca94f72f6bb9576d1195db6fa2442e --hash=sha256:0dd8f72eeab0d2c3bd489025bb2f6a1b8342f9b198f6fc37b52d15cfa4531fea --hash=sha256:c9ce7eccdcb901a2c75d326ea134e0886abfbea5f93e91cc95de9507c0816c44 -pluggy==0.6.0 --hash=sha256:7f8ae7f5bdf75671a718d2daf0a64b7885f74510bcd98b1a0bb420eb9a9d0cff +pluggy==0.6.0 --hash=sha256:d345c8fe681115900d6da8d048ba67c25df42973bda370783cd58826442dcd7c --hash=sha256:e160a7fcf25762bb60efc7e171d4497ff1d8d2d75a3d0df7a21b76821ecbf5c5 --hash=sha256:7f8ae7f5bdf75671a718d2daf0a64b7885f74510bcd98b1a0bb420eb9a9d0cff py==1.5.3 --hash=sha256:983f77f3331356039fdd792e9220b7b8ee1aa6bd2b25f567a963ff1de5a64f6a --hash=sha256:29c9fab495d7528e80ba1e343b958684f4ace687327e6f789a94bf3d1915f881 pytest==3.5.0 --hash=sha256:6266f87ab64692112e5477eba395cfedda53b1933ccd29478e671e73b420c19c --hash=sha256:fae491d1874f199537fd5872b5e1f0e74a009b979df9d53d1553fd03da1703e1 pytest-cov==2.5.1 --hash=sha256:890fe5565400902b0c78b5357004aab1c814115894f4f21370e2433256a3eeec --hash=sha256:03aa752cf11db41d281ea1d807d954c4eda35cfa1b21d6971966cc041bbf6e2d 
-pytz==2018.3 --hash=sha256:ed6509d9af298b7995d69a440e2822288f2eca1681b8cce37673dbb10091e5fe --hash=sha256:f93ddcdd6342f94cea379c73cddb5724e0d6d0a1c91c9bdef364dc0368ba4fda --hash=sha256:61242a9abc626379574a166dc0e96a66cd7c3b27fc10868003fa210be4bff1c9 --hash=sha256:ba18e6a243b3625513d85239b3e49055a2f0318466e0b8a92b8fb8ca7ccdf55f --hash=sha256:07edfc3d4d2705a20a6e99d97f0c4b61c800b8232dc1c04d87e8554f130148dd --hash=sha256:3a47ff71597f821cd84a162e71593004286e5be07a340fd462f0d33a760782b5 --hash=sha256:5bd55c744e6feaa4d599a6cbd8228b4f8f9ba96de2c38d56f08e534b3c9edf0d --hash=sha256:887ab5e5b32e4d0c86efddd3d055c1f363cbaa583beb8da5e22d2fa2f64d51ef --hash=sha256:410bcd1d6409026fbaa65d9ed33bf6dd8b1e94a499e32168acfc7b332e4095c0 +pytz==2018.4 --hash=sha256:65ae0c8101309c45772196b21b74c46b2e5d11b6275c45d251b150d5da334555 --hash=sha256:c06425302f2cf668f1bba7a0a03f3c1d34d4ebeef2c72003da308b3947c7f749 requests==2.18.4 --hash=sha256:6a1b267aa90cac58ac3a765d067950e7dbbf75b1da07e895d1f594193a40a38b --hash=sha256:9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e six==1.11.0 --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 tox==3.0.0 --hash=sha256:9ee7de958a43806402a38c0d2aa07fa8553f4d2c20a15b140e9f771c2afeade0 --hash=sha256:96efa09710a3daeeb845561ebbe1497641d9cef2ee0aea30db6969058b2bda2f diff --git a/requirements.txt b/requirements.txt index cfe66c8..da848d3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ gitdb2==2.0.3 --hash=sha256:cf9a4b68e8c4da8d42e48728c944ff7af2d8c9db303ac1ab32eac37aa4194b0e --hash=sha256:b60e29d4533e5e25bb50b7678bbc187c8f6bcff1344b4f293b2ba55c85795f09 -gitpython==2.1.8 --hash=sha256:b8367c432de995dc330b5b146c5bfdc0926b8496e100fda6692134e00c0dcdc5 --hash=sha256:ad61bc25deadb535b047684d06f3654c001d9415e1971e51c9c20f5b510076e9 +gitpython==2.1.9 --hash=sha256:05069e26177c650b3cb945dd543a7ef7ca449f8db5b73038b465105673c1ef61 
--hash=sha256:c47cc31af6e88979c57a33962cbc30a7c25508d74a1b3a19ec5aa7ed64b03129 mccabe==0.6.1 --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f pep8==1.7.1 --hash=sha256:b22cfae5db09833bb9bd7c8463b53e1a9c9b39f12e304a8d0bba729c501827ee --hash=sha256:fe249b52e20498e59e0b5c5256aa52ee99fc295b26ec9eaa85776ffdb9fe6374 smmap2==2.0.3 --hash=sha256:b78ee0f1f5772d69ff50b1cbdb01b8c6647a8354f02f23b488cf4b2cfc923956 --hash=sha256:c7530db63f15f09f8251094b22091298e82bf6c699a6b8344aaaef3f2e1276c3 diff --git a/setup.cfg b/setup.cfg index 27bd052..309f797 100644 --- a/setup.cfg +++ b/setup.cfg @@ -17,4 +17,4 @@ ignore = W506,W503 universal=1 [fiasko_bro] -directories_to_skip=build,dist,test_fixtures,.pytest_cache +directories_to_skip=build,dist,test_fixtures,.pytest_cache,.git diff --git a/test_fixtures/general_repo/.vscode/.gitignore b/test_fixtures/general_repo/.vscode/empty.py similarity index 100% rename from test_fixtures/general_repo/.vscode/.gitignore rename to test_fixtures/general_repo/.vscode/empty.py diff --git a/test_fixtures/general_repo/directory_with_pycache/__pycache__/empty.pyc b/test_fixtures/general_repo/directory_with_pycache/__pycache__/empty.pyc new file mode 100644 index 0000000..e69de29 diff --git a/test_fixtures/general_repo_origin/.gitignore b/test_fixtures/general_repo_origin/.gitignore new file mode 100644 index 0000000..722d5e7 --- /dev/null +++ b/test_fixtures/general_repo_origin/.gitignore @@ -0,0 +1 @@ +.vscode diff --git a/test_fixtures/general_repo_origin/.vscode/.gitignore b/test_fixtures/general_repo_origin/.vscode/.gitignore new file mode 100644 index 0000000..e69de29 diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_commits_validators/conftest.py b/tests/test_commits_validators/conftest.py index eb2dad7..8cfbefc 100644 --- a/tests/test_commits_validators/conftest.py +++ 
b/tests/test_commits_validators/conftest.py @@ -3,24 +3,32 @@ import pytest import git -from fiasko_bro.repository_info import LocalRepositoryInfo +from tests.utils import remove_repo +from fiasko_bro.repository_info import ProjectFolder +from fiasko_bro import defaults -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") def test_repo(): test_repo_dir = 'test_fixtures{}commits_repo'.format(os.path.sep) + remove_repo(test_repo_dir) repo = git.Repo.init(test_repo_dir) repo.index.add(['initial_file.py']) repo.index.commit('Initial commit') repo.index.add(['second_commit_file.py']) repo.index.commit('win') - return LocalRepositoryInfo(test_repo_dir) + directories_to_skip = defaults.VALIDATION_PARAMETERS['directories_to_skip'] + yield ProjectFolder(test_repo_dir, directories_to_skip) + remove_repo(test_repo_dir) -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") def origin_repo(): origin_repo_dir = 'test_fixtures{}commits_repo_origin'.format(os.path.sep) + remove_repo(origin_repo_dir) repo = git.Repo.init(origin_repo_dir) repo.index.add(['initial_file.py']) repo.index.commit('Initial commit') - return LocalRepositoryInfo(origin_repo_dir) + directories_to_skip = defaults.VALIDATION_PARAMETERS['directories_to_skip'] + yield ProjectFolder(origin_repo_dir, directories_to_skip) + remove_repo(origin_repo_dir) diff --git a/tests/test_commits_validators/test_has_no_commit_messages_from_blacklist.py b/tests/test_commits_validators/test_has_no_commit_messages_from_blacklist.py index f8046a0..c141321 100644 --- a/tests/test_commits_validators/test_has_no_commit_messages_from_blacklist.py +++ b/tests/test_commits_validators/test_has_no_commit_messages_from_blacklist.py @@ -1,23 +1,25 @@ +from fiasko_bro import defaults from fiasko_bro.validators import has_no_commit_messages_from_blacklist -from fiasko_bro.code_validator import CodeValidator def test_has_no_commit_messages_from_blacklist_fails(test_repo): - expected_output = 'git_history_warning', '' 
- last_commits_to_check_amount = CodeValidator._default_settings['last_commits_to_check_amount'] + expected_output = 'git_history_warning', 'win' + last_commits_to_check_amount = defaults.VALIDATION_PARAMETERS['last_commits_to_check_amount'] + bad_commit_messages = defaults.VALIDATION_PARAMETERS['bad_commit_messages'] output = has_no_commit_messages_from_blacklist( - solution_repo=test_repo, - blacklists=CodeValidator.blacklists, + project_folder=test_repo, + bad_commit_messages=bad_commit_messages, last_commits_to_check_amount=last_commits_to_check_amount ) assert output == expected_output def test_has_no_commit_messages_from_blacklist_succeeds(origin_repo): - last_commits_to_check_amount = CodeValidator._default_settings['last_commits_to_check_amount'] + last_commits_to_check_amount = defaults.VALIDATION_PARAMETERS['last_commits_to_check_amount'] + bad_commit_messages = defaults.VALIDATION_PARAMETERS['bad_commit_messages'] output = has_no_commit_messages_from_blacklist( - solution_repo=origin_repo, - blacklists=CodeValidator.blacklists, + project_folder=origin_repo, + bad_commit_messages=bad_commit_messages, last_commits_to_check_amount=last_commits_to_check_amount ) assert output is None diff --git a/tests/test_encoding_validators/conftest.py b/tests/test_encoding_validators/conftest.py index a473c3f..f2fc8c4 100644 --- a/tests/test_encoding_validators/conftest.py +++ b/tests/test_encoding_validators/conftest.py @@ -2,25 +2,25 @@ import pytest -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") def encoding_repo_path(): encoding_repo_dir = 'test_fixtures{}encoding_repo'.format(os.path.sep) return encoding_repo_dir -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") def general_repo_path(): general_repo_dir = 'test_fixtures{}general_repo'.format(os.path.sep) return general_repo_dir -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") def test_repo_with_bom_path(): test_repo_dir = 
'test_fixtures{0}encoding_repo{0}utf8_with_bom'.format(os.path.sep) return test_repo_dir -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") def test_repo_without_bom_path(): test_repo_dir = 'test_fixtures{}general_repo'.format(os.path.sep) return test_repo_dir diff --git a/tests/test_encoding_validators/test_are_sources_in_utf.py b/tests/test_encoding_validators/test_are_sources_in_utf.py index 5ef6b61..b9d71fe 100644 --- a/tests/test_encoding_validators/test_are_sources_in_utf.py +++ b/tests/test_encoding_validators/test_are_sources_in_utf.py @@ -1,18 +1,21 @@ -from fiasko_bro.pre_validation_checks import are_sources_in_utf, VALIDATOR_SETTINGS +from fiasko_bro import defaults +from fiasko_bro.pre_validation_checks import are_sources_in_utf def test_are_sources_in_utf_fail(encoding_repo_path): - output = are_sources_in_utf(encoding_repo_path) + directories_to_skip = defaults.VALIDATION_PARAMETERS['directories_to_skip'] + output = are_sources_in_utf(encoding_repo_path, directories_to_skip) assert isinstance(output, tuple) assert output[0] == 'sources_not_utf_8' def test_are_sources_in_utf_ok(general_repo_path): - output = are_sources_in_utf(general_repo_path) + directories_to_skip = defaults.VALIDATION_PARAMETERS['directories_to_skip'] + output = are_sources_in_utf(general_repo_path, directories_to_skip) assert output is None def test_are_sources_in_utf_uses_whitelist(encoding_repo_path): - VALIDATOR_SETTINGS['directories_to_skip'] = ['win1251'] - output = are_sources_in_utf(encoding_repo_path) + directories_to_skip = ['win1251'] + output = are_sources_in_utf(encoding_repo_path, directories_to_skip) assert output is None diff --git a/tests/test_encoding_validators/test_has_no_BOM.py b/tests/test_encoding_validators/test_has_no_BOM.py index 842aa51..b04deb1 100644 --- a/tests/test_encoding_validators/test_has_no_BOM.py +++ b/tests/test_encoding_validators/test_has_no_BOM.py @@ -1,12 +1,15 @@ +from fiasko_bro import defaults from 
fiasko_bro.pre_validation_checks import has_no_bom def test_has_no_bom_fail(test_repo_with_bom_path): - output = has_no_bom(test_repo_with_bom_path) + directories_to_skip = defaults.VALIDATION_PARAMETERS['directories_to_skip'] + output = has_no_bom(test_repo_with_bom_path, directories_to_skip) assert isinstance(output, tuple) assert output[0] == 'has_bom' def test_has_no_bom_ok(test_repo_without_bom_path): - output = has_no_bom(test_repo_without_bom_path) + directories_to_skip = defaults.VALIDATION_PARAMETERS['directories_to_skip'] + output = has_no_bom(test_repo_without_bom_path, directories_to_skip) assert output is None diff --git a/tests/test_general_validators/conftest.py b/tests/test_general_validators/conftest.py index abb2816..70ca1d1 100644 --- a/tests/test_general_validators/conftest.py +++ b/tests/test_general_validators/conftest.py @@ -1,20 +1,27 @@ import os.path import pytest -import git -from fiasko_bro.repository_info import LocalRepositoryInfo +from tests.utils import initialize_repo, remove_repo +from fiasko_bro import defaults +from fiasko_bro.repository_info import ProjectFolder -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") def test_repo(): test_repo_dir = 'test_fixtures{}general_repo'.format(os.path.sep) - git.Repo.init(test_repo_dir) - return LocalRepositoryInfo(test_repo_dir) + remove_repo(test_repo_dir) + initialize_repo(test_repo_dir, ignore_gitignore=True) + directories_to_skip = defaults.VALIDATION_PARAMETERS['directories_to_skip'] + yield ProjectFolder(test_repo_dir, directories_to_skip) + remove_repo(test_repo_dir) -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") def origin_repo(): origin_repo_dir = 'test_fixtures{}general_repo_origin'.format(os.path.sep) - git.Repo.init(origin_repo_dir) - return LocalRepositoryInfo(origin_repo_dir) + remove_repo(origin_repo_dir) + initialize_repo(origin_repo_dir) + directories_to_skip = defaults.VALIDATION_PARAMETERS['directories_to_skip'] + yield 
ProjectFolder(origin_repo_dir, directories_to_skip) + remove_repo(origin_repo_dir) diff --git a/tests/test_general_validators/test_are_tabs_used_for_indentation.py b/tests/test_general_validators/test_are_tabs_used_for_indentation.py index 74562b5..a18c706 100644 --- a/tests/test_general_validators/test_are_tabs_used_for_indentation.py +++ b/tests/test_general_validators/test_are_tabs_used_for_indentation.py @@ -1,9 +1,11 @@ from fiasko_bro import validators +from fiasko_bro import defaults def test_are_tabs_used_for_indentation_fail_for_py_file(test_repo): expected_output = 'tabs_used_for_indents', 'css_with_tabs.css' output = validators.are_tabs_used_for_indentation( - solution_repo=test_repo, + project_folder=test_repo, + directories_to_skip=defaults.VALIDATION_PARAMETERS['directories_to_skip'] ) assert output == expected_output diff --git a/tests/test_general_validators/test_has_changed_readme.py b/tests/test_general_validators/test_has_changed_readme.py index 552dae5..5ea51b8 100644 --- a/tests/test_general_validators/test_has_changed_readme.py +++ b/tests/test_general_validators/test_has_changed_readme.py @@ -3,9 +3,9 @@ def test_readme_changed_succeeds(test_repo, origin_repo): output = validators.has_changed_readme( - solution_repo=test_repo, + project_folder=test_repo, readme_filename='changed_readme.md', - original_repo=origin_repo, + original_project_folder=origin_repo, ) assert output is None @@ -13,8 +13,8 @@ def test_readme_changed_succeeds(test_repo, origin_repo): def test_readme_changed_fails(test_repo, origin_repo): expected_output = 'need_readme', None output = validators.has_changed_readme( - solution_repo=test_repo, + project_folder=test_repo, readme_filename='unchanged_readme.md', - original_repo=origin_repo, + original_project_folder=origin_repo, ) assert output == expected_output diff --git a/tests/test_general_validators/test_has_frozen_requirements.py b/tests/test_general_validators/test_has_frozen_requirements.py index 0c97419..f29a131 
100644 --- a/tests/test_general_validators/test_has_frozen_requirements.py +++ b/tests/test_general_validators/test_has_frozen_requirements.py @@ -5,13 +5,13 @@ def test_has_frozen_requirements_no_frozen(test_repo): expected_output = 'unfrozen_requirements', _('for example, %s') % 'django' output = validators.has_frozen_requirements( - solution_repo=test_repo, + project_folder=test_repo, ) assert output == expected_output def test_has_frozen_requirements_no_requirements_file(origin_repo): output = validators.has_frozen_requirements( - solution_repo=origin_repo, + project_folder=origin_repo, ) assert output is None diff --git a/tests/test_general_validators/test_has_indents_of_spaces.py b/tests/test_general_validators/test_has_indents_of_spaces.py index 75b7e71..1132fd1 100644 --- a/tests/test_general_validators/test_has_indents_of_spaces.py +++ b/tests/test_general_validators/test_has_indents_of_spaces.py @@ -4,7 +4,7 @@ def test_has_indent_of_four_spaces(test_repo): expected_output = 'indent_not_four_spaces', 'has_indents_of_spaces.py:5' output = validators.has_indents_of_spaces( - solution_repo=test_repo, + project_folder=test_repo, tab_size=4, ) assert output == expected_output diff --git a/tests/test_general_validators/test_has_local_var_named_as_global.py b/tests/test_general_validators/test_has_local_var_named_as_global.py index 355d4be..966c182 100644 --- a/tests/test_general_validators/test_has_local_var_named_as_global.py +++ b/tests/test_general_validators/test_has_local_var_named_as_global.py @@ -1,29 +1,26 @@ from fiasko_bro import validators -from fiasko_bro.code_validator import CodeValidator +from fiasko_bro import defaults from fiasko_bro.i18n import _ def test_has_local_var_named_as_global_fail(test_repo): expected_output = 'has_locals_named_as_globals', _('for example, %s') % 'LOCAL_VAR' - whitelists = CodeValidator.whitelists + ignore_list = defaults.VALIDATION_PARAMETERS['local_var_named_as_global_paths_to_ignore'] output = 
validators.has_local_var_named_as_global( - solution_repo=test_repo, - whitelists=whitelists, - max_indentation_level=CodeValidator._default_settings['max_indentation_level'] + project_folder=test_repo, + local_var_named_as_global_paths_to_ignore=ignore_list, + max_indentation_level=defaults.VALIDATION_PARAMETERS['max_indentation_level'] ) assert output == expected_output def test_has_local_var_named_as_global_ok(test_repo): - whitelists = {'has_local_var_named_as_global': [ - 'local_var_as_global_test_file.py' - ]} - max_indentation_level = CodeValidator._default_settings[ + max_indentation_level = defaults.VALIDATION_PARAMETERS[ 'max_indentation_level' ] output = validators.has_local_var_named_as_global( - solution_repo=test_repo, - whitelists=whitelists, + project_folder=test_repo, + local_var_named_as_global_paths_to_ignore=['local_var_as_global_test_file.py'], max_indentation_level=max_indentation_level, ) assert output is None diff --git a/tests/test_general_validators/test_has_no_calls_with_constants.py b/tests/test_general_validators/test_has_no_calls_with_constants.py index b489f19..b97db4d 100644 --- a/tests/test_general_validators/test_has_no_calls_with_constants.py +++ b/tests/test_general_validators/test_has_no_calls_with_constants.py @@ -1,21 +1,19 @@ +from fiasko_bro import defaults from fiasko_bro import validators -from fiasko_bro.code_validator import CodeValidator def test_has_no_calls_with_constants_fail(test_repo): - whitelists = CodeValidator.whitelists expected_output = 'magic_numbers', 'has_no_vars_with_lambda_test_file.py:9' output = validators.has_no_calls_with_constants( - solution_repo=test_repo, - whitelists=whitelists, + project_folder=test_repo, + valid_calls_with_constants=defaults.VALIDATION_PARAMETERS['valid_calls_with_constants'] ) assert output == expected_output def test_has_no_calls_with_constants_ok(origin_repo): - whitelists = CodeValidator.whitelists output = validators.has_no_calls_with_constants( - 
solution_repo=origin_repo, - whitelists=whitelists, + project_folder=origin_repo, + valid_calls_with_constants=defaults.VALIDATION_PARAMETERS['valid_calls_with_constants'] ) assert output is None diff --git a/tests/test_general_validators/test_has_no_directories_from_blacklist.py b/tests/test_general_validators/test_has_no_directories_from_blacklist.py index 172a571..5bc9154 100644 --- a/tests/test_general_validators/test_has_no_directories_from_blacklist.py +++ b/tests/test_general_validators/test_has_no_directories_from_blacklist.py @@ -1,21 +1,35 @@ from fiasko_bro import validators -from fiasko_bro.code_validator import CodeValidator -def test_has_no_directories_from_blacklist(test_repo): +def test_has_no_directories_from_blacklist_fails_simple_data_directory(test_repo): expected_output = 'data_in_repo', '.vscode' - blacklists = CodeValidator.blacklists output = validators.has_no_directories_from_blacklist( - solution_repo=test_repo, - blacklists=blacklists, + project_folder=test_repo, + data_directories=['.vscode'] ) assert output == expected_output -def test_no_star_imports_ok(origin_repo): - blacklists = CodeValidator.blacklists +def test_has_no_directories_from_blacklist_fails_nested_data_directory(test_repo): + expected_output = 'data_in_repo', '__pycache__' output = validators.has_no_directories_from_blacklist( - solution_repo=origin_repo, - blacklists=blacklists, + project_folder=test_repo, + data_directories=['__pycache__'] + ) + assert output == expected_output + + +def test_has_no_directories_from_blacklist_succeeds_directories_not_tracked(origin_repo): + output = validators.has_no_directories_from_blacklist( + project_folder=origin_repo, + data_directories=['.vscode'] + ) + assert output is None + + +def test_has_no_directories_from_blacklist_succeeds_directories_not_found(test_repo): + output = validators.has_no_directories_from_blacklist( + project_folder=test_repo, + data_directories=['the_name_of_data_dir_01236'] ) assert output is None diff 
--git a/tests/test_general_validators/test_has_no_encoding_declaration.py b/tests/test_general_validators/test_has_no_encoding_declaration.py index 70cecf2..1d221f3 100644 --- a/tests/test_general_validators/test_has_no_encoding_declaration.py +++ b/tests/test_general_validators/test_has_no_encoding_declaration.py @@ -1,20 +1,22 @@ +from fiasko_bro import defaults from fiasko_bro.validators import has_no_encoding_declaration -from fiasko_bro.code_validator import CodeValidator -def test_has_no_encoding_declarations_fails(origin_repo): - expected_output = 'has_encoding_declarations', 'file_with_encoding_declarations.py' +def test_has_no_encoding_declarations_paths_to_ignore_fails(origin_repo): + expected_output = 'has_encoding_declarations_paths_to_ignore', 'file_with_encoding_declarations.py' + ignore_list = defaults.VALIDATION_PARAMETERS['encoding_declarations_paths_to_ignore'] output = has_no_encoding_declaration( - solution_repo=origin_repo, - whitelists=CodeValidator.whitelists + project_folder=origin_repo, + encoding_declarations_paths_to_ignore=ignore_list ) assert output == expected_output -def test_has_no_encoding_declarations_succeeds(test_repo): +def test_has_no_encoding_declarations_paths_to_ignore_succeeds(test_repo): + ignore_list = defaults.VALIDATION_PARAMETERS['encoding_declarations_paths_to_ignore'] output = has_no_encoding_declaration( - solution_repo=test_repo, - whitelists=CodeValidator.whitelists + project_folder=test_repo, + encoding_declarations_paths_to_ignore=ignore_list ) assert output is None diff --git a/tests/test_general_validators/test_has_no_exit_calls_in_functions.py b/tests/test_general_validators/test_has_no_exit_calls_in_functions.py index 609d85c..991b9a4 100644 --- a/tests/test_general_validators/test_has_no_exit_calls_in_functions.py +++ b/tests/test_general_validators/test_has_no_exit_calls_in_functions.py @@ -1,13 +1,25 @@ +from fiasko_bro import defaults from fiasko_bro.validators import has_no_exit_calls_in_functions -from 
fiasko_bro.code_validator import CodeValidator def test_has_no_exit_calls_in_functions_fails(test_repo): expected_output = 'has_exit_calls_in_function', 'function_with_exit_call' - output = has_no_exit_calls_in_functions(test_repo, whitelists=CodeValidator.whitelists) + functions_allowed_to_have_exit_calls = defaults.VALIDATION_PARAMETERS[ + 'functions_allowed_to_have_exit_calls' + ] + output = has_no_exit_calls_in_functions( + test_repo, + functions_allowed_to_have_exit_calls=functions_allowed_to_have_exit_calls + ) assert output == expected_output def test_has_no_exit_calls_in_functions_succeds(origin_repo): - output = has_no_exit_calls_in_functions(origin_repo, whitelists=CodeValidator.whitelists) + functions_allowed_to_have_exit_calls = defaults.VALIDATION_PARAMETERS[ + 'functions_allowed_to_have_exit_calls' + ] + output = has_no_exit_calls_in_functions( + origin_repo, + functions_allowed_to_have_exit_calls=functions_allowed_to_have_exit_calls + ) assert output is None diff --git a/tests/test_general_validators/test_has_no_extra_docstrings.py b/tests/test_general_validators/test_has_no_extra_docstrings.py index f88a117..c3b677a 100644 --- a/tests/test_general_validators/test_has_no_extra_docstrings.py +++ b/tests/test_general_validators/test_has_no_extra_docstrings.py @@ -1,23 +1,24 @@ +from fiasko_bro import defaults from fiasko_bro.validators import has_no_extra_dockstrings -from fiasko_bro.code_validator import CodeValidator def test_has_no_extra_docstrings_fail(test_repo): expected_output = 'extra_comments', 'file_with_too_many_docstrings.py' + ignore_list = defaults.VALIDATION_PARAMETERS['extra_dockstrings_paths_to_ignore'] output = has_no_extra_dockstrings( - solution_repo=test_repo, - whitelists=CodeValidator.whitelists, + project_folder=test_repo, + extra_dockstrings_paths_to_ignore=ignore_list, functions_with_docstrings_percent_limit=40, ) assert output == expected_output def test_has_no_extra_docstrings_succeed(test_repo): - whitelists = 
CodeValidator.whitelists - whitelists['has_no_extra_dockstrings_whitelist'] += ['file_with_too_many_docstrings.py'] + ignore_list = list(defaults.VALIDATION_PARAMETERS['extra_dockstrings_paths_to_ignore']) + ignore_list += ['file_with_too_many_docstrings.py'] output = has_no_extra_dockstrings( - solution_repo=test_repo, - whitelists=whitelists, + project_folder=test_repo, + extra_dockstrings_paths_to_ignore=ignore_list, functions_with_docstrings_percent_limit=40, ) assert output is None diff --git a/tests/test_general_validators/test_has_no_local_imports.py b/tests/test_general_validators/test_has_no_local_imports.py index c8b90ba..a46005f 100644 --- a/tests/test_general_validators/test_has_no_local_imports.py +++ b/tests/test_general_validators/test_has_no_local_imports.py @@ -1,21 +1,19 @@ +from fiasko_bro import defaults from fiasko_bro import validators -from fiasko_bro.code_validator import CodeValidator def test_no_local_imports_fail(test_repo): expected_output = 'has_local_import', 'no_local_imports_test_file.py' - whitelists = CodeValidator.whitelists output = validators.has_no_local_imports( - solution_repo=test_repo, - whitelists=whitelists, + project_folder=test_repo, + local_imports_paths_to_ignore=defaults.VALIDATION_PARAMETERS['local_imports_paths_to_ignore'] ) assert output == expected_output def test_no_local_imports_ok(test_repo): - whitelists = {'has_no_local_imports': ['no_local_imports_test_file.py']} output = validators.has_no_local_imports( - solution_repo=test_repo, - whitelists=whitelists, + project_folder=test_repo, + local_imports_paths_to_ignore=['no_local_imports_test_file.py'] ) assert output is None diff --git a/tests/test_general_validators/test_has_no_long_files.py b/tests/test_general_validators/test_has_no_long_files.py index 75db386..5551819 100644 --- a/tests/test_general_validators/test_has_no_long_files.py +++ b/tests/test_general_validators/test_has_no_long_files.py @@ -1,21 +1,21 @@ +from fiasko_bro import defaults from 
fiasko_bro.validators import has_no_long_files -from fiasko_bro.code_validator import CodeValidator def test_has_no_long_files_fails(test_repo): expected_output = 'file_too_long', 'very_long_file.py' - max_number_of_lines = CodeValidator._default_settings['max_number_of_lines'] + max_number_of_lines = defaults.VALIDATION_PARAMETERS['max_number_of_lines'] output = has_no_long_files( - solution_repo=test_repo, + project_folder=test_repo, max_number_of_lines=max_number_of_lines ) assert output == expected_output def test_has_no_long_files_succeeds(origin_repo): - max_number_of_lines = CodeValidator._default_settings['max_number_of_lines'] + max_number_of_lines = defaults.VALIDATION_PARAMETERS['max_number_of_lines'] output = has_no_long_files( - solution_repo=origin_repo, + project_folder=origin_repo, max_number_of_lines=max_number_of_lines ) assert output is None diff --git a/tests/test_general_validators/test_has_no_nonpythonic_empty_list_validations.py b/tests/test_general_validators/test_has_no_nonpythonic_empty_list_validations.py index 6a9c6bf..8efd374 100644 --- a/tests/test_general_validators/test_has_no_nonpythonic_empty_list_validations.py +++ b/tests/test_general_validators/test_has_no_nonpythonic_empty_list_validations.py @@ -7,6 +7,6 @@ def test_has_no_nonpythonic_empty_list_validations(test_repo): 'has_no_nonpythonic_empty_list_validations.py:2' ) output = validators.has_no_nonpythonic_empty_list_validations( - solution_repo=test_repo, + project_folder=test_repo, ) assert output == expected_output diff --git a/tests/test_general_validators/test_has_no_short_variable_names.py b/tests/test_general_validators/test_has_no_short_variable_names.py index 7cae600..d12548b 100644 --- a/tests/test_general_validators/test_has_no_short_variable_names.py +++ b/tests/test_general_validators/test_has_no_short_variable_names.py @@ -1,25 +1,23 @@ +from fiasko_bro import defaults from fiasko_bro import validators -from fiasko_bro.code_validator import CodeValidator def 
test_has_no_short_variable_names_fail(test_repo): expected_output = 'bad_titles', 'sv' - whitelists = CodeValidator.whitelists minimum_name_length = 3 output = validators.has_no_short_variable_names( - solution_repo=test_repo, - whitelists=whitelists, + project_folder=test_repo, + valid_short_variable_names=defaults.VALIDATION_PARAMETERS['valid_short_variable_names'], minimum_name_length=minimum_name_length, ) assert output == expected_output def test_has_no_short_variable_names_ok(test_repo): - whitelists = {'has_no_short_variable_names': ['sv']} minimum_name_length = 3 output = validators.has_no_short_variable_names( - solution_repo=test_repo, - whitelists=whitelists, + project_folder=test_repo, + valid_short_variable_names=['sv'], minimum_name_length=minimum_name_length, ) assert output is None diff --git a/tests/test_general_validators/test_has_no_star_imports.py b/tests/test_general_validators/test_has_no_star_imports.py index 258ae21..39d5e63 100644 --- a/tests/test_general_validators/test_has_no_star_imports.py +++ b/tests/test_general_validators/test_has_no_star_imports.py @@ -4,6 +4,6 @@ def test_no_star_imports_fail(test_repo): expected_output = 'has_star_import', 'no_star_import_test_file.py' output = validators.has_no_star_imports( - solution_repo=test_repo, + project_folder=test_repo, ) assert output == expected_output diff --git a/tests/test_general_validators/test_has_no_string_literal_sums.py b/tests/test_general_validators/test_has_no_string_literal_sums.py index 540adab..e7d2471 100644 --- a/tests/test_general_validators/test_has_no_string_literal_sums.py +++ b/tests/test_general_validators/test_has_no_string_literal_sums.py @@ -3,5 +3,5 @@ def test_has_no_string_literal_sums_fail(test_repo): expected_output = 'has_string_sum' - output = validators.has_no_string_literal_sums(solution_repo=test_repo) + output = validators.has_no_string_literal_sums(project_folder=test_repo) assert output[0] == expected_output diff --git 
a/tests/test_general_validators/test_has_no_try_without_exception.py b/tests/test_general_validators/test_has_no_try_without_exception.py index c996a9e..40c5ddb 100644 --- a/tests/test_general_validators/test_has_no_try_without_exception.py +++ b/tests/test_general_validators/test_has_no_try_without_exception.py @@ -8,7 +8,7 @@ def test_has_no_try_without_exception_fail(test_repo): _('%s class is too broad; use a more specific exception type') % 'Exception' ) output = validators.has_no_try_without_exception( - solution_repo=test_repo, + project_folder=test_repo, ) assert output == expected_output @@ -19,6 +19,6 @@ def test_has_no_try_without_exception_no_type_exception(origin_repo): '' ) output = validators.has_no_try_without_exception( - solution_repo=origin_repo, + project_folder=origin_repo, ) assert output == expected_output diff --git a/tests/test_general_validators/test_has_no_vars_with_lambda.py b/tests/test_general_validators/test_has_no_vars_with_lambda.py index 0a20c04..74c7de0 100644 --- a/tests/test_general_validators/test_has_no_vars_with_lambda.py +++ b/tests/test_general_validators/test_has_no_vars_with_lambda.py @@ -4,13 +4,13 @@ def test_has_no_vars_with_lambda_fail(test_repo): expected_output = 'named_lambda', 'has_no_vars_with_lambda_test_file.py:4' output = validators.has_no_vars_with_lambda( - solution_repo=test_repo, + project_folder=test_repo, ) assert output == expected_output def test_has_no_vars_with_lambda_ok(origin_repo): output = validators.has_no_vars_with_lambda( - solution_repo=origin_repo, + project_folder=origin_repo, ) assert output is None diff --git a/tests/test_general_validators/test_has_readme_file.py b/tests/test_general_validators/test_has_readme_file.py index e754103..8a4c2b7 100644 --- a/tests/test_general_validators/test_has_readme_file.py +++ b/tests/test_general_validators/test_has_readme_file.py @@ -5,7 +5,7 @@ def test_readme_file_exist(test_repo): readme_filename = 'changed_readme.md' output = 
validators.has_readme_file( - solution_repo=test_repo, + project_folder=test_repo, readme_filename=readme_filename, ) assert output is None @@ -15,7 +15,7 @@ def test_readme_file_not_exist(test_repo): readme_filename = 'not_exist_readme.md' expected_output = 'need_readme', _('there is no %s') % readme_filename output = validators.has_readme_file( - solution_repo=test_repo, + project_folder=test_repo, readme_filename=readme_filename, ) assert output == expected_output diff --git a/tests/test_general_validators/test_has_readme_in_single_language.py b/tests/test_general_validators/test_has_readme_in_single_language.py index ea95624..07c0156 100644 --- a/tests/test_general_validators/test_has_readme_in_single_language.py +++ b/tests/test_general_validators/test_has_readme_in_single_language.py @@ -1,14 +1,14 @@ +from fiasko_bro import defaults from fiasko_bro import validators -from fiasko_bro import CodeValidator def test_has_readme_in_single_language_succeeds(test_repo): readme_filename = 'readme_in_single_language.md' - min_percent = CodeValidator._default_settings[ + min_percent = defaults.VALIDATION_PARAMETERS[ 'min_percent_of_another_language' ] output = validators.has_readme_in_single_language( - solution_repo=test_repo, + project_folder=test_repo, readme_filename=readme_filename, min_percent_of_another_language=min_percent, ) @@ -18,11 +18,11 @@ def test_has_readme_in_single_language_succeeds(test_repo): def test_has_readme_in_single_language_fails(test_repo): readme_filename = 'bilingual_readme.md' expected_output = 'bilingual_readme', '' - min_percent = CodeValidator._default_settings[ + min_percent = defaults.VALIDATION_PARAMETERS[ 'min_percent_of_another_language' ] output = validators.has_readme_in_single_language( - solution_repo=test_repo, + project_folder=test_repo, readme_filename=readme_filename, min_percent_of_another_language=min_percent, ) diff --git a/tests/test_general_validators/test_has_snake_case_vars.py 
b/tests/test_general_validators/test_has_snake_case_vars.py index 92e873c..5f663e6 100644 --- a/tests/test_general_validators/test_has_snake_case_vars.py +++ b/tests/test_general_validators/test_has_snake_case_vars.py @@ -1,28 +1,33 @@ +from fiasko_bro import defaults from fiasko_bro import validators -from fiasko_bro.code_validator import CodeValidator def test_is_snake_case_fail(test_repo): - whitelists = CodeValidator.whitelists + parameters = defaults.VALIDATION_PARAMETERS + valid_non_snake_case_left_hand_values = parameters['valid_non_snake_case_left_hand_values'] + valid_non_snake_case_right_hand_values = parameters['valid_non_snake_case_right_hand_values'] output = validators.is_snake_case( - solution_repo=test_repo, - whitelists=whitelists, + project_folder=test_repo, + valid_non_snake_case_left_hand_values=valid_non_snake_case_left_hand_values, + valid_non_snake_case_right_hand_values=valid_non_snake_case_right_hand_values ) assert isinstance(output, tuple) assert output[0] == 'camel_case_vars' -def test_is_snake_case_ok(test_repo): - expected_output = None - vars_used_not_in_snake_case = [ +def test_is_snake_case_succeeds_for_extended_left_hand_whitelist(test_repo): + parameters = defaults.VALIDATION_PARAMETERS + valid_non_snake_case_left_hand_values = parameters['valid_non_snake_case_left_hand_values'] + valid_non_snake_case_right_hand_values = parameters['valid_non_snake_case_right_hand_values'] + vars_used_not_in_snake_case = { 'CamelCaseVar', 'lowerCamelCaseVar', 'SoMeWieRdCasE' - ] - whitelists = CodeValidator.whitelists - whitelists['is_snake_case'].extend(vars_used_not_in_snake_case) + } + left_hand = valid_non_snake_case_left_hand_values.union(vars_used_not_in_snake_case) output = validators.is_snake_case( - solution_repo=test_repo, - whitelists=whitelists, + project_folder=test_repo, + valid_non_snake_case_left_hand_values=left_hand, + valid_non_snake_case_right_hand_values=valid_non_snake_case_right_hand_values ) - assert output is 
expected_output + assert output is None diff --git a/tests/test_general_validators/test_has_variables_from_blacklist.py b/tests/test_general_validators/test_has_variables_from_blacklist.py index 825fdf8..5c75fdc 100644 --- a/tests/test_general_validators/test_has_variables_from_blacklist.py +++ b/tests/test_general_validators/test_has_variables_from_blacklist.py @@ -1,40 +1,32 @@ +from fiasko_bro import defaults from fiasko_bro import validators -from fiasko_bro.code_validator import CodeValidator def test_has_variables_from_blacklist_fail(test_repo): expected_output = 'bad_titles', 'data' - whitelists = CodeValidator.whitelists - blacklists = CodeValidator.blacklists output = validators.has_variables_from_blacklist( - solution_repo=test_repo, - whitelists=whitelists, - blacklists=blacklists, + project_folder=test_repo, + bad_variables_paths_to_ignore=defaults.VALIDATION_PARAMETERS['bad_variables_paths_to_ignore'], + bad_variable_names=defaults.VALIDATION_PARAMETERS['bad_variable_names'] ) assert output == expected_output def test_has_variables_from_blacklist_with_file_in_whitelist_ok(test_repo): - whitelists = {'has_variables_from_blacklist': [ - 'variables_from_blacklist_test_file.py' - ]} - blacklists = CodeValidator.blacklists output = validators.has_variables_from_blacklist( - solution_repo=test_repo, - whitelists=whitelists, - blacklists=blacklists, + project_folder=test_repo, + bad_variables_paths_to_ignore=['variables_from_blacklist_test_file.py'], + bad_variable_names=defaults.VALIDATION_PARAMETERS['bad_variable_names'] ) assert output is None def test_has_variables_from_blacklist_with_var_in_blacklist_ok(test_repo): - whitelists = CodeValidator.whitelists - blacklists_original = CodeValidator.blacklists - blacklist_for_test = blacklists_original.copy() - blacklist_for_test['has_variables_from_blacklist'].remove('data') + bad_variable_names = list(defaults.VALIDATION_PARAMETERS['bad_variable_names']) + bad_variable_names.remove('data') output = 
validators.has_variables_from_blacklist( - solution_repo=test_repo, - whitelists=whitelists, - blacklists=blacklist_for_test, + project_folder=test_repo, + bad_variables_paths_to_ignore=defaults.VALIDATION_PARAMETERS['bad_variables_paths_to_ignore'], + bad_variable_names=bad_variable_names ) assert output is None diff --git a/tests/test_general_validators/test_is_nesting_too_deep.py b/tests/test_general_validators/test_is_nesting_too_deep.py index 5f2a6b1..02857fd 100644 --- a/tests/test_general_validators/test_is_nesting_too_deep.py +++ b/tests/test_general_validators/test_is_nesting_too_deep.py @@ -1,16 +1,19 @@ +from fiasko_bro import defaults from fiasko_bro import validators -from fiasko_bro.code_validator import CodeValidator def test_is_nesting_too_deep_fails(test_repo): - max_indentation_level = CodeValidator._default_settings[ + max_indentation_level = defaults.VALIDATION_PARAMETERS[ 'max_indentation_level' ] + deep_nesting_paths_to_ignore = defaults.VALIDATION_PARAMETERS[ + 'deep_nesting_paths_to_ignore' + ] output = validators.is_nesting_too_deep( - solution_repo=test_repo, - tab_size=CodeValidator._default_settings['tab_size'], + project_folder=test_repo, + tab_size=defaults.VALIDATION_PARAMETERS['tab_size'], max_indentation_level=max_indentation_level, - whitelists=CodeValidator.whitelists, + deep_nesting_paths_to_ignore=deep_nesting_paths_to_ignore ) assert isinstance(output, tuple) assert output[0] == 'too_nested' @@ -18,13 +21,16 @@ def test_is_nesting_too_deep_fails(test_repo): def test_is_nesting_too_deep_succeeds(origin_repo): - max_indentation_level = CodeValidator._default_settings[ + max_indentation_level = defaults.VALIDATION_PARAMETERS[ 'max_indentation_level' ] + deep_nesting_paths_to_ignore = defaults.VALIDATION_PARAMETERS[ + 'deep_nesting_paths_to_ignore' + ] output = validators.is_nesting_too_deep( - solution_repo=origin_repo, - tab_size=CodeValidator._default_settings['tab_size'], + project_folder=origin_repo, + 
tab_size=defaults.VALIDATION_PARAMETERS['tab_size'], max_indentation_level=max_indentation_level, - whitelists=CodeValidator.whitelists, + deep_nesting_paths_to_ignore=deep_nesting_paths_to_ignore ) assert output is None diff --git a/tests/test_general_validators/test_mccabe_difficulty.py b/tests/test_general_validators/test_mccabe_difficulty.py index 531a9fe..e643ad3 100644 --- a/tests/test_general_validators/test_mccabe_difficulty.py +++ b/tests/test_general_validators/test_mccabe_difficulty.py @@ -5,7 +5,7 @@ def test_mccabe_difficulty(test_repo): max_complexity = 7 expected_output = 'mccabe_failure', 'function_with_big_complexity' output = validators.is_mccabe_difficulty_ok( - solution_repo=test_repo, + project_folder=test_repo, max_complexity=max_complexity ) assert output == expected_output diff --git a/tests/test_general_validators/test_not_validates_response_status_by_comparing_to_200.py b/tests/test_general_validators/test_not_validates_response_status_by_comparing_to_200.py index a1388ff..f856761 100644 --- a/tests/test_general_validators/test_not_validates_response_status_by_comparing_to_200.py +++ b/tests/test_general_validators/test_not_validates_response_status_by_comparing_to_200.py @@ -7,13 +7,13 @@ def test_not_validates_response_status_by_comparing_to_200_fails(test_repo): 'not_validates_response_status_by_comparing_to_200.py:3' ) output = validators.not_validates_response_status_by_comparing_to_200( - solution_repo=test_repo, + project_folder=test_repo, ) assert output == expected_output def test_not_validates_response_status_by_comparing_to_200_succeeds(origin_repo): output = validators.not_validates_response_status_by_comparing_to_200( - solution_repo=origin_repo, + project_folder=origin_repo, ) assert output is None diff --git a/tests/test_general_validators/test_pep8_violations.py b/tests/test_general_validators/test_pep8_violations.py index 343977d..ae5afcf 100644 --- a/tests/test_general_validators/test_pep8_violations.py +++ 
b/tests/test_general_validators/test_pep8_violations.py @@ -1,14 +1,12 @@ +from fiasko_bro import defaults from fiasko_bro import validators -from fiasko_bro.code_validator import CodeValidator -from fiasko_bro.i18n import _ def test_pep8_violations_fail(test_repo): - whitelists = CodeValidator.whitelists output = validators.is_pep8_fine( - solution_repo=test_repo, + project_folder=test_repo, allowed_max_pep8_violations=0, - whitelists=whitelists, + pep8_paths_to_ignore=defaults.VALIDATION_PARAMETERS['pep8_paths_to_ignore'], max_pep8_line_length=79, ) assert isinstance(output, tuple) @@ -16,12 +14,10 @@ def test_pep8_violations_fail(test_repo): def test_pep8_violations_ok(test_repo): - expected_output = None - whitelists = CodeValidator.whitelists output = validators.is_pep8_fine( - solution_repo=test_repo, + project_folder=test_repo, allowed_max_pep8_violations=1000, - whitelists=whitelists, + pep8_paths_to_ignore=defaults.VALIDATION_PARAMETERS['pep8_paths_to_ignore'], max_pep8_line_length=1000, ) - assert output == expected_output + assert output is None diff --git a/tests/test_size_validators/conftest.py b/tests/test_size_validators/conftest.py index d446f1f..c1f55c0 100644 --- a/tests/test_size_validators/conftest.py +++ b/tests/test_size_validators/conftest.py @@ -3,13 +3,13 @@ -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") def general_repo_origin_path(): general_repo_origin_dir = 'test_fixtures{}general_repo_origin'.format(os.path.sep) return general_repo_origin_dir -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") def general_repo_path(): general_repo_dir = 'test_fixtures{}general_repo'.format(os.path.sep) - return general_repo_dir \ No newline at end of file + return general_repo_dir diff --git a/tests/test_size_validators/test_are_repos_to_large.py b/tests/test_size_validators/test_are_repos_to_large.py index dae80d8..c9d2458 100644 --- a/tests/test_size_validators/test_are_repos_to_large.py +++ 
b/tests/test_size_validators/test_are_repos_to_large.py @@ -1,27 +1,42 @@ from fiasko_bro.pre_validation_checks import are_repos_too_large +from fiasko_bro import defaults def test_repo_size_fail_single(general_repo_path): max_py_files_count = 1 - output = are_repos_too_large(general_repo_path, max_py_files_count) + directories_to_skip = defaults.VALIDATION_PARAMETERS['directories_to_skip'] + output = are_repos_too_large(general_repo_path, directories_to_skip, max_py_files_count) assert isinstance(output, tuple) assert output[0] == 'Repo is too large' def test_repo_size_fail_double(general_repo_path, general_repo_origin_path): max_py_files_count = 1 - output = are_repos_too_large(general_repo_path, max_py_files_count, general_repo_origin_path) + directories_to_skip = defaults.VALIDATION_PARAMETERS['directories_to_skip'] + output = are_repos_too_large( + general_repo_path, + directories_to_skip, + max_py_files_count, + general_repo_origin_path + ) assert isinstance(output, tuple) assert output[0] == 'Repo is too large' def test_repo_size_ok_single(general_repo_path): max_py_files_count = 1000 - output = are_repos_too_large(general_repo_path, max_py_files_count) + directories_to_skip = defaults.VALIDATION_PARAMETERS['directories_to_skip'] + output = are_repos_too_large(general_repo_path, directories_to_skip, max_py_files_count) assert output is None def test_repo_size_ok_double(general_repo_path, general_repo_origin_path): max_py_files_count = 1000 - output = are_repos_too_large(general_repo_path, max_py_files_count, general_repo_origin_path) + directories_to_skip = defaults.VALIDATION_PARAMETERS['directories_to_skip'] + output = are_repos_too_large( + general_repo_path, + directories_to_skip, + max_py_files_count, + general_repo_origin_path + ) assert output is None diff --git a/tests/test_validation_interface/test_incorrect_input_handled.py b/tests/test_validation_interface/test_incorrect_input_handled.py new file mode 100644 index 0000000..ee91a3a --- /dev/null 
+++ b/tests/test_validation_interface/test_incorrect_input_handled.py @@ -0,0 +1,17 @@ +import os.path + +import pytest + +from fiasko_bro import validate + + +@pytest.fixture(scope='session') +def non_existent_directory(): + directory = 'test_fixtures{}directory_that_should_not_exist'.format(os.path.sep) + assert not os.path.isdir(directory) + return directory + + +def test_not_existing_file_raises_correct_exception(non_existent_directory): + with pytest.raises(FileNotFoundError): + validate(non_existent_directory) diff --git a/tests/test_validation_interface/test_syntax_errors_handled_properly.py b/tests/test_validation_interface/test_syntax_errors_handled_properly.py index 35819db..3a45311 100644 --- a/tests/test_validation_interface/test_syntax_errors_handled_properly.py +++ b/tests/test_validation_interface/test_syntax_errors_handled_properly.py @@ -2,20 +2,22 @@ import pytest -from .utils import initialize_repo -from fiasko_bro import validate_repo +from tests.utils import initialize_repo, remove_repo +from fiasko_bro import validate -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") def syntax_error_repo(): repo_path = 'test_fixtures{}syntax_error_repo'.format(os.path.sep) + remove_repo(repo_path) initialize_repo(repo_path) - return repo_path + yield repo_path + remove_repo(repo_path) -def test_warnings_show_up_after_fail(syntax_error_repo): +def test_syntax_error_shows_up(syntax_error_repo): expected_output = [ ('syntax_error', 'file_with_syntax_error.py') ] - output = validate_repo(syntax_error_repo) + output = validate(syntax_error_repo) assert output == expected_output diff --git a/tests/test_validation_interface/test_warnings_work.py b/tests/test_validation_interface/test_warnings_work.py index 229aa63..8b209a8 100644 --- a/tests/test_validation_interface/test_warnings_work.py +++ b/tests/test_validation_interface/test_warnings_work.py @@ -2,15 +2,17 @@ import pytest -from .utils import initialize_repo -from fiasko_bro import 
validate_repo +from tests.utils import initialize_repo, remove_repo +from fiasko_bro import validate -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") def long_file_3_spaces_repo_path(): repo_path = 'test_fixtures{}long_file_3_spaces_repo'.format(os.path.sep) + remove_repo(repo_path) initialize_repo(repo_path) - return repo_path + yield repo_path + remove_repo(repo_path) def test_warnings_show_up_after_fail(long_file_3_spaces_repo_path): @@ -19,5 +21,5 @@ def test_warnings_show_up_after_fail(long_file_3_spaces_repo_path): ('file_too_long', 'long_file_3_spaces.py'), ('indent_not_four_spaces', 'long_file_3_spaces.py:16') ] - output = validate_repo(long_file_3_spaces_repo_path) + output = validate(long_file_3_spaces_repo_path) assert output == expected_output diff --git a/tests/test_validation_interface/utils.py b/tests/test_validation_interface/utils.py deleted file mode 100644 index d9c8499..0000000 --- a/tests/test_validation_interface/utils.py +++ /dev/null @@ -1,6 +0,0 @@ -import git - -def initialize_repo(repo_path): - repo = git.Repo.init(repo_path) - repo.index.add(['*']) - repo.index.commit('Initial commit') diff --git a/tests/utils.py b/tests/utils.py new file mode 100644 index 0000000..df570fa --- /dev/null +++ b/tests/utils.py @@ -0,0 +1,17 @@ +import shutil + +import git + + +def initialize_repo(repo_path, ignore_gitignore=False): + arguments = ['.'] + if ignore_gitignore: + arguments.append('-f') # needed to ensure the global gitignore does not disrupt the test + repo = git.Repo.init(repo_path) + repo.git.add(arguments) + repo.index.commit('Initial commit') + + +def remove_repo(repo_path): + git_folder_path = '{}/.git'.format(repo_path) + shutil.rmtree(git_folder_path, ignore_errors=True)