Skip to content

Commit

Permalink
Merge pull request #4 from yswtrue/master
Browse files Browse the repository at this point in the history
Fix an error that occurred when the category was set to "all"
  • Loading branch information
narkhedesam authored Dec 30, 2020
2 parents 4d3a4a8 + ec6fa94 commit 6e371e4
Show file tree
Hide file tree
Showing 3 changed files with 12 additions and 11 deletions.
16 changes: 8 additions & 8 deletions Proxy_List_Scrapper/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,14 +11,14 @@
import requests
from requests.exceptions import ConnectionError

SSL = 'https://www.sslproxies.org/',
GOOGLE = 'https://www.google-proxy.net/',
ANANY = 'https://free-proxy-list.net/anonymous-proxy.html',
UK = 'https://free-proxy-list.net/uk-proxy.html',
US = 'https://www.us-proxy.org/',
NEW = 'https://free-proxy-list.net/',
SPYS_ME = 'http://spys.me/proxy.txt',
PROXYSCRAPE = 'https://api.proxyscrape.com/?request=getproxies&proxytype=all&country=all&ssl=all&anonymity=all',
SSL = 'https://www.sslproxies.org/'
GOOGLE = 'https://www.google-proxy.net/'
ANANY = 'https://free-proxy-list.net/anonymous-proxy.html'
UK = 'https://free-proxy-list.net/uk-proxy.html'
US = 'https://www.us-proxy.org/'
NEW = 'https://free-proxy-list.net/'
SPYS_ME = 'http://spys.me/proxy.txt'
PROXYSCRAPE = 'https://api.proxyscrape.com/?request=getproxies&proxytype=all&country=all&ssl=all&anonymity=all'
PROXYNOVA = 'https://www.proxynova.com/proxy-server-list/'
PROXYLIST_DOWNLOAD_HTTP = 'https://www.proxy-list.download/HTTP'
PROXYLIST_DOWNLOAD_HTTPS = 'https://www.proxy-list.download/HTTPS'
Expand Down
Binary file added dist/Proxy-List-Scrapper-0.2.1.tar.gz
Binary file not shown.
7 changes: 4 additions & 3 deletions setup.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,15 @@
from setuptools import setup, find_packages

# read the contents of your README file
from os import path

from setuptools import find_packages, setup

this_directory = path.abspath(path.dirname(__file__))
with open(path.join(this_directory, 'README.md')) as f:
long_description = f.read()

setup(
name='Proxy-List-Scrapper',
version='0.2.0',
version='0.2.1',
packages=find_packages(),
url='https://pypi.org/project/Proxy-List-Scrapper/',
license='MIT License',
Expand Down

0 comments on commit 6e371e4

Please sign in to comment.