diff --git a/README.md b/README.md index d81511761..5c3f7ead3 100644 --- a/README.md +++ b/README.md @@ -5,23 +5,18 @@ A simple web scraping tool for recipe sites I use in a project of mine that makes sense to live as a separate package. - pip install git+git://github.com/RyanNoelk/recipe-scraper.git + pip install git+git://github.com/RyanNoelk/recipe-scraper.git@1.0.3 then: from recipe_scrapers import scrap_me # give the url as a string, it can be url from any site listed below - scrap_me = scrap_me('http://allrecipes.com/Recipe/Apple-Cake-Iv/Detail.aspx') - - scrap_me.title() - scrap_me.total_time() - scrap_me.ingredients() - scrap_me.instructions() - - or - - scrap_me.data() + try: + scrap_me = scrap_me('https://www.budgetbytes.com/2017/03/lemon-garlic-roasted-chicken') + print(scrap_me.data()) + except KeyError: + print('Website is not supported.') ### Contribute diff --git a/example.py b/example.py new file mode 100755 index 000000000..e7562bf25 --- /dev/null +++ b/example.py @@ -0,0 +1,10 @@ +#!/usr/bin/env python +# encoding: utf-8 + +from recipe_scrapers import scrap_me + +try: + scrap_me = scrap_me('https://www.budgetbytes.com/2017/03/lemon-garlic-roasted-chicken') + print(scrap_me.data()) +except KeyError: + print("Website is not supported.") 
diff --git a/run_tester.py b/run_tester.py deleted file mode 100755 index 154af2842..000000000 --- a/run_tester.py +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 - -from recipe_scrapers import scrap_me - -# give the url as a string, it can be url from any site listed below -scrap_me = scrap_me('https://www.budgetbytes.com/2017/03/lemon-garlic-roasted-chicken') - -print(scrap_me.data()) diff --git a/setup.py b/setup.py index 4fc8995b0..8e63eae11 100644 --- a/setup.py +++ b/setup.py @@ -10,7 +10,7 @@ setup( name='openeats-recipe-scraper', url='https://github.com/RyanNoelk/recipe-scraper/', - version='1.0.2', + version='1.0.3', description='Python package, scraping recipes from all over the internet', keywords='python recipes scraper harvest', long_description=README,