diff --git a/src/scraper.py b/src/scraper.py
index 036de4fb..aaa78d56 100644
--- a/src/scraper.py
+++ b/src/scraper.py
@@ -15,6 +15,8 @@
 import formatter
 from bs4 import BeautifulSoup
 import re
+import csv_writer
+import csv
 
 
 def httpsGet(URL):
@@ -31,7 +33,7 @@ def httpsGet(URL):
     return BeautifulSoup(soup1.prettify(), "html.parser")
 
 
-def searchAmazon(query, df_flag=0):
+def searchAmazon(query, df_flag=0):
     """
     The searchAmazon function scrapes amazon.com
     """
@@ -47,7 +49,7 @@ def searchAmazon(query, df_flag=0):
         products.append(product)
     return products
 
-def searchWalmart(query, df_flag=0):
+def searchWalmart(query, df_flag=0):
     """
     The searchWalmart function scrapes walmart.com
     """
@@ -65,7 +67,7 @@ def searchWalmart(query, df_flag=0):
         products.append(product)
     return products
 
-def searchEtsy(query, df_flag=0):
+def searchEtsy(query, df_flag=0):
     """
     The searchEtsy function scrapes Etsy.com
     """
@@ -82,3 +84,15 @@ def searchEtsy(query, df_flag=0):
         product = formatter.formatResult("Etsy", titles, prices, links, ratings, df_flag)
         products.append(product)
     return products
+
+def driver(product, num=None, df_flag=0, csv=False, cd=None):
+    """Search Amazon, Walmart and Etsy for product; optionally write all results to a CSV in cd."""
+    products_1 = searchAmazon(product, df_flag)
+    products_2 = searchWalmart(product, df_flag)
+    products_3 = searchEtsy(product, df_flag)
+    results = products_1 + products_2 + products_3
+    if csv:
+        print("CSV Saved at: ", cd)
+        print("File Name:", csv_writer.write_csv(results, product, cd))
+    return products_1[:num] + products_2[:num] + products_3[:num]
+
diff --git a/src/slash.py b/src/slash.py
index 5f507c82..42f468b5 100644
--- a/src/slash.py
+++ b/src/slash.py
@@ -14,7 +14,7 @@
 import os
 import csv
 import full_version
-import csv_writer
+
 
 
 
@@ -27,23 +27,18 @@ def main():
     parser.add_argument('--link', action='store_true', help="Show links in the table")
     parser.add_argument('--des', action='store_true', help="Sort in descending (non-increasing) order")
     parser.add_argument('--cd', type=str, help="Change directory to save CSV file with search results", 
        default=os.getcwd())
+    parser.add_argument('--csv', action='store_true', help="Save results as CSV",)
     args = parser.parse_args()
     if args.full=='T':
         full_version.full_version().driver()
         return
-    products_1 = scraper.searchAmazon(args.search)
-    products_2 = scraper.searchWalmart(args.search)
-    products_3 = scraper.searchEtsy(args.search)
+    results = scraper.driver(args.search, args.num, csv=args.csv, cd=args.cd)
+
     for sortBy in args.sort:
-        products1 = formatter.sortList(products_1, sortBy, args.des)[:args.num]
-        products2 = formatter.sortList(products_2, sortBy, args.des)[:args.num]
-        products3 = formatter.sortList(products_3, sortBy, args.des)[:args.num]
-        results = products1 + products2 + products3
-        results_1 = products_1 + products_2 + products_3
-        results = formatter.sortList(results, "ra" , args.des)
+        results = formatter.sortList(results, sortBy , args.des)
     print(args.des)
@@ -51,8 +46,6 @@ def main():
     print(tabulate(results, headers="keys", tablefmt="github"))
     print()
     print()
-    print("CSV Saved at: ",args.cd)
-    print("File Name:", csv_writer.write_csv(results_1, args.search, args.cd))
 
 if __name__ == '__main__':
     main()