Compare commits

...

3 Commits

Author SHA1 Message Date
63cbac5490 BETTERZON-58: Fixing API endpoint of the crawler
- The list of products in the API request was treated as a string, so only the first product was crawled (see the sketch below the changed-files summary)
2021-05-17 17:53:20 +02:00
73effffc89 BETTERZON-58: Adding access key verification 2021-05-17 17:32:52 +02:00
c8d37d60f8 BETTERZON-58: Fixing SQL insert 2021-05-17 17:25:01 +02:00
3 changed files with 30 additions and 13 deletions
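
The first commit's fix is easier to see in isolation. Below is a minimal sketch, not code from this repository (the Echo resource and /echo route are made up), of how flask_restful's reqparse treats repeated values: without action='append' only a single value survives parsing, while type=int with action='append' collects repeated form values or a JSON array into a list of ints.

from flask import Flask
from flask_restful import Api, Resource, reqparse

app = Flask(__name__)
api = Api(app)

parser = reqparse.RequestParser()
# Without action='append', repeated 'products' values collapse to a single value;
# with type=int and action='append', reqparse yields a list of ints.
parser.add_argument('products', type=int, action='append')

class Echo(Resource):
    def post(self):
        args = parser.parse_args()
        return {'products': args['products']}  # e.g. [1, 2, 3]

api.add_resource(Echo, '/echo')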

View File

@@ -1,3 +1,5 @@
+import os
 from flask import Flask
 from flask_restful import Resource, Api, reqparse
@@ -8,8 +10,8 @@ api = Api(app)
 # To parse request data
 parser = reqparse.RequestParser()
-parser.add_argument('key')
-parser.add_argument('products')
+parser.add_argument('key', type=str)
+parser.add_argument('products', type=int, action='append')
 class CrawlerApi(Resource):
@@ -19,8 +21,12 @@ class CrawlerApi(Resource):
     def post(self):
         # Accept crawler request here
         args = parser.parse_args()
-        crawler.crawl(args['products'])
-        return args
+        access_key = os.getenv('CRAWLER_ACCESS_KEY')
+        if(args['key'] == access_key):
+            crawler.crawl(args['products'])
+            return {'message': 'success'}
+        else:
+            return {'message': 'Wrong access key'}
 api.add_resource(CrawlerApi, '/')
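
For reference, a call against the updated endpoint could look like the sketch below; the host, port, and JSON body shape are assumptions for illustration, not something this diff specifies.

import requests  # hypothetical client script, not part of the repository

payload = {'key': 'value-of-CRAWLER_ACCESS_KEY', 'products': [1, 2, 3]}
resp = requests.post('http://localhost:5000/', json=payload)
print(resp.json())  # {'message': 'success'} on a key match, otherwise {'message': 'Wrong access key'}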

View File

@@ -34,13 +34,19 @@ def crawl(product_ids: [int]) -> dict:
         # Call the appropriate vendor crawling function and append the result to the list of crawled data
         if product_vendor_info['vendor_id'] == 1:
             # Amazon
-            crawled_data.append(__crawl_amazon__(product_vendor_info))
+            data = __crawl_amazon__(product_vendor_info)
+            if data:
+                crawled_data.append(data)
         elif product_vendor_info['vendor_id'] == 2:
             # Apple
-            crawled_data.append(__crawl_apple__(product_vendor_info))
+            data = __crawl_apple__(product_vendor_info)
+            if data:
+                crawled_data.append(data)
         elif product_vendor_info['vendor_id'] == 3:
             # Media Markt
-            crawled_data.append(__crawl_mediamarkt__(product_vendor_info))
+            data = __crawl_mediamarkt__(product_vendor_info)
+            if data:
+                crawled_data.append(data)
         else:
             products_with_problems.append(product_vendor_info)
             continue
@@ -67,9 +73,14 @@ def __crawl_amazon__(product_info: dict) -> tuple:
     try:
         price = int(soup.find(id='priceblock_ourprice').get_text().replace(".", "").replace(",", "").replace("€", "").strip())
     except RuntimeError:
-        price = ''
+        price = -1
+    except AttributeError:
+        price = -1
-    return (product_info['product_id'], product_info['vendor_id'], price)
+    if price != -1:
+        return (product_info['product_id'], product_info['vendor_id'], price)
+    else:
+        return None
 def __crawl_apple__(product_info: dict) -> tuple:
@@ -78,7 +89,8 @@ def __crawl_apple__(product_info: dict) -> tuple:
     :param product_info: A dict with product info containing product_id, vendor_id, url
     :return: A tuple with the crawled data, containing (product_id, vendor_id, price_in_cents)
     """
-    return (product_info['product_id'], product_info['vendor_id'], 123)
+    #return (product_info['product_id'], product_info['vendor_id'], 123)
+    pass
 def __crawl_mediamarkt__(product_info: dict) -> tuple:
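
A possible follow-up, purely a sketch and not part of this diff: the three identical call-check-append blocks in crawl() could be collapsed into a vendor dispatch table, which also keeps the new None-filtering in one place.

# Hypothetical refactor sketch, reusing the names from the diff above.
CRAWLERS = {
    1: __crawl_amazon__,      # Amazon
    2: __crawl_apple__,       # Apple
    3: __crawl_mediamarkt__,  # Media Markt
}

crawler_fn = CRAWLERS.get(product_vendor_info['vendor_id'])
if crawler_fn is None:
    products_with_problems.append(product_vendor_info)
else:
    data = crawler_fn(product_vendor_info)
    if data:  # vendor crawlers now return None when no price could be parsed
        crawled_data.append(data)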

View File

@@ -35,7 +35,7 @@ def getProductsForVendor(vendor_id: int) -> [{}]:
     conn = __getConnection__()
     cur = conn.cursor()
-    query = 'SELECT product_id, url FROM product_links WHERE vendor_id = %s' % vendor_id
+    query = 'SELECT product_id, url FROM product_links WHERE vendor_id = %s'
     cur.execute(query, (vendor_id,))
@@ -53,8 +53,7 @@ def getProductLinksForProduct(product_id: int) -> [dict]:
     conn = __getConnection__()
     cur = conn.cursor()
-    query = 'SELECT vendor_id, url FROM product_links WHERE product_id = %s' % product_id
-    print(query)
+    query = 'SELECT vendor_id, url FROM product_links WHERE product_id = %s'
     cur.execute(query, (product_id,))
     products = list(map(lambda x: {'product_id': product_id, 'vendor_id': x[0], 'url': x[1]}, cur.fetchall()))
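
The point of both query changes is to stop building SQL with Python's % interpolation and let the driver bind parameters instead, which closes an SQL injection hole. A self-contained sketch of the difference, assuming a DB-API driver such as pymysql (the diff does not show which driver the project uses):

import pymysql  # assumed driver, for illustration only

conn = pymysql.connect(host='localhost', user='user', password='...', database='betterzon')
cur = conn.cursor()

product_id = "1 OR 1=1"  # hostile input a caller could pass

# Unsafe: Python splices the raw value into the SQL text before the driver sees it.
# query = 'SELECT vendor_id, url FROM product_links WHERE product_id = %s' % product_id

# Safe (what these commits switch to): the driver escapes the value
# and binds it as a parameter.
query = 'SELECT vendor_id, url FROM product_links WHERE product_id = %s'
cur.execute(query, (product_id,))
print(cur.fetchall())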