Mirror of https://github.com/Mueller-Patrick/Betterzon.git (synced 2024-12-23)

Compare commits: f98d1fdb24...63cbac5490 (3 commits)

63cbac5490
73effffc89
c8d37d60f8
@@ -1,3 +1,5 @@
+import os
+
 from flask import Flask
 from flask_restful import Resource, Api, reqparse
 
@@ -8,8 +10,8 @@ api = Api(app)
 
 # To parse request data
 parser = reqparse.RequestParser()
-parser.add_argument('key')
-parser.add_argument('products')
+parser.add_argument('key', type=str)
+parser.add_argument('products', type=int, action='append')
 
 
 class CrawlerApi(Resource):
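For context, a minimal sketch (not part of the commit) of how these two arguments parse, assuming flask_restful's default argument locations. With action='append', a repeated products field is collected into a list of ints:

# Illustrative only; names and values are assumptions.
from flask import Flask
from flask_restful import reqparse

app = Flask(__name__)

parser = reqparse.RequestParser()
parser.add_argument('key', type=str)
parser.add_argument('products', type=int, action='append')

with app.test_request_context('/?key=secret&products=1&products=2'):
    print(parser.parse_args())  # expected: {'key': 'secret', 'products': [1, 2]}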
@@ -19,8 +21,12 @@ class CrawlerApi(Resource):
     def post(self):
         # Accept crawler request here
         args = parser.parse_args()
-        crawler.crawl(args['products'])
-        return args
+        access_key = os.getenv('CRAWLER_ACCESS_KEY')
+        if args['key'] == access_key:
+            crawler.crawl(args['products'])
+            return {'message': 'success'}
+        else:
+            return {'message': 'Wrong access key'}
 
 
 api.add_resource(CrawlerApi, '/')
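A hedged usage sketch (not from the commit): with the service running and CRAWLER_ACCESS_KEY set in its environment, a crawl could be triggered roughly as below. The host, port, and payload values are assumptions.

import os
import requests

resp = requests.post(
    'http://localhost:5000/',
    json={'key': os.getenv('CRAWLER_ACCESS_KEY'), 'products': [1, 2, 3]},
)
print(resp.json())  # {'message': 'success'} if the key matches, else {'message': 'Wrong access key'}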
@@ -34,13 +34,19 @@ def crawl(product_ids: [int]) -> dict:
         # Call the appropriate vendor crawling function and append the result to the list of crawled data
         if product_vendor_info['vendor_id'] == 1:
             # Amazon
-            crawled_data.append(__crawl_amazon__(product_vendor_info))
+            data = __crawl_amazon__(product_vendor_info)
+            if data:
+                crawled_data.append(data)
         elif product_vendor_info['vendor_id'] == 2:
             # Apple
-            crawled_data.append(__crawl_apple__(product_vendor_info))
+            data = __crawl_apple__(product_vendor_info)
+            if data:
+                crawled_data.append(data)
         elif product_vendor_info['vendor_id'] == 3:
             # Media Markt
-            crawled_data.append(__crawl_mediamarkt__(product_vendor_info))
+            data = __crawl_mediamarkt__(product_vendor_info)
+            if data:
+                crawled_data.append(data)
         else:
             products_with_problems.append(product_vendor_info)
             continue
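As a design note, the repeated "call, then append only truthy results" pattern could be written once with a vendor-id-to-crawler map. A sketch only, assuming all three crawler functions share the same signature and that this runs inside the same loop body:

CRAWLERS = {
    1: __crawl_amazon__,      # Amazon
    2: __crawl_apple__,       # Apple
    3: __crawl_mediamarkt__,  # Media Markt
}

crawl_func = CRAWLERS.get(product_vendor_info['vendor_id'])
if crawl_func is None:
    # Unknown vendor: record it and move on, as the else branch above does
    products_with_problems.append(product_vendor_info)
    continue
data = crawl_func(product_vendor_info)
if data:
    crawled_data.append(data)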
@@ -67,9 +73,14 @@ def __crawl_amazon__(product_info: dict) -> tuple:
     try:
         price = int(soup.find(id='priceblock_ourprice').get_text().replace(".", "").replace(",", "").replace("€", "").strip())
     except RuntimeError:
-        price = ''
+        price = -1
+    except AttributeError:
+        price = -1
 
-    return (product_info['product_id'], product_info['vendor_id'], price)
+    if price != -1:
+        return (product_info['product_id'], product_info['vendor_id'], price)
+    else:
+        return None
 
 
 def __crawl_apple__(product_info: dict) -> tuple:
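The added except AttributeError branch matters because soup.find() returns None when the price element is missing, so the chained .get_text() raises AttributeError, which the existing except RuntimeError never caught. A worked example of the string-to-cents conversion, with an illustrative price string:

# '1.234,56 €' -> drop thousands dot, decimal comma, currency sign -> 123456 cents
text = '1.234,56 €'
price = int(text.replace('.', '').replace(',', '').replace('€', '').strip())
assert price == 123456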
@@ -78,7 +89,8 @@ def __crawl_apple__(product_info: dict) -> tuple:
     :param product_info: A dict with product info containing product_id, vendor_id, url
     :return: A tuple with the crawled data, containing (product_id, vendor_id, price_in_cents)
     """
-    return (product_info['product_id'], product_info['vendor_id'], 123)
+    # return (product_info['product_id'], product_info['vendor_id'], 123)
+    pass
 
 
 def __crawl_mediamarkt__(product_info: dict) -> tuple:
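Note that a body of pass makes __crawl_apple__ return None, which the new "if data:" guard in crawl() filters out, so Apple products are now skipped instead of being recorded with the dummy price of 123. Illustration (hypothetical input values):

result = __crawl_apple__({'product_id': 1, 'vendor_id': 2, 'url': 'https://example.com'})
assert result is None  # skipped by the "if data:" guard upstream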
@@ -35,7 +35,7 @@ def getProductsForVendor(vendor_id: int) -> [{}]:
     conn = __getConnection__()
     cur = conn.cursor()
 
-    query = 'SELECT product_id, url FROM product_links WHERE vendor_id = %s' % vendor_id
+    query = 'SELECT product_id, url FROM product_links WHERE vendor_id = %s'
 
     cur.execute(query, (vendor_id,))
 
@@ -53,8 +53,7 @@ def getProductLinksForProduct(product_id: int) -> [dict]:
     conn = __getConnection__()
     cur = conn.cursor()
 
-    query = 'SELECT vendor_id, url FROM product_links WHERE product_id = %s' % product_id
-    print(query)
+    query = 'SELECT vendor_id, url FROM product_links WHERE product_id = %s'
 
     cur.execute(query, (product_id,))
     products = list(map(lambda x: {'product_id': product_id, 'vendor_id': x[0], 'url': x[1]}, cur.fetchall()))
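Both query fixes replace Python %-interpolation with driver-side parameter binding. Beyond the SQL-injection risk, the old code would likely have failed at runtime with common MySQL drivers (e.g. pymysql): cur.execute(query, (vendor_id,)) expects a %s placeholder that the pre-interpolated string no longer contains. A sketch of the difference, assuming a DB-API cursor:

# Unsafe and broken: the value is baked into the SQL string, yet a
# parameter is still passed for a placeholder that no longer exists.
query = 'SELECT product_id, url FROM product_links WHERE vendor_id = %s' % vendor_id
cur.execute(query, (vendor_id,))  # typically raises: not all arguments converted

# Safe: the driver escapes vendor_id itself, preventing SQL injection.
query = 'SELECT product_id, url FROM product_links WHERE vendor_id = %s'
cur.execute(query, (vendor_id,))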