import logging
import os

import pymysql
import requests
from dotenv import load_dotenv

import sql_connction_handler as sql_handler

API_URL = 'https://creativecommons.tankerkoenig.de/json/detail.php'


def get_prices_for_station(station_id, api_id: str, api_key: str) -> list:
    """
    Crawls the current prices for the given station and returns them as a list of
    (station_id, price, fuel_type) tuples for E5, E10 and Diesel.
    :param station_id: The internal id of the station
    :param api_id: The Tankerkoenig api id of the station
    :param api_key: The Tankerkoenig api key
    :return: The list of price tuples, empty if the request was not successful
    """
    url = f'{API_URL}?id={api_id}&apikey={api_key}'
    station_details = requests.get(url).json()
    if station_details['ok']:
        details = station_details['station']
        return [
            (station_id, details['e5'], 'E5'),
            (station_id, details['e10'], 'E10'),
            (station_id, details['diesel'], 'Diesel'),
        ]
    return []


def save_prices_to_sql(prices: list, conn: pymysql.Connection):
    """
    Saves the list of price tuples to sql
    :param prices: The list of (station_id, price, fuel_type) tuples to save
    :param conn: The pymysql connection
    """
    stmt = 'INSERT INTO prices (station, price, fuel_type) VALUES (%s, %s, %s)'
    cur = conn.cursor()
    success = cur.executemany(stmt, prices)
    if success:
        conn.commit()
    cur.close()


def get_stations_from_sql(conn: pymysql.Connection) -> list:
    """
    Gets all stations that should be crawled from sql
    :param conn: The pymysql connection
    :return: The list of stations as dicts with 'id' and 'api_id' keys
    """
    stmt = 'SELECT station_id, tankerkoenig_id FROM stations WHERE active_crawling = 1'
    cur = conn.cursor()
    cur.execute(stmt)
    res = cur.fetchall()
    cur.close()
    stations = []
    for station in res:
        stations.append({
            'id': station[0],
            'api_id': station[1]
        })
    return stations


def crawl_and_save():
    """
    Main function, crawls prices from the api and saves them to sql
    """
    conn = sql_handler.get_connection()
    api_key = os.getenv('TANKERKOENIG_API_KEY')
    if not conn or not api_key:
        logging.warning('Could not fetch prices as either SQL connection or API key are missing!')
        return
    stations = get_stations_from_sql(conn)
    station_prices = []
    for station in stations:
        prices = get_prices_for_station(station['id'], station['api_id'], api_key)
        station_prices.extend(prices)
    save_prices_to_sql(station_prices, conn)


if __name__ == '__main__':
    load_dotenv()
    crawl_and_save()