BETTERZON-117: Adding API endpoint for getting the latest crawling status (#63)

Authored by Patrick on 2021-05-23 16:01:30 +02:00; committed via GitHub
parent 1e6d99a713
commit f28dae3272
7 changed files with 137 additions and 6 deletions

View File

@@ -17,6 +17,7 @@ import {usersRouter} from './models/users/users.router';
 import {pricealarmsRouter} from './models/pricealarms/pricealarms.router';
 import {contactpersonsRouter} from './models/contact_persons/contact_persons.router';
 import {favoriteshopsRouter} from './models/favorite_shops/favoriteshops.router';
+import {crawlingstatusRouter} from './models/crawling_status/crawling_status.router';

 const cookieParser = require('cookie-parser');

@@ -53,6 +54,7 @@ app.use('/vendors', vendorsRouter);
 app.use('/pricealarms', pricealarmsRouter);
 app.use('/contactpersons', contactpersonsRouter);
 app.use('/favoriteshops', favoriteshopsRouter);
+app.use('/crawlingstatus', crawlingstatusRouter);

 app.use(errorHandler);
 app.use(notFoundHandler);

View File: crawling_status.interface.ts

@@ -0,0 +1,7 @@
export interface Crawling_Status {
    process_id: number;
    started_timestamp: Date;
    combinations_to_crawl: number;
    successful_crawls: number;
    failed_crawls: number;
}
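
For reference, a value satisfying this interface might look like the following sketch; all field values are illustrative, not taken from this commit.

import {Crawling_Status} from './crawling_status.interface';

// Illustrative values only; real data comes from the crawling_processes table.
const example_status: Crawling_Status = {
    process_id: 42,
    started_timestamp: new Date('2021-05-23T14:00:00Z'),
    combinations_to_crawl: 1500,
    successful_crawls: 1200,
    failed_crawls: 37
};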

View File: crawling_status.router.ts

@@ -0,0 +1,42 @@
/**
 * Required External Modules and Interfaces
 */
import express, {Request, Response} from 'express';
import * as CrawlingStatusService from './crawling_status.service';
import {Crawling_Status} from './crawling_status.interface';
import * as UserService from '../users/users.service';

/**
 * Router Definition
 */
export const crawlingstatusRouter = express.Router();

/**
 * Controller Definitions
 */

// GET crawlingstatus/
crawlingstatusRouter.get('/', async (req: Request, res: Response) => {
    try {
        // Authenticate the requesting user via their session cookie and IP address
        const user_ip = req.socket.remoteAddress ?? '';
        const user = await UserService.checkSessionWithCookie(req.cookies.betterauth, user_ip);

        // The crawling status is only visible to admins
        if (!user.is_admin) {
            res.sendStatus(403);
            return;
        }

        const status: Crawling_Status = await CrawlingStatusService.getCurrent();
        res.status(200).send(status);
    } catch (e) {
        console.log('Error handling a request: ' + e.message);
        res.status(500).send(JSON.stringify({'message': 'Internal Server Error. Try again later.'}));
    }
});
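
A minimal client-side sketch of calling the new endpoint, using the Fetch API (global in browsers and Node 18+). The base URL and the pre-existing admin session cookie are assumptions, not part of this commit:

import {Crawling_Status} from './crawling_status.interface';

// Sketch only: base URL and cookie handling are assumed.
async function fetchCrawlingStatus(betterauthCookie: string): Promise<Crawling_Status> {
    const res = await fetch('http://localhost:3000/crawlingstatus', {
        headers: {cookie: `betterauth=${betterauthCookie}`}
    });
    if (res.status === 403) {
        throw new Error('The session does not belong to an admin user');
    }
    if (!res.ok) {
        throw new Error(`Request failed with status ${res.status}`);
    }
    return await res.json() as Crawling_Status;
}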

View File: crawling_status.service.ts

@@ -0,0 +1,75 @@
import * as dotenv from 'dotenv';

/**
 * Data Model Interfaces
 */
import {Crawling_Status} from './crawling_status.interface';

dotenv.config();

const mariadb = require('mariadb');
const pool = mariadb.createPool({
    host: process.env.DB_HOST,
    user: process.env.DB_USER,
    password: process.env.DB_PASSWORD,
    database: process.env.DB_DATABASE,
    connectionLimit: 5
});

/**
 * Service Methods
 */

/**
 * Fetches and returns the status of the most recently started crawling process.
 * Admin authorization is enforced by the router, not here.
 */
export const getCurrent = async (): Promise<Crawling_Status> => {
    let conn;
    try {
        conn = await pool.getConnection();

        // Get the most recently started crawling process
        let process_info = {
            process_id: -1,
            started_timestamp: new Date(),
            combinations_to_crawl: -1
        };
        const process_rows = await conn.query('SELECT process_id, started_timestamp, combinations_to_crawl FROM crawling_processes ORDER BY started_timestamp DESC LIMIT 1');

        // The mariadb driver attaches a non-row 'meta' key to its results, so skip it
        for (const row in process_rows) {
            if (row !== 'meta') {
                process_info = process_rows[row];
            }
        }

        // Aggregate the per-combination results for that process
        let total_crawls = 0;
        let successful_crawls = 0;
        const rows = await conn.query('SELECT COUNT(status_id) as total, SUM(success) as successful FROM crawling_status WHERE process_id = ?', process_info.process_id);

        for (const row in rows) {
            if (row !== 'meta') {
                total_crawls = Number(rows[row].total);
                // SUM(success) is NULL when no status rows exist yet, so default to 0
                successful_crawls = Number(rows[row].successful ?? 0);
            }
        }

        const failed_crawls = total_crawls - successful_crawls;

        return {
            process_id: process_info.process_id,
            started_timestamp: process_info.started_timestamp,
            combinations_to_crawl: process_info.combinations_to_crawl,
            successful_crawls: successful_crawls,
            failed_crawls: failed_crawls
        };
    } finally {
        if (conn) {
            conn.end();
        }
    }
};
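
Note that the failure count is never stored or queried directly: each crawling_status row carries a success flag, so COUNT(status_id) gives the total, SUM(success) the successes, and failures fall out by subtraction. A toy illustration with invented numbers:

// Toy illustration of the derivation; the numbers are invented.
const total_crawls = 1237;      // COUNT(status_id) over the current process
const successful_crawls = 1200; // SUM(success), with success stored as 0/1
const failed_crawls = total_crawls - successful_crawls; // 37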

View File: crawling_statuses.interface.ts

@@ -0,0 +1,5 @@
import {Crawling_Status} from './crawling_status.interface';

export interface Crawling_Statuses {
    [key: number]: Crawling_Status;
}
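
This interface models a plain object used as a map from process id to status. A brief usage sketch; the helper below is hypothetical, not part of the commit:

import {Crawling_Status} from './crawling_status.interface';
import {Crawling_Statuses} from './crawling_statuses.interface';

// Hypothetical helper: index a list of statuses by their process id.
function indexByProcessId(statuses: Crawling_Status[]): Crawling_Statuses {
    const by_id: Crawling_Statuses = {};
    for (const status of statuses) {
        by_id[status.process_id] = status;
    }
    return by_id;
}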

View File: users.interface.ts

@@ -5,4 +5,5 @@ export interface User {
     password_hash: string;
     registration_date: Date;
     last_login_date: Date;
+    is_admin: boolean;
 }

View File: users.service.ts

@@ -193,18 +193,20 @@ export const checkSession = async (sessionId: string, sessionKey: string, ip: st
         await conn.commit();

         // Get the other required user information and update the user
-        const userQuery = 'SELECT user_id, username, email, registration_date, last_login_date FROM users WHERE user_id = ?';
+        const userQuery = 'SELECT user_id, username, email, registration_date, last_login_date, is_admin FROM users WHERE user_id = ?';
         const userRows = await conn.query(userQuery, userId);
         let username = '';
         let email = '';
         let registrationDate = new Date();
         let lastLoginDate = new Date();
+        let is_admin = false;

         for (const row in userRows) {
             if (row !== 'meta' && userRows[row].user_id != null) {
                 username = userRows[row].username;
                 email = userRows[row].email;
                 registrationDate = userRows[row].registration_date;
                 lastLoginDate = userRows[row].last_login_date;
+                is_admin = userRows[row].is_admin;
             }
         }

@@ -215,7 +217,8 @@ export const checkSession = async (sessionId: string, sessionKey: string, ip: st
             email: email,
             password_hash: 'HIDDEN',
             registration_date: registrationDate,
-            last_login_date: lastLoginDate
+            last_login_date: lastLoginDate,
+            is_admin: is_admin
         };

     } catch (err) {

@@ -225,8 +228,6 @@ export const checkSession = async (sessionId: string, sessionKey: string, ip: st
             conn.end();
         }
     }
-
-    return {} as User;
 };

 /**

@@ -312,6 +313,4 @@ export const checkUsernameAndEmail = async (username: string, email: string): Pr
             conn.end();
         }
     }
-
-    return {hasProblems: true, messages: ['Internal server error'], codes: [3]};
 };
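
With is_admin now part of the User object returned by the session check, callers can gate admin-only functionality directly on it, as the new router does. A minimal sketch; the import path and the checkSessionWithCookie signature are assumed from their use in the router above:

import * as UserService from './models/users/users.service';

// Sketch only; path and signature assumed from the router code above.
async function requireAdmin(betterauthCookie: string, ip: string): Promise<void> {
    const user = await UserService.checkSessionWithCookie(betterauthCookie, ip);
    if (!user.is_admin) {
        throw new Error('Admin privileges required');
    }
}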