Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion api/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
"description": "a basic API for interacting with databases of literacy app data",
"main": "app.js",
"scripts": {
"test": "mocha --recursive -reporter spec -require test/setup.js"
"test": "mocha --recursive --reporter spec"
},
"repository": {
"type": "git",
Expand Down
32 changes: 0 additions & 32 deletions api/src/CL-IRIs.json

This file was deleted.

156 changes: 60 additions & 96 deletions api/src/api.js
Original file line number Diff line number Diff line change
@@ -1,133 +1,97 @@
const http = require('http');
const url = require('url');
const express = require('express');
const router = express.Router();
const mustache = require('mustache');
const config = require('./config');
const tableMap = require('./tableMap');
const {
MissingDataError,
MalformedArgumentError,
BigQueryManager,
BigQueryParser,
SqlLoader
} = require('./helperClasses');
const queryStrings = new SqlLoader(config.sqlFiles, './src/sql');

const DAYINSECONDS = 86400;
/**
* returns the table containing records from the given app id
* @param{string} id the app id
* @returns{string} the table name in SQL table format
*/
const getDataset = function (map, id, getHistoric) {
const { BigQuery } = require('@google-cloud/bigquery');
const fs = require('fs');

const getDataset = function (id) {
const map = tableMap;

const regex = /(^[a-z]{2,3})([\.][a-z0-9]*)+$/gmi;
if (!id.match(regex)) {
const msg = `cannot parse app id: '${id}'. Check formatting and try again`;
throw new MalformedArgumentError(msg);
}
const obj = map[id];
if (!obj ) {
if (!obj) {
const msg = `could not find a table for app id: '${id}'`;
const err = new MissingDataError(msg);
throw err;
}
return `${obj.project}.${obj.dataset}`;
}

function sendRows(res, rows, nextCursor) {
const parser = new BigQueryParser(config.sourceMapping);
const resObj = parser.formatRowsToJson(rows);
return res.status(200).json({nextCursor, size: resObj.length, data: resObj});
/**
 * Number of whole days (rounded up) between `timestamp` and now.
 * @param {number} timestamp - epoch timestamp; it is divided by 1000 before
 *   Date construction, so it is presumably in microseconds — TODO confirm
 *   against the `from` query parameter's actual unit in callers.
 * @returns {number} elapsed days, ceiling-rounded (future timestamps yield <= 0)
 */
function getDayOffset(timestamp) {
  const MS_PER_DAY = 24 * 60 * 60 * 1000;
  const then = new Date(Math.round(timestamp / 1000));
  const elapsedMs = new Date() - then;
  return Math.ceil(elapsedMs / MS_PER_DAY);
}

/**
* handler for GET requests to /fetch_latest
* returns all records for app_id logged after the given cursor
* @param{obj} req the request
* @param{obj} res the response
*/
async function fetchLatestHandler (req, res, next) {
const searchParams = req.query;
async function fetchEvents(params) {
const { appId, source, from, country, limit } = params;
console.log(`Fetching data for: ${JSON.stringify(params)}`)

if (!searchParams.app_id) { //TODO: Edit proofreading to allow timestamp OR job hash
return res.status(400).send({msg: config.errNoId});
} else if (!searchParams.from && !searchParams.token) {
return res.status(400).send({msg: config.errNoCursor});
} else if (!searchParams.from.match(/[0-9]{1,10}/gmi)) {
return res.status(400).send({msg: config.errBadTimestamp});
}
const sql = queryStrings.getQueryString(config.sqlFiles.fetchLatest)
const dataset = getDataset(appId)
const sql = fs.readFileSync('src/sql/fetch_latest.sql').toString();

const query = mustache.render(sql, { dataset })
const options = {
query: sql,
location: config.fetchLatestQuery.loc,
query: query,
params: {
pkg_id: searchParams.app_id,
ref_id: searchParams.attribution_id || '',
user_id: searchParams.user_id || '',
event: searchParams.event || '',
cursor: Number(searchParams.from) * 1000000, //convert to micros
//only search back as far as we need to
range: Math.ceil(((Date.now()/1000) - searchParams.from)/DAYINSECONDS),
},
types: {
pkg_id: 'STRING',
ref_id: 'STRING',
cursor: 'INT64',
range: 'INT64',
traffic_source: source,
range: getDayOffset(from),
cursor: from,
country: country,
limit: limit,
},
location: 'US',
};

try{
const dataset = getDataset(tableMap, options.params.pkg_id);
options.query = mustache.render(options.query, {dataset: dataset});
const maxRows = config.fetchLatestQuery.MAXROWS;
const callback = (rows, id, token) => {
if(id && token) {
let combinedToken = encodeURIComponent(`${id}/${token}`);
sendRows(res, rows, combinedToken);
} else {
sendRows(res, rows, null);
}
};
if(searchParams.token) {
let params = decodeURIComponent(searchParams.token).split('/');
const job = {id: params[0], token: params[1]};
//TODO decode token into job id and token
const bq = new BigQueryManager(options, maxRows, job.id, job.token);
bq.fetchNext(callback);
}
else {
const bq = new BigQueryManager(options, maxRows);
bq.start(callback);
}
} catch (e) {
next(e)
}
const bigquery = new BigQuery();

const [job] = await bigquery.createQueryJob(options);
console.log(`Job ${job.id} started.`);

const [rows] = await job.getQueryResults();
return rows
}

function apiErrorHandler(err, req, res, next) {
if(err.name === 'MalformedArgumentError') {
return res.status(400).send({
msg: err.message,
err: err,
});
async function fetchLatestHandler(req, res, next) {
const searchParams = req.query;

const { app_id: appId, from, traffic_source: source, limit, country } = searchParams;


// TODO: cleanup checks, preferably with a Type or something simple
if (!appId) {
return res.status(400).send({ msg: config.errNoId });
} else if (!from) {
return res.status(400).send({ msg: config.errNoCursor });
} else if (!from.match(/[0-9]{1,10}/gmi)) {
return res.status(400).send({ msg: config.errBadTimestamp });
} else if (!source) {
return res.status(400).send({ msg: "No source" })
}

try {
const rows = await fetchEvents({ appId, source, from: Number(from), country, limit: Number(limit) })
res.status(200).json({ data: rows })
}
if(err.name === 'MissingDataError') {
return res.status(404).send({
msg: err.message,
err: err,
});
catch (e) {
next(e)
}
console.log(err.stack);
}

/**
 * Express error-handling middleware.
 * NOTE: Express recognizes error handlers by arity — the function MUST
 * declare exactly four parameters, otherwise it is mounted as ordinary
 * middleware and never receives `err`. The unused `_req`/`_next` params
 * are required for that contract.
 *
 * Maps the domain errors thrown by getDataset to client-facing statuses
 * (MalformedArgumentError -> 400, MissingDataError -> 404); anything
 * else is an internal failure (500).
 *
 * @param {Error} err - error forwarded via next(e)
 * @param {object} _req - request (unused)
 * @param {object} res - response used to emit the error payload
 * @param {Function} _next - next middleware (unused, but keeps arity at 4)
 */
function apiErrorHandler(err, _req, res, _next) {
  if (err.name === 'MalformedArgumentError') {
    return res.status(400).send({ msg: err.message, err: err });
  }
  if (err.name === 'MissingDataError') {
    return res.status(404).send({ msg: err.message, err: err });
  }
  return res.status(500).send({ msg: err.message, err: err });
}

module.exports = (app) => {
app.use('/', router);
router.get('/fetch_latest*', fetchLatestHandler);
app.use('/', apiErrorHandler);
const router = express.Router();
router.get('/fetch_latest*', fetchLatestHandler);

app.use('/', router);
app.use('/', apiErrorHandler);
};
108 changes: 0 additions & 108 deletions api/src/bigQueryStatementBuilder.js

This file was deleted.

9 changes: 2 additions & 7 deletions api/src/config.json
Original file line number Diff line number Diff line change
@@ -1,12 +1,7 @@
{
"port":"3000",
"sqlFiles": {
"fetchLatest": "fetch_latest.sql"
},
"port": "3000",
"fetchLatestQuery": {
"loc": "US",
"cacheDuration": 300,
"MAXROWS": 1000
"loc": "US"
},
"errNoId": "Please provide an app id to search against",
"errNoAttr": "Please provide an attribution id to filter results",
Expand Down
Loading