Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Showing 7897 additions and 1223 deletions
-
alias: "view:pessoas:cad:unico"
origin: true
aliasAsName: true
dimensions:
- "dim:cidade:id"
- "dim:cadunico:familia:estrato"
- "dim:cadunico:familia:divisao:administrativa:uf"
- "dim:cadunico:familia:id"
- "dim:cadunico:pessoa:id"
- "dim:cadunico:pessoa:genero:cod"
- "dim:cadunico:pessoa:idade"
- "dim:cadunico:pessoa:relacao:parentesco"
- "dim:cadunico:pessoa:raca:cod"
- "dim:cadunico:pessoa:local:nascimento:cod"
- "dim:cadunico:pessoa:certidao:registrada"
- "dim:cadunico:pessoa:deficiencia"
- "dim:cadunico:pessoa:alfabetizado"
- "dim:cadunico:pessoa:frequenta:escola"
- "dim:cadunico:pessoa:local:escola"
- "dim:cadunico:pessoa:curso:frequenta"
- "dim:cadunico:pessoa:serie:frequenta"
- "dim:cadunico:pessoa:curso:frequentou"
- "dim:cadunico:pessoa:serie:frequentou"
- "dim:cadunico:pessoa:curso:conclusao"
- "dim:cadunico:pessoa:trabalhou:semana:passada"
- "dim:cadunico:pessoa:nao:trabalhou:semana:passada"
- "dim:cadunico:pessoa:atividade:extrativista"
- "dim:cadunico:pessoa:funcao:principal"
- "dim:cadunico:pessoa:remuneracao"
- "dim:cadunico:pessoa:trabalhou:ultimos:12:meses"
- "dim:cadunico:pessoa:quant:meses:trabalhados:ultimos:12"
- "dim:cadunico:pessoa:renda:bruta:ultimos:12:meses"
- "dim:cadunico:pessoa:renda:doacao"
- "dim:cadunico:pessoa:renda:aposentadoria"
- "dim:cadunico:pessoa:renda:seguro:desemprego"
- "dim:cadunico:pessoa:renda:pensao"
- "dim:cadunico:pessoa:renda:outras"
- "dim:cadunico:pessoa:peso"
- "dim:cadunico:pessoa:censo:ano"
metrics:
- "met:count:cadunico:pessoa:id"
- "met:min:cadunico:pessoa:idade"
- "met:max:cadunico:pessoa:idade"
- "met:avg:cadunico:pessoa:idade"
- "met:min:cadunico:pessoa:remuneracao"
- "met:max:cadunico:pessoa:remuneracao"
- "met:sum:cadunico:pessoa:remuneracao"
- "met:avg:cadunico:pessoa:remuneracao"
- "met:min:cadunico:pessoa:quant:meses:trabalhados:ultimos:12"
- "met:max:cadunico:pessoa:quant:meses:trabalhados:ultimos:12"
- "met:avg:cadunico:pessoa:quant:meses:trabalhados:ultimos:12"
- "met:min:cadunico:pessoa:renda:bruta:ultimos:12:meses"
- "met:max:cadunico:pessoa:renda:bruta:ultimos:12:meses"
- "met:sum:cadunico:pessoa:renda:bruta:ultimos:12:meses"
- "met:avg:cadunico:pessoa:renda:bruta:ultimos:12:meses"
- "met:min:cadunico:pessoa:renda:doacao"
- "met:max:cadunico:pessoa:renda:doacao"
- "met:sum:cadunico:pessoa:renda:doacao"
- "met:avg:cadunico:pessoa:renda:doacao"
- "met:min:cadunico:pessoa:renda:aposentadoria"
- "met:max:cadunico:pessoa:renda:aposentadoria"
- "met:sum:cadunico:pessoa:renda:aposentadoria"
- "met:avg:cadunico:pessoa:renda:aposentadoria"
- "met:min:cadunico:pessoa:renda:seguro:desemprego"
- "met:max:cadunico:pessoa:renda:seguro:desemprego"
- "met:sum:cadunico:pessoa:renda:seguro:desemprego"
- "met:avg:cadunico:pessoa:renda:seguro:desemprego"
- "met:min:cadunico:pessoa:renda:pensao"
- "met:max:cadunico:pessoa:renda:pensao"
- "met:sum:cadunico:pessoa:renda:pensao"
- "met:avg:cadunico:pessoa:renda:pensao"
- "met:min:cadunico:pessoa:renda:outras"
- "met:max:cadunico:pessoa:renda:outras"
- "met:sum:cadunico:pessoa:renda:outras"
- "met:avg:cadunico:pessoa:renda:outras"
- "met:min:cadunico:pessoa:peso"
- "met:max:cadunico:pessoa:peso"
- "met:avg:cadunico:pessoa:peso"
-
alias: "view:pnad"
origin: true
aliasAsName: true
dimensions:
- "dim:pnad:censo:ano"
- "dim:estado:id"
- "dim:pnad:sexo:cod"
- "dim:pnad:condicao:unidade:domiciliar"
- "dim:pnad:condicao:familia"
- "dim:pnad:raca:cod"
- "dim:pnad:alfabetizado"
- "dim:pnad:frequenta:escola:creche"
- "dim:pnad:nascimento:dia"
- "dim:pnad:nascimento:mes"
- "dim:pnad:nascimento:ano"
- "dim:pnad:idade"
- "dim:pnad:ocupacao:trabalho:posicao"
- "dim:pnad:situacao:censitaria"
- "dim:pnad:peso"
- "dim:pnad:renda:per:capita:domiciliar:mensal"
- "dim:pnad:renda:per:capita:domiciliar:mensal:faixa"
- "dim:pnad:escolaridade:familiar"
- "dim:pnad:renda:per:capita:familiar:mensal"
- "dim:pnad:nivel:ensino"
- "dim:pnad:quant:anos:estudo"
- "dim:pnad:ocupacao:trabalho:condicao"
- "dim:pnad:rede:ensino"
- "dim:pnad:curso"
- "dim:pnad:area:rede:publica:ensino"
- "dim:pnad:ocupacao:trabalho:cod"
- "dim:pnad:trabalho:setor"
- "dim:pnad:trabalho:area"
- "dim:pnad:trabalho:quant:horas:semanal"
- "dim:pnad:trabalho:renda:mensal"
- "dim:pnad:ocupacao:trabalho2:cod"
- "dim:pnad:trabalho2:setor"
- "dim:pnad:trabalho2:area"
- "dim:pnad:trabalho2:funcionario:publico"
- "dim:pnad:trabalho2:carteira:assinada"
- "dim:pnad:trabalho2:renda:mensal"
- "dim:pnad:trabalho2:quant:horas:semanal"
- "dim:pnad:area:localidade"
- "dim:pnad:faixa:etaria"
- "dim:pnad:professor:educacao:basica"
- "dim:pnad:trabalho:renda:mensal:normalizada:40:horas"
- "dim:pnad:renda:per:capita:domiciliar:mensal:salarios"
- "dim:pnad:renda:per:capita:domiciliar:mensal:quintil:nivel"
- "dim:pnad:renda:per:capita:domiciliar:mensal:nivel:extremo"
metrics:
- "met:min:pnad:nascimento:ano"
- "met:max:pnad:nascimento:ano"
- "met:min:pnad:idade"
- "met:max:pnad:idade"
- "met:avg:pnad:idade"
- "met:min:pnad:peso"
- "met:max:pnad:peso"
- "met:avg:pnad:peso"
- "met:min:pnad:renda:per:capita:domiciliar:mensal"
- "met:max:pnad:renda:per:capita:domiciliar:mensal"
- "met:sum:pnad:renda:per:capita:domiciliar:mensal"
- "met:avg:pnad:renda:per:capita:domiciliar:mensal"
- "met:min:pnad:renda:per:capita:familiar:mensal"
- "met:max:pnad:renda:per:capita:familiar:mensal"
- "met:sum:pnad:renda:per:capita:familiar:mensal"
- "met:avg:pnad:renda:per:capita:familiar:mensal"
- "met:min:pnad:quant:anos:estudo"
- "met:max:pnad:quant:anos:estudo"
- "met:avg:pnad:quant:anos:estudo"
- "met:min:pnad:trabalho:quant:horas:semanal"
- "met:max:pnad:trabalho:quant:horas:semanal"
- "met:avg:pnad:trabalho:quant:horas:semanal"
- "met:min:pnad:trabalho:renda:mensal"
- "met:max:pnad:trabalho:renda:mensal"
- "met:avg:pnad:trabalho:renda:mensal"
- "met:min:pnad:trabalho2:renda:mensal"
- "met:max:pnad:trabalho2:renda:mensal"
- "met:avg:pnad:trabalho2:renda:mensal"
- "met:min:pnad:trabalho2:quant:horas:semanal"
- "met:max:pnad:trabalho2:quant:horas:semanal"
- "met:avg:pnad:trabalho2:quant:horas:semanal"
- "met:min:pnad:trabalho:renda:mensal:normalizada:40:horas"
- "met:max:pnad:trabalho:renda:mensal:normalizada:40:horas"
- "met:avg:pnad:trabalho:renda:mensal:normalizada:40:horas"
- "met:min:pnad:renda:per:capita:domiciliar:mensal:salarios"
- "met:max:pnad:renda:per:capita:domiciliar:mensal:salarios"
-
alias: "view:prouni"
origin: true
aliasAsName: true
dimensions:
- "dim:es:prouni:censo:ano"
- "dim:es:instituicao:id"
- "dim:es:prouni:bolsa:tipo"
- "dim:es:prouni:bolsa:modalidade:ensino"
- "dim:es:prouni:bolsa:curso:nome"
- "dim:es:prouni:bolsa:curso:turno"
- "dim:es:prouni:beneficiario:cpf"
- "dim:es:prouni:beneficiario:sexo:nome"
- "dim:es:prouni:beneficiario:raca:nome"
- "dim:es:prouni:beneficiario:data:nascimento"
- "dim:es:prouni:beneficiario:deficiente"
- "dim:es:prouni:beneficiario:residencia:cidade:nome"
metrics:
- "met:count:es:prouni:beneficiario:cpf"
-
alias: "view:turma"
origin: true
aliasAsName: true
dimensions:
- "dim:turma:censo:ano"
- "dim:turma:id"
- "dim:turma:nome"
- "dim:turma:hora:inicio"
- "dim:turma:minuto:inicio"
- "dim:turma:duracao"
- "dim:turma:matricula"
- "dim:turma:etapa:ensino"
- "dim:turma:disciplina:profissionalizante:id"
- "dim:turma:tipo"
- "dim:turma:mais:educacao"
- "dim:turma:dia:domingo"
- "dim:turma:dia:segunda"
- "dim:turma:dia:terca"
- "dim:turma:dia:quarta"
- "dim:turma:dia:quinta"
- "dim:turma:dia:sexta"
- "dim:turma:dia:sabado"
- "dim:turma:codigo:atividade1:complementar"
- "dim:turma:codigo:atividade2:complementar"
- "dim:turma:codigo:atividade3:complementar"
- "dim:turma:codigo:atividade4:complementar"
- "dim:turma:codigo:atividade5:complementar"
- "dim:turma:codigo:atividade6:complementar"
- "dim:turma:braile"
- "dim:turma:recursos:baixa:visao"
- "dim:turma:processos:mentais"
- "dim:turma:orientacao:mobilidade"
- "dim:turma:sinais"
- "dim:turma:comunicacao:alternativa:aumentativa"
- "dim:turma:enriquecimento:curricular"
- "dim:turma:soroban"
- "dim:turma:informatica:acessivel"
- "dim:turma:portugues:escrita"
- "dim:turma:autonomia:escolar"
- "dim:turma:disciplina:quimica"
- "dim:turma:disciplina:fisica"
- "dim:turma:disciplina:matematica"
- "dim:turma:disciplina:biologia"
- "dim:turma:disciplina:ciencias"
- "dim:turma:disciplina:lingua:portuguesa"
- "dim:turma:disciplina:ingles"
- "dim:turma:disciplina:espanhol"
- "dim:turma:disciplina:frances"
- "dim:turma:disciplina:literatura:outras"
- "dim:turma:disciplina:lingua:indigena"
- "dim:turma:disciplina:artes"
- "dim:turma:disciplina:educacao:fisica"
- "dim:turma:disciplina:historia"
- "dim:turma:disciplina:geografia"
- "dim:turma:disciplina:filosofia"
- "dim:turma:disciplina:ensino:religioso"
- "dim:turma:disciplina:estudos:sociais"
- "dim:turma:disciplina:sociologia"
- "dim:turma:disciplina:informatica"
- "dim:turma:disciplina:profissionalizante"
- "dim:turma:disciplina:atendimento:especiais"
- "dim:turma:disciplina:sociocultural"
- "dim:turma:disciplina:libras"
- "dim:turma:disciplina:pedagogicas"
- "dim:turma:disciplina:outras"
- "dim:escola:id"
- "dim:turma:mediacao:didatico:pedagogica"
- "dim:turma:dias:atividades"
- "dim:turma:disciplina:estudos:sociais:sociologia"
- "dim:turma:turno"
- "dim:turma:etapa:ensino:segmento"
- "dim:turma:tempo:integral"
- "dim:turma:etapa:ensino:segmento:resumido"
metrics:
- "met:count:turma:id"
- "met:min:turma:hora:inicio"
- "met:max:turma:hora:inicio"
- "met:avg:turma:hora:inicio"
- "met:min:turma:minuto:inicio"
- "met:max:turma:minuto:inicio"
- "met:avg:turma:minuto:inicio"
- "met:min:turma:duracao"
- "met:max:turma:duracao"
- "met:avg:turma:duracao"
- "met:min:turma:matricula"
- "met:max:turma:matricula"
- "met:avg:turma:matricula"
- "met:sum:turma:matricula"
- "met:min:turma:dias:atividades"
- "met:max:turma:dias:atividades"
- "met:avg:turma:dias:atividades"
version: '3.3'
services:
blendb-postgres:
image: postgres:10
container_name: postgres
environment:
POSTGRES_PASSWORD: passwd
PGDATA: /var/lib/postgresql/docker/pgdata
ports:
- 5432:5432
restart: unless-stopped
security_opt:
- no-new-privileges
blendb:
# image: marula.c3sl.ufpr.br:5000/c3sl/blendb:latest
image: blendb
build: .
container_name: blendb
environment:
BLENDB_SCHEMA_FILE: config/config.yaml.example
PORT: 3000
BLENDB_N_DB: 1
BLENDB_DB0_USER: postgres
BLENDB_DB0_NAME: postgres
BLENDB_DB0_PASSWORD: passwd
BLENDB_DB0_HOST: postgres
BLENDB_DB0_PORT: 5432
BLENDB_DB0_ADAPTER: postgres
ports:
- 3000:3000
command: ["yarn","test-mode"]
restart: unless-stopped
read_only: 'true'
security_opt:
- no-new-privileges
volumes:
-
type: tmpfs
target: /tmp
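
A minimal usage sketch for the compose file above, assuming it is saved as docker-compose.yml at the repository root; the /v1/metrics route is an assumption based on specs/blendb-api-v1.raml referenced below:

# build the blendb image and start both containers
docker-compose up --build -d
# the API is published on port 3000; list the configured metrics
# (the /v1 prefix is an assumption taken from the RAML spec)
curl http://localhost:3000/v1/metrics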
{
"watch": ["src", "index.js"],
"ignore": ["*.spec.ts"],
"ext": "ts"
}
Source diff could not be displayed: it is too large.
@@ -4,12 +4,33 @@
"description": "BlenDB",
"main": "index.js",
"scripts": {
"start": "node index",
"start": "env $(cat config/config.env ) node index",
"lint": "tslint -s node_modules/tslint-stylish -t stylish src/**/*.ts test/**/*.ts",
"test": "ts-node node_modules/istanbul/lib/cli.js cover -x \"**/*.spec.ts\" -e .ts _mocha",
"test": "env $(cat config/test.env) ts-node node_modules/nyc/bin/nyc.js mocha",
"show-coverage": "xdg-open coverage/lcov-report/index.html",
"doc-api": "raml2html -i specs/blendb-api-v1.raml -o doc/api-v1-reference.html",
"database": "ts-node database/config.ts database"
"schema": "env $(cat config/config.env ) ts-node scripts/schema.ts config/config.yaml schema.sql",
"service": "./scripts/service.sh",
"test-mode": "ts-node scripts/loadTest.ts && node index",
"doc-code": "typedoc --mode 'file' --module 'commonjs' --target 'ES6' --ignoreCompilerErrors --exclude '**/*.spec.ts' --out 'doc/code' 'src'"
},
"nyc": {
"include": [
"src/**/*.ts"
],
"extension": [
".ts"
],
"require": [
"ts-node/register"
],
"reporter": [
"text-summary",
"text",
"lcov"
],
"sourceMap": true,
"instrument": true
},
"repository": {
"type": "git",
@@ -18,28 +39,36 @@
"author": "Centro de Computação Científica e Software Livre (C3SL)",
"license": "GPL-3.0",
"dependencies": {
"@types/async": "^2.0.40",
"@types/chai": "^3.4.33",
"@types/express": "^4.0.33",
"@types/js-yaml": "^3.5.29",
"@types/pg": "^6.1.38",
"async": "=2.4.1",
"express": "^4.0.33",
"js-yaml": "^3.8.2",
"@types/async": "^3.0.1",
"@types/chai": "^4.2.0",
"@types/express": "^4.17.1",
"@types/js-yaml": "^3.12.1",
"@types/pg": "^7.11.0",
"async": "^3.1.0",
"express": "^4.17.1",
"js-yaml": "^3.13.1",
"json-2-csv": "^3.5.6",
"log4js": "^5.1.0",
"monetdb": "^1.1.4",
"osprey": "^0.3.2",
"pg": "^6.1.5",
"ts-node": "^3.1.0",
"typescript": "^2.4.1"
"pg": "^7.12.1",
"raml-parser": "^0.8.18"
},
"devDependencies": {
"@types/mocha": "^2.2.32",
"@types/supertest": "^2.0.0",
"chai": "^3.4.33",
"istanbul": "1.1.0-alpha.1",
"mocha": "^3.1.0",
"raml2html": "^3.0.1",
"supertest": "^3.0.0",
"tslint": "^3.15.1",
"tslint-stylish": "^2.1.0-beta"
"@types/mocha": "^5.2.7",
"@types/supertest": "^2.0.8",
"chai": "^4.2.0",
"mocha": "^6.2.0",
"nyc": "^14.1.1",
"raml2html": "^7.4.0",
"supertest": "^4.0.2",
"ts-node": "^8.10.2",
"tslint": "^5.19.0",
"tslint-stylish": "^2.1.0",
"typedoc": "^0.15.0",
"typescript": "^3.9.7"
},
"engines": {
"node": "^10.16.3"
}
}
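
The start and schema scripts above load environment variables with env $(cat config/config.env). A hypothetical config/config.env, mirroring the variables set in the docker-compose file above (values must not contain spaces, since the file is word-split by the shell):

BLENDB_SCHEMA_FILE=config/config.yaml
PORT=3000
BLENDB_N_DB=1
BLENDB_DB0_ADAPTER=postgres
BLENDB_DB0_HOST=localhost
BLENDB_DB0_PORT=5432
BLENDB_DB0_NAME=postgres
BLENDB_DB0_USER=postgres
BLENDB_DB0_PASSWORD=passwd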
#!/usr/bin/env node
/*
* Copyright (C) 2018 Centro de Computacao Cientifica e Software Livre
* Departamento de Informatica - Universidade Federal do Parana
*
* This file is part of blendb.
*
* blendb is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* blendb is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with blendb. If not, see <http://www.gnu.org/licenses/>.
*/
import { Fixture as FixPostgres } from "../test/postgres/fixture";
import { ConfigParser } from "../src/util/configParser";
let config: any;
let fixture;
let configPath;
if(process.env.BLENDB_SCHEMA_FILE){
configPath = process.env.BLENDB_SCHEMA_FILE;
}
else{
throw new Error("BLENDB_SCHEMA_FILE was not set");
}
config = ConfigParser.parse(configPath);
fixture = new FixPostgres(config.connections[0]);
fixture.load(config.loadViews, (err) => {
if (err) {
throw err;
}
})
@@ -23,6 +23,7 @@
import { Engine } from "../src/core/engine";
import { PostgresAdapter } from "../src/adapter/postgres";
import { ConfigParser } from "../src/util/configParser";
import { QueryOpts, Query } from "../src/common/query";
import * as path from "path";
import * as fs from "fs";
@@ -37,13 +38,11 @@ const configFile = process.argv[2];
const schemaFile = process.argv[3];
const config = ConfigParser.parse(configFile);
const referencePath = path.dirname(configFile);
const engine = new Engine();
const adapter = new PostgresAdapter(config.connection);
let schema = "";
const engine = new Engine(config);
const adapter = new PostgresAdapter(config.connections[0]);
config.metrics.forEach ((met) => engine.addMetric(met));
config.dimensions.forEach ((dim) => engine.addDimension(dim));
let schema = "";
for (let i = 0; i < config.buildViews.length; ++i) {
const view = config.buildViews[i].view;
@@ -52,8 +51,20 @@ for (let i = 0; i < config.buildViews.length; ++i) {
if (view.origin) {
// Creates a table
let output = "-- View: " + alias + "\n";
output += "DROP VIEW IF EXISTS view_" + view.id + " CASCADE;\n";
output += "CREATE OR REPLACE VIEW view_" + view.id + " AS\n";
if (process.env.BLENDB_ADAPTER === "postgres") {
output += "DROP VIEW IF EXISTS " + view.name + " CASCADE;\n";
output += "CREATE OR REPLACE VIEW " + view.name + " AS\n";
}
else if (process.env.BLENDB_ADAPTER === "monet") {
output += "DROP VIEW " + view.name + " CASCADE;\n";
output += "CREATE VIEW " + view.name + " AS\n";
}
else {
console.log("The adapter value:", process.env.BLENDB_ADAPTER, "is invalid. Abort");
process.exit(1);
}
output += fs.readFileSync(referencePath + "/" + filePath, {encoding: "utf-8" });
schema += output + "\n";
engine.addView(view);
@@ -65,17 +76,30 @@
const view = config.buildViews[i].view;
const alias = config.buildViews[i].alias;
if (!view.origin) {
const materializedView = engine.query({
metrics: view.metrics,
dimensions: view.dimensions,
clauses: view.clauses
});
const qOpt: QueryOpts = { metrics: view.metrics, dimensions: view.dimensions,
clauses: view.clauses };
const materializedView = engine.query(new Query(qOpt));
const table = adapter.getQueryFromView(materializedView);
const query = "-- View: " + alias + "\n" +
"DROP MATERIALIZED VIEW IF EXISTS view_" + view.id + " CASCADE;\n" +
let query = "-- View: " + alias + "\n";
if (process.env.BLENDB_ADAPTER === "postgres") {
query += "DROP MATERIALIZED VIEW IF EXISTS view_" + view.id + " CASCADE;\n" +
"CREATE MATERIALIZED VIEW view_" + materializedView.id +
" AS " + table + "\n\n";
}
else if (process.env.BLENDB_ADAPTER === "monet") {
console.log("MonetDb does not support Materializaed views. Abort");
console.log("All views in config.yaml must be origin form MonetDB");
console.log("This is a known issue.");
console.log("In the future this script will create a table and copy the data.");
process.exit(1);
}
else {
console.log("The adapter value:", process.env.BLENDB_ADAPTER, "is invalid. Abort");
process.exit(1);
}
schema += query;
engine.addView(materializedView);
@@ -88,4 +112,8 @@ fs.writeFile(schemaFile, schema, (error) => {
process.exitCode = 1;
return;
}
else {
console.log("File: " + schemaFile + " created");
}
});
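
A sketch of how the script above could be run, following the package.json "schema" entry; BLENDB_ADAPTER selects the SQL dialect, and the psql step is an assumption for a Postgres target:

# generate schema.sql from config/config.yaml for the Postgres adapter
BLENDB_ADAPTER=postgres ts-node scripts/schema.ts config/config.yaml schema.sql
# load the generated views into the database
psql -h localhost -U postgres -f schema.sql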
#! /bin/bash
# Workaround to make this script always execute from its location in the directory structure
basePath=$(echo $BASH_SOURCE | rev | cut -c 11- | rev)
cd $basePath/..
mkdir -p service
WORKSPACE=$(pwd)
REAL_USER=$USER
BLENDB_PORT=3001
if [[ $1 != "" ]]; then
BLENDB_PORT=$1
fi
if [[ $2 != "" ]]; then
REAL_USER=$2
fi
cd service
echo "[Unit]" > blendb.service
echo "Description=BlenDB API" >> blendb.service
echo "After=network.target" >> blendb.service
echo "" > blendb.service
echo "[Service]" >> blendb.service
echo "User=$REAL_USER" >> blendb.service
echo "WorkingDirectory=$WORKSPACE" >> blendb.service
echo "ExecStart=$WORKSPACE/service/service.sh" >> blendb.service
echo "" >> blendb.service
echo "[Install]" >> blendb.service
echo "WantedBy=multi-user.target" >> blendb.service
echo "#! /bin/bash" > service.sh
USING_NVM="false"
VERSION=$(node --version)
error=$(source $NVM_DIR/nvm.sh)
if [[ $? -eq 0 ]]; then
source $NVM_DIR/nvm.sh
echo "source $NVM_DIR/nvm.sh" >> service.sh
echo "nvm use $VERSION" >> service.sh
USING_NVM="true"
else
echo "nvm not found, using native node version"
echo ""
fi
echo "PORT=$BLENDB_PORT npm start" >> service.sh
chmod +x service.sh
echo "The service files were created with this parameters"
echo "User that runs the service: $REAL_USER"
echo "PORT: $BLENDB_PORT"
echo "Blendb home dir (WORKSPACE): $WORKSPACE"
echo "Node Version: $VERSION"
echo "Using NVM: $USING_NVM"
echo ""
echo "Node version is get using 'node --version, use nvm to change it"
echo "To set different user and port use npm run service -- <port> [<user>]"
echo "Run this commands, as root (or sudo) to finish the process and start blendb"
SYSTEMD_PATH=/etc/systemd/system/blendb.service
mkdir -p /var/log/
touch /var/log/blendb.log
chown root:$REAL_USER /var/log/blendb.log
echo -n "rm -f $SYSTEMD_PATH && "
echo -n "ln -s $WORKSPACE/service/blendb.service $SYSTEMD_PATH && "
echo "systemctl daemon-reload && systemctl restart blendb.service"
@@ -271,6 +271,27 @@ traits:
description: |
Fields to be returned.
type: string
- formatable:
queryParameters:
format:
description: |
Response format. Defines whether the response will be
JSON or a CSV-like file. The default value is json.
The CSV-like formats are csv, ssv and tsv, whose
separators are comma, semicolon and tab, respectively.
example: "ssv"
required: false
pattern: "^json$|^csv$|^ssv$|^tsv$"
type: string
- taggable:
queryParameters:
tags:
description: |
Tags that restrict the elements returned by your request.
Similar to a filter, but applied to BlenDB elements, not to
query results.
required: false
type: string
/metrics:
description: |
@@ -279,6 +300,15 @@
system and their descriptions.
securedBy: [ null, oauth_2_0 ]
get:
is: [ formatable, taggable ]
/sources:
description: |
A Source represents a type of object that can be inserted in the database.
This collection allows the user to list all the sources available in the
system and their descriptions.
securedBy: [ null, oauth_2_0 ]
get:
is: [ formatable ]
/dimensions:
description: |
@@ -287,7 +317,23 @@
the system and their descriptions.
securedBy: [ null, oauth_2_0 ]
get:
is: [ formatable, taggable ]
/enumtypes:
description: |
An EnumType is short for enumerable type. This is a special data type that only accepts a few possible values. This
collection allows the user to list all the enumerable types available in the system, their descriptions and possible
values.
get:
is: [ formatable ]
securedBy: [ null, oauth_2_0 ]
/tags:
description: |
A Tag can be placed on a metric or dimension to add some extra meaning
to it. Tags can be used to filter the number of elements returned by a
route. Tags are like filters, but instead of filtering query results,
they filter BlenDB elements.
get:
is: [ formatable ]
/data:
description: |
This is the main part of the API. You may query it for report
@@ -296,7 +342,7 @@
start/end dates to refine your query.
type: base
get:
is: [ filtered ]
is: [ filtered, formatable ]
queryParameters:
metrics:
description: |
......
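
Putting the traits together, a hypothetical request against the /data route, selecting a metric and a dimension declared in the configuration above and asking for a CSV response; the host and the /v1 prefix are assumptions:

curl "http://localhost:3000/v1/data?metrics=met:count:turma:id&dimensions=dim:turma:censo:ano&format=csv"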
/*
* Copyright (C) 2018 Centro de Computacao Cientifica e Software Livre
* Departamento de Informatica - Universidade Federal do Parana
*
* This file is part of blend.
*
* blend is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* blend is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with blend. If not, see <http://www.gnu.org/licenses/>.
*/
import { SQLAdapter } from "./sql";
import { View } from "../core/view";
import { Source } from "../core/source";
import { FilterOperator } from "../core/filter";
import { DataType } from "../common/types";
/** @hidden */
const MDB = require("monetdb")();
/**
* Params required to connect with a MonetDB database and
* to create a MonetAdapter object.
*/
export interface MonetConfig {
/** Database hostname */
host: string;
/** Database port */
port: number;
/** Database name */
dbname: string;
/** Database user */
user: string;
/** Database password */
password: string;
}
/**
* Represents the data format returned by MonetDB.
* This interface is used to parse this format to the BlenDB standard.
*/
interface MonetResult {
/** Query result as a list of values */
data: any[];
/** Metadata of each requested attribute. */
structure: {type: string, column: string, index: number}[];
}
/**
* Adapter which connects with a MonetDB database.
*/
export class MonetAdapter extends SQLAdapter {
/** Information used to connect with a MonetDB database. */
private config: MonetConfig;
/**
* Creates a new adapter with the database connection configuration.
* @param conf - The information required to create a connection with
* the database.
*/
constructor (conf: MonetConfig) {
super();
this.config = conf;
}
/**
* Asynchronously reads all data from a given view.
* In other words, it performs a SELECT query.
* @param view - "Location" from which all data should be read.
* @param cb - Callback function which contains the data read.
* @param cb.error - Error information when the method fails.
* @param cb.result - Data read from the view.
*/
public getDataFromView(view: View, cb: (error: Error, result?: any[]) => void): void {
const query = this.getQueryFromView(view);
this.executeQuery(query, cb);
}
/**
* Asynchronously executes a query and gets its result.
* @param query - Query (SQL format) to be executed.
* @param cb - Callback function which contains the data read.
* @param cb.error - Error information when the method fails.
* @param cb.result - Query result.
*/
private executeQuery(query: string, cb: (error: Error, result?: any[]) => void): void {
let pool: any = new MDB(this.config);
pool.connect();
pool.query(query).then((result: MonetResult) => {
if (result) {
let res = result.data.map((item) => {
let obj: any = {};
for (let i = 0; i < result.structure.length; ++i) {
let struct = result.structure[i];
if (struct.type === "timestamp") {
obj[struct.column] = new Date(item[struct.index]);
}
else {
obj[struct.column] = item[struct.index];
}
}
return obj;
});
cb(null, res);
}
else {
cb(null, null);
}
}).fail((err: Error) => {
cb(err, null);
});
pool.close();
}
/**
* Asynchronously executes a query from a Source and gets its result.
* @param query - Query (SQL format) to be executed.
* @param cb - Callback function which contains the data read.
* @param cb.error - Error information when the method fails.
* @param cb.result - Query result.
*/
private executeQuerySource(query: string, cb: (error: Error, result?: any[]) => void): void {
let pool: any = new MDB(this.config);
pool.connect();
pool.query(query).then((result: any) => {
if (result) {
cb(null, result);
}
else {
cb(null, null);
}
}).fail((err: Error) => {
cb(err, null);
});
pool.close();
}
/**
* Asynchronously inserts one record into a given Source.
* @param source - Insertion "location".
* @param data - Data to be inserted.
* @param cb - Callback function which contains the query result.
* @param cb.error - Error information when the method fails.
* @param cb.result - Query result.
*/
public insertIntoSource(source: Source, data: any[], cb: (err: Error, result: any[]) => void): void {
const query = this.getQueryFromSource(source, data);
this.executeQuerySource(query, cb);
}
/**
* Cast BlenDB data types to be used in MonetDB queries.
* @param quotedValue - SQL query attribute wrapped by quotes.
* @param dt - Attribute data type.
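* For example, typeCast("'2017-01-01'", DataType.DATE) yields
* "CAST('2017-01-01' AS TIMESTAMP)".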
*/
protected typeCast(quotedValue: string, dt: DataType): string {
switch (dt) {
case DataType.DATE:
return "CAST(" + quotedValue + " AS TIMESTAMP)";
case DataType.INTEGER:
return "CAST(" + quotedValue + " AS INTEGER)";
case DataType.BOOLEAN:
return "CAST(" + quotedValue + " AS BOOLEAN)";
default:
return quotedValue;
}
}
/**
* Translate filter operator to be used in MonetDB queries.
* @param lSide - Left-hand side operand.
* @param rSide - Right-hand side operand.
* @param op - Operation to be performed.
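* For example, applyOperator("price", "10", FilterOperator.NOTEQUAL)
* yields "NOT(price = 10)".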
*/
protected applyOperator(lSide: string, rSide: string, op: FilterOperator): string {
switch (op) {
case FilterOperator.EQUAL:
return lSide + " = " + rSide;
case FilterOperator.NOTEQUAL:
return "NOT(" + lSide + " = " + rSide + ")";
case FilterOperator.GREATER:
return lSide + " > " + rSide;
case FilterOperator.LOWER:
return lSide + " < " + rSide;
case FilterOperator.GREATEREQ:
return lSide + " >= " + rSide;
case FilterOperator.LOWEREQ:
return lSide + " <= " + rSide;
default:
return "";
}
}
}
@@ -19,72 +19,84 @@
*/
import { expect } from "chai";
import { PostgresAdapter } from "./postgres";
import { MonetAdapter, MonetConfig } from "./monet";
import { Adapter } from "../core/adapter";
import { Fixture } from "../../test/postgres/fixture";
import { ConfigParser } from "../util/configParser";
import { Fixture as FixPostgres } from "../../test/postgres/fixture";
import { Fixture as FixMonet } from "../../test/monet/fixture";
import { adapterScenario } from "../../test/scenario";
import { eachOf } from "async";
describe("postgres adapter", () => {
let adapters: Adapter[] = [];
let fixture;
// Initializing
let config: any;
function loadDb(db: string, index: number, cb: (err: any, result: Adapter) => void): void {
let adapter: Adapter;
let fixture;
before((done) => {
config = ConfigParser.parse("config/test.yaml");
fixture = new Fixture(config.connection);
fixture.load(config.loadViews, config.struct.create, (err) => {
if (db === "postgres") {
fixture = new FixPostgres(adapterScenario.config.connections[index]);
fixture.load(adapterScenario.config.loadViews, (err) => {
if (err) {
throw err;
}
adapter = new PostgresAdapter(config.connection);
done();
});
});
// Tests
it("should get data from single materialized view", (done) => {
let view = adapterScenario.materializedView;
adapter.getDataFromView(view, (err, result) => {
expect(err).to.be.a("null");
expect(result).to.be.an("array");
expect(result).to.have.length(5);
let keys: string[] = [];
keys = keys.concat(view.metrics.map((item) => item.name));
keys = keys.concat(view.dimensions.map((item) => item.name));
result.forEach((row) => {
expect(row).to.be.an("object");
expect(row).to.have.all.keys(keys);
});
done();
adapter = new PostgresAdapter(adapterScenario.config.connections[index]);
cb(null, adapter);
});
}
else if (db === "monet") {
fixture = new FixMonet(adapterScenario.config.connections[index]);
fixture.load(adapterScenario.config.loadViews, (err) => {
if (err) {
throw err;
}
let parsedConfig: MonetConfig = {
user: adapterScenario.config.connections[index].user,
dbname: adapterScenario.config.connections[index].database,
password: adapterScenario.config.connections[index].password,
host: adapterScenario.config.connections[index].host,
port: adapterScenario.config.connections[index].port
};
adapter = new MonetAdapter(parsedConfig);
cb(null, adapter);
});
it("should get data from join of 2 views (without selection)", (done) => {
let view = adapterScenario.noSelectionView;
adapter.getDataFromView(view, (err, result) => {
expect(err).to.be.a("null");
expect(result).to.be.an("array");
expect(result).to.have.length(1);
expect(result[0]).to.be.an("object");
let keys: string[] = [];
keys = keys.concat(view.metrics.map((item) => item.name));
keys = keys.concat(view.dimensions.map((item) => item.name));
result.forEach((row) => {
expect(row).to.be.an("object");
expect(row).to.have.all.keys(keys);
}
else{
cb("invalid adapter", null);
}
}
describe("Sql adapter", () => {
// Initializing
before(function (done): void {
// An arrow function is not used here so that `this` is accessible and the test can be skipped
eachOf(adapterScenario.config.adapters , function(database, key: number, callback) {
loadDb(database, key, function(err, result ) {
if (err){
return callback(err);
}
else{
adapters[key] = result;
}
callback();
});
}, function (err){
if (err){
this.skip();
}
else{
done();
}
});
});
it("should get data from join of 2 views (with selection)", (done) => {
let view = adapterScenario.withSelectionView;
adapter.getDataFromView(view, (err, result) => {
for (let i = 0; i < adapterScenario.config.ndb; i++ ){
it(adapterScenario.config.adapters[i] + ": " + "should get data from single materialized view", (done) => {
let view = adapterScenario.materializedView;
adapters[i].getDataFromView(view, (err, result) => {
expect(err).to.be.a("null");
expect(result).to.be.an("array");
expect(result).to.have.length(5);
expect(result[0]).to.be.an("object");
let keys: string[] = [];
keys = keys.concat(view.metrics.map((item) => item.name));
keys = keys.concat(view.dimensions.map((item) => item.name));
@@ -95,12 +107,12 @@ describe("postgres adapter", () => {
done();
});
});
it("should get data from single view (with sub-dimension)", (done) => {
let view = adapterScenario.subDimensionView;
adapter.getDataFromView(view, (err, result) => {
it(adapterScenario.config.adapters[i] + ": " + "should sort data from join of two views", (done) => {
let view = adapterScenario.sortView;
adapters[i].getDataFromView(view, (err, result) => {
expect(err).to.be.a("null");
expect(result).to.be.an("array");
expect(result).to.have.length(5);
expect(result).to.have.length(4);
expect(result[0]).to.be.an("object");
let keys: string[] = [];
keys = keys.concat(view.metrics.map((item) => item.name));
@@ -112,9 +124,9 @@
done();
});
});
it("should get data from join of 4 views (with selection)", (done) => {
let view = adapterScenario.join4View;
adapter.getDataFromView(view, (err, result) => {
it(adapterScenario.config.adapters[i] + ": " + "should get data from single view (with sub-dimension)", (done) => {
let view = adapterScenario.subDimensionView;
adapters[i].getDataFromView(view, (err, result) => {
expect(err).to.be.a("null");
expect(result).to.be.an("array");
expect(result).to.have.length(5);
@@ -129,10 +141,9 @@
done();
});
});
it("should get data from different sub dimensions with same parent", (done) => {
let view = adapterScenario.dateView;
adapter.getDataFromView(view, (err, result) => {
it(adapterScenario.config.adapters[i] + ": " + "should get data from join of one view", (done) => {
let view = adapterScenario.joinWithOneView;
adapters[i].getDataFromView(view, (err, result) => {
expect(err).to.be.a("null");
expect(result).to.be.an("array");
expect(result).to.have.length(5);
@@ -140,41 +151,16 @@
let keys: string[] = [];
keys = keys.concat(view.metrics.map((item) => item.name));
keys = keys.concat(view.dimensions.map((item) => item.name));
result.forEach((row) => {
expect(row).to.be.an("object");
expect(row).to.have.all.keys(keys);
expect(row).to.have.property("dim:0:year", 2017);
});
done();
});
});
it("should get data from view with all types of agreggation", (done) => {
let view = adapterScenario.aggrView;
adapter.getDataFromView(view, (err, result) => {
expect(err).to.be.a("null");
expect(result).to.be.an("array");
expect(result).to.have.length(1);
expect(result[0]).to.be.an("object");
let keys: string[] = [];
keys = keys.concat(view.metrics.map((item) => item.name));
keys = keys.concat(view.dimensions.map((item) => item.name));
result.forEach((row) => {
expect(row).to.be.an("object");
expect(row).to.have.all.keys(keys);
});
expect(parseInt(result[0]["met:0"], 10)).to.be.equal(15);
expect(parseInt(result[0]["met:1"], 10)).to.be.equal(3);
expect(parseInt(result[0]["met:6"], 10)).to.be.equal(5);
expect(parseInt(result[0]["met:10"], 10)).to.be.equal(5);
expect(parseInt(result[0]["met:11"], 10)).to.be.equal(1);
done();
});
});
it("should get data from view when a single clause exists", (done) => {
let view = adapterScenario.clauseView;
adapter.getDataFromView(view, (err, result) => {
it(adapterScenario.config.adapters[i] + ": " + "should get data from view with a single clause", (done) => {
let view = adapterScenario.filterWithEqual;
adapters[i].getDataFromView(view, (err, result) => {
expect(err).to.be.a("null");
expect(result).to.be.an("array");
expect(result).to.have.length(1);
@@ -186,70 +172,15 @@
expect(row).to.be.an("object");
expect(row).to.have.all.keys(keys);
});
expect(parseInt(result[0]["met:0"], 10)).to.be.equal(1);
expect(parseInt(result[0]["met:1"], 10)).to.be.equal(1);
expect(parseInt(result[0]["met:2"], 10)).to.be.equal(1);
expect(result[0]["dim:0"].getDate()).to.be.equal(1);
done();
});
});
it("should get data from view with single clause, with more than on filter", (done) => {
let view = adapterScenario.multiFilterView;
adapter.getDataFromView(view, (err, result) => {
expect(err).to.be.a("null");
expect(result).to.be.an("array");
expect(result).to.have.length(2);
expect(result[0]).to.be.an("object");
let keys: string[] = [];
keys = keys.concat(view.metrics.map((item) => item.name));
keys = keys.concat(view.dimensions.map((item) => item.name));
result.forEach((row) => {
expect(row).to.be.an("object");
expect(row).to.have.all.keys(keys);
});
done();
});
});
it("should get data from view with multiple clauses", (done) => {
let view = adapterScenario.multiClauseView;
adapter.getDataFromView(view, (err, result) => {
expect(err).to.be.a("null");
expect(result).to.be.an("array");
expect(result).to.have.length(0);
done();
});
});
it("should get data from view with a clause with not equal operator", (done) => {
let view = adapterScenario.notEqualView;
adapter.getDataFromView(view, (err, result) => {
expect(err).to.be.a("null");
expect(result).to.be.an("array");
expect(result).to.have.length(2);
expect(result[0]).to.be.an("object");
let keys: string[] = [];
keys = keys.concat(view.metrics.map((item) => item.name));
keys = keys.concat(view.dimensions.map((item) => item.name));
result.forEach((row) => {
expect(row).to.be.an("object");
expect(row).to.have.all.keys(keys);
});
done();
});
});
it("should get data from view, using > AND < operators on filters", (done) => {
let view = adapterScenario.gtltView;
adapter.getDataFromView(view, (err, result) => {
it(adapterScenario.config.adapters[i] + ": " + "should get data from reduce with the same attributes of view", (done) => {
let view = adapterScenario.reduceAsView;
adapters[i].getDataFromView(view, (err, result) => {
expect(err).to.be.a("null");
expect(result).to.be.an("array");
expect(result).to.have.length(1);
expect(result).to.have.length(5);
expect(result[0]).to.be.an("object");
let keys: string[] = [];
keys = keys.concat(view.metrics.map((item) => item.name));
@@ -258,14 +189,12 @@
expect(row).to.be.an("object");
expect(row).to.have.all.keys(keys);
});
done();
});
});
it("should get data from view, using >= AND <= operators on filters", (done) => {
let view = adapterScenario.geleView;
adapter.getDataFromView(view, (err, result) => {
it(adapterScenario.config.adapters[i] + ": " + "should get data from join into reduce with a single clause", (done) => {
let view = adapterScenario.filterAverageBought;
adapters[i].getDataFromView(view, (err, result) => {
expect(err).to.be.a("null");
expect(result).to.be.an("array");
expect(result).to.have.length(3);
@@ -277,17 +206,15 @@
expect(row).to.be.an("object");
expect(row).to.have.all.keys(keys);
});
done();
});
});
it("should get data when a filter does not have any effect on the query", (done) => {
let view = adapterScenario.notMatchFilterView;
adapter.getDataFromView(view, (err, result) => {
it(adapterScenario.config.adapters[i] + ": " + "should get data from view with multiple clauses", (done) => {
let view = adapterScenario.multipleClause;
adapters[i].getDataFromView(view, (err, result) => {
expect(err).to.be.a("null");
expect(result).to.be.an("array");
expect(result).to.have.length(5);
expect(result).to.have.length(1);
expect(result[0]).to.be.an("object");
let keys: string[] = [];
keys = keys.concat(view.metrics.map((item) => item.name));
@@ -296,14 +223,12 @@
expect(row).to.be.an("object");
expect(row).to.have.all.keys(keys);
});
done();
});
});
it("should get data using COUNT when the view is not origin", (done) => {
let view = adapterScenario.notOriginCount;
adapter.getDataFromView(view, (err, result) => {
it(adapterScenario.config.adapters[i] + ": " + "should get data from view with single clause, with more than one filter", (done) => {
let view = adapterScenario.singleClause;
adapters[i].getDataFromView(view, (err, result) => {
expect(err).to.be.a("null");
expect(result).to.be.an("array");
expect(result).to.have.length(4);
@@ -315,14 +240,12 @@
expect(row).to.be.an("object");
expect(row).to.have.all.keys(keys);
});
done();
});
});
it("should get data from a unmaterializeble view", (done) => {
let view = adapterScenario.unMaterializebleView;
adapter.getDataFromView(view, (err, result) => {
it(adapterScenario.config.adapters[i] + ": " + "should get data from view with no metrics", (done) => {
let view = adapterScenario.joinWithNoMetrics;
adapters[i].getDataFromView(view, (err, result) => {
expect(err).to.be.a("null");
expect(result).to.be.an("array");
expect(result).to.have.length(5);
@@ -334,14 +257,12 @@
expect(row).to.be.an("object");
expect(row).to.have.all.keys(keys);
});
done();
});
});
it("should get data from partial joins", (done) => {
let view = adapterScenario.partialJoinView;
adapter.getDataFromView(view, (err, result) => {
it(adapterScenario.config.adapters[i] + ": " + "should get data from join with one metric and one view", (done) => {
let view = adapterScenario.growOneView;
adapters[i].getDataFromView(view, (err, result) => {
expect(err).to.be.a("null");
expect(result).to.be.an("array");
expect(result).to.have.length(5);
@@ -353,36 +274,15 @@
expect(row).to.be.an("object");
expect(row).to.have.all.keys(keys);
});
done();
});
});
it("should get data from view when joins can be propagated", (done) => {
let view = adapterScenario.propagatedClauseView;
adapter.getDataFromView(view, (err, result) => {
expect(err).to.be.a("null");
expect(result).to.be.an("array");
expect(result).to.have.length(2);
expect(result[0]).to.be.an("object");
let keys: string[] = [];
keys = keys.concat(view.metrics.map((item) => item.name));
keys = keys.concat(view.dimensions.map((item) => item.name));
result.forEach((row) => {
expect(row).to.be.an("object");
expect(row).to.have.all.keys(keys);
});
done();
});
});
it("should get data from view when joins propaged is in the aggregation", (done) => {
let view = adapterScenario.propagatedClauseAggrView;
adapter.getDataFromView(view, (err, result) => {
it(adapterScenario.config.adapters[i] + ": " + "should get data from join with unrelated dimension", (done) => {
let view = adapterScenario.JoinWithAncestors;
adapters[i].getDataFromView(view, (err, result) => {
expect(err).to.be.a("null");
expect(result).to.be.an("array");
expect(result).to.have.length(8);
expect(result).to.have.length(5);
expect(result[0]).to.be.an("object");
let keys: string[] = [];
keys = keys.concat(view.metrics.map((item) => item.name));
@@ -391,8 +291,8 @@
expect(row).to.be.an("object");
expect(row).to.have.all.keys(keys);
});
done();
});
});
}
});
/*
* Copyright (C) 2016 Centro de Computacao Cientifica e Software Livre
* Copyright (C) 2018 Centro de Computacao Cientifica e Software Livre
* Departamento de Informatica - Universidade Federal do Parana
*
* This file is part of blend.
@@ -18,46 +18,49 @@
* along with blend. If not, see <http://www.gnu.org/licenses/>.
*/
import { Adapter } from "../core/adapter";
import { Metric } from "../core/metric";
import { Dimension } from "../core/dimension";
import { Clause } from "../core/clause";
import { Filter, FilterOperator } from "../core/filter";
import { AggregationType, RelationType } from "../common/types";
import { SQLAdapter } from "./sql";
import { View } from "../core/view";
import { Source } from "../core/source";
import { FilterOperator } from "../core/filter";
import { Pool, PoolConfig } from "pg";
import { DataType } from "../common/types";
interface ExpandedView {
dimensions: Dimension[];
metrics: Metric[];
dimMetrics: Metric[];
keys: Dimension[];
clauses: Clause[];
from: string;
id: string;
origin: boolean;
}
interface DimInfo {
dim: Dimension;
views: ExpandedView[];
}
interface DimTranslation {
aliased: string;
noalias: string;
}
export class PostgresAdapter extends Adapter {
/** Adapter which connects with a PostgreSQL database. */
export class PostgresAdapter extends SQLAdapter {
/** Information used to connect with a PostgreSQL database. */
private pool: Pool;
/**
* Creates a new adapter with the database connection configuration.
* @param config - The information required to create a connection with
* the database.
*/
constructor (config: PoolConfig) {
super();
this.pool = new Pool(config);
}
/**
* Asynchronously reads all data from a given view.
* In other words, it performs a SELECT query.
* @param view - "Location" from which all data should be read.
* @param cb - Callback function which contains the data read.
* @param cb.error - Error information when the method fails.
* @param cb.result - Data read from the view.
*/
public getDataFromView(view: View, cb: (error: Error, result?: any[]) => void): void {
const query = this.getQueryFromView(view);
this.executeQuery(query, cb);
}
/**
* Asynchronously executes a query and gets its result.
* @param query - Query (SQL format) to be executed.
* @param cb - Callback function which contains the data read.
* @param cb.error - Error information when the method fails.
* @param cb.result - Query result.
*/
private executeQuery(query: string, cb: (err: Error, result?: any[]) => void): void{
this.pool.connect((err, client, done) => {
if (err) {
cb (err);
@@ -71,782 +74,59 @@ export class PostgresAdapter extends Adapter {
});
}
public materializeView(view: View): boolean {
return false;
}
public getQueryFromView(view: View): string {
/*
Find the base (materialized) views that have this data and expand
them (parse them to the format used in the adapter)
*/
const materialized = this.searchMaterializedViews(view).sort((a, b) => {
return (a.id < b.id) ? -1 : 1;
}).map((item) => {
return {
id: item.id,
from: "view_" + item.id,
dimMetrics: [],
metrics: item.metrics.filter((i) => {
return view.metrics.some((j) => i.name === j.name);
}),
dimensions: item.dimensions,
keys: item.keys,
clauses: item.clauses,
origin: item.origin
};
});
// Remove repeated views from the result
let partialJoin = [materialized[0]];
for (let i = 1; i < materialized.length; ++i) {
if (materialized[i - 1].id !== materialized[i].id) {
partialJoin.push(materialized[i]);
}
}
/*
If there is more than one source of data (tables/views)
a join is needed.
Partial join represents how many sources still exist,
every join reduces this number.
*/
let clausesToCover = view.clauses.map((i) => i);
while (partialJoin.length > 1) {
/*
Variable map tracks which dimensions are still needed to
complete this query; they are required for 2 reasons:
1 - To make joins
2 - Because they are in the query
For each view that has a given dimension we add one to that
dimension's score, and the same if it is in the query.
If the dimension is in the query there is automatically at
least one view with this attribute (or the query could not be
completed), so dimensions in the query always have a score of
at least 2.
To make a join the dimension must be in 2 different views,
which also yields a score of 2.
If the score is less than 2, the dimension is not required
anymore and can be removed.
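A small illustration: with views A = [d1, d2] and B = [d2, d3]
and a query that lists only d1, the scores are d1: 2, d2: 2
and d3: 1, so d3 is dropped, while d2 survives because it is
still needed to join A and B.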
*/
let map: { [key: string]: number } = {};
let partialsChange = false;
for (let i = 0; i < partialJoin.length; ++i) {
const dims = partialJoin[i].dimensions;
for (let k = 0; k < dims.length; ++k) {
if (!map[dims[k].name]) {
map[dims[k].name] = 1;
}
else {
++map[dims[k].name];
}
}
}
for (let i = 0; i < view.dimensions.length; ++i) {
let dim = view.dimensions[i];
while (dim !== null) {
if (map[dim.name]) {
++map[dim.name];
}
dim = dim.parent;
}
}
/*
Also mark scores for dimensions inside clauses
*/
for (let i = 0; i < clausesToCover.length; ++i) {
for (let j = 0; j < clausesToCover[i].targets.length; ++j) {
if (map[clausesToCover[i].targets[j].name]) {
++map[clausesToCover[i].targets[j].name];
}
}
}
for (let i = 0; i < partialJoin.length; ++i) {
const dims = partialJoin[i].dimensions.filter((item) => {
return map[item.name] > 1;
});
const keys = partialJoin[i].keys.filter((item) => {
return map[item.name] > 1;
});
/*
At this point dimensions with a score below 2
are removed; when this happens the view is aggregated
again, with fewer dimensions, removing those dimensions
from the view.
*/
let coveredClauses: Clause[] = [];
let notCoveredClauses: Clause[] = [];
/*
If all dimensions in a clause are a subset of the
dimensions of a view, the clause is applied now,
propagating it to this point.
The clause is then removed from the set of clauses.
*/
for (let j = 0; j < clausesToCover.length; ++j) {
if (clausesToCover[j].isCovered(partialJoin[i].dimensions)) {
coveredClauses.push(clausesToCover[j]);
}
else {
notCoveredClauses.push(clausesToCover[j]);
}
}
clausesToCover = notCoveredClauses.filter((clause) => {
return !partialJoin[i].clauses.some((c) => c.id === clause.id);
});
if (dims.length < partialJoin[i].dimensions.length || coveredClauses.length > 0) {
const partial = new View({
metrics: partialJoin[i].metrics,
dimensions: dims,
keys: keys,
origin: false,
clauses: coveredClauses.concat(partialJoin[i].clauses),
materialized: false
});
const from = "(" +
this.buildQuery(partial, [partialJoin[i]]) +
") AS view_" + partial.id + "\n";
partialJoin[i].id = partial.id;
partialJoin[i].dimensions = partial.dimensions;
partialJoin[i].keys = partial.keys;
partialJoin[i].origin = partial.origin;
partialJoin[i].from = from;
partialsChange = true;
}
}
/*
If at least one of the views changed (had its number of
dimensions reduced), return to the beginning of the loop
again.
Otherwise we need to make a join.
*/
if (!partialsChange) {
/*
Sort the views by keys.
If the keys are identical, then the views
will be adjacent, and views with identical
keys can be joined.
Sorting an array of keys is the same as sorting an
array of strings.
*/
const sorted = partialJoin.sort((a, b) => {
return this.compareKeys(a.keys, b.keys);
});
/*
First of all, the remaining views are split into segments.
A segment contains views with the same keys, which are ideal
for joins. Joins like this do not create "dimensional
metrics": in such a join one row of each view is connected
to at most one row of each other table.
*/
const segment = [[sorted[0]]];
let segmentId = 0;
for (let i = 1; i < sorted.length; ++i) {
if (this.compareKeys(sorted[i - 1].keys, sorted[i].keys) === 0) {
segment[segmentId].push(sorted[i]);
}
else {
++segmentId;
segment.push([sorted[i]]);
}
}
partialJoin = [];
let ableToJoin = false;
for (let i = 0; i < segment.length; ++i) {
/*
If a segment has more than one view, a join can be made
*/
if (segment[i].length > 1) {
let mets: Metric[] = [];
let clauses: Clause[] = [];
let dims: Dimension[] = [];
let dimMetrics: Metric[] = [];
for (let j = 0; j < segment[i].length; ++j) {
mets = mets.concat(segment[i][j].metrics);
clauses = clauses.concat(segment[i][j].clauses);
dims = dims.concat(segment[i][j].dimensions);
dimMetrics = dimMetrics.concat(segment[i][j].dimMetrics);
}
dims = this.removeDuplicatedDimensions(dims);
/*
Its attributes are just concatenated and the
duplicates removed.
*/
const partial = new View({
metrics: mets,
dimensions: dims,
keys: segment[i][0].keys,
origin: false,
clauses: clauses,
materialized: false
});
const viewFrom = "(" +
this.buildQuery(partial, segment[i]) +
") AS view_" + partial.id + "\n";
partialJoin.push({
id: partial.id,
from: viewFrom,
dimMetrics: dimMetrics,
metrics: partial.metrics,
dimensions: partial.dimensions,
keys: partial.keys,
clauses: partial.clauses,
origin: partial.origin
});
ableToJoin = true;
}
else {
/*
If the segment has just one view, nothing can be
done at this point, so just reinsert this view into the
set of views.
*/
partialJoin.push(segment[i][0]);
}
}
/*
If at least one join was made in the last part (a segment
with more than one view), return to the beginning of the
loop.
This allows, after a join, removing the dimensions that were
chosen only for that join and are no longer required.
Ideally all joins would be restricted to the join method used
above, but in some cases this cannot be done.
So if all the segments have only one view inside, move
on to the next method.
*/
if (!ableToJoin) {
/*
At this point 2 views will be joined. First the
similarity of each pair of views is calculated;
the pair with the biggest similarity will be joined.
Similarity is calculated as the number of common
dimensions in the keys.
*/
let similarity = 0;
let idx0 = 0;
let idx1 = 1;
for (let i = 0; i < partialJoin.length; ++i) {
for (let j = i + 1 ; j < partialJoin.length; ++j) {
const pi = partialJoin[i].keys;
const pj = partialJoin[j].keys;
let score = this.similarDimensions (pi, pj);
if (similarity < score) {
similarity = score;
idx0 = i;
idx1 = j;
}
}
}
const partial0 = partialJoin[idx0];
const partial1 = partialJoin[idx1];
partialJoin.splice(idx1, 1);
partialJoin.splice(idx0, 1);
/*
Once the views are selected they are joined with the
same method, concatenating their attributes and
removing duplicates; however, the nasty effect of
this join is the creation of "dimensional metrics".
"Dimensional metrics" are metrics that can no longer
be aggregated, and from this point to the end
of a query they act as dimensions.
This change happens to avoid the inconsistency generated
by a join where one row of one table can be connected
to more than one row of another table.
Take this example.
View0 : metrics [met0], dimensions [dim0]
values: [{met0: 10, dim0: 1}]
View1 : metrics [met1], dimensions [dim2]
values: [{met1: 10, dim2: 1}, {met1: 5, dim2: 2}]
View2 : metrics [], dimensions [dim0, dim1, dim2]
values: [
{dim0: 1, dim1: 1, dim2: 1},
{dim0: 1, dim1: 1, dim2: 2}
]
The query is metrics [met0, met1] and dimensions [dim1].
First a join of View0 and View2 is made; the result
is: [
{dim0: 1, dim1: 1, dim2: 1, met0: 10},
{dim0: 1, dim1: 1, dim2: 2, met0: 10}
]
Note that the value of met0 is duplicated.
Now dim0 is removed and the result is joined with View1,
giving: [
{met1: 10, dim1: 1, dim2: 1, met0: 10},
{met1: 5 , dim1: 1, dim2: 2, met0: 10}
]
Let's assume that the aggregation is SUM.
If we remove dim2 and re-aggregate, the result is: [
{met1: 15, dim1: 1, met0: 20}
]
This result is wrong: the replication of the value of
met0 affects the result.
Note that if met1 were not required, the dimensions would
be reduced first (leaving dim0 and dim1), then joined and
reduced again, resulting in: [
{dim1: 1, met0: 10}
]
In this case there is no duplication and the aggregation
does not include more rows than it should.
To solve this problem met0 must become a dimension,
in other words, it must not be aggregated again. If met0 is
not aggregated in the query met0, met1, dim1 the result
is: [
{met1: 15, dim1: 1, met0: 10}
]
which is consistent.
After this extremely long explanation, what must be
known is: joining views with different keys
generates "dimensional metrics".
Views with "dimensional metrics" cannot be used for future
queries because they cannot be re-aggregated, so this must be
avoided; they are one-query-only views.
*/
let dimMetrics: Metric[];
let mets: Metric[];
let dims = partial0.dimensions.concat(partial1.dimensions);
dims = this.removeDuplicatedDimensions(dims);
let keys = partial0.keys.concat(partial1.keys);
keys = this.removeDuplicatedDimensions(keys);
if (partial0.keys.length === similarity) {
/*
Here the metrics become dimensions, but the effect
can be reduced. If the keys of partial0
are a subset of the keys of partial1,
then the number of rows of partial1 is not
affected; in other words, the metrics of partial1
can be aggregated and do not need to become
dimensions.
*/
partial0.dimMetrics = partial0.dimMetrics.concat(partial0.metrics);
partial0.metrics = [];
mets = partial1.metrics;
}
else if (partial1.keys.length === similarity) {
/*
The same occurs if the keys of partial1 are a subset
of the keys of partial0.
*/
/**
* Asynchronously inserts one record into a given Source.
* @param source - Insertion "location".
* @param data - Data to be inserted.
* @param cb - Callback function which contains the query result.
* @param cb.error - Error information when the method fails.
* @param cb.result - Query result.
*/
partial1.dimMetrics = partial1.dimMetrics.concat(partial1.metrics);
partial1.metrics = [];
mets = partial0.metrics;
public insertIntoSource(source: Source, data: any[], cb: (err: Error, result?: any[]) => void): void {
const query = this.getQueryFromSource(source, data);
this.executeQuery(query, cb);
}
else {
/*
But if neither set of keys is a subset of the other, then
both sides have their metrics turned into dimensions.
*/
/**
* Cast BlenDB data types to be used in PostgreSQL queries.
* @param quotedValue - SQL query attribute wrapped by quotes.
* @param dt - Attribute data type.
*/
partial0.dimMetrics = partial0.dimMetrics.concat(partial0.metrics);
partial0.metrics = [];
partial1.dimMetrics = partial1.dimMetrics.concat(partial1.metrics);
partial1.metrics = [];
mets = [];
}
dimMetrics = partial0.dimMetrics.concat(partial1.dimMetrics);
const partial = new View({
metrics: mets,
dimensions: dims,
keys: keys,
origin: false,
clauses: partial0.clauses.concat(partial1.clauses),
materialized: false
});
const id = new View({
metrics: mets.concat(dimMetrics),
dimensions: dims,
keys: keys,
origin: false,
clauses: partial0.clauses.concat(partial1.clauses),
materialized: false
}).id;
const viewFrom = "(" +
this.buildQuery(partial, [partial0, partial1]) +
") AS view_" + id + "\n";
partialJoin.push({
id: id,
from: viewFrom,
dimMetrics: dimMetrics,
metrics: mets,
dimensions: dims,
keys: keys,
clauses: partial.clauses,
origin: false
});
}
}
}
/*
When only one view remains, the query is made and a ";"
is added at the end.
TODO: Probably this last line adds one more
layer to the query, which is in fact unnecessary.
Think of a way to remove it.
*/
return this.buildQuery(view, partialJoin) + ";";
}
/**
* Asynchronously insert one register into a given Source.
* @param source - Insertion "location".
* @param data - Data to be inserted.
* @param cb - Callback function which contains the query result.
* @param cb.error - Error information when the method fails.
* @param cb.result - Query result.
*/
public insertIntoSource(source: Source, data: any[], cb: (err: Error, result?: any[]) => void): void {
const query = this.getQueryFromSource(source, data);
this.executeQuery(query, cb);
}
private searchMaterializedViews(view: View): View[] {
let r: View[] = [];
if (view.materialized) {
return [view];
}
else {
let children = view.childViews;
for (let i = 0; i < children.length; ++i) {
r = r.concat(this.searchMaterializedViews(children[i]));
}
}
return r;
}
private buildQuery(target: View, views: ExpandedView[]): string {
const metrics = target.metrics;
const dimensions = target.dimensions;
const clauses = target.clauses;
let dimMap: {[key: string]: DimInfo} = {};
let nameMap: {[key: string]: ExpandedView} = {};
for (let i = 0; i < views.length; ++i) {
const mets = views[i].metrics;
const dims = views[i].dimensions;
for (let j = 0; j < mets.length; ++j) {
if (!nameMap[mets[j].name]) {
nameMap[mets[j].name] = views[i];
}
}
for (let j = 0; j < dims.length; ++j) {
if (!dimMap[dims[j].name]) {
dimMap[dims[j].name] = {
dim: dims[j],
views: [views[i]]
};
nameMap[dims[j].name] = views[i];
}
else {
dimMap[dims[j].name].views.push(views[i]);
}
}
}
// Projection
const strMetrics = metrics.map((metric) => {
const view = nameMap[metric.name];
if (view) {
return this.translateMetric(metric, view);
}
return "";
}).filter((item) => item !== "");
const parsedDimensions = dimensions.map((dimension) => {
let dim = dimension;
while (!nameMap[dim.name]) {
dim = dim.parent;
}
const view = nameMap[dim.name];
return this.translateDimension(dimension, dim, view);
});
let parsedDimMetrics: DimTranslation[] = [];
for (let i = 0; i < views.length; ++i) {
const dimMets = views[i].dimMetrics.map((item) => {
return this.translateDimMetric(item, views[i]);
});
parsedDimMetrics = parsedDimMetrics.concat(dimMets);
}
const totalDimensions = parsedDimensions.concat(parsedDimMetrics);
const strDimensions = totalDimensions.map ((item) => item.aliased);
const grouped = totalDimensions.map((item) => item.noalias);
const elements = strMetrics.concat(strDimensions);
// Joins
let conds: string[] = [];
for (let i in dimMap) {
let remainViews = dimMap[i].views.slice();
let dim = dimMap[i].dim;
let leftSide = this.buildColumn(dim, remainViews.shift().id);
while (remainViews.length > 0) {
const id = remainViews.shift().id;
const rightSide = this.buildColumn(dim, id);
conds.push(leftSide + " = " + rightSide);
}
}
// Selection
let covered: Clause[] = [];
for (let i = 0; i < views.length; ++i) {
// Get the clauses that children already cover
covered = covered.concat(views[i].clauses);
}
const toCover = clauses.filter((item) => !covered.some ((clause) => {
return clause.id === item.id;
}));
toCover.forEach((item) => {
const clause = "(" + this.translateClause(item, nameMap) + ")";
if (clause !== "()") {
conds.push(clause);
}
});
// Assembly
const projection = "SELECT " + elements.join(",");
const source = " FROM " + views.map((view) => view.from).join(",");
const selection = (conds.length > 0) ? " WHERE " + conds.join(" AND ") : "";
let grouping = "";
if (grouped.length > 0) {
grouping = " GROUP BY " + grouped.join(",");
}
return projection + source + selection + grouping;
}
private getAggregateFunction(aggrType: AggregationType, origin: boolean): string {
switch (aggrType) {
case AggregationType.SUM:
return "SUM";
case AggregationType.AVG:
return "AVG";
case AggregationType.COUNT:
return (origin) ? "COUNT" : "SUM";
case AggregationType.MAX:
return "MAX";
case AggregationType.MIN:
return "MIN";
default:
return "";
}
}
/**
* Cast BlenDB data types to be used in PostgreSQL queries.
* @param quotedValue - SQL query attribute wrapped by quotes.
* @param dt - Attribute data type.
*/
protected typeCast(quotedValue: string, dt: DataType): string {
switch (dt) {
case DataType.DATE:
return quotedValue + "::DATE";
case DataType.INTEGER:
return quotedValue + "::INTEGER";
case DataType.BOOLEAN:
return quotedValue + "::BOOLEAN";
default:
return quotedValue;
}
}
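/*
Example (illustrative): typeCast("'5'", DataType.INTEGER) returns
"'5'::INTEGER", which PostgreSQL evaluates as the integer 5; a data
type without a case above passes through with its quotes unchanged.
*/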
private translateRelation(relation: RelationType, arg: string): string {
switch (relation) {
case RelationType.DAY:
return this.applyRelation("EXTRACT", ["DAY FROM "], [arg]);
case RelationType.MONTH:
return this.applyRelation("EXTRACT", ["MONTH FROM "], [arg]);
case RelationType.YEAR:
return this.applyRelation("EXTRACT", ["YEAR FROM "], [arg]);
case RelationType.DAYOFWEEK:
return this.applyRelation("EXTRACT", ["DOW FROM "], [arg]);
default:
return "";
}
}
private applyRelation(name: string, args: string[], values: string[]): string {
/*
This adapter uses the concept of functions in Postgres to
implement BLENDB sub-dimension relations. This function
applies the transformation to build the call of a Postgres
function. Note that the function can be native to Postgres,
like EXTRACT, or even implemented on the database.
This function is short and only used in the translateRelation
method; however, it is a bit complex and may be used
several times, so it is kept apart to make updates easier
and to avoid problems.
Example:
applyRelation("EXTRACT", ["DAY FROM "], ["view_0.date"])
output: EXTRACT(DAY FROM view_0.date)
*/
return name + "(" + args.map((item, idx) => item + values[idx]).join(",") + ")";
}
private buildColumn (item: Metric|Dimension, id: string): string {
const quotedName = "\"" + item.name + "\"";
return "view_" + id + "." + quotedName;
}
private translateClause(clause: Clause, map: {[key: string]: ExpandedView}): string {
const r = clause.filters.map((item) => {
return this.translateFilter(item, map);
}).filter((item) => {
return item !== "";
});
return r.join(" OR ");
}
private translateFilter(filter: Filter, map: {[key: string]: ExpandedView}): string {
if (!map[filter.target.name]) {
return "";
}
const viewId = map[filter.target.name].id;
const leftSide = this.buildColumn(filter.target, viewId);
const op = this.translateOperator(filter.operator);
const dataType = this.translateDataType(filter.target.dataType);
const quotedValue = "'" + filter.value + "'";
return leftSide + op + quotedValue + dataType;
}
private translateMetric(metric: Metric, view: ExpandedView): string {
const func = this.getAggregateFunction(metric.aggregation, view.origin);
const quotedName = "\"" + metric.name + "\"";
const extMetric = func + "(" + this.buildColumn(metric, view.id) + ")";
return extMetric + " AS " + quotedName;
}
private translateDimMetric(metric: Metric, view: ExpandedView): DimTranslation {
const quotedName = "\"" + metric.name + "\"";
const extMetric = this.buildColumn(metric, view.id);
return { aliased: extMetric + " AS " + quotedName, noalias: extMetric };
}
private translateDimension(dimension: Dimension,
ancestor: Dimension,
view: ExpandedView): DimTranslation {
const quotedName = "\"" + dimension.name + "\"";
let extDimension = this.buildColumn(ancestor, view.id);
let aux = dimension;
while (aux.name !== ancestor.name) {
extDimension = this.translateRelation(aux.relation, extDimension);
aux = aux.parent;
}
return { aliased: extDimension + " AS " + quotedName, noalias: extDimension };
}
private translateOperator(op: FilterOperator): string {
switch (op) {
case FilterOperator.EQUAL:
return " = ";
case FilterOperator.NOTEQUAL:
return " != ";
case FilterOperator.GREATER:
return " > ";
case FilterOperator.LOWER:
return " < ";
case FilterOperator.GREATEREQ:
return " >= ";
case FilterOperator.LOWEREQ:
return " <= ";
default:
return "";
}
}
/**
* Translate filter operator to be used in PostgreSQL queries.
* @param lSide - Operation left side operator.
* @param rSide - Operation right side operator.
* @param op - Operation to be performed.
*/
protected applyOperator(lSide: string, rSide: string, op: FilterOperator): string {
switch (op) {
case FilterOperator.EQUAL:
return lSide + " = " + rSide;
case FilterOperator.NOTEQUAL:
return lSide + " != " + rSide;
case FilterOperator.GREATER:
return lSide + " > " + rSide;
case FilterOperator.LOWER:
return lSide + " < " + rSide;
case FilterOperator.GREATEREQ:
return lSide + " >= " + rSide;
case FilterOperator.LOWEREQ:
return lSide + " <= " + rSide;
default:
return "";
}
}
private translateDataType(dt: string): string {
switch (dt) {
case "date":
return "::DATE";
case "integer":
return "::INTEGER";
case "boolean":
return "::BOOLEAN";
default:
return "";
}
}
private compareKeys(a: Dimension[], b: Dimension[]): number {
let length = 0;
let res = a.length - b.length;
if (a.length < b.length) {
length = a.length;
}
else {
length = b.length;
}
for (let i = 0; i < length; ++i) {
if (a[i].name < b[i].name) {
return -1;
}
else if (a[i].name > b[i].name) {
return 1;
}
}
return res;
}
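/*
Example (illustrative): for key lists named ["a", "b"] and ["a", "c"]
the second pair differs, so compareKeys returns a negative number
("b" < "c"); for ["a"] and ["a", "b"] every shared pair matches, so
the length difference (-1) decides the order.
*/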
private similarDimensions(a: Dimension[], b: Dimension[]): number {
let count = 0;
for (let i = 0; i < a.length; ++i) {
if (b.some((itemB) => a[i].name === itemB.name)) {
count++;
}
}
return count;
}
private removeDuplicatedDimensions(candidateDims: Dimension[]): Dimension[] {
let filterDims: { [key: string]: boolean } = {};
const dims = [];
for (let i = 0; i < candidateDims.length; ++i) {
if (!filterDims[candidateDims[i].name]) {
dims.push(candidateDims[i]);
filterDims[candidateDims[i].name] = true;
}
}
return dims;
}
}
/*
* Copyright (C) 2016 Centro de Computacao Cientifica e Software Livre
* Departamento de Informatica - Universidade Federal do Parana
*
* This file is part of blend.
*
* blend is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* blend is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with blend. If not, see <http://www.gnu.org/licenses/>.
*/
import { Adapter } from "../core/adapter";
import { Metric } from "../core/metric";
import { Source } from "../core/source";
import { Dimension } from "../core/dimension";
import { Clause } from "../core/clause";
import { Filter, FilterOperator } from "../core/filter";
import { AggregationType, RelationType, DataType } from "../common/types";
import { Operation, Opcode } from "../common/expression";
import { View } from "../core/view";
import { Tsort } from "../util/tsort";
/**
* Information required to make a join clause.
* The dimension and views that contain this dimension.
*/
interface DimInfo {
/** Dimension object. */
dim: Dimension;
/** Set of views that contain the dimension. */
views: View[];
}
/**
* Several translations of a dimension that will be used in different
* parts of a query
*/
interface DimTranslation {
/** Translation with the AS clause. Used on SELECT clause. */
aliased: string;
/** Translation without the AS clause. Used on GROUP BY clause. */
noalias: string;
/** The alias of the dimension. The quoted name. */
alias: string;
/** Expansion of a sub dimension. */
expanded: boolean;
}
/** Translated view, the query which gets all data and its name */
interface QueryAndName {
/** Query that returns all the data of a view. */
query: string;
/** The view name. */
name: string;
}
/** Dictionary indexed by view name, that returns the QueryAndName object. */
interface QNMap {
[key: string]: QueryAndName;
}
/**
* Two Dictionaries, both indexed with a dimension name.
* Used to get the views where a dimension is.
* The dimensions also have the Dimension object inside.
*/
interface DimAndNameMap {
/** Dictionary indexed by dimension name that returns DimInfo */
dimensions: {[key: string]: DimInfo};
/**
* Dictionary indexed by dimension name that returns
* a view where this dimension is. Used as right side
* of a join clause.
*/
views: {[key: string]: View};
}
/**
* Generic SQL adapter.
* Contains most of the implementation needed to perform a SQL query.
* However, each database has its peculiarities, so this
* adapter must be extended for each DBMS.
*/
export abstract class SQLAdapter extends Adapter {
/**
* Translate a view to a SQL query.
* @param view - View to be translated.
*/
public getQueryFromView(view: View): string {
const map = this.buildPartials(view);
const topSort = Tsort.view(view);
const partials = topSort.map((i) => {
return (map[i]) ? map[i] : {query : "", name: i};
}).filter((i) => {
return i.query !== "";
}).map((i) => {
return i.name + " AS (" + i.query + ")";
});
let withClause = "";
if (partials.length > 0) {
withClause = "WITH " + partials.join(", ") + " ";
}
let sort = "";
if (view.sort.length > 0) {
// Sorting
const order = view.sort.map((item) => {
return "\"" + item.name + "\"";
}).join(",");
sort = " ORDER BY " + order;
}
const dimensions = view.dimensions.map((item) => {
return "\"" + item.name + "\"";
}).join(",");
const metrics = view.metrics.map((item) => {
return "\"" + item.name + "\"";
}).join(",");
let attributes = "";
if (dimensions.length > 0) {
if (metrics.length > 0) {
attributes = dimensions + "," + metrics;
}
else {
attributes = dimensions;
}
}
else {
attributes = metrics;
}
return withClause + "SELECT " + attributes + " FROM " + view.name + sort + ";";
}
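/*
Illustrative only: the rough shape of the SQL this method emits for
a view with one non-materialized partial, one metric and one sorted
dimension (all names below are made up):

WITH view_a AS (SELECT ...)
SELECT "dim:x","met:y" FROM view_top ORDER BY "dim:x";
*/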
/**
* Translates a view to a SQL sub-query. The main difference between
* this method and getQueryFromView is that a sub-query cannot have
* sorting properties. Also, a sub-query is called a partial, and
* partials can only have one metric.
* @param view - View which the partial will be built.
*/
private buildPartials(view: View): QNMap {
let op = view.operation;
let queue: View[] = op.values.map((i) => i);
const output: QNMap = {};
output[view.name] = {
query: this.operation(op, view),
name: view.name
};
const map: {[key: string]: boolean } = {};
while (queue.length > 0) {
const partial = queue.shift();
if (!map[partial.id]) {
const query = this.operation(partial.operation, partial);
if (query !== "") {
map[partial.id] = true;
if (!output[partial.name]) {
output[partial.name] = {
query: query,
name: partial.name
};
}
queue = queue.concat(partial.operation.values);
}
}
}
return output;
}
/**
* Constructs a query from a view based on a given operation.
* @param op - Operation used to construct the view.
* @param view - View to be querified.
*/
private operation(op: Operation, view: View): string {
switch (op.opcode) {
case Opcode.REDUCE:
return this.buildOperation(view, op.values, false);
case Opcode.JOIN:
return this.buildOperation(view, op.values, true);
default:
// It is unnecessary to make a push function: push = materialized,
// and there is no need for materialized partials
return "";
}
}
/**
* Constructs a query for a specific view from a set of given partials.
* @param view - View to be built.
* @param partials - Partials (other views) required to build the view.
* @param isJoin - Used to check if join clauses must be added.
*/
private buildOperation(view: View, partials: View[], isJoin: boolean): string {
// Mapping: find which views the metrics and dimensions are in
const mapping = this.buildMaps(partials);
// Projection
const metrics = view.metrics.map((i) => {
const sourceView = mapping.views[i.name];
return this.translateMetric(i, sourceView);
});
const dimensions = view.dimensions.map((dimension) => {
let dim = dimension;
while (!mapping.views[dim.name]) {
dim = dim.parent;
}
const sourceView = mapping.views[dim.name];
return this.translateDimension(dimension, dim, sourceView);
});
const projElem = dimensions.map((i) => i.aliased).concat(metrics);
// Grouping
const grouped = dimensions.map((item) => {
return (item.expanded) ? item.alias : item.noalias;
});
// Selection
const conds = [];
const clauses = this.orphanClauses(view, partials);
for (let i = 0; i < clauses.length; ++i) {
const trClause = this.translateClause(clauses[i], mapping.views);
if (trClause) {
conds.push("(" + trClause + ")");
}
}
// Joining
if (isJoin) {
const dimMap = mapping.dimensions;
for (let i of Object.keys(dimMap)) {
if (dimMap[i].views.length > 1) {
const views = dimMap[i].views;
const dim = dimMap[i].dim;
const leftSide = this.buildColumn(dim, views.shift().name);
while (views.length > 0) {
const rightSide = this.buildColumn(dim, views.shift().name);
conds.push("(" + leftSide + "=" + rightSide + ")");
}
}
}
}
// Assembly
const projection = "SELECT " + projElem.join(", ");
const sources = "FROM " + partials.map((i) => (i.name)).join(", ");
const selection = (conds.length > 0) ? " WHERE " + conds.join(" AND ") : "";
let grouping = "";
if (grouped.length > 0) {
grouping = " GROUP BY " + grouped.join(",");
}
return projection + sources + selection + grouping;
}
/**
* Constructs and returns the dictionaries which inform
* in which views a given dimension is.
* @param views - Set of views to be mapped.
*/
private buildMaps(views: View[]): DimAndNameMap {
let dimMap: {[key: string]: DimInfo} = {};
let nameMap: {[key: string]: View} = {};
for (let i = 0; i < views.length; ++i) {
const mets = views[i].metrics;
const dims = views[i].dimensions;
for (let j = 0; j < mets.length; ++j) {
if (!nameMap[mets[j].name]) {
nameMap[mets[j].name] = views[i];
}
}
for (let j = 0; j < dims.length; ++j) {
if (!dimMap[dims[j].name]) {
dimMap[dims[j].name] = {
dim: dims[j],
views: [views[i]]
};
nameMap[dims[j].name] = views[i];
}
else {
dimMap[dims[j].name].views.push(views[i]);
}
}
}
return {
dimensions: dimMap,
views: nameMap
};
}
/**
* Returns the set of clauses of the given view that are not fulfilled
* by any of the partials. In other words, clauses that have not been
* added to the query yet.
* @param view - View with all clauses.
* @param partials - Views with some of the clauses.
*/
private orphanClauses(view: View, partials: View[]): Clause[] {
let parentClauses: Clause[] = [];
for (let i = 0; i < partials.length; ++i) {
parentClauses = parentClauses.concat(partials[i].clauses);
}
// return clauses that do not exist in the partials
return view.clauses.filter((i) => !parentClauses.some((j) => j.id === i.id));
}
/**
* Parse a BlenDB enumeration of aggregation functions to a proper string
* to be used in the SQL query.
* @param aggrType - Aggregation function.
* @param origin - Whether it will be applied to a view with the origin flag.
*/
private getAggregateFunction(aggrType: AggregationType, origin: boolean): string {
switch (aggrType) {
case AggregationType.SUM:
return "SUM";
case AggregationType.AVG:
return "AVG";
case AggregationType.COUNT:
return (origin) ? "COUNT" : "SUM";
case AggregationType.MAX:
return "MAX";
case AggregationType.MIN:
return "MIN";
default:
return "";
}
}
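/*
Note on the COUNT case (the intent suggested by the origin flag
above): a partial that already counted its rows must not be counted
again, so when the view is not an origin the partial counts are
summed instead, e.g. partial counts [3, 2] re-aggregate to SUM = 5.
*/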
/**
* Parse a BlenDB enumeration of relationships to a proper string
* to be used in the SQL query.
* @param relation - The relation function that will be used.
* @param arg - The attribute that is the function argument.
*/
private translateRelation(relation: RelationType, arg: string): string {
switch (relation) {
case RelationType.DAY:
return this.applyRelation("EXTRACT", ["DAY FROM "], [arg]);
case RelationType.MONTH:
return this.applyRelation("EXTRACT", ["MONTH FROM "], [arg]);
case RelationType.YEAR:
return this.applyRelation("EXTRACT", ["YEAR FROM "], [arg]);
default:
return "";
}
}
/**
* Generic translate a SQL function to a query
* @param name - Function name.
* @param args - Function argument names.
* @param values - Function argument values.
*/
private applyRelation(name: string, args: string[], values: string[]): string {
/*
This adapter uses the concept of functions in Postgres to
implement BLENDB sub-dimension relations. This function
applies the transformation to build the call of a Postgres
function. Note that the function can be native to Postgres,
like EXTRACT, or even implemented on the database.
This function is short and only used in the translateRelation
method; however, it is a bit complex and may be used
several times, so it is kept apart to make updates easier
and to avoid problems.
Example:
applyRelation("EXTRACT", ["DAY FROM "], ["view_0.date"])
output: EXTRACT(DAY FROM view_0.date)
*/
return name + "(" + args.map((item, idx) => item + values[idx]).join(",") + ")";
}
/**
* Add quotes and the proper view name to an attribute in a SQL query.
* @param item - Attribute to get the name quoted.
* @param viewName - View name used to prefix the quoted column.
*/
private buildColumn (item: Metric|Dimension, viewName: string): string {
const quotedName = "\"" + item.name + "\"";
return viewName + "." + quotedName;
}
/**
* Translate Clause object into a SQL query condition.
* @param clause - Clause to be translated.
* @param map - Dictionary indexed by attribute name
* containing its locations.
*/
private translateClause(clause: Clause, map: {[key: string]: View}): string {
const r = clause.filters.map((item) => {
return this.translateFilter(item, map);
}).filter((item) => {
return item !== "";
});
return r.join(" OR ");
}
/**
* Translate Filter object into a SQL query condition.
* @param filter - Filter to be translated.
* @param map - Dictionary indexed by attribute name
* containing its locations.
*/
private translateFilter(filter: Filter, map: {[key: string]: View}): string {
if (!map[filter.target.name]) {
return "";
}
const viewName = map[filter.target.name].name;
const leftSide = this.buildColumn(filter.target, viewName);
const quotedValue = "'" + filter.value + "'";
const castedValue = this.typeCast(quotedValue, filter.target.dataType);
return this.applyOperator(leftSide, castedValue, filter.operator);
}
/**
* Translate Metric object into a SQL query attribute.
* @param metric - Metric to be translated.
* @param view - View which contains the metric. Used to create the name
*/
private translateMetric(metric: Metric, view: View): string {
const func = this.getAggregateFunction(metric.aggregation, view.origin);
const quotedName = "\"" + metric.name + "\"";
const extMetric = func + "(" + this.buildColumn(metric, view.name) + ")";
return extMetric + " AS " + quotedName;
}
/**
* Translate Dimension object into a SQL query attribute.
* @param dimension - Dimension to be translated.
* @param ancestor - Dimension ancestor found in the view
* @param view - View which contains the dimenion/ancestor.
* Used to create the name.
*/
private translateDimension(dimension: Dimension,
ancestor: Dimension,
view: View): DimTranslation {
const quotedName = "\"" + dimension.name + "\"";
let extDimension = this.buildColumn(ancestor, view.name);
let aux = dimension;
let expanded = false;
while (aux.name !== ancestor.name) {
extDimension = this.translateRelation(aux.relation, extDimension);
aux = aux.parent;
expanded = true;
}
return {
aliased: extDimension + " AS " + quotedName,
noalias: extDimension,
alias: quotedName,
expanded: expanded
};
}
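/*
Example (illustrative, names made up): for a sub-dimension
"dim:date:month" whose parent "dim:date" lives in view_a, the loop
wraps the column once per relation level, producing:

EXTRACT(MONTH FROM view_a."dim:date") AS "dim:date:month"
*/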
/**
* Generates a generic insertion query for one register.
* @param source - Data insertion "location".
* @param data - Data to be inserted.
*/
public getQueryFromSource(source: Source, data: any[]): string {
let columns: string[] = source.fields.map(o => o.name);
let values: string[] = [];
const total = Object.keys(data).length;
for (let i = 0; i < total; i++) {
values[i] = data[columns[i]];
}
let query = "INSERT INTO " + source.name + " (\"";
query = query.concat(columns.join("\",\""));
query = query.concat("\") VALUES ('");
query = query.concat(values.join("' , '"));
query = query.concat("')");
return query;
}
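/*
Example (illustrative): for a source named Product with fields
[name, id] and data {name: "Bacon", id: 1}, the method returns:

INSERT INTO Product ("name","id") VALUES ('Bacon' , '1')

Every value is sent quoted; the database casts it on insertion.
*/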
/**
* Parse a filter operation. Varies in each database.
* @param lSide - Operation left side operator.
* @param rSide - Operation right side operator.
* @param op - Operation to be performed.
*/
protected abstract applyOperator(leftSide: string, rightSide: string, op: FilterOperator): string;
/**
* Cast BlenDB types to proper database types.
* @param quotedValue - SQL query attribute wrapped by quotes.
* @param dt - Attribute data type.
*/
protected abstract typeCast(quotedValue: string, dt: DataType): string;
}
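/*
For illustration only: a minimal sketch of how a concrete adapter
might fill in the two abstract hooks above. The class name is an
assumption, and a real subclass must also implement the remaining
abstract members inherited from Adapter (e.g. query execution).

import { SQLAdapter } from "./sql";
import { FilterOperator } from "../core/filter";
import { DataType } from "../common/types";

export class ExampleAdapter extends SQLAdapter {
protected applyOperator(lSide: string, rSide: string, op: FilterOperator): string {
switch (op) {
case FilterOperator.EQUAL:
return lSide + " = " + rSide;
case FilterOperator.NOTEQUAL:
return lSide + " <> " + rSide;
default:
return "";
}
}
protected typeCast(quotedValue: string, dt: DataType): string {
// CAST(x AS t) is the portable spelling of PostgreSQL's x::t
switch (dt) {
case DataType.DATE:
return "CAST(" + quotedValue + " AS DATE)";
case DataType.INTEGER:
return "CAST(" + quotedValue + " AS INTEGER)";
default:
return quotedValue;
}
}
}
*/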
@@ -20,37 +20,339 @@
import * as request from "supertest";
import { expect } from "chai";
import * as server from "../../main";
import { Adapter } from "../../core/adapter";
import { ConfigParser, ParsedConfig } from "../../util/configParser";
import { Fixture as FixPostgres } from "../../../test/postgres/fixture";
import { Fixture as FixMonet } from "../../../test/monet/fixture";
import { MonetAdapter, MonetConfig } from "../../adapter/monet";
import { PostgresAdapter } from "../../adapter/postgres";
import { eachOf } from "async";
import { Source } from "../../core/source";
let adapter: Adapter[] = [];
let fixture;
let config: ParsedConfig;
function loadDb(db: string, index: number, cb: (err: any, result: Adapter) => void): void {
let adapter: Adapter;
if (db === "postgres") {
fixture = new FixPostgres(config.connections[index]);
fixture.loadSource(config.sources, (err) => {
if (err) {
throw err;
}
adapter = new PostgresAdapter(config.connections[index]);
cb(null, adapter);
});
}
else if (db === "monet") {
fixture = new FixMonet(config.connections[index]);
fixture.loadSource(config.sources, (err) => {
if (err) {
throw err;
}
let parsedConfig: MonetConfig = {
user: config.connections[index].user,
dbname: config.connections[index].database,
password: config.connections[index].password,
host: config.connections[index].host,
port: config.connections[index].port
};
adapter = new MonetAdapter(parsedConfig);
cb(null, adapter);
});
}
else {
cb("invalid adapter", null);
}
}
describe("API collect controller", () => {
// Initializing
before(function (done): void {
// An arrow function is not used here so that `this` is the mocha
// context, which allows skipping the test suite
let configPath;
if (process.env.BLENDB_SCHEMA_FILE) {
configPath = process.env.BLENDB_SCHEMA_FILE;
}
else {
throw new Error("BLENDB_SCHEMA_FILE wasn't informed");
}
config = ConfigParser.parse(configPath);
eachOf(config.adapters, function(database, key: number, callback) {
loadDb(database, key, function(err, result) {
if (err) {
return callback(err);
}
else {
adapter[key] = result;
}
callback();
});
}, (err) => {
// Arrow function keeps `this` bound to the mocha context,
// so this.skip() still works when the setup fails
if (err) {
this.skip();
}
else {
done();
}
});
});
it("should respond 500 when req.params.class does not exist on Sources", (done) => {
request(server)
.post("/v1/collect/thisisjustatest")
.send({"fields:1": 1, "fields:2": 2})
.expect(500)
.expect((res: any) => {
const message = "Query execution failed: " +
"Could not construct query with the given parameters.";
const error = "[Engine Error] Required Source: 'thisisjustatest' do not exist in the database."+
" Check source spelling and database configuration.";
expect(res.body).to.be.an("object");
expect(res.body).to.have.property("message");
expect(res.body.message).to.be.eql(message);
expect(res.body.error).to.be.eql(error);
it("should respond 400 when _id field is in request body", (done) => {
})
.end(done);
});
it("should respond 500 when name does not exist in Product from market", (done) => {
request(server)
.post("/v1/collect/Product")
.send({
"pricein": 17.51,
"priceout": 30.55,
"validity": "2018-05-16",
"id": 5
})
.expect(500)
.expect((res: any) => {
const message = "Query execution failed: " +
"Could not construct query with the given parameters.";
const error = "[Collect error] 'name' is mandatory, but no data was received."+
" Review the data sent.";
expect(res.body).to.be.an("object");
expect(res.body).to.have.property("message");
expect(res.body.message).to.be.eql(message);
expect(res.body.error).to.be.eql(error);
})
.end(done);
});
it("should respond 200 when data has been stored on Product", (done) => {
request(server)
.post("/v1/collect/class")
.send({"_id": 1})
.expect(400)
.post("/v1/collect/Sell")
.send({
"Registered": false,
"Product.id": 1,
"Client.id": 1,
"Seller.id": 3,
"Quantity": 10,
"Datein" : "2017-10-30"
})
.expect(200)
.expect((res: any) => {
const message = "Property named \"_id\" is protected.";
const message = "Data has been successfully received and stored by the server";
expect(res.body).to.be.an("object");
expect(res.body).to.have.property("message");
expect(res.body.message).to.be.eql(message);
})
.end(done);
});
it("should respond 500 when value isn't defined on enumtype ", (done) => {
request(server)
.post("/v1/collect/Seller")
.send({
"name": "radom",
"age" : 25,
"sex" : "thisisjustatest",
"CPF" : "145.827.483-76",
"id" : 4
})
.expect(500)
.expect((res: any) => {
const message = "Query execution failed: " +
"Could not construct query with the given parameters.";
const error = "[Collect error] EnumType: 'thisisjustatest' from 'sex' isn't allowed on enumsex."
+ " Review configuration files."
expect(res.body).to.be.an("object");
expect(res.body).to.have.property("message");
expect(res.body.message).to.be.eql(message);
expect(res.body.error).to.be.eql(error);
it("should respond 500 on error on writing in the database", (done) => {
})
.end(done);
});
it("should respond 500 when dataType from id isn't integer", (done) => {
request(server)
.post("/v1/collect/class")
.send({"field1": 1, "field2": 2})
.post("/v1/collect/Product")
.send({
"name":"strategy",
"pricein": 17.51,
"priceout": 30.55,
"validity": "1991-05-16",
"id": "nope"
})
.expect(500)
.expect((res: any) => {
const message = "Error while writing to the database.";
const message = "Query execution failed: " +
"Could not construct query with the given parameters.";
const error = "[Collect error] Datatype: 'nope' from 'id' could not be converted to type: integer."+
" Review configuration files."
expect(res.body).to.be.an("object");
expect(res.body).to.have.property("message");
expect(res.body.message).to.be.eql(message);
expect(res.body.error).to.be.eql(error);
})
.end(done);
});
it("should respond 500 when dataType from pricein isn't float", (done) => {
request(server)
.post("/v1/collect/Product")
.send({
"name": "strategy",
"pricein": "notafloat",
"priceout": 30.55,
"validity": "1991-05-16",
"id": 5
})
.expect(500)
.expect((res: any) => {
const message = "Query execution failed: " +
"Could not construct query with the given parameters.";
const error = "[Collect error] Datatype: 'notafloat' from 'pricein' could not be converted to type: float." +
" Review configuration files."
expect(res.body).to.be.an("object");
expect(res.body).to.have.property("message");
expect(res.body.message).to.be.eql(message);
expect(res.body.error).to.be.eql(error);
})
.end(done);
});
it("should respond 500 when dataType from name isn't string", (done) => {
request(server)
.post("/v1/collect/Client")
.send({
"name": 5,
"CPF" : "500.345.583-65",
"id" : 5
})
.expect(500)
.expect((res: any) => {
const message = "Query execution failed: " +
"Could not construct query with the given parameters.";
const error = "[Collect error] Datatype: '5' from 'name' could not be converted to type: string."+
" Review configuration files.";
expect(res.body).to.be.an("object");
expect(res.body).to.have.property("message");
expect(res.body.message).to.be.eql(message);
expect(res.body.error).to.be.eql(error);
})
.end(done);
});
it("should respond 500 when dataType from fields:3 isn't boolean", (done) => {
request(server)
.post("/v1/collect/Sell")
.send({
"Registered": "notaboolean",
"Product.id": 1,
"Client.id": 9,
"Seller.id": 12,
"Quantity": 50,
"Datein" : "1980-01-30"
})
.expect(500)
.expect((res: any) => {
const message = "Query execution failed: " +
"Could not construct query with the given parameters.";
const error = "[Collect error] Datatype: 'notaboolean' from 'Registered' could not be converted to type: boolean."+
" Review configuration files.";
expect(res.body).to.be.an("object");
expect(res.body).to.have.property("message");
expect(res.body.message).to.be.eql(message);
expect(res.body.error).to.be.eql(error);
})
.end(done);
});
it("should respond 500 when the first dataType from Datein isn't a valid date", (done) => {
request(server)
.post("/v1/collect/Sell")
.send({
"Registered": "true",
"Product.id": 9,
"Client.id": 6,
"Seller.id": 8,
"Quantity": 23,
"Datein" : "1999-25-25"
})
.expect(500)
.expect((res: any) => {
const message = "Query execution failed: " +
"Could not construct query with the given parameters.";
const error = "[Collect error] Datatype: '1999-25-25' from 'Datein' could not be converted to type: date." +
" Review configuration files.";
expect(res.body).to.be.an("object");
expect(res.body).to.have.property("message");
expect(res.body.message).to.be.eql(message);
expect(res.body.error).to.be.eql(error);
})
.end(done);
});
it("should respond 500 when the first dataType from Datein isn't a valid format date", (done) => {
request(server)
.post("/v1/collect/Sell")
.send({
"Registered": "true",
"Product.id": 5,
"Client.id": 16,
"Seller.id": 13,
"Quantity": 7,
"Datein" : "1999/12/12"
})
.expect(500)
.expect((res: any) => {
const message = "Query execution failed: " +
"Could not construct query with the given parameters.";
const error = "[Collect error] Datatype: '1999/12/12' from 'Datein' could not be converted to type: date."+
" Review configuration files.";
expect(res.body).to.be.an("object");
expect(res.body).to.have.property("message");
expect(res.body.message).to.be.eql(message);
expect(res.body.error).to.be.eql(error);
})
.end(done);
});
it("should respond 200 when data has been stored on Seller", (done) => {
request(server)
.post("/v1/collect/Seller")
.send({
"name": "radom",
"age" : 25,
"sex" : "male",
"CPF" : "145.827.483-76",
"id" : 4
})
.expect(200)
.expect((res: any) => {
const message = "Data has been successfully received and stored by the server";
expect(res.body).to.be.an("object");
expect(res.body).to.have.property("message");
expect(res.body.message).to.be.eql(message);
})
.end(done);
});
});
@@ -19,16 +19,204 @@
*/
import * as express from "express";
import Request from "../types";
import { Source, Field } from "../../core/source";
import { EnumType } from "../../core/enumType";
import { DataType } from "../../common/types";
import { EnumHandler } from "../../util/enumHandler";
/**
* Dictionary indexed by a type name which returns a
* validation function that returns true if the
* object is a valid object of that type,
* or false otherwise.
*/
interface Valid {
[key: string]: (value: any) => boolean;
}
/**
* Controller responsible for the collect part of the API. In other
* words, the controller responsible for inserting data into BlenDB.
*/
export class CollectCtrl {
/**
* Route that validates and insert data.
* @param req - Object with request information
* @param res - Object used to create and send the response
* @param next - Call next middleware or controller. Not used but required
* by typescript definition of route.
*/
public static write(
req: Request,
res: express.Response,
next: express.NextFunction
) {
const validador: Valid = {
integer: function (value: any) {
let test = parseInt(value, 10);
if (test === undefined || isNaN(value)) {
return false;
} else {
return true;
}
},
float: function (value: any) {
let test = parseFloat(value);
if (test === undefined || isNaN(value)) {
return false;
} else {
return true;
}
},
string: function (value: any) {
let test = typeof value;
if (test === "string") {
return true;
} else {
return false;
}
},
date: function (value: any) {
let test: string[] = [];
let date = new Date(value);
try {
test = date.toISOString().split("T");
} catch (e) {
return false;
}
if (test[0] === value) {
return true;
} else {
return false;
}
},
boolean: function (value: any) {
let test = typeof value;
if (test === "boolean") {
return true;
} else {
let test: string = value;
test = test.toLocaleLowerCase();
if (test === "true" || test === "false") {
return true;
} else {
return false;
}
}
},
};
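// Illustrative outcomes of the validators above:
//   validador["integer"]("42")       -> true
//   validador["date"]("2018-05-16")  -> true
//   validador["date"]("1999/12/12")  -> false: toISOString() yields a
//     dash-separated date, which no longer matches the raw value
//   validador["boolean"]("TRUE")     -> true (lower-cased before comparing)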
let fields: Field[] = [];
let data: string[] = [];
let types: string[] = [];
let source: Source;
let enumType: EnumType;
const id: string = req.params.class;
try {
source = req.engine.getSourceByName(id);
// If the source does not exist then return an error
fields = source.fields;
for (let i = 0; i < fields.length; i++) {
data[i] = req.body[fields[i].name];
// Check if the data is empty. However, since the data may be
// true/false, first guarantee that it isn't a boolean
// before testing whether it is empty.
if (!(typeof data[i] === "boolean") && !data[i]) {
const message =
"[Collect error] '" +
fields[i].name +
"' is mandatory, but no data was received. Review the data sent.";
throw new Error(message);
}
req.log.debug(
"Sucessfuly accepted the data: " +
data[i] +
" from source: ",
source.name
);
}
for (let i = 0; i < fields.length; i++) {
// check if it's a valid enumtype
if (fields[i].dataType === DataType.ENUMTYPE) {
enumType = req.engine.getEnumTypeByName(fields[i].enumType);
types = enumType.values;
let found: boolean = false;
for (let j = 0; j < types.length; j++) {
if (data[i] === types[j]) {
found = true;
break;
}
}
if (!found) {
const message =
"[Collect error] EnumType: '" +
data[i] +
"' from '" +
fields[i].name +
"' isn't allowed on " +
fields[i].enumType +
". Review configuration files.";
throw new Error(message);
}
} else if (
!validador[
EnumHandler.stringfyDataType(fields[i].dataType)
](data[i])
) {
const message =
"[Collect error] Datatype: '" +
data[i] +
"' from '" +
fields[i].name +
"' could not be converted to type: " +
[EnumHandler.stringfyDataType(fields[i].dataType)] +
". Review configuration files.";
throw new Error(message);
}
req.log.debug(
"Sucessfuly accepted the enumType data: " +
data[i] +
" from source: ",
source.name
);
}
} catch (e) {
const message =
"Query execution failed: " +
"Could not construct query with the given parameters.";
req.log.warn(message);
res.status(500).json({
message: message,
error: e.message,
});
return;
}
req.adapter.insertIntoSource(
source,
req.body,
(err: Error, result: any[]) => {
if (err) {
const message = "Insertion has failed";
req.log.error(message, err);
res.status(500).json({
message: message,
error: err,
});
return;
} else {
const message =
"Data has been successfully received and stored by the server";
req.log.info(message);
res.status(200).json({ message: message });
return;
}
}
);
}
}
@@ -20,15 +20,18 @@
import * as request from "supertest";
import { expect } from "chai";
import * as server from "../../main";
import { dataCtrlScenario as tests } from "../../../test/scenario";
import { Query } from "../../common/query";
import { waterfall } from "async";
import * as fs from "fs";
interface StrQuery {
metrics: string;
dimensions: string;
filters?: string;
sort?: string;
format?: string;
}
function parseQuery(obj: Query): StrQuery {
@@ -51,8 +54,9 @@ describe("API data controller", () => {
.expect(500)
.expect((res: any) => {
const message = "Query execution failed: " +
"Could not construct query with the paramters given.";
const error = "The metric named met:-1 was not found";
"Could not construct query with the given parameters.";
const error = "[Engine Error] Metric: 'met:this:is:just:a:test' do not exist in the database." +
" Check metric spelling and database configuration.";
expect(res.body).to.be.an("object");
expect(res.body).to.have.property("message");
expect(res.body).to.have.property("error");
@@ -69,8 +73,30 @@ describe("API data controller", () => {
.expect(500)
.expect((res: any) => {
const message = "Query execution failed: " +
"Could not construct query with the paramters given.";
const error = "The dimension named dim:11 was not found";
"Could not construct query with the given parameters.";
const error = "[Engine Error] Dimension: 'dim:this:is:just:a:test' do not exist in the database."+
" Check dimension spelling and database configuration.";
expect(res.body).to.be.an("object");
expect(res.body).to.have.property("message");
expect(res.body).to.have.property("error");
expect(res.body.message).to.be.eql(message);
expect(res.body.error).to.be.eql(error);
})
.end(done);
});
it("should respond 500 when query has sort item that is not in query data", (done) => {
let query = parseQuery(tests.clausal);
query.sort = "dim:does:not:exist";
request(server)
.get("/v1/data")
.query(query)
.expect(500)
.expect((res: any) => {
const message = "Query execution failed: " +
"Could not construct query with the given parameters.";
const error = "The item 'dim:does:not:exist'" +
" is not present in neither metrics nor dimensions list";
expect(res.body).to.be.an("object");
expect(res.body).to.have.property("message");
expect(res.body).to.have.property("error");
@@ -102,10 +128,8 @@ describe("API data controller", () => {
});
it("should respond 200 and get some data, using a single filter", (done) => {
let query = parseQuery(tests.clausal);
query.filters = "dim:product:name==Bacon";
request(server)
.get("/v1/data")
.query(query)
@@ -127,10 +151,8 @@ describe("API data controller", () => {
});
it("should respond 200 and get some data, using filters with OR", (done) => {
let query = parseQuery(tests.clausal);
query.filters = "dim:product:name==Trento,dim:product:name==Meat";
request(server)
.get("/v1/data")
.query(query)
@@ -152,10 +174,77 @@ describe("API data controller", () => {
});
it("should respond 200 and get some data, using filters with AND", (done) => {
let query = parseQuery(tests.clausal);
query.filters = "dim:product:name!=Bacon;dim:product:validity!=2018-05-10";
request(server)
.get("/v1/data")
.query(query)
.expect(200)
.expect((res: any) => {
let result = res.body;
expect(result).to.be.an("array");
expect(result).to.have.length(3);
expect(result[0]).to.be.an("object");
let keys: string[] = [];
keys = keys.concat(tests.clausal.metrics.map((item) => item.name));
keys = keys.concat(tests.clausal.dimensions.map((item) => item.name));
result.forEach((row: any) => {
expect(row).to.be.an("object");
expect(row).to.have.all.keys(keys);
});
})
.end(done);
});
it("should respond 200 and get some data, sorted", (done) => {
let query = parseQuery(tests.clausal);
query.sort = "dim:product:id,met:product:avg:pricein";
request(server)
.get("/v1/data")
.query(query)
.expect(200)
.expect((res: any) => {
let result = res.body;
expect(result).to.be.an("array");
expect(result).to.have.length(5);
expect(result[0]).to.be.an("object");
let keys: string[] = [];
keys = keys.concat(tests.clausal.metrics.map((item) => item.name));
keys = keys.concat(tests.clausal.dimensions.map((item) => item.name));
for (let i = 0; i < result.length; ++i) {
const row = result[i];
expect(row).to.be.an("object");
expect(row).to.have.all.keys(keys);
// the magic number will be the first dim:product:id
expect(row["dim:product:id"]).to.be.eql(i + 53);
}
})
.end(done);
});
it("should respond 200 and get some data, using filters with GREATER", (done) => {
let query = parseQuery(tests.clausal);
query.filters = "dim:product:id>55";
request(server)
.get("/v1/data")
.query(query)
.expect(200)
.expect((res: any) => {
let result = res.body;
expect(result).to.be.an("array");
expect(result).to.have.length(2);
expect(result[0]).to.be.an("object");
let keys: string[] = [];
keys = keys.concat(tests.clausal.metrics.map((item) => item.name));
keys = keys.concat(tests.clausal.dimensions.map((item) => item.name));
result.forEach((row: any) => {
expect(row).to.be.an("object");
expect(row).to.have.all.keys(keys);
});
})
.end(done);
});
it("should respond 200 and get some data, using filters with LOWEREQ", (done) => {
let query = parseQuery(tests.clausal);
query.filters = "dim:product:id<=55";
request(server)
.get("/v1/data")
.query(query)
@@ -163,7 +252,7 @@ describe("API data controller", () => {
.expect((res: any) => {
let result = res.body;
expect(result).to.be.an("array");
expect(result).to.have.length(3);
expect(result[0]).to.be.an("object");
let keys: string[] = [];
keys = keys.concat(tests.clausal.metrics.map((item) => item.name));
@@ -175,5 +264,118 @@ describe("API data controller", () => {
})
.end(done);
});
it("should respond 200 and get some data, using filters with default", (done) => {
let query = parseQuery(tests.seller);
query.filters = "dim:sell:registered==false";
request(server)
.get("/v1/data")
.query(query)
.expect(200)
.expect((res: any) => {
let result = res.body;
expect(result).to.be.an("array");
expect(result).to.have.length(1);
expect(result[0]).to.be.an("object");
let keys: string[] = [];
keys = keys.concat(tests.seller.metrics.map((item) => item.name));
keys = keys.concat(tests.seller.dimensions.map((item) => item.name));
result.forEach((row: any) => {
expect(row).to.be.an("object");
expect(row).to.have.all.keys(keys);
});
})
.end(done);
});
it("should respond 200 and get some data, using filters with default", (done) => {
let query = parseQuery(tests.expensive);
query.sort = "met:product:max:pricein"
request(server)
.get("/v1/data")
.query(query)
.expect(200)
.expect((res: any) => {
let result = res.body;
expect(result).to.be.an("array");
expect(result).to.have.length(5);
expect(result[0]).to.be.an("object");
let keys: string[] = [];
keys = keys.concat(tests.expensive.metrics.map((item) => item.name));
keys = keys.concat(tests.expensive.dimensions.map((item) => item.name));
result.forEach((row: any) => {
expect(row).to.be.an("object");
expect(row).to.have.all.keys(keys);
});
})
.end(done);
});
it("should respond 200 and get some data with format as csv", (done) => {
waterfall([(cb: (err: Error, data: string) => void) => {
fs.readFile("test/files/data.csv", "utf8", (err, data) => {
cb(err, data);
});
}
, (file: string, cb: (err: Error) => void) => {
let query = parseQuery(tests.csv);
query.format = "csv";
request(server)
.get("/v1/data")
.query(query)
.expect(200)
.expect((res: any) => {
expect(res.text).to.be.eql(file);
})
.end(cb);
}], (err) => {
expect(err).to.be.eql(null);
done();
});
});
it("should respond 200 and get some data with format as ssv", (done) => {
waterfall([(cb: (err: Error, data: string) => void) => {
fs.readFile("test/files/data.ssv", "utf8", (err, data) => {
cb(err, data);
});
}
, (file: string, cb: (err: Error) => void) => {
let query = parseQuery(tests.csv);
query.format = "ssv";
request(server)
.get("/v1/data")
.query(query)
.expect(200)
.expect((res: any) => {
expect(res.text).to.be.eql(file);
})
.end(cb);
}], (err) => {
expect(err).to.be.eql(null);
done();
});
});
it("should respond 200 and get some data with format as tsv", (done) => {
waterfall([(cb: (err: Error, data: string) => void) => {
fs.readFile("test/files/data.tsv", "utf8", (err, data) => {
cb(err, data);
});
}
, (file: string, cb: (err: Error) => void) => {
let query = parseQuery(tests.csv);
query.format = "tsv";
request(server)
.get("/v1/data")
.query(query)
.expect(200)
.expect((res: any) => {
expect(res.text).to.be.eql(file);
})
.end(cb);
}], (err) => {
expect(err).to.be.eql(null);
done();
});
});
});
@@ -19,54 +19,140 @@
*/
import * as express from "express";
import { Request } from "../types";
import { Query } from "../../common/query";
import Request from "../types";
import { Query, QueryOpts } from "../../common/query";
/**
* Controller responsible for the data part of the API. In other
* words, the controller responsible for reading data from BlenDB.
*/
export class DataCtrl {
/**
* Route that validates a query and returns the query data.
* @param req - Object with request information
* @param res - Object used to create and send the response
* @param next - Call next middleware or controller. Not used but required
* by typescript definition of route.
*/
public static read(
req: Request,
res: express.Response,
next: express.NextFunction
) {
req.log.info("Query: ", req.query);
let metrics = (<string>req.query.metrics)
.split(",")
.filter((item: string) => item !== "");
let dimensions = (<string>req.query.dimensions)
.split(",")
.filter((item: string) => item !== "");
let clauses: string[] = [];
let sort: string[] = [];
if (req.query.filters) {
clauses = req.query.filters.split(";").filter((item: string) => item !== "");
clauses = (<string>req.query.filters)
.split(";")
.filter((item: string) => item !== "");
}
if (req.query.sort) {
sort = (<string>req.query.sort)
.split(",")
.filter((item: string) => item !== "");
}
let format = "json";
if (req.query.format) {
format = <string>req.query.format;
}
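// Illustrative example of the parsing above: the query string
//   ?metrics=met:a&dimensions=dim:b&filters=dim:b==1,dim:b==2;met:a>0&sort=dim:b
// yields metrics ["met:a"], dimensions ["dim:b"], clauses
// ["dim:b==1,dim:b==2", "met:a>0"] (";" separates ANDed clauses, ","
// ORs filters inside one clause) and sort ["dim:b"].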
let view;
try {
const qOpt: QueryOpts = { metrics: [], dimensions: [] };
let query = new Query(qOpt);
for (let i = 0; i < metrics.length; ++i) {
query.metrics.push(req.engine.getMetricByName(metrics[i]));
}
for (let i = 0; i < dimensions.length; ++i) {
query.dimensions.push(
req.engine.getDimensionByName(dimensions[i])
);
}
for (let i = 0; i < clauses.length; ++i) {
query.clauses.push(req.engine.parseClause(clauses[i]));
}
for (let i = 0; i < sort.length; ++i) {
const m = query.metrics.find((item) => item.name === sort[i]);
if (!m) {
const d = query.dimensions.find(
(item) => item.name === sort[i]
);
if (!d) {
throw new Error(
"The item '" +
sort[i] +
"' is not present in neither metrics nor dimensions list"
);
} else {
query.sort.push(d);
}
} else {
query.sort.push(m);
}
}
view = req.engine.query(query);
} catch (e) {
const message =
"Query execution failed: " +
"Could not construct query with the given parameters.";
req.log.warn(message, e);
res.status(500).json({
message: "Query execution failed: " +
"Could not construct query with the paramters given.",
error: e.message
message: message,
error: e.message,
});
return;
}
req.adapter.getDataFromView(view, (err: Error, result: any[]) => {
if (err) {
const message =
"Query execution failed: " +
"could not execute the query on the database.";
req.log.error(message, err);
res.status(500).json({
message: "Query execution failed " +
"failed on execute query on database.",
error: err
message: message,
error: err,
});
return;
}
if (format === "json") {
req.log.info("Response (json) send with success");
res.status(200).json(result);
} else {
req.csvParser(result, format, (error: Error, csv: string) => {
if (error) {
const message =
"Error generating csv file. " + "Try json format.";
req.log.error(message, error);
res.status(500).json({
message: message,
error: error,
});
return;
}
req.log.info("Response (csv) send with success");
res.setHeader("Content-Type", "text/csv");
res.setHeader(
"Content-disposition",
"attachment;filename=data.csv"
);
res.status(200).send(csv);
});
}
return;
});
}
......