Skip to content

Commit

Permalink
Merge pull request #10 from Cambio-Project/add-prometheus-custom-queries
Browse files Browse the repository at this point in the history
Add prometheus custom queries
  • Loading branch information
julianbrott authored Jan 24, 2024
2 parents 5b9bb7e + bbff50e commit 644ff29
Show file tree
Hide file tree
Showing 8 changed files with 574 additions and 471 deletions.
2 changes: 1 addition & 1 deletion .editorconfig
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ root = true
[*]
charset = utf-8
indent_style = space
indent_size = 2
indent_size = 4
insert_final_newline = true
trim_trailing_whitespace = true

Expand Down
8 changes: 4 additions & 4 deletions proxy/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,9 @@
"scripts": {},
"private": true,
"dependencies": {
"axios": "^1.6.5",
"axios": "^1.6.5",
"cors": "^2.8.5",
"express": "^4.18.2"
},
"devDependencies": { }
"express": "^4.18.2",
"get-stream": "^8.0.1"
}
}
54 changes: 46 additions & 8 deletions proxy/prometheus.proxy.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
const express = require('express');
const axios = require('axios');
const cors = require('cors');
const stream = require("stream");
const app = express();
const port = 3000;

Expand All @@ -15,27 +16,59 @@ if (process.argv.indexOf('-h') > -1 || process.argv.indexOf('--help') > -1) {
}
const isDocker = process.argv.indexOf('--docker') > -1;

// Configure axios to automatically decompress the response
axios.defaults.decompress = true;

/**
 * Drain a streamed error payload, parse it as JSON and forward it to the
 * client with the given HTTP status code. When the stream cannot be read
 * or does not contain valid JSON, a generic error body is sent instead.
 *
 * @param {import('stream').Readable} errorStream upstream error-response body
 * @param {object} res Express response used to answer the client
 * @param {number} statusCode status code to propagate to the client
 * @returns {Promise<object>} the finished Express response
 */
async function errorStreamToString(errorStream, res, statusCode) {
    // NOTE: parameter renamed from `stream` to avoid shadowing the
    // module-level `const stream = require("stream")`.
    try {
        // get-stream is ESM-only, hence the dynamic import.
        const { default: getStream } = await import('get-stream');

        // Collect the whole stream into a single string, then parse it.
        // Non-JSON payloads throw here and fall into the catch below.
        const rawBody = await getStream(errorStream);
        const parsedBody = JSON.parse(rawBody);
        console.log('Streamed error response:', parsedBody);

        return res.status(statusCode).send(parsedBody);
    } catch (err) {
        // Reading or parsing failed: log it and answer with a generic body.
        console.error('Error handling the error stream:', err);
        return res.status(statusCode).send({ error: 'Error processing the error response' });
    }
}

// Enable CORS for all routes
app.use(cors());
app.use(express.json());

// Proxy endpoint
app.all('/proxy', async (req, res) => {

if (
!req.headers['x-target-url']
|| req.headers['x-target-url'] === undefined
) {
return res.status(400).send({error: 'Target URL is required'});
}

// Extracting target URL and basic auth credentials from headers
let targetUrl;
if (isDocker) {
targetUrl = req.headers['x-target-url'] = req.headers['x-target-url'].replace('localhost', 'host.docker.internal');
targetUrl = req.headers['x-target-url'].replace('localhost', 'host.docker.internal');
}
else {
targetUrl = req.headers['x-target-url'];
}

const authHeader = req.headers['authorization']; // Basic Auth Header

if (!targetUrl) {
return res.status(400).send({error: 'Target URL is required'});
}

console.log(`Proxying request to ${targetUrl}`);

try {
Expand All @@ -44,7 +77,7 @@ app.all('/proxy', async (req, res) => {
method: req.method,
url: targetUrl,
data: req.body,
headers: { ...req.headers, 'Authorization': authHeader },
headers: { ...req.headers, 'Authorization': authHeader, "Content-Type": "application/json"},
responseType: 'stream'
});

Expand All @@ -53,8 +86,13 @@ app.all('/proxy', async (req, res) => {
} catch (error) {
// Handling errors
if (error.response) {
// Forwarding the error response from the target server
return res.status(error.response.status).sendStatus(error.response.status);
if (error.response.data instanceof require('stream').Readable) {
// Handle the stream response with the handleErrorStream function
return await errorStreamToString(error.response.data, res, error.response.status);
} else {
// Handle non-stream response
return res.status(error.response.status).send(error.response.data);
}
} else {
return res.status(500).send({ error: error.message });
}
Expand Down
164 changes: 77 additions & 87 deletions src/app/core/services/data.service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,108 +2,98 @@ import {HttpClient} from '@angular/common/http';
import {Injectable} from '@angular/core';
import {Papa} from 'ngx-papaparse';
import {Dataset} from '../../shared/models/dataset';

import {PrometheusService} from "./prometheus.service";


@Injectable({
providedIn: 'root'
providedIn: 'root'
})
export class DataService {

constructor(
private papa: Papa,
private http: HttpClient,
private prometheusService: PrometheusService,
) { }
/**
 * @param papa CSV parser (ngx-papaparse) used by parseCsvFile.
 * @param http Angular HttpClient used to fetch bundled asset files.
 * @param prometheusService Backend service all Prometheus calls are delegated to.
 */
constructor(
private papa: Papa,
private http: HttpClient,
private prometheusService: PrometheusService,
) {
}

parseCsvFile(file: File) {
return new Promise<Dataset>((resolve, reject) => {
this.papa.parse(file, {
delimiter: ',',
complete: result => {
const dataset = new Dataset(result.data, file);
resolve(dataset);
},
error: (err, file) => {
reject(err);
},
});
});
}
/**
 * Parse a comma-delimited CSV file into a Dataset.
 *
 * Wraps the callback-based ngx-papaparse API in a Promise so callers can
 * simply await the parsed result.
 *
 * @param file CSV file to parse.
 * @returns Promise resolving to the parsed Dataset, rejecting on parse errors.
 */
parseCsvFile(file: File) {
    return new Promise<Dataset>((resolve, reject) => {
        this.papa.parse(file, {
            delimiter: ',',
            complete: parseResult => {
                // Wrap the raw rows together with the source file.
                resolve(new Dataset(parseResult.data, file));
            },
            error: (parseError, erroredFile) => {
                reject(parseError);
            },
        });
    });
}

parseCsvFileFromAssets(fileName: string) {
return new Promise<Dataset>((resolve, reject) => {
this.http.get(`assets/csv/${fileName}`, { responseType: 'arraybuffer' })
.subscribe(async data => {
const file = this.blobToFile(data, fileName);
try {
const dataset = await this.parseCsvFile(file);
resolve(dataset);
} catch (err) {
reject(err);
}
});
});
}
/**
 * Load a CSV file shipped under assets/csv/ and parse it into a Dataset.
 *
 * NOTE(review): HTTP failures are not forwarded to reject — only parse
 * errors are. Confirm whether asset fetch errors should reject as well.
 *
 * @param fileName file name inside assets/csv/.
 * @returns Promise resolving to the parsed Dataset; rejects on parse errors.
 */
parseCsvFileFromAssets(fileName: string) {
    return new Promise<Dataset>((resolve, reject) => {
        const assetUrl = `assets/csv/${fileName}`;
        this.http.get(assetUrl, {responseType: 'arraybuffer'})
            .subscribe(async buffer => {
                // Wrap the raw bytes in a File so the regular parser applies.
                const csvFile = this.blobToFile(buffer, fileName);
                try {
                    resolve(await this.parseCsvFile(csvFile));
                } catch (parseError) {
                    reject(parseError);
                }
            });
    });
}

blobToFile = (data: ArrayBuffer, fileName: string): File => {
const blob: any = new Blob([data]);
var b: any = blob;
b.lastModifiedDate = new Date();
b.name = fileName;
return <File>blob;
}
/**
 * Convert a raw ArrayBuffer into a File-like object without copying data:
 * a Blob is built over the buffer and the File-specific properties
 * (name, lastModifiedDate) are attached to it directly.
 *
 * @param data raw file contents.
 * @param fileName name to attach to the resulting file.
 * @returns the Blob, cast to File.
 */
blobToFile = (data: ArrayBuffer, fileName: string): File => {
    // The cast lets us bolt File properties onto a plain Blob.
    const fileLike: any = new Blob([data]);
    fileLike.lastModifiedDate = new Date();
    fileLike.name = fileName;
    return fileLike as File;
}

setCredentials(proxyUrl: string, username: string, password: string): void {
this.prometheusService.setCredentials(proxyUrl, username, password);
}
/**
 * Configure the Prometheus database URL (delegates to PrometheusService).
 *
 * @param url Prometheus base URL.
 * @returns success flag plus a human-readable message.
 */
setDbUrl(url: string): Promise<{ success: boolean; msg: string; }> {
return this.prometheusService.setDbUrl(url);
}

clearCredentials(): void {
this.prometheusService.clearCredentials();
}
/**
 * Fetch the list of available metric names (delegates to PrometheusService).
 *
 * @returns array of metric names.
 */
getAvailableMetrics(): Promise<string[]> {
return this.prometheusService.getAvailableMetrics();
}

setDbUrl(dbUrl: string): Promise<{success: boolean, msg: string}> {
return this.prometheusService.setDbUrl(dbUrl);
}
/**
 * Run a Prometheus query over a time range (delegates to PrometheusService).
 *
 * @param query query string — presumably PromQL; confirm in PrometheusService.
 * @param start range start.
 * @param end range end.
 * @param step resolution step — presumably a Prometheus duration like '15s'; confirm.
 * @returns the query type and text alongside a Promise of the result data.
 */
getMetrics(query: string, start: Date, end: Date, step: string): {
queryType: string,
query: string,
data: Promise<any>
} {
return this.prometheusService.getMetrics(query, start, end, step);
}

getAvailableMetrics(): Promise<string[]> {
/**
* Get available metrics from prometheus database.
*
* @param dbUrl - prometheus database url
* @returns array of metric names
*/
return this.prometheusService.getAvailableMetrics();
}
/**
 * Store proxy URL and basic-auth credentials (delegates to PrometheusService).
 *
 * @param proxyUrl URL of the proxy to route requests through.
 * @param username basic-auth user name.
 * @param password basic-auth password.
 */
setCredentials(proxyUrl: string, username: string, password: string): void {
this.prometheusService.setCredentials(proxyUrl, username, password);
}

getMetrics(metrics: string[], start: Date, end: Date, step: string): Promise<any> {
/**
* Get metrics from prometheus database.
*
* @param dbUrl - prometheus database url
* @param metrics - array of metric names
* @param start - start date
* @param end - end date
* @param step - step size
* @returns csv array
*/
return this.prometheusService.getMetrics(metrics, start, end, step);
}
/** Discard stored proxy/auth credentials (delegates to PrometheusService). */
clearCredentials(): void {
this.prometheusService.clearCredentials();
}

generateUUID() {
var d = new Date().getTime();//Timestamp
var d2 = ((typeof performance !== 'undefined') && performance.now && (performance.now() * 1000)) || 0;//Time in microseconds since page-load or 0 if unsupported
return 'p' + 'xxxxxxxxxxxx4xxxyxxxxxxxxxxxxxxx'.replace(/[xy]/g, function (c) {
var r = Math.random() * 16;//random number between 0 and 16
if (d > 0) {//Use timestamp until depleted
r = (d + r) % 16 | 0;
d = Math.floor(d / 16);
} else {//Use microseconds since page-load if supported
r = (d2 + r) % 16 | 0;
d2 = Math.floor(d2 / 16);
}
return (c === 'x' ? r : (r & 0x3 | 0x8)).toString(16);
});
}
/**
 * Generate a UUID-v4-style identifier prefixed with 'p' (presumably so the
 * id never starts with a digit — confirm against how callers use it).
 *
 * Entropy comes from Math.random() mixed with the current timestamp and,
 * when available, the high-resolution performance clock.
 *
 * @returns 'p' followed by 32 hex characters.
 */
generateUUID() {
    // Timestamp entropy, consumed one hex digit at a time until depleted.
    let timeEntropy = new Date().getTime();//Timestamp
    // Microseconds since page load, or 0 when performance.now is unavailable.
    let perfEntropy = ((typeof performance !== 'undefined') && performance.now && (performance.now() * 1000)) || 0;//Time in microseconds since page-load or 0 if unsupported
    const template = 'xxxxxxxxxxxx4xxxyxxxxxxxxxxxxxxx';
    return 'p' + template.replace(/[xy]/g, (placeholder) => {
        let digit = Math.random() * 16;//random number between 0 and 16
        if (timeEntropy > 0) {//Use timestamp until depleted
            digit = (timeEntropy + digit) % 16 | 0;
            timeEntropy = Math.floor(timeEntropy / 16);
        } else {//Use microseconds since page-load if supported
            digit = (perfEntropy + digit) % 16 | 0;
            perfEntropy = Math.floor(perfEntropy / 16);
        }
        // 'y' slots get the UUID variant bits (binary 10xx) forced in.
        return (placeholder === 'x' ? digit : (digit & 0x3 | 0x8)).toString(16);
    });
}

}
Loading

0 comments on commit 644ff29

Please sign in to comment.