Refactored prometheus metrics instrumentKnex method (#21627)
no issue

- The `instrumentKnex` method was directly accessing the `promClient` instance to create custom metrics, and keeping track of them manually in a `customMetrics` map. This isn't necessary, since the metrics are all tracked within the `promClient` instance's registry. The method now uses the `prometheusClient.register...()` methods to create the metrics, and retrieves them with the `getMetric()` method, to reduce duplication of work and manual bookkeeping.
- This also removes the query count metric, as a count is already included in the query duration Summary metric.
parent 0ac587e94a, commit 3728a4eaea

2 changed files with 52 additions and 108 deletions
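The refactor leans on the fact that prom-client already keeps every registered metric in its registry, so a separate bookkeeping map is redundant. Below is a minimal sketch of that idea using prom-client directly; the `registerGauge()`/`getMetric()` calls in the diff are Ghost's own wrappers around this, and the `pool` object here is a stand-in, not code from the commit.

    import {Gauge, register} from 'prom-client';

    const pool = {max: 10}; // stand-in for knexInstance.client.pool

    // Creating a metric registers it on prom-client's default registry,
    // so there is no need to also stash it in a customMetrics map.
    new Gauge({
        name: 'ghost_db_connection_pool_max', // the diff suggests Ghost's registerGauge() adds the `ghost_` prefix itself
        help: 'The maximum number of connections allowed in the pool',
        collect() {
            // collect() runs on every read/scrape; cast as in the diff, since
            // prom-client's typings don't narrow `this` inside collect()
            (this as unknown as Gauge).set(pool.max);
        }
    });

    // Later, the same metric can be fetched back out of the registry by name...
    const gauge = register.getSingleMetric('ghost_db_connection_pool_max') as Gauge;

    // ...and reading it triggers collect(), so this logs 10.
    gauge.get().then(result => console.log(result.values[0].value));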
@@ -31,7 +31,6 @@ export class PrometheusClient {
     public client;
     public gateway: client.Pushgateway<client.RegistryContentType> | undefined; // public for testing
-    public customMetrics: Map<string, client.Metric> = new Map();
     public queries: Map<string, Date> = new Map();

     private config: PrometheusClientConfig;

@@ -245,76 +244,69 @@ export class PrometheusClient {
      */
     instrumentKnex(knexInstance: Knex) {
         // Create some gauges for tracking the connection pool
-        this.customMetrics.set(`${this.prefix}db_connection_pool_max`, new this.client.Gauge({
-            name: `${this.prefix}db_connection_pool_max`,
-            help: 'The maximum number of connections allowed in the pool',
+        this.registerGauge({
+            name: `db_connection_pool_max`,
+            help: 'The maximum number of connections allowed in the pool',
             collect() {
-                this.set(knexInstance.client.pool.max);
+                (this as unknown as client.Gauge).set(knexInstance.client.pool.max);
             }
-        }));
+        });

-        this.customMetrics.set(`${this.prefix}db_connection_pool_min`, new this.client.Gauge({
-            name: `${this.prefix}db_connection_pool_min`,
+        this.registerGauge({
+            name: `db_connection_pool_min`,
             help: 'The minimum number of connections allowed in the pool',
             collect() {
-                this.set(knexInstance.client.pool.min);
+                (this as unknown as client.Gauge).set(knexInstance.client.pool.min);
             }
-        }));
+        });

-        this.customMetrics.set(`${this.prefix}db_connection_pool_active`, new this.client.Gauge({
-            name: `${this.prefix}db_connection_pool_active`,
+        this.registerGauge({
+            name: `db_connection_pool_active`,
             help: 'The number of active connections to the database, which can be in use or idle',
             collect() {
-                this.set(knexInstance.client.pool.numUsed() + knexInstance.client.pool.numFree());
+                (this as unknown as client.Gauge).set(knexInstance.client.pool.numUsed() + knexInstance.client.pool.numFree());
             }
-        }));
+        });

-        this.customMetrics.set(`${this.prefix}db_connection_pool_used`, new this.client.Gauge({
-            name: `${this.prefix}db_connection_pool_used`,
+        this.registerGauge({
+            name: `db_connection_pool_used`,
             help: 'The number of connections currently in use by the database',
             collect() {
-                this.set(knexInstance.client.pool.numUsed());
+                (this as unknown as client.Gauge).set(knexInstance.client.pool.numUsed());
             }
-        }));
+        });

-        this.customMetrics.set(`${this.prefix}db_connection_pool_idle`, new this.client.Gauge({
-            name: `${this.prefix}db_connection_pool_idle`,
+        this.registerGauge({
+            name: `db_connection_pool_idle`,
             help: 'The number of active connections currently idle in pool',
             collect() {
-                this.set(knexInstance.client.pool.numFree());
+                (this as unknown as client.Gauge).set(knexInstance.client.pool.numFree());
             }
-        }));
+        });

-        this.customMetrics.set(`${this.prefix}db_connection_pool_pending_acquires`, new this.client.Gauge({
-            name: `${this.prefix}db_connection_pool_pending_acquires`,
+        this.registerGauge({
+            name: `db_connection_pool_pending_acquires`,
             help: 'The number of connections currently waiting to be acquired from the pool',
             collect() {
-                this.set(knexInstance.client.pool.numPendingAcquires());
+                (this as unknown as client.Gauge).set(knexInstance.client.pool.numPendingAcquires());
             }
-        }));
+        });

-        this.customMetrics.set(`${this.prefix}db_connection_pool_pending_creates`, new this.client.Gauge({
-            name: `${this.prefix}db_connection_pool_pending_creates`,
+        this.registerGauge({
+            name: `db_connection_pool_pending_creates`,
             help: 'The number of connections currently waiting to be created',
             collect() {
-                this.set(knexInstance.client.pool.numPendingCreates());
+                (this as unknown as client.Gauge).set(knexInstance.client.pool.numPendingCreates());
             }
-        }));
+        });

-        this.customMetrics.set(`${this.prefix}db_query_count`, new this.client.Counter({
-            name: `${this.prefix}db_query_count`,
-            help: 'The number of queries executed'
-        }));
-
-        this.customMetrics.set(`${this.prefix}db_query_duration_milliseconds`, new this.client.Summary({
-            name: `${this.prefix}db_query_duration_milliseconds`,
+        this.registerSummary({
+            name: `db_query_duration_milliseconds`,
             help: 'The duration of queries in milliseconds',
             percentiles: [0.5, 0.9, 0.99]
-        }));
+        });

         knexInstance.on('query', (query) => {
-            // Increment the query counter
-            (this.customMetrics.get(`${this.prefix}db_query_count`) as client.Counter).inc();
             // Add the query to the map
             this.queries.set(query.__knexQueryUid, new Date());
         });

@@ -323,7 +315,7 @@ export class PrometheusClient {
             const start = this.queries.get(query.__knexQueryUid);
             if (start) {
                 const duration = new Date().getTime() - start.getTime();
-                (this.customMetrics.get(`${this.prefix}db_query_duration_milliseconds`) as client.Summary).observe(duration);
+                (this.getMetric(`db_query_duration_milliseconds`) as client.Summary).observe(duration);
             }
         });
     }
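The instrumentation above splits query timing across two knex events: the 'query' handler records a start time keyed by `__knexQueryUid`, and the handler in the last hunk (attached in the surrounding, unchanged code, presumably to knex's standard 'query-response' event) looks that start time up and observes the elapsed milliseconds on the Summary. Here is a self-contained sketch of the same flow, using a bare EventEmitter in place of knex, much as the updated tests mock it; the event wiring below is an illustration, not the commit's code.

    import {EventEmitter} from 'events';
    import {Summary} from 'prom-client';

    const knexLike = new EventEmitter(); // stand-in for a real Knex instance
    const queries = new Map<string, Date>();

    const queryDuration = new Summary({
        name: 'ghost_db_query_duration_milliseconds',
        help: 'The duration of queries in milliseconds',
        percentiles: [0.5, 0.9, 0.99]
    });

    // 'query' fires when a query starts: remember when it began, keyed by its uid
    knexLike.on('query', (query: {__knexQueryUid: string}) => {
        queries.set(query.__knexQueryUid, new Date());
    });

    // 'query-response' fires when it completes: observe the elapsed milliseconds
    knexLike.on('query-response', (_response: unknown, query: {__knexQueryUid: string}) => {
        const start = queries.get(query.__knexQueryUid);
        if (start) {
            queryDuration.observe(new Date().getTime() - start.getTime());
        }
    });

    // Simulated query, the same way the tests drive their knex mock:
    knexLike.emit('query', {__knexQueryUid: '1', sql: 'SELECT 1'});
    knexLike.emit('query-response', [], {__knexQueryUid: '1', sql: 'SELECT 1'});

The second changed file is the test suite, updated to read values through `getMetricValues()` instead of the removed `customMetrics` map.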
@@ -6,7 +6,7 @@ import type {Knex} from 'knex';
 import nock from 'nock';
 import {EventEmitter} from 'events';
 import type {EventEmitter as EventEmitterType} from 'events';
-import type {Gauge, Counter, Summary, Pushgateway, RegistryContentType, Metric} from 'prom-client';
+import type {Gauge, Summary, Pushgateway, RegistryContentType, Metric} from 'prom-client';

 describe('Prometheus Client', function () {
     let instance: PrometheusClient;
@@ -287,40 +287,20 @@ describe('Prometheus Client', function () {
             sinon.restore();
         });

-        it('should create all the custom metrics for the connection pool and queries', function () {
-            instance = new PrometheusClient();
-            instance.init();
-            instance.instrumentKnex(knexMock);
-            const metrics = Array.from(instance.customMetrics.keys());
-            assert.deepEqual(metrics, [
-                'ghost_db_connection_pool_max',
-                'ghost_db_connection_pool_min',
-                'ghost_db_connection_pool_active',
-                'ghost_db_connection_pool_used',
-                'ghost_db_connection_pool_idle',
-                'ghost_db_connection_pool_pending_acquires',
-                'ghost_db_connection_pool_pending_creates',
-                'ghost_db_query_count',
-                'ghost_db_query_duration_milliseconds'
-            ]);
-        });
-
         it('should collect the connection pool max metric', async function () {
             instance = new PrometheusClient();
             instance.init();
             instance.instrumentKnex(knexMock);
-            const connectionPoolMaxGauge = instance.customMetrics.get('ghost_db_connection_pool_max') as Gauge;
-            const result = await connectionPoolMaxGauge.get();
-            assert.equal(result.values[0].value, 10);
+            const metricValues = await instance.getMetricValues('db_connection_pool_max');
+            assert.equal(metricValues?.[0].value, 10);
         });

         it('should collect the connection pool min metric', async function () {
             instance = new PrometheusClient();
             instance.init();
             instance.instrumentKnex(knexMock);
-            const connectionPoolMinGauge = instance.customMetrics.get('ghost_db_connection_pool_min') as Gauge;
-            const result = await connectionPoolMinGauge.get();
-            assert.equal(result.values[0].value, 1);
+            const metricValues = await instance.getMetricValues('db_connection_pool_min');
+            assert.equal(metricValues?.[0].value, 1);
         });

         it('should collect the connection pool active metric', async function () {
@@ -329,9 +309,8 @@ describe('Prometheus Client', function () {
             instance = new PrometheusClient();
             instance.init();
             instance.instrumentKnex(knexMock);
-            const connectionPoolActiveGauge = instance.customMetrics.get('ghost_db_connection_pool_active') as Gauge;
-            const result = await connectionPoolActiveGauge.get();
-            assert.equal(result.values[0].value, 10);
+            const metricValues = await instance.getMetricValues('db_connection_pool_active');
+            assert.equal(metricValues?.[0].value, 10);
         });

         it('should collect the connection pool used metric', async function () {
@@ -339,9 +318,8 @@ describe('Prometheus Client', function () {
             instance = new PrometheusClient();
             instance.init();
             instance.instrumentKnex(knexMock);
-            const connectionPoolUsedGauge = instance.customMetrics.get('ghost_db_connection_pool_used') as Gauge;
-            const result = await connectionPoolUsedGauge.get();
-            assert.equal(result.values[0].value, 3);
+            const metricValues = await instance.getMetricValues('db_connection_pool_used');
+            assert.equal(metricValues?.[0].value, 3);
         });

         it('should collect the connection pool idle metric', async function () {
@@ -349,9 +327,8 @@ describe('Prometheus Client', function () {
             instance = new PrometheusClient();
             instance.init();
             instance.instrumentKnex(knexMock);
-            const connectionPoolIdleGauge = instance.customMetrics.get('ghost_db_connection_pool_idle') as Gauge;
-            const result = await connectionPoolIdleGauge.get();
-            assert.equal(result.values[0].value, 7);
+            const metricValues = await instance.getMetricValues('db_connection_pool_idle');
+            assert.equal(metricValues?.[0].value, 7);
         });

         it('should collect the connection pool pending acquires metric', async function () {
@@ -359,9 +336,8 @@ describe('Prometheus Client', function () {
             instance = new PrometheusClient();
             instance.init();
             instance.instrumentKnex(knexMock);
-            const connectionPoolPendingAcquiresGauge = instance.customMetrics.get('ghost_db_connection_pool_pending_acquires') as Gauge;
-            const result = await connectionPoolPendingAcquiresGauge.get();
-            assert.equal(result.values[0].value, 3);
+            const metricValues = await instance.getMetricValues('db_connection_pool_pending_acquires');
+            assert.equal(metricValues?.[0].value, 3);
         });

         it('should collect the connection pool pending creates metric', async function () {
@@ -369,30 +345,8 @@ describe('Prometheus Client', function () {
             instance = new PrometheusClient();
             instance.init();
             instance.instrumentKnex(knexMock);
-            const connectionPoolPendingCreatesGauge = instance.customMetrics.get('ghost_db_connection_pool_pending_creates') as Gauge;
-            const result = await connectionPoolPendingCreatesGauge.get();
-            assert.equal(result.values[0].value, 3);
-        });
-
-        it('should collect the db query count metric', async function () {
-            instance = new PrometheusClient();
-            instance.init();
-            instance.instrumentKnex(knexMock);
-            const dbQueryCountGauge = instance.customMetrics.get('ghost_db_query_count') as Counter;
-            const result = await dbQueryCountGauge.get();
-            assert.equal(result.values[0].value, 0);
-        });
-
-        it('should increment the db query count metric when a query is executed', async function () {
-            instance = new PrometheusClient();
-            instance.init();
-            instance.instrumentKnex(knexMock);
-            eventEmitter.emit('query', {__knexQueryUid: '1', sql: 'SELECT 1'});
-            const dbQueryCountGauge = instance.customMetrics.get('ghost_db_query_count') as Counter;
-            const result = await dbQueryCountGauge.get();
-            assert.equal(result.values[0].value, 1);
-            assert.equal(instance.queries.size, 1);
-            assert.ok(instance.queries.has('1'));
+            const metricValues = await instance.getMetricValues('db_connection_pool_pending_creates');
+            assert.equal(metricValues?.[0].value, 3);
         });

         it('should collect the db query duration metric when a query is executed', async function () {
@@ -400,9 +354,8 @@ describe('Prometheus Client', function () {
             instance.init();
             instance.instrumentKnex(knexMock);
             eventEmitter.emit('query', {__knexQueryUid: '1', sql: 'SELECT 1'});
-            const dbQueryDurationSummary = instance.customMetrics.get('ghost_db_query_duration_milliseconds') as Summary;
-            const result = await dbQueryDurationSummary.get();
-            assert.equal(result.values[0].value, 0);
+            const metricValues = await instance.getMetricValues('db_query_duration_milliseconds');
+            assert.equal(metricValues?.[0].value, 0);
         });

         it('should accurately calculate the query duration of a query', async function () {
@@ -411,9 +364,8 @@ describe('Prometheus Client', function () {
             instance.instrumentKnex(knexMock);
             const durations = [100, 200, 300, 400, 500, 600, 700, 800, 900, 1000];
             simulateQueries(durations);
-            const dbQueryDurationSummary = instance.customMetrics.get('ghost_db_query_duration_milliseconds') as Summary;
-            const result = await dbQueryDurationSummary.get();
-            assert.deepEqual(result.values, [
+            const metricValues = await instance.getMetricValues('db_query_duration_milliseconds');
+            assert.deepEqual(metricValues, [
                 {labels: {quantile: 0.5}, value: 550},
                 {labels: {quantile: 0.9}, value: 950},
                 {labels: {quantile: 0.99}, value: 1000},
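End to end, the refactored client is exercised the way the updated tests do: construct, init(), instrument a knex instance, then read values back by unprefixed metric name. A rough usage sketch follows; the `@tryghost/prometheus-metrics` import path and the knex connection settings are assumptions, while the call sequence mirrors the tests above.

    import knex from 'knex';
    import {PrometheusClient} from '@tryghost/prometheus-metrics'; // assumed package name

    const db = knex({
        client: 'mysql2',
        connection: {host: 'localhost', user: 'ghost', password: 'secret', database: 'ghost'},
        pool: {min: 1, max: 10}
    });

    const prometheusClient = new PrometheusClient();
    prometheusClient.init();             // as in the tests, init() runs before instrumenting
    prometheusClient.instrumentKnex(db); // registers the pool gauges and the query duration summary

    // Values can then be read back by unprefixed name, as the updated tests do:
    prometheusClient.getMetricValues('db_connection_pool_max').then((values) => {
        console.log(values?.[0].value); // 10, the pool max configured above
    });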