Beginning to add new usage metrics for 2024.3 release
ktuite committed Dec 12, 2024
1 parent ed0e390 commit c7180e7
Showing 4 changed files with 164 additions and 21 deletions.
2 changes: 1 addition & 1 deletion config/default.json
@@ -30,7 +30,7 @@
"analytics": {
"url": "https://data.getodk.cloud/v1/key/eOZ7S4bzyUW!g1PF6dIXsnSqktRuewzLTpmc6ipBtRq$LDfIMTUKswCexvE0UwJ9/projects/1/forms/odk-analytics/submissions",
"formId": "odk-analytics",
"version": "v2024.2.0_1"
"version": "v2024.3.0_1"
},
"s3blobStore": {}
}
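
For context, a minimal sketch (not from this commit) of how this block could be read, assuming the backend uses the standard node "config" package and the file's top-level "default" namespace; the only change here is the bumped version string, which tags outgoing analytics submissions with the new 2024.3 form version:

  // Hypothetical read of the analytics config above; the key path assumes the
  // file's top-level "default" namespace.
  const config = require('config');
  const { url, formId, version } = config.get('default.analytics');
  console.log(formId, version); // odk-analytics v2024.3.0_1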
21 changes: 20 additions & 1 deletion lib/data/analytics.js
@@ -31,6 +31,7 @@ const metricsTemplate = {
"num_unique_collectors": {},
"database_size": {},
"uses_external_db": 0,
"uses_external_blob_store": {},
"sso_enabled": 0,
"num_client_audit_attachments": 0,
"num_client_audit_attachments_failures": 0,
@@ -41,8 +41,14 @@
"num_offline_entity_branches": 0,
"num_offline_entity_interrupted_branches": 0,
"num_offline_entity_submissions_reprocessed": 0,
"num_offline_entity_submissions_force_processed": {},
"max_entity_submission_delay": 0,
"avg_entity_submission_delay": 0
"avg_entity_submission_delay": 0,
"max_entity_branch_delay": {},
"num_xml_only_form_defs": {},
"num_blob_files": {},
"num_blob_files_on_s3": {},
"num_reset_failed_to_pending_count": {}
},
"projects": [
{
@@ -161,6 +168,18 @@
"recent": 0,
"total": 0
},
"num_entity_creates_sub": {
"recent": 0,
"total": 0
},
"num_entity_creates_api": {
"recent": 0,
"total": 0
},
"num_entity_creates_bulk": {
"recent": 0,
"total": 0
},
"num_creation_forms": 0,
"num_followup_forms": 0,
"num_failed_entities": {
41 changes: 33 additions & 8 deletions lib/model/query/analytics.js
@@ -523,10 +523,13 @@ FROM duplicateRuns;

// Counts of submission backlog events: submissions held in the backlog,
// automatically reprocessed when the preceding submission came in, or force-processed
const countSubmissionReprocess = () => ({ oneFirst }) => oneFirst(sql`
SELECT COUNT(*)
const countSubmissionBacklogEvents = () => ({ one }) => one(sql`
SELECT
COUNT(CASE WHEN "action" = 'submission.backlog.hold' THEN 1 END) AS "submission.backlog.hold",
COUNT(CASE WHEN "action" = 'submission.backlog.reprocess' THEN 1 END) AS "submission.backlog.reprocess",
COUNT(CASE WHEN "action" = 'submission.backlog.force' THEN 1 END) AS "submission.backlog.force"
FROM audits
WHERE "action" = 'submission.backlog.reprocess'
WHERE "action" IN ('submission.backlog.hold', 'submission.backlog.reprocess', 'submission.backlog.force')
`);
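
For reference, a quick sketch of how the single row returned by this query is shaped and consumed later in this commit; the audit action names double as column aliases, so callers index the result object directly (counts shown are illustrative):

  const oeBacklogEvents = await container.Analytics.countSubmissionBacklogEvents();
  // e.g. { 'submission.backlog.hold': 2, 'submission.backlog.reprocess': 1, 'submission.backlog.force': 1 }
  metrics.system.num_offline_entity_submissions_reprocessed = oeBacklogEvents['submission.backlog.reprocess'];
  metrics.system.num_offline_entity_submissions_force_processed = oeBacklogEvents['submission.backlog.force'];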

// Measure how much time entities whose source is a submission.create
@@ -554,6 +557,21 @@ JOIN submission_defs as sd
ON eds."submissionDefId" = sd.id;
`);

const measureMaxEntityBranchTime = () => ({ oneFirst }) => oneFirst(sql`
SELECT
COALESCE(MAX(AGE(max_created_at, min_created_at)), '0 seconds'::interval) AS max_time_difference
FROM (
SELECT
"branchId",
"entityId",
MIN("createdAt") AS min_created_at,
MAX("createdAt") AS max_created_at
FROM entity_defs
GROUP BY "branchId", "entityId"
HAVING "branchId" IS NOT NULL
) AS subquery;
`);
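
A brief worked example of what this aggregation measures, mirroring the backdated defs in the test below: for a branch whose earliest def has createdAt 1999-01-01 and latest 1999-01-02, the inner subquery yields min and max timestamps a day apart, AGE(...) produces the interval '1 day', and the metric surfaces it as seconds:

  const longTime = await container.Analytics.measureMaxEntityBranchTime();
  longTime.should.be.equal(86400); // one day, in seconds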

// Other
const getProjectsWithDescriptions = () => ({ all }) => all(sql`
select id as "projectId", length(trim(description)) as description_length from projects where coalesce(trim(description),'')!=''`);
@@ -741,13 +759,14 @@ const previewMetrics = () => (({ Analytics }) => runSequentially([
Analytics.countClientAuditRows,
Analytics.countOfflineBranches,
Analytics.countInterruptedBranches,
Analytics.countSubmissionReprocess,
Analytics.countSubmissionBacklogEvents,
Analytics.measureEntityProcessingTime,
Analytics.measureMaxEntityBranchTime,
Analytics.projectMetrics
]).then(([db, encrypt, bigForm, admins, audits,
archived, managers, viewers, collectors,
caAttachments, caFailures, caRows,
oeBranches, oeInterruptedBranches, oeSubReprocess, oeProcessingTime,
oeBranches, oeInterruptedBranches, oeBacklogEvents, oeProcessingTime, oeBranchTime,
projMetrics]) => {
const metrics = clone(metricsTemplate);
// system
@@ -785,10 +804,15 @@ const previewMetrics = () => (({ Analytics }) => runSequentially([
// 2024.2.0 offline entity metrics
metrics.system.num_offline_entity_branches = oeBranches;
metrics.system.num_offline_entity_interrupted_branches = oeInterruptedBranches;
metrics.system.num_offline_entity_submissions_reprocessed = oeSubReprocess;
metrics.system.num_offline_entity_submissions_reprocessed = oeBacklogEvents['submission.backlog.reprocess'];
metrics.system.num_offline_entity_submissions_force_processed = oeBacklogEvents['submission.backlog.force'];

metrics.system.max_entity_submission_delay = oeProcessingTime.max_wait;
metrics.system.avg_entity_submission_delay = oeProcessingTime.avg_wait;

// 2024.3.0 offline entity metrics
metrics.system.max_entity_branch_delay = oeBranchTime;

return metrics;
}));
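
For orientation, a hedged sketch (not part of this diff) of how the new system-level fields surface when the preview is generated, assuming previewMetrics is invoked through the query container like the other Analytics queries; values are illustrative, and the four counters marked TODO in the tests are not yet assigned here, so they keep their template placeholders:

  const metrics = await container.Analytics.previewMetrics();
  // metrics.system.num_offline_entity_submissions_reprocessed     -> e.g. 1
  // metrics.system.num_offline_entity_submissions_force_processed -> e.g. 1
  // metrics.system.max_entity_branch_delay                        -> e.g. 86400
  // num_xml_only_form_defs, num_blob_files, num_blob_files_on_s3,
  // num_reset_failed_to_pending_count                             -> still template placeholders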

@@ -842,7 +866,8 @@ module.exports = {
getDatasetEvents,
countOfflineBranches,
countInterruptedBranches,
countSubmissionReprocess,
countSubmissionBacklogEvents,
measureEntityProcessingTime,
measureElapsedEntityTime
measureElapsedEntityTime,
measureMaxEntityBranchTime
};
121 changes: 110 additions & 11 deletions test/integration/other/analytics-queries.js
@@ -1580,7 +1580,7 @@ describe('analytics task queries', function () {
countInterruptedBranches.should.equal(4);
}));

it('should count number of submission.backlog.reprocess events (submissions temporarily in the backlog)', testService(async (service, container) => {
it('should count number of submission.backlog.* events (submissions temporarily in the backlog)', testService(async (service, container) => {
await createTestForm(service, container, testData.forms.offlineEntity, 1);

const asAlice = await service.login('alice');
@@ -1625,8 +1625,12 @@ describe('analytics task queries', function () {
backlogCount = await container.oneFirst(sql`select count(*) from entity_submission_backlog`);
backlogCount.should.equal(0);

let countReprocess = await container.Analytics.countSubmissionReprocess();
countReprocess.should.equal(1);
let countBacklogEvents = await container.Analytics.countSubmissionBacklogEvents();
countBacklogEvents.should.eql({
'submission.backlog.hold': 1,
'submission.backlog.reprocess': 1,
'submission.backlog.force': 0
});

// Send a future update that will get held in backlog
await asAlice.post('/v1/projects/1/forms/offlineEntity/submissions')
@@ -1643,19 +1647,27 @@ describe('analytics task queries', function () {
backlogCount = await container.oneFirst(sql`select count(*) from entity_submission_backlog`);
backlogCount.should.equal(1);

// A submission being put in the backlog is not what is counted so this is still 1
countReprocess = await container.Analytics.countSubmissionReprocess();
countReprocess.should.equal(1);
// A submission merely being put in the backlog is not a reprocess event, so the reprocess count is still 1
countBacklogEvents = await container.Analytics.countSubmissionBacklogEvents();
countBacklogEvents.should.eql({
'submission.backlog.hold': 2,
'submission.backlog.reprocess': 1,
'submission.backlog.force': 0
});

// force processing the backlog
await container.Entities.processBacklog(true);

backlogCount = await container.oneFirst(sql`select count(*) from entity_submission_backlog`);
backlogCount.should.equal(0);

// Force processing also doesn't change this count so it is still 1
countReprocess = await container.Analytics.countSubmissionReprocess();
countReprocess.should.equal(1);
// Force processing counted now, and reprocessing still only counted once
countBacklogEvents = await container.Analytics.countSubmissionBacklogEvents();
countBacklogEvents.should.eql({
'submission.backlog.hold': 2,
'submission.backlog.reprocess': 1,
'submission.backlog.force': 1
});

//----------

@@ -1689,8 +1701,12 @@ describe('analytics task queries', function () {
await exhaust(container);

// Two reprocessing events logged now
countReprocess = await container.Analytics.countSubmissionReprocess();
countReprocess.should.equal(2);
countBacklogEvents = await container.Analytics.countSubmissionBacklogEvents();
countBacklogEvents.should.eql({
'submission.backlog.hold': 3,
'submission.backlog.reprocess': 2,
'submission.backlog.force': 1
});
}));

it('should measure time from submission creation to entity version finished processing', testService(async (service, container) => {
@@ -1731,6 +1747,69 @@ describe('analytics task queries', function () {
waitTime.avg_wait.should.be.greaterThan(0);
}));

it('should measure max time between first submission on a branch received and last submission on that branch processed', testService(async (service, container) => {
await createTestForm(service, container, testData.forms.offlineEntity, 1);
const asAlice = await service.login('alice');

const emptyTime = await container.Analytics.measureMaxEntityBranchTime();
emptyTime.should.equal(0);

// Create entity to update
await asAlice.post('/v1/projects/1/datasets/people/entities')
.send({
uuid: '12345678-1234-4123-8234-123456789abc',
label: 'label'
})
.expect(200);

const branchId = uuid();

// Send first update
await asAlice.post('/v1/projects/1/forms/offlineEntity/submissions')
.send(testData.instances.offlineEntity.one
.replace('branchId=""', `branchId="${branchId}"`)
)
.set('Content-Type', 'application/xml')
.expect(200);

// Send second update
await asAlice.post('/v1/projects/1/forms/offlineEntity/submissions')
.send(testData.instances.offlineEntity.one
.replace('one', 'one-update2')
.replace('baseVersion="1"', 'baseVersion="2"')
.replace('branchId=""', `branchId="${branchId}"`)
)
.set('Content-Type', 'application/xml')
.expect(200);

// Send third update in its own branch
await asAlice.post('/v1/projects/1/forms/offlineEntity/submissions')
.send(testData.instances.offlineEntity.one
.replace('one', 'one-update3')
.replace('baseVersion="1"', 'baseVersion="3"')
.replace('trunkVersion="1"', 'trunkVersion="3"')
.replace('branchId=""', `branchId="${uuid()}"`)
)
.set('Content-Type', 'application/xml')
.expect(200);

await exhaust(container);

// Make another update via the API
await asAlice.patch('/v1/projects/1/datasets/people/entities/12345678-1234-4123-8234-123456789abc?baseVersion=4')
.send({ label: 'label update' })
.expect(200);

const time = await container.Analytics.measureMaxEntityBranchTime();
time.should.be.greaterThan(0);

// Set date of entity defs in first branch to 1 day apart
await container.run(sql`UPDATE entity_defs SET "createdAt" = '1999-01-01' WHERE version = 2`);
await container.run(sql`UPDATE entity_defs SET "createdAt" = '1999-01-02' WHERE version = 3`);
const longTime = await container.Analytics.measureMaxEntityBranchTime();
longTime.should.be.equal(86400); // number of seconds in a day
}));

it('should not see a delay for submissions processed with approvalRequired flag toggled', testService(async (service, container) => {
const asAlice = await service.login('alice');

@@ -1952,6 +2031,19 @@ describe('analytics task queries', function () {

await exhaust(container);

// sending in an update much later in the chain that will need to be force processed
await asAlice.post('/v1/projects/1/forms/offlineEntity/submissions')
.send(testData.instances.offlineEntity.one
.replace('one', 'one-update10')
.replace('baseVersion="1"', 'baseVersion="10"')
.replace('branchId=""', `branchId="${branchId}"`)
)
.set('Content-Type', 'application/xml')
.expect(200);

await exhaust(container);
await container.Entities.processBacklog(true);

// After the interesting stuff above, encrypt and archive the project

// encrypting a project
@@ -1984,6 +2076,13 @@ describe('analytics task queries', function () {
// can't easily test this metric
delete res.system.uses_external_db;
delete res.system.sso_enabled;
delete res.system.uses_external_blob_store;

// TODO stop deleting these in the tests once they are implemented
delete res.system.num_xml_only_form_defs;
delete res.system.num_blob_files;
delete res.system.num_blob_files_on_s3;
delete res.system.num_reset_failed_to_pending_count;

// everything in system filled in
Object.values(res.system).forEach((metric) =>