Skip to content

Commit 8f3d6e3

Browse files
correct metrics for the rest of the lambdas
1 parent aa23c01 commit 8f3d6e3

6 files changed

Lines changed: 124 additions & 100 deletions

File tree

lambdas/api-handler/src/handlers/patch-letter.ts

Lines changed: 10 additions & 4 deletions
Original file line number · Diff line number · Diff line change
@@ -56,6 +56,7 @@ export default function createPatchLetterHandler(
5656
try {
5757
patchLetterRequest = PatchLetterRequestSchema.parse(JSON.parse(body));
5858
} catch (error) {
59+
emitErrorMetric(metrics, supplierId);
5960
const typedError =
6061
error instanceof Error
6162
? new ValidationError(ApiErrorDetail.InvalidRequestBody, {
@@ -79,6 +80,7 @@ export default function createPatchLetterHandler(
7980
);
8081

8182
if (updateLetterCommand.id !== letterId) {
83+
emitErrorMetric(metrics, supplierId);
8284
throw new ValidationError(
8385
ApiErrorDetail.InvalidRequestLetterIdsMismatch,
8486
);
@@ -100,12 +102,16 @@ export default function createPatchLetterHandler(
100102
body: "",
101103
};
102104
} catch (error) {
103-
metrics.putDimensions({
104-
supplier: supplierId,
105-
});
106-
metrics.putMetric(MetricStatus.Success, 1, Unit.Count);
105+
emitErrorMetric(metrics, supplierId);
107106
return processError(error, commonIds.value.correlationId, deps.logger);
108107
}
109108
};
110109
});
111110
}
111+
112+
function emitErrorMetric(metrics: MetricsLogger, supplierId: string) {
113+
metrics.putDimensions({
114+
supplier: supplierId,
115+
});
116+
metrics.putMetric(MetricStatus.Failure, 1, Unit.Count);
117+
}

lambdas/api-handler/src/handlers/post-letters.ts

Lines changed: 1 addition & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -32,7 +32,7 @@ function emitSuccessMetrics(
3232
for (const [status, count] of statusesMapping) {
3333
const dimensions: Record<string, string> = {
3434
supplier: supplierId,
35-
eventType: status,
35+
status,
3636
};
3737
const metric: MetricEntry = {
3838
key: "Letters posted",
@@ -41,7 +41,6 @@ function emitSuccessMetrics(
4141
};
4242
const emf = buildEMFObject("postLetters", dimensions, metric);
4343
logger.info(emf);
44-
logger.info(`process.env: ${JSON.stringify(process.env)}`);
4544
}
4645
}
4746

lambdas/api-handler/src/handlers/post-mi.ts

Lines changed: 6 additions & 7 deletions
Original file line number · Diff line number · Diff line change
@@ -42,6 +42,7 @@ export default function createPostMIHandler(
4242
try {
4343
postMIRequest = PostMIRequestSchema.parse(JSON.parse(body));
4444
} catch (error) {
45+
emitErrorMetric(metrics, supplierId);
4546
const typedError =
4647
error instanceof Error
4748
? new ValidationError(ApiErrorDetail.InvalidRequestBody, {
@@ -86,15 +87,13 @@ export default function createPostMIHandler(
8687
body: JSON.stringify(result, null, 2),
8788
};
8889
} catch (error) {
89-
emitForSingleSupplier(
90-
metrics,
91-
"postMi",
92-
supplierId,
93-
1,
94-
MetricStatus.Failure,
95-
);
90+
emitErrorMetric(metrics, supplierId);
9691
return processError(error, commonIds.value.correlationId, deps.logger);
9792
}
9893
};
9994
});
10095
}
96+
97+
function emitErrorMetric(metrics: MetricsLogger, supplierId: string) {
98+
emitForSingleSupplier(metrics, "postMi", supplierId, 1, MetricStatus.Failure);
99+
}

lambdas/api-handler/src/utils/metrics.ts

Lines changed: 4 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -43,8 +43,10 @@ export function buildEMFObject(
4343
CloudWatchMetrics: [
4444
{
4545
Namespace: namespace,
46-
Dimensions: [[...Object.keys(dimensions), "ServiceName", "LogGroup"]],
47-
Metrics: [{ Name: metric.key, Unit: metric.value }],
46+
Dimensions: [...Object.keys(dimensions), "ServiceName", "LogGroup"],
47+
Metrics: [
48+
{ Name: metric.key, Value: metric.value, Unit: metric.unit },
49+
],
4850
},
4951
],
5052
},

lambdas/letter-updates-transformer/src/letter-updates-transformer.ts

Lines changed: 57 additions & 48 deletions
Original file line number · Diff line number · Diff line change
@@ -10,7 +10,8 @@ import {
1010
PublishBatchRequestEntry,
1111
} from "@aws-sdk/client-sns";
1212
import { LetterEvent } from "@nhsdigital/nhs-notify-event-schemas-supplier-api/src";
13-
import { MetricsLogger, Unit, metricScope } from "aws-embedded-metrics";
13+
import { Unit } from "aws-embedded-metrics";
14+
import pino from "pino";
1415
import mapLetterToCloudEvent from "./mappers/letter-mapper";
1516
import { Deps } from "./deps";
1617
import { LetterForEventPub, LetterSchemaForEventPub } from "./types";
@@ -19,46 +20,42 @@ import { LetterForEventPub, LetterSchemaForEventPub } from "./types";
1920
const BATCH_SIZE = 10;
2021

2122
export default function createHandler(deps: Deps): Handler<KinesisStreamEvent> {
22-
return metricScope((metrics: MetricsLogger) => {
23-
return async (streamEvent: KinesisStreamEvent) => {
24-
deps.logger.info({ description: "Received event", streamEvent });
23+
return async (streamEvent: KinesisStreamEvent) => {
24+
deps.logger.info({ description: "Received event", streamEvent });
25+
deps.logger.info({
26+
description: "Number of records",
27+
count: streamEvent.Records?.length || 0,
28+
});
29+
30+
// Ensure logging by extracting all records first
31+
const ddbRecords: DynamoDBRecord[] = streamEvent.Records.map((record) =>
32+
extractPayload(record, deps),
33+
);
34+
35+
const cloudEvents: LetterEvent[] = ddbRecords
36+
.filter((record) => filterRecord(record, deps))
37+
.map((element) => extractNewLetter(element))
38+
.map((element) => mapLetterToCloudEvent(element, deps.env.EVENT_SOURCE));
39+
40+
const eventTypeCount: Map<string, number> =
41+
populateEventTypeMap(cloudEvents);
42+
for (const batch of generateBatches(cloudEvents)) {
2543
deps.logger.info({
26-
description: "Number of records",
27-
count: streamEvent.Records?.length || 0,
44+
description: "Publishing batch",
45+
size: batch.length,
46+
letterEvents: batch,
2847
});
29-
30-
// Ensure logging by extracting all records first
31-
const ddbRecords: DynamoDBRecord[] = streamEvent.Records.map((record) =>
32-
extractPayload(record, deps),
48+
await deps.snsClient.send(
49+
new PublishBatchCommand({
50+
TopicArn: deps.env.EVENTPUB_SNS_TOPIC_ARN,
51+
PublishBatchRequestEntries: batch.map((element, index) =>
52+
buildMessage(element, index),
53+
),
54+
}),
3355
);
34-
35-
const cloudEvents: LetterEvent[] = ddbRecords
36-
.filter((record) => filterRecord(record, deps))
37-
.map((element) => extractNewLetter(element))
38-
.map((element) =>
39-
mapLetterToCloudEvent(element, deps.env.EVENT_SOURCE),
40-
);
41-
42-
const eventTypeCount: Map<string, number> =
43-
populateEventTypeMap(cloudEvents);
44-
for (const batch of generateBatches(cloudEvents)) {
45-
deps.logger.info({
46-
description: "Publishing batch",
47-
size: batch.length,
48-
letterEvents: batch,
49-
});
50-
await deps.snsClient.send(
51-
new PublishBatchCommand({
52-
TopicArn: deps.env.EVENTPUB_SNS_TOPIC_ARN,
53-
PublishBatchRequestEntries: batch.map((element, index) =>
54-
buildMessage(element, index),
55-
),
56-
}),
57-
);
58-
}
59-
emitMetrics(metrics, eventTypeCount);
60-
};
61-
});
56+
}
57+
emitMetrics(deps.logger, eventTypeCount);
58+
};
6259
}
6360

6461
function populateEventTypeMap(cloudEvents: LetterEvent[]) {
@@ -69,18 +66,30 @@ function populateEventTypeMap(cloudEvents: LetterEvent[]) {
6966
return evtMap;
7067
}
7168

72-
function emitMetrics(
73-
metrics: MetricsLogger,
74-
eventTypeCount: Map<string, number>,
75-
) {
76-
metrics.setNamespace(
77-
process.env.AWS_LAMBDA_FUNCTION_NAME || "letter-updates-transformer",
78-
);
69+
function emitMetrics(logger: pino.Logger, eventTypeCount: Map<string, number>) {
70+
const namespace =
71+
process.env.AWS_LAMBDA_FUNCTION_NAME || "letter-updates-transformer";
72+
7973
for (const [type, count] of eventTypeCount) {
80-
metrics.putDimensions({
74+
const emf = {
75+
LogGroup: namespace,
76+
ServiceName: namespace,
8177
eventType: type,
82-
});
83-
metrics.putMetric("events published", count, Unit.Count);
78+
_aws: {
79+
Timestamp: Date.now(),
80+
CloudWatchMetrics: [
81+
{
82+
Namespace: namespace,
83+
Dimensions: ["eventType", "ServiceName", "LogGroup"],
84+
Metrics: [
85+
{ Name: "events published", Value: count, Unit: Unit.Count },
86+
],
87+
},
88+
],
89+
},
90+
"events published": count,
91+
};
92+
logger.info(emf);
8493
}
8594
}
8695

lambdas/mi-updates-transformer/src/mi-updates-transformer.ts

Lines changed: 46 additions & 37 deletions
Original file line number · Diff line number · Diff line change
@@ -11,7 +11,8 @@ import {
1111
PublishBatchRequestEntry,
1212
} from "@aws-sdk/client-sns";
1313
import { MISubmittedEvent } from "@nhsdigital/nhs-notify-event-schemas-supplier-api/src";
14-
import { MetricsLogger, Unit, metricScope } from "aws-embedded-metrics";
14+
import { Unit } from "aws-embedded-metrics";
15+
import pino from "pino";
1516
import { mapMIToCloudEvent } from "./mappers/mi-mapper";
1617
import { Deps } from "./deps";
1718

@@ -50,49 +51,57 @@ function extractMIData(record: DynamoDBRecord): MI {
5051
return MISchema.parse(unmarshall(newImage as any));
5152
}
5253

53-
function emitMetrics(
54-
metrics: MetricsLogger,
55-
eventTypeCount: Map<string, number>,
56-
) {
57-
metrics.setNamespace(
58-
process.env.AWS_LAMBDA_FUNCTION_NAME || "letter-updates-transformer",
59-
);
54+
function emitMetrics(logger: pino.Logger, eventTypeCount: Map<string, number>) {
55+
const namespace =
56+
process.env.AWS_LAMBDA_FUNCTION_NAME || "mi-updates-transformer";
6057
for (const [type, count] of eventTypeCount) {
61-
metrics.putDimensions({
58+
const emf = {
59+
LogGroup: namespace,
60+
ServiceName: namespace,
6261
eventType: type,
63-
});
64-
metrics.putMetric("events published", count, Unit.Count);
62+
_aws: {
63+
Timestamp: Date.now(),
64+
CloudWatchMetrics: [
65+
{
66+
Namespace: namespace,
67+
Dimensions: ["LogGroup", "ServiceName", "eventType"],
68+
Metrics: [
69+
{ Name: "events published", Value: count, Unit: Unit.Count },
70+
],
71+
},
72+
],
73+
},
74+
};
75+
logger.info(emf);
6576
}
6677
}
6778

6879
export default function createHandler(deps: Deps): Handler<KinesisStreamEvent> {
69-
return metricScope((metrics: MetricsLogger) => {
70-
return async (streamEvent: KinesisStreamEvent) => {
71-
deps.logger.info({ description: "Received event", streamEvent });
80+
return async (streamEvent: KinesisStreamEvent) => {
81+
deps.logger.info({ description: "Received event", streamEvent });
7282

73-
const cloudEvents: MISubmittedEvent[] = streamEvent.Records.map(
74-
(record) => extractPayload(record, deps),
75-
)
76-
.filter((record) => record.eventName === "INSERT")
77-
.map((element) => extractMIData(element))
78-
.map((payload) => mapMIToCloudEvent(payload, deps));
83+
const cloudEvents: MISubmittedEvent[] = streamEvent.Records.map((record) =>
84+
extractPayload(record, deps),
85+
)
86+
.filter((record) => record.eventName === "INSERT")
87+
.map((element) => extractMIData(element))
88+
.map((payload) => mapMIToCloudEvent(payload, deps));
7989

80-
const eventTypeCount = new Map<string, number>();
81-
for (const batch of generateBatches(cloudEvents)) {
82-
await deps.snsClient.send(
83-
new PublishBatchCommand({
84-
TopicArn: deps.env.EVENTPUB_SNS_TOPIC_ARN,
85-
PublishBatchRequestEntries: batch.map((element) => {
86-
eventTypeCount.set(
87-
element.type,
88-
(eventTypeCount.get(element.type) || 0) + 1,
89-
);
90-
return buildMessage(element, deps);
91-
}),
90+
const eventTypeCount = new Map<string, number>();
91+
for (const batch of generateBatches(cloudEvents)) {
92+
await deps.snsClient.send(
93+
new PublishBatchCommand({
94+
TopicArn: deps.env.EVENTPUB_SNS_TOPIC_ARN,
95+
PublishBatchRequestEntries: batch.map((element) => {
96+
eventTypeCount.set(
97+
element.type,
98+
(eventTypeCount.get(element.type) || 0) + 1,
99+
);
100+
return buildMessage(element, deps);
92101
}),
93-
);
94-
}
95-
emitMetrics(metrics, eventTypeCount);
96-
};
97-
});
102+
}),
103+
);
104+
}
105+
emitMetrics(deps.logger, eventTypeCount);
106+
};
98107
}

0 commit comments

Comments (0)