diff --git a/convertors/move-timestamp-out-of-payload.js b/convertors/move-timestamp-out-of-payload.js new file mode 100644 index 000000000..3bea5bf60 --- /dev/null +++ b/convertors/move-timestamp-out-of-payload.js @@ -0,0 +1,231 @@ +require('dotenv').config(); +const { MongoClient } = require('mongodb'); + +/** + * Limit for one time documents selection, used to reduce the load on the database + */ +const documentsSelectionLimit = 10000; + +/** + * @param db - mongo db instance + * @param collectionName - name of the collection to be updated + */ +async function movePayloadTimestampToEventLevel(db, collectionName) { + const collection = db.collection(collectionName); + + const docsToUpdate = collection.find( + { timestamp: { $exists: false } }, + { projection: { _id: 1, 'payload.timestamp': 1 } } + ).limit(documentsSelectionLimit); + + const batchedOps = []; + + let currentCount = 0; + + for await (const doc of docsToUpdate) { + process.stdout.write(`\r${currentCount} documents added to batch`); + + if (!doc.payload.timestamp) { + continue; + } + + batchedOps.push({ + updateOne: { + filter: { _id: doc._id }, + update: { + $set: { timestamp: Number(doc.payload.timestamp)}, + $unset: {'payload.timestamp': ''}, + } + } + }) + + currentCount++; + } + + if (currentCount > 0) { + await collection.bulkWrite(batchedOps); + } + + return currentCount +} +/** + * @param db - mongo db instance + * @param repetitionCollectionName - repetitions collection to be updated + * @param projectId - project id of current repetitions collection + */ +async function backfillTimestampsFromEvents(db, repetitionCollectionName, projectId) { + const repetitions = db.collection(repetitionCollectionName); + const events = db.collection(`events:${projectId}`); + + let bulkOps = []; + let repetitionCount = 1; + + const repetitionsList = await repetitions.find( + { + timestamp: { $exists: false }, + }, + { projection: { _id: 1, groupHash: 1 } } + ).limit(documentsSelectionLimit).toArray(); + + 
const groupHashList = []; + + for (const repetition of repetitionsList) { + process.stdout.write(`\r[${repetitionCount} repetition] update with timestamp now have [${bulkOps.length + 1}] ops in bulkOps`); + groupHashList.push(repetition.groupHash); + repetitionCount++; + } + + const relatedEvents = await events.find( + { groupHash: { $in: groupHashList } }, + { projection: { timestamp: 1, groupHash: 1 } } + ).toArray(); + + const relatedEventsMap = new Map() + + relatedEvents.forEach(e => { + relatedEventsMap.set(e.groupHash, e); + }) + + for (const repetition of repetitionsList) { + const relatedEvent = relatedEventsMap.get(repetition.groupHash); + + if (!relatedEvent) { + bulkOps.push({ + deleteOne: { + filter: { _id: repetition._id } + } + }) + } else if (relatedEvent?.timestamp !== null) { + bulkOps.push({ + updateOne: { + filter: { _id: repetition._id }, + update: { $set: { timestamp: Number(relatedEvent.timestamp) } }, + }, + }); + } + } + + let processed = 0; + + if (bulkOps.length > 0) { + const result = await repetitions.bulkWrite(bulkOps); + const updated = result.modifiedCount; + const deleted = result.deletedCount; + processed = bulkOps.length; + console.log(` updates (${processed} processed, ${updated} updated, ${deleted} deleted)`); + + if (updated + deleted === 0) { + repetitionCollectionsToCheck.filter(collection => collection !== repetition) + } + } + + return processed; +} + +/** + * Method that runs convertor script + */ +async function run() { + const fullUri = 'mongodb://hawk_new:evieg9bauK0ahs2youhoh7aer7kohT@rc1d-2jltinutse1eadfs.mdb.yandexcloud.net:27018/hawk_events?authSource=admin&replicaSet=rs01&tls=true&tlsInsecure=true'; + + // Parse the Mongo URL manually + const mongoUrl = new URL(fullUri); + const databaseName = 'hawk_events'; + + // Extract query parameters + const queryParams = Object.fromEntries(mongoUrl.searchParams.entries()); + + // Compose connection options manually + const options = { + useNewUrlParser: true, + 
useUnifiedTopology: true, + authSource: queryParams.authSource || 'admin', + replicaSet: queryParams.replicaSet || undefined, + tls: queryParams.tls === 'true', + tlsInsecure: queryParams.tlsInsecure === 'true', + // connectTimeoutMS: 3600000, + // socketTimeoutMS: 3600000, + }; + + // Remove query string from URI + mongoUrl.search = ''; + const cleanUri = mongoUrl.toString(); + + console.log('Connecting to:', cleanUri); + console.log('With options:', options); + + const client = new MongoClient(cleanUri, options); + + await client.connect(); + const db = client.db(databaseName); + + console.log(`Connected to database: ${databaseName}`); + + const collections = await db.listCollections({}, { + authorizedCollections: true, + nameOnly: true, + }).toArray(); + + let eventCollectionsToCheck = collections.filter(col => /^events:/.test(col.name)).map(col => col.name); + let repetitionCollectionsToCheck = collections.filter(col => /^repetitions:/.test(col.name)).map(col => col.name); + + console.log(`Found ${eventCollectionsToCheck.length} event collections.`); + console.log(`Found ${repetitionCollectionsToCheck.length} repetition collections.`); + + // Convert events + let i = 1; + let documentsUpdatedCount = 1 + + while (documentsUpdatedCount != 0) { + documentsUpdatedCount = 0; + i = 1; + const collectionsToUpdateCount = eventCollectionsToCheck.length; + + for (const collectionName of eventCollectionsToCheck) { + console.log(`[${i}/${collectionsToUpdateCount}] Processing ${collectionName}`); + const updated = await movePayloadTimestampToEventLevel(db, collectionName); + + if (updated === 0) { + eventCollectionsToCheck = eventCollectionsToCheck.filter(collection => collection !== collectionName); + } + + documentsUpdatedCount += updated + i++; + } + } + + // Convert repetitions + backfill from events + documentsUpdatedCount = 1; + + while (documentsUpdatedCount != 0) { + documentsUpdatedCount = 0; + i = 1; + const collectionsToUpdateCount = 
repetitionCollectionsToCheck.length; + + for (const collectionName of repetitionCollectionsToCheck) { + console.log(`[${i}/${collectionsToUpdateCount}] Processing ${collectionName}`); + const projectId = collectionName.split(':')[1]; + + let updated = 0; + + updated += await movePayloadTimestampToEventLevel(db, collectionName); + updated += await backfillTimestampsFromEvents(db, collectionName, projectId); + + if (updated === 0) { + repetitionCollectionsToCheck = repetitionCollectionsToCheck.filter(collection => collection !== collectionName); + } + + documentsUpdatedCount += updated; + i++; + } + + console.log(`Conversion iteration complete. ${documentsUpdatedCount} documents updated`); + } + + await client.close(); +} + +run().catch(err => { + console.error('❌ Script failed:', err); + process.exit(1); +}); \ No newline at end of file diff --git a/lib/event-worker.ts b/lib/event-worker.ts index e44c849f9..a031ed9a8 100644 --- a/lib/event-worker.ts +++ b/lib/event-worker.ts @@ -1,7 +1,7 @@ import { Worker } from './worker'; import * as WorkerNames from './workerNames'; import { GroupWorkerTask } from 'hawk-worker-grouper/types/group-worker-task'; -import { EventWorkerTask } from './types/event-worker-task'; +import { CatcherMessageType, CatcherMessagePayload, CatcherMessageAccepted, ErrorsCatcherType } from '@hawk.so/types'; /** * Defines a Worker that handles events from Catcher. 
@@ -12,30 +12,31 @@ export abstract class EventWorker extends Worker { * Worker type (will pull tasks from Registry queue with the same name) * 'errors/nodejs' for example */ - public type = ''; + public type: ErrorsCatcherType; /** * Message handle function * - * @param {EventWorkerTask} event - event to handle + * @param {CatcherMessageAccepted} task - worker task to handle */ - public async handle(event: EventWorkerTask): Promise { - this.validate(event); + public async handle(task: CatcherMessageAccepted): Promise { + this.validate(task); await this.addTask(WorkerNames.GROUPER, { - projectId: event.projectId, - catcherType: this.type, - event: event.payload, - } as GroupWorkerTask); + projectId: task.projectId, + catcherType: this.type as CatcherMessageType, + payload: task.payload as CatcherMessagePayload, + timestamp: task.timestamp, + } as GroupWorkerTask); } /** * Validate passed event data * - * @param {EventWorkerTask} event - event to be validated + * @param {CatcherMessageAccepted} task - task to be validated */ - protected validate(event: EventWorkerTask): void { - if (!event.projectId || !event.payload) { + protected validate(task: CatcherMessageAccepted): void { + if (!task.projectId || !task.payload || !task.timestamp) { throw new Error('Bad data was given'); } } diff --git a/lib/types/event-worker-task.d.ts b/lib/types/event-worker-task.d.ts deleted file mode 100644 index 873e83f1b..000000000 --- a/lib/types/event-worker-task.d.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { WorkerTask } from './worker-task'; -import { DecodedEventData, EventAddons } from '@hawk.so/types'; - -/** - * Format of task that handled by Event Workers - */ -export interface EventWorkerTask extends WorkerTask { - /** - * User project's id extracted from Integration Token - */ - projectId: string; - - /** - * Hawk Catcher name - */ - catcherType: string; - - /** - * All information about the event - */ - payload: DecodedEventData; -} diff --git a/lib/types/worker-task.d.ts 
b/lib/types/worker-task.d.ts index b645811ab..ab1f668b7 100644 --- a/lib/types/worker-task.d.ts +++ b/lib/types/worker-task.d.ts @@ -4,4 +4,4 @@ * When you create new type of worker, describe its task's structure with interface inherited from WorkerTask */ export interface WorkerTask { -} +} \ No newline at end of file diff --git a/lib/utils/unsafeFields.ts b/lib/utils/unsafeFields.ts index f74564eaa..b89727f1f 100644 --- a/lib/utils/unsafeFields.ts +++ b/lib/utils/unsafeFields.ts @@ -50,6 +50,14 @@ export function encodeUnsafeFields(event: GroupedEventDBScheme | RepetitionDBSch * Repetition includes delta field, grouped event includes payload */ if ('delta' in event) { + /** + * We need to check if delta field exists but with undefined value + * It would mean that repetition payload is same with original event payload + */ + if (event.delta === undefined) { + return; + } + fieldValue = event.delta[field]; } else { fieldValue = event.payload[field]; diff --git a/lib/workerErrors.ts b/lib/workerErrors.ts index ed4a1a2e4..1dc57f649 100644 --- a/lib/workerErrors.ts +++ b/lib/workerErrors.ts @@ -2,7 +2,7 @@ * Class for critical errors * have to stop process */ -import { EventAddons, EventContext, EventDataAccepted } from '@hawk.so/types'; +import { EventAddons, EventContext, EventData } from '@hawk.so/types'; /** * Error class with additional error context for debugging @@ -71,8 +71,8 @@ export class DiffCalculationError extends NonCriticalError { */ constructor( msg: string | Error, - originalEvent: EventDataAccepted, - eventToCompare: EventDataAccepted + originalEvent: EventData, + eventToCompare: EventData ) { super(msg); this.context = { diff --git a/package.json b/package.json index 8822851d2..d27a60ab0 100644 --- a/package.json +++ b/package.json @@ -14,6 +14,7 @@ "migration-add": "migrate-mongo create", "migrate:up": "migrate-mongo up", "migrate:down": "migrate-mongo down", + "convert": "node ./convertors/move-timestamp-out-of-payload.js", "lint": "eslint -c 
./.eslintrc.js --ext .ts,.js --fix .", "test": "jest --coverage --runInBand", "test:archiver": "jest workers/archiver", @@ -48,7 +49,7 @@ }, "dependencies": { "@hawk.so/nodejs": "^3.1.1", - "@hawk.so/types": "^0.1.29", + "@hawk.so/types": "^0.1.32", "@types/amqplib": "^0.8.2", "@types/jest": "^29.2.3", "@types/mongodb": "^3.5.15", diff --git a/workers/archiver/src/index.ts b/workers/archiver/src/index.ts index 6c2124a88..39d07e412 100644 --- a/workers/archiver/src/index.ts +++ b/workers/archiver/src/index.ts @@ -168,7 +168,7 @@ export default class ArchiverWorker extends Worker { const repetitionsBulk = repetitionsCollection.initializeUnorderedBulkOp(); repetitionsBulk.find({ - 'payload.timestamp': { + timestamp: { $lt: maxOldTimestamp, }, }).delete(); diff --git a/workers/archiver/tests/repetitions.mock.ts b/workers/archiver/tests/repetitions.mock.ts index f5371c734..996972176 100644 --- a/workers/archiver/tests/repetitions.mock.ts +++ b/workers/archiver/tests/repetitions.mock.ts @@ -4,45 +4,31 @@ export const mockedRepetitions = [ */ { groupHash: '10a964aeb1ca8daa7a041c249b1c4d9cc351afe86ee7e13c5aa441c42c3f9b12', - payload: { - timestamp: 1586892935, - }, + timestamp: 1586892935, }, { groupHash: '10a964aeb1ca8daa7a041c249b1c4d9cc351afe86ee7e13c5aa441c42c3f9b12', - payload: { - timestamp: 1586892954, - }, + timestamp: 1586892954, }, { groupHash: '10a964aeb1ca8daa7a041c249b1c4d9cc351afe86ee7e13c5aa441c42c3f9b12', - payload: { - timestamp: 1586892999, - }, + timestamp: 1586892999, }, { groupHash: '10a964aeb1ca8daa7a041c249b1c4d9cc351afe86ee7e13c5aa441c42c3f9b12', - payload: { - timestamp: 1586893592, - }, + timestamp: 1586893592, }, { groupHash: '10a964aeb1ca8daa7a041c249b1c4d9cc351afe86ee7e13c5aa441c42c3f9b12', - payload: { - timestamp: 1586894214, - }, + timestamp: 1586894214, }, { groupHash: '10a964aeb1ca8daa7a041c249b1c4d9cc351afe86ee7e13c5aa441c42c3f9b12', - payload: { - timestamp: 1586894521, - }, + timestamp: 1586894521, }, { groupHash: 
'10a964aeb1ca8daa7a041c249b1c4d9cc351afe86ee7e13c5aa441c42c3f9b12', - payload: { - timestamp: 1586894553, - }, + timestamp: 1586894553, }, /** @@ -50,157 +36,105 @@ export const mockedRepetitions = [ */ { groupHash: '14b86d7afd3a4e8faba23faeed4b2587d1404528944616a2b8dee7d8a6bd6a38', - payload: { - timestamp: 1584980933, - }, + timestamp: 1584980933, }, { groupHash: '14b86d7afd3a4e8faba23faeed4b2587d1404528944616a2b8dee7d8a6bd6a38', - payload: { - timestamp: 1584980939, - }, + timestamp: 1584980939, }, { groupHash: '14b86d7afd3a4e8faba23faeed4b2587d1404528944616a2b8dee7d8a6bd6a38', - payload: { - timestamp: 1584989402, - }, + timestamp: 1584989402, }, { groupHash: '14b86d7afd3a4e8faba23faeed4b2587d1404528944616a2b8dee7d8a6bd6a38', - payload: { - timestamp: 1584989402, - }, + timestamp: 1584989402, }, { groupHash: '14b86d7afd3a4e8faba23faeed4b2587d1404528944616a2b8dee7d8a6bd6a38', - payload: { - timestamp: 1584989420, - }, + timestamp: 1584989420, }, { groupHash: '5f24dd5d6be4226e846383e015fd976affcee9f02eaa1d5cafc7d48710a9dfe7', - payload: { - timestamp: 1584989420, - }, + timestamp: 1584989420, }, { groupHash: '3e980313a7262b728e11feff38dc090e6187b379ea9fc7861506d122d3e5ccf2', - payload: { - timestamp: 1584989420, - }, + timestamp: 1584989420, }, { groupHash: '3e980313a7262b728e11feff38dc090e6187b379ea9fc7861506d122d3e5ccf2', - payload: { - timestamp: 1584989420, - }, + timestamp: 1584989420, }, { groupHash: '3e980313a7262b728e11feff38dc090e6187b379ea9fc7861506d122d3e5ccf2', - payload: { - timestamp: 1584989420, - }, + timestamp: 1584989420, }, { groupHash: '5c9e6da01f91bb4b0a9d32acd862c23584a1ef037083d75f37a386a8a0275685', - payload: { - timestamp: 1584989420, - }, + timestamp: 1584989420, }, { groupHash: '5c9e6da01f91bb4b0a9d32acd862c23584a1ef037083d75f37a386a8a0275685', - payload: { - timestamp: 1584989420, - }, + timestamp: 1584989420, }, { groupHash: '5c9e6da01f91bb4b0a9d32acd862c23584a1ef037083d75f37a386a8a0275685', - payload: { - timestamp: 1584989420, - }, 
+ timestamp: 1584989420, }, { groupHash: '1143a799caa8f596740ed3741d280bbf6bac614797fc4e0e3cc628f218bab8ca', - payload: { - timestamp: 1584989420, - }, + timestamp: 1584989420, }, { groupHash: '1143a799caa8f596740ed3741d280bbf6bac614797fc4e0e3cc628f218bab8ca', - payload: { - timestamp: 1584989420, - }, + timestamp: 1584989420, }, { groupHash: '1143a799caa8f596740ed3741d280bbf6bac614797fc4e0e3cc628f218bab8ca', - payload: { - timestamp: 1584989420, - }, + timestamp: 1584989420, }, { groupHash: '1143a799caa8f596740ed3741d280bbf6bac614797fc4e0e3cc628f218bab8ca', - payload: { - timestamp: 1584989420, - }, + timestamp: 1584989420, }, { groupHash: '1143a799caa8f596740ed3741d280bbf6bac614797fc4e0e3cc628f218bab8ca', - payload: { - timestamp: 1584989420, - }, + timestamp: 1584989420, }, { groupHash: '1143a799caa8f596740ed3741d280bbf6bac614797fc4e0e3cc628f218bab8ca', - payload: { - timestamp: 1584989420, - }, + timestamp: 1584989420, }, { groupHash: '1143a799caa8f596740ed3741d280bbf6bac614797fc4e0e3cc628f218bab8ca', - payload: { - timestamp: 1584989420, - }, + timestamp: 1584989420, }, { groupHash: '1143a799caa8f596740ed3741d280bbf6bac614797fc4e0e3cc628f218bab8ca', - payload: { - timestamp: 1584989420, - }, + timestamp: 1584989420, }, { groupHash: '1143a799caa8f596740ed3741d280bbf6bac614797fc4e0e3cc628f218bab8ca', - payload: { - timestamp: 1584989420, - }, + timestamp: 1584989420, }, { groupHash: '0e69096ae18dffb3128ccebbee103ad78ef755160ba25ae4dcbf3d1285f30590', - payload: { - timestamp: 1584989420, - }, + timestamp: 1584989420, }, { groupHash: 'ce59309fb0976b294fceaf0d132b0b5e05def0e70a35e7e328e35953248a2b64', - payload: { - timestamp: 1584989420, - }, + timestamp: 1584989420, }, { groupHash: 'ce59309fb0976b294fceaf0d132b0b5e05def0e70a35e7e328e35953248a2b64', - payload: { - timestamp: 1584989420, - }, + timestamp: 1584989420, }, { groupHash: '6c59197338205460a0cb0bdfbe2c3bae6e575ab361c81b2e93bd91ce49646fde', - payload: { - timestamp: 1584989420, - }, + timestamp: 
1584989420, }, { groupHash: '6c59197338205460a0cb0bdfbe2c3bae6e575ab361c81b2e93bd91ce49646fde', - payload: { - timestamp: 1584989420, - }, + timestamp: 1584989420, }, ]; diff --git a/workers/default/src/index.ts b/workers/default/src/index.ts index 3553cf58e..8c9c15d38 100644 --- a/workers/default/src/index.ts +++ b/workers/default/src/index.ts @@ -1,3 +1,4 @@ +import { ErrorsCatcherType } from '@hawk.so/types'; import { EventWorker } from '../../../lib/event-worker'; import * as pkg from '../package.json'; import { DefaultEventWorkerTask } from '../types/default-event-worker-task'; @@ -9,7 +10,7 @@ export default class DefaultEventWorker extends EventWorker { /** * Worker type (will pull tasks from Registry queue with the same name) */ - public type: string = pkg.workerType; + public type: ErrorsCatcherType = pkg.workerType as ErrorsCatcherType; /** * Message handle function diff --git a/workers/default/tests/index.test.ts b/workers/default/tests/index.test.ts index 1f449ead3..904a650f1 100644 --- a/workers/default/tests/index.test.ts +++ b/workers/default/tests/index.test.ts @@ -14,8 +14,8 @@ const testEventData = { title: 'TestError: Everything is fine.', type: null, backtrace: null, - timestamp: 1564948772936, }, + timestamp: 1564948772936, } as DefaultEventWorkerTask; describe('DefaultEventWorker', () => { diff --git a/workers/default/types/default-event-worker-task.d.ts b/workers/default/types/default-event-worker-task.d.ts index fe32832a1..dc0aee553 100644 --- a/workers/default/types/default-event-worker-task.d.ts +++ b/workers/default/types/default-event-worker-task.d.ts @@ -1,17 +1,12 @@ -import { EventWorkerTask } from '../../../lib/types/event-worker-task'; -import { DecodedEventData, DefaultAddons } from '@hawk.so/types'; +import { CatcherMessageAccepted, ErrorsCatcherType } from '@hawk.so/types'; /** - * Describe a context passed from Default Catcher + * Default Event Worker can process events with all types except 'errors/javascript' + * because it is 
handled by JavaScript Event Worker. */ -interface DefaultEventPayload extends DecodedEventData {} +export type DefaultCatcherMessageType = Exclude; /** * Format of task for Default Event Worker */ -export interface DefaultEventWorkerTask extends EventWorkerTask { - /** - * Language-specific payload - */ - payload: DefaultEventPayload; -} +export interface DefaultEventWorkerTask extends CatcherMessageAccepted {} diff --git a/workers/email/tests/provider.test.ts b/workers/email/tests/provider.test.ts index 89f39f407..d9fe9f401 100644 --- a/workers/email/tests/provider.test.ts +++ b/workers/email/tests/provider.test.ts @@ -62,9 +62,9 @@ describe('EmailProvider', () => { events: [{ event: { totalCount: 10, + timestamp: Date.now(), payload: { title: 'New event', - timestamp: Date.now(), backtrace: [{ file: 'file', line: 1, @@ -114,9 +114,9 @@ describe('EmailProvider', () => { events: [{ event: { totalCount: 10, + timestamp: Date.now(), payload: { title: 'New event', - timestamp: Date.now(), backtrace: [{ file: 'file', line: 1, @@ -165,9 +165,9 @@ describe('EmailProvider', () => { const vars: AssigneeTemplateVariables = { event: { totalCount: 10, + timestamp: Date.now(), payload: { title: 'New event', - timestamp: Date.now(), backtrace: [{ file: 'file', line: 1, diff --git a/workers/grouper/src/data-filter.ts b/workers/grouper/src/data-filter.ts index 51fef51c4..40a4acf93 100644 --- a/workers/grouper/src/data-filter.ts +++ b/workers/grouper/src/data-filter.ts @@ -1,4 +1,4 @@ -import type { EventAddons, EventDataAccepted } from '@hawk.so/types'; +import type { EventAddons, EventData } from '@hawk.so/types'; import { unsafeFields } from '../../../lib/utils/unsafeFields'; /** @@ -60,7 +60,7 @@ export default class DataFilter { * * @param event - event to process */ - public processEvent(event: EventDataAccepted): void { + public processEvent(event: EventData): void { unsafeFields.forEach(field => { if (event[field]) { this.processField(event[field]); diff --git 
a/workers/grouper/src/index.ts b/workers/grouper/src/index.ts index 73bc589dc..d6a01caf0 100644 --- a/workers/grouper/src/index.ts +++ b/workers/grouper/src/index.ts @@ -6,7 +6,16 @@ import { Worker } from '../../../lib/worker'; import * as WorkerNames from '../../../lib/workerNames'; import * as pkg from '../package.json'; import type { GroupWorkerTask, RepetitionDelta } from '../types/group-worker-task'; -import type { EventAddons, EventDataAccepted, GroupedEventDBScheme, BacktraceFrame, SourceCodeLine, ProjectEventGroupingPatternsDBScheme } from '@hawk.so/types'; +import type { + EventAddons, + EventData, + GroupedEventDBScheme, + BacktraceFrame, + SourceCodeLine, + ProjectEventGroupingPatternsDBScheme, + ErrorsCatcherType, + CatcherMessagePayload +} from '@hawk.so/types'; import type { RepetitionDBScheme } from '../types/repetition'; import { DatabaseReadWriteError, DiffCalculationError, ValidationError } from '../../../lib/workerErrors'; import { decodeUnsafeFields, encodeUnsafeFields } from '../../../lib/utils/unsafeFields'; @@ -91,7 +100,7 @@ export default class GrouperWorker extends Worker { * * @param task - event to handle */ - public async handle(task: GroupWorkerTask): Promise { + public async handle(task: GroupWorkerTask): Promise { let uniqueEventHash = await this.getUniqueEventHash(task); /** @@ -103,7 +112,7 @@ export default class GrouperWorker extends Worker { * If we couldn't group by group hash (title), try grouping by Levenshtein distance or patterns */ if (!existedEvent) { - const similarEvent = await this.findSimilarEvent(task.projectId, task.event); + const similarEvent = await this.findSimilarEvent(task.projectId, task.payload); if (similarEvent) { this.logger.info(`similar event: ${JSON.stringify(similarEvent)}`); @@ -128,16 +137,16 @@ export default class GrouperWorker extends Worker { /** * Trim source code lines to prevent memory leaks */ - this.trimSourceCodeLines(task.event); + this.trimSourceCodeLines(task.payload); /** * Filter 
sensitive information */ - this.dataFilter.processEvent(task.event); + this.dataFilter.processEvent(task.payload); if (isFirstOccurrence) { try { - const incrementAffectedUsers = !!task.event.user; + const incrementAffectedUsers = !!task.payload.user; /** * Insert new event @@ -146,7 +155,8 @@ export default class GrouperWorker extends Worker { groupHash: uniqueEventHash, totalCount: 1, catcherType: task.catcherType, - payload: task.event, + payload: task.payload, + timestamp: task.timestamp, usersAffected: incrementAffectedUsers ? 1 : 0, } as GroupedEventDBScheme); @@ -198,16 +208,16 @@ export default class GrouperWorker extends Worker { /** * Calculate delta between original event and repetition */ - delta = computeDelta(existedEvent.payload, task.event); + delta = computeDelta(existedEvent.payload, task.payload); } catch (e) { console.error(e); - throw new DiffCalculationError(e, existedEvent.payload, task.event); + throw new DiffCalculationError(e, existedEvent.payload, task.payload); } const newRepetition = { groupHash: uniqueEventHash, delta: JSON.stringify(delta), - timestamp: task.event.timestamp, + timestamp: task.timestamp, } as RepetitionDBScheme; repetitionId = await this.saveRepetition(task.projectId, newRepetition); @@ -216,7 +226,7 @@ export default class GrouperWorker extends Worker { /** * Store events counter by days */ - await this.saveDailyEvents(task.projectId, uniqueEventHash, task.event.timestamp, repetitionId, incrementDailyAffectedUsers); + await this.saveDailyEvents(task.projectId, uniqueEventHash, task.timestamp, repetitionId, incrementDailyAffectedUsers); /** * Add task for NotifierWorker @@ -225,7 +235,7 @@ export default class GrouperWorker extends Worker { await this.addTask(WorkerNames.NOTIFIER, { projectId: task.projectId, event: { - title: task.event.title, + title: task.payload.title, groupHash: uniqueEventHash, isNew: isFirstOccurrence, }, @@ -238,7 +248,7 @@ export default class GrouperWorker extends Worker { * * @param event - 
event to process */ - private trimSourceCodeLines(event: EventDataAccepted): void { + private trimSourceCodeLines(event: EventData): void { if (!event.backtrace) { return; } @@ -262,10 +272,10 @@ export default class GrouperWorker extends Worker { * * @param task - worker task to create hash */ - private getUniqueEventHash(task: GroupWorkerTask): Promise { - return this.cache.get(`groupHash:${task.projectId}:${task.catcherType}:${task.event.title}`, () => { + private getUniqueEventHash(task: GroupWorkerTask): Promise { + return this.cache.get(`groupHash:${task.projectId}:${task.catcherType}:${task.payload.title}`, () => { return crypto.createHmac('sha256', process.env.EVENT_SECRET) - .update(task.catcherType + task.event.title) + .update(task.catcherType + task.payload.title) .digest('hex'); }); } @@ -276,7 +286,7 @@ export default class GrouperWorker extends Worker { * @param projectId - where to find * @param event - event to compare */ - private async findSimilarEvent(projectId: string, event: EventDataAccepted): Promise { + private async findSimilarEvent(projectId: string, event: EventData): Promise { const eventsCountToCompare = 60; const diffTreshold = 0.35; @@ -349,7 +359,7 @@ export default class GrouperWorker extends Worker { */ private async findMatchingPattern( patterns: ProjectEventGroupingPatternsDBScheme[], - event: EventDataAccepted + event: CatcherMessagePayload ): Promise { if (!patterns || patterns.length === 0) { return null; @@ -417,8 +427,8 @@ export default class GrouperWorker extends Worker { * @param existedEvent - original event to get its user * @returns {[boolean, boolean]} - whether to increment affected users for the repetition and the daily aggregation */ - private async shouldIncrementAffectedUsers(task: GroupWorkerTask, existedEvent: GroupedEventDBScheme): Promise<[boolean, boolean]> { - const eventUser = task.event.user; + private async shouldIncrementAffectedUsers(task: GroupWorkerTask, existedEvent: GroupedEventDBScheme): 
Promise<[boolean, boolean]> { + const eventUser = task.payload.user; /** * In case of no user, we don't need to increment affected users @@ -464,13 +474,13 @@ export default class GrouperWorker extends Worker { /** * Get midnight timestamps for the event and the next day */ - const eventMidnight = this.getMidnightByEventTimestamp(task.event.timestamp); - const eventNextMidnight = this.getMidnightByEventTimestamp(task.event.timestamp, true); + const eventMidnight = this.getMidnightByEventTimestamp(task.timestamp); + const eventNextMidnight = this.getMidnightByEventTimestamp(task.timestamp, true); /** * Check if incoming event has the same day as the original event */ - const isSameDay = existedEvent.payload.timestamp > eventMidnight && existedEvent.payload.timestamp < eventNextMidnight; + const isSameDay = existedEvent.timestamp > eventMidnight && existedEvent.timestamp < eventNextMidnight; /** * If incoming event has the same day as the original event and the same user, don't increment daily affected users @@ -487,7 +497,7 @@ export default class GrouperWorker extends Worker { .findOne({ groupHash: existedEvent.groupHash, 'payload.user.id': eventUser.id, - 'payload.timestamp': { + timestamp: { $gte: eventMidnight, $lt: eventNextMidnight, }, diff --git a/workers/grouper/src/utils/repetitionDiff.ts b/workers/grouper/src/utils/repetitionDiff.ts index 99ae48380..95f2834cd 100644 --- a/workers/grouper/src/utils/repetitionDiff.ts +++ b/workers/grouper/src/utils/repetitionDiff.ts @@ -1,4 +1,4 @@ -import type { EventAddons, EventDataAccepted } from '@hawk.so/types'; +import type { EventAddons, EventData } from '@hawk.so/types'; import { diff } from '@n1ru4l/json-patch-plus'; import type { RepetitionDelta } from '../../types/group-worker-task'; @@ -9,7 +9,7 @@ import type { RepetitionDelta } from '../../types/group-worker-task'; * @param repetition - one of remaining events * @returns delta {RepetitionDelta} */ -export function computeDelta(originalEvent: EventDataAccepted, 
repetition: EventDataAccepted): RepetitionDelta { +export function computeDelta(originalEvent: EventData, repetition: EventData): RepetitionDelta { const delta = diff({ left: originalEvent, right: repetition, diff --git a/workers/grouper/tests/data-filter.test.ts b/workers/grouper/tests/data-filter.test.ts index 15c8d4a83..d0a4c3af8 100644 --- a/workers/grouper/tests/data-filter.test.ts +++ b/workers/grouper/tests/data-filter.test.ts @@ -1,5 +1,5 @@ import '../../../env-test'; -import type { EventAddons, EventDataAccepted, Json } from '@hawk.so/types'; +import type { EventAddons, EventData, Json } from '@hawk.so/types'; import DataFilter from '../src/data-filter'; jest.mock('amqplib'); @@ -14,10 +14,9 @@ jest.mock('amqplib'); * @param [options.context] - generated event context * @param [options.addons] - generated event addons */ -function generateEvent({ context, addons }: {context?: Json, addons?: EventAddons}): EventDataAccepted { +function generateEvent({ context, addons }: {context?: Json, addons?: EventAddons}): EventData { return { title: 'Event with sensitive data', - timestamp: (new Date()).getTime(), backtrace: [], ...(context && { context, diff --git a/workers/grouper/tests/index.test.ts b/workers/grouper/tests/index.test.ts index aa955d737..bf3ea84e5 100644 --- a/workers/grouper/tests/index.test.ts +++ b/workers/grouper/tests/index.test.ts @@ -5,7 +5,7 @@ import type { RedisClientType } from 'redis'; import { createClient } from 'redis'; import type { Collection } from 'mongodb'; import { MongoClient } from 'mongodb'; -import type { EventAddons, EventDataAccepted } from '@hawk.so/types'; +import type { ErrorsCatcherType, EventAddons, EventData } from '@hawk.so/types'; import { MS_IN_SEC } from '../../../lib/utils/consts'; import * as mongodb from 'mongodb'; import { patch } from '@n1ru4l/json-patch-plus'; @@ -67,21 +67,25 @@ const projectMock = { }, unreadCount: 0, description: 'Test project for grouper worker tests', - eventGroupingPatterns: [ { _id: 
mongodb.ObjectId(), pattern: 'New error .*' }], + eventGroupingPatterns: [ { + _id: mongodb.ObjectId(), + pattern: 'New error .*', + } ], }; /** * Generates task for testing * * @param event - allows to override some event properties in generated task + * @param timestamp - timestamp of the event, defaults to current time */ -function generateTask(event: Partial> = undefined): GroupWorkerTask { +function generateTask(event: Partial> = undefined, timestamp: number = new Date().getTime()): GroupWorkerTask { return { projectId: projectIdMock, - catcherType: 'grouper', - event: Object.assign({ + catcherType: 'errors/javascript', + timestamp, + payload: Object.assign({ title: 'Hawk client catcher test', - timestamp: (new Date()).getTime(), backtrace: [], user: { id: generateRandomId(), @@ -233,22 +237,18 @@ describe('GrouperWorker', () => { const yesterday = today - secondsInDay; await worker.handle(generateTask({ - timestamp: yesterday, user: { id: 'customer1' }, - })); + }, yesterday)); await worker.handle(generateTask({ - timestamp: yesterday, user: { id: 'customer2' }, - })); + }, yesterday)); await worker.handle(generateTask({ - timestamp: today, user: { id: 'customer1' }, - })); + }, today)); await worker.handle(generateTask({ - timestamp: today, user: { id: 'customer2' }, - })); + }, today)); const dailyEvents = await dailyEventsCollection.find({}).toArray(); @@ -262,17 +262,14 @@ describe('GrouperWorker', () => { const today = (new Date()).getTime() / MS_IN_SEC; await worker.handle(generateTask({ - timestamp: yesterday, user: { id: 'customer1' }, - })); + }, yesterday)); await worker.handle(generateTask({ - timestamp: today, user: { id: 'customer1' }, - })); + }, today)); await worker.handle(generateTask({ - timestamp: today, user: { id: 'customer2' }, - })); + }, today)); /** * Get daily events ordered by timestamp desc @@ -329,7 +326,7 @@ describe('GrouperWorker', () => { test('Should save event even if its context is type of string', async () => { const task 
= generateTask(); - task.event.context = 'string context'; + task.payload.context = 'string context'; await worker.handle(task); expect((await eventsCollection.findOne({})).payload.context).toBe(null); @@ -383,8 +380,8 @@ describe('GrouperWorker', () => { await worker.handle(generateTask()); await worker.handle({ ...generatedTask, - event: { - ...generatedTask.event, + payload: { + ...generatedTask.payload, addons: { test: '8fred' }, context: { test: '8fred' }, }, @@ -401,22 +398,10 @@ describe('GrouperWorker', () => { const savedRepetition = await repetitionsCollection.findOne({}); - const savedDelta = savedRepetition.delta as string; - const parsedDelta = JSON.parse(savedDelta) as RepetitionDelta; + const savedDelta = savedRepetition.delta; + const parsedDelta = savedDelta as RepetitionDelta; - expect(parsedDelta.type).toBe(undefined); - expect(parsedDelta.backtrace).toBe(undefined); - expect(parsedDelta.context).toBe(undefined); - expect(parsedDelta.addons).toBe(undefined); - expect(parsedDelta.release).toBe(undefined); - expect(parsedDelta.user).toBe(undefined); - expect(parsedDelta.catcherVersion).toBe(undefined); - - /** - * Timestamp always unique, so it should be present in a stored payload diff - */ - expect(parsedDelta.timestamp).not.toBe(undefined); - expect(typeof parsedDelta.timestamp).toBe('object'); + expect(parsedDelta).toBe(null); }); test('Should correctly calculate diff after encoding original event when they are different', async () => { @@ -428,8 +413,8 @@ describe('GrouperWorker', () => { await worker.handle({ ...generatedTask, - event: { - ...generatedTask.event, + payload: { + ...generatedTask.payload, context: { testField: 9, }, @@ -490,8 +475,11 @@ describe('GrouperWorker', () => { }); test('should group events with titles matching one pattern', async () => { - jest.spyOn(GrouperWorker.prototype as any, 'getProjectPatterns').mockResolvedValue([ - { _id: new mongodb.ObjectId, pattern: 'New error .*' } + jest.spyOn(GrouperWorker.prototype as 
any, 'getProjectPatterns').mockResolvedValue([ + { + _id: new mongodb.ObjectId(), + pattern: 'New error .*', + }, ]); const findMatchingPatternSpy = jest.spyOn(GrouperWorker.prototype as any, 'findMatchingPattern'); @@ -509,9 +497,18 @@ describe('GrouperWorker', () => { test('should handle multiple patterns and match the first one that applies', async () => { jest.spyOn(GrouperWorker.prototype as any, 'getProjectPatterns').mockResolvedValue([ - { _id: mongodb.ObjectId(), pattern: 'Database error: .*' }, - { _id: mongodb.ObjectId(), pattern: 'Network error: .*' }, - { _id: mongodb.ObjectId(), pattern: 'New error: .*' }, + { + _id: mongodb.ObjectId(), + pattern: 'Database error: .*', + }, + { + _id: mongodb.ObjectId(), + pattern: 'Network error: .*', + }, + { + _id: mongodb.ObjectId(), + pattern: 'New error: .*', + }, ]); await worker.handle(generateTask({ title: 'Database error: connection failed' })); @@ -528,8 +525,14 @@ describe('GrouperWorker', () => { test('should handle complex regex patterns', async () => { jest.spyOn(GrouperWorker.prototype as any, 'getProjectPatterns').mockResolvedValue([ - { _id: mongodb.ObjectId(), pattern: 'Error \\d{3}: [A-Za-z\\s]+ in file .*\\.js$' }, - { _id: mongodb.ObjectId(), pattern: 'Warning \\d{3}: .*' }, + { + _id: mongodb.ObjectId(), + pattern: 'Error \\d{3}: [A-Za-z\\s]+ in file .*\\.js$', + }, + { + _id: mongodb.ObjectId(), + pattern: 'Warning \\d{3}: .*', + }, ]); await worker.handle(generateTask({ title: 'Error 404: Not Found in file index.js' })); @@ -546,8 +549,14 @@ describe('GrouperWorker', () => { test('should maintain separate groups for different patterns', async () => { jest.spyOn(GrouperWorker.prototype as any, 'getProjectPatterns').mockResolvedValue([ - { _id: mongodb.ObjectId(), pattern: 'TypeError: .*' }, - { _id: mongodb.ObjectId(), pattern: 'ReferenceError: .*' }, + { + _id: mongodb.ObjectId(), + pattern: 'TypeError: .*', + }, + { + _id: mongodb.ObjectId(), + pattern: 'ReferenceError: .*', + }, ]); await 
worker.handle(generateTask({ title: 'TypeError: null is not an object' })); @@ -568,8 +577,14 @@ describe('GrouperWorker', () => { test('should handle patterns with special regex characters', async () => { jest.spyOn(GrouperWorker.prototype as any, 'getProjectPatterns').mockResolvedValue([ - { _id: new mongodb.ObjectID(), pattern: 'Error \\[\\d+\\]: .*'} , - { _id: new mongodb.ObjectID(), pattern: 'Warning \\(code=\\d+\\): .*'} , + { + _id: new mongodb.ObjectID(), + pattern: 'Error \\[\\d+\\]: .*', + }, + { + _id: new mongodb.ObjectID(), + pattern: 'Warning \\(code=\\d+\\): .*', + }, ]); await worker.handle(generateTask({ title: 'Error [123]: Database connection failed' })); diff --git a/workers/grouper/tests/mocks/generateEvent.ts b/workers/grouper/tests/mocks/generateEvent.ts index a89a4d549..5d373d21b 100644 --- a/workers/grouper/tests/mocks/generateEvent.ts +++ b/workers/grouper/tests/mocks/generateEvent.ts @@ -1,4 +1,4 @@ -import type { EventAddons, EventDataAccepted } from '@hawk.so/types'; +import type { EventData, JavaScriptAddons } from '@hawk.so/types'; import { generateRandomId } from './randomId'; /** @@ -11,10 +11,9 @@ const userIdMock = generateRandomId(); * * @param event - Partial event data to override default values */ -export function generateEvent(event: Partial> = undefined): EventDataAccepted { +export function generateEvent(event: Partial> = undefined): EventData { return { title: 'Hawk client catcher test', - timestamp: (new Date()).getTime(), backtrace: [], user: { id: userIdMock, @@ -24,11 +23,19 @@ export function generateEvent(event: Partial> = u 'ima$ge.jpg': 'img', }, addons: { + window: { + innerWidth: 1024, + innerHeight: 768, + }, + userAgent: 'Hawk client catcher test', + url: 'https://hawk.so', vue: { props: { 'test-test': false, 'ima$ge.jpg': 'img', }, + lifecycle: 'Mounted', + component: 'TestComponent', }, }, ...event, diff --git a/workers/grouper/tests/mocks/generateTask.ts b/workers/grouper/tests/mocks/generateTask.ts index 
c92ed3ad3..57cb9f628 100644 --- a/workers/grouper/tests/mocks/generateTask.ts +++ b/workers/grouper/tests/mocks/generateTask.ts @@ -1,4 +1,4 @@ -import type { EventAddons, EventDataAccepted } from '@hawk.so/types'; +import type { ErrorsCatcherType, EventData, JavaScriptAddons } from '@hawk.so/types'; import type { GroupWorkerTask } from '../../types/group-worker-task'; import { projectIdMock } from './projectId'; import { generateEvent } from './generateEvent'; @@ -9,11 +9,12 @@ import { generateEvent } from './generateEvent'; * @param event - allows to override some event properties in generated task */ export function generateTask( - event: Partial> = undefined -): GroupWorkerTask { + event: Partial> = undefined +): GroupWorkerTask { return { projectId: projectIdMock, - catcherType: 'grouper', - event: generateEvent(event), + catcherType: 'errors/javascript', + payload: generateEvent(event), + timestamp: new Date().getTime(), }; } diff --git a/workers/grouper/types/group-worker-task.d.ts b/workers/grouper/types/group-worker-task.d.ts index 314983569..76dd54a45 100644 --- a/workers/grouper/types/group-worker-task.d.ts +++ b/workers/grouper/types/group-worker-task.d.ts @@ -1,4 +1,4 @@ -import type { EventDataAccepted, EventAddons } from '@hawk.so/types'; +import type { CatcherMessageAccepted, CatcherMessagePayload, ErrorsCatcherType } from '@hawk.so/types'; import type { WorkerTask } from '../../../lib/types/worker-task'; import type { Delta } from '@n1ru4l/json-patch-plus'; @@ -6,7 +6,7 @@ import type { Delta } from '@n1ru4l/json-patch-plus'; * Language-workers adds tasks for Group Worker in this format. 
* Group Worker gets this tasks (events from language-workers) and saves it to the DB */ -export interface GroupWorkerTask extends WorkerTask { +export interface GroupWorkerTask extends WorkerTask, CatcherMessageAccepted { /** * Project where error was occurred */ @@ -15,15 +15,20 @@ export interface GroupWorkerTask extends WorkerTask { /** * What type of event we've accept */ - catcherType: string; + catcherType: CatcherType; /** - * Event that should be grouped + * Payload of the event that should be grouped */ - event: EventDataAccepted; + payload: CatcherMessagePayload; + + /** + * Unix timestamp of the event + */ + timestamp: number; } /** * Delta of the original event and the repetition */ -export type RepetitionDelta = Delta; +export type RepetitionDelta = Delta | undefined; diff --git a/workers/javascript/src/index.ts b/workers/javascript/src/index.ts index 92b3ee660..5ad47fad0 100644 --- a/workers/javascript/src/index.ts +++ b/workers/javascript/src/index.ts @@ -9,7 +9,7 @@ import * as pkg from '../package.json'; import { JavaScriptEventWorkerTask } from '../types/javascript-event-worker-task'; import HawkCatcher from '@hawk.so/nodejs'; import Crypto from '../../../lib/utils/crypto'; -import { BacktraceFrame, SourceCodeLine, SourceMapDataExtended } from '@hawk.so/types'; +import { BacktraceFrame, CatcherMessagePayload, CatcherMessageType, ErrorsCatcherType, SourceCodeLine, SourceMapDataExtended } from '@hawk.so/types'; import { beautifyUserAgent } from './utils'; import { Collection } from 'mongodb'; import { parse } from '@babel/parser'; @@ -22,7 +22,7 @@ export default class JavascriptEventWorker extends EventWorker { /** * Worker type (will pull tasks from Registry queue with the same name) */ - public readonly type: string = pkg.workerType; + public readonly type: ErrorsCatcherType = pkg.workerType as ErrorsCatcherType; /** * Releases collection in database @@ -81,9 +81,10 @@ export default class JavascriptEventWorker extends EventWorker { await 
this.addTask(WorkerNames.GROUPER, { projectId: event.projectId, - catcherType: this.type, - event: event.payload, - } as GroupWorkerTask); + catcherType: this.type as CatcherMessageType, + payload: event.payload as CatcherMessagePayload, + timestamp: event.timestamp, + } as GroupWorkerTask); } /** @@ -251,7 +252,7 @@ export default class JavascriptEventWorker extends EventWorker { * * @param sourceCode - content of the source file * @param line - number of the line from the stack trace - * @returns - string of the function context or null if it could not be parsed + * @returns {string | null} - string of the function context or null if it could not be parsed */ private getFunctionContext(sourceCode: string, line: number): string | null { let functionName: string | null = null; diff --git a/workers/javascript/tests/index.test.ts b/workers/javascript/tests/index.test.ts index b13487a30..c02fb0d38 100644 --- a/workers/javascript/tests/index.test.ts +++ b/workers/javascript/tests/index.test.ts @@ -101,10 +101,10 @@ describe('JavaScript event worker', () => { return { catcherType: 'errors/javascript', projectId: objectIdAsString(), + timestamp: Date.now(), payload: { title: 'Mocked event for JS event worker', type: 'Error', - timestamp: Date.now(), release: '3fa0f290c014', addons: { window: { @@ -182,7 +182,7 @@ describe('JavaScript event worker', () => { expect.objectContaining({ projectId: workerEvent.projectId, catcherType: workerEvent.catcherType, - event: workerEvent.payload, + payload: workerEvent.payload, }) ); await worker.finish(); @@ -214,7 +214,8 @@ describe('JavaScript event worker', () => { { projectId: workerEvent.projectId, catcherType: workerEvent.catcherType, - event: { + timestamp: workerEvent.timestamp, + payload: { ...workerEvent.payload, addons: { ...workerEvent.payload.addons, @@ -262,7 +263,8 @@ describe('JavaScript event worker', () => { { projectId: workerEvent.projectId, catcherType: workerEvent.catcherType, - event: { + timestamp: 
workerEvent.timestamp, + payload: { ...workerEvent.payload, backtrace: [ expect.objectContaining(parsedBacktraceFrame), diff --git a/workers/javascript/types/javascript-event-worker-task.d.ts b/workers/javascript/types/javascript-event-worker-task.d.ts index bd51e6a3b..7e49aa30f 100644 --- a/workers/javascript/types/javascript-event-worker-task.d.ts +++ b/workers/javascript/types/javascript-event-worker-task.d.ts @@ -1,17 +1,6 @@ -import { EventWorkerTask } from '../../../lib/types/event-worker-task'; -import { DecodedEventData, JavaScriptAddons } from '@hawk.so/types'; - -/** - * Describe a context passed from JavaScript Catcher - */ -interface JavaScriptEventPayload extends DecodedEventData {} +import { CatcherMessageAccepted } from '@hawk.so/types'; /** * Format of task for JavaScript Event Worker */ -export interface JavaScriptEventWorkerTask extends EventWorkerTask { - /** - * Language-specific payload - */ - payload: JavaScriptEventPayload; -} +export interface JavaScriptEventWorkerTask extends CatcherMessageAccepted<'errors/javascript'> {} diff --git a/workers/limiter/src/dbHelper.ts b/workers/limiter/src/dbHelper.ts index 7ad4c3e53..a0e35e27a 100644 --- a/workers/limiter/src/dbHelper.ts +++ b/workers/limiter/src/dbHelper.ts @@ -124,16 +124,9 @@ export class DbHelper { const eventsCollection = this.eventsDbConnection.collection('events:' + project._id.toString()); const query = { - $or: [ { - timestamp: { - $gt: since, - }, + timestamp: { + $gt: since, }, - { - 'payload.timestamp': { - $gt: since, - }, - } ], }; const repetitionsCount = await repetitionsCollection.countDocuments(query); diff --git a/workers/limiter/src/index.ts b/workers/limiter/src/index.ts index 8bb2d2c32..5309a474e 100644 --- a/workers/limiter/src/index.ts +++ b/workers/limiter/src/index.ts @@ -131,9 +131,9 @@ export default class LimiterWorker extends Worker { const { updatedWorkspace } = await this.prepareWorkspaceUsageUpdate(workspace, workspaceProjects); updatedWorkspace.isBlocked = 
true; - await this.dbHelper.updateWorkspacesEventsCountAndIsBlocked([updatedWorkspace]); + await this.dbHelper.updateWorkspacesEventsCountAndIsBlocked([ updatedWorkspace ]); - this.logger.info('workspace blocked in db ', event.workspaceId) + this.logger.info('workspace blocked in db ', event.workspaceId); await this.redis.appendBannedProjects(projectIds); @@ -175,7 +175,7 @@ export default class LimiterWorker extends Worker { updatedWorkspace.isBlocked = false; - await this.dbHelper.updateWorkspacesEventsCountAndIsBlocked([updatedWorkspace]); + await this.dbHelper.updateWorkspacesEventsCountAndIsBlocked([ updatedWorkspace ]); await this.redis.removeBannedProjects(projectIds); this.sendSingleWorkspaceReport(workspaceProjects, updatedWorkspace, 'unblocked'); diff --git a/workers/limiter/tests/dbHelper.test.ts b/workers/limiter/tests/dbHelper.test.ts index bdff1cb57..11a1bf20d 100644 --- a/workers/limiter/tests/dbHelper.test.ts +++ b/workers/limiter/tests/dbHelper.test.ts @@ -72,8 +72,8 @@ describe('DbHelper', () => { groupHash: 'ade987831d0d0d167aeea685b49db164eb4e113fd027858eef7f69d049357f62', payload: { title: 'Mocked event', - timestamp: 1586892935, }, + timestamp: 1586892935, }; }; diff --git a/workers/limiter/tests/index.test.ts b/workers/limiter/tests/index.test.ts index 2ca0f8d51..f20e18fbf 100644 --- a/workers/limiter/tests/index.test.ts +++ b/workers/limiter/tests/index.test.ts @@ -87,9 +87,9 @@ describe('Limiter worker', () => { usersAffected: 0, visitedBy: [], groupHash: 'ade987831d0d0d167aeea685b49db164eb4e113fd027858eef7f69d049357f62', + timestamp: 1586892935, payload: { title: 'Mocked event', - timestamp: 1586892935, }, }; }; diff --git a/workers/notifier/types/notifier-task.ts b/workers/notifier/types/notifier-task.ts index e7feb7c88..f773660c9 100644 --- a/workers/notifier/types/notifier-task.ts +++ b/workers/notifier/types/notifier-task.ts @@ -1,10 +1,10 @@ -import { EventDataAccepted, EventAddons } from '@hawk.so/types'; +import { EventAddons, 
EventData } from '@hawk.so/types'; import { WorkerTask } from '../../../lib/types/worker-task'; /** * Data needed for NotificationWorker from GrouperWorker */ -export type NotifierEvent = Pick, 'title'> & { +export type NotifierEvent = Pick, 'title'> & { /** * Event group hash */ diff --git a/workers/release/src/index.ts b/workers/release/src/index.ts index 6fd3a8201..68b004a08 100644 --- a/workers/release/src/index.ts +++ b/workers/release/src/index.ts @@ -194,7 +194,7 @@ export default class ReleaseWorker extends Worker { */ savedFiles.forEach(file => { delete file.content; - }) + }); /** * Filter unsaved maps @@ -234,9 +234,7 @@ export default class ReleaseWorker extends Worker { }, { session }); }); } catch (error) { - this.logger.error('Can\'t extract release info:\n', { - error, - }); + this.logger.error(`Can't extract release info:\n${JSON.stringify(error)}`); throw new NonCriticalError('Can\'t parse source-map file'); } finally { diff --git a/workers/sender/tests/worker.test.ts b/workers/sender/tests/worker.test.ts index ead84b6fc..73a9b5ae4 100644 --- a/workers/sender/tests/worker.test.ts +++ b/workers/sender/tests/worker.test.ts @@ -60,9 +60,9 @@ const projectQueryMock = jest.fn(() => ({ const eventsQueryMock = jest.fn(() => ({ totalCount: 10, + timestamp: Date.now(), payload: { title: 'New event', - timestamp: Date.now(), backtrace: [ { file: 'file', line: 1, diff --git a/workers/sentry/src/index.ts b/workers/sentry/src/index.ts index d66a32c8e..79660f931 100644 --- a/workers/sentry/src/index.ts +++ b/workers/sentry/src/index.ts @@ -6,7 +6,7 @@ import { Envelope, EnvelopeItem, EventEnvelope, EventItem, parseEnvelope } from import { Worker } from '../../../lib/worker'; import { composeAddons, composeBacktrace, composeContext, composeTitle, composeUserData } from './utils/converter'; import { b64decode } from './utils/base64'; -import { DecodedEventData, EventAddons } from '@hawk.so/types'; +import { CatcherMessagePayload } from '@hawk.so/types'; import { 
TextDecoder } from 'util'; import { JavaScriptEventWorkerTask } from '../../javascript/types/javascript-event-worker-task'; /** @@ -67,9 +67,6 @@ export default class SentryEventWorker extends Worker { if (itemHeader.type !== 'event') { return; } - - const hawkEvent = this.transformToHawkFormat(envelopeHeaders as EventEnvelope[0], item as EventItem, projectId); - const payloadHasSDK = typeof itemPayload === 'object' && 'sdk' in itemPayload; /** @@ -77,16 +74,18 @@ export default class SentryEventWorker extends Worker { */ const sentryJsSDK = ['browser', 'react', 'vue', 'angular', 'capacirtor', 'electron']; + const isJsSDK = payloadHasSDK && sentryJsSDK.includes(itemPayload.sdk.name); + + const hawkEvent = this.transformToHawkFormat(envelopeHeaders as EventEnvelope[0], item as EventItem, projectId, isJsSDK); + /** * If we have release attached to the event */ - if (payloadHasSDK && sentryJsSDK.includes(itemPayload.sdk.name) && hawkEvent.payload.release !== undefined) { + if (isJsSDK) { await this.addTask(WorkerNames.JAVASCRIPT, hawkEvent as JavaScriptEventWorkerTask); - - return; + } else { + await this.addTask(WorkerNames.DEFAULT, hawkEvent as DefaultEventWorkerTask); } - - await this.addTask(WorkerNames.DEFAULT, hawkEvent as DefaultEventWorkerTask); } catch (error) { this.logger.error('Error handling envelope item:', error); this.logger.info('👇 Here is the problematic item:'); @@ -101,11 +100,13 @@ export default class SentryEventWorker extends Worker { * @param envelopeHeader - Sentry envelope header * @param eventItem - Sentry event item * @param projectId - Hawk project ID + * @param isJsSDK - Whether the event is from a Sentry JavaScript-related SDK */ private transformToHawkFormat( envelopeHeader: EventEnvelope[0], eventItem: EventItem, - projectId: string + projectId: string, + isJsSDK: boolean ): DefaultEventWorkerTask | JavaScriptEventWorkerTask { /* eslint-disable @typescript-eslint/naming-convention */ const { sent_at, trace } = envelopeHeader; @@ 
-138,10 +139,9 @@ export default class SentryEventWorker extends Worker { const user = composeUserData(eventPayload); const addons = composeAddons(eventPayload); - const event: DecodedEventData = { + const event: CatcherMessagePayload<'errors/default' | 'errors/javascript'> = { title, type: eventPayload.level || 'error', - timestamp: sentAtUnix, catcherVersion: pkg.version, }; @@ -158,7 +158,9 @@ export default class SentryEventWorker extends Worker { } if (addons) { - event.addons = addons; + event.addons = { + sentry: addons, + }; } /** @@ -169,10 +171,18 @@ export default class SentryEventWorker extends Worker { event.release = eventPayload.release || trace?.release; } - return { - projectId, - catcherType: this.type, - payload: event, - }; + return isJsSDK + ? { + projectId, + catcherType: 'errors/javascript', + payload: event as CatcherMessagePayload<'errors/javascript'>, + timestamp: sentAtUnix, + } + : { + projectId, + catcherType: 'errors/default', + payload: event as CatcherMessagePayload<'errors/default'>, + timestamp: sentAtUnix, + }; } } diff --git a/workers/sentry/src/utils/converter.ts b/workers/sentry/src/utils/converter.ts index 5f7bc1029..b3fac8492 100644 --- a/workers/sentry/src/utils/converter.ts +++ b/workers/sentry/src/utils/converter.ts @@ -1,4 +1,4 @@ -import { BacktraceFrame, DefaultAddons, EventContext, EventData, Json } from '@hawk.so/types'; +import { BacktraceFrame, DefaultAddons, EventContext, EventData, Json, SentryAddons } from '@hawk.so/types'; import { Event as SentryEvent } from '@sentry/core'; /** @@ -111,7 +111,7 @@ export function composeContext(eventPayload: SentryEvent): EventContext | undefi * * @param eventPayload - Sentry event payload */ -export function composeAddons(eventPayload: SentryEvent): EventData['addons'] { +export function composeAddons(eventPayload: SentryEvent): SentryAddons { const addons: Record = {}; const fieldsToInclude: (keyof SentryEvent)[] = [ @@ -122,7 +122,6 @@ export function 
composeAddons(eventPayload: SentryEvent): EventData { platform: 'javascript', // eslint-disable-next-line @typescript-eslint/naming-convention server_name: 'test-server', - release: '1.0.0', dist: '1.0.0', environment: 'production', request: { url: 'https://test.com' }, diff --git a/workers/sentry/tests/index.test.ts b/workers/sentry/tests/index.test.ts index e8296fb5e..a458ef380 100644 --- a/workers/sentry/tests/index.test.ts +++ b/workers/sentry/tests/index.test.ts @@ -3,14 +3,14 @@ import '../../../env-test'; import { mockedAmqpChannel } from '../../../jest.setup.js'; import { EventEnvelope, serializeEnvelope, SeverityLevel } from '@sentry/core'; import { b64encode, base64toBuffer } from '../src/utils/base64'; -import { EventWorkerTask } from '../../../lib/types/event-worker-task'; +import { CatcherMessagePayload, CatcherMessageType } from '@hawk.so/types'; import { SentryEventWorkerTask } from '../types/sentry-event-worker-task'; /** * Worker adds a task to the queue with buffered payload * So we need to get parse it back to compare */ -function getAddTaskPayloadFromLastCall(): EventWorkerTask { +function getAddTaskPayloadFromLastCall(): CatcherMessagePayload { /** * Get last rabbit sendToQueue call */ @@ -270,6 +270,8 @@ describe('SentryEventWorker', () => { '2024-01-01T00:00:00.000+00:00', ]; + jest.spyOn(worker, 'addTask'); + for (const timestamp of timestamps) { const eventEnvelope: EventEnvelope = [ { @@ -291,13 +293,21 @@ describe('SentryEventWorker', () => { catcherType: 'external/sentry', }); - const addedTaskPayload = getAddTaskPayloadFromLastCall(); - - expect(addedTaskPayload).toMatchObject({ - payload: expect.objectContaining({ - timestamp: 1704067200, - }), - }); + expect(worker.addTask).toHaveBeenCalledWith('errors/default', expect.objectContaining({ + catcherType: 'errors/default', + timestamp: 1704067200, + projectId: '123', + payload: { + addons: { + sentry: { + message: 'Test timestamp', + }, + }, + catcherVersion: '1.0.1', + title: 'Unknown: 
', + type: 'error', + }, + })); } }); @@ -455,9 +465,11 @@ describe('SentryEventWorker', () => { expect(addedTaskPayload).toMatchObject({ payload: expect.objectContaining({ addons: { - platform: 'javascript', - environment: 'production', - request: { url: 'https://test.com' }, + sentry: { + platform: 'javascript', + environment: 'production', + request: { url: 'https://test.com' }, + }, }, }), }); @@ -602,7 +614,7 @@ describe('SentryEventWorker', () => { }); expect(worker.addTask).toHaveBeenCalledWith('errors/javascript', expect.objectContaining({ - catcherType: 'external/sentry', + catcherType: 'errors/javascript', projectId: '123', payload: expect.objectContaining({ release: '1.0.0', @@ -638,48 +650,13 @@ describe('SentryEventWorker', () => { }); expect(worker.addTask).toHaveBeenCalledWith('errors/default', expect.objectContaining({ - catcherType: 'external/sentry', + catcherType: 'errors/default', projectId: '123', payload: expect.objectContaining({ release: '1.0.0', }), })); }); - - it('should route to Default worker when SDK is in sentryJsSDK list but release is missing', async () => { - const eventEnvelope: EventEnvelope = [ - { - /* eslint-disable @typescript-eslint/naming-convention */ - event_id: '123e4567-e89b-12d3-a456-426614174000', - sent_at: '2024-01-01T00:00:00.000Z', - /* eslint-enable @typescript-eslint/naming-convention */ - }, - [ - [ - { type: 'event' }, - { - sdk: { name: 'browser' }, - }, - ], - ], - ]; - - await worker.handle({ - payload: { - envelope: b64encode(serializeEnvelope(eventEnvelope) as string), - }, - projectId: '123', - catcherType: 'external/sentry', - }); - - expect(worker.addTask).toHaveBeenCalledWith('errors/default', expect.objectContaining({ - catcherType: 'external/sentry', - projectId: '123', - payload: expect.not.objectContaining({ - release: expect.any(String), - }), - })); - }); }); describe('(real-data tests)', () => { @@ -696,9 +673,9 @@ describe('SentryEventWorker', () => { expect(addedTaskPayload).toMatchObject({ 
/* eslint-disable @typescript-eslint/naming-convention */ + timestamp: 1734530412, payload: expect.objectContaining({ release: '35c42f2ddf524bbdafe22439b981ddfd0fb3b5ad', - timestamp: 1734530412, type: 'error', title: 'ZeroDivisionError: division by zero', backtrace: [ @@ -762,6 +739,7 @@ describe('SentryEventWorker', () => { ], }, ], + catcherVersion: '1.0.1', context: { runtime: { build: '3.13.1 (main, Dec 3 2024, 17:59:52) [Clang 16.0.0 (clang-1600.0.26.4)]', @@ -775,21 +753,22 @@ describe('SentryEventWorker', () => { }, }, addons: { - environment: 'production', - platform: 'python', - extra: { - 'sys.argv': [ 'sentry-prod.py' ], - }, - level: 'error', - modules: { - certifi: '2024.8.30', - pip: '24.3.1', - 'sentry-sdk': '2.19.0', - urllib3: '2.2.3', + sentry: { + environment: 'production', + platform: 'python', + extra: { + 'sys.argv': [ 'sentry-prod.py' ], + }, + level: 'error', + modules: { + certifi: '2024.8.30', + pip: '24.3.1', + 'sentry-sdk': '2.19.0', + urllib3: '2.2.3', + }, + server_name: 'MacBook-Pro-Aleksandr-5.local', + timestamp: '2024-12-18T14:00:12.863684Z', }, - release: '35c42f2ddf524bbdafe22439b981ddfd0fb3b5ad', - server_name: 'MacBook-Pro-Aleksandr-5.local', - timestamp: '2024-12-18T14:00:12.863684Z', }, }), /* eslint-enable @typescript-eslint/naming-convention */ diff --git a/workers/slack/tests/__mocks__/event-notify.ts b/workers/slack/tests/__mocks__/event-notify.ts index fb98c08e7..77a53b54a 100644 --- a/workers/slack/tests/__mocks__/event-notify.ts +++ b/workers/slack/tests/__mocks__/event-notify.ts @@ -11,9 +11,9 @@ export default { { event: { totalCount: 10, + timestamp: Date.now(), payload: { title: 'New event', - timestamp: Date.now(), backtrace: [ { file: 'file', line: 1, diff --git a/workers/slack/tests/__mocks__/several-events-notify.ts b/workers/slack/tests/__mocks__/several-events-notify.ts index 4ccdff5c0..24ed30e87 100644 --- a/workers/slack/tests/__mocks__/several-events-notify.ts +++ 
b/workers/slack/tests/__mocks__/several-events-notify.ts @@ -12,9 +12,9 @@ export default { { event: { totalCount: 10, + timestamp: Date.now(), payload: { title: 'New event', - timestamp: Date.now(), backtrace: [ { file: 'file', line: 1, diff --git a/workers/telegram/tests/provider.test.ts b/workers/telegram/tests/provider.test.ts index 712e6ee7e..7cb4ca4b2 100644 --- a/workers/telegram/tests/provider.test.ts +++ b/workers/telegram/tests/provider.test.ts @@ -22,9 +22,9 @@ describe('TelegramProvider', () => { events: [ { event: { totalCount: 10, + timestamp: Date.now(), payload: { title: 'New event', - timestamp: Date.now(), backtrace: [ { file: 'file', line: 1, @@ -74,9 +74,9 @@ describe('TelegramProvider', () => { events: [ { event: { totalCount: 10, + timestamp: Date.now(), payload: { title: 'New event', - timestamp: Date.now(), backtrace: [ { file: 'file', line: 1, diff --git a/yarn.lock b/yarn.lock index 6de3c0452..2180bae44 100644 --- a/yarn.lock +++ b/yarn.lock @@ -428,10 +428,10 @@ dependencies: "@types/mongodb" "^3.5.34" -"@hawk.so/types@^0.1.29": - version "0.1.29" - resolved "https://registry.yarnpkg.com/@hawk.so/types/-/types-0.1.29.tgz#95b32d2a9ea7771ecceede50e419741f2fc0746a" - integrity sha512-skPS8ypvlR0hAMDxxJ6Yx96EhssLuOSrNm6lZS08FZESA/crldxK4D7OHV/EPxJccmEJLegXq3PEk4zJfC76YA== +"@hawk.so/types@^0.1.32": + version "0.1.32" + resolved "https://registry.yarnpkg.com/@hawk.so/types/-/types-0.1.32.tgz#5eb662e91da922e1cbab7af0cf03bfa567ee98df" + integrity sha512-7gdx/fq31iLxmc0hZKs+wPYqtRT4rLvTi9p4am2My9SQ4t/l8if5QEfxh6G4WABKOmhiZLchivFA9Oj+Nn5EcQ== dependencies: "@types/mongodb" "^3.5.34"