diff --git a/migrations/20241111000000-add-payload-title-index-for-events.js b/migrations/20241111000000-add-payload-title-index-for-events.js new file mode 100644 index 00000000..e142becf --- /dev/null +++ b/migrations/20241111000000-add-payload-title-index-for-events.js @@ -0,0 +1,55 @@ +/** + * This migration creates indexes for all collections on payload.title field + */ + +/** + * Index name for payload.title field + */ +const payloadTitleIndexName = 'payloadTitle'; + +module.exports = { + async up(db) { + const collections = await db.listCollections({}, { + authorizedCollections: true, + nameOnly: true, + }).toArray(); + + const targetCollections = []; + + collections.forEach((collection) => { + if (/events/.test(collection.name)) { + targetCollections.push(collection.name); + } + }); + + for (const collectionName of targetCollections) { + const hasIndexAlready = await db.collection(collectionName).indexExists(payloadTitleIndexName); + + if (!hasIndexAlready) { + await db.collection(collectionName).createIndex({ + 'payload.title': 1, + }, { + name: payloadTitleIndexName, + }); + } + } + }, + async down(db) { + const collections = await db.listCollections({}, { + authorizedCollections: true, + nameOnly: true, + }).toArray(); + + const targetCollections = []; + + collections.forEach((collection) => { + if (/events/.test(collection.name)) { + targetCollections.push(collection.name); + } + }); + + for (const collectionName of targetCollections) { + await db.collection(collectionName).dropIndex(payloadTitleIndexName); + } + }, +}; \ No newline at end of file diff --git a/package.json b/package.json index a02a10d5..c81aee2e 100644 --- a/package.json +++ b/package.json @@ -47,7 +47,7 @@ }, "dependencies": { "@hawk.so/nodejs": "^3.1.1", - "@hawk.so/types": "^0.1.26", + "@hawk.so/types": "^0.1.28", "@types/amqplib": "^0.8.2", "@types/jest": "^29.2.3", "@types/mongodb": "^3.5.15", diff --git a/workers/archiver/tests/index.test.ts b/workers/archiver/tests/index.test.ts 
index 1a088310..d214533c 100644 --- a/workers/archiver/tests/index.test.ts +++ b/workers/archiver/tests/index.test.ts @@ -18,6 +18,7 @@ process.env.MAX_DAYS_NUMBER = '30'; const mockedProject: ProjectDBScheme = { notifications: [], + eventGroupingPatterns: [], token: '5342', integrationId: 'eyJpbnRlZ3JhdGlvbklkIjoiMzg3NGNkOWMtZjJiYS00ZDVkLTk5ZmQtM2UzZjYzMDcxYmJhIiwic2VjcmV0IjoiMGZhM2JkM2EtYmMyZC00YWRiLThlMWMtNjg2OGY0MzM1YjRiIn0=', uidAdded: new ObjectId('5e4ff518628a6c714515f4db'), @@ -53,7 +54,7 @@ describe('Archiver worker', () => { beforeEach(async () => { await db.collection('releases').deleteMany({}); - }) + }); test('Should correctly remove old events', async () => { /** @@ -129,7 +130,7 @@ describe('Archiver worker', () => { /** * Insert one release with object id based on current time, it should not be removed */ - await db.collection('releases').insert(releasesToStay) + await db.collection('releases').insert(releasesToStay); const worker = new ArchiverWorker(); @@ -173,9 +174,9 @@ describe('Archiver worker', () => { expect(newReleasesCollection).toEqual([ mockedReleases[mockedReleasesLength - 2], mockedReleases[mockedReleasesLength - 1], - ]) + ]); await worker.finish(); - }) + }); afterAll(async () => { await db.dropCollection('releases'); diff --git a/workers/grouper/src/index.ts b/workers/grouper/src/index.ts index a4731fac..7e35ecc8 100644 --- a/workers/grouper/src/index.ts +++ b/workers/grouper/src/index.ts @@ -32,9 +32,14 @@ export default class GrouperWorker extends Worker { public readonly type: string = pkg.workerType; /** - * Database Controller + * Events database Controller */ - private db: DatabaseController = new DatabaseController(process.env.MONGO_EVENTS_DATABASE_URI); + private eventsDb: DatabaseController = new DatabaseController(process.env.MONGO_EVENTS_DATABASE_URI); + + /** + * Accounts database Controller + */ + private accountsDb: DatabaseController = new DatabaseController(process.env.MONGO_ACCOUNTS_DATABASE_URI); /** * This class 
will filter sensitive information @@ -52,7 +57,8 @@ export default class GrouperWorker extends Worker { public async start(): Promise { console.log('starting grouper worker'); - await this.db.connect(); + await this.eventsDb.connect(); + await this.accountsDb.connect(); this.prepareCache(); console.log('redis initializing'); @@ -67,7 +73,8 @@ export default class GrouperWorker extends Worker { public async finish(): Promise { await super.finish(); this.prepareCache(); - await this.db.close(); + await this.eventsDb.close(); + await this.accountsDb.close(); await this.redis.close(); } @@ -85,12 +92,13 @@ export default class GrouperWorker extends Worker { let existedEvent = await this.getEvent(task.projectId, uniqueEventHash); /** - * If we couldn't group by group hash (title), try grouping by Levenshtein distance with last N events + * If we couldn't group by group hash (title), try grouping by Levenshtein distance or patterns */ if (!existedEvent) { const similarEvent = await this.findSimilarEvent(task.projectId, task.event); if (similarEvent) { + this.logger.info(`similar event: ${JSON.stringify(similarEvent)}`); /** * Override group hash with found event's group hash */ @@ -226,7 +234,7 @@ export default class GrouperWorker extends Worker { } /** - * Tries to find events with a small Levenshtein distance of a title + * Tries to find events with a small Levenshtein distance of a title or by matching grouping patterns * * @param projectId - where to find * @param event - event to compare @@ -237,12 +245,89 @@ export default class GrouperWorker extends Worker { const lastUniqueEvents = await this.findLastEvents(projectId, eventsCountToCompare); - return lastUniqueEvents.filter(prevEvent => { + /** + * First try to find by Levenshtein distance + */ + const similarByLevenshtein = lastUniqueEvents.filter(prevEvent => { const distance = levenshtein(event.title, prevEvent.payload.title); const threshold = event.title.length * diffTreshold; return distance < threshold; 
}).pop(); + + if (similarByLevenshtein) { + return similarByLevenshtein; + } + + /** + * If no match by Levenshtein, try matching by patterns + */ + const patterns = await this.getProjectPatterns(projectId); + + if (patterns && patterns.length > 0) { + const matchingPattern = await this.findMatchingPattern(patterns, event); + + if (matchingPattern !== null) { + const originalEvent = await this.cache.get(`${projectId}:${matchingPattern}:originalEvent`, async () => { + return await this.eventsDb.getConnection() + .collection(`events:${projectId}`) + .findOne( + { 'payload.title': { $regex: matchingPattern } }, + { sort: { _id: 1 } } + ); + }); + + this.logger.info(`original event for pattern: ${JSON.stringify(originalEvent)}`); + + if (originalEvent) { + return originalEvent; + } + } + } + + return undefined; + } + + /** + * Method that returns the matched pattern for an event; if the event does not match any of the patterns, returns null + * + * @param patterns - list of the patterns of the related project + * @param event - event whose title would be checked + * @returns {string | null} matched pattern or null if no match + */ + private async findMatchingPattern(patterns: string[], event: EventDataAccepted): Promise { + if (!patterns || patterns.length === 0) { + return null; + } + + return patterns.filter(pattern => { + const patternRegExp = new RegExp(pattern); + + return event.title.match(patternRegExp); + }).pop() || null; + } + + /** + * Method that gets event patterns for a project + * + * @param projectId - id of the project to find related event patterns + * @returns {string[]} list of event grouping patterns of the project + */ + private async getProjectPatterns(projectId: string): Promise { + return this.cache.get(`project:${projectId}:patterns`, async () => { + const project = await this.accountsDb.getConnection() + .collection('projects') + .findOne({ + _id: new mongodb.ObjectId(projectId), + }); + + return project?.eventGroupingPatterns || []; + }, + /** + * 
Cache project patterns for 5 minutes since they don't change frequently + */ + /* eslint-disable-next-line @typescript-eslint/no-magic-numbers */ + 5 * TimeMs.MINUTE / MS_IN_SEC); } /** @@ -250,10 +335,11 @@ export default class GrouperWorker extends Worker { * * @param projectId - where to find * @param count - how many events to return + * @returns {GroupedEventDBScheme[]} list of the last N unique events */ private findLastEvents(projectId: string, count): Promise { return this.cache.get(`last:${count}:eventsOf:${projectId}`, async () => { - return this.db.getConnection() + return this.eventsDb.getConnection() .collection(`events:${projectId}`) .find() .sort({ @@ -308,7 +394,7 @@ export default class GrouperWorker extends Worker { */ const repetitionCacheKey = `repetitions:${task.projectId}:${existedEvent.groupHash}:${eventUser.id}`; const repetition = await this.cache.get(repetitionCacheKey, async () => { - return this.db.getConnection().collection(`repetitions:${task.projectId}`) + return this.eventsDb.getConnection().collection(`repetitions:${task.projectId}`) .findOne({ groupHash: existedEvent.groupHash, 'payload.user.id': eventUser.id, @@ -342,7 +428,7 @@ export default class GrouperWorker extends Worker { */ const repetitionDailyCacheKey = `repetitions:${task.projectId}:${existedEvent.groupHash}:${eventUser.id}:${eventMidnight}`; const repetitionDaily = await this.cache.get(repetitionDailyCacheKey, async () => { - return this.db.getConnection().collection(`repetitions:${task.projectId}`) + return this.eventsDb.getConnection().collection(`repetitions:${task.projectId}`) .findOne({ groupHash: existedEvent.groupHash, 'payload.user.id': eventUser.id, @@ -377,7 +463,7 @@ export default class GrouperWorker extends Worker { * Returns finds event by query from project with passed ID * * @param projectId - project's identifier - * @param groupHash - group hash of the event + * @param groupHash - group hash of the event */ private async getEvent(projectId: string, 
groupHash: string): Promise { if (!mongodb.ObjectID.isValid(projectId)) { @@ -387,7 +473,7 @@ export default class GrouperWorker extends Worker { const eventCacheKey = await this.getEventCacheKey(projectId, groupHash); return this.cache.get(eventCacheKey, async () => { - return this.db.getConnection() + return this.eventsDb.getConnection() .collection(`events:${projectId}`) .findOne({ groupHash, @@ -400,12 +486,13 @@ export default class GrouperWorker extends Worker { /** * Method that returns event cache key based on projectId and groupHash + * * @param projectId - used for cache key creation * @param groupHash - used for cache key creation - * @returns cache key + * @returns {string} cache key for event */ private async getEventCacheKey(projectId: string, groupHash: string): Promise { - return `${projectId}:${JSON.stringify({groupHash: groupHash})}` + return `${projectId}:${JSON.stringify({ groupHash: groupHash })}`; } /** @@ -421,7 +508,7 @@ export default class GrouperWorker extends Worker { throw new ValidationError('Controller.saveEvent: Project ID is invalid or missed'); } - const collection = this.db.getConnection().collection(`events:${projectId}`); + const collection = this.eventsDb.getConnection().collection(`events:${projectId}`); encodeUnsafeFields(groupedEventData); @@ -441,7 +528,7 @@ export default class GrouperWorker extends Worker { } try { - const collection = this.db.getConnection().collection(`repetitions:${projectId}`); + const collection = this.eventsDb.getConnection().collection(`repetitions:${projectId}`); encodeUnsafeFields(repetition); @@ -480,7 +567,7 @@ export default class GrouperWorker extends Worker { }, }; - return (await this.db.getConnection() + return (await this.eventsDb.getConnection() .collection(`events:${projectId}`) .updateOne(query, updateQuery)).modifiedCount; } catch (err) { @@ -512,7 +599,7 @@ export default class GrouperWorker extends Worker { try { const midnight = this.getMidnightByEventTimestamp(eventTimestamp); - 
await this.db.getConnection() + await this.eventsDb.getConnection() .collection(`dailyEvents:${projectId}`) .updateOne( { diff --git a/workers/grouper/tests/index.test.ts b/workers/grouper/tests/index.test.ts index 28a1ee81..526e243b 100644 --- a/workers/grouper/tests/index.test.ts +++ b/workers/grouper/tests/index.test.ts @@ -5,6 +5,7 @@ import { createClient, RedisClientType } from 'redis'; import { Collection, MongoClient } from 'mongodb'; import { EventAddons, EventDataAccepted } from '@hawk.so/types'; import { MS_IN_SEC } from '../../../lib/utils/consts'; +import * as mongodb from 'mongodb'; jest.mock('amqplib'); @@ -24,7 +25,7 @@ jest.mock('../../../lib/cache/controller', () => { // eslint-disable-next-line @typescript-eslint/no-empty-function, jsdoc/require-jsdoc public set(): void { } - + // eslint-disable-next-line @typescript-eslint/no-empty-function, jsdoc/require-jsdoc public del(): void { } }; @@ -50,6 +51,22 @@ const secondsInDay = 24 * 60 * 60; */ const projectIdMock = '5d206f7f9aaf7c0071d64596'; +/** + * Mock project data + */ +const projectMock = { + _id: new mongodb.ObjectId(projectIdMock), + id: projectIdMock, + name: 'Test Project', + token: 'test-token', + uidAdded: { + id: 'test-user-id', + }, + unreadCount: 0, + description: 'Test project for grouper worker tests', + eventGroupingPatterns: [ 'New error .*' ], +}; + /** * Generates task for testing * @@ -84,9 +101,11 @@ function generateTask(event: Partial> = undefined describe('GrouperWorker', () => { let connection: MongoClient; + let accountsConnection: MongoClient; let eventsCollection: Collection; let dailyEventsCollection: Collection; let repetitionsCollection: Collection; + let projectsCollection: Collection; let redisClient: RedisClientType; let worker: GrouperWorker; @@ -98,15 +117,26 @@ describe('GrouperWorker', () => { useNewUrlParser: true, useUnifiedTopology: true, }); + accountsConnection = await MongoClient.connect(process.env.MONGO_ACCOUNTS_DATABASE_URI, { + useNewUrlParser: 
true, + useUnifiedTopology: true, + }); + eventsCollection = connection.db().collection('events:' + projectIdMock); dailyEventsCollection = connection.db().collection('dailyEvents:' + projectIdMock); repetitionsCollection = connection.db().collection('repetitions:' + projectIdMock); + projectsCollection = accountsConnection.db().collection('projects'); /** * Create unique index for groupHash */ await eventsCollection.createIndex({ groupHash: 1 }, { unique: true }); + /** + * Insert mock project into accounts database + */ + await projectsCollection.insertOne(projectMock); + redisClient = createClient({ url: process.env.REDIS_URL }); await redisClient.connect(); @@ -409,11 +439,112 @@ describe('GrouperWorker', () => { groupHash: originalEvent.groupHash, }).toArray()).length).toBe(2); }); + + describe('Pattern matching', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + test('should group events with titles matching one pattern', async () => { + jest.spyOn(GrouperWorker.prototype as any, 'getProjectPatterns').mockResolvedValue([ 'New error .*' ]); + const findMatchingPatternSpy = jest.spyOn(GrouperWorker.prototype as any, 'findMatchingPattern'); + + await worker.handle(generateTask({ title: 'New error 0000000000000000' })); + await worker.handle(generateTask({ title: 'New error 1111111111111111' })); + await worker.handle(generateTask({ title: 'New error 2222222222222222' })); + + const originalEvent = await eventsCollection.findOne({}); + + expect(findMatchingPatternSpy).toHaveBeenCalledTimes(3); + expect((await repetitionsCollection.find({ + groupHash: originalEvent.groupHash, + }).toArray()).length).toBe(2); + }); + + test('should handle multiple patterns and match the first one that applies', async () => { + jest.spyOn(GrouperWorker.prototype as any, 'getProjectPatterns').mockResolvedValue([ + 'Database error: .*', + 'Network error: .*', + 'New error: .*', + ]); + + await worker.handle(generateTask({ title: 'Database error: connection failed' })); + 
await worker.handle(generateTask({ title: 'Database error: timeout' })); + await worker.handle(generateTask({ title: 'Network error: timeout' })); + + const databaseEvents = await eventsCollection.find({ 'payload.title': /Database error.*/ }).toArray(); + const networkEvents = await eventsCollection.find({ 'payload.title': /Network error.*/ }).toArray(); + + expect(databaseEvents.length).toBe(1); + expect(networkEvents.length).toBe(1); + expect(await repetitionsCollection.find().count()).toBe(1); + }); + + test('should handle complex regex patterns', async () => { + jest.spyOn(GrouperWorker.prototype as any, 'getProjectPatterns').mockResolvedValue([ + 'Error \\d{3}: [A-Za-z\\s]+ in file .*\\.js$', + 'Warning \\d{3}: .*', + ]); + + await worker.handle(generateTask({ title: 'Error 404: Not Found in file index.js' })); + await worker.handle(generateTask({ title: 'Error 404: Missing Route in file router.js' })); + await worker.handle(generateTask({ title: 'Warning 301: Deprecated feature' })); + + const error404Events = await eventsCollection.find({ 'payload.title': /Error 404.*/ }).toArray(); + const warningEvents = await eventsCollection.find({ 'payload.title': /Warning.*/ }).toArray(); + + expect(error404Events.length).toBe(1); + expect(warningEvents.length).toBe(1); + expect(await repetitionsCollection.find().count()).toBe(1); + }); + + test('should maintain separate groups for different patterns', async () => { + jest.spyOn(GrouperWorker.prototype as any, 'getProjectPatterns').mockResolvedValue([ + 'TypeError: .*', + 'ReferenceError: .*', + ]); + + await worker.handle(generateTask({ title: 'TypeError: null is not an object' })); + await worker.handle(generateTask({ title: 'TypeError: undefined is not a function' })); + await worker.handle(generateTask({ title: 'ReferenceError: x is not defined' })); + await worker.handle(generateTask({ title: 'ReferenceError: y is not defined' })); + + const typeErrors = await eventsCollection.find({ 'payload.title': /TypeError.*/ 
}).toArray(); + const referenceErrors = await eventsCollection.find({ 'payload.title': /ReferenceError.*/ }).toArray(); + + expect(typeErrors.length).toBe(1); + expect(referenceErrors.length).toBe(1); + expect(await repetitionsCollection.find().count()).toBe(2); + + // Verify that events are grouped separately + expect(typeErrors[0].groupHash).not.toBe(referenceErrors[0].groupHash); + }); + + test('should handle patterns with special regex characters', async () => { + jest.spyOn(GrouperWorker.prototype as any, 'getProjectPatterns').mockResolvedValue([ + 'Error \\[\\d+\\]: .*', + 'Warning \\(code=\\d+\\): .*', + ]); + + await worker.handle(generateTask({ title: 'Error [123]: Database connection failed' })); + await worker.handle(generateTask({ title: 'Error [123]: Query timeout' })); + await worker.handle(generateTask({ title: 'Warning (code=456): Cache miss' })); + + const errorEvents = await eventsCollection.find({ 'payload.title': /Error \[\d+\].*/ }).toArray(); + const warningEvents = await eventsCollection.find({ 'payload.title': /Warning \(code=\d+\).*/ }).toArray(); + + expect(errorEvents.length).toBe(1); + expect(warningEvents.length).toBe(1); + expect(await repetitionsCollection.find().count()).toBe(1); + }); + }); }); afterAll(async () => { await redisClient.quit(); await worker.finish(); + await projectsCollection.deleteMany({}); + await accountsConnection.close(); await connection.close(); }); }); diff --git a/workers/javascript/src/index.ts b/workers/javascript/src/index.ts index c5a1d81c..9652be22 100644 --- a/workers/javascript/src/index.ts +++ b/workers/javascript/src/index.ts @@ -228,6 +228,7 @@ export default class JavascriptEventWorker extends EventWorker { * Fixes bug: https://github.com/codex-team/hawk.workers/issues/121 */ if (originalLocation.source) { + console.log('original location source found') /** * Get 5 lines above and 5 below */ @@ -246,10 +247,9 @@ export default class JavascriptEventWorker extends EventWorker { sourceCode: lines, }) 
as BacktraceFrame; } - + /** * Method that is used to parse full function context of the code position - * * @param sourceCode - content of the source file * @param line - number of the line from the stack trace * @returns - string of the function context or null if it could not be parsed @@ -257,46 +257,42 @@ export default class JavascriptEventWorker extends EventWorker { private getFunctionContext(sourceCode: string, line: number): string | null { let functionName: string | null = null; let className: string | null = null; - let isAsync = false; + let isAsync: boolean = false; try { const ast = parse(sourceCode, { - sourceType: 'module', + sourceType: "module", plugins: [ - 'typescript', - 'jsx', - 'classProperties', - 'decorators', - 'optionalChaining', - 'nullishCoalescingOperator', - 'dynamicImport', - 'bigInt', - 'topLevelAwait', - ], + "typescript", + "jsx", + "classProperties", + "decorators", + "optionalChaining", + "nullishCoalescingOperator", + "dynamicImport", + "bigInt", + "topLevelAwait" + ] }); - /** - * Ast-tree has same Node[] structure, but types are incompatible so we need cast to any - */ - /* eslint-disable-next-line @typescript-eslint/no-explicit-any*/ traverse(ast as any, { /** * It is used to get class decorator of the position, it will save class that is related to original position - * - * @param path - node of the ast tree to be checked with this handler */ ClassDeclaration(path) { + console.log(`class declaration: loc: ${path.node.loc}, line: ${line}, node.start.line: ${path.node.loc.start.line}, node.end.line: ${path.node.loc.end.line}`) + if (path.node.loc && path.node.loc.start.line <= line && path.node.loc.end.line >= line) { - className = path.node.id?.name || null; + className = path.node.id.name || null; } }, /** * It is used to get class and its method decorator of the position * It will save class and method, that are related to original position - * - * @param path - node of the ast tree to be checked with this handler */ 
ClassMethod(path) { + console.log(`class declaration: loc: ${path.node.loc}, line: ${line}, node.start.line: ${path.node.loc.start.line}, node.end.line: ${path.node.loc.end.line}`) + if (path.node.loc && path.node.loc.start.line <= line && path.node.loc.end.line >= line) { // Handle different key types if (path.node.key.type === 'Identifier') { @@ -307,42 +303,42 @@ export default class JavascriptEventWorker extends EventWorker { }, /** * It is used to get function name that is declared out of class - * - * @param path - node of the ast tree to be checked with this handler */ FunctionDeclaration(path) { + console.log(`function declaration: loc: ${path.node.loc}, line: ${line}, node.start.line: ${path.node.loc.start.line}, node.end.line: ${path.node.loc.end.line}`) + if (path.node.loc && path.node.loc.start.line <= line && path.node.loc.end.line >= line) { - functionName = path.node.id?.name || null; + functionName = path.node.id.name || null; isAsync = path.node.async; } }, /** * It is used to get anonimous function names in function expressions or arrow function expressions - * - * @param path - node of the ast tree to be checked with this handler */ VariableDeclarator(path) { + console.log(`variable declaration: node.type: ${path.node.init.type}, line: ${line}, `) + if ( path.node.init && - (path.node.init.type === 'FunctionExpression' || path.node.init.type === 'ArrowFunctionExpression') && + (path.node.init.type === "FunctionExpression" || path.node.init.type === "ArrowFunctionExpression") && path.node.loc && path.node.loc.start.line <= line && path.node.loc.end.line >= line ) { // Handle different id types - if (path.node.id?.type === 'Identifier') { + if (path.node.id.type === 'Identifier') { functionName = path.node.id.name; } - isAsync = (path.node.init).async; + isAsync = (path.node.init as any).async; } - }, + } }); } catch (e) { - console.error(`Failed to parse source code: ${e}`); + console.error(`Failed to parse source code: ${e.message}`); } - return 
functionName ? `${isAsync ? 'async ' : ''}${className ? `${className}.` : ''}${functionName}` : null; - } + return functionName ? `${isAsync ? "async " : ""}${className ? `${className}.` : ""}${functionName}` : null; +} /** * Downloads source map file from Grid FS diff --git a/workers/limiter/src/index.ts b/workers/limiter/src/index.ts index 07ebe273..f9c9c590 100644 --- a/workers/limiter/src/index.ts +++ b/workers/limiter/src/index.ts @@ -237,6 +237,7 @@ export default class LimiterWorker extends Worker { */ private async getWorkspacesWithTariffPlans(): Promise { this.logger.info('analyzeWorkspacesLimits -> getWorkspacesWithTariffPlans'); + return this.workspacesCollection.aggregate([ { $lookup: { diff --git a/workers/limiter/tests/index.test.ts b/workers/limiter/tests/index.test.ts index 8d80375a..e82af4fb 100644 --- a/workers/limiter/tests/index.test.ts +++ b/workers/limiter/tests/index.test.ts @@ -74,6 +74,7 @@ describe('Limiter worker', () => { notifications: [], token: '', uidAdded: undefined, + eventGroupingPatterns: [], }; }; diff --git a/yarn.lock b/yarn.lock index 238d7c5b..a0cb9a4d 100644 --- a/yarn.lock +++ b/yarn.lock @@ -354,6 +354,14 @@ debug "^4.3.1" globals "^11.1.0" +"@babel/types@7.26.9", "@babel/types@^7.26.9": + version "7.26.9" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.26.9.tgz#08b43dec79ee8e682c2ac631c010bdcac54a21ce" + integrity sha512-Y3IR1cRnOxOCDvMmNiym7XpXQ93iGDDPHx+Zj+NM+rg0fBaShfQLkg+hKPaZCEvg5N/LeCo4+Rj/i3FuJsIQaw== + dependencies: + "@babel/helper-string-parser" "^7.25.9" + "@babel/helper-validator-identifier" "^7.25.9" + "@babel/types@^7.0.0", "@babel/types@^7.15.4", "@babel/types@^7.3.0", "@babel/types@^7.3.3": version "7.15.6" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.15.6.tgz#99abdc48218b2881c058dd0a7ab05b99c9be758f" @@ -428,10 +436,10 @@ dependencies: "@types/mongodb" "^3.5.34" -"@hawk.so/types@^0.1.26": - version "0.1.26" - resolved 
"https://registry.yarnpkg.com/@hawk.so/types/-/types-0.1.26.tgz#780d68c317024cd918011f1edfee4ef4001c4ad6" - integrity sha512-7WYhvfGgb3Q9pj3cWjpIFdcoxKNVsK+iqt1LgFdFqfCyLVLZXo9qxujaoTHB6OlC2IJ7WNjeTDUvb6yD4k+oIw== +"@hawk.so/types@^0.1.28": + version "0.1.28" + resolved "https://registry.yarnpkg.com/@hawk.so/types/-/types-0.1.28.tgz#a479f411a4ae1855a6661084fa4396c7f323b170" + integrity sha512-W8xNlbkQuffwhVn/ja5Bo4EglN0waSM0Rx3R+jGmcrbYi1a4g6kGPQFYkMSd0WadikOH1nd9NrfmyJB9cVOBWA== dependencies: "@types/mongodb" "^3.5.34"