Skip to content
Merged
Show file tree
Hide file tree
Changes from 21 commits
Commits
Show all changes
27 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 13 additions & 12 deletions lib/event-worker.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import { Worker } from './worker';
import * as WorkerNames from './workerNames';
import { GroupWorkerTask } from 'hawk-worker-grouper/types/group-worker-task';
import { EventWorkerTask } from './types/event-worker-task';
import { CatcherMessageType, CatcherMessagePayload, CatcherMessageAccepted, ErrorsCatcherType } from '@hawk.so/types';

/**
* Defines a Worker that handles events from Catcher.
Expand All @@ -12,30 +12,31 @@ export abstract class EventWorker extends Worker {
* Worker type (will pull tasks from Registry queue with the same name)
* 'errors/nodejs' for example
*/
public type = '';
public type: ErrorsCatcherType;

/**
* Message handle function
*
* @param {EventWorkerTask} event - event to handle
* @param {CatcherMessageAccepted<CatcherMessageType>} task - worker task to handle
*/
public async handle(event: EventWorkerTask): Promise<void> {
this.validate(event);
public async handle(task: CatcherMessageAccepted<CatcherMessageType>): Promise<void> {
this.validate(task);

await this.addTask(WorkerNames.GROUPER, {
projectId: event.projectId,
catcherType: this.type,
event: event.payload,
} as GroupWorkerTask);
projectId: task.projectId,
catcherType: this.type as CatcherMessageType,
payload: task.payload as CatcherMessagePayload<ErrorsCatcherType>,
timestamp: task.timestamp,
} as GroupWorkerTask<ErrorsCatcherType>);
}

/**
* Validate passed event data
*
* @param {EventWorkerTask} event - event to be validated
* @param {CatcherMessageAccepted<CatcherMessageType>} task - task to be validated
*/
protected validate(event: EventWorkerTask): void {
if (!event.projectId || !event.payload) {
protected validate(task: CatcherMessageAccepted<CatcherMessageType>): void {
if (!task.projectId || !task.payload || !task.timestamp) {
throw new Error('Bad data was given');
}
}
Expand Down
22 changes: 0 additions & 22 deletions lib/types/event-worker-task.d.ts

This file was deleted.

2 changes: 1 addition & 1 deletion lib/types/worker-task.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,4 @@
* When you create new type of worker, describe its task's structure with interface inherited from WorkerTask
*/
export interface WorkerTask {
}
}
8 changes: 8 additions & 0 deletions lib/utils/unsafeFields.ts
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,14 @@ export function encodeUnsafeFields(event: GroupedEventDBScheme | RepetitionDBSch
* Repetition includes delta field, grouped event includes payload
*/
if ('delta' in event) {
/**
* We need to check the case when the delta field exists but holds undefined:
* it means the repetition payload is the same as the original event payload
*/
if (event.delta === undefined) {
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

comment pls

return;
}

fieldValue = event.delta[field];
} else {
fieldValue = event.payload[field];
Expand Down
6 changes: 3 additions & 3 deletions lib/workerErrors.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
* Class for critical errors
* have to stop process
*/
import { EventAddons, EventContext, EventDataAccepted } from '@hawk.so/types';
import { EventAddons, EventContext, EventData } from '@hawk.so/types';

/**
* Error class with additional error context for debugging
Expand Down Expand Up @@ -71,8 +71,8 @@ export class DiffCalculationError extends NonCriticalError {
*/
constructor(
msg: string | Error,
originalEvent: EventDataAccepted<EventAddons>,
eventToCompare: EventDataAccepted<EventAddons>
originalEvent: EventData<EventAddons>,
eventToCompare: EventData<EventAddons>
) {
super(msg);
this.context = {
Expand Down
162 changes: 162 additions & 0 deletions migrations/20250707160120-move-timestamp-to-event-level.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,162 @@
/**
* This migration moves `payload.timestamp` to top-level `timestamp`
* for both `events:*` and `repetitions:*` collections.
*/

module.exports = {
async up(db) {
const collections = await db.listCollections({}, {
authorizedCollections: true,
nameOnly: true,
}).toArray();

// Separate collections by prefix
const eventCollections = collections
.filter(col => /^events:/.test(col.name))
.map(col => col.name);

const repetitionCollections = collections
.filter(col => /^repetitions:/.test(col.name))
.map(col => col.name);

// Step 1: Process event collections
for (const collectionName of eventCollections) {
await db.collection(collectionName).updateMany(
{ 'payload.timestamp': { $exists: true } },
[
{
$set: {
timestamp: { $toDouble: "$payload.timestamp" },
},
},
{
$unset: "payload.timestamp",
},
]
);
}

// Step 2: Process repetition collections
for (const collectionName of repetitionCollections) {
const projectId = collectionName.split(':')[1];
const collection = db.collection(collectionName);

// Step 2.1: First, handle documents where payload.timestamp exists
await collection.updateMany(
{ 'payload.timestamp': { $exists: true } },
[
{
$set: {
timestamp: { $toDouble: "$payload.timestamp" }, // Convert payload.timestamp to number
},
},
{
$unset: "payload.timestamp", // Remove payload.timestamp
},
]
);

const pipeline = [
{
$match: {
$or : [
{ "payload.timestamp": { $exists: false } },
{ payload: { $exists: false } },
],
timestamp: { $exists: false },
groupHash: { $exists: true }
}
},
{
$lookup: {
from: `events:${projectId}`, // dynamically referencing the events collection
localField: "groupHash", // field from repetitions collection
foreignField: "groupHash", // field in the events collection
as: "eventData" // alias for the matched data
}
},
{
$unwind: {
path: "$eventData", // we expect only one match per groupHash
preserveNullAndEmptyArrays: true // allow documents with no matching event
}
},
{
$match: {
"eventData.timestamp": { $exists: true } // only proceed if event.timestamp exists
}
},
{
$project: {
_id: 1,
eventTimestamp: { $toDouble: "$eventData.timestamp"}
}
}
];

const matchedDocs = await collection.aggregate(pipeline).toArray();

const bulkOps = matchedDocs.map(doc => ({
updateOne: {
filter: { _id: doc._id },
update: {
$set: { timestamp: doc.eventTimestamp } // Set the timestamp from the event
}
}
}));

if (bulkOps.length > 0) {
await collection.bulkWrite(bulkOps);
}
}
},

async down(db) {
const collections = await db.listCollections({}, {
authorizedCollections: true,
nameOnly: true,
}).toArray();

const eventCollections = collections
.filter(col => /^events:/.test(col.name))
.map(col => col.name);

const repetitionCollections = collections
.filter(col => /^repetitions:/.test(col.name))
.map(col => col.name);

// Revert event collections
for (const collectionName of eventCollections) {
await db.collection(collectionName).updateMany(
{ timestamp: { $exists: true } },
[
{
$set: {
'payload.timestamp': '$timestamp',
},
},
{
$unset: { 'timestamp': "" },
},
]
);
}

// Revert repetition collections
for (const collectionName of repetitionCollections) {
await db.collection(collectionName).updateMany(
{ timestamp: { $exists: true } },
[
{
$set: {
'payload.timestamp': '$timestamp',
},
},
{
$unset: { 'timestamp': "" },
},
]
);
}
},
};
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@
},
"dependencies": {
"@hawk.so/nodejs": "^3.1.1",
"@hawk.so/types": "^0.1.29",
"@hawk.so/types": "^0.1.32",
"@types/amqplib": "^0.8.2",
"@types/jest": "^29.2.3",
"@types/mongodb": "^3.5.15",
Expand Down
2 changes: 1 addition & 1 deletion workers/archiver/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -168,7 +168,7 @@ export default class ArchiverWorker extends Worker {
const repetitionsBulk = repetitionsCollection.initializeUnorderedBulkOp();

repetitionsBulk.find({
'payload.timestamp': {
timestamp: {
$lt: maxOldTimestamp,
},
}).delete();
Expand Down
7 changes: 7 additions & 0 deletions workers/archiver/tests/index.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,10 @@ describe('Archiver worker', () => {

const changedProject = await projectCollection.findOne({ _id: mockedProject._id });

// console.log('Changed project:', changedProject);
// console.log('repetitions: ', await repetitionsCollection.find({projectId: changedProject._id}).toArray());
// console.log('events: ', await eventsCollection.find({projectId: changedProject._id}).toArray());

Comment thread
neSpecc marked this conversation as resolved.
Outdated
let originalEventsDeletedCount = 0;

mockedEvents.forEach(event => {
Expand All @@ -110,6 +114,9 @@ describe('Archiver worker', () => {
}
});

// console.log('originalEventsDeletedCount:', originalEventsDeletedCount);
// console.log('archiveEventsCount:', archiveEventsCount);

Comment thread
neSpecc marked this conversation as resolved.
Outdated
expect(changedProject.archivedEventsCount).toBe(archiveEventsCount + originalEventsDeletedCount);
});

Expand Down
Loading
Loading