diff --git a/CHANGELOG.md b/CHANGELOG.md index 9945d7d4002..21fea346ecd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,38 @@ -## Version 25.03.X +## Version 25.03.36 +Enterprise fixes: +- [journey] Workflow fixes +- [users] UI events table fixes + +## Version 25.03.35 +Fixes: +- [core] Fixes for search bar in standard table component + +Enterprise fixes: +- [journeys] Fixes for journey data updates on incoming data. +- [surveys] Return error message if invalid widget_id passed on template loading +- [users] Show content and journey events in user profile +- [users] Display profile group name in table column +- [users] When exporting user profiles, replace user name with device id if user name does not exist +- [users] Use user profile endpoint for exporting data instead of the generic export endpoint + +## Version 25.03.34 Fixes: - [core] Fix period calculation +- [dashboards] Update dialog button color when deleting dashboard/widget +- [star-rating] Fix rating number when exporting data + +Enterprise fixes: +- [content] Uniform journey and content block actions +- [content] Fix overflow and missing translations in content blocks +- [content] Fix button management when creating fullscreen content blocks +- [crash_symbolication] Use countlyfs for JavaScript symbolication +- [funnels] Fix funnel name tooltip content +- [surveys] Allow surveys to resize and reposition when user rotates devices or adjusts browser window +- [nps] Allow nps to resize and reposition when user rotates devices or adjusts browser window +- [groups] Dealing with invalid values for group permission +- [geo] Update table row cursor to indicate that it's clickable +- [users] Change table column min-width to width so it can be resized even smaller +- [users] Display filtered user count instead of all user count in the table summary ## Version 25.03.33 Fixes: diff --git a/Dockerfile-core b/Dockerfile-core index 33dc3f9931f..df66adc1a06 100644 --- a/Dockerfile-core +++ b/Dockerfile-core @@ 
-58,7 +58,7 @@ RUN useradd -r -M -U -d /opt/countly -s /bin/false countly && \ \ # npm dependencies ./bin/docker/modify.sh && \ - HOME=/tmp npm install --unsafe-perm && \ + HOME=/tmp npm ci --unsafe-perm && \ ./bin/docker/preinstall.sh && \ \ # web sdk diff --git a/Dockerfile-unified b/Dockerfile-unified index cc62d986aec..5951f2ed8f2 100644 --- a/Dockerfile-unified +++ b/Dockerfile-unified @@ -132,10 +132,14 @@ WORKDIR /opt/countly COPY . . # 3.1.2 copy OpenTelemetry files from build context +# otel.js and otelexpress.js both require('./metrics'), so metrics.js +# must be copied alongside each entry point. RUN if [ -f observability/otel.js ]; then \ echo "Copying OpenTelemetry files..."; \ cp observability/otel.js api/utils/otel.js; \ + cp observability/metrics.js api/utils/metrics.js; \ cp observability/otelexpress.js frontend/express/otel.js; \ + cp observability/metrics.js frontend/express/metrics.js; \ else \ echo "Observability files not found, skipping..."; \ fi diff --git a/Gruntfile.js b/Gruntfile.js index b91b86fad19..ab22f5a8943 100644 --- a/Gruntfile.js +++ b/Gruntfile.js @@ -57,6 +57,7 @@ module.exports = function(grunt) { 'frontend/express/public/javascripts/utils/Sortable.min.js', 'frontend/express/public/javascripts/utils/vue/vuedraggable.umd.min.js', 'frontend/express/public/javascripts/utils/lodash.mergeWith.js', + 'frontend/express/public/javascripts/utils/cronstrue.min.js', 'frontend/express/public/javascripts/utils/element-tiptap.umd.min.js' ], dest: 'frontend/express/public/javascripts/min/countly.utils.concat.js' diff --git a/api/eventSink/KafkaEventSink.ts b/api/eventSink/KafkaEventSink.ts index b030cf347b6..15bc0dda331 100644 --- a/api/eventSink/KafkaEventSink.ts +++ b/api/eventSink/KafkaEventSink.ts @@ -14,6 +14,28 @@ const EventSinkInterface = require('./EventSinkInterface.ts').default; const { transformToKafkaEventFormat } = require('../utils/eventTransformer'); const Log = require('../utils/log.js'); +// OTel metrics — opt-in, zero 
overhead when OTEL_ENABLED is not set +const _otelEnabled = /^(1|true|yes)$/i.test(process.env.OTEL_ENABLED || ''); +let _sinkMetrics: { duration: any; eventsTotal: any } | null = null; +function getSinkMetrics() { + if (_sinkMetrics) { + return _sinkMetrics; + } + if (!_otelEnabled) { + return null; + } + try { + const { metrics } = require('@opentelemetry/api'); + const meter = metrics.getMeter('countly-event-sink'); + _sinkMetrics = { + duration: meter.createHistogram('countly_event_sink_duration_seconds', { unit: 's' }), + eventsTotal: meter.createCounter('countly_event_sink_events_total'), + }; + } + catch (_e) { /* no-op */ } + return _sinkMetrics; +} + /** * Logger interface for type safety */ @@ -192,6 +214,8 @@ class KafkaEventSink extends EventSinkInterface { if (result.success) { this.#log.d(`Successfully sent ${result.sent} events to Kafka in ${duration}ms`); + getSinkMetrics()?.duration.record(duration / 1000, { sink: 'kafka' }); + getSinkMetrics()?.eventsTotal.add(result.sent ?? 0, { sink: 'kafka', result: 'success' }); return this._createResult(true, result.sent ?? 
0, 'Events sent to Kafka successfully', { duration, @@ -205,6 +229,8 @@ class KafkaEventSink extends EventSinkInterface { } catch (error) { const duration = Date.now() - startTime; + getSinkMetrics()?.duration.record(duration / 1000, { sink: 'kafka' }); + getSinkMetrics()?.eventsTotal.add(transformedEvents, { sink: 'kafka', result: 'error' }); this.#log.e(`Failed to write ${transformedEvents} events to Kafka in ${duration}ms:`); this.#log.e('Error writing events to Kafka:', error); throw error; diff --git a/api/eventSource/KafkaEventSource.js b/api/eventSource/KafkaEventSource.js index a3609de717d..ef62320f4cd 100644 --- a/api/eventSource/KafkaEventSource.js +++ b/api/eventSource/KafkaEventSource.js @@ -1,6 +1,33 @@ const EventSourceInterface = require('./EventSourceInterface'); const Log = require('../utils/log.js'); +// OTel metrics — opt-in, zero overhead when OTEL_ENABLED is not set +const _otelEnabled = /^(1|true|yes)$/i.test(process.env.OTEL_ENABLED || ''); +let _sourceMetrics = null; + +/** + * Initialize OTel metrics for event source + * @returns {Object|null} OTel metrics object or null if OTel is not enabled + * @private + */ +function getSourceMetrics() { + if (_sourceMetrics) { + return _sourceMetrics; + } + if (!_otelEnabled) { + return null; + } + try { + const { metrics } = require('@opentelemetry/api'); + const meter = metrics.getMeter('countly-event-source'); + _sourceMetrics = { + batchWait: meter.createHistogram('countly_event_source_batch_wait_seconds', { unit: 's' }), + }; + } + catch (_e) { /* no-op */ } + return _sourceMetrics; +} + /** * Kafka implementation of EventSourceInterface * Supports async iteration with auto-acknowledgment and proper at-least-once delivery @@ -563,6 +590,7 @@ class KafkaEventSource extends EventSourceInterface { return checkAndReturnBatch(batch); } // Wait for next batch from Kafka + const waitStart = Date.now(); await new Promise((resolve) => { // Race condition check: batch might have arrived while creating 
promise if (this.#currentBatch) { @@ -571,6 +599,11 @@ class KafkaEventSource extends EventSourceInterface { } this.#batchAvailable = resolve; }); + const waitDuration = (Date.now() - waitStart) / 1000; + getSourceMetrics()?.batchWait.record(waitDuration, { + group_id: this.#effectiveGroupId || this.#name, + topic: this.#kafkaOptions?.topics?.[0] || 'unknown' + }); // Return the batch that arrived if (this.#currentBatch) { diff --git a/api/ingestor/requestProcessor.ts b/api/ingestor/requestProcessor.ts index 398968c0dcd..eb0fd8b80e0 100644 --- a/api/ingestor/requestProcessor.ts +++ b/api/ingestor/requestProcessor.ts @@ -9,7 +9,6 @@ import { createRequire } from 'module'; import usage from './usage.js'; import common from '../utils/common.js'; -import url from 'url'; import logModule from '../utils/log.js'; import crypto from 'crypto'; import { ignorePossibleDevices, checksumSaltVerification, validateRedirect } from '../utils/requestProcessorCommon.js'; @@ -378,7 +377,7 @@ interface RequestParams { /** Href */ href?: string; /** URL parts */ - urlParts?: ReturnType; + urlParts?: { pathname: string; path: string }; /** Path segments */ paths?: string[]; /** API path */ @@ -1246,14 +1245,15 @@ const processRequest = (params: RequestParams): boolean | void => { } params.tt = Date.now().valueOf(); - const urlParts = url.parse(params.req.url, true); - const queryString = urlParts.query; - const paths = urlParts.pathname.split('/'); + // base URL is required by WHATWG URL API for relative paths, only pathname and query are used + const parsedUrl = new URL(params.req.url, 'http://localhost'); + const queryString = Object.fromEntries(parsedUrl.searchParams); + const paths = parsedUrl.pathname.split('/'); - params.href = urlParts.href; + params.href = parsedUrl.pathname + parsedUrl.search; params.qstring = params.qstring || {}; params.res = params.res || {} as ServerResponse; - params.urlParts = urlParts; + params.urlParts = { pathname: parsedUrl.pathname, path: 
parsedUrl.pathname + parsedUrl.search }; params.paths = paths; params.req.headers = params.req.headers || {}; diff --git a/api/jobs/mutationManagerJob.ts b/api/jobs/mutationManagerJob.ts index 2dd1f50221c..d44391edd0b 100644 --- a/api/jobs/mutationManagerJob.ts +++ b/api/jobs/mutationManagerJob.ts @@ -26,11 +26,12 @@ interface JobConfig { interface MutationTask extends Document { _id: ObjectId | string; - type: 'delete' | 'update'; + type: 'delete' | 'update' | 'native_ch'; db: string; collection: string; query: Record; update?: Record; + native_sql?: string; running: boolean; status: string; hb?: number; @@ -272,7 +273,7 @@ class MutationManagerJob extends Job { */ async processTask(task: MutationTask, summary: SummaryEntry[], jobConfig: JobConfig = jobConfigState || DEFAULT_JOB_CONFIG): Promise { const type = task.type; - if (type !== 'delete' && type !== 'update') { + if (type !== 'delete' && type !== 'update' && type !== 'native_ch') { await common.db.collection('mutation_manager').updateOne( { _id: task._id }, { @@ -288,7 +289,7 @@ class MutationManagerJob extends Job { const clickhouseEnabled = mutationManager.isClickhouseEnabled(); const hasClickhouseDelete = clickhouseEnabled && !!(clickHouseRunner && clickHouseRunner.deleteGranularDataByQuery); const hasClickhouseUpdate = clickhouseEnabled && !!(clickHouseRunner && clickHouseRunner.updateGranularDataByQuery); - const hasClickhouse = (type === 'update' ? hasClickhouseUpdate : hasClickhouseDelete); + const hasClickhouse = type === 'native_ch' ? clickhouseEnabled : (type === 'update' ? 
hasClickhouseUpdate : hasClickhouseDelete); if (!mongoDb && !hasClickhouse) { const reason = `mongo_db_unavailable:${task.db || 'missing'}`; @@ -317,7 +318,11 @@ class MutationManagerJob extends Job { } let mongoOk = true; - if (mongoDb) { + if (type === 'native_ch') { + // Native CH mutations skip MongoDB entirely + log.d('Native CH mutation - skipping MongoDB', { taskId: task._id }); + } + else if (mongoDb) { if (type === 'update') { mongoOk = await this.updateMongo(task, mongoDb); } @@ -330,7 +335,10 @@ class MutationManagerJob extends Job { } let chScheduledOk = true; - if (type === 'update' && hasClickhouseUpdate) { + if (type === 'native_ch' && clickhouseEnabled) { + chScheduledOk = await this.executeNativeClickhouse(task); + } + else if (type === 'update' && hasClickhouseUpdate) { chScheduledOk = await this.updateClickhouse(task); } else if (type === 'delete' && hasClickhouseDelete) { @@ -422,8 +430,10 @@ class MutationManagerJob extends Job { for (const task of awaiting) { try { if (chHealth && typeof chHealth.getMutationStatus === 'function') { - // In cluster mode, mutations target _local tables, so validation must check _local - const validationTable = isClusterMode ? task.collection + '_local' : task.collection; + // In cluster mode, mutations target _local tables, so validation must check _local. + // native_ch tasks may already have _local in collection name — avoid doubling. + const needsLocalSuffix = isClusterMode && !task.collection.endsWith('_local'); + const validationTable = needsLocalSuffix ? task.collection + '_local' : task.collection; const status = await chHealth.getMutationStatus({ validation_command_id: task.validation_command_id, table: validationTable, database: task.db }); if (status && status.is_done) { await common.db.collection('mutation_manager').updateOne( @@ -677,6 +687,95 @@ class MutationManagerJob extends Job { } } + /** + * Build validated ClickHouse mutation SQL with an embedded command-id for tracking. 
+ * - Strips trailing semicolon + * - Validates ALTER TABLE ... DELETE/UPDATE ... WHERE ... shape + * - Injects tautological AND before any SETTINGS clause + * @returns Final SQL string, or null if the shape is invalid + */ + private buildValidatedNativeClickhouseSql(baseSql: string, commandId: string): string | null { + if (!baseSql || typeof baseSql !== 'string') { + return null; + } + let sql = baseSql.trim(); + if (sql.endsWith(';')) { + sql = sql.slice(0, -1).trimEnd(); + } + const upper = sql.toUpperCase(); + if (!upper.startsWith('ALTER TABLE ')) { + return null; + } + if (!/\b(DELETE|UPDATE)\b/.test(upper)) { + return null; + } + if (!/\bWHERE\b/.test(upper)) { + return null; + } + // Find SETTINGS clause (if any) — inject command-id BEFORE it + const settingsMatch = upper.match(/\bSETTINGS\b/); + const settingsIdx = settingsMatch?.index ?? -1; + const injection = ` AND '${commandId}' = '${commandId}'`; + if (settingsIdx !== -1) { + return sql.slice(0, settingsIdx) + injection + sql.slice(settingsIdx); + } + return sql + injection; + } + + /** + * Executes a native ClickHouse SQL mutation directly. + * Used for complex mutations (e.g., deduplication) that cannot be expressed as Mongo-style queries. + * Embeds validation_command_id for tracking via system.mutations. 
+ * @param task - The mutation task with native_sql field + */ + async executeNativeClickhouse(task: MutationTask): Promise { + if (!task.native_sql || typeof task.native_sql !== 'string') { + log.e('Skipping native CH mutation (empty sql)', { taskId: task._id }); + await this.markFailedOrRetry(task, 'empty_native_sql'); + return false; + } + + if (!common.clickhouseQueryService) { + log.e('ClickHouse query service not available for native mutation', { taskId: task._id }); + await this.markFailedOrRetry(task, 'ch_query_service_unavailable'); + return false; + } + + try { + const retryIndex = Number(task.fail_count || 0); + const commandId = `nm_${String(task._id)}_${retryIndex}`; + + const sql = this.buildValidatedNativeClickhouseSql(task.native_sql, commandId); + if (!sql) { + log.e('Skipping native CH mutation (invalid SQL shape)', { + taskId: task._id, + native_sql: task.native_sql + }); + await this.markFailedOrRetry(task, 'invalid_native_sql_shape'); + return false; + } + + // Persist command_id BEFORE executing mutation (crash safety: if we crash + // between execution and this update, validation can still find the command_id) + await common.db.collection('mutation_manager').updateOne( + { _id: task._id }, + { $set: { validation_command_id: commandId } } + ); + + await common.clickhouseQueryService.executeMutation({ query: sql }); + log.d('Native CH mutation scheduled', { taskId: task._id, commandId }); + return true; + } + catch (err) { + log.e('Native CH mutation failed', { + taskId: task._id, + error: (err as Error)?.message || String(err) + }); + await this.markFailedOrRetry(task, 'native_ch_error: ' + ((err as Error)?.message || err + '')); + return false; + } + } + /** * Marks a task as failed or schedules it for a retry based on the number of previous failures. * @param task - The task object to update. 
diff --git a/api/parts/data/QueryRunner.ts b/api/parts/data/QueryRunner.ts index 3a1240153e3..4c666e9fdbc 100644 --- a/api/parts/data/QueryRunner.ts +++ b/api/parts/data/QueryRunner.ts @@ -15,6 +15,28 @@ const require = createRequire(import.meta.url); const config = require('../../config.js'); const logModule = require('../../utils/log.js'); +// OTel metrics — opt-in, zero overhead when OTEL_ENABLED is not set +const _otelEnabled = /^(1|true|yes)$/i.test(process.env.OTEL_ENABLED || ''); +let _queryMetrics: { duration: any; total: any } | null = null; +function getQueryMetrics() { + if (_queryMetrics) { + return _queryMetrics; + } + if (!_otelEnabled) { + return null; + } + try { + const { metrics } = require('@opentelemetry/api'); + const m = metrics.getMeter('countly-query-runner'); + _queryMetrics = { + duration: m.createHistogram('countly_query_duration_seconds', { unit: 's' }), + total: m.createCounter('countly_query_total'), + }; + } + catch (_e) { /* no-op */ } + return _queryMetrics; +} + const log = logModule('query-runner') as { d: (...args: unknown[]) => void; e: (...args: unknown[]) => void; @@ -218,6 +240,7 @@ class QueryRunner { transformOptions: TransformOptions = {} ): Promise { const startTime = Date.now(); + let selectedAdapter = 'unknown'; try { if (!queryDef || !queryDef.adapters) { throw new Error('Invalid query definition: must have adapters'); @@ -237,7 +260,7 @@ class QueryRunner { return await this.executeQueryWithComparison(queryDef, params, options, transformOptions); } - const selectedAdapter = this.selectAdapterForDef(queryDef, options.adapter); + selectedAdapter = this.selectAdapterForDef(queryDef, options.adapter); let result = await this.executeOnAdapter(queryDef, selectedAdapter, params, options); // Apply adapter-specific transformation if provided @@ -261,19 +284,32 @@ class QueryRunner { } } - const duration = Date.now() - startTime; - log.d(`Query completed: ${queryName} on ${selectedAdapter} in ${duration}ms`); - // Enforce 
QueryRunner convention: handlers must return { _queryMeta, data } + // Check BEFORE recording success metric to avoid double-counting on throw if (!result || typeof result !== 'object' || !Object.prototype.hasOwnProperty.call(result, '_queryMeta') || !Object.prototype.hasOwnProperty.call(result, 'data')) { throw new Error(`Handler for query '${queryName}' on adapter '${selectedAdapter}' must return object with '_queryMeta' and 'data' properties`); } + const duration = Date.now() - startTime; + const qm = getQueryMetrics(); + if (qm) { + const attrs = { query_name: queryName, adapter: selectedAdapter, result: 'success' }; + qm.total.add(1, attrs); + qm.duration.record(duration / 1000, attrs); + } + log.d(`Query completed: ${queryName} on ${selectedAdapter} in ${duration}ms`); + return result.data; } catch (error) { const duration = Date.now() - startTime; const queryName = queryDef?.name || 'unnamed_query'; + const qm = getQueryMetrics(); + if (qm) { + const attrs = { query_name: queryName, adapter: selectedAdapter, result: 'error' }; + qm.total.add(1, attrs); + qm.duration.record(duration / 1000, attrs); + } log.e(`Query execution failed: ${queryName} after ${duration}ms`, error); throw error; } diff --git a/api/parts/data/exports.ts b/api/parts/data/exports.ts index a66ae6e69ba..3238cfce43f 100644 --- a/api/parts/data/exports.ts +++ b/api/parts/data/exports.ts @@ -837,7 +837,8 @@ const exportsModule = { fromDatabase, fromRequest, fromRequestQuery, - fromData + fromData, + transformValuesInObject }; export default exportsModule; @@ -850,6 +851,7 @@ export { fromDatabase, fromRequest, fromRequestQuery, - fromData + fromData, + transformValuesInObject }; export type { ExportOptions, Params, Mapper }; diff --git a/api/parts/mgmt/app_users.js b/api/parts/mgmt/app_users.js index f1e14332b2a..f856c72986a 100644 --- a/api/parts/mgmt/app_users.js +++ b/api/parts/mgmt/app_users.js @@ -1230,6 +1230,7 @@ usersApi.export = function(app_id, query, params, callback) { 
export_commands: export_commands, query: query, uids: res[0].uid, + export_id: export_id, export_folder: export_folder }, function() { var commands = []; diff --git a/api/parts/mgmt/users.js b/api/parts/mgmt/users.js index 9e36ec4ff5b..dc3068b91f8 100644 --- a/api/parts/mgmt/users.js +++ b/api/parts/mgmt/users.js @@ -305,7 +305,8 @@ usersApi.createUser = async function(params) { * @param {string} userId - id of the user for which to remove sessions **/ function killAllSessionForUser(userId) { - common.db.collection('sessions_').find({"session": { $regex: userId }}).toArray(function(err, sessions) { + const userIdString = userId + ""; + common.db.collection('sessions_').find({"session": { $regex: userIdString }}).toArray(function(err, sessions) { var delete_us = []; sessions = sessions || []; @@ -317,7 +318,7 @@ function killAllSessionForUser(userId) { catch (SyntaxError) { console.log('Parse ' + sessions[i].session + ' JSON failed'); } - if (parsed_data && parsed_data.uid === userId) { + if (parsed_data && (parsed_data.uid + "") === userIdString) { delete_us.push(sessions[i]._id); } } @@ -730,8 +731,10 @@ function argon2Hash(str) { function verifyMemberArgon2Hash(username, password, callback) { common.db.collection('members').findOne({$and: [{ $or: [ {"username": username}, {"email": username}]}]}, (err, member) => { if (member) { + const secret = countlyConfig.passwordSecret || ""; + const effectivePassword = password + secret; if (isArgon2Hash(member.password)) { - verifyArgon2Hash(member.password, password).then(match => { + verifyArgon2Hash(member.password, effectivePassword).then(match => { if (match) { callback(undefined, member); } @@ -959,7 +962,7 @@ usersApi.saveNote = async function(params) { } common.db.collection('notes').insert(note, (_err) => { if (_err) { - common.returnMessage(params, 503, 'Insert Note failed.'); + return common.returnMessage(params, 503, 'Insert Note failed.'); } common.returnMessage(params, 200, 'Success'); }); @@ -1051,7 +1054,7 
@@ usersApi.fetchUserAppIds = async function(params) { * @returns {boolean} true **/ usersApi.fetchNotes = async function(params) { - countlyCommon.getPeriodObj(params); + const periodObj = countlyCommon.getPeriodObj(params); // const timestampRange = countlyCommon.getTimestampRangeQuery(params, false); let appIds = []; @@ -1073,7 +1076,7 @@ usersApi.fetchNotes = async function(params) { } const query = { 'app_id': {$in: filteredAppIds}, - 'ts': {$gte: params.qstring.period[0], $lte: params.qstring.period[1]}, + 'ts': {$gte: periodObj.start, $lte: periodObj.end}, $or: [ {'owner': params.member._id + ""}, {'noteType': 'public'}, @@ -1169,4 +1172,4 @@ usersApi.ackNotification = function(params) { }); }; -module.exports = usersApi; \ No newline at end of file +module.exports = usersApi; diff --git a/api/utils/common.js b/api/utils/common.js index 22bc16ba020..09438d73845 100644 --- a/api/utils/common.js +++ b/api/utils/common.js @@ -3303,7 +3303,9 @@ class DataTable { */ _getSearchField() { if (this.searchStrategy === "regex") { - return {$regex: this.searchTerm, $options: 'i'}; + const term = String(this.searchTerm); + const escaped = term.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); + return ({$regex: escaped, $options: 'i'}); } return this.searchTerm; } diff --git a/api/utils/mutationManager.ts b/api/utils/mutationManager.ts index 715e97b6f3a..28215c1db59 100644 --- a/api/utils/mutationManager.ts +++ b/api/utils/mutationManager.ts @@ -137,6 +137,68 @@ plugins.register('/core/update_granular_data', async function(ob: { db: string; }); }); +/** + * Validate that a native ClickHouse SQL statement is a safe mutation. + * Only allows: ALTER TABLE ... DELETE ... WHERE ... or ALTER TABLE ... UPDATE ... WHERE ... + * Rejects multi-statement SQL, forbidden DDL (DROP, TRUNCATE, RENAME, DETACH, ATTACH). 
+ */ +function isSafeNativeChMutation(sql: string): string | null { + if (!sql || typeof sql !== 'string') { + return 'missing_or_invalid_sql'; + } + const trimmed = sql.trim(); + if (!trimmed) { + return 'empty_sql'; + } + // Reject multi-statement: only allow optional trailing semicolon + const firstSemicolon = trimmed.indexOf(';'); + if (firstSemicolon !== -1 && firstSemicolon !== trimmed.length - 1) { + return 'multiple_statements_not_allowed'; + } + const upper = trimmed.replace(/\s+/g, ' ').toUpperCase(); + // Reject forbidden DDL + if (/\b(DROP|TRUNCATE|RENAME|DETACH|ATTACH)\b/.test(upper)) { + return 'forbidden_command'; + } + // Must start with ALTER TABLE + if (!upper.startsWith('ALTER TABLE ')) { + return 'only_alter_table_mutations_allowed'; + } + // Must contain DELETE or UPDATE + if (!/\b(DELETE|UPDATE)\b/.test(upper)) { + return 'must_be_delete_or_update_mutation'; + } + // Must have WHERE clause (required for command-id injection) + if (!upper.includes(' WHERE ')) { + return 'missing_where_clause'; + } + return null; +} + +plugins.register('/core/execute_native_ch_mutation', async function(ob: { sql: string; db: string; collection: string; metadata?: Record }) { + const { sql, db, collection, metadata } = ob; + + const validationError = isSafeNativeChMutation(sql); + if (validationError) { + const errMsg = `Native CH mutation rejected: ${validationError}`; + log.e(errMsg); + throw new Error(errMsg); + } + log.d('Mutation (native_ch) queued:' + JSON.stringify({ db, collection })); + const now = Date.now(); + + await common.db.collection('mutation_manager').insertOne({ + db, + collection, + type: 'native_ch', + native_sql: sql, + query: metadata || {}, + ts: now, + status: MUTATION_STATUS.QUEUED, + running: false + }); +}); + plugins.register('/system/observability/collect', async function(ob: { params?: Params }): Promise { try { const filters = buildQueueFilters(ob && ob.params); diff --git a/api/utils/requestProcessor.js 
b/api/utils/requestProcessor.js index 511208d6494..08d75bdb496 100644 --- a/api/utils/requestProcessor.js +++ b/api/utils/requestProcessor.js @@ -12,7 +12,6 @@ */ const Promise = require('bluebird'); -const url = require('url'); const common = require('./common.js'); const countlyCommon = require('../lib/countly.common.js'); const { validateAppAdmin, validateUser, validateRead, validateUserForRead, validateUserForWrite, validateGlobalAdmin, dbUserHasAccessToCollection, validateUpdate, validateDelete, validateCreate, getBaseAppFilter } = require('./rights.js'); @@ -115,13 +114,14 @@ const processRequest = (params) => { return common.returnMessage(params, 400, "Please provide request data"); } - const urlParts = url.parse(params.req.url, true), - queryString = urlParts.query, - paths = urlParts.pathname.split("/"); - params.href = urlParts.href; + // base URL is required by WHATWG URL API for relative paths, only pathname and query are used + const parsedUrl = new URL(params.req.url, 'http://localhost'), + queryString = Object.fromEntries(parsedUrl.searchParams), + paths = parsedUrl.pathname.split("/"); + params.href = parsedUrl.pathname + parsedUrl.search; params.qstring = params.qstring || {}; params.res = params.res || {}; - params.urlParts = urlParts; + params.urlParts = { pathname: parsedUrl.pathname, path: parsedUrl.pathname + parsedUrl.search }; params.paths = paths; //request object fillers @@ -181,7 +181,7 @@ const processRequest = (params) => { validateUserForDataWriteAPI: validateUserForDataWriteAPI, validateUserForGlobalAdmin: validateUserForGlobalAdmin, paths: paths, - urlParts: urlParts + urlParts: params.urlParts }); if (!params.cancelRequest) { @@ -1614,25 +1614,35 @@ const processRequest = (params) => { eid = eid[0]; var cursor = common.db.collection("exports").find({"_eid": eid}, {"_eid": 0, "_id": 0}); - var options = {"type": "stream", "filename": eid + ".json", params: params}; params.res.writeHead(200, { - 'Content-Type': 'application/x-gzip', - 
'Content-Disposition': 'inline; filename="' + eid + '.json' + 'Content-Type': 'application/json', + 'Content-Disposition': 'inline; filename="' + eid + '.json"' }); - options.streamOptions = {}; - if (options.type === "stream" || options.type === "json") { - options.streamOptions.transform = function(doc) { + + var isFirst = true; + params.res.write('['); + cursor.forEach(function(doc) { + if (doc) { doc._id = doc.__id; delete doc.__id; - return JSON.stringify(doc); - }; - } - - options.output = options.output || function(stream) { - countlyApi.data.exports.stream(options.params, stream, options); - }; - options.output(cursor); - + if (!isFirst) { + params.res.write(','); + } + isFirst = false; + params.res.write(JSON.stringify(doc)); + } + }).then(function() { + params.res.write(']'); + params.res.end(); + }).catch(function(err) { + log.e('Error streaming export data:', err); + if (!params.res.headersSent) { + common.returnMessage(params, 500, 'Error streaming export data'); + } + else { + params.res.end(); + } + }); } else { diff --git a/api/utils/taskmanager.js b/api/utils/taskmanager.js index a1a7c7578f4..fc1cb0a704e 100644 --- a/api/utils/taskmanager.js +++ b/api/utils/taskmanager.js @@ -500,6 +500,9 @@ var taskmanager = { */ nameResult: function(options, data, callback) { options.db = options.db || common.db; + if (typeof data === 'function' && callback === undefined) { + callback = data; + } options.db.collection('long_tasks').update({_id: options.id}, {$set: {name: options.name}}, {'upsert': false}, callback); }, diff --git a/bin/commands/countly.sh b/bin/commands/countly.sh index 81be212b6f6..b5cef771475 100755 --- a/bin/commands/countly.sh +++ b/bin/commands/countly.sh @@ -121,6 +121,8 @@ countly_upgrade (){ fi (cd "$DIR/../.." 
; + echo "Installing plugins..."; + node "$DIR/../scripts/install_plugins.js" --skip-production; echo "Preparing production files..."; countly task dist-all; echo "Restarting Countly..."; diff --git a/bin/config/nginx.server.block.conf b/bin/config/nginx.server.block.conf index f1937922a0d..981f7429cc7 100644 --- a/bin/config/nginx.server.block.conf +++ b/bin/config/nginx.server.block.conf @@ -88,6 +88,8 @@ server { # /at/* # /campaign/* # /dashboards/images/screenshots/screenshot_*.png + # /_external/content/ --> for content blocks + # /_external/content/asset/ --> for content block images location ~ (/pixel.png|/images/pre-login/countly-logo-dark.svg|/images/pre-login/countly-logo.svg|/images/dashboard/countly_logo.svg|/sdk/web/countly.min.js|/views/heatmap.js|/views/javascripts/simpleheat.js|/views/stylesheets/heatmap.css|/stylesheets/font-awesome/css/*|/stylesheets/font-awesome/fonts/*|/fonts/*|/surveys/*|/stylesheets/ionicons/*|/feedback|/feedback/*|/star-rating/stylesheets/countly-feedback-web.css|/star-rating/stylesheets/countly-feedback.css|/star-rating/javascripts/tippy.all.min.js|/star-rating/images/star-rating/*|/javascripts/dom/jquery/*|/stylesheets/font-awesome/css/*|/stylesheets/font-awesome/fonts/*|/fonts/*|/stylesheets/ionicons/*|/at/*|/campaign/*|/images/dashboard/logo.png|/appimages/*.png|/dashboards/images/screenshots/screenshot_*.png) { if ($http_content_type = "text/ping") { diff --git a/bin/upgrade/DEV/add_creation_date_for_existing_alerts.js b/bin/upgrade/26.01/scripts/add_creation_date_for_existing_alerts.js similarity index 90% rename from bin/upgrade/DEV/add_creation_date_for_existing_alerts.js rename to bin/upgrade/26.01/scripts/add_creation_date_for_existing_alerts.js index 7ad693b0331..ccba03a9b6d 100644 --- a/bin/upgrade/DEV/add_creation_date_for_existing_alerts.js +++ b/bin/upgrade/26.01/scripts/add_creation_date_for_existing_alerts.js @@ -1,6 +1,6 @@ // Script that adds creation date for existing alerts. 
-const pluginManager = require('../../../plugins/pluginManager.js'); +const pluginManager = require('../../../../plugins/pluginManager.js'); pluginManager.dbConnection().then(async(countlyDb) => { try { diff --git a/bin/upgrade/dev/scripts/merge_views_collections.js b/bin/upgrade/26.01/scripts/merge_views_collections.js similarity index 100% rename from bin/upgrade/dev/scripts/merge_views_collections.js rename to bin/upgrade/26.01/scripts/merge_views_collections.js diff --git a/bin/upgrade/DEV/scripts/remove_unused_configs.js b/bin/upgrade/26.01/scripts/remove_unused_configs.js similarity index 100% rename from bin/upgrade/DEV/scripts/remove_unused_configs.js rename to bin/upgrade/26.01/scripts/remove_unused_configs.js diff --git a/bin/upgrade/DEV/scripts/update_content_cooldown_settings.js b/bin/upgrade/26.01/scripts/update_content_cooldown_settings.js similarity index 100% rename from bin/upgrade/DEV/scripts/update_content_cooldown_settings.js rename to bin/upgrade/26.01/scripts/update_content_cooldown_settings.js diff --git a/bin/upgrade/DEV/scripts/update_journey_versions.js b/bin/upgrade/26.01/scripts/update_journey_versions.js similarity index 100% rename from bin/upgrade/DEV/scripts/update_journey_versions.js rename to bin/upgrade/26.01/scripts/update_journey_versions.js diff --git a/bin/upgrade/DEV/upgrade.sh b/bin/upgrade/26.01/upgrade.sh similarity index 100% rename from bin/upgrade/DEV/upgrade.sh rename to bin/upgrade/26.01/upgrade.sh diff --git a/bin/upgrade/DEV/upgrade_db.sh b/bin/upgrade/26.01/upgrade_db.sh similarity index 82% rename from bin/upgrade/DEV/upgrade_db.sh rename to bin/upgrade/26.01/upgrade_db.sh index 0c7d233ff3f..610c241ce1b 100644 --- a/bin/upgrade/DEV/upgrade_db.sh +++ b/bin/upgrade/26.01/upgrade_db.sh @@ -1,6 +1,6 @@ #!/bin/bash -VER="25.06" +VER="26.01" CONTINUE="$(countly check before upgrade db "$VER")" @@ -31,8 +31,10 @@ then nodejs "$DIR/scripts/add_indexes.js" #run upgrade scripts - nodejs 
"$SCRIPTS/scripts/merge_events_collections.js" - + nodejs "$SCRIPTS/scripts/add_creation_date_for_existing_alerts.js" + nodejs "$SCRIPTS/scripts/merge_views_collections.js" + nodejs "$SCRIPTS/scripts/remove_unused_configs.js" + nodejs "$SCRIPTS/scripts/update_content_cooldown_settings.js" if [ "$1" != "combined" ]; then countly upgrade; diff --git a/bin/upgrade/DEV/upgrade_fs.sh b/bin/upgrade/26.01/upgrade_fs.sh similarity index 98% rename from bin/upgrade/DEV/upgrade_fs.sh rename to bin/upgrade/26.01/upgrade_fs.sh index 20455907020..bf9d9cfba79 100644 --- a/bin/upgrade/DEV/upgrade_fs.sh +++ b/bin/upgrade/26.01/upgrade_fs.sh @@ -2,7 +2,7 @@ echo "Running filesystem modifications" -VER="25.06" +VER="26.01" CONTINUE="$(countly check before upgrade fs "$VER")" @@ -24,6 +24,7 @@ then bash "$DIR/scripts/detect.init.sh" rm -rf "$DIR/../plugins/old-ui-compatibility" + countly plugin enable content; countly plugin enable journey_engine; diff --git a/bin/upgrade/dev/scripts/update_drill_collections.js b/bin/upgrade/dev/scripts/update_drill_collections.js deleted file mode 100644 index 1d9018a1fdb..00000000000 --- a/bin/upgrade/dev/scripts/update_drill_collections.js +++ /dev/null @@ -1,25 +0,0 @@ -const { first } = require("underscore"); -const common = require("../../../../api/utils/common"); -var pluginManager = require("../../../pluginManager.js"); -Promise.all( - [ - pluginManager.dbConnection("countly"), - pluginManager.dbConnection("countly_drill") - ]) - .spread(async function(countlyDB, drill_db) { - console.log("Fixing viws events"); - - await common.drill_db.updateMany({"e": "[CLY]_view", "n": {"$exists": false}}, [{"$set": {"n": "$sg.name"}}]); - await common.drill_db.updateMany({"e": "[CLY]_action", "n": {"$exists": false}}, {"$set": {"n": "$sg.view"}}); - - //Fixing custom events - //get list of custom events. 
Run update query for each - var events = await countlyDB.collection("events").find({}, {_id: 1, list: 1}).toArray(); - for (var z = 0; z < events.list.length; z++) { - var aa = await common.drill_db.updateMany({"a": events._id + "", "e": events.list[z], "n": {"$exists": false}}, {"$set": {"n": event.name, "e": "[CLY]_custom"}}); - } - - }).catch(function(err) { - console.log(err); - } - ); \ No newline at end of file diff --git a/frontend/express/app.js b/frontend/express/app.js index 0391cf6bde9..7f278cc067f 100644 --- a/frontend/express/app.js +++ b/frontend/express/app.js @@ -64,7 +64,6 @@ var versionInfo = require('./version.info'), request = require('countly-request')(plugins.getConfig("security")), countlyConfig = require('./config', 'dont-enclose'), log = require('../../api/utils/log.js')('core:app'), - url = require('url'), /** @type {import('../../types/authorizer').Authorizer} */ authorize = require('../../api/utils/authorizer.js'), //for token validations languages = require('../../frontend/express/locale.conf'), @@ -116,7 +115,8 @@ function paramsGenerator(obj) { req: obj.req, res: obj.res, qstring: obj.req.query, - fullPath: url.parse(obj.req.url, true).pathname + // base URL is required by WHATWG URL API for relative paths, only pathname is used + fullPath: new URL(obj.req.url, 'http://localhost').pathname }; params.qstring.auth_token = obj.req.session.auth_token; @@ -836,6 +836,7 @@ Promise.all([plugins.dbConnection(countlyConfig), plugins.dbConnection("countly_ app.post('*', checkRequestForSession); app.get(countlyConfig.path + '/logout', function(req, res) { + membersUtility.logout(req, res); if (req.query.message) { res.redirect(countlyConfig.path + '/login?message=' + req.query.message); } diff --git a/frontend/express/libs/members.js b/frontend/express/libs/members.js index 1a8dd173acd..c2b177330f6 100755 --- a/frontend/express/libs/members.js +++ b/frontend/express/libs/members.js @@ -19,7 +19,6 @@ var common = 
require('./../../../api/utils/common.js'); var plugins = require('./../../../plugins/pluginManager.ts'); var configs = require('./../config', 'dont-enclose'); var countlyMail = require('./../../../api/parts/mgmt/mail.js'); -var url = require('url'); var crypto = require('crypto'); var argon2 = require('argon2'); @@ -478,7 +477,8 @@ function killOtherSessionsForUser(userId, my_token, my_session, countlyDb) { membersUtility.loginWithToken = function(req, callback) { var token = req.params.token; var pathUrl = req.url.replace(membersUtility.countlyConfig.path, ""); - var urlParts = url.parse(pathUrl, true); + // base URL is required by WHATWG URL API for relative paths, only pathname is used + var urlParts = new URL(pathUrl, 'http://localhost'); var fullPath = urlParts.pathname; authorize.verify_return({ @@ -766,6 +766,10 @@ membersUtility.reset = function(req, callback) { var secret = membersUtility.countlyConfig.passwordSecret || ""; argon2Hash(req.body.password + secret).then(password => { membersUtility.db.collection('password_reset').findOne({ prid: req.body.prid }, function(err, passwordReset) { + if (err || !passwordReset || !passwordReset.user_id) { + callback(false, undefined); + return; + } membersUtility.db.collection('members').findAndModify({ _id: passwordReset.user_id }, {}, { '$set': { "password": password } }, function(err2, member) { member = member && member.ok ? 
member.value : null; killOtherSessionsForUser(passwordReset.user_id + "", null, null, membersUtility.db); diff --git a/frontend/express/public/core/health-manager/javascripts/countly.views.js b/frontend/express/public/core/health-manager/javascripts/countly.views.js index b4e6df985af..d53849218a7 100644 --- a/frontend/express/public/core/health-manager/javascripts/countly.views.js +++ b/frontend/express/public/core/health-manager/javascripts/countly.views.js @@ -577,7 +577,7 @@ var AggregatorStatusView = countlyVue.views.create({ ]; }, hasKafkaData: function() { - return this.kafkaEnabled && (this.aggregatorPartitions.length > 0 || this.aggregatorConsumers.length > 0); + return this.kafkaEnabled && ((this.aggregatorPartitions && this.aggregatorPartitions.length > 0) || (this.aggregatorConsumers && this.aggregatorConsumers.length > 0)); }, hasAggregatorData: function() { return this.aggregatorData && this.aggregatorData.length > 0; diff --git a/frontend/express/public/core/health-manager/templates/aggregator-status.html b/frontend/express/public/core/health-manager/templates/aggregator-status.html index b37063d486d..85fb63c14b1 100644 --- a/frontend/express/public/core/health-manager/templates/aggregator-status.html +++ b/frontend/express/public/core/health-manager/templates/aggregator-status.html @@ -38,7 +38,7 @@ - + - +