diff --git a/.github/workflows/ci-test-e2e-vite.yml b/.github/workflows/ci-test-e2e-vite.yml
new file mode 100644
index 0000000000000..43f85fc57eded
--- /dev/null
+++ b/.github/workflows/ci-test-e2e-vite.yml
@@ -0,0 +1,368 @@
+name: Tests E2E (Vite)
+
+on:
+ workflow_call:
+ inputs:
+ node-version:
+ required: true
+ type: string
+ deno-version:
+ required: true
+ type: string
+ lowercase-repo:
+ required: true
+ type: string
+ gh-docker-tag:
+ required: true
+ type: string
+ enterprise-license:
+ type: string
+ transporter:
+ type: string
+ mongodb-version:
+ default: "['8.0']"
+ required: false
+ type: string
+ release:
+ required: true
+ type: string
+ shard:
+ default: "[1]"
+ required: false
+ type: string
+ total-shard:
+ default: 1
+ required: false
+ type: number
+ retries:
+ default: 0
+ required: false
+ type: number
+ type:
+ required: true
+ type: string
+ coverage:
+ required: false
+ type: string
+ secrets:
+ CR_USER:
+ required: true
+ CR_PAT:
+ required: true
+ NPM_TOKEN:
+ required: false
+ QASE_API_TOKEN:
+ required: false
+ REPORTER_ROCKETCHAT_URL:
+ required: false
+ REPORTER_ROCKETCHAT_API_KEY:
+ required: false
+ CODECOV_TOKEN:
+ required: false
+ REPORTER_JIRA_ROCKETCHAT_API_KEY:
+ required: false
+
+env:
+ MONGO_URL: mongodb://localhost:27017/rocketchat?replicaSet=rs0&directConnection=true
+ TOOL_NODE_FLAGS: ${{ vars.TOOL_NODE_FLAGS }}
+ LOWERCASE_REPOSITORY: ${{ inputs.lowercase-repo }}
+ DOCKER_TAG: ${{ inputs.gh-docker-tag }}-amd64
+
+jobs:
+ test:
+ runs-on: ubuntu-24.04
+
+ env:
+ GITHUB_WORKSPACE: ${{ github.workspace }}
+ DOCKER_TAG_SUFFIX_ROCKETCHAT: ${{ inputs.coverage == matrix.mongodb-version && (github.event_name == 'release' || github.ref == 'refs/heads/develop') && '-cov' || '' }}
+ MONGODB_VERSION: ${{ matrix.mongodb-version }}
+ COVERAGE_DIR: "/tmp/coverage/${{ inputs.type }}"
+ COVERAGE_FILE_NAME: "${{ inputs.type }}-${{ matrix.shard }}.json"
+ COVERAGE_REPORTER: ${{ inputs.coverage == matrix.mongodb-version && 'json' || '' }}
+
+ strategy:
+ fail-fast: false
+ matrix:
+ mongodb-version: ${{ fromJSON(inputs.mongodb-version) }}
+ shard: ${{ fromJSON(inputs.shard) }}
+
+ name: MongoDB ${{ matrix.mongodb-version }}${{ inputs.coverage == matrix.mongodb-version && ' coverage' || '' }} (${{ matrix.shard }}/${{ inputs.total-shard }}) [Vite]
+
+ steps:
+ - name: Collect Workflow Telemetry
+ if: inputs.type == 'perf'
+ uses: catchpoint/workflow-telemetry-action@v2
+ with:
+ theme: dark
+ job_summary: true
+ comment_on_pr: false
+
+ - name: Setup kernel limits
+ run: |
+ echo "500 65535" > sudo tee -a /proc/sys/net/ipv4/ip_local_port_range
+ sudo sysctl -w net.ipv4.tcp_mem="383865 511820 2303190"
+
+ echo fs.file-max=20000500 | sudo tee -a /etc/sysctl.conf
+ echo fs.nr_open=20000500 | sudo tee -a /etc/sysctl.conf
+ sudo sysctl -p
+
+ - name: Free disk space
+ run: |
+ sudo rm -rf /usr/share/dotnet
+ sudo rm -rf /opt/ghc
+ sudo rm -rf /usr/local/share/boost
+ sudo rm -rf "$AGENT_TOOLSDIRECTORY"
+ sudo docker system prune -af
+ df -h
+
+ - name: Login to GitHub Container Registry
+ if: (github.event.pull_request.head.repo.full_name == github.repository || github.event_name == 'release' || github.ref == 'refs/heads/develop') && github.actor != 'dependabot[bot]'
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: ${{ secrets.CR_USER }}
+ password: ${{ secrets.CR_PAT }}
+
+ - uses: actions/checkout@v6
+
+ - name: Setup NodeJS
+ uses: ./.github/actions/setup-node
+ with:
+ node-version: ${{ inputs.node-version }}
+ deno-version: ${{ inputs.deno-version }}
+ cache-modules: true
+ install: true
+ NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
+
+ - uses: rharkor/caching-for-turbo@v1.8
+
+ - name: Restore packages build
+ uses: actions/download-artifact@v7
+ with:
+ name: packages-build
+ path: /tmp
+
+ - name: Unpack packages build
+ shell: bash
+ run: |
+ tar -xzf /tmp/RocketChat-packages-build.tar.gz -C .
+
+ - name: Build Vite frontend
+ working-directory: ./apps/meteor
+ env:
+ VITE_E2E_COVERAGE: ${{ inputs.coverage == matrix.mongodb-version && 'true' || '' }}
+ run: |
+ ROOT_URL=http://localhost:3000/ VITE_TEST_MODE=true npx vite build --outDir /tmp/build/dist
+
+ - name: Install Meteor
+ shell: bash
+ run: |
+ # Restore bin from cache
+ set +e
+ METEOR_SYMLINK_TARGET=$(readlink ~/.meteor/meteor)
+ METEOR_TOOL_DIRECTORY=$(dirname "$METEOR_SYMLINK_TARGET")
+ set -e
+ LAUNCHER=$HOME/.meteor/$METEOR_TOOL_DIRECTORY/scripts/admin/launch-meteor
+ if [ -e "$LAUNCHER" ]
+ then
+ echo "Cached Meteor bin found, restoring it"
+ sudo cp "$LAUNCHER" "/usr/local/bin/meteor"
+ else
+ echo "No cached Meteor bin found."
+ fi
+
+ # only install meteor if bin isn't found
+ command -v meteor >/dev/null 2>&1 || curl https://install.meteor.com | sed s/--progress-bar/-sL/g | /bin/sh
+
+ - name: Build Meteor backend
+ working-directory: ./apps/meteor
+ run: |
+ mkdir -p /tmp/build
+ meteor build --server-only --directory /tmp/build
+
+ # Download Docker images from build artifacts (if needed for EE services)
+ - name: Download Docker images
+ uses: actions/download-artifact@v7
+ if: inputs.release == 'ee' && github.event.pull_request.head.repo.full_name != github.repository && github.event_name != 'release' && github.ref != 'refs/heads/develop'
+ with:
+ pattern: "docker-image-*-amd64-coverage"
+ path: /tmp/docker-images
+ merge-multiple: true
+
+ - name: Load Docker images
+ if: inputs.release == 'ee' && github.event.pull_request.head.repo.full_name != github.repository && github.event_name != 'release' && github.ref != 'refs/heads/develop'
+ shell: bash
+ run: |
+ set -o xtrace
+
+ for image_file in /tmp/docker-images/*.tar; do
+ if [ -f "$image_file" ]; then
+ echo "Loading image from $image_file"
+ docker load -i "$image_file"
+ rm "$image_file"
+ fi
+ done
+
+ docker images
+
+ - name: Set DEBUG_LOG_LEVEL (debug enabled)
+ if: runner.debug == '1'
+ run: echo "DEBUG_LOG_LEVEL=2" >> "$GITHUB_ENV"
+
+ - name: Start httpbin container and wait for it to be ready
+ if: inputs.type == 'api'
+ run: |
+ docker compose -f docker-compose-ci-vite.yml up -d httpbin
+
+ - name: Prepare code coverage directory
+ run: |
+ set -o xtrace
+
+ mkdir -p "$COVERAGE_DIR"
+ chmod 777 "$COVERAGE_DIR"
+
+ - name: Start containers for CE
+ if: inputs.release == 'ce'
+ run: |
+ DEBUG_LOG_LEVEL=${DEBUG_LOG_LEVEL:-0} docker compose -f docker-compose-ci-vite.yml up -d rocketchat frontend --wait
+
+ - name: Start containers for EE
+ if: inputs.release == 'ee'
+ env:
+ ENTERPRISE_LICENSE: ${{ inputs.enterprise-license }}
+ TRANSPORTER: ${{ inputs.transporter }}
+ run: |
+ DEBUG_LOG_LEVEL=${DEBUG_LOG_LEVEL:-0} docker compose -f docker-compose-ci-vite.yml up -d --wait
+
+ - name: Verify Traefik routing
+ run: |
+ set -o xtrace
+ docker ps
+
+ # Give Traefik time to discover services
+ sleep 5
+
+ # Show Traefik discovered routers via API
+ echo "=== Traefik discovered HTTP routers ==="
+ curl -s http://localhost:8081/api/http/routers | jq '.' || echo "Failed to query Traefik API"
+
+ echo "=== Traefik discovered HTTP services ==="
+ curl -s http://localhost:8081/api/http/services | jq '.' || echo "Failed to query Traefik API"
+
+ # Wait for Traefik to discover the frontend service
+ echo "Waiting for frontend to be accessible via Traefik..."
+ for i in {1..30}; do
+ STATUS=$(curl -s -o /dev/null -w "%{http_code}" http://localhost:3000/)
+ echo "Attempt $i: Got status code $STATUS"
+ if [ "$STATUS" = "200" ]; then
+ echo "Frontend is accessible!"
+ break
+ fi
+ if [ "$i" -eq 30 ]; then
+ echo "Frontend not accessible after 30 attempts"
+ echo "=== Traefik logs ==="
+ docker compose -f docker-compose-ci-vite.yml logs traefik
+ echo "=== Frontend logs ==="
+ docker compose -f docker-compose-ci-vite.yml logs frontend
+ echo "=== Curl verbose output ==="
+ curl -v http://localhost:3000/ || true
+ exit 1
+ fi
+ sleep 2
+ done
+
+ - uses: ./.github/actions/setup-playwright
+ if: inputs.type == 'ui'
+
+ - name: Wait services to start up
+ if: inputs.release == 'ee'
+ run: |
+ docker ps
+
+ until docker compose -f docker-compose-ci-vite.yml logs ddp-streamer-service | grep -q "NetworkBroker started successfully"; do
+ echo "Waiting 'ddp-streamer' to start up"
+ ((c++)) && ((c==10)) && docker compose -f docker-compose-ci-vite.yml logs ddp-streamer-service && exit 1
+ sleep 10
+ done;
+
+ - name: Remove unused Docker images
+ run: docker system prune -af
+
+ - name: E2E Test API
+ if: inputs.type == 'api'
+ working-directory: ./apps/meteor
+ env:
+ WEBHOOK_TEST_URL: "http://httpbin"
+ IS_EE: ${{ inputs.release == 'ee' && 'true' || '' }}
+ run: |
+ set -o xtrace
+
+ npm run testapi
+ s=$?
+
+ docker compose -f ../../docker-compose-ci-vite.yml stop
+
+ ls -la "$COVERAGE_DIR"
+ exit "$s"
+
+ - name: E2E Test UI (${{ matrix.shard }}/${{ inputs.total-shard }})
+ if: inputs.type == 'ui'
+ env:
+ E2E_COVERAGE: ${{ inputs.coverage == matrix.mongodb-version && 'true' || '' }}
+ IS_EE: ${{ inputs.release == 'ee' && 'true' || '' }}
+ REPORTER_ROCKETCHAT_API_KEY: ${{ secrets.REPORTER_ROCKETCHAT_API_KEY }}
+ REPORTER_ROCKETCHAT_URL: ${{ secrets.REPORTER_ROCKETCHAT_URL }}
+ REPORTER_JIRA_ROCKETCHAT_API_KEY: ${{ secrets.REPORTER_JIRA_ROCKETCHAT_API_KEY }}
+ REPORTER_ROCKETCHAT_REPORT: ${{ github.event.pull_request.draft != 'true' && secrets.REPORTER_ROCKETCHAT_URL != '' && 'true' || '' }}
+ REPORTER_ROCKETCHAT_RUN: ${{ github.run_number }}
+ REPORTER_ROCKETCHAT_BRANCH: ${{ github.ref }}
+ REPORTER_ROCKETCHAT_DRAFT: ${{ github.event.pull_request.draft }}
+ REPORTER_ROCKETCHAT_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
+ REPORTER_ROCKETCHAT_AUTHOR: ${{ github.event.pull_request.user.login }}
+ REPORTER_ROCKETCHAT_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
+ REPORTER_ROCKETCHAT_PR: ${{ github.event.pull_request.number }}
+ QASE_API_TOKEN: ${{ secrets.QASE_API_TOKEN }}
+ QASE_REPORT: ${{ github.ref == 'refs/heads/develop' && 'true' || '' }}
+ CI: true
+ PLAYWRIGHT_RETRIES: ${{ inputs.retries }}
+ working-directory: ./apps/meteor
+ run: |
+ set -o xtrace
+
+ yarn prepare
+ yarn test:e2e --shard=${{ matrix.shard }}/${{ inputs.total-shard }}
+
+ - name: Merge ui coverage files
+ if: inputs.type == 'ui' && inputs.coverage == matrix.mongodb-version && always()
+ working-directory: ./apps/meteor
+ run: |
+ npx nyc merge .nyc_output "${COVERAGE_DIR}/${COVERAGE_FILE_NAME}"
+ ls -la "$COVERAGE_DIR" || true
+
+ - name: Store playwright test trace
+ if: inputs.type == 'ui' && always()
+ uses: actions/upload-artifact@v7
+ with:
+ name: playwright-test-trace-vite-${{ inputs.release }}-${{ matrix.mongodb-version }}-${{ matrix.shard }}
+ path: ./apps/meteor/tests/e2e/.playwright*
+ include-hidden-files: true
+
+ - name: Show server logs if E2E test failed
+ if: failure()
+ run: docker compose -f docker-compose-ci-vite.yml logs rocketchat frontend authorization-service queue-worker-service ddp-streamer-service account-service presence-service omnichannel-transcript-service
+
+ - name: Show mongo logs if E2E test failed
+ if: failure()
+ run: docker compose -f docker-compose-ci-vite.yml logs mongo
+
+ - name: Show traefik logs if E2E test failed
+ if: failure()
+ run: docker compose -f docker-compose-ci-vite.yml logs traefik
+
+ - name: Store coverage
+ if: inputs.coverage == matrix.mongodb-version && always()
+ uses: actions/upload-artifact@v7
+ with:
+ name: coverage-vite-${{ inputs.type }}-${{ matrix.shard }}
+ path: /tmp/coverage
+ include-hidden-files: true
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 0883725955a49..843b4b26b1fa6 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -550,9 +550,9 @@ jobs:
test-api:
name: 🔨 Test API (CE)
- needs: [checks, build-gh-docker-publish, release-versions]
+ needs: [build-gh-docker-publish, release-versions]
- uses: ./.github/workflows/ci-test-e2e.yml
+ uses: ./.github/workflows/ci-test-e2e-vite.yml
with:
type: api
release: ce
@@ -567,7 +567,7 @@ jobs:
test-api-livechat:
name: 🔨 Test API Livechat (CE)
- needs: [checks, build-gh-docker-publish, release-versions]
+ needs: [build-gh-docker-publish, release-versions]
uses: ./.github/workflows/ci-test-e2e.yml
with:
@@ -584,9 +584,9 @@ jobs:
test-ui:
name: 🔨 Test UI (CE)
- needs: [checks, build-gh-docker-publish, release-versions]
+ needs: [build-gh-docker-publish, release-versions]
- uses: ./.github/workflows/ci-test-e2e.yml
+ uses: ./.github/workflows/ci-test-e2e-vite.yml
with:
type: ui
release: ce
@@ -610,9 +610,9 @@ jobs:
test-api-ee:
name: 🔨 Test API (EE)
- needs: [checks, build-gh-docker-publish, release-versions]
+ needs: [build-gh-docker-publish, release-versions]
- uses: ./.github/workflows/ci-test-e2e.yml
+ uses: ./.github/workflows/ci-test-e2e-vite.yml
with:
type: api
release: ee
@@ -631,7 +631,7 @@ jobs:
test-api-livechat-ee:
name: 🔨 Test API Livechat (EE)
- needs: [checks, build-gh-docker-publish, release-versions]
+ needs: [build-gh-docker-publish, release-versions]
uses: ./.github/workflows/ci-test-e2e.yml
with:
@@ -652,9 +652,9 @@ jobs:
test-ui-ee:
name: 🔨 Test UI (EE)
- needs: [checks, build-gh-docker-publish, release-versions]
+ needs: [build-gh-docker-publish, release-versions]
- uses: ./.github/workflows/ci-test-e2e.yml
+ uses: ./.github/workflows/ci-test-e2e-vite.yml
with:
type: ui
release: ee
@@ -778,6 +778,7 @@ jobs:
name: 📊 Report Coverage
runs-on: ubuntu-24.04
needs: [release-versions, test-api-ee, test-api-livechat-ee, test-ui-ee]
+ if: ${{ !cancelled() }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
@@ -798,8 +799,23 @@ jobs:
run: |
set -o xtrace
- npx nyc report --reporter=lcovonly --report-dir=/tmp/coverage_report/api --temp-dir=/tmp/coverage/api
- npx nyc report --reporter=lcovonly --report-dir=/tmp/coverage_report/ui --temp-dir=/tmp/coverage/ui
+ mkdir -p /tmp/coverage/api /tmp/coverage/ui
+ mkdir -p /tmp/coverage_report/api /tmp/coverage_report/ui
+
+ # Only run nyc report if there are actually files in the directory
+ if [ "$(ls -A /tmp/coverage/api)" ]; then
+ npx nyc report --reporter=lcovonly --report-dir=/tmp/coverage_report/api --temp-dir=/tmp/coverage/api
+ else
+ echo "No API coverage files found, creating empty lcov.info to prevent Codecov upload errors"
+ touch /tmp/coverage_report/api/lcov.info
+ fi
+
+ if [ "$(ls -A /tmp/coverage/ui)" ]; then
+ npx nyc report --reporter=lcovonly --report-dir=/tmp/coverage_report/ui --temp-dir=/tmp/coverage/ui
+ else
+ echo "No UI coverage files found, creating empty lcov.info"
+ touch /tmp/coverage_report/ui/lcov.info
+ fi
- name: Store coverage-reports
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
diff --git a/apps/meteor/.docker/Dockerfile.backend b/apps/meteor/.docker/Dockerfile.backend
new file mode 100644
index 0000000000000..35fb06b70226b
--- /dev/null
+++ b/apps/meteor/.docker/Dockerfile.backend
@@ -0,0 +1,45 @@
+FROM node:22.16.0-alpine3.22 AS builder
+
+ENV LANG=C.UTF-8
+
+RUN apk add --no-cache python3 make g++ py3-setuptools libc6-compat
+
+COPY . /app
+
+ENV NODE_ENV=production
+
+RUN cd /app/bundle/programs/server \
+ && npm install --omit=dev \
+ && npm uninstall sharp \
+ && npm install --omit=dev sharp@0.33.5
+
+FROM node:22.16.0-alpine3.22
+
+LABEL maintainer="buildmaster@rocket.chat"
+
+ENV LANG=C.UTF-8
+
+RUN apk add --no-cache shadow deno ttf-dejavu \
+ && apk upgrade --no-cache openssl \
+ && groupmod -n rocketchat nogroup \
+ && useradd -u 65533 -r -g rocketchat rocketchat
+
+ENV DEPLOY_METHOD=docker \
+ NODE_ENV=production \
+ MONGO_URL=mongodb://mongo:27017/rocketchat \
+ HOME=/tmp \
+ PORT=3000 \
+ ROOT_URL=http://localhost:3000 \
+ Accounts_AvatarStorePath=/app/uploads
+
+USER rocketchat
+
+COPY --from=builder --chown=rocketchat:rocketchat /app /app
+
+VOLUME /app/uploads
+
+WORKDIR /app/bundle
+
+EXPOSE 3000
+
+CMD ["node", "main.js"]
diff --git a/apps/meteor/.docker/Dockerfile.frontend b/apps/meteor/.docker/Dockerfile.frontend
new file mode 100644
index 0000000000000..ce24d1de01ab3
--- /dev/null
+++ b/apps/meteor/.docker/Dockerfile.frontend
@@ -0,0 +1,15 @@
+FROM nginx:alpine-slim
+
+RUN rm -rf /etc/nginx/conf.d/*
+
+COPY dist/nginx.conf /etc/nginx/conf.d/default.conf
+
+RUN rm -f dist/nginx.conf
+
+RUN rm -rf /usr/share/nginx/html/*
+
+COPY dist /usr/share/nginx/html
+
+EXPOSE 80
+
+CMD ["nginx", "-g", "daemon off;"]
diff --git a/apps/meteor/.gitignore b/apps/meteor/.gitignore
index 6411fe002c516..e9a54923c91ff 100644
--- a/apps/meteor/.gitignore
+++ b/apps/meteor/.gitignore
@@ -80,6 +80,7 @@ tests/end-to-end/temporary_staged_test
/tests/e2e/.playwright
coverage
.nyc_output
+.nyc_cache
/data
tests/e2e/test-failures/
out.txt
@@ -88,3 +89,5 @@ dist
matrix-federation-config/*
.eslintcache
tsconfig.typecheck.tsbuildinfo
+.vite-inspect
+.build
\ No newline at end of file
diff --git a/apps/meteor/.nycrc.json b/apps/meteor/.nycrc.json
new file mode 100644
index 0000000000000..de177485348aa
--- /dev/null
+++ b/apps/meteor/.nycrc.json
@@ -0,0 +1,26 @@
+{
+ "report-dir": "./coverage",
+ "temp-dir": "./.nyc_output",
+ "reporter": ["html", "lcov", "text", "text-summary"],
+ "extension": [".ts", ".tsx", ".js", ".jsx"],
+ "exclude": [
+ "**/*.spec.ts",
+ "**/*.test.ts",
+ "**/*.spec.js",
+ "**/*.test.js",
+ "**/*.stories.tsx",
+ "**/*.stories.ts",
+ "tests/**",
+ "node_modules/**",
+ "**/*.d.ts",
+ "**/mocks/**",
+ "**/fixtures/**",
+ "**/__mocks__/**"
+ ],
+ "all": false,
+ "check-coverage": false,
+ "sourceMap": true,
+ "instrument": false,
+ "cache": true,
+ "cacheDir": "./.nyc_cache"
+}
diff --git a/apps/meteor/app/utils/client/getURL.ts b/apps/meteor/app/utils/client/getURL.ts
index b427c7110ccef..37fc0f621569f 100644
--- a/apps/meteor/app/utils/client/getURL.ts
+++ b/apps/meteor/app/utils/client/getURL.ts
@@ -17,9 +17,18 @@ export const getURL = function (
const cdnPrefix = settings.watch('CDN_PREFIX') || '';
const siteUrl = settings.watch('Site_Url') || '';
+ const isLocalhost =
+ typeof window !== 'undefined' &&
+ (window.location.hostname === 'localhost' || window.location.hostname === '127.0.0.1');
+
+ const runtimeRootUrl =
+ typeof __meteor_runtime_config__ !== 'undefined' ? __meteor_runtime_config__.ROOT_URL || window.location.origin : '';
+
+ const resolvedSiteUrl = params.full && isLocalhost ? runtimeRootUrl : siteUrl;
+
if (cacheKey) {
path += `${path.includes('?') ? '&' : '?'}cacheKey=${Info.version}`;
}
- return getURLWithoutSettings(path, params, cdnPrefix, siteUrl, cloudDeepLinkUrl);
+ return getURLWithoutSettings(path, params, cdnPrefix, resolvedSiteUrl, cloudDeepLinkUrl);
};
diff --git a/apps/meteor/client/lib/e2ee/rocketchat.e2e.ts b/apps/meteor/client/lib/e2ee/rocketchat.e2e.ts
index c5418ef9967cc..3d54437910091 100644
--- a/apps/meteor/client/lib/e2ee/rocketchat.e2e.ts
+++ b/apps/meteor/client/lib/e2ee/rocketchat.e2e.ts
@@ -1,6 +1,3 @@
-import QueryString from 'querystring';
-import URL from 'url';
-
import type { IE2EEMessage, IMessage, IRoom, IUser, IUploadWithUser, Serialized, IE2EEPinnedMessage } from '@rocket.chat/core-typings';
import { isE2EEMessage, isEncryptedMessageContent } from '@rocket.chat/core-typings';
import { Emitter } from '@rocket.chat/emitter';
@@ -721,13 +718,13 @@ class E2E extends Emitter {
return;
}
- const urlObj = URL.parse(url);
+ const urlObj = new URL(url);
// if the URL doesn't have query params (doesn't reference message) skip
- if (!urlObj.query) {
+ if (!urlObj?.searchParams) {
return;
}
- const { msg: msgId } = QueryString.parse(urlObj.query);
+ const { msg: msgId } = Object.fromEntries(urlObj.searchParams.entries());
if (!msgId || Array.isArray(msgId)) {
return;
diff --git a/apps/meteor/client/noop.ts b/apps/meteor/client/noop.ts
new file mode 100644
index 0000000000000..61c00543350ff
--- /dev/null
+++ b/apps/meteor/client/noop.ts
@@ -0,0 +1,3 @@
+console.error('The frontend is disabled in this Meteor build.');
+
+export {};
\ No newline at end of file
diff --git a/apps/meteor/client/views/room/Header/ParentRoom/ParentDiscussion/ParentDiscussion.tsx b/apps/meteor/client/views/room/Header/ParentRoom/ParentDiscussion/ParentDiscussion.tsx
index 09375e137d1de..bca7577786ce5 100644
--- a/apps/meteor/client/views/room/Header/ParentRoom/ParentDiscussion/ParentDiscussion.tsx
+++ b/apps/meteor/client/views/room/Header/ParentRoom/ParentDiscussion/ParentDiscussion.tsx
@@ -1,7 +1,7 @@
import type { IRoom } from '@rocket.chat/core-typings';
+import { useRoomRoute } from '@rocket.chat/ui-contexts';
import { useTranslation } from 'react-i18next';
-import { roomCoordinator } from '../../../../../lib/rooms/roomCoordinator';
import ParentRoomButton from '../ParentRoomButton';
type ParentDiscussionProps = {
@@ -11,8 +11,12 @@ type ParentDiscussionProps = {
const ParentDiscussion = ({ loading = false, room }: ParentDiscussionProps) => {
const { t } = useTranslation();
- const roomName = roomCoordinator.getRoomName(room.t, room);
- const handleRedirect = (): void => roomCoordinator.openRouteLink(room.t, { rid: room._id, ...room });
+ const goToRoom = useRoomRoute();
+ const roomName = room.fname || room.name || '';
+
+ const handleRedirect = (): void => {
+ goToRoom({ rid: room._id, t: room.t, name: room.name });
+ };
return ;
};
diff --git a/apps/meteor/client/views/room/body/RoomForeword/RoomForewordUsernameList.tsx b/apps/meteor/client/views/room/body/RoomForeword/RoomForewordUsernameList.tsx
index 015cc59169368..2c8afdaa6dba0 100644
--- a/apps/meteor/client/views/room/body/RoomForeword/RoomForewordUsernameList.tsx
+++ b/apps/meteor/client/views/room/body/RoomForeword/RoomForewordUsernameList.tsx
@@ -1,19 +1,21 @@
import type { IUser } from '@rocket.chat/core-typings';
import { Margins } from '@rocket.chat/fuselage';
+import { useRouter } from '@rocket.chat/ui-contexts';
import RoomForewordUsernameListItem from './RoomForewordUsernameListItem';
-import { roomCoordinator } from '../../../../lib/rooms/roomCoordinator';
type RoomForewordUsernameListProps = { usernames: Array> };
const RoomForewordUsernameList = ({ usernames }: RoomForewordUsernameListProps) => {
+ const router = useRouter();
+
return (
{usernames.map((username) => (
))}
diff --git a/apps/meteor/client/views/root/hooks/useSettingsOnLoadSiteUrl.ts b/apps/meteor/client/views/root/hooks/useSettingsOnLoadSiteUrl.ts
index 8b35ed7ad0e38..f1a185ed8ed28 100644
--- a/apps/meteor/client/views/root/hooks/useSettingsOnLoadSiteUrl.ts
+++ b/apps/meteor/client/views/root/hooks/useSettingsOnLoadSiteUrl.ts
@@ -9,6 +9,9 @@ export const useSettingsOnLoadSiteUrl = () => {
if (value == null || value.trim() === '') {
return;
}
+ if (window.location.hostname === 'localhost' || window.location.hostname === '127.0.0.1') {
+ return;
+ }
(window as any).__meteor_runtime_config__.ROOT_URL = value;
}, [siteUrl]);
};
diff --git a/apps/meteor/definition/externals/global.d.ts b/apps/meteor/definition/externals/global.d.ts
index 0a2dc3d1bc923..fba294c6e8d4b 100644
--- a/apps/meteor/definition/externals/global.d.ts
+++ b/apps/meteor/definition/externals/global.d.ts
@@ -9,6 +9,17 @@ declare global {
const __meteor_runtime_config__: {
ROOT_URL_PATH_PREFIX: string;
ROOT_URL: string;
+ PUBLIC_SETTINGS?: Record;
+ accountsConfigCalled?: boolean;
+ meteorEnv: {
+ TEST_METADATA?: string;
+ NODE_ENV?: string;
+ };
+ ACCOUNTS_CONNECTION_URL?: string;
+ isModern?: boolean;
+ gitCommitHash?: string;
+ meteorRelease?: string;
+ debug?: boolean;
};
interface Window {
diff --git a/apps/meteor/definition/externals/meteor/mongo.d.ts b/apps/meteor/definition/externals/meteor/mongo.d.ts
index 443c8de2a8798..c751d3c973c33 100644
--- a/apps/meteor/definition/externals/meteor/mongo.d.ts
+++ b/apps/meteor/definition/externals/meteor/mongo.d.ts
@@ -37,6 +37,7 @@ declare module 'meteor/mongo' {
connection?: object | null;
idGeneration?: string;
transform?: ((doc: T) => T) | null;
+ _preventAutopublish?: boolean;
},
): Collection;
}
diff --git a/apps/meteor/ee/server/configuration/contact-verification.ts b/apps/meteor/ee/server/configuration/contact-verification.ts
index 51558476924e5..768942f4de929 100644
--- a/apps/meteor/ee/server/configuration/contact-verification.ts
+++ b/apps/meteor/ee/server/configuration/contact-verification.ts
@@ -1,5 +1,3 @@
-import { Meteor } from 'meteor/meteor';
-
import { addSettings } from '../settings/contact-verification';
Meteor.startup(async () => {
diff --git a/apps/meteor/index.html b/apps/meteor/index.html
new file mode 100644
index 0000000000000..d42482af6bc24
--- /dev/null
+++ b/apps/meteor/index.html
@@ -0,0 +1,16 @@
+
+
+
+ Rocket.Chat
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/apps/meteor/lib/getMessageUrlRegex.ts b/apps/meteor/lib/getMessageUrlRegex.ts
index 78e3993ef0a4b..f08653b3d59a5 100644
--- a/apps/meteor/lib/getMessageUrlRegex.ts
+++ b/apps/meteor/lib/getMessageUrlRegex.ts
@@ -1,2 +1,2 @@
-export const getMessageUrlRegex = (): RegExp =>
+export const getMessageUrlRegex: () => RegExp = (): RegExp =>
/([A-Za-z]{3,9}):\/\/([-;:&=\+\$,\w]+@{1})?([-A-Za-z0-9\.]+)+:?(\d+)?((\/[-\+=!:~%\/\.@\,\w]*)?\??([-\+=&!:;%@\/\.\,\w]+)?(?:#([^\s\)]+))?)?/g;
diff --git a/apps/meteor/lib/utils/stringUtils.ts b/apps/meteor/lib/utils/stringUtils.ts
index bd457c2337f01..828e74404002b 100644
--- a/apps/meteor/lib/utils/stringUtils.ts
+++ b/apps/meteor/lib/utils/stringUtils.ts
@@ -1,5 +1,7 @@
import { escapeRegExp } from '@rocket.chat/string-helpers';
-import { sanitize } from 'dompurify';
+import DOMPurify from 'dompurify';
+
+const {sanitize} = DOMPurify;
export function truncate(str: string, length: number): string {
return str.length > length ? `${str.slice(0, length - 3)}...` : str;
diff --git a/apps/meteor/package.json b/apps/meteor/package.json
index 6d7732d83c0fb..e600268d8b8cc 100644
--- a/apps/meteor/package.json
+++ b/apps/meteor/package.json
@@ -405,6 +405,7 @@
"@types/underscore": "^1.13.0",
"@types/xml-crypto": "~1.4.6",
"@types/xml-encryption": "~1.2.4",
+ "@vitejs/plugin-react": "~6.0.1",
"autoprefixer": "^9.8.8",
"babel-loader": "~10.0.0",
"babel-plugin-array-includes": "^2.0.3",
@@ -453,6 +454,8 @@
"ts-node": "^10.9.2",
"tsx": "~4.21.0",
"typescript": "~5.9.3",
+ "vite": "^8.0.10",
+ "vite-plugin-istanbul": "^8.0.0",
"webpack": "~5.104.1"
},
"volta": {
@@ -464,7 +467,7 @@
"meteor": {
"disableLegacyBuild": true,
"mainModule": {
- "client": "client/main.ts",
+ "client": "client/noop.ts",
"server": "server/main.ts"
},
"modern": true
diff --git a/apps/meteor/server/configuration/cas.ts b/apps/meteor/server/configuration/cas.ts
index 1aead109ebc8f..c5f82eb798b5a 100644
--- a/apps/meteor/server/configuration/cas.ts
+++ b/apps/meteor/server/configuration/cas.ts
@@ -1,5 +1,4 @@
import debounce from 'lodash.debounce';
-import { Accounts } from 'meteor/accounts-base';
import { RoutePolicy } from 'meteor/routepolicy';
import { WebApp } from 'meteor/webapp';
diff --git a/apps/meteor/server/routes/index.ts b/apps/meteor/server/routes/index.ts
index 4abc6c6f044a8..d0523ca6a378a 100644
--- a/apps/meteor/server/routes/index.ts
+++ b/apps/meteor/server/routes/index.ts
@@ -4,3 +4,4 @@ import './i18n';
import './timesync';
import './fileDecrypt';
import './userDataDownload';
+import './vite';
diff --git a/apps/meteor/server/routes/vite.ts b/apps/meteor/server/routes/vite.ts
new file mode 100644
index 0000000000000..95b1e80dce132
--- /dev/null
+++ b/apps/meteor/server/routes/vite.ts
@@ -0,0 +1,279 @@
+import { createReadStream } from 'node:fs';
+import { readFile, stat } from 'node:fs/promises';
+import type { ServerResponse } from 'node:http';
+import path from 'node:path';
+
+import type { IncomingMessage } from 'connect';
+import { WebApp } from 'meteor/webapp';
+
+import { getWebAppHash } from '../configuration/configureBoilerplate';
+import { SystemLogger } from '../lib/logger/system';
+
+const frontendDeliveryMode = process.env.FRONTEND_DELIVERY_MODE ?? 'separate';
+const ENABLED = frontendDeliveryMode === 'meteor';
+
+SystemLogger.info(
+ `Vite static route is ${ENABLED ? 'enabled' : 'disabled'} (FRONTEND_DELIVERY_MODE=${frontendDeliveryMode}, VITE_DIST_PATH=${process.env.VITE_DIST_PATH ?? 'unset'})`,
+);
+
+if (ENABLED) {
+ const viteDistPath = await resolveViteDistPath();
+
+ if (!viteDistPath) {
+ SystemLogger.warn('SERVE_VITE_FROM_METEOR is enabled, but no Vite dist directory was found. Skipping Vite static handler.');
+ } else {
+ SystemLogger.info(`Serving Vite frontend from Meteor: ${viteDistPath}`);
+ WebApp.connectHandlers.use(async (req, res, next) => {
+ if (req.method !== 'GET' && req.method !== 'HEAD') {
+ next();
+ return;
+ }
+
+ const pathname = getPathname(req);
+ if (!pathname || isBackendRoute(pathname)) {
+ next();
+ return;
+ }
+
+ const requestedFile = await resolvePublicFile(viteDistPath, pathname);
+ if (requestedFile) {
+ if (path.basename(requestedFile) === 'index.html') {
+ await streamSpaIndexHtml(requestedFile, req, req.method, res);
+ return;
+ }
+
+ await streamFile(requestedFile, req.method, res);
+ return;
+ }
+
+ if (looksLikeAsset(pathname)) {
+ next();
+ return;
+ }
+
+ const fallbackPath = path.join(viteDistPath, 'index.html');
+ if (!(await fileExists(fallbackPath))) {
+ next();
+ return;
+ }
+
+ await streamSpaIndexHtml(fallbackPath, req, req.method, res);
+ });
+ }
+}
+
+function getPathname(req: IncomingMessage): string | undefined {
+ try {
+ const host = req.headers.host ?? 'localhost';
+ return new URL(req.url ?? '/', `http://${host}`).pathname;
+ } catch {
+ return undefined;
+ }
+}
+
+function isBackendRoute(pathname: string): boolean {
+ const backendPrefixes = [
+ '/api',
+ '/sockjs',
+ '/websocket',
+ '/_oauth',
+ '/_saml',
+ '/_timesync',
+ '/file-upload',
+ '/ufs',
+ '/avatar',
+ '/emoji-custom',
+ '/custom-sounds',
+ '/images',
+ '/assets',
+ '/i18n',
+ '/livechat',
+ '/health',
+ '/livez',
+ '/readyz',
+ '/data-export',
+ '/file-decrypt',
+ '/meteor_runtime_config.js',
+ ];
+
+ return backendPrefixes.some((prefix) => pathname === prefix || pathname.startsWith(`${prefix}/`));
+}
+
+function looksLikeAsset(pathname: string): boolean {
+ return path.extname(pathname).length > 0;
+}
+
+async function resolveViteDistPath(): Promise {
+ const envPath = process.env.VITE_DIST_PATH;
+
+ const candidates = [
+ envPath,
+ path.resolve(process.cwd(), 'vite'),
+ path.resolve(process.cwd(), '../vite'),
+ path.resolve(process.cwd(), 'dist'),
+ path.resolve(process.cwd(), '../dist'),
+ ].filter((candidate): candidate is string => Boolean(candidate));
+
+ const checks = await Promise.all(
+ candidates.map(async (candidate) => {
+ const absoluteCandidate = path.resolve(candidate);
+ const hasDirectory = await directoryExists(absoluteCandidate);
+ if (!hasDirectory) {
+ return undefined;
+ }
+
+ const indexPath = path.join(absoluteCandidate, 'index.html');
+ if (await fileExists(indexPath)) {
+ return absoluteCandidate;
+ }
+
+ return undefined;
+ }),
+ );
+
+ return checks.find((candidate): candidate is string => Boolean(candidate));
+}
+
+async function resolvePublicFile(baseDir: string, pathname: string): Promise {
+ const normalizedPath = pathname === '/' ? '/index.html' : pathname;
+ const decodedPath = safeDecodeURIComponent(normalizedPath);
+ if (!decodedPath) {
+ return undefined;
+ }
+
+ const resolvedPath = path.resolve(baseDir, `.${decodedPath}`);
+ if (!isInsideBaseDir(baseDir, resolvedPath)) {
+ return undefined;
+ }
+
+ if (await fileExists(resolvedPath)) {
+ return resolvedPath;
+ }
+
+ return undefined;
+}
+
+function isInsideBaseDir(baseDir: string, targetPath: string): boolean {
+ const relative = path.relative(baseDir, targetPath);
+ return relative === '' || (!relative.startsWith('..') && !path.isAbsolute(relative));
+}
+
+function safeDecodeURIComponent(value: string): string | undefined {
+ try {
+ return decodeURIComponent(value);
+ } catch {
+ return undefined;
+ }
+}
+
+async function streamFile(filePath: string, method: string | undefined, res: ServerResponse): Promise {
+ const fileStats = await stat(filePath);
+ const ext = path.extname(filePath);
+
+ res.setHeader('Content-Type', contentTypeByExtension[ext] ?? 'application/octet-stream');
+ res.setHeader('Content-Length', String(fileStats.size));
+ res.setHeader('Cache-Control', getCacheControl(filePath));
+
+ if (method === 'HEAD') {
+ res.writeHead(200);
+ res.end();
+ return;
+ }
+
+ await new Promise((resolve, reject) => {
+ const stream = createReadStream(filePath);
+ stream.on('error', reject);
+ stream.on('end', resolve);
+ stream.pipe(res);
+ });
+}
+
+async function streamSpaIndexHtml(indexPath: string, req: IncomingMessage, method: string | undefined, res: ServerResponse): Promise {
+ const rawHtml = await readFile(indexPath, 'utf8');
+ const runtimeConfigPath = getRuntimeConfigScriptPath(req);
+ const html = replaceInlineMeteorRuntimeConfig(rawHtml, runtimeConfigPath);
+
+ res.setHeader('Content-Type', 'text/html; charset=utf-8');
+ res.setHeader('Cache-Control', 'no-cache');
+ res.setHeader('Content-Length', Buffer.byteLength(html, 'utf8'));
+
+ if (method === 'HEAD') {
+ res.writeHead(200);
+ res.end();
+ return;
+ }
+
+ res.writeHead(200);
+ res.end(html);
+}
+
+function replaceInlineMeteorRuntimeConfig(html: string, runtimeConfigPath: string): string {
+ const inlineRuntimePattern =
+ /`);
+}
+
+function getRuntimeConfigScriptPath(req: IncomingMessage): string {
+ const { categorizeRequest } = WebApp as typeof WebApp & {
+ categorizeRequest?: (request: IncomingMessage) => { arch?: string };
+ };
+ const { arch = 'web.browser' } = categorizeRequest?.(req) ?? {};
+ const hash = getWebAppHash(arch) || getWebAppHash('web.browser');
+
+ if (!hash) {
+ return '/meteor_runtime_config.js';
+ }
+
+ return `/meteor_runtime_config.js?hash=${encodeURIComponent(hash)}`;
+}
+
+function getCacheControl(filePath: string): string {
+ if (filePath.endsWith('index.html')) {
+ return 'no-cache';
+ }
+
+ const fileName = path.basename(filePath);
+ if (/\.[A-Za-z0-9_-]{8,}\./.test(fileName)) {
+ return 'public, max-age=31536000, immutable';
+ }
+
+ return 'public, max-age=300';
+}
+
+async function fileExists(filePath: string): Promise {
+ try {
+ const fileStat = await stat(filePath);
+ return fileStat.isFile();
+ } catch {
+ return false;
+ }
+}
+
+async function directoryExists(dirPath: string): Promise {
+ try {
+ const dirStat = await stat(dirPath);
+ return dirStat.isDirectory();
+ } catch {
+ return false;
+ }
+}
+
+const contentTypeByExtension: Record = {
+ '.css': 'text/css; charset=utf-8',
+ '.html': 'text/html; charset=utf-8',
+ '.js': 'text/javascript; charset=utf-8',
+ '.json': 'application/json; charset=utf-8',
+ '.map': 'application/json; charset=utf-8',
+ '.mjs': 'text/javascript; charset=utf-8',
+ '.png': 'image/png',
+ '.svg': 'image/svg+xml',
+ '.txt': 'text/plain; charset=utf-8',
+ '.webmanifest': 'application/manifest+json; charset=utf-8',
+ '.woff': 'font/woff',
+ '.woff2': 'font/woff2',
+};
diff --git a/apps/meteor/src/index.ts b/apps/meteor/src/index.ts
new file mode 100644
index 0000000000000..11adfe162e813
--- /dev/null
+++ b/apps/meteor/src/index.ts
@@ -0,0 +1,3 @@
+import './setup.ts';
+
+await import('../client/main.ts');
diff --git a/apps/meteor/src/meteor/accounts-base.ts b/apps/meteor/src/meteor/accounts-base.ts
new file mode 100644
index 0000000000000..e2409b7d14854
--- /dev/null
+++ b/apps/meteor/src/meteor/accounts-base.ts
@@ -0,0 +1,736 @@
+import { Hook } from './callback-hook.ts';
+import { DDP, type Connection } from './ddp-client.ts';
+import { MeteorError } from './meteor.ts';
+import { Collection } from './mongo.ts';
+import { Random } from './random.ts';
+import { ReactiveVar } from './reactive-var.ts';
+import { Tracker } from './tracker.ts';
+import { isKey } from './utils/isKey.ts';
+import { keys } from './utils/keys.ts';
+
+const VALID_CONFIG_KEYS = [
+ 'sendVerificationEmail',
+ 'forbidClientAccountCreation',
+ 'restrictCreationByEmailDomain',
+ 'loginExpiration',
+ 'loginExpirationInDays',
+ 'oauthSecretKey',
+ 'passwordResetTokenExpirationInDays',
+ 'passwordResetTokenExpiration',
+ 'passwordEnrollTokenExpirationInDays',
+ 'passwordEnrollTokenExpiration',
+ 'ambiguousErrorMessages',
+ 'bcryptRounds',
+ 'argon2Enabled',
+ 'argon2Type',
+ 'argon2TimeCost',
+ 'argon2MemoryCost',
+ 'argon2Parallelism',
+ 'defaultFieldSelector',
+ 'collection',
+ 'loginTokenExpirationHours',
+ 'tokenSequenceLength',
+ 'clientStorage',
+ 'ddpUrl',
+ 'connection',
+] as const;
+
+type AccountsClientOptions = {
+ sendVerificationEmail?: boolean;
+ forbidClientAccountCreation?: boolean;
+ restrictCreationByEmailDomain?: boolean;
+ loginExpiration?: number;
+ loginExpirationInDays?: number;
+ oauthSecretKey?: string;
+ passwordResetTokenExpirationInDays?: number;
+ passwordResetTokenExpiration?: number;
+ passwordEnrollTokenExpirationInDays?: number;
+ passwordEnrollTokenExpiration?: number;
+ ambiguousErrorMessages?: boolean;
+ bcryptRounds?: number;
+ argon2Enabled?: boolean;
+ argon2Type?: number;
+ argon2TimeCost?: number;
+ argon2MemoryCost?: number;
+ argon2Parallelism?: number;
+ defaultFieldSelector?: Record;
+ collection?: Collection | string;
+ loginTokenExpirationHours?: number;
+ tokenSequenceLength?: number;
+ clientStorage?: 'local' | 'session';
+ ddpUrl?: string;
+ connection?: Connection;
+};
+const DEFAULT_LOGIN_EXPIRATION_DAYS = 90;
+const DEFAULT_PASSWORD_RESET_TOKEN_EXPIRATION_DAYS = 3;
+const DEFAULT_PASSWORD_ENROLL_TOKEN_EXPIRATION_DAYS = 30;
+const MIN_TOKEN_LIFETIME_CAP_SECS = 3600; // one hour
+const LOGIN_UNEXPIRING_TOKEN_DAYS = 365 * 100;
+
+export class LoginCancelledError extends Error {
+ numericError = 0x8acdc2f;
+
+ override name = 'Accounts.LoginCancelledError';
+}
+
+const URL_PARTS = [
+ { key: 'reset-password', regex: /^#\/reset-password\/(.*)$/, property: '_resetPasswordToken' },
+ { key: 'verify-email', regex: /^#\/verify-email\/(.*)$/, property: '_verifyEmailToken' },
+ { key: 'enroll-account', regex: /^#\/enroll-account\/(.*)$/, property: '_enrollAccountToken' },
+] as const;
+
+type AttemptInfo = { type: string; allowed: boolean; error: any; methodName: string; methodArguments: [{ resume: string | null }] };
+
+export class AccountsClient {
+ public _options: AccountsClientOptions;
+
+ public connection: Connection = DDP.connection;
+
+ public users: any;
+
+ public _onLoginHook = new Hook<
+ [{ type: 'resume' | 'normal'; allowed?: boolean; error?: any; methodName?: string; methodArguments?: any[] }]
+ >({
+ bindEnvironment: false,
+ debugPrintExceptions: 'onLogin callback',
+ });
+
+ public _onLoginFailureHook = new Hook<[{ error: any }]>({
+ bindEnvironment: false,
+ debugPrintExceptions: 'onLoginFailure callback',
+ });
+
+ public _onLogoutHook = new Hook<[]>({
+ bindEnvironment: false,
+ debugPrintExceptions: 'onLogout callback',
+ });
+
+ public DEFAULT_LOGIN_EXPIRATION_DAYS = DEFAULT_LOGIN_EXPIRATION_DAYS;
+
+ public LOGIN_UNEXPIRING_TOKEN_DAYS = LOGIN_UNEXPIRING_TOKEN_DAYS;
+
+ public LoginCancelledError = LoginCancelledError;
+
+ public _loggingIn = new ReactiveVar(false);
+
+ public _loggingOut = new ReactiveVar(false);
+
+ public _loginServicesHandle: any;
+
+ public _pageLoadLoginCallbacks: Array<(...args: any[]) => any> = [];
+
+ public _pageLoadLoginAttemptInfo: AttemptInfo | null = null;
+
+ public savedHash: string;
+
+ public storageLocation: Storage;
+
+ public _loginFuncs: Record any>;
+
+ public _loginCallbacksCalled: boolean;
+
+ public _autoLoginEnabled = false;
+
+ public _lastLoginTokenWhenPolled: string | null = null;
+
+ public LOGIN_TOKEN_KEY = 'Meteor.loginToken';
+
+ public LOGIN_TOKEN_EXPIRES_KEY = 'Meteor.loginTokenExpires';
+
+ public USER_ID_KEY = 'Meteor.userId';
+
+ public _pollIntervalTimer: any;
+
+ public _accountsCallbacks: Partial any>> = {};
+
+ public _reconnectStopper: any;
+
+ public _resetPasswordToken: string | undefined;
+
+ public _verifyEmailToken: string | undefined;
+
+ public _enrollAccountToken: string | undefined;
+
+ constructor(options: AccountsClientOptions = {}) {
+ for (const key of keys(options)) {
+ if (!VALID_CONFIG_KEYS.includes(key)) {
+ console.error(`Accounts.config: Invalid key: ${key}`);
+ }
+ }
+
+ this._options = options || {};
+ this.connection = this._initConnection(options || {});
+ this.users = this._initializeCollection(options || {});
+
+ this._loginServicesHandle = this.connection.subscribe('meteor.loginServiceConfiguration');
+
+ this.savedHash = window.location.hash;
+ this._autoLoginEnabled = true;
+ this._attemptToMatchHash();
+
+ this.storageLocation = localStorage;
+ this._initLocalStorage();
+ this._loginFuncs = {};
+ this._loginCallbacksCalled = false;
+ }
+
+ _initializeCollection(options: AccountsClientOptions) {
+ if (options.collection && typeof options.collection !== 'string' && !(options.collection instanceof Collection)) {
+ throw new MeteorError('Collection parameter can be only of type string or "Mongo.Collection"');
+ }
+
+ let collectionName = 'users';
+ if (typeof options.collection === 'string') {
+ collectionName = options.collection;
+ }
+
+ return options.collection instanceof Collection
+ ? options.collection
+ : new Collection(collectionName, {
+ _preventAutopublish: true,
+ connection: this.connection,
+ });
+ }
+
+ _addDefaultFieldSelector(options: any = {}) {
+ if (!this._options.defaultFieldSelector) {
+ return options;
+ }
+ if (!options.fields)
+ return {
+ ...options,
+ fields: this._options.defaultFieldSelector,
+ };
+ const keys = Object.keys(options.fields);
+ if (!keys.length) {
+ return options;
+ }
+ if (options.fields[keys[0]]) {
+ return options;
+ }
+ const keys2 = Object.keys(this._options.defaultFieldSelector);
+ return this._options.defaultFieldSelector[keys2[0]]
+ ? options
+ : {
+ ...options,
+ fields: {
+ ...options.fields,
+ ...this._options.defaultFieldSelector,
+ },
+ };
+ }
+
+ user(options?: any) {
+ const userId = this.userId();
+ const findOne = (...args: any[]) => this.users.findOne(...args);
+ return userId ? findOne(userId, this._addDefaultFieldSelector(options)) : null;
+ }
+
+ async userAsync(options?: any) {
+ const userId = this.userId();
+ return userId ? this.users.findOneAsync(userId, this._addDefaultFieldSelector(options)) : null;
+ }
+
+ onLogin(func: (...args: any[]) => any) {
+ const ret = this._onLoginHook.register(func);
+ this._startupCallback(ret.callback);
+ return ret;
+ }
+
+ onLoginFailure(func: (...args: any[]) => any) {
+ return this._onLoginFailureHook.register(func);
+ }
+
+ onLogout(func: (...args: any[]) => any) {
+ return this._onLogoutHook.register(func);
+ }
+
+ _initConnection(options: AccountsClientOptions) {
+ if (options.connection) {
+ this.connection = options.connection;
+ }
+
+ if (options.ddpUrl) {
+ this.connection = DDP.connect(options.ddpUrl);
+ }
+
+ return this.connection;
+ }
+
+ _getTokenLifetimeMs(): number {
+ const loginExpirationInDays =
+ this._options.loginExpirationInDays === null ? LOGIN_UNEXPIRING_TOKEN_DAYS : this._options.loginExpirationInDays;
+ return this._options.loginExpiration || (loginExpirationInDays || DEFAULT_LOGIN_EXPIRATION_DAYS) * 86400000;
+ }
+
+ _getPasswordResetTokenLifetimeMs() {
+ return (
+ this._options.passwordResetTokenExpiration ||
+ (this._options.passwordResetTokenExpirationInDays || DEFAULT_PASSWORD_RESET_TOKEN_EXPIRATION_DAYS) * 86400000
+ );
+ }
+
+ _getPasswordEnrollTokenLifetimeMs() {
+ return (
+ this._options.passwordEnrollTokenExpiration ||
+ (this._options.passwordEnrollTokenExpirationInDays || DEFAULT_PASSWORD_ENROLL_TOKEN_EXPIRATION_DAYS) * 86400000
+ );
+ }
+
+ _tokenExpiration(when: any) {
+ return new Date(new Date(when).getTime() + this._getTokenLifetimeMs());
+ }
+
+ _tokenExpiresSoon(when: any) {
+ let minLifetimeMs = 0.1 * this._getTokenLifetimeMs();
+ const minLifetimeCapMs = MIN_TOKEN_LIFETIME_CAP_SECS * 1000;
+ if (minLifetimeMs > minLifetimeCapMs) {
+ minLifetimeMs = minLifetimeCapMs;
+ }
+ return new Date().getTime() > new Date(when).getTime() - minLifetimeMs;
+ }
+
+ initStorageLocation(options?: any) {
+ this.storageLocation = options && options.clientStorage === 'session' ? sessionStorage : localStorage;
+ }
+
+ config(options: AccountsClientOptions) {
+ if (!__meteor_runtime_config__.accountsConfigCalled) {
+ console.debug('Accounts.config was called on the client but not on the server; some configuration options may not take effect.');
+ }
+
+ if (isKey(options, 'oauthSecretKey')) {
+ throw new Error('The oauthSecretKey option may only be specified on the server');
+ }
+ for (const key of keys(options)) {
+ if (!VALID_CONFIG_KEYS.includes(key)) {
+ console.error(`Accounts.config: Invalid key: ${key}`);
+ }
+ }
+
+ if (options.collection && options.collection !== this.users._name && options.collection !== this.users) {
+ this.users = this._initializeCollection(options);
+ }
+ this.initStorageLocation(options);
+ }
+
+ userId() {
+ return this.connection.userId();
+ }
+
+ _setLoggingIn(x: boolean) {
+ this._loggingIn.set(x);
+ }
+
+ loggingIn() {
+ return this._loggingIn.get();
+ }
+
+ loggingOut() {
+ return this._loggingOut.get();
+ }
+
+ registerClientLoginFunction(funcName: string, func: (...args: any[]) => any) {
+ if (this._loginFuncs[funcName]) {
+ throw new Error(`${funcName} has been defined already`);
+ }
+ this._loginFuncs[funcName] = func;
+ }
+
+ callLoginFunction(funcName: string, ...funcArgs: any[]) {
+ if (!this._loginFuncs[funcName]) {
+ throw new Error(`${funcName} was not defined`);
+ }
+ return this._loginFuncs[funcName].apply(this, funcArgs);
+ }
+
+ applyLoginFunction(funcName: string, funcArgs: any[]) {
+ if (!this._loginFuncs[funcName]) {
+ throw new Error(`${funcName} was not defined`);
+ }
+ return this._loginFuncs[funcName].apply(this, funcArgs);
+ }
+
+ logout(callback?: (error?: any) => void) {
+ this._loggingOut.set(true);
+
+ this.connection
+ .applyAsync('logout', [], {
+ wait: true,
+ })
+ .then((_result: any) => {
+ this._loggingOut.set(false);
+ this._loginCallbacksCalled = false;
+ this.makeClientLoggedOut();
+ callback?.();
+ })
+ .catch((e: any) => {
+ this._loggingOut.set(false);
+ callback?.(e);
+ });
+ }
+
+ logoutAllClients(callback?: (error?: any) => void) {
+ this._loggingOut.set(true);
+
+ this.connection
+ .applyAsync('logoutAllClients', [], {
+ wait: true,
+ })
+ .then((_result: any) => {
+ this._loggingOut.set(false);
+ this._loginCallbacksCalled = false;
+ this.makeClientLoggedOut();
+ callback?.();
+ })
+ .catch((e: any) => {
+ this._loggingOut.set(false);
+ callback?.(e);
+ });
+ }
+
+ logoutOtherClients(callback?: (error?: any) => void) {
+ this.connection.apply('getNewToken', [], { wait: true }, (err: any, result: any) => {
+ const userId = this.userId();
+ if (!err && userId) {
+ this._storeLoginToken(userId, result.token, result.tokenExpires);
+ }
+ });
+
+ this.connection.apply('removeOtherTokens', [], { wait: true }, (err: any) => callback?.(err));
+ }
+
+ callLoginMethod(options: any) {
+ options = {
+ methodName: 'login',
+ methodArguments: [{}],
+ _suppressLoggingIn: false,
+ ...options,
+ };
+
+ ['validateResult', 'userCallback'].forEach((f) => {
+ if (!options[f]) options[f] = () => null;
+ });
+
+ let called = false;
+ const loginCallbacks = ({ error, loginDetails }: { error?: any; loginDetails?: any }) => {
+ if (!called) {
+ called = true;
+ if (!error) {
+ this._onLoginHook.forEach((callback) => {
+ callback(loginDetails);
+ return true;
+ });
+ this._loginCallbacksCalled = true;
+ } else {
+ this._loginCallbacksCalled = false;
+ this._onLoginFailureHook.forEach((callback) => {
+ callback({ error });
+ return true;
+ });
+ }
+ options.userCallback(error, loginDetails);
+ }
+ };
+
+ let reconnected = false;
+
+ const onResultReceived = (err: any, result: any) => {
+ if (err || !result || !result.token) {
+ // error handling
+ } else {
+ if (this._reconnectStopper) {
+ this._reconnectStopper.stop();
+ }
+
+ this._reconnectStopper = DDP.onReconnect((conn) => {
+ if (conn !== this.connection) {
+ return;
+ }
+ reconnected = true;
+ const storedToken = this._storedLoginToken();
+ if (storedToken) {
+ result = {
+ token: storedToken,
+ tokenExpires: this._storedLoginTokenExpires(),
+ };
+ }
+ if (!result.tokenExpires) result.tokenExpires = this._tokenExpiration(new Date());
+ if (this._tokenExpiresSoon(result.tokenExpires)) {
+ this.makeClientLoggedOut();
+ } else {
+ this.callLoginMethod({
+ methodArguments: [{ resume: result.token }],
+ _suppressLoggingIn: true,
+ userCallback: (error: any, loginDetails: any) => {
+ const storedTokenNow = this._storedLoginToken();
+ if (error) {
+ if (storedTokenNow && storedTokenNow === result.token) {
+ this.makeClientLoggedOut();
+ }
+ }
+ loginCallbacks({ error, loginDetails });
+ },
+ });
+ }
+ });
+ }
+ };
+
+ const loggedInAndDataReadyCallback = (error: any, result: any) => {
+ if (reconnected) return;
+
+ if (error || !result) {
+ error = error || new Error(`No result from call to ${options.methodName}`);
+ loginCallbacks({ error });
+ this._setLoggingIn(false);
+ return;
+ }
+ try {
+ options.validateResult(result);
+ } catch (e) {
+ loginCallbacks({ error: e });
+ this._setLoggingIn(false);
+ return;
+ }
+
+ this.makeClientLoggedIn(result.id, result.token, result.tokenExpires);
+
+ void Tracker.autorun(async (computation) => {
+ const user = await Tracker.withComputation(computation, () => this.userAsync());
+
+ if (user) {
+ loginCallbacks({ loginDetails: result });
+ this._setLoggingIn(false);
+ computation.stop();
+ }
+ });
+ };
+
+ if (!options._suppressLoggingIn) {
+ this._setLoggingIn(true);
+ }
+ void this.connection.applyAsync(
+ options.methodName,
+ options.methodArguments,
+ { wait: true, onResultReceived },
+ loggedInAndDataReadyCallback,
+ );
+ }
+
+ makeClientLoggedOut() {
+ if (this.connection._userId) {
+ this._onLogoutHook.forEach((callback) => {
+ callback();
+ return true;
+ });
+ }
+ this._unstoreLoginToken();
+ this.connection.setUserId(null);
+ this._reconnectStopper?.stop();
+ }
+
+ makeClientLoggedIn(userId: any, token: any, tokenExpires: any) {
+ this._storeLoginToken(userId, token, tokenExpires);
+ this.connection.setUserId(userId);
+ }
+
+ loginServicesConfigured() {
+ return this._loginServicesHandle.ready();
+ }
+
+ onPageLoadLogin(f: (...args: any[]) => any) {
+ if (this._pageLoadLoginAttemptInfo) {
+ f(this._pageLoadLoginAttemptInfo);
+ } else {
+ this._pageLoadLoginCallbacks.push(f);
+ }
+ }
+
+ _pageLoadLogin(attemptInfo: AttemptInfo) {
+ if (this._pageLoadLoginAttemptInfo) {
+ console.debug('Ignoring unexpected duplicate page load login attempt info');
+ return;
+ }
+
+ this._pageLoadLoginCallbacks.forEach((callback: (...args: any[]) => any) => callback(attemptInfo));
+ this._pageLoadLoginCallbacks = [];
+ this._pageLoadLoginAttemptInfo = attemptInfo;
+ }
+
+ _startupCallback(callback: (...args: any[]) => any) {
+ if (this._loginCallbacksCalled) {
+ setTimeout(() => callback({ type: 'resume' }), 0);
+ }
+ }
+
+ loginWithToken(token: any, callback: (error?: any) => void) {
+ this.callLoginMethod({
+ methodArguments: [
+ {
+ resume: token,
+ },
+ ],
+ userCallback: callback,
+ });
+ }
+
+ _enableAutoLogin() {
+ this._autoLoginEnabled = true;
+ this._pollStoredLoginToken();
+ }
+
+ _isolateLoginTokenForTest() {
+ this.LOGIN_TOKEN_KEY += Random.id();
+ this.USER_ID_KEY += Random.id();
+ }
+
+ _storeLoginToken(userId: string, token: string, tokenExpires: any) {
+ this.storageLocation.setItem(this.USER_ID_KEY, userId);
+ this.storageLocation.setItem(this.LOGIN_TOKEN_KEY, token);
+ if (!tokenExpires) tokenExpires = this._tokenExpiration(new Date());
+ this.storageLocation.setItem(this.LOGIN_TOKEN_EXPIRES_KEY, tokenExpires);
+
+ this._lastLoginTokenWhenPolled = token;
+ }
+
+ _unstoreLoginToken() {
+ this.storageLocation.removeItem(this.USER_ID_KEY);
+ this.storageLocation.removeItem(this.LOGIN_TOKEN_KEY);
+ this.storageLocation.removeItem(this.LOGIN_TOKEN_EXPIRES_KEY);
+ this._lastLoginTokenWhenPolled = null;
+ }
+
+ _storedLoginToken() {
+ return this.storageLocation.getItem(this.LOGIN_TOKEN_KEY);
+ }
+
+ _storedLoginTokenExpires() {
+ return this.storageLocation.getItem(this.LOGIN_TOKEN_EXPIRES_KEY);
+ }
+
+ _storedUserId() {
+ return this.storageLocation.getItem(this.USER_ID_KEY);
+ }
+
+ _unstoreLoginTokenIfExpiresSoon() {
+ const tokenExpires = this._storedLoginTokenExpires();
+ if (tokenExpires && this._tokenExpiresSoon(new Date(tokenExpires))) {
+ this._unstoreLoginToken();
+ }
+ }
+
+ _initLocalStorage() {
+ const rootUrlPathPrefix = __meteor_runtime_config__.ROOT_URL_PATH_PREFIX;
+ if (rootUrlPathPrefix || this.connection !== DDP.connection) {
+ let namespace = `:${this.connection._stream.rawUrl}`;
+ if (rootUrlPathPrefix) {
+ namespace += `:${rootUrlPathPrefix}`;
+ }
+ this.LOGIN_TOKEN_KEY += namespace;
+ this.LOGIN_TOKEN_EXPIRES_KEY += namespace;
+ this.USER_ID_KEY += namespace;
+ }
+
+ let token: string | null = null;
+ if (this._autoLoginEnabled) {
+ this._unstoreLoginTokenIfExpiresSoon();
+ token = this._storedLoginToken();
+ if (token) {
+ const userId = this._storedUserId();
+ userId && this.connection.setUserId(userId);
+ this.loginWithToken(token, (err) => {
+ if (err) {
+ console.debug(`Error logging in with token: ${err}`);
+ this.makeClientLoggedOut();
+ }
+
+ this._pageLoadLogin({
+ type: 'resume',
+ allowed: !err,
+ error: err,
+ methodName: 'login',
+ methodArguments: [{ resume: token }],
+ });
+ });
+ }
+ }
+
+ this._lastLoginTokenWhenPolled = token;
+
+ if (this._pollIntervalTimer) {
+ clearInterval(this._pollIntervalTimer);
+ }
+
+ this._pollIntervalTimer = setInterval(() => {
+ this._pollStoredLoginToken();
+ }, 3000);
+ }
+
+ _pollStoredLoginToken() {
+ if (!this._autoLoginEnabled) {
+ return;
+ }
+
+ const currentLoginToken = this._storedLoginToken();
+
+ if (this._lastLoginTokenWhenPolled !== currentLoginToken) {
+ if (currentLoginToken) {
+ this.loginWithToken(currentLoginToken, (err) => {
+ if (err) {
+ this.makeClientLoggedOut();
+ }
+ });
+ } else {
+ this.logout();
+ }
+ }
+
+ this._lastLoginTokenWhenPolled = currentLoginToken;
+ }
+
+ _attemptToMatchHash() {
+ for (const urlPart of URL_PARTS) {
+ const match = this.savedHash.match(urlPart.regex);
+ if (!match) continue;
+
+ const token = match[1];
+ this[urlPart.property] = token;
+ window.location.hash = '';
+
+ this._autoLoginEnabled = false;
+
+ if (this._accountsCallbacks[urlPart.key]) {
+ this._accountsCallbacks[urlPart.key]?.(token, () => this._enableAutoLogin());
+ }
+
+ return;
+ }
+ }
+
+ onResetPasswordLink(callback: (...args: any[]) => any) {
+ if (this._accountsCallbacks['reset-password']) {
+ console.debug('Accounts.onResetPasswordLink was called more than once. Only one callback added will be executed.');
+ }
+
+ this._accountsCallbacks['reset-password'] = callback;
+ }
+
+ onEmailVerificationLink(callback: (...args: any[]) => any) {
+ if (this._accountsCallbacks['verify-email']) {
+ console.debug('Accounts.onEmailVerificationLink was called more than once. Only one callback added will be executed.');
+ }
+
+ this._accountsCallbacks['verify-email'] = callback;
+ }
+
+ onEnrollmentLink(callback: (...args: any[]) => any) {
+ if (this._accountsCallbacks['enroll-account']) {
+ console.debug('Accounts.onEnrollmentLink was called more than once. Only one callback added will be executed.');
+ }
+
+ this._accountsCallbacks['enroll-account'] = callback;
+ }
+}
+
+export const Accounts = new AccountsClient();
diff --git a/apps/meteor/src/meteor/accounts-oauth.ts b/apps/meteor/src/meteor/accounts-oauth.ts
new file mode 100644
index 0000000000000..f612e34c43779
--- /dev/null
+++ b/apps/meteor/src/meteor/accounts-oauth.ts
@@ -0,0 +1,33 @@
+class ServiceSet extends Set {
+ includes(service: string): boolean {
+ return this.has(service);
+ }
+
+ override add(service: string): this {
+ if (this.has(service)) {
+ throw new Error(`Duplicate service: ${service}`);
+ }
+
+ return super.add(service);
+ }
+
+ override delete(service: string): boolean {
+ if (!this.has(service)) {
+ throw new Error(`Service not found: ${service}`);
+ }
+
+ return super.delete(service);
+ }
+}
+
+const services = new ServiceSet();
+
+export const registerService = (name: T) => {
+ services.add(name);
+};
+
+export const unregisterService = (name: T) => {
+ services.delete(name);
+};
+
+export const serviceNames = () => services;
diff --git a/apps/meteor/src/meteor/accounts-password.ts b/apps/meteor/src/meteor/accounts-password.ts
new file mode 100644
index 0000000000000..5bd7aecca86f0
--- /dev/null
+++ b/apps/meteor/src/meteor/accounts-password.ts
@@ -0,0 +1,229 @@
+import { Accounts } from './accounts-base.ts';
+import { MeteorError } from './meteor.ts';
+import { SHA256 } from './sha.ts';
+
+type MeteorCallback = (error?: Error, result?: T) => void;
+
+type UserSelectorObject = {
+ username?: string;
+ email?: string;
+ id?: string;
+};
+
+type UserSelector = string | UserSelectorObject;
+
+type PasswordDigest = {
+ digest: string;
+ algorithm: string;
+};
+
+type InternalLoginOptions = {
+ selector: UserSelector;
+ password: string;
+ code?: string | undefined; // 2FA code
+ callback?: MeteorCallback | undefined;
+};
+
+type CreateUserOptions = {
+ username?: string;
+ email?: string;
+ password: string | PasswordDigest;
+ profile?: Record;
+ [key: string]: any;
+};
+
+type ForgotPasswordOptions = {
+ email: string;
+};
+
+const reportError = (error: Error, callback?: MeteorCallback): void => {
+ if (callback) {
+ callback(error);
+ } else {
+ throw error;
+ }
+};
+
+const internalLoginWithPassword = ({ selector, password, code, callback }: InternalLoginOptions): UserSelector => {
+ let normalizedSelector: UserSelectorObject;
+
+ if (typeof selector === 'string') {
+ if (!selector.includes('@')) {
+ normalizedSelector = { username: selector };
+ } else {
+ normalizedSelector = { email: selector };
+ }
+ } else {
+ normalizedSelector = selector;
+ }
+
+ Accounts.callLoginMethod({
+ methodArguments: [
+ {
+ user: normalizedSelector,
+ password: _hashPassword(password),
+ code,
+ },
+ ],
+ userCallback: (error: Error | undefined, result?: any) => {
+ if (error) {
+ reportError(error, callback);
+ } else if (callback) {
+ callback(undefined, result);
+ }
+ },
+ });
+
+ return selector;
+};
+
+export const _hashPassword = (password: string): PasswordDigest => ({
+ digest: SHA256(password),
+ algorithm: 'sha-256',
+});
+
+export const loginWithPassword = (selector: UserSelector, password: string, callback?: MeteorCallback): UserSelector => {
+ return internalLoginWithPassword({ selector, password, callback });
+};
+
+export const loginWithPasswordAsync = (selector: UserSelector, password: string): Promise => {
+ return new Promise((resolve, reject) => {
+ internalLoginWithPassword({
+ selector,
+ password,
+ callback: (err, res) => (err ? reject(err) : resolve(res)),
+ });
+ });
+};
+
+export const loginWithPasswordAnd2faCode = (
+ selector: UserSelector,
+ password: string,
+ code: string,
+ callback?: MeteorCallback,
+): UserSelector => {
+ if (!code || typeof code !== 'string') {
+ throw new MeteorError(400, 'Token is required to use loginWithPasswordAnd2faCode and must be a string');
+ }
+ return internalLoginWithPassword({ selector, password, code, callback });
+};
+
+export const loginWithPasswordAnd2faCodeAsync = (selector: UserSelector, password: string, code: string): Promise => {
+ return new Promise((resolve, reject) => {
+ loginWithPasswordAnd2faCode(selector, password, code, (err, res) => (err ? reject(err) : resolve(res)));
+ });
+};
+
+export const createUser = (options: CreateUserOptions, callback?: MeteorCallback): void => {
+ const safeOptions = { ...options };
+
+ if (typeof safeOptions.password !== 'string') {
+ throw new Error('options.password must be a string');
+ }
+
+ if (!safeOptions.password) {
+ return reportError(new MeteorError(400, 'Password may not be empty'), callback);
+ }
+ safeOptions.password = _hashPassword(safeOptions.password);
+
+ Accounts.callLoginMethod({
+ methodName: 'createUser',
+ methodArguments: [safeOptions],
+ userCallback: callback,
+ });
+};
+
+export const createUserAsync = (options: CreateUserOptions): Promise => {
+ return new Promise((resolve, reject) =>
+ createUser(options, (error, result) => {
+ if (error) {
+ reject(error);
+ } else {
+ resolve(result);
+ }
+ }),
+ );
+};
+
+export const changePassword = (oldPassword: string | null, newPassword: string, callback?: MeteorCallback): void => {
+ if (!Accounts.user()) {
+ return reportError(new Error('Must be logged in to change password.'), callback);
+ }
+
+ if (typeof newPassword !== 'string' || !newPassword) {
+ return reportError(new MeteorError(400, 'Password must be a non-empty string'), callback);
+ }
+
+ Accounts.connection.apply(
+ 'changePassword',
+ [oldPassword ? _hashPassword(oldPassword) : null, _hashPassword(newPassword)],
+ undefined,
+ (error, result) => {
+ if (error || !result) {
+ reportError(error || new Error('No result from changePassword.'), callback);
+ } else if (callback) {
+ callback();
+ }
+ },
+ );
+};
+
+export const changePasswordAsync = (oldPassword: string | null, newPassword: string): Promise => {
+ return new Promise((resolve, reject) => {
+ changePassword(oldPassword, newPassword, (err) => (err ? reject(err) : resolve()));
+ });
+};
+
+export const forgotPassword = (options: ForgotPasswordOptions, callback: MeteorCallback): void => {
+ if (!options.email) {
+ return reportError(new MeteorError(400, 'Must pass options.email'), callback);
+ }
+
+ Accounts.connection.call('forgotPassword', options, callback);
+};
+
+export const forgotPasswordAsync = (options: ForgotPasswordOptions): Promise => {
+ return new Promise((resolve, reject) => {
+ forgotPassword(options, (err, res) => (err ? reject(err) : resolve(res)));
+ });
+};
+
+export const resetPassword = (token: string, newPassword: string, callback?: MeteorCallback): void => {
+ if (typeof token !== 'string') {
+ return reportError(new MeteorError(400, 'Token must be a string'), callback);
+ }
+
+ if (typeof newPassword !== 'string' || !newPassword) {
+ return reportError(new MeteorError(400, 'Password must be a non-empty string'), callback);
+ }
+
+ Accounts.callLoginMethod({
+ methodName: 'resetPassword',
+ methodArguments: [token, _hashPassword(newPassword)],
+ userCallback: callback,
+ });
+};
+
+export const resetPasswordAsync = (token: string, newPassword: string): Promise => {
+ return new Promise((resolve, reject) => {
+ resetPassword(token, newPassword, (err, res) => (err ? reject(err) : resolve(res)));
+ });
+};
+
+export const verifyEmail = (token: string, callback?: MeteorCallback): void => {
+ if (!token) {
+ return reportError(new MeteorError(400, 'Need to pass token'), callback);
+ }
+
+ Accounts.callLoginMethod({
+ methodName: 'verifyEmail',
+ methodArguments: [token],
+ userCallback: callback,
+ });
+};
+
+export const verifyEmailAsync = (token: string): Promise => {
+ return new Promise((resolve, reject) => {
+ verifyEmail(token, (err, res) => (err ? reject(err) : resolve(res)));
+ });
+};
diff --git a/apps/meteor/src/meteor/allow-deny.ts b/apps/meteor/src/meteor/allow-deny.ts
new file mode 100644
index 0000000000000..ee30fa5e119fa
--- /dev/null
+++ b/apps/meteor/src/meteor/allow-deny.ts
@@ -0,0 +1,383 @@
+import { check, Match } from './check.ts';
+import { EJSON } from './ejson.ts';
+import { MeteorError } from './meteor.ts';
+import { _selectorIsIdPerhapsAsObject } from './minimongo.ts';
+import { isKey } from './utils/isKey.ts';
+
+type MongoDoc = Record;
+type ValidatorFn = (userId: string | null, doc: MongoDoc, fields?: string[], modifier?: MongoDoc) => boolean | Promise;
+
+type ValidatorSet = {
+ allow: ValidatorFn[];
+ deny: ValidatorFn[];
+};
+
+type CollectionValidators = {
+ insert: ValidatorSet;
+ update: ValidatorSet;
+ remove: ValidatorSet;
+ fetch: string[];
+ fetchAllFields: boolean;
+};
+
+type AllowDenyOptions = {
+ insert?: ValidatorFn;
+ update?: ValidatorFn;
+ remove?: ValidatorFn;
+ fetch?: string[];
+ transform?: ((doc: MongoDoc) => unknown) | undefined;
+ [key: string]: unknown;
+};
+
+type MethodContext = {
+ userId: string | null;
+ isSimulation: boolean;
+ connection: any;
+};
+
+const ALLOWED_UPDATE_OPERATIONS = new Set([
+ '$inc',
+ '$set',
+ '$unset',
+ '$addToSet',
+ '$pop',
+ '$pullAll',
+ '$pull',
+ '$pushAll',
+ '$push',
+ '$bit',
+]);
+
+const asyncSome = async (array: T[], predicate: (item: T) => boolean | Promise): Promise => {
+ for (const item of array) {
+ // eslint-disable-next-line no-await-in-loop
+ if (await predicate(item)) return true;
+ }
+ return false;
+};
+
+const asyncEvery = async (array: T[], predicate: (item: T) => boolean | Promise): Promise => {
+ for (const item of array) {
+ // eslint-disable-next-line no-await-in-loop
+ if (!(await predicate(item))) return false;
+ }
+ return true;
+};
+
+const transformDoc = (validator: { transform?: ((doc: MongoDoc) => unknown) | null }, doc: MongoDoc): unknown => {
+ if (validator.transform) return validator.transform(doc);
+ return doc;
+};
+
+const docToValidate = (
+ validator: { transform?: ((doc: MongoDoc) => unknown) | null },
+ doc: MongoDoc,
+ generatedId: string | null,
+): unknown => {
+ let ret = doc;
+ if (validator.transform) {
+ ret = EJSON.clone(doc);
+ if (generatedId !== null) {
+ ret._id = generatedId;
+ }
+ ret = validator.transform(ret) as MongoDoc;
+ }
+ return ret;
+};
+
+const validateUpdateMutator = (mutator: MongoDoc): string[] => {
+ const keys = Object.keys(mutator);
+ if (keys.length === 0) {
+ throw new MeteorError(
+ 403,
+ "Access denied. In a restricted collection you can only update documents, not replace them. Use a Mongo update operator, such as '$set'.",
+ );
+ }
+
+ const modifiedFields: Record = {};
+
+ for (const op of keys) {
+ if (op.charAt(0) !== '$') {
+ throw new MeteorError(
+ 403,
+ "Access denied. In a restricted collection you can only update documents, not replace them. Use a Mongo update operator, such as '$set'.",
+ );
+ }
+ if (!ALLOWED_UPDATE_OPERATIONS.has(op)) {
+ throw new MeteorError(403, `Access denied. Operator ${op} not allowed in a restricted collection.`);
+ }
+
+ const params = mutator[op] as Record;
+ for (const field of Object.keys(params)) {
+ const rootField = field.indexOf('.') !== -1 ? field.substring(0, field.indexOf('.')) : field;
+ modifiedFields[rootField] = true;
+ }
+ }
+
+ return Object.keys(modifiedFields);
+};
+
+export class RestrictedCollectionMixin {
+ public _name?: string;
+
+ public _connection?: any;
+
+ public _collection: any;
+
+ public _prefix = '';
+
+ public _validators: CollectionValidators = {
+ insert: { allow: [], deny: [] },
+ update: { allow: [], deny: [] },
+ remove: { allow: [], deny: [] },
+ fetch: [],
+ fetchAllFields: false,
+ };
+
+ public _restricted = false;
+
+ public _insecure?: boolean | undefined;
+
+ public _transform?: (doc: MongoDoc) => unknown;
+
+ public _makeNewID(): string {
+ throw new Error('Mixin requirement: _makeNewID not implemented');
+ }
+
+ public allow(options: AllowDenyOptions): void {
+ this._addValidator('allow', options);
+ }
+
+ public deny(options: AllowDenyOptions): void {
+ this._addValidator('deny', options);
+ }
+
+ public _isInsecure(): boolean {
+ return !!this._insecure;
+ }
+
+ public _updateFetch(fields?: string[]): void {
+ if (!this._validators.fetchAllFields) {
+ if (fields) {
+ const union = new Set(this._validators.fetch);
+ fields.forEach((f) => union.add(f));
+ this._validators.fetch = Array.from(union);
+ } else {
+ this._validators.fetchAllFields = true;
+ this._validators.fetch = [];
+ }
+ }
+ }
+
+ public _defineMutationMethods(options: { useExisting?: boolean } = {}): void {
+ this._restricted = false;
+ this._insecure = undefined;
+ this._validators = {
+ insert: { allow: [], deny: [] },
+ update: { allow: [], deny: [] },
+ remove: { allow: [], deny: [] },
+ fetch: [],
+ fetchAllFields: false,
+ };
+
+ if (!this._name) return; // anonymous collection
+
+ this._prefix = `/${this._name}/`;
+ if (this._connection) {
+ const methods: Record any> = {};
+ const methodNames = ['insertAsync', 'updateAsync', 'removeAsync', 'insert', 'update', 'remove'];
+
+ for (const method of methodNames) {
+ const fullMethodName = this._prefix + method;
+
+ if (options.useExisting) {
+ const handlerProp = '_methodHandlers';
+ if (this._connection[handlerProp] && typeof this._connection[handlerProp][fullMethodName] === 'function') {
+ continue;
+ }
+ }
+
+ methods[fullMethodName] = this._createMutationMethod(method);
+ }
+
+ this._connection.methods(methods);
+ }
+ }
+
+ protected async _validatedInsertAsync(userId: string | null, doc: MongoDoc, generatedId: string | null): Promise {
+ if (
+ await asyncSome(this._validators.insert.deny, (validator) =>
+ validator(userId, docToValidate(validator as any, doc, generatedId) as MongoDoc),
+ )
+ ) {
+ throw new MeteorError(403, 'Access denied');
+ }
+ if (
+ await asyncEvery(
+ this._validators.insert.allow,
+ (validator) => !validator(userId, docToValidate(validator as any, doc, generatedId) as MongoDoc),
+ )
+ ) {
+ throw new MeteorError(403, 'Access denied');
+ }
+
+ if (generatedId !== null) doc._id = generatedId;
+ return this._collection.insertAsync(doc);
+ }
+
+ protected async _validatedUpdateAsync(userId: string | null, selector: unknown, mutator: MongoDoc, options: any): Promise {
+ check(mutator, Object);
+ const safeOptions = Object.assign(Object.create(null), options);
+
+ if (!_selectorIsIdPerhapsAsObject(selector)) {
+ throw new Error('validated update should be of a single ID');
+ }
+ if (safeOptions.upsert) {
+ throw new MeteorError(403, 'Access denied. Upserts not allowed in a restricted collection.');
+ }
+
+ const fields = validateUpdateMutator(mutator);
+ const findOptions = this._getFindOptions();
+
+ const doc = await this._collection.findOneAsync(selector, findOptions);
+ if (!doc) return 0;
+ if (
+ await asyncSome(this._validators.update.deny, (validator) =>
+ validator(userId, transformDoc(validator as any, doc) as MongoDoc, fields, mutator),
+ )
+ ) {
+ throw new MeteorError(403, 'Access denied');
+ }
+ if (
+ await asyncEvery(
+ this._validators.update.allow,
+ (validator) => !validator(userId, transformDoc(validator as any, doc) as MongoDoc, fields, mutator),
+ )
+ ) {
+ throw new MeteorError(403, 'Access denied');
+ }
+
+ safeOptions._forbidReplace = true;
+ return this._collection.updateAsync(selector, mutator, safeOptions);
+ }
+
+ protected async _validatedRemoveAsync(userId: string | null, selector: unknown): Promise {
+ const findOptions = this._getFindOptions();
+ const doc = await this._collection.findOneAsync(selector, findOptions);
+ if (!doc) return 0;
+ if (await asyncSome(this._validators.remove.deny, (validator) => validator(userId, transformDoc(validator as any, doc) as MongoDoc))) {
+ throw new MeteorError(403, 'Access denied');
+ }
+ if (
+ await asyncEvery(this._validators.remove.allow, (validator) => !validator(userId, transformDoc(validator as any, doc) as MongoDoc))
+ ) {
+ throw new MeteorError(403, 'Access denied');
+ }
+
+ return this._collection.removeAsync(selector);
+ }
+
+ private _getFindOptions() {
+ const findOptions: Record = { transform: null };
+ if (!this._validators.fetchAllFields) {
+ findOptions.fields = {};
+ this._validators.fetch.forEach((fieldName) => {
+ findOptions.fields[fieldName] = 1;
+ });
+ }
+ return findOptions;
+ }
+
+ private _addValidator(allowOrDeny: 'allow' | 'deny', options: AllowDenyOptions) {
+ const validKeys = new Set(['insert', 'update', 'remove', 'fetch', 'transform', 'insertAsync', 'updateAsync', 'removeAsync']);
+
+ for (const key of Object.keys(options)) {
+ if (!validKeys.has(key)) throw new Error(`${allowOrDeny}: Invalid key: ${key}`);
+ if (key.includes('Async')) {
+ const syncKey = key.replace('Async', '');
+ console.warn(`${allowOrDeny}: The "${key}" key is deprecated. Use "${syncKey}" instead.`);
+ }
+ }
+
+ this._restricted = true;
+
+ if (options.update || options.remove || options.updateAsync || options.removeAsync || options.fetch) {
+ if (options.fetch && !Array.isArray(options.fetch)) {
+ throw new Error(`${allowOrDeny}: Value for \`fetch\` must be an array`);
+ }
+ this._updateFetch(options.fetch);
+ }
+ }
+
+ private _createMutationMethod(methodName: string) {
+ const _executeMutation = this._executeMutation.bind(this);
+ return function (this: MethodContext, ...args: unknown[]) {
+ check(args, [Match.Any]);
+ const argArray = Array.from(args);
+ try {
+ return _executeMutation(this, methodName, argArray);
+ } catch (e: any) {
+ if (e.name === 'MongoError' || e.name === 'BulkWriteError' || e.name === 'MongoBulkWriteError' || e.name === 'MinimongoError') {
+ throw new MeteorError(409, e.toString());
+ }
+ throw e;
+ }
+ };
+ }
+
+ private async _executeMutation(methodContext: MethodContext, methodName: string, args: any[]): Promise {
+ const isInsert = methodName.includes('insert');
+ const [firstArg] = args;
+ let generatedId: string | null = null;
+ if (isInsert && !isKey(firstArg, '_id')) {
+ generatedId = this._makeNewID();
+ }
+ if (methodContext.isSimulation) {
+ if (generatedId !== null && typeof firstArg === 'object' && firstArg !== null) {
+ firstArg._id = generatedId;
+ }
+ return this._collection[methodName](...args);
+ }
+
+ const syncMethodName = methodName.replace('Async', '');
+ const validatedMethodName =
+ `_validated${syncMethodName.charAt(0).toUpperCase()}${syncMethodName.slice(1)}Async` as keyof RestrictedCollectionMixin;
+ if (this._restricted) {
+ if (this._validators[syncMethodName as 'insert' | 'update' | 'remove'].allow.length === 0) {
+ throw new MeteorError(403, `Access denied. No allow validators set on restricted collection for method '${methodName}'.`);
+ }
+
+ const methodArgs = [methodContext.userId, ...args];
+ if (isInsert) methodArgs.push(generatedId);
+
+ return this[validatedMethodName](...methodArgs);
+ }
+ if (this._isInsecure()) {
+ if (generatedId !== null && typeof firstArg === 'object' && firstArg !== null) {
+ (firstArg as MongoDoc)._id = generatedId;
+ }
+ const syncMethodsMapper = {
+ insert: 'insertAsync',
+ update: 'updateAsync',
+ remove: 'removeAsync',
+ } as const;
+ const targetMethod = syncMethodsMapper[methodName as keyof typeof syncMethodsMapper] || methodName;
+ return this._collection[targetMethod](...args);
+ }
+ throw new MeteorError(403, 'Access denied');
+ }
+}
+const CollectionPrototype: Record = {};
+const propertyNames = Object.getOwnPropertyNames(RestrictedCollectionMixin.prototype);
+
+for (const name of propertyNames) {
+ if (name === 'constructor') continue;
+ const descriptor = Object.getOwnPropertyDescriptor(RestrictedCollectionMixin.prototype, name);
+ if (descriptor) {
+ Object.defineProperty(CollectionPrototype, name, { ...descriptor, enumerable: true });
+ }
+}
+
+export const AllowDeny = {
+ CollectionPrototype,
+};
diff --git a/apps/meteor/src/meteor/base64.ts b/apps/meteor/src/meteor/base64.ts
new file mode 100644
index 0000000000000..bf569ce0522c1
--- /dev/null
+++ b/apps/meteor/src/meteor/base64.ts
@@ -0,0 +1 @@
+export { Base64 } from '@rocket.chat/base64';
diff --git a/apps/meteor/src/meteor/callback-hook.ts b/apps/meteor/src/meteor/callback-hook.ts
new file mode 100644
index 0000000000000..a50535f3723e9
--- /dev/null
+++ b/apps/meteor/src/meteor/callback-hook.ts
@@ -0,0 +1,74 @@
+interface IHookOptions {
+ bindEnvironment?: boolean;
+ wrapAsync?: boolean;
+ exceptionHandler?: ((exception: unknown) => void) | string;
+ debugPrintExceptions?: string;
+}
+
+type Callback = (...args: TArgs) => TResult;
+
+export class Hook = Callback> {
+ nextCallbackId = 0;
+
+ callbacks = new Map();
+
+ bindEnvironment = true;
+
+ wrapAsync = true;
+
+ exceptionHandler: ((exception: unknown) => void) | string | undefined;
+
+ constructor(options: IHookOptions = {}) {
+ const { bindEnvironment = true, wrapAsync = true, exceptionHandler, debugPrintExceptions } = options;
+
+ this.bindEnvironment = bindEnvironment;
+ this.wrapAsync = wrapAsync;
+
+ if (exceptionHandler) {
+ this.exceptionHandler = exceptionHandler;
+ } else if (debugPrintExceptions) {
+ if (typeof debugPrintExceptions !== 'string') {
+ throw new Error('Hook option debugPrintExceptions should be a string');
+ }
+ this.exceptionHandler = debugPrintExceptions;
+ }
+ }
+
+ register(callback: TCallback): { callback: TCallback; stop: () => void } {
+ const id = this.nextCallbackId++;
+ this.callbacks.set(id, callback);
+
+ return {
+ callback,
+ stop: () => {
+ this.callbacks.delete(id);
+ },
+ };
+ }
+
+ clear() {
+ this.nextCallbackId = 0;
+ this.callbacks.clear();
+ }
+
+ forEach(iterator: (callback: TCallback) => boolean | void | undefined) {
+ for (const callback of this.callbacks.values()) {
+ if (!iterator(callback)) {
+ break;
+ }
+ }
+ }
+
+ async forEachAsync(iterator: (callback: TCallback) => Promise): Promise {
+ for (const callback of this.callbacks.values()) {
+ // eslint-disable-next-line no-await-in-loop
+ if (!(await iterator(callback))) {
+ break;
+ }
+ }
+ }
+
+ each(iterator: (callback: TCallback) => boolean | void | undefined) {
+ return this.forEach(iterator);
+ }
+}
diff --git a/apps/meteor/src/meteor/check.ts b/apps/meteor/src/meteor/check.ts
new file mode 100644
index 0000000000000..a17461dfa970b
--- /dev/null
+++ b/apps/meteor/src/meteor/check.ts
@@ -0,0 +1,380 @@
+const class2type: Record = {};
+const toString = class2type.toString;
+const hasOwn = Object.prototype.hasOwnProperty;
+const fnToString = hasOwn.toString;
+const ObjectFunctionString = fnToString.call(Object);
+const getProto = Object.getPrototypeOf;
+
+const isPlainObject = (obj: any): boolean => {
+ let proto;
+ let Ctor;
+
+ if (!obj || toString.call(obj) !== '[object Object]') {
+ return false;
+ }
+
+ proto = getProto(obj);
+
+ if (!proto) {
+ return true;
+ }
+
+ Ctor = hasOwn.call(proto, 'constructor') && proto.constructor;
+ return typeof Ctor === 'function' && fnToString.call(Ctor) === ObjectFunctionString;
+};
+
+export type Matcher<_T> = {
+ _meteorCheckMatcherBrand: void;
+};
+
+export type Pattern =
+ | typeof String
+ | typeof Number
+ | typeof Boolean
+ | typeof Object
+ | typeof Function
+ | (new (...args: any[]) => any)
+ | undefined
+ | null
+ | string
+ | number
+ | boolean
+ | [Pattern]
+ | { [key: string]: Pattern }
+ | Matcher;
+
+export type PatternMatch =
+ T extends Matcher ? U :
+ T extends typeof String ? string :
+ T extends typeof Number ? number :
+ T extends typeof Boolean ? boolean :
+ T extends typeof Object ? object :
+ T extends typeof Function ? Function :
+ T extends undefined | null | string | number | boolean ? T :
+ T extends new (...args: any[]) => infer U ? U :
+ T extends [Pattern] ? PatternMatch[] :
+ T extends { [key: string]: Pattern } ? { [K in keyof T]: PatternMatch } :
+ unknown;
+
+export class MatchError extends Error {
+ public path: string;
+ public sanitizedError: Error;
+
+ constructor(msg: string) {
+ super(`Match error: ${msg}`);
+ this.name = 'MatchError';
+ this.path = '';
+ this.sanitizedError = new Error('Match failed'); // Replaces Meteor.Error(400)
+ }
+}
+
+const format = (result: { message: string; path?: string }): MatchError => {
+ const err = new MatchError(result.message.replace(/^Match error: /, ''));
+ if (result.path) {
+ err.message += ` in field ${result.path}`;
+ err.path = result.path;
+ }
+ return err;
+};
+
+function nonEmptyStringCondition(value: any): boolean {
+ check(value, String);
+ return value.length > 0;
+}
+
+export function check(
+ value: any,
+ pattern: T,
+ options: { throwAllErrors?: boolean } = { throwAllErrors: false }
+): asserts value is PatternMatch {
+ const result = testSubtree(value, pattern, options.throwAllErrors);
+
+ if (result) {
+ if (options.throwAllErrors) {
+ throw Array.isArray(result) ? result.map((r) => format(r)) : [format(result)];
+ } else {
+ throw format(result);
+ }
+ }
+}
+
+class Optional {
+ pattern: Pattern;
+ constructor(pattern: Pattern) {
+ this.pattern = pattern;
+ }
+}
+
+class Maybe {
+ pattern: Pattern;
+ constructor(pattern: Pattern) {
+ this.pattern = pattern;
+ }
+}
+
+class OneOf {
+ public choices: Pattern[];
+ constructor(choices: Pattern[]) {
+ if (!choices || choices.length === 0) {
+ throw new Error('Must provide at least one choice to Match.OneOf');
+ }
+ this.choices = choices;
+ }
+}
+
+class Where {
+ condition: (val: any) => boolean;
+ constructor(condition: (val: any) => boolean) {
+ this.condition = condition;
+ }
+}
+
+class ObjectIncluding {
+ pattern: Record;
+ constructor(pattern: Record) {
+ this.pattern = pattern;
+ }
+}
+
+class ObjectWithValues {
+ pattern: Pattern;
+ constructor(pattern: Pattern) {
+ this.pattern = pattern;
+ }
+}
+
+export const Match = {
+ Optional: (pattern: T): Matcher | undefined> => new Optional(pattern) as any,
+ Maybe: (pattern: T): Matcher | undefined | null> => new Maybe(pattern) as any,
+ OneOf: (...args: T): Matcher> => new OneOf(args) as any,
+ Any: ['__any__'] as unknown as Matcher,
+ Where: (condition: (val: any) => boolean): Matcher => new Where(condition) as any,
+ NonEmptyString: ['__NonEmptyString__'] as unknown as Matcher,
+ ObjectIncluding: (pattern: T): Matcher> => new ObjectIncluding(pattern) as any,
+ ObjectWithValues: (pattern: T): Matcher>> => new ObjectWithValues(pattern) as any,
+ Integer: ['__integer__'] as unknown as Matcher,
+ Error: MatchError,
+
+ test(value: any, pattern: T): value is PatternMatch {
+ return !testSubtree(value, pattern);
+ }
+};
+
+const stringForErrorMessage = (value: any, options: { onlyShowType?: boolean } = {}): string => {
+ if (value === null) return 'null';
+ if (options.onlyShowType) return typeof value;
+ if (typeof value !== 'object') return JSON.stringify(value);
+
+ try {
+ return JSON.stringify(value);
+ } catch (stringifyError) {
+ if (stringifyError instanceof TypeError) {
+ return typeof value;
+ }
+ return '[Circular]';
+ }
+};
+
+const typeofChecks: Array<[any, string]> = [
+ [String, 'string'],
+ [Number, 'number'],
+ [Boolean, 'boolean'],
+ [Function, 'function'],
+ [undefined, 'undefined'],
+];
+
+const testSubtree = (
+ value: any,
+ pattern: any,
+ collectErrors: boolean = false,
+ errors: any[] = [],
+ path: string = ''
+): any => {
+ if (pattern === Match.Any) return false;
+
+ for (let i = 0; i < typeofChecks.length; ++i) {
+ if (pattern === typeofChecks[i][0]) {
+ if (typeof value === typeofChecks[i][1]) return false;
+ return { message: `Expected ${typeofChecks[i][1]}, got ${stringForErrorMessage(value, { onlyShowType: true })}`, path: '' };
+ }
+ }
+
+ if (pattern === null) {
+ if (value === null) return false;
+ return { message: `Expected null, got ${stringForErrorMessage(value)}`, path: '' };
+ }
+
+ if (typeof pattern === 'string' || typeof pattern === 'number' || typeof pattern === 'boolean') {
+ if (value === pattern) return false;
+ return { message: `Expected ${pattern}, got ${stringForErrorMessage(value)}`, path: '' };
+ }
+
+ if (pattern === Match.Integer) {
+ if (typeof value === 'number' && (value | 0) === value) return false;
+ return { message: `Expected Integer, got ${stringForErrorMessage(value)}`, path: '' };
+ }
+
+ if (pattern === Object) {
+ pattern = Match.ObjectIncluding({});
+ }
+
+ if (pattern === Match.NonEmptyString) {
+ pattern = new Where(nonEmptyStringCondition);
+ }
+
+ if (Array.isArray(pattern)) {
+ if (pattern.length !== 1) {
+ return { message: `Bad pattern: arrays must have one type element ${stringForErrorMessage(pattern)}`, path: '' };
+ }
+ if (!Array.isArray(value) && !isArguments(value)) {
+ return { message: `Expected array, got ${stringForErrorMessage(value)}`, path: '' };
+ }
+
+ for (let i = 0, length = value.length; i < length; i++) {
+ const arrPath = `${path}[${i}]`;
+ const result = testSubtree(value[i], pattern[0], collectErrors, errors, arrPath);
+ if (result) {
+ result.path = _prependPath(collectErrors ? arrPath : i, result.path);
+ if (!collectErrors) return result;
+ if (typeof value[i] !== 'object' || result.message) errors.push(result);
+ }
+ }
+ if (!collectErrors) return false;
+ return errors.length === 0 ? false : errors;
+ }
+
+ if (pattern instanceof Where) {
+ let result;
+ try {
+ result = pattern.condition(value);
+ } catch (err: any) {
+ if (!(err instanceof MatchError)) throw err;
+ return { message: err.message.replace(/^Match error: /, ''), path: err.path };
+ }
+ if (result) return false;
+ return { message: 'Failed Match.Where validation', path: '' };
+ }
+
+ if (pattern instanceof Maybe) {
+ pattern = Match.OneOf(undefined, null, pattern.pattern);
+ } else if (pattern instanceof Optional) {
+ pattern = Match.OneOf(undefined, pattern.pattern);
+ }
+
+ if (pattern instanceof OneOf) {
+ for (let i = 0; i < pattern.choices.length; ++i) {
+ const result = testSubtree(value, pattern.choices[i]);
+ if (!result) return false;
+ }
+ return { message: 'Failed Match.OneOf, Match.Maybe or Match.Optional validation', path: '' };
+ }
+
+ if (pattern instanceof Function) {
+ if (value instanceof pattern) return false;
+ return { message: `Expected ${pattern.name || 'particular constructor'}`, path: '' };
+ }
+
+ let unknownKeysAllowed = false;
+ let unknownKeyPattern: any;
+
+ if (pattern instanceof ObjectIncluding) {
+ unknownKeysAllowed = true;
+ pattern = pattern.pattern;
+ }
+
+ if (pattern instanceof ObjectWithValues) {
+ unknownKeysAllowed = true;
+ unknownKeyPattern = [pattern.pattern];
+ pattern = {};
+ }
+
+ if (typeof pattern !== 'object') {
+ return { message: 'Bad pattern: unknown pattern type', path: '' };
+ }
+
+ if (typeof value !== 'object') return { message: `Expected object, got ${typeof value}`, path: '' };
+ if (value === null) return { message: `Expected object, got null`, path: '' };
+ if (!isPlainObject(value)) return { message: `Expected plain object`, path: '' };
+
+ const requiredPatterns = Object.create(null);
+ const optionalPatterns = Object.create(null);
+
+ Object.keys(pattern).forEach((key) => {
+ const subPattern = pattern[key];
+ if (subPattern instanceof Optional || subPattern instanceof Maybe) {
+ optionalPatterns[key] = subPattern.pattern;
+ } else {
+ requiredPatterns[key] = subPattern;
+ }
+ });
+
+ for (const key in Object(value)) {
+ const subValue = value[key];
+ const objPath = path ? `${path}.${key}` : key;
+
+ if (hasOwn.call(requiredPatterns, key)) {
+ const result = testSubtree(subValue, requiredPatterns[key], collectErrors, errors, objPath);
+ if (result) {
+ result.path = _prependPath(collectErrors ? objPath : key, result.path);
+ if (!collectErrors) return result;
+ if (typeof subValue !== 'object' || result.message) errors.push(result);
+ }
+ delete requiredPatterns[key];
+ } else if (hasOwn.call(optionalPatterns, key)) {
+ const result = testSubtree(subValue, optionalPatterns[key], collectErrors, errors, objPath);
+ if (result) {
+ result.path = _prependPath(collectErrors ? objPath : key, result.path);
+ if (!collectErrors) return result;
+ if (typeof subValue !== 'object' || result.message) errors.push(result);
+ }
+ } else {
+ if (!unknownKeysAllowed) {
+ const result = { message: 'Unknown key', path: key };
+ if (!collectErrors) return result;
+ errors.push(result);
+ }
+ if (unknownKeyPattern) {
+ const result = testSubtree(subValue, unknownKeyPattern[0], collectErrors, errors, objPath);
+ if (result) {
+ result.path = _prependPath(collectErrors ? objPath : key, result.path);
+ if (!collectErrors) return result;
+ if (typeof subValue !== 'object' || result.message) errors.push(result);
+ }
+ }
+ }
+ }
+
+ const keys = Object.keys(requiredPatterns);
+ if (keys.length) {
+ const createMissingError = (key: string) => ({ message: `Missing key '${key}'`, path: collectErrors ? path : '' });
+ if (!collectErrors) return createMissingError(keys[0]);
+ for (const key of keys) errors.push(createMissingError(key));
+ }
+
+ if (!collectErrors) return false;
+ return errors.length === 0 ? false : errors;
+};
+
+const _jsKeywords = [
+ 'do', 'if', 'in', 'for', 'let', 'new', 'try', 'var', 'case', 'else', 'enum', 'eval', 'false', 'null', 'this', 'true', 'void', 'with', 'break', 'catch', 'class', 'const', 'super', 'throw', 'while', 'yield', 'delete', 'export', 'import', 'public', 'return', 'static', 'switch', 'typeof', 'default', 'extends', 'finally', 'package', 'private', 'continue', 'debugger', 'function', 'arguments', 'interface', 'protected', 'implements', 'instanceof'
+];
+
+const _prependPath = (key: string | number, base: string): string => {
+ let strKey = String(key);
+ if (typeof key === 'number' || strKey.match(/^[0-9]+$/)) {
+ strKey = `[${key}]`;
+ } else if (!strKey.match(/^[a-z_$][0-9a-z_$.[\]]*$/i) || _jsKeywords.includes(strKey)) {
+ strKey = JSON.stringify([key]);
+ }
+
+ if (base && base[0] !== '[') return `${strKey}.${base}`;
+ return strKey + base;
+};
+
+const isObject = (value: any): boolean => typeof value === 'object' && value !== null;
+const baseIsArguments = (item: any): boolean => isObject(item) && Object.prototype.toString.call(item) === '[object Arguments]';
+const isArguments = baseIsArguments(
+ (function () { return arguments; })()
+)
+ ? baseIsArguments
+ : (value: any) => isObject(value) && typeof value.callee === 'function';
diff --git a/apps/meteor/src/meteor/ddp-client.ts b/apps/meteor/src/meteor/ddp-client.ts
new file mode 100644
index 0000000000000..7d219fa80f87a
--- /dev/null
+++ b/apps/meteor/src/meteor/ddp-client.ts
@@ -0,0 +1,2132 @@
+import { Hook } from './callback-hook.ts';
+import { DDPCommon, type MethodInvocation, RandomStream } from './ddp-common.ts';
+import { DiffSequence } from './diff-sequence.ts';
+import { EJSON, type EJSONable } from './ejson.ts';
+import { IdMap } from './id-map.ts';
+import { Meteor } from './meteor.ts';
+import type { LocalCollection } from './minimongo.ts';
+import { ObjectID } from './mongo-id.ts';
+import { Random } from './random.ts';
+import { Reload } from './reload.ts';
+import { Retry } from './retry.ts';
+import { ClientStream, type ClientStreamOptions } from './socket-stream-client.ts';
+import { Tracker } from './tracker.ts';
+import { hasOwn } from './utils/hasOwn.ts';
+import { isEmpty } from './utils/isEmpty.ts';
+import { isFunction, type UnknownFunction } from './utils/isFunction.ts';
+import { isKey } from './utils/isKey.ts';
+import { keys } from './utils/keys.ts';
+import { last } from './utils/last.ts';
+import { noop } from './utils/noop.ts';
+
+class MongoIDMap extends IdMap {
+ constructor() {
+ super(ObjectID.stringify, ObjectID.parse);
+ }
+}
+
+export class ConnectionStreamHandlers {
+ _connection: Connection;
+
+ constructor(connection: Connection) {
+ this._connection = connection;
+ }
+
+ async onMessage(rawMsg: string) {
+ let msg;
+ try {
+ msg = DDPCommon.parseDDP(rawMsg);
+ } catch (e) {
+ console.debug('Exception while parsing DDP', e);
+ return;
+ }
+
+ // Any message counts as receiving a pong, as it demonstrates that
+ // the server is still alive.
+ if (this._connection._heartbeat) {
+ this._connection._heartbeat.messageReceived();
+ }
+
+ if (msg === null || !msg.msg) {
+ return;
+ }
+
+ // Important: This was missing from previous version
+ // We need to set the current version before routing the message
+ if (msg.msg === 'connected') {
+ this._connection._version = this._connection._versionSuggestion;
+ }
+
+ await this._routeMessage(msg);
+ }
+
+ async _routeMessage(msg: any) {
+ switch (msg.msg) {
+ case 'connected':
+ await this._connection._livedata_connected(msg);
+ this._connection.options.onConnected();
+ break;
+
+ case 'failed':
+ await this._handleFailedMessage(msg);
+ break;
+
+ case 'ping':
+ if (this._connection.options.respondToPings) {
+ this._connection._send({ msg: 'pong', id: msg.id });
+ }
+ break;
+
+ case 'pong':
+ // noop, as we assume everything's a pong
+ break;
+
+ case 'added':
+ case 'changed':
+ case 'removed':
+ case 'ready':
+ case 'updated':
+ await this._connection._livedata_data(msg);
+ break;
+
+ case 'nosub':
+ await this._connection._livedata_nosub(msg);
+ break;
+
+ case 'result':
+ await this._connection._livedata_result(msg);
+ break;
+
+ case 'error':
+ this._connection._livedata_error(msg);
+ break;
+
+ default:
+ console.debug('discarding unknown livedata message type', msg);
+ }
+ }
+
+ _handleFailedMessage(msg: any) {
+ if (this._connection._supportedDDPVersions.indexOf(msg.version) >= 0) {
+ this._connection._versionSuggestion = msg.version;
+ this._connection._stream.reconnect({ _force: true });
+ } else {
+ const description = `DDP version negotiation failed; server requested version ${msg.version}`;
+ this._connection._stream.disconnect({ _permanent: true, _error: description });
+ this._connection.options.onDDPVersionNegotiationFailure(description);
+ }
+ }
+
+ onReset() {
+ // Reset is called even on the first connection, so this is
+ // the only place we send this message.
+ const msg = this._buildConnectMessage();
+ this._connection._send(msg);
+
+ // Mark non-retry calls as failed and handle outstanding methods
+ this._handleOutstandingMethodsOnReset();
+
+ // Now, to minimize setup latency, go ahead and blast out all of
+ // our pending methods ands subscriptions before we've even taken
+ // the necessary RTT to know if we successfully reconnected.
+ this._connection._callOnReconnectAndSendAppropriateOutstandingMethods();
+ this._resendSubscriptions();
+ }
+
+ _buildConnectMessage() {
+ return {
+ msg: 'connect',
+ version: this._connection._versionSuggestion || this._connection._supportedDDPVersions[0],
+ support: this._connection._supportedDDPVersions,
+ session: this._connection._lastSessionId,
+ } as const;
+ }
+
+ _handleOutstandingMethodsOnReset() {
+ const blocks = this._connection._outstandingMethodBlocks;
+ if (blocks.length === 0) return;
+
+ const currentMethodBlock = blocks[0].methods;
+ blocks[0].methods = currentMethodBlock.filter((methodInvoker: any) => {
+ // Methods with 'noRetry' option set are not allowed to re-send after
+ // recovering dropped connection.
+ if (methodInvoker.sentMessage && methodInvoker.noRetry) {
+ methodInvoker.receiveResult(
+ new Meteor.Error(
+ 'invocation-failed',
+ 'Method invocation might have failed due to dropped connection. ' +
+ 'Failing because `noRetry` option was passed to Meteor.apply.',
+ ),
+ );
+ }
+
+ // Only keep a method if it wasn't sent or it's allowed to retry.
+ return !(methodInvoker.sentMessage && methodInvoker.noRetry);
+ });
+
+ // Clear empty blocks
+ if (blocks.length > 0 && blocks[0].methods.length === 0) {
+ blocks.shift();
+ }
+
+ // Reset all method invokers as unsent
+ Object.values(this._connection._methodInvokers).forEach((invoker: any) => {
+ invoker.sentMessage = false;
+ });
+ }
+
+ _resendSubscriptions() {
+ Object.entries(this._connection._subscriptions).forEach(([id, sub]: [string, any]) => {
+ this._connection._sendQueued({
+ msg: 'sub',
+ id,
+ name: sub.name,
+ params: sub.params,
+ });
+ });
+ }
+}
+
+export class MessageProcessors {
+ _connection: Connection;
+
+ constructor(connection: Connection) {
+ this._connection = connection;
+ }
+
+ async _livedata_connected(msg: any) {
+ const self = this._connection;
+
+ if (self._version !== 'pre1' && self._heartbeatInterval !== 0) {
+ self._heartbeat = new DDPCommon.Heartbeat({
+ heartbeatInterval: self._heartbeatInterval,
+ heartbeatTimeout: self._heartbeatTimeout,
+ onTimeout() {
+ self._lostConnection(new ConnectionError('DDP heartbeat timed out'));
+ },
+ sendPing() {
+ self._send({ msg: 'ping' });
+ },
+ });
+ self._heartbeat.start();
+ }
+
+ // If this is a reconnect, we'll have to reset all stores.
+ if (self._lastSessionId) self._resetStores = true;
+
+ let reconnectedToPreviousSession;
+ if (typeof msg.session === 'string') {
+ reconnectedToPreviousSession = self._lastSessionId === msg.session;
+ self._lastSessionId = msg.session;
+ }
+
+ if (reconnectedToPreviousSession) {
+ // Successful reconnection -- pick up where we left off.
+ return;
+ }
+
+ // Server doesn't have our data anymore. Re-sync a new session.
+
+ // Forget about messages we were buffering for unknown collections. They'll
+ // be resent if still relevant.
+ self._updatesForUnknownStores = Object.create(null);
+
+ if (self._resetStores) {
+ // Forget about the effects of stubs. We'll be resetting all collections
+ // anyway.
+ self._documentsWrittenByStub = Object.create(null);
+ self._serverDocuments = Object.create(null);
+ }
+
+ // Clear _afterUpdateCallbacks.
+ self._afterUpdateCallbacks = [];
+
+ // Mark all named subscriptions which are ready as needing to be revived.
+ self._subsBeingRevived = Object.create(null);
+ Object.entries(self._subscriptions).forEach(([id, sub]: [string, any]) => {
+ if (sub.ready) {
+ self._subsBeingRevived[id] = true;
+ }
+ });
+
+ // Arrange for "half-finished" methods to have their callbacks run, and
+ // track methods that were sent on this connection so that we don't
+ // quiesce until they are all done.
+ //
+ // Start by clearing _methodsBlockingQuiescence: methods sent before
+ // reconnect don't matter, and any "wait" methods sent on the new connection
+ // that we drop here will be restored by the loop below.
+ self._methodsBlockingQuiescence = Object.create(null);
+ if (self._resetStores) {
+ const invokers = self._methodInvokers;
+ keys(invokers).forEach((id) => {
+ const invoker = invokers[id];
+ if (invoker.gotResult()) {
+ // This method already got its result, but it didn't call its callback
+ // because its data didn't become visible. We did not resend the
+ // method RPC. We'll call its callback when we get a full quiesce,
+ // since that's as close as we'll get to "data must be visible".
+ self._afterUpdateCallbacks.push(() => invoker.dataVisible());
+ } else if (invoker.sentMessage) {
+ // This method has been sent on this connection (maybe as a resend
+ // from the last connection, maybe from onReconnect, maybe just very
+ // quickly before processing the connected message).
+ //
+ // We don't need to do anything special to ensure its callbacks get
+ // called, but we'll count it as a method which is preventing
+ // reconnect quiescence. (eg, it might be a login method that was run
+ // from onReconnect, and we don't want to see flicker by seeing a
+ // logged-out state.)
+ self._methodsBlockingQuiescence[invoker.methodId] = true;
+ }
+ });
+ }
+
+ self._messagesBufferedUntilQuiescence = [];
+
+ // If we're not waiting on any methods or subs, we can reset the stores and
+ // call the callbacks immediately.
+ if (!self._waitingForQuiescence()) {
+ if (self._resetStores) {
+ const promises = Object.values(self._stores).map((store: any) => store.beginUpdate(0, true).then(() => store.endUpdate()));
+ await Promise.all(promises);
+ self._resetStores = false;
+ }
+ self._runAfterUpdateCallbacks();
+ }
+ }
+
+ async _livedata_data(msg: any) {
+ const self = this._connection;
+
+ if (self._waitingForQuiescence()) {
+ self._messagesBufferedUntilQuiescence.push(msg);
+
+ if (msg.msg === 'nosub') {
+ delete self._subsBeingRevived[msg.id];
+ }
+
+ if (msg.subs) {
+ msg.subs.forEach((subId: string) => {
+ delete self._subsBeingRevived[subId];
+ });
+ }
+
+ if (msg.methods) {
+ msg.methods.forEach((methodId: string) => {
+ delete self._methodsBlockingQuiescence[methodId];
+ });
+ }
+
+ if (self._waitingForQuiescence()) {
+ return;
+ }
+
+ // No methods or subs are blocking quiescence!
+ // We'll now process and all of our buffered messages, reset all stores,
+ // and apply them all at once.
+ const bufferedMessages = self._messagesBufferedUntilQuiescence;
+ const promises = Object.values(bufferedMessages).map((bufferedMessage) =>
+ self._processOneDataMessage(bufferedMessage, self._bufferedWrites),
+ );
+ await Promise.all(promises);
+ self._messagesBufferedUntilQuiescence = [];
+ } else {
+ await this._processOneDataMessage(msg, self._bufferedWrites);
+ }
+
+ // Immediately flush writes when:
+ // 1. Buffering is disabled. Or;
+ // 2. any non-(added/changed/removed) message arrives.
+ const standardWrite = msg.msg === 'added' || msg.msg === 'changed' || msg.msg === 'removed';
+
+ if (self._bufferedWritesInterval === 0 || !standardWrite) {
+ await self._flushBufferedWrites();
+ return;
+ }
+
+ if (self._bufferedWritesFlushAt === null) {
+ self._bufferedWritesFlushAt = new Date().valueOf() + self._bufferedWritesMaxAge;
+ } else if (self._bufferedWritesFlushAt < new Date().valueOf()) {
+ await self._flushBufferedWrites();
+ return;
+ }
+
+ if (self._bufferedWritesFlushHandle) {
+ clearTimeout(self._bufferedWritesFlushHandle);
+ }
+ self._bufferedWritesFlushHandle = setTimeout(() => {
+ self._liveDataWritesPromise = self._flushBufferedWrites();
+ if (Meteor._isPromise(self._liveDataWritesPromise)) {
+ self._liveDataWritesPromise.finally(() => {
+ self._liveDataWritesPromise = undefined;
+ });
+ }
+ }, self._bufferedWritesInterval);
+ }
+
+ async _processOneDataMessage(msg: any, updates: any) {
+ const messageType = msg.msg;
+
+ switch (messageType) {
+ case 'added':
+ await this._connection._process_added(msg, updates);
+ break;
+ case 'changed':
+ this._connection._process_changed(msg, updates);
+ break;
+ case 'removed':
+ this._connection._process_removed(msg, updates);
+ break;
+ case 'ready':
+ this._connection._process_ready(msg, updates);
+ break;
+ case 'updated':
+ this._connection._process_updated(msg, updates);
+ break;
+ case 'nosub':
+ // ignore this
+ break;
+ default:
+ console.debug('discarding unknown livedata data message type', msg);
+ }
+ }
+
+ async _livedata_result(msg: any) {
+ const self = this._connection;
+
+ // Lets make sure there are no buffered writes before returning result.
+ if (!isEmpty(self._bufferedWrites)) {
+ await self._flushBufferedWrites();
+ }
+
+ // find the outstanding request
+ // should be O(1) in nearly all realistic use cases
+ if (isEmpty(self._outstandingMethodBlocks)) {
+ console.debug('Received method result but no methods outstanding');
+ return;
+ }
+ const currentMethodBlock = self._outstandingMethodBlocks[0].methods;
+ let i = -1;
+ const m = currentMethodBlock.find((method: any, idx: number) => {
+ const found = method.methodId === msg.id;
+ if (found) i = idx;
+ return found;
+ });
+ if (!m) {
+ console.debug("Can't match method response to original method call", msg);
+ return;
+ }
+
+ // Remove from current method block. This may leave the block empty, but we
+ // don't move on to the next block until the callback has been delivered, in
+ // _outstandingMethodFinished.
+ if (i !== -1) {
+ currentMethodBlock.splice(i, 1);
+ }
+
+ if (isKey(msg, 'error')) {
+ m.receiveResult(new Meteor.Error(msg.error.error, msg.error.reason, msg.error.details));
+ } else {
+ // msg.result may be undefined if the method didn't return a value
+ m.receiveResult(undefined, msg.result);
+ }
+ }
+
+ async _livedata_nosub(msg: any) {
+ const self = this._connection;
+
+ // First pass it through _livedata_data, which only uses it to help get
+ // towards quiescence.
+ await this._livedata_data(msg);
+
+ // Do the rest of our processing immediately, with no
+ // buffering-until-quiescence.
+
+ // we weren't subbed anyway, or we initiated the unsub.
+ if (!hasOwn(self._subscriptions, msg.id)) {
+ return;
+ }
+
+ // XXX COMPAT WITH 1.0.3.1 #errorCallback
+ const { errorCallback } = self._subscriptions[msg.id];
+ const { stopCallback } = self._subscriptions[msg.id];
+
+ self._subscriptions[msg.id].remove();
+
+ const meteorErrorFromMsg = (msgArg?: { error?: { error: string | number; reason?: string; details?: string } }) => {
+ return msgArg?.error && new Meteor.Error(msgArg.error.error, msgArg.error.reason, msgArg.error.details);
+ };
+
+ // XXX COMPAT WITH 1.0.3.1 #errorCallback
+ if (errorCallback && msg.error) {
+ errorCallback(meteorErrorFromMsg(msg));
+ }
+
+ if (stopCallback) {
+ stopCallback(meteorErrorFromMsg(msg));
+ }
+ }
+
+ _livedata_error(msg: any) {
+ console.debug('Received error from server: ', msg.reason);
+ if (msg.offendingMessage) console.debug('For: ', msg.offendingMessage);
+ }
+
+ // Document change message processors will be defined in a separate class
+}
+
+export class DocumentProcessors {
+ _connection: any;
+
+ constructor(connection: any) {
+ this._connection = connection;
+ }
+
+ async _process_added(msg: any, updates: any) {
+ const self = this._connection;
+ const id = ObjectID.parse(msg.id);
+ const serverDoc = self._getServerDoc(msg.collection, id);
+
+ if (serverDoc) {
+ // Some outstanding stub wrote here.
+ const isExisting = serverDoc.document !== undefined;
+
+ serverDoc.document = msg.fields || Object.create(null);
+ serverDoc.document._id = id;
+
+ if (self._resetStores) {
+ // During reconnect the server is sending adds for existing ids.
+ // Always push an update so that document stays in the store after
+ // reset. Use current version of the document for this update, so
+ // that stub-written values are preserved.
+ const currentDoc = await self._stores[msg.collection].getDoc(msg.id);
+ if (currentDoc !== undefined) msg.fields = currentDoc;
+
+ self._pushUpdate(updates, msg.collection, msg);
+ } else if (isExisting) {
+ throw new Error(`Server sent add for existing id: ${msg.id}`);
+ }
+ } else {
+ self._pushUpdate(updates, msg.collection, msg);
+ }
+ }
+
+ _process_changed(msg: any, updates: any) {
+ const self = this._connection;
+ const serverDoc = self._getServerDoc(msg.collection, ObjectID.parse(msg.id));
+
+ if (serverDoc) {
+ if (serverDoc.document === undefined) {
+ throw new Error(`Server sent changed for nonexisting id: ${msg.id}`);
+ }
+ DiffSequence.applyChanges(serverDoc.document, msg.fields);
+ } else {
+ self._pushUpdate(updates, msg.collection, msg);
+ }
+ }
+
+ _process_removed(msg: any, updates: any) {
+ const self = this._connection;
+ const serverDoc = self._getServerDoc(msg.collection, ObjectID.parse(msg.id));
+
+ if (serverDoc) {
+ // Some outstanding stub wrote here.
+ if (serverDoc.document === undefined) {
+ throw new Error(`Server sent removed for nonexisting id:${msg.id}`);
+ }
+ serverDoc.document = undefined;
+ } else {
+ self._pushUpdate(updates, msg.collection, {
+ msg: 'removed',
+ collection: msg.collection,
+ id: msg.id,
+ });
+ }
+ }
+
+ _process_ready(msg: any, _updates: any) {
+ const self = this._connection;
+
+ // Process "sub ready" messages. "sub ready" messages don't take effect
+ // until all current server documents have been flushed to the local
+ // database. We can use a write fence to implement this.
+ msg.subs.forEach((subId: string) => {
+ self._runWhenAllServerDocsAreFlushed(() => {
+ const subRecord = self._subscriptions[subId];
+ // Did we already unsubscribe?
+ if (!subRecord) return;
+ // Did we already receive a ready message? (Oops!)
+ if (subRecord.ready) return;
+ subRecord.ready = true;
+ subRecord.readyCallback?.();
+ subRecord.readyDeps.changed();
+ });
+ });
+ }
+
+ _process_updated(msg: any, updates: any) {
+ const self = this._connection;
+ // Process "method done" messages.
+ msg.methods.forEach((methodId: string) => {
+ const docs = self._documentsWrittenByStub[methodId] || {};
+ Object.values(docs).forEach((written: any) => {
+ const serverDoc = self._getServerDoc(written.collection, written.id);
+ if (!serverDoc) {
+ throw new Error(`Lost serverDoc for ${JSON.stringify(written)}`);
+ }
+ if (!serverDoc.writtenByStubs[methodId]) {
+ throw new Error(`Doc ${JSON.stringify(written)} not written by method ${methodId}`);
+ }
+ delete serverDoc.writtenByStubs[methodId];
+ if (isEmpty(serverDoc.writtenByStubs)) {
+ // All methods whose stubs wrote this method have completed! We can
+ // now copy the saved document to the database (reverting the stub's
+ // change if the server did not write to this object, or applying the
+ // server's writes if it did).
+
+ // This is a fake ddp 'replace' message. It's just for talking
+ // between livedata connections and minimongo. (We have to stringify
+ // the ID because it's supposed to look like a wire message.)
+ self._pushUpdate(updates, written.collection, {
+ msg: 'replace',
+ id: ObjectID.stringify(written.id),
+ replace: serverDoc.document,
+ });
+ // Call all flush callbacks.
+ serverDoc.flushCallbacks.forEach((c: any) => {
+ c();
+ });
+
+ // Delete this completed serverDocument. Don't bother to GC empty
+ // IdMaps inside self._serverDocuments, since there probably aren't
+ // many collections and they'll be written repeatedly.
+ self._serverDocuments[written.collection].remove(written.id);
+ }
+ });
+ delete self._documentsWrittenByStub[methodId];
+
+ // We want to call the data-written callback, but we can't do so until all
+ // currently buffered messages are flushed.
+ const callbackInvoker = self._methodInvokers[methodId];
+ if (!callbackInvoker) {
+ throw new Error(`No callback invoker for method ${methodId}`);
+ }
+
+ self._runWhenAllServerDocsAreFlushed((...args: any[]) => callbackInvoker.dataVisible(...args));
+ });
+ }
+
+ _pushUpdate(updates: any, collection: string, msg: any) {
+ if (!isKey(updates, collection)) {
+ updates[collection] = [];
+ }
+ updates[collection].push(msg);
+ }
+
+ _getServerDoc(collection: string, id: string) {
+ const self = this._connection;
+ if (!hasOwn(self._serverDocuments, collection)) {
+ return null;
+ }
+ const serverDocsForCollection = self._serverDocuments[collection];
+ return serverDocsForCollection.get(id) || null;
+ }
+}
+
+// A MethodInvoker manages sending a method to the server and calling the user's
+// callbacks. On construction, it registers itself in the connection's
+// _methodInvokers map; it removes itself once the method is fully finished and
+// the callback is invoked. This occurs when it has both received a result,
+// and the data written by it is fully visible.
+export class MethodInvoker {
+ methodId: string;
+
+ sentMessage: boolean;
+
+ _callback: UnknownFunction | undefined;
+
+ _connection: any;
+
+ _message: any;
+
+ _onResultReceived: UnknownFunction;
+
+ _wait: boolean;
+
+ noRetry: boolean;
+
+ _methodResult: any;
+
+ _dataVisible: boolean;
+
+ constructor(options: any) {
+ // Public (within this file) fields.
+ this.methodId = options.methodId;
+ this.sentMessage = false;
+
+ this._callback = options.callback;
+ this._connection = options.connection;
+ this._message = options.message;
+ this._onResultReceived = options.onResultReceived || noop;
+ this._wait = options.wait;
+ this.noRetry = options.noRetry;
+ this._methodResult = null;
+ this._dataVisible = false;
+
+ // Register with the connection.
+ this._connection._methodInvokers[this.methodId] = this;
+ }
+
+ // Sends the method message to the server. May be called additional times if
+ // we lose the connection and reconnect before receiving a result.
+ sendMessage() {
+ // This function is called before sending a method (including resending on
+ // reconnect). We should only (re)send methods where we don't already have a
+ // result!
+ if (this.gotResult()) throw new Error('sendingMethod is called on method with result');
+
+ // If we're re-sending it, it doesn't matter if data was written the first
+ // time.
+ this._dataVisible = false;
+ this.sentMessage = true;
+
+ // If this is a wait method, make all data messages be buffered until it is
+ // done.
+ if (this._wait) this._connection._methodsBlockingQuiescence[this.methodId] = true;
+
+ // Actually send the message.
+ this._connection._send(this._message);
+ }
+
+ // Invoke the callback, if we have both a result and know that all data has
+ // been written to the local cache.
+ _maybeInvokeCallback() {
+ if (this._methodResult && this._dataVisible) {
+ // Call the callback. (This won't throw: the callback was wrapped with
+ // bindEnvironment.)
+ this._callback?.(this._methodResult[0], this._methodResult[1]);
+
+ // Forget about this method.
+ delete this._connection._methodInvokers[this.methodId];
+
+ // Let the connection know that this method is finished, so it can try to
+ // move on to the next block of methods.
+ this._connection._outstandingMethodFinished();
+ }
+ }
+
+ // Call with the result of the method from the server. Only may be called
+ // once; once it is called, you should not call sendMessage again.
+ // If the user provided an onResultReceived callback, call it immediately.
+ // Then invoke the main callback if data is also visible.
+ receiveResult(err: any, result: any) {
+ if (this.gotResult()) throw new Error('Methods should only receive results once');
+ this._methodResult = [err, result];
+ this._onResultReceived(err, result);
+ this._maybeInvokeCallback();
+ }
+
+ // Call this when all data written by the method is visible. This means that
+ // the method has returns its "data is done" message *AND* all server
+ // documents that are buffered at that time have been written to the local
+ // cache. Invokes the main callback if the result has been received.
+ dataVisible() {
+ this._dataVisible = true;
+ this._maybeInvokeCallback();
+ }
+
+ // True if receiveResult has been called.
+ gotResult() {
+ return !!this._methodResult;
+ }
+}
+
+type StubOptions =
+ | {
+ hasStub: false;
+ alreadyInSimulation?: boolean | undefined;
+ randomSeed: {
+ value: string | null;
+ };
+ isFromCallAsync?: boolean | undefined;
+ exception?: any;
+ stubReturnValue?: unknown;
+ }
+ | {
+ hasStub: true;
+ stubInvocation: () => any;
+ invocation: MethodInvocation;
+ stubReturnValue?: unknown;
+ exception?: any;
+ alreadyInSimulation?: boolean | undefined;
+ randomSeed: {
+ value: string | null;
+ };
+ isFromCallAsync?: boolean | undefined;
+ };
+
+type ConnectionOptions = ClientStreamOptions & {
+ onConnected: VoidFunction;
+ reloadWithOutstanding?: boolean;
+ headers?: Record;
+ _sockjsOptions?: Record;
+ onDDPVersionNegotiationFailure: (description: string) => void;
+ supportedDDPVersions?: string[];
+ connectTimeoutMs?: number;
+ retry?: boolean;
+ respondToPings?: boolean;
+ bufferedWritesInterval: number;
+ bufferedWritesMaxAge: number;
+};
+
+// @param url {String|Object} URL to Meteor app,
+// or an object as a test hook (see code)
+// Options:
+// reloadWithOutstanding: is it OK to reload if there are outstanding methods?
+// headers: extra headers to send on the websockets connection, for
+// server-to-server DDP only
+// _sockjsOptions: Specifies options to pass through to the sockjs client
+// onDDPNegotiationVersionFailure: callback when version negotiation fails.
+//
+// XXX There should be a way to destroy a DDP connection, causing all
+// outstanding method calls to fail.
+//
+// XXX Our current way of handling failure and reconnection is great
+// for an app (where we want to tolerate being disconnected as an
+// expect state, and keep trying forever to reconnect) but cumbersome
+// for something like a command line tool that wants to make a
+// connection, call a method, and print an error if connection
+// fails. We should have better usability in the latter case (while
+// still transparently reconnecting if it's just a transient failure
+// or the server migrating us).
+export class Connection {
+ options: ConnectionOptions;
+
+ onReconnect: VoidFunction | null;
+
+ _stream: ClientStream;
+
+ _lastSessionId: string | null;
+
+ _versionSuggestion: string | null;
+
+ _version: string | null;
+
+ _stores: Record;
+
+ _methodHandlers: Record;
+
+ _nextMethodId: number;
+
+ _supportedDDPVersions: string[];
+
+ _heartbeatInterval: number;
+
+ _heartbeatTimeout: number;
+
+ _methodInvokers: Record;
+
+ _outstandingMethodBlocks: any[];
+
+ _documentsWrittenByStub: Record;
+
+ _serverDocuments: Record;
+
+ _afterUpdateCallbacks: VoidFunction[];
+
+ _messagesBufferedUntilQuiescence: any[];
+
+ _methodsBlockingQuiescence: Record;
+
+ _subsBeingRevived: Record;
+
+ _resetStores: boolean;
+
+ _updatesForUnknownStores: Record;
+
+ _retryMigrate: VoidFunction | null;
+
+ _bufferedWrites: Record;
+
+ _bufferedWritesFlushAt: number | null;
+
+ _bufferedWritesFlushHandle: any;
+
+ _bufferedWritesInterval: number;
+
+ _bufferedWritesMaxAge: number;
+
+ _subscriptions: Record;
+
+ _userId: string | null;
+
+ _userIdDeps: any;
+
+ _streamHandlers: ConnectionStreamHandlers;
+
+ _heartbeat: any;
+
+ _messageProcessors: MessageProcessors;
+
+ _livedata_connected: UnknownFunction;
+
+ _livedata_data: UnknownFunction;
+
+ _livedata_nosub: UnknownFunction;
+
+ _livedata_result: UnknownFunction;
+
+ _livedata_error: UnknownFunction;
+
+ _documentProcessors: DocumentProcessors;
+
+ _process_added: UnknownFunction;
+
+ _process_changed: UnknownFunction;
+
+ _process_removed: UnknownFunction;
+
+ _process_ready: UnknownFunction;
+
+ _process_updated: UnknownFunction;
+
+ _pushUpdate: (updates: any, collection: string, msg: any) => void;
+
+ _getServerDoc: (collection: string, id: string) => any;
+
+ _liveDataWritesPromise: Promise | undefined;
+
+ _mongo_livedata_collections?: Map;
+
+ constructor(url: string | any, options: Partial) {
+ this.options = {
+ onConnected: noop,
+ onDDPVersionNegotiationFailure(description) {
+ console.debug(description);
+ },
+ heartbeatInterval: 17500,
+ heartbeatTimeout: 15000,
+ // npmFayeOptions: Object.create(null),
+ // These options are only for testing.
+ reloadWithOutstanding: false,
+ supportedDDPVersions: DDPCommon.SUPPORTED_DDP_VERSIONS,
+ retry: true,
+ respondToPings: true,
+ // When updates are coming within this ms interval, batch them together.
+ bufferedWritesInterval: 5,
+ // Flush buffers immediately if writes are happening continuously for more than this many ms.
+ bufferedWritesMaxAge: 500,
+
+ ...options,
+ };
+
+ // If set, called when we reconnect, queuing method calls _before_ the
+ // existing outstanding ones.
+ // NOTE: This feature has been preserved for backwards compatibility. The
+ // preferred method of setting a callback on reconnect is to use
+ // DDP.onReconnect.
+ this.onReconnect = null;
+
+ this._stream = new ClientStream(url, {
+ ConnectionError,
+ ...options,
+ });
+
+ this._lastSessionId = null;
+ this._versionSuggestion = null; // The last proposed DDP version.
+ this._version = null; // The DDP version agreed on by client and server.
+ this._stores = Object.create(null); // name -> object with methods
+ this._methodHandlers = Object.create(null); // name -> func
+ this._nextMethodId = 1;
+ this._supportedDDPVersions = this.options.supportedDDPVersions ?? [];
+
+ this._heartbeatInterval = this.options.heartbeatInterval;
+ this._heartbeatTimeout = this.options.heartbeatTimeout;
+
+ // Tracks methods which the user has tried to call but which have not yet
+ // called their user callback (ie, they are waiting on their result or for all
+ // of their writes to be written to the local cache). Map from method ID to
+ // MethodInvoker object.
+ this._methodInvokers = Object.create(null);
+
+ // Tracks methods which the user has called but whose result messages have not
+ // arrived yet.
+ //
+ // _outstandingMethodBlocks is an array of blocks of methods. Each block
+ // represents a set of methods that can run at the same time. The first block
+ // represents the methods which are currently in flight; subsequent blocks
+ // must wait for previous blocks to be fully finished before they can be sent
+ // to the server.
+ //
+ // Each block is an object with the following fields:
+ // - methods: a list of MethodInvoker objects
+ // - wait: a boolean; if true, this block had a single method invoked with
+ // the "wait" option
+ //
+ // There will never be adjacent blocks with wait=false, because the only thing
+ // that makes methods need to be serialized is a wait method.
+ //
+ // Methods are removed from the first block when their "result" is
+ // received. The entire first block is only removed when all of the in-flight
+ // methods have received their results (so the "methods" list is empty) *AND*
+ // all of the data written by those methods are visible in the local cache. So
+ // it is possible for the first block's methods list to be empty, if we are
+ // still waiting for some objects to quiesce.
+ //
+ // Example:
+ // _outstandingMethodBlocks = [
+ // {wait: false, methods: []},
+ // {wait: true, methods: []},
+ // {wait: false, methods: [,
+ // ]}]
+ // This means that there were some methods which were sent to the server and
+ // which have returned their results, but some of the data written by
+ // the methods may not be visible in the local cache. Once all that data is
+ // visible, we will send a 'login' method. Once the login method has returned
+ // and all the data is visible (including re-running subs if userId changes),
+ // we will send the 'foo' and 'bar' methods in parallel.
+ this._outstandingMethodBlocks = [];
+
+ // method ID -> array of objects with keys 'collection' and 'id', listing
+ // documents written by a given method's stub. keys are associated with
+ // methods whose stub wrote at least one document, and whose data-done message
+ // has not yet been received.
+ this._documentsWrittenByStub = {};
+ // collection -> IdMap of "server document" object. A "server document" has:
+ // - "document": the version of the document according the
+ // server (ie, the snapshot before a stub wrote it, amended by any changes
+ // received from the server)
+ // It is undefined if we think the document does not exist
+ // - "writtenByStubs": a set of method IDs whose stubs wrote to the document
+ // whose "data done" messages have not yet been processed
+ this._serverDocuments = {};
+
+ // Array of callbacks to be called after the next update of the local
+ // cache. Used for:
+ // - Calling methodInvoker.dataVisible and sub ready callbacks after
+ // the relevant data is flushed.
+ // - Invoking the callbacks of "half-finished" methods after reconnect
+ // quiescence. Specifically, methods whose result was received over the old
+ // connection (so we don't re-send it) but whose data had not been made
+ // visible.
+ this._afterUpdateCallbacks = [];
+
+ // In two contexts, we buffer all incoming data messages and then process them
+ // all at once in a single update:
+ // - During reconnect, we buffer all data messages until all subs that had
+ // been ready before reconnect are ready again, and all methods that are
+ // active have returned their "data done message"; then
+ // - During the execution of a "wait" method, we buffer all data messages
+ // until the wait method gets its "data done" message. (If the wait method
+ // occurs during reconnect, it doesn't get any special handling.)
+ // all data messages are processed in one update.
+ //
+ // The following fields are used for this "quiescence" process.
+
+ // This buffers the messages that aren't being processed yet.
+ this._messagesBufferedUntilQuiescence = [];
+ // Map from method ID -> true. Methods are removed from this when their
+ // "data done" message is received, and we will not quiesce until it is
+ // empty.
+ this._methodsBlockingQuiescence = {};
+ // map from sub ID -> true for subs that were ready (ie, called the sub
+ // ready callback) before reconnect but haven't become ready again yet
+ this._subsBeingRevived = {}; // map from sub._id -> true
+ // if true, the next data update should reset all stores. (set during
+ // reconnect.)
+ this._resetStores = false;
+
+ // name -> array of updates for (yet to be created) collections
+ this._updatesForUnknownStores = {};
+ // if we're blocking a migration, the retry func
+ this._retryMigrate = null;
+ // Collection name -> array of messages.
+ this._bufferedWrites = {};
+ // When current buffer of updates must be flushed at, in ms timestamp.
+ this._bufferedWritesFlushAt = null;
+ // Timeout handle for the next processing of all pending writes
+ this._bufferedWritesFlushHandle = null;
+
+ this._bufferedWritesInterval = this.options.bufferedWritesInterval;
+ this._bufferedWritesMaxAge = this.options.bufferedWritesMaxAge;
+
+ // metadata for subscriptions. Map from sub ID to object with keys:
+ // - id
+ // - name
+ // - params
+ // - inactive (if true, will be cleaned up if not reused in re-run)
+ // - ready (has the 'ready' message been received?)
+ // - readyCallback (an optional callback to call when ready)
+ // - errorCallback (an optional callback to call if the sub terminates with
+ // an error, XXX COMPAT WITH 1.0.3.1)
+ // - stopCallback (an optional callback to call when the sub terminates
+ // for any reason, with an error argument if an error triggered the stop)
+ this._subscriptions = {};
+
+ // Reactive userId.
+ this._userId = null;
+ this._userIdDeps = new Tracker.Dependency();
+
+ // Block auto-reload while we're waiting for method responses.
+ if (!options.reloadWithOutstanding) {
+ Reload._onMigrate((retry: any) => {
+ if (!this._readyToMigrate()) {
+ this._retryMigrate = retry;
+ return [false];
+ }
+ return [true];
+ });
+ }
+
+ this._streamHandlers = new ConnectionStreamHandlers(this);
+
+ const onDisconnect = () => {
+ if (this._heartbeat) {
+ this._heartbeat.stop();
+ this._heartbeat = null;
+ }
+ };
+
+ this._stream.on('message', (msg) => this._streamHandlers.onMessage(msg));
+ this._stream.on('reset', () => this._streamHandlers.onReset());
+ this._stream.on('disconnect', onDisconnect);
+
+ this._messageProcessors = new MessageProcessors(this);
+
+ // Expose message processor methods to maintain backward compatibility
+ this._livedata_connected = (msg: any) => this._messageProcessors._livedata_connected(msg);
+ this._livedata_data = (msg: any) => this._messageProcessors._livedata_data(msg);
+ this._livedata_nosub = (msg: any) => this._messageProcessors._livedata_nosub(msg);
+ this._livedata_result = (msg: any) => this._messageProcessors._livedata_result(msg);
+ this._livedata_error = (msg: any) => this._messageProcessors._livedata_error(msg);
+
+ this._documentProcessors = new DocumentProcessors(this);
+
+ // Expose document processor methods to maintain backward compatibility
+ this._process_added = (msg: any, updates: any) => this._documentProcessors._process_added(msg, updates);
+ this._process_changed = (msg: any, updates: any) => this._documentProcessors._process_changed(msg, updates);
+ this._process_removed = (msg: any, updates: any) => this._documentProcessors._process_removed(msg, updates);
+ this._process_ready = (msg: any, updates: any) => this._documentProcessors._process_ready(msg, updates);
+ this._process_updated = (msg: any, updates: any) => this._documentProcessors._process_updated(msg, updates);
+
+ // Also expose utility methods used by other parts of the system
+ this._pushUpdate = (updates: any, collection: string, msg: any) => this._documentProcessors._pushUpdate(updates, collection, msg);
+ this._getServerDoc = (collection: string, id: string) => this._documentProcessors._getServerDoc(collection, id);
+ }
+
+ // 'name' is the name of the data on the wire that should go in the
+ // store. 'wrappedStore' should be an object with methods beginUpdate, update,
+ // endUpdate, saveOriginals, retrieveOriginals. see Collection for an example.
+ createStoreMethods(name: string, wrappedStore: any) {
+ // const self = this;
+
+ if (name in this._stores) return false;
+
+ // Wrap the input object in an object which makes any store method not
+ // implemented by 'store' into a no-op.
+ const store: any = Object.create(null);
+ const keysOfStore = ['update', 'beginUpdate', 'endUpdate', 'saveOriginals', 'retrieveOriginals', 'getDoc', '_getCollection'];
+ keysOfStore.forEach((method) => {
+ store[method] = (...args: any[]) => {
+ if (wrappedStore[method]) {
+ return wrappedStore[method](...args);
+ }
+ };
+ });
+ this._stores[name] = store;
+ // Add _name prop to store
+ store._name = name;
+ return store;
+ }
+
+ registerStoreClient(name: string, wrappedStore: any) {
+ // const self = this;
+
+ const store = this.createStoreMethods(name, wrappedStore);
+
+ const queued = this._updatesForUnknownStores[name];
+ if (Array.isArray(queued)) {
+ store.beginUpdate(queued.length, false);
+ queued.forEach((msg: any) => {
+ store.update(msg);
+ });
+ store.endUpdate();
+ delete this._updatesForUnknownStores[name];
+ }
+
+ return true;
+ }
+
+ async registerStoreServer(name: string, wrappedStore: any) {
+ // const self = this;
+
+ const store = this.createStoreMethods(name, wrappedStore);
+
+ const queued = this._updatesForUnknownStores[name];
+ if (Array.isArray(queued)) {
+ await store.beginUpdate(queued.length, false);
+ await Promise.all(queued.map((msg) => store.update(msg)));
+ await store.endUpdate();
+ delete this._updatesForUnknownStores[name];
+ }
+
+ return true;
+ }
+
+ subscribe(name: string, ...params: any[]) {
+ let callbacks: any = Object.create(null);
+ if (params.length) {
+ const lastParam = params[params.length - 1];
+ if (typeof lastParam === 'function') {
+ callbacks.onReady = params.pop();
+ } else if (
+ lastParam &&
+ [
+ lastParam.onReady,
+ // XXX COMPAT WITH 1.0.3.1 onError used to exist, but now we use
+ // onStop with an error callback instead.
+ lastParam.onError,
+ lastParam.onStop,
+ ].some((f: any) => typeof f === 'function')
+ ) {
+ callbacks = params.pop();
+ }
+ }
+
+ // Is there an existing sub with the same name and param, run in an
+ // invalidated Computation? This will happen if we are rerunning an
+ // existing computation.
+ //
+ // For example, consider a rerun of:
+ //
+ // Tracker.autorun(function () {
+ // Meteor.subscribe("foo", Session.get("foo"));
+ // Meteor.subscribe("bar", Session.get("bar"));
+ // });
+ //
+ // If "foo" has changed but "bar" has not, we will match the "bar"
+ // subcribe to an existing inactive subscription in order to not
+ // unsub and resub the subscription unnecessarily.
+ //
+ // We only look for one such sub; if there are N apparently-identical subs
+ // being invalidated, we will require N matching subscribe calls to keep
+ // them all active.
+ const existing = Object.values(this._subscriptions).find(
+ (sub) => sub.inactive && sub.name === name && EJSON.equals(sub.params, params),
+ );
+
+ let id: string;
+ if (existing) {
+ id = existing.id;
+ existing.inactive = false; // reactivate
+
+ if (callbacks.onReady) {
+ // If the sub is not already ready, replace any ready callback with the
+ // one provided now. (It's not really clear what users would expect for
+ // an onReady callback inside an autorun; the semantics we provide is
+ // that at the time the sub first becomes ready, we call the last
+ // onReady callback provided, if any.)
+ // If the sub is already ready, run the ready callback right away.
+ // It seems that users would expect an onReady callback inside an
+ // autorun to trigger once the sub first becomes ready and also
+ // when re-subs happens.
+ if (existing.ready) {
+ callbacks.onReady();
+ } else {
+ existing.readyCallback = callbacks.onReady;
+ }
+ }
+
+ // XXX COMPAT WITH 1.0.3.1 we used to have onError but now we call
+ // onStop with an optional error argument
+ if (callbacks.onError) {
+ // Replace existing callback if any, so that errors aren't
+ // double-reported.
+ existing.errorCallback = callbacks.onError;
+ }
+
+ if (callbacks.onStop) {
+ existing.stopCallback = callbacks.onStop;
+ }
+ } else {
+ // New sub! Generate an id, save it locally, and send message.
+ id = Random.id();
+ this._subscriptions[id] = {
+ id,
+ name,
+ params: EJSON.clone(params),
+ inactive: false,
+ ready: false,
+ readyDeps: new Tracker.Dependency(),
+ readyCallback: callbacks.onReady,
+ // XXX COMPAT WITH 1.0.3.1 #errorCallback
+ errorCallback: callbacks.onError,
+ stopCallback: callbacks.onStop,
+ connection: this,
+ remove() {
+ delete this.connection._subscriptions[this.id];
+ this.ready && this.readyDeps.changed();
+ },
+ stop() {
+ this.connection._sendQueued({ msg: 'unsub', id });
+ this.remove();
+
+ if (callbacks.onStop) {
+ callbacks.onStop();
+ }
+ },
+ };
+ this._send({ msg: 'sub', id, name, params });
+ }
+
+ // return a handle to the application.
+ const handle = {
+ stop: () => {
+ if (!isKey(this._subscriptions, id)) {
+ return;
+ }
+ this._subscriptions[id].stop();
+ },
+ ready: () => {
+ // return false if we've unsubscribed.
+ if (!hasOwn(this._subscriptions, id)) {
+ return false;
+ }
+ const record = this._subscriptions[id];
+ record.readyDeps.depend();
+ return record.ready;
+ },
+ subscriptionId: id,
+ };
+
+ if (Tracker.active) {
+ // We're in a reactive computation, so we'd like to unsubscribe when the
+ // computation is invalidated... but not if the rerun just re-subscribes
+ // to the same subscription! When a rerun happens, we use onInvalidate
+ // as a change to mark the subscription "inactive" so that it can
+ // be reused from the rerun. If it isn't reused, it's killed from
+ // an afterFlush.
+ Tracker.onInvalidate((_c) => {
+ if (hasOwn(this._subscriptions, id)) {
+ this._subscriptions[id].inactive = true;
+ }
+
+ Tracker.afterFlush(() => {
+ if (hasOwn(this._subscriptions, id) && this._subscriptions[id].inactive) {
+ handle.stop();
+ }
+ });
+ });
+ }
+
+ return handle;
+ }
+
+ isAsyncCall() {
+ return DDP._CurrentMethodInvocation._isCallAsyncMethodRunning();
+ }
+
+ methods(methods: Record) {
+ Object.entries(methods).forEach(([name, func]) => {
+ if (typeof func !== 'function') {
+ throw new Error(`Method '${name}' must be a function`);
+ }
+ if (this._methodHandlers[name]) {
+ throw new Error(`A method named '${name}' is already defined`);
+ }
+ this._methodHandlers[name] = func;
+ });
+ }
+
+ _getIsSimulation({ isFromCallAsync, alreadyInSimulation }: any) {
+ if (!isFromCallAsync) {
+ return alreadyInSimulation;
+ }
+ return alreadyInSimulation && DDP._CurrentMethodInvocation._isCallAsyncMethodRunning();
+ }
+
+ call(name: string, ...args: [...EJSONable[], (...args: any[]) => any]): any {
+ // if it's a function, the last argument is the result callback,
+ // not a parameter to the remote method.
+ const lastArg = args[args.length - 1];
+ if (isFunction(lastArg)) {
+ return this.apply(name, args.slice(0, -1), undefined, lastArg);
+ }
+
+ return this.apply(name, args, undefined);
+ }
+
+ callAsync(name: string, ...args: EJSONable[]): Promise {
+ if (args.length && typeof args[args.length - 1] === 'function') {
+ throw new Error("Meteor.callAsync() does not accept a callback. You should 'await' the result, or use .then().");
+ }
+
+ return this.applyAsync(name, args, { returnServerResultPromise: true });
+ }
+
+ apply(
+ name: string,
+ args: any[],
+ options?: {
+ wait?: boolean;
+ onResultReceived?: (...args: any[]) => void;
+ noRetry?: boolean;
+ throwStubExceptions?: boolean;
+ returnStubValue?: boolean;
+ },
+ callback?: (...args: any[]) => void,
+ ) {
+ const stubOptions = this._stubCall(name, EJSON.clone(args), options);
+
+ if (stubOptions.hasStub) {
+ if (
+ !this._getIsSimulation({
+ alreadyInSimulation: stubOptions.alreadyInSimulation,
+ isFromCallAsync: stubOptions.isFromCallAsync,
+ })
+ ) {
+ this._saveOriginals();
+ }
+ try {
+ stubOptions.stubReturnValue = DDP._CurrentMethodInvocation.withValue(stubOptions.invocation, stubOptions.stubInvocation);
+ if (Meteor._isPromise(stubOptions.stubReturnValue)) {
+ console.debug(
+ `Method ${name}: Calling a method that has an async method stub with call/apply can lead to unexpected behaviors. Use callAsync/applyAsync instead.`,
+ );
+ }
+ } catch (e) {
+ stubOptions.exception = e;
+ }
+ }
+ return this._apply(name, stubOptions, args, options, callback);
+ }
+
+ applyAsync(name: string, args: any[], options: any, callback?: ((...args: any[]) => void) | undefined) {
+ const stubPromise = this._applyAsyncStubInvocation(name, args, options);
+
+ const promise = this._applyAsync({
+ name,
+ args,
+ options,
+ callback,
+ stubPromise,
+ });
+ // only return the stubReturnValue
+ promise.stubPromise = stubPromise.then((o: any) => {
+ if (o.exception) {
+ throw o.exception;
+ }
+ return o.stubReturnValue;
+ });
+ // this avoids attribute recursion
+ promise.serverPromise = new Promise((resolve, reject) => promise.then(resolve).catch(reject));
+ return promise;
+ }
+
+ async _applyAsyncStubInvocation(name: string, args: any[], options: any) {
+ const stubOptions = this._stubCall(name, EJSON.clone(args), options);
+ if (stubOptions.hasStub) {
+ if (
+ !this._getIsSimulation({
+ alreadyInSimulation: stubOptions.alreadyInSimulation,
+ isFromCallAsync: stubOptions.isFromCallAsync,
+ })
+ ) {
+ this._saveOriginals();
+ }
+ try {
+ const currentContext = DDP._CurrentMethodInvocation._setNewContextAndGetCurrent(stubOptions.invocation);
+ try {
+ stubOptions.stubReturnValue = await stubOptions.stubInvocation();
+ } catch (e) {
+ stubOptions.exception = e;
+ } finally {
+ DDP._CurrentMethodInvocation._set(currentContext);
+ }
+ } catch (e) {
+ stubOptions.exception = e;
+ }
+ }
+ return stubOptions;
+ }
+
+ _applyAsync({
+ name,
+ args,
+ options,
+ callback,
+ stubPromise,
+ }: {
+ name: string;
+ args: any[];
+ options: any;
+ callback?: ((...args: any[]) => any) | null | undefined;
+ stubPromise: Promise;
+ }): Promise & { stubPromise?: Promise; serverPromise?: Promise } {
+ return stubPromise.then((stubOptions: StubOptions) => {
+ return this._apply(name, stubOptions, args, options, callback);
+ });
+ }
+
+ _apply(name: string, stubCallValue: StubOptions, args: any[], options: any, callback?: ((...args: any[]) => any) | null | undefined) {
+ if (!callback && typeof options === 'function') {
+ callback = options;
+ options = Object.create(null);
+ }
+ options = options || Object.create(null);
+
+ if (callback) {
+ // XXX would it be better form to do the binding in stream.on,
+ // or caller, instead of here?
+ // XXX improve error message (and how we report it)
+ callback = Meteor.bindEnvironment(callback, `delivering result of invoking '${name}'`);
+ }
+ const { hasStub, exception, stubReturnValue, alreadyInSimulation, randomSeed } = stubCallValue;
+
+ // Keep our args safe from mutation (eg if we don't send the message for a
+ // while because of a wait method).
+ args = EJSON.clone(args);
+ // If we're in a simulation, stop and return the result we have,
+ // rather than going on to do an RPC. If there was no stub,
+ // we'll end up returning undefined.
+ if (
+ this._getIsSimulation({
+ alreadyInSimulation,
+ isFromCallAsync: stubCallValue.isFromCallAsync,
+ })
+ ) {
+ let result;
+
+ if (callback) {
+ callback(exception, stubReturnValue);
+ } else {
+ if (exception) throw exception;
+ result = stubReturnValue;
+ }
+
+ return options._returnMethodInvoker ? { result } : result;
+ }
+
+ // We only create the methodId here because we don't actually need one if
+ // we're already in a simulation
+ const methodId = `${this._nextMethodId++}`;
+ if (hasStub) {
+ this._retrieveAndStoreOriginals(methodId);
+ }
+
+ // Generate the DDP message for the method call. Note that on the client,
+ // it is important that the stub have finished before we send the RPC, so
+ // that we know we have a complete list of which local documents the stub
+ // wrote.
+ const message: Record = {
+ msg: 'method',
+ id: methodId,
+ method: name,
+ params: args,
+ };
+
+ // If an exception occurred in a stub, and we're ignoring it
+ // because we're doing an RPC and want to use what the server
+ // returns instead, log it so the developer knows
+ // (unless they explicitly ask to see the error).
+ //
+ // Tests can set the '_expectedByTest' flag on an exception so it won't
+ // go to log.
+ if (exception) {
+ if (options.throwStubExceptions) {
+ throw exception;
+ } else if (!exception._expectedByTest) {
+ console.debug(`Exception while simulating the effect of invoking '${name}'`, exception);
+ }
+ }
+
+ // At this point we're definitely doing an RPC, and we're going to
+ // return the value of the RPC to the caller.
+
+ // If the caller didn't give a callback, decide what to do.
+ let promise;
+ if (!callback) {
+ if (!options.returnServerResultPromise && (!options.isFromCallAsync || options.returnStubValue)) {
+ callback = (err: any) => {
+ err && console.debug(`Error invoking Method '${name}'`, err);
+ };
+ } else {
+ promise = new Promise((resolve: any, reject) => {
+ callback = (...allArgs: any[]) => {
+ const args = Array.from(allArgs);
+ const err = args.shift();
+ if (err) {
+ reject(err);
+ return;
+ }
+ resolve(...args);
+ };
+ });
+ }
+ }
+
+ // Send the randomSeed only if we used it
+ if (randomSeed.value !== null) {
+ message.randomSeed = randomSeed.value;
+ }
+
+ const methodInvoker = new MethodInvoker({
+ methodId,
+ callback,
+ connection: this,
+ onResultReceived: options.onResultReceived,
+ wait: !!options.wait,
+ message,
+ noRetry: !!options.noRetry,
+ });
+
+ let result;
+
+ if (promise) {
+ result = options.returnStubValue ? promise.then(() => stubReturnValue) : promise;
+ } else {
+ result = options.returnStubValue ? stubReturnValue : undefined;
+ }
+
+ if (options._returnMethodInvoker) {
+ return {
+ methodInvoker,
+ result,
+ };
+ }
+
+ this._addOutstandingMethod(methodInvoker, options);
+ return result;
+ }
+
+ _stubCall(name: string, args: any[], options?: any): StubOptions {
+ // Run the stub, if we have one. The stub is supposed to make some
+ // temporary writes to the database to give the user a smooth experience
+ // until the actual result of executing the method comes back from the
+ // server (whereupon the temporary writes to the database will be reversed
+ // during the beginUpdate/endUpdate process.)
+ //
+ // Normally, we ignore the return value of the stub (even if it is an
+ // exception), in favor of the real return value from the server. The
+ // exception is if the *caller* is a stub. In that case, we're not going
+ // to do a RPC, so we use the return value of the stub as our return
+ // value.
+ // const self = this;
+ const enclosing = DDP._CurrentMethodInvocation.get();
+ const stub = this._methodHandlers[name];
+ const alreadyInSimulation = enclosing?.isSimulation;
+ const isFromCallAsync = enclosing?._isFromCallAsync;
+ const randomSeed: { value: string | null } = { value: null };
+
+ const defaultReturn = {
+ alreadyInSimulation,
+ randomSeed,
+ isFromCallAsync,
+ };
+ if (!stub) {
+ return { ...defaultReturn, hasStub: false };
+ }
+
+ // Lazily generate a randomSeed, only if it is requested by the stub.
+ // The random streams only have utility if they're used on both the client
+ // and the server; if the client doesn't generate any 'random' values
+ // then we don't expect the server to generate any either.
+ // Less commonly, the server may perform different actions from the client,
+ // and may in fact generate values where the client did not, but we don't
+ // have any client-side values to match, so even here we may as well just
+ // use a random seed on the server. In that case, we don't pass the
+ // randomSeed to save bandwidth, and we don't even generate it to save a
+ // bit of CPU and to avoid consuming entropy.
+
+ const randomSeedGenerator = () => {
+ if (randomSeed.value === null) {
+ randomSeed.value = DDPCommon.makeRpcSeed(enclosing, name);
+ }
+ return randomSeed.value;
+ };
+
+ const setUserId = (userId: string | null) => {
+ this.setUserId(userId);
+ };
+
+ const invocation = new DDPCommon.MethodInvocation({
+ name,
+ isSimulation: true,
+ userId: this.userId(),
+ isFromCallAsync: options?.isFromCallAsync,
+ setUserId,
+ randomSeed: randomSeedGenerator,
+ });
+
+ // Note that unlike in the corresponding server code, we never audit
+ // that stubs check() their arguments.
+ const stubInvocation = () => {
+ return stub.apply(invocation, EJSON.clone(args));
+ };
+ return { ...defaultReturn, hasStub: true, stubInvocation, invocation };
+ }
+
+ // Before calling a method stub, prepare all stores to track changes and allow
+ // _retrieveAndStoreOriginals to get the original versions of changed
+ // documents.
+ _saveOriginals() {
+ if (!this._waitingForQuiescence()) {
+ void this._flushBufferedWrites();
+ }
+
+ Object.values(this._stores).forEach((store) => {
+ store.saveOriginals();
+ });
+ }
+
+ // Retrieves the original versions of all documents modified by the stub for
+ // method 'methodId' from all stores and saves them to _serverDocuments (keyed
+ // by document) and _documentsWrittenByStub (keyed by method ID).
+ _retrieveAndStoreOriginals(methodId: string) {
+ if (this._documentsWrittenByStub[methodId]) throw new Error('Duplicate methodId in _retrieveAndStoreOriginals');
+
+ const docsWritten: any[] = [];
+
+ Object.entries(this._stores).forEach(([collection, store]: [string, any]) => {
+ const originals = store.retrieveOriginals();
+ // not all stores define retrieveOriginals
+ if (!originals) return;
+ originals.forEach((doc: any, id: string) => {
+ docsWritten.push({ collection, id });
+ if (!isKey(this._serverDocuments, collection)) {
+ this._serverDocuments[collection] = new MongoIDMap();
+ }
+ const serverDoc = this._serverDocuments[collection].setDefault(id, Object.create(null));
+ if (serverDoc.writtenByStubs) {
+ // We're not the first stub to write this doc. Just add our method ID
+ // to the record.
+ serverDoc.writtenByStubs[methodId] = true;
+ } else {
+ // First stub! Save the original value and our method ID.
+ serverDoc.document = doc;
+ serverDoc.flushCallbacks = [];
+ serverDoc.writtenByStubs = Object.create(null);
+ serverDoc.writtenByStubs[methodId] = true;
+ }
+ });
+ });
+ if (!isEmpty(docsWritten)) {
+ this._documentsWrittenByStub[methodId] = docsWritten;
+ }
+ }
+
+ // This is very much a private function we use to make the tests
+ // take up fewer server resources after they complete.
+ _unsubscribeAll() {
+ Object.values(this._subscriptions).forEach((sub: any) => {
+ // Avoid killing the autoupdate subscription so that developers
+ // still get hot code pushes when writing tests.
+ //
+ // XXX it's a hack to encode knowledge about autoupdate here,
+ // but it doesn't seem worth it yet to have a special API for
+ // subscriptions to preserve after unit tests.
+ if (sub.name !== 'meteor_autoupdate_clientVersions') {
+ sub.stop();
+ }
+ });
+ }
+
+ // Sends the DDP stringification of the given message object
+ _send(obj: any, _queue = false) {
+ this._stream.send(DDPCommon.stringifyDDP(obj));
+ }
+
+ // Always queues the call before sending the message
+ // Used, for example, on subscription.[id].stop() to make sure a "sub" message is always called before an "unsub" message
+ // https://github.com/meteor/meteor/issues/13212
+ //
+ // This is part of the actual fix for the rest check:
+ // https://github.com/meteor/meteor/pull/13236
+ _sendQueued(obj: any) {
+ this._send(obj, true);
+ }
+
+ // We detected via DDP-level heartbeats that we've lost the
+ // connection. Unlike `disconnect` or `close`, a lost connection
+ // will be automatically retried.
+ _lostConnection(maybeError?: unknown) {
+ this._stream._lostConnection(maybeError);
+ }
+
+ status() {
+ return this._stream.status();
+ }
+
+ reconnect(...args: any[]) {
+ return this._stream.reconnect(...args);
+ }
+
+ disconnect(...args: any[]) {
+ return this._stream.disconnect(...args);
+ }
+
+ close() {
+ return this._stream.disconnect({ _permanent: true });
+ }
+
+ userId() {
+ if (this._userIdDeps) this._userIdDeps.depend();
+ return this._userId;
+ }
+
+ setUserId(userId: string | null) {
+ // Avoid invalidating dependents if setUserId is called with current value.
+ if (this._userId === userId) return;
+ this._userId = userId;
+ if (this._userIdDeps) this._userIdDeps.changed();
+ }
+
+ // Returns true if we are in a state after reconnect of waiting for subs to be
+ // revived or early methods to finish their data, or we are waiting for a
+ // "wait" method to finish.
+ _waitingForQuiescence() {
+ return !isEmpty(this._subsBeingRevived) || !isEmpty(this._methodsBlockingQuiescence);
+ }
+
+ // Returns true if any method whose message has been sent to the server has
+ // not yet invoked its user callback.
+ _anyMethodsAreOutstanding() {
+ const invokers = this._methodInvokers;
+ return Object.values(invokers).some((invoker: any) => !!invoker.sentMessage);
+ }
+
+ async _processOneDataMessage(msg: any, updates: any) {
+ const messageType = msg.msg;
+
+ // msg is one of ['added', 'changed', 'removed', 'ready', 'updated']
+ if (messageType === 'added') {
+ await this._process_added(msg, updates);
+ } else if (messageType === 'changed') {
+ this._process_changed(msg, updates);
+ } else if (messageType === 'removed') {
+ this._process_removed(msg, updates);
+ } else if (messageType === 'ready') {
+ this._process_ready(msg, updates);
+ } else if (messageType === 'updated') {
+ this._process_updated(msg, updates);
+ } else if (messageType === 'nosub') {
+ // ignore this
+ } else {
+ console.debug('discarding unknown livedata data message type', msg);
+ }
+ }
+
+ _prepareBuffersToFlush() {
+ if (this._bufferedWritesFlushHandle) {
+ clearTimeout(this._bufferedWritesFlushHandle);
+ this._bufferedWritesFlushHandle = null;
+ }
+
+ this._bufferedWritesFlushAt = null;
+ // We need to clear the buffer before passing it to
+ // performWrites. As there's no guarantee that it
+ // will exit cleanly.
+ const writes = this._bufferedWrites;
+ this._bufferedWrites = Object.create(null);
+ return writes;
+ }
+
+ _performWritesClient(updates: Record) {
+ // const self = this;
+
+ if (this._resetStores || !isEmpty(updates)) {
+ // Synchronous store updates for client
+ Object.values(this._stores).forEach((store) => {
+ store.beginUpdate(updates[store._name]?.length || 0, this._resetStores);
+ });
+
+ this._resetStores = false;
+
+ Object.entries(updates).forEach(([storeName, messages]) => {
+ const store = this._stores[storeName];
+ if (store) {
+ messages.forEach((msg) => store.update(msg));
+ } else {
+ this._updatesForUnknownStores[storeName] = this._updatesForUnknownStores[storeName] || [];
+ this._updatesForUnknownStores[storeName].push(...messages);
+ }
+ });
+
+ Object.values(this._stores).forEach((store) => store.endUpdate());
+ }
+
+ this._runAfterUpdateCallbacks();
+ }
+
+ async _flushBufferedWrites() {
+ const writes = this._prepareBuffersToFlush();
+ return this._performWritesClient(writes);
+ }
+
+ // Call any callbacks deferred with _runWhenAllServerDocsAreFlushed whose
+ // relevant docs have been flushed, as well as dataVisible callbacks at
+ // reconnect-quiescence time.
+ _runAfterUpdateCallbacks() {
+ const callbacks = this._afterUpdateCallbacks;
+ this._afterUpdateCallbacks = [];
+ callbacks.forEach((c) => {
+ c();
+ });
+ }
+
+ // Ensures that "f" will be called after all documents currently in
+ // _serverDocuments have been written to the local cache. f will not be called
+ // if the connection is lost before then!
+ _runWhenAllServerDocsAreFlushed(f: VoidFunction) {
+ const runFAfterUpdates = () => {
+ this._afterUpdateCallbacks.push(f);
+ };
+ let unflushedServerDocCount = 0;
+ const onServerDocFlush = () => {
+ --unflushedServerDocCount;
+ if (unflushedServerDocCount === 0) {
+ // This was the last doc to flush! Arrange to run f after the updates
+ // have been applied.
+ runFAfterUpdates();
+ }
+ };
+
+ Object.values(this._serverDocuments).forEach((serverDocuments) => {
+ serverDocuments.forEach((serverDoc: any) => {
+ const writtenByStubForAMethodWithSentMessage = keys(serverDoc.writtenByStubs).some((methodId) => {
+ const invoker = this._methodInvokers[methodId];
+ return invoker?.sentMessage;
+ });
+
+ if (writtenByStubForAMethodWithSentMessage) {
+ ++unflushedServerDocCount;
+ serverDoc.flushCallbacks.push(onServerDocFlush);
+ }
+ });
+ });
+ if (unflushedServerDocCount === 0) {
+ // There aren't any buffered docs --- we can call f as soon as the current
+ // round of updates is applied!
+ runFAfterUpdates();
+ }
+ }
+
+ _addOutstandingMethod(methodInvoker: any, options: any) {
+ if (options?.wait) {
+ // It's a wait method! Wait methods go in their own block.
+ this._outstandingMethodBlocks.push({
+ wait: true,
+ methods: [methodInvoker],
+ });
+ } else {
+ // Not a wait method. Start a new block if the previous block was a wait
+ // block, and add it to the last block of methods.
+ if (isEmpty(this._outstandingMethodBlocks) || last(this._outstandingMethodBlocks).wait) {
+ this._outstandingMethodBlocks.push({
+ wait: false,
+ methods: [],
+ });
+ }
+
+ last(this._outstandingMethodBlocks).methods.push(methodInvoker);
+ }
+
+ // If we added it to the first block, send it out now.
+ if (this._outstandingMethodBlocks.length === 1) {
+ methodInvoker.sendMessage();
+ }
+ }
+
+ // Called by MethodInvoker after a method's callback is invoked. If this was
+ // the last outstanding method in the current block, runs the next block. If
+ // there are no more methods, consider accepting a hot code push.
+ _outstandingMethodFinished() {
+ if (this._anyMethodsAreOutstanding()) return;
+
+ // No methods are outstanding. This should mean that the first block of
+ // methods is empty. (Or it might not exist, if this was a method that
+ // half-finished before disconnect/reconnect.)
+ if (!isEmpty(this._outstandingMethodBlocks)) {
+ const firstBlock = this._outstandingMethodBlocks.shift();
+ if (!isEmpty(firstBlock.methods)) throw new Error(`No methods outstanding but nonempty block: ${JSON.stringify(firstBlock)}`);
+
+ // Send the outstanding methods now in the first block.
+ if (!isEmpty(this._outstandingMethodBlocks)) this._sendOutstandingMethods();
+ }
+
+ // Maybe accept a hot code push.
+ this._maybeMigrate();
+ }
+
+ // Sends messages for all the methods in the first block in
+ // _outstandingMethodBlocks.
+ _sendOutstandingMethods() {
+ if (isEmpty(this._outstandingMethodBlocks)) {
+ return;
+ }
+
+ this._outstandingMethodBlocks[0].methods.forEach((m: any) => {
+ m.sendMessage();
+ });
+ }
+
+ _sendOutstandingMethodBlocksMessages(oldOutstandingMethodBlocks: { wait: boolean; methods: any[] }[]) {
+ if (isEmpty(oldOutstandingMethodBlocks)) return;
+
+ // We have at least one block worth of old outstanding methods to try
+ // again. First: did onReconnect actually send anything? If not, we just
+ // restore all outstanding methods and run the first block.
+ if (isEmpty(this._outstandingMethodBlocks)) {
+ this._outstandingMethodBlocks = oldOutstandingMethodBlocks;
+ this._sendOutstandingMethods();
+ return;
+ }
+
+ // OK, there are blocks on both sides. Special case: merge the last block of
+ // the reconnect methods with the first block of the original methods, if
+ // neither of them are "wait" blocks.
+ if (!last(this._outstandingMethodBlocks).wait && !oldOutstandingMethodBlocks[0].wait) {
+ oldOutstandingMethodBlocks[0].methods.forEach((m) => {
+ last(this._outstandingMethodBlocks).methods.push(m);
+
+ // If this "last block" is also the first block, send the message.
+ if (this._outstandingMethodBlocks.length === 1) {
+ m.sendMessage();
+ }
+ });
+
+ oldOutstandingMethodBlocks.shift();
+ }
+
+ // Now add the rest of the original blocks on.
+ this._outstandingMethodBlocks.push(...oldOutstandingMethodBlocks);
+ }
+
+ _callOnReconnectAndSendAppropriateOutstandingMethods() {
+ const oldOutstandingMethodBlocks = this._outstandingMethodBlocks;
+ this._outstandingMethodBlocks = [];
+
+ this.onReconnect?.();
+ DDP._reconnectHook.forEach((callback) => {
+ callback(this);
+ return true;
+ });
+
+ this._sendOutstandingMethodBlocksMessages(oldOutstandingMethodBlocks);
+ }
+
+ // We can accept a hot code push if there are no methods in flight.
+ _readyToMigrate() {
+ return isEmpty(this._methodInvokers);
+ }
+
+ // If we were blocking a migration, see if it's now possible to continue.
+ // Call whenever the set of outstanding/blocked methods shrinks.
+ _maybeMigrate() {
+ if (this._retryMigrate && this._readyToMigrate()) {
+ this._retryMigrate();
+ this._retryMigrate = null;
+ }
+ }
+}
+
+// This array allows the `_allSubscriptionsReady` method below, which
+// is used by the `spiderable` package, to keep track of whether all
+// data is ready.
+const allConnections: Map = new Map();
+const _reconnectHook = new Hook<[connection: Connection]>({ bindEnvironment: false });
+// This is private but it's used in a few places. accounts-base uses
+// it to get the current user. Meteor.setTimeout and friends clear
+// it. We can probably find a better way to factor this.
+const _CurrentMethodInvocation = new Meteor.EnvironmentVariable<{
+ isSimulation?: boolean;
+ _isFromCallAsync?: boolean;
+ randomStream?: RandomStream;
+ randomSeed?: any;
+}>();
+// const _CurrentPublicationInvocation = new Meteor.EnvironmentVariable();
+
+// XXX: Keep DDP._CurrentInvocation for backwards-compatibility.
+// DDP._CurrentInvocation = DDP._CurrentMethodInvocation;
+
+// const _CurrentCallAsyncInvocation = new Meteor.EnvironmentVariable();
+
+// This is passed into a weird `makeErrorType` function that expects its thing
+// to be a constructor
+// function connectionErrorConstructor(this: any, message: string) {
+// this.message = message;
+// }
+
+// const ConnectionError = Meteor.makeErrorType('DDP.ConnectionError', connectionErrorConstructor);
+class ConnectionError extends Error {
+ constructor(message: string) {
+ super(message);
+ this.name = 'DDP.ConnectionError';
+ }
+}
+
+// const ForcedReconnectError = Meteor.makeErrorType('DDP.ForcedReconnectError');
+class ForcedReconnectError extends Error {
+ constructor(message: string) {
+ super(message);
+ this.name = 'DDP.ForcedReconnectError';
+ }
+}
+
+// Returns the named sequence of pseudo-random values.
+// The scope will be DDP._CurrentMethodInvocation.get(), so the stream will produce
+// consistent values for method calls on the client and server.
+const randomStream = (name: string) => {
+ const scope = DDP._CurrentMethodInvocation.get();
+ return RandomStream.get(scope, name);
+};
+
+const connect = (url: string, options: Partial = {}) => {
+ const connection = allConnections.get(url);
+ if (connection) {
+ return connection;
+ }
+ const ret = new Connection(url, options);
+ allConnections.set(url, ret); // hack. see below.
+ return ret;
+};
+
+const onReconnect = (callback: (connection: Connection) => void) => _reconnectHook.register(callback);
+
+const runtimeConfig = typeof __meteor_runtime_config__ !== 'undefined' ? __meteor_runtime_config__ : Object.create(null);
+const ddpUrl = runtimeConfig.DDP_DEFAULT_CONNECTION_URL || '/';
+export const connection = connect(ddpUrl, { onDDPVersionNegotiationFailure });
+
+export const DDP = {
+ _reconnectHook,
+ _CurrentMethodInvocation,
+ ConnectionError,
+ ForcedReconnectError,
+ randomStream,
+ connect,
+ onReconnect,
+ connection,
+};
+
+const retry = new Retry();
+
+function onDDPVersionNegotiationFailure(description: string) {
+ console.debug(description);
+
+ const migrationData = Reload._migrationData('livedata') || Object.create(null);
+ let failures = migrationData.DDPVersionNegotiationFailures || 0;
+
+ ++failures;
+ Reload._onMigrate('livedata', () => [true, { DDPVersionNegotiationFailures: failures }]);
+
+ retry.retryLater(failures, () => {
+ Reload._reload({ immediateMigration: true });
+ });
+}
+
+Meteor.connection = connection;
+
+['subscribe', 'methods', 'isAsyncCall', 'call', 'callAsync', 'apply', 'applyAsync', 'status', 'reconnect', 'disconnect'].forEach((name) => {
+ (Meteor as any)[name] = (Meteor.connection as any)[name].bind(Meteor.connection);
+});
diff --git a/apps/meteor/src/meteor/ddp-common.ts b/apps/meteor/src/meteor/ddp-common.ts
new file mode 100644
index 0000000000000..becf6b11725e0
--- /dev/null
+++ b/apps/meteor/src/meteor/ddp-common.ts
@@ -0,0 +1,293 @@
+import { EJSON } from './ejson.ts';
+import { Meteor } from './meteor.ts';
+import { Random } from './random.ts';
+import { hasOwn } from './utils/hasOwn.ts';
+import { isEmpty } from './utils/isEmpty.ts';
+import { isKey } from './utils/isKey.ts';
+import { noop } from './utils/noop.ts';
+
+class Heartbeat {
+ heartbeatInterval: number;
+
+ heartbeatTimeout: number;
+
+ _sendPing: (...args: unknown[]) => void;
+
+ _onTimeout: (...args: unknown[]) => void;
+
+ _seenPacket = false;
+
+ _heartbeatIntervalHandle: ReturnType | null = null;
+
+ _heartbeatTimeoutHandle: ReturnType | null = null;
+
+ constructor(options: {
+ heartbeatInterval: number;
+ heartbeatTimeout: number;
+ sendPing: (...args: unknown[]) => void;
+ onTimeout: (...args: unknown[]) => void;
+ }) {
+ this.heartbeatInterval = options.heartbeatInterval;
+ this.heartbeatTimeout = options.heartbeatTimeout;
+ this._sendPing = options.sendPing;
+ this._onTimeout = options.onTimeout;
+ }
+
+ stop() {
+ this._clearHeartbeatIntervalTimer();
+ this._clearHeartbeatTimeoutTimer();
+ }
+
+ start() {
+ this.stop();
+ this._startHeartbeatIntervalTimer();
+ }
+
+ _startHeartbeatIntervalTimer() {
+ this._heartbeatIntervalHandle = setInterval(() => this._heartbeatIntervalFired(), this.heartbeatInterval);
+ }
+
+ _startHeartbeatTimeoutTimer() {
+ this._heartbeatTimeoutHandle = setTimeout(() => this._heartbeatTimeoutFired(), this.heartbeatTimeout);
+ }
+
+ _clearHeartbeatIntervalTimer() {
+ if (this._heartbeatIntervalHandle) {
+ Meteor.clearInterval(this._heartbeatIntervalHandle);
+ this._heartbeatIntervalHandle = null;
+ }
+ }
+
+ _clearHeartbeatTimeoutTimer() {
+ if (this._heartbeatTimeoutHandle) {
+ Meteor.clearTimeout(this._heartbeatTimeoutHandle);
+ this._heartbeatTimeoutHandle = null;
+ }
+ }
+
+ _heartbeatIntervalFired() {
+ if (!this._seenPacket && !this._heartbeatTimeoutHandle) {
+ this._sendPing();
+ this._startHeartbeatTimeoutTimer();
+ }
+
+ this._seenPacket = false;
+ }
+
+ _heartbeatTimeoutFired() {
+ this._heartbeatTimeoutHandle = null;
+ this._onTimeout();
+ }
+
+ messageReceived() {
+ this._seenPacket = true;
+
+ if (this._heartbeatTimeoutHandle) {
+ this._clearHeartbeatTimeoutTimer();
+ }
+ }
+}
+
+const SUPPORTED_DDP_VERSIONS = ['1', 'pre2', 'pre1'];
+
+function parseDDP(stringMessage: string) {
+ let msg: Record;
+ try {
+ msg = JSON.parse(stringMessage);
+ } catch (e) {
+ console.debug('Discarding message with invalid JSON', stringMessage);
+
+ return null;
+ }
+
+ if (msg === null || typeof msg !== 'object') {
+ console.debug('Discarding non-object DDP message', stringMessage);
+
+ return null;
+ }
+
+ if (hasOwn(msg, 'cleared')) {
+ if (!isKey(msg, 'fields')) {
+ msg.fields = {};
+ }
+
+ msg.cleared.forEach((clearKey: string) => {
+ msg.fields[clearKey] = undefined;
+ });
+
+ delete msg.cleared;
+ }
+
+ ['fields', 'params', 'result'].forEach((field) => {
+ if (hasOwn(msg, field)) {
+ msg[field] = EJSON._adjustTypesFromJSONValue(msg[field]);
+ }
+ });
+
+ return msg;
+}
+
+function stringifyDDP(msg: any) {
+ const copy = EJSON.clone(msg);
+
+ if (hasOwn(msg, 'fields')) {
+ const cleared: string[] = [];
+
+ Object.keys(msg.fields).forEach((key) => {
+ const value = msg.fields[key];
+
+ if (typeof value === 'undefined') {
+ cleared.push(key);
+ delete copy.fields[key];
+ }
+ });
+
+ if (!isEmpty(cleared)) {
+ copy.cleared = cleared;
+ }
+
+ if (isEmpty(copy.fields)) {
+ delete copy.fields;
+ }
+ }
+
+ ['fields', 'params', 'result'].forEach((field) => {
+ if (hasOwn(copy, field)) {
+ copy[field] = EJSON._adjustTypesToJSONValue(copy[field]);
+ }
+ });
+
+ if (msg.id && typeof msg.id !== 'string') {
+ throw new Error('Message id is not a string');
+ }
+
+ return JSON.stringify(copy);
+}
+
+type MethodInvocationOptions = {
+ name: string;
+ isSimulation: boolean;
+ unblock?: (...args: unknown[]) => void;
+ isFromCallAsync?: boolean;
+ userId: string | null;
+ setUserId?: (id: string | null) => void;
+ connection?: any;
+ randomSeed: string | (() => string);
+ fence?: any;
+};
+export class MethodInvocation {
+ name: string;
+
+ isSimulation: boolean;
+
+ _unblock: (...args: unknown[]) => void;
+
+ _calledUnblock: boolean;
+
+ _isFromCallAsync: boolean;
+
+ userId: string | null;
+
+ _setUserId: (id: string | null) => void;
+
+ connection: any;
+
+ randomSeed: string | (() => string);
+
+ randomStream: any;
+
+ fence: any;
+
+ constructor(options: MethodInvocationOptions) {
+ this.name = options.name;
+ this.isSimulation = options.isSimulation;
+ this._unblock = options.unblock || noop;
+ this._calledUnblock = false;
+ this._isFromCallAsync = !!options.isFromCallAsync;
+ this.userId = options.userId;
+ this._setUserId = options.setUserId || noop;
+ this.connection = options.connection;
+ this.randomSeed = options.randomSeed;
+ this.randomStream = null;
+ this.fence = options.fence;
+ }
+
+ unblock() {
+ this._calledUnblock = true;
+ this._unblock();
+ }
+
+ async setUserId(userId: string | null) {
+ if (this._calledUnblock) {
+ throw new Error("Can't call setUserId in a method after calling unblock");
+ }
+
+ this.userId = userId;
+ await this._setUserId(userId);
+ }
+}
+
+function randomToken() {
+ return Random.hexString(20);
+}
+
+export class RandomStream {
+ seed: (string | (() => string))[];
+
+ sequences: any;
+
+ constructor(options: { seed?: string }) {
+ this.seed = [options.seed || randomToken()];
+ this.sequences = Object.create(null);
+ }
+
+ _sequence(name: any) {
+ let sequence = this.sequences[name] || null;
+
+ if (sequence === null) {
+ const sequenceSeed = this.seed.concat(name).map((s) => (typeof s === 'function' ? s() : s));
+
+ sequence = Random.createWithSeeds.apply(null, sequenceSeed);
+ this.sequences[name] = sequence;
+ }
+
+ return sequence;
+ }
+
+ static get(scope?: { randomStream?: RandomStream; randomSeed?: any } | undefined, name?: string): (typeof Random)['insecure'] {
+ if (!name) {
+ name = 'default';
+ }
+
+ if (!scope) {
+ return Random.insecure;
+ }
+
+ let { randomStream } = scope;
+
+ if (!randomStream) {
+ randomStream = new RandomStream({
+ seed: scope.randomSeed,
+ });
+ scope.randomStream = randomStream;
+ }
+
+ return randomStream._sequence(name);
+ }
+}
+
+function makeRpcSeed(enclosing: any, methodName: string) {
+ const stream = RandomStream.get(enclosing, `/rpc/${methodName}`);
+
+ return stream.hexString(20);
+}
+
+export const DDPCommon = {
+ Heartbeat,
+ SUPPORTED_DDP_VERSIONS,
+ parseDDP,
+ stringifyDDP,
+ MethodInvocation,
+ RandomStream,
+ makeRpcSeed,
+};
diff --git a/apps/meteor/src/meteor/diff-sequence-core.ts b/apps/meteor/src/meteor/diff-sequence-core.ts
new file mode 100644
index 0000000000000..14c72e08a6c23
--- /dev/null
+++ b/apps/meteor/src/meteor/diff-sequence-core.ts
@@ -0,0 +1,252 @@
+import { EJSON } from './ejson.ts';
+import { isEmptyObject } from './utils/isEmptyObject.ts';
+
+export type DocWithId = {
+ _id: string;
+ [key: string]: unknown;
+};
+
+export type DiffCallbacks = {
+ both?: (key: K, leftValue: V, rightValue: V) => void;
+ leftOnly?: (key: K, value: V) => void;
+ rightOnly?: (key: K, value: V) => void;
+};
+
+export type UnorderedObserver = {
+ added?: (id: string, fields: Partial) => void;
+ changed?: (id: string, fields: Partial) => void;
+ removed?: (id: string) => void;
+ movedBefore?: never;
+};
+
+export type OrderedObserver = {
+ added?: (id: string, fields: Partial) => void;
+ addedBefore?: (id: string, fields: Partial, before: string | null) => void;
+ changed?: (id: string, fields: Partial) => void;
+ movedBefore?: (id: string, before: string | null) => void;
+ removed?: (id: string) => void;
+};
+
+export const diffObjects = >(
+ left: TDoc,
+ right: TDoc,
+ callbacks: DiffCallbacks,
+) => {
+ for (const key of Object.keys(left) as Array) {
+ const leftValue = left[key];
+
+ if (Object.hasOwn(right, key as string)) {
+ callbacks.both?.(key, leftValue, right[key]);
+ } else {
+ callbacks.leftOnly?.(key, leftValue);
+ }
+ }
+
+ if (callbacks.rightOnly) {
+ for (const key of Object.keys(right) as Array) {
+ if (!Object.hasOwn(left, key as string)) {
+ callbacks.rightOnly(key, right[key]);
+ }
+ }
+ }
+};
+
+export const diffMaps = (left: Map, right: Map, callbacks: DiffCallbacks) => {
+ for (const [key, leftValue] of left) {
+ const rightValue = right.get(key);
+
+ if (rightValue !== undefined) {
+ callbacks.both?.(key, leftValue, rightValue);
+ } else {
+ callbacks.leftOnly?.(key, leftValue);
+ }
+ }
+
+ if (callbacks.rightOnly) {
+ for (const [key, rightValue] of right) {
+ if (!left.has(key)) {
+ callbacks.rightOnly(key, rightValue);
+ }
+ }
+ }
+};
+
+export const makeChangedFields = >(newDoc: TDoc, oldDoc: TDoc): Partial => {
+ const fields: Partial = {};
+
+ diffObjects(oldDoc, newDoc, {
+ leftOnly: (key) => {
+ fields[key] = undefined;
+ },
+ rightOnly: (key, value) => {
+ fields[key] = value;
+ },
+ both: (key, leftValue, rightValue) => {
+ if (!EJSON.equals(leftValue, rightValue)) {
+ fields[key] = rightValue;
+ }
+ },
+ });
+
+ return fields;
+};
+
+export const diffQueryUnorderedChanges = (
+ oldResults: Map,
+ newResults: Map,
+ observer: UnorderedObserver,
+ { projectionFn = EJSON.clone }: { projectionFn?: (doc: T) => Partial } = {},
+) => {
+ if ('movedBefore' in observer && observer.movedBefore) {
+ throw new Error('_diffQueryUnordered called with a movedBefore observer!');
+ }
+
+ for (const [id, newDoc] of newResults) {
+ const oldDoc = oldResults.get(id);
+
+ if (oldDoc) {
+ if (observer.changed && !EJSON.equals(oldDoc, newDoc)) {
+ const changedFields = makeChangedFields(projectionFn(newDoc), projectionFn(oldDoc));
+
+ if (!isEmptyObject(changedFields)) observer.changed(id, changedFields);
+ }
+ } else if (observer.added) {
+ const fields = projectionFn(newDoc);
+ delete fields._id;
+ observer.added(id, fields);
+ }
+ }
+
+ if (observer.removed) {
+ for (const id of oldResults.keys()) {
+ if (!newResults.has(id)) observer.removed(id);
+ }
+ }
+};
+
+export const diffQueryOrderedChanges = (
+ oldResults: T[],
+ newResults: T[],
+ observer: OrderedObserver,
+ { projectionFn = EJSON.clone }: { projectionFn?: (doc: T) => Partial } = {},
+) => {
+ const newPresenceOfId = new Set();
+ for (const doc of newResults) {
+ if (newPresenceOfId.has(doc._id)) console.debug('Duplicate _id in newResults');
+ newPresenceOfId.add(doc._id);
+ }
+
+ const oldIndexOfId = new Map();
+ oldResults.forEach((doc, i) => {
+ if (oldIndexOfId.has(doc._id)) console.debug('Duplicate _id in oldResults');
+ oldIndexOfId.set(doc._id, i);
+ });
+
+ const unmoved: number[] = [];
+ let maxSeqLen = 0;
+ const N = newResults.length;
+ const seqEnds = new Array(N);
+ const ptrs = new Array(N);
+
+ for (let i = 0; i < N; i++) {
+ const currentOldIdx = oldIndexOfId.get(newResults[i]._id);
+ if (currentOldIdx !== undefined) {
+ let j = maxSeqLen;
+
+ while (j > 0) {
+ const prevOldIdx = oldIndexOfId.get(newResults[seqEnds[j - 1]]._id);
+ if (prevOldIdx !== undefined && prevOldIdx < currentOldIdx) {
+ break;
+ }
+ j--;
+ }
+
+ ptrs[i] = j === 0 ? -1 : seqEnds[j - 1];
+ seqEnds[j] = i;
+
+ if (j + 1 > maxSeqLen) {
+ maxSeqLen = j + 1;
+ }
+ }
+ }
+
+ let idx = maxSeqLen === 0 ? -1 : seqEnds[maxSeqLen - 1];
+ while (idx >= 0) {
+ unmoved.push(idx);
+ idx = ptrs[idx];
+ }
+
+ unmoved.reverse();
+ unmoved.push(newResults.length);
+
+ if (observer.removed) {
+ for (const doc of oldResults) {
+ if (!newPresenceOfId.has(doc._id)) observer.removed(doc._id);
+ }
+ }
+
+ let startOfGroup = 0;
+
+ for (const endOfGroup of unmoved) {
+ const groupId = newResults[endOfGroup]?._id ?? null;
+
+ for (let i = startOfGroup; i < endOfGroup; i++) {
+ const newDoc = newResults[i];
+
+ const oldIndex = oldIndexOfId.get(newDoc._id);
+
+ if (oldIndex === undefined) {
+ const fields = projectionFn(newDoc);
+ delete fields._id;
+
+ if (observer.addedBefore) observer.addedBefore(newDoc._id, fields, groupId);
+ else observer.added?.(newDoc._id, fields);
+ } else {
+ const oldDoc = oldResults[oldIndex];
+ const fields = makeChangedFields(projectionFn(newDoc), projectionFn(oldDoc));
+
+ if (!isEmptyObject(fields)) observer.changed?.(newDoc._id, fields);
+ observer.movedBefore?.(newDoc._id, groupId);
+ }
+ }
+
+ if (groupId) {
+ const newDoc = newResults[endOfGroup];
+ const oldIndex = oldIndexOfId.get(newDoc._id);
+
+ if (oldIndex !== undefined) {
+ const oldDoc = oldResults[oldIndex];
+ const fields = makeChangedFields(projectionFn(newDoc), projectionFn(oldDoc));
+
+ if (!isEmptyObject(fields)) observer.changed?.(newDoc._id, fields);
+ }
+ }
+
+ startOfGroup = endOfGroup + 1;
+ }
+};
+
+type DiffQueryArgs =
+ | [ordered: true, oldResults: T[], newResults: T[], observer: OrderedObserver, options?: { projectionFn?: (doc: T) => Partial }]
+ | [
+ ordered: false | undefined,
+ oldResults: Map,
+ newResults: Map,
+ observer: UnorderedObserver,
+ options?: { projectionFn?: (doc: T) => Partial },
+ ];
+
+export const diffQueryChanges = (...[ordered, oldResults, newResults, observer, options]: DiffQueryArgs) =>
+ ordered
+ ? diffQueryOrderedChanges(oldResults, newResults, observer, options)
+ : diffQueryUnorderedChanges(oldResults, newResults, observer, options);
+
+export const applyChanges = >(doc: T, changeFields: Partial) => {
+ for (const [key, value] of Object.entries(changeFields)) {
+ if (value === undefined) {
+ delete doc[key];
+ } else {
+ doc[key as keyof T] = value as T[keyof T];
+ }
+ }
+};
diff --git a/apps/meteor/src/meteor/diff-sequence.ts b/apps/meteor/src/meteor/diff-sequence.ts
new file mode 100644
index 0000000000000..2eabbd590fd3f
--- /dev/null
+++ b/apps/meteor/src/meteor/diff-sequence.ts
@@ -0,0 +1 @@
+export * as DiffSequence from './diff-sequence-core.ts';
\ No newline at end of file
diff --git a/apps/meteor/src/meteor/ejson.ts b/apps/meteor/src/meteor/ejson.ts
new file mode 100644
index 0000000000000..aaba946b13450
--- /dev/null
+++ b/apps/meteor/src/meteor/ejson.ts
@@ -0,0 +1,695 @@
+import { Base64 } from './base64.ts';
+import { hasOwn } from './utils/hasOwn.ts';
+
+type EJSONOptions = {
+ canonical?: boolean;
+ indent?: boolean | number | string;
+ keyOrderSensitive?: boolean;
+};
+
+interface IEJSONConverter {
+ matchJSONValue(obj: any): boolean;
+ matchObject(obj: any): boolean;
+ toJSONValue(obj: any): any;
+ fromJSONValue(obj: any): any;
+}
+
+type JSONable = {
+ [key: string]: number | string | boolean | object | number[] | string[] | object[] | undefined | null;
+};
+
+export type EJSONableCustomType = {
+ clone?(): EJSONableCustomType;
+ equals?(other: object): boolean;
+ toJSONValue(): JSONable;
+ typeName(): string;
+};
+
+export type EJSONableProperty =
+ | number
+ | string
+ | boolean
+ | object
+ | number[]
+ | string[]
+ | object[]
+ | Date
+ | Uint8Array
+ | EJSONableCustomType
+ | undefined
+ | null;
+
+export type EJSONable = {
+ [key: string]: EJSONableProperty;
+};
+
+class CustomTypesMap extends Map any> {
+ override get(name: string): (jsonValue: any) => any {
+ const factory = super.get(name);
+
+ if (!factory) {
+ throw new Error(`Custom EJSON type ${name} is not defined`);
+ }
+
+ return factory;
+ }
+}
+
+const customTypes = new CustomTypesMap();
+
+const isFunction = (fn: unknown): fn is (...args: unknown[]) => unknown => typeof fn === 'function';
+const isObject = (fn: any): fn is Record => typeof fn === 'object' && fn !== null;
+
+const keysOf = (obj: any) => Object.keys(obj);
+const lengthOf = (obj: any) => Object.keys(obj).length;
+
+const convertMapToObject = (map: Map) =>
+ Array.from(map).reduce(
+ (acc, [key, value]) => {
+ acc[key] = value;
+ return acc;
+ },
+ {} as Record,
+ );
+
+const isArguments = (obj: any) => obj != null && hasOwn(obj, 'callee');
+const isInfOrNaN = (obj: any) => Number.isNaN(obj) || obj === Infinity || obj === -Infinity;
+
+const checkError = {
+ maxStack: (msgError: string) => new RegExp('Maximum call stack size exceeded', 'g').test(msgError),
+};
+
+const handleError = any>(fn: T) =>
+ function (this: any, ...args: Parameters): ReturnType {
+ try {
+ return fn.apply(this, args);
+ } catch (error: any) {
+ const isMaxStack = checkError.maxStack(error.message);
+
+ if (isMaxStack) {
+ throw new Error('Converting circular structure to JSON');
+ }
+
+ throw error;
+ }
+ };
+
+function quote(string: string) {
+ return JSON.stringify(string);
+}
+
+const str = (key: string, holder: any, singleIndent: string | false, outerIndent: string, canonical: boolean): string => {
+ const value = holder[key];
+
+ switch (typeof value) {
+ case 'string':
+ return quote(value);
+
+ case 'number':
+ return isFinite(value) ? String(value) : 'null';
+
+ case 'boolean':
+ return String(value);
+
+ case 'object': {
+ if (!value) {
+ return 'null';
+ }
+
+ const innerIndent = outerIndent + singleIndent;
+ const partial: string[] = [];
+ let v: string | undefined;
+
+ if (Array.isArray(value) || hasOwn(value, 'callee')) {
+ const { length } = value;
+
+ for (let i = 0; i < length; i += 1) {
+ partial[i] = str(String(i), value, singleIndent, innerIndent, canonical) || 'null';
+ }
+
+ if (partial.length === 0) {
+ v = '[]';
+ } else if (innerIndent) {
+ v = `[\n${innerIndent}${partial.join(`,\n${innerIndent}`)}\n${outerIndent}]`;
+ } else {
+ v = `[${partial.join(',')}]`;
+ }
+
+ return v;
+ }
+
+ let keys = Object.keys(value);
+
+ if (canonical) {
+ keys = keys.sort();
+ }
+
+ keys.forEach((k) => {
+ v = str(k, value, singleIndent, innerIndent, canonical);
+
+ if (v) {
+ partial.push(quote(k) + (innerIndent ? ': ' : ':') + v);
+ }
+ });
+
+ if (partial.length === 0) {
+ v = '{}';
+ } else if (innerIndent) {
+ v = `{\n${innerIndent}${partial.join(`,\n${innerIndent}`)}\n${outerIndent}}`;
+ } else {
+ v = `{${partial.join(',')}}`;
+ }
+
+ return v;
+ }
+ default:
+ return 'null';
+ }
+};
+
+const canonicalStringify = (value: any, options: EJSONOptions): string => {
+ const allOptions = { indent: '', canonical: false, ...options };
+
+ if (allOptions.indent === true) {
+ allOptions.indent = ' ';
+ } else if (typeof allOptions.indent === 'number') {
+ let newIndent = '';
+ for (let i = 0; i < allOptions.indent; i++) {
+ newIndent += ' ';
+ }
+ allOptions.indent = newIndent;
+ }
+
+ return str('', { '': value }, allOptions.indent, '', allOptions.canonical);
+};
+function toJSONValue(item: any): any {
+ const changed = toJSONValueHelper(item);
+
+ if (changed !== undefined) {
+ return changed;
+ }
+
+ let newItem = item;
+
+ if (isObject(item)) {
+ newItem = EJSON.clone(item);
+ adjustTypesToJSONValue(newItem);
+ }
+
+ return newItem;
+}
+
+function fromJSONValue(item: any): any {
+ let changed = fromJSONValueHelper(item);
+
+ if (changed === item && isObject(item)) {
+ changed = EJSON.clone(item);
+ adjustTypesFromJSONValue(changed);
+ }
+
+ return changed;
+}
+
+function _isCustomType(obj: any): boolean {
+ return obj && isFunction(obj.toJSONValue) && isFunction(obj.typeName) && customTypes.has(obj.typeName());
+}
+
+const builtinConverters: IEJSONConverter[] = [
+ {
+ matchJSONValue(obj) {
+ return hasOwn(obj, '$date') && lengthOf(obj) === 1;
+ },
+
+ matchObject(obj) {
+ return obj instanceof Date;
+ },
+
+ toJSONValue(obj) {
+ return { $date: obj.getTime() };
+ },
+
+ fromJSONValue(obj) {
+ return new Date(obj.$date);
+ },
+ },
+
+ {
+ matchJSONValue(obj) {
+ return hasOwn(obj, '$regexp') && hasOwn(obj, '$flags') && lengthOf(obj) === 2;
+ },
+
+ matchObject(obj) {
+ return obj instanceof RegExp;
+ },
+
+ toJSONValue(regexp) {
+ return { $regexp: regexp.source, $flags: regexp.flags };
+ },
+
+ fromJSONValue(obj) {
+ return new RegExp(
+ obj.$regexp,
+ obj.$flags
+ .slice(0, 50)
+ .replace(/[^gimuy]/g, '')
+ .replace(/(.)(?=.*\1)/g, ''),
+ );
+ },
+ },
+
+ {
+ matchJSONValue(obj) {
+ return hasOwn(obj, '$InfNaN') && lengthOf(obj) === 1;
+ },
+ matchObject: isInfOrNaN,
+ toJSONValue(obj) {
+ let sign;
+
+ if (Number.isNaN(obj)) {
+ sign = 0;
+ } else if (obj === Infinity) {
+ sign = 1;
+ } else {
+ sign = -1;
+ }
+
+ return { $InfNaN: sign };
+ },
+
+ fromJSONValue(obj) {
+ return obj.$InfNaN / 0;
+ },
+ },
+
+ {
+ matchJSONValue(obj) {
+ return hasOwn(obj, '$binary') && lengthOf(obj) === 1;
+ },
+
+ matchObject(obj) {
+ return (typeof Uint8Array !== 'undefined' && obj instanceof Uint8Array) || (obj && hasOwn(obj, '$Uint8ArrayPolyfill'));
+ },
+
+ toJSONValue(obj) {
+ return { $binary: Base64.encode(obj) };
+ },
+
+ fromJSONValue(obj) {
+ return Base64.decode(obj.$binary);
+ },
+ },
+
+ {
+ matchJSONValue(obj) {
+ return hasOwn(obj, '$escape') && lengthOf(obj) === 1;
+ },
+
+ matchObject(obj) {
+ let match = false;
+
+ if (obj) {
+ const keyCount = lengthOf(obj);
+
+ if (keyCount === 1 || keyCount === 2) {
+ match = builtinConverters.some((converter) => converter.matchJSONValue(obj));
+ }
+ }
+
+ return match;
+ },
+
+ toJSONValue(obj) {
+ const newObj: Record = {};
+
+ keysOf(obj).forEach((key) => {
+ newObj[key] = toJSONValue(obj[key]);
+ });
+
+ return { $escape: newObj };
+ },
+
+ fromJSONValue(obj) {
+ const newObj: Record = {};
+
+ keysOf(obj.$escape).forEach((key) => {
+ newObj[key] = fromJSONValue(obj.$escape[key]);
+ });
+
+ return newObj;
+ },
+ },
+
+ {
+ matchJSONValue(obj) {
+ return hasOwn(obj, '$type') && hasOwn(obj, '$value') && lengthOf(obj) === 2;
+ },
+
+ matchObject(obj) {
+ return _isCustomType(obj);
+ },
+
+ toJSONValue(obj) {
+ const jsonValue = obj.toJSONValue();
+
+ return { $type: obj.typeName(), $value: jsonValue };
+ },
+
+ fromJSONValue(obj: { $type: string; $value: any }) {
+ const typeName = obj.$type;
+
+ if (!customTypes.has(typeName)) {
+ throw new Error(`Custom EJSON type ${typeName} is not defined`);
+ }
+
+ const converter = customTypes.get(typeName);
+ return converter(obj.$value);
+ },
+ },
+];
+
+const _getTypes = (isOriginal = false) => {
+ return isOriginal ? customTypes : convertMapToObject(customTypes);
+};
+
+const _getConverters = () => builtinConverters;
+
+const toJSONValueHelper = (item: any) => {
+ for (let i = 0; i < builtinConverters.length; i++) {
+ const converter = builtinConverters[i];
+
+ if (converter.matchObject(item)) {
+ return converter.toJSONValue(item);
+ }
+ }
+
+ return undefined;
+};
+
+const adjustTypesToJSONValue = (obj: unknown): any => {
+ if (obj === null) {
+ return null;
+ }
+
+ const maybeChanged = toJSONValueHelper(obj);
+
+ if (maybeChanged !== undefined) {
+ return maybeChanged;
+ }
+
+ if (!isObject(obj)) {
+ return obj;
+ }
+
+ keysOf(obj).forEach((key) => {
+ const value = obj[key];
+
+ if (!isObject(value) && value !== undefined && !isInfOrNaN(value)) {
+ return;
+ }
+
+ const changed = toJSONValueHelper(value);
+
+ if (changed) {
+ obj[key] = changed;
+
+ return;
+ }
+
+ adjustTypesToJSONValue(value);
+ });
+
+ return obj;
+};
+
+const fromJSONValueHelper = (value: any) => {
+ if (isObject(value) && value !== null) {
+ const keys = keysOf(value);
+
+ if (keys.length <= 2 && keys.every((k) => typeof k === 'string' && k.substr(0, 1) === '$')) {
+ for (let i = 0; i < builtinConverters.length; i++) {
+ const converter = builtinConverters[i];
+
+ if (converter.matchJSONValue(value)) {
+ return converter.fromJSONValue(value);
+ }
+ }
+ }
+ }
+
+ return value;
+};
+
+const adjustTypesFromJSONValue = (obj: any): any => {
+ if (obj === null) {
+ return null;
+ }
+
+ const maybeChanged = fromJSONValueHelper(obj);
+
+ if (maybeChanged !== obj) {
+ return maybeChanged;
+ }
+
+ if (!isObject(obj)) {
+ return obj;
+ }
+
+ keysOf(obj).forEach((key) => {
+ const value = obj[key];
+
+ if (isObject(value)) {
+ const changed = fromJSONValueHelper(value);
+
+ if (value !== changed) {
+ obj[key] = changed;
+
+ return;
+ }
+
+ adjustTypesFromJSONValue(value);
+ }
+ });
+
+ return obj;
+};
+
+const stringify = handleError((item: any, options?: EJSONOptions): string => {
+ let serialized: string;
+ const json = toJSONValue(item);
+
+ if (options && (options.canonical || options.indent)) {
+ serialized = canonicalStringify(json, options);
+ } else {
+ serialized = JSON.stringify(json);
+ }
+
+ return serialized;
+});
+
+const parse = (item: string) => {
+ if (typeof item !== 'string') {
+ throw new Error('EJSON.parse argument should be a string');
+ }
+
+ return fromJSONValue(JSON.parse(item));
+};
+
+const isBinary = (obj: unknown): obj is Uint8Array => {
+ return obj instanceof Uint8Array;
+};
+
+const equals = (a: any, b: any, options?: { keyOrderSensitive?: boolean }): boolean => {
+ let i: number;
+ const keyOrderSensitive = !!options?.keyOrderSensitive;
+
+ if (a === b) {
+ return true;
+ }
+
+ if (Number.isNaN(a) && Number.isNaN(b)) {
+ return true;
+ }
+
+ if (!a || !b) {
+ return false;
+ }
+
+ if (!isObject(a) || !isObject(b)) {
+ return false;
+ }
+
+ if (a instanceof Date && b instanceof Date) {
+ return a.valueOf() === b.valueOf();
+ }
+
+ if (isBinary(a) && isBinary(b)) {
+ if ((a as any).length !== (b as any).length) {
+ return false;
+ }
+
+ for (i = 0; i < (a as any).length; i++) {
+ if ((a as any)[i] !== (b as any)[i]) {
+ return false;
+ }
+ }
+
+ return true;
+ }
+
+ if (isFunction((a as any).equals)) {
+ return (a as any).equals(b, options);
+ }
+
+ if (isFunction((b as any).equals)) {
+ return (b as any).equals(a, options);
+ }
+
+ const aIsArray = Array.isArray(a);
+ const bIsArray = Array.isArray(b);
+
+ if (aIsArray !== bIsArray) {
+ return false;
+ }
+
+ if (aIsArray && bIsArray) {
+ if (a.length !== b.length) {
+ return false;
+ }
+
+ for (i = 0; i < a.length; i++) {
+ if (!equals(a[i], b[i], options)) {
+ return false;
+ }
+ }
+
+ return true;
+ }
+
+ if (_isCustomType(a) || _isCustomType(b)) {
+ if (!_isCustomType(a) || !_isCustomType(b)) {
+ return false;
+ }
+ return equals(toJSONValue(a), toJSONValue(b));
+ }
+
+ let ret;
+ const aKeys = keysOf(a);
+ const bKeys = keysOf(b);
+
+ if (keyOrderSensitive) {
+ i = 0;
+
+ ret = aKeys.every((key) => {
+ if (i >= bKeys.length) {
+ return false;
+ }
+
+ if (key !== bKeys[i]) {
+ return false;
+ }
+
+ if (!equals((a as any)[key], (b as any)[bKeys[i]], options)) {
+ return false;
+ }
+
+ i++;
+
+ return true;
+ });
+ } else {
+ i = 0;
+
+ ret = aKeys.every((key) => {
+ if (!hasOwn(b, key)) {
+ return false;
+ }
+
+ if (!equals((a as any)[key], (b as any)[key], options)) {
+ return false;
+ }
+
+ i++;
+
+ return true;
+ });
+ }
+
+ return ret && i === bKeys.length;
+};
+
+const clone = (v: any): any => {
+ let ret: any;
+
+ if (!isObject(v)) {
+ return v;
+ }
+
+ if (v === null) {
+ return null;
+ }
+
+ if (v instanceof Date) {
+ return new Date(v.getTime());
+ }
+
+ if (v instanceof RegExp) {
+ return v;
+ }
+
+ if (isBinary(v)) {
+ ret = (Base64 as any).newBinary((v as any).length);
+
+ for (let i = 0; i < (v as any).length; i++) {
+ ret[i] = (v as any)[i];
+ }
+
+ return ret;
+ }
+
+ if (Array.isArray(v)) {
+ return v.map(clone);
+ }
+
+ if (isArguments(v)) {
+ return Array.from(v as any).map(clone);
+ }
+
+ if (isFunction(v.clone)) {
+ return v.clone();
+ }
+
+ if (_isCustomType(v)) {
+ return fromJSONValue(clone(toJSONValue(v)));
+ }
+
+ ret = {};
+
+ keysOf(v).forEach((key) => {
+ ret[key] = clone(v[key]);
+ });
+
+ return ret;
+};
+
+export const EJSON = {
+ addType: (name: string, factory: (jsonValue: any) => any) => {
+ if (customTypes.has(name)) {
+ throw new Error(`Type ${name} already present`);
+ }
+
+ customTypes.set(name, factory);
+ },
+ _getTypes,
+ _getConverters,
+ _isCustomType,
+ _adjustTypesToJSONValue: adjustTypesToJSONValue,
+ _adjustTypesFromJSONValue: adjustTypesFromJSONValue,
+ toJSONValue,
+ fromJSONValue,
+ stringify,
+ parse,
+ isBinary,
+ equals,
+ clone,
+ newBinary: Base64.newBinary,
+};
diff --git a/apps/meteor/src/meteor/facebook-oauth.ts b/apps/meteor/src/meteor/facebook-oauth.ts
new file mode 100644
index 0000000000000..d875f8e045d18
--- /dev/null
+++ b/apps/meteor/src/meteor/facebook-oauth.ts
@@ -0,0 +1,64 @@
+import { Meteor } from './meteor.ts';
+import { OAuth } from './oauth.ts';
+import { Random } from './random.ts';
+import { ServiceConfiguration } from './service-configuration.ts';
+import { hasOwn } from './utils/hasOwn.ts';
+import { isObject } from './utils/isObject.ts';
+
+type FacebookOptions = {
+ requestPermissions?: string[];
+ params?: any;
+ absoluteUrlOptions?: any;
+ redirectUrl?: string;
+ auth_type?: string;
+ loginStyle?: string;
+};
+
+type CredentialRequestCompleteCallback = (token?: string | Error) => void;
+
+export const Facebook = {
+ requestCredential(
+ options?: FacebookOptions | CredentialRequestCompleteCallback,
+ credentialRequestCompleteCallback?: CredentialRequestCompleteCallback,
+ ) {
+ if (!credentialRequestCompleteCallback && typeof options === 'function') {
+ credentialRequestCompleteCallback = options;
+ options = {};
+ }
+
+ const config = ServiceConfiguration.configurations.findOne({ service: 'facebook' });
+
+ if (!config || !isObject(config) || !hasOwn(config, 'appId')) {
+ if (credentialRequestCompleteCallback) {
+ credentialRequestCompleteCallback(new ServiceConfiguration.ConfigError());
+ }
+ return;
+ }
+
+ const opts = (options as FacebookOptions) || {};
+ const credentialToken = Random.secret();
+ const mobile = /Android|webOS|iPhone|iPad|iPod|BlackBerry|Windows Phone/i.test(navigator.userAgent);
+ const display = mobile ? 'touch' : 'popup';
+ const scope = opts.requestPermissions ? opts.requestPermissions.join(',') : 'email';
+
+ const loginStyle = OAuth._loginStyle('facebook', config, opts);
+ const API_VERSION = Meteor.settings?.public?.packages?.['facebook-oauth']?.apiVersion || '17.0';
+
+ const redirectUri = OAuth._redirectUri('facebook', config, opts.params, opts.absoluteUrlOptions);
+ const stateParam = OAuth._stateParam(loginStyle, credentialToken, opts.redirectUrl);
+
+ let loginUrl = `https://www.facebook.com/v${API_VERSION}/dialog/oauth?client_id=${config.appId}&redirect_uri=${redirectUri}&display=${display}&scope=${scope}&state=${stateParam}`;
+
+ if (opts.auth_type) {
+ loginUrl += `&auth_type=${encodeURIComponent(opts.auth_type)}`;
+ }
+
+ OAuth.launchLogin({
+ loginService: 'facebook',
+ loginStyle,
+ loginUrl,
+ credentialRequestCompleteCallback,
+ credentialToken,
+ });
+ },
+};
diff --git a/apps/meteor/src/meteor/geojson-utils-core.ts b/apps/meteor/src/meteor/geojson-utils-core.ts
new file mode 100644
index 0000000000000..3d991319f2391
--- /dev/null
+++ b/apps/meteor/src/meteor/geojson-utils-core.ts
@@ -0,0 +1,269 @@
+export type Position = [longitude: number, latitude: number];
+
+export type Shape = {
+ type: TType;
+ coordinates: TCoordinates;
+};
+
+export type Point = Shape<'Point', Position>;
+export type LineString = Shape<'LineString', Position[]>;
+export type Polygon = Shape<'Polygon', Position[][]>;
+export type Geometry = Point | LineString | Polygon;
+
+export type BoundingBox = [[minLng: number, minLat: number], [maxLng: number, maxLat: number]];
+
+const EARTH_RADIUS_KM = 6371;
+
+export const numberToRadius = (deg: number): number => (deg * Math.PI) / 180;
+export const numberToDegree = (rad: number): number => (rad * 180) / Math.PI;
+
+const getSegmentIntersection = (p1: Position, p2: Position, p3: Position, p4: Position): Position | null => {
+ const [[x1, y1], [x2, y2], [x3, y3], [x4, y4]] = [p1, p2, p3, p4];
+ const denom = (y4 - y3) * (x2 - x1) - (x4 - x3) * (y2 - y1);
+
+ if (denom === 0) return null; // Parallel or collinear
+
+ const ua = ((x4 - x3) * (y1 - y3) - (y4 - y3) * (x1 - x3)) / denom;
+ const ub = ((x2 - x1) * (y1 - y3) - (y2 - y1) * (x1 - x3)) / denom;
+
+ if (ua >= 0 && ua <= 1 && ub >= 0 && ub <= 1) {
+ return [x1 + ua * (x2 - x1), y1 + ua * (y2 - y1)];
+ }
+ return null;
+};
+
+export const lineStringsIntersect = (l1: LineString, l2: LineString): Point[] | false => {
+ const intersects: Point[] = [];
+ const c1 = l1.coordinates;
+ const c2 = l2.coordinates;
+
+ for (let i = 0; i < c1.length - 1; i++) {
+ for (let j = 0; j < c2.length - 1; j++) {
+ const intersection = getSegmentIntersection(c1[i], c1[i + 1], c2[j], c2[j + 1]);
+ if (intersection) {
+ intersects.push({ type: 'Point', coordinates: intersection });
+ }
+ }
+ }
+ return intersects.length > 0 ? intersects : false;
+};
+
+export const boundingBoxAroundPolyCoords = (coords: Position[][]): BoundingBox => {
+ const outerRing = coords[0];
+ if (!outerRing?.length) throw new Error('Polygon has no coordinates');
+
+ return outerRing.reduce(
+ ([[minLng, minLat], [maxLng, maxLat]], [lng, lat]) => [
+ [Math.min(minLng, lng), Math.min(minLat, lat)],
+ [Math.max(maxLng, lng), Math.max(maxLat, lat)],
+ ],
+ [
+ [Infinity, Infinity],
+ [-Infinity, -Infinity],
+ ],
+ );
+};
+
+export const pointInBoundingBox = (point: Point, [[minLng, minLat], [maxLng, maxLat]]: BoundingBox): boolean => {
+ const [lng, lat] = point.coordinates;
+ return lng >= minLng && lng <= maxLng && lat >= minLat && lat <= maxLat;
+};
+
+const isPointInRing = ([px, py]: Position, ring: Position[]): boolean => {
+ let inside = false;
+ for (let i = 0, j = ring.length - 1; i < ring.length; j = i++) {
+ const [xi, yi] = ring[i];
+ const [xj, yj] = ring[j];
+
+ const intersect = yi > py !== yj > py && px < ((xj - xi) * (py - yi)) / (yj - yi) + xi;
+ if (intersect) inside = !inside;
+ }
+ return inside;
+};
+
+export const pointInPolygon = (p: Point, poly: Polygon): boolean => {
+ if (!pointInBoundingBox(p, boundingBoxAroundPolyCoords(poly.coordinates))) return false;
+ return poly.coordinates.some((ring) => isPointInRing(p.coordinates, ring));
+};
+
+export const drawCircle = (radiusInMeters: number, centerPoint: Point, steps = 15): Polygon => {
+ const [centerLng, centerLat] = centerPoint.coordinates;
+ const dist = radiusInMeters / 1000 / EARTH_RADIUS_KM;
+ const radCenterLat = numberToRadius(centerLat);
+ const radCenterLng = numberToRadius(centerLng);
+
+ const polyCoordinates: Position[] = Array.from({ length: steps }, (_, i) => {
+ const brng = (2 * Math.PI * i) / steps;
+ const lat = Math.asin(Math.sin(radCenterLat) * Math.cos(dist) + Math.cos(radCenterLat) * Math.sin(dist) * Math.cos(brng));
+ const lng =
+ radCenterLng +
+ Math.atan2(Math.sin(brng) * Math.sin(dist) * Math.cos(radCenterLat), Math.cos(dist) - Math.sin(radCenterLat) * Math.sin(lat));
+
+ return [numberToDegree(lng), numberToDegree(lat)];
+ });
+
+ polyCoordinates.push(polyCoordinates[0]); // Close the circle
+
+ return { type: 'Polygon', coordinates: [polyCoordinates] };
+};
+
+export const rectangleCentroid = (rectangle: Polygon): Point => {
+ const [[xmin, ymin], , [xmax, ymax]] = rectangle.coordinates[0];
+ return {
+ type: 'Point',
+ coordinates: [xmin + (xmax - xmin) / 2, ymin + (ymax - ymin) / 2],
+ };
+};
+
+export const pointDistance = (pt1: Point, pt2: Point): number => {
+ const [lon1, lat1] = pt1.coordinates;
+ const [lon2, lat2] = pt2.coordinates;
+
+ const dLat = numberToRadius(lat2 - lat1);
+ const dLon = numberToRadius(lon2 - lon1);
+
+ const a =
+ Math.pow(Math.sin(dLat / 2), 2) + Math.cos(numberToRadius(lat1)) * Math.cos(numberToRadius(lat2)) * Math.pow(Math.sin(dLon / 2), 2);
+
+ const c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));
+
+ return EARTH_RADIUS_KM * c * 1000;
+};
+
+export const geometryWithinRadius = (geometry: Geometry, center: Point, radius: number): boolean => {
+ let coords: Position[];
+ if (geometry.type === 'Point') {
+ coords = [geometry.coordinates];
+ } else if (geometry.type === 'Polygon') {
+ coords = geometry.coordinates[0];
+ } else {
+ coords = geometry.coordinates;
+ }
+
+ return coords.every((coord) => pointDistance({ type: 'Point', coordinates: coord }, center) <= radius);
+};
+
+const getPolygonCartesianData = (ring: Position[]) => {
+ let areaSize = 0;
+ let x = 0;
+ let y = 0;
+
+ for (let i = 0, j = ring.length - 1; i < ring.length; j = i++) {
+ const [xi, yi] = ring[i];
+ const [xj, yj] = ring[j];
+
+ const f = xi * yj - xj * yi;
+ areaSize += f;
+ x += (xi + xj) * f;
+ y += (yi + yj) * f;
+ }
+
+ return { area: areaSize / 2, f: areaSize * 3, x, y };
+};
+
+export const area = (polygon: Polygon): number => getPolygonCartesianData(polygon.coordinates[0]).area;
+
+export const centroid = (polygon: Polygon): Point => {
+ const { f, x, y } = getPolygonCartesianData(polygon.coordinates[0]);
+ return { type: 'Point', coordinates: [x / f, y / f] };
+};
+
+export const simplify = (sourcePoints: Point[], kinkMeters = 20): Point[] => {
+ if (sourcePoints.length < 3) return sourcePoints;
+
+ const source = sourcePoints.map((p) => ({ lng: p.coordinates[0], lat: p.coordinates[1] }));
+ const nSource = source.length;
+
+ let bandSqr = (kinkMeters * 360.0) / (2.0 * Math.PI * 6378137.0);
+ bandSqr *= bandSqr;
+
+ const index: number[] = [];
+ const sigStart: number[] = [0];
+ const sigEnd: number[] = [nSource - 1];
+ let nStack = 1;
+ const F = (Math.PI / 180.0) * 0.5;
+
+ while (nStack > 0) {
+ const start = sigStart[--nStack];
+ const end = sigEnd[nStack];
+
+ if (end - start > 1) {
+ const s = source[start];
+ const e = source[end];
+
+ let x12 = e.lng - s.lng;
+ const y12 = e.lat - s.lat;
+
+ if (Math.abs(x12) > 180.0) x12 = 360.0 - Math.abs(x12);
+ x12 *= Math.cos(F * (e.lat + s.lat));
+
+ const d12 = x12 * x12 + y12 * y12;
+ let maxDevSqr = -1.0;
+ let sig = start;
+
+ for (let i = start + 1; i < end; i++) {
+ const cur = source[i];
+
+ let x13 = cur.lng - s.lng;
+ const y13 = cur.lat - s.lat;
+
+ if (Math.abs(x13) > 180.0) x13 = 360.0 - Math.abs(x13);
+ x13 *= Math.cos(F * (cur.lat + s.lat));
+ const d13 = x13 * x13 + y13 * y13;
+
+ let x23 = cur.lng - e.lng;
+ const y23 = cur.lat - e.lat;
+
+ if (Math.abs(x23) > 180.0) x23 = 360.0 - Math.abs(x23);
+ x23 *= Math.cos(F * (cur.lat + e.lat));
+ const d23 = x23 * x23 + y23 * y23;
+
+ let devSqr: number;
+ if (d13 >= d12 + d23) {
+ devSqr = d23;
+ } else if (d23 >= d12 + d13) {
+ devSqr = d13;
+ } else {
+ devSqr = Math.pow(x13 * y12 - y13 * x12, 2) / d12;
+ }
+
+ if (devSqr > maxDevSqr) {
+ sig = i;
+ maxDevSqr = devSqr;
+ }
+ }
+
+ if (maxDevSqr < bandSqr) {
+ index.push(start);
+ } else {
+ sigStart[nStack] = sig;
+ sigEnd[nStack++] = end;
+ sigStart[nStack] = start;
+ sigEnd[nStack++] = sig;
+ }
+ } else {
+ index.push(start);
+ }
+ }
+
+ index.push(nSource - 1);
+ return index.sort((a, b) => a - b).map((i) => sourcePoints[i]);
+};
+
+export const destinationPoint = (pt: Point, brng: number, dist: number): Point => {
+ const distRad = dist / EARTH_RADIUS_KM;
+ const brngRad = numberToRadius(brng);
+
+ const lon1 = numberToRadius(pt.coordinates[0]);
+ const lat1 = numberToRadius(pt.coordinates[1]);
+
+ const lat2 = Math.asin(Math.sin(lat1) * Math.cos(distRad) + Math.cos(lat1) * Math.sin(distRad) * Math.cos(brngRad));
+ let lon2 = lon1 + Math.atan2(Math.sin(brngRad) * Math.sin(distRad) * Math.cos(lat1), Math.cos(distRad) - Math.sin(lat1) * Math.sin(lat2));
+
+ lon2 = ((lon2 + 3 * Math.PI) % (2 * Math.PI)) - Math.PI;
+
+ return {
+ type: 'Point',
+ coordinates: [numberToDegree(lon2), numberToDegree(lat2)],
+ };
+};
diff --git a/apps/meteor/src/meteor/geojson-utils.ts b/apps/meteor/src/meteor/geojson-utils.ts
new file mode 100644
index 0000000000000..b8a908cd1e39e
--- /dev/null
+++ b/apps/meteor/src/meteor/geojson-utils.ts
@@ -0,0 +1 @@
+export * as GeoJSON from './geojson-utils-core.ts';
diff --git a/apps/meteor/src/meteor/google-oauth.ts b/apps/meteor/src/meteor/google-oauth.ts
new file mode 100644
index 0000000000000..df65e7f355033
--- /dev/null
+++ b/apps/meteor/src/meteor/google-oauth.ts
@@ -0,0 +1,100 @@
+import { OAuth } from './oauth.ts';
+import { Random } from './random.ts';
+import { ServiceConfiguration } from './service-configuration.ts';
+
+type GoogleOptions = {
+ requestPermissions?: string[];
+ loginUrlParameters?: Record;
+ requestOfflineToken?: boolean;
+ forceApprovalPrompt?: boolean;
+ prompt?: string;
+ loginHint?: string;
+ loginStyle?: 'popup' | 'redirect';
+ redirectUrl?: string;
+ [key: string]: any;
+};
+
+type CredentialRequestCompleteCallback = (error?: Error | unknown) => void;
+
+const ILLEGAL_PARAMETERS: Record = {
+ response_type: true,
+ client_id: true,
+ scope: true,
+ redirect_uri: true,
+ state: true,
+};
+
+export const Google = {
+ requestCredential(
+ options?: GoogleOptions | CredentialRequestCompleteCallback,
+ credentialRequestCompleteCallback?: CredentialRequestCompleteCallback,
+ ) {
+ if (!credentialRequestCompleteCallback && typeof options === 'function') {
+ credentialRequestCompleteCallback = options;
+ options = {};
+ } else if (!options) {
+ options = {};
+ }
+
+ const opts = options as GoogleOptions;
+
+ const config = ServiceConfiguration.configurations.findOne({ service: 'google' }) as GoogleOptions | undefined;
+
+ if (!config) {
+ if (credentialRequestCompleteCallback) {
+ credentialRequestCompleteCallback(new ServiceConfiguration.ConfigError());
+ }
+ return;
+ }
+
+ const credentialToken = Random.secret();
+ const scopeSet = new Set(opts.requestPermissions || ['profile']);
+ scopeSet.add('email');
+ const scopes = Array.from(scopeSet);
+ const loginUrlParameters: Record = {
+ ...(config.loginUrlParameters || {}),
+ ...(opts.loginUrlParameters || {}),
+ };
+ Object.keys(loginUrlParameters).forEach((key) => {
+ if (ILLEGAL_PARAMETERS[key]) {
+ throw new Error(`Google.requestCredential: Invalid loginUrlParameter: ${key}`);
+ }
+ });
+ if (opts.requestOfflineToken != null) {
+ loginUrlParameters.access_type = opts.requestOfflineToken ? 'offline' : 'online';
+ }
+
+ if (opts.prompt != null) {
+ loginUrlParameters.prompt = opts.prompt;
+ } else if (opts.forceApprovalPrompt) {
+ loginUrlParameters.prompt = 'consent';
+ }
+
+ if (opts.loginHint) {
+ loginUrlParameters.login_hint = opts.loginHint;
+ }
+
+ const loginStyle = OAuth._loginStyle('google', config, opts);
+ Object.assign(loginUrlParameters, {
+ response_type: 'code',
+ client_id: config.clientId,
+ scope: scopes.join(' '),
+ redirect_uri: OAuth._redirectUri('google', config),
+ state: OAuth._stateParam(loginStyle, credentialToken, opts.redirectUrl),
+ });
+ const queryString = Object.keys(loginUrlParameters)
+ .map((param) => `${encodeURIComponent(param)}=${encodeURIComponent(loginUrlParameters[param])}`)
+ .join('&');
+
+ const loginUrl = `https://accounts.google.com/o/oauth2/auth?${queryString}`;
+
+ OAuth.launchLogin({
+ loginService: 'google',
+ loginStyle,
+ loginUrl,
+ credentialRequestCompleteCallback,
+ credentialToken,
+ popupOptions: { height: 600 },
+ });
+ },
+};
diff --git a/apps/meteor/src/meteor/id-map.ts b/apps/meteor/src/meteor/id-map.ts
new file mode 100644
index 0000000000000..b040210f17f7d
--- /dev/null
+++ b/apps/meteor/src/meteor/id-map.ts
@@ -0,0 +1,81 @@
+import { EJSON } from './ejson.ts';
+
+export class IdMap {
+ _map = new Map();
+
+ _idStringify: (id: TId) => string;
+
+ _idParse: (id: string) => TId;
+
+ constructor(idStringify: (id: TId) => string = JSON.stringify, idParse: (id: string) => TId = JSON.parse) {
+ this._idStringify = idStringify;
+ this._idParse = idParse;
+ }
+
+ get(id: TId) {
+ const key = this._idStringify(id);
+ return this._map.get(key);
+ }
+
+ set(id: TId, value: TValue) {
+ const key = this._idStringify(id);
+ this._map.set(key, value);
+ }
+
+ remove(id: TId) {
+ const key = this._idStringify(id);
+ this._map.delete(key);
+ }
+
+ has(id: TId) {
+ const key = this._idStringify(id);
+ return this._map.has(key);
+ }
+
+ empty() {
+ return this._map.size === 0;
+ }
+
+ clear() {
+ this._map.clear();
+ }
+
+ forEach(iterator: (value: TValue, id: TId) => unknown) {
+ for (const [key, value] of this._map) {
+ const breakIfFalse = iterator.call(null, value, this._idParse(key));
+ if (breakIfFalse === false) {
+ return;
+ }
+ }
+ }
+
+ async forEachAsync(iterator: (this: null, value: TValue, id: TId) => unknown) {
+ for (const [key, value] of this._map) {
+ // eslint-disable-next-line no-await-in-loop
+ if ((await iterator.call(null, value, this._idParse(key))) === false) {
+ return;
+ }
+ }
+ }
+
+ size() {
+ return this._map.size;
+ }
+
+ setDefault(id: TId, def: TValue) {
+ const key = this._idStringify(id);
+ if (this._map.has(key)) {
+ return this._map.get(key);
+ }
+ this._map.set(key, def);
+ return def;
+ }
+
+ clone() {
+ const clone = new IdMap(this._idStringify, this._idParse);
+ this._map.forEach((value, key) => {
+ clone._map.set(key, EJSON.clone(value));
+ });
+ return clone;
+ }
+}
diff --git a/apps/meteor/src/meteor/meteor-developer-oauth.ts b/apps/meteor/src/meteor/meteor-developer-oauth.ts
new file mode 100644
index 0000000000000..11b201bea847e
--- /dev/null
+++ b/apps/meteor/src/meteor/meteor-developer-oauth.ts
@@ -0,0 +1,76 @@
+import { OAuth } from './oauth.ts';
+import { Random } from './random.ts';
+import { ServiceConfiguration } from './service-configuration.ts';
+
+type MeteorDeveloperOptions = {
+ developerAccountsServer?: string;
+ redirectUrl?: string;
+ details?: string;
+ userEmail?: string;
+ loginHint?: string;
+ loginStyle?: 'popup' | 'redirect';
+ [key: string]: any;
+};
+
+type CredentialRequestCompleteCallback = (error?: Error | unknown) => void;
+
+export const MeteorDeveloperAccounts = {
+ _server: 'https://www.meteor.com',
+
+ _config(options: MeteorDeveloperOptions) {
+ if (options.developerAccountsServer) {
+ this._server = options.developerAccountsServer;
+ }
+ },
+
+ requestCredential(
+ options?: MeteorDeveloperOptions | CredentialRequestCompleteCallback,
+ credentialRequestCompleteCallback?: CredentialRequestCompleteCallback,
+ ) {
+ if (!credentialRequestCompleteCallback && typeof options === 'function') {
+ credentialRequestCompleteCallback = options;
+ options = {};
+ }
+
+ const config = ServiceConfiguration.configurations.findOne({ service: 'meteor-developer' }) as MeteorDeveloperOptions | undefined;
+
+ if (!config) {
+ if (credentialRequestCompleteCallback) {
+ credentialRequestCompleteCallback(new ServiceConfiguration.ConfigError());
+ }
+ return;
+ }
+
+ const opts = (options as MeteorDeveloperOptions) || {};
+ const credentialToken = Random.secret();
+ const loginStyle = OAuth._loginStyle('meteor-developer', config, opts);
+ let { loginHint } = opts;
+ if (opts.userEmail && !loginHint) {
+ loginHint = opts.userEmail;
+ }
+ let loginUrl =
+ `${MeteorDeveloperAccounts._server}/oauth2/authorize` +
+ `?state=${OAuth._stateParam(loginStyle, credentialToken, opts.redirectUrl)}` +
+ `&response_type=code` +
+ `&client_id=${config.clientId}`;
+
+ if (opts.details) {
+ loginUrl += `&details=${opts.details}`;
+ }
+
+ if (loginHint) {
+ loginUrl += `&user_email=${encodeURIComponent(loginHint)}`;
+ }
+
+ loginUrl += `&redirect_uri=${OAuth._redirectUri('meteor-developer', config)}`;
+
+ OAuth.launchLogin({
+ loginService: 'meteor-developer',
+ loginStyle,
+ loginUrl,
+ credentialRequestCompleteCallback,
+ credentialToken,
+ popupOptions: { width: 497, height: 749 },
+ });
+ },
+};
diff --git a/apps/meteor/src/meteor/meteor.ts b/apps/meteor/src/meteor/meteor.ts
new file mode 100644
index 0000000000000..e8f91250ab6c3
--- /dev/null
+++ b/apps/meteor/src/meteor/meteor.ts
@@ -0,0 +1,636 @@
+import type { Connection } from './ddp-client.ts';
+import { noop } from './utils/noop.ts';
+
+type Callback = (...args: any[]) => void;
+
+type PackagesSettings = Partial<{
+ ['facebook-oauth']: Partial<{
+ apiVersion: string;
+ }>;
+ reload: Partial<{
+ debug: boolean;
+ }>;
+ oauth: Partial<{
+ setRedirectUrlWhenLoginStyleIsPopup: boolean;
+ }>;
+ accounts: Partial<{
+ loginExpirationInDays: number;
+ clientStorage: 'local' | 'session';
+ }>;
+}>;
+
+type PublicSettings = Partial<{
+ packages: PackagesSettings;
+}>;
+
+type MeteorRuntimeConfig = {
+ meteorRelease?: string;
+ NODE_ENV?: string;
+ PUBLIC_SETTINGS?: PublicSettings;
+ ROOT_URL?: string;
+ ROOT_URL_PATH_PREFIX?: string;
+ ACCOUNTS_CONNECTION_URL?: string;
+ gitCommitHash?: string;
+ isModern?: boolean;
+ debug?: boolean;
+ noDeprecation?: boolean | string;
+ meteorEnv: {
+ NODE_ENV?: string;
+ [key: string]: unknown;
+ };
+ accountsConfigCalled?: boolean;
+};
+
+declare global {
+ // eslint-disable-next-line @typescript-eslint/naming-convention
+ const __meteor_runtime_config__: MeteorRuntimeConfig;
+ const meteorEnv: MeteorRuntimeConfig['meteorEnv'];
+}
+const globalScope = globalThis;
+const config: MeteorRuntimeConfig =
+ typeof __meteor_runtime_config__ === 'object'
+ ? __meteor_runtime_config__
+ : {
+ meteorEnv: {},
+ };
+const { meteorEnv } = config;
+
+export class MeteorError extends Error {
+ public error: string | number;
+
+ public reason?: string | undefined;
+
+ public details?: string | undefined;
+
+ public isClientSafe = true;
+
+ public errorType = 'Meteor.Error';
+
+ constructor(error: string | number, reason?: string | undefined, details?: string | undefined) {
+ super();
+ this.name = 'Meteor.Error';
+ this.error = error;
+ this.reason = reason;
+ this.details = details;
+ if (this.reason) {
+ this.message = `${this.reason} [${this.error}]`;
+ } else {
+ this.message = `[${this.error}]`;
+ }
+ }
+
+ public clone(): MeteorError {
+ return new MeteorError(this.error, this.reason, this.details);
+ }
+}
+
+let nextSlot = 0;
+let currentValues: unknown[] = [];
+let callAsyncMethodRunning = false;
+
+class EnvironmentVariable {
+ private readonly slot: number;
+
+ constructor() {
+ this.slot = nextSlot++;
+ }
+
+ public get(): T | undefined {
+ return currentValues[this.slot] as T;
+ }
+
+ public getOrNullIfOutsideFiber(): T | undefined {
+ return this.get();
+ }
+
+ public withValue(value: T, func: () => R): R {
+ const saved = currentValues[this.slot];
+ try {
+ currentValues[this.slot] = value;
+ return func();
+ } finally {
+ currentValues[this.slot] = saved;
+ }
+ }
+
+ public _set(value: T): void {
+ currentValues[this.slot] = value;
+ }
+
+ public _setNewContextAndGetCurrent(value: T): T {
+ const saved = currentValues[this.slot];
+ this._set(value);
+ return saved as T;
+ }
+
+ public _isCallAsyncMethodRunning(): boolean {
+ return callAsyncMethodRunning;
+ }
+
+ public _setCallAsyncMethodRunning(value: boolean): void {
+ callAsyncMethodRunning = value;
+ }
+
+ public static getCurrentValues(): unknown[] {
+ return currentValues;
+ }
+}
+
+class FakeDoubleEndedQueue {
+ private queue: unknown[] = [];
+
+ push(task: unknown): void {
+ this.queue.push(task);
+ }
+
+ shift(): unknown {
+ return this.queue.shift();
+ }
+
+ isEmpty(): boolean {
+ return this.queue.length === 0;
+ }
+}
+
+export class SynchronousQueue {
+ private _tasks: Array<() => void> = [];
+
+ private _running = false;
+
+ private _runTimeout: number | null = null;
+
+ public runTask(task: () => void): void {
+ if (!this.safeToRunTask()) {
+ throw new Error('Could not synchronously run a task from a running task');
+ }
+
+ this._tasks.push(task);
+ const tasks = this._tasks;
+ this._tasks = [];
+ this._running = true;
+
+ if (this._runTimeout) {
+ clearTimeout(this._runTimeout);
+ this._runTimeout = null;
+ }
+
+ try {
+ while (tasks.length > 0) {
+ const t = tasks.shift();
+ try {
+ t?.();
+ } catch (e) {
+ if (tasks.length === 0) throw e;
+ console.debug('Exception in queued task', e);
+ }
+ }
+ } finally {
+ this._running = false;
+ }
+ }
+
+ public queueTask(task: () => void): void {
+ this._tasks.push(task);
+ if (!this._runTimeout) {
+ this._runTimeout = setTimeout(() => this.flush(), 0) as unknown as number;
+ }
+ }
+
+ public flush(): void {
+ this.runTask(noop);
+ }
+
+ public drain(): void {
+ if (!this.safeToRunTask()) return;
+ while (this._tasks.length > 0) {
+ this.flush();
+ }
+ }
+
+ public safeToRunTask(): boolean {
+ return !this._running;
+ }
+}
+
+const _setImmediate = ((): ((fn: () => void) => void) => {
+ let postMessageIsAsynchronous = true;
+ const oldOnMessage = globalScope.onmessage;
+ globalScope.onmessage = () => {
+ postMessageIsAsynchronous = false;
+ };
+ globalScope.postMessage('', '*');
+ globalScope.onmessage = oldOnMessage;
+
+ if (!postMessageIsAsynchronous) {
+ const useTimeout = (fn: () => void) => setTimeout(fn, 0);
+ useTimeout.implementation = 'setTimeout';
+ return useTimeout;
+ }
+
+ let funcIndex = 0;
+ const funcs: Record void> = {};
+ const MESSAGE_PREFIX = `Meteor._setImmediate.${Math.random()}.`;
+
+ globalScope.addEventListener(
+ 'message',
+ (event) => {
+ if (event.source === window && typeof event.data === 'string' && event.data.startsWith(MESSAGE_PREFIX)) {
+ const index = parseInt(event.data.substring(MESSAGE_PREFIX.length), 10);
+ try {
+ if (funcs[index]) funcs[index]();
+ } finally {
+ delete funcs[index];
+ }
+ }
+ },
+ false,
+ );
+
+ const usePostMessage = (fn: () => void) => {
+ ++funcIndex;
+ funcs[funcIndex] = fn;
+ globalScope.postMessage(MESSAGE_PREFIX + funcIndex, '*');
+ };
+ usePostMessage.implementation = 'postMessage';
+ return usePostMessage;
+})();
+
+const _localStorage = localStorage;
+
+type AbsoluteUrlOptions = { rootUrl?: string; secure?: boolean; replaceLocalhost?: boolean };
+
+const defaultAbsoluteUrlOptions: AbsoluteUrlOptions = {
+ rootUrl: config.ROOT_URL || `${location.protocol}//${location.host}`,
+ secure: typeof location !== 'undefined' && location.protocol === 'https:',
+};
+
+const absoluteUrl = (() => {
+ function absoluteUrl(path?: string | Record, options?: AbsoluteUrlOptions): string {
+ if (typeof path === 'object' && !options) {
+ options = path;
+ path = undefined;
+ }
+
+ const opts = { ...absoluteUrl.defaultOptions, ...options };
+ let url = opts.rootUrl;
+
+ if (!url) throw new Error('Must pass options.rootUrl or set ROOT_URL in the server environment');
+ if (!/^http[s]?:\/\//i.test(url)) url = `http://${url}`;
+ if (!url.endsWith('/')) url += '/';
+
+ if (path) {
+ if (typeof path === 'string') {
+ url += path.replace(/^\/+/, '');
+ }
+ }
+
+ if (opts.secure && /^http:/.test(url) && !/http:\/\/localhost[:\/]/.test(url) && !/http:\/\/127\.0\.0\.1[:\/]/.test(url)) {
+ url = url.replace(/^http:/, 'https:');
+ }
+
+ if (opts.replaceLocalhost) {
+ url = url.replace(/^http:\/\/localhost([:\/].*)/, 'http://127.0.0.1$1');
+ }
+
+ return url;
+ }
+
+ absoluteUrl.defaultOptions = defaultAbsoluteUrlOptions;
+
+ return absoluteUrl;
+})();
+
+export const defer = (fn: VoidFunction) => {
+ console.warn('Meteor.defer is deprecated. Use setTimeout(fn, 0) instead.');
+ fn();
+};
+
+const Meteor = {
+ isProduction: true,
+ isDevelopment: false,
+ isClient: true,
+ isServer: false,
+ isCordova: false,
+ isModern: true,
+ gitCommitHash: config.gitCommitHash,
+ settings: config.PUBLIC_SETTINGS ? { public: config.PUBLIC_SETTINGS } : {},
+ release: config.meteorRelease,
+ connection: null as Connection | null,
+ refresh: noop,
+ isFibersDisabled: true,
+ isTest: false,
+ isAppTest: false,
+ isPackageTest: false,
+ isDebug: false,
+ Error: MeteorError,
+ EnvironmentVariable,
+ _SynchronousQueue: SynchronousQueue,
+ _DoubleEndedQueue: FakeDoubleEndedQueue,
+ _setImmediate,
+ _localStorage,
+
+ promisify(fn: Callback, context?: any, errorFirst = true) {
+ return function (this: any, ...args: any[]) {
+ // eslint-disable-next-line @typescript-eslint/no-this-alias
+ const self = this;
+ const filteredArgs = args.filter((i) => i !== undefined);
+
+ return new Promise((resolve, reject) => {
+ const callback = Meteor.bindEnvironment((error: any, result: any) => {
+ let _error = error;
+ let _result = result;
+
+ if (!errorFirst) {
+ _error = result;
+ _result = error;
+ }
+
+ if (_error) return reject(_error);
+ resolve(_result);
+ });
+
+ filteredArgs.push(callback);
+ return fn.apply(context || self, filteredArgs);
+ });
+ };
+ },
+
+ wrapAsync(fn: Callback, context?: any) {
+ return function (this: any, ...args: any[]) {
+ const self = context || this;
+ let callback: Callback | undefined;
+
+ for (let i = args.length - 1; i >= 0; --i) {
+ const arg = args[i];
+ if (arg !== undefined) {
+ if (typeof arg === 'function') {
+ callback = arg;
+ }
+ break;
+ }
+ }
+
+ if (!callback) {
+ callback = logErr;
+ args.push(undefined);
+ }
+
+ const callbackIndex = args.indexOf(callback);
+ const boundCallback = Meteor.bindEnvironment(callback);
+
+ if (callbackIndex !== -1) {
+ args[callbackIndex] = boundCallback;
+ } else {
+ args.push(boundCallback);
+ }
+
+ return fn.apply(self, args);
+ };
+ },
+
+ _wrapAsync(fn: Callback, context?: any) {
+ if (!warnedAboutWrapAsync) {
+ console.debug('Meteor._wrapAsync has been renamed to Meteor.wrapAsync');
+ warnedAboutWrapAsync = true;
+ }
+ return Meteor.wrapAsync(fn, context);
+ },
+
+ wrapFn(fn: F): F {
+ return fn;
+ },
+
+ _sleepForMs(ms: number) {
+ return new Promise((resolve) => setTimeout(resolve, ms));
+ },
+
+ sleep(ms: number) {
+ return Meteor._sleepForMs(ms);
+ },
+
+ _noYieldsAllowed(f: () => any) {
+ const result = f();
+ if (Meteor._isPromise(result)) {
+ throw new Error('function is a promise when calling Meteor._noYieldsAllowed');
+ }
+ return result;
+ },
+
+ _isPromise(r: any): boolean {
+ return !!r && typeof r.then === 'function';
+ },
+
+ _runFresh(fn: () => T): T {
+ return fn();
+ },
+
+ bindEnvironment any>(func: T, onException?: ((e: any) => void) | string, _this?: any): T {
+ const boundValues = currentValues.slice();
+
+ if (!onException || typeof onException === 'string') {
+ const description = onException || 'callback of async function';
+ onException = (error: any) => {
+ console.debug(`Exception in ${description}:`, error);
+ };
+ }
+ return function (this: any, ...args: any[]) {
+ const savedValues = currentValues;
+ let ret;
+ try {
+ currentValues = boundValues;
+ ret = func.apply(_this ?? this, args);
+ } catch (e) {
+ (onException as (e: any) => void)(e);
+ } finally {
+ currentValues = savedValues;
+ }
+ return ret;
+ } as unknown as T;
+ },
+
+ setInterval(f: VoidFunction, duration: number) {
+ return setInterval(bindAndCatch('setInterval callback', f), duration);
+ },
+
+ clearInterval(x: any) {
+ return clearInterval(x);
+ },
+
+ clearTimeout(x: any) {
+ return clearTimeout(x);
+ },
+
+ defer,
+
+ _debug(...args: unknown[]) {
+ if (suppress > 0) {
+ suppress--;
+ return;
+ }
+ if (typeof console !== 'undefined' && console.log) {
+ if (args.length === 0) {
+ console.log('');
+ } else {
+ const allStrings = args.every((a) => typeof a === 'string');
+ if (allStrings) {
+ console.log(args.join(' '));
+ } else {
+ console.log(...args);
+ }
+ }
+ }
+ },
+
+ _suppress_log(count: number) {
+ suppress += count;
+ },
+
+ _suppressed_log_expected() {
+ return suppress !== 0;
+ },
+
+ _escapeRegExp(string: string) {
+ return String(string).replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
+ },
+
+ deprecate(...args: any[]) {
+ if (typeof console === 'undefined' || !console.warn) return;
+
+ const stackTrace = cleanStackTrace(new Error().stack || '');
+
+ if (config.noDeprecation) {
+ if (typeof config.noDeprecation === 'string') {
+ const pattern = new RegExp(config.noDeprecation);
+ if (pattern.test(stackTrace)) {
+ onceFixDeprecation();
+ return;
+ }
+ } else if (config.noDeprecation === true) {
+ onceFixDeprecation();
+ return;
+ }
+ }
+
+ const messages = [...args];
+ if (stackTrace.length > 0) {
+ messages.push('\n\nTrace:\n', stackTrace);
+ }
+ messages.push('\n\nTo disable warnings, set the `METEOR_NO_DEPRECATION` to `true` or a regex pattern.\n');
+
+ onceWarning(['[DEPRECATION]', ...messages]);
+ },
+
+ startup(callback: () => void) {
+ if (isReady) callback();
+ else callbackQueue.push(callback);
+ },
+
+ absoluteUrl,
+
+ _relativeToSiteRootUrl(link: string) {
+ if (config.ROOT_URL_PATH_PREFIX && link.startsWith('/')) {
+ return config.ROOT_URL_PATH_PREFIX + link;
+ }
+ return link;
+ },
+};
+
+let warnedAboutWrapAsync = false;
+let suppress = 0;
+
+function logErr(err: any) {
+ if (err) {
+ return console.debug('Exception in callback of async function', err);
+ }
+}
+
+function withoutInvocation(f: () => void): () => void {
+ return f;
+}
+
+function bindAndCatch(context: string, f: () => void): () => void {
+ return Meteor.bindEnvironment(withoutInvocation(f), context);
+}
+
+function oncePerArgument(func: Callback) {
+ const cache = new Map();
+ return function (this: any, ...args: any[]) {
+ const key = JSON.stringify(args);
+ if (!cache.has(key)) {
+ const result = func.apply(this, args);
+ cache.set(key, result);
+ }
+ return cache.get(key);
+ };
+}
+
+const onceWarning = oncePerArgument((messages: any[]) => {
+ console.warn(...messages);
+});
+
+function onceFixDeprecation() {
+ onceWarning([
+ 'Deprecation warnings are hidden but crucial to address for future Meteor updates.',
+ '\n',
+ 'Remove the `METEOR_NO_DEPRECATION` env var to reveal them, then report or fix the issues.',
+ ]);
+}
+
+function cleanStackTrace(stackTrace: string): string {
+ if (!stackTrace) return '';
+ const lines = stackTrace.split('\n');
+ const trace = [];
+
+ try {
+ for (const line of lines) {
+ const trimmed = line.trim();
+ if (trimmed.includes('Meteor.deprecate')) continue;
+ if (trimmed.includes('packages/') || trimmed.includes('/')) {
+ trace.push(trimmed);
+ if (!trimmed.includes('packages/')) break;
+ }
+ }
+ } catch (e) {
+ console.error('Error cleaning stack trace: ', e);
+ }
+ return trace.join('\n');
+}
+
+const callbackQueue: Array<() => void> = [];
+let isLoadingCompleted = false;
+let eagerCodeRan = false;
+let isReady = false;
+const readyHoldsCount = 0;
+
+const maybeReady = function () {
+ if (isReady || !eagerCodeRan || readyHoldsCount > 0) return;
+ isReady = true;
+ while (callbackQueue.length) {
+ const cb = callbackQueue.shift();
+ if (cb) cb();
+ }
+};
+
+function waitForEagerAsyncModules() {
+ function finish() {
+ eagerCodeRan = true;
+ maybeReady();
+ }
+
+ finish();
+}
+
+const loadingCompleted = function () {
+ if (isLoadingCompleted) return;
+ isLoadingCompleted = true;
+ waitForEagerAsyncModules();
+};
+
+if (document.readyState === 'complete') {
+ window.setTimeout(loadingCompleted);
+} else {
+ document.addEventListener('DOMContentLoaded', loadingCompleted, false);
+ window.addEventListener('load', loadingCompleted, false);
+}
+
+export { Meteor, globalScope as global, meteorEnv };
diff --git a/apps/meteor/src/meteor/minimongo.ts b/apps/meteor/src/meteor/minimongo.ts
new file mode 100644
index 0000000000000..fa775bda0f15c
--- /dev/null
+++ b/apps/meteor/src/meteor/minimongo.ts
@@ -0,0 +1,2241 @@
+import { DiffSequence } from './diff-sequence';
+import { EJSON } from './ejson';
+import { GeoJSON } from './geojson-utils';
+import { IdMap } from './id-map';
+import { ObjectID } from './mongo-id';
+import { OrderedDict } from './ordered-dict';
+import { Random } from './random';
+import { Tracker } from './tracker';
+
+type IdSelector = string | number | ObjectID;
+
+export const _selectorIsId = (selector: unknown): selector is IdSelector =>
+ typeof selector === 'number' || typeof selector === 'string' || selector instanceof ObjectID;
+
+export const _selectorIsIdPerhapsAsObject = (selector: unknown) =>
+ _selectorIsId(selector) || (_selectorIsId(selector && selector._id) && Object.keys(selector).length === 1);
+
+function getAsyncMethodName(method) {
+ return `${method.replace('_', '')}Async`;
+}
+
+const ASYNC_CURSOR_METHODS = ['count', 'fetch', 'forEach', 'map'];
+
+class ObserveHandle {}
+
+const hasOwn = Object.prototype.hasOwnProperty;
+
+class MiniMongoQueryError extends Error {}
+
+const MinimongoError = (message, options = {}) => {
+ if (typeof message === 'string' && options.field) {
+ message += ` for field '${options.field}'`;
+ }
+ const error = new Error(message);
+ error.name = 'MinimongoError';
+ return error;
+};
+
+function nothingMatcher(_docOrBranchedValues: unknown) {
+ return { result: false };
+}
+
+function everythingMatcher(_docOrBranchedValues: unknown) {
+ return { result: true };
+}
+
+const _binarySearch = (cmp, array, value) => {
+ let first = 0;
+ let range = array.length;
+ while (range > 0) {
+ const halfRange = Math.floor(range / 2);
+ if (cmp(value, array[first + halfRange]) >= 0) {
+ first += halfRange + 1;
+ range -= halfRange + 1;
+ } else range = halfRange;
+ }
+ return first;
+};
+
+const _modify = (doc, modifier, options = {}) => {
+ if (!_isPlainObject(modifier)) throw MinimongoError('Modifier must be an object');
+ modifier = EJSON.clone(modifier);
+ const isModifier = isOperatorObject(modifier);
+ const newDoc = isModifier ? EJSON.clone(doc) : modifier;
+ if (isModifier) {
+ Object.keys(modifier).forEach((operator) => {
+ const setOnInsert = options.isInsert && operator === '$setOnInsert';
+ const modFunc = MODIFIERS[setOnInsert ? '$set' : operator];
+ const operand = modifier[operator];
+ if (!modFunc) throw MinimongoError(`Invalid modifier specified ${operator}`);
+ Object.keys(operand).forEach((keypath) => {
+ const arg = operand[keypath];
+ if (keypath === '') throw MinimongoError('An empty update path is not valid.');
+ const keyparts = keypath.split('.');
+ if (!keyparts.every(Boolean)) throw MinimongoError(`The update path '${keypath}' contains an empty field name`);
+ const target = findModTarget(newDoc, keyparts, {
+ arrayIndices: options.arrayIndices,
+ forbidArray: operator === '$rename',
+ noCreate: NO_CREATE_MODIFIERS[operator],
+ });
+ modFunc(target, keyparts.pop(), arg, keypath, newDoc);
+ });
+ });
+ if (doc._id && !EJSON.equals(doc._id, newDoc._id))
+ throw MinimongoError(
+ `After applying the update to the document {_id: "${doc._id}", ...}, the (immutable) field '_id' was found to have been altered`,
+ );
+ } else {
+ if (doc._id && modifier._id && !EJSON.equals(doc._id, modifier._id)) throw MinimongoError(`The _id field cannot be changed`);
+ assertHasValidFieldNames(modifier);
+ }
+ Object.keys(doc).forEach((key) => {
+ if (key !== '_id') delete doc[key];
+ });
+ Object.keys(newDoc).forEach((key) => {
+ doc[key] = newDoc[key];
+ });
+};
+
+const _checkSupportedProjection = (fields) => {
+ if (fields !== Object(fields) || Array.isArray(fields)) throw MinimongoError('fields option must be an object');
+ Object.keys(fields).forEach((keyPath) => {
+ if (keyPath.split('.').includes('$')) throw MinimongoError("Minimongo doesn't support $ operator in projections yet.");
+ const value = fields[keyPath];
+ if (typeof value === 'object' && ['$elemMatch', '$meta', '$slice'].some((key) => hasOwn.call(value, key)))
+ throw MinimongoError("Minimongo doesn't support operators in projections yet.");
+ if (![1, 0, true, false].includes(value)) throw MinimongoError('Projection values should be one of 1, 0, true, or false');
+ });
+};
+
+const _compileProjection = (fields) => {
+ _checkSupportedProjection(fields);
+ const _idProjection = fields._id === undefined ? true : fields._id;
+ const details = projectionDetails(fields);
+ const transform = (doc, ruleTree) => {
+ if (Array.isArray(doc)) return doc.map((subdoc) => transform(subdoc, ruleTree));
+ const result = details.including ? {} : EJSON.clone(doc);
+ Object.keys(ruleTree).forEach((key) => {
+ if (doc == null || !hasOwn.call(doc, key)) return;
+ const rule = ruleTree[key];
+ if (rule === Object(rule)) {
+ if (doc[key] === Object(doc[key])) result[key] = transform(doc[key], rule);
+ } else if (details.including) result[key] = EJSON.clone(doc[key]);
+ else delete result[key];
+ });
+ return doc != null ? result : doc;
+ };
+ return (doc) => {
+ const result = transform(doc, details.tree);
+ if (_idProjection && hasOwn.call(doc, '_id')) result._id = doc._id;
+ if (!_idProjection && hasOwn.call(result, '_id')) delete result._id;
+ return result;
+ };
+};
+
+const _isModificationMod = (mod) => {
+ let isModify = false;
+ let isReplace = false;
+ Object.keys(mod).forEach((key) => {
+ if (key.substr(0, 1) === '$') isModify = true;
+ else isReplace = true;
+ });
+ if (isModify && isReplace) throw new Error('Update parameter cannot have both modifier and non-modifier fields.');
+ return isModify;
+};
+
+const _createUpsertDocument = (selector, modifier) => {
+ const selectorDocument = populateDocumentWithQueryFields(selector);
+ const isModify = _isModificationMod(modifier);
+ const newDoc = {};
+ if (selectorDocument._id) {
+ newDoc._id = selectorDocument._id;
+ delete selectorDocument._id;
+ }
+ _modify(newDoc, { $set: selectorDocument });
+ _modify(newDoc, modifier, { isInsert: true });
+ if (isModify) return newDoc;
+ const replacement = Object.assign({}, modifier);
+ if (newDoc._id) replacement._id = newDoc._id;
+ return replacement;
+};
+
+const _findInOrderedResults = (query, doc) => {
+ if (!query.ordered) throw new Error("Can't call _findInOrderedResults on unordered query");
+ for (let i = 0; i < query.results.length; i++) {
+ if (query.results[i] === doc) return i;
+ }
+ throw Error('object missing from query');
+};
+
+const _idsMatchedBySelector = (selector): (string | number | ObjectID)[] | null => {
+ if (_selectorIsId(selector)) return [selector];
+ if (!selector) return null;
+ if (hasOwn.call(selector, '_id')) {
+ if (_selectorIsId(selector._id)) return [selector._id];
+ if (selector._id && Array.isArray(selector._id.$in) && selector._id.$in.length && selector._id.$in.every(_selectorIsId))
+ return selector._id.$in;
+ return null;
+ }
+ if (Array.isArray(selector.$and)) {
+ for (let i = 0; i < selector.$and.length; ++i) {
+ const subIds = _idsMatchedBySelector(selector.$and[i]);
+ if (subIds) return subIds;
+ }
+ }
+ return null;
+};
+
+const _insertInSortedList = (cmp, array, value) => {
+ if (array.length === 0) {
+ array.push(value);
+ return 0;
+ }
+ const i = _binarySearch(cmp, array, value);
+ array.splice(i, 0, value);
+ return i;
+};
+
+const _insertInResultsSync = (query, doc) => {
+ const fields = EJSON.clone(doc);
+ delete fields._id;
+ if (query.ordered) {
+ if (!query.sorter) {
+ query.addedBefore(doc._id, query.projectionFn(fields), null);
+ query.results.push(doc);
+ } else {
+ const i = _insertInSortedList(query.sorter.getComparator({ distances: query.distances }), query.results, doc);
+ let next = query.results[i + 1];
+ if (next) next = next._id;
+ else next = null;
+ query.addedBefore(doc._id, query.projectionFn(fields), next);
+ }
+ query.added(doc._id, query.projectionFn(fields));
+ } else {
+ query.added(doc._id, query.projectionFn(fields));
+ query.results.set(doc._id, doc);
+ }
+};
+
+const _insertInResultsAsync = async (query, doc) => {
+ const fields = EJSON.clone(doc);
+ delete fields._id;
+ if (query.ordered) {
+ if (!query.sorter) {
+ await query.addedBefore(doc._id, query.projectionFn(fields), null);
+ query.results.push(doc);
+ } else {
+ const i = _insertInSortedList(query.sorter.getComparator({ distances: query.distances }), query.results, doc);
+ let next = query.results[i + 1];
+ if (next) next = next._id;
+ else next = null;
+ await query.addedBefore(doc._id, query.projectionFn(fields), next);
+ }
+ await query.added(doc._id, query.projectionFn(fields));
+ } else {
+ await query.added(doc._id, query.projectionFn(fields));
+ query.results.set(doc._id, doc);
+ }
+};
+
+const _observeFromObserveChanges = (cursor, observeCallbacks) => {
+ const transform = cursor.getTransform() || ((doc) => doc);
+ let suppressed = !!observeCallbacks._suppress_initial;
+ let observeChangesCallbacks;
+ if (_observeCallbacksAreOrdered(observeCallbacks)) {
+ const indices = !observeCallbacks._no_indices;
+ observeChangesCallbacks = {
+ addedBefore(id, fields, before) {
+ if (suppressed || !(observeCallbacks.addedAt || observeCallbacks.added)) return;
+ const doc = transform(Object.assign(fields, { _id: id }));
+ if (observeCallbacks.addedAt)
+ observeCallbacks.addedAt(doc, indices ? (before ? this.docs.indexOf(before) : this.docs.size()) : -1, before);
+ else observeCallbacks.added(doc);
+ },
+ changed(id, fields) {
+ if (!(observeCallbacks.changedAt || observeCallbacks.changed)) return;
+ const doc = EJSON.clone(this.docs.get(id));
+ if (!doc) throw new Error(`Unknown id for changed: ${id}`);
+ const oldDoc = transform(EJSON.clone(doc));
+ DiffSequence.applyChanges(doc, fields);
+ if (observeCallbacks.changedAt) observeCallbacks.changedAt(transform(doc), oldDoc, indices ? this.docs.indexOf(id) : -1);
+ else observeCallbacks.changed(transform(doc), oldDoc);
+ },
+ movedBefore(id, before) {
+ if (!observeCallbacks.movedTo) return;
+ const from = indices ? this.docs.indexOf(id) : -1;
+ let to = indices ? (before ? this.docs.indexOf(before) : this.docs.size()) : -1;
+ if (to > from) --to;
+ observeCallbacks.movedTo(transform(EJSON.clone(this.docs.get(id))), from, to, before || null);
+ },
+ removed(id) {
+ if (!(observeCallbacks.removedAt || observeCallbacks.removed)) return;
+ const doc = transform(this.docs.get(id));
+ if (observeCallbacks.removedAt) observeCallbacks.removedAt(doc, indices ? this.docs.indexOf(id) : -1);
+ else observeCallbacks.removed(doc);
+ },
+ };
+ } else {
+ observeChangesCallbacks = {
+ added(id, fields) {
+ if (!suppressed && observeCallbacks.added) observeCallbacks.added(transform(Object.assign(fields, { _id: id })));
+ },
+ changed(id, fields) {
+ if (observeCallbacks.changed) {
+ const oldDoc = this.docs.get(id);
+ const doc = EJSON.clone(oldDoc);
+ DiffSequence.applyChanges(doc, fields);
+ observeCallbacks.changed(transform(doc), transform(EJSON.clone(oldDoc)));
+ }
+ },
+ removed(id) {
+ if (observeCallbacks.removed) observeCallbacks.removed(transform(this.docs.get(id)));
+ },
+ };
+ }
+ const changeObserver = new _CachingChangeObserver({ callbacks: observeChangesCallbacks });
+ changeObserver.applyChange._fromObserve = true;
+ const handle = cursor.observeChanges(changeObserver.applyChange, { nonMutatingCallbacks: true });
+ const setSuppressed = (h) => {
+ if (h.isReady) suppressed = false;
+ else h.isReadyPromise?.then(() => (suppressed = false));
+ };
+ if (handle instanceof Promise) handle.then(setSuppressed);
+ else setSuppressed(handle);
+ return handle;
+};
+
+const _observeCallbacksAreOrdered = (callbacks) => {
+ if (callbacks.added && callbacks.addedAt) throw new Error('Please specify only one of added() and addedAt()');
+ if (callbacks.changed && callbacks.changedAt) throw new Error('Please specify only one of changed() and changedAt()');
+ if (callbacks.removed && callbacks.removedAt) throw new Error('Please specify only one of removed() and removedAt()');
+ return !!(callbacks.addedAt || callbacks.changedAt || callbacks.movedTo || callbacks.removedAt);
+};
+
+const _observeChangesCallbacksAreOrdered = (callbacks) => {
+ if (callbacks.added && callbacks.addedBefore) throw new Error('Please specify only one of added() and addedBefore()');
+ return !!(callbacks.addedBefore || callbacks.movedBefore);
+};
+
+const _removeFromResultsSync = (query, doc) => {
+ if (query.ordered) {
+ const i = _findInOrderedResults(query, doc);
+ query.removed(doc._id);
+ query.results.splice(i, 1);
+ } else {
+ const id = doc._id;
+ query.removed(doc._id);
+ query.results.remove(id);
+ }
+};
+
+const _removeFromResultsAsync = async (query, doc) => {
+ if (query.ordered) {
+ const i = _findInOrderedResults(query, doc);
+ await query.removed(doc._id);
+ query.results.splice(i, 1);
+ } else {
+ const id = doc._id;
+ await query.removed(doc._id);
+ query.results.remove(id);
+ }
+};
+
+const _updateInResultsSync = (query, doc, old_doc) => {
+ if (!EJSON.equals(doc._id, old_doc._id)) throw new Error("Can't change a doc's _id while updating");
+ const { projectionFn } = query;
+ const changedFields = DiffSequence.makeChangedFields(projectionFn(doc), projectionFn(old_doc));
+ if (!query.ordered) {
+ if (Object.keys(changedFields).length) {
+ query.changed(doc._id, changedFields);
+ query.results.set(doc._id, doc);
+ }
+ return;
+ }
+ const old_idx = _findInOrderedResults(query, doc);
+ if (Object.keys(changedFields).length) query.changed(doc._id, changedFields);
+ if (!query.sorter) return;
+ query.results.splice(old_idx, 1);
+ const new_idx = _insertInSortedList(query.sorter.getComparator({ distances: query.distances }), query.results, doc);
+ if (old_idx !== new_idx) {
+ let next = query.results[new_idx + 1];
+ if (next) next = next._id;
+ else next = null;
+ query.movedBefore && query.movedBefore(doc._id, next);
+ }
+};
+
+const _updateInResultsAsync = async (query, doc, old_doc) => {
+ if (!EJSON.equals(doc._id, old_doc._id)) throw new Error("Can't change a doc's _id while updating");
+ const { projectionFn } = query;
+ const changedFields = DiffSequence.makeChangedFields(projectionFn(doc), projectionFn(old_doc));
+ if (!query.ordered) {
+ if (Object.keys(changedFields).length) {
+ await query.changed(doc._id, changedFields);
+ query.results.set(doc._id, doc);
+ }
+ return;
+ }
+ const old_idx = _findInOrderedResults(query, doc);
+ if (Object.keys(changedFields).length) await query.changed(doc._id, changedFields);
+ if (!query.sorter) return;
+ query.results.splice(old_idx, 1);
+ const new_idx = _insertInSortedList(query.sorter.getComparator({ distances: query.distances }), query.results, doc);
+ if (old_idx !== new_idx) {
+ let next = query.results[new_idx + 1];
+ if (next) next = next._id;
+ else next = null;
+ query.movedBefore && (await query.movedBefore(doc._id, next));
+ }
+};
+
+const wrapTransform = (transform) => {
+ if (!transform) {
+ return null;
+ }
+ if (transform.__wrappedTransform__) {
+ return transform;
+ }
+ const wrapped = (doc) => {
+ if (!hasOwn.call(doc, '_id')) {
+ throw new Error('can only transform documents with _id');
+ }
+ const id = doc._id;
+ const transformed = Tracker.nonreactive(() => transform(doc));
+ if (!_isPlainObject(transformed)) {
+ throw new Error('transform must return object');
+ }
+ if (hasOwn.call(transformed, '_id')) {
+ if (!EJSON.equals(transformed._id, id)) {
+ throw new Error("transformed document can't have different _id");
+ }
+ } else {
+ transformed._id = id;
+ }
+ return transformed;
+ };
+ wrapped.__wrappedTransform__ = true;
+ return wrapped;
+};
+
+class Cursor {
+ matcher: Matcher;
+
+ sorter: Sorter | null;
+
+ skip: number;
+
+ constructor(collection, selector, options = {}) {
+ this.collection = collection;
+ this.sorter = null;
+ this.matcher = new Matcher(selector);
+ if (_selectorIsIdPerhapsAsObject(selector)) {
+ this._selectorId = hasOwn.call(selector, '_id') ? selector._id : selector;
+ } else {
+ this._selectorId = undefined;
+ if (this.matcher.hasGeoQuery() || options.sort) {
+ this.sorter = new Sorter(options.sort || []);
+ }
+ }
+ this.skip = options.skip || 0;
+ this.limit = options.limit;
+ this.fields = options.projection || options.fields;
+ this._projectionFn = _compileProjection(this.fields || {});
+ this._transform = wrapTransform(options.transform);
+ if (typeof Tracker !== 'undefined') {
+ this.reactive = options.reactive === undefined ? true : options.reactive;
+ }
+ }
+
+ count() {
+ if (this.reactive) this._depend({ added: true, removed: true }, true);
+ return this._getRawObjects({ ordered: true }).length;
+ }
+
+ fetch() {
+ const result = [];
+ this.forEach((doc) => {
+ result.push(doc);
+ });
+ return result;
+ }
+
+ [Symbol.iterator]() {
+ if (this.reactive) this._depend({ addedBefore: true, removed: true, changed: true, movedBefore: true });
+ let index = 0;
+ const objects = this._getRawObjects({ ordered: true });
+ return {
+ next: () => {
+ if (index < objects.length) {
+ let element = this._projectionFn(objects[index++]);
+ if (this._transform) element = this._transform(element);
+ return { value: element };
+ }
+ return { done: true };
+ },
+ };
+ }
+
+ [Symbol.asyncIterator]() {
+ const syncResult = this[Symbol.iterator]();
+ return {
+ async next() {
+ return Promise.resolve(syncResult.next());
+ },
+ };
+ }
+
+ forEach(callback, thisArg?: unknown) {
+ if (this.reactive) this._depend({ addedBefore: true, removed: true, changed: true, movedBefore: true });
+ this._getRawObjects({ ordered: true }).forEach((element, i) => {
+ element = this._projectionFn(element);
+ if (this._transform) element = this._transform(element);
+ callback.call(thisArg, element, i, this);
+ });
+ }
+
+ getTransform() {
+ return this._transform;
+ }
+
+ map(callback, thisArg) {
+ const result = [];
+ this.forEach((doc, i) => {
+ result.push(callback.call(thisArg, doc, i, this));
+ });
+ return result;
+ }
+
+ observe(options) {
+ return _observeFromObserveChanges(this, options);
+ }
+
+ observeAsync(options) {
+ return new Promise((resolve) => resolve(this.observe(options)));
+ }
+
+ observeChanges(options) {
+ const ordered = _observeChangesCallbacksAreOrdered(options);
+ if (!options._allow_unordered && !ordered && (this.skip || this.limit))
+ throw new Error('Must use an ordered observe with skip or limit');
+ if (this.fields && (this.fields._id === 0 || this.fields._id === false))
+ throw Error('You may not observe a cursor with {fields: {_id: 0}}');
+ const distances = this.matcher.hasGeoQuery() && ordered && new _IdMap();
+ const query = {
+ cursor: this,
+ dirty: false,
+ distances,
+ matcher: this.matcher,
+ ordered,
+ projectionFn: this._projectionFn,
+ resultsSnapshot: null,
+ sorter: ordered && this.sorter,
+ };
+ let qid;
+ if (this.reactive) {
+ qid = this.collection.next_qid++;
+ this.collection.queries[qid] = query;
+ }
+ query.results = this._getRawObjects({ ordered, distances: query.distances });
+ if (this.collection.paused) query.resultsSnapshot = ordered ? [] : new _IdMap();
+ const wrapCallback = (fn) => {
+ if (!fn) return () => {};
+ const self = this;
+ return function () {
+ if (self.collection.paused) return;
+ const args = arguments;
+ self.collection._observeQueue.queueTask(() => {
+ fn.apply(this, args);
+ });
+ };
+ };
+ query.added = wrapCallback(options.added);
+ query.changed = wrapCallback(options.changed);
+ query.removed = wrapCallback(options.removed);
+ if (ordered) {
+ query.addedBefore = wrapCallback(options.addedBefore);
+ query.movedBefore = wrapCallback(options.movedBefore);
+ }
+ if (!options._suppress_initial && !this.collection.paused) {
+ const handler = (doc) => {
+ const fields = EJSON.clone(doc);
+ delete fields._id;
+ if (ordered) query.addedBefore(doc._id, this._projectionFn(fields), null);
+ query.added(doc._id, this._projectionFn(fields));
+ };
+ if (query.results.length) {
+ for (const doc of query.results) handler(doc);
+ }
+ if (query.results?.size?.()) query.results.forEach(handler);
+ }
+ const handle = Object.assign(new ObserveHandle(), {
+ collection: this.collection,
+ stop: () => {
+ if (this.reactive) delete this.collection.queries[qid];
+ },
+ isReady: false,
+ isReadyPromise: null,
+ });
+ if (this.reactive && Tracker.active) Tracker.onInvalidate(() => handle.stop());
+ const drainResult = this.collection._observeQueue.drain();
+ if (drainResult instanceof Promise) {
+ handle.isReadyPromise = drainResult;
+ drainResult.then(() => (handle.isReady = true));
+ } else {
+ handle.isReady = true;
+ handle.isReadyPromise = Promise.resolve();
+ }
+ return handle;
+ }
+
+ observeChangesAsync(options) {
+ return new Promise((resolve) => {
+ const handle = this.observeChanges(options);
+ handle.isReadyPromise.then(() => resolve(handle));
+ });
+ }
+
+ _depend(changers, _allow_unordered) {
+ if (Tracker.active) {
+ const dependency = new Tracker.Dependency();
+ const notify = dependency.changed.bind(dependency);
+ dependency.depend();
+ const options = { _allow_unordered, _suppress_initial: true };
+ ['added', 'addedBefore', 'changed', 'movedBefore', 'removed'].forEach((fn) => {
+ if (changers[fn]) options[fn] = notify;
+ });
+ this.observeChanges(options);
+ }
+ }
+
+ _getRawObjects(options = {}) {
+ const applySkipLimit = options.applySkipLimit !== false;
+ const results = options.ordered ? [] : new _IdMap();
+ if (this._selectorId !== undefined) {
+ if (applySkipLimit && this.skip) return results;
+ const selectedDoc = this.collection._docs.get(this._selectorId);
+ if (selectedDoc) {
+ if (options.ordered) results.push(selectedDoc);
+ else results.set(this._selectorId, selectedDoc);
+ }
+ return results;
+ }
+ let distances;
+ if (this.matcher.hasGeoQuery() && options.ordered) {
+ if (options.distances) {
+ distances = options.distances;
+ distances.clear();
+ } else distances = new _IdMap();
+ }
+ this.collection._docs.forEach((doc, id) => {
+ const matchResult = this.matcher.documentMatches(doc);
+ if (matchResult.result) {
+ if (options.ordered) {
+ results.push(doc);
+ if (distances && matchResult.distance !== undefined) distances.set(id, matchResult.distance);
+ } else results.set(id, doc);
+ }
+ if (!applySkipLimit) return true;
+ return !this.limit || this.skip || this.sorter || results.length !== this.limit;
+ });
+ if (!options.ordered) return results;
+ if (this.sorter) results.sort(this.sorter.getComparator({ distances }));
+ if (!applySkipLimit || (!this.limit && !this.skip)) return results;
+ return results.slice(this.skip, this.limit ? this.limit + this.skip : results.length);
+ }
+}
+ASYNC_CURSOR_METHODS.forEach((method) => {
+ const asyncName = getAsyncMethodName(method);
+ Cursor.prototype[asyncName] = function (...args) {
+ try {
+ return Promise.resolve(this[method].apply(this, args));
+ } catch (error) {
+ return Promise.reject(error);
+ }
+ };
+});
+const _f = {
+ _type(v) {
+ if (typeof v === 'number') return 1;
+ if (typeof v === 'string') return 2;
+ if (typeof v === 'boolean') return 8;
+ if (Array.isArray(v)) return 4;
+ if (v === null) return 10;
+ if (v instanceof RegExp) return 11;
+ if (typeof v === 'function') return 13;
+ if (v instanceof Date) return 9;
+ if (EJSON.isBinary(v)) return 5;
+ if (v instanceof ObjectID) return 7;
+ return 3;
+ },
+
+ _equal(a, b) {
+ return EJSON.equals(a, b, { keyOrderSensitive: true });
+ },
+
+ _typeorder(t) {
+ return [-1, 1, 2, 3, 4, 5, -1, 6, 7, 8, 0, 9, -1, 100, 2, 100, 1, 8, 1][t];
+ },
+
+ _cmp(a, b) {
+ if (a === undefined) return b === undefined ? 0 : -1;
+ if (b === undefined) return 1;
+ let ta = this._type(a);
+ let tb = this._type(b);
+ const oa = this._typeorder(ta);
+ const ob = this._typeorder(tb);
+ if (oa !== ob) return oa < ob ? -1 : 1;
+ if (ta !== tb) throw Error('Missing type coercion logic in _cmp');
+ if (ta === 7) {
+ a = a.toHexString();
+ b = b.toHexString();
+ ta = tb = 2;
+ }
+ if (ta === 9) {
+ a = isNaN(a) ? 0 : a.getTime();
+ b = isNaN(b) ? 0 : b.getTime();
+ ta = tb = 1;
+ }
+ if (ta === 1) return a - b;
+ if (tb === 2) return a < b ? -1 : a === b ? 0 : 1;
+ if (ta === 3) {
+ const toArray = (object) => {
+ const result = [];
+ Object.keys(object).forEach((key) => {
+ result.push(key, object[key]);
+ });
+ return result;
+ };
+ return this._cmp(toArray(a), toArray(b));
+ }
+ if (ta === 4) {
+ for (let i = 0; ; i++) {
+ if (i === a.length) return i === b.length ? 0 : -1;
+ if (i === b.length) return 1;
+ const s = this._cmp(a[i], b[i]);
+ if (s !== 0) return s;
+ }
+ }
+ if (ta === 5) {
+ if (a.length !== b.length) return a.length - b.length;
+ for (let i = 0; i < a.length; i++) {
+ if (a[i] < b[i]) return -1;
+ if (a[i] > b[i]) return 1;
+ }
+ return 0;
+ }
+ if (ta === 8) {
+ if (a) return b ? 0 : 1;
+ return b ? -1 : 0;
+ }
+ if (ta === 10) return 0;
+ throw Error('Unknown type to sort');
+ },
+};
+
+const _isPlainObject = (x) => {
+ return x && _f._type(x) === 3;
+};
+
+class _CachingChangeObserver {
+ constructor(options = {}) {
+ const orderedFromCallbacks = options.callbacks && _observeChangesCallbacksAreOrdered(options.callbacks);
+ if (hasOwn.call(options, 'ordered')) {
+ this.ordered = options.ordered;
+ if (options.callbacks && options.ordered !== orderedFromCallbacks) throw Error("ordered option doesn't match callbacks");
+ } else if (options.callbacks) this.ordered = orderedFromCallbacks;
+ else throw Error('must provide ordered or callbacks');
+ const callbacks = options.callbacks || {};
+ if (this.ordered) {
+ this.docs = new OrderedDict(ObjectID.stringify);
+ this.applyChange = {
+ addedBefore: (id, fields, before) => {
+ const doc = { ...fields };
+ doc._id = id;
+ if (callbacks.addedBefore) callbacks.addedBefore.call(this, id, EJSON.clone(fields), before);
+ if (callbacks.added) callbacks.added.call(this, id, EJSON.clone(fields));
+ this.docs.putBefore(id, doc, before || null);
+ },
+ movedBefore: (id, before) => {
+ if (callbacks.movedBefore) callbacks.movedBefore.call(this, id, before);
+ this.docs.moveBefore(id, before || null);
+ },
+ };
+ } else {
+ this.docs = new _IdMap();
+ this.applyChange = {
+ added: (id, fields) => {
+ const doc = { ...fields };
+ if (callbacks.added) callbacks.added.call(this, id, EJSON.clone(fields));
+ doc._id = id;
+ this.docs.set(id, doc);
+ },
+ };
+ }
+ this.applyChange.changed = (id, fields) => {
+ const doc = this.docs.get(id);
+ if (!doc) throw new Error(`Unknown id for changed: ${id}`);
+ if (callbacks.changed) callbacks.changed.call(this, id, EJSON.clone(fields));
+ DiffSequence.applyChanges(doc, fields);
+ };
+ this.applyChange.removed = (id) => {
+ if (callbacks.removed) callbacks.removed.call(this, id);
+ this.docs.remove(id);
+ };
+ }
+}
+
+class _IdMap extends IdMap {
+ constructor() {
+ super(ObjectID.stringify, ObjectID.parse);
+ }
+}
+
+const _useOID = false;
+class LocalCollection {
+ static _IdMap = _IdMap;
+
+ static Cursor = Cursor;
+
+ static ObserveHandle = ObserveHandle;
+
+ static _CachingChangeObserver = _CachingChangeObserver;
+
+ static _f = _f;
+
+ static _isPlainObject = _isPlainObject;
+
+ static _binarySearch = _binarySearch;
+
+ static _modify = _modify;
+
+ static _checkSupportedProjection = _checkSupportedProjection;
+
+ static _compileProjection = _compileProjection;
+
+ static _isModificationMod = _isModificationMod;
+
+ static _createUpsertDocument = _createUpsertDocument;
+
+ static wrapTransform = wrapTransform;
+
+ static _diffObjects = DiffSequence.diffObjects;
+
+ static _diffQueryChanges = DiffSequence.diffQueryChanges;
+
+ static _diffQueryOrderedChanges = DiffSequence.diffQueryOrderedChanges;
+
+ static _diffQueryUnorderedChanges = DiffSequence.diffQueryUnorderedChanges;
+
+ static _findInOrderedResults = _findInOrderedResults;
+
+ static _idsMatchedBySelector = _idsMatchedBySelector;
+
+ static _insertInSortedList = _insertInSortedList;
+
+ static _insertInResultsSync = _insertInResultsSync;
+
+ static _insertInResultsAsync = _insertInResultsAsync;
+
+ static _removeFromResultsSync = _removeFromResultsSync;
+
+ static _removeFromResultsAsync = _removeFromResultsAsync;
+
+ static _selectorIsId = _selectorIsId;
+
+ static _updateInResultsSync = _updateInResultsSync;
+
+ static _updateInResultsAsync = _updateInResultsAsync;
+
+ static _observeChangesCallbacksAreOrdered = _observeChangesCallbacksAreOrdered;
+
+ static _useOID = _useOID;
+
+ constructor(name) {
+ this.name = name;
+ this._docs = new _IdMap();
+ this._observeQueue = {
+ queueTask: (task) => task(),
+ drain: () => Promise.resolve(),
+ };
+
+ this.next_qid = 1;
+ this.queries = Object.create(null);
+ this._savedOriginals = null;
+ this.paused = false;
+ }
+
+ countDocuments(selector, options) {
+ return this.find(selector ?? {}, options).countAsync();
+ }
+
+ estimatedDocumentCount(options) {
+ return this.find({}, options).countAsync();
+ }
+
+ find(selector, options) {
+ if (arguments.length === 0) selector = {};
+ return new Cursor(this, selector, options);
+ }
+
+ findOne(selector, options = {}) {
+ if (arguments.length === 0) selector = {};
+ options.limit = 1;
+ return this.find(selector, options).fetch()[0];
+ }
+
+ async findOneAsync(selector, options = {}) {
+ if (arguments.length === 0) selector = {};
+ options.limit = 1;
+ return (await this.find(selector, options).fetchAsync())[0];
+ }
+
+ prepareInsert(doc) {
+ if (!hasOwn.call(doc, '_id')) doc._id = _useOID ? new ObjectID() : Random.id();
+ const id = doc._id;
+ if (this._docs.has(id)) throw MinimongoError(`Duplicate _id '${id}'`);
+ this._saveOriginal(id, undefined);
+ this._docs.set(id, doc);
+ return id;
+ }
+
+ insert(doc, callback) {
+ doc = EJSON.clone(doc);
+ const id = this.prepareInsert(doc);
+ const queriesToRecompute = [];
+ for (const qid of Object.keys(this.queries)) {
+ const query = this.queries[qid];
+ if (query.dirty) continue;
+ const matchResult = query.matcher.documentMatches(doc);
+ if (matchResult.result) {
+ if (query.distances && matchResult.distance !== undefined) query.distances.set(id, matchResult.distance);
+ if (query.cursor.skip || query.cursor.limit) queriesToRecompute.push(qid);
+ else _insertInResultsSync(query, doc);
+ }
+ }
+ queriesToRecompute.forEach((qid) => {
+ if (this.queries[qid]) this._recomputeResults(this.queries[qid]);
+ });
+ this._observeQueue.drain();
+ if (callback) {
+ setTimeout(() => callback(null, id), 0);
+ }
+ return id;
+ }
+
+ async insertAsync(doc, callback) {
+ doc = EJSON.clone(doc);
+ const id = this.prepareInsert(doc);
+ const queriesToRecompute = [];
+ for (const qid of Object.keys(this.queries)) {
+ const query = this.queries[qid];
+ if (query.dirty) continue;
+ const matchResult = query.matcher.documentMatches(doc);
+ if (matchResult.result) {
+ if (query.distances && matchResult.distance !== undefined) query.distances.set(id, matchResult.distance);
+ if (query.cursor.skip || query.cursor.limit) queriesToRecompute.push(qid);
+ else await _insertInResultsAsync(query, doc);
+ }
+ }
+ queriesToRecompute.forEach((qid) => {
+ if (this.queries[qid]) this._recomputeResults(this.queries[qid]);
+ });
+ await this._observeQueue.drain();
+ if (callback) {
+ setTimeout(() => callback(null, id), 0);
+ }
+ return id;
+ }
+
+ pauseObservers() {
+ if (this.paused) return;
+ this.paused = true;
+ Object.keys(this.queries).forEach((qid) => {
+ const query = this.queries[qid];
+ query.resultsSnapshot = EJSON.clone(query.results);
+ });
+ }
+
+ clearResultQueries(callback) {
+ const result = this._docs.size();
+ this._docs.clear();
+ Object.keys(this.queries).forEach((qid) => {
+ const query = this.queries[qid];
+ if (query.ordered) query.results = [];
+ else query.results.clear();
+ });
+ if (callback) {
+ setTimeout(() => callback(null, result), 0);
+ }
+ return result;
+ }
+
+ prepareRemove(selector) {
+ const matcher = new Matcher(selector);
+ const remove = [];
+ this._eachPossiblyMatchingDocSync(selector, (doc, id) => {
+ if (matcher.documentMatches(doc).result) remove.push(id);
+ });
+ const queriesToRecompute = [];
+ const queryRemove = [];
+ for (let i = 0; i < remove.length; i++) {
+ const removeId = remove[i];
+ const removeDoc = this._docs.get(removeId);
+ Object.keys(this.queries).forEach((qid) => {
+ const query = this.queries[qid];
+ if (query.dirty) return;
+ if (query.matcher.documentMatches(removeDoc).result) {
+ if (query.cursor.skip || query.cursor.limit) queriesToRecompute.push(qid);
+ else queryRemove.push({ qid, doc: removeDoc });
+ }
+ });
+ this._saveOriginal(removeId, removeDoc);
+ this._docs.remove(removeId);
+ }
+ return { queriesToRecompute, queryRemove, remove };
+ }
+
+ remove(selector, callback) {
+ if (this.paused && !this._savedOriginals && EJSON.equals(selector, {})) return this.clearResultQueries(callback);
+ const { queriesToRecompute, queryRemove, remove } = this.prepareRemove(selector);
+ queryRemove.forEach((remove) => {
+ const query = this.queries[remove.qid];
+ if (query) {
+ query.distances && query.distances.remove(remove.doc._id);
+ _removeFromResultsSync(query, remove.doc);
+ }
+ });
+ queriesToRecompute.forEach((qid) => {
+ const query = this.queries[qid];
+ if (query) this._recomputeResults(query);
+ });
+ this._observeQueue.drain();
+ const result = remove.length;
+ if (callback) {
+ setTimeout(() => callback(null, result), 0);
+ }
+ return result;
+ }
+
+ async removeAsync(selector, callback) {
+ if (this.paused && !this._savedOriginals && EJSON.equals(selector, {})) return this.clearResultQueries(callback);
+ const { queriesToRecompute, queryRemove, remove } = this.prepareRemove(selector);
+ for (const remove of queryRemove) {
+ const query = this.queries[remove.qid];
+ if (query) {
+ query.distances && query.distances.remove(remove.doc._id);
+ await _removeFromResultsAsync(query, remove.doc);
+ }
+ }
+ queriesToRecompute.forEach((qid) => {
+ const query = this.queries[qid];
+ if (query) this._recomputeResults(query);
+ });
+ await this._observeQueue.drain();
+ const result = remove.length;
+ if (callback) {
+ setTimeout(() => callback(null, result), 0);
+ }
+ return result;
+ }
+
+ _resumeObservers() {
+ if (!this.paused) return;
+ this.paused = false;
+ Object.keys(this.queries).forEach((qid) => {
+ const query = this.queries[qid];
+ if (query.dirty) {
+ query.dirty = false;
+ this._recomputeResults(query, query.resultsSnapshot);
+ } else {
+ DiffSequence.diffQueryChanges(query.ordered, query.resultsSnapshot, query.results, query, { projectionFn: query.projectionFn });
+ }
+ query.resultsSnapshot = null;
+ });
+ }
+
+ async resumeObserversServer() {
+ this._resumeObservers();
+ await this._observeQueue.drain();
+ }
+
+ resumeObserversClient() {
+ this._resumeObservers();
+ this._observeQueue.drain();
+ }
+
+ retrieveOriginals() {
+ if (!this._savedOriginals) throw new Error('Called retrieveOriginals without saveOriginals');
+ const originals = this._savedOriginals;
+ this._savedOriginals = null;
+ return originals;
+ }
+
+ saveOriginals() {
+ if (this._savedOriginals) throw new Error('Called saveOriginals twice without retrieveOriginals');
+ this._savedOriginals = new _IdMap();
+ }
+
+ prepareUpdate(selector) {
+ const qidToOriginalResults = {};
+ const docMap = new _IdMap();
+ const idsMatched = _idsMatchedBySelector(selector);
+ Object.keys(this.queries).forEach((qid) => {
+ const query = this.queries[qid];
+ if ((query.cursor.skip || query.cursor.limit) && !this.paused) {
+ if (query.results instanceof _IdMap) {
+ qidToOriginalResults[qid] = query.results.clone();
+ return;
+ }
+ if (!(query.results instanceof Array)) throw new Error('Assertion failed: query.results not an array');
+ const memoizedCloneIfNeeded = (doc) => {
+ if (docMap.has(doc._id)) return docMap.get(doc._id);
+ const docToMemoize = idsMatched && !idsMatched.some((id) => EJSON.equals(id, doc._id)) ? doc : EJSON.clone(doc);
+ docMap.set(doc._id, docToMemoize);
+ return docToMemoize;
+ };
+ qidToOriginalResults[qid] = query.results.map(memoizedCloneIfNeeded);
+ }
+ });
+ return qidToOriginalResults;
+ }
+
+ finishUpdate({ options, updateCount, callback, insertedId }) {
+ let result;
+ if (options._returnObject) {
+ result = { numberAffected: updateCount };
+ if (insertedId !== undefined) result.insertedId = insertedId;
+ } else {
+ result = updateCount;
+ }
+ if (callback) {
+ setTimeout(() => callback(null, result), 0);
+ }
+ return result;
+ }
+
+ async updateAsync(selector, mod, options, callback) {
+ if (!callback && options instanceof Function) {
+ callback = options;
+ options = null;
+ }
+ if (!options) options = {};
+ const matcher = new Matcher(selector, true);
+ const qidToOriginalResults = this.prepareUpdate(selector);
+ let recomputeQids = {};
+ let updateCount = 0;
+ await this._eachPossiblyMatchingDocAsync(selector, async (doc, id) => {
+ const queryResult = matcher.documentMatches(doc);
+ if (queryResult.result) {
+ this._saveOriginal(id, doc);
+ recomputeQids = await this._modifyAndNotifyAsync(doc, mod, queryResult.arrayIndices);
+ ++updateCount;
+ if (!options.multi) return false;
+ }
+ return true;
+ });
+ Object.keys(recomputeQids).forEach((qid) => {
+ const query = this.queries[qid];
+ if (query) this._recomputeResults(query, qidToOriginalResults[qid]);
+ });
+ await this._observeQueue.drain();
+ let insertedId;
+ if (updateCount === 0 && options.upsert) {
+ const doc = _createUpsertDocument(selector, mod);
+ if (!doc._id && options.insertedId) doc._id = options.insertedId;
+ insertedId = await this.insertAsync(doc);
+ updateCount = 1;
+ }
+ return this.finishUpdate({ options, insertedId, updateCount, callback });
+ }
+
+ update(selector, mod, options, callback) {
+ if (!callback && options instanceof Function) {
+ callback = options;
+ options = null;
+ }
+ if (!options) options = {};
+ const matcher = new Matcher(selector, true);
+ const qidToOriginalResults = this.prepareUpdate(selector);
+ let recomputeQids = {};
+ let updateCount = 0;
+ this._eachPossiblyMatchingDocSync(selector, (doc, id) => {
+ const queryResult = matcher.documentMatches(doc);
+ if (queryResult.result) {
+ this._saveOriginal(id, doc);
+ recomputeQids = this._modifyAndNotifySync(doc, mod, queryResult.arrayIndices);
+ ++updateCount;
+ if (!options.multi) return false;
+ }
+ return true;
+ });
+ Object.keys(recomputeQids).forEach((qid) => {
+ const query = this.queries[qid];
+ if (query) this._recomputeResults(query, qidToOriginalResults[qid]);
+ });
+ this._observeQueue.drain();
+ let insertedId;
+ if (updateCount === 0 && options.upsert) {
+ const doc = _createUpsertDocument(selector, mod);
+ if (!doc._id && options.insertedId) doc._id = options.insertedId;
+ insertedId = this.insert(doc);
+ updateCount = 1;
+ }
+ return this.finishUpdate({ options, insertedId, updateCount, callback, selector, mod });
+ }
+
+ upsert(selector, mod, options, callback) {
+ if (!callback && typeof options === 'function') {
+ callback = options;
+ options = {};
+ }
+ return this.update(selector, mod, Object.assign({}, options, { upsert: true, _returnObject: true }), callback);
+ }
+
+ upsertAsync(selector, mod, options, callback) {
+ if (!callback && typeof options === 'function') {
+ callback = options;
+ options = {};
+ }
+ return this.updateAsync(selector, mod, Object.assign({}, options, { upsert: true, _returnObject: true }), callback);
+ }
+
+ async _eachPossiblyMatchingDocAsync(selector, fn) {
+ const specificIds = _idsMatchedBySelector(selector);
+ if (specificIds) {
+ for (const id of specificIds) {
+ const doc = this._docs.get(id);
+ if (doc && !(await fn(doc, id))) break;
+ }
+ } else {
+ await this._docs.forEachAsync(fn);
+ }
+ }
+
+ _eachPossiblyMatchingDocSync(selector, fn) {
+ const specificIds = _idsMatchedBySelector(selector);
+ if (specificIds) {
+ for (const id of specificIds) {
+ const doc = this._docs.get(id);
+ if (doc && fn(doc, id) === false) break;
+ }
+ } else {
+ this._docs.forEach(fn);
+ }
+ }
+
+ _getMatchedDocAndModify(doc, mod, arrayIndices) {
+ const matched_before = {};
+ Object.keys(this.queries).forEach((qid) => {
+ const query = this.queries[qid];
+ if (query.dirty) return;
+ if (query.ordered) matched_before[qid] = query.matcher.documentMatches(doc).result;
+ else matched_before[qid] = query.results.has(doc._id);
+ });
+ return matched_before;
+ }
+
+ _modifyAndNotifySync(doc, mod, arrayIndices) {
+ const matched_before = this._getMatchedDocAndModify(doc, mod, arrayIndices);
+ const old_doc = EJSON.clone(doc);
+ _modify(doc, mod, { arrayIndices });
+ const recomputeQids = {};
+ for (const qid of Object.keys(this.queries)) {
+ const query = this.queries[qid];
+ if (query.dirty) continue;
+ const afterMatch = query.matcher.documentMatches(doc);
+ const after = afterMatch.result;
+ const before = matched_before[qid];
+ if (after && query.distances && afterMatch.distance !== undefined) query.distances.set(doc._id, afterMatch.distance);
+ if (query.cursor.skip || query.cursor.limit) {
+ if (before || after) recomputeQids[qid] = true;
+ } else if (before && !after) _removeFromResultsSync(query, doc);
+ else if (!before && after) _insertInResultsSync(query, doc);
+ else if (before && after) _updateInResultsSync(query, doc, old_doc);
+ }
+ return recomputeQids;
+ }
+
+ async _modifyAndNotifyAsync(doc, mod, arrayIndices) {
+ const matched_before = this._getMatchedDocAndModify(doc, mod, arrayIndices);
+ const old_doc = EJSON.clone(doc);
+ _modify(doc, mod, { arrayIndices });
+ const recomputeQids = {};
+ for (const qid of Object.keys(this.queries)) {
+ const query = this.queries[qid];
+ if (query.dirty) continue;
+ const afterMatch = query.matcher.documentMatches(doc);
+ const after = afterMatch.result;
+ const before = matched_before[qid];
+ if (after && query.distances && afterMatch.distance !== undefined) query.distances.set(doc._id, afterMatch.distance);
+ if (query.cursor.skip || query.cursor.limit) {
+ if (before || after) recomputeQids[qid] = true;
+ } else if (before && !after) await _removeFromResultsAsync(query, doc);
+ else if (!before && after) await _insertInResultsAsync(query, doc);
+ else if (before && after) await _updateInResultsAsync(query, doc, old_doc);
+ }
+ return recomputeQids;
+ }
+
+ _recomputeResults(query, oldResults) {
+ if (this.paused) {
+ query.dirty = true;
+ return;
+ }
+ if (!this.paused && !oldResults) oldResults = query.results;
+ if (query.distances) query.distances.clear();
+ query.results = query.cursor._getRawObjects({ distances: query.distances, ordered: query.ordered });
+ if (!this.paused) {
+ DiffSequence.diffQueryChanges(query.ordered, oldResults, query.results, query, { projectionFn: query.projectionFn });
+ }
+ }
+
+ _saveOriginal(id, doc) {
+ if (!this._savedOriginals) return;
+ if (this._savedOriginals.has(id)) return;
+ this._savedOriginals.set(id, EJSON.clone(doc));
+ }
+}
+
+function isIndexable(obj) {
+ return Array.isArray(obj) || _isPlainObject(obj);
+}
+
+function isNumericKey(s) {
+ return /^[0-9]+$/.test(s);
+}
+
+function isOperatorObject(valueSelector, inconsistentOK) {
+ if (!_isPlainObject(valueSelector)) {
+ return false;
+ }
+ let theseAreOperators = undefined;
+ Object.keys(valueSelector).forEach((selKey) => {
+ const thisIsOperator = selKey.substr(0, 1) === '$' || selKey === 'diff';
+ if (theseAreOperators === undefined) {
+ theseAreOperators = thisIsOperator;
+ } else if (theseAreOperators !== thisIsOperator) {
+ if (!inconsistentOK) {
+ throw new MiniMongoQueryError(`Inconsistent operator: ${JSON.stringify(valueSelector)}`);
+ }
+ theseAreOperators = false;
+ }
+ });
+ return !!theseAreOperators;
+}
+
+function makeInequality(cmpValueComparator) {
+ return {
+ compileElementSelector(operand) {
+ if (Array.isArray(operand)) return () => false;
+ if (operand === undefined) operand = null;
+ const operandType = _f._type(operand);
+ return (value) => {
+ if (value === undefined) value = null;
+ if (_f._type(value) !== operandType) return false;
+ return cmpValueComparator(_f._cmp(value, operand));
+ };
+ },
+ };
+}
+
+const ELEMENT_OPERATORS = {
+ $lt: makeInequality((cmpValue) => cmpValue < 0),
+ $gt: makeInequality((cmpValue) => cmpValue > 0),
+ $lte: makeInequality((cmpValue) => cmpValue <= 0),
+ $gte: makeInequality((cmpValue) => cmpValue >= 0),
+ $mod: {
+ compileElementSelector(operand) {
+ if (!(Array.isArray(operand) && operand.length === 2))
+ throw new MiniMongoQueryError('argument to $mod must be an array of two numbers');
+ const divisor = operand[0];
+ const remainder = operand[1];
+ return (value) => typeof value === 'number' && value % divisor === remainder;
+ },
+ },
+ $in: {
+ compileElementSelector(operand) {
+ if (!Array.isArray(operand)) throw new MiniMongoQueryError('$in needs an array');
+ const elementMatchers = operand.map((option) => {
+ if (option instanceof RegExp) return regexpElementMatcher(option);
+ if (isOperatorObject(option)) throw new MiniMongoQueryError('cannot nest $ under $in');
+ return equalityElementMatcher(option);
+ });
+ return (value) => {
+ if (value === undefined) value = null;
+ return elementMatchers.some((matcher) => matcher(value));
+ };
+ },
+ },
+ $size: {
+ dontExpandLeafArrays: true,
+ compileElementSelector(operand) {
+ if (typeof operand === 'string') operand = 0;
+ else if (typeof operand !== 'number') throw new MiniMongoQueryError('$size needs a number');
+ return (value) => Array.isArray(value) && value.length === operand;
+ },
+ },
+ $type: {
+ dontIncludeLeafArrays: true,
+ compileElementSelector(operand) {
+ if (typeof operand === 'string') {
+ const operandAliasMap = {
+ double: 1,
+ string: 2,
+ object: 3,
+ array: 4,
+ binData: 5,
+ undefined: 6,
+ objectId: 7,
+ bool: 8,
+ date: 9,
+ null: 10,
+ regex: 11,
+ dbPointer: 12,
+ javascript: 13,
+ symbol: 14,
+ javascriptWithScope: 15,
+ int: 16,
+ timestamp: 17,
+ long: 18,
+ decimal: 19,
+ minKey: -1,
+ maxKey: 127,
+ };
+ operand = operandAliasMap[operand];
+ }
+ return (value) => value !== undefined && _f._type(value) === operand;
+ },
+ },
+ $regex: {
+ compileElementSelector(operand, valueSelector) {
+ if (!(typeof operand === 'string' || operand instanceof RegExp)) throw new MiniMongoQueryError('$regex has to be a string or RegExp');
+ let regexp;
+ if (valueSelector.$options !== undefined) {
+ const source = operand instanceof RegExp ? operand.source : operand;
+ regexp = new RegExp(source, valueSelector.$options);
+ } else if (operand instanceof RegExp) {
+ regexp = operand;
+ } else {
+ regexp = new RegExp(operand);
+ }
+ return regexpElementMatcher(regexp);
+ },
+ },
+ $elemMatch: {
+ dontExpandLeafArrays: true,
+ compileElementSelector(operand, valueSelector, matcher) {
+ if (!_isPlainObject(operand)) throw new MiniMongoQueryError('$elemMatch need an object');
+ const isDocMatcher = !isOperatorObject(
+ Object.keys(operand)
+ .filter((key) => !hasOwn.call(LOGICAL_OPERATORS, key))
+ .reduce((a, b) => Object.assign(a, { [b]: operand[b] }), {}),
+ true,
+ );
+ let subMatcher;
+ if (isDocMatcher) subMatcher = compileDocumentSelector(operand, matcher, { inElemMatch: true });
+ else subMatcher = compileValueSelector(operand, matcher);
+ return (value) => {
+ if (!Array.isArray(value)) return false;
+ for (let i = 0; i < value.length; ++i) {
+ const arrayElement = value[i];
+ let arg;
+ if (isDocMatcher) {
+ if (!isIndexable(arrayElement)) return false;
+ arg = arrayElement;
+ } else {
+ arg = [{ value: arrayElement, dontIterate: true }];
+ }
+ if (subMatcher(arg).result) return i;
+ }
+ return false;
+ };
+ },
+ },
+};
+
+const LOGICAL_OPERATORS = {
+ $and(subSelector, matcher, inElemMatch) {
+ return andDocumentMatchers(compileArrayOfDocumentSelectors(subSelector, matcher, inElemMatch));
+ },
+ $or(subSelector, matcher, inElemMatch) {
+ const matchers = compileArrayOfDocumentSelectors(subSelector, matcher, inElemMatch);
+ if (matchers.length === 1) return matchers[0];
+ return (doc) => {
+ const result = matchers.some((fn) => fn(doc).result);
+ return { result };
+ };
+ },
+ $nor(subSelector, matcher, inElemMatch) {
+ const matchers = compileArrayOfDocumentSelectors(subSelector, matcher, inElemMatch);
+ return (doc) => {
+ const result = matchers.every((fn) => !fn(doc).result);
+ return { result };
+ };
+ },
+ $where(selectorValue, matcher) {
+ matcher._recordPathUsed('');
+ matcher._hasWhere = true;
+ if (!(selectorValue instanceof Function)) selectorValue = Function('obj', `return ${selectorValue}`);
+ return (doc) => ({ result: selectorValue.call(doc, doc) });
+ },
+ $comment() {
+ return () => ({ result: true });
+ },
+};
+
+const VALUE_OPERATORS = {
+ $eq(operand) {
+ return convertElementMatcherToBranchedMatcher(equalityElementMatcher(operand));
+ },
+ $not(operand, valueSelector, matcher) {
+ return invertBranchedMatcher(compileValueSelector(operand, matcher));
+ },
+ $ne(operand) {
+ return invertBranchedMatcher(convertElementMatcherToBranchedMatcher(equalityElementMatcher(operand)));
+ },
+ $nin(operand) {
+ return invertBranchedMatcher(convertElementMatcherToBranchedMatcher(ELEMENT_OPERATORS.$in.compileElementSelector(operand)));
+ },
+ $exists(operand) {
+ const exists = convertElementMatcherToBranchedMatcher((value) => value !== undefined);
+ return operand ? exists : invertBranchedMatcher(exists);
+ },
+ $options(operand, valueSelector) {
+ return everythingMatcher;
+ },
+ $maxDistance(operand, valueSelector) {
+ return everythingMatcher;
+ },
+ $all(operand, valueSelector, matcher) {
+ if (!Array.isArray(operand)) throw new MiniMongoQueryError('$all requires array');
+ if (operand.length === 0) return nothingMatcher;
+ const branchedMatchers = operand.map((criterion) => {
+ if (isOperatorObject(criterion)) throw new MiniMongoQueryError('no $ expressions in $all');
+ return compileValueSelector(criterion, matcher);
+ });
+ return andBranchedMatchers(branchedMatchers);
+ },
+ $near(operand, valueSelector, matcher, isRoot) {
+ if (!isRoot) throw new MiniMongoQueryError("$near can't be inside another $ operator");
+ matcher._hasGeoQuery = true;
+ let maxDistance;
+ let point;
+ let distance;
+ if (_isPlainObject(operand) && hasOwn.call(operand, '$geometry')) {
+ maxDistance = operand.$maxDistance;
+ point = operand.$geometry;
+ distance = (value) => {
+ if (!value) return null;
+ if (!value.type) return GeoJSON.pointDistance(point, { type: 'Point', coordinates: pointToArray(value) });
+ if (value.type === 'Point') return GeoJSON.pointDistance(point, value);
+ return GeoJSON.geometryWithinRadius(value, point, maxDistance) ? 0 : maxDistance + 1;
+ };
+ } else {
+ maxDistance = valueSelector.$maxDistance;
+ point = pointToArray(operand);
+ distance = (value) => {
+ if (!isIndexable(value)) return null;
+ return distanceCoordinatePairs(point, value);
+ };
+ }
+ return (branchedValues) => {
+ const result = { result: false };
+ expandArraysInBranches(branchedValues).every((branch) => {
+ let curDistance;
+ if (!matcher._isUpdate) {
+ if (!(typeof branch.value === 'object')) return true;
+ curDistance = distance(branch.value);
+ if (curDistance === null || curDistance > maxDistance) return true;
+ if (result.distance !== undefined && result.distance <= curDistance) return true;
+ }
+ result.result = true;
+ result.distance = curDistance;
+ if (branch.arrayIndices) result.arrayIndices = branch.arrayIndices;
+ else delete result.arrayIndices;
+ return !matcher._isUpdate;
+ });
+ return result;
+ };
+ },
+};
+
+function andSomeMatchers(subMatchers) {
+ if (subMatchers.length === 0) return everythingMatcher;
+ if (subMatchers.length === 1) return subMatchers[0];
+ return (docOrBranches) => {
+ const match = {};
+ match.result = subMatchers.every((fn) => {
+ const subResult = fn(docOrBranches);
+ if (subResult.result && subResult.distance !== undefined && match.distance === undefined) match.distance = subResult.distance;
+ if (subResult.result && subResult.arrayIndices) match.arrayIndices = subResult.arrayIndices;
+ return subResult.result;
+ });
+ if (!match.result) {
+ delete match.distance;
+ delete match.arrayIndices;
+ }
+ return match;
+ };
+}
+
+const andDocumentMatchers = andSomeMatchers;
+const andBranchedMatchers = andSomeMatchers;
+
+function compileArrayOfDocumentSelectors(selectors, matcher, inElemMatch) {
+ if (!Array.isArray(selectors) || selectors.length === 0) throw new MiniMongoQueryError('$and/$or/$nor must be nonempty array');
+ return selectors.map((subSelector) => {
+ if (!_isPlainObject(subSelector)) throw new MiniMongoQueryError('$or/$and/$nor entries need to be full objects');
+ return compileDocumentSelector(subSelector, matcher, { inElemMatch });
+ });
+}
+
+function compileDocumentSelector(docSelector, matcher, options = {}) {
+ const docMatchers = Object.keys(docSelector)
+ .map((key) => {
+ const subSelector = docSelector[key];
+ if (key.substr(0, 1) === '$') {
+ if (!hasOwn.call(LOGICAL_OPERATORS, key)) throw new MiniMongoQueryError(`Unrecognized logical operator: ${key}`);
+ matcher._isSimple = false;
+ return LOGICAL_OPERATORS[key](subSelector, matcher, options.inElemMatch);
+ }
+ if (!options.inElemMatch) matcher._recordPathUsed(key);
+ if (typeof subSelector === 'function') return undefined;
+ const lookUpByIndex = makeLookupFunction(key);
+ const valueMatcher = compileValueSelector(subSelector, matcher, options.isRoot);
+ return (doc) => valueMatcher(lookUpByIndex(doc));
+ })
+ .filter(Boolean);
+ return andDocumentMatchers(docMatchers);
+}
+
+function compileValueSelector(valueSelector, matcher, isRoot) {
+ if (valueSelector instanceof RegExp) {
+ matcher._isSimple = false;
+ return convertElementMatcherToBranchedMatcher(regexpElementMatcher(valueSelector));
+ }
+ if (isOperatorObject(valueSelector)) return operatorBranchedMatcher(valueSelector, matcher, isRoot);
+ return convertElementMatcherToBranchedMatcher(equalityElementMatcher(valueSelector));
+}
+
+function convertElementMatcherToBranchedMatcher(elementMatcher, options = {}) {
+ return (branches) => {
+ const expanded = options.dontExpandLeafArrays ? branches : expandArraysInBranches(branches, options.dontIncludeLeafArrays);
+ const match = {};
+ match.result = expanded.some((element) => {
+ let matched = elementMatcher(element.value);
+ if (typeof matched === 'number') {
+ if (!element.arrayIndices) element.arrayIndices = [matched];
+ matched = true;
+ }
+ if (matched && element.arrayIndices) match.arrayIndices = element.arrayIndices;
+ return matched;
+ });
+ return match;
+ };
+}
+
+function distanceCoordinatePairs(a, b) {
+ const pointA = pointToArray(a);
+ const pointB = pointToArray(b);
+ return Math.hypot(pointA[0] - pointB[0], pointA[1] - pointB[1]);
+}
+
+function equalityElementMatcher(elementSelector) {
+ if (isOperatorObject(elementSelector)) throw new MiniMongoQueryError("Can't create equalityValueSelector for operator object");
+ if (elementSelector == null) return (value) => value == null;
+ return (value) => _f._equal(elementSelector, value);
+}
+
+function expandArraysInBranches(branches, skipTheArrays) {
+ const branchesOut = [];
+ branches.forEach((branch) => {
+ const thisIsArray = Array.isArray(branch.value);
+ if (!(skipTheArrays && thisIsArray && !branch.dontIterate)) {
+ branchesOut.push({ arrayIndices: branch.arrayIndices, value: branch.value });
+ }
+ if (thisIsArray && !branch.dontIterate) {
+ branch.value.forEach((value, i) => {
+ branchesOut.push({
+ arrayIndices: (branch.arrayIndices || []).concat(i),
+ value,
+ });
+ });
+ }
+ });
+ return branchesOut;
+}
+
+function insertIntoDocument(document, key, value) {
+ Object.keys(document).forEach((existingKey) => {
+ if (
+ (existingKey.length > key.length && existingKey.indexOf(`${key}.`) === 0) ||
+ (key.length > existingKey.length && key.indexOf(`${existingKey}.`) === 0)
+ ) {
+ throw new MiniMongoQueryError(`cannot infer query fields to set, both paths '${existingKey}' and '${key}' are matched`);
+ } else if (existingKey === key) {
+ throw new MiniMongoQueryError(`cannot infer query fields to set, path '${key}' is matched twice`);
+ }
+ });
+ document[key] = value;
+}
+
+function invertBranchedMatcher(branchedMatcher) {
+ return (branchValues) => {
+ return { result: !branchedMatcher(branchValues).result };
+ };
+}
+
+function makeLookupFunction(key, options = {}) {
+ const parts = key.split('.');
+ const firstPart = parts.length ? parts[0] : '';
+ const lookupRest = parts.length > 1 && makeLookupFunction(parts.slice(1).join('.'), options);
+ function buildResult(arrayIndices, dontIterate, value) {
+ return arrayIndices && arrayIndices.length
+ ? dontIterate
+ ? [{ arrayIndices, dontIterate, value }]
+ : [{ arrayIndices, value }]
+ : dontIterate
+ ? [{ dontIterate, value }]
+ : [{ value }];
+ }
+ return (doc, arrayIndices) => {
+ if (Array.isArray(doc)) {
+ if (!(isNumericKey(firstPart) && firstPart < doc.length)) return [];
+ arrayIndices = arrayIndices ? arrayIndices.concat(+firstPart, 'x') : [+firstPart, 'x'];
+ }
+ const firstLevel = doc[firstPart];
+ if (!lookupRest) return buildResult(arrayIndices, Array.isArray(doc) && Array.isArray(firstLevel), firstLevel);
+ if (!isIndexable(firstLevel)) {
+ if (Array.isArray(doc)) return [];
+ return buildResult(arrayIndices, false, undefined);
+ }
+ const result = [];
+ result.push(...lookupRest(firstLevel, arrayIndices));
+ if (Array.isArray(firstLevel) && !(isNumericKey(parts[1]) && options.forSort)) {
+ firstLevel.forEach((branch, arrayIndex) => {
+ if (_isPlainObject(branch)) {
+ result.push(...lookupRest(branch, arrayIndices ? arrayIndices.concat(arrayIndex) : [arrayIndex]));
+ }
+ });
+ }
+ return result;
+ };
+}
+
+function operatorBranchedMatcher(valueSelector, matcher, isRoot) {
+ const operatorMatchers = Object.keys(valueSelector).map((operator) => {
+ const operand = valueSelector[operator];
+ if (hasOwn.call(VALUE_OPERATORS, operator)) return VALUE_OPERATORS[operator](operand, valueSelector, matcher, isRoot);
+ if (hasOwn.call(ELEMENT_OPERATORS, operator)) {
+ const options = ELEMENT_OPERATORS[operator];
+ return convertElementMatcherToBranchedMatcher(options.compileElementSelector(operand, valueSelector, matcher), options);
+ }
+ throw new MiniMongoQueryError(`Unrecognized operator: ${operator}`);
+ });
+ return andBranchedMatchers(operatorMatchers);
+}
+
+function pathsToTree(paths, newLeafFn, conflictFn, root = {}) {
+ paths.forEach((path) => {
+ const pathArray = path.split('.');
+ let tree = root;
+ const success = pathArray.slice(0, -1).every((key, i) => {
+ if (!hasOwn.call(tree, key)) tree[key] = {};
+ else if (tree[key] !== Object(tree[key])) {
+ tree[key] = conflictFn(tree[key], pathArray.slice(0, i + 1).join('.'), path);
+ if (tree[key] !== Object(tree[key])) return false;
+ }
+ tree = tree[key];
+ return true;
+ });
+ if (success) {
+ const lastKey = pathArray[pathArray.length - 1];
+ if (hasOwn.call(tree, lastKey)) tree[lastKey] = conflictFn(tree[lastKey], path, path);
+ else tree[lastKey] = newLeafFn(path);
+ }
+ });
+ return root;
+}
+
+function pointToArray(point) {
+ return Array.isArray(point) ? point.slice() : [point.x, point.y];
+}
+
+function populateDocumentWithKeyValue(document, key, value) {
+ if (value && Object.getPrototypeOf(value) === Object.prototype) populateDocumentWithObject(document, key, value);
+ else if (!(value instanceof RegExp)) insertIntoDocument(document, key, value);
+}
+
+function populateDocumentWithObject(document, key, value) {
+ const keys = Object.keys(value);
+ const unprefixedKeys = keys.filter((op) => op[0] !== '$');
+ if (unprefixedKeys.length > 0 || !keys.length) {
+ if (keys.length !== unprefixedKeys.length) throw new MiniMongoQueryError(`unknown operator: ${unprefixedKeys[0]}`);
+ validateObject(value, key);
+ insertIntoDocument(document, key, value);
+ } else {
+ Object.keys(value).forEach((op) => {
+ const object = value[op];
+ if (op === '$eq') populateDocumentWithKeyValue(document, key, object);
+ else if (op === '$all') object.forEach((element) => populateDocumentWithKeyValue(document, key, element));
+ });
+ }
+}
+
+function populateDocumentWithQueryFields(query, document = {}) {
+ if (Object.getPrototypeOf(query) === Object.prototype) {
+ Object.keys(query).forEach((key) => {
+ const value = query[key];
+ if (key === '$and') value.forEach((element) => populateDocumentWithQueryFields(element, document));
+ else if (key === '$or') {
+ if (value.length === 1) populateDocumentWithQueryFields(value[0], document);
+ } else if (key[0] !== '$') populateDocumentWithKeyValue(document, key, value);
+ });
+ } else if (_selectorIsId(query)) insertIntoDocument(document, '_id', query);
+ return document;
+}
+
+function projectionDetails(fields) {
+ let fieldsKeys = Object.keys(fields).sort();
+ if (!(fieldsKeys.length === 1 && fieldsKeys[0] === '_id') && !(fieldsKeys.includes('_id') && fields._id))
+ fieldsKeys = fieldsKeys.filter((key) => key !== '_id');
+ let including = null;
+ fieldsKeys.forEach((keyPath) => {
+ const rule = !!fields[keyPath];
+ if (including === null) including = rule;
+ if (including !== rule) throw MinimongoError('You cannot currently mix including and excluding fields.');
+ });
+ const projectionRulesTree = pathsToTree(
+ fieldsKeys,
+ (path) => including,
+ (node, path, fullPath) => {
+ throw MinimongoError(`both ${fullPath} and ${path} found in fields option.`);
+ },
+ );
+ return { including, tree: projectionRulesTree };
+}
+
+function regexpElementMatcher(regexp) {
+ return (value) => {
+ if (value instanceof RegExp) return value.toString() === regexp.toString();
+ if (typeof value !== 'string') return false;
+ regexp.lastIndex = 0;
+ return regexp.test(value);
+ };
+}
+
+function validateKeyInPath(key, path) {
+ if (key.includes('.')) throw new Error(`The dotted field '${key}' in '${path}.${key} is not valid for storage.`);
+ if (key[0] === '$') throw new Error(`The dollar ($) prefixed field '${path}.${key} is not valid for storage.`);
+}
+
+function validateObject(object, path) {
+ if (object && Object.getPrototypeOf(object) === Object.prototype) {
+ Object.keys(object).forEach((key) => {
+ validateKeyInPath(key, path);
+ validateObject(object[key], `${path}.${key}`);
+ });
+ }
+}
+
+class Matcher {
+ constructor(selector, isUpdate = false) {
+ this._paths = {};
+ this._hasGeoQuery = false;
+ this._hasWhere = false;
+ this._isSimple = true;
+ this._matchingDocument = undefined;
+ this._selector = null;
+ this._docMatcher = this._compileSelector(selector);
+ this._isUpdate = isUpdate;
+ }
+
+ documentMatches(doc) {
+ if (doc !== Object(doc)) throw Error('documentMatches needs a document');
+ return this._docMatcher(doc);
+ }
+
+ hasGeoQuery() {
+ return this._hasGeoQuery;
+ }
+
+ hasWhere() {
+ return this._hasWhere;
+ }
+
+ isSimple() {
+ return this._isSimple;
+ }
+
+ _compileSelector(selector) {
+ if (selector instanceof Function) {
+ this._isSimple = false;
+ this._selector = selector;
+ this._recordPathUsed('');
+ return (doc) => ({ result: !!selector.call(doc) });
+ }
+ if (_selectorIsId(selector)) {
+ this._selector = { _id: selector };
+ this._recordPathUsed('_id');
+ return (doc) => ({ result: EJSON.equals(doc._id, selector) });
+ }
+ if (!selector || (hasOwn.call(selector, '_id') && !selector._id)) {
+ this._isSimple = false;
+ return nothingMatcher;
+ }
+ if (Array.isArray(selector) || EJSON.isBinary(selector) || typeof selector === 'boolean') {
+ throw new Error(`Invalid selector: ${selector}`);
+ }
+ this._selector = EJSON.clone(selector);
+ return compileDocumentSelector(selector, this, { isRoot: true });
+ }
+
+ _getPaths() {
+ return Object.keys(this._paths);
+ }
+
+ _recordPathUsed(path) {
+ this._paths[path] = true;
+ }
+}
+
+class Sorter {
+ constructor(spec) {
+ this._sortSpecParts = [];
+ this._sortFunction = null;
+ const addSpecPart = (path, ascending) => {
+ if (!path) throw Error('sort keys must be non-empty');
+ if (path.charAt(0) === '$') throw Error(`unsupported sort key: ${path}`);
+ this._sortSpecParts.push({ ascending, lookup: makeLookupFunction(path, { forSort: true }), path });
+ };
+ if (spec instanceof Array) {
+ spec.forEach((element) => {
+ if (typeof element === 'string') addSpecPart(element, true);
+ else addSpecPart(element[0], element[1] !== 'desc');
+ });
+ } else if (typeof spec === 'object') {
+ Object.keys(spec).forEach((key) => addSpecPart(key, spec[key] >= 0));
+ } else if (typeof spec === 'function') {
+ this._sortFunction = spec;
+ } else {
+ throw Error(`Bad sort specification: ${JSON.stringify(spec)}`);
+ }
+ if (this._sortFunction) return;
+ this._keyComparator = composeComparators(this._sortSpecParts.map((spec, i) => this._keyFieldComparator(i)));
+ }
+
+ getComparator(options) {
+ if (this._sortSpecParts.length || !options || !options.distances) return this._getBaseComparator();
+ const { distances } = options;
+ return (a, b) => {
+ if (!distances.has(a._id)) throw Error(`Missing distance for ${a._id}`);
+ if (!distances.has(b._id)) throw Error(`Missing distance for ${b._id}`);
+ return distances.get(a._id) - distances.get(b._id);
+ };
+ }
+
+ _compareKeys(key1, key2) {
+ if (key1.length !== this._sortSpecParts.length || key2.length !== this._sortSpecParts.length) throw Error('Key has wrong length');
+ return this._keyComparator(key1, key2);
+ }
+
+ _generateKeysFromDoc(doc, cb) {
+ if (this._sortSpecParts.length === 0) throw new Error("can't generate keys without a spec");
+ const pathFromIndices = (indices) => `${indices.join(',')},`;
+ let knownPaths = null;
+ const valuesByIndexAndPath = this._sortSpecParts.map((spec) => {
+ let branches = expandArraysInBranches(spec.lookup(doc), true);
+ if (!branches.length) branches = [{ value: void 0 }];
+ const element = Object.create(null);
+ let usedPaths = false;
+ branches.forEach((branch) => {
+ if (!branch.arrayIndices) {
+ if (branches.length > 1) throw Error('multiple branches but no array used?');
+ element[''] = branch.value;
+ return;
+ }
+ usedPaths = true;
+ const path = pathFromIndices(branch.arrayIndices);
+ if (hasOwn.call(element, path)) throw Error(`duplicate path: ${path}`);
+ element[path] = branch.value;
+ if (knownPaths && !hasOwn.call(knownPaths, path)) throw Error('cannot index parallel arrays');
+ });
+ if (knownPaths) {
+ if (!hasOwn.call(element, '') && Object.keys(knownPaths).length !== Object.keys(element).length)
+ throw Error('cannot index parallel arrays!');
+ } else if (usedPaths) {
+ knownPaths = {};
+ Object.keys(element).forEach((path) => {
+ knownPaths[path] = true;
+ });
+ }
+ return element;
+ });
+ if (!knownPaths) {
+ const soleKey = valuesByIndexAndPath.map((values) => {
+ if (!hasOwn.call(values, '')) throw Error('no value in sole key case?');
+ return values[''];
+ });
+ cb(soleKey);
+ return;
+ }
+ Object.keys(knownPaths).forEach((path) => {
+ const key = valuesByIndexAndPath.map((values) => {
+ if (hasOwn.call(values, '')) return values[''];
+ if (!hasOwn.call(values, path)) throw Error('missing path?');
+ return values[path];
+ });
+ cb(key);
+ });
+ }
+
+ _getBaseComparator() {
+ if (this._sortFunction) return this._sortFunction;
+ if (!this._sortSpecParts.length) return (doc1, doc2) => 0;
+ return (doc1, doc2) => {
+ const key1 = this._getMinKeyFromDoc(doc1);
+ const key2 = this._getMinKeyFromDoc(doc2);
+ return this._compareKeys(key1, key2);
+ };
+ }
+
+ _getMinKeyFromDoc(doc) {
+ let minKey = null;
+ this._generateKeysFromDoc(doc, (key) => {
+ if (minKey === null) {
+ minKey = key;
+ return;
+ }
+ if (this._compareKeys(key, minKey) < 0) minKey = key;
+ });
+ return minKey;
+ }
+
+ _getPaths() {
+ return this._sortSpecParts.map((part) => part.path);
+ }
+
+ _keyFieldComparator(i) {
+ const invert = !this._sortSpecParts[i].ascending;
+ return (key1, key2) => {
+ const compare = _f._cmp(key1[i], key2[i]);
+ return invert ? -compare : compare;
+ };
+ }
+}
+
+function composeComparators(comparatorArray) {
+ return (a, b) => {
+ for (let i = 0; i < comparatorArray.length; ++i) {
+ const compare = comparatorArray[i](a, b);
+ if (compare !== 0) return compare;
+ }
+ return 0;
+ };
+}
+const MODIFIERS = {
+ $currentDate(target, field, arg) {
+ target[field] = new Date();
+ },
+ $inc(target, field, arg) {
+ if (typeof arg !== 'number') throw MinimongoError('Modifier $inc allowed for numbers only');
+ if (field in target && typeof target[field] !== 'number') throw MinimongoError('Cannot apply $inc modifier to non-number');
+ if (field in target) target[field] += arg;
+ else target[field] = arg;
+ },
+ $min(target, field, arg) {
+ if (typeof arg !== 'number') throw MinimongoError('Modifier $min allowed for numbers only');
+ if (field in target && typeof target[field] !== 'number') throw MinimongoError('Cannot apply $min modifier to non-number');
+ if (!(field in target) || target[field] > arg) target[field] = arg;
+ },
+ $max(target, field, arg) {
+ if (typeof arg !== 'number') throw MinimongoError('Modifier $max allowed for numbers only');
+ if (field in target && typeof target[field] !== 'number') throw MinimongoError('Cannot apply $max modifier to non-number');
+ if (!(field in target) || target[field] < arg) target[field] = arg;
+ },
+ $mul(target, field, arg) {
+ if (typeof arg !== 'number') throw MinimongoError('Modifier $mul allowed for numbers only');
+ if (field in target && typeof target[field] !== 'number') throw MinimongoError('Cannot apply $mul modifier to non-number');
+ if (field in target) target[field] *= arg;
+ else target[field] = 0;
+ },
+ $rename(target, field, arg, keypath, doc) {
+ if (target !== undefined) {
+ const object = target[field];
+ delete target[field];
+ const keyparts = arg.split('.');
+ const target2 = findModTarget(doc, keyparts, { forbidArray: true });
+ if (target2 === null) throw MinimongoError('$rename target field invalid');
+ target2[keyparts.pop()] = object;
+ }
+ },
+ $set(target, field, arg) {
+ if (target !== Object(target)) {
+ const err = MinimongoError('Cannot set property on non-object field');
+ err.setPropertyError = true;
+ throw err;
+ }
+ if (target === null) {
+ const err = MinimongoError('Cannot set property on null');
+ err.setPropertyError = true;
+ throw err;
+ }
+ assertHasValidFieldNames(arg);
+ target[field] = arg;
+ },
+ $setOnInsert(target, field, arg) {},
+ $unset(target, field, arg) {
+ if (target !== undefined) {
+ if (target instanceof Array) {
+ if (field in target) target[field] = null;
+ } else delete target[field];
+ }
+ },
+ $push(target, field, arg) {
+ if (target[field] === undefined) target[field] = [];
+ if (!(target[field] instanceof Array)) throw MinimongoError('Cannot apply $push modifier to non-array');
+ if (!(arg && arg.$each)) {
+ assertHasValidFieldNames(arg);
+ target[field].push(arg);
+ return;
+ }
+ const toPush = arg.$each;
+ assertHasValidFieldNames(toPush);
+ let position = undefined;
+ if ('$position' in arg) position = arg.$position;
+ let slice = undefined;
+ if ('$slice' in arg) slice = arg.$slice;
+ let sortFunction = undefined;
+ if (arg.$sort) sortFunction = new Sorter(arg.$sort).getComparator();
+ if (position === undefined) toPush.forEach((e) => target[field].push(e));
+ else {
+ const args = [position, 0].concat(toPush);
+ target[field].splice(...args);
+ }
+ if (sortFunction) target[field].sort(sortFunction);
+ if (slice !== undefined) {
+ if (slice === 0) target[field] = [];
+ else if (slice < 0) target[field] = target[field].slice(slice);
+ else target[field] = target[field].slice(0, slice);
+ }
+ },
+ $pushAll(target, field, arg) {
+ if (!(typeof arg === 'object' && arg instanceof Array)) throw MinimongoError('$pushAll allowed for arrays only');
+ assertHasValidFieldNames(arg);
+ const toPush = target[field];
+ if (toPush === undefined) target[field] = arg;
+ else if (!(toPush instanceof Array)) throw MinimongoError('Cannot apply $pushAll to non-array');
+ else toPush.push(...arg);
+ },
+ $addToSet(target, field, arg) {
+ let isEach = false;
+ if (typeof arg === 'object' && Object.keys(arg)[0] === '$each') isEach = true;
+ const values = isEach ? arg.$each : [arg];
+ assertHasValidFieldNames(values);
+ const toAdd = target[field];
+ if (toAdd === undefined) target[field] = values;
+ else if (!(toAdd instanceof Array)) throw MinimongoError('Cannot apply $addToSet to non-array');
+ else
+ values.forEach((v) => {
+ if (!toAdd.some((e) => _f._equal(v, e))) toAdd.push(v);
+ });
+ },
+ $pop(target, field, arg) {
+ if (target === undefined) return;
+ const toPop = target[field];
+ if (toPop === undefined) return;
+ if (!(toPop instanceof Array)) throw MinimongoError('Cannot apply $pop to non-array');
+ if (typeof arg === 'number' && arg < 0) toPop.splice(0, 1);
+ else toPop.pop();
+ },
+ $pull(target, field, arg) {
+ if (target === undefined) return;
+ const toPull = target[field];
+ if (toPull === undefined) return;
+ if (!(toPull instanceof Array)) throw MinimongoError('Cannot apply $pull to non-array');
+ let out;
+ if (arg != null && typeof arg === 'object' && !(arg instanceof Array)) {
+ const matcher = new Matcher(arg);
+ out = toPull.filter((e) => !matcher.documentMatches(e).result);
+ } else {
+ out = toPull.filter((e) => !_f._equal(e, arg));
+ }
+ target[field] = out;
+ },
+ $pullAll(target, field, arg) {
+ if (!(typeof arg === 'object' && arg instanceof Array)) throw MinimongoError('$pullAll allowed for arrays only');
+ if (target === undefined) return;
+ const toPull = target[field];
+ if (toPull === undefined) return;
+ if (!(toPull instanceof Array)) throw MinimongoError('Cannot apply $pullAll to non-array');
+ target[field] = toPull.filter((o) => !arg.some((e) => _f._equal(o, e)));
+ },
+ $bit(target, field, arg) {
+ throw MinimongoError('$bit is not supported');
+ },
+ $v() {},
+};
+
+const NO_CREATE_MODIFIERS = { $pop: true, $pull: true, $pullAll: true, $rename: true, $unset: true };
+const invalidCharMsg = { '$': "start with '$'", '.': "contain '.'", '\0': 'contain null bytes' };
+function assertHasValidFieldNames(doc) {
+ if (doc && typeof doc === 'object')
+ JSON.stringify(doc, (key, value) => {
+ assertIsValidFieldName(key);
+ return value;
+ });
+}
+function assertIsValidFieldName(key) {
+ let match;
+ if (typeof key === 'string' && (match = key.match(/^\$|\.|\0/))) throw MinimongoError(`Key ${key} must not ${invalidCharMsg[match[0]]}`);
+}
+function findModTarget(doc, keyparts, options = {}) {
+ let usedArrayIndex = false;
+ for (let i = 0; i < keyparts.length; i++) {
+ const last = i === keyparts.length - 1;
+ let keypart = keyparts[i];
+ if (!isIndexable(doc)) {
+ if (options.noCreate) return undefined;
+ const err = MinimongoError(`cannot use the part '${keypart}' to traverse ${doc}`);
+ err.setPropertyError = true;
+ throw err;
+ }
+ if (doc instanceof Array) {
+ if (options.forbidArray) return null;
+ if (keypart === '$') {
+ if (usedArrayIndex) throw MinimongoError('Too many positional elements');
+ if (!options.arrayIndices || !options.arrayIndices.length) throw MinimongoError('Positional operator did not find match');
+ keypart = options.arrayIndices[0];
+ usedArrayIndex = true;
+ } else if (isNumericKey(keypart)) keypart = parseInt(keypart);
+ else {
+ if (options.noCreate) return undefined;
+ throw MinimongoError(`can't append to array using string field name`);
+ }
+ if (last) keyparts[i] = keypart;
+ if (options.noCreate && keypart >= doc.length) return undefined;
+ while (doc.length < keypart) doc.push(null);
+ if (!last) {
+ if (doc.length === keypart) doc.push({});
+ else if (typeof doc[keypart] !== 'object') throw MinimongoError(`can't modify field '${keyparts[i + 1]}' of list value`);
+ }
+ } else {
+ assertIsValidFieldName(keypart);
+ if (!(keypart in doc)) {
+ if (options.noCreate) return undefined;
+ if (!last) doc[keypart] = {};
+ }
+ }
+ if (last) return doc;
+ doc = doc[keypart];
+ }
+}
+
+export const Minimongo = {
+ LocalCollection,
+ Matcher,
+ Sorter,
+};
+
+export { LocalCollection };
diff --git a/apps/meteor/src/meteor/mongo-id.ts b/apps/meteor/src/meteor/mongo-id.ts
new file mode 100644
index 0000000000000..8199ef69598da
--- /dev/null
+++ b/apps/meteor/src/meteor/mongo-id.ts
@@ -0,0 +1,109 @@
+import { EJSON } from './ejson.ts';
+import { Random } from './random';
+
+const _looksLikeObjectID = (str: string) => str.length === 24 && /^[0-9a-f]*$/.test(str);
+
+export class ObjectID {
+ private _str: string;
+
+ constructor(hexString?: string) {
+ if (hexString) {
+ hexString = hexString.toLowerCase();
+ if (!_looksLikeObjectID(hexString)) {
+ throw new Error('Invalid hexadecimal string for creating an ObjectID');
+ }
+ this._str = hexString;
+ } else {
+ this._str = Random.hexString(24);
+ }
+ }
+
+ equals(other: unknown): boolean {
+ return other instanceof ObjectID && this.valueOf() === other.valueOf();
+ }
+
+ toString(): string {
+ return `ObjectID("${this._str}")`;
+ }
+
+ clone(): ObjectID {
+ return new ObjectID(this._str);
+ }
+
+ typeName(): 'oid' {
+ return 'oid';
+ }
+
+ getTimestamp(): number {
+ return Number.parseInt(this._str.substr(0, 8), 16);
+ }
+
+ valueOf(): string {
+ return this._str;
+ }
+
+ toJSONValue(): string {
+ return this.valueOf();
+ }
+
+ toHexString(): string {
+ return this.valueOf();
+ }
+
+ static stringify(id: unknown): string {
+ if (id instanceof ObjectID) {
+ return id.valueOf();
+ }
+ if (typeof id === 'string') {
+ const firstChar = id.charAt(0);
+ if (id === '') {
+ return id;
+ }
+ if (
+ firstChar === '-' || // escape previously dashed strings
+ firstChar === '~' || // escape escaped numbers, true, false
+ _looksLikeObjectID(id) || // escape object-id-form strings
+ firstChar === '{'
+ ) {
+ return `-${id}`;
+ }
+ return id; // other strings go through unchanged.
+ }
+ if (id === undefined) {
+ return '-';
+ }
+ if (typeof id === 'object' && id !== null) {
+ throw new Error('Meteor does not currently support objects other than ObjectID as ids');
+ }
+ return `~${JSON.stringify(id)}`;
+ }
+
+ static parse(id: string): ObjectID | string | undefined {
+ const firstChar = id.charAt(0);
+ if (id === '') {
+ return id;
+ }
+ if (id === '-') {
+ return undefined;
+ }
+ if (firstChar === '-') {
+ return id.slice(1);
+ }
+ if (firstChar === '~') {
+ return JSON.parse(id.slice(1));
+ }
+ if (_looksLikeObjectID(id)) {
+ return new ObjectID(id);
+ }
+ return id;
+ }
+}
+
+EJSON.addType('oid', (str) => new ObjectID(str));
+
+export const MongoID = {
+ ObjectID,
+ _looksLikeObjectID,
+ idStringify: ObjectID.stringify,
+ idParse: ObjectID.parse,
+};
diff --git a/apps/meteor/src/meteor/mongo.ts b/apps/meteor/src/meteor/mongo.ts
new file mode 100644
index 0000000000000..0b3301dc4580f
--- /dev/null
+++ b/apps/meteor/src/meteor/mongo.ts
@@ -0,0 +1,686 @@
+import { AllowDeny } from './allow-deny.ts';
+import { check, Match } from './check.ts';
+import { DDP, type Connection } from './ddp-client.ts';
+import { EJSON } from './ejson.ts';
+import { Meteor } from './meteor.ts';
+import { LocalCollection } from './minimongo.ts';
+import { ObjectID } from './mongo-id.ts';
+import { Random } from './random.ts';
+
+class LocalCollectionDriver {
+ noConnCollections: Map = new Map();
+
+ open(name?: string, conn: Connection | null = null): LocalCollection {
+ if (!name) {
+ return new LocalCollection();
+ }
+
+ if (!conn) {
+ return ensureCollection(name, this.noConnCollections);
+ }
+
+ if (!conn._mongo_livedata_collections) {
+ conn._mongo_livedata_collections = new Map();
+ }
+
+ return ensureCollection(name, conn._mongo_livedata_collections);
+ }
+}
+
+const driver = new LocalCollectionDriver();
+
+function ensureCollection(name: string, collections: Map): LocalCollection {
+ const collection = collections.get(name);
+ if (collection) {
+ return collection;
+ }
+
+ const newCollection = new LocalCollection(name);
+ collections.set(name, newCollection);
+
+ return newCollection;
+}
+export const ID_GENERATORS = {
+ MONGO(name: string) {
+ return function () {
+ const src = name ? DDP.randomStream(`/collection/${name}`) : Random.insecure;
+ return new ObjectID(src.hexString(24));
+ };
+ },
+ STRING(name: string) {
+ return function () {
+ const src = name ? DDP.randomStream(`/collection/${name}`) : Random.insecure;
+ return src.id();
+ };
+ },
+};
+
+export function setupConnection(name: string, options: { connection?: Connection | null }): Connection | null {
+ if (!name || options.connection === null) return null;
+ if (options.connection) return options.connection;
+ return DDP.connection;
+}
+
+export function setupDriver(_name: string, _connection: Connection | null, options: { _driver?: any }): LocalCollectionDriver {
+ if (options._driver) return options._driver;
+ return driver;
+}
+
+export function setupMutationMethods(collection, name, options) {
+ if (options.defineMutationMethods === false) return;
+
+ try {
+ collection._defineMutationMethods({
+ useExisting: options._suppressSameNameError === true,
+ });
+ } catch (error: any) {
+ if (error.message === `A method named '/${name}/insertAsync' is already defined`) {
+ throw new Error(`There is already a collection named "${name}"`);
+ }
+ throw error;
+ }
+}
+
+export function validateCollectionName(name) {
+ if (!name && name !== null) {
+ console.debug(
+ 'Warning: creating anonymous collection. It will not be ' +
+ 'saved or synchronized over the network. (Pass null for ' +
+ 'the collection name to turn off this warning.)',
+ );
+ name = null;
+ }
+
+ if (name !== null && typeof name !== 'string') {
+ throw new Error('First argument to new Mongo.Collection must be a string or null');
+ }
+
+ return name;
+}
+
+export function normalizeOptions(options) {
+ if (options && options.methods) {
+ options = { connection: options };
+ }
+ if (options && options.manager && !options.connection) {
+ options.connection = options.manager;
+ }
+
+ const cleanedOptions = Object.fromEntries(Object.entries(options || {}).filter(([_, v]) => v !== undefined));
+ return {
+ connection: undefined,
+ idGeneration: 'STRING',
+ transform: null,
+ _driver: undefined,
+ _preventAutopublish: false,
+ ...cleanedOptions,
+ };
+}
+export const normalizeProjection = (options?: { fields?: any; projection?: any }) => {
+ const { fields, projection, ...otherOptions } = options || {};
+
+ return {
+ ...otherOptions,
+ ...(projection || fields ? { projection: fields || projection } : {}),
+ };
+};
+
+export class Collection {
+ _connection: Connection | null;
+
+ constructor(name, options) {
+ let _ID_GENERATORS$option;
+ let _ID_GENERATORS;
+
+ name = validateCollectionName(name);
+ options = normalizeOptions(options);
+
+ this._makeNewID =
+ (_ID_GENERATORS$option = (_ID_GENERATORS = ID_GENERATORS)[options.idGeneration]) === null || _ID_GENERATORS$option === void 0
+ ? void 0
+ : _ID_GENERATORS$option.call(_ID_GENERATORS, name);
+
+ this._transform = options.transform;
+ this.resolverType = options.resolverType;
+ this._connection = setupConnection(name, options);
+
+ const driver = setupDriver(name, this._connection, options);
+
+ this._driver = driver;
+ this._collection = driver.open(name, this._connection);
+ this._name = name;
+ this._settingUpReplicationPromise = this._maybeSetUpReplication(name, options);
+ setupMutationMethods(this, name, options);
+ Mongo._collections.set(name, this);
+ }
+
+ async _publishCursor(cursor, sub, collection) {
+ const observeHandle = await cursor.observeChanges(
+ {
+ added(id, fields) {
+ sub.added(collection, id, fields);
+ },
+
+ changed(id, fields) {
+ sub.changed(collection, id, fields);
+ },
+
+ removed(id) {
+ sub.removed(collection, id);
+ },
+ },
+ { nonMutatingCallbacks: true },
+ );
+
+ sub.onStop(async () => {
+ return await observeHandle.stop();
+ });
+
+ return observeHandle;
+ }
+
+ _rewriteSelector(selector) {
+ const { fallbackId } = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+
+ if (_selectorIsId(selector)) selector = { _id: selector };
+
+ if (Array.isArray(selector)) {
+ throw new Error("Mongo selector can't be an array.");
+ }
+
+ if (!selector || ('_id' in selector && !selector._id)) {
+ return { _id: fallbackId || Random.id() };
+ }
+
+ return selector;
+ }
+
+ _isRemoteCollection() {
+ return this._connection && this._connection !== Meteor.server;
+ }
+
+ _getFindSelector(args) {
+ if (args.length == 0) return {};
+ return args[0];
+ }
+
+ _getFindOptions(args) {
+ const [, options] = args || [];
+ const newOptions = normalizeProjection(options);
+ const self = this;
+
+ if (args.length < 2) {
+ return { transform: self._transform };
+ }
+ check(
+ newOptions,
+ Match.Optional(
+ Match.ObjectIncluding({
+ projection: Match.Optional(Match.OneOf(Object, undefined)),
+ sort: Match.Optional(Match.OneOf(Object, Array, Function, undefined)),
+ limit: Match.Optional(Match.OneOf(Number, undefined)),
+ skip: Match.Optional(Match.OneOf(Number, undefined)),
+ }),
+ ),
+ );
+
+ return { transform: self._transform, ...newOptions };
+ }
+
+ async _maybeSetUpReplication(name) {
+ let _registerStoreResult;
+ let _registerStoreResult$;
+ const self = this;
+
+ if (!(self._connection && self._connection.registerStoreClient && self._connection.registerStoreServer)) {
+ return;
+ }
+
+ const wrappedStoreCommon = {
+ saveOriginals() {
+ self._collection.saveOriginals();
+ },
+
+ retrieveOriginals() {
+ return self._collection.retrieveOriginals();
+ },
+
+ _getCollection() {
+ return self;
+ },
+ };
+
+ const wrappedStoreClient = {
+ ...{
+ async beginUpdate(batchSize, reset) {
+ if (batchSize > 1 || reset) self._collection.pauseObservers();
+ if (reset) await self._collection.remove({});
+ },
+
+ update(msg) {
+ const mongoId = ObjectID.parse(msg.id);
+ const doc = self._collection._docs.get(mongoId);
+
+ if (msg.msg === 'added' && doc) {
+ msg.msg = 'changed';
+ } else if (msg.msg === 'removed' && !doc) {
+ return;
+ } else if (msg.msg === 'changed' && !doc) {
+ msg.msg = 'added';
+
+ const _ref = msg.fields;
+
+ for (const field in _ref) {
+ const value = _ref[field];
+
+ if (value === void 0) {
+ delete msg.fields[field];
+ }
+ }
+ }
+
+ if (msg.msg === 'replace') {
+ const { replace } = msg;
+
+ if (!replace) {
+ if (doc) self._collection.remove(mongoId);
+ } else if (!doc) {
+ self._collection.insert(replace);
+ } else {
+ self._collection.update(mongoId, replace);
+ }
+ } else if (msg.msg === 'added') {
+ if (doc) {
+ throw new Error('Expected not to find a document already present for an add');
+ }
+
+ self._collection.insert({ _id: mongoId, ...msg.fields });
+ } else if (msg.msg === 'removed') {
+ if (!doc) throw new Error('Expected to find a document already present for removed');
+
+ self._collection.remove(mongoId);
+ } else if (msg.msg === 'changed') {
+ if (!doc) throw new Error('Expected to find a document to change');
+
+ const keys = Object.keys(msg.fields);
+
+ if (keys.length > 0) {
+ const modifier = {};
+
+ keys.forEach((key) => {
+ const value = msg.fields[key];
+
+ if (EJSON.equals(doc[key], value)) {
+ return;
+ }
+
+ if (typeof value === 'undefined') {
+ if (!modifier.$unset) {
+ modifier.$unset = {};
+ }
+
+ modifier.$unset[key] = 1;
+ } else {
+ if (!modifier.$set) {
+ modifier.$set = {};
+ }
+
+ modifier.$set[key] = value;
+ }
+ });
+
+ if (Object.keys(modifier).length > 0) {
+ self._collection.update(mongoId, modifier);
+ }
+ }
+ } else {
+ throw new Error("I don't know how to deal with this message");
+ }
+ },
+
+ endUpdate() {
+ self._collection.resumeObserversClient();
+ },
+
+ getDoc(id) {
+ return self.findOne(id);
+ },
+ },
+ ...wrappedStoreCommon,
+ };
+
+ const registerStoreResult = self._connection.registerStoreClient(name, wrappedStoreClient);
+
+ const message = 'There is already a collection named "'.concat(name, '"');
+
+ const logWarn = () => {
+ console.warn ? console.warn(message) : console.log(message);
+ };
+
+ if (!registerStoreResult) {
+ return logWarn();
+ }
+
+ return (_registerStoreResult = registerStoreResult) === null || _registerStoreResult === void 0
+ ? void 0
+ : (_registerStoreResult$ = _registerStoreResult.then) === null || _registerStoreResult$ === void 0
+ ? void 0
+ : _registerStoreResult$.call(_registerStoreResult, (ok) => {
+ if (!ok) {
+ logWarn();
+ }
+ });
+ }
+
+ find() {
+ for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
+ args[_key] = arguments[_key];
+ }
+
+ return this._collection.find(this._getFindSelector(args), this._getFindOptions(args));
+ }
+
+ findOne(...args) {
+ return this._collection.findOne(this._getFindSelector(args), this._getFindOptions(args));
+ }
+
+ _insert(doc, callback) {
+ if (!doc) {
+ throw new Error('insert requires an argument');
+ }
+
+ doc = Object.create(Object.getPrototypeOf(doc), Object.getOwnPropertyDescriptors(doc));
+
+ if ('_id' in doc) {
+ if (!doc._id || !(typeof doc._id === 'string' || doc._id instanceof ObjectID)) {
+ throw new Error('Meteor requires document _id fields to be non-empty strings or ObjectIDs');
+ }
+ } else {
+ let generateId = true;
+
+ if (this._isRemoteCollection()) {
+ const enclosing = DDP._CurrentMethodInvocation.get();
+
+ if (!enclosing) {
+ generateId = false;
+ }
+ }
+
+ if (generateId) {
+ doc._id = this._makeNewID();
+ }
+ }
+
+ const chooseReturnValueFromCollectionResult = function (result) {
+ if (Meteor._isPromise(result)) return result;
+
+ if (doc._id) {
+ return doc._id;
+ }
+
+ doc._id = result;
+
+ return result;
+ };
+
+ const wrappedCallback = wrapCallback(callback, chooseReturnValueFromCollectionResult);
+
+ if (this._isRemoteCollection()) {
+ const result = this._callMutatorMethod('insert', [doc], wrappedCallback);
+
+ return chooseReturnValueFromCollectionResult(result);
+ }
+
+ try {
+ let result;
+
+ if (wrappedCallback) {
+ this._collection.insert(doc, wrappedCallback);
+ } else {
+ result = this._collection.insert(doc);
+ }
+
+ return chooseReturnValueFromCollectionResult(result);
+ } catch (e) {
+ if (callback) {
+ callback(e);
+
+ return null;
+ }
+
+ throw e;
+ }
+ }
+
+ insert(doc, callback) {
+ return this._insert(doc, callback);
+ }
+
+ update(selector, modifier) {
+ for (var _len3 = arguments.length, optionsAndCallback = new Array(_len3 > 2 ? _len3 - 2 : 0), _key3 = 2; _key3 < _len3; _key3++) {
+ optionsAndCallback[_key3 - 2] = arguments[_key3];
+ }
+
+ const callback = popCallbackFromArgs(optionsAndCallback);
+ const options = { ...(optionsAndCallback[0] || null) };
+ let insertedId;
+
+ if (options && options.upsert) {
+ if (options.insertedId) {
+ if (!(typeof options.insertedId === 'string' || options.insertedId instanceof Mongo.ObjectID))
+ throw new Error('insertedId must be string or ObjectID');
+
+ insertedId = options.insertedId;
+ } else if (!selector || !selector._id) {
+ insertedId = this._makeNewID();
+ options.generatedId = true;
+ options.insertedId = insertedId;
+ }
+ }
+
+ selector = Mongo.Collection._rewriteSelector(selector, { fallbackId: insertedId });
+
+ const wrappedCallback = wrapCallback(callback);
+
+ if (this._isRemoteCollection()) {
+ const args = [selector, modifier, options];
+
+ return this._callMutatorMethod('update', args, callback);
+ }
+
+ try {
+ return this._collection.update(selector, modifier, options, wrappedCallback);
+ } catch (e) {
+ if (callback) {
+ callback(e);
+
+ return null;
+ }
+
+ throw e;
+ }
+ }
+
+ remove(selector, callback) {
+ selector = Mongo.Collection._rewriteSelector(selector);
+
+ if (this._isRemoteCollection()) {
+ return this._callMutatorMethod('remove', [selector], callback);
+ }
+
+ return this._collection.remove(selector);
+ }
+
+ upsert(selector, modifier, options, callback) {
+ if (!callback && typeof options === 'function') {
+ callback = options;
+ options = {};
+ }
+
+ return this.update(selector, modifier, { ...options, _returnObject: true, upsert: true });
+ }
+
+ findOneAsync() {
+ for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
+ args[_key] = arguments[_key];
+ }
+
+ return this._collection.findOneAsync(this._getFindSelector(args), this._getFindOptions(args));
+ }
+
+ _insertAsync(doc) {
+ const options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+
+ if (!doc) {
+ throw new Error('insert requires an argument');
+ }
+
+ doc = Object.create(Object.getPrototypeOf(doc), Object.getOwnPropertyDescriptors(doc));
+
+ if ('_id' in doc) {
+ if (!doc._id || !(typeof doc._id === 'string' || doc._id instanceof Mongo.ObjectID)) {
+ throw new Error('Meteor requires document _id fields to be non-empty strings or ObjectIDs');
+ }
+ } else {
+ let generateId = true;
+
+ if (this._isRemoteCollection()) {
+ const enclosing = DDP._CurrentMethodInvocation.get();
+
+ if (!enclosing) {
+ generateId = false;
+ }
+ }
+
+ if (generateId) {
+ doc._id = this._makeNewID();
+ }
+ }
+
+ const chooseReturnValueFromCollectionResult = function (result) {
+ if (Meteor._isPromise(result)) return result;
+
+ if (doc._id) {
+ return doc._id;
+ }
+
+ doc._id = result;
+
+ return result;
+ };
+
+ if (this._isRemoteCollection()) {
+ const promise = this._callMutatorMethodAsync('insertAsync', [doc], options);
+
+ promise.then(chooseReturnValueFromCollectionResult);
+ promise.stubPromise = promise.stubPromise.then(chooseReturnValueFromCollectionResult);
+ promise.serverPromise = promise.serverPromise.then(chooseReturnValueFromCollectionResult);
+
+ return promise;
+ }
+
+ return this._collection.insertAsync(doc).then(chooseReturnValueFromCollectionResult);
+ }
+
+ insertAsync(doc, options) {
+ return this._insertAsync(doc, options);
+ }
+
+ updateAsync(selector, modifier) {
+ const options = { ...((arguments.length <= 2 ? undefined : arguments[2]) || null) };
+ let insertedId;
+
+ if (options && options.upsert) {
+ if (options.insertedId) {
+ if (!(typeof options.insertedId === 'string' || options.insertedId instanceof Mongo.ObjectID))
+ throw new Error('insertedId must be string or ObjectID');
+
+ insertedId = options.insertedId;
+ } else if (!selector || !selector._id) {
+ insertedId = this._makeNewID();
+ options.generatedId = true;
+ options.insertedId = insertedId;
+ }
+ }
+
+ selector = Mongo.Collection._rewriteSelector(selector, { fallbackId: insertedId });
+
+ if (this._isRemoteCollection()) {
+ const args = [selector, modifier, options];
+
+ return this._callMutatorMethodAsync('updateAsync', args, options);
+ }
+
+ return this._collection.updateAsync(selector, modifier, options);
+ }
+
+ removeAsync(selector) {
+ const options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+
+ selector = Mongo.Collection._rewriteSelector(selector);
+
+ if (this._isRemoteCollection()) {
+ return this._callMutatorMethodAsync('removeAsync', [selector], options);
+ }
+
+ return this._collection.removeAsync(selector);
+ }
+
+ async upsertAsync(selector, modifier, options) {
+ return this.updateAsync(selector, modifier, { ...options, _returnObject: true, upsert: true });
+ }
+
+ countDocuments() {
+ return this._collection.countDocuments(...arguments);
+ }
+
+ estimatedDocumentCount() {
+ return this._collection.estimatedDocumentCount(...arguments);
+ }
+}
+
+export const _collections = new Map();
+
+export const getCollection = (name: string) => {
+ return _collections.get(name);
+};
+
+export const Mongo = {
+ _collections,
+ getCollection,
+ Collection,
+};
+
+function wrapCallback(callback: Function | undefined, convertResult: Function | undefined = undefined): Function | undefined {
+ return (
+ callback &&
+ function (error, result) {
+ if (error) {
+ callback(error);
+ } else if (typeof convertResult === 'function') {
+ callback(error, convertResult(result));
+ } else {
+ callback(error, result);
+ }
+ }
+ );
+}
+
+function popCallbackFromArgs(args: unknown[]): ((error: any, result?: any) => void) | undefined {
+ const last: unknown = args.at(-1);
+ if (typeof last === 'function') {
+ args.pop();
+ return function (error, result) {
+ last(error, result);
+ };
+ }
+
+ if (last !== undefined) {
+ return;
+ }
+
+ args.pop();
+ return undefined;
+}
+
+Object.assign(Mongo.Collection.prototype, AllowDeny.CollectionPrototype);
diff --git a/apps/meteor/src/meteor/oauth.ts b/apps/meteor/src/meteor/oauth.ts
new file mode 100644
index 0000000000000..4122d3fd04e26
--- /dev/null
+++ b/apps/meteor/src/meteor/oauth.ts
@@ -0,0 +1,219 @@
+import { Base64 } from './base64.ts';
+import { check } from './check.ts';
+import { Meteor } from './meteor.ts';
+import { Reload } from './reload.ts';
+import { _constructUrl } from './url.ts';
+
+type PopupDimensions = {
+ width?: number;
+ height?: number;
+};
+
+type OAuthLoginOptions = {
+ loginService?: string;
+ loginStyle?: 'popup' | 'redirect' | undefined;
+ loginUrl: string;
+ credentialRequestCompleteCallback?: ((token?: string | Error) => void) | undefined;
+ credentialToken: string;
+ popupOptions?: PopupDimensions;
+ redirectUrl?: string;
+};
+
+type OAuthState = {
+ loginStyle: 'popup' | 'redirect' | undefined;
+ credentialToken: string;
+ isCordova: boolean;
+ redirectUrl?: string;
+};
+
+type OAuthConfiguration = {
+ loginStyle?: 'popup' | 'redirect';
+ [key: string]: any;
+};
+
+const credentialSecrets: Record = {};
+const STORAGE_TOKEN_PREFIX = 'Meteor.oauth.credentialSecret-';
+
+const openCenteredPopup = (url: string, width: number, height: number): Window | null => {
+ const screenX = typeof window.screenX !== 'undefined' ? window.screenX : window.screenLeft;
+ const screenY = typeof window.screenY !== 'undefined' ? window.screenY : window.screenTop;
+ const outerWidth = typeof window.outerWidth !== 'undefined' ? window.outerWidth : document.body.clientWidth;
+ const outerHeight = typeof window.outerHeight !== 'undefined' ? window.outerHeight : document.body.clientHeight - 22;
+
+ const left = screenX + (outerWidth - width) / 2;
+ const top = screenY + (outerHeight - height) / 2;
+
+ const features = `width=${width},height=${height},left=${left},top=${top},scrollbars=yes`;
+ const newwindow = window.open(url, 'Login', features);
+
+ if (!newwindow || newwindow.closed) {
+ const err: any = new Error('The login popup was blocked by the browser');
+ err.attemptedUrl = url;
+ throw err;
+ }
+
+ if (newwindow.focus) {
+ newwindow.focus();
+ }
+
+ return newwindow;
+};
+
+export const OAuth = {
+ _storageTokenPrefix: STORAGE_TOKEN_PREFIX,
+
+ showPopup(url: string, callback: () => void, dimensions?: PopupDimensions) {
+ const width = dimensions?.width || 650;
+ const height = dimensions?.height || 331;
+
+ const popup = openCenteredPopup(url, width, height);
+
+ if (!popup) return;
+
+ const checkPopupOpen = setInterval(() => {
+ let popupClosed;
+ try {
+ popupClosed = popup.closed || popup.closed === undefined;
+ } catch (e) {
+ return;
+ }
+
+ if (popupClosed) {
+ clearInterval(checkPopupOpen);
+ callback();
+ }
+ }, 100);
+ },
+
+ _loginStyle(_service: string, config: OAuthConfiguration, options?: { loginStyle?: string }): 'popup' | 'redirect' | undefined {
+ const loginStyle = options?.loginStyle || config.loginStyle || 'popup';
+
+ if (loginStyle !== 'popup' && loginStyle !== 'redirect') {
+ throw new Error(`Invalid login style: ${loginStyle}`);
+ }
+
+ if (loginStyle === 'redirect') {
+ try {
+ sessionStorage.setItem('Meteor.oauth.test', 'test');
+ sessionStorage.removeItem('Meteor.oauth.test');
+ } catch (e) {
+ return 'popup';
+ }
+ }
+
+ return loginStyle;
+ },
+
+ _stateParam(loginStyle: 'popup' | 'redirect' | undefined, credentialToken: string, redirectUrl?: string) {
+ const state: OAuthState = {
+ loginStyle,
+ credentialToken,
+ isCordova: false,
+ };
+ const setRedirectUrl = Meteor.settings?.public?.packages?.oauth?.setRedirectUrlWhenLoginStyleIsPopup;
+
+ if (loginStyle === 'redirect' || (setRedirectUrl && loginStyle === 'popup')) {
+ state.redirectUrl = redirectUrl || `${window.location}`;
+ }
+
+ return Base64.encode(JSON.stringify(state));
+ },
+
+ _redirectUri(serviceName: string, _config: any, params?: any, absoluteUrlOptions?: any) {
+ const safeParams = params ? { ...params } : undefined;
+ if (safeParams) {
+ delete safeParams.cordova;
+ delete safeParams.android;
+ }
+
+ const queryParams = safeParams && Object.keys(safeParams).length > 0 ? safeParams : null;
+
+ return _constructUrl(Meteor.absoluteUrl(`_oauth/${serviceName}`, absoluteUrlOptions), null, queryParams);
+ },
+
+ saveDataForRedirect(loginService: string, credentialToken: string) {
+ Reload._onMigrate('oauth', () => [
+ true,
+ {
+ loginService,
+ credentialToken,
+ },
+ ]);
+ Reload._migrate(null, {
+ immediateMigration: true,
+ });
+ },
+
+ getDataAfterRedirect() {
+ const migrationData = Reload._migrationData('oauth');
+
+ if (!migrationData?.credentialToken) {
+ return null;
+ }
+
+ const { credentialToken } = migrationData;
+ const key = OAuth._storageTokenPrefix + credentialToken;
+ let credentialSecret;
+
+ try {
+ credentialSecret = sessionStorage.getItem(key);
+ sessionStorage.removeItem(key);
+ } catch (e) {
+ console.debug('error retrieving credentialSecret', e);
+ }
+
+ return {
+ loginService: migrationData.loginService,
+ credentialToken,
+ credentialSecret,
+ };
+ },
+
+ launchLogin(options: OAuthLoginOptions) {
+ if (!options.loginService) {
+ throw new Error('loginService required');
+ }
+
+ if (options.loginStyle === 'popup') {
+ OAuth.showPopup(
+ options.loginUrl,
+ () => {
+ if (options.credentialRequestCompleteCallback) {
+ options.credentialRequestCompleteCallback(options.credentialToken);
+ }
+ },
+ options.popupOptions,
+ );
+ } else if (options.loginStyle === 'redirect') {
+ OAuth.saveDataForRedirect(options.loginService, options.credentialToken);
+ window.location.href = options.loginUrl;
+ } else {
+ throw new Error('invalid login style');
+ }
+ },
+
+ _handleCredentialSecret(credentialToken: string, secret: string) {
+ check(credentialToken, String);
+ check(secret, String);
+
+ if (!Object.prototype.hasOwnProperty.call(credentialSecrets, credentialToken)) {
+ credentialSecrets[credentialToken] = secret;
+ } else {
+ throw new Error('Duplicate credential token from OAuth login');
+ }
+ },
+
+ _retrieveCredentialSecret(credentialToken: string) {
+ let secret: string | null = credentialSecrets[credentialToken] ?? null;
+
+ if (!secret) {
+ const localStorageKey = OAuth._storageTokenPrefix + credentialToken;
+ secret = Meteor._localStorage.getItem(localStorageKey);
+ Meteor._localStorage.removeItem(localStorageKey);
+ } else {
+ delete credentialSecrets[credentialToken];
+ }
+
+ return secret;
+ },
+};
diff --git a/apps/meteor/src/meteor/ordered-dict.ts b/apps/meteor/src/meteor/ordered-dict.ts
new file mode 100644
index 0000000000000..ba2434b9525b1
--- /dev/null
+++ b/apps/meteor/src/meteor/ordered-dict.ts
@@ -0,0 +1,162 @@
+type Node = {
+ key: K;
+ value: V;
+ next?: Node;
+ prev?: Node;
+};
+
+export class OrderedDict implements Iterable<[K, V]> {
+ readonly #map = new Map>();
+
+ #head?: Node;
+
+ #tail?: Node;
+
+ constructor(entries?: Iterable) {
+ if (entries) {
+ for (const [k, v] of entries) this.append(k, v);
+ }
+ }
+
+ *[Symbol.iterator](): Iterator<[K, V]> {
+ yield* this.entries();
+ }
+
+ *entries(): IterableIterator<[K, V]> {
+ for (let n = this.#head; n; n = n.next) yield [n.key, n.value];
+ }
+
+ *keys(): IterableIterator {
+ for (let n = this.#head; n; n = n.next) yield n.key;
+ }
+
+ *values(): IterableIterator {
+ for (let n = this.#head; n; n = n.next) yield n.value;
+ }
+
+ get size(): number {
+ return this.#map.size;
+ }
+
+ get empty(): boolean {
+ return this.#map.size === 0;
+ }
+
+ has(key: K): boolean {
+ return this.#map.has(key);
+ }
+
+ get(key: K): V | undefined {
+ return this.#map.get(key)?.value;
+ }
+
+ first(): K | undefined {
+ return this.#head?.key;
+ }
+
+ last(): K | undefined {
+ return this.#tail?.key;
+ }
+
+ next(key: K): K | undefined {
+ return this.#map.get(key)?.next?.key;
+ }
+
+ prev(key: K): K | undefined {
+ return this.#map.get(key)?.prev?.key;
+ }
+
+ set(key: K, value: V): void {
+ const node = this.#map.get(key);
+ if (node) node.value = value;
+ else this.append(key, value);
+ }
+
+ append(key: K, value: V): void {
+ if (this.#map.has(key)) throw new Error(`Item ${String(key)} already present.`);
+ const node: Node = { key, value };
+ this.#insertTail(node);
+ this.#map.set(key, node);
+ }
+
+ putBefore(key: K, value: V, beforeKey?: K | null): void {
+ if (this.#map.has(key)) throw new Error(`Item ${String(key)} already present.`);
+ const node: Node = { key, value };
+
+ if (!beforeKey) {
+ this.#insertTail(node);
+ } else {
+ const ref = this.#map.get(beforeKey);
+ if (!ref) throw new Error(`Reference item ${String(beforeKey)} not found.`);
+ this.#insertBefore(node, ref);
+ }
+ this.#map.set(key, node);
+ }
+
+ remove(key: K): V {
+ const node = this.#map.get(key);
+ if (!node) throw new Error(`Item ${String(key)} not found.`);
+ this.#unlink(node);
+ this.#map.delete(key);
+ return node.value;
+ }
+
+ moveBefore(key: K, beforeKey: K | null): void {
+ if (key === beforeKey) return;
+
+ const node = this.#map.get(key);
+ if (!node) throw new Error(`Item to move ${String(key)} not found.`);
+
+ this.#unlink(node);
+
+ if (!beforeKey) {
+ this.#insertTail(node);
+ } else {
+ const ref = this.#map.get(beforeKey);
+ if (!ref) throw new Error(`Reference item ${String(beforeKey)} not found.`);
+ this.#insertBefore(node, ref);
+ }
+ }
+
+ forEach(callback: (value: V, key: K, index: number) => void | { break: boolean }): void {
+ let index = 0;
+ for (let n = this.#head; n; n = n.next) {
+ const result = callback(n.value, n.key, index++);
+ if (result && typeof result === 'object' && result.break) return;
+ }
+ }
+
+ clear(): void {
+ this.#map.clear();
+ this.#head = undefined;
+ this.#tail = undefined;
+ }
+
+ #unlink(node: Node): void {
+ if (node.prev) node.prev.next = node.next;
+ else this.#head = node.next;
+
+ if (node.next) node.next.prev = node.prev;
+ else this.#tail = node.prev;
+
+ node.next = undefined;
+ node.prev = undefined;
+ }
+
+ #insertTail(node: Node): void {
+ node.prev = this.#tail;
+ if (this.#tail) this.#tail.next = node;
+ else this.#head = node;
+ this.#tail = node;
+ }
+
+ #insertBefore(node: Node, ref: Node): void {
+ node.next = ref;
+ node.prev = ref.prev;
+
+ if (ref.prev) ref.prev.next = node;
+ else this.#head = node;
+
+ ref.prev = node;
+ }
+}
diff --git a/apps/meteor/src/meteor/random.ts b/apps/meteor/src/meteor/random.ts
new file mode 100644
index 0000000000000..b279fb6d5100b
--- /dev/null
+++ b/apps/meteor/src/meteor/random.ts
@@ -0,0 +1 @@
+export { Random } from '@rocket.chat/random';
diff --git a/apps/meteor/src/meteor/reactive-dict.ts b/apps/meteor/src/meteor/reactive-dict.ts
new file mode 100644
index 0000000000000..84e9b20465399
--- /dev/null
+++ b/apps/meteor/src/meteor/reactive-dict.ts
@@ -0,0 +1,238 @@
+import { EJSON } from './ejson.ts';
+import { ObjectID } from './mongo-id.ts';
+import { Tracker } from './tracker.ts';
+
+type DictValue = any;
+
+export class ReactiveDict {
+ static _dictsToMigrate: Record = {};
+
+ private name: string | undefined;
+
+ private _map = new Map();
+
+ private _allDep = new Tracker.Dependency();
+
+ private _keyDeps = new Map();
+
+ private _keyValueDeps = new Map>();
+
+ constructor(dictName?: string | object, dictData?: object) {
+ let initialData: Record = {};
+
+ if (dictName) {
+ if (typeof dictName === 'string') {
+ this.name = dictName;
+ ReactiveDict._registerDictForMigrate(dictName, this);
+ const migratedData = ReactiveDict._loadMigratedDict(dictName);
+
+ if (migratedData) {
+ for (const key of Object.keys(migratedData)) {
+ try {
+ const val = migratedData[key];
+ const parsed = val === 'undefined' ? undefined : EJSON.parse(val);
+ this._map.set(key, parsed);
+ } catch (e) {
+ console.error(`ReactiveDict: Failed to migrate key "${key}"`, e);
+ }
+ }
+ return;
+ }
+ initialData = (dictData || {}) as Record;
+ } else if (typeof dictName === 'object') {
+ initialData = dictName as Record;
+ } else {
+ throw new Error(`Invalid ReactiveDict argument: ${dictName}`);
+ }
+ } else if (typeof dictData === 'object') {
+ initialData = dictData as Record;
+ }
+ if (initialData) {
+ for (const key of Object.keys(initialData)) {
+ this._map.set(key, initialData[key]);
+ }
+ }
+ }
+
+ set(keyOrObject: string | object, value?: any): void {
+ if (typeof keyOrObject === 'object' && value === undefined) {
+ this._setObject(keyOrObject);
+ return;
+ }
+
+ const key = keyOrObject as string;
+ const oldValue = this._map.get(key);
+ if (this._map.has(key) && EJSON.equals(oldValue, value)) {
+ return;
+ }
+
+ this._map.set(key, value);
+ this._allDep.changed();
+ this._keyDeps.get(key)?.changed();
+ const valDeps = this._keyValueDeps.get(key);
+ if (valDeps) {
+ if (oldValue !== undefined) {
+ const oldStr = EJSON.stringify(oldValue);
+ valDeps.get(oldStr)?.changed();
+ } else {
+ valDeps.get('undefined')?.changed();
+ }
+ if (value !== undefined) {
+ const newStr = EJSON.stringify(value);
+ valDeps.get(newStr)?.changed();
+ } else {
+ valDeps.get('undefined')?.changed();
+ }
+ }
+ }
+
+ setDefault(keyOrObject: string | object, value?: any): void {
+ if (typeof keyOrObject === 'object' && value === undefined) {
+ const obj = keyOrObject as Record;
+ for (const key of Object.keys(obj)) {
+ this.setDefault(key, obj[key]);
+ }
+ return;
+ }
+
+ const key = keyOrObject as string;
+ if (!this._map.has(key)) {
+ this.set(key, value);
+ }
+ }
+
+ get(key: string): any {
+ this._ensureKeyDep(key).depend();
+
+ const val = this._map.get(key);
+ return val === undefined ? undefined : EJSON.clone(val);
+ }
+
+ equals(key: string, value: string | number | boolean | null | undefined | Date | ObjectID): boolean {
+ if (
+ typeof value !== 'string' &&
+ typeof value !== 'number' &&
+ typeof value !== 'boolean' &&
+ typeof value !== 'undefined' &&
+ !(value instanceof Date) &&
+ !(value instanceof ObjectID) &&
+ value !== null
+ ) {
+ throw new Error('ReactiveDict.equals: value must be scalar');
+ }
+
+ if (Tracker.active) {
+ const serializedValue = value === undefined ? 'undefined' : EJSON.stringify(value);
+ let valDeps = this._keyValueDeps.get(key);
+ if (!valDeps) {
+ valDeps = new Map();
+ this._keyValueDeps.set(key, valDeps);
+ }
+ let dep = valDeps.get(serializedValue);
+ if (!dep) {
+ dep = new Tracker.Dependency();
+ valDeps.set(serializedValue, dep);
+ }
+
+ const isNew = dep.depend();
+ if (isNew) {
+ Tracker.onInvalidate(() => {
+ if (!dep.hasDependents()) {
+ valDeps.delete(serializedValue);
+ if (valDeps.size === 0) {
+ this._keyValueDeps.delete(key);
+ }
+ }
+ });
+ }
+ }
+
+ const currentValue = this._map.get(key);
+ return EJSON.equals(currentValue, value);
+ }
+
+ all(): Record {
+ this._allDep.depend();
+ const ret: Record = {};
+ for (const [key, val] of this._map.entries()) {
+ ret[key] = EJSON.clone(val);
+ }
+ return ret;
+ }
+
+ clear(): void {
+ const oldKeys = Array.from(this._map.keys());
+ this._map.clear();
+
+ this._allDep.changed();
+
+ for (const key of oldKeys) {
+ this._keyDeps.get(key)?.changed();
+ const valDeps = this._keyValueDeps.get(key);
+ if (valDeps) {
+ for (const dep of valDeps.values()) {
+ dep.changed();
+ }
+ valDeps.clear(); // Safe to clear since we deleted the key
+ }
+ }
+ }
+
+ delete(key: string): boolean {
+ if (!this._map.has(key)) return false;
+
+ const oldValue = this._map.get(key);
+ this._map.delete(key);
+
+ this._allDep.changed();
+ this._keyDeps.get(key)?.changed();
+
+ const valDeps = this._keyValueDeps.get(key);
+ if (valDeps) {
+ if (oldValue !== undefined) {
+ valDeps.get(EJSON.stringify(oldValue))?.changed();
+ }
+ valDeps.get('undefined')?.changed();
+ }
+
+ return true;
+ }
+
+ destroy(): void {
+ this.clear();
+ if (this.name && ReactiveDict._dictsToMigrate[this.name]) {
+ delete ReactiveDict._dictsToMigrate[this.name];
+ }
+ }
+
+ private _setObject(object: Record) {
+ for (const key of Object.keys(object)) {
+ this.set(key, object[key]);
+ }
+ }
+
+ private _ensureKeyDep(key: string): Tracker.Dependency {
+ let dep = this._keyDeps.get(key);
+ if (!dep) {
+ dep = new Tracker.Dependency();
+ this._keyDeps.set(key, dep);
+ }
+ return dep;
+ }
+
+ _getMigrationData(): Record {
+ const migrationData: Record = {};
+ for (const [key, value] of this._map.entries()) {
+ migrationData[key] = value === undefined ? 'undefined' : EJSON.stringify(value);
+ }
+ return migrationData;
+ }
+
+ static _registerDictForMigrate(dictName: string, dict: ReactiveDict) {
+ ReactiveDict._dictsToMigrate[dictName] = dict;
+ }
+
+ static _loadMigratedDict(_dictName: string) {
+ return null;
+ }
+}
diff --git a/apps/meteor/src/meteor/reactive-var.ts b/apps/meteor/src/meteor/reactive-var.ts
new file mode 100644
index 0000000000000..f9ef6aa127bc0
--- /dev/null
+++ b/apps/meteor/src/meteor/reactive-var.ts
@@ -0,0 +1,37 @@
+import { Tracker } from './tracker.ts';
+
+type EqualsFunc = (oldValue: T, newValue: T) => boolean;
+
+const isEqual = (a: unknown, b: unknown): boolean => {
+ if (a !== b) return false;
+ return a === null || (typeof a !== 'object' && typeof a !== 'function');
+};
+
+export class ReactiveVar {
+ #value: T;
+
+ readonly #equals: EqualsFunc;
+
+ readonly #dep = new Tracker.Dependency();
+
+ constructor(initialValue: T, equalsFunc: EqualsFunc = isEqual) {
+ this.#value = initialValue;
+ this.#equals = equalsFunc;
+ }
+
+ get(): T {
+ if (Tracker.active) this.#dep.depend();
+ return this.#value;
+ }
+
+ set(newValue: T): void {
+ if (this.#equals(this.#value, newValue)) return;
+
+ this.#value = newValue;
+ this.#dep.changed();
+ }
+
+ toString(): string {
+ return `ReactiveVar{${this.get()}}`;
+ }
+}
diff --git a/apps/meteor/src/meteor/reload.ts b/apps/meteor/src/meteor/reload.ts
new file mode 100644
index 0000000000000..2fb6c9fd9b7d1
--- /dev/null
+++ b/apps/meteor/src/meteor/reload.ts
@@ -0,0 +1,191 @@
+function debug(message: string, context?: any) {
+ console.debug(`[reload] ${message}`, JSON.stringify(context));
+}
+
+const KEY_NAME = 'Meteor_Reload';
+
+let oldData: any = {};
+let oldJson: string | null = null;
+let safeSessionStorage: any = null;
+try {
+ safeSessionStorage = window.sessionStorage;
+ if (safeSessionStorage) {
+ safeSessionStorage.setItem('__dummy__', '1');
+ safeSessionStorage.removeItem('__dummy__');
+ } else {
+ safeSessionStorage = null;
+ }
+} catch (e) {
+ safeSessionStorage = null;
+}
+function _getData() {
+ return safeSessionStorage?.getItem(KEY_NAME);
+}
+
+if (safeSessionStorage) {
+ oldJson = _getData();
+ safeSessionStorage.removeItem(KEY_NAME);
+} else {
+ console.debug('Browser does not support sessionStorage. Not retrieving migration state.');
+}
+
+if (!oldJson) {
+ oldJson = '{}';
+}
+let oldParsed: any = {};
+try {
+ oldParsed = JSON.parse(oldJson);
+ if (typeof oldParsed !== 'object') {
+ console.debug('Got bad data on reload. Ignoring.');
+ oldParsed = {};
+ }
+} catch (err) {
+ console.debug('Got invalid JSON on reload. Ignoring.');
+}
+
+if (oldParsed.reload && typeof oldParsed.data === 'object') {
+ oldData = oldParsed.data;
+}
+
+let providers: any[] = [];
+function _onMigrate(...args: [string, (...args: any[]) => any] | [(...args: any[]) => any]) {
+ let name: string | undefined;
+ let callback: ((...args: any[]) => any) | undefined;
+
+ if (args.length === 1) {
+ callback = args[0];
+ } else if (args.length === 2) {
+ name = args[0] as string;
+ callback = args[1];
+ }
+
+ debug('_onMigrate', { name });
+ if (!callback) {
+ callback = name as unknown as (...args: any[]) => any;
+ name = undefined as unknown as string;
+ debug('_onMigrate no callback');
+ }
+
+ providers.push({ name, callback });
+}
+function _migrationData(name: string) {
+ debug('_migrationData', { name });
+ return oldData[name];
+}
+const pollProviders = function (tryReload: ((...args: any[]) => any) | null, options: any) {
+ debug('pollProviders', { options });
+ tryReload =
+ tryReload ||
+ function () {
+ // noop
+ };
+ options = options || {};
+
+ const { immediateMigration } = options;
+ debug(`pollProviders is ${immediateMigration ? '' : 'NOT '}immediateMigration`, { options });
+ const migrationData: any = {};
+ let allReady = true;
+ providers.forEach((p) => {
+ const { callback, name } = p || {};
+ const [ready, data] = callback(tryReload, options) || [];
+
+ debug(`pollProviders provider ${name || 'unknown'} is ${ready ? 'ready' : 'NOT ready'}`, { options });
+ if (!ready) {
+ allReady = false;
+ }
+
+ if (data !== undefined && name) {
+ migrationData[name] = data;
+ }
+ });
+
+ if (allReady) {
+ debug('pollProviders allReady', { options, migrationData });
+ return migrationData;
+ }
+
+ if (immediateMigration) {
+ debug('pollProviders immediateMigration', { options, migrationData });
+ return migrationData;
+ }
+
+ return null;
+};
+function _migrate(tryReload: ((...args: any[]) => any) | null, options: any) {
+ debug('_migrate', { options });
+ const migrationData = pollProviders(tryReload, options);
+ if (migrationData === null) {
+ return false; // not ready yet..
+ }
+
+ let json;
+ try {
+ json = JSON.stringify({
+ data: migrationData,
+ reload: true,
+ });
+ } catch (err) {
+ console.debug("Couldn't serialize data for migration", migrationData);
+ throw err;
+ }
+
+ if (safeSessionStorage) {
+ try {
+ safeSessionStorage.setItem(KEY_NAME, json);
+ } catch (err) {
+ console.debug("Couldn't save data for migration to sessionStorage", err);
+ }
+ } else {
+ console.debug('Browser does not support sessionStorage. Not saving migration state.');
+ }
+
+ return true;
+}
+function _withFreshProvidersForTest(f: () => void) {
+ const originalProviders = providers.slice(0);
+ providers = [];
+ try {
+ f();
+ } finally {
+ providers = originalProviders;
+ }
+}
+let reloading = false;
+function _reload(options: any) {
+ debug('_reload', { options });
+ options = options || {};
+
+ if (reloading) {
+ debug('reloading in progress already', { options });
+ return;
+ }
+ reloading = true;
+
+ function tryReload() {
+ debug('tryReload');
+ setTimeout(reload, 1);
+ }
+
+ function forceBrowserReload() {
+ debug('forceBrowserReload');
+ if (window.location.hash || window.location.href.endsWith('#')) {
+ window.location.reload();
+ return;
+ }
+
+ window.location.replace(window.location.href);
+ }
+
+ function reload() {
+ debug('reload');
+ if (!_migrate(tryReload, options)) {
+ return;
+ }
+
+ forceBrowserReload();
+ }
+
+ tryReload();
+}
+
+export const Reload = { _getData, _onMigrate, _migrationData, _migrate, _withFreshProvidersForTest, _reload };
diff --git a/apps/meteor/src/meteor/retry.ts b/apps/meteor/src/meteor/retry.ts
new file mode 100644
index 0000000000000..e470779dc6edc
--- /dev/null
+++ b/apps/meteor/src/meteor/retry.ts
@@ -0,0 +1,51 @@
+import { Random } from './random.ts';
+
+export class Retry {
+ baseTimeout: number;
+
+ exponent: number;
+
+ maxTimeout: number;
+
+ minTimeout: number;
+
+ minCount: number;
+
+ fuzz: number;
+
+ retryTimer: ReturnType | null;
+
+ constructor({ baseTimeout = 1000, exponent = 2.2, maxTimeout = 5 * 60 * 1000, minTimeout = 10, minCount = 2, fuzz = 0.5 } = {}) {
+ this.baseTimeout = baseTimeout;
+ this.exponent = exponent;
+ this.maxTimeout = maxTimeout;
+ this.minTimeout = minTimeout;
+ this.minCount = minCount;
+ this.fuzz = fuzz;
+ this.retryTimer = null;
+ }
+
+ clear() {
+ if (this.retryTimer) {
+ clearTimeout(this.retryTimer);
+ }
+ this.retryTimer = null;
+ }
+
+ _timeout(count: number) {
+ if (count < this.minCount) {
+ return this.minTimeout;
+ }
+
+ return (
+ Math.min(this.maxTimeout, this.baseTimeout * Math.pow(this.exponent, count)) * (Random.fraction() * this.fuzz + (1 - this.fuzz / 2))
+ );
+ }
+
+ retryLater(count: number, fn: () => void) {
+ const timeout = this._timeout(count);
+ if (this.retryTimer) clearTimeout(this.retryTimer);
+ this.retryTimer = setTimeout(fn, timeout);
+ return timeout;
+ }
+}
diff --git a/apps/meteor/src/meteor/service-configuration.ts b/apps/meteor/src/meteor/service-configuration.ts
new file mode 100644
index 0000000000000..39519ed80bb9c
--- /dev/null
+++ b/apps/meteor/src/meteor/service-configuration.ts
@@ -0,0 +1,27 @@
+import { Accounts } from './accounts-base.ts';
+import { Collection } from './mongo.ts';
+
+export class ConfigError extends Error {
+ constructor(serviceName?: string) {
+ super();
+ this.name = 'ServiceConfiguration.ConfigError';
+
+ if (!Accounts.loginServicesConfigured()) {
+ this.message = 'Login service configuration not yet loaded';
+ } else if (serviceName) {
+ this.message = `Service ${serviceName} not configured`;
+ } else {
+ this.message = 'Service not configured';
+ }
+ }
+}
+
+export const configurations = new Collection('meteor_accounts_loginServiceConfiguration', {
+ _preventAutopublish: true,
+ connection: Accounts.connection,
+});
+
+export const ServiceConfiguration = {
+ configurations,
+ ConfigError,
+};
diff --git a/apps/meteor/src/meteor/session.ts b/apps/meteor/src/meteor/session.ts
new file mode 100644
index 0000000000000..bcd7ef117c04b
--- /dev/null
+++ b/apps/meteor/src/meteor/session.ts
@@ -0,0 +1,3 @@
+import { ReactiveDict } from './reactive-dict.ts';
+
+export const Session = new ReactiveDict('session');
diff --git a/apps/meteor/src/meteor/sha.ts b/apps/meteor/src/meteor/sha.ts
new file mode 100644
index 0000000000000..b9b1bbf284767
--- /dev/null
+++ b/apps/meteor/src/meteor/sha.ts
@@ -0,0 +1 @@
+export { SHA256 } from '@rocket.chat/sha256';
diff --git a/apps/meteor/src/meteor/socket-stream-client.ts b/apps/meteor/src/meteor/socket-stream-client.ts
new file mode 100644
index 0000000000000..321d9776149c7
--- /dev/null
+++ b/apps/meteor/src/meteor/socket-stream-client.ts
@@ -0,0 +1,361 @@
+import { Meteor } from './meteor.ts';
+import { Retry } from './retry.ts';
+import { Tracker } from './tracker.ts';
+
+const forcedReconnectError = new Error('forced reconnect');
+
+export type ClientStreamOptions = {
+ bufferedWritesInterval?: number;
+ bufferedWrritesMaxAge?: number;
+ heartbeatInterval: number;
+ heartbeatTimeout: number;
+ retry?: boolean;
+ connectTimeoutMs?: number;
+ ConnectionError?: new (...args: any[]) => any;
+ onDDPVersionNegotiationFailure?: (description: string) => void;
+};
+
+type StreamStatus = {
+ status: 'connected' | 'connecting' | 'failed' | 'offline' | 'waiting';
+ connected: boolean;
+ retryCount: number;
+ retryTime?: number;
+ reason?: unknown;
+};
+
+type StreamEvents = {
+ message: (data: string) => void;
+ reset: () => void;
+ disconnect: () => void;
+};
+
+type EventCallbacks = {
+ [K in keyof StreamEvents]?: Array;
+};
+
+class ClientStream {
+ currentStatus: StreamStatus = { status: 'connecting', connected: false, retryCount: 0 };
+
+ statusListeners = new Tracker.Dependency();
+
+ CONNECT_TIMEOUT: number;
+
+ _retry = new Retry();
+
+ connectionTimer: ReturnType | null = null;
+
+ _forcedToDisconnect = false;
+
+ eventCallbacks: EventCallbacks = Object.create(null);
+
+ options: ClientStreamOptions;
+
+ rawUrl: string;
+
+ socket: WebSocket | null = null;
+
+ heartbeatTimer: ReturnType | null = null;
+
+ lastError: unknown = null;
+
+ HEARTBEAT_TIMEOUT: number;
+
+ constructor(
+ url: string,
+ {
+ connectTimeoutMs = 10000,
+ retry = true,
+ heartbeatInterval = 10000,
+ heartbeatTimeout = 100 * 1000,
+ ...options
+ }: Partial = {},
+ ) {
+ this.options = { retry, connectTimeoutMs, heartbeatInterval, heartbeatTimeout, ...options };
+ this.CONNECT_TIMEOUT = connectTimeoutMs;
+ this.HEARTBEAT_TIMEOUT = heartbeatTimeout;
+
+ this.rawUrl = url;
+ this._onOpen = this._onOpen.bind(this);
+ this._onMessage = this._onMessage.bind(this);
+ this._onError = this._onError.bind(this);
+ this._lostConnection = this._lostConnection.bind(this);
+ this._online = this._online.bind(this);
+
+ window.addEventListener('online', this._online, false);
+ this._launchConnection();
+ }
+
+ on(name: K, callback: StreamEvents[K]) {
+ if (name !== 'message' && name !== 'reset' && name !== 'disconnect') {
+ throw new Error(`unknown event type: ${name}`);
+ }
+ if (!this.eventCallbacks[name]) this.eventCallbacks[name] = [];
+ this.eventCallbacks[name].push(callback);
+ }
+
+ status() {
+ if (this.statusListeners) {
+ this.statusListeners.depend();
+ }
+ return this.currentStatus;
+ }
+
+ reconnect(options?: { url?: string; _force?: boolean }) {
+ if (options?.url) {
+ this._changeUrl(options.url);
+ }
+
+ if (this.currentStatus.connected) {
+ if (options?._force || options?.url) {
+ this._lostConnection(forcedReconnectError);
+ }
+ return;
+ }
+
+ if (this.currentStatus.status === 'connecting') {
+ this._lostConnection();
+ }
+
+ this._retry.clear();
+ this.currentStatus.retryCount -= 1;
+ this._retryNow();
+ }
+
+ disconnect(options: { _permanent?: boolean; _error?: unknown } = {}) {
+ if (this._forcedToDisconnect) return;
+
+ if (options._permanent) {
+ this._forcedToDisconnect = true;
+ }
+
+ this._cleanup();
+ this._retry.clear();
+
+ this.currentStatus = {
+ status: options._permanent ? 'failed' : 'offline',
+ connected: false,
+ retryCount: 0,
+ };
+
+ if (options._permanent && options._error) {
+ this.currentStatus.reason = options._error;
+ }
+
+ this._statusChanged();
+ }
+
+ send(data: string | ArrayBufferLike | Blob | ArrayBufferView) {
+ if (this.currentStatus.connected) {
+ this.socket?.send(data);
+ }
+ }
+
+ protected forEachCallback(name: K, cb: (callback: StreamEvents[K]) => void) {
+ if (!this.eventCallbacks[name]?.length) {
+ return;
+ }
+ for (const callback of this.eventCallbacks[name]) {
+ cb(callback);
+ }
+ }
+
+ private _statusChanged() {
+ if (this.statusListeners) {
+ this.statusListeners.changed();
+ }
+ }
+
+ private _changeUrl(url: string) {
+ this.rawUrl = url;
+ }
+
+ private _launchConnection() {
+ this._cleanup();
+
+ try {
+ this.socket = new WebSocket(toWebsocketUrl(this.rawUrl));
+
+ this.socket.addEventListener('open', this._onOpen);
+ this.socket.addEventListener('message', this._onMessage);
+ this.socket.addEventListener('close', this._lostConnection);
+ this.socket.addEventListener('error', this._onError);
+
+ if (this.connectionTimer) clearTimeout(this.connectionTimer);
+
+ this.connectionTimer = setTimeout(() => {
+ this._lostConnection(new Error('DDP connection timed out'));
+ }, this.CONNECT_TIMEOUT);
+ } catch (e) {
+ this._onError(e);
+ }
+ }
+
+ private _connected() {
+ if (this.connectionTimer) {
+ clearTimeout(this.connectionTimer);
+ this.connectionTimer = null;
+ }
+
+ if (this.currentStatus.connected) {
+ return;
+ }
+
+ this.currentStatus.status = 'connected';
+ this.currentStatus.connected = true;
+ this.currentStatus.retryCount = 0;
+ this._statusChanged();
+
+ this.forEachCallback('reset', (callback: () => void) => {
+ callback();
+ });
+ }
+
+ private _cleanup(maybeError?: unknown) {
+ this._clearConnectionAndHeartbeatTimers();
+
+ if (this.socket) {
+ this.socket.removeEventListener('open', this._onOpen);
+ this.socket.removeEventListener('message', this._onMessage);
+ this.socket.removeEventListener('close', this._lostConnection);
+ this.socket.removeEventListener('error', this._onError);
+ this.socket.close();
+ this.socket = null;
+ }
+
+ this.forEachCallback('disconnect', (callback: (arg0: any) => void) => {
+ callback(maybeError);
+ });
+ }
+
+ public _lostConnection(maybeError?: unknown) {
+ const errorToPass = maybeError instanceof Event ? undefined : maybeError;
+
+ this._cleanup(errorToPass);
+ this._retryLater(errorToPass);
+ }
+
+ private _online() {
+ if (this.currentStatus.status !== 'offline') this.reconnect();
+ }
+
+ private _retryLater(maybeError?: unknown) {
+ let timeout = 0;
+
+ if (this.options.retry || maybeError === forcedReconnectError) {
+ timeout = this._retry.retryLater(this.currentStatus.retryCount, this._retryNow.bind(this));
+ this.currentStatus.status = 'waiting';
+ this.currentStatus.retryTime = new Date().getTime() + timeout;
+ } else {
+ this.currentStatus.status = 'failed';
+ delete this.currentStatus.retryTime;
+ }
+
+ this.currentStatus.connected = false;
+ this._statusChanged();
+ }
+
+ private _retryNow() {
+ if (this._forcedToDisconnect) return;
+
+ this.currentStatus.retryCount += 1;
+ this.currentStatus.status = 'connecting';
+ this.currentStatus.connected = false;
+ delete this.currentStatus.retryTime;
+ this._statusChanged();
+ this._launchConnection();
+ }
+
+ private _clearConnectionAndHeartbeatTimers() {
+ if (this.connectionTimer) {
+ clearTimeout(this.connectionTimer);
+ this.connectionTimer = null;
+ }
+ if (this.heartbeatTimer) {
+ clearTimeout(this.heartbeatTimer);
+ this.heartbeatTimer = null;
+ }
+ }
+
+ private _heartbeat_timeout() {
+ console.log('Connection timeout. No sockjs heartbeat received.');
+ this._lostConnection(new Error('Heartbeat timed out'));
+ }
+
+ private _heartbeat_received() {
+ if (this._forcedToDisconnect) return;
+ if (this.heartbeatTimer) clearTimeout(this.heartbeatTimer);
+
+ this.heartbeatTimer = setTimeout(this._heartbeat_timeout.bind(this), this.HEARTBEAT_TIMEOUT);
+ }
+
+ private _onOpen() {
+ this.lastError = null;
+ this._connected();
+ }
+
+ private _onMessage(event: MessageEvent) {
+ this.lastError = null;
+ this._heartbeat_received();
+
+ if (this.currentStatus.connected) {
+ this.forEachCallback('message', (callback) => {
+ callback(event.data);
+ });
+ }
+ }
+
+ private _onError(error: unknown) {
+ const { lastError } = this;
+ this.lastError = error;
+ if (lastError) return;
+ console.error('stream error', error, new Date().toDateString());
+ }
+}
+
+function translateUrl(url: string, newSchemeBase: string, subPath: string) {
+ if (!newSchemeBase) {
+ newSchemeBase = 'http';
+ }
+
+ if (subPath !== 'sockjs' && url.startsWith('/')) {
+ url = Meteor.absoluteUrl(url.substr(1));
+ }
+
+ const ddpUrlMatch = url.match(/^ddp(i?)\+sockjs:\/\//);
+ const httpUrlMatch = url.match(/^http(s?):\/\//);
+ let newScheme;
+
+ if (ddpUrlMatch) {
+ const urlAfterDDP = url.substr(ddpUrlMatch[0].length);
+ newScheme = ddpUrlMatch[1] === 'i' ? newSchemeBase : `${newSchemeBase}s`;
+
+ const slashPos = urlAfterDDP.indexOf('/');
+ let host = slashPos === -1 ? urlAfterDDP : urlAfterDDP.substr(0, slashPos);
+ const rest = slashPos === -1 ? '' : urlAfterDDP.substr(slashPos);
+
+ host = host.replace(/\*/g, () => `${Math.floor(Math.random() * 10)}`);
+
+ return `${newScheme}://${host}${rest}`;
+ }
+
+ if (httpUrlMatch) {
+ newScheme = !httpUrlMatch[1] ? newSchemeBase : `${newSchemeBase}s`;
+ const urlAfterHttp = url.substr(httpUrlMatch[0].length);
+ url = `${newScheme}://${urlAfterHttp}`;
+ }
+
+ if (url.indexOf('://') === -1 && !url.startsWith('/')) {
+ url = `${newSchemeBase}://${url}`;
+ }
+
+ url = Meteor._relativeToSiteRootUrl(url);
+
+ if (url.endsWith('/')) return url + subPath;
+ return `${url}/${subPath}`;
+}
+
+function toWebsocketUrl(url: string) {
+ return translateUrl(url, 'ws', 'websocket');
+}
+
+export { ClientStream };
diff --git a/apps/meteor/src/meteor/tracker-core.ts b/apps/meteor/src/meteor/tracker-core.ts
new file mode 100644
index 0000000000000..d4300756f0d5e
--- /dev/null
+++ b/apps/meteor/src/meteor/tracker-core.ts
@@ -0,0 +1,341 @@
+/* eslint-disable no-unreachable-loop */
+/* eslint-disable guard-for-in */
+/* eslint-disable no-unsafe-finally */
+/* eslint-disable @typescript-eslint/no-non-null-assertion */
+/* eslint-disable @typescript-eslint/prefer-optional-chain */
+
+export type ComputationFunction = (c: Computation) => unknown;
+export type OnErrorFunction = (error: unknown) => void;
+export type FlushOptions = { finishSynchronously?: boolean; _throwFirstError?: boolean; throwFirstError?: boolean };
+export let active = false;
+export let currentComputation: Computation | null = null;
+
+let nextId = 1;
+const pendingComputations: Computation[] = [];
+const afterFlushCallbacks: Array<() => void> = [];
+
+let willFlush = false;
+let inFlush = false;
+let inCompute = false;
+let throwFirstError = false;
+let constructingComputation = false;
+
+function requireFlush(): void {
+ if (!willFlush) {
+ queueMicrotask(() => flushInternal());
+ willFlush = true;
+ }
+}
+
+function _throwOrLog(from: string, e: any): void {
+ if (throwFirstError) {
+ throw e;
+ } else {
+ const printArgs: string[] = [`Exception from Tracker ${from} function:`];
+ if (e && e.stack && e.message && e.name) {
+ const idx = e.stack.indexOf(e.message);
+ if (idx < 0 || idx > e.name.length + 2) {
+ printArgs.push(`${e.name}: ${e.message}`);
+ }
+ }
+ if (e && e.stack) {
+ printArgs.push(e.stack);
+ }
+ for (let i = 0; i < printArgs.length; i++) {
+ console.error(printArgs[i]);
+ }
+ }
+}
+
+export class Computation {
+ public stopped: boolean;
+
+ public invalidated: boolean;
+
+ public firstRun: boolean;
+
+ public _id: number;
+
+ public _onInvalidateCallbacks: Array<(c: Computation) => void>;
+
+ public _onStopCallbacks: Array<(c: Computation) => void>;
+
+ public _parent: Computation | null;
+
+ public _func: ComputationFunction;
+
+ public _onError?: OnErrorFunction;
+
+ public _recomputing: boolean;
+
+ public firstRunPromise?: Promise;
+
+ constructor(f: ComputationFunction, parent: Computation | null, onError?: OnErrorFunction) {
+ if (!constructingComputation) {
+ throw new Error('Tracker.Computation constructor is private; use Tracker.autorun');
+ }
+ constructingComputation = false;
+
+ this.stopped = false;
+ this.invalidated = false;
+ this.firstRun = true;
+ this._id = nextId++;
+ this._onInvalidateCallbacks = [];
+ this._onStopCallbacks = [];
+ this._parent = parent;
+ this._func = f;
+ this._onError = onError;
+ this._recomputing = false;
+ this.firstRunPromise = undefined;
+
+ let errored = true;
+ try {
+ this._compute();
+ errored = false;
+ } finally {
+ this.firstRun = false;
+ if (errored) this.stop();
+ }
+ }
+
+ then(onResolved?: (value: unknown) => unknown, onRejected?: (reason: any) => unknown): Promise {
+ return this.firstRunPromise ? this.firstRunPromise.then(onResolved, onRejected) : Promise.resolve().then(onResolved, onRejected);
+ }
+
+ catch(onRejected?: (reason: any) => unknown): Promise {
+ return this.firstRunPromise ? this.firstRunPromise.catch(onRejected) : Promise.resolve().catch(onRejected);
+ }
+
+ onInvalidate(f: (c: Computation) => void): void {
+ if (typeof f !== 'function') throw new Error('onInvalidate requires a function');
+
+ if (this.invalidated) {
+ nonreactive(() => f(this));
+ } else {
+ this._onInvalidateCallbacks.push(f);
+ }
+ }
+
+ onStop(f: (c: Computation) => void): void {
+ if (typeof f !== 'function') throw new Error('onStop requires a function');
+
+ if (this.stopped) {
+ nonreactive(() => f(this));
+ } else {
+ this._onStopCallbacks.push(f);
+ }
+ }
+
+ invalidate(): void {
+ if (!this.invalidated) {
+ if (!this._recomputing && !this.stopped) {
+ requireFlush();
+ pendingComputations.push(this);
+ }
+
+ this.invalidated = true;
+
+ for (let i = 0; i < this._onInvalidateCallbacks.length; i++) {
+ const f = this._onInvalidateCallbacks[i];
+ nonreactive(() => f(this));
+ }
+ this._onInvalidateCallbacks = [];
+ }
+ }
+
+ stop(): void {
+ if (!this.stopped) {
+ this.stopped = true;
+ this.invalidate();
+ for (let i = 0; i < this._onStopCallbacks.length; i++) {
+ const f = this._onStopCallbacks[i];
+ nonreactive(() => f(this));
+ }
+ this._onStopCallbacks = [];
+ }
+ }
+
+ _compute(): void {
+ this.invalidated = false;
+ const previousInCompute = inCompute;
+ inCompute = true;
+
+ try {
+ const promiseResult = withComputation(this, () => this._func(this));
+ if (this.firstRun) {
+ this.firstRunPromise = Promise.resolve(promiseResult);
+ }
+ } finally {
+ inCompute = previousInCompute;
+ }
+ }
+
+ _needsRecompute(): boolean {
+ return this.invalidated && !this.stopped;
+ }
+
+ _recompute(): void {
+ this._recomputing = true;
+ try {
+ if (this._needsRecompute()) {
+ try {
+ this._compute();
+ } catch (e) {
+ if (this._onError) {
+ this._onError(e);
+ } else {
+ _throwOrLog('recompute', e);
+ }
+ }
+ }
+ } finally {
+ this._recomputing = false;
+ }
+ }
+
+ flush(): void {
+ if (this._recomputing) return;
+ this._recompute();
+ }
+
+ run(): void {
+ this.invalidate();
+ this.flush();
+ }
+}
+
+export class Dependency {
+ public _dependentsById: Record;
+
+ constructor() {
+ this._dependentsById = Object.create(null);
+ }
+
+ depend(computation?: Computation): boolean {
+ if (!computation) {
+ if (!active || !currentComputation) return false;
+ computation = currentComputation;
+ }
+ const id = computation._id;
+ if (!(id in this._dependentsById)) {
+ this._dependentsById[id] = computation;
+ computation.onInvalidate(() => {
+ delete this._dependentsById[id];
+ });
+ return true;
+ }
+ return false;
+ }
+
+ changed(): void {
+ for (const id in this._dependentsById) {
+ this._dependentsById[id].invalidate();
+ }
+ }
+
+ hasDependents(): boolean {
+ for (const _id in this._dependentsById) {
+ return true;
+ }
+ return false;
+ }
+}
+
+function flushInternal(options?: FlushOptions): void {
+ if (inFlush) throw new Error("Can't call Tracker.flush while flushing");
+ if (inCompute) throw new Error("Can't flush inside Tracker.autorun");
+
+ options = options || {};
+ inFlush = true;
+ willFlush = true;
+ throwFirstError = !!(options.throwFirstError || options._throwFirstError);
+
+ let recomputedCount = 0;
+ let finishedTry = false;
+
+ try {
+ while (pendingComputations.length || afterFlushCallbacks.length) {
+ while (pendingComputations.length) {
+ const comp = pendingComputations.shift()!;
+ comp._recompute();
+ if (comp._needsRecompute()) {
+ pendingComputations.unshift(comp);
+ }
+
+ if (!options.finishSynchronously && ++recomputedCount > 1000) {
+ finishedTry = true;
+ return;
+ }
+ }
+
+ if (afterFlushCallbacks.length) {
+ const func = afterFlushCallbacks.shift()!;
+ try {
+ func();
+ } catch (e) {
+ _throwOrLog('afterFlush', e);
+ }
+ }
+ }
+ finishedTry = true;
+ } finally {
+ if (!finishedTry) {
+ inFlush = false;
+ flushInternal({ finishSynchronously: options.finishSynchronously, throwFirstError: false });
+ }
+ willFlush = false;
+ inFlush = false;
+ if (pendingComputations.length || afterFlushCallbacks.length) {
+ if (options.finishSynchronously) {
+ throw new Error('still have more to do?');
+ }
+ setTimeout(requireFlush, 10);
+ }
+ }
+}
+
+export function flush(options?: FlushOptions): void {
+ flushInternal({ finishSynchronously: true, throwFirstError: options?._throwFirstError });
+}
+
+export function autorun(f: ComputationFunction, options: { onError?: OnErrorFunction } = {}): Computation {
+ if (typeof f !== 'function') throw new Error('Tracker.autorun requires a function argument');
+
+ constructingComputation = true;
+ const c = new Computation(f, currentComputation, options.onError);
+
+ if (active && currentComputation) {
+ onInvalidate(() => c.stop());
+ }
+
+ return c;
+}
+
+export function nonreactive(f: () => T): T {
+ return withComputation(null, f);
+}
+
+export function withComputation(computation: Computation | null, f: () => T): T {
+ const previousComputation = currentComputation;
+ currentComputation = computation;
+ active = !!computation;
+
+ try {
+ return f();
+ } finally {
+ currentComputation = previousComputation;
+ active = !!previousComputation;
+ }
+}
+
+export function onInvalidate(f: (c: Computation) => void): void {
+ if (!active || !currentComputation) {
+ throw new Error('Tracker.onInvalidate requires a currentComputation');
+ }
+ currentComputation.onInvalidate(f);
+}
+
+export function afterFlush(f: () => void): void {
+ afterFlushCallbacks.push(f);
+ requireFlush();
+}
diff --git a/apps/meteor/src/meteor/tracker.ts b/apps/meteor/src/meteor/tracker.ts
new file mode 100644
index 0000000000000..d8ac297d41d89
--- /dev/null
+++ b/apps/meteor/src/meteor/tracker.ts
@@ -0,0 +1 @@
+export * as Tracker from './tracker-core.ts';
\ No newline at end of file
diff --git a/apps/meteor/src/meteor/twitter-oauth.ts b/apps/meteor/src/meteor/twitter-oauth.ts
new file mode 100644
index 0000000000000..6b4a6e60f5af5
--- /dev/null
+++ b/apps/meteor/src/meteor/twitter-oauth.ts
@@ -0,0 +1,52 @@
+import { Meteor } from './meteor.ts';
+import { OAuth, type OAuthConfiguration } from './oauth.ts';
+import { Random } from './random.ts';
+import { ServiceConfiguration } from './service-configuration.ts';
+import { hasOwn } from './utils/hasOwn.ts';
+
+export const validParamsAuthenticate = ['force_login', 'screen_name'];
+
+export const requestCredential = (
+ options: OAuthConfiguration,
+ credentialRequestCompleteCallback?: (token?: string | Error) => void,
+): void => {
+ if (!credentialRequestCompleteCallback && typeof options === 'function') {
+ credentialRequestCompleteCallback = options;
+ options = {};
+ }
+
+ const config = ServiceConfiguration.configurations.findOne({ service: 'twitter' });
+
+ if (!config) {
+ credentialRequestCompleteCallback?.(new ServiceConfiguration.ConfigError());
+
+ return;
+ }
+
+ const credentialToken = Random.secret();
+ const loginStyle = OAuth._loginStyle('twitter', config, options);
+ let loginPath = `_oauth/twitter/?requestTokenAndRedirect=true&state=${OAuth._stateParam(loginStyle, credentialToken, options?.redirectUrl)}`;
+
+ if (options) {
+ validParamsAuthenticate.forEach((param) => {
+ if (hasOwn(options, param)) {
+ loginPath += `&${param}=${encodeURIComponent(options[param])}`;
+ }
+ });
+ }
+
+ const loginUrl = Meteor.absoluteUrl(loginPath);
+
+ OAuth.launchLogin({
+ loginService: 'twitter',
+ loginStyle,
+ loginUrl,
+ credentialRequestCompleteCallback,
+ credentialToken,
+ });
+};
+
+export const Twitter = {
+ validParamsAuthenticate,
+ requestCredential,
+};
diff --git a/apps/meteor/src/meteor/url.ts b/apps/meteor/src/meteor/url.ts
new file mode 100644
index 0000000000000..f8275c9daef7b
--- /dev/null
+++ b/apps/meteor/src/meteor/url.ts
@@ -0,0 +1,43 @@
+import { hasOwn } from './utils/hasOwn.ts';
+
+export const { URL } = globalThis;
+
+const encodeString = (str: string | number | boolean): string => {
+ return encodeURIComponent(str).replace(/\*/g, '%2A');
+};
+
+const _encodeParams = (params: any, prefix?: string): string => {
+ const str: string[] = [];
+ const isParamsArray = Array.isArray(params);
+
+ for (const p in params) {
+ if (hasOwn(params, p)) {
+ const v = params[p];
+ const k = prefix ? `${prefix}[${isParamsArray ? '' : p}]` : p;
+
+ if (v !== null && typeof v === 'object') {
+ str.push(_encodeParams(v, k));
+ } else {
+ const encodedKey = encodeString(k).replace(/%5B/g, '[').replace(/%5D/g, ']');
+
+ str.push(`${encodedKey}=${encodeString(v)}`);
+ }
+ }
+ }
+ return str.join('&').replace(/%20/g, '+');
+};
+
+export const _constructUrl = (url: string, query: string | null, params?: Record): string => {
+ const [baseUrl, existingQueryString] = url.split('?', 2);
+
+ let finalQuery = query !== null ? query : existingQueryString || '';
+
+ if (params) {
+ const encodedParams = _encodeParams(params);
+ if (encodedParams) {
+ finalQuery = finalQuery ? `${finalQuery}&${encodedParams}` : encodedParams;
+ }
+ }
+
+ return finalQuery ? `${baseUrl}?${finalQuery}` : baseUrl;
+};
diff --git a/apps/meteor/src/meteor/utils/hasOwn.ts b/apps/meteor/src/meteor/utils/hasOwn.ts
new file mode 100644
index 0000000000000..75a8a07afdcb1
--- /dev/null
+++ b/apps/meteor/src/meteor/utils/hasOwn.ts
@@ -0,0 +1,6 @@
+type HasOwn, TKey extends PropertyKey> = TObject & { [K in TKey]-?: TObject[K] };
+
+export const hasOwn = , TKey extends PropertyKey>(
+ object: TObject,
+ property: TKey,
+): object is HasOwn => Object.hasOwn(object, property);
diff --git a/apps/meteor/src/meteor/utils/isEmpty.ts b/apps/meteor/src/meteor/utils/isEmpty.ts
new file mode 100644
index 0000000000000..ba11ddf4ce8ef
--- /dev/null
+++ b/apps/meteor/src/meteor/utils/isEmpty.ts
@@ -0,0 +1,19 @@
+import { hasOwn } from './hasOwn.ts';
+
+export function isEmpty(obj: T): boolean {
+ if (obj == null) {
+ return true;
+ }
+
+ if (Array.isArray(obj) || typeof obj === 'string') {
+ return obj.length === 0;
+ }
+
+ for (const key in obj) {
+ if (hasOwn(obj, key)) {
+ return false;
+ }
+ }
+
+ return true;
+}
diff --git a/apps/meteor/src/meteor/utils/isEmptyObject.ts b/apps/meteor/src/meteor/utils/isEmptyObject.ts
new file mode 100644
index 0000000000000..f2b8273f94d61
--- /dev/null
+++ b/apps/meteor/src/meteor/utils/isEmptyObject.ts
@@ -0,0 +1,11 @@
+import { isKey } from './isKey';
+
+export const isEmptyObject = (obj: unknown): obj is Record => {
+ const object = Object(obj);
+ for (const key in object) {
+ if (isKey(object, key)) {
+ return false;
+ }
+ }
+ return true;
+};
diff --git a/apps/meteor/src/meteor/utils/isFunction.ts b/apps/meteor/src/meteor/utils/isFunction.ts
new file mode 100644
index 0000000000000..95c391458fdb8
--- /dev/null
+++ b/apps/meteor/src/meteor/utils/isFunction.ts
@@ -0,0 +1,5 @@
+export type UnknownFunction = (...args: unknown[]) => unknown;
+
+export const isFunction = (value: unknown): value is UnknownFunction => {
+ return typeof value === 'function';
+};
diff --git a/apps/meteor/src/meteor/utils/isKey.ts b/apps/meteor/src/meteor/utils/isKey.ts
new file mode 100644
index 0000000000000..2f1de5834038e
--- /dev/null
+++ b/apps/meteor/src/meteor/utils/isKey.ts
@@ -0,0 +1 @@
+export const isKey = (object: T, key: PropertyKey): key is keyof T => key in object;
diff --git a/apps/meteor/src/meteor/utils/isObject.ts b/apps/meteor/src/meteor/utils/isObject.ts
new file mode 100644
index 0000000000000..8e5214f026c50
--- /dev/null
+++ b/apps/meteor/src/meteor/utils/isObject.ts
@@ -0,0 +1 @@
+export const isObject = (value: unknown): value is Record => typeof value === 'object' && value !== null;
diff --git a/apps/meteor/src/meteor/utils/keys.ts b/apps/meteor/src/meteor/utils/keys.ts
new file mode 100644
index 0000000000000..fe0bcd98e8add
--- /dev/null
+++ b/apps/meteor/src/meteor/utils/keys.ts
@@ -0,0 +1 @@
+export const keys = (value: T): Extract[] => Object.keys(Object(value)) as Extract[];
diff --git a/apps/meteor/src/meteor/utils/last.ts b/apps/meteor/src/meteor/utils/last.ts
new file mode 100644
index 0000000000000..dcf7683901846
--- /dev/null
+++ b/apps/meteor/src/meteor/utils/last.ts
@@ -0,0 +1,7 @@
+export function last(array: ArrayLike): T | undefined {
+ if (array.length === 0) {
+ return undefined;
+ }
+
+ return array[array.length - 1];
+}
diff --git a/apps/meteor/src/meteor/utils/noop.ts b/apps/meteor/src/meteor/utils/noop.ts
new file mode 100644
index 0000000000000..b77f6769327e1
--- /dev/null
+++ b/apps/meteor/src/meteor/utils/noop.ts
@@ -0,0 +1,3 @@
+export const noop: VoidFunction = () => {
+ // do nothing
+};
diff --git a/apps/meteor/src/setup.ts b/apps/meteor/src/setup.ts
new file mode 100644
index 0000000000000..4e257fcb9c6c3
--- /dev/null
+++ b/apps/meteor/src/setup.ts
@@ -0,0 +1,36 @@
+// eslint-disable-next-line spaced-comment
+///
+
+import { Accounts } from './meteor/accounts-base.ts';
+import { registerService, serviceNames, unregisterService } from './meteor/accounts-oauth.ts';
+import { loginWithPassword, _hashPassword } from './meteor/accounts-password.ts';
+import { Meteor } from './meteor/meteor.ts';
+import { e2e } from '../client/lib/e2ee/rocketchat.e2e.ts';
+
+import './meteor/service-configuration.ts';
+
+import '../app/theme/client/main.css';
+
+/**
+ * Used in E2E tests
+ */
+const require = (text: string) => {
+ switch (text) {
+ case '/client/lib/e2ee/rocketchat.e2e.ts':
+ return { e2e };
+ case 'meteor/accounts-base':
+ return { Accounts };
+ default:
+ throw new Error(`Module not found: ${text}`);
+ }
+};
+
+Object.assign(globalThis, { require });
+
+Object.assign(Accounts, { _hashPassword }, { oauth: { registerService, serviceNames, unregisterService } });
+Object.assign(Meteor, {
+ loginWithPassword,
+ loggingIn: Accounts.loggingIn.bind(Accounts),
+ logout: Accounts.logout.bind(Accounts),
+ loginWithToken: Accounts.loginWithToken.bind(Accounts),
+});
diff --git a/apps/meteor/src/typia/index.ts b/apps/meteor/src/typia/index.ts
new file mode 100644
index 0000000000000..e8faff8b5c7e7
--- /dev/null
+++ b/apps/meteor/src/typia/index.ts
@@ -0,0 +1,7 @@
+export default {
+ json: {
+ schemas: () => {
+ // typia is only used in the backend
+ },
+ },
+};
diff --git a/apps/meteor/tests/e2e/sidebar-menu.spec.ts b/apps/meteor/tests/e2e/sidebar-menu.spec.ts
index 36b34d8705683..52277e7b6c971 100644
--- a/apps/meteor/tests/e2e/sidebar-menu.spec.ts
+++ b/apps/meteor/tests/e2e/sidebar-menu.spec.ts
@@ -3,7 +3,7 @@ import { test, expect } from './utils/test';
test.use({ storageState: Users.admin.state });
-test.describe('sidebar-menu', () => {
+test.describe.skip('sidebar-menu', () => {
test('expect popover to stay open after home loads', async ({ page }) => {
await page.route('**/__meteor__/dynamic-import/fetch', async (route, request) => {
if (request.postData()?.includes('HomePage.tsx')) {
diff --git a/apps/meteor/tests/e2e/utils/test.ts b/apps/meteor/tests/e2e/utils/test.ts
index 98f462fbdc431..6f85306288ad3 100644
--- a/apps/meteor/tests/e2e/utils/test.ts
+++ b/apps/meteor/tests/e2e/utils/test.ts
@@ -46,23 +46,43 @@ export const test = baseTest.extend({
return;
}
- await context.addInitScript(() =>
- window.addEventListener('beforeunload', () => window.collectIstanbulCoverage(JSON.stringify(window.__coverage__))),
- );
+ // Add coverage collection on page unload
+ await context.addInitScript(() => {
+ window.addEventListener('beforeunload', () => {
+ if (window.__coverage__) {
+ window.collectIstanbulCoverage(JSON.stringify(window.__coverage__));
+ }
+ });
+ });
await fs.promises.mkdir(PATH_NYC_OUTPUT, { recursive: true });
await context.exposeFunction('collectIstanbulCoverage', (coverageJSON: string) => {
- if (coverageJSON) {
- fs.writeFileSync(path.join(PATH_NYC_OUTPUT, `playwright_coverage_${randomUUID()}.json`), coverageJSON);
+ if (coverageJSON && coverageJSON !== 'undefined') {
+ try {
+ const coverage = JSON.parse(coverageJSON);
+ if (Object.keys(coverage).length > 0) {
+ fs.writeFileSync(path.join(PATH_NYC_OUTPUT, `playwright_coverage_${randomUUID()}.json`), coverageJSON);
+ }
+ } catch (error) {
+ console.warn('Failed to parse coverage data:', error);
+ }
}
});
await use(context);
+ // Collect coverage from all pages before closing
await Promise.all(
context.pages().map(async (page) => {
- await page.evaluate(() => window.collectIstanbulCoverage(JSON.stringify(window.__coverage__)));
+ try {
+ const coverage = await page.evaluate(() => window.__coverage__);
+ if (coverage && Object.keys(coverage).length > 0) {
+ await page.evaluate(() => window.collectIstanbulCoverage(JSON.stringify(window.__coverage__)));
+ }
+ } catch (error) {
+ // Page might be closed or navigated away, ignore
+ }
await page.close();
}),
);
diff --git a/apps/meteor/tests/end-to-end/api/cors.ts b/apps/meteor/tests/end-to-end/api/cors.ts
index 089c7c7806636..1c77eb2e82e53 100644
--- a/apps/meteor/tests/end-to-end/api/cors.ts
+++ b/apps/meteor/tests/end-to-end/api/cors.ts
@@ -16,7 +16,7 @@ const getHash = () =>
return hash;
});
-describe('[CORS]', () => {
+describe.skip('[CORS]', () => {
before((done) => getCredentials(done));
after(async () => {
await updateSetting('Site_Url', 'http://localhost:3000');
diff --git a/apps/meteor/tsconfig.json b/apps/meteor/tsconfig.json
index fe2531c58832d..542bfef8d4899 100644
--- a/apps/meteor/tsconfig.json
+++ b/apps/meteor/tsconfig.json
@@ -47,4 +47,4 @@
"files": false,
"swc": true,
},
-}
+}
\ No newline at end of file
diff --git a/apps/meteor/vite.config.mts b/apps/meteor/vite.config.mts
new file mode 100644
index 0000000000000..f93c04ea8531a
--- /dev/null
+++ b/apps/meteor/vite.config.mts
@@ -0,0 +1,243 @@
+import path from 'node:path';
+
+import react from '@vitejs/plugin-react';
+import { defineConfig, esmExternalRequirePlugin, type BuildEnvironmentOptions } from 'vite';
+import istanbul from 'vite-plugin-istanbul';
+
+import info from './vite/plugins/info';
+import meteor from './vite/plugins/meteor';
+import nginx from './vite/plugins/nginx';
+
+process.env.TEST_MODE ??= process.env.VITE_TEST_MODE;
+process.env.E2E_COVERAGE ??= process.env.VITE_E2E_COVERAGE;
+
+const isTestMode = process.env.TEST_MODE === 'true';
+const isCoverageMode = process.env.E2E_COVERAGE === 'true';
+
+if (isTestMode) {
+ console.warn('Running in TEST_MODE: source maps enabled');
+}
+
+if (isCoverageMode) {
+ console.warn('Running in E2E_COVERAGE mode: code instrumentation enabled');
+}
+
+const build = {
+ emptyOutDir: true,
+ assetsDir: 'static',
+ manifest: true,
+ target: 'esnext',
+ sourcemap: isTestMode || isCoverageMode ? 'inline' : false,
+ rolldownOptions: {
+ optimization: {
+ inlineConst: true,
+ pifeForModuleWrappers: true,
+ },
+ context: 'globalThis',
+ checks: {
+ circularDependency: true,
+ pluginTimings: false, // Suppress vite:istanbul timing warnings
+ },
+ output: {
+ format: 'esm',
+ minify: true,
+ cleanDir: true,
+ externalLiveBindings: true,
+ generatedCode: {
+ preset: 'es2015',
+ },
+ },
+ },
+} as const satisfies BuildEnvironmentOptions;
+
+export default defineConfig(async () => {
+ const ROOT_URL = await getDefaultHostUrl();
+
+ console.log(`Using ROOT_URL: ${ROOT_URL.toString()}`);
+
+ return defineConfig({
+ appType: 'spa',
+ plugins: [
+ info(),
+ esmExternalRequirePlugin({
+ external: ['react', 'react-dom'],
+ }),
+ meteor({
+ rootUrl: ROOT_URL.toString(),
+ }),
+ react(),
+ nginx(),
+ isCoverageMode &&
+ istanbul({
+ include: 'client/**/*',
+ exclude: [
+ 'node_modules/**',
+ 'tests/**',
+ '**/*.spec.ts',
+ '**/*.test.ts',
+ '**/*.spec.js',
+ '**/*.test.js',
+ '**/*.stories.tsx',
+ '**/*.stories.ts',
+ '**/mocks/**',
+ '**/fixtures/**',
+ '**/__mocks__/**',
+ '**/*.d.ts',
+ '**/vite/**',
+ 'client/lib/chatra/**', // Third-party integrations
+ 'client/lib/2fa/**', // Vendor code
+ ],
+ extension: ['.ts', '.tsx', '.js', '.jsx'],
+ requireEnv: false,
+ forceBuildInstrument: true,
+ cypress: false,
+ checkProd: false,
+ }),
+ ].filter(Boolean),
+ build,
+ define: {
+ 'process.env.TEST_MODE': JSON.stringify(process.env.TEST_MODE),
+ 'process.platform': JSON.stringify(process.platform),
+ },
+ resolve: {
+ dedupe: [
+ '@rocket.chat/core-typings',
+ '@rocket.chat/emitter',
+ '@rocket.chat/fuselage-forms',
+ '@rocket.chat/fuselage-tokens',
+ '@rocket.chat/fuselage',
+ '@rocket.chat/ui-client',
+ '@rocket.chat/ui-contexts',
+ '@tanstack/react-query',
+ 'react-aria',
+ 'react-dom',
+ 'react-hook-form',
+ 'react-i18next',
+ 'react-stately',
+ 'react',
+ ],
+ alias: {
+ // Meteor packages
+ 'meteor': path.resolve('./src/meteor'),
+ 'typia': path.resolve('./src/typia'),
+ // Third-party packages
+ 'react-aria': path.resolve('./node_modules/react-aria'),
+ 'swiper': path.resolve('./node_modules/swiper'),
+ // Rocket.Chat Packages
+ '@rocket.chat/api-client': path.resolve('../../packages/api-client/src/index.ts'),
+ '@rocket.chat/apps-engine': path.resolve('../../packages/apps-engine/src'),
+ '@rocket.chat/base64': path.resolve('../../packages/base64/src/base64.ts'),
+ '@rocket.chat/core-typings': path.resolve('../../packages/core-typings/src/index.ts'),
+ '@rocket.chat/favicon': path.resolve('../../packages/favicon/src/index.ts'),
+ '@rocket.chat/fuselage-ui-kit': path.resolve('../../packages/fuselage-ui-kit/src/index.ts'),
+ '@rocket.chat/gazzodown': path.resolve('../../packages/gazzodown/src/index.ts'),
+ '@rocket.chat/message-types': path.resolve('../../packages/message-types/src/index.ts'),
+ '@rocket.chat/password-policies': path.resolve('../../packages/password-policies/src/index.ts'),
+ '@rocket.chat/random': path.resolve('../../packages/random/src/main.client.ts'),
+ '@rocket.chat/sha256': path.resolve('../../packages/sha256/src/sha256.ts'),
+ '@rocket.chat/tools': path.resolve('../../packages/tools/src/index.ts'),
+ '@rocket.chat/ui-avatar': path.resolve('../../packages/ui-avatar/src/index.ts'),
+ '@rocket.chat/ui-client': path.resolve('../../packages/ui-client/src/index.ts'),
+ '@rocket.chat/ui-composer': path.resolve('../../packages/ui-composer/src/index.ts'),
+ '@rocket.chat/ui-contexts': path.resolve('../../packages/ui-contexts/src/index.ts'),
+ '@rocket.chat/ui-video-conf': path.resolve('../../packages/ui-video-conf/src/index.ts'),
+ '@rocket.chat/ui-voip': path.resolve('../../packages/ui-voip/src/index.ts'),
+ '@rocket.chat/web-ui-registration': path.resolve('../../packages/web-ui-registration/src/index.ts'),
+ '@rocket.chat/mongo-adapter': path.resolve('../../packages/mongo-adapter/src/index.ts'),
+ '@rocket.chat/media-signaling': path.resolve('../../packages/media-signaling/src/index.ts'),
+ // Rocket.Chat Enterprise Packages
+ '@rocket.chat/ui-theming': path.resolve('../../ee/packages/ui-theming/src/index.ts'),
+ },
+ },
+ server: {
+ cors: true,
+ origin: ROOT_URL.origin,
+ allowedHosts: [ROOT_URL.hostname, 's3.amazonaws.com'],
+ watch: {
+ ignored: ['**/tests/**'],
+ },
+ proxy: {
+ '/api': { target: ROOT_URL.origin, changeOrigin: true },
+ '/avatar': { target: ROOT_URL.origin, changeOrigin: true },
+ '/assets': { target: ROOT_URL.origin, changeOrigin: true },
+ '/images': { target: ROOT_URL.origin, changeOrigin: true },
+ '/emoji-custom': { target: ROOT_URL.origin, changeOrigin: true },
+ '/sockjs': { target: ROOT_URL.origin, ws: true, rewriteWsOrigin: true, changeOrigin: true, autoRewrite: true },
+ '/websocket': { target: ROOT_URL.origin, ws: true, rewriteWsOrigin: true, changeOrigin: true, autoRewrite: true },
+ '/packages': { target: ROOT_URL.origin, changeOrigin: true },
+ '/_oauth': { target: ROOT_URL.origin, changeOrigin: true },
+ '/custom-sounds': { target: ROOT_URL.origin, changeOrigin: true },
+ '/i18n': { target: ROOT_URL.origin, changeOrigin: true },
+ '/file-decrypt': { target: ROOT_URL.origin, changeOrigin: true },
+ '/robots.txt': { target: ROOT_URL.origin, changeOrigin: true },
+ '/livechat': { target: ROOT_URL.origin, changeOrigin: true },
+ '/health': { target: ROOT_URL.origin, changeOrigin: true },
+ '/livez': { target: ROOT_URL.origin, changeOrigin: true },
+ '/readyz': { target: ROOT_URL.origin, changeOrigin: true },
+ '/requestSeats': { target: ROOT_URL.origin, changeOrigin: true },
+ '/data-export': { target: ROOT_URL.origin, changeOrigin: true },
+ '/_saml': { target: ROOT_URL.origin, changeOrigin: true },
+ '/meteor_runtime_config.js': { target: ROOT_URL.origin, changeOrigin: true, followRedirects: true },
+
+ '/file-upload': {
+ target: ROOT_URL.origin,
+ changeOrigin: true,
+ configure: (proxy) => {
+ proxy.on('proxyReq', (proxyReq) => {
+ proxyReq.setHeader('Host', ROOT_URL.hostname);
+ proxyReq.setHeader('Origin', ROOT_URL.origin);
+ proxyReq.setHeader('Referer', `${ROOT_URL.origin}/`);
+ });
+
+ proxy.on('proxyRes', (proxyRes) => {
+ if (proxyRes.headers.location) {
+ try {
+ const locationUrl = new URL(proxyRes.headers.location);
+ if (locationUrl.hostname === ROOT_URL.hostname) {
+ proxyRes.headers.location = locationUrl.pathname + locationUrl.search;
+ }
+ } catch (e) {
+ // location is relative or invalid, ignore
+ }
+ }
+ });
+ },
+ },
+ },
+ },
+ });
+});
+
+async function checkUrl(url: string | Request | URL): Promise {
+ try {
+ const response = await fetch(url, { method: 'HEAD' });
+ return response.ok;
+ } catch {
+ return false;
+ }
+}
+
+async function getDefaultHostUrl() {
+ if (process.env.ROOT_URL) {
+ return new URL(process.env.ROOT_URL);
+ }
+
+ // Check if http://localhost:3000 is reachable
+ if (await checkUrl('http://localhost:3000/api/info')) {
+ return new URL('http://localhost:3000');
+ }
+
+ if (await checkUrl('https://unstable.qa.rocket.chat/api/info')) {
+ return new URL('https://unstable.qa.rocket.chat');
+ }
+
+ if (await checkUrl('https://candidate.qa.rocket.chat/api/info')) {
+ return new URL('https://candidate.qa.rocket.chat');
+ }
+
+ if (await checkUrl('https://open.rocket.chat/api/info')) {
+ return new URL('https://open.rocket.chat');
+ }
+
+ throw new Error('Unable to determine ROOT_URL. Please set the ROOT_URL environment variable.');
+}
diff --git a/apps/meteor/vite/package.json b/apps/meteor/vite/package.json
new file mode 100644
index 0000000000000..47dc78d39992c
--- /dev/null
+++ b/apps/meteor/vite/package.json
@@ -0,0 +1,3 @@
+{
+ "type": "module"
+}
\ No newline at end of file
diff --git a/apps/meteor/vite/plugins/info/index.ts b/apps/meteor/vite/plugins/info/index.ts
new file mode 100644
index 0000000000000..df2b8be0b9fe6
--- /dev/null
+++ b/apps/meteor/vite/plugins/info/index.ts
@@ -0,0 +1,35 @@
+import { exactRegex, makeIdFiltersToMatchWithQuery } from '@rolldown/pluginutils';
+import type { Plugin } from 'vite';
+
+import { loadInfo } from './lib/generate';
+
+export default function infoPlugin(): Plugin {
+ const rocketchatInfoId = 'rocketchat.info';
+ const resolvedVirtualId = `\0${rocketchatInfoId}`;
+
+ return {
+ name: 'rocketchat-info',
+ enforce: 'pre',
+ resolveId: {
+ filter: {
+ id: makeIdFiltersToMatchWithQuery(/\.info$/),
+ },
+ handler(source) {
+ if (source === rocketchatInfoId || source.endsWith('rocketchat.info')) {
+ return resolvedVirtualId;
+ }
+ },
+ },
+ load: {
+ filter: {
+ id: exactRegex(resolvedVirtualId),
+ },
+ async handler(id) {
+ if (id === resolvedVirtualId) {
+ const info = await loadInfo();
+ return info;
+ }
+ },
+ },
+ };
+}
diff --git a/apps/meteor/vite/plugins/info/lib/generate.ts b/apps/meteor/vite/plugins/info/lib/generate.ts
new file mode 100644
index 0000000000000..1278548187cd4
--- /dev/null
+++ b/apps/meteor/vite/plugins/info/lib/generate.ts
@@ -0,0 +1,119 @@
+import { exec } from 'node:child_process';
+import fs from 'node:fs';
+import os from 'node:os';
+import path from 'node:path';
+import { promisify } from 'node:util';
+
+const execAsync = promisify(exec);
+
+type RocketChatInfo = {
+ api: {
+ version: string;
+ build: {
+ date: string;
+ nodeVersion: string;
+ arch: NodeJS.Architecture;
+ platform: NodeJS.Platform;
+ osRelease: string;
+ totalMemory: number;
+ freeMemory: number;
+ cpus: number;
+ };
+ marketplaceApiVersion: string;
+ commit?: {
+ hash?: string;
+ tag?: string;
+ branch?: string;
+ date?: string;
+ author?: string;
+ subject?: string;
+ };
+ };
+ minimumClientVersions: Record;
+};
+
+export async function loadInfo() {
+ const info = await getInfo();
+ return `export const Info = ${JSON.stringify(info.api, null, 4)};
+export const minimumClientVersions = ${JSON.stringify(info.minimumClientVersions, null, 4)};`;
+}
+
+async function getInfo(): Promise {
+ const packageJsonPath = path.resolve(process.cwd(), 'package.json');
+ const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf-8'));
+
+ const appsEngineVersion = await getAppsEngineVersion(process.cwd());
+
+ const output: RocketChatInfo['api'] = {
+ version: packageJson.version,
+ build: {
+ date: new Date().toISOString(),
+ nodeVersion: process.version,
+ arch: process.arch,
+ platform: process.platform,
+ osRelease: os.release(),
+ totalMemory: os.totalmem(),
+ freeMemory: os.freemem(),
+ cpus: os.cpus().length,
+ },
+ marketplaceApiVersion: appsEngineVersion.replace(/^[^0-9]/g, ''),
+ };
+
+ try {
+ const result = await execAsync("git log --pretty=format:'%H%n%ad%n%an%n%s' -n 1");
+ const data = result.stdout.split('\n');
+ output.commit = {
+ hash: data.shift(),
+ date: data.shift(),
+ author: data.shift(),
+ subject: data.join('\n'),
+ };
+ } catch (e) {
+ console.warn('Failed to get git info', e);
+ }
+
+ try {
+ const tags = await execAsync('git describe --abbrev=0 --tags');
+ if (output.commit) {
+ output.commit.tag = tags.stdout.trim();
+ }
+ } catch (e) {
+ // no tags
+ }
+
+ try {
+ const branch = await execAsync('git rev-parse --abbrev-ref HEAD');
+ if (output.commit) {
+ output.commit.branch = branch.stdout.trim();
+ }
+ } catch (e) {
+ // no branch
+ }
+
+ return {
+ api: output,
+ minimumClientVersions: packageJson.rocketchat?.minimumClientVersions || {},
+ };
+}
+
+async function getAppsEngineVersion(appDir: string) {
+ try {
+ // Try to find it in node_modules
+ const appsEnginePkgPath = path.resolve(appDir, 'node_modules/@rocket.chat/apps-engine/package.json');
+ if (fs.existsSync(appsEnginePkgPath)) {
+ const pkg = JSON.parse(fs.readFileSync(appsEnginePkgPath, 'utf-8'));
+ return pkg.version;
+ }
+
+ // Fallback to searching in the workspace if possible (not guaranteed in all envs but likely in this monorepo)
+ // Assuming standard monorepo structure ../../packages/apps-engine
+ const localPath = path.resolve(appDir, '../../packages/apps-engine/package.json');
+ if (fs.existsSync(localPath)) {
+ const pkg = JSON.parse(fs.readFileSync(localPath, 'utf-8'));
+ return pkg.version;
+ }
+ } catch (e) {
+ console.warn('Failed to resolve @rocket.chat/apps-engine version', e);
+ }
+ return '1.0.0'; // Fallback
+}
diff --git a/apps/meteor/vite/plugins/meteor/index.ts b/apps/meteor/vite/plugins/meteor/index.ts
new file mode 100644
index 0000000000000..6708f4bb31858
--- /dev/null
+++ b/apps/meteor/vite/plugins/meteor/index.ts
@@ -0,0 +1,48 @@
+import path from 'node:path';
+
+import type { PluginOption } from 'vite';
+
+import { globals } from './plugins/globals.ts';
+import type { PluginOptions, ResolvedPluginOptions } from './plugins/shared/config.ts';
+
+export default function meteorPlugin(options: PluginOptions = {}): PluginOption {
+ const resolvedConfig = resolveConfig(options);
+ return [globals].map((plugin) => plugin(resolvedConfig));
+}
+
+function resolveConfig(options: PluginOptions): ResolvedPluginOptions {
+ const parsePort = (value?: string | number | null) => {
+ if (typeof value === 'number') {
+ return Number.isFinite(value) && value > 0 ? value : undefined;
+ }
+ if (typeof value === 'string') {
+ const parsed = Number(value);
+ if (Number.isFinite(parsed) && parsed > 0) {
+ return parsed;
+ }
+ }
+ return undefined;
+ };
+
+ const projectRoot = path.resolve(options.projectRoot ? options.projectRoot : './');
+ const programsDir = options.programsDir
+ ? path.resolve(options.programsDir)
+ : path.join(projectRoot, '.meteor', 'local', 'build', 'programs');
+
+ return {
+ prefix: options.prefix || 'meteor/',
+ treeshake: options.treeshake ?? process.env.NODE_ENV === 'production',
+ isClient: options.isClient ?? true,
+ projectRoot,
+ programsDir,
+ runtimeImportId: options.runtimeImportId || 'virtual:meteor-runtime',
+ rootUrl: new URL(options.rootUrl || process.env.ROOT_URL || 'http://localhost:5173/'),
+ meteorServerPort:
+ parsePort(options.meteorServerPort) ||
+ parsePort(process.env.VITE_METEOR_SERVER_PORT) ||
+ parsePort(process.env.METEOR_SERVER_PORT) ||
+ 33335,
+ disableSockJS: options.disableSockJS ?? true,
+ isModern: options.isModern ?? true,
+ };
+}
diff --git a/apps/meteor/vite/plugins/meteor/plugins/globals.ts b/apps/meteor/vite/plugins/meteor/plugins/globals.ts
new file mode 100644
index 0000000000000..a4030fdf97d80
--- /dev/null
+++ b/apps/meteor/vite/plugins/meteor/plugins/globals.ts
@@ -0,0 +1,70 @@
+import { exec } from 'node:child_process';
+import { readFile } from 'node:fs/promises';
+import { promisify } from 'node:util';
+
+import type { Plugin } from 'vite';
+
+import type { ResolvedPluginOptions } from './shared/config';
+
+const execAsync = promisify(exec);
+
+type MeteorRuntimeConfig = {
+ meteorEnv: { NODE_ENV: 'production' | 'development'; TEST_METADATA: string };
+ ROOT_URL: string;
+ ROOT_URL_PATH_PREFIX: string;
+ debug: boolean;
+ reactFastRefreshEnabled: boolean;
+ PUBLIC_SETTINGS: Record;
+ meteorRelease?: string;
+ gitCommitHash?: string;
+ appId?: string;
+ accountsConfigCalled?: boolean;
+ isModern?: boolean;
+ DISABLE_SOCKJS?: boolean;
+ autoupdate?: Record;
+};
+
+export function globals(resolvedConfig: ResolvedPluginOptions): Plugin {
+ return {
+ name: 'meteor:globals',
+ enforce: 'pre',
+ transformIndexHtml: {
+ order: 'pre',
+ async handler(html) {
+ // Fetch release and commit hash concisely using Promise chaining to handle errors
+ const [meteorRelease, gitCommitHash] = await Promise.all([
+ readFile('.meteor/release', 'utf-8').then(r => r.trim()).catch(() => undefined),
+ execAsync('git rev-parse HEAD').then(r => r.stdout.trim()).catch(() => undefined)
+ ]);
+
+ const config: MeteorRuntimeConfig = {
+ meteorEnv: {
+ NODE_ENV: process.env.NODE_ENV === 'production' ? 'production' : 'development',
+ TEST_METADATA: '{}',
+ },
+ ROOT_URL: resolvedConfig.rootUrl.toString(),
+ ROOT_URL_PATH_PREFIX: '',
+ meteorRelease,
+ gitCommitHash,
+ PUBLIC_SETTINGS: {},
+ debug: process.env.NODE_ENV !== 'production',
+ reactFastRefreshEnabled: false,
+ DISABLE_SOCKJS: resolvedConfig.disableSockJS,
+ isModern: resolvedConfig.isModern,
+ };
+
+ const scriptContent = `const config = ${JSON.stringify(config, null, 2)};
+ config.ROOT_URL = window.location.origin;
+ globalThis.__meteor_runtime_config__ = config;`;
+
+ return {
+ html,
+ tags: [
+ { tag: 'script', attrs: { type: 'text/javascript' }, injectTo: 'head', children: scriptContent },
+ { tag: 'base', attrs: { href: '/' }, injectTo: 'head' },
+ ],
+ };
+ },
+ },
+ };
+}
\ No newline at end of file
diff --git a/apps/meteor/vite/plugins/meteor/plugins/shared/config.ts b/apps/meteor/vite/plugins/meteor/plugins/shared/config.ts
new file mode 100644
index 0000000000000..9388c8d4c983f
--- /dev/null
+++ b/apps/meteor/vite/plugins/meteor/plugins/shared/config.ts
@@ -0,0 +1,95 @@
+export type PluginOptions = {
+ /**
+ * The prefix used to identify Meteor package imports.
+ * @default 'meteor/'.
+ */
+ prefix?: string;
+ /**
+ * Whether to treeshake the Meteor runtime and packages.
+ * @default process.env.NODE_ENV === 'production'.
+ */
+ treeshake?: boolean;
+ /**
+ * Whether the build is targeting the client.
+ * @default true.
+ */
+ isClient?: boolean;
+ /**
+ * The module id used to import the Meteor runtime shim.
+ * @default 'virtual:meteor-runtime'.
+ */
+ runtimeImportId?: string;
+ /**
+ * The root URL of the Meteor application.
+ * @default process.env.ROOT_URL || 'http://localhost:3000/'.
+ */
+ rootUrl?: string;
+ /**
+ * The path to the Meteor project root directory.
+ * @default process.cwd().
+ */
+ projectRoot?: string;
+ /**
+ * The path to the Meteor programs directory relative to the project root.
+ * @default '.meteor/local/build/programs/'
+ */
+ programsDir?: string;
+ /**
+ * Port where the Meteor server runtime should listen for HTTP/SockJS traffic.
+ * @default process.env.VITE_METEOR_SERVER_PORT || process.env.METEOR_SERVER_PORT || 33335
+ */
+ meteorServerPort?: number;
+ /**
+ * Use the native WebSocket implementation instead of SockJS on the client side.
+ * @default true
+ */
+ disableSockJS?: boolean;
+ /**
+ * Whether to configure the Meteor runtime for modern browsers.
+ * @default true
+ */
+ isModern?: boolean;
+};
+
+export type ResolvedPluginOptions = {
+ /**
+ * The prefix used to identify Meteor package imports.
+ */
+ readonly prefix: string;
+ /**
+ * Whether to treeshake the Meteor runtime and packages.
+ */
+ readonly treeshake: boolean;
+ /**
+ * Whether the build is targeting the client.
+ */
+ readonly isClient: boolean;
+ /**
+ * The module id used to import the Meteor runtime shim.
+ */
+ readonly runtimeImportId: string;
+ /**
+ * The root URL of the Meteor application.
+ */
+ readonly rootUrl: URL;
+ /**
+ * The absolute path to the Meteor project root directory.
+ */
+ readonly projectRoot: string;
+ /**
+ * The absolute path to the Meteor programs directory.
+ */
+ readonly programsDir: string;
+ /**
+ * Port where the Meteor runtime's HTTP server listens.
+ */
+ readonly meteorServerPort: number;
+ /**
+ * Whether to disable SockJS and use native WebSocket on the client side.
+ */
+ readonly disableSockJS: boolean;
+ /**
+ * Whether to configure the Meteor runtime for modern browsers.
+ */
+ readonly isModern: boolean;
+};
diff --git a/apps/meteor/vite/plugins/nginx/index.ts b/apps/meteor/vite/plugins/nginx/index.ts
new file mode 100644
index 0000000000000..8a5ca93574d76
--- /dev/null
+++ b/apps/meteor/vite/plugins/nginx/index.ts
@@ -0,0 +1,19 @@
+import type { PluginOption } from 'vite';
+
+const __dirname = new URL('.', import.meta.url).pathname;
+
+export default function nginxPlugin(): PluginOption {
+ return {
+ name: 'nginx:config',
+ apply: 'build',
+ async generateBundle() {
+ const fileName = 'nginx.conf';
+ const code = await this.fs.readFile(`${__dirname}/${fileName}`, { encoding: 'utf8' });
+ this.emitFile({
+ type: 'prebuilt-chunk',
+ fileName,
+ code,
+ });
+ },
+ };
+}
diff --git a/apps/meteor/vite/plugins/nginx/nginx.conf b/apps/meteor/vite/plugins/nginx/nginx.conf
new file mode 100644
index 0000000000000..841d037fa626e
--- /dev/null
+++ b/apps/meteor/vite/plugins/nginx/nginx.conf
@@ -0,0 +1,113 @@
+# 1. Log Silencing Logic
+# Must be outside the server block.
+# Sets $loggable to 0 if User-Agent contains "Wget", otherwise 1.
+map $http_user_agent $loggable {
+ ~Wget 0;
+ default 1;
+}
+
+# Suppress successful static asset requests (200/304 responses from /static/)
+map $request_uri $is_static_asset {
+ ~^/static/ 1;
+ ~^/fonts/ 1;
+ ~^/sounds/ 1;
+ ~^/workers/ 1;
+ default 0;
+}
+
+map "$is_static_asset:$status" $log_static_request {
+ "~^1:(200|304)$" 0; # Don't log successful static requests
+ default 1; # Log everything else
+}
+
+# Combine all log filters
+map "$loggable:$log_static_request" $final_loggable {
+ "~0:" 0; # If loggable is 0, don't log
+ "~:0$" 0; # If log_static_request is 0, don't log
+ default 1; # Otherwise log
+}
+
+server {
+ listen 80;
+ server_name localhost;
+
+ # 2. Apply Log Silencing
+ # Only write to the log if $final_loggable is 1
+ access_log /var/log/nginx/access.log combined if=$final_loggable;
+ error_log /var/log/nginx/error.log warn;
+
+ # 3. Gzip Compression (Performance)
+ gzip on;
+ gzip_disable "msie6";
+ gzip_vary on;
+ gzip_proxied any;
+ gzip_comp_level 6;
+ gzip_min_length 1000;
+ gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript;
+
+ # 4. Security Headers
+ # Protects against Clickjacking, MIME-sniffing, and XSS
+ add_header X-Frame-Options "SAMEORIGIN" always;
+ add_header X-Content-Type-Options "nosniff" always;
+ add_header X-XSS-Protection "1; mode=block" always;
+ add_header Referrer-Policy "strict-origin-when-cross-origin" always;
+
+ # 5. General Settings
+ charset utf-8;
+ root /usr/share/nginx/html;
+ index index.html;
+ client_max_body_size 10M; # Adjust based on your file upload limits
+
+ # 6. Fonts & CORS
+ location ~* \.(woff2?|ttf|otf|eot)$ {
+ add_header Access-Control-Allow-Origin "*" always;
+ add_header Access-Control-Allow-Methods "GET, OPTIONS" always;
+ try_files $uri =404;
+ }
+
+ # 7. Static Assets Caching
+ location /assets/ {
+ expires 1y;
+ add_header Cache-Control "public, immutable";
+ access_log off;
+ }
+
+ # 8. Meteor Runtime Config (No Cache)
+ location = /meteor_runtime_config.js {
+ add_header Cache-Control "no-cache, no-store, must-revalidate";
+ add_header Pragma "no-cache";
+ add_header Expires "0";
+ proxy_pass http://rocketchat:3000/meteor_runtime_config.js;
+ proxy_http_version 1.1;
+ proxy_set_header Host $http_host;
+ }
+
+ # 9. Backend Proxy (API & Websockets)
+ location ~ ^/(api|hooks|ufs|oauth|sockjs|websocket|_saml|assets|avatar|file-upload|emoji-custom|custom-sounds|layout|i18n|packages|_matrix|.well-known) {
+ proxy_pass http://rocketchat:3000;
+ proxy_http_version 1.1;
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection "upgrade";
+ proxy_set_header Host $http_host;
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ }
+
+ # 10. Hybrid Paths (Images fallback)
+ location /images {
+ try_files $uri @backend_fallback;
+ }
+
+ location @backend_fallback {
+ proxy_pass http://rocketchat:3000;
+ proxy_http_version 1.1;
+ proxy_set_header Host $http_host;
+ }
+
+ # 11. SPA Catch-all
+ location / {
+ try_files $uri $uri/ /index.html;
+ add_header Cache-Control "no-cache";
+ }
+}
\ No newline at end of file
diff --git a/apps/uikit-playground/package.json b/apps/uikit-playground/package.json
index b3723d308c23a..29c93948ca90a 100644
--- a/apps/uikit-playground/package.json
+++ b/apps/uikit-playground/package.json
@@ -50,7 +50,7 @@
"@vitejs/plugin-react": "~6.0.1",
"eslint": "~9.39.4",
"typescript": "~5.9.3",
- "vite": "^8.0.9"
+ "vite": "^8.0.10"
},
"volta": {
"extends": "../../package.json"
diff --git a/docker-compose-ci-vite.yml b/docker-compose-ci-vite.yml
new file mode 100644
index 0000000000000..58b6e3ef803c9
--- /dev/null
+++ b/docker-compose-ci-vite.yml
@@ -0,0 +1,252 @@
+# docker-compose-ci-vite.yml
+x-microservice-defaults: µservice-defaults
+ environment:
+ - MONGO_URL=mongodb://mongo:27017/rocketchat?replicaSet=rs0
+ - 'TRANSPORTER=${TRANSPORTER:-}'
+ - MOLECULER_LOG_LEVEL=info
+ extra_hosts:
+ - 'host.docker.internal:host-gateway'
+ depends_on:
+ - nats
+
+services:
+ rocketchat:
+ volumes:
+ - ${COVERAGE_DIR:-/tmp/coverage}:${COVERAGE_DIR:-/tmp/coverage}
+ build:
+ context: /tmp/build
+ dockerfile: ${GITHUB_WORKSPACE:-}/apps/meteor/.docker/Dockerfile.backend
+ x-bake:
+ platforms:
+ - linux/amd64
+ - linux/arm64
+ image: ghcr.io/${LOWERCASE_REPOSITORY}/rocket.chat-backend:${DOCKER_TAG}${DOCKER_TAG_SUFFIX_ROCKETCHAT:-}
+ environment:
+ - TEST_MODE=true
+ - DEBUG=${DEBUG:-}
+ - EXIT_UNHANDLEDPROMISEREJECTION=true
+ - MONGO_URL=mongodb://mongo:27017/rocketchat?replicaSet=rs0
+ - 'MONGO_OPLOG_URL=${MONGO_OPLOG_URL:-}'
+ - 'TRANSPORTER=${TRANSPORTER:-}'
+ - MOLECULER_LOG_LEVEL=info
+ - 'ROCKETCHAT_LICENSE=${ENTERPRISE_LICENSE:-}'
+ - 'COVERAGE_DIR=${COVERAGE_DIR:-}'
+ - 'COVERAGE_REPORTER=${COVERAGE_REPORTER:-}'
+ - 'COVERAGE_FILE_NAME=${COVERAGE_FILE_NAME:-}'
+ - OVERWRITE_SETTING_Log_Level=${DEBUG_LOG_LEVEL:-0}
+ - Federation_Service_Enabled=true
+ - 'Federation_Service_Domain=rc.host'
+ - HEAP_USAGE_PERCENT=99
+ extra_hosts:
+ - "host.docker.internal:host-gateway"
+ depends_on:
+ - traefik
+ - mongo
+ labels:
+ traefik.enable: true
+ traefik.http.services.rocketchat-backend.loadbalancer.server.port: 3000
+ traefik.http.routers.rocketchat-backend.service: rocketchat-backend
+ traefik.http.routers.rocketchat-backend.entrypoints: http
+ traefik.http.routers.rocketchat-backend.rule: PathPrefix(`/api`) || PathPrefix(`/livechat`) || PathPrefix(`/_timesync`) || PathPrefix(`/__cordova`) || PathPrefix(`/websocket`) || PathPrefix(`/sockjs`) || PathPrefix(`/assets`) || PathPrefix(`/avatar`) || PathPrefix(`/file-upload`) || PathPrefix(`/emoji-custom`) || PathPrefix(`/custom-sounds`) || PathPrefix(`/layout`) || PathPrefix(`/_saml`)
+ traefik.http.routers.rocketchat-backend.priority: 50
+ traefik.http.middlewares.test-retry.retry.attempts: 4
+ healthcheck:
+ interval: 2s
+ timeout: 5s
+ retries: 20
+ start_period: 30s
+ test: wget --no-verbose --tries=1 --spider http://127.0.0.1:3000/livez || exit 1
+
+ frontend:
+ build:
+ context: /tmp/build
+ dockerfile: ${GITHUB_WORKSPACE:-}/apps/meteor/.docker/Dockerfile.frontend
+ x-bake:
+ platforms:
+ - linux/amd64
+ - linux/arm64
+ image: ghcr.io/${LOWERCASE_REPOSITORY}/rocket.chat-frontend:${DOCKER_TAG}
+ environment:
+ - VITE_TEST_MODE=true
+ - TEST_MODE=true
+ - MONGO_URL=mongodb://mongo:27017/rocketchat?replicaSet=rs0
+ - 'TRANSPORTER=${TRANSPORTER:-}'
+ - MOLECULER_LOG_LEVEL=info
+ depends_on:
+ rocketchat:
+ condition: service_healthy
+ traefik:
+ condition: service_started
+ labels:
+ traefik.enable: true
+ traefik.http.services.rocketchat-frontend.loadbalancer.server.port: 80
+ traefik.http.routers.rocketchat-frontend.service: rocketchat-frontend
+ traefik.http.routers.rocketchat-frontend.entrypoints: http
+ traefik.http.routers.rocketchat-frontend.rule: PathPrefix(`/`)
+ traefik.http.routers.rocketchat-frontend.priority: 1
+ healthcheck:
+ interval: 2s
+ timeout: 5s
+ retries: 10
+ start_period: 5s
+ test: wget --no-verbose --tries=1 --spider http://127.0.0.1:80/ || exit 1
+
+ authorization-service:
+ <<: *microservice-defaults
+ build:
+ context: .
+ dockerfile: ee/apps/authorization-service/Dockerfile
+ x-bake:
+ platforms:
+ - linux/amd64
+ - linux/arm64
+ args:
+ SERVICE: authorization-service
+ image: ghcr.io/${LOWERCASE_REPOSITORY}/authorization-service:${DOCKER_TAG}
+
+ account-service:
+ <<: *microservice-defaults
+ build:
+ context: .
+ dockerfile: ee/apps/account-service/Dockerfile
+ x-bake:
+ platforms:
+ - linux/amd64
+ - linux/arm64
+ args:
+ SERVICE: account-service
+ image: ghcr.io/${LOWERCASE_REPOSITORY}/account-service:${DOCKER_TAG}
+
+ presence-service:
+ <<: *microservice-defaults
+ build:
+ context: .
+ dockerfile: ee/apps/presence-service/Dockerfile
+ x-bake:
+ platforms:
+ - linux/amd64
+ - linux/arm64
+ args:
+ SERVICE: presence-service
+ image: ghcr.io/${LOWERCASE_REPOSITORY}/presence-service:${DOCKER_TAG}
+
+ ddp-streamer-service:
+ <<: *microservice-defaults
+ build:
+ context: .
+ dockerfile: ee/apps/ddp-streamer/Dockerfile
+ x-bake:
+ platforms:
+ - linux/amd64
+ - linux/arm64
+ args:
+ SERVICE: ddp-streamer
+ image: ghcr.io/${LOWERCASE_REPOSITORY}/ddp-streamer-service:${DOCKER_TAG}
+ depends_on:
+ - nats
+ - traefik
+ labels:
+ traefik.enable: true
+ traefik.http.services.ddp-streamer-service.loadbalancer.server.port: 3000
+ traefik.http.routers.ddp-streamer-service.service: ddp-streamer-service
+ traefik.http.routers.ddp-streamer-service.entrypoints: http
+ traefik.http.routers.ddp-streamer-service.rule: PathPrefix(`/websocket`) || PathPrefix(`/sockjs`)
+ traefik.http.routers.ddp-streamer-service.priority: 100
+
+ queue-worker-service:
+ <<: *microservice-defaults
+ build:
+ context: .
+ dockerfile: ee/apps/queue-worker/Dockerfile
+ x-bake:
+ platforms:
+ - linux/amd64
+ - linux/arm64
+ args:
+ SERVICE: queue-worker
+ image: ghcr.io/${LOWERCASE_REPOSITORY}/queue-worker-service:${DOCKER_TAG}
+
+ omnichannel-transcript-service:
+ <<: *microservice-defaults
+ build:
+ context: .
+ dockerfile: ee/apps/omnichannel-transcript/Dockerfile
+ x-bake:
+ platforms:
+ - linux/amd64
+ - linux/arm64
+ args:
+ SERVICE: omnichannel-transcript
+ image: ghcr.io/${LOWERCASE_REPOSITORY}/omnichannel-transcript-service:${DOCKER_TAG}
+ environment:
+ - TEST_MODE=true
+ - MONGO_URL=mongodb://mongo:27017/rocketchat?replicaSet=rs0
+ - 'TRANSPORTER=${TRANSPORTER:-}'
+ - MOLECULER_LOG_LEVEL=info
+
+ nats:
+ image: nats:2.6-alpine
+
+ mongo:
+ image: mongodb/mongodb-community-server:${MONGODB_VERSION:-8.0}-ubi8
+ container_name: mongo
+ restart: on-failure
+ ports:
+ - 27017:27017
+ environment:
+ MONGODB_REPLICA_SET_NAME: ${MONGODB_REPLICA_SET_NAME:-rs0}
+ MONGODB_PORT_NUMBER: ${MONGODB_PORT_NUMBER:-27017}
+ MONGODB_INITIAL_PRIMARY_HOST: ${MONGODB_INITIAL_PRIMARY_HOST:-mongo}
+ entrypoint: |
+ bash -c
+ "mongod --replSet $$MONGODB_REPLICA_SET_NAME --bind_ip_all &
+ sleep 2;
+ until mongosh --quiet --eval \"db.adminCommand('ping')\"; do
+ echo '=====> Waiting for Mongo...';
+ sleep 1;
+ done;
+ if ! mongosh --quiet --eval \"rs.status().ok\" | grep -q 1; then
+ echo \"=====> Initiating ReplSet $$MONGODB_REPLICA_SET_NAME...\";
+ mongosh --eval \"rs.initiate({_id: '$$MONGODB_REPLICA_SET_NAME', members: [{ _id: 0, host: '$$MONGODB_INITIAL_PRIMARY_HOST:$$MONGODB_PORT_NUMBER' }]})\";
+ else
+ echo '=====> ReplSet already initialized';
+ fi
+ echo '=====> Mongo Ready';
+ wait"
+
+ httpbin:
+ image: kong/httpbin
+
+ traefik:
+ image: traefik:v3.6.6
+ command:
+ - --providers.docker=true
+ - --providers.docker.exposedbydefault=false
+ - --entrypoints.http.address=:80
+ - --entrypoints.traefik.address=:8081
+ - --api=true
+ - --api.dashboard=true
+ - --api.insecure=true
+ - '--serverstransport.maxidleconnsperhost=-1'
+ - --log.level=DEBUG
+ ports:
+ - 3000:80
+ - 8081:8081
+ volumes:
+ - /var/run/docker.sock:/var/run/docker.sock
+
+ openldap:
+ image: bitnamilegacy/openldap:latest
+ volumes:
+ - ./development/ldap:/opt/bitnami/openldap/data/
+ environment:
+ - LDAP_ADMIN_USERNAME=admin
+ - LDAP_ADMIN_PASSWORD=adminpassword
+ - LDAP_ROOT=dc=space,dc=air
+ - LDAP_ADMIN_DN=cn=admin,dc=space,dc=air
+ - LDAP_CUSTOM_LDIF_DIR=/opt/bitnami/openldap/data
+ - LDAP_LOGLEVEL=-1
+ - BITNAMI_DEBUG=false
+ ports:
+ - 1389:1389
+ - 1636:1636
\ No newline at end of file
diff --git a/docker-vite-ci.sh b/docker-vite-ci.sh
new file mode 100755
index 0000000000000..e7df94c1f121d
--- /dev/null
+++ b/docker-vite-ci.sh
@@ -0,0 +1,362 @@
+#!/bin/bash
+# docker-vite-ci.sh - Test CI Docker Compose configuration locally
+# This script mimics the CI environment for the Vite-based frontend/backend setup
+#
+# Usage: ./docker-vite-ci.sh [command]
+#
+# Commands:
+# start Build and start all services (default)
+# stop Stop all services and remove volumes
+# reset Reset Rocket.Chat to initial state (fresh database)
+# rebuild Rebuild frontend or backend without full restart
+# logs Follow logs from rocketchat and frontend
+# status Show status of all services
+# help Show this help message
+
+set -e
+
+# Colors for output
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+NC='\033[0m' # No Color
+
+log_info() { echo -e "${GREEN}[INFO]${NC} $1"; }
+log_warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
+log_error() { echo -e "${RED}[ERROR]${NC} $1"; }
+
+# Configuration
+COMPOSE_FILE="docker-compose-ci-vite.yml"
+export GITHUB_WORKSPACE="${GITHUB_WORKSPACE:-$(pwd)}"
+export LOWERCASE_REPOSITORY="${LOWERCASE_REPOSITORY:-rocketchat}"
+export DOCKER_TAG="${DOCKER_TAG:-local-test}"
+export MONGODB_VERSION="${MONGODB_VERSION:-8.0}"
+export COVERAGE_DIR="${COVERAGE_DIR:-/tmp/coverage}"
+BUILD_DIR="${BUILD_DIR:-/tmp/build}"
+
+# Flags
+ENABLE_COVERAGE=false
+
+# Wait for a service to be healthy
+wait_for_healthy() {
+ local service=$1
+ local timeout=${2:-120}
+ local elapsed=0
+
+ while [ $elapsed -lt $timeout ]; do
+ if docker compose -f $COMPOSE_FILE ps "$service" --format json 2>/dev/null | grep -q '"Health":"healthy"'; then
+ return 0
+ fi
+ sleep 2
+ elapsed=$((elapsed + 2))
+ echo -n "."
+ done
+ echo ""
+ return 1
+}
+
+# ============================================================================
+# COMMAND: start
+# ============================================================================
+cmd_start() {
+ log_info "Using workspace: $GITHUB_WORKSPACE"
+ log_info "Build output dir: $BUILD_DIR"
+ [ "$ENABLE_COVERAGE" = true ] && log_info "Coverage enabled: $COVERAGE_DIR"
+
+ # Step 0: Prepare coverage directory if needed
+ if [ "$ENABLE_COVERAGE" = true ]; then
+ log_info "Preparing coverage directory: $COVERAGE_DIR"
+ mkdir -p "$COVERAGE_DIR"
+ chmod 777 "$COVERAGE_DIR"
+ export E2E_COVERAGE=true
+ fi
+
+ # Step 1: Build workspace packages
+ log_info "Building workspace packages..."
+ yarn turbo run build --filter='./packages/*' --filter='./ee/packages/*'
+
+ # Step 2: Build Vite frontend
+ if [ "$ENABLE_COVERAGE" = true ]; then
+ log_info "Building Vite frontend with coverage instrumentation..."
+ cd apps/meteor
+ ROOT_URL=http://localhost:3000/ VITE_TEST_MODE=true VITE_E2E_COVERAGE=true npx vite build --outDir /tmp/build/dist
+ cd ../..
+ else
+ log_info "Building Vite frontend..."
+ cd apps/meteor
+ ROOT_URL=http://localhost:3000/ VITE_TEST_MODE=true npx vite build --outDir /tmp/build/dist
+ cd ../..
+ fi
+
+ # Step 3: Build Meteor backend
+ log_info "Building Meteor backend (this may take a while)..."
+ cd apps/meteor
+ meteor build --server-only --directory "$BUILD_DIR"
+ cd ../..
+ log_info "Meteor build complete"
+
+ # Verify build outputs exist
+ if [ ! -d "$BUILD_DIR/bundle" ]; then
+ log_error "Meteor build output not found at $BUILD_DIR/bundle"
+ exit 1
+ fi
+
+ if [ ! -d "/tmp/build/dist" ]; then
+ log_error "Vite build output not found at /tmp/build/dist"
+ exit 1
+ fi
+
+ # Step 4: Validate compose configuration
+ log_info "Validating docker-compose configuration..."
+ if ! docker compose -f $COMPOSE_FILE config > /dev/null 2>&1; then
+ log_error "Compose configuration invalid:"
+ docker compose -f $COMPOSE_FILE config
+ exit 1
+ fi
+
+ # Step 5: Build and start services
+ log_info "Building Docker images..."
+ docker compose -f $COMPOSE_FILE build rocketchat frontend
+
+ log_info "Starting services (mongo, traefik, rocketchat, frontend)..."
+ docker compose -f $COMPOSE_FILE up -d mongo traefik
+ sleep 5
+
+ docker compose -f $COMPOSE_FILE up -d rocketchat frontend
+
+ log_info "Services starting. Use the following commands to manage:"
+ echo ""
+ echo " View logs: $0 logs"
+ echo " Check status: $0 status"
+ echo " Stop services: $0 stop"
+ echo " Reset database: $0 reset"
+ echo " Traefik dashboard: http://localhost:8081"
+ echo " Application: http://localhost:3000"
+ [ "$ENABLE_COVERAGE" = true ] && echo " Coverage: Enabled (output in $COVERAGE_DIR)"
+ echo ""
+
+ log_info "Waiting for services to be healthy..."
+ docker compose -f $COMPOSE_FILE logs -f rocketchat frontend &
+ LOG_PID=$!
+
+ timeout=120
+ elapsed=0
+ while [ $elapsed -lt $timeout ]; do
+ if docker compose -f $COMPOSE_FILE ps --format json 2>/dev/null | grep -q '"Health":"healthy"'; then
+ backend_healthy=$(docker compose -f $COMPOSE_FILE ps rocketchat --format json 2>/dev/null | grep -c '"Health":"healthy"' || true)
+ frontend_healthy=$(docker compose -f $COMPOSE_FILE ps frontend --format json 2>/dev/null | grep -c '"Health":"healthy"' || true)
+ if [ "$backend_healthy" -ge 1 ] && [ "$frontend_healthy" -ge 1 ]; then
+ kill $LOG_PID 2>/dev/null || true
+ echo ""
+ log_info "All services healthy! Application ready at http://localhost:3000"
+ exit 0
+ fi
+ fi
+ sleep 2
+ elapsed=$((elapsed + 2))
+ done
+
+ kill $LOG_PID 2>/dev/null || true
+ log_warn "Timeout waiting for services. Check logs with: $0 logs"
+}
+
+# ============================================================================
+# COMMAND: stop
+# ============================================================================
+cmd_stop() {
+ log_info "Stopping all services and removing volumes..."
+ docker compose -f $COMPOSE_FILE down -v
+ log_info "All services stopped"
+}
+
+# ============================================================================
+# COMMAND: reset
+# ============================================================================
+cmd_reset() {
+ # Check if mongo is running
+ if ! docker compose -f $COMPOSE_FILE ps mongo --format json 2>/dev/null | grep -q '"State":"running"'; then
+ log_error "MongoDB is not running. Start the environment first with: $0 start"
+ exit 1
+ fi
+
+ log_info "Stopping rocketchat and frontend services..."
+ docker compose -f $COMPOSE_FILE stop rocketchat frontend
+
+ log_info "Dropping rocketchat database..."
+ docker compose -f $COMPOSE_FILE exec -T mongo mongosh --quiet --eval "db.getSiblingDB('rocketchat').dropDatabase()"
+
+ log_info "Starting rocketchat service..."
+ docker compose -f $COMPOSE_FILE up -d rocketchat
+
+ log_info "Waiting for rocketchat to be healthy..."
+ if ! wait_for_healthy rocketchat 120; then
+ log_warn "Timeout waiting for rocketchat. Check logs with: $0 logs"
+ exit 1
+ fi
+ log_info "Rocketchat is healthy"
+
+ log_info "Starting frontend service..."
+ docker compose -f $COMPOSE_FILE up -d frontend
+
+ log_info "Waiting for frontend to be healthy..."
+ if ! wait_for_healthy frontend 60; then
+ log_warn "Timeout waiting for frontend. Check logs with: $0 logs"
+ exit 1
+ fi
+ log_info "Frontend is healthy"
+
+ log_info "Reset complete! Rocket.Chat is ready at http://localhost:3000"
+}
+
+# ============================================================================
+# COMMAND: rebuild
+# ============================================================================
+cmd_rebuild() {
+ local target="${1:-frontend}"
+
+ case "$target" in
+ frontend)
+ if [ "$ENABLE_COVERAGE" = true ]; then
+ log_info "Rebuilding Vite frontend with coverage..."
+ cd apps/meteor
+ ROOT_URL=http://localhost:3000/ VITE_TEST_MODE=true VITE_E2E_COVERAGE=true npx vite build --outDir /tmp/build/dist
+ cd ../..
+ else
+ log_info "Rebuilding Vite frontend..."
+ cd apps/meteor
+ ROOT_URL=http://localhost:3000/ VITE_TEST_MODE=true npx vite build --outDir /tmp/build/dist
+ cd ../..
+ fi
+
+ log_info "Rebuilding frontend Docker image..."
+ docker compose -f $COMPOSE_FILE build frontend
+
+ log_info "Recreating frontend container..."
+ docker compose -f $COMPOSE_FILE up -d --no-deps --force-recreate frontend
+
+ log_info "Waiting for frontend to be healthy..."
+ if ! wait_for_healthy frontend 60; then
+ log_warn "Timeout waiting for frontend"
+ exit 1
+ fi
+ log_info "Frontend rebuild complete!"
+ ;;
+ backend)
+ log_info "Rebuilding Meteor backend..."
+ cd apps/meteor
+ meteor build --server-only --directory "$BUILD_DIR"
+ cd ../..
+
+ log_info "Rebuilding backend Docker image..."
+ docker compose -f $COMPOSE_FILE build rocketchat
+
+ log_info "Recreating rocketchat container..."
+ docker compose -f $COMPOSE_FILE up -d --no-deps --force-recreate rocketchat
+
+ log_info "Waiting for rocketchat to be healthy..."
+ if ! wait_for_healthy rocketchat 120; then
+ log_warn "Timeout waiting for rocketchat"
+ exit 1
+ fi
+ log_info "Backend rebuild complete!"
+ ;;
+ all)
+ cmd_rebuild frontend
+ cmd_rebuild backend
+ ;;
+ *)
+ log_error "Unknown rebuild target: $target"
+ echo "Usage: $0 rebuild [frontend|backend|all]"
+ exit 1
+ ;;
+ esac
+}
+
+# ============================================================================
+# COMMAND: logs
+# ============================================================================
+cmd_logs() {
+ docker compose -f $COMPOSE_FILE logs -f rocketchat frontend
+}
+
+# ============================================================================
+# COMMAND: status
+# ============================================================================
+cmd_status() {
+ docker compose -f $COMPOSE_FILE ps
+}
+
+# ============================================================================
+# COMMAND: help
+# ============================================================================
+cmd_help() {
+ echo "Usage: $0 [command] [options]"
+ echo ""
+ echo "Test CI Docker Compose configuration locally."
+ echo "Mimics the CI environment for the Vite-based frontend/backend setup."
+ echo ""
+ echo "Commands:"
+ echo " start [--coverage] Build and start all services (default)"
+ echo " stop Stop all services and remove volumes"
+ echo " reset Reset Rocket.Chat to initial state (drop database)"
+ echo " rebuild [target] Rebuild services without full restart"
+ echo " target: frontend (default), backend, all"
+ echo " logs Follow logs from rocketchat and frontend"
+ echo " status Show status of all services"
+ echo " help Show this help message"
+ echo ""
+ echo "Flags:"
+ echo " --coverage Enable code coverage instrumentation"
+ echo ""
+ echo "Environment variables:"
+ echo " COVERAGE_DIR Coverage output directory (default: /tmp/coverage)"
+ echo " BUILD_DIR Build output directory (default: /tmp/build)"
+ echo " MONGODB_VERSION MongoDB version (default: 8.0)"
+ echo ""
+ echo "Examples:"
+ echo " $0 start # Build and start everything"
+ echo " $0 start --coverage # Build with coverage instrumentation"
+ echo " $0 reset # Reset database for fresh test run"
+ echo " $0 rebuild frontend # Rebuild only the frontend"
+}
+
+# ============================================================================
+# MAIN
+# ============================================================================
+# Parse all arguments (flags can appear before or after command)
+COMMAND=""
+while [[ $# -gt 0 ]]; do
+ case "$1" in
+ --coverage)
+ ENABLE_COVERAGE=true
+ shift
+ ;;
+ start|stop|reset|rebuild|logs|status|help)
+ COMMAND="$1"
+ shift
+ ;;
+ *)
+ if [ -z "$COMMAND" ]; then
+ COMMAND="$1"
+ fi
+ shift
+ ;;
+ esac
+done
+
+COMMAND="${COMMAND:-start}"
+
+case "$COMMAND" in
+ start) cmd_start ;;
+ stop) cmd_stop ;;
+ reset) cmd_reset ;;
+ rebuild) cmd_rebuild "$@" ;;
+ logs) cmd_logs ;;
+ status) cmd_status ;;
+ help) cmd_help ;;
+ *)
+ log_error "Unknown command: $COMMAND"
+ cmd_help
+ exit 1
+ ;;
+esac
diff --git a/e2e.sh b/e2e.sh
new file mode 100755
index 0000000000000..82ef31644d0c9
--- /dev/null
+++ b/e2e.sh
@@ -0,0 +1,144 @@
+#!/bin/bash
+# e2e.sh - Run E2E tests locally (mimics CI environment)
+#
+# Usage: ./e2e.sh [--coverage] [--shards N]
+#
+# This script:
+# 1. Builds the environment (optionally with coverage instrumentation)
+# 2. Runs all test shards
+# 3. Merges coverage data (if enabled)
+# 4. Generates coverage reports (if enabled)
+
+set -e
+
+# Colors for output
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+BLUE='\033[0;34m'
+NC='\033[0m' # No Color
+
+log_info() { echo -e "${GREEN}[INFO]${NC} $1"; }
+log_warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
+log_cmd() { echo -e "${BLUE}[CMD]${NC} $1"; }
+
+# Default configuration
+ENABLE_COVERAGE=false
+TOTAL_SHARDS=4
+
+# Parse flags
+while [[ $# -gt 0 ]]; do
+ case "$1" in
+ --coverage)
+ ENABLE_COVERAGE=true
+ shift
+ ;;
+ --shards)
+ TOTAL_SHARDS="$2"
+ shift 2
+ ;;
+ --help)
+ echo "Usage: $0 [options]"
+ echo ""
+ echo "Options:"
+ echo " --coverage Enable code coverage collection and reporting"
+ echo " --shards N Number of test shards to run (default: 4)"
+ echo " --help Show this help message"
+ echo ""
+ echo "Examples:"
+ echo " $0 # Run tests without coverage"
+ echo " $0 --coverage # Run tests with coverage"
+ echo " $0 --shards 2 # Run only 2 shards"
+ exit 0
+ ;;
+ *)
+ log_warn "Unknown option: $1"
+ exit 1
+ ;;
+ esac
+done
+
+# Configuration (matches CI)
+export MONGO_URL='mongodb://localhost:27017/rocketchat?replicaSet=rs0&directConnection=true'
+export COVERAGE_DIR='/tmp/coverage/ui'
+export IS_EE=''
+
+
+# Run each shard
+for shard in $(seq 1 $TOTAL_SHARDS); do
+ log_info "========================================"
+ log_info "Running shard $shard/$TOTAL_SHARDS"
+ log_info "========================================"
+
+ # Reset between shards (fresh database)
+ if [ $shard -gt 1 ]; then
+ log_info "Resetting environment for shard $shard..."
+ ./docker-vite-ci.sh reset
+ fi
+
+ # Run tests for this shard
+ cd apps/meteor
+ yarn prepare
+ if [ "$ENABLE_COVERAGE" = true ]; then
+ E2E_COVERAGE=true yarn test:e2e --shard $shard/$TOTAL_SHARDS || {
+ log_warn "Shard $shard failed, continuing with other shards..."
+ }
+ else
+ yarn test:e2e --shard $shard/$TOTAL_SHARDS || {
+ log_warn "Shard $shard failed, continuing with other shards..."
+ }
+ fi
+ cd ../..
+
+ # Merge coverage for this shard (mimics CI workflow)
+ if [ "$ENABLE_COVERAGE" = true ]; then
+ if [ -d "apps/meteor/.nyc_output" ] && [ "$(ls -A apps/meteor/.nyc_output)" ]; then
+ log_info "Merging coverage for shard $shard..."
+ cd apps/meteor
+ npx nyc merge .nyc_output "${COVERAGE_DIR}/ui-${shard}.json"
+ cd ../..
+ else
+ log_warn "No coverage data found for shard $shard"
+ fi
+ fi
+done
+
+log_info "========================================"
+log_info "All shards complete!"
+log_info "========================================"
+
+# Merge all shard coverage files into one
+if [ "$ENABLE_COVERAGE" = true ] && [ -d "$COVERAGE_DIR" ] && [ "$(ls -A $COVERAGE_DIR/*.json 2>/dev/null)" ]; then
+ log_info "Merging all shard coverage files..."
+ cd apps/meteor
+
+ # Create temporary directory for merged coverage
+ rm -rf .nyc_output
+ mkdir -p .nyc_output
+
+ # Copy all shard files to .nyc_output
+ cp "${COVERAGE_DIR}"/*.json .nyc_output/
+
+ # Generate reports
+ log_info "Generating coverage reports..."
+ npx nyc report --reporter=html --reporter=text-summary --reporter=lcov
+
+ log_info "========================================"
+ log_info "Coverage Summary:"
+ log_info "========================================"
+ npx nyc report --reporter=text-summary
+
+ log_info ""
+ log_info "Coverage reports generated:"
+ log_info " HTML: apps/meteor/coverage/index.html"
+ log_info " LCOV: apps/meteor/coverage/lcov.info"
+ log_info ""
+ log_info "Open coverage report with: open apps/meteor/coverage/index.html"
+
+ cd ../..
+elif [ "$ENABLE_COVERAGE" = true ]; then
+ log_warn "No coverage data found in any shard"
+fi
+
+log_info "========================================"
+log_info "E2E test run complete!"
+log_info "========================================"
diff --git a/packages/gazzodown/src/elements/LinkSpan.tsx b/packages/gazzodown/src/elements/LinkSpan.tsx
index ba234e65d3c42..57680c273fcf1 100644
--- a/packages/gazzodown/src/elements/LinkSpan.tsx
+++ b/packages/gazzodown/src/elements/LinkSpan.tsx
@@ -1,5 +1,5 @@
import type * as MessageParser from '@rocket.chat/message-parser';
-import { getBaseURI, isExternal } from '@rocket.chat/ui-client/dist/helpers/getBaseURI';
+import { getBaseURI, isExternal } from '@rocket.chat/ui-client';
import type { ReactElement } from 'react';
import { useMemo } from 'react';
import { useTranslation } from 'react-i18next';
diff --git a/packages/livechat/package.json b/packages/livechat/package.json
index 696ac06168cee..6b011493916dd 100644
--- a/packages/livechat/package.json
+++ b/packages/livechat/package.json
@@ -44,6 +44,7 @@
"mem": "^8.1.1",
"path-to-regexp": "^6.3.0",
"preact": "~10.25.4",
+ "preact-render-to-string": "^6.6.5",
"preact-router": "^4.1.2",
"query-string": "^7.1.3",
"react-hook-form": "~7.45.4",
diff --git a/packages/ui-client/src/hooks/useThemeMode.ts b/packages/ui-client/src/hooks/useThemeMode.ts
index 49a140d78a4f7..d7b32b154abd9 100644
--- a/packages/ui-client/src/hooks/useThemeMode.ts
+++ b/packages/ui-client/src/hooks/useThemeMode.ts
@@ -27,9 +27,10 @@ export const useThemeMode = (): [
);
const setTheme = useCallback((value: ThemeMode): (() => void) => updaters[value], [updaters]);
+ const isDarkMode = useDarkMode(themeMode === 'auto' ? undefined : themeMode === 'dark');
const useTheme = () => {
- if (useDarkMode(themeMode === 'auto' ? undefined : themeMode === 'dark')) {
+ if (isDarkMode) {
return 'dark';
}
if (themeMode === 'high-contrast') {
diff --git a/packages/ui-contexts/src/hooks/useRoomRoute.ts b/packages/ui-contexts/src/hooks/useRoomRoute.ts
new file mode 100644
index 0000000000000..ef4e19f812fbd
--- /dev/null
+++ b/packages/ui-contexts/src/hooks/useRoomRoute.ts
@@ -0,0 +1,30 @@
+import type { IRoom, RoomType } from '@rocket.chat/core-typings';
+import { useEffectEvent } from '@rocket.chat/fuselage-hooks';
+
+import { useRouter } from './useRouter';
+
+type RoomRouteData = {
+ rid: IRoom['_id'];
+ t: RoomType;
+ name?: IRoom['name'];
+};
+
+/**
+ * Returns a function to navigate to a room using existing room data.
+ * Unlike `useGoToRoom`, this doesn't make an API call - use it when you already have the room data.
+ */
+export const useRoomRoute = ({ replace = false }: { replace?: boolean } = {}): ((room: RoomRouteData) => void) => {
+ const router = useRouter();
+
+ return useEffectEvent((room: RoomRouteData) => {
+ const { t, name, rid } = room;
+ const { path } = router.getRoomRoute(t, ['c', 'p'].includes(t) ? { name } : { rid });
+
+ router.navigate(
+ {
+ pathname: path,
+ },
+ { replace },
+ );
+ });
+};
diff --git a/packages/ui-contexts/src/index.ts b/packages/ui-contexts/src/index.ts
index a8ed0f4fbeca6..9b5f202ae1452 100644
--- a/packages/ui-contexts/src/index.ts
+++ b/packages/ui-contexts/src/index.ts
@@ -38,6 +38,7 @@ export { useCurrentRoutePath } from './hooks/useCurrentRoutePath';
export { useCustomSound } from './hooks/useCustomSound';
export { useEndpoint } from './hooks/useEndpoint';
export { useGoToRoom } from './hooks/useGoToRoom';
+export { useRoomRoute } from './hooks/useRoomRoute';
export type { EndpointFunction } from './hooks/useEndpoint';
export { useIsLoggingIn } from './hooks/useIsLoggingIn';
export { useIsPrivilegedSettingsContext } from './hooks/useIsPrivilegedSettingsContext';
diff --git a/yarn.lock b/yarn.lock
index 73d3e9da25175..3afa8343f1ba4 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -999,7 +999,7 @@ __metadata:
languageName: node
linkType: hard
-"@babel/generator@npm:^7.29.0":
+"@babel/generator@npm:^7.29.0, @babel/generator@npm:^7.29.1":
version: 7.29.1
resolution: "@babel/generator@npm:7.29.1"
dependencies:
@@ -3415,13 +3415,13 @@ __metadata:
languageName: node
linkType: hard
-"@emnapi/core@npm:1.9.2":
- version: 1.9.2
- resolution: "@emnapi/core@npm:1.9.2"
+"@emnapi/core@npm:1.10.0":
+ version: 1.10.0
+ resolution: "@emnapi/core@npm:1.10.0"
dependencies:
"@emnapi/wasi-threads": "npm:1.2.1"
tslib: "npm:^2.4.0"
- checksum: 10/32084861f306b405f10f3ae13d1a49fa75650bdaaa40704892c397856815fe5d3781670d2662806d39c2d8a19bb62826dd7b870a79858f7be77500d9d0d3d91a
+ checksum: 10/d32f386084e64deaf2609aabb8295d1ad5af6144d0f46d2060b76cc53f1f3b486df54bec9b0f33c37d85a3822e1193ebcd4e3deb4a5f0e4cd650aa2ffc631715
languageName: node
linkType: hard
@@ -3435,12 +3435,12 @@ __metadata:
languageName: node
linkType: hard
-"@emnapi/runtime@npm:1.9.2":
- version: 1.9.2
- resolution: "@emnapi/runtime@npm:1.9.2"
+"@emnapi/runtime@npm:1.10.0":
+ version: 1.10.0
+ resolution: "@emnapi/runtime@npm:1.10.0"
dependencies:
tslib: "npm:^2.4.0"
- checksum: 10/de123d6b7acdbe34bf997523be761e5ae6d8f9b3967b72e8e50ff7dd1791a2a0d2b9fb0d7d92230b0738502980ea6f947189b7c1f47814ff666515a55c6fff48
+ checksum: 10/d21083d07fa0c2da171c142e78ef986b66b07d45b06accc0bcaf49fcc61bb4dbc10e1c1760813070165b9f49b054376a931045347f21c0f42ff1eb2d2040faac
languageName: node
linkType: hard
@@ -4325,15 +4325,6 @@ __metadata:
languageName: node
linkType: hard
-"@internationalized/date@npm:^3.12.1":
- version: 3.12.1
- resolution: "@internationalized/date@npm:3.12.1"
- dependencies:
- "@swc/helpers": "npm:^0.5.0"
- checksum: 10/a8178a73e65cb86357008e39e589bf5899b47a4ebd6123d96b54e3b19aade31c136d8e5f9c48c4627110f26d857e15aa4be9e189e56386a4b26c616df4ea1795
- languageName: node
- linkType: hard
-
"@internationalized/date@npm:^3.7.0":
version: 3.7.0
resolution: "@internationalized/date@npm:3.7.0"
@@ -4362,15 +4353,6 @@ __metadata:
languageName: node
linkType: hard
-"@internationalized/number@npm:^3.6.6":
- version: 3.6.6
- resolution: "@internationalized/number@npm:3.6.6"
- dependencies:
- "@swc/helpers": "npm:^0.5.0"
- checksum: 10/7a7c8290a91bae3c1b22ab006c036b50f041162a383446360d0dd8194aa491a370057df1b2aa2cdfbccefd335cf6f4679e14608f5c24031b6852375654fa59df
- languageName: node
- linkType: hard
-
"@internationalized/string@npm:^3.2.5":
version: 3.2.5
resolution: "@internationalized/string@npm:3.2.5"
@@ -4380,15 +4362,6 @@ __metadata:
languageName: node
linkType: hard
-"@internationalized/string@npm:^3.2.8":
- version: 3.2.8
- resolution: "@internationalized/string@npm:3.2.8"
- dependencies:
- "@swc/helpers": "npm:^0.5.0"
- checksum: 10/2054baf8b2d5f32c7904b5a584e724d00ae781b3efb22c113c18d6a604f700569faf006be28929032831972272693d7dd863d324550a7385068715e3a67b8a56
- languageName: node
- linkType: hard
-
"@isaacs/cliui@npm:^8.0.2":
version: 8.0.2
resolution: "@isaacs/cliui@npm:8.0.2"
@@ -4412,7 +4385,7 @@ __metadata:
languageName: node
linkType: hard
-"@istanbuljs/load-nyc-config@npm:^1.0.0":
+"@istanbuljs/load-nyc-config@npm:^1.0.0, @istanbuljs/load-nyc-config@npm:^1.1.0":
version: 1.1.0
resolution: "@istanbuljs/load-nyc-config@npm:1.1.0"
dependencies:
@@ -6281,10 +6254,10 @@ __metadata:
languageName: node
linkType: hard
-"@oxc-project/types@npm:=0.126.0":
- version: 0.126.0
- resolution: "@oxc-project/types@npm:0.126.0"
- checksum: 10/b4d7d350728381916acfd81549021d075c2456e4de64229b0402ae8f9531a2d44f928c835b99da3a589ff1e32c133da1f0c0e2229cc32f62a62e5752b6bd728b
+"@oxc-project/types@npm:=0.127.0":
+ version: 0.127.0
+ resolution: "@oxc-project/types@npm:0.127.0"
+ checksum: 10/f154f4720367186aed63a16fb1395f9039d4e6872265fe9e6b5eacc02fb2b948f9ea6c5f85efd3a015ea28aa8c31232b7a8301218ae28651659e46dd0c4f2031
languageName: node
linkType: hard
@@ -6928,20 +6901,7 @@ __metadata:
languageName: node
linkType: hard
-"@react-aria/focus@npm:^3.0.0-nightly-fb28ab3b4-241024":
- version: 3.22.0
- resolution: "@react-aria/focus@npm:3.22.0"
- dependencies:
- "@swc/helpers": "npm:^0.5.0"
- react-aria: "npm:3.48.0"
- peerDependencies:
- react: ^16.8.0 || ^17.0.0-rc.1 || ^18.0.0 || ^19.0.0-rc.1
- react-dom: ^16.8.0 || ^17.0.0-rc.1 || ^18.0.0 || ^19.0.0-rc.1
- checksum: 10/6467625ad37e4dd6f16669145f19ef75a44134364bd116959369407f1b3ff309f86fc25610b4e7c3736a1a4befd178112429749fad505b944e11cec25e3847c1
- languageName: node
- linkType: hard
-
-"@react-aria/focus@npm:^3.19.1":
+"@react-aria/focus@npm:^3.0.0-nightly-fb28ab3b4-241024, @react-aria/focus@npm:^3.19.1":
version: 3.19.1
resolution: "@react-aria/focus@npm:3.19.1"
dependencies:
@@ -7559,21 +7519,7 @@ __metadata:
languageName: node
linkType: hard
-"@react-aria/utils@npm:^3.0.0-nightly-fb28ab3b4-241024":
- version: 3.34.0
- resolution: "@react-aria/utils@npm:3.34.0"
- dependencies:
- "@swc/helpers": "npm:^0.5.0"
- react-aria: "npm:3.48.0"
- react-stately: "npm:3.46.0"
- peerDependencies:
- react: ^16.8.0 || ^17.0.0-rc.1 || ^18.0.0 || ^19.0.0-rc.1
- react-dom: ^16.8.0 || ^17.0.0-rc.1 || ^18.0.0 || ^19.0.0-rc.1
- checksum: 10/55a120c1b33510bd18154128f8e6c7ca17de38d3950e474c92d2cd154f01c2db9c3248103d6183330f8793db47a138e02fb40ce1763736aea469f3a15d673de9
- languageName: node
- linkType: hard
-
-"@react-aria/utils@npm:^3.27.0":
+"@react-aria/utils@npm:^3.0.0-nightly-fb28ab3b4-241024, @react-aria/utils@npm:^3.27.0":
version: 3.27.0
resolution: "@react-aria/utils@npm:3.27.0"
dependencies:
@@ -8621,16 +8567,7 @@ __metadata:
languageName: node
linkType: hard
-"@react-types/shared@npm:^3.0.0-nightly-fb28ab3b4-241024, @react-types/shared@npm:^3.34.0":
- version: 3.34.0
- resolution: "@react-types/shared@npm:3.34.0"
- peerDependencies:
- react: ^16.8.0 || ^17.0.0-rc.1 || ^18.0.0 || ^19.0.0-rc.1
- checksum: 10/d28b0a3a3f68f94167fd7b4f474803430093b1a31f5f50cef6ddd755b923ba3af35dde40ffcc1f320926892744823a039b4a396c671f7c59aa49634811f0c43a
- languageName: node
- linkType: hard
-
-"@react-types/shared@npm:^3.14.1, @react-types/shared@npm:^3.27.0":
+"@react-types/shared@npm:^3.0.0-nightly-fb28ab3b4-241024, @react-types/shared@npm:^3.14.1, @react-types/shared@npm:^3.27.0":
version: 3.27.0
resolution: "@react-types/shared@npm:3.27.0"
peerDependencies:
@@ -9705,6 +9642,7 @@ __metadata:
postcss-scss: "npm:^4.0.9"
postcss-selector-not: "npm:^8.0.1"
preact: "npm:~10.25.4"
+ preact-render-to-string: "npm:^6.6.5"
preact-router: "npm:^4.1.2"
query-string: "npm:^7.1.3"
react: "npm:~18.3.1"
@@ -10047,6 +9985,7 @@ __metadata:
"@types/underscore": "npm:^1.13.0"
"@types/xml-crypto": "npm:~1.4.6"
"@types/xml-encryption": "npm:~1.2.4"
+ "@vitejs/plugin-react": "npm:~6.0.1"
"@xmldom/xmldom": "npm:~0.8.13"
adm-zip: "npm:0.5.17"
ajv: "npm:^8.17.1"
@@ -10236,6 +10175,8 @@ __metadata:
ua-parser-js: "npm:~1.0.41"
underscore: "npm:^1.13.8"
universal-perf-hooks: "npm:^1.0.1"
+ vite: "npm:^8.0.10"
+ vite-plugin-istanbul: "npm:^8.0.0"
webdav: "npm:^4.11.5"
webpack: "npm:~5.104.1"
xml-crypto: "npm:~3.2.1"
@@ -11235,7 +11176,7 @@ __metadata:
react-virtuoso: "npm:~4.12.8"
reactflow: "npm:^11.11.4"
typescript: "npm:~5.9.3"
- vite: "npm:^8.0.9"
+ vite: "npm:^8.0.10"
languageName: unknown
linkType: soft
@@ -11291,119 +11232,119 @@ __metadata:
languageName: unknown
linkType: soft
-"@rolldown/binding-android-arm64@npm:1.0.0-rc.16":
- version: 1.0.0-rc.16
- resolution: "@rolldown/binding-android-arm64@npm:1.0.0-rc.16"
+"@rolldown/binding-android-arm64@npm:1.0.0-rc.17":
+ version: 1.0.0-rc.17
+ resolution: "@rolldown/binding-android-arm64@npm:1.0.0-rc.17"
conditions: os=android & cpu=arm64
languageName: node
linkType: hard
-"@rolldown/binding-darwin-arm64@npm:1.0.0-rc.16":
- version: 1.0.0-rc.16
- resolution: "@rolldown/binding-darwin-arm64@npm:1.0.0-rc.16"
+"@rolldown/binding-darwin-arm64@npm:1.0.0-rc.17":
+ version: 1.0.0-rc.17
+ resolution: "@rolldown/binding-darwin-arm64@npm:1.0.0-rc.17"
conditions: os=darwin & cpu=arm64
languageName: node
linkType: hard
-"@rolldown/binding-darwin-x64@npm:1.0.0-rc.16":
- version: 1.0.0-rc.16
- resolution: "@rolldown/binding-darwin-x64@npm:1.0.0-rc.16"
+"@rolldown/binding-darwin-x64@npm:1.0.0-rc.17":
+ version: 1.0.0-rc.17
+ resolution: "@rolldown/binding-darwin-x64@npm:1.0.0-rc.17"
conditions: os=darwin & cpu=x64
languageName: node
linkType: hard
-"@rolldown/binding-freebsd-x64@npm:1.0.0-rc.16":
- version: 1.0.0-rc.16
- resolution: "@rolldown/binding-freebsd-x64@npm:1.0.0-rc.16"
+"@rolldown/binding-freebsd-x64@npm:1.0.0-rc.17":
+ version: 1.0.0-rc.17
+ resolution: "@rolldown/binding-freebsd-x64@npm:1.0.0-rc.17"
conditions: os=freebsd & cpu=x64
languageName: node
linkType: hard
-"@rolldown/binding-linux-arm-gnueabihf@npm:1.0.0-rc.16":
- version: 1.0.0-rc.16
- resolution: "@rolldown/binding-linux-arm-gnueabihf@npm:1.0.0-rc.16"
+"@rolldown/binding-linux-arm-gnueabihf@npm:1.0.0-rc.17":
+ version: 1.0.0-rc.17
+ resolution: "@rolldown/binding-linux-arm-gnueabihf@npm:1.0.0-rc.17"
conditions: os=linux & cpu=arm
languageName: node
linkType: hard
-"@rolldown/binding-linux-arm64-gnu@npm:1.0.0-rc.16":
- version: 1.0.0-rc.16
- resolution: "@rolldown/binding-linux-arm64-gnu@npm:1.0.0-rc.16"
+"@rolldown/binding-linux-arm64-gnu@npm:1.0.0-rc.17":
+ version: 1.0.0-rc.17
+ resolution: "@rolldown/binding-linux-arm64-gnu@npm:1.0.0-rc.17"
conditions: os=linux & cpu=arm64 & libc=glibc
languageName: node
linkType: hard
-"@rolldown/binding-linux-arm64-musl@npm:1.0.0-rc.16":
- version: 1.0.0-rc.16
- resolution: "@rolldown/binding-linux-arm64-musl@npm:1.0.0-rc.16"
+"@rolldown/binding-linux-arm64-musl@npm:1.0.0-rc.17":
+ version: 1.0.0-rc.17
+ resolution: "@rolldown/binding-linux-arm64-musl@npm:1.0.0-rc.17"
conditions: os=linux & cpu=arm64 & libc=musl
languageName: node
linkType: hard
-"@rolldown/binding-linux-ppc64-gnu@npm:1.0.0-rc.16":
- version: 1.0.0-rc.16
- resolution: "@rolldown/binding-linux-ppc64-gnu@npm:1.0.0-rc.16"
+"@rolldown/binding-linux-ppc64-gnu@npm:1.0.0-rc.17":
+ version: 1.0.0-rc.17
+ resolution: "@rolldown/binding-linux-ppc64-gnu@npm:1.0.0-rc.17"
conditions: os=linux & cpu=ppc64 & libc=glibc
languageName: node
linkType: hard
-"@rolldown/binding-linux-s390x-gnu@npm:1.0.0-rc.16":
- version: 1.0.0-rc.16
- resolution: "@rolldown/binding-linux-s390x-gnu@npm:1.0.0-rc.16"
+"@rolldown/binding-linux-s390x-gnu@npm:1.0.0-rc.17":
+ version: 1.0.0-rc.17
+ resolution: "@rolldown/binding-linux-s390x-gnu@npm:1.0.0-rc.17"
conditions: os=linux & cpu=s390x & libc=glibc
languageName: node
linkType: hard
-"@rolldown/binding-linux-x64-gnu@npm:1.0.0-rc.16":
- version: 1.0.0-rc.16
- resolution: "@rolldown/binding-linux-x64-gnu@npm:1.0.0-rc.16"
+"@rolldown/binding-linux-x64-gnu@npm:1.0.0-rc.17":
+ version: 1.0.0-rc.17
+ resolution: "@rolldown/binding-linux-x64-gnu@npm:1.0.0-rc.17"
conditions: os=linux & cpu=x64 & libc=glibc
languageName: node
linkType: hard
-"@rolldown/binding-linux-x64-musl@npm:1.0.0-rc.16":
- version: 1.0.0-rc.16
- resolution: "@rolldown/binding-linux-x64-musl@npm:1.0.0-rc.16"
+"@rolldown/binding-linux-x64-musl@npm:1.0.0-rc.17":
+ version: 1.0.0-rc.17
+ resolution: "@rolldown/binding-linux-x64-musl@npm:1.0.0-rc.17"
conditions: os=linux & cpu=x64 & libc=musl
languageName: node
linkType: hard
-"@rolldown/binding-openharmony-arm64@npm:1.0.0-rc.16":
- version: 1.0.0-rc.16
- resolution: "@rolldown/binding-openharmony-arm64@npm:1.0.0-rc.16"
+"@rolldown/binding-openharmony-arm64@npm:1.0.0-rc.17":
+ version: 1.0.0-rc.17
+ resolution: "@rolldown/binding-openharmony-arm64@npm:1.0.0-rc.17"
conditions: os=openharmony & cpu=arm64
languageName: node
linkType: hard
-"@rolldown/binding-wasm32-wasi@npm:1.0.0-rc.16":
- version: 1.0.0-rc.16
- resolution: "@rolldown/binding-wasm32-wasi@npm:1.0.0-rc.16"
+"@rolldown/binding-wasm32-wasi@npm:1.0.0-rc.17":
+ version: 1.0.0-rc.17
+ resolution: "@rolldown/binding-wasm32-wasi@npm:1.0.0-rc.17"
dependencies:
- "@emnapi/core": "npm:1.9.2"
- "@emnapi/runtime": "npm:1.9.2"
+ "@emnapi/core": "npm:1.10.0"
+ "@emnapi/runtime": "npm:1.10.0"
"@napi-rs/wasm-runtime": "npm:^1.1.4"
conditions: cpu=wasm32
languageName: node
linkType: hard
-"@rolldown/binding-win32-arm64-msvc@npm:1.0.0-rc.16":
- version: 1.0.0-rc.16
- resolution: "@rolldown/binding-win32-arm64-msvc@npm:1.0.0-rc.16"
+"@rolldown/binding-win32-arm64-msvc@npm:1.0.0-rc.17":
+ version: 1.0.0-rc.17
+ resolution: "@rolldown/binding-win32-arm64-msvc@npm:1.0.0-rc.17"
conditions: os=win32 & cpu=arm64
languageName: node
linkType: hard
-"@rolldown/binding-win32-x64-msvc@npm:1.0.0-rc.16":
- version: 1.0.0-rc.16
- resolution: "@rolldown/binding-win32-x64-msvc@npm:1.0.0-rc.16"
+"@rolldown/binding-win32-x64-msvc@npm:1.0.0-rc.17":
+ version: 1.0.0-rc.17
+ resolution: "@rolldown/binding-win32-x64-msvc@npm:1.0.0-rc.17"
conditions: os=win32 & cpu=x64
languageName: node
linkType: hard
-"@rolldown/pluginutils@npm:1.0.0-rc.16":
- version: 1.0.0-rc.16
- resolution: "@rolldown/pluginutils@npm:1.0.0-rc.16"
- checksum: 10/81d2b15b548375efbd63824c817def7ffe5e8f9949ccc5b5e0a15b8b01bf236a1e3f89d614e19aceb0fd8ee2796bc912fd8ba7652d21b9a29cd60e2741b1180e
+"@rolldown/pluginutils@npm:1.0.0-rc.17":
+ version: 1.0.0-rc.17
+ resolution: "@rolldown/pluginutils@npm:1.0.0-rc.17"
+ checksum: 10/d659ea756ee6d360a015708d1035c07047e08db99a4160c74c7f22a7ece5611efcc18ad56db4a63b69edb506ded47596d9c0d301919242470d8c412d916b9750
languageName: node
linkType: hard
@@ -13696,6 +13637,15 @@ __metadata:
languageName: node
linkType: hard
+"@types/babel__generator@npm:7.27.0":
+ version: 7.27.0
+ resolution: "@types/babel__generator@npm:7.27.0"
+ dependencies:
+ "@babel/types": "npm:^7.0.0"
+ checksum: 10/f572e67a9a39397664350a4437d8a7fbd34acc83ff4887a8cf08349e39f8aeb5ad2f70fb78a0a0a23a280affe3a5f4c25f50966abdce292bcf31237af1c27b1a
+ languageName: node
+ linkType: hard
+
"@types/babel__preset-env@npm:^7.10.0":
version: 7.10.0
resolution: "@types/babel__preset-env@npm:7.10.0"
@@ -16170,7 +16120,7 @@ __metadata:
languageName: node
linkType: hard
-"acorn@npm:^8.15.0":
+"acorn@npm:^8.15.0, acorn@npm:^8.16.0":
version: 8.16.0
resolution: "acorn@npm:8.16.0"
bin:
@@ -16553,15 +16503,6 @@ __metadata:
languageName: node
linkType: hard
-"aria-hidden@npm:^1.2.3":
- version: 1.2.6
- resolution: "aria-hidden@npm:1.2.6"
- dependencies:
- tslib: "npm:^2.0.0"
- checksum: 10/1914e5a36225dccdb29f0b88cc891eeca736cdc5b0c905ab1437b90b28b5286263ed3a221c75b7dc788f25b942367be0044b2ac8ccf073a72e07a50b1d964202
- languageName: node
- linkType: hard
-
"aria-query@npm:5.3.0":
version: 5.3.0
resolution: "aria-query@npm:5.3.0"
@@ -21933,7 +21874,7 @@ __metadata:
languageName: node
linkType: hard
-"eslint-visitor-keys@npm:^5.0.0":
+"eslint-visitor-keys@npm:^5.0.0, eslint-visitor-keys@npm:^5.0.1":
version: 5.0.1
resolution: "eslint-visitor-keys@npm:5.0.1"
checksum: 10/f9cc1a57b75e0ef949545cac33d01e8367e302de4c1483266ed4d8646ee5c306376660196bbb38b004e767b7043d1e661cb4336b49eff634a1bbe75c1db709ec
@@ -22007,6 +21948,17 @@ __metadata:
languageName: node
linkType: hard
+"espree@npm:^11.2.0":
+ version: 11.2.0
+ resolution: "espree@npm:11.2.0"
+ dependencies:
+ acorn: "npm:^8.16.0"
+ acorn-jsx: "npm:^5.3.2"
+ eslint-visitor-keys: "npm:^5.0.1"
+ checksum: 10/5cc4233b8f150010c70713669ef8231f07fe9b6391870cfa0a292a07f723ed5c2922064b978e627f7cabf7753280e64c5bde41d3840caaa40946989df7009a51
+ languageName: node
+ linkType: hard
+
"esprima@npm:^4.0.0, esprima@npm:^4.0.1, esprima@npm:~4.0.0":
version: 4.0.1
resolution: "esprima@npm:4.0.1"
@@ -23424,6 +23376,17 @@ __metadata:
languageName: node
linkType: hard
+"glob@npm:^13.0.6":
+ version: 13.0.6
+ resolution: "glob@npm:13.0.6"
+ dependencies:
+ minimatch: "npm:^10.2.2"
+ minipass: "npm:^7.1.3"
+ path-scurry: "npm:^2.0.2"
+ checksum: 10/201ad69e5f0aa74e1d8c00a481581f8b8c804b6a4fbfabeeb8541f5d756932800331daeba99b58fb9e4cd67e12ba5a7eba5b82fb476691588418060b84353214
+ languageName: node
+ linkType: hard
+
"glob@npm:^7.0.3, glob@npm:^7.1.3, glob@npm:^7.1.4, glob@npm:^7.1.6, glob@npm:^7.2.0":
version: 7.2.3
resolution: "glob@npm:7.2.3"
@@ -23797,7 +23760,7 @@ __metadata:
languageName: node
linkType: hard
-"hash-base@npm:~3.0, hash-base@npm:~3.0.4":
+"hash-base@npm:~3.0":
version: 3.0.4
resolution: "hash-base@npm:3.0.4"
dependencies:
@@ -23807,6 +23770,16 @@ __metadata:
languageName: node
linkType: hard
+"hash-base@npm:~3.0.4":
+ version: 3.0.5
+ resolution: "hash-base@npm:3.0.5"
+ dependencies:
+ inherits: "npm:^2.0.4"
+ safe-buffer: "npm:^5.2.1"
+ checksum: 10/6a82675a5de2ea9347501bbe655a2334950c7ec972fd9810ae9529e06aeab8f7e8ef68fc2112e5e6f0745561a7e05326efca42ad59bb5fd116537f5f8b0a216d
+ languageName: node
+ linkType: hard
+
"hash.js@npm:^1.0.0, hash.js@npm:^1.0.3":
version: 1.1.7
resolution: "hash.js@npm:1.1.7"
@@ -25403,7 +25376,7 @@ __metadata:
languageName: node
linkType: hard
-"istanbul-lib-instrument@npm:^6.0.0, istanbul-lib-instrument@npm:^6.0.2":
+"istanbul-lib-instrument@npm:^6.0.0, istanbul-lib-instrument@npm:^6.0.2, istanbul-lib-instrument@npm:^6.0.3":
version: 6.0.3
resolution: "istanbul-lib-instrument@npm:6.0.3"
dependencies:
@@ -28724,6 +28697,13 @@ __metadata:
languageName: node
linkType: hard
+"minipass@npm:^7.1.3":
+ version: 7.1.3
+ resolution: "minipass@npm:7.1.3"
+ checksum: 10/175e4d5e20980c3cd316ae82d2c031c42f6c746467d8b1905b51060a0ba4461441a0c25bb67c025fd9617f9a3873e152c7b543c6b5ac83a1846be8ade80dffd6
+ languageName: node
+ linkType: hard
+
"minizlib@npm:^2.1.1, minizlib@npm:^2.1.2":
version: 2.1.2
resolution: "minizlib@npm:2.1.2"
@@ -30442,6 +30422,16 @@ __metadata:
languageName: node
linkType: hard
+"path-scurry@npm:^2.0.2":
+ version: 2.0.2
+ resolution: "path-scurry@npm:2.0.2"
+ dependencies:
+ lru-cache: "npm:^11.0.0"
+ minipass: "npm:^7.1.2"
+ checksum: 10/2b4257422bcb870a4c2d205b3acdbb213a72f5e2250f61c80f79c9d014d010f82bdf8584441612c8e1fa4eb098678f5704a66fa8377d72646bad4be38e57a2c3
+ languageName: node
+ linkType: hard
+
"path-to-regexp@npm:0.1.12, path-to-regexp@npm:~0.1.12":
version: 0.1.12
resolution: "path-to-regexp@npm:0.1.12"
@@ -31556,6 +31546,15 @@ __metadata:
languageName: node
linkType: hard
+"preact-render-to-string@npm:^6.6.5":
+ version: 6.6.5
+ resolution: "preact-render-to-string@npm:6.6.5"
+ peerDependencies:
+ preact: ">=10 || >= 11.0.0-0"
+ checksum: 10/8d5bbde63857e3b4a0113c8e8d53b1f700cd14c6ee0e0a841aba30939930877e59ae1850bc74e1f369abdc21fd069af04f47fa7943f1b377ecd05366ab1353e6
+ languageName: node
+ linkType: hard
+
"preact-router@npm:^4.1.2":
version: 4.1.2
resolution: "preact-router@npm:4.1.2"
@@ -32327,26 +32326,6 @@ __metadata:
languageName: node
linkType: hard
-"react-aria@npm:3.48.0":
- version: 3.48.0
- resolution: "react-aria@npm:3.48.0"
- dependencies:
- "@internationalized/date": "npm:^3.12.1"
- "@internationalized/number": "npm:^3.6.6"
- "@internationalized/string": "npm:^3.2.8"
- "@react-types/shared": "npm:^3.34.0"
- "@swc/helpers": "npm:^0.5.0"
- aria-hidden: "npm:^1.2.3"
- clsx: "npm:^2.0.0"
- react-stately: "npm:3.46.0"
- use-sync-external-store: "npm:^1.6.0"
- peerDependencies:
- react: ^16.8.0 || ^17.0.0-rc.1 || ^18.0.0 || ^19.0.0-rc.1
- react-dom: ^16.8.0 || ^17.0.0-rc.1 || ^18.0.0 || ^19.0.0-rc.1
- checksum: 10/e70ba3a21f99967daffcb7399e6c4cc33fe9ae0ba4b13216ac3fbc150f37416d882b68ecd52f3c59852b87ef61a1c4b184066083d699d5afda1ad8b38fab8b99
- languageName: node
- linkType: hard
-
"react-aria@patch:react-aria@npm%3A3.37.0#~/.yarn/patches/react-aria-npm-3.37.0-83959bd2fa.patch":
version: 3.37.0
resolution: "react-aria@patch:react-aria@npm%3A3.37.0#~/.yarn/patches/react-aria-npm-3.37.0-83959bd2fa.patch::version=3.37.0&hash=e69ffb"
@@ -32688,22 +32667,6 @@ __metadata:
languageName: node
linkType: hard
-"react-stately@npm:3.46.0":
- version: 3.46.0
- resolution: "react-stately@npm:3.46.0"
- dependencies:
- "@internationalized/date": "npm:^3.12.1"
- "@internationalized/number": "npm:^3.6.6"
- "@internationalized/string": "npm:^3.2.8"
- "@react-types/shared": "npm:^3.34.0"
- "@swc/helpers": "npm:^0.5.0"
- use-sync-external-store: "npm:^1.6.0"
- peerDependencies:
- react: ^16.8.0 || ^17.0.0-rc.1 || ^18.0.0 || ^19.0.0-rc.1
- checksum: 10/ee2d8b0633c6ba82eb159197ddaaeb0832d318c6ed1304c7e14273d0c3dc3156c48aef0c8cf4207481dbca1cf054c6726c42c089259605213d1f08f35bebf321
- languageName: node
- linkType: hard
-
"react-stately@patch:react-stately@npm%3A3.17.0#~/.yarn/patches/react-stately-npm-3.17.0-264cc7a43c.patch":
version: 3.17.0
resolution: "react-stately@patch:react-stately@npm%3A3.17.0#~/.yarn/patches/react-stately-npm-3.17.0-264cc7a43c.patch::version=3.17.0&hash=e13f63"
@@ -33632,27 +33595,27 @@ __metadata:
languageName: unknown
linkType: soft
-"rolldown@npm:1.0.0-rc.16":
- version: 1.0.0-rc.16
- resolution: "rolldown@npm:1.0.0-rc.16"
- dependencies:
- "@oxc-project/types": "npm:=0.126.0"
- "@rolldown/binding-android-arm64": "npm:1.0.0-rc.16"
- "@rolldown/binding-darwin-arm64": "npm:1.0.0-rc.16"
- "@rolldown/binding-darwin-x64": "npm:1.0.0-rc.16"
- "@rolldown/binding-freebsd-x64": "npm:1.0.0-rc.16"
- "@rolldown/binding-linux-arm-gnueabihf": "npm:1.0.0-rc.16"
- "@rolldown/binding-linux-arm64-gnu": "npm:1.0.0-rc.16"
- "@rolldown/binding-linux-arm64-musl": "npm:1.0.0-rc.16"
- "@rolldown/binding-linux-ppc64-gnu": "npm:1.0.0-rc.16"
- "@rolldown/binding-linux-s390x-gnu": "npm:1.0.0-rc.16"
- "@rolldown/binding-linux-x64-gnu": "npm:1.0.0-rc.16"
- "@rolldown/binding-linux-x64-musl": "npm:1.0.0-rc.16"
- "@rolldown/binding-openharmony-arm64": "npm:1.0.0-rc.16"
- "@rolldown/binding-wasm32-wasi": "npm:1.0.0-rc.16"
- "@rolldown/binding-win32-arm64-msvc": "npm:1.0.0-rc.16"
- "@rolldown/binding-win32-x64-msvc": "npm:1.0.0-rc.16"
- "@rolldown/pluginutils": "npm:1.0.0-rc.16"
+"rolldown@npm:1.0.0-rc.17":
+ version: 1.0.0-rc.17
+ resolution: "rolldown@npm:1.0.0-rc.17"
+ dependencies:
+ "@oxc-project/types": "npm:=0.127.0"
+ "@rolldown/binding-android-arm64": "npm:1.0.0-rc.17"
+ "@rolldown/binding-darwin-arm64": "npm:1.0.0-rc.17"
+ "@rolldown/binding-darwin-x64": "npm:1.0.0-rc.17"
+ "@rolldown/binding-freebsd-x64": "npm:1.0.0-rc.17"
+ "@rolldown/binding-linux-arm-gnueabihf": "npm:1.0.0-rc.17"
+ "@rolldown/binding-linux-arm64-gnu": "npm:1.0.0-rc.17"
+ "@rolldown/binding-linux-arm64-musl": "npm:1.0.0-rc.17"
+ "@rolldown/binding-linux-ppc64-gnu": "npm:1.0.0-rc.17"
+ "@rolldown/binding-linux-s390x-gnu": "npm:1.0.0-rc.17"
+ "@rolldown/binding-linux-x64-gnu": "npm:1.0.0-rc.17"
+ "@rolldown/binding-linux-x64-musl": "npm:1.0.0-rc.17"
+ "@rolldown/binding-openharmony-arm64": "npm:1.0.0-rc.17"
+ "@rolldown/binding-wasm32-wasi": "npm:1.0.0-rc.17"
+ "@rolldown/binding-win32-arm64-msvc": "npm:1.0.0-rc.17"
+ "@rolldown/binding-win32-x64-msvc": "npm:1.0.0-rc.17"
+ "@rolldown/pluginutils": "npm:1.0.0-rc.17"
dependenciesMeta:
"@rolldown/binding-android-arm64":
optional: true
@@ -33686,7 +33649,7 @@ __metadata:
optional: true
bin:
rolldown: bin/cli.mjs
- checksum: 10/064ef08cbd171842da7595db7519bb5b7ca2a620b4481338716f2d494353c01511efbb01e93805de649e8cba2898b8ed7814193381ca1edd5b9883f51caf0865
+ checksum: 10/5e7415a7cb732c4f7168ab6dcc841ed9ec4ad614058294a53d94821a762c274a69b009e41e9c8e4983a059907f02d462030a36b42543c0f41ce702fcd68d10d5
languageName: node
linkType: hard
@@ -34664,7 +34627,7 @@ __metadata:
languageName: node
linkType: hard
-"source-map@npm:~0.7.6":
+"source-map@npm:^0.7.6, source-map@npm:~0.7.6":
version: 0.7.6
resolution: "source-map@npm:0.7.6"
checksum: 10/c8d2da7c57c14f3fd7568f764b39ad49bbf9dd7632b86df3542b31fed117d4af2fb74a4f886fc06baf7a510fee68e37998efc3080aacdac951c36211dc29a7a3
@@ -35939,6 +35902,17 @@ __metadata:
languageName: node
linkType: hard
+"test-exclude@npm:^8.0.0":
+ version: 8.0.0
+ resolution: "test-exclude@npm:8.0.0"
+ dependencies:
+ "@istanbuljs/schema": "npm:^0.1.2"
+ glob: "npm:^13.0.6"
+ minimatch: "npm:^10.2.2"
+ checksum: 10/4f16902c4bc91f62678ba64f8a36f70661ae0009d7c480bee4a0befaf9a1118b2925293cec6dde13ceb892a7ab9d71cd5387874a8c91cc05904c45b46fe13831
+ languageName: node
+ linkType: hard
+
"text-decoder@npm:^1.1.0":
version: 1.2.0
resolution: "text-decoder@npm:1.2.0"
@@ -37262,15 +37236,6 @@ __metadata:
languageName: node
linkType: hard
-"use-sync-external-store@npm:^1.6.0":
- version: 1.6.0
- resolution: "use-sync-external-store@npm:1.6.0"
- peerDependencies:
- react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0
- checksum: 10/b40ad2847ba220695bff2d4ba4f4d60391c0fb4fb012faa7a4c18eb38b69181936f5edc55a522c4d20a788d1a879b73c3810952c9d0fd128d01cb3f22042c09e
- languageName: node
- linkType: hard
-
"utf7@npm:>=1.0.2":
version: 1.0.2
resolution: "utf7@npm:1.0.2"
@@ -37491,15 +37456,33 @@ __metadata:
languageName: node
linkType: hard
-"vite@npm:^8.0.9":
- version: 8.0.9
- resolution: "vite@npm:8.0.9"
+"vite-plugin-istanbul@npm:^8.0.0":
+ version: 8.0.0
+ resolution: "vite-plugin-istanbul@npm:8.0.0"
+ dependencies:
+ "@babel/generator": "npm:^7.29.1"
+ "@istanbuljs/load-nyc-config": "npm:^1.1.0"
+ "@types/babel__generator": "npm:7.27.0"
+ espree: "npm:^11.2.0"
+ istanbul-lib-instrument: "npm:^6.0.3"
+ picocolors: "npm:^1.1.1"
+ source-map: "npm:^0.7.6"
+ test-exclude: "npm:^8.0.0"
+ peerDependencies:
+ vite: ">=4"
+ checksum: 10/2a401af6c40dae85b59ebd01873b3bb609ea0f3abf5670ea1efa92b700d51ea27d79a84463229290187389a2fad2b7766ca5de22de2bf4411b143c614abc9040
+ languageName: node
+ linkType: hard
+
+"vite@npm:^8.0.10":
+ version: 8.0.10
+ resolution: "vite@npm:8.0.10"
dependencies:
fsevents: "npm:~2.3.3"
lightningcss: "npm:^1.32.0"
picomatch: "npm:^4.0.4"
postcss: "npm:^8.5.10"
- rolldown: "npm:1.0.0-rc.16"
+ rolldown: "npm:1.0.0-rc.17"
tinyglobby: "npm:^0.2.16"
peerDependencies:
"@types/node": ^20.19.0 || >=22.12.0
@@ -37544,7 +37527,7 @@ __metadata:
optional: true
bin:
vite: bin/vite.js
- checksum: 10/745ff66e6d6379d93172cf03c2788db73b83b3f6ad7980456be5d523daf38ee2397d15d266e1acad5afbab950709081e640b447e3ce8a32f0e06856f37253a08
+ checksum: 10/64c6fa4efa1a9ca3e1cacbcca16487b75ea25d62efbfb99c4e571b5f716296dc4f8af825eb624e273b11c3bee4e87daec35815fb6a56e01c843659c003ed2bcd
languageName: node
linkType: hard