Skip to content
Merged
217 changes: 217 additions & 0 deletions spec/AuthDataUniqueIndex.spec.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,217 @@
'use strict';

const request = require('../lib/request');
const Config = require('../lib/Config');

describe('AuthData Unique Index', () => {
// Stub auth adapter that accepts every app id and every auth payload, so the
// specs below can exercise authData uniqueness without a real provider.
const fakeAuthProvider = {
  validateAppId: async () => {},
  validateAuthData: async () => {},
};

// Boot a fresh server with the stub provider registered before each spec.
beforeEach(async () => {
  await reconfigureServer({ auth: { fakeAuthProvider } });
});

it('should prevent concurrent signups with the same authData from creating duplicate users', async () => {
  const authData = { fakeAuthProvider: { id: 'duplicate-test-id', token: 'token1' } };

  // Launch five identical signup requests at once; map each outcome onto a
  // uniform { success, ... } record so Promise.all never short-circuits.
  const signup = () =>
    request({
      method: 'POST',
      headers: {
        'X-Parse-Application-Id': 'test',
        'X-Parse-REST-API-Key': 'rest',
        'Content-Type': 'application/json',
      },
      url: 'http://localhost:8378/1/users',
      body: { authData },
    }).then(
      response => ({ success: true, data: response.data }),
      error => ({ success: false, error: error.data || error.message })
    );
  const results = await Promise.all(Array.from({ length: 5 }, signup));

  const successes = results.filter(outcome => outcome.success);
  const failures = results.filter(outcome => !outcome.success);

  // Invariant under the race: every winning request resolved to one and the
  // same user objectId.
  expect(new Set(successes.map(outcome => outcome.data.objectId)).size).toBe(1);

  // Every losing request was rejected with error 208 ("this auth is already used").
  failures.forEach(outcome => {
    expect(outcome.error.code).toBe(208);
    expect(outcome.error.error).toBe('this auth is already used');
  });

  // And the database holds exactly one user for this provider id.
  const query = new Parse.Query('_User');
  query.equalTo('authData.fakeAuthProvider.id', 'duplicate-test-id');
  const users = await query.find({ useMasterKey: true });
  expect(users.length).toBe(1);
});

it('should prevent concurrent signups via batch endpoint with same authData', async () => {
  const authData = { fakeAuthProvider: { id: 'batch-race-test-id', token: 'token1' } };

  // One batch request carrying three identical user creations.
  const response = await request({
    method: 'POST',
    headers: {
      'X-Parse-Application-Id': 'test',
      'X-Parse-REST-API-Key': 'rest',
      'Content-Type': 'application/json',
    },
    url: 'http://localhost:8378/1/batch',
    body: {
      requests: [1, 2, 3].map(() => ({
        method: 'POST',
        path: '/1/users',
        body: { authData },
      })),
    },
  });

  const successes = response.data.filter(item => item.success);
  const failures = response.data.filter(item => item.error);

  // Every sub-request that succeeded must point at the same user.
  expect(new Set(successes.map(item => item.success.objectId)).size).toBe(1);

  // Every failed sub-request must carry error 208 ("this auth is already used").
  failures.forEach(item => {
    expect(item.error.code).toBe(208);
    expect(item.error.error).toBe('this auth is already used');
  });

  // Exactly one matching user may exist afterwards.
  const query = new Parse.Query('_User');
  query.equalTo('authData.fakeAuthProvider.id', 'batch-race-test-id');
  const users = await query.find({ useMasterKey: true });
  expect(users.length).toBe(1);
});

it('should allow sequential signups with different authData IDs', async () => {
  // Two logins with distinct provider ids must yield two distinct users.
  const first = await Parse.User.logInWith('fakeAuthProvider', {
    authData: { id: 'user-id-1', token: 'token1' },
  });
  const second = await Parse.User.logInWith('fakeAuthProvider', {
    authData: { id: 'user-id-2', token: 'token2' },
  });

  expect(first.id).toBeDefined();
  expect(second.id).toBeDefined();
  expect(second.id).not.toBe(first.id);
});

it('should still allow login with authData after successful signup', async () => {
  const authPayload = { authData: { id: 'login-test-id', token: 'token1' } };

  // The first call implicitly signs the user up.
  const created = await Parse.User.logInWith('fakeAuthProvider', authPayload);
  expect(created.id).toBeDefined();

  // Repeating the call must log in, not create a second account.
  const loggedIn = await Parse.User.logInWith('fakeAuthProvider', authPayload);
  expect(loggedIn.id).toBe(created.id);
});

it('should skip startup index creation when createIndexAuthDataUniqueness is false', async () => {
  const config = Config.get('test');
  const adapter = config.database.adapter;
  const spy = spyOn(adapter, 'ensureAuthDataUniqueness').and.callThrough();

  // Temporarily disable the option. Restore it in `finally`: without that, a
  // rejected performInitialization or a failing expectation would leak the
  // modified databaseOptions into every subsequent spec.
  const originalOptions = config.database.options.databaseOptions;
  config.database.options.databaseOptions = { createIndexAuthDataUniqueness: false };
  try {
    await config.database.performInitialization();
    expect(spy).not.toHaveBeenCalled();
  } finally {
    config.database.options.databaseOptions = originalOptions;
  }
});

it('should handle calling ensureAuthDataUniqueness multiple times via cache', async () => {
  const adapter = Config.get('test').database.adapter;

  // The first invocation creates the index and records the provider in the
  // adapter's in-memory cache.
  await adapter.ensureAuthDataUniqueness('fakeAuthProvider');
  // The second invocation is expected to resolve from that cache.
  await adapter.ensureAuthDataUniqueness('fakeAuthProvider');

  expect(adapter._authDataUniqueIndexes.has('fakeAuthProvider')).toBe(true);
});

it('should log warning when index creation fails due to existing duplicates', async () => {
  const config = Config.get('test');
  const adapter = config.database.adapter;

  // Drop any cached providers so initialization will attempt index creation.
  if (adapter._authDataUniqueIndexes) {
    adapter._authDataUniqueIndexes.clear();
  }

  // Make every index-creation attempt look like a duplicate-data failure.
  spyOn(adapter, 'ensureAuthDataUniqueness').and.callFake(() =>
    Promise.reject(new Parse.Error(Parse.Error.DUPLICATE_VALUE, 'duplicates exist'))
  );

  const logSpy = spyOn(require('../lib/logger').logger, 'warn');

  // Initialization must survive the failure and emit a warning instead.
  await config.database.performInitialization();
  expect(logSpy).toHaveBeenCalledWith(
    jasmine.stringContaining('Unable to ensure uniqueness for auth data provider'),
    jasmine.anything()
  );
});

it('should prevent concurrent signups with same anonymous authData', async () => {
  const anonymousId = 'anon-race-test-id';
  const authData = { anonymous: { id: anonymousId } };

  // Five simultaneous anonymous signups sharing one anonymous id; normalize
  // each outcome so Promise.all never short-circuits on a rejection.
  const signup = () =>
    request({
      method: 'POST',
      headers: {
        'X-Parse-Application-Id': 'test',
        'X-Parse-REST-API-Key': 'rest',
        'Content-Type': 'application/json',
      },
      url: 'http://localhost:8378/1/users',
      body: { authData },
    }).then(
      response => ({ success: true, data: response.data }),
      error => ({ success: false, error: error.data || error.message })
    );
  const results = await Promise.all(Array.from({ length: 5 }, signup));

  const successes = results.filter(outcome => outcome.success);
  const failures = results.filter(outcome => !outcome.success);

  // All winners resolved to the same user.
  expect(new Set(successes.map(outcome => outcome.data.objectId)).size).toBe(1);

  // All losers got error 208 ("this auth is already used").
  failures.forEach(outcome => {
    expect(outcome.error.code).toBe(208);
    expect(outcome.error.error).toBe('this auth is already used');
  });

  // Exactly one anonymous user with this id exists.
  const query = new Parse.Query('_User');
  query.equalTo('authData.anonymous.id', anonymousId);
  const users = await query.find({ useMasterKey: true });
  expect(users.length).toBe(1);
});
});
10 changes: 10 additions & 0 deletions spec/DatabaseController.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -415,6 +415,11 @@ describe('DatabaseController', function () {
email_1: { email: 1 },
_email_verify_token: { _email_verify_token: 1 },
_perishable_token: { _perishable_token: 1 },
_auth_data_custom_id: { '_auth_data_custom.id': 1 },
_auth_data_facebook_id: { '_auth_data_facebook.id': 1 },
_auth_data_myoauth_id: { '_auth_data_myoauth.id': 1 },
_auth_data_shortLivedAuth_id: { '_auth_data_shortLivedAuth.id': 1 },
_auth_data_anonymous_id: { '_auth_data_anonymous.id': 1 },
});
}
);
Expand All @@ -441,6 +446,11 @@ describe('DatabaseController', function () {
email_1: { email: 1 },
_email_verify_token: { _email_verify_token: 1 },
_perishable_token: { _perishable_token: 1 },
_auth_data_custom_id: { '_auth_data_custom.id': 1 },
_auth_data_facebook_id: { '_auth_data_facebook.id': 1 },
_auth_data_myoauth_id: { '_auth_data_myoauth.id': 1 },
_auth_data_shortLivedAuth_id: { '_auth_data_shortLivedAuth.id': 1 },
_auth_data_anonymous_id: { '_auth_data_anonymous.id': 1 },
});
}
);
Expand Down
9 changes: 7 additions & 2 deletions spec/ParseUser.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -430,8 +430,13 @@ describe('Parse.User testing', () => {
},
},
};
const res = await request(options);
expect(res.data.objectId).not.toEqual(objectId);
try {
await request(options);
fail('should have thrown');
} catch (err) {
expect(err.data.code).toBe(208);
expect(err.data.error).toBe('this auth is already used');
}
});

it('user login with files', done => {
Expand Down
43 changes: 43 additions & 0 deletions src/Adapters/Storage/Mongo/MongoStorageAdapter.js
Original file line number Diff line number Diff line change
Expand Up @@ -582,6 +582,13 @@ export class MongoStorageAdapter implements StorageAdapter {
if (matches && Array.isArray(matches)) {
err.userInfo = { duplicated_field: matches[1] };
}
// Check for authData unique index violations
if (!err.userInfo) {
const authDataMatch = error.message.match(/index:\s+(_auth_data_[a-zA-Z0-9_]+_id)/);
if (authDataMatch) {
err.userInfo = { duplicated_field: authDataMatch[1] };
}
}
Comment thread
coderabbitai[bot] marked this conversation as resolved.
}
throw err;
}
Expand Down Expand Up @@ -818,6 +825,42 @@ export class MongoStorageAdapter implements StorageAdapter {
.catch(err => this.handleError(err));
}

// Creates a unique sparse index on _auth_data_<provider>.id to prevent
// race conditions during concurrent signups with the same authData.
ensureAuthDataUniqueness(provider: string) {
if (!this._authDataUniqueIndexes) {
this._authDataUniqueIndexes = new Set();
}
if (this._authDataUniqueIndexes.has(provider)) {
return Promise.resolve();
}
return this._adaptiveCollection('_User')
.then(collection =>
collection._mongoCollection.createIndex(
{ [`_auth_data_${provider}.id`]: 1 },
{ unique: true, sparse: true, background: true, name: `_auth_data_${provider}_id` }
)
)
.then(() => {
this._authDataUniqueIndexes.add(provider);
})
.catch(error => {
if (error.code === 11000) {
throw new Parse.Error(
Parse.Error.DUPLICATE_VALUE,
'Tried to ensure field uniqueness for a class that already has duplicates.'
);
}
// Ignore "index already exists with same name" or "index already exists with different options"
if (error.code === 85 || error.code === 86) {
this._authDataUniqueIndexes.add(provider);
return;
}
throw error;
})
.catch(err => this.handleError(err));
Comment thread
coderabbitai[bot] marked this conversation as resolved.
}
Comment thread
coderabbitai[bot] marked this conversation as resolved.

// Used in tests
_rawFind(className: string, query: QueryType) {
return this._adaptiveCollection(className)
Expand Down
44 changes: 41 additions & 3 deletions src/Adapters/Storage/Postgres/PostgresStorageAdapter.js
Original file line number Diff line number Diff line change
Expand Up @@ -1479,9 +1479,15 @@ export class PostgresStorageAdapter implements StorageAdapter {
);
err.underlyingError = error;
if (error.constraint) {
const matches = error.constraint.match(/unique_([a-zA-Z]+)/);
if (matches && Array.isArray(matches)) {
err.userInfo = { duplicated_field: matches[1] };
// Check for authData unique index violations first
const authDataMatch = error.constraint.match(/_User_unique_authData_([a-zA-Z0-9_]+)_id/);
if (authDataMatch) {
err.userInfo = { duplicated_field: `_auth_data_${authDataMatch[1]}` };
} else {
const matches = error.constraint.match(/unique_([a-zA-Z]+)/);
if (matches && Array.isArray(matches)) {
err.userInfo = { duplicated_field: matches[1] };
}
}
Comment thread
coderabbitai[bot] marked this conversation as resolved.
}
error = err;
Expand Down Expand Up @@ -2052,6 +2058,38 @@ export class PostgresStorageAdapter implements StorageAdapter {
});
}

// Creates a unique index on authData-><provider>->>'id' to prevent
// race conditions during concurrent signups with the same authData.
async ensureAuthDataUniqueness(provider: string) {
Comment thread
coderabbitai[bot] marked this conversation as resolved.
if (!this._authDataUniqueIndexes) {
this._authDataUniqueIndexes = new Set();
}
if (this._authDataUniqueIndexes.has(provider)) {
return;
}
const indexName = `_User_unique_authData_${provider}_id`;
const qs = `CREATE UNIQUE INDEX IF NOT EXISTS $1:name ON "_User" (("authData"->$2::text->>'id')) WHERE "authData"->$2::text->>'id' IS NOT NULL`;
await this._client.none(qs, [indexName, provider]).catch(error => {
if (
error.code === PostgresDuplicateRelationError &&
error.message.includes(indexName)
) {
// Index already exists. Ignore error.
} else if (
error.code === PostgresUniqueIndexViolationError &&
error.message.includes(indexName)
) {
throw new Parse.Error(
Parse.Error.DUPLICATE_VALUE,
'Tried to ensure field uniqueness for a class that already has duplicates.'
);
} else {
throw error;
}
});
this._authDataUniqueIndexes.add(provider);
Comment thread
coderabbitai[bot] marked this conversation as resolved.
Outdated
}
Comment thread
mtrezza marked this conversation as resolved.

// Executes a count.
async count(
className: string,
Expand Down
Loading
Loading