diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5804224477..768d210c18 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -5,9 +5,9 @@ on: - master pull_request: branches: - - '**' + - "**" env: - NODE_VERSION: 14.16.1 + NODE_VERSION: 14.17.0 PARSE_SERVER_TEST_TIMEOUT: 20000 jobs: check-ci: @@ -32,45 +32,45 @@ jobs: - name: CI Self-Check run: npm run ci:check check-lint: - name: Lint - timeout-minutes: 15 - runs-on: ubuntu-18.04 - steps: - - uses: actions/checkout@v2 - - name: Use Node.js ${{ matrix.NODE_VERSION }} - uses: actions/setup-node@v1 - with: - node-version: ${{ matrix.node-version }} - - name: Cache Node.js modules - uses: actions/cache@v2 - with: - path: ~/.npm - key: ${{ runner.os }}-node-${{ matrix.NODE_VERSION }}-${{ hashFiles('**/package-lock.json') }} - restore-keys: | - ${{ runner.os }}-node-${{ matrix.NODE_VERSION }}- - - name: Install dependencies - run: npm ci - - run: npm run lint + name: Lint + timeout-minutes: 15 + runs-on: ubuntu-18.04 + steps: + - uses: actions/checkout@v2 + - name: Use Node.js ${{ matrix.NODE_VERSION }} + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + - name: Cache Node.js modules + uses: actions/cache@v2 + with: + path: ~/.npm + key: ${{ runner.os }}-node-${{ matrix.NODE_VERSION }}-${{ hashFiles('**/package-lock.json') }} + restore-keys: | + ${{ runner.os }}-node-${{ matrix.NODE_VERSION }}- + - name: Install dependencies + run: npm ci + - run: npm run lint check-circular: - name: Circular Dependencies - timeout-minutes: 5 - runs-on: ubuntu-18.04 - steps: - - uses: actions/checkout@v2 - - name: Use Node.js ${{ matrix.NODE_VERSION }} - uses: actions/setup-node@v1 - with: - node-version: ${{ matrix.node-version }} - - name: Cache Node.js modules - uses: actions/cache@v2 - with: - path: ~/.npm - key: ${{ runner.os }}-node-${{ matrix.NODE_VERSION }}-${{ hashFiles('**/package-lock.json') }} - restore-keys: | - ${{ runner.os }}-node-${{ 
matrix.NODE_VERSION }}- - - name: Install dependencies - run: npm ci - - run: npm run madge:circular + name: Circular Dependencies + timeout-minutes: 5 + runs-on: ubuntu-18.04 + steps: + - uses: actions/checkout@v2 + - name: Use Node.js ${{ matrix.NODE_VERSION }} + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + - name: Cache Node.js modules + uses: actions/cache@v2 + with: + path: ~/.npm + key: ${{ runner.os }}-node-${{ matrix.NODE_VERSION }}-${{ hashFiles('**/package-lock.json') }} + restore-keys: | + ${{ runner.os }}-node-${{ matrix.NODE_VERSION }}- + - name: Install dependencies + run: npm ci + - run: npm run madge:circular check-docker: name: Docker Build timeout-minutes: 5 @@ -94,38 +94,38 @@ jobs: matrix: include: - name: MongoDB 4.4, ReplicaSet, WiredTiger - MONGODB_VERSION: 4.4.4 + MONGODB_VERSION: 4.4.6 MONGODB_TOPOLOGY: replicaset MONGODB_STORAGE_ENGINE: wiredTiger - NODE_VERSION: 14.16.1 + NODE_VERSION: 14.17.0 - name: MongoDB 4.2, ReplicaSet, WiredTiger - MONGODB_VERSION: 4.2.13 + MONGODB_VERSION: 4.2.14 MONGODB_TOPOLOGY: replicaset MONGODB_STORAGE_ENGINE: wiredTiger - NODE_VERSION: 14.16.1 + NODE_VERSION: 14.17.0 - name: MongoDB 4.0, ReplicaSet, WiredTiger MONGODB_VERSION: 4.0.23 MONGODB_TOPOLOGY: replicaset MONGODB_STORAGE_ENGINE: wiredTiger - NODE_VERSION: 14.16.1 + NODE_VERSION: 14.17.0 - name: MongoDB 4.0, Standalone, MMAPv1 MONGODB_VERSION: 4.0.23 MONGODB_TOPOLOGY: standalone MONGODB_STORAGE_ENGINE: mmapv1 - NODE_VERSION: 14.16.1 + NODE_VERSION: 14.17.0 - name: Redis Cache PARSE_SERVER_TEST_CACHE: redis - MONGODB_VERSION: 4.4.4 + MONGODB_VERSION: 4.4.6 MONGODB_TOPOLOGY: standalone MONGODB_STORAGE_ENGINE: wiredTiger - NODE_VERSION: 14.16.1 + NODE_VERSION: 14.17.0 - name: Node 12 - MONGODB_VERSION: 4.4.4 + MONGODB_VERSION: 4.4.6 MONGODB_TOPOLOGY: standalone MONGODB_STORAGE_ENGINE: wiredTiger NODE_VERSION: 12.22.1 - name: Node 15 - MONGODB_VERSION: 4.4.4 + MONGODB_VERSION: 4.4.6 MONGODB_TOPOLOGY: standalone 
MONGODB_STORAGE_ENGINE: wiredTiger NODE_VERSION: 15.14.0 @@ -137,8 +137,8 @@ jobs: redis: image: redis ports: - - 6379:6379 - env: + - 6379:6379 + env: MONGODB_VERSION: ${{ matrix.MONGODB_VERSION }} MONGODB_TOPOLOGY: ${{ matrix.MONGODB_TOPOLOGY }} MONGODB_STORAGE_ENGINE: ${{ matrix.MONGODB_STORAGE_ENGINE }} diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 0000000000..2e1fa2d52e --- /dev/null +++ b/.prettierignore @@ -0,0 +1 @@ +*.md \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 1b6e132ee3..969ca4db0c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -132,7 +132,8 @@ ___ - Add NPM package-lock version check to CI (Manuel Trezza) [#7333](https://github.com/parse-community/parse-server/pull/7333) - Fix incorrect LiveQuery events triggered for multiple subscriptions on the same class with different events [#7341](https://github.com/parse-community/parse-server/pull/7341) ___ -## 4.5.0 + +### 4.5.0 [Full Changelog](https://github.com/parse-community/parse-server/compare/4.4.0...4.5.0) ### Breaking Changes - FIX: Consistent casing for afterLiveQueryEvent. The afterLiveQueryEvent was introduced in 4.4.0 with inconsistent casing for the event names, which was fixed in 4.5.0. [#7023](https://github.com/parse-community/parse-server/pull/7023). Thanks to [dblythy](https://github.com/dblythy). 
diff --git a/package.json b/package.json index 5395916522..31cc06847e 100644 --- a/package.json +++ b/package.json @@ -23,6 +23,7 @@ "@graphql-tools/links": "6.2.5", "@graphql-tools/stitch": "6.2.4", "@graphql-tools/utils": "6.2.4", + "@node-rs/bcrypt": "1.1.0", "@parse/fs-files-adapter": "1.2.0", "@parse/push-adapter": "3.4.0", "apollo-server-express": "2.23.0", @@ -91,8 +92,8 @@ "jsdoc-babel": "0.5.0", "lint-staged": "10.2.3", "madge": "4.0.2", - "mock-mail-adapter": "file:spec/dependencies/mock-mail-adapter", "mock-files-adapter": "file:spec/dependencies/mock-files-adapter", + "mock-mail-adapter": "file:spec/dependencies/mock-mail-adapter", "mongodb-runner": "4.8.1", "mongodb-version-list": "1.0.0", "node-fetch": "2.6.1", @@ -151,8 +152,7 @@ }, "lint-staged": { "{src,spec}/{**/*,*}.js": [ - "prettier --write", - "eslint --fix --cache", + "npm run prettier", "git add" ] } diff --git a/spec/DefinedSchemas.spec.js b/spec/DefinedSchemas.spec.js new file mode 100644 index 0000000000..82a4b551e8 --- /dev/null +++ b/spec/DefinedSchemas.spec.js @@ -0,0 +1,558 @@ +const { DefinedSchemas } = require('../lib/DefinedSchemas'); +const Config = require('../lib/Config'); + +const cleanUpIndexes = schema => { + if (schema.indexes) { + delete schema.indexes._id_; + if (!Object.keys(schema.indexes).length) { + delete schema.indexes; + } + } +}; + +describe('DefinedSchemas', () => { + let config; + beforeEach(async () => { + config = Config.get('test'); + await config.database.adapter.deleteAllClasses(); + }); + afterAll(async () => { + await config.database.adapter.deleteAllClasses(); + }); + + describe('Fields', () => { + it('should keep default fields if not provided', async () => { + const server = await reconfigureServer(); + // Will perform create + await new DefinedSchemas([{ className: 'Test' }], server.config).execute(); + let schema = await new Parse.Schema('Test').get(); + const expectedFields = { + objectId: { type: 'String' }, + createdAt: { type: 'Date' }, + 
updatedAt: { type: 'Date' }, + ACL: { type: 'ACL' }, + }; + expect(schema.fields).toEqual(expectedFields); + + await server.config.schemaCache.clear(); + // Will perform update + await new DefinedSchemas([{ className: 'Test' }], server.config).execute(); + schema = await new Parse.Schema('Test').get(); + expect(schema.fields).toEqual(expectedFields); + }); + it('should protect default fields', async () => { + const server = await reconfigureServer(); + + const schemas = [ + { + className: '_User', + fields: { + email: 'Object', + }, + }, + { + className: '_Role', + fields: { + users: 'Object', + }, + }, + { + className: '_Installation', + fields: { + installationId: 'Object', + }, + }, + { + className: 'Test', + fields: { + createdAt: { type: 'Object' }, + objectId: { type: 'Number' }, + updatedAt: { type: 'String' }, + ACL: { type: 'String' }, + }, + }, + ]; + + const expectedFields = { + objectId: { type: 'String' }, + createdAt: { type: 'Date' }, + updatedAt: { type: 'Date' }, + ACL: { type: 'ACL' }, + }; + + const expectedUserFields = { + objectId: { type: 'String' }, + createdAt: { type: 'Date' }, + updatedAt: { type: 'Date' }, + ACL: { type: 'ACL' }, + username: { type: 'String' }, + password: { type: 'String' }, + email: { type: 'String' }, + emailVerified: { type: 'Boolean' }, + authData: { type: 'Object' }, + }; + + const expectedRoleFields = { + objectId: { type: 'String' }, + createdAt: { type: 'Date' }, + updatedAt: { type: 'Date' }, + ACL: { type: 'ACL' }, + name: { type: 'String' }, + users: { type: 'Relation', targetClass: '_User' }, + roles: { type: 'Relation', targetClass: '_Role' }, + }; + + const expectedInstallationFields = { + objectId: { type: 'String' }, + createdAt: { type: 'Date' }, + updatedAt: { type: 'Date' }, + ACL: { type: 'ACL' }, + installationId: { type: 'String' }, + deviceToken: { type: 'String' }, + channels: { type: 'Array' }, + deviceType: { type: 'String' }, + pushType: { type: 'String' }, + GCMSenderId: { type: 'String' }, + 
timeZone: { type: 'String' }, + localeIdentifier: { type: 'String' }, + badge: { type: 'Number' }, + appVersion: { type: 'String' }, + appName: { type: 'String' }, + appIdentifier: { type: 'String' }, + parseVersion: { type: 'String' }, + }; + + // Perform create + await new DefinedSchemas(schemas, server.config).execute(); + let schema = await new Parse.Schema('Test').get(); + expect(schema.fields).toEqual(expectedFields); + + let userSchema = await new Parse.Schema('_User').get(); + expect(userSchema.fields).toEqual(expectedUserFields); + + let roleSchema = await new Parse.Schema('_Role').get(); + expect(roleSchema.fields).toEqual(expectedRoleFields); + + let installationSchema = await new Parse.Schema('_Installation').get(); + expect(installationSchema.fields).toEqual(expectedInstallationFields); + + await server.config.schemaCache.clear(); + // Perform update + await new DefinedSchemas(schemas, server.config).execute(); + schema = await new Parse.Schema('Test').get(); + expect(schema.fields).toEqual(expectedFields); + + userSchema = await new Parse.Schema('_User').get(); + expect(userSchema.fields).toEqual(expectedUserFields); + + roleSchema = await new Parse.Schema('_Role').get(); + expect(roleSchema.fields).toEqual(expectedRoleFields); + + installationSchema = await new Parse.Schema('_Installation').get(); + expect(installationSchema.fields).toEqual(expectedInstallationFields); + }); + it('should create new fields', async () => { + const server = await reconfigureServer(); + const fields = { + objectId: { type: 'String' }, + createdAt: { type: 'Date' }, + updatedAt: { type: 'Date' }, + ACL: { type: 'ACL' }, + aString: { type: 'String' }, + aStringWithDefault: { type: 'String', defaultValue: 'Test' }, + aStringWithRequired: { type: 'String', required: true }, + aStringWithRequiredAndDefault: { type: 'String', required: true, defaultValue: 'Test' }, + aBoolean: { type: 'Boolean' }, + aFile: { type: 'File' }, + aNumber: { type: 'Number' }, + aRelation: { type: 
'Relation', targetClass: '_User' }, + aPointer: { type: 'Pointer', targetClass: '_Role' }, + aDate: { type: 'Date' }, + aGeoPoint: { type: 'GeoPoint' }, + aPolygon: { type: 'Polygon' }, + aArray: { type: 'Array' }, + aObject: { type: 'Object' }, + }; + const schemas = [ + { + className: 'Test', + fields, + }, + ]; + + // Create + await new DefinedSchemas(schemas, server.config).execute(); + let schema = await new Parse.Schema('Test').get(); + expect(schema.fields).toEqual(fields); + + fields.anotherObject = { type: 'Object' }; + // Update + await new DefinedSchemas(schemas, server.config).execute(); + schema = await new Parse.Schema('Test').get(); + expect(schema.fields).toEqual(fields); + }); + it('should delete removed fields', async () => { + const server = await reconfigureServer(); + + await new DefinedSchemas( + [{ className: 'Test', fields: { aField: { type: 'String' } } }], + server.config + ).execute(); + + let schema = await new Parse.Schema('Test').get(); + expect(schema.fields.aField).toBeDefined(); + + await new DefinedSchemas([{ className: 'Test' }], server.config).execute(); + + schema = await new Parse.Schema('Test').get(); + expect(schema.fields).toEqual({ + objectId: { type: 'String' }, + createdAt: { type: 'Date' }, + updatedAt: { type: 'Date' }, + ACL: { type: 'ACL' }, + }); + }); + it('should re create fields with changed type', async () => { + const server = await reconfigureServer(); + + await new DefinedSchemas( + [{ className: 'Test', fields: { aField: { type: 'String' } } }], + server.config + ).execute(); + + let schema = await new Parse.Schema('Test').get(); + expect(schema.fields.aField).toEqual({ type: 'String' }); + + const object = new Parse.Object('Test'); + await object.save({ aField: 'Hello' }, { useMasterKey: true }); + + await new DefinedSchemas( + [{ className: 'Test', fields: { aField: { type: 'Number' } } }], + server.config + ).execute(); + + schema = await new Parse.Schema('Test').get(); + 
expect(schema.fields.aField).toEqual({ type: 'Number' }); + + await object.fetch({ useMasterKey: true }); + expect(object.get('aField')).toBeUndefined(); + }); + it('should just update classic fields with changed params', async () => { + const server = await reconfigureServer(); + + await new DefinedSchemas( + [{ className: 'Test', fields: { aField: { type: 'String' } } }], + server.config + ).execute(); + + let schema = await new Parse.Schema('Test').get(); + expect(schema.fields.aField).toEqual({ type: 'String' }); + + const object = new Parse.Object('Test'); + await object.save({ aField: 'Hello' }, { useMasterKey: true }); + + await new DefinedSchemas( + [{ className: 'Test', fields: { aField: { type: 'String', required: true } } }], + server.config + ).execute(); + + schema = await new Parse.Schema('Test').get(); + expect(schema.fields.aField).toEqual({ type: 'String', required: true }); + + await object.fetch({ useMasterKey: true }); + expect(object.get('aField')).toEqual('Hello'); + }); + }); + + describe('Indexes', () => { + it('should create new indexes', async () => { + const server = await reconfigureServer(); + + const indexes = { complex: { createdAt: 1, updatedAt: 1 } }; + + const schemas = [{ className: 'Test', fields: { aField: { type: 'String' } }, indexes }]; + await new DefinedSchemas(schemas, server.config).execute(); + + let schema = await new Parse.Schema('Test').get(); + cleanUpIndexes(schema); + expect(schema.indexes).toEqual(indexes); + + indexes.complex2 = { createdAt: 1, aField: 1 }; + await new DefinedSchemas(schemas, server.config).execute(); + schema = await new Parse.Schema('Test').get(); + cleanUpIndexes(schema); + expect(schema.indexes).toEqual(indexes); + }); + it('should re create changed indexes', async () => { + const server = await reconfigureServer(); + + let indexes = { complex: { createdAt: 1, updatedAt: 1 } }; + + let schemas = [{ className: 'Test', indexes }]; + await new DefinedSchemas(schemas, server.config).execute(); + 
+ indexes = { complex: { createdAt: 1 } }; + schemas = [{ className: 'Test', indexes }]; + + // Change indexes + await new DefinedSchemas(schemas, server.config).execute(); + let schema = await new Parse.Schema('Test').get(); + cleanUpIndexes(schema); + expect(schema.indexes).toEqual(indexes); + + // Update + await new DefinedSchemas(schemas, server.config).execute(); + schema = await new Parse.Schema('Test').get(); + cleanUpIndexes(schema); + expect(schema.indexes).toEqual(indexes); + }); + it('should delete removed indexes', async () => { + const server = await reconfigureServer(); + + let indexes = { complex: { createdAt: 1, updatedAt: 1 } }; + + let schemas = [{ className: 'Test', indexes }]; + await new DefinedSchemas(schemas, server.config).execute(); + + indexes = {}; + schemas = [{ className: 'Test', indexes }]; + // Change indexes + await new DefinedSchemas(schemas, server.config).execute(); + let schema = await new Parse.Schema('Test').get(); + cleanUpIndexes(schema); + expect(schema.indexes).toBeUndefined(); + + // Update + await new DefinedSchemas(schemas, server.config).execute(); + schema = await new Parse.Schema('Test').get(); + cleanUpIndexes(schema); + expect(schema.indexes).toBeUndefined(); + }); + it('should keep protected indexes', async () => { + const server = await reconfigureServer(); + + const expectedIndexes = { + username_1: { username: 1 }, + case_insensitive_username: { username: 1 }, + email_1: { email: 1 }, + case_insensitive_email: { email: 1 }, + }; + const schemas = [ + { + className: '_User', + indexes: { + case_insensitive_username: { password: true }, + case_insensitive_email: { password: true }, + }, + }, + { className: 'Test' }, + ]; + // Create + await new DefinedSchemas(schemas, server.config).execute(); + let userSchema = await new Parse.Schema('_User').get(); + let testSchema = await new Parse.Schema('Test').get(); + cleanUpIndexes(userSchema); + cleanUpIndexes(testSchema); + expect(testSchema.indexes).toBeUndefined(); + 
expect(userSchema.indexes).toEqual(expectedIndexes); + + // Update + await new DefinedSchemas(schemas, server.config).execute(); + userSchema = await new Parse.Schema('_User').get(); + testSchema = await new Parse.Schema('Test').get(); + cleanUpIndexes(userSchema); + cleanUpIndexes(testSchema); + expect(testSchema.indexes).toBeUndefined(); + expect(userSchema.indexes).toEqual(expectedIndexes); + }); + }); + + describe('ClassLevelPermissions', () => { + it('should use default CLP', async () => { + const server = await reconfigureServer(); + const schemas = [{ className: 'Test' }]; + await new DefinedSchemas(schemas, server.config).execute(); + + const expectedTestCLP = { + find: { '*': true }, + count: { '*': true }, + get: { '*': true }, + create: { '*': true }, + update: { '*': true }, + delete: { '*': true }, + addField: {}, + protectedFields: {}, + }; + let testSchema = await new Parse.Schema('Test').get(); + expect(testSchema.classLevelPermissions).toEqual(expectedTestCLP); + + await new DefinedSchemas(schemas, server.config).execute(); + testSchema = await new Parse.Schema('Test').get(); + expect(testSchema.classLevelPermissions).toEqual(expectedTestCLP); + }); + it('should save CLP', async () => { + const server = await reconfigureServer(); + + const expectedTestCLP = { + find: {}, + count: { requiresAuthentication: true }, + get: { 'role:Admin': true }, + create: { 'role:ARole': true, requiresAuthentication: true }, + update: { requiresAuthentication: true }, + delete: { requiresAuthentication: true }, + addField: {}, + protectedFields: { '*': ['aField'], 'role:Admin': ['anotherField'] }, + }; + const schemas = [ + { + className: 'Test', + fields: { aField: { type: 'String' }, anotherField: { type: 'Object' } }, + classLevelPermissions: expectedTestCLP, + }, + ]; + await new DefinedSchemas(schemas, server.config).execute(); + + let testSchema = await new Parse.Schema('Test').get(); + expect(testSchema.classLevelPermissions).toEqual(expectedTestCLP); + + 
expectedTestCLP.update = {}; + expectedTestCLP.create = { requiresAuthentication: true }; + + await new DefinedSchemas(schemas, server.config).execute(); + testSchema = await new Parse.Schema('Test').get(); + expect(testSchema.classLevelPermissions).toEqual(expectedTestCLP); + }); + it('should force addField to empty', async () => { + const server = await reconfigureServer(); + const schemas = [{ className: 'Test', classLevelPermissions: { addField: { '*': true } } }]; + await new DefinedSchemas(schemas, server.config).execute(); + + const expectedTestCLP = { + find: { '*': true }, + count: { '*': true }, + get: { '*': true }, + create: { '*': true }, + update: { '*': true }, + delete: { '*': true }, + addField: {}, + protectedFields: {}, + }; + + let testSchema = await new Parse.Schema('Test').get(); + expect(testSchema.classLevelPermissions).toEqual(expectedTestCLP); + + await new DefinedSchemas(schemas, server.config).execute(); + testSchema = await new Parse.Schema('Test').get(); + expect(testSchema.classLevelPermissions).toEqual(expectedTestCLP); + }); + }); + + it('should not delete automatically classes', async () => { + await reconfigureServer({ schemas: [{ className: '_User' }, { className: 'Test' }] }); + + await reconfigureServer({ schemas: [{ className: '_User' }] }); + + const schema = await new Parse.Schema('Test').get(); + expect(schema.className).toEqual('Test'); + }); + + it('should disable class PUT/POST endpoint when schemas provided to avoid dual source of truth', async () => { + await reconfigureServer({ schemas: [{ className: '_User' }, { className: 'Test' }] }); + await reconfigureServer({ schemas: [{ className: '_User' }] }); + + const schema = await new Parse.Schema('Test').get(); + expect(schema.className).toEqual('Test'); + + const schemas = await Parse.Schema.all(); + expect(schemas.length).toEqual(4); + + try { + await new Parse.Schema('Test').save(); + } catch (e) { + expect(e.message).toContain('cannot perform this operation when 
schemas options is used.'); + } + + try { + await new Parse.Schema('_User').update(); + } catch (e) { + expect(e.message).toContain('cannot perform this operation when schemas options is used.'); + } + }); + it('should only enable delete class endpoint since', async () => { + await reconfigureServer({ schemas: [{ className: '_User' }, { className: 'Test' }] }); + await reconfigureServer({ schemas: [{ className: '_User' }] }); + + let schemas = await Parse.Schema.all(); + expect(schemas.length).toEqual(4); + + await new Parse.Schema('_User').delete(); + schemas = await Parse.Schema.all(); + expect(schemas.length).toEqual(3); + }); + it('should run beforeSchemasMigration before execution of DefinedSchemas', async () => { + let before = false; + const server = await reconfigureServer({ + schemas: [{ className: '_User' }, { className: 'Test' }], + beforeSchemasMigration: async () => { + expect(before).toEqual(false); + before = true; + }, + }); + before = true; + expect(before).toEqual(true); + expect(server).toBeDefined(); + }); + it('should use logger in case of error after 3 retries', async () => { + const server = await reconfigureServer({ schemas: [{ className: '_User' }] }); + const error = new Error('A test error'); + const logger = require('../lib/logger').logger; + spyOn(DefinedSchemas.prototype, 'wait').and.resolveTo(); + spyOn(logger, 'error').and.callThrough(); + spyOn(Parse.Schema, 'all').and.callFake(async () => { + throw error; + }); + + await new DefinedSchemas( + [{ className: 'Test', fields: { aField: { type: 'String' } } }], + server.config + ).execute(); + + expect(logger.error).toHaveBeenCalledWith(error); + expect(DefinedSchemas.prototype.wait).toHaveBeenCalledTimes(3); + const calls = DefinedSchemas.prototype.wait.calls.all(); + expect(calls[0].args[0]).toEqual(1000); + expect(calls[1].args[0]).toEqual(2000); + expect(calls[2].args[0]).toEqual(3000); + }); + it('should perform migration in parallel without failing', async () => { + const server = 
await reconfigureServer(); + const logger = require('../lib/logger').logger; + spyOn(logger, 'error').and.callThrough(); + const schema = { + className: 'Test', + fields: { aField: { type: 'String' } }, + indexes: { aField: { aField: 1 } }, + classLevelPermissions: { + create: { requiresAuthentication: true }, + }, + }; + + // Simulate parallel deployment + await Promise.all([ + new DefinedSchemas([schema], server.config).execute(), + new DefinedSchemas([schema], server.config).execute(), + new DefinedSchemas([schema], server.config).execute(), + new DefinedSchemas([schema], server.config).execute(), + new DefinedSchemas([schema], server.config).execute(), + ]); + + const testSchema = (await Parse.Schema.all()).find( + ({ className }) => className === schema.className + ); + + expect(testSchema.indexes.aField).toEqual({ aField: 1 }); + expect(testSchema.fields.aField).toEqual({ type: 'String' }); + expect(testSchema.classLevelPermissions.create).toEqual({ requiresAuthentication: true }); + expect(logger.error).toHaveBeenCalledTimes(0); + }); +}); diff --git a/spec/SecurityCheck.spec.js b/spec/SecurityCheck.spec.js index 5f79ca2bbd..647ed909c0 100644 --- a/spec/SecurityCheck.spec.js +++ b/spec/SecurityCheck.spec.js @@ -23,14 +23,20 @@ describe('Security Check', () => { await reconfigureServer(config); } - const securityRequest = (options) => request(Object.assign({ - url: securityUrl, - headers: { - 'X-Parse-Master-Key': Parse.masterKey, - 'X-Parse-Application-Id': Parse.applicationId, - }, - followRedirects: false, - }, options)).catch(e => e); + const securityRequest = options => + request( + Object.assign( + { + url: securityUrl, + headers: { + 'X-Parse-Master-Key': Parse.masterKey, + 'X-Parse-Application-Id': Parse.applicationId, + }, + followRedirects: false, + }, + options + ) + ).catch(e => e); beforeEach(async () => { groupName = 'Example Group Name'; @@ -41,7 +47,7 @@ describe('Security Check', () => { solution: 'TestSolution', check: () => { return true; - 
} + }, }); checkFail = new Check({ group: 'TestGroup', @@ -50,14 +56,14 @@ describe('Security Check', () => { solution: 'TestSolution', check: () => { throw 'Fail'; - } + }, }); Group = class Group extends CheckGroup { setName() { return groupName; } setChecks() { - return [ checkSuccess, checkFail ]; + return [checkSuccess, checkFail]; } }; config = { @@ -154,7 +160,7 @@ describe('Security Check', () => { title: 'string', warning: 'string', solution: 'string', - check: () => {} + check: () => {}, }, { group: 'string', @@ -203,7 +209,9 @@ describe('Security Check', () => { title: 'string', warning: 'string', solution: 'string', - check: () => { throw 'error' }, + check: () => { + throw 'error'; + }, }); expect(check._checkState == CheckState.none); check.run(); @@ -277,7 +285,7 @@ describe('Security Check', () => { }); it('runs all checks of all groups', async () => { - const checkGroups = [ Group, Group ]; + const checkGroups = [Group, Group]; const runner = new CheckRunner({ checkGroups }); const report = await runner.run(); expect(report.report.groups[0].checks[0].state).toBe(CheckState.success); @@ -287,27 +295,27 @@ describe('Security Check', () => { }); it('reports correct default syntax version 1.0.0', async () => { - const checkGroups = [ Group ]; + const checkGroups = [Group]; const runner = new CheckRunner({ checkGroups, enableCheckLog: true }); const report = await runner.run(); expect(report).toEqual({ report: { - version: "1.0.0", - state: "fail", + version: '1.0.0', + state: 'fail', groups: [ { - name: "Example Group Name", - state: "fail", + name: 'Example Group Name', + state: 'fail', checks: [ { - title: "TestTitleSuccess", - state: "success", + title: 'TestTitleSuccess', + state: 'success', }, { - title: "TestTitleFail", - state: "fail", - warning: "TestWarning", - solution: "TestSolution", + title: 'TestTitleFail', + state: 'fail', + warning: 'TestWarning', + solution: 'TestSolution', }, ], }, @@ -319,7 +327,7 @@ describe('Security Check', () => 
{ it('logs report', async () => { const logger = require('../lib/logger').logger; const logSpy = spyOn(logger, 'warn').and.callThrough(); - const checkGroups = [ Group ]; + const checkGroups = [Group]; const runner = new CheckRunner({ checkGroups, enableCheckLog: true }); const report = await runner.run(); const titles = report.report.groups.flatMap(group => group.checks.map(check => check.title)); diff --git a/spec/schemas.spec.js b/spec/schemas.spec.js index 15dc9e111e..5180b36ca5 100644 --- a/spec/schemas.spec.js +++ b/spec/schemas.spec.js @@ -759,7 +759,7 @@ describe('schemas', () => { }); }); - it('refuses to put to existing fields, even if it would not be a change', done => { + it('refuses to put to existing fields with different type, even if it would not be a change', done => { const obj = hasAllPODobject(); obj.save().then(() => { request({ @@ -769,7 +769,7 @@ describe('schemas', () => { json: true, body: { fields: { - aString: { type: 'String' }, + aString: { type: 'Number' }, }, }, }).then(fail, response => { diff --git a/src/Adapters/Storage/Mongo/MongoSchemaCollection.js b/src/Adapters/Storage/Mongo/MongoSchemaCollection.js index d2f89d75d4..b2da7aeaa1 100644 --- a/src/Adapters/Storage/Mongo/MongoSchemaCollection.js +++ b/src/Adapters/Storage/Mongo/MongoSchemaCollection.js @@ -212,7 +212,7 @@ class MongoSchemaCollection { .then( schema => { // If a field with this name already exists, it will be handled elsewhere. - if (schema.fields[fieldName] != undefined) { + if (schema.fields[fieldName] !== undefined) { return; } // The schema exists. Check for existing GeoPoints. 
@@ -274,6 +274,20 @@ class MongoSchemaCollection { } }); } + + async updateFieldOptions(className: string, fieldName: string, fieldType: string) { + // eslint-disable-next-line no-unused-vars + const { type, targetClass, ...fieldOptions } = fieldType; + await this.upsertSchema( + className, + { [fieldName]: { $exists: true } }, + { + $set: { + [`_metadata.fields_options.${fieldName}`]: fieldOptions, + }, + } + ); + } } // Exported for testing reasons and because we haven't moved all mongo schema format diff --git a/src/Adapters/Storage/Mongo/MongoStorageAdapter.js b/src/Adapters/Storage/Mongo/MongoStorageAdapter.js index 2b5eaa0f09..da2b7b6c64 100644 --- a/src/Adapters/Storage/Mongo/MongoStorageAdapter.js +++ b/src/Adapters/Storage/Mongo/MongoStorageAdapter.js @@ -345,6 +345,11 @@ export class MongoStorageAdapter implements StorageAdapter { .catch(err => this.handleError(err)); } + async updateFieldOptions(className: string, fieldName: string, type: any) { + const schemaCollection = await this._schemaCollection(); + await schemaCollection.updateFieldOptions(className, fieldName, type); + } + addFieldIfNotExists(className: string, fieldName: string, type: any): Promise { return this._schemaCollection() .then(schemaCollection => schemaCollection.addFieldIfNotExists(className, fieldName, type)) diff --git a/src/Adapters/Storage/Postgres/PostgresClient.js b/src/Adapters/Storage/Postgres/PostgresClient.js index b436945249..16a9564c29 100644 --- a/src/Adapters/Storage/Postgres/PostgresClient.js +++ b/src/Adapters/Storage/Postgres/PostgresClient.js @@ -20,7 +20,7 @@ export function createClient(uri, databaseOptions) { if (process.env.PARSE_SERVER_LOG_LEVEL === 'debug') { const monitor = require('pg-monitor'); - if(monitor.isAttached()) { + if (monitor.isAttached()) { monitor.detach(); } monitor.attach(initOptions); diff --git a/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js b/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js index 5d0e211ab4..3d61e64617 
100644 --- a/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js +++ b/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js @@ -1119,6 +1119,16 @@ export class PostgresStorageAdapter implements StorageAdapter { this._notifySchemaChange(); } + async updateFieldOptions(className: string, fieldName: string, type: any) { + await this._client.tx('update-schema-field-options', async t => { + const path = `{fields,${fieldName}}`; + await t.none( + 'UPDATE "_SCHEMA" SET "schema"=jsonb_set("schema", $, $) WHERE "className"=$', + { path, type, className } + ); + }); + } + // Drops a collection. Resolves with true if it was a Parse Schema (eg. _User, Custom, etc.) // and resolves with false if it wasn't (eg. a join table). Rejects if deletion was impossible. async deleteClass(className: string) { diff --git a/src/Adapters/Storage/StorageAdapter.js b/src/Adapters/Storage/StorageAdapter.js index d46265f64f..6e4573b748 100644 --- a/src/Adapters/Storage/StorageAdapter.js +++ b/src/Adapters/Storage/StorageAdapter.js @@ -35,6 +35,7 @@ export interface StorageAdapter { setClassLevelPermissions(className: string, clps: any): Promise; createClass(className: string, schema: SchemaType): Promise; addFieldIfNotExists(className: string, fieldName: string, type: any): Promise; + updateFieldOptions(className: string, fieldName: string, type: any): Promise; deleteClass(className: string): Promise; deleteAllClasses(fast: boolean): Promise; deleteFields(className: string, schema: SchemaType, fieldNames: Array): Promise; diff --git a/src/Controllers/SchemaController.js b/src/Controllers/SchemaController.js index 90f32b0b16..642b3bcd14 100644 --- a/src/Controllers/SchemaController.js +++ b/src/Controllers/SchemaController.js @@ -16,6 +16,8 @@ // TODO: hide all schema logic inside the database adapter. 
// @flow-disable-next const Parse = require('parse/node').Parse; +// @flow-disable-next +import _ from 'lodash'; import { StorageAdapter } from '../Adapters/Storage/StorageAdapter'; import SchemaCache from '../Adapters/Cache/SchemaCache'; import DatabaseController from './DatabaseController'; @@ -832,7 +834,11 @@ export default class SchemaController { const existingFields = schema.fields; Object.keys(submittedFields).forEach(name => { const field = submittedFields[name]; - if (existingFields[name] && field.__op !== 'Delete') { + if ( + existingFields[name] && + existingFields[name].type !== field.type && + field.__op !== 'Delete' + ) { throw new Parse.Error(255, `Field ${name} exists, cannot update.`); } if (!existingFields[name] && field.__op === 'Delete') { @@ -1058,7 +1064,12 @@ // object if the provided className-fieldName-type tuple is valid. // The className must already be validated. // If 'freeze' is true, refuse to update the schema for this field. - enforceFieldExists(className: string, fieldName: string, type: string | SchemaField) { + enforceFieldExists( + className: string, + fieldName: string, + type: string | SchemaField, + isValidation?: boolean + ) { if (fieldName.indexOf('.') > 0) { // subdocument key (x.y) => ok if x is of type 'object' fieldName = fieldName.split('.')[0]; @@ -1102,7 +1113,15 @@ )} but got ${typeToString(type)}` ); } - return undefined; + // If type options do not change + // we can safely return + if (isValidation || _.isEqual(expectedType, type)) { + return undefined; + } else { + // Field options may have been changed + // ensure to have an up to date schema field + return this._dbAdapter.updateFieldOptions(className, fieldName, type); + } } return this._dbAdapter @@ -1237,7 +1256,7 @@ // Every object has ACL implicitly. 
continue; } - promises.push(schema.enforceFieldExists(className, fieldName, expected)); + promises.push(schema.enforceFieldExists(className, fieldName, expected, true)); } const results = await Promise.all(promises); const enforceFields = results.filter(result => !!result); diff --git a/src/DefinedSchemas.js b/src/DefinedSchemas.js new file mode 100644 index 0000000000..0c4475c80f --- /dev/null +++ b/src/DefinedSchemas.js @@ -0,0 +1,308 @@ +import Parse from 'parse/node'; +import { logger } from './logger'; +import Config from './Config'; +import { internalCreateSchema, internalUpdateSchema } from './Routers/SchemasRouter'; +import { defaultColumns } from './Controllers/SchemaController'; + +export class DefinedSchemas { + constructor(localSchemas, config) { + this.config = Config.get(config.appId); + this.localSchemas = localSchemas; + this.retries = 0; + this.maxRetries = 3; + } + + // Simulate save like the SDK + // We cannot use SDK since routes are disabled + async saveSchemaToDB(schema) { + const payload = { + className: schema.className, + fields: schema._fields, + indexes: schema._indexes, + classLevelPermissions: schema._clp, + }; + await internalCreateSchema(schema.className, payload, this.config); + this.resetSchemaOps(schema); + } + + async resetSchemaOps(schema) { + // Reset ops like SDK + schema._fields = {}; + schema._indexes = {}; + } + + // Simulate update like the SDK + // We cannot use SDK since routes are disabled + async updateSchemaToDB(schema) { + const payload = { + className: schema.className, + fields: schema._fields, + indexes: schema._indexes, + classLevelPermissions: schema._clp, + }; + await internalUpdateSchema(schema.className, payload, this.config); + this.resetSchemaOps(schema); + } + + async execute() { + let timeout; + try { + // Set up a time out in production + // if we fail to get schema + // pm2 or K8s and many other process managers will try to restart the process + // after the exit + timeout = setTimeout(() => { + if 
(process.env.NODE_ENV === 'production') process.exit(1); + }, 20000); + // Hack to force session schema to be created + await this.createDeleteSession(); + this.allCloudSchemas = await Parse.Schema.all(); + clearTimeout(timeout); + await Promise.all(this.localSchemas.map(async localSchema => this.saveOrUpdate(localSchema))); + await this.enforceCLPForNonProvidedClass(); + } catch (e) { + if (timeout) clearTimeout(timeout); + if (this.retries < this.maxRetries) { + this.retries++; + // first retry 1sec, 2sec, 3sec total 6sec retry sequence + // retry will only happen in case of deploying multi parse server instance + // at the same time + // modern systems like k8 avoid this by doing rolling updates + await this.wait(1000 * this.retries); + await this.execute(); + } else { + logger.error(e); + if (process.env.NODE_ENV === 'production') process.exit(1); + } + } + } + + // Required for testing purpose + async wait(time) { + await new Promise(resolve => setTimeout(resolve, time)); + } + + async enforceCLPForNonProvidedClass() { + const nonProvidedClasses = this.allCloudSchemas.filter( + cloudSchema => + !this.localSchemas.some(localSchema => localSchema.className === cloudSchema.className) + ); + await Promise.all( + nonProvidedClasses.map(async schema => { + const parseSchema = new Parse.Schema(schema.className); + this.handleCLP(schema, parseSchema); + await this.updateSchemaToDB(parseSchema); + }) + ); + } + + // Create a fake session since Parse do not create the _Session until + // a session is created + async createDeleteSession() { + const session = new Parse.Session(); + await session.save(null, { useMasterKey: true }); + await session.destroy({ useMasterKey: true }); + } + + async saveOrUpdate(localSchema) { + const cloudSchema = this.allCloudSchemas.find(sc => sc.className === localSchema.className); + if (cloudSchema) { + await this.updateSchema(localSchema, cloudSchema); + } else { + await this.saveSchema(localSchema); + } + } + + async 
saveSchema(localSchema) { + const newLocalSchema = new Parse.Schema(localSchema.className); + if (localSchema.fields) { + // Handle fields + Object.keys(localSchema.fields) + .filter(fieldName => !this.isProtectedFields(localSchema.className, fieldName)) + .forEach(fieldName => { + const { type, ...others } = localSchema.fields[fieldName]; + this.handleFields(newLocalSchema, fieldName, type, others); + }); + } + // Handle indexes + if (localSchema.indexes) { + Object.keys(localSchema.indexes).forEach(indexName => { + if (!this.isProtectedIndex(localSchema.className, indexName)) { + newLocalSchema.addIndex(indexName, localSchema.indexes[indexName]); + } + }); + } + + this.handleCLP(localSchema, newLocalSchema); + + return this.saveSchemaToDB(newLocalSchema); + } + + async updateSchema(localSchema, cloudSchema) { + const newLocalSchema = new Parse.Schema(localSchema.className); + + // Handle fields + // Check addition + if (localSchema.fields) { + Object.keys(localSchema.fields) + .filter(fieldName => !this.isProtectedFields(localSchema.className, fieldName)) + .forEach(fieldName => { + const { type, ...others } = localSchema.fields[fieldName]; + if (!cloudSchema.fields[fieldName]) + this.handleFields(newLocalSchema, fieldName, type, others); + }); + } + + const fieldsToDelete = []; + const fieldsToRecreate = []; + const fieldsWithChangedParams = []; + + // Check deletion + Object.keys(cloudSchema.fields) + .filter(fieldName => !this.isProtectedFields(localSchema.className, fieldName)) + .forEach(async fieldName => { + const field = cloudSchema.fields[fieldName]; + if (!localSchema.fields || !localSchema.fields[fieldName]) { + fieldsToDelete.push(fieldName); + return; + } + + const localField = localSchema.fields[fieldName]; + // Check if field has a changed type + if ( + !this.paramsAreEquals( + { type: field.type, targetClass: field.targetClass }, + { type: localField.type, targetClass: localField.targetClass } + ) + ) { + fieldsToRecreate.push(fieldName); + 
fieldsToDelete.push(fieldName); + return; + } + + // Check if something changed other than the type (like required, defaultValue) + if (!this.paramsAreEquals(field, localField)) { + fieldsWithChangedParams.push(fieldName); + } + }); + + fieldsToDelete.forEach(fieldName => { + newLocalSchema.deleteField(fieldName); + }); + + // Delete fields from the schema then apply changes + await this.updateSchemaToDB(newLocalSchema); + + fieldsToRecreate.forEach(fieldName => { + const { type, ...others } = localSchema.fields[fieldName]; + this.handleFields(newLocalSchema, fieldName, type, others); + }); + fieldsWithChangedParams.forEach(fieldName => { + const { type, ...others } = localSchema.fields[fieldName]; + this.handleFields(newLocalSchema, fieldName, type, others); + }); + + // Handle Indexes + // Check addition + if (localSchema.indexes) { + Object.keys(localSchema.indexes).forEach(indexName => { + if ( + (!cloudSchema.indexes || !cloudSchema.indexes[indexName]) && + !this.isProtectedIndex(localSchema.className, indexName) + ) + newLocalSchema.addIndex(indexName, localSchema.indexes[indexName]); + }); + } + + const indexesToAdd = []; + + // Check deletion + if (cloudSchema.indexes) { + Object.keys(cloudSchema.indexes).forEach(async indexName => { + if (!this.isProtectedIndex(localSchema.className, indexName)) { + if (!localSchema.indexes || !localSchema.indexes[indexName]) { + newLocalSchema.deleteIndex(indexName); + } else if ( + !this.paramsAreEquals(localSchema.indexes[indexName], cloudSchema.indexes[indexName]) + ) { + newLocalSchema.deleteIndex(indexName); + indexesToAdd.push({ + indexName, + index: localSchema.indexes[indexName], + }); + } + } + }); + } + + this.handleCLP(localSchema, newLocalSchema, cloudSchema); + // Apply changes + await this.updateSchemaToDB(newLocalSchema); + // Apply new/changed indexes + if (indexesToAdd.length) { + indexesToAdd.forEach(o => newLocalSchema.addIndex(o.indexName, o.index)); + await this.updateSchemaToDB(newLocalSchema); + } + 
} + + handleCLP(localSchema, newLocalSchema, cloudSchema) { + if (!localSchema.classLevelPermissions && !cloudSchema) { + logger.warn(`classLevelPermissions not provided for ${localSchema.className}.`); + } + // Use spread to avoid read only issue (encountered by Moumouls using directAccess) + const clp = { ...localSchema.classLevelPermissions } || {}; + const cloudCLP = (cloudSchema && cloudSchema.classLevelPermissions) || {}; + // Try to inject default CLPs + const CLPKeys = ['find', 'count', 'get', 'create', 'update', 'delete', 'addField']; + CLPKeys.forEach(key => { + if (!clp[key]) { + clp[key] = cloudCLP[key] || { '*': true }; + } + }); + // To avoid inconsistency we need to remove all rights on addField + clp.addField = {}; + newLocalSchema.setCLP(clp); + } + + isProtectedFields(className, fieldName) { + return ( + !!defaultColumns._Default[fieldName] || + !!(defaultColumns[className] && defaultColumns[className][fieldName]) + ); + } + + isProtectedIndex(className, indexName) { + let indexes = ['_id_']; + if (className === '_User') { + indexes = [ + ...indexes, + 'case_insensitive_username', + 'case_insensitive_email', + 'username_1', + 'email_1', + ]; + } + + return indexes.indexOf(indexName) !== -1; + } + + paramsAreEquals(indexA, indexB) { + const keysIndexA = Object.keys(indexA); + const keysIndexB = Object.keys(indexB); + + // Check key name + if (keysIndexA.length !== keysIndexB.length) return false; + return keysIndexA.every(k => indexA[k] === indexB[k]); + } + + handleFields(newLocalSchema, fieldName, type, others) { + if (type === 'Relation') { + newLocalSchema.addRelation(fieldName, others.targetClass); + } else if (type === 'Pointer') { + const { targetClass, ...others2 } = others; + newLocalSchema.addPointer(fieldName, targetClass, others2); + } else { + newLocalSchema.addField(fieldName, type, others); + } + } +} diff --git a/src/Deprecator/Deprecator.js b/src/Deprecator/Deprecator.js index 5ab0bb43ee..64fdb3b721 100644 --- 
a/src/Deprecator/Deprecator.js +++ b/src/Deprecator/Deprecator.js @@ -55,8 +55,8 @@ class Deprecator { changeNewKey == null ? undefined : changeNewKey.length > 0 - ? `renamed to '${changeNewKey}'` - : `removed`; + ? `renamed to '${changeNewKey}'` + : `removed`; // Compose message let output = `DeprecationWarning: The Parse Server ${type} '${key}' `; diff --git a/src/Options/Definitions.js b/src/Options/Definitions.js index 65799f8191..c9beb4061d 100644 --- a/src/Options/Definitions.js +++ b/src/Options/Definitions.js @@ -52,6 +52,11 @@ module.exports.ParseServerOptions = { 'Configuration for your authentication providers, as stringified JSON. See http://docs.parseplatform.org/parse-server/guide/#oauth-and-3rd-party-authentication', action: parsers.objectParser, }, + beforeSchemasMigration: { + env: 'PARSE_SERVER_BEFORE_SCHEMAS_MIGRATION', + help: + 'Callback when server has started and before running schemas migration operations if schemas key provided', + }, cacheAdapter: { env: 'PARSE_SERVER_CACHE_ADAPTER', help: 'Adapter module for the cache', @@ -360,6 +365,12 @@ module.exports.ParseServerOptions = { action: parsers.booleanParser, default: false, }, + schemas: { + env: 'PARSE_SERVER_SCHEMAS', + help: + 'Rest representation on Parse.Schema https://docs.parseplatform.org/rest/guide/#adding-a-schema', + action: parsers.arrayParser, + }, security: { env: 'PARSE_SERVER_SECURITY', help: 'The security options to identify and report weak security settings.', @@ -553,6 +564,8 @@ module.exports.PagesCustomUrlsOptions = { help: 'The URL to the custom page for password reset -> success.', }, }; +module.exports.FieldType = {}; +module.exports.JSONSchema = {}; module.exports.CustomPagesOptions = { choosePassword: { env: 'PARSE_SERVER_CUSTOM_PAGES_CHOOSE_PASSWORD', diff --git a/src/Options/docs.js b/src/Options/docs.js index 30b3aba1a0..9cf90c7c32 100644 --- a/src/Options/docs.js +++ b/src/Options/docs.js @@ -9,6 +9,7 @@ * @property {String} appId Your Parse Application ID * 
@property {String} appName Sets the app name * @property {Any} auth Configuration for your authentication providers, as stringified JSON. See http://docs.parseplatform.org/parse-server/guide/#oauth-and-3rd-party-authentication + * @property {Function} beforeSchemasMigration Callback when server has started and before running schemas migration operations if schemas key provided * @property {Adapter} cacheAdapter Adapter module for the cache * @property {Number} cacheMaxSize Sets the maximum size for the in memory cache, defaults to 10000 * @property {Number} cacheTTL Sets the TTL for the in memory cache (in ms), defaults to 5000 (5 seconds) @@ -66,6 +67,7 @@ * @property {String} restAPIKey Key for REST calls * @property {Boolean} revokeSessionOnPasswordReset When a user changes their password, either through the reset password email or while logged in, all sessions are revoked if this is true. Set to false if you don't want to revoke sessions. * @property {Boolean} scheduledPush Configuration for push scheduling, defaults to false. + * @property {JSONSchema[]} schemas Rest representation on Parse.Schema https://docs.parseplatform.org/rest/guide/#adding-a-schema * @property {SecurityOptions} security The security options to identify and report weak security settings. * @property {Function} serverCloseComplete Callback when server has closed * @property {Function} serverStartComplete Callback when server has started @@ -119,6 +121,14 @@ * @property {String} passwordResetSuccess The URL to the custom page for password reset -> success. 
*/ +/** + * @interface FieldType + */ + +/** + * @interface JSONSchema + */ + /** * @interface CustomPagesOptions * @property {String} choosePassword choose password page path diff --git a/src/Options/index.js b/src/Options/index.js index 5f3d9afa47..0c74b241eb 100644 --- a/src/Options/index.js +++ b/src/Options/index.js @@ -241,6 +241,10 @@ export interface ParseServerOptions { playgroundPath: ?string; /* Callback when server has started */ serverStartComplete: ?(error: ?Error) => void; + /* Callback when server has started and before running schemas migration operations if schemas key provided */ + beforeSchemasMigration: ?() => void | Promise; + /* Rest representation on Parse.Schema https://docs.parseplatform.org/rest/guide/#adding-a-schema */ + schemas: ?(JSONSchema[]); /* Callback when server has closed */ serverCloseComplete: ?() => void; /* The security options to identify and report weak security settings. @@ -319,6 +323,40 @@ export interface PagesCustomUrlsOptions { emailVerificationLinkExpired: ?string; } +export interface FieldType { + type: + | 'String' + | 'Boolean' + | 'File' + | 'Number' + | 'Relation' + | 'Pointer' + | 'Date' + | 'GeoPoint' + | 'Polygon' + | 'Array' + | 'Object'; + required?: boolean; + defaultValue?: mixed; + targetClass?: string; +} + +export interface JSONSchema { + className: '_User' | '_Role' | string; + fields?: { [key: string]: FieldType }; + indexes?: any; + classLevelPermissions?: { + find?: any, + count?: any, + get?: any, + update?: any, + create?: any, + delete?: any, + addField?: any, + protectedFields?: any, + }; +} + export interface CustomPagesOptions { /* invalid link page path */ invalidLink: ?string; diff --git a/src/ParseServer.js b/src/ParseServer.js index 43996ac751..d77ff8e6e2 100644 --- a/src/ParseServer.js +++ b/src/ParseServer.js @@ -44,6 +44,7 @@ import { ParseGraphQLServer } from './GraphQL/ParseGraphQLServer'; import { SecurityRouter } from './Routers/SecurityRouter'; import CheckRunner from 
'./Security/CheckRunner'; import Deprecator from './Deprecator/Deprecator'; +import { DefinedSchemas } from './DefinedSchemas'; // Mutate the Parse object to add the Cloud Code handlers addParseCloud(); @@ -68,6 +69,8 @@ class ParseServer { javascriptKey, serverURL = requiredParameter('You must provide a serverURL!'), serverStartComplete, + beforeSchemasMigration, + schemas, } = options; // Initialize the node client SDK automatically Parse.initialize(appId, javascriptKey || 'unused', masterKey); @@ -84,7 +87,13 @@ class ParseServer { databaseController .performInitialization() .then(() => hooksController.load()) - .then(() => { + .then(async () => { + if (beforeSchemasMigration) { + await Promise.resolve(beforeSchemasMigration()); + } + if (schemas) { + await new DefinedSchemas(schemas, this.config).execute(); + } if (serverStartComplete) { serverStartComplete(); } diff --git a/src/Routers/PagesRouter.js b/src/Routers/PagesRouter.js index 5d5a1467a7..6e6ba8a8fd 100644 --- a/src/Routers/PagesRouter.js +++ b/src/Routers/PagesRouter.js @@ -236,15 +236,15 @@ export class PagesRouter extends PromiseRouter { const query = result.success ? { - [pageParams.username]: username, - } + [pageParams.username]: username, + } : { - [pageParams.username]: username, - [pageParams.token]: token, - [pageParams.appId]: config.applicationId, - [pageParams.error]: result.err, - [pageParams.appName]: config.appName, - }; + [pageParams.username]: username, + [pageParams.token]: token, + [pageParams.appId]: config.applicationId, + [pageParams.error]: result.err, + [pageParams.appName]: config.appName, + }; const page = result.success ? pages.passwordResetSuccess : pages.passwordReset; return this.goToPage(req, page, query, false); @@ -273,8 +273,8 @@ export class PagesRouter extends PromiseRouter { const redirect = config.pages.forceRedirect ? true : responseType !== undefined - ? responseType - : req.method == 'POST'; + ? 
responseType + : req.method == 'POST'; // Include default parameters const defaultParams = this.getDefaultParams(config); @@ -311,9 +311,9 @@ export class PagesRouter extends PromiseRouter { return Utils.getLocalizedPath(defaultPath, locale).then(({ path, subdir }) => redirect ? this.redirectResponse( - this.composePageUrl(defaultFile, config.publicServerURL, subdir), - params - ) + this.composePageUrl(defaultFile, config.publicServerURL, subdir), + params + ) : this.pageResponse(path, params, placeholders) ); } else { @@ -452,8 +452,8 @@ export class PagesRouter extends PromiseRouter { typeof this.pagesConfig.placeholders === 'function' ? this.pagesConfig.placeholders(params) : Object.prototype.toString.call(this.pagesConfig.placeholders) === '[object Object]' - ? this.pagesConfig.placeholders - : {}; + ? this.pagesConfig.placeholders + : {}; if (configPlaceholders instanceof Promise) { configPlaceholders = await configPlaceholders; } @@ -543,10 +543,10 @@ export class PagesRouter extends PromiseRouter { getDefaultParams(config) { return config ? { - [pageParams.appId]: config.appId, - [pageParams.appName]: config.appName, - [pageParams.publicServerUrl]: config.publicServerURL, - } + [pageParams.appId]: config.appId, + [pageParams.appName]: config.appName, + [pageParams.publicServerUrl]: config.publicServerURL, + } : {}; } diff --git a/src/Routers/SchemasRouter.js b/src/Routers/SchemasRouter.js index ae0a736eb5..9d0f037022 100644 --- a/src/Routers/SchemasRouter.js +++ b/src/Routers/SchemasRouter.js @@ -35,7 +35,42 @@ function getOneSchema(req) { }); } +const checkIfDefinedSchemasIsUsed = req => { + if (req.config && req.config.schemas) { + throw new Parse.Error( + Parse.Error.OPERATION_FORBIDDEN, + 'cannot perform this operation when schemas options is used.' 
+ ); + } +}; + +export const internalCreateSchema = async (className, body, config) => { + const controller = await config.database.loadSchema({ clearCache: true }); + return { + response: await controller.addClassIfNotExists( + className, + body.fields, + body.classLevelPermissions, + body.indexes + ), + }; +}; + +export const internalUpdateSchema = async (className, body, config) => { + const controller = await config.database.loadSchema({ clearCache: true }); + return { + response: await controller.updateClass( + className, + body.fields || {}, + body.classLevelPermissions, + body.indexes, + config.database + ), + }; +}; + function createSchema(req) { + checkIfDefinedSchemasIsUsed(req); if (req.auth.isReadOnly) { throw new Parse.Error( Parse.Error.OPERATION_FORBIDDEN, @@ -53,20 +88,11 @@ function createSchema(req) { throw new Parse.Error(135, `POST ${req.path} needs a class name.`); } - return req.config.database - .loadSchema({ clearCache: true }) - .then(schema => - schema.addClassIfNotExists( - className, - req.body.fields, - req.body.classLevelPermissions, - req.body.indexes - ) - ) - .then(schema => ({ response: schema })); + return internalCreateSchema(className, req.body, req.config); } function modifySchema(req) { + checkIfDefinedSchemasIsUsed(req); if (req.auth.isReadOnly) { throw new Parse.Error( Parse.Error.OPERATION_FORBIDDEN, @@ -76,22 +102,9 @@ function modifySchema(req) { if (req.body.className && req.body.className != req.params.className) { return classNameMismatchResponse(req.body.className, req.params.className); } - - const submittedFields = req.body.fields || {}; const className = req.params.className; - return req.config.database - .loadSchema({ clearCache: true }) - .then(schema => - schema.updateClass( - className, - submittedFields, - req.body.classLevelPermissions, - req.body.indexes, - req.config.database - ) - ) - .then(result => ({ response: result })); + return internalUpdateSchema(className, req.body, req.config); } const 
deleteSchema = req => { diff --git a/src/Routers/SecurityRouter.js b/src/Routers/SecurityRouter.js index a9c50ecb8e..c7c217a048 100644 --- a/src/Routers/SecurityRouter.js +++ b/src/Routers/SecurityRouter.js @@ -4,10 +4,12 @@ import CheckRunner from '../Security/CheckRunner'; export class SecurityRouter extends PromiseRouter { mountRoutes() { - this.route('GET', '/security', + this.route( + 'GET', + '/security', middleware.promiseEnforceMasterKeyAccess, this._enforceSecurityCheckEnabled, - async (req) => { + async req => { const report = await new CheckRunner(req.config.security).run(); return { status: 200, diff --git a/src/Security/Check.js b/src/Security/Check.js index 7853fe7cce..dc57d63088 100644 --- a/src/Security/Check.js +++ b/src/Security/Check.js @@ -73,9 +73,9 @@ class Check { * The check state. */ const CheckState = Object.freeze({ - none: "none", - fail: "fail", - success: "success", + none: 'none', + fail: 'fail', + success: 'success', }); export default Check; diff --git a/src/Security/CheckGroups/CheckGroupDatabase.js b/src/Security/CheckGroups/CheckGroupDatabase.js index d0da79a4ba..f9b9340eb1 100644 --- a/src/Security/CheckGroups/CheckGroupDatabase.js +++ b/src/Security/CheckGroups/CheckGroupDatabase.js @@ -8,9 +8,9 @@ import Config from '../../Config'; import Parse from 'parse/node'; /** -* The security checks group for Parse Server configuration. -* Checks common Parse Server parameters such as access keys. -*/ + * The security checks group for Parse Server configuration. + * Checks common Parse Server parameters such as access keys. 
+ */ class CheckGroupDatabase extends CheckGroup { setName() { return 'Database'; @@ -23,7 +23,8 @@ class CheckGroupDatabase extends CheckGroup { new Check({ title: 'Secure database password', warning: 'The database password is insecure and vulnerable to brute force attacks.', - solution: 'Choose a longer and/or more complex password with a combination of upper- and lowercase characters, numbers and special characters.', + solution: + 'Choose a longer and/or more complex password with a combination of upper- and lowercase characters, numbers and special characters.', check: () => { const password = databaseUrl.match(/\/\/\S+:(\S+)@/)[1]; const hasUpperCase = /[A-Z]/.test(password); diff --git a/src/Security/CheckGroups/CheckGroupServerConfig.js b/src/Security/CheckGroups/CheckGroupServerConfig.js index a0dc41ec47..729551ed7a 100644 --- a/src/Security/CheckGroups/CheckGroupServerConfig.js +++ b/src/Security/CheckGroups/CheckGroupServerConfig.js @@ -8,9 +8,9 @@ import Config from '../../Config'; import Parse from 'parse/node'; /** -* The security checks group for Parse Server configuration. -* Checks common Parse Server parameters such as access keys. -*/ + * The security checks group for Parse Server configuration. + * Checks common Parse Server parameters such as access keys. 
+ */ class CheckGroupServerConfig extends CheckGroup { setName() { return 'Parse Server Configuration'; @@ -21,7 +21,8 @@ class CheckGroupServerConfig extends CheckGroup { new Check({ title: 'Secure master key', warning: 'The Parse Server master key is insecure and vulnerable to brute force attacks.', - solution: 'Choose a longer and/or more complex master key with a combination of upper- and lowercase characters, numbers and special characters.', + solution: + 'Choose a longer and/or more complex master key with a combination of upper- and lowercase characters, numbers and special characters.', check: () => { const masterKey = config.masterKey; const hasUpperCase = /[A-Z]/.test(masterKey); @@ -41,7 +42,7 @@ class CheckGroupServerConfig extends CheckGroup { new Check({ title: 'Security log disabled', warning: 'Security checks in logs may expose vulnerabilities to anyone access to logs.', - solution: 'Change Parse Server configuration to \'security.enableCheckLog: false\'.', + solution: "Change Parse Server configuration to 'security.enableCheckLog: false'.", check: () => { if (config.security && config.security.enableCheckLog) { throw 1; @@ -50,8 +51,9 @@ class CheckGroupServerConfig extends CheckGroup { }), new Check({ title: 'Client class creation disabled', - warning: 'Attackers are allowed to create new classes without restriction and flood the database.', - solution: 'Change Parse Server configuration to \'allowClientClassCreation: false\'.', + warning: + 'Attackers are allowed to create new classes without restriction and flood the database.', + solution: "Change Parse Server configuration to 'allowClientClassCreation: false'.", check: () => { if (config.allowClientClassCreation || config.allowClientClassCreation == null) { throw 1; diff --git a/src/Security/CheckRunner.js b/src/Security/CheckRunner.js index 2e522fefcb..a662ffbad4 100644 --- a/src/Security/CheckRunner.js +++ b/src/Security/CheckRunner.js @@ -46,7 +46,7 @@ class CheckRunner { // If report 
should be written to logs if (this.enableCheckLog) { - this._logReport(report) + this._logReport(report); } return report; } @@ -85,8 +85,8 @@ class CheckRunner { report: { version, state: CheckState.success, - groups: [] - } + groups: [], + }, }; // Identify report version @@ -95,13 +95,12 @@ class CheckRunner { default: // For each check group for (const group of groups) { - // Create group report const groupReport = { name: group.name(), state: CheckState.success, checks: [], - } + }; // Create check reports groupReport.checks = group.checks().map(check => { @@ -129,9 +128,9 @@ class CheckRunner { * @param {Object} report The report to log. */ _logReport(report) { - // Determine log level depending on whether any check failed - const log = report.report.state == CheckState.success ? (s) => logger.info(s) : (s) => logger.warn(s); + const log = + report.report.state == CheckState.success ? s => logger.info(s) : s => logger.warn(s); // Declare output const indent = ' '; @@ -142,7 +141,7 @@ class CheckRunner { // Traverse all groups and checks for compose output for (const group of report.report.groups) { - output += `\n- ${group.name}` + output += `\n- ${group.name}`; for (const check of group.checks) { checksCount++; @@ -166,7 +165,9 @@ class CheckRunner { `\n# #` + `\n###################################` + `\n` + - `\n${failedChecksCount > 0 ? 'Warning: ' : ''}${failedChecksCount} weak security setting(s) found${failedChecksCount > 0 ? '!' : ''}` + + `\n${ + failedChecksCount > 0 ? 'Warning: ' : '' + }${failedChecksCount} weak security setting(s) found${failedChecksCount > 0 ? '!' 
: ''}` + `\n${checksCount} check(s) executed` + `\n${skippedCheckCount} check(s) skipped` + `\n` + @@ -183,9 +184,12 @@ class CheckRunner { */ _getLogIconForState(state) { switch (state) { - case CheckState.success: return '✅'; - case CheckState.fail: return '❌'; - default: return 'ℹ️'; + case CheckState.success: + return '✅'; + case CheckState.fail: + return '❌'; + default: + return 'ℹ️'; } } diff --git a/src/batch.js b/src/batch.js index 58c23ccab6..25e4cc4dc7 100644 --- a/src/batch.js +++ b/src/batch.js @@ -48,8 +48,8 @@ function makeBatchRoutingPathFunction(originalUrl, serverURL, publicServerURL) { startsWithLocal && startsWithPublic ? Math.max(localPath.length, publicPath.length) : startsWithLocal - ? localPath.length - : publicPath.length; + ? localPath.length + : publicPath.length; const newPath = path.posix.join('/', localPath, '/', requestPath.slice(pathLengthToUse));