Prettier some files + opti object relation (#7044)
Moumouls authored Dec 7, 2020
1 parent de9c7dc commit 88e958a
Showing 4 changed files with 40 additions and 133 deletions.
88 changes: 18 additions & 70 deletions spec/GridFSBucketStorageAdapter.spec.js
@@ -44,9 +44,7 @@ describe_only_db('mongo')('GridFSBucket and GridStore interop', () => {
await expectMissingFile(encryptedAdapter, 'myFileName');
const originalString = 'abcdefghi';
await encryptedAdapter.createFile('myFileName', originalString);
- const unencryptedResult = await unencryptedAdapter.getFileData(
- 'myFileName'
- );
+ const unencryptedResult = await unencryptedAdapter.getFileData('myFileName');
expect(unencryptedResult.toString('utf8')).not.toBe(originalString);
const encryptedResult = await encryptedAdapter.getFileData('myFileName');
expect(encryptedResult.toString('utf8')).toBe(originalString);
@@ -71,10 +69,7 @@ describe_only_db('mongo')('GridFSBucket and GridStore interop', () => {
const unencryptedResult2 = await unencryptedAdapter.getFileData(fileName2);
expect(unencryptedResult2.toString('utf8')).toBe(data2);
//Check if encrypted adapter can read data and make sure it's not the same as unEncrypted adapter
- const {
- rotated,
- notRotated,
- } = await encryptedAdapter.rotateEncryptionKey();
+ const { rotated, notRotated } = await encryptedAdapter.rotateEncryptionKey();
expect(rotated.length).toEqual(2);
expect(
rotated.filter(function (value) {
@@ -101,30 +96,18 @@ describe_only_db('mongo')('GridFSBucket and GridStore interop', () => {

it('should rotate key of all old encrypted GridFS files to encrypted files', async () => {
const oldEncryptionKey = 'oldKeyThatILoved';
- const oldEncryptedAdapter = new GridFSBucketAdapter(
- databaseURI,
- {},
- oldEncryptionKey
- );
- const encryptedAdapter = new GridFSBucketAdapter(
- databaseURI,
- {},
- 'newKeyThatILove'
- );
+ const oldEncryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, oldEncryptionKey);
+ const encryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, 'newKeyThatILove');
const fileName1 = 'file1.txt';
const data1 = 'hello world';
const fileName2 = 'file2.txt';
const data2 = 'hello new world';
//Store unecrypted files
await oldEncryptedAdapter.createFile(fileName1, data1);
- const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(
- fileName1
- );
+ const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(fileName1);
expect(oldEncryptedResult1.toString('utf8')).toBe(data1);
await oldEncryptedAdapter.createFile(fileName2, data2);
- const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(
- fileName2
- );
+ const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(fileName2);
expect(oldEncryptedResult2.toString('utf8')).toBe(data2);
//Check if encrypted adapter can read data and make sure it's not the same as unEncrypted adapter
const { rotated, notRotated } = await encryptedAdapter.rotateEncryptionKey({
@@ -170,32 +153,21 @@ describe_only_db('mongo')('GridFSBucket and GridStore interop', () => {

it('should rotate key of all old encrypted GridFS files to unencrypted files', async () => {
const oldEncryptionKey = 'oldKeyThatILoved';
- const oldEncryptedAdapter = new GridFSBucketAdapter(
- databaseURI,
- {},
- oldEncryptionKey
- );
+ const oldEncryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, oldEncryptionKey);
const unEncryptedAdapter = new GridFSBucketAdapter(databaseURI);
const fileName1 = 'file1.txt';
const data1 = 'hello world';
const fileName2 = 'file2.txt';
const data2 = 'hello new world';
//Store unecrypted files
await oldEncryptedAdapter.createFile(fileName1, data1);
- const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(
- fileName1
- );
+ const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(fileName1);
expect(oldEncryptedResult1.toString('utf8')).toBe(data1);
await oldEncryptedAdapter.createFile(fileName2, data2);
- const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(
- fileName2
- );
+ const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(fileName2);
expect(oldEncryptedResult2.toString('utf8')).toBe(data2);
//Check if unEncrypted adapter can read data and make sure it's not the same as oldEncrypted adapter
- const {
- rotated,
- notRotated,
- } = await unEncryptedAdapter.rotateEncryptionKey({
+ const { rotated, notRotated } = await unEncryptedAdapter.rotateEncryptionKey({
oldKey: oldEncryptionKey,
});
expect(rotated.length).toEqual(2);
@@ -238,31 +210,19 @@ describe_only_db('mongo')('GridFSBucket and GridStore interop', () => {

it('should only encrypt specified fileNames', async () => {
const oldEncryptionKey = 'oldKeyThatILoved';
- const oldEncryptedAdapter = new GridFSBucketAdapter(
- databaseURI,
- {},
- oldEncryptionKey
- );
- const encryptedAdapter = new GridFSBucketAdapter(
- databaseURI,
- {},
- 'newKeyThatILove'
- );
+ const oldEncryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, oldEncryptionKey);
+ const encryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, 'newKeyThatILove');
const unEncryptedAdapter = new GridFSBucketAdapter(databaseURI);
const fileName1 = 'file1.txt';
const data1 = 'hello world';
const fileName2 = 'file2.txt';
const data2 = 'hello new world';
//Store unecrypted files
await oldEncryptedAdapter.createFile(fileName1, data1);
- const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(
- fileName1
- );
+ const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(fileName1);
expect(oldEncryptedResult1.toString('utf8')).toBe(data1);
await oldEncryptedAdapter.createFile(fileName2, data2);
- const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(
- fileName2
- );
+ const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(fileName2);
expect(oldEncryptedResult2.toString('utf8')).toBe(data2);
//Inject unecrypted file to see if causes an issue
const fileName3 = 'file3.txt';
@@ -318,31 +278,19 @@ describe_only_db('mongo')('GridFSBucket and GridStore interop', () => {

it("should return fileNames of those it can't encrypt with the new key", async () => {
const oldEncryptionKey = 'oldKeyThatILoved';
- const oldEncryptedAdapter = new GridFSBucketAdapter(
- databaseURI,
- {},
- oldEncryptionKey
- );
- const encryptedAdapter = new GridFSBucketAdapter(
- databaseURI,
- {},
- 'newKeyThatILove'
- );
+ const oldEncryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, oldEncryptionKey);
+ const encryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, 'newKeyThatILove');
const unEncryptedAdapter = new GridFSBucketAdapter(databaseURI);
const fileName1 = 'file1.txt';
const data1 = 'hello world';
const fileName2 = 'file2.txt';
const data2 = 'hello new world';
//Store unecrypted files
await oldEncryptedAdapter.createFile(fileName1, data1);
- const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(
- fileName1
- );
+ const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(fileName1);
expect(oldEncryptedResult1.toString('utf8')).toBe(data1);
await oldEncryptedAdapter.createFile(fileName2, data2);
- const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(
- fileName2
- );
+ const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(fileName2);
expect(oldEncryptedResult2.toString('utf8')).toBe(data2);
//Inject unecrypted file to see if causes an issue
const fileName3 = 'file3.txt';
15 changes: 7 additions & 8 deletions spec/LdapAuth.spec.js
@@ -39,7 +39,7 @@ it('Should succeed with right credentials when LDAPS is used and certifcate is n
suffix: 'o=example',
url: `ldaps://localhost:${sslport}`,
dn: 'uid={{id}}, o=example',
- tlsOptions: { rejectUnauthorized: false }
+ tlsOptions: { rejectUnauthorized: false },
};
ldap
.validateAuthData({ id: 'testuser', password: 'secret' }, options)
@@ -57,8 +57,8 @@ it('Should succeed when LDAPS is used and the presented certificate is the expec
dn: 'uid={{id}}, o=example',
tlsOptions: {
ca: fs.readFileSync(__dirname + '/support/cert/cert.pem'),
- rejectUnauthorized: true
- }
+ rejectUnauthorized: true,
+ },
};
ldap
.validateAuthData({ id: 'testuser', password: 'secret' }, options)
@@ -76,8 +76,8 @@ it('Should fail when LDAPS is used and the presented certificate is not the expe
dn: 'uid={{id}}, o=example',
tlsOptions: {
ca: fs.readFileSync(__dirname + '/support/cert/anothercert.pem'),
- rejectUnauthorized: true
- }
+ rejectUnauthorized: true,
+ },
};
ldap
.validateAuthData({ id: 'testuser', password: 'secret' }, options)
@@ -98,8 +98,8 @@ it('Should fail when LDAPS is used certifcate matches but credentials are wrong'
dn: 'uid={{id}}, o=example',
tlsOptions: {
ca: fs.readFileSync(__dirname + '/support/cert/cert.pem'),
- rejectUnauthorized: true
- }
+ rejectUnauthorized: true,
+ },
};
ldap
.validateAuthData({ id: 'testuser', password: 'wrong!' }, options)
@@ -112,7 +112,6 @@ it('Should fail when LDAPS is used certifcate matches but credentials are wrong'
});
});


it('Should fail with wrong credentials', done => {
mockLdapServer(port, 'uid=testuser, o=example').then(server => {
const options = {
4 changes: 2 additions & 2 deletions spec/MockLdapServer.js
@@ -3,8 +3,8 @@ const fs = require('fs');

const tlsOptions = {
key: fs.readFileSync(__dirname + '/support/cert/key.pem'),
- certificate: fs.readFileSync(__dirname + '/support/cert/cert.pem')
- }
+ certificate: fs.readFileSync(__dirname + '/support/cert/cert.pem'),
+ };

function newServer(port, dn, provokeSearchError = false, ssl = false) {
const server = ssl ? ldapjs.createServer(tlsOptions) : ldapjs.createServer();
66 changes: 13 additions & 53 deletions src/GraphQL/helpers/objectsQueries.js
@@ -10,10 +10,10 @@ const needToGetAllKeys = (fields, keys, parseClasses) =>
? keys.split(',').some(keyName => {
const key = keyName.split('.');
if (fields[key[0]]) {
+ if (fields[key[0]].type === 'Relation') return false;
if (fields[key[0]].type === 'Pointer') {
const subClass = parseClasses.find(
- ({ className: parseClassName }) =>
- fields[key[0]].targetClass === parseClassName
+ ({ className: parseClassName }) => fields[key[0]].targetClass === parseClassName
);
if (subClass && subClass.fields[key[1]]) {
// Current sub key is not custom
@@ -50,9 +50,7 @@ const getObject = async (
try {
if (
!needToGetAllKeys(
- parseClasses.find(
- ({ className: parseClassName }) => className === parseClassName
- ).fields,
+ parseClasses.find(({ className: parseClassName }) => className === parseClassName).fields,
keys,
parseClasses
)
@@ -141,15 +139,7 @@ const findObjects = async (
preCountOptions.subqueryReadPreference = subqueryReadPreference;
}
preCount = (
- await rest.find(
- config,
- auth,
- className,
- where,
- preCountOptions,
- info.clientSDK,
- info.context
- )
+ await rest.find(config, auth, className, where, preCountOptions, info.clientSDK, info.context)
).count;
if ((skip || 0) + limit < preCount) {
skip = preCount - limit;
@@ -158,11 +148,7 @@

const options = {};

- if (
- selectedFields.find(
- field => field.startsWith('edges.') || field.startsWith('pageInfo.')
- )
- ) {
+ if (selectedFields.find(field => field.startsWith('edges.') || field.startsWith('pageInfo.'))) {
if (limit || limit === 0) {
options.limit = limit;
} else {
@@ -181,9 +167,7 @@
}
if (
!needToGetAllKeys(
- parseClasses.find(
- ({ className: parseClassName }) => className === parseClassName
- ).fields,
+ parseClasses.find(({ className: parseClassName }) => className === parseClassName).fields,
keys,
parseClasses
)
@@ -245,9 +229,7 @@

pageInfo = {
hasPreviousPage:
- ((preCount && preCount > 0) || (count && count > 0)) &&
- skip !== undefined &&
- skip > 0,
+ ((preCount && preCount > 0) || (count && count > 0)) && skip !== undefined && skip > 0,
startCursor: offsetToCursor(skip || 0),
endCursor: offsetToCursor((skip || 0) + (results.length || 1) - 1),
hasNextPage: (preCount || count) > (skip || 0) + results.length,
@@ -261,25 +243,15 @@
};
};

- const calculateSkipAndLimit = (
- skipInput,
- first,
- after,
- last,
- before,
- maxLimit
- ) => {
+ const calculateSkipAndLimit = (skipInput, first, after, last, before, maxLimit) => {
let skip = undefined;
let limit = undefined;
let needToPreCount = false;

// Validates the skip input
if (skipInput || skipInput === 0) {
if (skipInput < 0) {
- throw new Parse.Error(
- Parse.Error.INVALID_QUERY,
- 'Skip should be a positive number'
- );
+ throw new Parse.Error(Parse.Error.INVALID_QUERY, 'Skip should be a positive number');
}
skip = skipInput;
}
@@ -288,10 +260,7 @@ const calculateSkipAndLimit = (
if (after) {
after = cursorToOffset(after);
if ((!after && after !== 0) || after < 0) {
- throw new Parse.Error(
- Parse.Error.INVALID_QUERY,
- 'After is not a valid cursor'
- );
+ throw new Parse.Error(Parse.Error.INVALID_QUERY, 'After is not a valid cursor');
}

// If skip and after are passed, a new skip is calculated by adding them
@@ -301,10 +270,7 @@
// Validates the first param
if (first || first === 0) {
if (first < 0) {
- throw new Parse.Error(
- Parse.Error.INVALID_QUERY,
- 'First should be a positive number'
- );
+ throw new Parse.Error(Parse.Error.INVALID_QUERY, 'First should be a positive number');
}

// The first param is translated to the limit param of the Parse legacy API
@@ -316,10 +282,7 @@
// This method converts the cursor to the index of the object
before = cursorToOffset(before);
if ((!before && before !== 0) || before < 0) {
- throw new Parse.Error(
- Parse.Error.INVALID_QUERY,
- 'Before is not a valid cursor'
- );
+ throw new Parse.Error(Parse.Error.INVALID_QUERY, 'Before is not a valid cursor');
}

if ((skip || 0) >= before) {
@@ -334,10 +297,7 @@
// Validates the last param
if (last || last === 0) {
if (last < 0) {
- throw new Parse.Error(
- Parse.Error.INVALID_QUERY,
- 'Last should be a positive number'
- );
+ throw new Parse.Error(Parse.Error.INVALID_QUERY, 'Last should be a positive number');
}

if (last > maxLimit) {
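
Note on the "opti object relation" part of this commit: the needToGetAllKeys hunk in src/GraphQL/helpers/objectsQueries.js adds an early return for Relation fields (the line marked "+" in that hunk), so a selected key that targets a Relation no longer forces the REST query to fall back to fetching every key of the class. The sketch below is not the parse-server source; it is a simplified, self-contained version of that helper, with the part of the function not shown in the hunk approximated and a purely hypothetical schema, only to illustrate the effect of the added check.

// Simplified sketch of needToGetAllKeys, not the actual parse-server code.
// The Pointer branch approximates the part of the function hidden by the
// truncated hunk; the schema and key names below are hypothetical.
const needToGetAllKeys = (fields, keys, parseClasses) =>
  keys
    ? keys.split(',').some(keyName => {
        const key = keyName.split('.');
        if (fields[key[0]]) {
          // Added in this commit: a Relation field is handled separately,
          // so it never requires fetching all keys of the class.
          if (fields[key[0]].type === 'Relation') return false;
          if (fields[key[0]].type === 'Pointer') {
            const subClass = parseClasses.find(
              ({ className }) => fields[key[0]].targetClass === className
            );
            // All keys are needed only when the sub key is unknown on the target class.
            return !(subClass && subClass.fields[key[1]]);
          }
          return false;
        }
        // Unknown top-level key: fall back to fetching all keys.
        return true;
      })
    : true;

// Hypothetical schema, for illustration only.
const parseClasses = [{ className: 'Team', fields: { name: { type: 'String' } } }];
const fields = {
  members: { type: 'Relation', targetClass: '_User' },
  team: { type: 'Pointer', targetClass: 'Team' },
};

console.log(needToGetAllKeys(fields, 'members.username', parseClasses)); // false: Relation short-circuits
console.log(needToGetAllKeys(fields, 'team.name', parseClasses)); // false: known sub key on Team
console.log(needToGetAllKeys(fields, 'team.motto', parseClasses)); // true: unknown sub key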
