diff --git a/bin/commands/runs.js b/bin/commands/runs.js
index c1d64e66..09bdc2af 100644
--- a/bin/commands/runs.js
+++ b/bin/commands/runs.js
@@ -9,6 +9,7 @@ const archiver = require("../helpers/archiver"),
   utils = require("../helpers/utils"),
   fileHelpers = require("../helpers/fileHelpers"),
   syncRunner = require("../helpers/syncRunner"),
+  checkUploaded = require("../helpers/checkUploaded"),
   reportGenerator = require('../helpers/reporterHTML').reportGenerator,
   {initTimeComponents, markBlockStart, markBlockEnd, getTimeComponents} = require('../helpers/timeComponents');
@@ -78,107 +79,115 @@ module.exports = function run(args) {
       // warn if specFiles cross our limit
       utils.warnSpecLimit(bsConfig, args, specFiles);
-      markBlockEnd('preArchiveSteps');
-      // Archive the spec files
-      markBlockStart('zip');
-      markBlockStart('zip.archive');
-      return archiver.archive(bsConfig.run_settings, config.fileName, args.exclude).then(function (data) {
-
-        markBlockEnd('zip.archive');
-        // Uploaded zip file
-        markBlockStart('zip.zipUpload');
-        return zipUploader.zipUpload(bsConfig, config.fileName).then(async function (zip) {
-
-          markBlockEnd('zip.zipUpload');
-          markBlockEnd('zip');
-          // Create build
-
-          //setup Local Testing
-          markBlockStart('localSetup');
-          let bs_local = await utils.setupLocalTesting(bsConfig, args);
-          markBlockEnd('localSetup');
-          markBlockStart('createBuild');
-          return build.createBuild(bsConfig, zip).then(function (data) {
-            markBlockEnd('createBuild');
-            markBlockEnd('total');
-            let message = `${data.message}! ${Constants.userMessages.BUILD_CREATED} with build id: ${data.build_id}`;
-            let dashboardLink = `${Constants.userMessages.VISIT_DASHBOARD} ${data.dashboard_url}`;
-            utils.exportResults(data.build_id, `${config.dashboardUrl}${data.build_id}`);
-            if ((utils.isUndefined(bsConfig.run_settings.parallels) && utils.isUndefined(args.parallels)) || (!utils.isUndefined(bsConfig.run_settings.parallels) && bsConfig.run_settings.parallels == Constants.cliMessages.RUN.DEFAULT_PARALLEL_MESSAGE)) {
-              logger.warn(Constants.userMessages.NO_PARALLELS);
-            }
+      markBlockStart('checkAlreadyUploaded');
+      return checkUploaded.checkUploadedMd5(bsConfig, args).then(function (md5data) {
+        markBlockEnd('checkAlreadyUploaded');
+
+        // Archive the spec files
+        markBlockStart('zip');
+        markBlockStart('zip.archive');
+        return archiver.archive(bsConfig.run_settings, config.fileName, args.exclude, md5data).then(function (data) {
+          markBlockEnd('zip.archive');
+
+          // Uploaded zip file
+          markBlockStart('zip.zipUpload');
+          return zipUploader.zipUpload(bsConfig, config.fileName, md5data).then(async function (zip) {
+            markBlockEnd('zip.zipUpload');
+            markBlockEnd('zip');
+            // Create build
+
+            //setup Local Testing
+            markBlockStart('localSetup');
+            let bs_local = await utils.setupLocalTesting(bsConfig, args);
+            markBlockEnd('localSetup');
+            markBlockStart('createBuild');
+            return build.createBuild(bsConfig, zip).then(function (data) {
+              markBlockEnd('createBuild');
+              markBlockEnd('total');
+              let message = `${data.message}! ${Constants.userMessages.BUILD_CREATED} with build id: ${data.build_id}`;
+              let dashboardLink = `${Constants.userMessages.VISIT_DASHBOARD} ${data.dashboard_url}`;
+              utils.exportResults(data.build_id, `${config.dashboardUrl}${data.build_id}`);
+              if ((utils.isUndefined(bsConfig.run_settings.parallels) && utils.isUndefined(args.parallels)) || (!utils.isUndefined(bsConfig.run_settings.parallels) && bsConfig.run_settings.parallels == Constants.cliMessages.RUN.DEFAULT_PARALLEL_MESSAGE)) {
+                logger.warn(Constants.userMessages.NO_PARALLELS);
+              }

-            if (bsConfig.run_settings.cypress_version && bsConfig.run_settings.cypress_version !== data.cypress_version) {
-              let versionMessage = utils.versionChangedMessage(bsConfig.run_settings.cypress_version, data.cypress_version)
-              logger.warn(versionMessage);
-            }
+              if (bsConfig.run_settings.cypress_version && bsConfig.run_settings.cypress_version !== data.cypress_version) {
+                let versionMessage = utils.versionChangedMessage(bsConfig.run_settings.cypress_version, data.cypress_version)
+                logger.warn(versionMessage);
+              }

-            if (!args.disableNpmWarning && bsConfig.run_settings.npm_dependencies && Object.keys(bsConfig.run_settings.npm_dependencies).length <= 0) {
-              logger.warn(Constants.userMessages.NO_NPM_DEPENDENCIES);
-              logger.warn(Constants.userMessages.NO_NPM_DEPENDENCIES_READ_MORE);
-            }
+              if (!args.disableNpmWarning && bsConfig.run_settings.npm_dependencies && Object.keys(bsConfig.run_settings.npm_dependencies).length <= 0) {
+                logger.warn(Constants.userMessages.NO_NPM_DEPENDENCIES);
+                logger.warn(Constants.userMessages.NO_NPM_DEPENDENCIES_READ_MORE);
+              }

-            if (args.sync) {
-              syncRunner.pollBuildStatus(bsConfig, data).then(async (exitCode) => {
+              if (args.sync) {
+                syncRunner.pollBuildStatus(bsConfig, data).then(async (exitCode) => {

-                // stop the Local instance
-                await utils.stopLocalBinary(bsConfig, bs_local, args);
+                  // stop the Local instance
+                  await utils.stopLocalBinary(bsConfig, bs_local, args);

-                // Generate custom report!
-                reportGenerator(bsConfig, data.build_id, args, function(){
-                  utils.sendUsageReport(bsConfig, args, `${message}\n${dashboardLink}`, Constants.messageTypes.SUCCESS, null);
-                  utils.handleSyncExit(exitCode, data.dashboard_url);
+                  // Generate custom report!
+                  reportGenerator(bsConfig, data.build_id, args, function(){
+                    utils.sendUsageReport(bsConfig, args, `${message}\n${dashboardLink}`, Constants.messageTypes.SUCCESS, null);
+                    utils.handleSyncExit(exitCode, data.dashboard_url);
+                  });
                 });
-              });
-            }
-
-            logger.info(message);
-            logger.info(dashboardLink);
-            if(!args.sync) logger.info(Constants.userMessages.EXIT_SYNC_CLI_MESSAGE.replace("",data.build_id));
-            let dataToSend = {
-              time_components: getTimeComponents(),
-              build_id: data.build_id,
-            };
-            if (bsConfig && bsConfig.connection_settings) {
-              if (bsConfig.connection_settings.local_mode) {
-                dataToSend.local_mode = bsConfig.connection_settings.local_mode;
               }
-              if (bsConfig.connection_settings.usedAutoLocal) {
-                dataToSend.used_auto_local = bsConfig.connection_settings.usedAutoLocal;
+
+              logger.info(message);
+              logger.info(dashboardLink);
+              if(!args.sync) logger.info(Constants.userMessages.EXIT_SYNC_CLI_MESSAGE.replace("",data.build_id));
+              let dataToSend = {
+                time_components: getTimeComponents(),
+                build_id: data.build_id,
+              };
+              if (bsConfig && bsConfig.connection_settings) {
+                if (bsConfig.connection_settings.local_mode) {
+                  dataToSend.local_mode = bsConfig.connection_settings.local_mode;
+                }
+                if (bsConfig.connection_settings.usedAutoLocal) {
+                  dataToSend.used_auto_local = bsConfig.connection_settings.usedAutoLocal;
+                }
               }
-            }
-            utils.sendUsageReport(bsConfig, args, `${message}\n${dashboardLink}`, Constants.messageTypes.SUCCESS, null, dataToSend);
-            return;
-          }).catch(async function (err) {
-            // Build creation failed
+              utils.sendUsageReport(bsConfig, args, `${message}\n${dashboardLink}`, Constants.messageTypes.SUCCESS, null, dataToSend);
+              return;
+            }).catch(async function (err) {
+              // Build creation failed
+              logger.error(err);
+              // stop the Local instance
+              await utils.stopLocalBinary(bsConfig, bs_local, args);
+
+              utils.sendUsageReport(bsConfig, args, err, Constants.messageTypes.ERROR, 'build_failed');
+            });
          }).catch(function (err) {
+            // Zip Upload failed | Local Start failed
             logger.error(err);
-            // stop the Local instance
-            await utils.stopLocalBinary(bsConfig, bs_local, args);
-
-            utils.sendUsageReport(bsConfig, args, err, Constants.messageTypes.ERROR, 'build_failed');
+            if(err === Constants.userMessages.LOCAL_START_FAILED){
+              utils.sendUsageReport(bsConfig, args, `${err}\n${Constants.userMessages.LOCAL_START_FAILED}`, Constants.messageTypes.ERROR, 'local_start_failed');
+            } else {
+              logger.error(Constants.userMessages.ZIP_UPLOAD_FAILED);
+              fileHelpers.deleteZip();
+              utils.sendUsageReport(bsConfig, args, `${err}\n${Constants.userMessages.ZIP_UPLOAD_FAILED}`, Constants.messageTypes.ERROR, 'zip_upload_failed');
+            }
           });
        }).catch(function (err) {
-          // Zip Upload failed | Local Start failed
+          // Zipping failed
           logger.error(err);
-          if(err === Constants.userMessages.LOCAL_START_FAILED){
-            utils.sendUsageReport(bsConfig, args, `${err}\n${Constants.userMessages.LOCAL_START_FAILED}`, Constants.messageTypes.ERROR, 'local_start_failed');
-          } else {
-            logger.error(Constants.userMessages.ZIP_UPLOAD_FAILED);
+          logger.error(Constants.userMessages.FAILED_TO_ZIP);
+          utils.sendUsageReport(bsConfig, args, `${err}\n${Constants.userMessages.FAILED_TO_ZIP}`, Constants.messageTypes.ERROR, 'zip_creation_failed');
+          try {
             fileHelpers.deleteZip();
-            utils.sendUsageReport(bsConfig, args, `${err}\n${Constants.userMessages.ZIP_UPLOAD_FAILED}`, Constants.messageTypes.ERROR, 'zip_upload_failed');
+          } catch (err) {
+            utils.sendUsageReport(bsConfig, args, Constants.userMessages.ZIP_DELETE_FAILED, Constants.messageTypes.ERROR, 'zip_deletion_failed');
          }
        });
      }).catch(function (err) {
-      // Zipping failed
+      // md5 check failed
       logger.error(err);
-      logger.error(Constants.userMessages.FAILED_TO_ZIP);
-      utils.sendUsageReport(bsConfig, args, `${err}\n${Constants.userMessages.FAILED_TO_ZIP}`, Constants.messageTypes.ERROR, 'zip_creation_failed');
-      try {
-        fileHelpers.deleteZip();
-      } catch (err) {
-        utils.sendUsageReport(bsConfig, args, Constants.userMessages.ZIP_DELETE_FAILED, Constants.messageTypes.ERROR, 'zip_deletion_failed');
-      }
+      logger.error(Constants.userMessages.FAILED_MD5_CHECK);
+      utils.sendUsageReport(bsConfig, args, Constants.userMessages.MD5_CHECK_FAILED, Constants.messageTypes.ERROR, 'zip_already_uploaded_failed');
     });
   }).catch(function (err) {
     // browerstack.json is not valid
diff --git a/bin/helpers/archiver.js b/bin/helpers/archiver.js
index d05cd284..92eba9c0 100644
--- a/bin/helpers/archiver.js
+++ b/bin/helpers/archiver.js
@@ -7,8 +7,11 @@ const archiver = require("archiver"),
   utils = require('../helpers/utils'),
   path = require('path');

-const archiveSpecs = (runSettings, filePath, excludeFiles) => {
+const archiveSpecs = (runSettings, filePath, excludeFiles, md5data) => {
   return new Promise(function (resolve, reject) {
+    if (md5data.zipUrlPresent) {
+      return resolve('Zipping not required');
+    }
     var output = fs.createWriteStream(filePath);

     var cypressFolderPath = path.dirname(runSettings.cypressConfigFilePath);
@@ -41,8 +44,7 @@ const archiveSpecs = (runSettings, filePath, excludeFiles) => {

     archive.pipe(output);

-    let ignoreFiles = getFilesToIgnore(runSettings, excludeFiles);
-
+    let ignoreFiles = utils.getFilesToIgnore(runSettings, excludeFiles);
     archive.glob(`**/*.+(${Constants.allowedFileTypes.join("|")})`, { cwd: cypressFolderPath, matchBase: true, ignore: ignoreFiles, dot:true });

     let packageJSON = {};
@@ -78,21 +80,4 @@ const archiveSpecs = (runSettings, filePath, excludeFiles) => {
   });
 }

-const getFilesToIgnore = (runSettings, excludeFiles) => {
-  let ignoreFiles = Constants.filesToIgnoreWhileUploading;
-
-  // exclude files asked by the user
-  // args will take precedence over config file
-  if (!utils.isUndefined(excludeFiles)) {
-    let excludePatterns = utils.fixCommaSeparatedString(excludeFiles).split(',');
-    ignoreFiles = ignoreFiles.concat(excludePatterns);
-    logger.info(`Excluding files matching: ${JSON.stringify(excludePatterns)}`);
-  } else if (!utils.isUndefined(runSettings.exclude) && runSettings.exclude.length) {
-    ignoreFiles = ignoreFiles.concat(runSettings.exclude);
-    logger.info(`Excluding files matching: ${JSON.stringify(runSettings.exclude)}`);
-  }
-
-  return ignoreFiles;
-}
-
 exports.archive = archiveSpecs
diff --git a/bin/helpers/checkUploaded.js b/bin/helpers/checkUploaded.js
new file mode 100644
index 00000000..f547d412
--- /dev/null
+++ b/bin/helpers/checkUploaded.js
@@ -0,0 +1,106 @@
+'use strict';
+const request = require('request');
+
+const crypto = require('crypto'),
+  Constants = require('./constants'),
+  hashHelper = require('./hashUtil'),
+  config = require('./config'),
+  path = require('path'),
+  fs = require("fs"),
+  utils = require('./utils');
+
+
+const checkSpecsMd5 = (runSettings, excludeFiles) => {
+  return new Promise(function (resolve, reject) {
+    let cypressFolderPath = path.dirname(runSettings.cypressConfigFilePath);
+    let ignoreFiles = utils.getFilesToIgnore(runSettings, excludeFiles, false);
+    let options = {
+      cwd: cypressFolderPath,
+      ignore: ignoreFiles,
+      pattern: `**/*.+(${Constants.allowedFileTypes.join("|")})`
+    };
+    hashHelper.hashWrapper(options).then(function (data) {
+      const outputHash = crypto.createHash(Constants.hashingOptions.algo);
+      outputHash.update(data);
+      let packageJSON = {};
+
+      if (typeof runSettings.package_config_options === 'object') {
+        Object.assign(packageJSON, runSettings.package_config_options);
+      }
+
+      if (typeof runSettings.npm_dependencies === 'object') {
+        Object.assign(packageJSON, {
+          devDependencies: runSettings.npm_dependencies,
+        });
+      }
+
+      if (Object.keys(packageJSON).length > 0) {
+        let packageJSONString = JSON.stringify(packageJSON);
+        outputHash.update(packageJSONString);
+      }
+
+      if (
+        runSettings.cypress_config_file &&
+        runSettings.cypress_config_filename !== 'false'
+      ) {
+        let cypressJSON = JSON.parse(
+          fs.readFileSync(runSettings.cypressConfigFilePath)
+        );
+        let cypressJSONString = JSON.stringify(cypressJSON);
+        outputHash.update(cypressJSONString);
+      }
+      resolve(outputHash.digest(Constants.hashingOptions.encoding));
+    }).catch(function (error) {
+      reject(error);
+    });
+  });
+};
+
+const checkUploadedMd5 = (bsConfig, args) => {
+  return new Promise(function (resolve) {
+    let obj = {
+      zipUrlPresent: false,
+    };
+    if (args["force-upload"]) {
+      return resolve(obj);
+    }
+    checkSpecsMd5(bsConfig.run_settings, args.exclude).then(function (md5data) {
+      Object.assign(obj, {md5sum: md5data});
+      let data = JSON.stringify({ zip_md5sum: md5data });
+
+      let options = {
+        url: config.checkMd5sum,
+        auth: {
+          user: bsConfig.auth.username,
+          password: bsConfig.auth.access_key
+        },
+        headers: {
+          'Content-Type': 'application/json',
+          "User-Agent": utils.getUserAgent(),
+        },
+        body: data
+      };
+
+      request.post(options, function (err, resp, body) {
+        if (err) {
+          resolve(obj);
+        } else {
+          let zipData = null;
+          try {
+            zipData = JSON.parse(body);
+          } catch (error) {
+            zipData = {};
+          }
+          if (resp.statusCode === 200 && !utils.isUndefined(zipData.zipUrl)) {
+            Object.assign(obj, zipData, {zipUrlPresent: true});
+          }
+          resolve(obj);
+        }
+      });
+    }).catch((error) => {
+      resolve({zipUrlPresent: false});
+    });
+  });
+};
+
+exports.checkUploadedMd5 = checkUploadedMd5;
diff --git a/bin/helpers/config.js b/bin/helpers/config.js
index 3f6daeed..a17f0c43 100644
--- a/bin/helpers/config.js
+++ b/bin/helpers/config.js
@@ -15,6 +15,7 @@ if(config.env !== "production") {
 config.cypress_v1 = `${config.rails_host}/automate/cypress/v1`;
 config.buildUrl = `${config.cypress_v1}/builds/`;
 config.buildStopUrl = `${config.cypress_v1}/builds/stop/`;
+config.checkMd5sum = `${config.cypress_v1}/md5sumcheck/`;
 config.fileName = "tests.zip";
 config.retries = 5;
 config.networkErrorExitCode = 2;
diff --git a/bin/helpers/constants.js b/bin/helpers/constants.js
index f4219805..8a000fe6 100644
--- a/bin/helpers/constants.js
+++ b/bin/helpers/constants.js
@@ -21,10 +21,12 @@ const userMessages = {
   CONFIG_FILE_CREATED: "BrowserStack Config File created, you can now run browserstack-cypress --config-file run",
   CONFIG_FILE_EXISTS: "File already exists, delete the browserstack.json file manually. skipping...",
   DIR_NOT_FOUND: "Given path does not exist. Failed to create browserstack.json in %s",
+  MD5_CHECK_FAILED: "There was some issue while checking if zip is already uploaded.",
   ZIP_DELETE_FAILED: "Could not delete tests.zip successfully.",
   ZIP_DELETED: "Deleted tests.zip successfully.",
   API_DEPRECATED: "This version of API is deprecated, please use latest version of API.",
   FAILED_TO_ZIP: "Failed to zip files.",
+  FAILED_MD5_CHECK: "Something went wrong - you can retry running browserstack-cypress with ‘--force-upload’ parameter, or contact BrowserStack Support.",
   VISIT_DASHBOARD: "Visit the Automate dashboard for real-time test reporting:",
   CONFLICTING_INIT_ARGUMENTS: "Conflicting arguments given. You can use --path only with a file name, and not with a file path.",
   NO_PARALLELS: "Your specs will run sequentially on a single machine. Read more about running your specs in parallel here: https://www.browserstack.com/docs/automate/cypress/run-tests-in-parallel",
@@ -111,6 +113,7 @@ const cliMessages = {
   },
   COMMON: {
     DISABLE_USAGE_REPORTING: "Disable usage reporting",
+    FORCE_UPLOAD: "Force the upload of your test files even if BrowserStack has detected no changes in your suite since you last ran",
     USERNAME: "Your BrowserStack username",
     ACCESS_KEY: "Your BrowserStack access key",
     NO_NPM_WARNING: "No NPM warning if npm_dependencies is empty",
@@ -145,6 +148,21 @@ const filesToIgnoreWhileUploading = [
   '.yarn/**',
 ];

+const readDirOptions = {
+  cwd: '.',
+  matchBase: true,
+  ignore: [],
+  dot: true,
+  stat: true,
+  pattern: ''
+}
+
+const hashingOptions = {
+  parallel: 10,
+  algo: 'md5',
+  encoding: 'hex',
+};
+
 const specFileTypes = ['js', 'ts', 'feature', 'jsx', 'coffee', 'cjsx'];
 const DEFAULT_CYPRESS_SPEC_PATH = "cypress/integration"
@@ -160,6 +178,8 @@ module.exports = Object.freeze({
   messageTypes,
   allowedFileTypes,
   filesToIgnoreWhileUploading,
+  readDirOptions,
+  hashingOptions,
   specFileTypes,
   DEFAULT_CYPRESS_SPEC_PATH,
   SPEC_TOTAL_CHAR_LIMIT,
diff --git a/bin/helpers/hashUtil.js b/bin/helpers/hashUtil.js
new file mode 100644
index 00000000..e5086ebc
--- /dev/null
+++ b/bin/helpers/hashUtil.js
@@ -0,0 +1,66 @@
+'use strict';
+const glob = require('readdir-glob'),
+  Constants = require('./constants'),
+  crypto = require('crypto'),
+  fs = require('fs');
+
+const hashWrapper = (options) => {
+  return folderStats(options).then((files) => {
+    return batchHashFile(files);
+  }).then((hashes) => {
+    const combinedHash = crypto.createHash(Constants.hashingOptions.algo);
+    hashes.forEach((hash) => combinedHash.update(hash));
+    return combinedHash.digest(Constants.hashingOptions.encoding);
+  });
+}
+
+const folderStats = (options) => {
+  return new Promise((resolve, reject) => {
+    let readDirOptions = {};
+    Object.assign(readDirOptions, Constants.readDirOptions, options)
+    let globber = glob(readDirOptions.cwd || '.', readDirOptions);
+    let files = [];
+    globber.on('match', (fileMatch) => {
+      files.push({relativePath: fileMatch.relative, absolutePath: fileMatch.absolute, stats: fileMatch.stat})
+    });
+    globber.on('error', (_err) => {
+      reject("Error in getting files.");
+    });
+    globber.on('end', (_) => {
+      resolve(files);
+    });
+  });
+}
+
+const batchHashFile = (fileBatch) => {
+  return Promise.resolve(fileBatch).then((files) => {
+    files = Array(Math.ceil(files.length / Constants.hashingOptions.parallel)).fill([]).map((_, index) => index * Constants.hashingOptions.parallel).map(begin => files.slice(begin, begin + Constants.hashingOptions.parallel));
+    files = files.map((batch) => {
+      return (res) => Promise.all(batch.map(hashFile)).then((data) => res.concat(data));
+    });
+    let hash = files.reduce((acc, curr) => acc.then(curr), Promise.resolve([]));
+    return hash;
+  });
+};
+
+const hashFile = (file) => {
+  return new Promise((resolve) => {
+    let { relativePath, absolutePath, stats } = file;
+    const hash = crypto.createHash(Constants.hashingOptions.algo);
+    hash.update(relativePath);
+    if (stats.isFile()) {
+      const f = fs.createReadStream(absolutePath);
+      f.on('end', () => {
+        const hashedValue = hash.digest(Constants.hashingOptions.encoding);
+        return resolve(hashedValue);
+      });
+      f.pipe(hash, { end: false });
+    } else {
+      // paths like empty directories.
+      const hashedValue = hash.digest(Constants.hashingOptions.encoding);
+      return resolve(hashedValue);
+    }
+  });
+};
+
+exports.hashWrapper = hashWrapper;
diff --git a/bin/helpers/utils.js b/bin/helpers/utils.js
index 38ca2475..b7c31d1d 100644
--- a/bin/helpers/utils.js
+++ b/bin/helpers/utils.js
@@ -600,6 +600,23 @@ exports.setNoWrap = (_bsConfig, args) => {
   }
 }

+exports.getFilesToIgnore = (runSettings, excludeFiles, logging = true) => {
+  let ignoreFiles = Constants.filesToIgnoreWhileUploading;
+
+  // exclude files asked by the user
+  // args will take precedence over config file
+  if (!this.isUndefined(excludeFiles)) {
+    let excludePatterns = this.fixCommaSeparatedString(excludeFiles).split(',');
+    ignoreFiles = ignoreFiles.concat(excludePatterns);
+    if (logging) logger.info(`Excluding files matching: ${JSON.stringify(excludePatterns)}`);
+  } else if (!this.isUndefined(runSettings.exclude) && runSettings.exclude.length) {
+    ignoreFiles = ignoreFiles.concat(runSettings.exclude);
+    if (logging) logger.info(`Excluding files matching: ${JSON.stringify(runSettings.exclude)}`);
+  }
+
+  return ignoreFiles;
+}
+
 exports.getNumberOfSpecFiles = (bsConfig, args, cypressJson) => {
   let testFolderPath = cypressJson.integrationFolder || Constants.DEFAULT_CYPRESS_SPEC_PATH;
   let globSearchPattern = this.sanitizeSpecsPattern(bsConfig.run_settings.specs) || `${testFolderPath}/**/*.+(${Constants.specFileTypes.join("|")})`;
diff --git a/bin/helpers/zipUpload.js b/bin/helpers/zipUpload.js
index 5a1d3f5c..1c604206 100644
--- a/bin/helpers/zipUpload.js
+++ b/bin/helpers/zipUpload.js
@@ -7,8 +7,11 @@ const config = require("./config"),
   utils = require("./utils"),
   fileHelpers = require("./fileHelpers");

-const uploadCypressZip = (bsConfig, filePath) => {
+const uploadCypressZip = (bsConfig, filePath, md5data) => {
   return new Promise(function (resolve, reject) {
+    if (md5data.zipUrlPresent) {
+      return resolve({ zip_url: md5data.zipUrl });
+    }
     logger.info(Constants.userMessages.UPLOADING_TESTS);
     let options = {
       url: config.uploadUrl,
@@ -19,7 +22,8 @@ const uploadCypressZip = (bsConfig, filePath) => {
       formData: {
         file: fs.createReadStream(filePath),
         filetype: 'zip',
-        filename: 'tests'
+        filename: 'tests',
+        zipMd5sum: md5data.md5sum ? md5data.md5sum : '',
       },
       headers: {
         "User-Agent": utils.getUserAgent(),
diff --git a/bin/runner.js b/bin/runner.js
index 1a53b058..0e35b1f0 100755
--- a/bin/runner.js
+++ b/bin/runner.js
@@ -201,6 +201,11 @@ var argv = yargs
           describe: Constants.cliMessages.RUN.SYNC_DESCRIPTION,
           type: "boolean"
         },
+        'force-upload': {
+          default: false,
+          describe: Constants.cliMessages.COMMON.FORCE_UPLOAD,
+          type: "boolean"
+        },
         'headed': {
           default: false,
           describe: Constants.cliMessages.RUN.HEADED,
diff --git a/test/unit/bin/commands/runs.js b/test/unit/bin/commands/runs.js
index 8f485172..798bbd1a 100644
--- a/test/unit/bin/commands/runs.js
+++ b/test/unit/bin/commands/runs.js
@@ -205,6 +205,7 @@ describe("runs", () => {
       setTestEnvsStub = sandbox.stub();
       validateBstackJsonStub = sandbox.stub();
       setUsageReportingFlagStub = sandbox.stub().returns(undefined);
+      checkUploadedStub = sandbox.stub();
       sendUsageReportStub = sandbox.stub().callsFake(function () {
         return "end";
       });
@@ -267,11 +268,15 @@ describe("runs", () => {
         '../helpers/fileHelpers': {
           deleteZip: deleteZipStub,
         },
+        '../helpers/checkUploaded': {
+          checkUploadedMd5: checkUploadedStub,
+        },
       });

       validateBstackJsonStub.returns(Promise.resolve(bsConfig));
-      setupLocalTestingStub.returns(Promise.resolve("nothing"))
+      setupLocalTestingStub.returns(Promise.resolve("nothing"));
       capabilityValidatorStub.returns(Promise.resolve(Constants.validationMessages.VALIDATED));
+      checkUploadedStub.returns(Promise.resolve({ zipUrlPresent: false }));
       archiverStub.returns(Promise.reject("random-error"));

       return runs(args)
@@ -329,6 +334,7 @@ describe("runs", () => {
       setTestEnvsStub = sandbox.stub();
       getConfigPathStub = sandbox.stub();
       setUsageReportingFlagStub = sandbox.stub().returns(undefined);
+      checkUploadedStub = sandbox.stub();
       sendUsageReportStub = sandbox.stub().callsFake(function () {
         return "end";
       });
@@ -395,11 +401,15 @@ describe("runs", () => {
         '../helpers/zipUpload': {
           zipUpload: zipUploadStub,
         },
+        '../helpers/checkUploaded': {
+          checkUploadedMd5: checkUploadedStub,
+        },
       });

       validateBstackJsonStub.returns(Promise.resolve(bsConfig));
       capabilityValidatorStub.returns(Promise.resolve(Constants.validationMessages.VALIDATED));
       setupLocalTestingStub.returns(Promise.resolve("nothing"));
+      checkUploadedStub.returns(Promise.resolve({ zipUrlPresent: false }))
       archiverStub.returns(Promise.resolve("Zipping completed"));
       zipUploadStub.returns(Promise.reject("random-error"));

@@ -457,6 +467,7 @@ describe("runs", () => {
       setTestEnvsStub = sandbox.stub();
       getConfigPathStub = sandbox.stub();
       setUsageReportingFlagStub = sandbox.stub().returns(undefined);
+      checkUploadedStub = sandbox.stub();
       sendUsageReportStub = sandbox.stub().callsFake(function () {
         return "end";
       });
@@ -529,6 +540,9 @@ describe("runs", () => {
         '../helpers/build': {
           createBuild: createBuildStub,
         },
+        '../helpers/checkUploaded': {
+          checkUploadedMd5: checkUploadedStub,
+        },
       });

       validateBstackJsonStub.returns(Promise.resolve(bsConfig));
       setupLocalTestingStub.returns(Promise.resolve("nothing"));
       capabilityValidatorStub.returns(
         Promise.resolve(Constants.validationMessages.VALIDATED)
       );
       archiverStub.returns(Promise.resolve("Zipping completed"));
+      checkUploadedStub.returns(Promise.resolve({ zipUrlPresent: false }));
       zipUploadStub.returns(Promise.resolve("zip uploaded"));
       stopLocalBinaryStub.returns(Promise.resolve("nothing"));
       createBuildStub.returns(Promise.reject("random-error"));
@@ -598,6 +613,7 @@ describe("runs", () => {
       setTestEnvsStub = sandbox.stub();
       getConfigPathStub = sandbox.stub();
       setUsageReportingFlagStub = sandbox.stub().returns(undefined);
+      checkUploadedStub = sandbox.stub();
       sendUsageReportStub = sandbox.stub().callsFake(function () {
         return "end";
       });
@@ -682,6 +698,9 @@ describe("runs", () => {
         '../helpers/config': {
           dashboardUrl: dashboardUrl,
         },
+        '../helpers/checkUploaded': {
+          checkUploadedMd5: checkUploadedStub,
+        },
         '../helpers/timeComponents': {
           initTimeComponents: initTimeComponentsStub,
           getTimeComponents: getTimeComponentsStub,
@@ -696,6 +715,7 @@ describe("runs", () => {
         Promise.resolve(Constants.validationMessages.VALIDATED)
       );
       archiverStub.returns(Promise.resolve("Zipping completed"));
+      checkUploadedStub.returns(Promise.resolve({ zipUrlPresent: false }))
       zipUploadStub.returns(Promise.resolve("zip uploaded"));
       createBuildStub.returns(Promise.resolve({ message: 'Success', build_id: 'random_build_id', dashboard_url: dashboardUrl }));
diff --git a/test/unit/bin/helpers/archiver.js b/test/unit/bin/helpers/archiver.js
deleted file mode 100644
index 07f9e239..00000000
--- a/test/unit/bin/helpers/archiver.js
+++ /dev/null
@@ -1,57 +0,0 @@
-const chai = require("chai"),
-  rewire = require("rewire"),
-  chaiAsPromised = require("chai-as-promised");
-
-const utils = require("../../../../bin/helpers/utils"),
-  Constants = require("../../../../bin/helpers/constants"),
-  logger = require("../../../../bin/helpers/logger").winstonLogger;
-
-chai.use(chaiAsPromised);
-logger.transports["console.info"].silent = true;
-
-const archiver = rewire("../../../../bin/helpers/archiver");
-
-_getFilesToIgnore = archiver.__get__("getFilesToIgnore");
-
-describe("archiver.js", () => {
-
-  describe("getFilesToIgnore", () => {
-    it("no args, no exclude in runSettings", () => {
-      chai.expect(_getFilesToIgnore({}, undefined)).to.be.eql(Constants.filesToIgnoreWhileUploading);
-    });
-
-    it("args passed, no exclude in runSettings", () => {
-      let excludeFiles = "file1.js, file2.json";
-      let argsToArray = utils.fixCommaSeparatedString(excludeFiles).split(',');
-      chai.expect(_getFilesToIgnore({}, excludeFiles)).to.be.eql(Constants.filesToIgnoreWhileUploading.concat(argsToArray));
-
-      excludeFiles = "file1.js,file2.json";
-      argsToArray = utils.fixCommaSeparatedString(excludeFiles).split(',');
-      chai.expect(_getFilesToIgnore({}, excludeFiles)).to.be.eql(Constants.filesToIgnoreWhileUploading.concat(argsToArray));
-
-      excludeFiles = " file1.js , file2.json ";
-      argsToArray = utils.fixCommaSeparatedString(excludeFiles).split(',');
-      chai.expect(_getFilesToIgnore({}, excludeFiles)).to.be.eql(Constants.filesToIgnoreWhileUploading.concat(argsToArray));
-    });
-
-    it("args passed, exclude added in runSettings", () => {
-      // args preceed over config file
-      let excludeFiles = "file1.js, file2.json ";
-      let argsToArray = utils.fixCommaSeparatedString(excludeFiles).split(',');
-
-      let runSettings = { exclude: [] };
-      chai.expect(_getFilesToIgnore(runSettings, excludeFiles)).to.be.eql(Constants.filesToIgnoreWhileUploading.concat(argsToArray));
-
-      runSettings = { exclude: ["sample1.js", "sample2.json"] };
-      chai.expect(_getFilesToIgnore(runSettings, excludeFiles)).to.be.eql(Constants.filesToIgnoreWhileUploading.concat(argsToArray));
-    });
-
-    it("no args, exclude added in runSettings", () => {
-      let runSettings = { exclude: [] };
-      chai.expect(_getFilesToIgnore(runSettings, undefined)).to.be.eql(Constants.filesToIgnoreWhileUploading);
-
-      runSettings = { exclude: ["sample1.js", "sample2.json"] };
-      chai.expect(_getFilesToIgnore(runSettings, undefined)).to.be.eql(Constants.filesToIgnoreWhileUploading.concat(runSettings.exclude));
-    });
-  });
-});
diff --git a/test/unit/bin/helpers/checkUploaded.js b/test/unit/bin/helpers/checkUploaded.js
new file mode 100644
index 00000000..1f402f66
--- /dev/null
+++ b/test/unit/bin/helpers/checkUploaded.js
@@ -0,0 +1,199 @@
+'use strict';
+const chai = require("chai"),
+  chaiAsPromised = require("chai-as-promised"),
+  sinon = require("sinon"),
+  request = require("request");
+
+const logger = require("../../../../bin/helpers/logger").winstonLogger,
+  testObjects = require("../../support/fixtures/testObjects");
+
+const rewire = require("rewire");
+
+chai.use(chaiAsPromised);
+logger.transports["console.info"].silent = true;
+
+describe("checkUploaded", () => {
+  let bsConfig = testObjects.sampleBsConfig;
+
+  let sandbox;
+
+  beforeEach(() => {
+    sandbox = sinon.createSandbox();
+  });
+
+  afterEach(() => {
+    sandbox.restore();
+    sinon.restore();
+  });
+
+  context("checkUploadedMd5", () => {
+    let checkSpecsMd5Stub;
+    beforeEach(() => {
+      checkSpecsMd5Stub = sandbox.stub().returns(Promise.resolve("random_md5sum"));
+    });
+
+    it("resolves with zipUrlPresent false due to request error", () => {
+      let requestStub = sandbox
+        .stub(request, "post")
+        .yields(new Error("random error"), null, null);
+
+      const checkUploaded = rewire("../../../../bin/helpers/checkUploaded");
+      checkUploaded.__set__({
+        request: { post: requestStub },
+        checkSpecsMd5: checkSpecsMd5Stub
+      });
+      let checkUploadedMd5rewire = checkUploaded.__get__('checkUploadedMd5');
+
+      return checkUploadedMd5rewire(bsConfig, {})
+        .then(function (data) {
+          chai.assert.equal(data.md5sum, 'random_md5sum');
+          chai.assert.equal(data.zipUrlPresent, false);
+          sinon.assert.calledOnce(requestStub);
+          sinon.assert.calledOnce(checkSpecsMd5Stub);
+        })
+        .catch((error) => {
+          chai.assert.fail("Promise error");
+        });
+    });
+
+    it("resolves with zipUrlPresent true and zip url", () => {
+      let requestStub = sandbox
+        .stub(request, "post")
+        .yields(null, { statusCode: 200 }, '{"zipUrl":"bs://random_hashid"}');
+
+      const checkUploaded = rewire("../../../../bin/helpers/checkUploaded");
+      checkUploaded.__set__({
+        request: { post: requestStub },
+        checkSpecsMd5: checkSpecsMd5Stub
+      });
+      let checkUploadedMd5rewire = checkUploaded.__get__('checkUploadedMd5');
+
+      return checkUploadedMd5rewire(bsConfig, {})
+        .then(function (data) {
+          chai.assert.deepEqual(data, { md5sum: 'random_md5sum', zipUrlPresent: true, zipUrl: 'bs://random_hashid' })
+          sinon.assert.calledOnce(requestStub);
+          sinon.assert.calledOnce(checkSpecsMd5Stub);
+        })
+        .catch((error) => {
+          chai.assert.fail("Promise error");
+        });
+    });
+
+    it("resolves with zipUrlPresent false as not found in db", () => {
+      let requestStub = sandbox
+        .stub(request, "post")
+        .yields(null, { statusCode: 404 }, '{"message":"zip_url for md5sum random_md5sum not found."}');
+
+      const checkUploaded = rewire("../../../../bin/helpers/checkUploaded");
+      checkUploaded.__set__({
+        request: { post: requestStub },
+        checkSpecsMd5: checkSpecsMd5Stub
+      });
+      let checkUploadedMd5rewire = checkUploaded.__get__('checkUploadedMd5');
+
+      return checkUploadedMd5rewire(bsConfig, {})
+        .then(function (data) {
+          chai.assert.deepEqual(data, { md5sum: 'random_md5sum', zipUrlPresent: false })
+          sinon.assert.calledOnce(requestStub);
+          sinon.assert.calledOnce(checkSpecsMd5Stub);
+        })
+        .catch((error) => {
+          chai.assert.fail("Promise error");
+        });
+    });
+
+    it("resolves with zipUrlPresent false if force-upload enabled", () => {
+      let requestStub = sandbox
+        .stub(request, "post")
+        .yields(null, { statusCode: 404 }, '{"message":"zip_url for md5sum random_md5sum not found."}');
+
+      const checkUploaded = rewire("../../../../bin/helpers/checkUploaded");
+      checkUploaded.__set__({
+        request: { post: requestStub },
+        checkSpecsMd5: checkSpecsMd5Stub
+      });
+      let checkUploadedMd5rewire = checkUploaded.__get__('checkUploadedMd5');
+
+      return checkUploadedMd5rewire(bsConfig, {"force-upload": true})
+        .then(function (data) {
+          chai.assert.deepEqual(data, { zipUrlPresent: false })
+          sinon.assert.notCalled(requestStub);
+          sinon.assert.notCalled(checkSpecsMd5Stub);
+        })
+        .catch((error) => {
+          chai.assert.fail("Promise error");
+        });
+    });
+  });
+
+  context("checkSpecsMd5", () => {
+    let cryptoStub, digestStub, updateStub, pathStub, fsStub;
+    beforeEach(() => {
+      digestStub = sandbox.stub().returns("random_md5sum");
+      updateStub = sandbox.stub().returns(null);
+      pathStub = {
+        dirname: sandbox.stub().returns(null)
+      };
+      fsStub = {
+        readFileSync: sandbox.stub().returns('{}')
+      }
+      cryptoStub = {
+        createHash: () => {
+          return {
+            update: updateStub,
+            digest: digestStub
+          }
+        }
+      };
+    });
+
+    it("resolves with md5 value without adding config_file and package.json", () => {
+      let hashElementstub = sandbox.stub().returns(Promise.resolve("random_md5sum"));
+      const checkUploaded = rewire("../../../../bin/helpers/checkUploaded");
+      checkUploaded.__set__({
+        hashHelper: { hashWrapper: hashElementstub },
+        crypto: cryptoStub,
+        path: pathStub
+      });
+      let checkSpecsMd5Rewire = checkUploaded.__get__('checkSpecsMd5');
+
+      return checkSpecsMd5Rewire(bsConfig.run_settings, "random_files")
+        .then(function (data) {
+          chai.assert.equal(data, 'random_md5sum')
+          sinon.assert.calledOnce(hashElementstub);
+          sinon.assert.calledOnce(digestStub);
+          sinon.assert.calledOnce(updateStub);
+        })
+        .catch((error) => {
+          chai.assert.fail("Promise error");
+        });
+    });
+
+    it("resolves with md5 value adding config_file and package.json", () => {
+      let hashElementstub = sandbox.stub().returns(Promise.resolve("random_md5sum"));
+      const checkUploaded = rewire("../../../../bin/helpers/checkUploaded");
+      checkUploaded.__set__({
+        hashHelper: { hashWrapper: hashElementstub },
+        crypto: cryptoStub,
+        path: pathStub,
+        fs: fsStub
+      });
+      let checkSpecsMd5Rewire = checkUploaded.__get__('checkSpecsMd5');
+      let run_settings = {
+        package_config_options: {random: "value"},
+        cypress_config_file: "random/path"
+      }
+
+      return checkSpecsMd5Rewire(run_settings, "random_files")
+        .then(function (data) {
+          chai.assert.equal(data, 'random_md5sum')
+          sinon.assert.calledOnce(hashElementstub);
+          sinon.assert.calledOnce(digestStub);
+          sinon.assert.calledThrice(updateStub);
+        })
+        .catch((error) => {
+          chai.assert.fail("Promise error");
+        });
+    });
+  });
+});
diff --git a/test/unit/bin/helpers/hashUtil.js b/test/unit/bin/helpers/hashUtil.js
new file mode 100644
index 00000000..0b974c2f
--- /dev/null
+++ b/test/unit/bin/helpers/hashUtil.js
@@ -0,0 +1,129 @@
+'use strict';
+const chai = require("chai"),
+  chaiAsPromised = require("chai-as-promised"),
+  sinon = require("sinon"),
+  EventEmitter = require('events');
+
+const logger = require("../../../../bin/helpers/logger").winstonLogger;
+
+const rewire = require("rewire");
+
+chai.use(chaiAsPromised);
+logger.transports["console.info"].silent = true;
+
+
+describe("md5util", () => {
+  let sandbox;
+
+  beforeEach(() => {
+    sandbox = sinon.createSandbox();
+  });
+
+  afterEach(() => {
+    sandbox.restore();
+    sinon.restore();
+  });
+
+  context("folderStats", () => {
+    let fakeEvent, globStub;
+    const hashHelper = rewire("../../../../bin/helpers/hashUtil");
+
+    beforeEach(() => {
+      fakeEvent = new EventEmitter();
+      globStub = sandbox.stub().returns(fakeEvent);
+      hashHelper.__set__({
+        glob: globStub,
+      });
+    });
+
+    it("resolve with array of files ", () => {
+      let folderStatsrewire = hashHelper.__get__('folderStats');
+      process.nextTick(() => {
+        fakeEvent.emit('match', { relative: "random_path_1", absolute: "random_path_1", stat: "random_stat_1" });
+        fakeEvent.emit('end');
+      })
+      return folderStatsrewire({})
+        .then((data) => {
+          chai.assert.deepEqual(data, [{ relativePath: "random_path_1", absolutePath: "random_path_1", stats: "random_stat_1" }]);
+          sinon.assert.calledOnce(globStub);
+        })
+        .catch((error) => {
+          chai.assert.fail("Promise error");
+        });
+    });
+
+    it("reject with Error in getting files. error", () => {
+      let folderStatsrewire = hashHelper.__get__('folderStats');
+      process.nextTick(() => {
+        fakeEvent.emit('error');
+      })
+      return folderStatsrewire({})
+        .then((data) => {
+          chai.assert.fail("Promise error");
+        })
+        .catch((error) => {
+          chai.assert.equal(error, "Error in getting files.");
+          sinon.assert.calledOnce(globStub);
+        });
+    });
+  });
+
+  context("hashFile", () => {
+    let cryptoStub, digestStub, updateStub, fsStub, statsStub;
+    const hashHelper = rewire("../../../../bin/helpers/hashUtil");
+
+    beforeEach(() => {
+      digestStub = sandbox.stub().returns("random_md5sum");
+      updateStub = sandbox.stub().returns(null);
+      statsStub = {
+        isFile: () => {
+          return true;
+        }
+      };
+      cryptoStub = {
+        createHash: () => {
+          return {
+            update: updateStub,
+            digest: digestStub
+          }
+        }
+      };
+      fsStub = {
+        events: {},
+        on: (event, func) => {
+          fsStub.events[event] = func
+        },
+        createReadStream: () => {
+          return fsStub
+        },
+        pipe: () => {
+          return fsStub
+        },
+        emit: (event, data) => {
+          fsStub.events[event](data)
+        }
+      };
+      hashHelper.__set__({
+        crypto: cryptoStub,
+        fs: fsStub,
+      });
+    });
+
+    it("resolve with hash of the file", () => {
+      let hashFileRewire = hashHelper.__get__('hashFile');
+      process.nextTick(() => {
+        fsStub.emit('end');
+      });
+      return hashFileRewire({relativePath: "random", absolutePath: "random", stats: statsStub})
+        .then((data) => {
+          chai.assert.equal(data, "random_md5sum");
+          sinon.assert.calledOnce(updateStub);
+          sinon.assert.calledOnce(digestStub);
+        })
+        .catch((error) => {
+          console.log("error is ",error)
+          chai.assert.fail("Promise error");
+        });
    });
  });
});
diff --git a/test/unit/bin/helpers/utils.js b/test/unit/bin/helpers/utils.js
index ac105617..d78e851c 100644
--- a/test/unit/bin/helpers/utils.js
+++ b/test/unit/bin/helpers/utils.js
@@ -358,7 +358,7 @@ describe('utils', () => {
     let args = testObjects.initSampleArgs;

     it('should call sendUsageReport', () => {
-      sendUsageReportStub = sandbox
+      let sendUsageReportStub = sandbox
         .stub(utils, 'sendUsageReport')
         .callsFake(function () {
           return 'end';
@@ -507,6 +507,46 @@ describe('utils', () => {
     });
   });

+  describe("getFilesToIgnore", () => {
+    it("no args, no exclude in runSettings", () => {
+      chai.expect(utils.getFilesToIgnore({}, undefined)).to.be.eql(constant.filesToIgnoreWhileUploading);
+    });
+
+    it("args passed, no exclude in runSettings", () => {
+      let excludeFiles = "file1.js, file2.json";
+      let argsToArray = utils.fixCommaSeparatedString(excludeFiles).split(',');
+      chai.expect(utils.getFilesToIgnore({}, excludeFiles)).to.be.eql(constant.filesToIgnoreWhileUploading.concat(argsToArray));
+
+      excludeFiles = "file1.js,file2.json";
+      argsToArray = utils.fixCommaSeparatedString(excludeFiles).split(',');
+      chai.expect(utils.getFilesToIgnore({}, excludeFiles)).to.be.eql(constant.filesToIgnoreWhileUploading.concat(argsToArray));
+
+      excludeFiles = " file1.js , file2.json ";
+      argsToArray = utils.fixCommaSeparatedString(excludeFiles).split(',');
+      chai.expect(utils.getFilesToIgnore({}, excludeFiles)).to.be.eql(constant.filesToIgnoreWhileUploading.concat(argsToArray));
+    });
+
+    it("args passed, exclude added in runSettings", () => {
+      // args preceed over config file
+      let excludeFiles = "file1.js, file2.json ";
+      let argsToArray = utils.fixCommaSeparatedString(excludeFiles).split(',');
+
+      let runSettings = { exclude: [] };
+      chai.expect(utils.getFilesToIgnore(runSettings, excludeFiles)).to.be.eql(constant.filesToIgnoreWhileUploading.concat(argsToArray));
+
+      runSettings = { exclude: ["sample1.js", "sample2.json"] };
+      chai.expect(utils.getFilesToIgnore(runSettings, excludeFiles)).to.be.eql(constant.filesToIgnoreWhileUploading.concat(argsToArray));
+    });
+
+    it("no args, exclude added in runSettings", () => {
+      let runSettings = { exclude: [] };
+      chai.expect(utils.getFilesToIgnore(runSettings, undefined)).to.be.eql(constant.filesToIgnoreWhileUploading);
+
+      runSettings = { exclude: ["sample1.js", "sample2.json"] };
+      chai.expect(utils.getFilesToIgnore(runSettings, undefined)).to.be.eql(constant.filesToIgnoreWhileUploading.concat(runSettings.exclude));
+    });
+  });
+
   describe('setTestEnvs', () => {
     it('sets env only from args', () => {
       let argsEnv = 'env3=value3, env4=value4';
diff --git a/test/unit/bin/helpers/zipUpload.js b/test/unit/bin/helpers/zipUpload.js
index 6d547686..3a2de658 100644
--- a/test/unit/bin/helpers/zipUpload.js
+++ b/test/unit/bin/helpers/zipUpload.js
@@ -43,7 +43,7 @@ describe("zipUpload", () => {
     });

     return zipUploader
-      .zipUpload(bsConfig, "./random_file_path")
+      .zipUpload(bsConfig, "./random_file_path",{})
       .then(function (data) {
         chai.assert.fail("Promise error");
       })
@@ -70,7 +70,7 @@ describe("zipUpload", () => {
     });

     return zipUploader
-      .zipUpload(bsConfig, "./random_file_path")
+      .zipUpload(bsConfig, "./random_file_path", {})
       .then(function (data) {
         chai.assert.fail("Promise error");
       })
@@ -99,7 +99,7 @@ describe("zipUpload", () => {
     });

     return zipUploader
-      .zipUpload(bsConfig, "./random_file_path")
+      .zipUpload(bsConfig, "./random_file_path", {})
       .then(function (data) {
         chai.assert.fail("Promise error");
       })
@@ -129,7 +129,7 @@ describe("zipUpload", () => {
     });

     return zipUploader
-      .zipUpload(bsConfig, "./random_file_path")
+      .zipUpload(bsConfig, "./random_file_path", {})
       .then(function (data) {
         chai.assert.fail("Promise error");
       })
@@ -158,7 +158,7 @@ describe("zipUpload", () => {
     });

     return zipUploader
-      .zipUpload(bsConfig, "./random_file_path")
+      .zipUpload(bsConfig, "./random_file_path", {})
       .then(function (data) {
         chai.assert.fail("Promise error");
       })
@@ -190,7 +190,7 @@ describe("zipUpload", () => {
     });

     return zipUploader
-      .zipUpload(bsConfig, "./random_file_path")
+      .zipUpload(bsConfig, "./random_file_path", {})
       .then(function (data) {
         sinon.assert.calledOnce(requestStub);
         sinon.assert.calledOnce(getUserAgentStub);
@@ -202,4 +202,34 @@ describe("zipUpload", () => {
         chai.assert.isNotOk(error, "Promise error");
       });
   });
+
+  it("resolve early if zip url already present", () => {
+    let zip_url = "uploaded zip url";
+    let requestStub = sandbox
+      .stub(request, "post")
+      .yields(null, { statusCode: 200 }, JSON.stringify({ zip_url: zip_url }));
+
+    const zipUploader = proxyquire('../../../../bin/helpers/zipUpload', {
+      './utils': {
+        getUserAgent: getUserAgentStub,
+      },
+      request: {post: requestStub},
+      './fileHelpers': {
+        deleteZip: deleteZipStub,
+      },
+    });
+
+    return zipUploader
+      .zipUpload(bsConfig, "./random_file_path", { zipUrlPresent: true, zipUrl: zip_url })
+      .then(function (data) {
+        sinon.assert.notCalled(requestStub);
+        sinon.assert.notCalled(getUserAgentStub);
+        sinon.assert.notCalled(createReadStreamStub);
+        sinon.assert.notCalled(deleteZipStub);
+        chai.assert.equal(data.zip_url, zip_url);
+      })
+      .catch((error) => {
+        chai.assert.isNotOk(error, "Promise error");
+      });
+  });
 });
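
Illustrative sketch (not part of the patch above): how the md5 short-circuit added by this diff is consumed by the run command. It is written only against the helpers this diff introduces; names such as `bsConfig` and `args` stand in for the values runs.js already has in scope.

// Hypothetical wiring, mirroring bin/commands/runs.js as changed above.
const checkUploaded = require('./bin/helpers/checkUploaded');

checkUploaded.checkUploadedMd5(bsConfig, args).then((md5data) => {
  // md5data.zipUrlPresent === true  -> archiver.archive resolves 'Zipping not required' and
  //                                    zipUpload.zipUpload resolves early with { zip_url: md5data.zipUrl }
  // md5data.zipUrlPresent === false -> the zip is built and uploaded as before, with md5data.md5sum
  //                                    sent alongside the upload as the zipMd5sum form field
});

// Passing the new --force-upload flag (registered in bin/runner.js) makes checkUploadedMd5 resolve
// { zipUrlPresent: false } without hashing or calling the md5sumcheck endpoint, so the suite is
// always re-uploaded:
//   browserstack-cypress run --force-upload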