diff --git a/Dockerfile b/Dockerfile
index d519c465..b439b564 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -36,9 +36,8 @@ ENV DISPLAY_CONFIGURATION='1024x768x24'
 # Agent Environment
 ENV AGENT_NAME='Katalon Agent'
 ENV SERVER_URL='https://testops.katalon.io'
-ENV KATALON_USERNAME=''
 ENV KATALON_API_KEY=''
-ENV TEAM_ID=''
+ENV ORGANIZATION_ID=''
 ENV PROXY=''
 ENV LOG_LEVEL='INFO'
 ENV XVFB_RUN=''
diff --git a/README.md b/README.md
index 59d8e228..57475b60 100644
--- a/README.md
+++ b/README.md
@@ -19,7 +19,7 @@ agentName=My Agent
 email=
 apikey=
 serverUrl=https://analytics.katalon.com/
-teamId=
+organizationId=
 ```
 
 - Supported options:
@@ -30,7 +30,7 @@ teamId=
 | email | The email of the Katalon account used to log into the Katalon TestOps. |
 | apikey | The API Key used to log into the Katalon TestOps. The API key can be generated and retrieved from [here](https://analytics.katalon.com/user/apikey). For more info read the [docs](https://docs.katalon.com/katalon-analytics/docs/ka-api-key.html#katalon-api-keys-usage). |
 | serverUrl | The Katalon TestOps' URL. |
-| teamId | The ID of the Katalon TestOps team to integrate the Katalon Agent into. The agent is shared among members and projects within a team. |
+| organizationId | The ID of the Katalon TestOps organization to integrate the Katalon Agent into. The agent is shared among members and projects within the organization it is linked to. |
 | uuid | The ID used to identify the agent. It is generated by the Katalon Agent on the first run and should not be specified manually or modified for any reason. |
 
 | Optional Option | Description |
@@ -67,7 +67,7 @@ Note: On Linux and MacOS, you might need to add execute permission (`chmode u+x
 
 ## Generate the configuration file
 - The `agentconfig` file can be generated or updated by running `config` command with additional arguments.
-- E.g. `node cli.js config --server-url https://analytics.katalon.com --username --apikey --teamid --agent-name my-agent`
+- E.g. `node cli.js config --server-url https://analytics.katalon.com --username --apikey --organizationid --agent-name my-agent`
 - If an `agentconfig` has been created, the existing configuration will be overridden with the new value.
 - Show the usage of the `config` command with `-h` option. E.g. `node cli.js config -h`.
diff --git a/agentconfig_template b/agentconfig_template
index 92ec704b..ddd7d811 100644
--- a/agentconfig_template
+++ b/agentconfig_template
@@ -3,6 +3,6 @@ agentName=
 email=
 apikey=
 serverUrl=
-teamId=
+organizationId=
 keepFiles=false
 logLevel=INFO
diff --git a/cli.js b/cli.js
index 81ab6318..42120edf 100644
--- a/cli.js
+++ b/cli.js
@@ -26,7 +26,7 @@ program
   .option('-s, --server-url ', 'Katalon Analytics URL')
   .option('-u, --username ', 'Email')
   .option('-p, --apikey ', 'API key')
-  .option('-t, --teamid ', 'Team ID')
+  .option('-t, --organizationid ', 'Organization ID')
   .option('-a, --agent-name ', 'Agent name')
   .option('-c, --config ', 'Configuration file path')
   .option('-x, --proxy ', 'HTTTP/HTTPS Proxy')
@@ -40,7 +40,7 @@ program
       serverUrl: command.serverUrl,
       email: command.username,
       apikey: command.apikey,
-      teamId: command.teamid,
+      organizationId: command.organizationid,
       agentName: command.agentName,
       configPath: command.config,
       proxy: command.proxy,
@@ -58,7 +58,7 @@ program
   .option('-s, --server-url ', 'Katalon Analytics URL')
   .option('-u, --username ', 'Email')
   .option('-p, --apikey ', 'API key')
-  .option('-t, --teamid ', 'Team ID')
+  .option('-t, --organizationid ', 'Organization ID')
   .option('-a, --agent-name ', 'Agent name')
   .option('-c, --config ', 'Configuration file path')
   .option('-x, --proxy ', 'HTTTP/HTTPS Proxy')
@@ -68,7 +68,7 @@ program
       serverUrl: command.serverUrl,
       email: command.username,
       apikey: command.apikey,
-      teamId: command.teamid,
+      organizationId: command.organizationid,
       agentName: command.agentName,
       configPath: command.config,
       proxy: command.proxy,
diff --git a/docker/scripts/agent.sh b/docker/scripts/agent.sh
index 82593865..ff33cc13 100755
--- a/docker/scripts/agent.sh
+++ b/docker/scripts/agent.sh
@@ -11,9 +11,8 @@ cd $KATALON_AGENT_DIR
 
 ./cli-linux-x64 config \
   --server-url "$SERVER_URL" \
-  --username "$KATALON_USERNAME" \
   --apikey "$KATALON_API_KEY" \
-  --teamid "$TEAM_ID" \
+  --organizationid "$ORGANIZATION_ID" \
   --agent-name "$AGENT_NAME" \
   --proxy "$PROXY" \
  --log-level "$LOG_LEVEL" \
diff --git a/package.json b/package.json
index c0891252..236699a1 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "katalon-agent",
-  "version": "v1.7.8",
+  "version": "v2.0.0",
   "description": "",
   "main": "cli.js",
   "scripts": {
diff --git a/src/core/api/api.js b/src/core/api/api.js
index a1b56621..176ea6f5 100644
--- a/src/core/api/api.js
+++ b/src/core/api/api.js
@@ -4,6 +4,13 @@ const http = require('./http');
 const httpInternal = require('./http-testops');
 const urlParam = require('./url-param');
 const { getBasicAuthHeader } = require('./utils');
+const { getAuth } = require('../auth');
+
+function withAuthorization(apiKey) {
+  return {
+    Authorization: getBasicAuthHeader(getAuth(apiKey)),
+  };
+}
 
 module.exports = {
   requestToken(email, password) {
@@ -29,8 +36,8 @@ module.exports = {
     return httpInternal.post(urlParam.accessToken(), data);
   },
 
-  getUploadInfo(projectId) {
-    return httpInternal.get(urlParam.getUploadInfo(projectId));
+  getUploadInfo(projectId, apiKey) {
+    return httpInternal.get(urlParam.getUploadInfo(projectId), withAuthorization(apiKey));
   },
 
   uploadFile(uploadUrl, filePath) {
@@ -45,6 +52,7 @@ module.exports = {
     uploadedPath,
     isEnd,
     reportType,
+    apiKey,
     extraParams = {},
   ) {
     return httpInternal.post(
@@ -58,6 +66,8 @@ module.exports = {
         reportType,
         extraParams,
       ),
+      null,
+      withAuthorization(apiKey),
     );
   },
 
@@ -65,36 +75,36 @@ module.exports = {
     return httpInternal.post(urlParam.pingAgent(), body);
   },
 
-  pingJob(jobId) {
-    return httpInternal.patch(urlParam.pingJob(jobId));
+  pingJob(jobId, apiKey) {
+    return httpInternal.patch(urlParam.pingJob(jobId), null, withAuthorization(apiKey));
   },
 
-  requestJob(uuid, teamId) {
-    return httpInternal.get(urlParam.requestJob(uuid, teamId));
+  requestJob(uuid, organizationId) {
+    return httpInternal.get(urlParam.requestJob(uuid, organizationId));
   },
 
-  updateJob(body) {
-    return httpInternal.post(urlParam.updateJob(), body);
+  updateJob(body, apiKey) {
+    return httpInternal.post(urlParam.updateJob(), body, withAuthorization(apiKey));
   },
 
-  saveJobLog(jobInfo, batch, fileName) {
-    return httpInternal.post(urlParam.saveJobLog(jobInfo, batch, fileName));
+  saveJobLog(jobInfo, batch, fileName, apiKey) {
+    return httpInternal.post(urlParam.saveJobLog(jobInfo, batch, fileName), null, withAuthorization(apiKey));
   },
 
-  notifyJob(jobId, projectId) {
-    return httpInternal.post(urlParam.notifyJob(jobId, projectId));
+  notifyJob(jobId, projectId, apiKey) {
+    return httpInternal.post(urlParam.notifyJob(jobId, projectId), null, withAuthorization(apiKey));
   },
 
   getBuildInfo() {
     return httpInternal.get(urlParam.getBuildInfo());
   },
 
-  updateNodeStatus(jobId, nodeStatus) {
+  updateNodeStatus(jobId, nodeStatus, apiKey) {
     const data = {
       id: jobId,
       nodeStatus,
     };
-    return httpInternal.put(urlParam.updateNodeStatus(), data);
+    return httpInternal.put(urlParam.updateNodeStatus(), data, withAuthorization(apiKey));
   },
 
   getKSReleases() {
@@ -105,7 +115,7 @@ module.exports = {
     return http.stream(urlParam.download(url), filePath);
   },
 
-  downloadFromTestOps(url, filePath) {
-    return httpInternal.stream(urlParam.download(url), filePath);
+  downloadFromTestOps(url, filePath, apiKey) {
+    return httpInternal.stream(urlParam.download(url), filePath, withAuthorization(apiKey));
   },
 };
diff --git a/src/core/api/http-testops.js b/src/core/api/http-testops.js
index 42baae81..22d3154d 100644
--- a/src/core/api/http-testops.js
+++ b/src/core/api/http-testops.js
@@ -4,7 +4,7 @@ const { getBasicAuthHeader } = require('./utils');
 
 function withAuthorization(current = {}) {
   return {
-    Authorization: getBasicAuthHeader(getAuth()),
+    Authorization: (current && current.Authorization) ? current.Authorization : getBasicAuthHeader(getAuth()),
     ...current,
   };
 }
diff --git a/src/core/api/url-param.js b/src/core/api/url-param.js
index 3704659c..d41c03dd 100644
--- a/src/core/api/url-param.js
+++ b/src/core/api/url-param.js
@@ -53,10 +53,10 @@ module.exports = {
     return buildUrl({}, PATHS.JOB, jobId);
   },
 
-  requestJob(uuid, teamId) {
+  requestJob(uuid, organizationId) {
     const params = {
       uuid,
-      teamId,
+      organizationId,
     };
     return buildUrl({ params }, PATHS.JOB, 'get-job');
   },
diff --git a/src/core/auth.js b/src/core/auth.js
index 8afefb8e..f9603be0 100644
--- a/src/core/auth.js
+++ b/src/core/auth.js
@@ -1,9 +1,9 @@
 const config = require('./config');
 
-function getAuth() {
+function getAuth(apikey) {
   return {
     username: '',
-    password: config.apikey,
+    password: apikey || config.apikey,
   };
 }
 
diff --git a/src/core/file.js b/src/core/file.js
index 293c22e3..adfb675f 100644
--- a/src/core/file.js
+++ b/src/core/file.js
@@ -7,7 +7,7 @@ const fs = require('fs-extra');
 const api = require('./api');
 const defaultLogger = require('../config/logger');
 
-function download(downloadMethod, url, logger = defaultLogger) {
+function download(downloadMethod, url, logger = defaultLogger, apiKey) {
   logger.info(`Downloading from ${url}. It may take a few minutes.`);
   const file = tmp.fileSync();
   const filePath = file.name;
@@ -19,7 +19,7 @@ function download(downloadMethod, url, logger = defaultLogger) {
     }
     return Promise.reject(new Error(`Unable to download from ${url} to ${filePath}`));
   };
-  return downloadMethod(url, filePath)
+  return downloadMethod(url, filePath, apiKey)
     .then(verifyDownloadedFile);
 }
 
@@ -57,8 +57,8 @@ module.exports = {
     );
   },
 
-  downloadFromTestOps(url, targetPath, logger = defaultLogger) {
-    return download(api.downloadFromTestOps, url, logger).then((filePath) =>
+  downloadFromTestOps(url, targetPath, apiKey, logger = defaultLogger) {
+    return download(api.downloadFromTestOps, url, logger, apiKey).then((filePath) =>
       this.move(filePath, targetPath, logger),
     );
   },
diff --git a/src/helper/agent.js b/src/helper/agent.js
index 0c3b574b..d7f89efc 100644
--- a/src/helper/agent.js
+++ b/src/helper/agent.js
@@ -24,7 +24,6 @@ function buildUpdateJobBody(jobId, jobStatus, processId) {
 
 function createCommandExecutor(
   projectId,
-  teamId,
   ksArgs,
   x11Display,
   xvfbConfiguration,
@@ -42,7 +41,7 @@ function createCommandExecutor(
   }
 
   const info = {
-    teamId,
+    teamId: parameter.teamId,
     projectId,
     ksVersionNumber: parameter.ksVersion,
     ksLocation: parameter.ksLocation,
diff --git a/src/service/agent.js b/src/service/agent.js
index cbf8163f..891e31b9 100644
--- a/src/service/agent.js
+++ b/src/service/agent.js
@@ -27,16 +27,17 @@ const pingInterval = NODE_ENV === 'debug' ? 30 * 1000 : 60 * 1000;
 const checkProcessInterval = NODE_ENV === 'debug' ? 30 * 1000 : 60 * 5 * 1000;
 const syncJobInterval = NODE_ENV === 'debug' ? 15 * 1000 : 30 * 1000;
 const sendLogWaitInterval = 10 * 1000;
+const jobApiKeyEnv = 'TESTOPS_JOB_API_KEY';
 
-function updateJobStatus(jobId, jobStatus, processId = null) {
+function updateJobStatus(jobId, jobStatus, processId = null, apiKey) {
   const body = buildUpdateJobBody(jobId, jobStatus, processId);
-  return api.updateJob(body);
+  return api.updateJob(body, apiKey);
 }
 
-async function uploadLog(jobInfo, filePath) {
+async function uploadLog(jobInfo, filePath, apiKey) {
   logger.info('Uploading job execution log...');
   // Request upload URL
-  const response = await api.getUploadInfo(jobInfo.projectId);
+  const response = await api.getUploadInfo(jobInfo.projectId, apiKey);
   if (!response || !response.body) {
     return null;
   }
@@ -58,7 +59,7 @@ async function uploadLog(jobInfo, filePath) {
   const fileName = path.basename(filePath);
 
   // Update job's upload file
-  return api.saveJobLog(jobInfo, batch, fileName);
+  return api.saveJobLog(jobInfo, batch, fileName, apiKey);
 }
 
 async function getProfiles() {
@@ -82,9 +83,9 @@ function isOnPremiseProfile(profiles) {
   return null;
 }
 
-function notifyJob(jobId, projectId) {
+function notifyJob(jobId, projectId, apiKey) {
   return api
-    .notifyJob(jobId, projectId)
+    .notifyJob(jobId, projectId, apiKey)
     .catch((error) => logger.warn('Unable to send job notification:', error));
 }
 
@@ -95,11 +96,11 @@ function pingAgent(body) {
     .catch((err) => logger.error('Cannot send agent info to server:', err));
 }
 
-function synchronizeJob(jobId, onJobSynchronization = () => {}) {
+function synchronizeJob(jobId, onJobSynchronization = () => {}, apiKey) { // NOSONAR
   return setInterval(async () => {
     try {
-      const synchronizedJob = await api.pingJob(jobId);
+      const synchronizedJob = await api.pingJob(jobId, apiKey);
       await onJobSynchronization(synchronizedJob && synchronizedJob.body);
     } catch (err) {
       logger.warn('Unable to synchronize job:', jobId, err);
     }
@@ -108,15 +109,15 @@ function synchronizeJob(jobId, onJobSynchronization = () => {}) {
 }
 
 async function executeJob(jobInfo, keepFiles) {
-  const { jobId, projectId } = jobInfo;
-  const notify = () => notifyJob(jobId, projectId);
+  const { jobId, projectId, apiKey } = jobInfo;
+  const notify = () => notifyJob(jobId, projectId, apiKey);
   let isCanceled = false;
   let jLogger;
 
   // Update job status to running
   // Take the job even if the subsequent setup steps fail
   // Prevent the job to be queued forever
-  await updateJobStatus(jobId, JOB_STATUS.RUNNING);
+  await updateJobStatus(jobId, JOB_STATUS.RUNNING, null, apiKey);
   const syncJobIntervalID = synchronizeJob(jobId, async (synchronizedJob) => {
     const { status, id, nodeStatus, processId } = synchronizedJob;
     if (status === JOB_STATUS.CANCELED && nodeStatus === NODE_STATUS.PENDING_CANCELED) {
@@ -125,13 +126,13 @@ async function executeJob(jobInfo, keepFiles) {
         if (processId) {
           processController.killProcessFromJobId(id);
         }
-        await api.updateNodeStatus(id, NODE_STATUS.CANCELED);
+        await api.updateNodeStatus(id, NODE_STATUS.CANCELED, apiKey);
       } catch (err) {
         logger.error(`Error when canceling job ${id}:`, err);
       }
       logger.info(`Job ${jobId} is canceled.`);
     }
-  });
+  }, apiKey);
 
   // Create directory where temporary files are contained
   const tmpRoot = path.resolve(global.appRoot, 'tmp/');
@@ -151,7 +152,7 @@ async function executeJob(jobInfo, keepFiles) {
 
     // Upload log and add new transport to stream log content to s3
    // Everytime a new log entry is written to file
-    await uploadLog(jobInfo, logFilePath);
+    await uploadLog(jobInfo, logFilePath, apiKey);
     jLogger.add(
       new S3FileTransport(
         {
@@ -188,8 +189,8 @@ async function executeJob(jobInfo, keepFiles) {
     const status = await executor.execute(jLogger, tmpDirPath, (pid) => {
       processId = pid;
       processController.createController(pid, jobId);
-      updateJobStatus(jobId, JOB_STATUS.RUNNING, processId).catch(() => { /* ignore */ });
-    });
+      updateJobStatus(jobId, JOB_STATUS.RUNNING, processId, apiKey).catch(() => { /* ignore */ });
+    }, apiKey);
 
     if (isCanceled) {
       jLogger.debug(`Job ${jobId} is canceled.`);
@@ -201,9 +202,9 @@ async function executeJob(jobInfo, keepFiles) {
 
     // Update job status after execution
     const jobStatus = status === 0 ? JOB_STATUS.SUCCESS : JOB_STATUS.FAILED;
-
+    await uploadLog(jobInfo, logFilePath, apiKey);
     logger.debug(`Update job with status '${jobStatus}.'`);
-    await updateJobStatus(jobId, jobStatus, processId);
+    await updateJobStatus(jobId, jobStatus, processId, apiKey);
     logger.info('Job execution has been completed.');
   } catch (err) {
     logger.error(`${executeJob.name}:`, err);
@@ -213,14 +214,13 @@ async function executeJob(jobInfo, keepFiles) {
     // NOTE: Job status is set FAILED might not be because of a failed execution
     // but because of other reasons such as cannot remove tmp directory or cannot upload log
     const jobStatus = JOB_STATUS.FAILED;
+    await uploadLog(jobInfo, logFilePath, apiKey);
     logger.debug(`Error caught during job execution! Update job with status '${jobStatus}'`);
-    await updateJobStatus(jobId, jobStatus);
+    await updateJobStatus(jobId, jobStatus, null, apiKey);
   } finally {
     jLogger.close();
-    await uploadLog(jobInfo, logFilePath);
     logger.info('Job execution log uploaded.');
 
-    notify().catch(() => { /* ignore */ });
 
     clearInterval(syncJobIntervalID);
     processController.killProcessFromJobId(jobId);
@@ -233,8 +233,7 @@ async function executeJob(jobInfo, keepFiles) {
 
 function validateField(configs, propertyName, configFile = defaultConfigFile) {
   if (!configs[propertyName]) {
-    logger.error(`Please specify '${propertyName}' property in ${path.basename(configFile)}.`);
-    return false;
+    throw new Error(`Please specify '${propertyName}' property in ${path.basename(configFile)}.`);
   }
   return true;
 }
@@ -246,13 +245,13 @@ class Agent {
     config.update(commandLineConfigs, configFile);
     setLogLevel(config.logLevel);
 
-    validateField(config, 'email', configFile);
+    // validateField(config, 'email', configFile);
     validateField(config, 'apikey', configFile);
     validateField(config, 'serverUrl', configFile);
-    validateField(config, 'teamId', configFile);
+    validateField(config, 'organizationId', configFile);
 
     this.configFile = configFile;
-    this.teamId = config.teamId;
+    this.organizationId = config.organizationId;
     this.apikey = config.apikey;
   }
 
@@ -280,7 +279,7 @@ class Agent {
       setLogLevel(logLevel);
 
      // Agent is not executing job, request new job
-      const requestJobResponse = await api.requestJob(uuid, this.teamId);
+      const requestJobResponse = await api.requestJob(uuid, this.organizationId);
       if (
         !requestJobResponse ||
         !requestJobResponse.body ||
@@ -291,6 +290,9 @@ class Agent {
         return;
       }
       const jobBody = requestJobResponse.body;
+      const jobApiKey = requestJobResponse.body.parameter.environmentVariables
+        .find((item) => item.name === jobApiKeyEnv);
+      const apiKey = jobApiKey ? jobApiKey.value : this.apikey;
       const {
         id: jobId,
         parameter,
@@ -301,19 +303,18 @@ class Agent {
       if (config.isOnPremise) {
         ksArgs = utils.updateCommand(parameter.command, {
           flag: '-apiKeyOnPremise',
-          value: this.apikey,
+          value: apiKey,
         });
       } else {
         ksArgs = utils.updateCommand(
           parameter.command,
-          { flag: '-apiKey', value: this.apikey },
+          { flag: '-apiKey', value: apiKey },
         );
       }
 
       const downloader = createDownloader(parameter);
       const executor = createCommandExecutor(
         projectId,
-        this.teamId,
         ksArgs,
         x11Display,
         xvfbRun,
@@ -325,7 +326,7 @@ class Agent {
         executor,
         jobId,
         projectId,
-        teamId: this.teamId,
+        apiKey,
       };
 
       await executeJob(jobInfo, keepFiles);
@@ -347,7 +348,7 @@ class Agent {
       pingAgent({
         uuid: configs.uuid,
         name: configs.agentName,
-        teamId: this.teamId,
+        organizationId: this.organizationId,
         hostname: os.getHostName(),
         ip: ip.address(),
         os: os.getVersion(),
@@ -387,24 +388,26 @@ class Agent {
         parameter,
         testProject: { projectId },
       } = jobBody;
+      const jobApiKey = parameter.environmentVariables
+        .find((item) => item.name === jobApiKeyEnv);
+      const apiKey = jobApiKey ? jobApiKey.value : this.apikey;
 
       let ksArgs;
       if (config.isOnPremise) {
         ksArgs = utils.updateCommand(parameter.command, {
           flag: '-apiKeyOnPremise',
-          value: this.apikey,
+          value: apiKey,
         });
       } else {
         ksArgs = utils.updateCommand(
           parameter.command,
-          { flag: '-apiKey', value: this.apikey },
+          { flag: '-apiKey', value: apiKey },
         );
       }
 
       const downloader = createDownloader(parameter);
       const executor = createCommandExecutor(
         projectId,
-        this.teamId,
         ksArgs,
         x11Display,
         xvfbRun,
@@ -416,7 +419,7 @@ class Agent {
         executor,
         jobId,
         projectId,
-        teamId: this.teamId,
+        apiKey,
       };
 
       await executeJob(jobInfo, keepFiles);
diff --git a/src/service/command-executor.js b/src/service/command-executor.js
index 590f144d..ef797ed4 100644
--- a/src/service/command-executor.js
+++ b/src/service/command-executor.js
@@ -17,11 +17,11 @@ const GENERIC_COMMAND_OUTPUT_DIR = 'katalon-agent-output';
 const GENERIC_COMMAND_REPORT_DIR_ENV = 'KATALON_AGENT_REPORT_FOLDER';
 const JUNIT_FILE_PATTERN = '**/*.xml';
 
-function buildTestOpsIntegrationProperties(teamId, projectId, organizationId, gitRepository) {
+function buildTestOpsIntegrationProperties(teamId, projectId, organizationId, gitRepository, apiKey) {
   const deprecatedProperties = {
     'analytics.server.endpoint': config.serverUrl,
     'analytics.authentication.email': config.email,
-    'analytics.authentication.password': config.apikey,
+    'analytics.authentication.password': apiKey,
     'analytics.authentication.encryptionEnabled': false,
     'analytics.testresult.autosubmit': true,
     'analytics.testresult.attach.screenshot': true,
@@ -55,7 +55,7 @@ class BaseKatalonCommandExecutor {
     this.env = info.env;
   }
 
-  async execute(logger, execDirPath, callback) {
+  async execute(logger, execDirPath, callback, apiKey) {
     // Find project file inside project directory
     const projectPathPattern = path.resolve(execDirPath, PROJECT_FILE_PATTERN);
     const ksProjectPaths = glob.sync(projectPathPattern, { nodir: true });
@@ -73,7 +73,7 @@ class BaseKatalonCommandExecutor {
     const [ksProjectPath] = ksProjectPaths;
 
     if (this.preExecuteHook && typeof this.preExecuteHook === 'function') {
-      await this.preExecuteHook(logger, ksProjectPath);
+      await this.preExecuteHook(logger, ksProjectPath, apiKey);
     }
 
     return ks.execute(
@@ -100,7 +100,7 @@ class KatalonCommandExecutor extends BaseKatalonCommandExecutor {
     this.gitRepository = info.gitRepository;
   }
 
-  async downloadExtraFiles(extraFiles, ksProjectPath, jLogger) {
+  async downloadExtraFiles(extraFiles, ksProjectPath, jLogger, apiKey) {
     await Promise.all(
       extraFiles
         .filter((extraFile) =>
@@ -109,11 +109,12 @@ class KatalonCommandExecutor extends BaseKatalonCommandExecutor {
         .map((extraFile) => file.downloadFromTestOps(
           extraFile.contentUrl,
           path.join(ksProjectPath, extraFile.path),
+          apiKey,
           jLogger)),
     );
   }
 
-  async preExecuteHook(logger, ksProjectPath) {
+  async preExecuteHook(logger, ksProjectPath, apiKey) {
     // Manually configure integration settings for KS to upload execution report
     logger.debug('Configure Katalon TestOps integration.');
     const ksProjectDir = path.dirname(ksProjectPath);
@@ -125,7 +126,7 @@ class KatalonCommandExecutor extends BaseKatalonCommandExecutor {
     );
     properties.writeProperties(
       testOpsPropertiesPath,
-      buildTestOpsIntegrationProperties(this.teamId, this.projectId, this.organizationId, this.gitRepository),
+      buildTestOpsIntegrationProperties(this.teamId, this.projectId, this.organizationId, this.gitRepository, apiKey),
     );
 
     logger.debug('Finish configuring Katalon TestOps integration.');
@@ -133,7 +134,7 @@ class KatalonCommandExecutor extends BaseKatalonCommandExecutor {
     // The logic download extra file will run after we manually configure integration settings
     // if the extraFiles is not provided, the agent will work as normal flow
     if (_.isArray(this.extraFiles)) {
-      await this.downloadExtraFiles(this.extraFiles, ksProjectDir, logger);
+      await this.downloadExtraFiles(this.extraFiles, ksProjectDir, logger, apiKey);
     }
     logger.debug('Finish downloading extra files.');
   }
@@ -147,7 +148,7 @@ class GenericCommandExecutor {
     this.env = info.env;
   }
 
-  async execute(logger, execDirPath, callback) {
+  async execute(logger, execDirPath, callback, apiKey) {
     const outputDir = path.join(execDirPath, GENERIC_COMMAND_OUTPUT_DIR);
     fs.ensureDirSync(outputDir);
 
@@ -178,6 +179,7 @@ class GenericCommandExecutor {
       'junit',
       JUNIT_FILE_PATTERN,
       opts,
+      apiKey,
     );
     logger.info('All JUnit reports successfully uploaded.');
     return status;
diff --git a/src/service/report-uploader.js b/src/service/report-uploader.js
index 916de50c..7dd7fb3e 100644
--- a/src/service/report-uploader.js
+++ b/src/service/report-uploader.js
@@ -11,8 +11,8 @@ class Report {
   }
 }
 
-function uploadFile(projectId, batch, folderName, filePath, isEnd, reportType, opts = {}) {
-  return api.getUploadInfo(projectId).then(({ body }) => {
+function uploadFile(projectId, batch, folderName, filePath, isEnd, reportType, opts = {}, apiKey) {
+  return api.getUploadInfo(projectId, apiKey).then(({ body }) => {
     const { uploadUrl } = body;
     const uploadPath = body.path;
 
@@ -26,6 +26,7 @@ function uploadFile(projectId, batch, folderName, filePath, isEnd, reportType, o
       uploadPath,
       isEnd,
       reportType,
+      apiKey,
       opts,
     );
   });
@@ -45,7 +46,7 @@ function collectReports(folderPaths = [], reportPattern = '*') {
 }
 
 module.exports = {
-  uploadReports(projectId, folderPaths, reportType, reportPattern, opts = {}) {
+  uploadReports(projectId, folderPaths, reportType, reportPattern, opts = {}, apiKey) {
     const reports = collectReports(folderPaths, reportPattern);
     const [first, ...rest] = reports;
     if (!first) {
@@ -55,11 +56,11 @@ module.exports = {
     const batch = `${new Date().getTime()}-${uuidv4()}`;
 
     const uploadPromises = rest.map((report) =>
-      uploadFile(projectId, batch, report.dirName, report.path, false, reportType, opts),
+      uploadFile(projectId, batch, report.dirName, report.path, false, reportType, opts, apiKey),
     );
 
     return Promise.all(uploadPromises).then(() =>
-      uploadFile(projectId, batch, first.dirName, first.path, true, reportType, opts),
+      uploadFile(projectId, batch, first.dirName, first.path, true, reportType, opts, apiKey),
     );
   },
 };
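
The change that recurs throughout this patch is that a job may carry its own TestOps API key in a `TESTOPS_JOB_API_KEY` entry of `parameter.environmentVariables`; when that entry is absent, the agent falls back to its configured `apikey`, and the resolved key is threaded through the `api.*` calls, where `withAuthorization(apiKey)` attaches it as a Basic auth header. Below is a minimal sketch of that lookup, for orientation only; the `resolveJobApiKey` helper name is illustrative and does not appear in the patch.

```js
// Illustrative sketch only: mirrors the lookup added in src/service/agent.js.
const jobApiKeyEnv = 'TESTOPS_JOB_API_KEY';

function resolveJobApiKey(parameter, fallbackApiKey) {
  // parameter.environmentVariables is a list of { name, value } pairs sent with the job.
  const entry = (parameter.environmentVariables || [])
    .find((item) => item.name === jobApiKeyEnv);
  return entry ? entry.value : fallbackApiKey;
}

// Hypothetical usage: const apiKey = resolveJobApiKey(jobBody.parameter, config.apikey);
```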