| /** |
| * |
| * Copyright (c) 2020 Silicon Labs |
| * |
| * Licensed under the Apache License, Version 2.0 (the "License"); |
| * you may not use this file except in compliance with the License. |
| * You may obtain a copy of the License at |
| * |
| * http://www.apache.org/licenses/LICENSE-2.0 |
| * |
| * Unless required by applicable law or agreed to in writing, software |
| * distributed under the License is distributed on an "AS IS" BASIS, |
| * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| * See the License for the specific language governing permissions and |
| * limitations under the License. |
| */ |
| |
| /** |
| * @module JS API: random utilities |
| */ |
| |
| const os = require('os') |
| const fs = require('fs') |
| const fsp = fs.promises |
| const env = require('./env') |
| const crc = require('crc') |
| const path = require('path') |
| const childProcess = require('child_process') |
| const queryPackage = require('../db/query-package.js') |
| const queryEndpoint = require('../db/query-endpoint.js') |
| const queryEndpointType = require('../db/query-endpoint-type.js') |
| const queryZcl = require('../db/query-zcl.js') |
| const queryCommand = require('../db/query-command.js') |
| const querySession = require('../db/query-session.js') |
| const dbEnum = require('../../src-shared/db-enum.js') |
| const { v4: uuidv4 } = require('uuid') |
| const xml2js = require('xml2js') |
| const singleInstance = require('single-instance') |
| const string = require('./string') |
| const queryNotification = require('../db/query-session-notification.js') |
| |
| /** |
| * Returns the CRC of the data that is passed. |
| * @param {*} data |
| * @returns Calculated CRC of the data. |
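| * |
| * @example |
| * // Illustrative sketch; the literal input is just an assumed sample value. |
| * let fileCrc = checksum('some file content') |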
| */ |
| function checksum(data) { |
| return crc.crc32(data) |
| } |
| |
| /** |
| * This function assigns the proper package IDs to the session if no |
| * packages are present. It also populates the session options. |
| * |
| * @param {*} db |
| * @param {*} sessionId |
| * @param {*} options: object containing 'zcl' and 'template' |
| * @param {*} selectedZclPropertyPackage |
| * @param {*} selectedGenTemplatePackages |
| * @returns Promise that resolves with the packages array. |
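| * |
| * @example |
| * // Illustrative sketch; db, sessionId and the file paths are assumed to |
| * // come from the caller's environment. |
| * let packages = await ensurePackagesAndPopulateSessionOptions(db, sessionId, { |
| *   zcl: './zcl/zcl.json', |
| *   template: './gen-template/gen-templates.json' |
| * }) |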
| */ |
| async function ensurePackagesAndPopulateSessionOptions( |
| db, |
| sessionId, |
| options = null, |
| selectedZclPropertyPackage = [], |
| selectedGenTemplatePackages = [] |
| ) { |
| let promises = [] |
| // This is the desired ZCL properties file. Because an array may be |
| // passed from the command line, we simply take the first one when |
| // multiple are provided. |
| let zclFile |
| if (options) { |
| zclFile = options.zcl |
| } else { |
| zclFile = selectedZclPropertyPackage.path |
| ? selectedZclPropertyPackage.path |
| : selectedZclPropertyPackage[0].path |
| } |
| // 0. Read current packages. |
| let currentPackages = |
| await queryPackage.getPackageSessionPackagePairBySessionId(db, sessionId) |
| let hasZclPackage = false |
| let hasGenTemplate = false |
| currentPackages.forEach((pair) => { |
| if (pair.pkg.type == dbEnum.packageType.zclProperties) { |
| hasZclPackage = true |
| } |
| if (pair.pkg.type == dbEnum.packageType.genTemplatesJson) { |
| hasGenTemplate = true |
| } |
| }) |
| |
| // 1. Associate a zclProperties file. |
| let sessionPartitionIndex = 0 |
| let sessionPartitionInfo = |
| await querySession.getAllSessionPartitionInfoForSession(db, sessionId) |
| if (!hasZclPackage) { |
| if (selectedZclPropertyPackage && selectedZclPropertyPackage.length > 1) { |
| console.log( |
| `Multiple zcl.properties selected, using them for a multiprotocol configuration: ` + |
| JSON.stringify(selectedZclPropertyPackage) |
| ) |
| for (let i = 0; i < selectedZclPropertyPackage.length; i++) { |
| promises.push( |
| queryPackage.insertSessionPackage( |
| db, |
| sessionPartitionInfo[sessionPartitionIndex].sessionPartitionId, |
| selectedZclPropertyPackage[i].id, |
| true |
| ) |
| ) |
| sessionPartitionIndex++ |
| } |
| } else if ( |
| options.upgradeZclPackages && |
| options.upgradeZclPackages.length > 0 |
| ) { |
| for (let i = 0; i < options.upgradeZclPackages.length; i++) { |
| sessionPartitionIndex++ |
| let sessionPartitionId = await querySession.insertSessionPartition( |
| db, |
| sessionId, |
| sessionPartitionIndex |
| ) |
| await queryPackage.insertSessionPackage( |
| db, |
| sessionPartitionId, |
| options.upgradeZclPackages[i].id, |
| true |
| ) |
| } |
| } else { |
| let zclPropertiesPromise = queryPackage |
| .getPackagesByType(db, dbEnum.packageType.zclProperties) |
| .then((rows) => { |
| let packageId |
| if ( |
| selectedZclPropertyPackage && |
| selectedZclPropertyPackage.length > 0 |
| ) { |
| packageId = selectedZclPropertyPackage[0].id |
| } else if (rows.length == 1) { |
| packageId = rows[0].id |
| env.logDebug( |
| `Single zcl.properties found, using it for the session: ${packageId}` |
| ) |
| } else if (rows.length == 0) { |
| env.logError(`No zcl.properties found for session.`) |
| queryNotification.setNotification( |
| db, |
| 'WARNING', |
| `No zcl.properties found for session.`, |
| sessionId, |
| 2, |
| 0 |
| ) |
| packageId = null |
| } else { |
| rows.forEach((p) => { |
| // If no zcl file is selected then pick the first one available |
| if (!zclFile) { |
| zclFile = p.path |
| } |
| if (path.resolve(zclFile) === p.path) { |
| packageId = p.id |
| } |
| }) |
| env.logWarning( |
| `${sessionId}, ${zclFile}: Multiple toplevel zcl.properties found. Using the first one from args: ${packageId}` |
| ) |
| queryNotification.setNotification( |
| db, |
| 'WARNING', |
| `${sessionId}, ${zclFile}: Multiple toplevel zcl.properties found. Using the first one from args: ${packageId}`, |
| sessionId, |
| 2, |
| 0 |
| ) |
| } |
| if (packageId != null) { |
| if (sessionPartitionInfo.length == 0) { |
| sessionPartitionIndex++ |
| return querySession |
| .insertSessionPartition(db, sessionId, sessionPartitionIndex) |
| .then((sessionPartitionId) => |
| queryPackage.insertSessionPackage( |
| db, |
| sessionPartitionId, |
| packageId, |
| true |
| ) |
| ) |
| } else { |
| sessionPartitionIndex++ |
| return querySession |
| .getAllSessionPartitionInfoForSession(db, sessionId) |
| .then((sessionPartitionInfo) => |
| queryPackage.insertSessionPackage( |
| db, |
| sessionPartitionInfo[sessionPartitionIndex - 1] |
| .sessionPartitionId, |
| packageId, |
| true |
| ) |
| ) |
| } |
| } |
| }) |
| promises.push(zclPropertiesPromise) |
| } |
| } |
| |
| // 2. Associate gen template files |
| if (!hasGenTemplate) { |
| if (selectedGenTemplatePackages && selectedGenTemplatePackages.length > 1) { |
| console.log( |
| `Multiple generation templates selected, using them for a multiprotocol configuration: ` + |
| JSON.stringify(selectedGenTemplatePackages) |
| ) |
| for (let i = 0; i < selectedGenTemplatePackages.length; i++) { |
| promises.push( |
| queryPackage.insertSessionPackage( |
| db, |
| sessionPartitionInfo[sessionPartitionIndex].sessionPartitionId, |
| selectedGenTemplatePackages[i], |
| true |
| ) |
| ) |
| sessionPartitionIndex++ |
| } |
| } else if ( |
| options.upgradeTemplatePackages && |
| options.upgradeTemplatePackages.length > 0 |
| ) { |
| for (let i = 0; i < options.upgradeTemplatePackages.length; i++) { |
| sessionPartitionIndex++ |
| let sessionPartitionId = await querySession.insertSessionPartition( |
| db, |
| sessionId, |
| sessionPartitionIndex |
| ) |
| await queryPackage.insertSessionPackage( |
| db, |
| sessionPartitionId, |
| options.upgradeTemplatePackages[i].id, |
| true |
| ) |
| } |
| } else { |
| let rows = await queryPackage.getPackagesByType( |
| db, |
| dbEnum.packageType.genTemplatesJson |
| ) |
| let packageId |
| if ( |
| selectedGenTemplatePackages && |
| selectedGenTemplatePackages.length > 0 |
| ) { |
| for (const gen of selectedGenTemplatePackages) { |
| if (gen) { |
| packageId = gen |
| } else if (rows.length == 1) { |
| packageId = rows[0].id |
| env.logDebug( |
| `Single generation template metafile found, using it for the session: ${packageId}` |
| ) |
| } else if (rows.length == 0) { |
| env.logDebug(`No generation template metafile found for session.`) |
| packageId = null |
| } else { |
| rows.forEach((p) => { |
| if ( |
| selectedGenTemplatePackages != null && |
| path.resolve(selectedGenTemplatePackages) === p.path |
| ) { |
| packageId = p.id |
| } |
| }) |
| if (packageId != null) { |
| env.logWarning( |
| `Multiple toplevel generation template metafiles found. Using the one from args: ${packageId}` |
| ) |
| queryNotification.setNotification( |
| db, |
| 'WARNING', |
| `Multiple toplevel generation template metafiles found. Using the one from args: ${packageId}`, |
| sessionId, |
| 2, |
| 0 |
| ) |
| } else { |
| packageId = rows[0].id |
| env.logWarning( |
| `Multiple toplevel generation template metafiles found. Using the first one.` |
| ) |
| queryNotification.setNotification( |
| db, |
| 'WARNING', |
| `Multiple toplevel generation template metafiles found. Using the first one.`, |
| sessionId, |
| 2, |
| 0 |
| ) |
| } |
| } |
| if (packageId != null) { |
| if (sessionPartitionInfo.length === 0) { |
| sessionPartitionIndex++ |
| const sessionPartitionId = |
| await querySession.insertSessionPartition( |
| db, |
| sessionId, |
| sessionPartitionIndex |
| ) |
| await queryPackage.insertSessionPackage( |
| db, |
| sessionPartitionId, |
| packageId, |
| true |
| ) |
| } else { |
| sessionPartitionIndex++ |
| const sessionPartitionInfo = |
| await querySession.getAllSessionPartitionInfoForSession( |
| db, |
| sessionId |
| ) |
| await queryPackage.insertSessionPackage( |
| db, |
| sessionPartitionInfo[sessionPartitionIndex - 1] |
| .sessionPartitionId, |
| packageId, |
| true |
| ) |
| } |
| } |
| } |
| } |
| if (packageId == null && rows.length > 0) { |
| // If the package id is not resolved and there are gen-template packages available, |
| // find one with a matching category. If nothing is found, pick the first one available. |
| |
| let packageId |
| if ( |
| selectedZclPropertyPackage && |
| selectedZclPropertyPackage.length > 0 |
| ) { |
| const matchBySelectedCategory = rows.find( |
| (r) => r?.category === selectedZclPropertyPackage[0].category |
| ) |
| packageId = matchBySelectedCategory?.id || rows[0].id |
| } else { |
| packageId = rows[0].id |
| } |
| |
| if (sessionPartitionInfo.length == 0) { |
| sessionPartitionIndex++ |
| const sessionPartitionId = await querySession.insertSessionPartition( |
| db, |
| sessionId, |
| sessionPartitionIndex |
| ) |
| await queryPackage.insertSessionPackage( |
| db, |
| sessionPartitionId, |
| packageId, |
| true |
| ) |
| } else { |
| sessionPartitionIndex++ |
| await querySession.getAllSessionPartitionInfoForSession(db, sessionId) |
| await queryPackage.insertSessionPackage( |
| db, |
| sessionPartitionInfo[sessionPartitionIndex - 1].sessionPartitionId, |
| packageId, |
| true |
| ) |
| } |
| } |
| } |
| } |
| |
| if (promises.length > 0) await Promise.all(promises) |
| // We read all the packages. |
| let packages = await queryPackage.getSessionPackagesWithTypes(db, sessionId) |
| // Now populate the session key/value pairs from the package options. |
| await populateSessionPackageOptions(db, sessionId, packages) |
| return packages |
| } |
| |
| /** |
| * Insert session package options. |
| * |
| * @param {*} db |
| * @param {*} sessionId |
| * @param {*} packages |
| * @returns Promise that resolves once all session package options are inserted |
| */ |
| async function populateSessionPackageOptions(db, sessionId, packages) { |
| let p = packages.map((pkg) => |
| queryPackage |
| .selectAllDefaultOptions(db, pkg.packageRef) |
| .then((optionDefaultsArray) => |
| Promise.all( |
| optionDefaultsArray.map((optionDefault) => { |
| return queryPackage |
| .selectOptionValueByOptionDefaultId(db, optionDefault.optionRef) |
| .then((option) => { |
| return querySession.insertSessionKeyValue( |
| db, |
| sessionId, |
| option.optionCategory, |
| option.optionCode |
| ) |
| }) |
| }) |
| ) |
| ) |
| ) |
| |
| return Promise.all(p) |
| } |
| |
| /** |
| * Move database file out of the way into the backup location. |
| * |
| * @param {*} filePath |
| */ |
| function createBackupFile(filePath) { |
| let pathBak = filePath + '~' |
| if (fs.existsSync(filePath)) { |
| if (fs.existsSync(pathBak)) { |
| env.logDebug(`Deleting old backup file: ${pathBak}`) |
| fs.unlinkSync(pathBak) |
| } |
| env.logDebug(`Creating backup file: ${filePath} to ${pathBak}`) |
| fs.renameSync(filePath, pathBak) |
| } |
| } |
| |
| /** |
| * Returns an object that contains: |
| * match: true if the featureLevel is supported, false otherwise. |
| * message: in case of a mismatch, the message shown to the user. |
| * @param {*} featureLevel |
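| * @param {*} requirementSource |
| * |
| * @example |
| * // Illustrative sketch; the feature level and source label are assumed sample values. |
| * let result = matchFeatureLevel(99999, 'my.zap') |
| * if (!result.match) console.log(result.message) |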
| */ |
| function matchFeatureLevel(featureLevel, requirementSource = null) { |
| if (featureLevel > env.zapVersion().featureLevel) { |
| return { |
| match: false, |
| message: `${ |
| requirementSource == null ? 'File' : requirementSource |
| } requires feature level ${featureLevel}, we only have ${ |
| env.zapVersion().featureLevel |
| }. The latest ZAP release can be found at https://github.com/project-chip/zap/releases. Please upgrade your zap!` |
| } |
| } else { |
| return { match: true } |
| } |
| } |
| |
| /** |
| * Produces a text dump of the session data for human consumption. |
| * |
| * @param {*} db |
| * @param {*} sessionId |
| * @returns promise that resolves into a text report for the session. |
| */ |
| async function sessionReport(db, sessionId) { |
| return queryEndpointType |
| .selectAllEndpointTypes(db, sessionId) |
| .then((epts) => { |
| let ps = [] |
| epts.forEach((ept) => { |
| ps.push( |
| queryEndpoint.selectEndpointClusters(db, ept.id).then((clusters) => { |
| let s = `Endpoint: ${ept.name} \n` |
| let ps2 = [] |
| for (let c of clusters) { |
| let rpt = ` - ${c.hexCode}: cluster: ${c.name} (${c.side})\n` |
| ps2.push( |
| queryEndpoint |
| .selectEndpointClusterAttributes( |
| db, |
| c.clusterId, |
| c.side, |
| ept.id |
| ) |
| .then((attrs) => { |
| for (let at of attrs) { |
| rpt = rpt.concat( |
| ` - ${at.hexCode}: attribute: ${at.name} [${at.type}] [bound: ${at.isBound}]\n` |
| ) |
| } |
| }) |
| .then(() => |
| queryEndpoint.selectEndpointClusterCommands( |
| db, |
| c.clusterId, |
| ept.id |
| ) |
| ) |
| .then((cmds) => { |
| for (let cmd of cmds) { |
| rpt = rpt.concat( |
| ` - ${cmd.hexCode}: command: ${cmd.name}\n` |
| ) |
| } |
| return rpt |
| }) |
| ) |
| } |
| return Promise.all(ps2) |
| .then((rpts) => rpts.join('')) |
| .then((r) => s.concat(r)) |
| }) |
| ) |
| }) |
| return Promise.all(ps).then((results) => results.join('\n')) |
| }) |
| } |
| |
| /** |
| * Produces a structured dump of the session data, useful for testing and debugging. |
| * |
| * @param {*} db |
| * @param {*} sessionId |
| * @returns promise that resolves into a dump object for the session. |
| */ |
| async function sessionDump(db, sessionId) { |
| let dump = { |
| endpointTypes: [], |
| attributes: [], |
| commands: [], |
| clusters: [], |
| usedPackages: [], |
| packageReport: '' |
| } |
| let endpoints = await queryEndpoint.selectAllEndpoints(db, sessionId) |
| dump.endpoints = endpoints |
| |
| let epts = await queryEndpointType.selectAllEndpointTypes(db, sessionId) |
| let ps = [] |
| |
| epts.forEach((ept) => { |
| ept.clusters = [] |
| ept.attributes = [] |
| ept.commands = [] |
| dump.endpointTypes.push(ept) |
| ps.push( |
| queryEndpoint.selectEndpointClusters(db, ept.id).then((clusters) => { |
| let ps2 = [] |
| for (let c of clusters) { |
| ept.clusters.push(c) |
| dump.clusters.push(c) |
| ps2.push( |
| queryEndpoint |
| .selectEndpointClusterAttributes(db, c.clusterId, c.side, ept.id) |
| .then((attrs) => { |
| c.attributes = attrs |
| ept.attributes.push(...attrs) |
| dump.attributes.push(...attrs) |
| }) |
| .then(() => |
| queryEndpoint.selectEndpointClusterCommands( |
| db, |
| c.clusterId, |
| ept.id |
| ) |
| ) |
| .then((cmds) => { |
| c.commands = cmds |
| ept.commands.push(...cmds) |
| dump.commands.push(...cmds) |
| }) |
| ) |
| } |
| return Promise.all(ps2) |
| }) |
| ) |
| }) |
| await Promise.all(ps) |
| |
| // Here we are testing that we have entities only from ONE |
| // package present. There was a bug, where global attributes from |
| // other packages got referenced under the session, because |
| // some query wasn't taking packageId into consideration. |
| for (const at of dump.attributes) { |
| let attributeId = at.id |
| let attribute = await queryZcl.selectAttributeById(db, attributeId) |
| if (dump.usedPackages.indexOf(attribute.packageRef) == -1) { |
| dump.usedPackages.push(attribute.packageRef) |
| } |
| } |
| |
| for (const cm of dump.commands) { |
| let commandId = cm.id |
| let cmd = await queryCommand.selectCommandById(db, commandId) |
| if (dump.usedPackages.indexOf(cmd.packageRef) == -1) { |
| dump.usedPackages.push(cmd.packageRef) |
| } |
| } |
| |
| for (const cl of dump.clusters) { |
| let clusterId = cl.clusterId |
| let cluster = await queryZcl.selectClusterById(db, clusterId) |
| if (dump.usedPackages.indexOf(cluster.packageRef) == -1) { |
| dump.usedPackages.push(cluster.packageRef) |
| } |
| } |
| return dump |
| } |
| |
| /** |
| * If you have an array of arguments, and a function that creates |
| * a promise out of each of those arguments, this function |
| * executes them sequentially, one by one. |
| * |
| * @param {*} arrayOfData |
| * @param {*} promiseCreator |
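| * |
| * @example |
| * // Illustrative sketch; processItem is an assumed async function. |
| * await executePromisesSequentially([1, 2, 3], (item, index) => |
| *   processItem(item, index) |
| * ) |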
| */ |
| async function executePromisesSequentially(arrayOfData, promiseCreator) { |
| return arrayOfData.reduce((prev, nextData, currentIndex) => { |
| return prev.then(() => promiseCreator(nextData, currentIndex)) |
| }, Promise.resolve()) |
| } |
| |
| /** |
| * This function creates an absolute path out of a relative path and its relativity. |
| * @param {*} relativePath |
| * @param {*} relativity |
| * @param {*} zapFilePath |
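| * |
| * @example |
| * // Illustrative sketch; the paths are assumed sample values. |
| * let p = createAbsolutePath( |
| *   'templates/gen-templates.json', |
| *   dbEnum.pathRelativity.relativeToZap, |
| *   '/home/user/project/config.zap' |
| * ) |
| * // On a POSIX system p is '/home/user/project/templates/gen-templates.json'. |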
| */ |
| function createAbsolutePath(relativePath, relativity, zapFilePath) { |
| switch (relativity) { |
| case dbEnum.pathRelativity.absolute: |
| return relativePath |
| case dbEnum.pathRelativity.relativeToUserHome: |
| return path.join(os.homedir(), relativePath) |
| case dbEnum.pathRelativity.relativeToZap: |
| return path.join(path.dirname(zapFilePath), relativePath) |
| case dbEnum.pathRelativity.resolveEnvVars: |
| for (let key in process.env) { |
| if (Object.prototype.hasOwnProperty.call(process.env, key)) { |
| relativePath = relativePath.replaceAll('$' + key, process.env[key]) |
| relativePath = relativePath.replaceAll( |
| '${' + key + '}', |
| process.env[key] |
| ) |
| } |
| } |
| if (relativePath.indexOf('$') !== -1) { |
| throw new Error( |
| 'resolveEnvVars: unable to resolve environment variables completely: ' + |
| relativePath |
| ) |
| } |
| } |
| return relativePath |
| } |
| |
| /** |
| * This method takes an array of root locations and a relative path. |
| * It attempts to locate a file by combining each root location with the |
| * relative path, until an existing file is found and returned. |
| * |
| * If none of the combined root locations and relative paths results |
| * in an actual file, null is returned. |
| * |
| * @param {*} rootFileLocations Array of root file locations, typically directories |
| * @param {*} relativeFilePath Relative path |
| * @returns A fully resolved path that exists, or null if none is available. |
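| * |
| * @example |
| * // Illustrative sketch; the root directories and relative path are assumed sample values. |
| * let file = locateRelativeFilePath(['/opt/zap', process.cwd()], 'zcl/zcl.json') |
| * if (file != null) { |
| *   // file is the first root + relative path combination that exists on disk |
| * } |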
| */ |
| function locateRelativeFilePath(rootFileLocations, relativeFilePath) { |
| if (relativeFilePath) { |
| for (let i = 0; i < rootFileLocations.length; i++) { |
| let resolvedFile = path.resolve( |
| rootFileLocations[i], |
| relativeFilePath.trim() |
| ) |
| if (fs.existsSync(resolvedFile)) { |
| return resolvedFile |
| } |
| } |
| } |
| return null |
| } |
| |
| /** |
| * Executes an external program and returns a promise that resolves on success, |
| * or rejects if the command fails and options.rejectOnFail is true. |
| * |
| * @param {*} cmd |
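| * @param {*} workingDirectory |
| * @param {*} options object with 'rejectOnFail' and 'routeErrToOut' flags |
| * |
| * @example |
| * // Illustrative sketch; the command and working directory are assumed sample values. |
| * await executeExternalProgram('git status', process.cwd(), { |
| *   rejectOnFail: false, |
| *   routeErrToOut: true |
| * }) |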
| */ |
| function executeExternalProgram( |
| cmd, |
| workingDirectory, |
| options = { |
| rejectOnFail: true, |
| routeErrToOut: false |
| } |
| ) { |
| console.log(` ✍ ${cmd}`) |
| try { |
| const stdout = childProcess.execSync(cmd, { |
| cwd: workingDirectory, |
| windowsHide: true, |
| timeout: 20000, |
| encoding: 'utf-8' |
| }) |
| if (stdout) console.log(stdout) |
| // execSync throws on non-zero exit code, so we don't need to check for error explicitly here. |
| // It returns stdout buffer, stderr is printed to parent process stderr by default. |
| } catch (error) { |
| // execSync throws an error that contains stdout and stderr. |
| if (error.stdout) console.log(error.stdout) |
| if (error.stderr) { |
| if (options.routeErrToOut) { |
| console.log(error.stderr) |
| } else { |
| console.error(error.stderr) |
| } |
| } |
| |
| if (options.rejectOnFail) { |
| // We return a rejected promise to allow the caller to handle it. |
| return Promise.reject(toErrorObject(error)) |
| } else { |
| // If we're not rejecting on fail, we just log the error and continue. |
| console.error(error.message) |
| } |
| } |
| // Since the original function returned a promise, we'll return a resolved promise |
| // to maintain compatibility with any callers that might be using .then() |
| return Promise.resolve() |
| } |
| |
| /** |
| * Retrieves a specific entry from the extension defaults array via the 'clusterCode' key field. |
| * |
| * @param {*} extensions |
| * @param {*} extensionId field name under specific extension |
| * @param {*} clusterCode search key |
| * @param {*} clusterRole one of the server/client enums, or null for either. |
| * @returns Value of the cluster extension property. |
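| * |
| * @example |
| * // Illustrative sketch; the extension id is an assumed sample value, |
| * // 0x0006 is the On/Off cluster code. |
| * let value = getClusterExtensionDefault( |
| *   extensions, |
| *   'implementedCommands', |
| *   0x0006, |
| *   'server' |
| * ) |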
| */ |
| function getClusterExtensionDefault( |
| extensions, |
| extensionId, |
| clusterCode, |
| clusterRole = null |
| ) { |
| let f = getClusterExtension(extensions, extensionId) |
| if (f.length == 0) { |
| return '' |
| } else { |
| let val = null |
| f[0].defaults.forEach((d) => { |
| if (d.entityCode == clusterCode) { |
| if (clusterRole == null) { |
| val = d.value |
| } else if (clusterRole == d.entityQualifier) { |
| val = d.value |
| } |
| } |
| }) |
| if (val == null) val = f[0].globalDefault |
| if (val == null) val = '' |
| return val |
| } |
| } |
| |
| /** |
| * Retrieves the extension entries that match the given extension id. |
| * |
| * @param {*} extensions |
| * @param {*} extensionId property name identifying the extension |
| * @returns Array of extension objects whose 'property' matches the given extensionId |
| */ |
| function getClusterExtension(extensions, extensionId) { |
| return extensions.filter((x) => x.property == extensionId) |
| } |
| |
| /** |
| * Global way to get a UUID. |
| */ |
| function createUuid() { |
| return uuidv4() |
| } |
| |
| /** |
| * Returns a promise that resolves after the given number of milliseconds. |
| * @param {*} time |
| */ |
| function waitFor(time) { |
| return new Promise((r) => setTimeout(r, time)) |
| } |
| |
| /** |
| * Returns a promise that resolves into a parsed XML object. |
| * @param {*} fileContent |
| * @returns promise that resolves into parsed object. |
| */ |
| async function parseXml(fileContent) { |
| return xml2js.parseStringPromise(fileContent) |
| } |
| |
| /** |
| * Reads the given metadata file and returns an object containing |
| * 'data', 'filePath' and 'crc'. |
| * |
| * @param {*} metadataFile |
| * @returns Promise to populate data, filePath and crc into the context. |
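| * |
| * @example |
| * // Illustrative sketch; the file path is an assumed sample value. |
| * let ctx = await readFileContentAndCrc('./zcl/zcl.json') |
| * // ctx.data, ctx.filePath and ctx.crc are now populated. |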
| */ |
| async function readFileContentAndCrc(metadataFile) { |
| let content = await fsp.readFile(metadataFile, { encoding: 'utf-8' }) |
| return { |
| data: content, |
| filePath: metadataFile, |
| crc: checksum(content) |
| } |
| } |
| |
| /** |
| * This method takes a nanosecond duration and returns a |
| * decently human-readable time string. |
| * |
| * @param {*} nsDifference |
| * @returns String with human readable time duration. |
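| * |
| * @example |
| * // Illustrative sketch: 2.5 seconds expressed in nanoseconds. |
| * duration(2500000000n) // => '2s 500ms' |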
| */ |
| function duration(nsDifference) { |
| let diff = Number(nsDifference) |
| let out = '' |
| if (diff > 1000000000) { |
| out += `${Math.floor(diff / 1000000000)}s ` |
| } |
| out += `${Math.round((diff % 1000000000) / 1000000)}ms` |
| return out |
| } |
| |
| /** |
| * If allowSecondary is true, this method attempts to acquire the single-instance |
| * lock: on success it invokes mainInstanceCallback, otherwise (another zap |
| * instance is already running) it invokes secondaryInstanceCallback. |
| * If allowSecondary is false, mainInstanceCallback is invoked directly. |
| * |
| */ |
| function mainOrSecondaryInstance( |
| allowSecondary, |
| mainInstanceCallback, |
| secondaryInstanceCallback |
| ) { |
| if (allowSecondary) { |
| let lock = new singleInstance('zap') |
| lock.lock().then(mainInstanceCallback).catch(secondaryInstanceCallback) |
| } else { |
| mainInstanceCallback() |
| } |
| } |
| |
| /** |
| * Disables (removes) a test from the bound array of collected tests. |
| * |
| * @param {*} testName |
| */ |
| function disable(testName) { |
| const index = this.indexOf(testName) |
| if (index == -1) { |
| const errStr = `Test ${testName} does not exist.` |
| throw new Error(errStr) |
| } |
| |
| this.splice(index, 1) |
| } |
| |
| /** |
| * Utility method that collects data from a JSON file. |
| * |
| * JSON file is formatted as a bunch of keyed strings: |
| * "someKey": [ "a", "b", "c"] |
| * It also supports the following special keys: |
| * "include": "path/to/json/file" - includes the said JSON file |
| * "disable": [ "x", "y" ...] - disables the specified data points |
| * "collection": ["key", "key2", ...] - collects final list of data points |
| * |
| * @param {*} jsonFile |
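| * |
| * @example |
| * // Illustrative sketch of an input file; the key names are assumed samples. |
| * // { |
| * //   "include": "./common-tests.json", |
| * //   "lightTests": ["test1", "test2"], |
| * //   "switchTests": ["test3"], |
| * //   "collection": ["lightTests", "switchTests"], |
| * //   "disable": ["test2"] |
| * // } |
| * let tests = await collectJsonData('./tests.json') |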
| */ |
| async function collectJsonData(jsonFile, recursiveLevel = 0) { |
| if (recursiveLevel > 20) { |
| // Prevent infinite recursion |
| throw new Error(`Recursion too deep in JSON file inclusion.`) |
| } |
| let rawData = await fsp.readFile(jsonFile) |
| let jsonData = JSON.parse(rawData) |
| let collectedData = [] |
| if ('include' in jsonData) { |
| let f = path.join(path.dirname(jsonFile), jsonData.include) |
| let includedData = await collectJsonData(f, recursiveLevel + 1) |
| collectedData.push(...includedData) |
| } |
| if ('collection' in jsonData) { |
| collectedData.push(...jsonData.collection.map((c) => jsonData[c]).flat(1)) |
| } |
| if ('disable' in jsonData) { |
| collectedData = collectedData.filter( |
| (test) => !jsonData.disable.includes(test) |
| ) |
| } |
| collectedData.disable = disable.bind(collectedData) |
| return collectedData |
| } |
| |
| /** |
| * This function receives a pattern and data, and formats the pattern |
| * by applying the data to it via its keys. |
| * For example, if the pattern is "{a}" and data.a is 1, this produces "1". |
| * |
| * @param {*} pattern |
| * @param {*} data |
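| * |
| * @example |
| * // Illustrative sketch; the pattern and data are assumed sample values. |
| * let s = patternFormat('{name:touppercase}_{code:hexuppercase}', { |
| *   name: 'onOff', |
| *   code: 6 |
| * }) |
| * // s === 'ONOFF_6' |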
| */ |
| function patternFormat(pattern, data) { |
| let out = pattern |
| for (let key of Object.keys(data)) { |
| let value = data[key] |
| if (value == null) continue |
| out = out.replace(`{${key}}`, value) |
| out = out.replace(`{${key}:hexuppercase}`, value.toString(16).toUpperCase()) |
| out = out.replace(`{${key}:hexlowercase}`, value.toString(16).toLowerCase()) |
| out = out.replace(`{${key}:tolowercase}`, value.toString().toLowerCase()) |
| out = out.replace(`{${key}:touppercase}`, value.toString().toUpperCase()) |
| out = out.replace( |
| `{${key}:tocamelcase}`, |
| string.toCamelCase(value.toString()) |
| ) |
| out = out.replace( |
| `{${key}:tosnakecase}`, |
| string.toSnakeCase(value.toString()) |
| ) |
| out = out.replace( |
| `{${key}:tosnakecaseallcaps}`, |
| string.toSnakeCaseAllCaps(value.toString()) |
| ) |
| out = out.replace( |
| `{${key}:tokensintouppercamelcase}`, |
| string.tokensIntoCamelCase(value.toString()) |
| ) |
| // Keep this for backward compatibility. |
| out = out.replace( |
| `{${key}:touppercamelcase}`, |
| string.toCamelCase(value.toString(), /* firstLower = */ false) |
| ) |
| |
| // Note: if you add more of these, add the documentation to sdk-integration.md |
| } |
| return out |
| } |
| |
| /** |
| * Wraps a non-error object into an Error object. |
| * If message is provided, it will be used as the error message. |
| * |
| * @param {*} err |
| * @param {*} message |
| * @returns {Error} |
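| * |
| * @example |
| * // Illustrative sketch: wrapping a plain object thrown by some API. |
| * let err = toErrorObject({ code: 'ENOENT', message: 'file not found' }) |
| * // err instanceof Error === true, and err.code === 'ENOENT' |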
| */ |
| function toErrorObject(err, message = null) { |
| if (err instanceof Error) return err |
| let wrapped = new Error(message || err.message || String(err)) |
| if (err.code) wrapped.code = err.code |
| if (err.errno) wrapped.errno = err.errno |
| if (err.stack) wrapped.stack = err.stack |
| return wrapped |
| } |
| |
| /** |
| * Go over the zap file's top level packages and see if they can be upgraded |
| * based on the upgrade packages given. |
| * |
| * @param {*} db |
| * @param {*} upgradePackages |
| * @param {*} zapFilePackages |
| * @param {*} packageType |
| * @returns list of packages |
| */ |
| async function getUpgradePackageMatch( |
| db, |
| upgradePackages, |
| zapFilePackages, |
| packageType |
| ) { |
| let matchedUpgradePackages = [] |
| if (Array.isArray(upgradePackages) && Array.isArray(zapFilePackages)) { |
| for (let i = 0; i < upgradePackages.length; i++) { |
| let upgradePackage = await queryPackage.getPackageByPathAndType( |
| db, |
| upgradePackages[i], |
| packageType |
| ) |
| if (upgradePackage) { |
| for (let j = 0; j < zapFilePackages.length; j++) { |
| if ( |
| zapFilePackages[j].category == upgradePackage.category && |
| zapFilePackages[j].type == upgradePackage.type |
| ) { |
| matchedUpgradePackages.push(upgradePackage) |
| } |
| } |
| } |
| } |
| } |
| return matchedUpgradePackages |
| } |
| |
| exports.createBackupFile = createBackupFile |
| exports.checksum = checksum |
| exports.ensurePackagesAndPopulateSessionOptions = |
| ensurePackagesAndPopulateSessionOptions |
| exports.matchFeatureLevel = matchFeatureLevel |
| exports.sessionReport = sessionReport |
| exports.sessionDump = sessionDump |
| exports.executePromisesSequentially = executePromisesSequentially |
| exports.createAbsolutePath = createAbsolutePath |
| exports.executeExternalProgram = executeExternalProgram |
| exports.locateRelativeFilePath = locateRelativeFilePath |
| exports.createUuid = createUuid |
| exports.waitFor = waitFor |
| exports.getClusterExtension = getClusterExtension |
| exports.getClusterExtensionDefault = getClusterExtensionDefault |
| exports.parseXml = parseXml |
| exports.readFileContentAndCrc = readFileContentAndCrc |
| exports.duration = duration |
| exports.mainOrSecondaryInstance = mainOrSecondaryInstance |
| exports.collectJsonData = collectJsonData |
| exports.patternFormat = patternFormat |
| exports.toErrorObject = toErrorObject |
| exports.getUpgradePackageMatch = getUpgradePackageMatch |