diff --git a/analyzer.js b/analyzer.js index 28a53884b..7ad8ca4c5 100644 --- a/analyzer.js +++ b/analyzer.js @@ -36,7 +36,7 @@ const getAllFiles = (dir, extn, files, result, regex) => { if (IGNORE_FILE_PATTERN.test(files[i])) { continue; } - let file = join(dir, files[i]); + const file = join(dir, files[i]); if (statSync(file).isDirectory()) { // Ignore directories const dirName = basename(file); diff --git a/bin/cdxgen.js b/bin/cdxgen.js index 1e46bf09e..e2133830d 100755 --- a/bin/cdxgen.js +++ b/bin/cdxgen.js @@ -130,7 +130,7 @@ if (process.env.GLOBAL_AGENT_HTTP_PROXY || process.env.HTTP_PROXY) { globalAgent.bootstrap(); } -let filePath = args._[0] || "."; +const filePath = args._[0] || "."; if (!args.projectName) { if (filePath !== ".") { args.projectName = basename(filePath); @@ -143,7 +143,7 @@ if (!args.projectName) { * projectType: python, nodejs, java, golang * multiProject: Boolean to indicate monorepo or multi-module projects */ -let options = { +const options = { projectType: args.type, multiProject: args.recurse, output: args.output, @@ -353,7 +353,7 @@ const checkPermissions = (filePath) => { if (args.print && bomNSData.bomJson && bomNSData.bomJson.components) { const data = [["Group", "Name", "Version", "Scope"]]; - for (let comp of bomNSData.bomJson.components) { + for (const comp of bomNSData.bomJson.components) { data.push([comp.group || "", comp.name, comp.version, comp.scope || ""]); } const config = { diff --git a/index.js b/index.js index 204bce5ac..1f4b6470e 100644 --- a/index.js +++ b/index.js @@ -148,7 +148,7 @@ if (process.env.SWIFT_CMD) { } // Construct sbt cache directory -let SBT_CACHE_DIR = +const SBT_CACHE_DIR = process.env.SBT_CACHE_DIR || join(homedir(), ".ivy2", "cache"); // Debug mode flag @@ -213,7 +213,7 @@ const determineParentComponent = (options) => { * @returns {Array} */ function addGlobalReferences(src, filename, format = "xml") { - let externalReferences = []; + const externalReferences = []; if (format === "json") { 
externalReferences.push({ type: "other", @@ -251,10 +251,10 @@ function addGlobalReferences(src, filename, format = "xml") { * Function to create the services block */ function addServices(services, format = "xml") { - let serv_list = []; + const serv_list = []; for (const aserv of services) { if (format === "xml") { - let service = { + const service = { "@bom-ref": aserv["bom-ref"], group: aserv.group || "", name: aserv.name, @@ -276,9 +276,9 @@ function addServices(services, format = "xml") { * Function to create the dependency block */ function addDependencies(dependencies) { - let deps_list = []; + const deps_list = []; for (const adep of dependencies) { - let dependsOnList = adep.dependsOn.map((v) => ({ + const dependsOnList = adep.dependsOn.map((v) => ({ "@ref": v })); const aentry = { @@ -299,7 +299,7 @@ function addDependencies(dependencies) { function addMetadata(parentComponent = {}, format = "xml", options = {}) { // DO NOT fork this project to just change the vendor or author's name // Try to contribute to this project by sending PR or filing issues - let metadata = { + const metadata = { timestamp: new Date().toISOString(), tools: { components: [ @@ -508,7 +508,7 @@ function addMetadata(parentComponent = {}, format = "xml", options = {}) { * @returns {Array} */ function addExternalReferences(opkg, format = "xml") { - let externalReferences = []; + const externalReferences = []; let pkgList = []; if (Array.isArray(opkg)) { pkgList = opkg; @@ -575,17 +575,15 @@ function addExternalReferences(opkg, format = "xml") { * For all modules in the specified package, creates a list of * component objects from each one. 
*/ -const _listComponents = listComponents; -export { _listComponents as listComponents }; -function listComponents( +export function listComponents( options, allImports, pkg, ptype = "npm", format = "xml" ) { - let compMap = {}; - let isRootPkg = ptype === "npm"; + const compMap = {}; + const isRootPkg = ptype === "npm"; if (Array.isArray(pkg)) { pkg.forEach((p) => { addComponent(options, allImports, p, ptype, compMap, false, format); @@ -616,13 +614,13 @@ function addComponent( return; } if (!isRootPkg) { - let pkgIdentifier = parsePackageJsonName(pkg.name); - let author = pkg.author || ""; - let publisher = pkg.publisher || ""; + const pkgIdentifier = parsePackageJsonName(pkg.name); + const author = pkg.author || ""; + const publisher = pkg.publisher || ""; let group = pkg.group || pkgIdentifier.scope; // Create empty group group = group || ""; - let name = pkgIdentifier.fullName || pkg.name || ""; + const name = pkgIdentifier.fullName || pkg.name || ""; // name is mandatory if (!name) { return; @@ -640,13 +638,13 @@ function addComponent( ) { return; } - let version = pkg.version; + const version = pkg.version; if (!version || ["dummy", "ignore"].includes(version)) { return; } - let licenses = pkg.licenses || getLicenses(pkg, format); + const licenses = pkg.licenses || getLicenses(pkg, format); - let purl = + const purl = pkg.purl || new PackageURL( ptype, @@ -680,7 +678,7 @@ function addComponent( if (options.requiredOnly && ["optional", "excluded"].includes(compScope)) { return; } - let component = { + const component = { author, publisher, group, @@ -735,7 +733,7 @@ function determinePackageType(pkg) { } if (pkg.purl) { try { - let purl = PackageURL.fromString(pkg.purl); + const purl = PackageURL.fromString(pkg.purl); if (purl.type) { if (["docker", "oci", "container"].includes(purl.type)) { return "container"; @@ -801,7 +799,7 @@ function determinePackageType(pkg) { } } if (Object.prototype.hasOwnProperty.call(pkg, "keywords")) { - for (let keyword of 
pkg.keywords) { + for (const keyword of pkg.keywords) { if (keyword.toLowerCase() === "framework") { return "framework"; } @@ -832,7 +830,7 @@ function processHashes(pkg, component, format = "xml") { }); } } else if (pkg._integrity) { - let integrity = parse(pkg._integrity) || {}; + const integrity = parse(pkg._integrity) || {}; // Components may have multiple hashes with various lengths. Check each one // that is supported by the CycloneDX specification. if (Object.prototype.hasOwnProperty.call(integrity, "sha512")) { @@ -1009,7 +1007,7 @@ const buildBomNSData = (options, pkgInfo, ptype, context) => { * @param path to the project * @param options Parse options from the cli */ -const createJarBom = (path, options) => { +export const createJarBom = (path, options) => { console.log( `About to create SBoM for all jar files under ${path}. This would take a while ...` ); @@ -1026,8 +1024,8 @@ const createJarBom = (path, options) => { if (hpiFiles.length) { jarFiles = jarFiles.concat(hpiFiles); } - let tempDir = mkdtempSync(join(tmpdir(), "jar-deps-")); - for (let jar of jarFiles) { + const tempDir = mkdtempSync(join(tmpdir(), "jar-deps-")); + for (const jar of jarFiles) { if (DEBUG_MODE) { console.log(`Parsing ${jar}`); } @@ -1054,7 +1052,7 @@ const createJarBom = (path, options) => { * @param path to the project * @param options Parse options from the cli */ -const createJavaBom = async (path, options) => { +export const createJavaBom = async (path, options) => { let jarNSMapping = {}; let pkgList = []; let dependencies = []; @@ -1068,7 +1066,7 @@ const createJavaBom = async (path, options) => { if (DEBUG_MODE) { console.log(`Retrieving packages from ${path}`); } - let tempDir = mkdtempSync(join(tmpdir(), "war-deps-")); + const tempDir = mkdtempSync(join(tmpdir(), "war-deps-")); pkgList = extractJarArchive(path, tempDir); if (pkgList.length) { pkgList = await getMvnMetadata(pkgList); @@ -1119,7 +1117,7 @@ const createJavaBom = async (path, options) => { const addArgs = 
process.env.MVN_ARGS.split(" "); mvnArgs = mvnArgs.concat(addArgs); } - for (let f of pomFiles) { + for (const f of pomFiles) { const basePath = dirname(f); const settingsXml = join(basePath, "settings.xml"); if (existsSync(settingsXml)) { @@ -1127,7 +1125,7 @@ const createJavaBom = async (path, options) => { `maven settings.xml found in ${basePath}. Please set the MVN_ARGS environment variable based on the full mvn build command used for this project.\nExample: MVN_ARGS='--settings ${settingsXml}'` ); } - let mavenCmd = getMavenCommand(basePath, path); + const mavenCmd = getMavenCommand(basePath, path); // Should we attempt to resolve class names if (options.resolveClass) { console.log( @@ -1150,8 +1148,8 @@ const createJavaBom = async (path, options) => { const bomJsonFiles = getAllFiles(path, "**/target/*.json"); const bomGenerated = bomJsonFiles.length; if (!bomGenerated || result.status !== 0 || result.error) { - let tempDir = mkdtempSync(join(tmpdir(), "cdxmvn-")); - let tempMvnTree = join(tempDir, "mvn-tree.txt"); + const tempDir = mkdtempSync(join(tmpdir(), "cdxmvn-")); + const tempMvnTree = join(tempDir, "mvn-tree.txt"); let mvnTreeArgs = ["dependency:tree", "-DoutputFile=" + tempMvnTree]; if (process.env.MVN_ARGS) { const addArgs = process.env.MVN_ARGS.split(" "); @@ -1281,18 +1279,18 @@ const createJavaBom = async (path, options) => { } } // gradle - let gradleFiles = getAllFiles( + const gradleFiles = getAllFiles( path, (options.multiProject ? 
"**/" : "") + "build.gradle*" ); - let allProjects = []; + const allProjects = []; const allProjectsAddedPurls = []; const rootDependsOn = []; // Execute gradle properties if (gradleFiles && gradleFiles.length) { let retMap = executeGradleProperties(path, null, null); const allProjectsStr = retMap.projects || []; - let rootProject = retMap.rootProject; + const rootProject = retMap.rootProject; if (rootProject) { parentComponent = { name: rootProject, @@ -1314,11 +1312,11 @@ const createJavaBom = async (path, options) => { } // Get the sub-project properties and set the root dependencies if (allProjectsStr && allProjectsStr.length) { - for (let spstr of allProjectsStr) { + for (const spstr of allProjectsStr) { retMap = executeGradleProperties(path, null, spstr); - let rootSubProject = retMap.rootProject; + const rootSubProject = retMap.rootProject; if (rootSubProject) { - let rspName = rootSubProject.replace(/^:/, "").replace(/:/, "/"); + const rspName = rootSubProject.replace(/^:/, "").replace(/:/, "/"); const rootSubProjectObj = { name: rspName, type: "application", @@ -1356,9 +1354,9 @@ const createJavaBom = async (path, options) => { } } if (gradleFiles && gradleFiles.length && options.installDeps) { - let gradleCmd = getGradleCommand(path, null); + const gradleCmd = getGradleCommand(path, null); allProjects.push(parentComponent); - for (let sp of allProjects) { + for (const sp of allProjects) { let gradleDepArgs = [ sp.purl === parentComponent.purl ? 
"dependencies" @@ -1452,13 +1450,13 @@ const createJavaBom = async (path, options) => { // Bazel // Look for the BUILD file only in the root directory - let bazelFiles = getAllFiles(path, "BUILD"); + const bazelFiles = getAllFiles(path, "BUILD"); if (bazelFiles && bazelFiles.length) { let BAZEL_CMD = "bazel"; if (process.env.BAZEL_HOME) { BAZEL_CMD = join(process.env.BAZEL_HOME, "bin", "bazel"); } - for (let f of bazelFiles) { + for (const f of bazelFiles) { const basePath = dirname(f); // Invoke bazel build first const bazelTarget = process.env.BAZEL_TARGET || ":all"; @@ -1500,7 +1498,7 @@ const createJavaBom = async (path, options) => { console.error(result.stdout, result.stderr); options.failOnError && process.exit(1); } - let stdout = result.stdout; + const stdout = result.stdout; if (stdout) { const cmdOutput = Buffer.from(stdout).toString(); const dlist = parseBazelSkyframe(cmdOutput); @@ -1545,7 +1543,7 @@ const createJavaBom = async (path, options) => { ); let sbtProjects = []; - for (let i in sbtProjectFiles) { + for (const i in sbtProjectFiles) { // parent dir of sbtProjectFile is the `project` directory // parent dir of `project` is the sbt root project directory const baseDir = dirname(dirname(sbtProjectFiles[i])); @@ -1558,7 +1556,7 @@ const createJavaBom = async (path, options) => { path, (options.multiProject ? "**/" : "") + "*.sbt" ); - for (let i in sbtProjectFiles) { + for (const i in sbtProjectFiles) { const baseDir = dirname(sbtProjectFiles[i]); sbtProjects = sbtProjects.concat(baseDir); } @@ -1566,7 +1564,7 @@ const createJavaBom = async (path, options) => { sbtProjects = [...new Set(sbtProjects)]; // eliminate duplicates - let sbtLockFiles = getAllFiles( + const sbtLockFiles = getAllFiles( path, (options.multiProject ? 
"**/" : "") + "build.sbt.lock" ); @@ -1575,15 +1573,15 @@ const createJavaBom = async (path, options) => { let pkgList = []; // If the project use sbt lock files if (sbtLockFiles && sbtLockFiles.length) { - for (let f of sbtLockFiles) { + for (const f of sbtLockFiles) { const dlist = parseSbtLock(f); if (dlist && dlist.length) { pkgList = pkgList.concat(dlist); } } } else { - let SBT_CMD = process.env.SBT_CMD || "sbt"; - let sbtVersion = determineSbtVersion(path); + const SBT_CMD = process.env.SBT_CMD || "sbt"; + const sbtVersion = determineSbtVersion(path); if (DEBUG_MODE) { console.log("Detected sbt version: " + sbtVersion); } @@ -1596,11 +1594,11 @@ const createJavaBom = async (path, options) => { const useSlashSyntax = gte(sbtVersion, "1.5.0"); const isDependencyTreeBuiltIn = sbtVersion != null && gte(sbtVersion, "1.4.0"); - let tempDir = mkdtempSync(join(tmpdir(), "cdxsbt-")); - let tempSbtgDir = mkdtempSync(join(tmpdir(), "cdxsbtg-")); + const tempDir = mkdtempSync(join(tmpdir(), "cdxsbt-")); + const tempSbtgDir = mkdtempSync(join(tmpdir(), "cdxsbtg-")); mkdirSync(tempSbtgDir, { recursive: true }); // Create temporary plugins file - let tempSbtPlugins = join(tempSbtgDir, "dep-plugins.sbt"); + const tempSbtPlugins = join(tempSbtgDir, "dep-plugins.sbt"); // Requires a custom version of `sbt-dependency-graph` that // supports `--append` for `toFile` subtask. 
@@ -1613,9 +1611,9 @@ const createJavaBom = async (path, options) => { } writeFileSync(tempSbtPlugins, sbtPluginDefinition); - for (let i in sbtProjects) { + for (const i in sbtProjects) { const basePath = sbtProjects[i]; - let dlFile = join(tempDir, "dl-" + i + ".tmp"); + const dlFile = join(tempDir, "dl-" + i + ".tmp"); console.log( "Executing", SBT_CMD, @@ -1624,8 +1622,8 @@ const createJavaBom = async (path, options) => { "using plugins", tempSbtgDir ); - var sbtArgs = []; - var pluginFile = null; + let sbtArgs = []; + let pluginFile = null; if (standalonePluginFile) { sbtArgs = [ `-addPluginSbtFile=${tempSbtPlugins}`, @@ -1709,7 +1707,7 @@ const createJavaBom = async (path, options) => { * @param path to the project * @param options Parse options from the cli */ -const createNodejsBom = async (path, options) => { +export const createNodejsBom = async (path, options) => { let pkgList = []; let manifestFiles = []; let dependencies = []; @@ -1720,7 +1718,7 @@ const createNodejsBom = async (path, options) => { const pkgJsonFiles = getAllFiles(path, "**/package.json"); // Are there any package.json files in the container? 
if (pkgJsonFiles.length) { - for (let pj of pkgJsonFiles) { + for (const pj of pkgJsonFiles) { const dlist = await parsePkgJson(pj); if (dlist && dlist.length) { pkgList = pkgList.concat(dlist); @@ -1776,7 +1774,7 @@ const createNodejsBom = async (path, options) => { // Parse min js files if (minJsFiles && minJsFiles.length) { manifestFiles = manifestFiles.concat(minJsFiles); - for (let f of minJsFiles) { + for (const f of minJsFiles) { const dlist = await parseMinJs(f); if (dlist && dlist.length) { pkgList = pkgList.concat(dlist); @@ -1786,7 +1784,7 @@ const createNodejsBom = async (path, options) => { // Parse bower json files if (bowerFiles && bowerFiles.length) { manifestFiles = manifestFiles.concat(bowerFiles); - for (let f of bowerFiles) { + for (const f of bowerFiles) { const dlist = await parseBowerJson(f); if (dlist && dlist.length) { pkgList = pkgList.concat(dlist); @@ -1795,7 +1793,7 @@ const createNodejsBom = async (path, options) => { } if (pnpmLockFiles && pnpmLockFiles.length) { manifestFiles = manifestFiles.concat(pnpmLockFiles); - for (let f of pnpmLockFiles) { + for (const f of pnpmLockFiles) { const basePath = dirname(f); // Determine the parent component const packageJsonF = join(basePath, "package.json"); @@ -1841,7 +1839,7 @@ const createNodejsBom = async (path, options) => { } if (pkgLockFiles && pkgLockFiles.length) { manifestFiles = manifestFiles.concat(pkgLockFiles); - for (let f of pkgLockFiles) { + for (const f of pkgLockFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } @@ -1917,7 +1915,7 @@ const createNodejsBom = async (path, options) => { } if (yarnLockFiles && yarnLockFiles.length) { manifestFiles = manifestFiles.concat(yarnLockFiles); - for (let f of yarnLockFiles) { + for (const f of yarnLockFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } @@ -1993,7 +1991,7 @@ const createNodejsBom = async (path, options) => { "**/package.json" ); manifestFiles = manifestFiles.concat(pkgJsonFiles); - for (let pkgjf of 
pkgJsonFiles) { + for (const pkgjf of pkgJsonFiles) { const dlist = await parsePkgJson(pkgjf); if (dlist && dlist.length) { pkgList = pkgList.concat(dlist); @@ -2026,7 +2024,7 @@ const createNodejsBom = async (path, options) => { * @param path to the project * @param options Parse options from the cli */ -const createPythonBom = async (path, options) => { +export const createPythonBom = async (path, options) => { let allImports = {}; let metadataFilename = ""; const pipenvMode = existsSync(join(path, "Pipfile")); @@ -2065,7 +2063,7 @@ const createPythonBom = async (path, options) => { // Poetry sets up its own virtual env containing site-packages so // we give preference to poetry lock file. Issue# 129 if (poetryMode) { - for (let f of poetryFiles) { + for (const f of poetryFiles) { const lockData = readFileSync(f, { encoding: "utf-8" }); const dlist = await parsePoetrylockData(lockData); if (dlist && dlist.length) { @@ -2078,7 +2076,7 @@ const createPythonBom = async (path, options) => { }); } else if (metadataFiles && metadataFiles.length) { // dist-info directories - for (let mf of metadataFiles) { + for (const mf of metadataFiles) { const mData = readFileSync(mf, { encoding: "utf-8" }); @@ -2090,7 +2088,7 @@ const createPythonBom = async (path, options) => { } // .whl files. 
Zip file containing dist-info directory if (whlFiles && whlFiles.length) { - for (let wf of whlFiles) { + for (const wf of whlFiles) { const mData = await readZipEntry(wf, "METADATA"); if (mData) { const dlist = parseBdistMetadata(mData); @@ -2102,7 +2100,7 @@ const createPythonBom = async (path, options) => { } // .egg-info files if (eggInfoFiles && eggInfoFiles.length) { - for (let ef of eggInfoFiles) { + for (const ef of eggInfoFiles) { const dlist = parseBdistMetadata(readFileSync(ef, { encoding: "utf-8" })); if (dlist && dlist.length) { pkgList = pkgList.concat(dlist); @@ -2126,7 +2124,7 @@ const createPythonBom = async (path, options) => { } else if (requirementsMode) { metadataFilename = "requirements.txt"; if (reqFiles && reqFiles.length) { - for (let f of reqFiles) { + for (const f of reqFiles) { const basePath = dirname(f); let reqData = undefined; let frozen = false; @@ -2154,7 +2152,7 @@ const createPythonBom = async (path, options) => { } // for metadataFilename = reqFiles.join(", "); } else if (reqDirFiles && reqDirFiles.length) { - for (let j in reqDirFiles) { + for (const j in reqDirFiles) { const f = reqDirFiles[j]; const reqData = readFileSync(f, { encoding: "utf-8" }); const dlist = await parseReqFile(reqData, false); @@ -2213,7 +2211,7 @@ const createPythonBom = async (path, options) => { * @param path to the project * @param options Parse options from the cli */ -const createGoBom = async (path, options) => { +export const createGoBom = async (path, options) => { let pkgList = []; // Is this a binary file let maybeBinary = false; @@ -2230,8 +2228,8 @@ const createGoBom = async (path, options) => { } // Since this pkg list is derived from the binary mark them as used. 
const allImports = {}; - for (let mpkg of pkgList) { - let pkgFullName = `${mpkg.group}/${mpkg.name}`; + for (const mpkg of pkgList) { + const pkgFullName = `${mpkg.group}/${mpkg.name}`; allImports[pkgFullName] = true; } return buildBomNSData(options, pkgList, "golang", { @@ -2255,7 +2253,7 @@ const createGoBom = async (path, options) => { "Using go.sum to generate BOMs for go projects may return an inaccurate representation of transitive dependencies.\nSee: https://github.com/golang/go/wiki/Modules#is-gosum-a-lock-file-why-does-gosum-include-information-for-module-versions-i-am-no-longer-using\n", "Set USE_GOSUM=false to generate BOMs using go.mod as the dependency source of truth." ); - for (let f of gosumFiles) { + for (const f of gosumFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } @@ -2274,7 +2272,7 @@ const createGoBom = async (path, options) => { // If USE_GOSUM is false, generate BOM components using go.mod. const gosumMap = {}; if (gosumFiles.length) { - for (let f of gosumFiles) { + for (const f of gosumFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } @@ -2303,7 +2301,7 @@ const createGoBom = async (path, options) => { let shouldManuallyParse = false; // Use the go list -deps and go mod why commands to generate a good quality BoM for non-docker invocations if (!["docker", "oci", "os"].includes(options.projectType)) { - for (let f of gomodFiles) { + for (const f of gomodFiles) { const basePath = dirname(f); // Ignore vendor packages if (basePath.includes("/vendor/") || basePath.includes("/build/")) { @@ -2349,11 +2347,11 @@ const createGoBom = async (path, options) => { ); } // Using go mod why detect required packages - for (let apkg of pkgList) { + for (const apkg of pkgList) { if (circuitBreak) { break; } - let pkgFullName = `${apkg.name}`; + const pkgFullName = `${apkg.name}`; if (apkg.scope === "required") { allImports[pkgFullName] = true; continue; @@ -2375,7 +2373,7 @@ const createGoBom = async (path, options) => { const mstdout = 
mresult.stdout; if (mstdout) { const cmdOutput = Buffer.from(mstdout).toString(); - let whyPkg = parseGoModWhy(cmdOutput); + const whyPkg = parseGoModWhy(cmdOutput); if (whyPkg == pkgFullName) { allImports[pkgFullName] = true; } @@ -2399,7 +2397,7 @@ const createGoBom = async (path, options) => { "Manually parsing go.mod files. The resultant BoM would be incomplete." ); } - for (let f of gomodFiles) { + for (const f of gomodFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } @@ -2414,7 +2412,7 @@ const createGoBom = async (path, options) => { filename: gomodFiles.join(", ") }); } else if (gopkgLockFiles.length) { - for (let f of gopkgLockFiles) { + for (const f of gopkgLockFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } @@ -2440,7 +2438,7 @@ const createGoBom = async (path, options) => { * @param path to the project * @param options Parse options from the cli */ -const createRustBom = async (path, options) => { +export const createRustBom = async (path, options) => { let pkgList = []; // Is this a binary file let maybeBinary = false; @@ -2457,8 +2455,8 @@ const createRustBom = async (path, options) => { } // Since this pkg list is derived from the binary mark them as used. const allImports = {}; - for (let mpkg of pkgList) { - let pkgFullName = `${mpkg.group}/${mpkg.name}`; + for (const mpkg of pkgList) { + const pkgFullName = `${mpkg.group}/${mpkg.name}`; allImports[pkgFullName] = true; } return buildBomNSData(options, pkgList, "cargo", { @@ -2476,9 +2474,9 @@ const createRustBom = async (path, options) => { (options.multiProject ? "**/" : "") + "Cargo.toml" ); const cargoMode = cargoFiles.length; - let cargoLockMode = cargoLockFiles.length; + const cargoLockMode = cargoLockFiles.length; if (cargoMode && !cargoLockMode) { - for (let f of cargoFiles) { + for (const f of cargoFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } @@ -2499,7 +2497,7 @@ const createRustBom = async (path, options) => { (options.multiProject ? 
"**/" : "") + "Cargo.lock" ); if (cargoLockFiles.length) { - for (let f of cargoLockFiles) { + for (const f of cargoLockFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } @@ -2523,7 +2521,7 @@ const createRustBom = async (path, options) => { * @param path to the project * @param options Parse options from the cli */ -const createDartBom = async (path, options) => { +export const createDartBom = async (path, options) => { const pubFiles = getAllFiles( path, (options.multiProject ? "**/" : "") + "pubspec.lock" @@ -2534,7 +2532,7 @@ const createDartBom = async (path, options) => { ); let pkgList = []; if (pubFiles.length) { - for (let f of pubFiles) { + for (const f of pubFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } @@ -2549,7 +2547,7 @@ const createDartBom = async (path, options) => { filename: pubFiles.join(", ") }); } else if (pubSpecYamlFiles.length) { - for (let f of pubSpecYamlFiles) { + for (const f of pubSpecYamlFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } @@ -2574,7 +2572,7 @@ const createDartBom = async (path, options) => { * @param path to the project * @param options Parse options from the cli */ -const createCppBom = async (path, options) => { +export const createCppBom = async (path, options) => { const conanLockFiles = getAllFiles( path, (options.multiProject ? 
"**/" : "") + "conan.lock" @@ -2585,7 +2583,7 @@ const createCppBom = async (path, options) => { ); let pkgList = []; if (conanLockFiles.length) { - for (let f of conanLockFiles) { + for (const f of conanLockFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } @@ -2600,7 +2598,7 @@ const createCppBom = async (path, options) => { filename: conanLockFiles.join(", ") }); } else if (conanFiles.length) { - for (let f of conanFiles) { + for (const f of conanFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } @@ -2625,7 +2623,7 @@ const createCppBom = async (path, options) => { * @param path to the project * @param options Parse options from the cli */ -const createClojureBom = async (path, options) => { +export const createClojureBom = (path, options) => { const ednFiles = getAllFiles( path, (options.multiProject ? "**/" : "") + "deps.edn" @@ -2640,7 +2638,7 @@ const createClojureBom = async (path, options) => { if (process.env.LEIN_ARGS) { LEIN_ARGS = process.env.LEIN_ARGS.split(" "); } - for (let f of leinFiles) { + for (const f of leinFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } @@ -2690,7 +2688,7 @@ const createClojureBom = async (path, options) => { if (process.env.CLJ_ARGS) { CLJ_ARGS = process.env.CLJ_ARGS.split(" "); } - for (let f of ednFiles) { + for (const f of ednFiles) { const basePath = dirname(f); console.log("Executing", CLJ_CMD, CLJ_ARGS.join(" "), "in", basePath); const result = spawnSync(CLJ_CMD, CLJ_ARGS, { @@ -2743,14 +2741,14 @@ const createClojureBom = async (path, options) => { * @param path to the project * @param options Parse options from the cli */ -const createHaskellBom = async (path, options) => { +export const createHaskellBom = async (path, options) => { const cabalFiles = getAllFiles( path, (options.multiProject ? 
"**/" : "") + "cabal.project.freeze" ); let pkgList = []; if (cabalFiles.length) { - for (let f of cabalFiles) { + for (const f of cabalFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } @@ -2774,14 +2772,14 @@ const createHaskellBom = async (path, options) => { * @param path to the project * @param options Parse options from the cli */ -const createElixirBom = async (path, options) => { +export const createElixirBom = async (path, options) => { const mixFiles = getAllFiles( path, (options.multiProject ? "**/" : "") + "mix.lock" ); let pkgList = []; if (mixFiles.length) { - for (let f of mixFiles) { + for (const f of mixFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } @@ -2805,11 +2803,11 @@ const createElixirBom = async (path, options) => { * @param path to the project * @param options Parse options from the cli */ -const createGitHubBom = async (path, options) => { +export const createGitHubBom = async (path, options) => { const ghactionFiles = getAllFiles(path, ".github/workflows/" + "*.yml"); let pkgList = []; if (ghactionFiles.length) { - for (let f of ghactionFiles) { + for (const f of ghactionFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } @@ -2833,11 +2831,11 @@ const createGitHubBom = async (path, options) => { * @param path to the project * @param options Parse options from the cli */ -const createCloudBuildBom = async (path, options) => { +export const createCloudBuildBom = async (path, options) => { const cbFiles = getAllFiles(path, "cloudbuild.yml"); let pkgList = []; if (cbFiles.length) { - for (let f of cbFiles) { + for (const f of cbFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } @@ -2861,7 +2859,7 @@ const createCloudBuildBom = async (path, options) => { * @param path to the project * @param options Parse options from the cli */ -const createOSBom = async (path, options) => { +export const createOSBom = (path, options) => { console.warn( "About to generate SBoM for the current OS installation. 
This would take several minutes ..." ); @@ -2893,7 +2891,7 @@ const createOSBom = async (path, options) => { allLayersDir: options.allLayersExplodedDir, allLayersExplodedDir: options.allLayersExplodedDir }; - let pkgPathList = []; + const pkgPathList = []; if (options.deep) { getPkgPathList(exportData, undefined); } @@ -2906,15 +2904,15 @@ const createOSBom = async (path, options) => { * @param path to the project * @param options Parse options from the cli */ -const createJenkinsBom = async (path, options) => { +export const createJenkinsBom = async (path, options) => { let pkgList = []; const hpiFiles = getAllFiles( path, (options.multiProject ? "**/" : "") + "*.hpi" ); - let tempDir = mkdtempSync(join(tmpdir(), "hpi-deps-")); + const tempDir = mkdtempSync(join(tmpdir(), "hpi-deps-")); if (hpiFiles.length) { - for (let f of hpiFiles) { + for (const f of hpiFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } @@ -2926,7 +2924,7 @@ const createJenkinsBom = async (path, options) => { } const jsFiles = getAllFiles(tempDir, "**/*.js"); if (jsFiles.length) { - for (let f of jsFiles) { + for (const f of jsFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } @@ -2954,14 +2952,14 @@ const createJenkinsBom = async (path, options) => { * @param path to the project * @param options Parse options from the cli */ -const createHelmBom = async (path, options) => { +export const createHelmBom = async (path, options) => { let pkgList = []; const yamlFiles = getAllFiles( path, (options.multiProject ? "**/" : "") + "*.yaml" ); if (yamlFiles.length) { - for (let f of yamlFiles) { + for (const f of yamlFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } @@ -2985,7 +2983,7 @@ const createHelmBom = async (path, options) => { * @param path to the project * @param options Parse options from the cli */ -const createSwiftBom = async (path, options) => { +export const createSwiftBom = (path, options) => { const swiftFiles = getAllFiles( path, (options.multiProject ? 
"**/" : "") + "Package*.swift" @@ -2997,9 +2995,9 @@ const createSwiftBom = async (path, options) => { let pkgList = []; let dependencies = []; let parentComponent = {}; - let completedPath = []; + const completedPath = []; if (pkgResolvedFiles.length) { - for (let f of pkgResolvedFiles) { + for (const f of pkgResolvedFiles) { if (!parentComponent || !Object.keys(parentComponent).length) { parentComponent = createDefaultParentComponent(f); } @@ -3013,7 +3011,7 @@ const createSwiftBom = async (path, options) => { } } if (swiftFiles.length) { - for (let f of swiftFiles) { + for (const f of swiftFiles) { const basePath = dirname(f); if (completedPath.includes(basePath)) { continue; @@ -3071,16 +3069,16 @@ const createSwiftBom = async (path, options) => { * @param path to the project * @param options Parse options from the cli */ -const createContainerSpecLikeBom = async (path, options) => { +export const createContainerSpecLikeBom = async (path, options) => { let services = []; - let ociSpecs = []; + const ociSpecs = []; let components = []; let componentsXmls = []; - let parentComponent = {}; + const parentComponent = {}; let dependencies = []; - let doneimages = []; - let doneservices = []; - let origProjectType = options.projectType; + const doneimages = []; + const doneservices = []; + const origProjectType = options.projectType; let dcFiles = getAllFiles( path, (options.multiProject ? "**/" : "") + "*.yml" @@ -3093,7 +3091,7 @@ const createContainerSpecLikeBom = async (path, options) => { path, (options.multiProject ? "**/" : "") + "open*.json" ); - let oapiYamlFiles = getAllFiles( + const oapiYamlFiles = getAllFiles( path, (options.multiProject ? 
"**/" : "") + "open*.yaml" ); @@ -3107,7 +3105,7 @@ const createContainerSpecLikeBom = async (path, options) => { const privadoFiles = getAllFiles(path, ".privado/" + "*.json"); // parse yaml manifest files if (dcFiles.length) { - for (let f of dcFiles) { + for (const f of dcFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } @@ -3118,7 +3116,7 @@ const createContainerSpecLikeBom = async (path, options) => { console.log("Images identified in", f, "are", imglist); } for (const img of imglist) { - let commonProperties = [ + const commonProperties = [ { name: "SrcFile", value: f @@ -3239,7 +3237,7 @@ const createContainerSpecLikeBom = async (path, options) => { } // if // Parse openapi files if (oapiFiles.length) { - for (let af of oapiFiles) { + for (const af of oapiFiles) { if (DEBUG_MODE) { console.log(`Parsing ${af}`); } @@ -3265,14 +3263,14 @@ const createContainerSpecLikeBom = async (path, options) => { "Enriching your SBoM with information from privado.ai scan reports" ); let rows = [["Classification", "Flow"]]; - let config = { + const config = { header: { alignment: "center", content: "Data Privacy Insights from privado.ai" }, columns: [{ width: 50 }, { width: 10 }] }; - for (let f of privadoFiles) { + for (const f of privadoFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } @@ -3281,14 +3279,14 @@ const createContainerSpecLikeBom = async (path, options) => { if (servlist.length) { const aservice = servlist[0]; if (aservice.data) { - for (let d of aservice.data) { + for (const d of aservice.data) { rows.push([d.classification, d.flow]); } console.log(table(rows, config)); } if (aservice.endpoints) { rows = [["Leaky Endpoints"]]; - for (let e of aservice.endpoints) { + for (const e of aservice.endpoints) { rows.push([e]); } console.log( @@ -3350,7 +3348,7 @@ const createContainerSpecLikeBom = async (path, options) => { * @param path to the project * @param options Parse options from the cli */ -const createPHPBom = async (path, options) => { 
+export const createPHPBom = (path, options) => { const composerJsonFiles = getAllFiles( path, (options.multiProject ? "**/" : "") + "composer.json" ); @@ -3383,14 +3381,14 @@ const createPHPBom = async (path, options) => { if (DEBUG_MODE) { console.log("Parsing version", versionResult.stdout); } - let tmpV = undefined; + let tmpV = undefined; if (versionResult && versionResult.stdout) { versionResult.stdout.split(" "); } if (tmpV && tmpV.length > 1) { composerVersion = tmpV[1]; } - for (let f of composerJsonFiles) { + for (const f of composerJsonFiles) { const basePath = dirname(f); let args = []; if (composerVersion && !composerVersion.startsWith("1")) { @@ -3416,11 +3414,11 @@ const createPHPBom = async (path, options) => { (options.multiProject ? "**/" : "") + "composer.lock" ); if (composerLockFiles.length) { - for (let f of composerLockFiles) { + for (const f of composerLockFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } - let dlist = parseComposerLock(f); + const dlist = parseComposerLock(f); if (dlist && dlist.length) { pkgList = pkgList.concat(dlist); } @@ -3439,7 +3437,7 @@ const createPHPBom = async (path, options) => { * @param path to the project * @param options Parse options from the cli */ -const createRubyBom = async (path, options) => { +export const createRubyBom = async (path, options) => { const gemFiles = getAllFiles( path, (options.multiProject ? "**/" : "") + "Gemfile" ); @@ -3450,9 +3448,9 @@ const createRubyBom = async (path, options) => { ); let pkgList = []; const gemFileMode = gemFiles.length; - let gemLockMode = gemLockFiles.length; + const gemLockMode = gemLockFiles.length; if (gemFileMode && !gemLockMode && options.installDeps) { - for (let f of gemFiles) { + for (const f of gemFiles) { const basePath = dirname(f); console.log("Executing 'bundle install' in", basePath); const result = spawnSync("bundle", ["install"], { @@ -3473,11 +3471,11 @@ const createRubyBom = async (path, options) => { (options.multiProject ?
"**/" : "") + "Gemfile.lock" ); if (gemLockFiles.length) { - for (let f of gemLockFiles) { + for (const f of gemLockFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } - let gemLockData = readFileSync(f, { encoding: "utf-8" }); + const gemLockData = readFileSync(f, { encoding: "utf-8" }); const dlist = await parseGemfileLockData(gemLockData); if (dlist && dlist.length) { pkgList = pkgList.concat(dlist); @@ -3497,8 +3495,9 @@ const createRubyBom = async (path, options) => { * @param path to the project * @param options Parse options from the cli */ -const createCsharpBom = async (path, options) => { +export const createCsharpBom = async (path, options) => { let manifestFiles = []; + let pkgData = undefined; const csProjFiles = getAllFiles( path, (options.multiProject ? "**/" : "") + "*.csproj" @@ -3522,7 +3521,7 @@ const createCsharpBom = async (path, options) => { let pkgList = []; if (nupkgFiles.length) { manifestFiles = manifestFiles.concat(nupkgFiles); - for (let nf of nupkgFiles) { + for (const nf of nupkgFiles) { if (DEBUG_MODE) { console.log(`Parsing ${nf}`); } @@ -3535,11 +3534,11 @@ const createCsharpBom = async (path, options) => { // project.assets.json parsing if (projAssetsFiles.length) { manifestFiles = manifestFiles.concat(projAssetsFiles); - for (let af of projAssetsFiles) { + for (const af of projAssetsFiles) { if (DEBUG_MODE) { console.log(`Parsing ${af}`); } - let pkgData = readFileSync(af, { encoding: "utf-8" }); + pkgData = readFileSync(af, { encoding: "utf-8" }); const dlist = await parseCsProjAssetsData(pkgData); if (dlist && dlist.length) { pkgList = pkgList.concat(dlist); @@ -3548,11 +3547,11 @@ const createCsharpBom = async (path, options) => { } else if (pkgLockFiles.length) { manifestFiles = manifestFiles.concat(pkgLockFiles); // packages.lock.json from nuget - for (let af of pkgLockFiles) { + for (const af of pkgLockFiles) { if (DEBUG_MODE) { console.log(`Parsing ${af}`); } - let pkgData = readFileSync(af, { encoding: "utf-8" }); + 
pkgData = readFileSync(af, { encoding: "utf-8" }); const dlist = await parseCsPkgLockData(pkgData); if (dlist && dlist.length) { pkgList = pkgList.concat(dlist); @@ -3561,11 +3560,11 @@ const createCsharpBom = async (path, options) => { } else if (pkgConfigFiles.length) { manifestFiles = manifestFiles.concat(pkgConfigFiles); // packages.config parsing - for (let f of pkgConfigFiles) { + for (const f of pkgConfigFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } - let pkgData = readFileSync(f, { encoding: "utf-8" }); + pkgData = readFileSync(f, { encoding: "utf-8" }); // Remove byte order mark if (pkgData.charCodeAt(0) === 0xfeff) { pkgData = pkgData.slice(1); @@ -3578,7 +3577,7 @@ const createCsharpBom = async (path, options) => { } else if (csProjFiles.length) { manifestFiles = manifestFiles.concat(csProjFiles); // .csproj parsing - for (let f of csProjFiles) { + for (const f of csProjFiles) { if (DEBUG_MODE) { console.log(`Parsing ${f}`); } @@ -3602,9 +3601,9 @@ const createCsharpBom = async (path, options) => { return {}; }; -const mergeDependencies = (dependencies, newDependencies) => { +export const mergeDependencies = (dependencies, newDependencies) => { const deps_map = {}; - let combinedDeps = dependencies.concat(newDependencies || []); + const combinedDeps = dependencies.concat(newDependencies || []); for (const adep of combinedDeps) { if (!deps_map[adep.ref]) { deps_map[adep.ref] = new Set(); @@ -3613,7 +3612,7 @@ const mergeDependencies = (dependencies, newDependencies) => { deps_map[adep.ref].add(eachDepends); } } - let retlist = []; + const retlist = []; for (const akey of Object.keys(deps_map)) { retlist.push({ ref: akey, @@ -3622,13 +3621,11 @@ const mergeDependencies = (dependencies, newDependencies) => { } return retlist; }; -const _mergeDependencies = mergeDependencies; -export { _mergeDependencies as mergeDependencies }; -const trimComponents = (components, format) => { +export const trimComponents = (components, format) => { const 
keyCache = {}; const filteredComponents = []; - for (let comp of components) { + for (const comp of components) { if (format === "xml" && comp.component) { if (!keyCache[comp.component.purl]) { keyCache[comp.component.purl] = true; @@ -3643,10 +3640,8 @@ const trimComponents = (components, format) => { } return filteredComponents; }; -const _trimComponents = trimComponents; -export { _trimComponents as trimComponents }; -const dedupeBom = ( +export const dedupeBom = ( options, components, componentsXmls, @@ -3694,8 +3689,6 @@ const dedupeBom = ( } }; }; -const _dedupeBom = dedupeBom; -export { _dedupeBom as dedupeBom }; /** * Function to create bom string for all languages @@ -3703,7 +3696,7 @@ export { _dedupeBom as dedupeBom }; * @param pathList list of to the project * @param options Parse options from the cli */ -const createMultiXBom = async (pathList, options) => { +export const createMultiXBom = async (pathList, options) => { let components = []; let dependencies = []; let componentsXmls = []; @@ -3741,7 +3734,7 @@ const createMultiXBom = async (pathList, options) => { ); } } - for (let path of pathList) { + for (const path of pathList) { if (DEBUG_MODE) { console.log("Scanning", path); } @@ -4094,7 +4087,7 @@ const createMultiXBom = async (pathList, options) => { * @param path to the project * @param options Parse options from the cli */ -const createXBom = async (path, options) => { +export const createXBom = async (path, options) => { try { accessSync(path, constants.R_OK); } catch (err) { @@ -4115,12 +4108,12 @@ const createXBom = async (path, options) => { (options.multiProject ? "**/" : "") + "pom.xml" ); // gradle - let gradleFiles = getAllFiles( + const gradleFiles = getAllFiles( path, (options.multiProject ? "**/" : "") + "build.gradle*" ); // scala sbt - let sbtFiles = getAllFiles( + const sbtFiles = getAllFiles( path, (options.multiProject ? 
"**/" : "") + "{build.sbt,Build.scala}*" ); @@ -4606,7 +4599,7 @@ export const createBom = async (path, options) => { * @param bomContents BOM Json */ export async function submitBom(args, bomContents) { - let serverUrl = args.serverUrl.replace(/\/$/, "") + "/api/v1/bom"; + const serverUrl = args.serverUrl.replace(/\/$/, "") + "/api/v1/bom"; let encodedBomContents = Buffer.from(JSON.stringify(bomContents)).toString( "base64" ); diff --git a/server.js b/server.js index 7e571f029..8539498c0 100644 --- a/server.js +++ b/server.js @@ -79,7 +79,7 @@ const configureServer = (cdxgenServer) => { cdxgenServer.keepAliveTimeout = 0; }; -const start = async (options) => { +const start = (options) => { console.log("Listening on", options.serverHost, options.serverPort); const cdxgenServer = http .createServer(app) @@ -89,7 +89,7 @@ const start = async (options) => { const q = url.parse(req.url, true).query; let cleanup = false; options = parseQueryString(q, req.body, options); - let filePath = q.path || q.url || req.body.path || req.body.url; + const filePath = q.path || q.url || req.body.path || req.body.url; if (!filePath) { res.writeHead(500, { "Content-Type": "application/json" }); return res.end( diff --git a/utils.js b/utils.js index ad9c8bbae..5676d7ce2 100644 --- a/utils.js +++ b/utils.js @@ -195,7 +195,7 @@ export function getLicenses(pkg, format = "xml") { * the text to the license text object and stop. 
*/ export function addLicenseText(pkg, l, licenseContent, format = "xml") { - let licenseFilenames = [ + const licenseFilenames = [ "LICENSE", "License", "license", @@ -206,7 +206,7 @@ export function addLicenseText(pkg, l, licenseContent, format = "xml") { "Notice", "notice" ]; - let licenseContentTypes = { + const licenseContentTypes = { "text/plain": "", "text/txt": ".txt", "text/markdown": ".md", @@ -219,7 +219,7 @@ export function addLicenseText(pkg, l, licenseContent, format = "xml") { for (const [licenseContentType, fileExtension] of Object.entries( licenseContentTypes )) { - let licenseFilepath = `${pkg.realPath}/${licenseFilename}${licenseName}${fileExtension}`; + const licenseFilepath = `${pkg.realPath}/${licenseFilename}${licenseName}${fileExtension}`; if (existsSync(licenseFilepath)) { licenseContent.text = readLicenseText( licenseFilepath, @@ -242,16 +242,16 @@ export function readLicenseText( licenseContentType, format = "xml" ) { - let licenseText = readFileSync(licenseFilepath, "utf8"); + const licenseText = readFileSync(licenseFilepath, "utf8"); if (licenseText) { if (format === "xml") { - let licenseContentText = { "#cdata": licenseText }; + const licenseContentText = { "#cdata": licenseText }; if (licenseContentType !== "text/plain") { licenseContentText["@content-type"] = licenseContentType; } return licenseContentText; } else { - let licenseContentText = { content: licenseText }; + const licenseContentText = { content: licenseText }; if (licenseContentType !== "text/plain") { licenseContentText["contentType"] = licenseContentType; } @@ -315,7 +315,7 @@ const _getDepPkgList = async function ( depKeys, pkg ) { - let pkgDependencies = + const pkgDependencies = pkg.lockfileVersion && pkg.lockfileVersion >= 3 ? 
pkg.packages : pkg.dependencies; @@ -326,7 +326,7 @@ const _getDepPkgList = async function ( if (k === "") { continue; } - let name = k; + const name = k; const version = pkgDependencies[name].version; const purl = new PackageURL( "npm", @@ -337,7 +337,7 @@ const _getDepPkgList = async function ( null ); const purlString = decodeURIComponent(purl.toString()); - let scope = pkgDependencies[name].dev === true ? "optional" : undefined; + const scope = pkgDependencies[name].dev === true ? "optional" : undefined; const apkg = { name: name.replace("node_modules/", ""), version, @@ -446,8 +446,8 @@ export const parsePkgJson = async (pkgJsonFile) => { */ export const parsePkgLock = async (pkgLockFile) => { let pkgList = []; - let dependenciesList = []; - let depKeys = {}; + const dependenciesList = []; + const depKeys = {}; let rootPkg = {}; if (existsSync(pkgLockFile)) { const lockData = JSON.parse(readFileSync(pkgLockFile, "utf8")); @@ -766,7 +766,7 @@ export const parseNodeShrinkwrap = async function (swFile) { if (existsSync(swFile)) { const lockData = JSON.parse(readFileSync(swFile, "utf8")); const pkgKeys = Object.keys(lockData); - for (var k in pkgKeys) { + for (const k in pkgKeys) { const fullName = pkgKeys[k]; const integrity = lockData[fullName]; const parts = fullName.split("@"); @@ -779,7 +779,7 @@ export const parseNodeShrinkwrap = async function (swFile) { version = parts[1]; } else if (parts.length === 3) { if (parts[0] === "") { - let gnameparts = parts[1].split("/"); + const gnameparts = parts[1].split("/"); group = gnameparts[0]; name = gnameparts[1]; } else { @@ -874,7 +874,7 @@ export const parsePnpmLock = async function (pnpmLock, parentComponent = null) { } const packages = yamlObj.packages; const pkgKeys = Object.keys(packages); - for (var k in pkgKeys) { + for (const k in pkgKeys) { // Eg: @babel/code-frame/7.10.1 // In lockfileVersion 6, /@babel/code-frame@7.18.6 let fullName = pkgKeys[k].replace("/@", "@"); @@ -885,7 +885,7 @@ export const 
parsePnpmLock = async function (pnpmLock, parentComponent = null) { const parts = fullName.split("/"); const integrity = packages[pkgKeys[k]].resolution.integrity; const deps = packages[pkgKeys[k]].dependencies || []; - let scope = packages[pkgKeys[k]].dev === true ? "optional" : undefined; + const scope = packages[pkgKeys[k]].dev === true ? "optional" : undefined; if (parts && parts.length) { let name = ""; let version = ""; @@ -1049,7 +1049,7 @@ export const parseMinJs = async (minJsFile) => { : pkgNameVer.split(" "); if (tmpB && tmpB.length > 1) { // Fix #223 - lowercase parsed package name - let name = tmpB[0].replace(/ /g, "-").trim().toLowerCase(); + const name = tmpB[0].replace(/ /g, "-").trim().toLowerCase(); if ( ["copyright", "author", "licensed"].includes(name.toLowerCase()) ) { @@ -1111,7 +1111,7 @@ export const parsePom = function (pomFile) { } else if (dependencies && !Array.isArray(dependencies)) { dependencies = [dependencies]; } - for (let adep of dependencies) { + for (const adep of dependencies) { const version = adep.version; let versionStr = undefined; if (version && version._ && version._.indexOf("$") == -1) { @@ -1150,7 +1150,7 @@ export const parseMavenTree = function (rawOutput) { const tmpA = rawOutput.split("\n"); let last_level = 0; let last_purl = ""; - let stack = []; + const stack = []; tmpA.forEach((l) => { if (!includeMavenTestScope && l.trim().endsWith(":test")) { return; @@ -1314,7 +1314,7 @@ export const parseGradleDep = function ( ) { last_level = 1; if (rline.startsWith("+--- project :")) { - let tmpProj = rline.split("+--- project :"); + const tmpProj = rline.split("+--- project :"); last_project_purl = `pkg:maven/${tmpProj[1].trim()}@${rootProjectVersion}?type=jar`; stack = [last_project_purl]; last_purl = last_project_purl; @@ -1481,17 +1481,17 @@ export const parseLeinDep = function (rawOutput) { export const parseLeinMap = function (node, keys_cache, deps) { if (node["map"]) { - for (let n of node["map"]) { + for (const n 
of node["map"]) { if (n.length === 2) { const rootNode = n[0]; - let psym = rootNode[0].sym; - let version = rootNode[1]; + const psym = rootNode[0].sym; + const version = rootNode[1]; let group = dirname(psym); if (group === ".") { group = ""; } - let name = basename(psym); - let cacheKey = group + "-" + name + "-" + version; + const name = basename(psym); + const cacheKey = group + "-" + name + "-" + version; if (!keys_cache[cacheKey]) { keys_cache[cacheKey] = true; deps.push({ group, name, version }); @@ -1524,7 +1524,7 @@ export const parseGradleProjects = function (rawOutput) { } else if (l.includes("--- Project")) { const tmpB = l.split("Project "); if (tmpB && tmpB.length > 1) { - let projName = tmpB[1].split(" ")[0].replace(/'/g, ""); + const projName = tmpB[1].split(" ")[0].replace(/'/g, ""); // Include all projects including test projects if (projName.startsWith(":")) { projects.add(projName); @@ -1533,7 +1533,7 @@ export const parseGradleProjects = function (rawOutput) { } else if (l.includes("--- project ")) { const tmpB = l.split("--- project "); if (tmpB && tmpB.length > 1) { - let projName = tmpB[1]; + const projName = tmpB[1]; if (projName.startsWith(":")) { projects.add(projName); } @@ -1554,7 +1554,7 @@ export const parseGradleProjects = function (rawOutput) { */ export const parseGradleProperties = function (rawOutput) { let rootProject = "root"; - let projects = new Set(); + const projects = new Set(); const metadata = { group: "", version: "latest", properties: [] }; if (typeof rawOutput === "string") { const tmpA = rawOutput.split("\n"); @@ -1616,7 +1616,7 @@ export const executeGradleProperties = function (dir, rootPath, subProject) { "plain", "--build-cache" ]; - let gradleCmd = getGradleCommand(dir, rootPath); + const gradleCmd = getGradleCommand(dir, rootPath); if (process.env.GRADLE_ARGS) { const addArgs = process.env.GRADLE_ARGS.split(" "); gradlePropertiesArgs = gradlePropertiesArgs.concat(addArgs); @@ -1764,7 +1764,7 @@ export const 
parseKVDep = function (rawOutput) { * @param {string} name License full name */ export const findLicenseId = function (name) { - for (let l of licenseMapping) { + for (const l of licenseMapping) { if (l.names.includes(name)) { return l.exp; } @@ -1781,8 +1781,8 @@ export const findLicenseId = function (name) { */ export const guessLicenseId = function (content) { content = content.replace(/\n/g, " "); - for (let l of licenseMapping) { - for (let j in l.names) { + for (const l of licenseMapping) { + for (const j in l.names) { if (content.toUpperCase().indexOf(l.names[j].toUpperCase()) > -1) { return l.exp; } @@ -1817,7 +1817,7 @@ export const getMvnMetadata = async function (pkgList) { if (p.group.indexOf("android") !== -1) { urlPrefix = ANDROID_MAVEN; } - let groupPart = p.group.replace(/\./g, "/"); + const groupPart = p.group.replace(/\./g, "/"); // Querying maven requires a valid group name if (!groupPart || groupPart === "") { cdepList.push(p); @@ -1900,7 +1900,7 @@ export const parsePyRequiresDist = function (dist_string) { name = tmpA[0]; } else if (tmpA.length > 1) { name = tmpA[0]; - let tmpVersion = tmpA[1]; + const tmpVersion = tmpA[1]; version = tmpVersion.split(",")[0].replace(/[();=&glt><]/g, ""); } return { @@ -1919,7 +1919,7 @@ export const guessPypiMatchingVersion = (versionsList, versionSpecifiers) => { versionSpecifiers = versionSpecifiers.replace(/,/g, " ").split(";")[0]; // Iterate in the reverse order for (let i = versionsList.length - 1; i > 0; i--) { - let rv = versionsList[i]; + const rv = versionsList[i]; if (satisfies(coerce(rv), versionSpecifiers, true)) { return rv; } @@ -1939,7 +1939,7 @@ export const getPyMetadata = async function (pkgList, fetchDepsInfo) { return pkgList; } const PYPI_URL = "https://pypi.org/pypi/"; - let cdepList = []; + const cdepList = []; for (const p of pkgList) { if (!p || !p.name) { continue; @@ -2099,7 +2099,7 @@ export const parsePiplockData = async function (lockData) { Object.keys(depBlock).forEach((p) => { 
const pkg = depBlock[p]; if (Object.prototype.hasOwnProperty.call(pkg, "version")) { - let versionStr = pkg.version.replace("==", ""); + const versionStr = pkg.version.replace("==", ""); pkgList.push({ name: p, version: versionStr }); } }); @@ -2184,7 +2184,7 @@ export async function parseReqFile(reqData, fetchDepsInfo) { versionStr = null; } if (!tmpA[0].includes("=") && !tmpA[0].trim().includes(" ")) { - let name = tmpA[0].trim().replace(";", ""); + const name = tmpA[0].trim().replace(";", ""); if (!PYTHON_STD_MODULES.includes(name)) { pkgList.push({ name, @@ -2194,8 +2194,8 @@ export async function parseReqFile(reqData, fetchDepsInfo) { } } } else if (l.includes("<") && l.includes(">")) { - let tmpA = l.split(">"); - let name = tmpA[0].trim().replace(";", ""); + const tmpA = l.split(">"); + const name = tmpA[0].trim().replace(";", ""); const versionSpecifiers = l.replace(name, ""); if (!PYTHON_STD_MODULES.includes(name)) { pkgList.push({ @@ -2216,7 +2216,7 @@ export async function parseReqFile(reqData, fetchDepsInfo) { tmpA = tmpA.split("#")[0]; } if (!tmpA[0].trim().includes(" ")) { - let name = tmpA[0].trim().replace(";", ""); + const name = tmpA[0].trim().replace(";", ""); const versionSpecifiers = l.replace(name, ""); if (!PYTHON_STD_MODULES.includes(name)) { pkgList.push({ @@ -2237,9 +2237,9 @@ export async function parseReqFile(reqData, fetchDepsInfo) { l = l.split("#")[0]; } l = l.trim(); - let tmpA = l.split(/(<|>)/); + const tmpA = l.split(/(<|>)/); if (tmpA && tmpA.length === 3) { - let name = tmpA[0].trim().replace(";", ""); + const name = tmpA[0].trim().replace(";", ""); const versionSpecifiers = l.replace(name, ""); if (!PYTHON_STD_MODULES.includes(name)) { pkgList.push({ @@ -2255,7 +2255,7 @@ export async function parseReqFile(reqData, fetchDepsInfo) { }); } } else if (!l.includes(" ")) { - let name = l.replace(";", ""); + const name = l.replace(";", ""); const versionSpecifiers = l.replace(name, ""); if (!PYTHON_STD_MODULES.includes(name)) { 
pkgList.push({ @@ -2429,7 +2429,7 @@ export const getRepoLicense = async function (repoUrl, repoMetadata) { const group = repoMetadata.group; const name = repoMetadata.name; if (group && name) { - for (let akLic of knownLicenses) { + for (const akLic of knownLicenses) { if (akLic.group === "." && akLic.name === name) { return akLic.license; } else if ( @@ -2471,7 +2471,7 @@ export const getGoPkgLicense = async function (repoMetadata) { } const licenseIds = licenses.split(", "); const licList = []; - for (let id of licenseIds) { + for (const id of licenseIds) { const alicense = { id: id }; @@ -2554,23 +2554,33 @@ export const parseGoModData = async function (goModData, gosumMap) { if (!isModReplacement) { // Add group, name and version component properties for required modules const version = tmpA[1]; - let gosumHash = gosumMap[`${tmpA[0]}/${version}`]; + const gosumHash = gosumMap[`${tmpA[0]}/${version}`]; // The hash for this version was not found in go.sum, so skip as it is most likely being replaced. if (gosumHash === undefined) { continue; } - let component = await getGoPkgComponent("", tmpA[0], version, gosumHash); + const component = await getGoPkgComponent( + "", + tmpA[0], + version, + gosumHash + ); pkgComponentsList.push(component); } else { // Add group, name and version component properties for replacement modules const version = tmpA[3]; - let gosumHash = gosumMap[`${tmpA[2]}/${version}`]; + const gosumHash = gosumMap[`${tmpA[2]}/${version}`]; // The hash for this version was not found in go.sum, so skip. 
if (gosumHash === undefined) { continue; } - let component = await getGoPkgComponent("", tmpA[2], version, gosumHash); + const component = await getGoPkgComponent( + "", + tmpA[2], + version, + gosumHash + ); pkgComponentsList.push(component); } } @@ -2590,7 +2600,7 @@ export const parseGoListDep = async function (rawOutput, gosumMap) { const deps = []; const keys_cache = {}; const pkgs = rawOutput.split("\n"); - for (let l of pkgs) { + for (const l of pkgs) { const verArr = l.trim().replace(new RegExp("[\"']", "g"), "").split(" "); if (verArr && verArr.length === 5) { @@ -2599,8 +2609,8 @@ export const parseGoListDep = async function (rawOutput, gosumMap) { if (!keys_cache[key]) { keys_cache[key] = key; const version = verArr[1]; - let gosumHash = gosumMap[`${verArr[0]}/${version}`]; - let component = await getGoPkgComponent( + const gosumHash = gosumMap[`${verArr[0]}/${version}`]; + const component = await getGoPkgComponent( "", verArr[0], version, @@ -2655,7 +2665,7 @@ export const parseGosumData = async function (gosumData) { return pkgList; } const pkgs = gosumData.split("\n"); - for (let l of pkgs) { + for (const l of pkgs) { // look for lines containing go.mod if (l.indexOf("go.mod") > -1) { const tmpA = l.split(" "); @@ -2693,7 +2703,7 @@ export const parseGopkgData = async function (gopkgData) { } let pkg = null; const pkgs = gopkgData.split("\n"); - for (let l of pkgs) { + for (const l of pkgs) { let key = null; let value = null; if (l.indexOf("[[projects]]") > -1) { @@ -2741,7 +2751,7 @@ export const parseGoVersionData = async function (buildInfoData) { return pkgList; } const pkgs = buildInfoData.split("\n"); - for (let i in pkgs) { + for (const i in pkgs) { const l = pkgs[i].trim().replace(/\t/g, " "); if (!l.startsWith("dep")) { continue; @@ -2755,7 +2765,7 @@ export const parseGoVersionData = async function (buildInfoData) { if (tmpA.length == 4) { hash = tmpA[tmpA.length - 1].replace("h1:", "sha256-"); } - let component = await getGoPkgComponent("", 
name, tmpA[2].trim(), hash); + const component = await getGoPkgComponent("", name, tmpA[2].trim(), hash); pkgList.push(component); } return pkgList; @@ -2958,7 +2968,7 @@ export const getDartMetadata = async function (pkgList) { }); if (res && res.body) { const versions = res.body.versions; - for (let v of versions) { + for (const v of versions) { if (p.version === v.version) { const pubspec = v.pubspec; p.description = pubspec.description; @@ -2982,7 +2992,7 @@ export const getDartMetadata = async function (pkgList) { }; export const parseCargoTomlData = async function (cargoData) { - let pkgList = []; + const pkgList = []; if (!cargoData) { return pkgList; } @@ -3031,7 +3041,7 @@ export const parseCargoTomlData = async function (cargoData) { pkgList.push(pkg); } pkg = undefined; - let tmpA = l.split(" = "); + const tmpA = l.split(" = "); let tmpB = undefined; let name = tmpA[0]; let version = undefined; @@ -3179,7 +3189,7 @@ export const parsePubLockData = async function (pubLockData) { } }; -export const parsePubYamlData = async function (pubYamlData) { +export const parsePubYamlData = function (pubYamlData) { const pkgList = []; let yamlObj = undefined; try { @@ -3199,7 +3209,7 @@ export const parsePubYamlData = async function (pubYamlData) { return pkgList; }; -export const parseHelmYamlData = async function (helmData) { +export const parseHelmYamlData = function (helmData) { const pkgList = []; let yamlObj = undefined; try { @@ -3211,7 +3221,7 @@ export const parseHelmYamlData = async function (helmData) { return pkgList; } if (yamlObj.name && yamlObj.version) { - let pkg = { + const pkg = { name: yamlObj.name, description: yamlObj.description || "", version: yamlObj.version @@ -3223,7 +3233,7 @@ export const parseHelmYamlData = async function (helmData) { } if (yamlObj.dependencies) { for (const hd of yamlObj.dependencies) { - let pkg = { + const pkg = { name: hd.name, version: hd.version // This could have * so not precise }; @@ -3238,7 +3248,7 @@ export 
const parseHelmYamlData = async function (helmData) { for (const key of Object.keys(yamlObj.entries[he])) { const hd = yamlObj.entries[he][key]; if (hd.name && hd.version) { - let pkg = { + const pkg = { name: hd.name, version: hd.version, description: hd.description || "" @@ -3327,7 +3337,7 @@ export const recurseImageNameLookup = (keyValueObj, pkgList, imgList) => { return imgList; }; -export const parseContainerSpecData = async function (dcData) { +export const parseContainerSpecData = function (dcData) { const pkgList = []; const imgList = []; if (!dcData.includes("image") && !dcData.includes("kind")) { @@ -3397,7 +3407,7 @@ export const parseContainerSpecData = async function (dcData) { export const identifyFlow = function (processingObj) { let flow = "unknown"; if (processingObj.sinkId) { - let sinkId = processingObj.sinkId.toLowerCase(); + const sinkId = processingObj.sinkId.toLowerCase(); if (sinkId.endsWith("write")) { flow = "inbound"; } else if (sinkId.endsWith("read")) { @@ -3427,7 +3437,7 @@ export const parsePrivadoFile = function (f) { return servlist; } const jsonData = JSON.parse(pData); - let aservice = { + const aservice = { "x-trust-boundary": false, properties: [], data: [], @@ -3472,9 +3482,9 @@ export const parsePrivadoFile = function (f) { // Find endpoints if (jsonData.collections) { const endpoints = []; - for (let c of jsonData.collections) { - for (let occ of c.collections) { - for (let e of occ.occurrences) { + for (const c of jsonData.collections) { + for (const occ of c.collections) { + for (const e of occ.occurrences) { if (e.endPoint) { endpoints.push(e.endPoint); } @@ -3485,7 +3495,7 @@ export const parsePrivadoFile = function (f) { } // Capture violations if (jsonData.violations) { - for (let v of jsonData.violations) { + for (const v of jsonData.violations) { aservice.properties.push({ name: "privado_violations", value: v.policyId @@ -3505,7 +3515,7 @@ export const parsePrivadoFile = function (f) { return servlist; }; -export 
const parseOpenapiSpecData = async function (oaData) { +export const parseOpenapiSpecData = function (oaData) { const servlist = []; if (!oaData) { return servlist; @@ -3555,7 +3565,7 @@ export const parseOpenapiSpecData = async function (oaData) { return servlist; }; -export const parseCabalData = async function (cabalData) { +export const parseCabalData = function (cabalData) { const pkgList = []; if (!cabalData) { return pkgList; @@ -3582,7 +3592,7 @@ export const parseCabalData = async function (cabalData) { return pkgList; }; -export const parseMixLockData = async function (mixData) { +export const parseMixLockData = function (mixData) { const pkgList = []; if (!mixData) { return pkgList; @@ -3608,7 +3618,7 @@ export const parseMixLockData = async function (mixData) { return pkgList; }; -export const parseGitHubWorkflowData = async function (ghwData) { +export const parseGitHubWorkflowData = function (ghwData) { const pkgList = []; const keys_cache = {}; if (!ghwData) { @@ -3650,7 +3660,7 @@ export const parseGitHubWorkflowData = async function (ghwData) { return pkgList; }; -export const parseCloudBuildData = async function (cbwData) { +export const parseCloudBuildData = function (cbwData) { const pkgList = []; const keys_cache = {}; if (!cbwData) { @@ -3666,7 +3676,7 @@ export const parseCloudBuildData = async function (cbwData) { const tmpA = step.name.split(":"); if (tmpA.length === 2) { let group = dirname(tmpA[0]); - let name = basename(tmpA[0]); + const name = basename(tmpA[0]); if (group === ".") { group = ""; } @@ -3687,7 +3697,7 @@ export const parseCloudBuildData = async function (cbwData) { return pkgList; }; -export const parseConanLockData = async function (conanLockData) { +export const parseConanLockData = function (conanLockData) { const pkgList = []; if (!conanLockData) { return pkgList; @@ -3697,7 +3707,7 @@ export const parseConanLockData = async function (conanLockData) { return pkgList; } const nodes = graphLock.graph_lock.nodes; - for 
(let nk of Object.keys(nodes)) { + for (const nk of Object.keys(nodes)) { if (nodes[nk].ref) { const tmpA = nodes[nk].ref.split("/"); if (tmpA.length === 2) { @@ -3708,7 +3718,7 @@ export const parseConanLockData = async function (conanLockData) { return pkgList; }; -export const parseConanData = async function (conanData) { +export const parseConanData = function (conanData) { const pkgList = []; if (!conanData) { return pkgList; @@ -3737,7 +3747,7 @@ export const parseLeiningenData = function (leinData) { leinData = "(defproject" + tmpArr[1]; } const ednData = parseEDNString(leinData); - for (let k of Object.keys(ednData)) { + for (const k of Object.keys(ednData)) { if (k === "list") { ednData[k].forEach((jk) => { if (Array.isArray(jk)) { @@ -3768,7 +3778,7 @@ export const parseEdnData = function (rawEdnData) { } const ednData = parseEDNString(rawEdnData); const pkgCache = {}; - for (let k of Object.keys(ednData)) { + for (const k of Object.keys(ednData)) { if (k === "map") { ednData[k].forEach((jk) => { if (Array.isArray(jk)) { @@ -3815,7 +3825,7 @@ export const parseEdnData = function (rawEdnData) { export const parseNupkg = async function (nupkgFile) { const pkgList = []; - let pkg = { group: "" }; + const pkg = { group: "" }; let nuspecData = await readZipEntry(nupkgFile, ".nuspec"); // Remove byte order mark if (nuspecData.charCodeAt(0) === 0xfeff) { @@ -3875,9 +3885,9 @@ export const parseCsPkgData = async function (pkgData) { return pkgList; } packages = packages[0].package; - for (let i in packages) { + for (const i in packages) { const p = packages[i].$; - let pkg = { group: "" }; + const pkg = { group: "" }; pkg.name = p.id; pkg.version = p.version; pkgList.push(pkg); @@ -3907,12 +3917,12 @@ export const parseCsProjData = async function (csProjData) { } const project = projects[0]; if (project.ItemGroup && project.ItemGroup.length) { - for (let i in project.ItemGroup) { + for (const i in project.ItemGroup) { const item = project.ItemGroup[i]; // .net 
core use PackageReference - for (let j in item.PackageReference) { + for (const j in item.PackageReference) { const pref = item.PackageReference[j].$; - let pkg = { group: "" }; + const pkg = { group: "" }; if (!pref.Include || pref.Include.includes(".csproj")) { continue; } @@ -3921,9 +3931,9 @@ export const parseCsProjData = async function (csProjData) { pkgList.push(pkg); } // .net framework use Reference - for (let j in item.Reference) { + for (const j in item.Reference) { const pref = item.Reference[j].$; - let pkg = { group: "" }; + const pkg = { group: "" }; if (!pref.Include || pref.Include.includes(".csproj")) { continue; } @@ -3953,7 +3963,7 @@ export const parseCsProjAssetsData = async function (csProjData) { if (!assetData || !assetData.libraries) { return pkgList; } - for (let alib of Object.keys(assetData.libraries)) { + for (const alib of Object.keys(assetData.libraries)) { // Skip os runtime packages if (alib.startsWith("runtime")) { continue; @@ -3992,8 +4002,8 @@ export const parseCsPkgLockData = async function (csLockData) { if (!assetData || !assetData.dependencies) { return pkgList; } - for (let aversion of Object.keys(assetData.dependencies)) { - for (let alib of Object.keys(assetData.dependencies[aversion])) { + for (const aversion of Object.keys(assetData.dependencies)) { + for (const alib of Object.keys(assetData.dependencies[aversion])) { const libData = assetData.dependencies[aversion][alib]; pkg = { group: "", @@ -4135,15 +4145,15 @@ export const parseComposerLock = function (pkgLockFile) { return []; } if (lockData) { - let packages = {}; + const packages = {}; if (lockData["packages"]) { packages["required"] = lockData["packages"]; } if (lockData["packages-dev"]) { packages["optional"] = lockData["packages-dev"]; } - for (let compScope in packages) { - for (let i in packages[compScope]) { + for (const compScope in packages) { + for (const i in packages[compScope]) { const pkg = packages[compScope][i]; // Be extra cautious. 
Potential fix for #236 if (!pkg || !pkg.name || !pkg.version) { @@ -4153,7 +4163,7 @@ export const parseComposerLock = function (pkgLockFile) { if (group === ".") { group = ""; } - let name = basename(pkg.name); + const name = basename(pkg.name); pkgList.push({ group: group, name: name, @@ -4190,7 +4200,7 @@ export const parseSbtLock = function (pkgLockFile) { if (existsSync(pkgLockFile)) { const lockData = JSON.parse(readFileSync(pkgLockFile, "utf8")); if (lockData && lockData.dependencies) { - for (let pkg of lockData.dependencies) { + for (const pkg of lockData.dependencies) { const artifacts = pkg.artifacts || undefined; let integrity = ""; if (artifacts && artifacts.length) { @@ -4240,15 +4250,15 @@ export const convertOSQueryResults = function ( if (res.version) { const version = res.version; let name = res.name || res.device_id; - let group = ""; - let subpath = res.path || res.admindir || res.source; - let publisher = res.maintainer || res.creator; + const group = ""; + const subpath = res.path || res.admindir || res.source; + const publisher = res.maintainer || res.creator; let scope = undefined; - let compScope = res.priority; + const compScope = res.priority; if (["required", "optional", "excluded"].includes(compScope)) { scope = compScope; } - let description = + const description = res.description || res.arguments || res.device || @@ -4293,7 +4303,7 @@ export const _swiftDepPkgList = ( jsonData ) => { if (jsonData && jsonData.dependencies) { - for (let adep of jsonData.dependencies) { + for (const adep of jsonData.dependencies) { const urlOrPath = adep.url || adep.path; const apkg = { group: adep.identity || "", @@ -4333,7 +4343,7 @@ export const _swiftDepPkgList = ( // Handle the immediate dependencies before recursing if (adep.dependencies && adep.dependencies.length) { const deplist = []; - for (let cdep of adep.dependencies) { + for (const cdep of adep.dependencies) { const deppurl = new PackageURL( "swift", cdep.identity || "", @@ -4380,7 +4390,7 
@@ export const parseSwiftJsonTree = (rawOutput, pkgFile) => { } const pkgList = []; const dependenciesList = []; - let depKeys = {}; + const depKeys = {}; let rootPkg = {}; let jsonData = {}; try { @@ -4497,7 +4507,7 @@ export const parseSwiftResolved = (resolvedFile) => { * @param {string} basePath Path to the maven project */ export const collectMvnDependencies = function (mavenCmd, basePath) { - let tempDir = mkdtempSync(join(tmpdir(), "mvn-deps-")); + const tempDir = mkdtempSync(join(tmpdir(), "mvn-deps-")); console.log( `Executing 'mvn dependency:copy-dependencies -DoutputDirectory=${tempDir} -DexcludeTransitive=true -DincludeScope=runtime' in ${basePath}` ); @@ -4555,7 +4565,7 @@ export const collectJarNS = function (jarPath) { // Execute jar tvf to get class names const jarFiles = getAllFiles(jarPath, "**/*.jar"); if (jarFiles && jarFiles.length) { - for (let jf of jarFiles) { + for (const jf of jarFiles) { const jarname = basename(jf); if (DEBUG_MODE) { console.log(`Executing 'jar tf ${jf}'`); @@ -4659,7 +4669,7 @@ export { _encodeForPurl as encodeForPurl }; * @return pkgList Package list */ export const extractJarArchive = function (jarFile, tempDir) { - let pkgList = []; + const pkgList = []; let jarFiles = []; const fname = basename(jarFile); let pomname = undefined; @@ -4674,7 +4684,7 @@ export const extractJarArchive = function (jarFile, tempDir) { copyFileSync(jarFile, join(tempDir, fname), constants.COPYFILE_FICLONE); } if (jarFile.endsWith(".war") || jarFile.endsWith(".hpi")) { - let jarResult = spawnSync("jar", ["-xf", join(tempDir, fname)], { + const jarResult = spawnSync("jar", ["-xf", join(tempDir, fname)], { encoding: "utf-8", cwd: tempDir }); @@ -4693,7 +4703,7 @@ export const extractJarArchive = function (jarFile, tempDir) { jarFiles = [join(tempDir, fname)]; } if (jarFiles && jarFiles.length) { - for (let jf of jarFiles) { + for (const jf of jarFiles) { pomname = jf.replace(".jar", ".pom"); const jarname = basename(jf); // Ignore test jars 
@@ -4703,7 +4713,7 @@ export const extractJarArchive = function (jarFile, tempDir) { ) { continue; } - let manifestDir = join(tempDir, "META-INF"); + const manifestDir = join(tempDir, "META-INF"); const manifestFile = join(manifestDir, "MANIFEST.MF"); let jarResult = { status: 1 @@ -4841,8 +4851,8 @@ export const extractJarArchive = function (jarFile, tempDir) { export const determineSbtVersion = function (projectPath) { const buildPropFile = join(projectPath, "project", "build.properties"); if (existsSync(buildPropFile)) { - let properties = propertiesReader(buildPropFile); - let property = properties.get("sbt.version"); + const properties = propertiesReader(buildPropFile); + const property = properties.get("sbt.version"); if (property != null && valid(property)) { return property; } @@ -4864,7 +4874,7 @@ export const determineSbtVersion = function (projectPath) { */ export const addPlugin = function (projectPath, plugin) { const pluginsFile = sbtPluginsPath(projectPath); - var originalPluginsFile = null; + let originalPluginsFile = null; if (existsSync(pluginsFile)) { originalPluginsFile = pluginsFile + ".cdxgen"; copyFileSync(pluginsFile, originalPluginsFile, constants.COPYFILE_FICLONE); @@ -5157,7 +5167,7 @@ export const executePipFreezeInVenv = async (basePath, reqOrSetupFile) => { * such as the version of python, pip, os, pypi.org availability (and weather?) 
*/ if (tempDir === env.VIRTUAL_ENV) { - let pipInstallArgs = [ + const pipInstallArgs = [ "-m", "pip", "install", @@ -5219,7 +5229,7 @@ export const executePipFreezeInVenv = async (basePath, reqOrSetupFile) => { * At this point, the previous attempt to do a pip install might have failed and we might have an unclean virtual environment with an incomplete list * The position taken by cdxgen is "Some SBoM is better than no SBoM", so we proceed to collecting the dependencies that got installed with pip freeze */ - let pipFreezeArgs = [ + const pipFreezeArgs = [ "-m", "pip", "freeze", diff --git a/utils.test.js b/utils.test.js index fbbd64d3b..81b78c56a 100644 --- a/utils.test.js +++ b/utils.test.js @@ -633,7 +633,7 @@ test("parseGoSumData", async () => { }, 120000); test("parse go list dependencies", async () => { - let dep_list = await parseGoListDep( + const dep_list = await parseGoListDep( readFileSync("./test/data/golist-dep.txt", { encoding: "utf-8" }), {} ); @@ -766,7 +766,7 @@ test("parse cargo toml", async () => { test("parse cargo auditable data", async () => { expect(await parseCargoAuditableData(null)).toEqual([]); - let dep_list = await parseCargoAuditableData( + const dep_list = await parseCargoAuditableData( readFileSync("./test/data/cargo-auditable.txt", { encoding: "utf-8" }) ); expect(dep_list.length).toEqual(32); @@ -1662,7 +1662,7 @@ test("parseComposerLock", () => { }); test("parseGemfileLockData", async () => { - let deps = await parseGemfileLockData( + const deps = await parseGemfileLockData( readFileSync("./test/data/Gemfile.lock", { encoding: "utf-8" }) ); expect(deps.length).toEqual(140); @@ -1673,7 +1673,7 @@ test("parseGemfileLockData", async () => { }); test("parseGemspecData", async () => { - let deps = await parseGemspecData( + const deps = await parseGemspecData( readFileSync("./test/data/xmlrpc.gemspec", { encoding: "utf-8" }) ); expect(deps.length).toEqual(1); @@ -1752,7 +1752,7 @@ test("parse wheel metadata", () => { }); test("parse 
wheel", async () => { - let metadata = await readZipEntry( + const metadata = await readZipEntry( "./test/data/appthreat_depscan-2.0.2-py3-none-any.whl", "METADATA" ); @@ -1769,13 +1769,13 @@ test("parse wheel", async () => { }); test("parse pipfile.lock with hashes", async () => { - let deps = await parsePiplockData( + const deps = await parsePiplockData( JSON.parse(readFileSync("./test/data/Pipfile.lock", { encoding: "utf-8" })) ); expect(deps.length).toEqual(46); }, 120000); -test("parse scala sbt list", async () => { +test("parse scala sbt list", () => { let deps = parseKVDep( readFileSync("./test/data/sbt-dl.list", { encoding: "utf-8" }) ); @@ -1786,19 +1786,19 @@ test("parse scala sbt list", async () => { expect(deps.length).toEqual(117); }); -test("parse scala sbt lock", async () => { - let deps = parseSbtLock("./test/data/build.sbt.lock"); +test("parse scala sbt lock", () => { + const deps = parseSbtLock("./test/data/build.sbt.lock"); expect(deps.length).toEqual(117); }); test("parse nupkg file", async () => { - let deps = await parseNupkg("./test/data/jquery.3.6.0.nupkg"); + const deps = await parseNupkg("./test/data/jquery.3.6.0.nupkg"); expect(deps.length).toEqual(1); expect(deps[0].name).toEqual("jQuery"); }); test("parse bazel skyframe", () => { - let deps = parseBazelSkyframe( + const deps = parseBazelSkyframe( readFileSync("./test/data/bazel/bazel-state.txt", { encoding: "utf-8" }) ); expect(deps.length).toEqual(16); @@ -1806,7 +1806,7 @@ test("parse bazel skyframe", () => { }); test("parse bazel build", () => { - let projs = parseBazelBuild( + const projs = parseBazelBuild( readFileSync("./test/data/bazel/BUILD", { encoding: "utf-8" }) ); expect(projs.length).toEqual(2); @@ -1932,7 +1932,7 @@ test("parse container spec like files", async () => { test("parse cloudbuild data", async () => { expect(await parseCloudBuildData(null)).toEqual([]); - let dep_list = await parseCloudBuildData( + const dep_list = await parseCloudBuildData( 
readFileSync("./test/data/cloudbuild.yaml", { encoding: "utf-8" }) ); expect(dep_list.length).toEqual(1); @@ -1944,7 +1944,7 @@ test("parse cloudbuild data", async () => { }); test("parse privado files", () => { - let servList = parsePrivadoFile("./test/data/privado.json"); + const servList = parsePrivadoFile("./test/data/privado.json"); expect(servList.length).toEqual(1); expect(servList[0].data.length).toEqual(11); expect(servList[0].endpoints.length).toEqual(17);