[update] enhance wpCompress logging, add rootModule parameter to sliceAndWriteCalls, and improve dependency checks

commit 7f2f0b9240 (parent 9dc2e300dc)
Date: 2025-08-14 21:29:36 +01:00
8 changed files with 86 additions and 21 deletions

View File

@@ -15,7 +15,7 @@ export function wpCompress(l, moduleLocation,outputPath = path.resolve('./output
     const libraryLocation = extractFunctionForModule(l, moduleLocation);
     console.log(libraryLocation);
     const outputFile = l + '.bundle.cjs';
-    console.log(`[WebPack] Compressing ${l} in ${moduleLocation} to ${outputFile}`);
+    console.log(`[WebPack] Compressing ${l} in ${moduleLocation} to ${path.join(outputPath, outputFile)}`);
     const moduleFallbackMap = builtinModules.reduce((prev, current) => {
         prev[current] = false;
         return prev;
@@ -48,8 +48,8 @@ export function wpCompress(l, moduleLocation,outputPath = path.resolve('./output
         },
     }, (err, stats) => {
         if (err || stats.hasErrors()) {
-            console.error(`[WebPack] Error Stack`,err?.stack);
-            console.log(`[WebPack]`,stats?.toJson().errors);
+            console.error(`[WebPack] Error encountered`);
+            // console.log(`[WebPack]`,stats?.toJson().errors);
             reject(err || stats);
         }else{
             resolve(path.resolve(outputPath, outputFile));
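
Note: the hunk above shows wpCompress wrapping webpack's callback API in a promise - the compile callback receives (err, stats), rejects on err or stats.hasErrors(), and otherwise resolves with the absolute bundle path. A minimal usage sketch follows; the import location, library name, and candidate path are illustrative assumptions, not taken from this commit.

// Usage sketch only - 'is-glob' and the paths below are illustrative.
import path from 'node:path';
import { wpCompress } from './wpCompress.mjs'; // assumed import location

try {
    const bundlePath = await wpCompress('is-glob', path.resolve('./candidates/is-glob'));
    console.log('[WebPack] bundle written to', bundlePath); // e.g. <outputPath>/is-glob.bundle.cjs
} catch (errOrStats) {
    console.error('[WebPack] compression failed', errOrStats);
}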

View File

@@ -12,13 +12,14 @@ import { LibraryTypesRecorder } from './libcalls.mjs';
  *
  * @param {ReturnType<LibraryTypesRecorder['generateAllArgumentsForRecordedCalls']>} calls
  * @param {string} folderPath
+ * @param {string} rootModule
  */
-export async function sliceAndWriteCalls(calls, folderPath) {
+export async function sliceAndWriteCalls(calls, folderPath, rootModule) {
     const writePromises = [];
     for (const [moduleName, callBox] of calls) {
         if (isRelativeModule(moduleName) || isNodeModule(moduleName)) { // not relative module
-            console.warn(`Skipping module ${moduleName} - relative or inbuilt Node.js module`);
+            // console.warn(`Skipping module ${moduleName} - relative or inbuilt Node.js module`);
             continue;
         }
         console.log(`Slicing module ${moduleName} - ${callBox.size} calls`);
@@ -51,14 +52,18 @@ export async function sliceAndWriteCalls(calls, folderPath) {
         // console.log(`Sliced code ${moduleName}\n`,slicedCode);
         // continue;
-        const writePath = path.resolve('./dist', moduleName, 'index.cjs');
+        const writePath = path.resolve('./dist',rootModule, moduleName, 'index.cjs');
         if (writePath === moduleName) {
             throw Error("Unexpected Directory rewrite. Not allowed.");
         }
+        const { packageJsonFilePath, packageJsonFileContentsString } = createPackageJsonForModule(moduleName, writePath);
         mkdirSync(path.dirname(writePath), { recursive: true });
         console.log(`Writing module '${moduleName}' to '${writePath}'`);
-        writePromises.push(writeFile(writePath, slicedCode));
+        writePromises.push(writeFile(packageJsonFilePath, packageJsonFileContentsString),
+            writeFile(writePath, slicedCode));
+        // writePromises.push(writeFile(writePath, slicedCode));
     }
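
Note: with the new rootModule parameter, each sliced dependency is written under its originating candidate, and createPackageJsonForModule (introduced in the next hunk) drops a minimal package.json next to it so the folder resolves like a normal package. The sketch below shows the resulting layout and how a CommonJS consumer would pick it up; the candidate name braces and dependency fill-range are hypothetical examples.

// dist/
//   braces/              <- rootModule (path.basename of the candidate folder)
//     fill-range/        <- moduleName
//       index.cjs        <- sliced module code
//       package.json     <- { "name": "fill-range", "main": "index.cjs", ... }

// Requiring the directory resolves through the generated "main" field:
const fillRange = require('./dist/braces/fill-range');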
@@ -67,6 +72,23 @@ export async function sliceAndWriteCalls(calls, folderPath) {
     }).catch(console.log);
 }
+function createPackageJsonForModule(moduleName, writePath) {
+    const packageJsonFileContents = {
+        "name": moduleName,
+        "version": "1.0.0",
+        "main": "index.cjs",
+        "scripts": {
+            "test": "echo \"Error: no test specified\" && exit 1"
+        },
+        "author": "",
+        "license": "ISC",
+        "description": ""
+    };
+    const packageJsonFileContentsString = JSON.stringify(packageJsonFileContents, null, 2);
+    const packageJsonFilePath = path.resolve(path.dirname(writePath), 'package.json');
+    return { packageJsonFilePath, packageJsonFileContentsString };
+}
 // is-glob WORKED
 /**
  *
@@ -97,9 +119,9 @@ function driver(folderPath = './candidates/braces') {
     const callMap = libraryTypesRecorder.generateAllArgumentsForRecordedCalls();
+    const moduleBaseName = path.basename(folderPath);
     // logCallList(callMap, folderPath);
-    sliceAndWriteCalls(callMap, folderPath).then(() => {
+    sliceAndWriteCalls(callMap, folderPath,moduleBaseName).then(() => {
         console.log("Slicing and writing calls done");
     });
 }
@@ -135,6 +157,8 @@ function constructJavascriptGlobInFolder(folderPath) {
         ["**/tests/**", false],
         ["**/__tests__/**", false],
         ["**/__mocks__/**", false],
+        ["**/test.js", false],
+        ["**/tests.js", false],
     ].map(glob => {
         const prefix = glob[1] ? '' : '!';
         return prefix+path.resolve(folderPath, glob[0])});
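
Note: the two new entries exclude top-level test.js/tests.js files in addition to the test directories; entries flagged false are prefixed with '!', i.e. negative glob patterns. The matcher consuming these patterns is not part of this diff, so the following is only a sketch assuming a fast-glob style API, with an illustrative candidate folder.

import fg from 'fast-glob';

// Positive pattern first, negated exclusions after - test folders and test files are skipped.
const files = await fg([
    '/repo/candidates/braces/**/*.js',
    '!/repo/candidates/braces/**/__tests__/**',
    '!/repo/candidates/braces/**/test.js',
]);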

View File

@@ -95,7 +95,13 @@ export class LibraryTypesRecorder {
             return type.getTupleElements().map(t => this.instantiateFakerOnType(t,level+1));
         } else if (type.isArray()) {
             return []// TODO - handle arrays;
+            //also, check if its a buffer from NodeJS
         } else if (type.isObject()) {
+            // TODO check if its a buffer
+            if (type.getText() === 'Buffer') {
+                return Buffer.from(simpleFaker.string.alphanumeric(10));
+            }
             const f = type.getCallSignatures();
             if(f.length > 0) {
                 return simpleFaker.helpers.arrayElement(f.map(fn => ()=>this.instantiateFakerOnType(fn.getReturnType(),level+1)));
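
Note: the new branch special-cases Buffer-typed parameters - instead of treating them as generic objects, it fabricates a small random buffer. A self-contained sketch of that fallback, assuming the simpleFaker import already used by this recorder:

import { Buffer } from 'node:buffer';
import { simpleFaker } from '@faker-js/faker';

// Fabricate a Buffer argument from 10 random alphanumeric characters,
// mirroring the Buffer branch added in the hunk above.
function fakeBufferArgument() {
    return Buffer.from(simpleFaker.string.alphanumeric(10));
}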

View File

@@ -53,6 +53,38 @@ export function getImportCallsAndArgumentTypes(importDecls, checker, mainFilePath
         const parent = importDecl.getParent();
         if(!parent?.isKind(SyntaxKind.VariableDeclaration)) {
             console.log("Parent of import call", parent?.getKindName(), parent?.getText());
+            // Check to see if there is a declaration of type:
+            // const x = require('something').x;
+            // or else, drop it.
+            if(parent?.isKind(SyntaxKind.PropertyAccessExpression)){
+                // this is a property access expression
+                const propAccessExpr = parent;
+                const propAccessName = propAccessExpr.getName();
+                const propAccessNameNode = propAccessExpr.getNameNode();
+                if (propAccessNameNode.isKind(SyntaxKind.Identifier)) {
+                    // assert that the parent of the property access is a variable declaration
+                    const parentVarDecl = propAccessExpr.getFirstAncestorByKind(SyntaxKind.VariableDeclaration);
+                    if (parentVarDecl !== undefined) {
+                        // this is a variable declaration
+                        const varName = parentVarDecl.getName();
+                        if (varName === propAccessName) {
+                            const varNameNode = parentVarDecl.getNameNode();
+                            if(varNameNode.isKind(SyntaxKind.Identifier)) {
+                                recordImportedIdentifierUsage(checker, varNameNode, mainFilePath, libraryTypesRecorder, importStringDecl);
+                            }
+                        }else{
+                            console.warn("Variable name does not match property access name", varName, propAccessName);
+                        }
+                    }
+                    // console.error("Property access expression is not a variable declaration", propAccessExpr.getText());
+                    // this is a property access expression with identifier
+                }else{
+                    console.log("Property access name", propAccessName);
+                }
+            }
         }
         if (parent?.isKind(SyntaxKind.VariableDeclaration)) {
             // this is a variable declaration
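
Note: in source terms, the hunk above extends the analyzer beyond plain variable-declaration requires to the property-access form called out in the new comment. Illustrative input the analyzer can now record (the module name is a placeholder):

// Already handled: the require() call's parent is a VariableDeclaration.
const lib = require('something');

// Newly handled: the parent is a PropertyAccessExpression whose name matches
// the declared variable, i.e. const x = require('something').x;
const x = require('something').x;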
@@ -196,7 +228,7 @@ function recordNamespaceImportIdentifierUsage(checker, importNode, mainFilePath,
             continue;
         }
         const callExpressionArguments = callExpression?.getArguments();
-        if (callExpressionArguments === undefined || callExpressionArguments.length === 0) {
+        if (callExpressionArguments === undefined || !Array.isArray( callExpressionArguments)) {
             console.warn("No call expressions found for import reference", ref.getNode().getText());
             continue;
         }
@@ -211,7 +243,7 @@ function recordNamespaceImportIdentifierUsage(checker, importNode, mainFilePath,
             const paramArgType = checker.getTypeOfSymbolAtLocation(paramType,funcCall);
             if(!paramArgType.isAny()){
-                console.log("[analyzer] Using scoped argument", paramArgType.getText(), "for argument", i, "of call", funcCall.getText());
+                // console.log("[analyzer] Using scoped argument", paramArgType.getText(), "for argument", i, "of call", funcCall.getText());
                 return paramArgType;
             }
         }

View File

@@ -17,7 +17,6 @@ const FILTER_LIST = [
     "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.macchiato",
     "https://github.com/paulmillr/async-each",
     "https://github.com/yarnpkg/yarn/blob/master/packages",
-    "https://github.com/emotion-js/emotion/tree/master/packages/stylis",
     "https://github.com/kogosoftwarellc/open-api/tree/master/packages/openapi-types",
     "https://github.com/thenativeweb/boolean",
     "https://github.com/zkochan/packages/tree/master/read-yaml-file",
@@ -25,7 +24,6 @@ const FILTER_LIST = [
     "https://github.com/adobe/react-spectrum/tree/main/packages/@internationalized/date",
     "https://github.com/pnpm/pnpm/blob/main/packages",
     "https://github.com/jhermsmeier/node-scuid",
-    "https://github.com/emotion-js/emotion/tree/master/packages/babel-plugin-emotion",
     "https://github.com/emotion-js/emotion/tree/master/removed-packages/core",
     "https://github.com/babel/babel/tree/master/packages/*",
     "https://github.com/pugjs/pug/tree/master/packages/*",
@@ -33,7 +31,11 @@ const FILTER_LIST = [
     "https://github.com/Marak/Faker.js",
     "https://github.com/ethanent/phin",
     "https://github.com/Popmotion/popmotion/tree/master/packages/*",
-    "https://github.com/gulpjs/copy-prop"
+    "https://github.com/gulpjs/copy-prop",
+    "https://github.com/netlify/serverless-functions-api",
+    "https://github.com/igoradamenko/esbuild-plugin-alias",
+    "https://github.com/emotion-js/emotion/tree/master/packages/*",
+    "https://github.com/jhermsmeier/node-http-link-header"
 ];
 const FILTER_LIST_REGEX = FILTER_LIST.map(GlobToRegExp)
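
Note: several FILTER_LIST entries end in /* globs, so the list is compiled to regular expressions via GlobToRegExp before matching. How the compiled list is consumed is not shown in this diff; the sketch below is an assumption (glob-to-regexp as the package behind GlobToRegExp, and a hypothetical isFiltered helper):

import GlobToRegExp from 'glob-to-regexp';

const FILTER_LIST_REGEX = FILTER_LIST.map(GlobToRegExp);

// Hypothetical helper: a repository URL is skipped when any filter pattern matches it.
const isFiltered = (repoUrl) => FILTER_LIST_REGEX.some(re => re.test(repoUrl));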

View File

@@ -6,7 +6,7 @@ import { processPromisesBatch } from './batch.mjs';
-const intermediateRepoList = await cacheFunctionOutput('repos.json', async function () {
+const intermediateRepoList = await cacheFunctionOutput('repos.n2.json', async function () {
     const [packagesM, packageReposM] = await Promise.all([
         import('download-counts', { with:{type: 'json'}}),
         import('all-the-package-repos', { with: { type: 'json' } })
@@ -15,7 +15,7 @@ const intermediateRepoList = await cacheFunctionOutput('repos.json', async funct
     const packageRepos = packageReposM.default;
     const packageList = Object.keys(packages).map(e => [e, packages[e]])
-        .filter(e => e[1] > 100).filter(e => !e[0].startsWith("@types/"))
+        .filter(e => e[1] > 100_000).filter(e => !e[0].startsWith("@types/")).filter(e => !e[0].startsWith("@webassemblyjs/")) // filter out typescript packages and @types packages
     console.log('packagelist', packageList.length)
     /**
      * @type {[string,string,number][]} repo, link count
@@ -30,9 +30,9 @@ const intermediateRepoList = await cacheFunctionOutput('repos.json', async funct
 // const packageMap = new Map(packageList)
 console.log(`Total repos`,intermediateRepoList.length)
-const intermediateRepoListSmaller = intermediateRepoList.slice(0,5000);
-const repoStatus = await processPromisesBatch(intermediateRepoListSmaller,15,cloneRepoAndCheck)
+const intermediateRepoListSmaller = intermediateRepoList.slice(0,6000);
+const repoStatus = await processPromisesBatch(intermediateRepoListSmaller,10,cloneRepoAndCheck)
 const repoStatusString = csv.stringify(repoStatus);
 await fsp.writeFile('repostatus.csv', repoStatusString);
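
Note: the clone batch now covers 6000 candidate repos at a concurrency of 10 instead of 15. processPromisesBatch itself is not part of this commit; the following is only an assumed sketch of what such a batched runner in ./batch.mjs typically does:

// Assumed shape of processPromisesBatch(items, batchSize, task):
// run `task` over `items` in sequential chunks of `batchSize`, awaiting each chunk.
export async function processPromisesBatch(items, batchSize, task) {
    const results = [];
    for (let i = 0; i < items.length; i += batchSize) {
        const chunk = items.slice(i, i + batchSize);
        results.push(...await Promise.all(chunk.map(task)));
    }
    return results;
}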

View File

@@ -28,6 +28,7 @@ export async function cloneRepoAndCheck([repoName, repoGitUrl, downloadCount]) {
     const packageFile = resolve(repoPath, 'package.json')
     if (!existsSync(packageFile)) return [repoName, null];
+    // console.log("[git] checking", repoName, "for dependencies at ", packageFile);
     const packageJSONContentsString = (await readFile(packageFile)).toString()
     // console.log(packageJSONContentsString);
@@ -58,7 +59,7 @@ function hasAnyActualDependencies(packageJSONContents, repoName) {
 }
 function checkTestingDependencies(packageJSONContents, repoName) {
-    const testingLibraries = new Set(['mocha']);
+    const testingLibraries = new Set(['mocha','jest']);
     const dependencies = new Set();
     if (packageJSONContents.dependencies !== undefined) {
         for (const dep of Object.keys(packageJSONContents.dependencies)) {
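
Note: adding 'jest' alongside 'mocha' widens the set of recognised test runners when screening candidate repositories. Only part of checkTestingDependencies is visible in this hunk; the sketch below fills in the rest under stated assumptions (the real function may also inspect devDependencies or scripts):

// Sketch only: collect declared dependency names, then report which known test runners appear.
// repoName is kept to match the visible signature even though this sketch does not use it.
function checkTestingDependencies(packageJSONContents, repoName) {
    const testingLibraries = new Set(['mocha', 'jest']);
    const dependencies = new Set();
    for (const section of [packageJSONContents.dependencies, packageJSONContents.devDependencies]) {
        if (section !== undefined) {
            for (const dep of Object.keys(section)) dependencies.add(dep);
        }
    }
    return [...testingLibraries].filter(lib => dependencies.has(lib));
}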

View File

@@ -1,4 +1,4 @@
-import fs,{readFile} from 'node:fs'
+import fs,{readFile} from 'node:fs' with {abc: 'xyz'};
 import classnames from 'classnames'
 // import * as s from 'esprima'
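
Note: the fixture's first import now carries an import-attributes clause. Only the type attribute is standardised (e.g. JSON modules); with {abc: 'xyz'} appears to be a deliberately non-standard attribute, presumably to exercise the analyzer's handling of the syntax. For comparison, the standard forms, matching the dynamic imports in the repo-list script above:

// Standard import attribute on a static import (JSON module):
import downloadCounts from 'download-counts' with { type: 'json' };

// Dynamic-import equivalent, as used in the repo-list script:
const packagesM = await import('download-counts', { with: { type: 'json' } });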