init
mc_test/node_modules/app-builder-lib/out/asar/asar.d.ts (+41 lines; generated, vendored, executable file)
@@ -0,0 +1,41 @@
/// <reference types="node" />
/// <reference types="node" />
import { Stats } from "fs-extra";
export interface ReadAsarHeader {
    readonly header: string;
    readonly size: number;
}
export interface NodeIntegrity {
    algorithm: "SHA256";
    hash: string;
    blockSize: number;
    blocks: Array<string>;
}
export declare class Node {
    files?: {
        [key: string]: Node;
    };
    unpacked?: boolean;
    size?: number;
    offset?: string;
    executable?: boolean;
    link?: string;
    integrity?: NodeIntegrity;
}
export declare class AsarFilesystem {
    readonly src: string;
    readonly header: Node;
    readonly headerSize: number;
    private offset;
    constructor(src: string, header?: Node, headerSize?: number);
    searchNodeFromDirectory(p: string, isCreate: boolean): Node | null;
    getOrCreateNode(p: string): Node;
    addFileNode(file: string, dirNode: Node, size: number, unpacked: boolean, stat: Stats, integrity?: NodeIntegrity): Node;
    getNode(p: string): Node | null;
    getFile(p: string, followLinks?: boolean): Node;
    readJson(file: string): Promise<any>;
    readFile(file: string): Promise<Buffer>;
}
export declare function readAsarHeader(archive: string): Promise<ReadAsarHeader>;
export declare function readAsar(archive: string): Promise<AsarFilesystem>;
export declare function readAsarJson(archive: string, file: string): Promise<any>;
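For orientation: the Node tree declared above is the in-memory form of the ASAR directory header, and the header string returned by readAsarHeader is its JSON serialization. A small illustrative value in TypeScript (file names and sizes are made up, not part of this commit; note that offset is a string, per the offset?: string declaration):

import { Node } from "app-builder-lib/out/asar/asar"

// Structural typing lets a plain literal stand in for the declared class.
const sampleHeader: Node = {
    files: {
        "package.json": { size: 142, offset: "0" },
        "index.js": { size: 512, offset: "142" },
        node_modules: {
            files: {
                ms: { files: { "index.js": { size: 3000, offset: "654" } } },
            },
        },
    },
}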
mc_test/node_modules/app-builder-lib/out/asar/asar.js (+151 lines; generated, vendored, executable file)
@@ -0,0 +1,151 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.readAsarJson = exports.readAsar = exports.readAsarHeader = exports.AsarFilesystem = exports.Node = void 0;
const chromium_pickle_js_1 = require("chromium-pickle-js");
const fs_extra_1 = require("fs-extra");
const path = require("path");
class Node {
}
exports.Node = Node;
class AsarFilesystem {
    constructor(src, header = new Node(), headerSize = -1) {
        this.src = src;
        this.header = header;
        this.headerSize = headerSize;
        this.offset = 0;
        if (this.header.files == null) {
            this.header.files = {};
        }
    }
    searchNodeFromDirectory(p, isCreate) {
        let node = this.header;
        for (const dir of p.split(path.sep)) {
            if (dir !== ".") {
                let child = node.files[dir];
                if (child == null) {
                    if (!isCreate) {
                        return null;
                    }
                    child = new Node();
                    child.files = {};
                    node.files[dir] = child;
                }
                node = child;
            }
        }
        return node;
    }
    getOrCreateNode(p) {
        if (p == null || p.length === 0) {
            return this.header;
        }
        const name = path.basename(p);
        const dirNode = this.searchNodeFromDirectory(path.dirname(p), true);
        if (dirNode.files == null) {
            dirNode.files = {};
        }
        let result = dirNode.files[name];
        if (result == null) {
            result = new Node();
            dirNode.files[name] = result;
        }
        return result;
    }
    addFileNode(file, dirNode, size, unpacked, stat, integrity) {
        if (size > 4294967295) {
            throw new Error(`${file}: file size cannot be larger than 4.2GB`);
        }
        const node = new Node();
        node.size = size;
        if (integrity) {
            node.integrity = integrity;
        }
        if (unpacked) {
            node.unpacked = true;
        }
        else {
            // electron expects string
            node.offset = this.offset.toString();
            if (process.platform !== "win32" && stat.mode & 0o100) {
                node.executable = true;
            }
            this.offset += node.size;
        }
        let children = dirNode.files;
        if (children == null) {
            children = {};
            dirNode.files = children;
        }
        children[path.basename(file)] = node;
        return node;
    }
    getNode(p) {
        const node = this.searchNodeFromDirectory(path.dirname(p), false);
        return node.files[path.basename(p)];
    }
    getFile(p, followLinks = true) {
        const info = this.getNode(p);
        // if followLinks is false we don't resolve symlinks
        return followLinks && info.link != null ? this.getFile(info.link) : info;
    }
    async readJson(file) {
        return JSON.parse((await this.readFile(file)).toString());
    }
    readFile(file) {
        return readFileFromAsar(this, file, this.getFile(file));
    }
}
exports.AsarFilesystem = AsarFilesystem;
async function readAsarHeader(archive) {
    const fd = await (0, fs_extra_1.open)(archive, "r");
    let size;
    let headerBuf;
    try {
        const sizeBuf = Buffer.allocUnsafe(8);
        if ((await (0, fs_extra_1.read)(fd, sizeBuf, 0, 8, null)).bytesRead !== 8) {
            throw new Error("Unable to read header size");
        }
        const sizePickle = (0, chromium_pickle_js_1.createFromBuffer)(sizeBuf);
        size = sizePickle.createIterator().readUInt32();
        headerBuf = Buffer.allocUnsafe(size);
        if ((await (0, fs_extra_1.read)(fd, headerBuf, 0, size, null)).bytesRead !== size) {
            throw new Error("Unable to read header");
        }
    }
    finally {
        await (0, fs_extra_1.close)(fd);
    }
    const headerPickle = (0, chromium_pickle_js_1.createFromBuffer)(headerBuf);
    return { header: headerPickle.createIterator().readString(), size };
}
exports.readAsarHeader = readAsarHeader;
async function readAsar(archive) {
    const { header, size } = await readAsarHeader(archive);
    return new AsarFilesystem(archive, JSON.parse(header), size);
}
exports.readAsar = readAsar;
async function readAsarJson(archive, file) {
    const fs = await readAsar(archive);
    return await fs.readJson(file);
}
exports.readAsarJson = readAsarJson;
async function readFileFromAsar(filesystem, filename, info) {
    const size = info.size;
    const buffer = Buffer.allocUnsafe(size);
    if (size <= 0) {
        return buffer;
    }
    if (info.unpacked) {
        return await (0, fs_extra_1.readFile)(path.join(`${filesystem.src}.unpacked`, filename));
    }
    const fd = await (0, fs_extra_1.open)(filesystem.src, "r");
    try {
        const offset = 8 + filesystem.headerSize + parseInt(info.offset, 10);
        await (0, fs_extra_1.read)(fd, buffer, 0, size, offset);
    }
    finally {
        await (0, fs_extra_1.close)(fd);
    }
    return buffer;
}
//# sourceMappingURL=asar.js.map
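For context on how these exports fit together: readAsar parses only the pickled JSON directory header, while readAsarJson additionally seeks into the archive body. A minimal usage sketch in TypeScript (not part of this commit; the archive path is a hypothetical example):

import { readAsar, readAsarJson } from "app-builder-lib/out/asar/asar"

async function inspect(archive: string): Promise<void> {
    // Parse only the directory header (a JSON tree of Node objects).
    const fs = await readAsar(archive)
    console.log(Object.keys(fs.header.files!))

    // Convenience helper: locates the node, seeks to
    // 8 + headerSize + offset inside the archive, and parses the bytes as JSON.
    const pkg = await readAsarJson(archive, "package.json")
    console.log(pkg.name, pkg.version)
}

inspect("dist/mac/MyApp.app/Contents/Resources/app.asar").catch(console.error)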
mc_test/node_modules/app-builder-lib/out/asar/asar.js.map (+1 line; generated, vendored, executable file)
File diff suppressed because one or more lines are too long
mc_test/node_modules/app-builder-lib/out/asar/asarFileChecker.d.ts (+1 line; generated, vendored, executable file)
@@ -0,0 +1 @@
export {};
mc_test/node_modules/app-builder-lib/out/asar/asarFileChecker.js (+38 lines; generated, vendored, executable file)
@@ -0,0 +1,38 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.checkFileInArchive = void 0;
const fs_1 = require("builder-util/out/fs");
const asar_1 = require("./asar");
/** @internal */
async function checkFileInArchive(asarFile, relativeFile, messagePrefix) {
    function error(text) {
        return new Error(`${messagePrefix} "${relativeFile}" in the "${asarFile}" ${text}`);
    }
    let fs;
    try {
        fs = await (0, asar_1.readAsar)(asarFile);
    }
    catch (e) {
        throw error(`is corrupted: ${e}`);
    }
    let stat;
    try {
        stat = fs.getFile(relativeFile);
    }
    catch (e) {
        const fileStat = await (0, fs_1.statOrNull)(asarFile);
        if (fileStat == null) {
            throw error(`does not exist. Seems like a wrong configuration.`);
        }
        // asar throws error on access to undefined object (info.link)
        stat = null;
    }
    if (stat == null) {
        throw error(`does not exist. Seems like a wrong configuration.`);
    }
    if (stat.size === 0) {
        throw error(`is corrupted: size 0`);
    }
}
exports.checkFileInArchive = checkFileInArchive;
//# sourceMappingURL=asarFileChecker.js.map
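This checker is what produces the familiar "… does not exist. Seems like a wrong configuration." build errors. A hedged usage sketch (the module is marked @internal and its .d.ts exports nothing, so treat this as illustration only; paths and the message prefix are hypothetical):

import { checkFileInArchive } from "app-builder-lib/out/asar/asarFileChecker"

async function validate(): Promise<void> {
    // Resolves when "main.js" exists in the archive with a non-zero size;
    // otherwise throws an Error built by the local error() helper above.
    await checkFileInArchive("dist/win-unpacked/resources/app.asar", "main.js", "Application entry file")
}

validate().catch(err => {
    console.error(err.message)
    process.exitCode = 1
})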
mc_test/node_modules/app-builder-lib/out/asar/asarFileChecker.js.map (+1 line; generated, vendored, executable file)
@@ -0,0 +1 @@
{"version":3,"file":"asarFileChecker.js","sourceRoot":"","sources":["../../src/asar/asarFileChecker.ts"],"names":[],"mappings":";;;AAAA,4CAAgD;AAChD,iCAAuC;AAEvC,gBAAgB;AACT,KAAK,UAAU,kBAAkB,CAAC,QAAgB,EAAE,YAAoB,EAAE,aAAqB;IACpG,SAAS,KAAK,CAAC,IAAY;QACzB,OAAO,IAAI,KAAK,CAAC,GAAG,aAAa,KAAK,YAAY,aAAa,QAAQ,KAAK,IAAI,EAAE,CAAC,CAAA;IACrF,CAAC;IAED,IAAI,EAAE,CAAA;IACN,IAAI,CAAC;QACH,EAAE,GAAG,MAAM,IAAA,eAAQ,EAAC,QAAQ,CAAC,CAAA;IAC/B,CAAC;IAAC,OAAO,CAAM,EAAE,CAAC;QAChB,MAAM,KAAK,CAAC,iBAAiB,CAAC,EAAE,CAAC,CAAA;IACnC,CAAC;IAED,IAAI,IAAiB,CAAA;IACrB,IAAI,CAAC;QACH,IAAI,GAAG,EAAE,CAAC,OAAO,CAAC,YAAY,CAAC,CAAA;IACjC,CAAC;IAAC,OAAO,CAAM,EAAE,CAAC;QAChB,MAAM,QAAQ,GAAG,MAAM,IAAA,eAAU,EAAC,QAAQ,CAAC,CAAA;QAC3C,IAAI,QAAQ,IAAI,IAAI,EAAE,CAAC;YACrB,MAAM,KAAK,CAAC,mDAAmD,CAAC,CAAA;QAClE,CAAC;QAED,8DAA8D;QAC9D,IAAI,GAAG,IAAI,CAAA;IACb,CAAC;IAED,IAAI,IAAI,IAAI,IAAI,EAAE,CAAC;QACjB,MAAM,KAAK,CAAC,mDAAmD,CAAC,CAAA;IAClE,CAAC;IACD,IAAI,IAAI,CAAC,IAAI,KAAK,CAAC,EAAE,CAAC;QACpB,MAAM,KAAK,CAAC,sBAAsB,CAAC,CAAA;IACrC,CAAC;AACH,CAAC;AA/BD,gDA+BC","sourcesContent":["import { statOrNull } from \"builder-util/out/fs\"\nimport { Node, readAsar } from \"./asar\"\n\n/** @internal */\nexport async function checkFileInArchive(asarFile: string, relativeFile: string, messagePrefix: string) {\n function error(text: string) {\n return new Error(`${messagePrefix} \"${relativeFile}\" in the \"${asarFile}\" ${text}`)\n }\n\n let fs\n try {\n fs = await readAsar(asarFile)\n } catch (e: any) {\n throw error(`is corrupted: ${e}`)\n }\n\n let stat: Node | null\n try {\n stat = fs.getFile(relativeFile)\n } catch (e: any) {\n const fileStat = await statOrNull(asarFile)\n if (fileStat == null) {\n throw error(`does not exist. Seems like a wrong configuration.`)\n }\n\n // asar throws error on access to undefined object (info.link)\n stat = null\n }\n\n if (stat == null) {\n throw error(`does not exist. Seems like a wrong configuration.`)\n }\n if (stat.size === 0) {\n throw error(`is corrupted: size 0`)\n }\n}\n"]}
mc_test/node_modules/app-builder-lib/out/asar/asarUtil.d.ts (+1 line; generated, vendored, executable file)
@@ -0,0 +1 @@
export {};
mc_test/node_modules/app-builder-lib/out/asar/asarUtil.js (+241 lines; generated, vendored, executable file)
@@ -0,0 +1,241 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.AsarPackager = void 0;
const builder_util_1 = require("builder-util");
const fs_1 = require("builder-util/out/fs");
const fs_2 = require("fs");
const promises_1 = require("fs/promises");
const path = require("path");
const appFileCopier_1 = require("../util/appFileCopier");
const asar_1 = require("./asar");
const integrity_1 = require("./integrity");
const unpackDetector_1 = require("./unpackDetector");
// eslint-disable-next-line @typescript-eslint/no-var-requires
const pickle = require("chromium-pickle-js");
/** @internal */
class AsarPackager {
    constructor(src, destination, options, unpackPattern) {
        this.src = src;
        this.destination = destination;
        this.options = options;
        this.unpackPattern = unpackPattern;
        this.fs = new asar_1.AsarFilesystem(this.src);
        this.outFile = path.join(destination, "app.asar");
        this.unpackedDest = `${this.outFile}.unpacked`;
    }
    // sort files to minimize file change (i.e. asar file is not changed dramatically on small change)
    async pack(fileSets, packager) {
        if (this.options.ordering != null) {
            // ordering doesn't support transformed files, but ordering is not used functionality - wait user report to fix it
            await order(fileSets[0].files, this.options.ordering, fileSets[0].src);
        }
        await (0, promises_1.mkdir)(path.dirname(this.outFile), { recursive: true });
        const unpackedFileIndexMap = new Map();
        for (const fileSet of fileSets) {
            unpackedFileIndexMap.set(fileSet, await this.createPackageFromFiles(fileSet, packager.info));
        }
        await this.writeAsarFile(fileSets, unpackedFileIndexMap);
    }
    async createPackageFromFiles(fileSet, packager) {
        const metadata = fileSet.metadata;
        // search auto unpacked dir
        const unpackedDirs = new Set();
        const rootForAppFilesWithoutAsar = path.join(this.destination, "app");
        if (this.options.smartUnpack !== false) {
            await (0, unpackDetector_1.detectUnpackedDirs)(fileSet, unpackedDirs, this.unpackedDest, rootForAppFilesWithoutAsar);
        }
        const dirToCreateForUnpackedFiles = new Set(unpackedDirs);
        const correctDirNodeUnpackedFlag = async (filePathInArchive, dirNode) => {
            for (const dir of unpackedDirs) {
                if (filePathInArchive.length > dir.length + 2 && filePathInArchive[dir.length] === path.sep && filePathInArchive.startsWith(dir)) {
                    dirNode.unpacked = true;
                    unpackedDirs.add(filePathInArchive);
                    // not all dirs marked as unpacked after first iteration - because node module dir can be marked as unpacked after processing node module dir content
                    // e.g. node-notifier/example/advanced.js processed, but only on process vendor/terminal-notifier.app module will be marked as unpacked
                    await (0, promises_1.mkdir)(path.join(this.unpackedDest, filePathInArchive), { recursive: true });
                    break;
                }
            }
        };
        const transformedFiles = fileSet.transformedFiles;
        const taskManager = new builder_util_1.AsyncTaskManager(packager.cancellationToken);
        const fileCopier = new fs_1.FileCopier();
        let currentDirNode = null;
        let currentDirPath = null;
        const unpackedFileIndexSet = new Set();
        for (let i = 0, n = fileSet.files.length; i < n; i++) {
            const file = fileSet.files[i];
            const stat = metadata.get(file);
            if (stat == null) {
                continue;
            }
            const pathInArchive = path.relative(rootForAppFilesWithoutAsar, (0, appFileCopier_1.getDestinationPath)(file, fileSet));
            if (stat.isSymbolicLink()) {
                const s = stat;
                this.fs.getOrCreateNode(pathInArchive).link = s.relativeLink;
                s.pathInArchive = pathInArchive;
                unpackedFileIndexSet.add(i);
                continue;
            }
            let fileParent = path.dirname(pathInArchive);
            if (fileParent === ".") {
                fileParent = "";
            }
            if (currentDirPath !== fileParent) {
                if (fileParent.startsWith("..")) {
                    throw new Error(`Internal error: path must not start with "..": ${fileParent}`);
                }
                currentDirPath = fileParent;
                currentDirNode = this.fs.getOrCreateNode(fileParent);
                // do not check for root
                if (fileParent !== "" && !currentDirNode.unpacked) {
                    if (unpackedDirs.has(fileParent)) {
                        currentDirNode.unpacked = true;
                    }
                    else {
                        await correctDirNodeUnpackedFlag(fileParent, currentDirNode);
                    }
                }
            }
            const dirNode = currentDirNode;
            const newData = transformedFiles == null ? undefined : transformedFiles.get(i);
            const isUnpacked = dirNode.unpacked || (this.unpackPattern != null && this.unpackPattern(file, stat));
            const integrity = newData === undefined ? await (0, integrity_1.hashFile)(file) : (0, integrity_1.hashFileContents)(newData);
            this.fs.addFileNode(file, dirNode, newData == undefined ? stat.size : Buffer.byteLength(newData), isUnpacked, stat, integrity);
            if (isUnpacked) {
                if (!dirNode.unpacked && !dirToCreateForUnpackedFiles.has(fileParent)) {
                    dirToCreateForUnpackedFiles.add(fileParent);
                    await (0, promises_1.mkdir)(path.join(this.unpackedDest, fileParent), { recursive: true });
                }
                const unpackedFile = path.join(this.unpackedDest, pathInArchive);
                taskManager.addTask(copyFileOrData(fileCopier, newData, file, unpackedFile, stat));
                if (taskManager.tasks.length > fs_1.MAX_FILE_REQUESTS) {
                    await taskManager.awaitTasks();
                }
                unpackedFileIndexSet.add(i);
            }
        }
        if (taskManager.tasks.length > 0) {
            await taskManager.awaitTasks();
        }
        return unpackedFileIndexSet;
    }
    writeAsarFile(fileSets, unpackedFileIndexMap) {
        return new Promise((resolve, reject) => {
            const headerPickle = pickle.createEmpty();
            headerPickle.writeString(JSON.stringify(this.fs.header));
            const headerBuf = headerPickle.toBuffer();
            const sizePickle = pickle.createEmpty();
            sizePickle.writeUInt32(headerBuf.length);
            const sizeBuf = sizePickle.toBuffer();
            const writeStream = (0, fs_2.createWriteStream)(this.outFile);
            writeStream.on("error", reject);
            writeStream.on("close", resolve);
            writeStream.write(sizeBuf);
            let fileSetIndex = 0;
            let files = fileSets[0].files;
            let metadata = fileSets[0].metadata;
            let transformedFiles = fileSets[0].transformedFiles;
            let unpackedFileIndexSet = unpackedFileIndexMap.get(fileSets[0]);
            const w = (index) => {
                while (true) {
                    if (index >= files.length) {
                        if (++fileSetIndex >= fileSets.length) {
                            writeStream.end();
                            return;
                        }
                        else {
                            files = fileSets[fileSetIndex].files;
                            metadata = fileSets[fileSetIndex].metadata;
                            transformedFiles = fileSets[fileSetIndex].transformedFiles;
                            unpackedFileIndexSet = unpackedFileIndexMap.get(fileSets[fileSetIndex]);
                            index = 0;
                        }
                    }
                    if (!unpackedFileIndexSet.has(index)) {
                        break;
                    }
                    else {
                        const stat = metadata.get(files[index]);
                        if (stat != null && stat.isSymbolicLink()) {
                            (0, fs_2.symlink)(stat.linkRelativeToFile, path.join(this.unpackedDest, stat.pathInArchive), () => w(index + 1));
                            return;
                        }
                    }
                    index++;
                }
                const data = transformedFiles == null ? null : transformedFiles.get(index);
                const file = files[index];
                if (data !== null && data !== undefined) {
                    writeStream.write(data, () => w(index + 1));
                    return;
                }
                // https://github.com/yarnpkg/yarn/pull/3539
                const stat = metadata.get(file);
                if (stat != null && stat.size < 2 * 1024 * 1024) {
                    (0, promises_1.readFile)(file)
                        .then(it => {
                        writeStream.write(it, () => w(index + 1));
                    })
                        .catch((e) => reject(`Cannot read file ${file}: ${e.stack || e}`));
                }
                else {
                    const readStream = (0, fs_2.createReadStream)(file);
                    readStream.on("error", reject);
                    readStream.once("end", () => w(index + 1));
                    readStream.on("open", () => {
                        readStream.pipe(writeStream, {
                            end: false,
                        });
                    });
                }
            };
            writeStream.write(headerBuf, () => w(0));
        });
    }
}
exports.AsarPackager = AsarPackager;
async function order(filenames, orderingFile, src) {
    const orderingFiles = (await (0, promises_1.readFile)(orderingFile, "utf8")).split("\n").map(line => {
        if (line.indexOf(":") !== -1) {
            line = line.split(":").pop();
        }
        line = line.trim();
        if (line[0] === "/") {
            line = line.slice(1);
        }
        return line;
    });
    const ordering = [];
    for (const file of orderingFiles) {
        const pathComponents = file.split(path.sep);
        for (const pathComponent of pathComponents) {
            ordering.push(path.join(src, pathComponent));
        }
    }
    const sortedFiles = [];
    let missing = 0;
    const total = filenames.length;
    for (const file of ordering) {
        if (!sortedFiles.includes(file) && filenames.includes(file)) {
            sortedFiles.push(file);
        }
    }
    for (const file of filenames) {
        if (!sortedFiles.includes(file)) {
            sortedFiles.push(file);
            missing += 1;
        }
    }
    builder_util_1.log.info({ coverage: ((total - missing) / total) * 100 }, "ordering files in ASAR archive");
    return sortedFiles;
}
function copyFileOrData(fileCopier, data, source, destination, stats) {
    if (data == null) {
        return fileCopier.copy(source, destination, stats);
    }
    else {
        return (0, promises_1.writeFile)(destination, data);
    }
}
//# sourceMappingURL=asarUtil.js.map
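writeAsarFile lays the archive out as: an 8-byte pickle carrying the header length, the pickled JSON header, then the packed file contents back to back (unpacked files and symlinks go to app.asar.unpacked instead). A small TypeScript sketch of the resulting offset arithmetic, mirroring readFileFromAsar in asar.js above (the helper name is ours, not the library's):

// First byte of a packed file inside app.asar: after the size pickle
// (8 bytes) and the header pickle (headerSize bytes), at the node's offset.
function absoluteOffset(headerSize: number, node: { offset?: string }): number {
    // offset is serialized as a string because Electron expects it that way
    // (see the "electron expects string" comment in addFileNode).
    return 8 + headerSize + parseInt(node.offset ?? "0", 10)
}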
mc_test/node_modules/app-builder-lib/out/asar/asarUtil.js.map (+1 line; generated, vendored, executable file)
File diff suppressed because one or more lines are too long
mc_test/node_modules/app-builder-lib/out/asar/integrity.d.ts (+16 lines; generated, vendored, executable file)
@@ -0,0 +1,16 @@
/// <reference types="node" />
import { NodeIntegrity } from "./asar";
export interface AsarIntegrityOptions {
    readonly resourcesPath: string;
    readonly resourcesRelativePath: string;
}
export interface HeaderHash {
    algorithm: "SHA256";
    hash: string;
}
export interface AsarIntegrity {
    [key: string]: HeaderHash;
}
export declare function computeData({ resourcesPath, resourcesRelativePath }: AsarIntegrityOptions): Promise<AsarIntegrity>;
export declare function hashFile(file: string, blockSize?: number): Promise<NodeIntegrity>;
export declare function hashFileContents(contents: Buffer | string, blockSize?: number): NodeIntegrity;
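An illustrative computeData result in TypeScript (hypothetical key and placeholder hash, not real output): the map is keyed by the asar path relative to the app's resources directory, and the hash covers the pickled JSON header, not the whole archive.

import { AsarIntegrity } from "app-builder-lib/out/asar/integrity"

// Assumed shape for computeData({ resourcesPath, resourcesRelativePath: "resources" })
// after packing a single archive.
const sample: AsarIntegrity = {
    "resources/app.asar": {
        algorithm: "SHA256",
        hash: "<hex-encoded sha256 of the asar header>",
    },
}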
mc_test/node_modules/app-builder-lib/out/asar/integrity.js (+89 lines; generated, vendored, executable file)
@@ -0,0 +1,89 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.hashFileContents = exports.hashFile = exports.computeData = void 0;
const bluebird_lst_1 = require("bluebird-lst");
const crypto_1 = require("crypto");
const fs_1 = require("fs");
const promises_1 = require("fs/promises");
const path = require("path");
const asar_1 = require("./asar");
async function computeData({ resourcesPath, resourcesRelativePath }) {
    // sort to produce constant result
    const names = (await (0, promises_1.readdir)(resourcesPath)).filter(it => it.endsWith(".asar")).sort();
    const checksums = await bluebird_lst_1.default.map(names, it => hashHeader(path.join(resourcesPath, it)));
    const result = {};
    for (let i = 0; i < names.length; i++) {
        result[path.join(resourcesRelativePath, names[i])] = checksums[i];
    }
    return result;
}
exports.computeData = computeData;
async function hashHeader(file) {
    const hash = (0, crypto_1.createHash)("sha256");
    const { header } = await (0, asar_1.readAsarHeader)(file);
    hash.update(header);
    return {
        algorithm: "SHA256",
        hash: hash.digest("hex"),
    };
}
function hashFile(file, blockSize = 4 * 1024 * 1024) {
    return new Promise((resolve, reject) => {
        const hash = (0, crypto_1.createHash)("sha256");
        const blocks = new Array();
        let blockBytes = 0;
        let blockHash = (0, crypto_1.createHash)("sha256");
        function updateBlockHash(chunk) {
            let off = 0;
            while (off < chunk.length) {
                const toHash = Math.min(blockSize - blockBytes, chunk.length - off);
                blockHash.update(chunk.slice(off, off + toHash));
                off += toHash;
                blockBytes += toHash;
                if (blockBytes === blockSize) {
                    blocks.push(blockHash.digest("hex"));
                    blockHash = (0, crypto_1.createHash)("sha256");
                    blockBytes = 0;
                }
            }
        }
        (0, fs_1.createReadStream)(file)
            .on("data", it => {
            // Note that `it` is a Buffer anyway so this cast is a no-op
            updateBlockHash(Buffer.from(it));
            hash.update(it);
        })
            .on("error", reject)
            .on("end", () => {
            if (blockBytes !== 0) {
                blocks.push(blockHash.digest("hex"));
            }
            resolve({
                algorithm: "SHA256",
                hash: hash.digest("hex"),
                blockSize,
                blocks,
            });
        });
    });
}
exports.hashFile = hashFile;
function hashFileContents(contents, blockSize = 4 * 1024 * 1024) {
    const buffer = Buffer.from(contents);
    const hash = (0, crypto_1.createHash)("sha256");
    hash.update(buffer);
    const blocks = new Array();
    for (let off = 0; off < buffer.length; off += blockSize) {
        const blockHash = (0, crypto_1.createHash)("sha256");
        blockHash.update(buffer.slice(off, off + blockSize));
        blocks.push(blockHash.digest("hex"));
    }
    return {
        algorithm: "SHA256",
        hash: hash.digest("hex"),
        blockSize,
        blocks,
    };
}
exports.hashFileContents = hashFileContents;
//# sourceMappingURL=integrity.js.map
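Both hash functions produce a whole-file SHA256 plus per-block SHA256s over fixed 4 MiB windows. A hedged TypeScript sketch of the consumer side: recomputing a NodeIntegrity for a buffer and comparing it against a recorded one (actual verification happens in Electron at runtime, not in this module; the helper name is ours):

import { NodeIntegrity } from "app-builder-lib/out/asar/asar"
import { hashFileContents } from "app-builder-lib/out/asar/integrity"

function matches(data: Buffer, recorded: NodeIntegrity): boolean {
    // Re-hash with the same block size so block boundaries line up.
    const actual = hashFileContents(data, recorded.blockSize)
    return (
        actual.hash === recorded.hash &&
        actual.blocks.length === recorded.blocks.length &&
        actual.blocks.every((b, i) => b === recorded.blocks[i])
    )
}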
mc_test/node_modules/app-builder-lib/out/asar/integrity.js.map (+1 line; generated, vendored, executable file)
File diff suppressed because one or more lines are too long
mc_test/node_modules/app-builder-lib/out/asar/unpackDetector.d.ts (+1 line; generated, vendored, executable file)
@@ -0,0 +1 @@
export declare function isLibOrExe(file: string): boolean;
mc_test/node_modules/app-builder-lib/out/asar/unpackDetector.js (+108 lines; generated, vendored, executable file)
@@ -0,0 +1,108 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.detectUnpackedDirs = exports.isLibOrExe = void 0;
const bluebird_lst_1 = require("bluebird-lst");
const builder_util_1 = require("builder-util");
const fs_1 = require("builder-util/out/fs");
const fs_extra_1 = require("fs-extra");
const isbinaryfile_1 = require("isbinaryfile");
const path = require("path");
const fileTransformer_1 = require("../fileTransformer");
const appFileCopier_1 = require("../util/appFileCopier");
function addValue(map, key, value) {
    let list = map.get(key);
    if (list == null) {
        list = [value];
        map.set(key, list);
    }
    else {
        list.push(value);
    }
}
function isLibOrExe(file) {
    return file.endsWith(".dll") || file.endsWith(".exe") || file.endsWith(".dylib") || file.endsWith(".so");
}
exports.isLibOrExe = isLibOrExe;
/** @internal */
async function detectUnpackedDirs(fileSet, autoUnpackDirs, unpackedDest, rootForAppFilesWithoutAsar) {
    const dirToCreate = new Map();
    const metadata = fileSet.metadata;
    function addParents(child, root) {
        child = path.dirname(child);
        if (autoUnpackDirs.has(child)) {
            return;
        }
        do {
            autoUnpackDirs.add(child);
            const p = path.dirname(child);
            // create parent dir to be able to copy file later without directory existence check
            addValue(dirToCreate, p, path.basename(child));
            if (child === root || p === root || autoUnpackDirs.has(p)) {
                break;
            }
            child = p;
        } while (true);
        autoUnpackDirs.add(root);
    }
    for (let i = 0, n = fileSet.files.length; i < n; i++) {
        const file = fileSet.files[i];
        const index = file.lastIndexOf(fileTransformer_1.NODE_MODULES_PATTERN);
        if (index < 0) {
            continue;
        }
        let nextSlashIndex = file.indexOf(path.sep, index + fileTransformer_1.NODE_MODULES_PATTERN.length + 1);
        if (nextSlashIndex < 0) {
            continue;
        }
        if (file[index + fileTransformer_1.NODE_MODULES_PATTERN.length] === "@") {
            nextSlashIndex = file.indexOf(path.sep, nextSlashIndex + 1);
        }
        if (!metadata.get(file).isFile()) {
            continue;
        }
        const packageDir = file.substring(0, nextSlashIndex);
        const packageDirPathInArchive = path.relative(rootForAppFilesWithoutAsar, (0, appFileCopier_1.getDestinationPath)(packageDir, fileSet));
        const pathInArchive = path.relative(rootForAppFilesWithoutAsar, (0, appFileCopier_1.getDestinationPath)(file, fileSet));
        if (autoUnpackDirs.has(packageDirPathInArchive)) {
            // if package dir is unpacked, any file also unpacked
            addParents(pathInArchive, packageDirPathInArchive);
            continue;
        }
        // https://github.com/electron-userland/electron-builder/issues/2679
        let shouldUnpack = false;
        // ffprobe-static and ffmpeg-static are known packages to always unpack
        const moduleName = path.basename(packageDir);
        if (moduleName === "ffprobe-static" || moduleName === "ffmpeg-static" || isLibOrExe(file)) {
            shouldUnpack = true;
        }
        else if (!file.includes(".", nextSlashIndex)) {
            shouldUnpack = !!(0, isbinaryfile_1.isBinaryFileSync)(file);
        }
        if (!shouldUnpack) {
            continue;
        }
        if (builder_util_1.log.isDebugEnabled) {
            builder_util_1.log.debug({ file: pathInArchive, reason: "contains executable code" }, "not packed into asar archive");
        }
        addParents(pathInArchive, packageDirPathInArchive);
    }
    if (dirToCreate.size > 0) {
        await (0, fs_extra_1.mkdir)(`${unpackedDest + path.sep}node_modules`, { recursive: true });
        // child directories should be not created asynchronously - parent directories should be created first
        await bluebird_lst_1.default.map(dirToCreate.keys(), async (parentDir) => {
            const base = unpackedDest + path.sep + parentDir;
            await (0, fs_extra_1.mkdir)(base, { recursive: true });
            await bluebird_lst_1.default.each(dirToCreate.get(parentDir), (it) => {
                if (dirToCreate.has(parentDir + path.sep + it)) {
                    // already created
                    return null;
                }
                else {
                    return (0, fs_extra_1.mkdir)(base + path.sep + it, { recursive: true });
                }
            });
        }, fs_1.CONCURRENCY);
    }
}
exports.detectUnpackedDirs = detectUnpackedDirs;
//# sourceMappingURL=unpackDetector.js.map
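In short, detectUnpackedDirs leaves a node_modules file out of the archive when its package is ffprobe-static or ffmpeg-static, when isLibOrExe matches, or when an extensionless file looks binary. A tiny TypeScript illustration of the exported helper (hypothetical paths):

import { isLibOrExe } from "app-builder-lib/out/asar/unpackDetector"

console.log(isLibOrExe("node_modules/ffmpeg-static/bin/ffmpeg.exe"))     // true
console.log(isLibOrExe("node_modules/sharp/vendor/lib/libvips.dylib"))   // true
// .node addons do not match this particular helper (only .dll/.exe/.dylib/.so do):
console.log(isLibOrExe("node_modules/sharp/build/Release/sharp.node"))   // false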
mc_test/node_modules/app-builder-lib/out/asar/unpackDetector.js.map (+1 line; generated, vendored, executable file)
File diff suppressed because one or more lines are too long