This commit is contained in:
root
2025-11-25 09:56:15 +03:00
commit 68c8f0e80d
23717 changed files with 3200521 additions and 0 deletions

22
mc_test/node_modules/builder-util-runtime/LICENSE generated vendored Executable file

@ -0,0 +1,22 @@
The MIT License (MIT)
Copyright (c) 2015 Loopline Systems
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

mc_test/node_modules/builder-util-runtime/out/CancellationToken.d.ts

@ -0,0 +1,18 @@
/// <reference types="node" />
import { EventEmitter } from "events";
export declare class CancellationToken extends EventEmitter {
private parentCancelHandler;
private _cancelled;
get cancelled(): boolean;
private _parent;
set parent(value: CancellationToken);
constructor(parent?: CancellationToken);
cancel(): void;
private onCancel;
createPromise<R>(callback: (resolve: (thenableOrResult: R | PromiseLike<R>) => void, reject: (error: Error) => void, onCancel: (callback: () => void) => void) => void): Promise<R>;
private removeParentCancelHandler;
dispose(): void;
}
export declare class CancellationError extends Error {
constructor();
}
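
A minimal usage sketch of the `CancellationToken` API declared above, assuming the package is importable as `builder-util-runtime`; the timer-based delay helper is illustrative, not part of the package:

```typescript
import { CancellationToken, CancellationError } from "builder-util-runtime"

const token = new CancellationToken()

// Wrap a cancellable delay; onCancel registers cleanup for the pending timer.
const delay = (ms: number) =>
  token.createPromise<boolean>((resolve, _reject, onCancel) => {
    const timer = setTimeout(() => resolve(true), ms)
    onCancel(() => clearTimeout(timer))
  })

delay(10000).catch(e => {
  if (e instanceof CancellationError) {
    console.log("operation was cancelled")
  }
})

token.cancel() // rejects the pending promise with CancellationError
```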

mc_test/node_modules/builder-util-runtime/out/CancellationToken.js

@ -0,0 +1,108 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CancellationError = exports.CancellationToken = void 0;
const events_1 = require("events");
class CancellationToken extends events_1.EventEmitter {
get cancelled() {
return this._cancelled || (this._parent != null && this._parent.cancelled);
}
set parent(value) {
this.removeParentCancelHandler();
this._parent = value;
this.parentCancelHandler = () => this.cancel();
this._parent.onCancel(this.parentCancelHandler);
}
// babel cannot compile ... correctly for super calls
constructor(parent) {
super();
this.parentCancelHandler = null;
this._parent = null;
this._cancelled = false;
if (parent != null) {
this.parent = parent;
}
}
cancel() {
this._cancelled = true;
this.emit("cancel");
}
onCancel(handler) {
if (this.cancelled) {
handler();
}
else {
this.once("cancel", handler);
}
}
createPromise(callback) {
if (this.cancelled) {
return Promise.reject(new CancellationError());
}
const finallyHandler = () => {
if (cancelHandler != null) {
try {
this.removeListener("cancel", cancelHandler);
cancelHandler = null;
}
catch (ignore) {
// ignore
}
}
};
let cancelHandler = null;
return new Promise((resolve, reject) => {
let addedCancelHandler = null;
cancelHandler = () => {
try {
if (addedCancelHandler != null) {
addedCancelHandler();
addedCancelHandler = null;
}
}
finally {
reject(new CancellationError());
}
};
if (this.cancelled) {
cancelHandler();
return;
}
this.onCancel(cancelHandler);
callback(resolve, reject, (callback) => {
addedCancelHandler = callback;
});
})
.then(it => {
finallyHandler();
return it;
})
.catch((e) => {
finallyHandler();
throw e;
});
}
removeParentCancelHandler() {
const parent = this._parent;
if (parent != null && this.parentCancelHandler != null) {
parent.removeListener("cancel", this.parentCancelHandler);
this.parentCancelHandler = null;
}
}
dispose() {
try {
this.removeParentCancelHandler();
}
finally {
this.removeAllListeners();
this._parent = null;
}
}
}
exports.CancellationToken = CancellationToken;
class CancellationError extends Error {
constructor() {
super("cancelled");
}
}
exports.CancellationError = CancellationError;
//# sourceMappingURL=CancellationToken.js.map

File diff suppressed because one or more lines are too long

mc_test/node_modules/builder-util-runtime/out/ProgressCallbackTransform.d.ts

@ -0,0 +1,22 @@
/// <reference types="node" />
import { Transform } from "stream";
import { CancellationToken } from "./CancellationToken";
export interface ProgressInfo {
total: number;
delta: number;
transferred: number;
percent: number;
bytesPerSecond: number;
}
export declare class ProgressCallbackTransform extends Transform {
private readonly total;
private readonly cancellationToken;
private readonly onProgress;
private start;
private transferred;
private delta;
private nextUpdate;
constructor(total: number, cancellationToken: CancellationToken, onProgress: (info: ProgressInfo) => any);
_transform(chunk: any, encoding: string, callback: any): void;
_flush(callback: any): void;
}
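
A sketch of wiring this transform into a Node stream pipeline; the file names are placeholders, and the example simply copies a file while reporting progress roughly once per second:

```typescript
import { createReadStream, createWriteStream, statSync } from "fs"
import { pipeline } from "stream"
import { CancellationToken, ProgressCallbackTransform, ProgressInfo } from "builder-util-runtime"

const source = "./big-file.bin" // placeholder input path
const total = statSync(source).size
const token = new CancellationToken()

pipeline(
  createReadStream(source),
  new ProgressCallbackTransform(total, token, (info: ProgressInfo) => {
    console.log(`${info.percent.toFixed(1)}% (${info.bytesPerSecond} B/s)`)
  }),
  createWriteStream("./big-file.copy.bin"), // placeholder output path
  error => console.log(error ? `failed: ${error.message}` : "done")
)
```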

mc_test/node_modules/builder-util-runtime/out/ProgressCallbackTransform.js

@ -0,0 +1,54 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ProgressCallbackTransform = void 0;
const stream_1 = require("stream");
class ProgressCallbackTransform extends stream_1.Transform {
constructor(total, cancellationToken, onProgress) {
super();
this.total = total;
this.cancellationToken = cancellationToken;
this.onProgress = onProgress;
this.start = Date.now();
this.transferred = 0;
this.delta = 0;
this.nextUpdate = this.start + 1000;
}
_transform(chunk, encoding, callback) {
if (this.cancellationToken.cancelled) {
callback(new Error("cancelled"), null);
return;
}
this.transferred += chunk.length;
this.delta += chunk.length;
const now = Date.now();
if (now >= this.nextUpdate && this.transferred !== this.total /* will be emitted on _flush */) {
this.nextUpdate = now + 1000;
this.onProgress({
total: this.total,
delta: this.delta,
transferred: this.transferred,
percent: (this.transferred / this.total) * 100,
bytesPerSecond: Math.round(this.transferred / ((now - this.start) / 1000)),
});
this.delta = 0;
}
callback(null, chunk);
}
_flush(callback) {
if (this.cancellationToken.cancelled) {
callback(new Error("cancelled"));
return;
}
this.onProgress({
total: this.total,
delta: this.delta,
transferred: this.total,
percent: 100,
bytesPerSecond: Math.round(this.transferred / ((Date.now() - this.start) / 1000)),
});
this.delta = 0;
callback(null);
}
}
exports.ProgressCallbackTransform = ProgressCallbackTransform;
//# sourceMappingURL=ProgressCallbackTransform.js.map

mc_test/node_modules/builder-util-runtime/out/ProgressCallbackTransform.js.map

@ -0,0 +1 @@
{"version":3,"file":"ProgressCallbackTransform.js","sourceRoot":"","sources":["../src/ProgressCallbackTransform.ts"],"names":[],"mappings":";;;AAAA,mCAAkC;AAWlC,MAAa,yBAA0B,SAAQ,kBAAS;IAOtD,YACmB,KAAa,EACb,iBAAoC,EACpC,UAAuC;QAExD,KAAK,EAAE,CAAA;QAJU,UAAK,GAAL,KAAK,CAAQ;QACb,sBAAiB,GAAjB,iBAAiB,CAAmB;QACpC,eAAU,GAAV,UAAU,CAA6B;QATlD,UAAK,GAAG,IAAI,CAAC,GAAG,EAAE,CAAA;QAClB,gBAAW,GAAG,CAAC,CAAA;QACf,UAAK,GAAG,CAAC,CAAA;QAET,eAAU,GAAG,IAAI,CAAC,KAAK,GAAG,IAAI,CAAA;IAQtC,CAAC;IAED,UAAU,CAAC,KAAU,EAAE,QAAgB,EAAE,QAAa;QACpD,IAAI,IAAI,CAAC,iBAAiB,CAAC,SAAS,EAAE,CAAC;YACrC,QAAQ,CAAC,IAAI,KAAK,CAAC,WAAW,CAAC,EAAE,IAAI,CAAC,CAAA;YACtC,OAAM;QACR,CAAC;QAED,IAAI,CAAC,WAAW,IAAI,KAAK,CAAC,MAAM,CAAA;QAChC,IAAI,CAAC,KAAK,IAAI,KAAK,CAAC,MAAM,CAAA;QAE1B,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,EAAE,CAAA;QACtB,IAAI,GAAG,IAAI,IAAI,CAAC,UAAU,IAAI,IAAI,CAAC,WAAW,KAAK,IAAI,CAAC,KAAK,CAAC,+BAA+B,EAAE,CAAC;YAC9F,IAAI,CAAC,UAAU,GAAG,GAAG,GAAG,IAAI,CAAA;YAE5B,IAAI,CAAC,UAAU,CAAC;gBACd,KAAK,EAAE,IAAI,CAAC,KAAK;gBACjB,KAAK,EAAE,IAAI,CAAC,KAAK;gBACjB,WAAW,EAAE,IAAI,CAAC,WAAW;gBAC7B,OAAO,EAAE,CAAC,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,GAAG;gBAC9C,cAAc,EAAE,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;aAC3E,CAAC,CAAA;YACF,IAAI,CAAC,KAAK,GAAG,CAAC,CAAA;QAChB,CAAC;QAED,QAAQ,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;IACvB,CAAC;IAED,MAAM,CAAC,QAAa;QAClB,IAAI,IAAI,CAAC,iBAAiB,CAAC,SAAS,EAAE,CAAC;YACrC,QAAQ,CAAC,IAAI,KAAK,CAAC,WAAW,CAAC,CAAC,CAAA;YAChC,OAAM;QACR,CAAC;QAED,IAAI,CAAC,UAAU,CAAC;YACd,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,WAAW,EAAE,IAAI,CAAC,KAAK;YACvB,OAAO,EAAE,GAAG;YACZ,cAAc,EAAE,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;SAClF,CAAC,CAAA;QACF,IAAI,CAAC,KAAK,GAAG,CAAC,CAAA;QAEd,QAAQ,CAAC,IAAI,CAAC,CAAA;IAChB,CAAC;CACF;AA1DD,8DA0DC","sourcesContent":["import { Transform } from \"stream\"\nimport { CancellationToken } from \"./CancellationToken\"\n\nexport interface ProgressInfo {\n total: number\n delta: number\n transferred: number\n percent: number\n bytesPerSecond: number\n}\n\nexport class ProgressCallbackTransform extends Transform {\n private start = Date.now()\n private transferred = 0\n private delta = 0\n\n private nextUpdate = this.start + 1000\n\n constructor(\n private readonly total: number,\n private readonly cancellationToken: CancellationToken,\n private readonly onProgress: (info: ProgressInfo) => any\n ) {\n super()\n }\n\n _transform(chunk: any, encoding: string, callback: any) {\n if (this.cancellationToken.cancelled) {\n callback(new Error(\"cancelled\"), null)\n return\n }\n\n this.transferred += chunk.length\n this.delta += chunk.length\n\n const now = Date.now()\n if (now >= this.nextUpdate && this.transferred !== this.total /* will be emitted on _flush */) {\n this.nextUpdate = now + 1000\n\n this.onProgress({\n total: this.total,\n delta: this.delta,\n transferred: this.transferred,\n percent: (this.transferred / this.total) * 100,\n bytesPerSecond: Math.round(this.transferred / ((now - this.start) / 1000)),\n })\n this.delta = 0\n }\n\n callback(null, chunk)\n }\n\n _flush(callback: any): void {\n if (this.cancellationToken.cancelled) {\n callback(new Error(\"cancelled\"))\n return\n }\n\n this.onProgress({\n total: this.total,\n delta: this.delta,\n transferred: this.total,\n percent: 100,\n bytesPerSecond: Math.round(this.transferred / ((Date.now() - this.start) / 1000)),\n })\n this.delta = 0\n\n callback(null)\n }\n}\n"]}

mc_test/node_modules/builder-util-runtime/out/blockMapApi.d.ts

@ -0,0 +1,12 @@
export interface FileChunks {
checksums: Array<string>;
sizes: Array<number>;
}
export interface BlockMap {
version: "1" | "2";
files: Array<BlockMapFile>;
}
export interface BlockMapFile extends FileChunks {
name: string;
offset: number;
}
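
For illustration, a value conforming to these interfaces; every name, checksum, and size below is made up:

```typescript
import { BlockMap } from "builder-util-runtime"

const blockMap: BlockMap = {
  version: "2",
  files: [
    {
      name: "app-1.2.3.exe",             // made-up file name
      offset: 0,
      checksums: ["aGVsbG8", "d29ybGQ"], // made-up per-block checksums
      sizes: [32768, 32768],             // matching block sizes in bytes
    },
  ],
}
```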

mc_test/node_modules/builder-util-runtime/out/blockMapApi.js

@ -0,0 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=blockMapApi.js.map

mc_test/node_modules/builder-util-runtime/out/blockMapApi.js.map

@ -0,0 +1 @@
{"version":3,"file":"blockMapApi.js","sourceRoot":"","sources":["../src/blockMapApi.ts"],"names":[],"mappings":"","sourcesContent":["export interface FileChunks {\n checksums: Array<string>\n sizes: Array<number>\n}\n\nexport interface BlockMap {\n version: \"1\" | \"2\"\n files: Array<BlockMapFile>\n}\n\nexport interface BlockMapFile extends FileChunks {\n name: string\n offset: number\n}\n"]}

mc_test/node_modules/builder-util-runtime/out/httpExecutor.d.ts

@ -0,0 +1,76 @@
/// <reference types="node" />
/// <reference types="node" />
/// <reference types="node" />
/// <reference types="node" />
/// <reference types="node" />
import { BinaryToTextEncoding } from "crypto";
import { IncomingMessage, OutgoingHttpHeader, OutgoingHttpHeaders, RequestOptions } from "http";
import { Transform } from "stream";
import { URL } from "url";
import { CancellationToken } from "./CancellationToken";
import { ProgressInfo } from "./ProgressCallbackTransform";
export interface RequestHeaders extends OutgoingHttpHeaders {
[key: string]: OutgoingHttpHeader | undefined;
}
export interface DownloadOptions {
readonly headers?: OutgoingHttpHeaders | null;
readonly sha2?: string | null;
readonly sha512?: string | null;
readonly cancellationToken: CancellationToken;
onProgress?: (progress: ProgressInfo) => void;
}
export declare function createHttpError(response: IncomingMessage, description?: any | null): HttpError;
export declare class HttpError extends Error {
readonly statusCode: number;
readonly description: any | null;
constructor(statusCode: number, message?: string, description?: any | null);
isServerError(): boolean;
}
export declare function parseJson(result: Promise<string | null>): Promise<any>;
interface Request {
abort: () => void;
end: (data?: Buffer) => void;
}
export declare abstract class HttpExecutor<T extends Request> {
protected readonly maxRedirects = 10;
request(options: RequestOptions, cancellationToken?: CancellationToken, data?: {
[name: string]: any;
} | null): Promise<string | null>;
doApiRequest(options: RequestOptions, cancellationToken: CancellationToken, requestProcessor: (request: T, reject: (error: Error) => void) => void, redirectCount?: number): Promise<string>;
protected addRedirectHandlers(request: any, options: RequestOptions, reject: (error: Error) => void, redirectCount: number, handler: (options: RequestOptions) => void): void;
addErrorAndTimeoutHandlers(request: any, reject: (error: Error) => void, timeout?: number): void;
private handleResponse;
abstract createRequest(options: RequestOptions, callback: (response: any) => void): T;
downloadToBuffer(url: URL, options: DownloadOptions): Promise<Buffer>;
protected doDownload(requestOptions: RequestOptions, options: DownloadCallOptions, redirectCount: number): void;
protected createMaxRedirectError(): Error;
private addTimeOutHandler;
static prepareRedirectUrlOptions(redirectUrl: string, options: RequestOptions): RequestOptions;
static retryOnServerError(task: () => Promise<any>, maxRetries?: number): Promise<any>;
}
export interface DownloadCallOptions {
responseHandler: ((response: IncomingMessage, callback: (error: Error | null) => void) => void) | null;
onCancel: (callback: () => void) => void;
callback: (error: Error | null) => void;
options: DownloadOptions;
destination: string | null;
}
export declare function configureRequestOptionsFromUrl(url: string, options: RequestOptions): RequestOptions;
export declare function configureRequestUrl(url: URL, options: RequestOptions): void;
export declare class DigestTransform extends Transform {
readonly expected: string;
private readonly algorithm;
private readonly encoding;
private readonly digester;
private _actual;
get actual(): string | null;
isValidateOnEnd: boolean;
constructor(expected: string, algorithm?: string, encoding?: BinaryToTextEncoding);
_transform(chunk: Buffer, encoding: string, callback: any): void;
_flush(callback: any): void;
validate(): null;
}
export declare function safeGetHeader(response: any, headerKey: string): any;
export declare function configureRequestOptions(options: RequestOptions, token?: string | null, method?: "GET" | "DELETE" | "PUT" | "POST"): RequestOptions;
export declare function safeStringifyJson(data: any, skippedNames?: Set<string>): string;
export {};
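
A small sketch of the standalone request helpers declared above; `HttpExecutor` itself is abstract (a concrete executor must implement `createRequest`), so only the helpers are shown, and the URL and token are placeholders:

```typescript
import { RequestOptions } from "http"
import { configureRequestOptions, configureRequestOptionsFromUrl, safeStringifyJson } from "builder-util-runtime"

// Derive Node RequestOptions (protocol, hostname, port, path) from a URL.
const options: RequestOptions = configureRequestOptionsFromUrl("https://example.com/feed/latest.yml", {})

// Attach an Authorization header; tokens that don't start with "Basic"/"Bearer"
// are sent as `token <value>`.
configureRequestOptions(options, "Bearer placeholder-token", "GET")

// Secret-looking keys (authorization, token, password) are stripped when stringifying.
console.log(safeStringifyJson(options))
```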

428
mc_test/node_modules/builder-util-runtime/out/httpExecutor.js generated vendored Executable file

@ -0,0 +1,428 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.safeStringifyJson = exports.configureRequestOptions = exports.safeGetHeader = exports.DigestTransform = exports.configureRequestUrl = exports.configureRequestOptionsFromUrl = exports.HttpExecutor = exports.parseJson = exports.HttpError = exports.createHttpError = void 0;
const crypto_1 = require("crypto");
const debug_1 = require("debug");
const fs_1 = require("fs");
const stream_1 = require("stream");
const url_1 = require("url");
const CancellationToken_1 = require("./CancellationToken");
const index_1 = require("./index");
const ProgressCallbackTransform_1 = require("./ProgressCallbackTransform");
const debug = (0, debug_1.default)("electron-builder");
function createHttpError(response, description = null) {
return new HttpError(response.statusCode || -1, `${response.statusCode} ${response.statusMessage}` +
(description == null ? "" : "\n" + JSON.stringify(description, null, " ")) +
"\nHeaders: " +
safeStringifyJson(response.headers), description);
}
exports.createHttpError = createHttpError;
const HTTP_STATUS_CODES = new Map([
[429, "Too many requests"],
[400, "Bad request"],
[403, "Forbidden"],
[404, "Not found"],
[405, "Method not allowed"],
[406, "Not acceptable"],
[408, "Request timeout"],
[413, "Request entity too large"],
[500, "Internal server error"],
[502, "Bad gateway"],
[503, "Service unavailable"],
[504, "Gateway timeout"],
[505, "HTTP version not supported"],
]);
class HttpError extends Error {
constructor(statusCode, message = `HTTP error: ${HTTP_STATUS_CODES.get(statusCode) || statusCode}`, description = null) {
super(message);
this.statusCode = statusCode;
this.description = description;
this.name = "HttpError";
this.code = `HTTP_ERROR_${statusCode}`;
}
isServerError() {
return this.statusCode >= 500 && this.statusCode <= 599;
}
}
exports.HttpError = HttpError;
function parseJson(result) {
return result.then(it => (it == null || it.length === 0 ? null : JSON.parse(it)));
}
exports.parseJson = parseJson;
class HttpExecutor {
constructor() {
this.maxRedirects = 10;
}
request(options, cancellationToken = new CancellationToken_1.CancellationToken(), data) {
configureRequestOptions(options);
const json = data == null ? undefined : JSON.stringify(data);
const encodedData = json ? Buffer.from(json) : undefined;
if (encodedData != null) {
debug(json);
const { headers, ...opts } = options;
options = {
method: "post",
headers: {
"Content-Type": "application/json",
"Content-Length": encodedData.length,
...headers,
},
...opts,
};
}
return this.doApiRequest(options, cancellationToken, it => it.end(encodedData));
}
doApiRequest(options, cancellationToken, requestProcessor, redirectCount = 0) {
if (debug.enabled) {
debug(`Request: ${safeStringifyJson(options)}`);
}
return cancellationToken.createPromise((resolve, reject, onCancel) => {
const request = this.createRequest(options, (response) => {
try {
this.handleResponse(response, options, cancellationToken, resolve, reject, redirectCount, requestProcessor);
}
catch (e) {
reject(e);
}
});
this.addErrorAndTimeoutHandlers(request, reject, options.timeout);
this.addRedirectHandlers(request, options, reject, redirectCount, options => {
this.doApiRequest(options, cancellationToken, requestProcessor, redirectCount).then(resolve).catch(reject);
});
requestProcessor(request, reject);
onCancel(() => request.abort());
});
}
// noinspection JSUnusedLocalSymbols
// eslint-disable-next-line
addRedirectHandlers(request, options, reject, redirectCount, handler) {
// not required for NodeJS
}
addErrorAndTimeoutHandlers(request, reject, timeout = 60 * 1000) {
this.addTimeOutHandler(request, reject, timeout);
request.on("error", reject);
request.on("aborted", () => {
reject(new Error("Request has been aborted by the server"));
});
}
handleResponse(response, options, cancellationToken, resolve, reject, redirectCount, requestProcessor) {
var _a;
if (debug.enabled) {
debug(`Response: ${response.statusCode} ${response.statusMessage}, request options: ${safeStringifyJson(options)}`);
}
// we handle any other >= 400 error on request end (read detailed message in the response body)
if (response.statusCode === 404) {
// error is clear, we don't need to read detailed error description
reject(createHttpError(response, `method: ${options.method || "GET"} url: ${options.protocol || "https:"}//${options.hostname}${options.port ? `:${options.port}` : ""}${options.path}
Please double check that your authentication token is correct. Due to security reasons, actual status maybe not reported, but 404.
`));
return;
}
else if (response.statusCode === 204) {
// on DELETE request
resolve();
return;
}
const code = (_a = response.statusCode) !== null && _a !== void 0 ? _a : 0;
const shouldRedirect = code >= 300 && code < 400;
const redirectUrl = safeGetHeader(response, "location");
if (shouldRedirect && redirectUrl != null) {
if (redirectCount > this.maxRedirects) {
reject(this.createMaxRedirectError());
return;
}
this.doApiRequest(HttpExecutor.prepareRedirectUrlOptions(redirectUrl, options), cancellationToken, requestProcessor, redirectCount).then(resolve).catch(reject);
return;
}
response.setEncoding("utf8");
let data = "";
response.on("error", reject);
response.on("data", (chunk) => (data += chunk));
response.on("end", () => {
try {
if (response.statusCode != null && response.statusCode >= 400) {
const contentType = safeGetHeader(response, "content-type");
const isJson = contentType != null && (Array.isArray(contentType) ? contentType.find(it => it.includes("json")) != null : contentType.includes("json"));
reject(createHttpError(response, `method: ${options.method || "GET"} url: ${options.protocol || "https:"}//${options.hostname}${options.port ? `:${options.port}` : ""}${options.path}
Data:
${isJson ? JSON.stringify(JSON.parse(data)) : data}
`));
}
else {
resolve(data.length === 0 ? null : data);
}
}
catch (e) {
reject(e);
}
});
}
async downloadToBuffer(url, options) {
return await options.cancellationToken.createPromise((resolve, reject, onCancel) => {
const responseChunks = [];
const requestOptions = {
headers: options.headers || undefined,
// because PrivateGitHubProvider requires HttpExecutor.prepareRedirectUrlOptions logic, so, we need to redirect manually
redirect: "manual",
};
configureRequestUrl(url, requestOptions);
configureRequestOptions(requestOptions);
this.doDownload(requestOptions, {
destination: null,
options,
onCancel,
callback: error => {
if (error == null) {
resolve(Buffer.concat(responseChunks));
}
else {
reject(error);
}
},
responseHandler: (response, callback) => {
let receivedLength = 0;
response.on("data", (chunk) => {
receivedLength += chunk.length;
if (receivedLength > 524288000) {
callback(new Error("Maximum allowed size is 500 MB"));
return;
}
responseChunks.push(chunk);
});
response.on("end", () => {
callback(null);
});
},
}, 0);
});
}
doDownload(requestOptions, options, redirectCount) {
const request = this.createRequest(requestOptions, (response) => {
if (response.statusCode >= 400) {
options.callback(new Error(`Cannot download "${requestOptions.protocol || "https:"}//${requestOptions.hostname}${requestOptions.path}", status ${response.statusCode}: ${response.statusMessage}`));
return;
}
// It is possible for the response stream to fail, e.g. when a network is lost while
// response stream is in progress. Stop waiting and reject so consumer can catch the error.
response.on("error", options.callback);
// this code not relevant for Electron (redirect event instead handled)
const redirectUrl = safeGetHeader(response, "location");
if (redirectUrl != null) {
if (redirectCount < this.maxRedirects) {
this.doDownload(HttpExecutor.prepareRedirectUrlOptions(redirectUrl, requestOptions), options, redirectCount++);
}
else {
options.callback(this.createMaxRedirectError());
}
return;
}
if (options.responseHandler == null) {
configurePipes(options, response);
}
else {
options.responseHandler(response, options.callback);
}
});
this.addErrorAndTimeoutHandlers(request, options.callback, requestOptions.timeout);
this.addRedirectHandlers(request, requestOptions, options.callback, redirectCount, requestOptions => {
this.doDownload(requestOptions, options, redirectCount++);
});
request.end();
}
createMaxRedirectError() {
return new Error(`Too many redirects (> ${this.maxRedirects})`);
}
addTimeOutHandler(request, callback, timeout) {
request.on("socket", (socket) => {
socket.setTimeout(timeout, () => {
request.abort();
callback(new Error("Request timed out"));
});
});
}
static prepareRedirectUrlOptions(redirectUrl, options) {
const newOptions = configureRequestOptionsFromUrl(redirectUrl, { ...options });
const headers = newOptions.headers;
if (headers === null || headers === void 0 ? void 0 : headers.authorization) {
const parsedNewUrl = new url_1.URL(redirectUrl);
if (parsedNewUrl.hostname.endsWith(".amazonaws.com") || parsedNewUrl.searchParams.has("X-Amz-Credential")) {
delete headers.authorization;
}
}
return newOptions;
}
static retryOnServerError(task, maxRetries = 3) {
for (let attemptNumber = 0;; attemptNumber++) {
try {
return task();
}
catch (e) {
if (attemptNumber < maxRetries && ((e instanceof HttpError && e.isServerError()) || e.code === "EPIPE")) {
continue;
}
throw e;
}
}
}
}
exports.HttpExecutor = HttpExecutor;
function configureRequestOptionsFromUrl(url, options) {
const result = configureRequestOptions(options);
configureRequestUrl(new url_1.URL(url), result);
return result;
}
exports.configureRequestOptionsFromUrl = configureRequestOptionsFromUrl;
function configureRequestUrl(url, options) {
options.protocol = url.protocol;
options.hostname = url.hostname;
if (url.port) {
options.port = url.port;
}
else if (options.port) {
delete options.port;
}
options.path = url.pathname + url.search;
}
exports.configureRequestUrl = configureRequestUrl;
class DigestTransform extends stream_1.Transform {
// noinspection JSUnusedGlobalSymbols
get actual() {
return this._actual;
}
constructor(expected, algorithm = "sha512", encoding = "base64") {
super();
this.expected = expected;
this.algorithm = algorithm;
this.encoding = encoding;
this._actual = null;
this.isValidateOnEnd = true;
this.digester = (0, crypto_1.createHash)(algorithm);
}
// noinspection JSUnusedGlobalSymbols
_transform(chunk, encoding, callback) {
this.digester.update(chunk);
callback(null, chunk);
}
// noinspection JSUnusedGlobalSymbols
_flush(callback) {
this._actual = this.digester.digest(this.encoding);
if (this.isValidateOnEnd) {
try {
this.validate();
}
catch (e) {
callback(e);
return;
}
}
callback(null);
}
validate() {
if (this._actual == null) {
throw (0, index_1.newError)("Not finished yet", "ERR_STREAM_NOT_FINISHED");
}
if (this._actual !== this.expected) {
throw (0, index_1.newError)(`${this.algorithm} checksum mismatch, expected ${this.expected}, got ${this._actual}`, "ERR_CHECKSUM_MISMATCH");
}
return null;
}
}
exports.DigestTransform = DigestTransform;
function checkSha2(sha2Header, sha2, callback) {
if (sha2Header != null && sha2 != null && sha2Header !== sha2) {
callback(new Error(`checksum mismatch: expected ${sha2} but got ${sha2Header} (X-Checksum-Sha2 header)`));
return false;
}
return true;
}
function safeGetHeader(response, headerKey) {
const value = response.headers[headerKey];
if (value == null) {
return null;
}
else if (Array.isArray(value)) {
// electron API
return value.length === 0 ? null : value[value.length - 1];
}
else {
return value;
}
}
exports.safeGetHeader = safeGetHeader;
function configurePipes(options, response) {
if (!checkSha2(safeGetHeader(response, "X-Checksum-Sha2"), options.options.sha2, options.callback)) {
return;
}
const streams = [];
if (options.options.onProgress != null) {
const contentLength = safeGetHeader(response, "content-length");
if (contentLength != null) {
streams.push(new ProgressCallbackTransform_1.ProgressCallbackTransform(parseInt(contentLength, 10), options.options.cancellationToken, options.options.onProgress));
}
}
const sha512 = options.options.sha512;
if (sha512 != null) {
streams.push(new DigestTransform(sha512, "sha512", sha512.length === 128 && !sha512.includes("+") && !sha512.includes("Z") && !sha512.includes("=") ? "hex" : "base64"));
}
else if (options.options.sha2 != null) {
streams.push(new DigestTransform(options.options.sha2, "sha256", "hex"));
}
const fileOut = (0, fs_1.createWriteStream)(options.destination);
streams.push(fileOut);
let lastStream = response;
for (const stream of streams) {
stream.on("error", (error) => {
fileOut.close();
if (!options.options.cancellationToken.cancelled) {
options.callback(error);
}
});
lastStream = lastStream.pipe(stream);
}
fileOut.on("finish", () => {
;
fileOut.close(options.callback);
});
}
function configureRequestOptions(options, token, method) {
if (method != null) {
options.method = method;
}
options.headers = { ...options.headers };
const headers = options.headers;
if (token != null) {
;
headers.authorization = token.startsWith("Basic") || token.startsWith("Bearer") ? token : `token ${token}`;
}
if (headers["User-Agent"] == null) {
headers["User-Agent"] = "electron-builder";
}
if (method == null || method === "GET" || headers["Cache-Control"] == null) {
headers["Cache-Control"] = "no-cache";
}
// do not specify for node (in any case we use https module)
if (options.protocol == null && process.versions.electron != null) {
options.protocol = "https:";
}
return options;
}
exports.configureRequestOptions = configureRequestOptions;
function safeStringifyJson(data, skippedNames) {
return JSON.stringify(data, (name, value) => {
if (name.endsWith("Authorization") ||
name.endsWith("authorization") ||
name.endsWith("Password") ||
name.endsWith("PASSWORD") ||
name.endsWith("Token") ||
name.includes("password") ||
name.includes("token") ||
(skippedNames != null && skippedNames.has(name))) {
return "<stripped sensitive data>";
}
return value;
}, 2);
}
exports.safeStringifyJson = safeStringifyJson;
//# sourceMappingURL=httpExecutor.js.map
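
As a standalone sketch, `DigestTransform` (implemented above) can verify a local file against an expected checksum; the path and digest passed to the helper are hypothetical:

```typescript
import { createReadStream } from "fs"
import { Writable } from "stream"
import { pipeline } from "stream/promises"
import { DigestTransform } from "builder-util-runtime"

// Verify a file against an expected base64-encoded sha512 digest.
async function verifyChecksum(path: string, expectedSha512: string): Promise<void> {
  const digest = new DigestTransform(expectedSha512) // defaults: algorithm "sha512", encoding "base64"
  const devNull = new Writable({ write: (_chunk, _enc, cb) => cb() }) // discard the pass-through output
  // On mismatch, _flush() -> validate() rejects the pipeline with ERR_CHECKSUM_MISMATCH.
  await pipeline(createReadStream(path), digest, devNull)
  console.log("checksum ok, actual digest:", digest.actual)
}
```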

File diff suppressed because one or more lines are too long

13
mc_test/node_modules/builder-util-runtime/out/index.d.ts generated vendored Executable file

@ -0,0 +1,13 @@
export { CancellationToken, CancellationError } from "./CancellationToken";
export { HttpError, createHttpError, HttpExecutor, DownloadOptions, DigestTransform, RequestHeaders, safeGetHeader, configureRequestOptions, configureRequestOptionsFromUrl, safeStringifyJson, parseJson, configureRequestUrl, } from "./httpExecutor";
export { CustomPublishOptions, GenericServerOptions, GithubOptions, KeygenOptions, BitbucketOptions, SnapStoreOptions, PublishConfiguration, S3Options, SpacesOptions, BaseS3Options, getS3LikeProviderBaseUrl, githubUrl, PublishProvider, AllPublishOptions, } from "./publishOptions";
export { UpdateInfo, UpdateFileInfo, WindowsUpdateInfo, BlockMapDataHolder, PackageFileInfo, ReleaseNoteInfo } from "./updateInfo";
export { parseDn } from "./rfc2253Parser";
export { UUID } from "./uuid";
export { ProgressCallbackTransform, ProgressInfo } from "./ProgressCallbackTransform";
export { parseXml, XElement } from "./xml";
export { BlockMap } from "./blockMapApi";
export declare const CURRENT_APP_INSTALLER_FILE_NAME = "installer.exe";
export declare const CURRENT_APP_PACKAGE_FILE_NAME = "package.7z";
export declare function asArray<T>(v: null | undefined | T | Array<T>): Array<T>;
export declare function newError(message: string, code: string): Error;
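
A quick sketch of the small utilities declared above; the error message and code are arbitrary examples:

```typescript
import { asArray, newError, CURRENT_APP_INSTALLER_FILE_NAME } from "builder-util-runtime"

console.log(asArray(null))             // []
console.log(asArray("x64"))            // ["x64"]
console.log(asArray(["x64", "arm64"])) // ["x64", "arm64"]

const error = newError("example failure", "ERR_EXAMPLE") // arbitrary message and code
console.log((error as NodeJS.ErrnoException).code, CURRENT_APP_INSTALLER_FILE_NAME)
```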

52
mc_test/node_modules/builder-util-runtime/out/index.js generated vendored Executable file

@ -0,0 +1,52 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.newError = exports.asArray = exports.CURRENT_APP_PACKAGE_FILE_NAME = exports.CURRENT_APP_INSTALLER_FILE_NAME = exports.XElement = exports.parseXml = exports.ProgressCallbackTransform = exports.UUID = exports.parseDn = exports.githubUrl = exports.getS3LikeProviderBaseUrl = exports.configureRequestUrl = exports.parseJson = exports.safeStringifyJson = exports.configureRequestOptionsFromUrl = exports.configureRequestOptions = exports.safeGetHeader = exports.DigestTransform = exports.HttpExecutor = exports.createHttpError = exports.HttpError = exports.CancellationError = exports.CancellationToken = void 0;
var CancellationToken_1 = require("./CancellationToken");
Object.defineProperty(exports, "CancellationToken", { enumerable: true, get: function () { return CancellationToken_1.CancellationToken; } });
Object.defineProperty(exports, "CancellationError", { enumerable: true, get: function () { return CancellationToken_1.CancellationError; } });
var httpExecutor_1 = require("./httpExecutor");
Object.defineProperty(exports, "HttpError", { enumerable: true, get: function () { return httpExecutor_1.HttpError; } });
Object.defineProperty(exports, "createHttpError", { enumerable: true, get: function () { return httpExecutor_1.createHttpError; } });
Object.defineProperty(exports, "HttpExecutor", { enumerable: true, get: function () { return httpExecutor_1.HttpExecutor; } });
Object.defineProperty(exports, "DigestTransform", { enumerable: true, get: function () { return httpExecutor_1.DigestTransform; } });
Object.defineProperty(exports, "safeGetHeader", { enumerable: true, get: function () { return httpExecutor_1.safeGetHeader; } });
Object.defineProperty(exports, "configureRequestOptions", { enumerable: true, get: function () { return httpExecutor_1.configureRequestOptions; } });
Object.defineProperty(exports, "configureRequestOptionsFromUrl", { enumerable: true, get: function () { return httpExecutor_1.configureRequestOptionsFromUrl; } });
Object.defineProperty(exports, "safeStringifyJson", { enumerable: true, get: function () { return httpExecutor_1.safeStringifyJson; } });
Object.defineProperty(exports, "parseJson", { enumerable: true, get: function () { return httpExecutor_1.parseJson; } });
Object.defineProperty(exports, "configureRequestUrl", { enumerable: true, get: function () { return httpExecutor_1.configureRequestUrl; } });
var publishOptions_1 = require("./publishOptions");
Object.defineProperty(exports, "getS3LikeProviderBaseUrl", { enumerable: true, get: function () { return publishOptions_1.getS3LikeProviderBaseUrl; } });
Object.defineProperty(exports, "githubUrl", { enumerable: true, get: function () { return publishOptions_1.githubUrl; } });
var rfc2253Parser_1 = require("./rfc2253Parser");
Object.defineProperty(exports, "parseDn", { enumerable: true, get: function () { return rfc2253Parser_1.parseDn; } });
var uuid_1 = require("./uuid");
Object.defineProperty(exports, "UUID", { enumerable: true, get: function () { return uuid_1.UUID; } });
var ProgressCallbackTransform_1 = require("./ProgressCallbackTransform");
Object.defineProperty(exports, "ProgressCallbackTransform", { enumerable: true, get: function () { return ProgressCallbackTransform_1.ProgressCallbackTransform; } });
var xml_1 = require("./xml");
Object.defineProperty(exports, "parseXml", { enumerable: true, get: function () { return xml_1.parseXml; } });
Object.defineProperty(exports, "XElement", { enumerable: true, get: function () { return xml_1.XElement; } });
// nsis
exports.CURRENT_APP_INSTALLER_FILE_NAME = "installer.exe";
// nsis-web
exports.CURRENT_APP_PACKAGE_FILE_NAME = "package.7z";
function asArray(v) {
if (v == null) {
return [];
}
else if (Array.isArray(v)) {
return v;
}
else {
return [v];
}
}
exports.asArray = asArray;
function newError(message, code) {
const error = new Error(message);
error.code = code;
return error;
}
exports.newError = newError;
//# sourceMappingURL=index.js.map

1
mc_test/node_modules/builder-util-runtime/out/index.js.map generated vendored Executable file

@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;AAAA,yDAA0E;AAAjE,sHAAA,iBAAiB,OAAA;AAAE,sHAAA,iBAAiB,OAAA;AAC7C,+CAauB;AAZrB,yGAAA,SAAS,OAAA;AACT,+GAAA,eAAe,OAAA;AACf,4GAAA,YAAY,OAAA;AAEZ,+GAAA,eAAe,OAAA;AAEf,6GAAA,aAAa,OAAA;AACb,uHAAA,uBAAuB,OAAA;AACvB,8HAAA,8BAA8B,OAAA;AAC9B,iHAAA,iBAAiB,OAAA;AACjB,yGAAA,SAAS,OAAA;AACT,mHAAA,mBAAmB,OAAA;AAErB,mDAeyB;AAJvB,0HAAA,wBAAwB,OAAA;AACxB,2GAAA,SAAS,OAAA;AAKX,iDAAyC;AAAhC,wGAAA,OAAO,OAAA;AAChB,+BAA6B;AAApB,4FAAA,IAAI,OAAA;AACb,yEAAqF;AAA5E,sIAAA,yBAAyB,OAAA;AAClC,6BAA0C;AAAjC,+FAAA,QAAQ,OAAA;AAAE,+FAAA,QAAQ,OAAA;AAG3B,OAAO;AACM,QAAA,+BAA+B,GAAG,eAAe,CAAA;AAC9D,WAAW;AACE,QAAA,6BAA6B,GAAG,YAAY,CAAA;AAEzD,SAAgB,OAAO,CAAI,CAAkC;IAC3D,IAAI,CAAC,IAAI,IAAI,EAAE,CAAC;QACd,OAAO,EAAE,CAAA;IACX,CAAC;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC;QAC5B,OAAO,CAAC,CAAA;IACV,CAAC;SAAM,CAAC;QACN,OAAO,CAAC,CAAC,CAAC,CAAA;IACZ,CAAC;AACH,CAAC;AARD,0BAQC;AAED,SAAgB,QAAQ,CAAC,OAAe,EAAE,IAAY;IACpD,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,OAAO,CAAC,CAC/B;IAAC,KAA+B,CAAC,IAAI,GAAG,IAAI,CAAA;IAC7C,OAAO,KAAK,CAAA;AACd,CAAC;AAJD,4BAIC","sourcesContent":["export { CancellationToken, CancellationError } from \"./CancellationToken\"\nexport {\n HttpError,\n createHttpError,\n HttpExecutor,\n DownloadOptions,\n DigestTransform,\n RequestHeaders,\n safeGetHeader,\n configureRequestOptions,\n configureRequestOptionsFromUrl,\n safeStringifyJson,\n parseJson,\n configureRequestUrl,\n} from \"./httpExecutor\"\nexport {\n CustomPublishOptions,\n GenericServerOptions,\n GithubOptions,\n KeygenOptions,\n BitbucketOptions,\n SnapStoreOptions,\n PublishConfiguration,\n S3Options,\n SpacesOptions,\n BaseS3Options,\n getS3LikeProviderBaseUrl,\n githubUrl,\n PublishProvider,\n AllPublishOptions,\n} from \"./publishOptions\"\nexport { UpdateInfo, UpdateFileInfo, WindowsUpdateInfo, BlockMapDataHolder, PackageFileInfo, ReleaseNoteInfo } from \"./updateInfo\"\nexport { parseDn } from \"./rfc2253Parser\"\nexport { UUID } from \"./uuid\"\nexport { ProgressCallbackTransform, ProgressInfo } from \"./ProgressCallbackTransform\"\nexport { parseXml, XElement } from \"./xml\"\nexport { BlockMap } from \"./blockMapApi\"\n\n// nsis\nexport const CURRENT_APP_INSTALLER_FILE_NAME = \"installer.exe\"\n// nsis-web\nexport const CURRENT_APP_PACKAGE_FILE_NAME = \"package.7z\"\n\nexport function asArray<T>(v: null | undefined | T | Array<T>): Array<T> {\n if (v == null) {\n return []\n } else if (Array.isArray(v)) {\n return v\n } else {\n return [v]\n }\n}\n\nexport function newError(message: string, code: string) {\n const error = new Error(message)\n ;(error as NodeJS.ErrnoException).code = code\n return error\n}\n"]}

mc_test/node_modules/builder-util-runtime/out/publishOptions.d.ts

@ -0,0 +1,315 @@
/// <reference types="node" />
import { OutgoingHttpHeaders } from "http";
export type PublishProvider = "github" | "s3" | "spaces" | "generic" | "custom" | "snapStore" | "keygen" | "bitbucket";
export type AllPublishOptions = string | GithubOptions | S3Options | SpacesOptions | GenericServerOptions | CustomPublishOptions | KeygenOptions | SnapStoreOptions | BitbucketOptions;
export interface PublishConfiguration {
/**
* The provider.
*/
readonly provider: PublishProvider;
/**
* @private
* win-only
*/
publisherName?: Array<string> | null;
/**
* @private
* win-only
*/
readonly updaterCacheDirName?: string | null;
/**
* Whether to publish auto update info files.
*
* Auto update relies only on the first provider in the list (you can specify several publishers).
* Thus, probably, there's no need to upload the metadata files for the other configured providers, but they are uploaded by default.
*
* @default true
*/
readonly publishAutoUpdate?: boolean;
/**
* Any custom request headers
*/
readonly requestHeaders?: OutgoingHttpHeaders;
/**
* Request timeout in milliseconds. (Default is 2 minutes; 0 is ignored)
*
* @default 120000
*/
readonly timeout?: number | null;
}
export interface CustomPublishOptions extends PublishConfiguration {
/**
* The provider. Must be `custom`.
*/
readonly provider: "custom";
/**
* The provider class (constructor) that supplies UpdateInfo about available updates. Required
* to use custom providers with electron-updater.
*/
updateProvider?: new (options: CustomPublishOptions, updater: any, runtimeOptions: any) => any;
[index: string]: any;
}
/**
* [GitHub](https://help.github.com/articles/about-releases/) options.
*
* GitHub [personal access token](https://help.github.com/articles/creating-an-access-token-for-command-line-use/) is required. You can generate one by going to [https://github.com/settings/tokens/new](https://github.com/settings/tokens/new). The access token should have the repo scope/permission.
* Define `GH_TOKEN` environment variable.
*/
export interface GithubOptions extends PublishConfiguration {
/**
* The provider. Must be `github`.
*/
readonly provider: "github";
/**
* The repository name. [Detected automatically](#github-repository-and-bintray-package).
*/
readonly repo?: string | null;
/**
* The owner.
*/
readonly owner?: string | null;
/**
* Whether to use `v`-prefixed tag name.
* @default true
*/
readonly vPrefixedTagName?: boolean;
/**
* The host (including the port if needed).
* @default github.com
*/
readonly host?: string | null;
/**
* The protocol. GitHub Publisher supports only `https`.
* @default https
*/
readonly protocol?: "https" | "http" | null;
/**
* The access token to support auto-update from private github repositories. Never specify it in the configuration files. Only for [setFeedURL](/auto-update#appupdatersetfeedurloptions).
*/
readonly token?: string | null;
/**
* Whether to use private github auto-update provider if `GH_TOKEN` environment variable is defined. See [Private GitHub Update Repo](/auto-update#private-github-update-repo).
*/
readonly private?: boolean | null;
/**
* The channel.
* @default latest
*/
readonly channel?: string | null;
/**
* The type of release. By default `draft` release will be created.
*
* You can also set the release type using environment variables: if `EP_DRAFT` is set to `true`, the release type is `draft`; if `EP_PRE_RELEASE` is set to `true`, it is `prerelease`.
* @default draft
*/
releaseType?: "draft" | "prerelease" | "release" | null;
}
/** @private */
export declare function githubUrl(options: GithubOptions, defaultHost?: string): string;
/**
* Generic (any HTTP(S) server) options.
* In all publish options [File Macros](/file-patterns#file-macros) are supported.
*/
export interface GenericServerOptions extends PublishConfiguration {
/**
* The provider. Must be `generic`.
*/
readonly provider: "generic";
/**
* The base url. e.g. `https://bucket_name.s3.amazonaws.com`.
*/
readonly url: string;
/**
* The channel.
* @default latest
*/
readonly channel?: string | null;
/**
* Whether to use multiple range requests for differential update. Defaults to `true` if `url` doesn't contain `s3.amazonaws.com`.
*/
readonly useMultipleRangeRequest?: boolean;
}
/**
* Keygen options.
* https://keygen.sh/
* Define `KEYGEN_TOKEN` environment variable.
*/
export interface KeygenOptions extends PublishConfiguration {
/**
* The provider. Must be `keygen`.
*/
readonly provider: "keygen";
/**
* Keygen account's UUID
*/
readonly account: string;
/**
* Keygen product's UUID
*/
readonly product: string;
/**
* The channel.
* @default stable
*/
readonly channel?: "stable" | "rc" | "beta" | "alpha" | "dev" | null;
/**
* The target platform. Set programmatically during publishing.
*/
readonly platform?: string | null;
}
/**
* Bitbucket options.
* https://bitbucket.org/
* Define `BITBUCKET_TOKEN` environment variable.
*
* To convert an app password to a usable token, you can use this helper:
```typescript
convertAppPassword(owner: string, appPassword: string) {
const base64encodedData = Buffer.from(`${owner}:${appPassword.trim()}`).toString("base64")
return `Basic ${base64encodedData}`
}
```
*/
export interface BitbucketOptions extends PublishConfiguration {
/**
* The provider. Must be `bitbucket`.
*/
readonly provider: "bitbucket";
/**
* Repository owner
*/
readonly owner: string;
/**
* The app password (account>settings>app-passwords) to support auto-update from private bitbucket repositories.
*/
readonly token?: string | null;
/**
* The user name to support auto-update from private bitbucket repositories.
*/
readonly username?: string | null;
/**
* Repository slug/name
*/
readonly slug: string;
/**
* The channel.
* @default latest
*/
readonly channel?: string | null;
}
/**
* [Snap Store](https://snapcraft.io/) options. To publish directly to Snapcraft, see <a href="https://snapcraft.io/docs/snapcraft-authentication">Snapcraft authentication options</a> for local or CI/CD authentication options.
*/
export interface SnapStoreOptions extends PublishConfiguration {
/**
* The provider. Must be `snapStore`.
*/
readonly provider: "snapStore";
/**
* snapcraft repo name
*/
readonly repo?: string;
/**
* The list of channels the snap would be released to.
* @default ["edge"]
*/
readonly channels?: string | Array<string> | null;
}
export interface BaseS3Options extends PublishConfiguration {
/**
* The update channel.
* @default latest
*/
channel?: string | null;
/**
* The directory path.
* @default /
*/
readonly path?: string | null;
/**
* The ACL. Set to `null` to not [add](https://github.com/electron-userland/electron-builder/issues/1822).
*
* @default public-read
*/
readonly acl?: "private" | "public-read" | null;
}
/**
* [Amazon S3](https://aws.amazon.com/s3/) options.
* AWS credentials are required, please see [getting your credentials](http://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/getting-your-credentials.html).
* Define `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` [environment variables](http://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/loading-node-credentials-environment.html).
* Or in the [~/.aws/credentials](http://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/loading-node-credentials-shared.html).
*
* Example configuration:
*
```json
{
  "build": {
    "publish": {
      "provider": "s3",
      "bucket": "bucket-name"
    }
  }
}
```
*/
export interface S3Options extends BaseS3Options {
/**
* The provider. Must be `s3`.
*/
readonly provider: "s3";
/**
* The bucket name.
*/
readonly bucket: string;
/**
* The region. Is determined and set automatically when publishing.
*/
region?: string | null;
/**
* The ACL. Set to `null` to not [add](https://github.com/electron-userland/electron-builder/issues/1822).
*
* Please see [required permissions for the S3 provider](https://github.com/electron-userland/electron-builder/issues/1618#issuecomment-314679128).
*
* @default public-read
*/
readonly acl?: "private" | "public-read" | null;
/**
* The type of storage to use for the object.
* @default STANDARD
*/
readonly storageClass?: "STANDARD" | "REDUCED_REDUNDANCY" | "STANDARD_IA" | null;
/**
* Server-side encryption algorithm to use for the object.
*/
readonly encryption?: "AES256" | "aws:kms" | null;
/**
* The endpoint URI to send requests to. The default endpoint is built from the configured region.
* The endpoint should be a string like `https://{service}.{region}.amazonaws.com`.
*/
readonly endpoint?: string | null;
/**
* If set to true, this will enable the s3 accelerated endpoint
* These endpoints have a particular format of:
* ${bucketname}.s3-accelerate.amazonaws.com
*/
readonly accelerate?: boolean;
}
/**
* [DigitalOcean Spaces](https://www.digitalocean.com/community/tutorials/an-introduction-to-digitalocean-spaces) options.
* Access key is required, define `DO_KEY_ID` and `DO_SECRET_KEY` environment variables.
*/
export interface SpacesOptions extends BaseS3Options {
/**
* The provider. Must be `spaces`.
*/
readonly provider: "spaces";
/**
* The space name.
*/
readonly name: string;
/**
* The region (e.g. `nyc3`).
*/
readonly region: string;
}
export declare function getS3LikeProviderBaseUrl(configuration: PublishConfiguration): string;
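
For illustration, two publish configurations typed against these interfaces; the owner, repo, and URL values are placeholders:

```typescript
import { GenericServerOptions, GithubOptions } from "builder-util-runtime"

const github: GithubOptions = {
  provider: "github",
  owner: "example-org", // placeholder
  repo: "example-app",  // placeholder
  releaseType: "release",
}

const generic: GenericServerOptions = {
  provider: "generic",
  url: "https://updates.example.com/my-app", // placeholder
  channel: "latest",
}
```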

mc_test/node_modules/builder-util-runtime/out/publishOptions.js

@ -0,0 +1,66 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getS3LikeProviderBaseUrl = exports.githubUrl = void 0;
/** @private */
function githubUrl(options, defaultHost = "github.com") {
return `${options.protocol || "https"}://${options.host || defaultHost}`;
}
exports.githubUrl = githubUrl;
function getS3LikeProviderBaseUrl(configuration) {
const provider = configuration.provider;
if (provider === "s3") {
return s3Url(configuration);
}
if (provider === "spaces") {
return spacesUrl(configuration);
}
throw new Error(`Not supported provider: ${provider}`);
}
exports.getS3LikeProviderBaseUrl = getS3LikeProviderBaseUrl;
function s3Url(options) {
let url;
if (options.accelerate == true) {
url = `https://${options.bucket}.s3-accelerate.amazonaws.com`;
}
else if (options.endpoint != null) {
url = `${options.endpoint}/${options.bucket}`;
}
else if (options.bucket.includes(".")) {
if (options.region == null) {
throw new Error(`Bucket name "${options.bucket}" includes a dot, but S3 region is missing`);
}
// special case, see http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingBucket.html#access-bucket-intro
if (options.region === "us-east-1") {
url = `https://s3.amazonaws.com/${options.bucket}`;
}
else {
url = `https://s3-${options.region}.amazonaws.com/${options.bucket}`;
}
}
else if (options.region === "cn-north-1") {
url = `https://${options.bucket}.s3.${options.region}.amazonaws.com.cn`;
}
else {
url = `https://${options.bucket}.s3.amazonaws.com`;
}
return appendPath(url, options.path);
}
function appendPath(url, p) {
if (p != null && p.length > 0) {
if (!p.startsWith("/")) {
url += "/";
}
url += p;
}
return url;
}
function spacesUrl(options) {
if (options.name == null) {
throw new Error(`name is missing`);
}
if (options.region == null) {
throw new Error(`region is missing`);
}
return appendPath(`https://${options.name}.${options.region}.digitaloceanspaces.com`, options.path);
}
//# sourceMappingURL=publishOptions.js.map
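
A sketch of what `getS3LikeProviderBaseUrl` produces for a couple of configurations; the bucket and space names are made up:

```typescript
import { getS3LikeProviderBaseUrl, S3Options, SpacesOptions } from "builder-util-runtime"

// Plain S3 bucket -> virtual-hosted-style URL
const s3: S3Options = { provider: "s3", bucket: "my-updates" }
console.log(getS3LikeProviderBaseUrl(s3))
// => https://my-updates.s3.amazonaws.com

// DigitalOcean Space with a path appended
const spaces: SpacesOptions = { provider: "spaces", name: "my-space", region: "nyc3", path: "releases" }
console.log(getS3LikeProviderBaseUrl(spaces))
// => https://my-space.nyc3.digitaloceanspaces.com/releases
```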

File diff suppressed because one or more lines are too long

mc_test/node_modules/builder-util-runtime/out/rfc2253Parser.d.ts

@ -0,0 +1 @@
export declare function parseDn(seq: string): Map<string, string>;

mc_test/node_modules/builder-util-runtime/out/rfc2253Parser.js

@ -0,0 +1,81 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.parseDn = void 0;
function parseDn(seq) {
let quoted = false;
let key = null;
let token = "";
let nextNonSpace = 0;
seq = seq.trim();
const result = new Map();
for (let i = 0; i <= seq.length; i++) {
if (i === seq.length) {
if (key !== null) {
result.set(key, token);
}
break;
}
const ch = seq[i];
if (quoted) {
if (ch === '"') {
quoted = false;
continue;
}
}
else {
if (ch === '"') {
quoted = true;
continue;
}
if (ch === "\\") {
i++;
const ord = parseInt(seq.slice(i, i + 2), 16);
if (Number.isNaN(ord)) {
token += seq[i];
}
else {
i++;
token += String.fromCharCode(ord);
}
continue;
}
if (key === null && ch === "=") {
key = token;
token = "";
continue;
}
if (ch === "," || ch === ";" || ch === "+") {
if (key !== null) {
result.set(key, token);
}
key = null;
token = "";
continue;
}
}
if (ch === " " && !quoted) {
if (token.length === 0) {
continue;
}
if (i > nextNonSpace) {
let j = i;
while (seq[j] === " ") {
j++;
}
nextNonSpace = j;
}
if (nextNonSpace >= seq.length ||
seq[nextNonSpace] === "," ||
seq[nextNonSpace] === ";" ||
(key === null && seq[nextNonSpace] === "=") ||
(key !== null && seq[nextNonSpace] === "+")) {
i = nextNonSpace - 1;
continue;
}
}
token += ch;
}
return result;
}
exports.parseDn = parseDn;
//# sourceMappingURL=rfc2253Parser.js.map
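
A sketch of `parseDn` on a typical certificate subject string; the distinguished name below is illustrative:

```typescript
import { parseDn } from "builder-util-runtime"

// Quoted values keep their commas and spaces; unquoted values are trimmed.
const dn = parseDn('CN="Example Corp, Inc.", O=Example Corp, C=US')
console.log(dn.get("CN")) // "Example Corp, Inc."
console.log(dn.get("O"))  // "Example Corp"
console.log(dn.get("C"))  // "US"
```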

mc_test/node_modules/builder-util-runtime/out/rfc2253Parser.js.map

@ -0,0 +1 @@
{"version":3,"file":"rfc2253Parser.js","sourceRoot":"","sources":["../src/rfc2253Parser.ts"],"names":[],"mappings":";;;AAAA,SAAgB,OAAO,CAAC,GAAW;IACjC,IAAI,MAAM,GAAG,KAAK,CAAA;IAClB,IAAI,GAAG,GAAkB,IAAI,CAAA;IAC7B,IAAI,KAAK,GAAG,EAAE,CAAA;IACd,IAAI,YAAY,GAAG,CAAC,CAAA;IAEpB,GAAG,GAAG,GAAG,CAAC,IAAI,EAAE,CAAA;IAChB,MAAM,MAAM,GAAG,IAAI,GAAG,EAAkB,CAAA;IACxC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,GAAG,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;QACrC,IAAI,CAAC,KAAK,GAAG,CAAC,MAAM,EAAE,CAAC;YACrB,IAAI,GAAG,KAAK,IAAI,EAAE,CAAC;gBACjB,MAAM,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAA;YACxB,CAAC;YACD,MAAK;QACP,CAAC;QAED,MAAM,EAAE,GAAG,GAAG,CAAC,CAAC,CAAC,CAAA;QACjB,IAAI,MAAM,EAAE,CAAC;YACX,IAAI,EAAE,KAAK,GAAG,EAAE,CAAC;gBACf,MAAM,GAAG,KAAK,CAAA;gBACd,SAAQ;YACV,CAAC;QACH,CAAC;aAAM,CAAC;YACN,IAAI,EAAE,KAAK,GAAG,EAAE,CAAC;gBACf,MAAM,GAAG,IAAI,CAAA;gBACb,SAAQ;YACV,CAAC;YAED,IAAI,EAAE,KAAK,IAAI,EAAE,CAAC;gBAChB,CAAC,EAAE,CAAA;gBACH,MAAM,GAAG,GAAG,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA;gBAC7C,IAAI,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC;oBACtB,KAAK,IAAI,GAAG,CAAC,CAAC,CAAC,CAAA;gBACjB,CAAC;qBAAM,CAAC;oBACN,CAAC,EAAE,CAAA;oBACH,KAAK,IAAI,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,CAAA;gBACnC,CAAC;gBACD,SAAQ;YACV,CAAC;YAED,IAAI,GAAG,KAAK,IAAI,IAAI,EAAE,KAAK,GAAG,EAAE,CAAC;gBAC/B,GAAG,GAAG,KAAK,CAAA;gBACX,KAAK,GAAG,EAAE,CAAA;gBACV,SAAQ;YACV,CAAC;YAED,IAAI,EAAE,KAAK,GAAG,IAAI,EAAE,KAAK,GAAG,IAAI,EAAE,KAAK,GAAG,EAAE,CAAC;gBAC3C,IAAI,GAAG,KAAK,IAAI,EAAE,CAAC;oBACjB,MAAM,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAA;gBACxB,CAAC;gBACD,GAAG,GAAG,IAAI,CAAA;gBACV,KAAK,GAAG,EAAE,CAAA;gBACV,SAAQ;YACV,CAAC;QACH,CAAC;QAED,IAAI,EAAE,KAAK,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC;YAC1B,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;gBACvB,SAAQ;YACV,CAAC;YAED,IAAI,CAAC,GAAG,YAAY,EAAE,CAAC;gBACrB,IAAI,CAAC,GAAG,CAAC,CAAA;gBACT,OAAO,GAAG,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE,CAAC;oBACtB,CAAC,EAAE,CAAA;gBACL,CAAC;gBACD,YAAY,GAAG,CAAC,CAAA;YAClB,CAAC;YAED,IACE,YAAY,IAAI,GAAG,CAAC,MAAM;gBAC1B,GAAG,CAAC,YAAY,CAAC,KAAK,GAAG;gBACzB,GAAG,CAAC,YAAY,CAAC,KAAK,GAAG;gBACzB,CAAC,GAAG,KAAK,IAAI,IAAI,GAAG,CAAC,YAAY,CAAC,KAAK,GAAG,CAAC;gBAC3C,CAAC,GAAG,KAAK,IAAI,IAAI,GAAG,CAAC,YAAY,CAAC,KAAK,GAAG,CAAC,EAC3C,CAAC;gBACD,CAAC,GAAG,YAAY,GAAG,CAAC,CAAA;gBACpB,SAAQ;YACV,CAAC;QACH,CAAC;QAED,KAAK,IAAI,EAAE,CAAA;IACb,CAAC;IAED,OAAO,MAAM,CAAA;AACf,CAAC;AArFD,0BAqFC","sourcesContent":["export function parseDn(seq: string): Map<string, string> {\n let quoted = false\n let key: string | null = null\n let token = \"\"\n let nextNonSpace = 0\n\n seq = seq.trim()\n const result = new Map<string, string>()\n for (let i = 0; i <= seq.length; i++) {\n if (i === seq.length) {\n if (key !== null) {\n result.set(key, token)\n }\n break\n }\n\n const ch = seq[i]\n if (quoted) {\n if (ch === '\"') {\n quoted = false\n continue\n }\n } else {\n if (ch === '\"') {\n quoted = true\n continue\n }\n\n if (ch === \"\\\\\") {\n i++\n const ord = parseInt(seq.slice(i, i + 2), 16)\n if (Number.isNaN(ord)) {\n token += seq[i]\n } else {\n i++\n token += String.fromCharCode(ord)\n }\n continue\n }\n\n if (key === null && ch === \"=\") {\n key = token\n token = \"\"\n continue\n }\n\n if (ch === \",\" || ch === \";\" || ch === \"+\") {\n if (key !== null) {\n result.set(key, token)\n }\n key = null\n token = \"\"\n continue\n }\n }\n\n if (ch === \" \" && !quoted) {\n if (token.length === 0) {\n continue\n }\n\n if (i > nextNonSpace) {\n let j = i\n while (seq[j] === \" \") {\n j++\n }\n nextNonSpace = j\n }\n\n if (\n nextNonSpace >= seq.length ||\n seq[nextNonSpace] === \",\" ||\n 
seq[nextNonSpace] === \";\" ||\n (key === null && seq[nextNonSpace] === \"=\") ||\n (key !== null && seq[nextNonSpace] === \"+\")\n ) {\n i = nextNonSpace - 1\n continue\n }\n }\n\n token += ch\n }\n\n return result\n}\n"]}

mc_test/node_modules/builder-util-runtime/out/updateInfo.d.ts

@ -0,0 +1,71 @@
export interface ReleaseNoteInfo {
/**
* The version.
*/
readonly version: string;
/**
* The note.
*/
readonly note: string | null;
}
export interface BlockMapDataHolder {
/**
* The file size. Used to verify downloaded size (save one HTTP request to get length).
* Also used when block map data is embedded into the file (appimage, windows web installer package).
*/
size?: number;
/**
* The block map file size. Used when block map data is embedded into the file (appimage, windows web installer package).
* This information can be obtained from the file itself, but it requires an additional HTTP request,
* so, to reduce request count, block map size is specified in the update metadata too.
*/
blockMapSize?: number;
/**
* The file checksum.
*/
readonly sha512: string;
readonly isAdminRightsRequired?: boolean;
}
export interface PackageFileInfo extends BlockMapDataHolder {
readonly path: string;
}
export interface UpdateFileInfo extends BlockMapDataHolder {
url: string;
}
export interface UpdateInfo {
/**
* The version.
*/
readonly version: string;
readonly files: Array<UpdateFileInfo>;
/** @deprecated */
readonly path: string;
/** @deprecated */
readonly sha512: string;
/**
* The release name.
*/
releaseName?: string | null;
/**
* The release notes. List if `updater.fullChangelog` is set to `true`, `string` otherwise.
*/
releaseNotes?: string | Array<ReleaseNoteInfo> | null;
/**
* The release date.
*/
releaseDate: string;
/**
* The [staged rollout](/auto-update#staged-rollouts) percentage, 0-100.
*/
readonly stagingPercentage?: number;
}
export interface WindowsUpdateInfo extends UpdateInfo {
packages?: {
[arch: string]: PackageFileInfo;
} | null;
/**
* @deprecated
* @private
*/
sha2?: string;
}
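
As an illustration, an update feed entry shaped to these interfaces (the kind of object parsed from a `latest.yml`); every value below is made up:

```typescript
import { UpdateInfo } from "builder-util-runtime"

const info: UpdateInfo = {
  version: "1.2.3",
  files: [
    {
      url: "Example-App-1.2.3.exe",
      sha512: "bWFkZS11cC1zaGE1MTItZGlnZXN0", // made-up base64 digest
      size: 52428800,
    },
  ],
  // deprecated duplicates of files[0], kept for older updaters
  path: "Example-App-1.2.3.exe",
  sha512: "bWFkZS11cC1zaGE1MTItZGlnZXN0",
  releaseDate: "2025-01-01T00:00:00.000Z",
  releaseNotes: "Example bug fixes",
}
```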

mc_test/node_modules/builder-util-runtime/out/updateInfo.js

@ -0,0 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=updateInfo.js.map

mc_test/node_modules/builder-util-runtime/out/updateInfo.js.map

@ -0,0 +1 @@
{"version":3,"file":"updateInfo.js","sourceRoot":"","sources":["../src/updateInfo.ts"],"names":[],"mappings":"","sourcesContent":["export interface ReleaseNoteInfo {\n /**\n * The version.\n */\n readonly version: string\n\n /**\n * The note.\n */\n readonly note: string | null\n}\n\nexport interface BlockMapDataHolder {\n /**\n * The file size. Used to verify downloaded size (save one HTTP request to get length).\n * Also used when block map data is embedded into the file (appimage, windows web installer package).\n */\n size?: number\n\n /**\n * The block map file size. Used when block map data is embedded into the file (appimage, windows web installer package).\n * This information can be obtained from the file itself, but it requires additional HTTP request,\n * so, to reduce request count, block map size is specified in the update metadata too.\n */\n blockMapSize?: number\n\n /**\n * The file checksum.\n */\n readonly sha512: string\n\n readonly isAdminRightsRequired?: boolean\n}\n\nexport interface PackageFileInfo extends BlockMapDataHolder {\n readonly path: string\n}\n\nexport interface UpdateFileInfo extends BlockMapDataHolder {\n url: string\n}\n\nexport interface UpdateInfo {\n /**\n * The version.\n */\n readonly version: string\n\n readonly files: Array<UpdateFileInfo>\n\n /** @deprecated */\n readonly path: string\n\n /** @deprecated */\n readonly sha512: string\n\n /**\n * The release name.\n */\n releaseName?: string | null\n\n /**\n * The release notes. List if `updater.fullChangelog` is set to `true`, `string` otherwise.\n */\n releaseNotes?: string | Array<ReleaseNoteInfo> | null\n\n /**\n * The release date.\n */\n releaseDate: string\n\n /**\n * The [staged rollout](/auto-update#staged-rollouts) percentage, 0-100.\n */\n readonly stagingPercentage?: number\n}\n\nexport interface WindowsUpdateInfo extends UpdateInfo {\n packages?: { [arch: string]: PackageFileInfo } | null\n\n /**\n * @deprecated\n * @private\n */\n sha2?: string\n}\n"]}

22
mc_test/node_modules/builder-util-runtime/out/uuid.d.ts generated vendored Executable file
View File

@ -0,0 +1,22 @@
/// <reference types="node" />
export declare class UUID {
private ascii;
private readonly binary;
private readonly version;
static readonly OID: Buffer;
constructor(uuid: Buffer | string);
static v5(name: string | Buffer, namespace: Buffer): any;
toString(): string;
inspect(): string;
static check(uuid: Buffer | string, offset?: number): false | {
version: undefined;
variant: string;
format: string;
} | {
version: number;
variant: string;
format: string;
};
static parse(input: string): Buffer;
}
export declare const nil: UUID;
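
A brief usage sketch of this class, assuming the package is installed and importing straight from the compiled out/ module listed in this commit; the name string is arbitrary:

import { UUID } from "builder-util-runtime/out/uuid"

// Name-based (v5) UUID: the same name and namespace always yield the same id.
const id = UUID.v5("org.example.myapp", UUID.OID)
console.log(id) // a string such as "xxxxxxxx-xxxx-5xxx-xxxx-xxxxxxxxxxxx"

// Round-trip: parse the string form into a 16-byte Buffer and classify it.
const bytes = UUID.parse(id)
console.log(UUID.check(bytes)) // { version: 5, variant: "rfc4122", format: "binary" }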

200
mc_test/node_modules/builder-util-runtime/out/uuid.js generated vendored Executable file
View File

@ -0,0 +1,200 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.nil = exports.UUID = void 0;
const crypto_1 = require("crypto");
const index_1 = require("./index");
const invalidName = "options.name must be either a string or a Buffer";
// Node ID according to rfc4122#section-4.5
const randomHost = (0, crypto_1.randomBytes)(16);
randomHost[0] = randomHost[0] | 0x01;
// lookup table hex to byte
const hex2byte = {};
// lookup table byte to hex
const byte2hex = [];
// populate lookup tables
for (let i = 0; i < 256; i++) {
const hex = (i + 0x100).toString(16).substr(1);
hex2byte[hex] = i;
byte2hex[i] = hex;
}
// UUID class
class UUID {
constructor(uuid) {
this.ascii = null;
this.binary = null;
const check = UUID.check(uuid);
if (!check) {
throw new Error("not a UUID");
}
this.version = check.version;
if (check.format === "ascii") {
this.ascii = uuid;
}
else {
this.binary = uuid;
}
}
static v5(name, namespace) {
return uuidNamed(name, "sha1", 0x50, namespace);
}
toString() {
if (this.ascii == null) {
this.ascii = stringify(this.binary);
}
return this.ascii;
}
inspect() {
return `UUID v${this.version} ${this.toString()}`;
}
static check(uuid, offset = 0) {
if (typeof uuid === "string") {
uuid = uuid.toLowerCase();
if (!/^[a-f0-9]{8}(-[a-f0-9]{4}){3}-([a-f0-9]{12})$/.test(uuid)) {
return false;
}
if (uuid === "00000000-0000-0000-0000-000000000000") {
return { version: undefined, variant: "nil", format: "ascii" };
}
return {
version: (hex2byte[uuid[14] + uuid[15]] & 0xf0) >> 4,
variant: getVariant((hex2byte[uuid[19] + uuid[20]] & 0xe0) >> 5),
format: "ascii",
};
}
if (Buffer.isBuffer(uuid)) {
if (uuid.length < offset + 16) {
return false;
}
let i = 0;
for (; i < 16; i++) {
if (uuid[offset + i] !== 0) {
break;
}
}
if (i === 16) {
return { version: undefined, variant: "nil", format: "binary" };
}
return {
version: (uuid[offset + 6] & 0xf0) >> 4,
variant: getVariant((uuid[offset + 8] & 0xe0) >> 5),
format: "binary",
};
}
throw (0, index_1.newError)("Unknown type of uuid", "ERR_UNKNOWN_UUID_TYPE");
}
// read stringified uuid into a Buffer
static parse(input) {
const buffer = Buffer.allocUnsafe(16);
let j = 0;
for (let i = 0; i < 16; i++) {
buffer[i] = hex2byte[input[j++] + input[j++]];
if (i === 3 || i === 5 || i === 7 || i === 9) {
j += 1;
}
}
return buffer;
}
}
exports.UUID = UUID;
// from rfc4122#appendix-C
UUID.OID = UUID.parse("6ba7b812-9dad-11d1-80b4-00c04fd430c8");
// according to rfc4122#section-4.1.1
function getVariant(bits) {
switch (bits) {
case 0:
case 1:
case 3:
return "ncs";
case 4:
case 5:
return "rfc4122";
case 6:
return "microsoft";
default:
return "future";
}
}
var UuidEncoding;
(function (UuidEncoding) {
UuidEncoding[UuidEncoding["ASCII"] = 0] = "ASCII";
UuidEncoding[UuidEncoding["BINARY"] = 1] = "BINARY";
UuidEncoding[UuidEncoding["OBJECT"] = 2] = "OBJECT";
})(UuidEncoding || (UuidEncoding = {}));
// v3 + v5
function uuidNamed(name, hashMethod, version, namespace, encoding = UuidEncoding.ASCII) {
const hash = (0, crypto_1.createHash)(hashMethod);
const nameIsNotAString = typeof name !== "string";
if (nameIsNotAString && !Buffer.isBuffer(name)) {
throw (0, index_1.newError)(invalidName, "ERR_INVALID_UUID_NAME");
}
hash.update(namespace);
hash.update(name);
const buffer = hash.digest();
let result;
switch (encoding) {
case UuidEncoding.BINARY:
buffer[6] = (buffer[6] & 0x0f) | version;
buffer[8] = (buffer[8] & 0x3f) | 0x80;
result = buffer;
break;
case UuidEncoding.OBJECT:
buffer[6] = (buffer[6] & 0x0f) | version;
buffer[8] = (buffer[8] & 0x3f) | 0x80;
result = new UUID(buffer);
break;
default:
result =
byte2hex[buffer[0]] +
byte2hex[buffer[1]] +
byte2hex[buffer[2]] +
byte2hex[buffer[3]] +
"-" +
byte2hex[buffer[4]] +
byte2hex[buffer[5]] +
"-" +
byte2hex[(buffer[6] & 0x0f) | version] +
byte2hex[buffer[7]] +
"-" +
byte2hex[(buffer[8] & 0x3f) | 0x80] +
byte2hex[buffer[9]] +
"-" +
byte2hex[buffer[10]] +
byte2hex[buffer[11]] +
byte2hex[buffer[12]] +
byte2hex[buffer[13]] +
byte2hex[buffer[14]] +
byte2hex[buffer[15]];
break;
}
return result;
}
function stringify(buffer) {
return (byte2hex[buffer[0]] +
byte2hex[buffer[1]] +
byte2hex[buffer[2]] +
byte2hex[buffer[3]] +
"-" +
byte2hex[buffer[4]] +
byte2hex[buffer[5]] +
"-" +
byte2hex[buffer[6]] +
byte2hex[buffer[7]] +
"-" +
byte2hex[buffer[8]] +
byte2hex[buffer[9]] +
"-" +
byte2hex[buffer[10]] +
byte2hex[buffer[11]] +
byte2hex[buffer[12]] +
byte2hex[buffer[13]] +
byte2hex[buffer[14]] +
byte2hex[buffer[15]]);
}
// according to rfc4122#section-4.1.7
exports.nil = new UUID("00000000-0000-0000-0000-000000000000");
// UUID.v4 = uuidRandom
// UUID.v4fast = uuidRandomFast
// UUID.v3 = function(options, callback) {
// return uuidNamed("md5", 0x30, options, callback)
// }
//# sourceMappingURL=uuid.js.map
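
The only post-processing uuidNamed applies to the hash digest is the version/variant masking used above; isolated, with an invented digest, it looks like this:

// Assume `digest` holds the first 16 bytes of a hash (here simply filled with 0xff).
const digest = Buffer.alloc(16, 0xff)
digest[6] = (digest[6] & 0x0f) | 0x50 // keep the low nibble, stamp version 5 into the high nibble
digest[8] = (digest[8] & 0x3f) | 0x80 // force the top bits to 10xxxxxx, the RFC 4122 variant
console.log(digest[6].toString(16), digest[8].toString(16)) // "5f bf"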

1
mc_test/node_modules/builder-util-runtime/out/uuid.js.map generated vendored Executable file

File diff suppressed because one or more lines are too long

17
mc_test/node_modules/builder-util-runtime/out/xml.d.ts generated vendored Executable file
View File

@ -0,0 +1,17 @@
export declare class XElement {
readonly name: string;
value: string;
attributes: {
[key: string]: string;
} | null;
isCData: boolean;
elements: Array<XElement> | null;
constructor(name: string);
attribute(name: string): string;
removeAttribute(name: string): void;
element(name: string, ignoreCase?: boolean, errorIfMissed?: string | null): XElement;
elementOrNull(name: string, ignoreCase?: boolean): XElement | null;
getElements(name: string, ignoreCase?: boolean): XElement[];
elementValueOrEmpty(name: string, ignoreCase?: boolean): string;
}
export declare function parseXml(data: string): XElement;
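
A short usage sketch of this API, importing from the compiled out/ module in this commit; the XML string and element names are made up:

import { parseXml } from "builder-util-runtime/out/xml"

const root = parseXml('<release version="1.2.3"><notes>Bug fixes</notes></release>')
console.log(root.name)                         // "release"
console.log(root.attribute("version"))         // "1.2.3"
console.log(root.elementValueOrEmpty("notes")) // "Bug fixes"
console.log(root.elementOrNull("missing"))     // null (element("missing") would throw instead)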

109
mc_test/node_modules/builder-util-runtime/out/xml.js generated vendored Executable file
View File

@ -0,0 +1,109 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.parseXml = exports.XElement = void 0;
const sax = require("sax");
const index_1 = require("./index");
class XElement {
constructor(name) {
this.name = name;
this.value = "";
this.attributes = null;
this.isCData = false;
this.elements = null;
if (!name) {
throw (0, index_1.newError)("Element name cannot be empty", "ERR_XML_ELEMENT_NAME_EMPTY");
}
if (!isValidName(name)) {
throw (0, index_1.newError)(`Invalid element name: ${name}`, "ERR_XML_ELEMENT_INVALID_NAME");
}
}
attribute(name) {
const result = this.attributes === null ? null : this.attributes[name];
if (result == null) {
throw (0, index_1.newError)(`No attribute "${name}"`, "ERR_XML_MISSED_ATTRIBUTE");
}
return result;
}
removeAttribute(name) {
if (this.attributes !== null) {
delete this.attributes[name];
}
}
element(name, ignoreCase = false, errorIfMissed = null) {
const result = this.elementOrNull(name, ignoreCase);
if (result === null) {
throw (0, index_1.newError)(errorIfMissed || `No element "${name}"`, "ERR_XML_MISSED_ELEMENT");
}
return result;
}
elementOrNull(name, ignoreCase = false) {
if (this.elements === null) {
return null;
}
for (const element of this.elements) {
if (isNameEquals(element, name, ignoreCase)) {
return element;
}
}
return null;
}
getElements(name, ignoreCase = false) {
if (this.elements === null) {
return [];
}
return this.elements.filter(it => isNameEquals(it, name, ignoreCase));
}
elementValueOrEmpty(name, ignoreCase = false) {
const element = this.elementOrNull(name, ignoreCase);
return element === null ? "" : element.value;
}
}
exports.XElement = XElement;
const NAME_REG_EXP = new RegExp(/^[A-Za-z_][:A-Za-z0-9_-]*$/i);
function isValidName(name) {
return NAME_REG_EXP.test(name);
}
function isNameEquals(element, name, ignoreCase) {
const elementName = element.name;
return elementName === name || (ignoreCase === true && elementName.length === name.length && elementName.toLowerCase() === name.toLowerCase());
}
function parseXml(data) {
let rootElement = null;
const parser = sax.parser(true, {});
const elements = [];
parser.onopentag = saxElement => {
const element = new XElement(saxElement.name);
element.attributes = saxElement.attributes;
if (rootElement === null) {
rootElement = element;
}
else {
const parent = elements[elements.length - 1];
if (parent.elements == null) {
parent.elements = [];
}
parent.elements.push(element);
}
elements.push(element);
};
parser.onclosetag = () => {
elements.pop();
};
parser.ontext = text => {
if (elements.length > 0) {
elements[elements.length - 1].value = text;
}
};
parser.oncdata = cdata => {
const element = elements[elements.length - 1];
element.value = cdata;
element.isCData = true;
};
parser.onerror = err => {
throw err;
};
parser.write(data);
return rootElement;
}
exports.parseXml = parseXml;
//# sourceMappingURL=xml.js.map
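
For completeness, the oncdata branch above keeps CDATA content verbatim and flags the element; a tiny hypothetical sketch:

import { parseXml } from "builder-util-runtime/out/xml"

const notes = parseXml("<notes><![CDATA[<b>bold</b> text]]></notes>")
console.log(notes.value)   // "<b>bold</b> text" (the markup stays plain text)
console.log(notes.isCData) // true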

1
mc_test/node_modules/builder-util-runtime/out/xml.js.map generated vendored Executable file

File diff suppressed because one or more lines are too long

29
mc_test/node_modules/builder-util-runtime/package.json generated vendored Executable file
View File

@ -0,0 +1,29 @@
{
"name": "builder-util-runtime",
"version": "9.2.4",
"main": "out/index.js",
"author": "Vladimir Krivosheev",
"license": "MIT",
"repository": {
"type": "git",
"url": "git+https://github.com/electron-userland/electron-builder.git",
"directory": "packages/builder-util-runtime"
},
"bugs": "https://github.com/electron-userland/electron-builder/issues",
"homepage": "https://github.com/electron-userland/electron-builder",
"files": [
"out"
],
"engines": {
"node": ">=12.0.0"
},
"dependencies": {
"debug": "^4.3.4",
"sax": "^1.2.4"
},
"devDependencies": {
"@types/debug": "4.1.7",
"@types/sax": "1.2.3"
},
"types": "./out/index.d.ts"
}

3
mc_test/node_modules/builder-util-runtime/readme.md generated vendored Executable file
View File

@ -0,0 +1,3 @@
# builder-util-runtime
HTTP utilities. Used by [electron-builder](https://github.com/electron-userland/electron-builder).