Mirror of https://github.com/actions/setup-node.git (synced 2025-11-20 21:45:39 +00:00)

Merge 6dd044cc77 into dda4788290
This commit is contained in commit 125ebc14e0.

BIN  .licenses/npm/data-uri-to-buffer.dep.yml    (generated, new file; binary file not shown)
BIN  .licenses/npm/node-domexception.dep.yml     (generated, new file; binary file not shown)
BIN  .licenses/npm/node-fetch.dep.yml            (generated; binary file not shown)
BIN  .licenses/npm/web-streams-polyfill.dep.yml  (generated, new file; binary file not shown)
BIN  .licenses/npm/webidl-conversions.dep.yml    (generated; binary file not shown)

@@ -1,5 +1,5 @@
 // This is a reusable configuration file copied from https://github.com/actions/reusable-workflows/tree/main/reusable-configurations. Please don't make changes to this file as it's the subject of an automatic update.
-module.exports = {
+export default {
   printWidth: 80,
   tabWidth: 2,
   useTabs: false,

@@ -3,17 +3,22 @@ import fs from 'fs';
 import * as path from 'path';
 import * as core from '@actions/core';
 import * as io from '@actions/io';
-import * as auth from '../src/authutil';
-import * as cacheUtils from '../src/cache-utils';
+import * as auth from '../src/authutil.js';
+import * as cacheUtils from '../src/cache-utils.js';
+import {fileURLToPath} from 'url';
+import {jest, describe, beforeEach, afterEach, it, expect} from '@jest/globals';
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
 
 let rcFile: string;
 
 describe('authutil tests', () => {
   const _runnerDir = path.join(__dirname, 'runner');
 
-  let cnSpy: jest.SpyInstance;
-  let logSpy: jest.SpyInstance;
-  let dbgSpy: jest.SpyInstance;
+  let cnSpy: ReturnType<typeof jest.spyOn>;
+  let logSpy: ReturnType<typeof jest.spyOn>;
+  let dbgSpy: ReturnType<typeof jest.spyOn>;
 
   beforeAll(async () => {
     const randPath = path.join(Math.random().toString(36).substring(7));

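The fileURLToPath/__dirname pair added above recreates the CommonJS path globals, which do not exist in ES modules. A minimal standalone sketch of the pattern (the file name is hypothetical, not part of this commit):

// where-am-i.ts (hypothetical illustration)
import * as path from 'path';
import {fileURLToPath} from 'url';

// ESM has no __filename/__dirname globals; derive them from import.meta.url.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

console.log(`this module lives in ${__dirname}`);
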
@@ -3,9 +3,13 @@ import * as cache from '@actions/cache';
 import * as path from 'path';
 import * as glob from '@actions/glob';
 import osm from 'os';
+import {fileURLToPath} from 'url';
 
-import * as utils from '../src/cache-utils';
-import {restoreCache} from '../src/cache-restore';
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+
+import * as utils from '../src/cache-utils.js';
+import {restoreCache} from '../src/cache-restore.js';
 
 describe('cache-restore', () => {
   process.env['GITHUB_WORKSPACE'] = path.join(__dirname, 'data');

@@ -3,10 +3,14 @@ import * as cache from '@actions/cache';
 import * as glob from '@actions/glob';
 import fs from 'fs';
 import path from 'path';
+import {fileURLToPath} from 'url';
 
-import * as utils from '../src/cache-utils';
-import {run} from '../src/cache-save';
-import {State} from '../src/constants';
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+
+import * as utils from '../src/cache-utils.js';
+import {run} from '../src/cache-save.js';
+import {State} from '../src/constants.js';
 
 describe('run', () => {
   const yarnFileHash =

@@ -1,19 +1,23 @@
 import * as core from '@actions/core';
 import * as cache from '@actions/cache';
 import path from 'path';
-import * as utils from '../src/cache-utils';
+import * as utils from '../src/cache-utils.js';
 import {
   PackageManagerInfo,
   isCacheFeatureAvailable,
   supportedPackageManagers,
   isGhes,
   resetProjectDirectoriesMemoized
-} from '../src/cache-utils';
+} from '../src/cache-utils.js';
 import fs from 'fs';
-import * as cacheUtils from '../src/cache-utils';
+import * as cacheUtils from '../src/cache-utils.js';
 import * as glob from '@actions/glob';
 import {Globber} from '@actions/glob';
-import {MockGlobber} from './mock/glob-mock';
+import {MockGlobber} from './mock/glob-mock.js';
+import {fileURLToPath} from 'url';
 
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+
 describe('cache-utils', () => {
   const versionYarn1 = '1.2.3';

@@ -8,9 +8,9 @@ import fs from 'fs';
 import cp from 'child_process';
 import osm from 'os';
 import path from 'path';
-import * as main from '../src/main';
-import * as auth from '../src/authutil';
-import {INodeVersion} from '../src/distributions/base-models';
+import * as main from '../src/main.js';
+import * as auth from '../src/authutil.js';
+import {INodeVersion} from '../src/distributions/base-models.js';
 
 import nodeTestManifest from './data/versions-manifest.json';
 import nodeTestDist from './data/node-dist-index.json';

@@ -7,37 +7,42 @@ import * as io from '@actions/io';
 import fs from 'fs';
 import path from 'path';
 import osm from 'os';
+import {fileURLToPath} from 'url';
+import {jest, describe, beforeEach, afterEach, it, expect} from '@jest/globals';
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
 
 import each from 'jest-each';
 
-import * as main from '../src/main';
-import * as util from '../src/util';
-import OfficialBuilds from '../src/distributions/official_builds/official_builds';
+import * as main from '../src/main.js';
+import * as util from '../src/util.js';
+import OfficialBuilds from '../src/distributions/official_builds/official_builds.js';
 
 describe('main tests', () => {
   let inputs = {} as any;
   let os = {} as any;
 
-  let infoSpy: jest.SpyInstance;
-  let warningSpy: jest.SpyInstance;
-  let saveStateSpy: jest.SpyInstance;
-  let inSpy: jest.SpyInstance;
-  let setOutputSpy: jest.SpyInstance;
-  let startGroupSpy: jest.SpyInstance;
-  let endGroupSpy: jest.SpyInstance;
+  let infoSpy: ReturnType<typeof jest.spyOn>;
+  let warningSpy: ReturnType<typeof jest.spyOn>;
+  let saveStateSpy: ReturnType<typeof jest.spyOn>;
+  let inSpy: ReturnType<typeof jest.spyOn>;
+  let setOutputSpy: ReturnType<typeof jest.spyOn>;
+  let startGroupSpy: ReturnType<typeof jest.spyOn>;
+  let endGroupSpy: ReturnType<typeof jest.spyOn>;
 
-  let whichSpy: jest.SpyInstance;
+  let whichSpy: ReturnType<typeof jest.spyOn>;
 
-  let existsSpy: jest.SpyInstance;
+  let existsSpy: ReturnType<typeof jest.spyOn>;
 
-  let getExecOutputSpy: jest.SpyInstance;
+  let getExecOutputSpy: ReturnType<typeof jest.spyOn>;
 
-  let getNodeVersionFromFileSpy: jest.SpyInstance;
-  let cnSpy: jest.SpyInstance;
-  let findSpy: jest.SpyInstance;
-  let isCacheActionAvailable: jest.SpyInstance;
+  let getNodeVersionFromFileSpy: ReturnType<typeof jest.spyOn>;
+  let cnSpy: ReturnType<typeof jest.spyOn>;
+  let findSpy: ReturnType<typeof jest.spyOn>;
+  let isCacheActionAvailable: ReturnType<typeof jest.spyOn>;
 
-  let setupNodeJsSpy: jest.SpyInstance;
+  let setupNodeJsSpy: ReturnType<typeof jest.spyOn>;
 
   beforeEach(() => {
     inputs = {};

@@ -1,4 +1,4 @@
-import {MockGlobber} from './glob-mock';
+import {MockGlobber} from './glob-mock.js';
 
 describe('mocked globber tests', () => {
   it('globber should return generator', async () => {

@@ -8,9 +8,9 @@ import fs from 'fs';
 import cp from 'child_process';
 import osm from 'os';
 import path from 'path';
-import * as main from '../src/main';
-import * as auth from '../src/authutil';
-import {INodeVersion} from '../src/distributions/base-models';
+import * as main from '../src/main.js';
+import * as auth from '../src/authutil.js';
+import {INodeVersion} from '../src/distributions/base-models.js';
 
 import nodeTestManifest from './data/versions-manifest.json';
 import nodeTestDist from './data/node-dist-index.json';

@@ -8,9 +8,9 @@ import fs from 'fs';
 import cp from 'child_process';
 import osm from 'os';
 import path from 'path';
-import * as main from '../src/main';
-import * as auth from '../src/authutil';
-import {INodeVersion} from '../src/distributions/base-models';
+import * as main from '../src/main.js';
+import * as auth from '../src/authutil.js';
+import {INodeVersion} from '../src/distributions/base-models.js';
 
 import nodeTestDist from './data/node-dist-index.json';
 import nodeTestDistNightly from './data/node-nightly-index.json';

449  dist/cache-save/101.index.js  (vendored, new file)
@@ -0,0 +1,449 @@
export const id = 101;
export const ids = [101];
export const modules = {

/***/ 9101:
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */   toFormData: () => (/* binding */ toFormData)
/* harmony export */ });
/* harmony import */ var fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(9802);
/* harmony import */ var formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(3018);



let s = 0;
const S = {
  START_BOUNDARY: s++,
  HEADER_FIELD_START: s++,
  HEADER_FIELD: s++,
  HEADER_VALUE_START: s++,
  HEADER_VALUE: s++,
  HEADER_VALUE_ALMOST_DONE: s++,
  HEADERS_ALMOST_DONE: s++,
  PART_DATA_START: s++,
  PART_DATA: s++,
  END: s++
};

let f = 1;
const F = {
  PART_BOUNDARY: f,
  LAST_BOUNDARY: f *= 2
};

const LF = 10;
const CR = 13;
const SPACE = 32;
const HYPHEN = 45;
const COLON = 58;
const A = 97;
const Z = 122;

const lower = c => c | 0x20;

const noop = () => {};

class MultipartParser {
  /**
   * @param {string} boundary
   */
  constructor(boundary) {
    this.index = 0;
    this.flags = 0;

    this.onHeaderEnd = noop;
    this.onHeaderField = noop;
    this.onHeadersEnd = noop;
    this.onHeaderValue = noop;
    this.onPartBegin = noop;
    this.onPartData = noop;
    this.onPartEnd = noop;

    this.boundaryChars = {};

    boundary = '\r\n--' + boundary;
    const ui8a = new Uint8Array(boundary.length);
    for (let i = 0; i < boundary.length; i++) {
      ui8a[i] = boundary.charCodeAt(i);
      this.boundaryChars[ui8a[i]] = true;
    }

    this.boundary = ui8a;
    this.lookbehind = new Uint8Array(this.boundary.length + 8);
    this.state = S.START_BOUNDARY;
  }

  /**
   * @param {Uint8Array} data
   */
  write(data) {
    let i = 0;
    const length_ = data.length;
    let previousIndex = this.index;
    let {lookbehind, boundary, boundaryChars, index, state, flags} = this;
    const boundaryLength = this.boundary.length;
    const boundaryEnd = boundaryLength - 1;
    const bufferLength = data.length;
    let c;
    let cl;

    const mark = name => {
      this[name + 'Mark'] = i;
    };

    const clear = name => {
      delete this[name + 'Mark'];
    };

    const callback = (callbackSymbol, start, end, ui8a) => {
      if (start === undefined || start !== end) {
        this[callbackSymbol](ui8a && ui8a.subarray(start, end));
      }
    };

    const dataCallback = (name, clear) => {
      const markSymbol = name + 'Mark';
      if (!(markSymbol in this)) {
        return;
      }

      if (clear) {
        callback(name, this[markSymbol], i, data);
        delete this[markSymbol];
      } else {
        callback(name, this[markSymbol], data.length, data);
        this[markSymbol] = 0;
      }
    };

    for (i = 0; i < length_; i++) {
      c = data[i];

      switch (state) {
        case S.START_BOUNDARY:
          if (index === boundary.length - 2) {
            if (c === HYPHEN) {
              flags |= F.LAST_BOUNDARY;
            } else if (c !== CR) {
              return;
            }

            index++;
            break;
          } else if (index - 1 === boundary.length - 2) {
            if (flags & F.LAST_BOUNDARY && c === HYPHEN) {
              state = S.END;
              flags = 0;
            } else if (!(flags & F.LAST_BOUNDARY) && c === LF) {
              index = 0;
              callback('onPartBegin');
              state = S.HEADER_FIELD_START;
            } else {
              return;
            }

            break;
          }

          if (c !== boundary[index + 2]) {
            index = -2;
          }

          if (c === boundary[index + 2]) {
            index++;
          }

          break;
        case S.HEADER_FIELD_START:
          state = S.HEADER_FIELD;
          mark('onHeaderField');
          index = 0;
          // falls through
        case S.HEADER_FIELD:
          if (c === CR) {
            clear('onHeaderField');
            state = S.HEADERS_ALMOST_DONE;
            break;
          }

          index++;
          if (c === HYPHEN) {
            break;
          }

          if (c === COLON) {
            if (index === 1) {
              // empty header field
              return;
            }

            dataCallback('onHeaderField', true);
            state = S.HEADER_VALUE_START;
            break;
          }

          cl = lower(c);
          if (cl < A || cl > Z) {
            return;
          }

          break;
        case S.HEADER_VALUE_START:
          if (c === SPACE) {
            break;
          }

          mark('onHeaderValue');
          state = S.HEADER_VALUE;
          // falls through
        case S.HEADER_VALUE:
          if (c === CR) {
            dataCallback('onHeaderValue', true);
            callback('onHeaderEnd');
            state = S.HEADER_VALUE_ALMOST_DONE;
          }

          break;
        case S.HEADER_VALUE_ALMOST_DONE:
          if (c !== LF) {
            return;
          }

          state = S.HEADER_FIELD_START;
          break;
        case S.HEADERS_ALMOST_DONE:
          if (c !== LF) {
            return;
          }

          callback('onHeadersEnd');
          state = S.PART_DATA_START;
          break;
        case S.PART_DATA_START:
          state = S.PART_DATA;
          mark('onPartData');
          // falls through
        case S.PART_DATA:
          previousIndex = index;

          if (index === 0) {
            // boyer-moore derrived algorithm to safely skip non-boundary data
            i += boundaryEnd;
            while (i < bufferLength && !(data[i] in boundaryChars)) {
              i += boundaryLength;
            }

            i -= boundaryEnd;
            c = data[i];
          }

          if (index < boundary.length) {
            if (boundary[index] === c) {
              if (index === 0) {
                dataCallback('onPartData', true);
              }

              index++;
            } else {
              index = 0;
            }
          } else if (index === boundary.length) {
            index++;
            if (c === CR) {
              // CR = part boundary
              flags |= F.PART_BOUNDARY;
            } else if (c === HYPHEN) {
              // HYPHEN = end boundary
              flags |= F.LAST_BOUNDARY;
            } else {
              index = 0;
            }
          } else if (index - 1 === boundary.length) {
            if (flags & F.PART_BOUNDARY) {
              index = 0;
              if (c === LF) {
                // unset the PART_BOUNDARY flag
                flags &= ~F.PART_BOUNDARY;
                callback('onPartEnd');
                callback('onPartBegin');
                state = S.HEADER_FIELD_START;
                break;
              }
            } else if (flags & F.LAST_BOUNDARY) {
              if (c === HYPHEN) {
                callback('onPartEnd');
                state = S.END;
                flags = 0;
              } else {
                index = 0;
              }
            } else {
              index = 0;
            }
          }

          if (index > 0) {
            // when matching a possible boundary, keep a lookbehind reference
            // in case it turns out to be a false lead
            lookbehind[index - 1] = c;
          } else if (previousIndex > 0) {
            // if our boundary turned out to be rubbish, the captured lookbehind
            // belongs to partData
            const _lookbehind = new Uint8Array(lookbehind.buffer, lookbehind.byteOffset, lookbehind.byteLength);
            callback('onPartData', 0, previousIndex, _lookbehind);
            previousIndex = 0;
            mark('onPartData');

            // reconsider the current character even so it interrupted the sequence
            // it could be the beginning of a new sequence
            i--;
          }

          break;
        case S.END:
          break;
        default:
          throw new Error(`Unexpected state entered: ${state}`);
      }
    }

    dataCallback('onHeaderField');
    dataCallback('onHeaderValue');
    dataCallback('onPartData');

    // Update properties for the next call
    this.index = index;
    this.state = state;
    this.flags = flags;
  }

  end() {
    if ((this.state === S.HEADER_FIELD_START && this.index === 0) ||
        (this.state === S.PART_DATA && this.index === this.boundary.length)) {
      this.onPartEnd();
    } else if (this.state !== S.END) {
      throw new Error('MultipartParser.end(): stream ended unexpectedly');
    }
  }
}

function _fileName(headerValue) {
  // matches either a quoted-string or a token (RFC 2616 section 19.5.1)
  const m = headerValue.match(/\bfilename=("(.*?)"|([^()<>@,;:\\"/[\]?={}\s\t]+))($|;\s)/i);
  if (!m) {
    return;
  }

  const match = m[2] || m[3] || '';
  let filename = match.slice(match.lastIndexOf('\\') + 1);
  filename = filename.replace(/%22/g, '"');
  filename = filename.replace(/&#(\d{4});/g, (m, code) => {
    return String.fromCharCode(code);
  });
  return filename;
}

async function toFormData(Body, ct) {
  if (!/multipart/i.test(ct)) {
    throw new TypeError('Failed to fetch');
  }

  const m = ct.match(/boundary=(?:"([^"]+)"|([^;]+))/i);

  if (!m) {
    throw new TypeError('no or bad content-type header, no multipart boundary');
  }

  const parser = new MultipartParser(m[1] || m[2]);

  let headerField;
  let headerValue;
  let entryValue;
  let entryName;
  let contentType;
  let filename;
  const entryChunks = [];
  const formData = new formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__/* .FormData */ .fS();

  const onPartData = ui8a => {
    entryValue += decoder.decode(ui8a, {stream: true});
  };

  const appendToFile = ui8a => {
    entryChunks.push(ui8a);
  };

  const appendFileToFormData = () => {
    const file = new fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__/* .File */ .ZH(entryChunks, filename, {type: contentType});
    formData.append(entryName, file);
  };

  const appendEntryToFormData = () => {
    formData.append(entryName, entryValue);
  };

  const decoder = new TextDecoder('utf-8');
  decoder.decode();

  parser.onPartBegin = function () {
    parser.onPartData = onPartData;
    parser.onPartEnd = appendEntryToFormData;

    headerField = '';
    headerValue = '';
    entryValue = '';
    entryName = '';
    contentType = '';
    filename = null;
    entryChunks.length = 0;
  };

  parser.onHeaderField = function (ui8a) {
    headerField += decoder.decode(ui8a, {stream: true});
  };

  parser.onHeaderValue = function (ui8a) {
    headerValue += decoder.decode(ui8a, {stream: true});
  };

  parser.onHeaderEnd = function () {
    headerValue += decoder.decode();
    headerField = headerField.toLowerCase();

    if (headerField === 'content-disposition') {
      // matches either a quoted-string or a token (RFC 2616 section 19.5.1)
      const m = headerValue.match(/\bname=("([^"]*)"|([^()<>@,;:\\"/[\]?={}\s\t]+))/i);

      if (m) {
        entryName = m[2] || m[3] || '';
      }

      filename = _fileName(headerValue);

      if (filename) {
        parser.onPartData = appendToFile;
        parser.onPartEnd = appendFileToFormData;
      }
    } else if (headerField === 'content-type') {
      contentType = headerValue;
    }

    headerValue = '';
    headerField = '';
  };

  for await (const chunk of Body) {
    parser.write(chunk);
  }

  parser.end();

  return formData;
}


/***/ })

};

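For orientation, a small sketch (not part of the generated bundle) of how the vendored MultipartParser above is driven: callbacks are assigned, raw multipart bytes are pushed through write(), and end() checks that the stream terminated cleanly. The boundary and payload below are made up.

// Hypothetical driver for the parser defined above (names reused for illustration only).
const boundary = 'XBOUNDARY';
const parser = new MultipartParser(boundary);
const decoder = new TextDecoder();

parser.onHeaderField = (ui8a: Uint8Array) => console.log('header field:', decoder.decode(ui8a));
parser.onHeaderValue = (ui8a: Uint8Array) => console.log('header value:', decoder.decode(ui8a));
parser.onPartData = (ui8a: Uint8Array) => console.log('part data:', decoder.decode(ui8a));
parser.onPartEnd = () => console.log('part finished');

// A single-field multipart body; the parser expects the leading '--' form of the boundary.
const payload =
  `--${boundary}\r\n` +
  'Content-Disposition: form-data; name="field"\r\n' +
  '\r\n' +
  'value\r\n' +
  `--${boundary}--`;

parser.write(new TextEncoder().encode(payload));
parser.end(); // throws if the stream stopped mid-part

In the bundle itself this wiring is done by the exported toFormData helper, which node-fetch uses when a response body is parsed as multipart form data.
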
12843  dist/cache-save/index.js  (vendored)
File diff suppressed because one or more lines are too long

3  dist/cache-save/package.json  (vendored, new file)
@@ -0,0 +1,3 @@
{
  "type": "module"
}

449  dist/setup/101.index.js  (vendored, new file)
@@ -0,0 +1,449 @@
(Contents are a verbatim duplicate of the dist/cache-save/101.index.js chunk shown above; the same generated webpack module is emitted for the setup entry point.)

BIN  dist/setup/7zr.exe  (vendored, new file; binary file not shown)

18264  dist/setup/index.js  (vendored)
File diff suppressed because one or more lines are too long

3  dist/setup/package.json  (vendored, new file)
@@ -0,0 +1,3 @@
{
  "type": "module"
}

@@ -1,11 +1,25 @@
-module.exports = {
+export default {
   clearMocks: true,
   moduleFileExtensions: ['js', 'ts'],
   testEnvironment: 'node',
   testMatch: ['**/*.test.ts'],
   testRunner: 'jest-circus/runner',
+  extensionsToTreatAsEsm: ['.ts'],
   transform: {
-    '^.+\\.ts$': 'ts-jest'
+    '^.+\\.ts$': ['ts-jest', {
+      useESM: true,
+      tsconfig: {
+        module: 'es2022',
+        target: 'es2022'
+      }
+    }]
   },
+  preset: 'ts-jest/presets/default-esm',
+  moduleNameMapper: {
+    '^(\\.{1,2}/.*)\\.js$': '$1'
+  },
+  transformIgnorePatterns: [
+    'node_modules/(?!(node-fetch|fetch-blob|formdata-polyfill|data-uri-to-buffer)/)'
+  ],
   verbose: true
 }

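A minimal sketch (not part of the commit) of the kind of test file this configuration supports: the @jest/globals import replaces the injected globals, and the moduleNameMapper entry strips the '.js' suffix so that ESM-style relative specifiers written in TypeScript tests resolve back to the .ts sources under ts-jest. The module and helper named below are hypothetical.

// __tests__/example.test.ts (hypothetical)
import {describe, expect, it} from '@jest/globals';
// Written with a '.js' extension as Node ESM requires; the mapper rewrites it to '../src/some-helper'
// so ts-jest can load ../src/some-helper.ts.
import {someHelper} from '../src/some-helper.js';

describe('someHelper', () => {
  it('is callable under the ESM preset', () => {
    expect(typeof someHelper).toBe('function');
  });
});
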
1299  package-lock.json  (generated)
File diff suppressed because it is too large

11  package.json
@@ -2,6 +2,7 @@
   "name": "setup-node",
   "version": "6.0.0",
   "private": true,
+  "type": "module",
   "description": "setup node action",
   "main": "lib/setup-node.js",
   "engines": {
@@ -11,9 +12,9 @@
     "build": "ncc build -o dist/setup src/setup-node.ts && ncc build -o dist/cache-save src/cache-save.ts",
     "format": "prettier --no-error-on-unmatched-pattern --config ./.prettierrc.js --write \"**/*.{ts,yml,yaml}\"",
     "format-check": "prettier --no-error-on-unmatched-pattern --config ./.prettierrc.js --check \"**/*.{ts,yml,yaml}\"",
-    "lint": "eslint --config ./.eslintrc.js \"**/*.ts\"",
-    "lint:fix": "eslint --config ./.eslintrc.js \"**/*.ts\" --fix",
-    "test": "jest --coverage",
+    "lint": "eslint --config ./.eslintrc.cjs \"**/*.ts\"",
+    "lint:fix": "eslint --config ./.eslintrc.cjs \"**/*.ts\" --fix",
+    "test": "node --experimental-vm-modules node_modules/.bin/jest --coverage",
     "pre-checkin": "npm run format && npm run lint:fix && npm run build && npm test"
   },
   "repository": {
@@ -40,6 +41,7 @@
     "uuid": "^11.1.0"
   },
   "devDependencies": {
+    "@jest/globals": "^30.2.0",
     "@types/jest": "^29.5.14",
     "@types/node": "^24.1.0",
     "@types/semver": "^7.5.8",
@@ -56,5 +58,8 @@
     "prettier": "^3.6.2",
     "ts-jest": "^29.4.1",
     "typescript": "^5.4.2"
   },
+  "overrides": {
+    "node-fetch": "^3.3.2"
+  }
 }

@@ -5,13 +5,13 @@ import path from 'path';
 import fs from 'fs';
 import os from 'os';
 
-import {State} from './constants';
+import {State} from './constants.js';
 import {
   getCacheDirectories,
   getPackageManagerInfo,
   repoHasYarnBerryManagedDependencies,
   PackageManagerInfo
-} from './cache-utils';
+} from './cache-utils.js';
 
 export const restoreCache = async (
   packageManager: string,

@@ -1,8 +1,8 @@
 import * as core from '@actions/core';
 import * as cache from '@actions/cache';
 
-import {State} from './constants';
-import {getPackageManagerInfo} from './cache-utils';
+import {State} from './constants.js';
+import {getPackageManagerInfo} from './cache-utils.js';
 
 // Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
 // @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to

@@ -4,7 +4,7 @@ import * as cache from '@actions/cache';
 import * as glob from '@actions/glob';
 import path from 'path';
 import fs from 'fs';
-import {unique} from './util';
+import {unique} from './util.js';
 
 export interface PackageManagerInfo {
   name: string;

@@ -2,8 +2,8 @@ import * as tc from '@actions/tool-cache';
 
 import semver from 'semver';
 
-import BaseDistribution from './base-distribution';
-import {NodeInputs} from './base-models';
+import BaseDistribution from './base-distribution.js';
+import {NodeInputs} from './base-models.js';
 
 export default abstract class BasePrereleaseNodejs extends BaseDistribution {
   protected abstract distribution: string;

@@ -10,8 +10,12 @@ import * as assert from 'assert';
 import * as path from 'path';
 import os from 'os';
 import fs from 'fs';
+import {fileURLToPath} from 'url';
 
-import {NodeInputs, INodeVersion, INodeVersionInfo} from './base-models';
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+
+import {NodeInputs, INodeVersion, INodeVersionInfo} from './base-models.js';
 
 export default abstract class BaseDistribution {
   protected httpClient: hc.HttpClient;

@@ -1,9 +1,9 @@
-import BaseDistribution from './base-distribution';
-import {NodeInputs} from './base-models';
-import NightlyNodejs from './nightly/nightly_builds';
-import OfficialBuilds from './official_builds/official_builds';
-import RcBuild from './rc/rc_builds';
-import CanaryBuild from './v8-canary/canary_builds';
+import BaseDistribution from './base-distribution.js';
+import {NodeInputs} from './base-models.js';
+import NightlyNodejs from './nightly/nightly_builds.js';
+import OfficialBuilds from './official_builds/official_builds.js';
+import RcBuild from './rc/rc_builds.js';
+import CanaryBuild from './v8-canary/canary_builds.js';
 
 enum Distributions {
   DEFAULT = '',

@@ -1,5 +1,5 @@
-import BasePrereleaseNodejs from '../base-distribution-prerelease';
-import {NodeInputs} from '../base-models';
+import BasePrereleaseNodejs from '../base-distribution-prerelease.js';
+import {NodeInputs} from '../base-models.js';
 
 export default class NightlyNodejs extends BasePrereleaseNodejs {
   protected distribution = 'nightly';

@@ -2,8 +2,8 @@ import * as core from '@actions/core';
 import * as tc from '@actions/tool-cache';
 import path from 'path';
 
-import BaseDistribution from '../base-distribution';
-import {NodeInputs, INodeVersion, INodeVersionInfo} from '../base-models';
+import BaseDistribution from '../base-distribution.js';
+import {NodeInputs, INodeVersion, INodeVersionInfo} from '../base-models.js';
 
 interface INodeRelease extends tc.IToolRelease {
   lts?: string;

@@ -1,5 +1,5 @@
-import BaseDistribution from '../base-distribution';
-import {NodeInputs} from '../base-models';
+import BaseDistribution from '../base-distribution.js';
+import {NodeInputs} from '../base-models.js';
 
 export default class RcBuild extends BaseDistribution {
   constructor(nodeInfo: NodeInputs) {

@@ -1,5 +1,5 @@
-import BasePrereleaseNodejs from '../base-distribution-prerelease';
-import {NodeInputs} from '../base-models';
+import BasePrereleaseNodejs from '../base-distribution-prerelease.js';
+import {NodeInputs} from '../base-models.js';
 
 export default class CanaryBuild extends BasePrereleaseNodejs {
   protected distribution = 'v8-canary';

16  src/main.ts
@@ -3,13 +3,17 @@ import * as core from '@actions/core';
 import os from 'os';
 import fs from 'fs';
 
-import * as auth from './authutil';
+import * as auth from './authutil.js';
 import * as path from 'path';
-import {restoreCache} from './cache-restore';
-import {isCacheFeatureAvailable} from './cache-utils';
-import {getNodejsDistribution} from './distributions/installer-factory';
-import {getNodeVersionFromFile, printEnvDetailsAndSetOutput} from './util';
-import {State} from './constants';
+import {restoreCache} from './cache-restore.js';
+import {isCacheFeatureAvailable} from './cache-utils.js';
+import {getNodejsDistribution} from './distributions/installer-factory.js';
+import {getNodeVersionFromFile, printEnvDetailsAndSetOutput} from './util.js';
+import {State} from './constants.js';
+import {fileURLToPath} from 'url';
 
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+
 export async function run() {
   try {

@@ -1,3 +1,3 @@
-import {run} from './main';
+import {run} from './main.js';
 
 run();

@@ -1,14 +1,14 @@
 {
   "compilerOptions": {
-    "target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
-    "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
+    "target": "es2022", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
+    "module": "es2022", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
     "moduleResolution": "node", /* Specify module resolution strategy: 'node' or 'classic'. */
     "outDir": "./lib", /* Redirect output structure to the directory. */
     "rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
     "sourceMap": true,
     "strict": true, /* Enable all strict type-checking options. */
     "noImplicitAny": false, /* Raise error on expressions and declarations with an implied 'any' type. */
     "esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
     "resolveJsonModule": true, /* Allows importing modules with a '.json' extension, which is a common practice in node projects. */
   },
-  "exclude": ["__tests__", "lib", "node_modules"]
+  "exclude": ["lib", "node_modules"]
 }

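With "module": "es2022" the compiler emits import/export statements instead of require calls, and relative specifiers must carry the '.js' extension that the compiled files will have on disk, which is why the imports throughout src/ and __tests__/ were rewritten above. A small sketch under those assumptions (hypothetical files, not from the commit):

// src/example.ts (hypothetical): the specifier names the emitted file, not the source file.
import {greeting} from './strings.js'; // resolved to src/strings.ts at compile time

export function greet(name: string): string {
  return `${greeting}, ${name}!`;
}

// The emitted lib/example.js keeps the statement verbatim:
//   import {greeting} from './strings.js';
// so Node's ES module resolver finds lib/strings.js at runtime.
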