Update build scripts to target es2020

Author: Matt Bierner
Date: 2022-02-28 14:00:17 -08:00
commit 433bf7cd88, parent 1d77856203
17 changed files with 44 additions and 35 deletions
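
The hunks below are the regenerated JavaScript that tsc emits once the build scripts target es2020: object spread is no longer downleveled to Object.assign, and optional chaining / nullish coalescing are no longer expanded into _a temporaries. A minimal before/after sketch of that emit difference, using an illustrative snippet rather than any file from this commit:

// Illustrative TypeScript source (hypothetical, not part of this commit)
const merged = { ...defaults, override: true };
const entry = pkg.browser ?? pkg.main;
const kind = decl.returnType?.typeAnnotation.type;

// Roughly what tsc emits with "target": "es2017" (var _a, _b; is hoisted to the top of the scope)
const merged = Object.assign(Object.assign({}, defaults), { override: true });
const entry = (_a = pkg.browser) !== null && _a !== void 0 ? _a : pkg.main;
const kind = (_b = decl.returnType) === null || _b === void 0 ? void 0 : _b.typeAnnotation.type;

// With "target": "es2020" the syntax is supported natively and passes through unchanged
const merged = { ...defaults, override: true };
const entry = pkg.browser ?? pkg.main;
const kind = decl.returnType?.typeAnnotation.type;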

View File

@@ -42,7 +42,7 @@ async function main() {
const aadCredentials = new identity_1.ClientSecretCredential(process.env['AZURE_TENANT_ID'], process.env['AZURE_CLIENT_ID'], process.env['AZURE_CLIENT_SECRET']);
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], aadCredentials });
const scripts = client.database('builds').container(quality).scripts;
await (0, retry_1.retry)(() => scripts.storedProcedure('createBuild').execute('', [Object.assign(Object.assign({}, build), { _partitionKey: '' })]));
await (0, retry_1.retry)(() => scripts.storedProcedure('createBuild').execute('', [{ ...build, _partitionKey: '' }]));
}
main().then(() => {
console.log('Build successfully created');

View File

@@ -43,7 +43,7 @@ async function mixinClient(quality) {
else {
fancyLog(ansiColors.blue('[mixin]'), 'Inheriting OSS built-in extensions', builtInExtensions.map(e => e.name));
}
return Object.assign(Object.assign({ webBuiltInExtensions: originalProduct.webBuiltInExtensions }, o), { builtInExtensions });
return { webBuiltInExtensions: originalProduct.webBuiltInExtensions, ...o, builtInExtensions };
}))
.pipe(productJsonFilter.restore)
.pipe(es.mapSync((f) => {
@@ -64,7 +64,7 @@ function mixinServer(quality) {
fancyLog(ansiColors.blue('[mixin]'), `Mixing in server:`);
const originalProduct = JSON.parse(fs.readFileSync(path.join(__dirname, '..', '..', 'product.json'), 'utf8'));
const serverProductJson = JSON.parse(fs.readFileSync(serverProductJsonPath, 'utf8'));
fs.writeFileSync('product.json', JSON.stringify(Object.assign(Object.assign({}, originalProduct), serverProductJson), undefined, '\t'));
fs.writeFileSync('product.json', JSON.stringify({ ...originalProduct, ...serverProductJson }, undefined, '\t'));
fancyLog(ansiColors.blue('[mixin]'), 'product.json', ansiColors.green('✔︎'));
}
function main() {

View File

@@ -41,14 +41,26 @@ async function main() {
identity: '99FM488X57',
'gatekeeper-assess': false
};
const appOpts = Object.assign(Object.assign({}, defaultOpts), {
const appOpts = {
...defaultOpts,
// TODO(deepak1556): Incorrectly declared type in electron-osx-sign
ignore: (filePath) => {
return filePath.includes(gpuHelperAppName) ||
filePath.includes(rendererHelperAppName);
} });
const gpuHelperOpts = Object.assign(Object.assign({}, defaultOpts), { app: path.join(appFrameworkPath, gpuHelperAppName), entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist'), 'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist') });
const rendererHelperOpts = Object.assign(Object.assign({}, defaultOpts), { app: path.join(appFrameworkPath, rendererHelperAppName), entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist'), 'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist') });
}
};
const gpuHelperOpts = {
...defaultOpts,
app: path.join(appFrameworkPath, gpuHelperAppName),
entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist'),
'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist'),
};
const rendererHelperOpts = {
...defaultOpts,
app: path.join(appFrameworkPath, rendererHelperAppName),
entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist'),
'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist'),
};
let infoPlistString = await fs.readFile(infoPlistPath, 'utf8');
let infoPlistJson = plist.parse(infoPlistString);
Object.assign(infoPlistJson, {

View File

@@ -18,7 +18,6 @@ const token = process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'
const contentBasePath = 'raw.githubusercontent.com';
const contentFileNames = ['package.json', 'package-lock.json', 'yarn.lock'];
async function downloadExtensionDetails(extension) {
var _a, _b, _c;
const extensionLabel = `${extension.name}@${extension.version}`;
const repository = url.parse(extension.repo).path.substr(1);
const repositoryContentBaseUrl = `https://${token ? `${token}@` : ''}${contentBasePath}/${repository}/v${extension.version}`;
@@ -56,11 +55,11 @@ async function downloadExtensionDetails(extension) {
}
}
// Validation
if (!((_a = results.find(r => r.fileName === 'package.json')) === null || _a === void 0 ? void 0 : _a.body)) {
if (!results.find(r => r.fileName === 'package.json')?.body) {
// throw new Error(`The "package.json" file could not be found for the built-in extension - ${extensionLabel}`);
}
if (!((_b = results.find(r => r.fileName === 'package-lock.json')) === null || _b === void 0 ? void 0 : _b.body) &&
!((_c = results.find(r => r.fileName === 'yarn.lock')) === null || _c === void 0 ? void 0 : _c.body)) {
if (!results.find(r => r.fileName === 'package-lock.json')?.body &&
!results.find(r => r.fileName === 'yarn.lock')?.body) {
// throw new Error(`The "package-lock.json"/"yarn.lock" could not be found for the built-in extension - ${extensionLabel}`);
}
}

View File

@@ -22,10 +22,10 @@ function bundle(entryPoints, config, callback) {
const allMentionedModulesMap = {};
entryPoints.forEach((module) => {
allMentionedModulesMap[module.name] = true;
(module.include || []).forEach(function (includedModule) {
module.include?.forEach(function (includedModule) {
allMentionedModulesMap[includedModule] = true;
});
(module.exclude || []).forEach(function (excludedModule) {
module.exclude?.forEach(function (excludedModule) {
allMentionedModulesMap[excludedModule] = true;
});
});

View File

@@ -38,7 +38,7 @@ function createCompile(src, build, emitError) {
const tsb = require('gulp-tsb');
const sourcemaps = require('gulp-sourcemaps');
const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
const overrideOptions = Object.assign(Object.assign({}, getTypeScriptCompilerOptions(src)), { inlineSources: Boolean(build) });
const overrideOptions = { ...getTypeScriptCompilerOptions(src), inlineSources: Boolean(build) };
if (!build) {
overrideOptions.inlineSourceMap = true;
}

View File

@@ -36,7 +36,7 @@ function asYarnDependency(prefix, tree) {
return { name, version, path: dependencyPath, children };
}
function getYarnProductionDependencies(cwd) {
const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: Object.assign(Object.assign({}, process.env), { NODE_ENV: 'production' }), stdio: [null, null, 'inherit'] });
const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: { ...process.env, NODE_ENV: 'production' }, stdio: [null, null, 'inherit'] });
const match = /^{"type":"tree".*$/m.exec(raw);
if (!match || match.length !== 1) {
throw new Error('Could not parse result of `yarn list --json`');

View File

@@ -40,7 +40,7 @@ const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSyn
function darwinBundleDocumentType(extensions, icon, nameOrSuffix) {
// If given a suffix, generate a name from it. If not given anything, default to 'document'
if (isDocumentSuffix(nameOrSuffix) || !nameOrSuffix) {
nameOrSuffix = icon.charAt(0).toUpperCase() + icon.slice(1) + ' ' + (nameOrSuffix !== null && nameOrSuffix !== void 0 ? nameOrSuffix : 'document');
nameOrSuffix = icon.charAt(0).toUpperCase() + icon.slice(1) + ' ' + (nameOrSuffix ?? 'document');
}
return {
name: nameOrSuffix,

View File

@@ -14,9 +14,8 @@ module.exports = new class ApiLiteralOrTypes {
create(context) {
return {
['TSDeclareFunction Identifier[name=/create.*/]']: (node) => {
var _a;
const decl = node.parent;
if (((_a = decl.returnType) === null || _a === void 0 ? void 0 : _a.typeAnnotation.type) !== experimental_utils_1.AST_NODE_TYPES.TSTypeReference) {
if (decl.returnType?.typeAnnotation.type !== experimental_utils_1.AST_NODE_TYPES.TSTypeReference) {
return;
}
if (decl.returnType.typeAnnotation.typeName.type !== experimental_utils_1.AST_NODE_TYPES.Identifier) {

View File

@@ -25,8 +25,7 @@ module.exports = new (_a = class ApiEventNaming {
const verbs = new Set(config.verbs);
return {
['TSTypeAnnotation TSTypeReference Identifier[name="Event"]']: (node) => {
var _a, _b;
const def = (_b = (_a = node.parent) === null || _a === void 0 ? void 0 : _a.parent) === null || _b === void 0 ? void 0 : _b.parent;
const def = node.parent?.parent?.parent;
const ident = this.getIdent(def);
if (!ident) {
// event on unknown structure...

View File

@@ -17,8 +17,7 @@ module.exports = new (_a = class ApiProviderNaming {
const allowed = new Set(config.allowed);
return {
['TSInterfaceDeclaration[id.name=/.+Provider/] TSMethodSignature']: (node) => {
var _a;
const interfaceName = ((_a = node.parent) === null || _a === void 0 ? void 0 : _a.parent).id.name;
const interfaceName = node.parent?.parent.id.name;
if (allowed.has(interfaceName)) {
// allowed
return;

View File

@@ -115,7 +115,10 @@ function fromLocalWebpack(extensionPath, webpackConfigFileName) {
result.emit('error', compilation.warnings.join('\n'));
}
};
const webpackConfig = Object.assign(Object.assign({}, require(webpackConfigPath)), { mode: 'production' });
const webpackConfig = {
...require(webpackConfigPath),
...{ mode: 'production' }
};
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
return webpackGulp(webpackConfig, webpack, webpackDone)
.pipe(es.through(function (data) {
@@ -411,7 +414,7 @@ async function webpackExtensions(taskName, isWatch, webpackConfigLocations) {
reject();
}
else {
reporter(stats === null || stats === void 0 ? void 0 : stats.toJson());
reporter(stats?.toJson());
}
});
}
@@ -422,7 +425,7 @@ async function webpackExtensions(taskName, isWatch, webpackConfigLocations) {
reject();
}
else {
reporter(stats === null || stats === void 0 ? void 0 : stats.toJson());
reporter(stats?.toJson());
resolve();
}
});

View File

@@ -209,15 +209,14 @@ let hasErrors = false;
function checkFile(program, sourceFile, rule) {
checkNode(sourceFile);
function checkNode(node) {
var _a, _b;
if (node.kind !== ts.SyntaxKind.Identifier) {
return ts.forEachChild(node, checkNode); // recurse down
}
const text = node.getText(sourceFile);
if ((_a = rule.allowedTypes) === null || _a === void 0 ? void 0 : _a.some(allowed => allowed === text)) {
if (rule.allowedTypes?.some(allowed => allowed === text)) {
return; // override
}
if ((_b = rule.disallowedTypes) === null || _b === void 0 ? void 0 : _b.some(disallowed => disallowed === text)) {
if (rule.disallowedTypes?.some(disallowed => disallowed === text)) {
const { line, character } = sourceFile.getLineAndCharacterOfPosition(node.getStart());
console.log(`[build/lib/layersChecker.ts]: Reference to '${text}' violates layer '${rule.target}' (${sourceFile.fileName} (${line + 1},${character + 1})`);
hasErrors = true;

View File

@@ -13,7 +13,7 @@ const rootDir = path.resolve(__dirname, '..', '..');
function runProcess(command, args = []) {
return new Promise((resolve, reject) => {
const child = (0, child_process_1.spawn)(command, args, { cwd: rootDir, stdio: 'inherit', env: process.env });
child.on('exit', err => !err ? resolve() : process.exit(err !== null && err !== void 0 ? err : 1));
child.on('exit', err => !err ? resolve() : process.exit(err ?? 1));
child.on('error', reject);
});
}
@@ -22,7 +22,7 @@ async function exists(subdir) {
await fs_1.promises.stat(path.join(rootDir, subdir));
return true;
}
catch (_a) {
catch {
return false;
}
}

View File

@@ -27,7 +27,6 @@ function writeFile(filePath, contents) {
fs.writeFileSync(filePath, contents);
}
function extractEditor(options) {
var _a;
const ts = require('typescript');
const tsConfig = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, 'tsconfig.monaco.json')).toString());
let compilerOptions;
@@ -49,7 +48,7 @@ function extractEditor(options) {
// Take the extra included .d.ts files from `tsconfig.monaco.json`
options.typings = tsConfig.include.filter(includedFile => /\.d\.ts$/.test(includedFile));
// Add extra .d.ts files from `node_modules/@types/`
if (Array.isArray((_a = options.compilerOptions) === null || _a === void 0 ? void 0 : _a.types)) {
if (Array.isArray(options.compilerOptions?.types)) {
options.compilerOptions.types.forEach((type) => {
options.typings.push(`../node_modules/@types/${type}/index.d.ts`);
});

View File

@@ -304,7 +304,6 @@ function getElectronVersion() {
}
exports.getElectronVersion = getElectronVersion;
function acquireWebNodePaths() {
var _a;
const root = path.join(__dirname, '..', '..');
const webPackageJSON = path.join(root, '/remote/web', 'package.json');
const webPackages = JSON.parse(fs.readFileSync(webPackageJSON, 'utf8')).dependencies;
@@ -312,7 +311,7 @@ function acquireWebNodePaths() {
for (const key of Object.keys(webPackages)) {
const packageJSON = path.join(root, 'node_modules', key, 'package.json');
const packageData = JSON.parse(fs.readFileSync(packageJSON, 'utf8'));
let entryPoint = (_a = packageData.browser) !== null && _a !== void 0 ? _a : packageData.main;
let entryPoint = packageData.browser ?? packageData.main;
// On rare cases a package doesn't have an entrypoint so we assume it has a dist folder with a min.js
if (!entryPoint) {
// TODO @lramos15 remove this when jschardet adds an entrypoint so we can warn on all packages w/out entrypoint

View File

@@ -1,6 +1,7 @@
{
"compilerOptions": {
"target": "es2017",
"target": "es2020",
"lib": ["ES2020"],
"module": "commonjs",
"removeComments": false,
"preserveConstEnums": true,