diff --git a/.eslint-plugin-local/tsconfig.json b/.eslint-plugin-local/tsconfig.json index 4f199170a11..0de6dacc146 100644 --- a/.eslint-plugin-local/tsconfig.json +++ b/.eslint-plugin-local/tsconfig.json @@ -8,13 +8,13 @@ "module": "esnext", "allowImportingTsExtensions": true, "erasableSyntaxOnly": true, + "verbatimModuleSyntax": true, "noEmit": true, "strict": true, "exactOptionalPropertyTypes": false, "useUnknownInCatchVariables": false, "noUnusedLocals": true, "noUnusedParameters": true, - "newLine": "lf", "typeRoots": [ "." ] diff --git a/.github/workflows/copilot-setup-steps.yml b/.github/workflows/copilot-setup-steps.yml index 1b0af580378..0024456b4df 100644 --- a/.github/workflows/copilot-setup-steps.yml +++ b/.github/workflows/copilot-setup-steps.yml @@ -51,7 +51,7 @@ jobs: sudo service xvfb start - name: Prepare node_modules cache key - run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js linux x64 $(node -p process.arch) > .build/packagelockhash + run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts linux x64 $(node -p process.arch) > .build/packagelockhash - name: Restore node_modules cache id: cache-node-modules @@ -107,7 +107,7 @@ jobs: if: steps.cache-node-modules.outputs.cache-hit != 'true' run: | set -e - node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt + node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt mkdir -p .build/node_modules_cache tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt @@ -115,7 +115,7 @@ jobs: run: mkdir -p .build - name: Prepare built-in extensions cache key - run: node build/azure-pipelines/common/computeBuiltInDepsCacheKey.js > .build/builtindepshash + run: node build/azure-pipelines/common/computeBuiltInDepsCacheKey.ts > .build/builtindepshash - name: Restore built-in extensions cache id: cache-builtin-extensions @@ -127,7 +127,7 @@ jobs: - name: Download built-in extensions if: steps.cache-builtin-extensions.outputs.cache-hit != 'true' - run: node build/lib/builtInExtensions.js + run: node build/lib/builtInExtensions.ts env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/monaco-editor.yml b/.github/workflows/monaco-editor.yml index b1d462546ac..f574aab1c7e 100644 --- a/.github/workflows/monaco-editor.yml +++ b/.github/workflows/monaco-editor.yml @@ -29,7 +29,7 @@ jobs: - name: Compute node modules cache key id: nodeModulesCacheKey - run: echo "value=$(node build/azure-pipelines/common/computeNodeModulesCacheKey.js)" >> $GITHUB_OUTPUT + run: echo "value=$(node build/azure-pipelines/common/computeNodeModulesCacheKey.ts)" >> $GITHUB_OUTPUT - name: Cache node modules id: cacheNodeModules uses: actions/cache@v4 diff --git a/.github/workflows/pr-darwin-test.yml b/.github/workflows/pr-darwin-test.yml index e48140b9569..685bab2cf37 100644 --- a/.github/workflows/pr-darwin-test.yml +++ b/.github/workflows/pr-darwin-test.yml @@ -32,7 +32,7 @@ jobs: node-version-file: .nvmrc - name: Prepare node_modules cache key - run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js darwin $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash + run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts darwin $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash - name: Restore node_modules cache id: cache-node-modules @@ -77,7 +77,7 @@ jobs: if: steps.cache-node-modules.outputs.cache-hit != 'true' 
run: | set -e - node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt + node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt mkdir -p .build/node_modules_cache tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt @@ -85,7 +85,7 @@ jobs: run: mkdir -p .build - name: Prepare built-in extensions cache key - run: node build/azure-pipelines/common/computeBuiltInDepsCacheKey.js > .build/builtindepshash + run: node build/azure-pipelines/common/computeBuiltInDepsCacheKey.ts > .build/builtindepshash - name: Restore built-in extensions cache id: cache-builtin-extensions @@ -97,7 +97,7 @@ jobs: - name: Download built-in extensions if: steps.cache-builtin-extensions.outputs.cache-hit != 'true' - run: node build/lib/builtInExtensions.js + run: node build/lib/builtInExtensions.ts env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/pr-linux-test.yml b/.github/workflows/pr-linux-test.yml index 992be267cf9..694c456b5a3 100644 --- a/.github/workflows/pr-linux-test.yml +++ b/.github/workflows/pr-linux-test.yml @@ -49,7 +49,7 @@ jobs: sudo service xvfb start - name: Prepare node_modules cache key - run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js linux $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash + run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts linux $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash - name: Restore node_modules cache id: cache-node-modules @@ -105,7 +105,7 @@ jobs: if: steps.cache-node-modules.outputs.cache-hit != 'true' run: | set -e - node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt + node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt mkdir -p .build/node_modules_cache tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt @@ -113,7 +113,7 @@ jobs: run: mkdir -p .build - name: Prepare built-in extensions cache key - run: node build/azure-pipelines/common/computeBuiltInDepsCacheKey.js > .build/builtindepshash + run: node build/azure-pipelines/common/computeBuiltInDepsCacheKey.ts > .build/builtindepshash - name: Restore built-in extensions cache id: cache-builtin-extensions @@ -125,7 +125,7 @@ jobs: - name: Download built-in extensions if: steps.cache-builtin-extensions.outputs.cache-hit != 'true' - run: node build/lib/builtInExtensions.js + run: node build/lib/builtInExtensions.ts env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/pr-node-modules.yml b/.github/workflows/pr-node-modules.yml index fc7497aa3f6..ce99efd7a97 100644 --- a/.github/workflows/pr-node-modules.yml +++ b/.github/workflows/pr-node-modules.yml @@ -21,7 +21,7 @@ jobs: node-version-file: .nvmrc - name: Prepare node_modules cache key - run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js compile $(node -p process.arch) > .build/packagelockhash + run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts compile $(node -p process.arch) > .build/packagelockhash - name: Restore node_modules cache id: cache-node-modules @@ -60,7 +60,7 @@ jobs: if: steps.cache-node-modules.outputs.cache-hit != 'true' run: | set -e - node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt + node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt mkdir -p .build/node_modules_cache tar -czf 
.build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt @@ -68,7 +68,7 @@ jobs: run: | set -e mkdir -p .build - node build/azure-pipelines/common/computeBuiltInDepsCacheKey.js > .build/builtindepshash + node build/azure-pipelines/common/computeBuiltInDepsCacheKey.ts > .build/builtindepshash - name: Restore built-in extensions cache id: cache-builtin-extensions @@ -80,7 +80,7 @@ jobs: - name: Download built-in extensions if: steps.cache-builtin-extensions.outputs.cache-hit != 'true' - run: node build/lib/builtInExtensions.js + run: node build/lib/builtInExtensions.ts env: GITHUB_TOKEN: ${{ secrets.VSCODE_OSS }} @@ -100,7 +100,7 @@ jobs: node-version-file: .nvmrc - name: Prepare node_modules cache key - run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js linux $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash + run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts linux $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash - name: Restore node_modules cache id: cache-node-modules @@ -152,7 +152,7 @@ jobs: if: steps.cache-node-modules.outputs.cache-hit != 'true' run: | set -e - node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt + node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt mkdir -p .build/node_modules_cache tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt @@ -172,7 +172,7 @@ jobs: node-version-file: .nvmrc - name: Prepare node_modules cache key - run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js darwin $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash + run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts darwin $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash - name: Restore node_modules cache id: cache-node-modules @@ -213,7 +213,7 @@ jobs: if: steps.cache-node-modules.outputs.cache-hit != 'true' run: | set -e - node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt + node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt mkdir -p .build/node_modules_cache tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt @@ -236,7 +236,7 @@ jobs: shell: pwsh run: | mkdir .build -ea 0 - node build/azure-pipelines/common/computeNodeModulesCacheKey.js win32 ${{ env.VSCODE_ARCH }} $(node -p process.arch) > .build/packagelockhash + node build/azure-pipelines/common/computeNodeModulesCacheKey.ts win32 ${{ env.VSCODE_ARCH }} $(node -p process.arch) > .build/packagelockhash - name: Restore node_modules cache uses: actions/cache@v4 @@ -280,6 +280,6 @@ jobs: run: | . 
build/azure-pipelines/win32/exec.ps1 $ErrorActionPreference = "Stop" - exec { node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt } + exec { node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt } exec { mkdir -Force .build/node_modules_cache } exec { 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt } diff --git a/.github/workflows/pr-win32-test.yml b/.github/workflows/pr-win32-test.yml index ec2baa2f5b9..99c2c70b158 100644 --- a/.github/workflows/pr-win32-test.yml +++ b/.github/workflows/pr-win32-test.yml @@ -35,7 +35,7 @@ jobs: shell: pwsh run: | mkdir .build -ea 0 - node build/azure-pipelines/common/computeNodeModulesCacheKey.js win32 ${{ env.VSCODE_ARCH }} $(node -p process.arch) > .build/packagelockhash + node build/azure-pipelines/common/computeNodeModulesCacheKey.ts win32 ${{ env.VSCODE_ARCH }} $(node -p process.arch) > .build/packagelockhash - name: Restore node_modules cache uses: actions/cache/restore@v4 @@ -84,7 +84,7 @@ jobs: run: | . build/azure-pipelines/win32/exec.ps1 $ErrorActionPreference = "Stop" - exec { node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt } + exec { node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt } exec { mkdir -Force .build/node_modules_cache } exec { 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt } @@ -94,7 +94,7 @@ jobs: - name: Prepare built-in extensions cache key shell: pwsh - run: node build/azure-pipelines/common/computeBuiltInDepsCacheKey.js > .build/builtindepshash + run: node build/azure-pipelines/common/computeBuiltInDepsCacheKey.ts > .build/builtindepshash - name: Restore built-in extensions cache id: cache-builtin-extensions @@ -106,7 +106,7 @@ jobs: - name: Download built-in extensions if: steps.cache-builtin-extensions.outputs.cache-hit != 'true' - run: node build/lib/builtInExtensions.js + run: node build/lib/builtInExtensions.ts env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml index 07186308186..b0b2ed66321 100644 --- a/.github/workflows/pr.yml +++ b/.github/workflows/pr.yml @@ -29,7 +29,7 @@ jobs: node-version-file: .nvmrc - name: Prepare node_modules cache key - run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js compile $(node -p process.arch) > .build/packagelockhash + run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts compile $(node -p process.arch) > .build/packagelockhash - name: Restore node_modules cache id: cache-node-modules @@ -68,7 +68,7 @@ jobs: if: steps.cache-node-modules.outputs.cache-hit != 'true' run: | set -e - node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt + node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt mkdir -p .build/node_modules_cache tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt diff --git a/.vscode/extensions.json b/.vscode/extensions.json index 85bbd28a4d8..3fb87652c81 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -8,6 +8,7 @@ "ms-vscode.vscode-github-issue-notebooks", "ms-vscode.extension-test-runner", "jrieken.vscode-pr-pinger", - "typescriptteam.native-preview" + "typescriptteam.native-preview", + "ms-vscode.ts-customized-language-service" ] } diff --git a/.vscode/launch.json b/.vscode/launch.json index 07407e53ab6..a7a15cc31a6 100644 --- 
a/.vscode/launch.json +++ b/.vscode/launch.json @@ -282,7 +282,7 @@ // To debug observables you also need the extension "ms-vscode.debug-value-editor" "type": "chrome", "request": "launch", - "name": "Launch VS Code Internal (Dev Debug)", + "name": "Launch VS Code Internal (Hot Reload)", "windows": { "runtimeExecutable": "${workspaceFolder}/scripts/code.bat" }, @@ -298,7 +298,10 @@ "VSCODE_EXTHOST_WILL_SEND_SOCKET": null, "VSCODE_SKIP_PRELAUNCH": "1", "VSCODE_DEV_DEBUG": "1", + "VSCODE_DEV_SERVER_URL": "http://localhost:5199/build/vite/workbench-vite-electron.html", + "DEV_WINDOW_SRC": "http://localhost:5199/build/vite/workbench-vite-electron.html", "VSCODE_DEV_DEBUG_OBSERVABLES": "1", + "VSCODE_DEV": "1" }, "cleanUp": "wholeBrowser", "runtimeArgs": [ @@ -322,6 +325,7 @@ "presentation": { "hidden": true, }, + "preLaunchTask": "Launch Monaco Editor Vite" }, { "type": "node", @@ -591,7 +595,7 @@ "name": "Monaco Editor - Playground", "type": "chrome", "request": "launch", - "url": "https://microsoft.github.io/monaco-editor/playground.html?source=http%3A%2F%2Flocalhost%3A5199%2Fbuild%2Fmonaco-editor-playground%2Findex.ts%3Fesm#example-creating-the-editor-hello-world", + "url": "https://microsoft.github.io/monaco-editor/playground.html?source=http%3A%2F%2Flocalhost%3A5199%2Fbuild%2Fvite%2Findex.ts%3Fesm#example-creating-the-editor-hello-world", "preLaunchTask": "Launch Monaco Editor Vite", "presentation": { "group": "monaco", @@ -602,7 +606,7 @@ "name": "Monaco Editor - Self Contained Diff Editor", "type": "chrome", "request": "launch", - "url": "http://localhost:5199/build/monaco-editor-playground/index.html", + "url": "http://localhost:5199/build/vite/index.html", "preLaunchTask": "Launch Monaco Editor Vite", "presentation": { "group": "monaco", @@ -613,7 +617,7 @@ "name": "Monaco Editor - Workbench", "type": "chrome", "request": "launch", - "url": "http://localhost:5199/build/monaco-editor-playground/workbench-vite.html", + "url": "http://localhost:5199/build/vite/workbench-vite.html", "preLaunchTask": "Launch Monaco Editor Vite", "presentation": { "group": "monaco", @@ -638,10 +642,10 @@ } }, { - "name": "VS Code (Debug Observables)", + "name": "VS Code (Hot Reload)", "stopAll": true, "configurations": [ - "Launch VS Code Internal (Dev Debug)", + "Launch VS Code Internal (Hot Reload)", "Attach to Main Process", "Attach to Extension Host", "Attach to Shared Process", diff --git a/.vscode/settings.json b/.vscode/settings.json index 9fc915163ee..8ccf6b95f1d 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -214,5 +214,4 @@ "azureMcp.serverMode": "all", "azureMcp.readOnly": true, "chat.tools.terminal.outputLocation": "none", - "chat.agentSessionsViewLocation": "single-view" } diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 96d5015f4d1..633362dddf1 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -257,7 +257,7 @@ }, { "type": "shell", - "command": "node build/lib/preLaunch.js", + "command": "node build/lib/preLaunch.ts", "label": "Ensure Prelaunch Dependencies", "presentation": { "reveal": "silent", @@ -283,7 +283,7 @@ "type": "shell", "command": "npm run dev", "options": { - "cwd": "./build/monaco-editor-playground/" + "cwd": "./build/vite/" }, "isBackground": true, "problemMatcher": { diff --git a/build/azure-pipelines/alpine/product-build-alpine-node-modules.yml b/build/azure-pipelines/alpine/product-build-alpine-node-modules.yml index d1c6659d197..f1b9fceac83 100644 --- a/build/azure-pipelines/alpine/product-build-alpine-node-modules.yml +++ 
b/build/azure-pipelines/alpine/product-build-alpine-node-modules.yml @@ -33,7 +33,7 @@ jobs: condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) displayName: Setup NPM Registry - - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js alpine $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash + - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts alpine $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash displayName: Prepare node_modules cache key - task: Cache@2 @@ -108,13 +108,13 @@ jobs: displayName: Install dependencies condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - script: node build/azure-pipelines/distro/mixin-npm + - script: node build/azure-pipelines/distro/mixin-npm.ts displayName: Mixin distro node modules condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - script: | set -e - node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt + node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt mkdir -p .build/node_modules_cache tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) diff --git a/build/azure-pipelines/alpine/product-build-alpine.yml b/build/azure-pipelines/alpine/product-build-alpine.yml index c6d5ba27eda..5c33e758802 100644 --- a/build/azure-pipelines/alpine/product-build-alpine.yml +++ b/build/azure-pipelines/alpine/product-build-alpine.yml @@ -77,7 +77,7 @@ jobs: condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) displayName: Setup NPM Registry - - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js alpine $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash + - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts alpine $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash displayName: Prepare node_modules cache key - task: Cache@2 @@ -156,19 +156,19 @@ jobs: displayName: Install dependencies condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - script: node build/azure-pipelines/distro/mixin-npm + - script: node build/azure-pipelines/distro/mixin-npm.ts displayName: Mixin distro node modules condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - script: | set -e - node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt + node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt mkdir -p .build/node_modules_cache tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) displayName: Create node_modules archive - - script: node build/azure-pipelines/distro/mixin-quality + - script: node build/azure-pipelines/distro/mixin-quality.ts displayName: Mixin distro quality - template: ../common/install-builtin-extensions.yml@self diff --git a/build/azure-pipelines/cli/cli-apply-patches.yml b/build/azure-pipelines/cli/cli-apply-patches.yml index 2815124efb6..e04951f3f56 100644 --- a/build/azure-pipelines/cli/cli-apply-patches.yml +++ b/build/azure-pipelines/cli/cli-apply-patches.yml @@ -1,7 +1,7 @@ steps: - template: ../distro/download-distro.yml@self - - script: node build/azure-pipelines/distro/mixin-quality + - script: node 
build/azure-pipelines/distro/mixin-quality.ts displayName: Mixin distro quality - script: node .build/distro/cli-patches/index.js diff --git a/build/azure-pipelines/cli/cli-compile.yml b/build/azure-pipelines/cli/cli-compile.yml index 769a1153bc1..2abefa7b6a4 100644 --- a/build/azure-pipelines/cli/cli-compile.yml +++ b/build/azure-pipelines/cli/cli-compile.yml @@ -35,7 +35,7 @@ steps: set -e if [ -n "$SYSROOT_ARCH" ]; then export VSCODE_SYSROOT_DIR=$(Build.SourcesDirectory)/.build/sysroots - node -e '(async () => { const { getVSCodeSysroot } = require("../build/linux/debian/install-sysroot.js"); await getVSCodeSysroot(process.env["SYSROOT_ARCH"], process.env["IS_MUSL"] === "1"); })()' + node -e 'import { getVSCodeSysroot } from "../build/linux/debian/install-sysroot.ts"; (async () => { await getVSCodeSysroot(process.env["SYSROOT_ARCH"], process.env["IS_MUSL"] === "1"); })()' if [ "$SYSROOT_ARCH" == "arm64" ]; then if [ -n "$IS_MUSL" ]; then export CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER="$VSCODE_SYSROOT_DIR/output/bin/aarch64-linux-musl-gcc" diff --git a/build/azure-pipelines/common/checkForArtifact.js b/build/azure-pipelines/common/checkForArtifact.js deleted file mode 100644 index 899448f78bd..00000000000 --- a/build/azure-pipelines/common/checkForArtifact.js +++ /dev/null @@ -1,34 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -const publish_1 = require("./publish"); -const retry_1 = require("./retry"); -async function getPipelineArtifacts() { - const result = await (0, publish_1.requestAZDOAPI)('artifacts'); - return result.value.filter(a => !/sbom$/.test(a.name)); -} -async function main([variableName, artifactName]) { - if (!variableName || !artifactName) { - throw new Error(`Usage: node checkForArtifact.js <variableName> <artifactName>`); - } - try { - const artifacts = await (0, retry_1.retry)(() => getPipelineArtifacts()); - const artifact = artifacts.find(a => a.name === artifactName); - console.log(`##vso[task.setvariable variable=${variableName}]${artifact ? 'true' : 'false'}`); - } - catch (err) { - console.error(`ERROR: Failed to get pipeline artifacts: ${err}`); - console.log(`##vso[task.setvariable variable=${variableName}]false`); - } -} -main(process.argv.slice(2)) - .then(() => { - process.exit(0); -}, err => { - console.error(err); - process.exit(1); -}); -//# sourceMappingURL=checkForArtifact.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/checkForArtifact.ts b/build/azure-pipelines/common/checkForArtifact.ts index e0a1a2ce1d3..21a30552e58 100644 --- a/build/azure-pipelines/common/checkForArtifact.ts +++ b/build/azure-pipelines/common/checkForArtifact.ts @@ -3,8 +3,8 @@ * Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ -import { Artifact, requestAZDOAPI } from './publish'; -import { retry } from './retry'; +import { type Artifact, requestAZDOAPI } from './publish.ts'; +import { retry } from './retry.ts'; async function getPipelineArtifacts(): Promise<Artifact[]> { const result = await requestAZDOAPI<{ readonly value: Artifact[] }>('artifacts'); @@ -13,7 +13,7 @@ async function main([variableName, artifactName]: string[]): Promise<void> { if (!variableName || !artifactName) { - throw new Error(`Usage: node checkForArtifact.js <variableName> <artifactName>`); + throw new Error(`Usage: node checkForArtifact.ts <variableName> <artifactName>`); } try { diff --git a/build/azure-pipelines/common/codesign.js b/build/azure-pipelines/common/codesign.js deleted file mode 100644 index e3a8f330dcd..00000000000 --- a/build/azure-pipelines/common/codesign.js +++ /dev/null @@ -1,30 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.printBanner = printBanner; -exports.streamProcessOutputAndCheckResult = streamProcessOutputAndCheckResult; -exports.spawnCodesignProcess = spawnCodesignProcess; -const zx_1 = require("zx"); -function printBanner(title) { - title = `${title} (${new Date().toISOString()})`; - console.log('\n'); - console.log('#'.repeat(75)); - console.log(`# ${title.padEnd(71)} #`); - console.log('#'.repeat(75)); - console.log('\n'); -} -async function streamProcessOutputAndCheckResult(name, promise) { - const result = await promise.pipe(process.stdout); - if (result.ok) { - console.log(`\n${name} completed successfully. Duration: ${result.duration} ms`); - return; - } - throw new Error(`${name} failed: ${result.stderr}`); -} -function spawnCodesignProcess(esrpCliDLLPath, type, folder, glob) { - return (0, zx_1.$) `node build/azure-pipelines/common/sign ${esrpCliDLLPath} ${type} ${folder} ${glob}`; -} -//# sourceMappingURL=codesign.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/codesign.ts b/build/azure-pipelines/common/codesign.ts index 9f26b3924b5..4c27048093b 100644 --- a/build/azure-pipelines/common/codesign.ts +++ b/build/azure-pipelines/common/codesign.ts @@ -26,5 +26,5 @@ export async function streamProcessOutputAndCheckResult(name: string, promise: P } export function spawnCodesignProcess(esrpCliDLLPath: string, type: 'sign-windows' | 'sign-windows-appx' | 'sign-pgp' | 'sign-darwin' | 'notarize-darwin', folder: string, glob: string): ProcessPromise { - return $`node build/azure-pipelines/common/sign ${esrpCliDLLPath} ${type} ${folder} ${glob}`; + return $`node build/azure-pipelines/common/sign.ts ${esrpCliDLLPath} ${type} ${folder} ${glob}`; } diff --git a/build/azure-pipelines/common/computeBuiltInDepsCacheKey.js b/build/azure-pipelines/common/computeBuiltInDepsCacheKey.js deleted file mode 100644 index 10fa9087454..00000000000 --- a/build/azure-pipelines/common/computeBuiltInDepsCacheKey.js +++ /dev/null @@ -1,19 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const crypto_1 = __importDefault(require("crypto")); -const productjson = JSON.parse(fs_1.default.readFileSync(path_1.default.join(__dirname, '../../../product.json'), 'utf8')); -const shasum = crypto_1.default.createHash('sha256'); -for (const ext of productjson.builtInExtensions) { - shasum.update(`${ext.name}@${ext.version}`); -} -process.stdout.write(shasum.digest('hex')); -//# sourceMappingURL=computeBuiltInDepsCacheKey.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/computeBuiltInDepsCacheKey.ts b/build/azure-pipelines/common/computeBuiltInDepsCacheKey.ts index 8abaaccb654..8e172ee5ecb 100644 --- a/build/azure-pipelines/common/computeBuiltInDepsCacheKey.ts +++ b/build/azure-pipelines/common/computeBuiltInDepsCacheKey.ts @@ -7,7 +7,7 @@ import fs from 'fs'; import path from 'path'; import crypto from 'crypto'; -const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../../product.json'), 'utf8')); +const productjson = JSON.parse(fs.readFileSync(path.join(import.meta.dirname, '../../../product.json'), 'utf8')); const shasum = crypto.createHash('sha256'); for (const ext of productjson.builtInExtensions) { diff --git a/build/azure-pipelines/common/computeNodeModulesCacheKey.js b/build/azure-pipelines/common/computeNodeModulesCacheKey.js deleted file mode 100644 index c09c13be9d4..00000000000 --- a/build/azure-pipelines/common/computeNodeModulesCacheKey.js +++ /dev/null @@ -1,40 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const crypto_1 = __importDefault(require("crypto")); -const { dirs } = require('../../npm/dirs'); -const ROOT = path_1.default.join(__dirname, '../../../'); -const shasum = crypto_1.default.createHash('sha256'); -shasum.update(fs_1.default.readFileSync(path_1.default.join(ROOT, 'build/.cachesalt'))); -shasum.update(fs_1.default.readFileSync(path_1.default.join(ROOT, '.npmrc'))); -shasum.update(fs_1.default.readFileSync(path_1.default.join(ROOT, 'build', '.npmrc'))); -shasum.update(fs_1.default.readFileSync(path_1.default.join(ROOT, 'remote', '.npmrc'))); -// Add `package.json` and `package-lock.json` files -for (const dir of dirs) { - const packageJsonPath = path_1.default.join(ROOT, dir, 'package.json'); - const packageJson = JSON.parse(fs_1.default.readFileSync(packageJsonPath).toString()); - const relevantPackageJsonSections = { - dependencies: packageJson.dependencies, - devDependencies: packageJson.devDependencies, - optionalDependencies: packageJson.optionalDependencies, - resolutions: packageJson.resolutions, - distro: packageJson.distro - }; - shasum.update(JSON.stringify(relevantPackageJsonSections)); - const packageLockPath = path_1.default.join(ROOT, dir, 'package-lock.json'); - shasum.update(fs_1.default.readFileSync(packageLockPath)); -} -// Add any other command line arguments -for (let i = 2; i < process.argv.length; i++) { - shasum.update(process.argv[i]); -} -process.stdout.write(shasum.digest('hex')); -//# sourceMappingURL=computeNodeModulesCacheKey.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/computeNodeModulesCacheKey.ts b/build/azure-pipelines/common/computeNodeModulesCacheKey.ts index 57b35dc78de..54a5e16bca9 100644 --- a/build/azure-pipelines/common/computeNodeModulesCacheKey.ts +++ b/build/azure-pipelines/common/computeNodeModulesCacheKey.ts @@ -2,13 +2,12 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ - import fs from 'fs'; import path from 'path'; import crypto from 'crypto'; -const { dirs } = require('../../npm/dirs'); +import { dirs } from '../../npm/dirs.js'; -const ROOT = path.join(__dirname, '../../../'); +const ROOT = path.join(import.meta.dirname, '../../../'); const shasum = crypto.createHash('sha256'); diff --git a/build/azure-pipelines/common/createBuild.js b/build/azure-pipelines/common/createBuild.js deleted file mode 100644 index c605ed6218e..00000000000 --- a/build/azure-pipelines/common/createBuild.js +++ /dev/null @@ -1,55 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -const identity_1 = require("@azure/identity"); -const cosmos_1 = require("@azure/cosmos"); -const retry_1 = require("./retry"); -if (process.argv.length !== 3) { - console.error('Usage: node createBuild.js VERSION'); - process.exit(-1); -} -function getEnv(name) { - const result = process.env[name]; - if (typeof result === 'undefined') { - throw new Error('Missing env: ' + name); - } - return result; -} -async function main() { - const [, , _version] = process.argv; - const quality = getEnv('VSCODE_QUALITY'); - const commit = getEnv('BUILD_SOURCEVERSION'); - const queuedBy = getEnv('BUILD_QUEUEDBY'); - const sourceBranch = getEnv('BUILD_SOURCEBRANCH'); - const version = _version + (quality === 'stable' ? '' : `-${quality}`); - console.log('Creating build...'); - console.log('Quality:', quality); - console.log('Version:', version); - console.log('Commit:', commit); - const build = { - id: commit, - timestamp: (new Date()).getTime(), - version, - isReleased: false, - private: process.env['VSCODE_PRIVATE_BUILD']?.toLowerCase() === 'true', - sourceBranch, - queuedBy, - assets: [], - updates: {} - }; - const aadCredentials = new identity_1.ClientAssertionCredential(process.env['AZURE_TENANT_ID'], process.env['AZURE_CLIENT_ID'], () => Promise.resolve(process.env['AZURE_ID_TOKEN'])); - const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], aadCredentials }); - const scripts = client.database('builds').container(quality).scripts; - await (0, retry_1.retry)(() => scripts.storedProcedure('createBuild').execute('', [{ ...build, _partitionKey: '' }])); -} -main().then(() => { - console.log('Build successfully created'); - process.exit(0); -}, err => { - console.error(err); - process.exit(1); -}); -//# sourceMappingURL=createBuild.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/createBuild.ts b/build/azure-pipelines/common/createBuild.ts index 6afeb01e6cc..f477f3cc09e 100644 --- a/build/azure-pipelines/common/createBuild.ts +++ b/build/azure-pipelines/common/createBuild.ts @@ -5,10 +5,10 @@ import { ClientAssertionCredential } from '@azure/identity'; import { CosmosClient } from '@azure/cosmos'; -import { retry } from './retry'; +import { retry } from './retry.ts'; if (process.argv.length !== 3) { - console.error('Usage: node createBuild.js VERSION'); + console.error('Usage: node createBuild.ts VERSION'); process.exit(-1); } diff --git a/build/azure-pipelines/common/getPublishAuthTokens.js b/build/azure-pipelines/common/getPublishAuthTokens.js deleted file mode 100644 index 9c22e9ad94b..00000000000 --- a/build/azure-pipelines/common/getPublishAuthTokens.js +++ /dev/null @@ -1,47 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getAccessToken = getAccessToken; -const msal_node_1 = require("@azure/msal-node"); -function e(name) { - const result = process.env[name]; - if (typeof result !== 'string') { - throw new Error(`Missing env: ${name}`); - } - return result; -} -async function getAccessToken(endpoint, tenantId, clientId, idToken) { - const app = new msal_node_1.ConfidentialClientApplication({ - auth: { - clientId, - authority: `https://login.microsoftonline.com/${tenantId}`, - clientAssertion: idToken - } - }); - const result = await app.acquireTokenByClientCredential({ scopes: [`${endpoint}.default`] }); - if (!result) { - throw new Error('Failed to get access token'); - } - return { - token: result.accessToken, - expiresOnTimestamp: result.expiresOn.getTime(), - refreshAfterTimestamp: result.refreshOn?.getTime() - }; -} -async function main() { - const cosmosDBAccessToken = await getAccessToken(e('AZURE_DOCUMENTDB_ENDPOINT'), e('AZURE_TENANT_ID'), e('AZURE_CLIENT_ID'), e('AZURE_ID_TOKEN')); - const blobServiceAccessToken = await getAccessToken(`https://${e('VSCODE_STAGING_BLOB_STORAGE_ACCOUNT_NAME')}.blob.core.windows.net/`, process.env['AZURE_TENANT_ID'], process.env['AZURE_CLIENT_ID'], process.env['AZURE_ID_TOKEN']); - console.log(JSON.stringify({ cosmosDBAccessToken, blobServiceAccessToken })); -} -if (require.main === module) { - main().then(() => { - process.exit(0); - }, err => { - console.error(err); - process.exit(1); - }); -} -//# sourceMappingURL=getPublishAuthTokens.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/getPublishAuthTokens.ts b/build/azure-pipelines/common/getPublishAuthTokens.ts index 68e76de1a83..2293480b306 100644 --- a/build/azure-pipelines/common/getPublishAuthTokens.ts +++ b/build/azure-pipelines/common/getPublishAuthTokens.ts @@ -3,7 +3,7 @@ * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ -import { AccessToken } from '@azure/core-auth'; +import type { AccessToken } from '@azure/core-auth'; import { ConfidentialClientApplication } from '@azure/msal-node'; function e(name: string): string { @@ -44,7 +44,7 @@ async function main() { console.log(JSON.stringify({ cosmosDBAccessToken, blobServiceAccessToken })); } -if (require.main === module) { +if (import.meta.main) { main().then(() => { process.exit(0); }, err => { diff --git a/build/azure-pipelines/common/install-builtin-extensions.yml b/build/azure-pipelines/common/install-builtin-extensions.yml index c1ee18d05b5..f9cbfd4b085 100644 --- a/build/azure-pipelines/common/install-builtin-extensions.yml +++ b/build/azure-pipelines/common/install-builtin-extensions.yml @@ -7,7 +7,7 @@ steps: condition: and(succeeded(), not(contains(variables['Agent.OS'], 'windows'))) displayName: Create .build folder - - script: node build/azure-pipelines/common/computeBuiltInDepsCacheKey.js > .build/builtindepshash + - script: node build/azure-pipelines/common/computeBuiltInDepsCacheKey.ts > .build/builtindepshash displayName: Prepare built-in extensions cache key - task: Cache@2 @@ -17,7 +17,7 @@ steps: cacheHitVar: BUILTIN_EXTENSIONS_RESTORED displayName: Restore built-in extensions cache - - script: node build/lib/builtInExtensions.js + - script: node build/lib/builtInExtensions.ts env: GITHUB_TOKEN: "$(github-distro-mixin-password)" condition: and(succeeded(), ne(variables.BUILTIN_EXTENSIONS_RESTORED, 'true')) diff --git a/build/azure-pipelines/common/listNodeModules.js b/build/azure-pipelines/common/listNodeModules.js deleted file mode 100644 index 301b5f930b6..00000000000 --- a/build/azure-pipelines/common/listNodeModules.js +++ /dev/null @@ -1,44 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -if (process.argv.length !== 3) { - console.error('Usage: node listNodeModules.js OUTPUT_FILE'); - process.exit(-1); -} -const ROOT = path_1.default.join(__dirname, '../../../'); -function findNodeModulesFiles(location, inNodeModules, result) { - const entries = fs_1.default.readdirSync(path_1.default.join(ROOT, location)); - for (const entry of entries) { - const entryPath = `${location}/${entry}`; - if (/(^\/out)|(^\/src$)|(^\/.git$)|(^\/.build$)/.test(entryPath)) { - continue; - } - let stat; - try { - stat = fs_1.default.statSync(path_1.default.join(ROOT, entryPath)); - } - catch (err) { - continue; - } - if (stat.isDirectory()) { - findNodeModulesFiles(entryPath, inNodeModules || (entry === 'node_modules'), result); - } - else { - if (inNodeModules) { - result.push(entryPath.substr(1)); - } - } - } -} -const result = []; -findNodeModulesFiles('', false, result); -fs_1.default.writeFileSync(process.argv[2], result.join('\n') + '\n'); -//# sourceMappingURL=listNodeModules.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/listNodeModules.ts b/build/azure-pipelines/common/listNodeModules.ts index fb85b25cfd1..5ab955faca4 100644 --- a/build/azure-pipelines/common/listNodeModules.ts +++ b/build/azure-pipelines/common/listNodeModules.ts @@ -7,11 +7,11 @@ import fs from 'fs'; import path from 'path'; if (process.argv.length !== 3) { - console.error('Usage: node listNodeModules.js OUTPUT_FILE'); + console.error('Usage: node listNodeModules.ts OUTPUT_FILE'); process.exit(-1); } -const ROOT = path.join(__dirname, '../../../'); +const ROOT = path.join(import.meta.dirname, '../../../'); function findNodeModulesFiles(location: string, inNodeModules: boolean, result: string[]) { const entries = fs.readdirSync(path.join(ROOT, location)); diff --git a/build/azure-pipelines/common/publish.js b/build/azure-pipelines/common/publish.js deleted file mode 100644 index 49b718344a0..00000000000 --- a/build/azure-pipelines/common/publish.js +++ /dev/null @@ -1,724 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.e = e; -exports.requestAZDOAPI = requestAZDOAPI; -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const stream_1 = require("stream"); -const promises_1 = require("node:stream/promises"); -const yauzl_1 = __importDefault(require("yauzl")); -const crypto_1 = __importDefault(require("crypto")); -const retry_1 = require("./retry"); -const cosmos_1 = require("@azure/cosmos"); -const child_process_1 = __importDefault(require("child_process")); -const os_1 = __importDefault(require("os")); -const node_worker_threads_1 = require("node:worker_threads"); -const msal_node_1 = require("@azure/msal-node"); -const storage_blob_1 = require("@azure/storage-blob"); -const jws_1 = __importDefault(require("jws")); -const node_timers_1 = require("node:timers"); -function e(name) { - const result = process.env[name]; - if (typeof result !== 'string') { - throw new Error(`Missing env: ${name}`); - } - return result; -} -function hashStream(hashName, stream) { - return new Promise((c, e) => { - const shasum = crypto_1.default.createHash(hashName); - stream - .on('data', shasum.update.bind(shasum)) - .on('error', e) - .on('close', () => c(shasum.digest())); - }); -} -var StatusCode; -(function (StatusCode) { - StatusCode["Pass"] = "pass"; - StatusCode["Aborted"] = "aborted"; - StatusCode["Inprogress"] = "inprogress"; - StatusCode["FailCanRetry"] = "failCanRetry"; - StatusCode["FailDoNotRetry"] = "failDoNotRetry"; - StatusCode["PendingAnalysis"] = "pendingAnalysis"; - StatusCode["Cancelled"] = "cancelled"; -})(StatusCode || (StatusCode = {})); -function getCertificateBuffer(input) { - return Buffer.from(input.replace(/-----BEGIN CERTIFICATE-----|-----END CERTIFICATE-----|\n/g, ''), 'base64'); -} -function getThumbprint(input, algorithm) { - const buffer = getCertificateBuffer(input); - return crypto_1.default.createHash(algorithm).update(buffer).digest(); -} -function getKeyFromPFX(pfx) { - const pfxCertificatePath = path_1.default.join(os_1.default.tmpdir(), 'cert.pfx'); - const pemKeyPath = path_1.default.join(os_1.default.tmpdir(), 'key.pem'); - try { - const pfxCertificate = Buffer.from(pfx, 'base64'); - fs_1.default.writeFileSync(pfxCertificatePath, pfxCertificate); - child_process_1.default.execSync(`openssl pkcs12 -in "${pfxCertificatePath}" -nocerts -nodes -out "${pemKeyPath}" -passin pass:`); - const raw = fs_1.default.readFileSync(pemKeyPath, 'utf-8'); - const result = raw.match(/-----BEGIN PRIVATE KEY-----[\s\S]+?-----END PRIVATE KEY-----/g)[0]; - return result; - } - finally { - fs_1.default.rmSync(pfxCertificatePath, { force: true }); - fs_1.default.rmSync(pemKeyPath, { force: true }); - } -} -function getCertificatesFromPFX(pfx) { - const pfxCertificatePath = path_1.default.join(os_1.default.tmpdir(), 'cert.pfx'); - const pemCertificatePath = path_1.default.join(os_1.default.tmpdir(), 'cert.pem'); - try { - const pfxCertificate = Buffer.from(pfx, 'base64'); - fs_1.default.writeFileSync(pfxCertificatePath, pfxCertificate); - child_process_1.default.execSync(`openssl pkcs12 -in "${pfxCertificatePath}" -nokeys -out "${pemCertificatePath}" -passin pass:`); - const raw = fs_1.default.readFileSync(pemCertificatePath, 'utf-8'); - const matches = raw.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g); - return matches ? 
matches.reverse() : []; - } - finally { - fs_1.default.rmSync(pfxCertificatePath, { force: true }); - fs_1.default.rmSync(pemCertificatePath, { force: true }); - } -} -class ESRPReleaseService { - log; - clientId; - accessToken; - requestSigningCertificates; - requestSigningKey; - containerClient; - stagingSasToken; - static async create(log, tenantId, clientId, authCertificatePfx, requestSigningCertificatePfx, containerClient, stagingSasToken) { - const authKey = getKeyFromPFX(authCertificatePfx); - const authCertificate = getCertificatesFromPFX(authCertificatePfx)[0]; - const requestSigningKey = getKeyFromPFX(requestSigningCertificatePfx); - const requestSigningCertificates = getCertificatesFromPFX(requestSigningCertificatePfx); - const app = new msal_node_1.ConfidentialClientApplication({ - auth: { - clientId, - authority: `https://login.microsoftonline.com/${tenantId}`, - clientCertificate: { - thumbprintSha256: getThumbprint(authCertificate, 'sha256').toString('hex'), - privateKey: authKey, - x5c: authCertificate - } - } - }); - const response = await app.acquireTokenByClientCredential({ - scopes: ['https://api.esrp.microsoft.com/.default'] - }); - return new ESRPReleaseService(log, clientId, response.accessToken, requestSigningCertificates, requestSigningKey, containerClient, stagingSasToken); - } - static API_URL = 'https://api.esrp.microsoft.com/api/v3/releaseservices/clients/'; - constructor(log, clientId, accessToken, requestSigningCertificates, requestSigningKey, containerClient, stagingSasToken) { - this.log = log; - this.clientId = clientId; - this.accessToken = accessToken; - this.requestSigningCertificates = requestSigningCertificates; - this.requestSigningKey = requestSigningKey; - this.containerClient = containerClient; - this.stagingSasToken = stagingSasToken; - } - async createRelease(version, filePath, friendlyFileName) { - const correlationId = crypto_1.default.randomUUID(); - const blobClient = this.containerClient.getBlockBlobClient(correlationId); - this.log(`Uploading ${filePath} to ${blobClient.url}`); - await blobClient.uploadFile(filePath); - this.log('Uploaded blob successfully'); - try { - this.log(`Submitting release for ${version}: ${filePath}`); - const submitReleaseResult = await this.submitRelease(version, filePath, friendlyFileName, correlationId, blobClient); - this.log(`Successfully submitted release ${submitReleaseResult.operationId}. 
Polling for completion...`); - // Poll every 5 seconds, wait 60 minutes max -> poll 60/5*60=720 times - for (let i = 0; i < 720; i++) { - await new Promise(c => setTimeout(c, 5000)); - const releaseStatus = await this.getReleaseStatus(submitReleaseResult.operationId); - if (releaseStatus.status === 'pass') { - break; - } - else if (releaseStatus.status === 'aborted') { - this.log(JSON.stringify(releaseStatus)); - throw new Error(`Release was aborted`); - } - else if (releaseStatus.status !== 'inprogress') { - this.log(JSON.stringify(releaseStatus)); - throw new Error(`Unknown error when polling for release`); - } - } - const releaseDetails = await this.getReleaseDetails(submitReleaseResult.operationId); - if (releaseDetails.status !== 'pass') { - throw new Error(`Timed out waiting for release: ${JSON.stringify(releaseDetails)}`); - } - this.log('Successfully created release:', releaseDetails.files[0].fileDownloadDetails[0].downloadUrl); - return releaseDetails.files[0].fileDownloadDetails[0].downloadUrl; - } - finally { - this.log(`Deleting blob ${blobClient.url}`); - await blobClient.delete(); - this.log('Deleted blob successfully'); - } - } - async submitRelease(version, filePath, friendlyFileName, correlationId, blobClient) { - const size = fs_1.default.statSync(filePath).size; - const hash = await hashStream('sha256', fs_1.default.createReadStream(filePath)); - const blobUrl = `${blobClient.url}?${this.stagingSasToken}`; - const message = { - customerCorrelationId: correlationId, - esrpCorrelationId: correlationId, - driEmail: ['joao.moreno@microsoft.com'], - createdBy: { userPrincipalName: 'jomo@microsoft.com' }, - owners: [{ owner: { userPrincipalName: 'jomo@microsoft.com' } }], - approvers: [{ approver: { userPrincipalName: 'jomo@microsoft.com' }, isAutoApproved: true, isMandatory: false }], - releaseInfo: { - title: 'VS Code', - properties: { - 'ReleaseContentType': 'InstallPackage' - }, - minimumNumberOfApprovers: 1 - }, - productInfo: { - name: 'VS Code', - version, - description: 'VS Code' - }, - accessPermissionsInfo: { - mainPublisher: 'VSCode', - channelDownloadEntityDetails: { - AllDownloadEntities: ['VSCode'] - } - }, - routingInfo: { - intent: 'filedownloadlinkgeneration' - }, - files: [{ - name: path_1.default.basename(filePath), - friendlyFileName, - tenantFileLocation: blobUrl, - tenantFileLocationType: 'AzureBlob', - sourceLocation: { - type: 'azureBlob', - blobUrl - }, - hashType: 'sha256', - hash: Array.from(hash), - sizeInBytes: size - }] - }; - message.jwsToken = await this.generateJwsToken(message); - const res = await fetch(`${ESRPReleaseService.API_URL}${this.clientId}/workflows/release/operations`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - 'Authorization': `Bearer ${this.accessToken}` - }, - body: JSON.stringify(message) - }); - if (!res.ok) { - const text = await res.text(); - throw new Error(`Failed to submit release: ${res.statusText}\n${text}`); - } - return await res.json(); - } - async getReleaseStatus(releaseId) { - const url = `${ESRPReleaseService.API_URL}${this.clientId}/workflows/release/operations/grs/${releaseId}`; - const res = await (0, retry_1.retry)(() => fetch(url, { - headers: { - 'Authorization': `Bearer ${this.accessToken}` - } - })); - if (!res.ok) { - const text = await res.text(); - throw new Error(`Failed to get release status: ${res.statusText}\n${text}`); - } - return await res.json(); - } - async getReleaseDetails(releaseId) { - const url = 
`${ESRPReleaseService.API_URL}${this.clientId}/workflows/release/operations/grd/${releaseId}`; - const res = await (0, retry_1.retry)(() => fetch(url, { - headers: { - 'Authorization': `Bearer ${this.accessToken}` - } - })); - if (!res.ok) { - const text = await res.text(); - throw new Error(`Failed to get release status: ${res.statusText}\n${text}`); - } - return await res.json(); - } - async generateJwsToken(message) { - // Create header with properly typed properties, then override x5c with the non-standard string format - const header = { - alg: 'RS256', - crit: ['exp', 'x5t'], - // Release service uses ticks, not seconds :roll_eyes: (https://stackoverflow.com/a/7968483) - exp: ((Date.now() + (6 * 60 * 1000)) * 10000) + 621355968000000000, - // Release service uses hex format, not base64url :roll_eyes: - x5t: getThumbprint(this.requestSigningCertificates[0], 'sha1').toString('hex'), - }; - // The Release service expects x5c as a '.' separated string, not the standard array format - header['x5c'] = this.requestSigningCertificates.map(c => getCertificateBuffer(c).toString('base64url')).join('.'); - return jws_1.default.sign({ - header, - payload: message, - privateKey: this.requestSigningKey, - }); - } -} -class State { - statePath; - set = new Set(); - constructor() { - const pipelineWorkspacePath = e('PIPELINE_WORKSPACE'); - const previousState = fs_1.default.readdirSync(pipelineWorkspacePath) - .map(name => /^artifacts_processed_(\d+)$/.exec(name)) - .filter((match) => !!match) - .map(match => ({ name: match[0], attempt: Number(match[1]) })) - .sort((a, b) => b.attempt - a.attempt)[0]; - if (previousState) { - const previousStatePath = path_1.default.join(pipelineWorkspacePath, previousState.name, previousState.name + '.txt'); - fs_1.default.readFileSync(previousStatePath, 'utf8').split(/\n/).filter(name => !!name).forEach(name => this.set.add(name)); - } - const stageAttempt = e('SYSTEM_STAGEATTEMPT'); - this.statePath = path_1.default.join(pipelineWorkspacePath, `artifacts_processed_${stageAttempt}`, `artifacts_processed_${stageAttempt}.txt`); - fs_1.default.mkdirSync(path_1.default.dirname(this.statePath), { recursive: true }); - fs_1.default.writeFileSync(this.statePath, [...this.set.values()].map(name => `${name}\n`).join('')); - } - get size() { - return this.set.size; - } - has(name) { - return this.set.has(name); - } - add(name) { - this.set.add(name); - fs_1.default.appendFileSync(this.statePath, `${name}\n`); - } - [Symbol.iterator]() { - return this.set[Symbol.iterator](); - } -} -const azdoFetchOptions = { - headers: { - // Pretend we're a web browser to avoid download rate limits - 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36 Edg/119.0.0.0', - 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7', - 'Accept-Encoding': 'gzip, deflate, br', - 'Accept-Language': 'en-US,en;q=0.9', - 'Referer': 'https://dev.azure.com', - Authorization: `Bearer ${e('SYSTEM_ACCESSTOKEN')}` - } -}; -async function requestAZDOAPI(path) { - const abortController = new AbortController(); - const timeout = setTimeout(() => abortController.abort(), 2 * 60 * 1000); - try { - const res = await (0, retry_1.retry)(() => fetch(`${e('BUILDS_API_URL')}${path}?api-version=6.0`, { ...azdoFetchOptions, signal: abortController.signal })); - if (!res.ok) { - throw new Error(`Unexpected status code: ${res.status}`); - } - return await res.json(); 
- } - finally { - clearTimeout(timeout); - } -} -async function getPipelineArtifacts() { - const result = await requestAZDOAPI('artifacts'); - return result.value.filter(a => /^vscode_/.test(a.name) && !/sbom$/.test(a.name)); -} -async function getPipelineTimeline() { - return await requestAZDOAPI('timeline'); -} -async function downloadArtifact(artifact, downloadPath) { - const abortController = new AbortController(); - const timeout = setTimeout(() => abortController.abort(), 4 * 60 * 1000); - try { - const res = await fetch(artifact.resource.downloadUrl, { ...azdoFetchOptions, signal: abortController.signal }); - if (!res.ok) { - throw new Error(`Unexpected status code: ${res.status}`); - } - await (0, promises_1.pipeline)(stream_1.Readable.fromWeb(res.body), fs_1.default.createWriteStream(downloadPath)); - } - finally { - clearTimeout(timeout); - } -} -async function unzip(packagePath, outputPath) { - return new Promise((resolve, reject) => { - yauzl_1.default.open(packagePath, { lazyEntries: true, autoClose: true }, (err, zipfile) => { - if (err) { - return reject(err); - } - const result = []; - zipfile.on('entry', entry => { - if (/\/$/.test(entry.fileName)) { - zipfile.readEntry(); - } - else { - zipfile.openReadStream(entry, (err, istream) => { - if (err) { - return reject(err); - } - const filePath = path_1.default.join(outputPath, entry.fileName); - fs_1.default.mkdirSync(path_1.default.dirname(filePath), { recursive: true }); - const ostream = fs_1.default.createWriteStream(filePath); - ostream.on('finish', () => { - result.push(filePath); - zipfile.readEntry(); - }); - istream?.on('error', err => reject(err)); - istream.pipe(ostream); - }); - } - }); - zipfile.on('close', () => resolve(result)); - zipfile.readEntry(); - }); - }); -} -// Contains all of the logic for mapping details to our actual product names in CosmosDB -function getPlatform(product, os, arch, type) { - switch (os) { - case 'win32': - switch (product) { - case 'client': { - switch (type) { - case 'archive': - return `win32-${arch}-archive`; - case 'setup': - return `win32-${arch}`; - case 'user-setup': - return `win32-${arch}-user`; - default: - throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`); - } - } - case 'server': - return `server-win32-${arch}`; - case 'web': - return `server-win32-${arch}-web`; - case 'cli': - return `cli-win32-${arch}`; - default: - throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`); - } - case 'alpine': - switch (product) { - case 'server': - return `server-alpine-${arch}`; - case 'web': - return `server-alpine-${arch}-web`; - case 'cli': - return `cli-alpine-${arch}`; - default: - throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`); - } - case 'linux': - switch (type) { - case 'snap': - return `linux-snap-${arch}`; - case 'archive-unsigned': - switch (product) { - case 'client': - return `linux-${arch}`; - case 'server': - return `server-linux-${arch}`; - case 'web': - if (arch === 'standalone') { - return 'web-standalone'; - } - return `server-linux-${arch}-web`; - default: - throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`); - } - case 'deb-package': - return `linux-deb-${arch}`; - case 'rpm-package': - return `linux-rpm-${arch}`; - case 'cli': - return `cli-linux-${arch}`; - default: - throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`); - } - case 'darwin': - switch (product) { - case 'client': - if (arch === 'x64') { - return 'darwin'; - } - return `darwin-${arch}`; - case 'server': - if (arch === 
'x64') { - return 'server-darwin'; - } - return `server-darwin-${arch}`; - case 'web': - if (arch === 'x64') { - return 'server-darwin-web'; - } - return `server-darwin-${arch}-web`; - case 'cli': - return `cli-darwin-${arch}`; - default: - throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`); - } - default: - throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`); - } -} -// Contains all of the logic for mapping types to our actual types in CosmosDB -function getRealType(type) { - switch (type) { - case 'user-setup': - return 'setup'; - case 'deb-package': - case 'rpm-package': - return 'package'; - default: - return type; - } -} -async function withLease(client, fn) { - const lease = client.getBlobLeaseClient(); - for (let i = 0; i < 360; i++) { // Try to get lease for 30 minutes - try { - await client.uploadData(new ArrayBuffer()); // blob needs to exist for lease to be acquired - await lease.acquireLease(60); - try { - const abortController = new AbortController(); - const refresher = new Promise((c, e) => { - abortController.signal.onabort = () => { - (0, node_timers_1.clearInterval)(interval); - c(); - }; - const interval = (0, node_timers_1.setInterval)(() => { - lease.renewLease().catch(err => { - (0, node_timers_1.clearInterval)(interval); - e(new Error('Failed to renew lease ' + err)); - }); - }, 30_000); - }); - const result = await Promise.race([fn(), refresher]); - abortController.abort(); - return result; - } - finally { - await lease.releaseLease(); - } - } - catch (err) { - if (err.statusCode !== 409 && err.statusCode !== 412) { - throw err; - } - await new Promise(c => setTimeout(c, 5000)); - } - } - throw new Error('Failed to acquire lease on blob after 30 minutes'); -} -async function processArtifact(artifact, filePath) { - const log = (...args) => console.log(`[${artifact.name}]`, ...args); - const match = /^vscode_(?<product>[^_]+)_(?<os>[^_]+)(?:_legacy)?_(?<arch>[^_]+)_(?<unprocessedType>[^_]+)$/.exec(artifact.name); - if (!match) { - throw new Error(`Invalid artifact name: ${artifact.name}`); - } - const { cosmosDBAccessToken, blobServiceAccessToken } = JSON.parse(e('PUBLISH_AUTH_TOKENS')); - const quality = e('VSCODE_QUALITY'); - const version = e('BUILD_SOURCEVERSION'); - const friendlyFileName = `${quality}/${version}/${path_1.default.basename(filePath)}`; - const blobServiceClient = new storage_blob_1.BlobServiceClient(`https://${e('VSCODE_STAGING_BLOB_STORAGE_ACCOUNT_NAME')}.blob.core.windows.net/`, { getToken: async () => blobServiceAccessToken }); - const leasesContainerClient = blobServiceClient.getContainerClient('leases'); - await leasesContainerClient.createIfNotExists(); - const leaseBlobClient = leasesContainerClient.getBlockBlobClient(friendlyFileName); - log(`Acquiring lease for: ${friendlyFileName}`); - await withLease(leaseBlobClient, async () => { - log(`Successfully acquired lease for: ${friendlyFileName}`); - const url = `${e('PRSS_CDN_URL')}/${friendlyFileName}`; - const res = await (0, retry_1.retry)(() => fetch(url)); - if (res.status === 200) { - log(`Already released and provisioned: ${url}`); - } - else { - const stagingContainerClient = blobServiceClient.getContainerClient('staging'); - await stagingContainerClient.createIfNotExists(); - const now = new Date().valueOf(); - const oneHour = 60 * 60 * 1000; - const oneHourAgo = new Date(now - oneHour); - const oneHourFromNow = new Date(now + oneHour); - const userDelegationKey = await blobServiceClient.getUserDelegationKey(oneHourAgo, oneHourFromNow); - const sasOptions = { containerName: 'staging',
permissions: storage_blob_1.ContainerSASPermissions.from({ read: true }), startsOn: oneHourAgo, expiresOn: oneHourFromNow }; - const stagingSasToken = (0, storage_blob_1.generateBlobSASQueryParameters)(sasOptions, userDelegationKey, e('VSCODE_STAGING_BLOB_STORAGE_ACCOUNT_NAME')).toString(); - const releaseService = await ESRPReleaseService.create(log, e('RELEASE_TENANT_ID'), e('RELEASE_CLIENT_ID'), e('RELEASE_AUTH_CERT'), e('RELEASE_REQUEST_SIGNING_CERT'), stagingContainerClient, stagingSasToken); - await releaseService.createRelease(version, filePath, friendlyFileName); - } - const { product, os, arch, unprocessedType } = match.groups; - const platform = getPlatform(product, os, arch, unprocessedType); - const type = getRealType(unprocessedType); - const size = fs_1.default.statSync(filePath).size; - const stream = fs_1.default.createReadStream(filePath); - const [hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]); // CodeQL [SM04514] Using SHA1 only for legacy reasons, we are actually only respecting SHA256 - const asset = { platform, type, url, hash: hash.toString('hex'), sha256hash: sha256hash.toString('hex'), size, supportsFastUpdate: true }; - log('Creating asset...'); - const result = await (0, retry_1.retry)(async (attempt) => { - log(`Creating asset in Cosmos DB (attempt ${attempt})...`); - const client = new cosmos_1.CosmosClient({ endpoint: e('AZURE_DOCUMENTDB_ENDPOINT'), tokenProvider: () => Promise.resolve(`type=aad&ver=1.0&sig=${cosmosDBAccessToken.token}`) }); - const scripts = client.database('builds').container(quality).scripts; - const { resource: result } = await scripts.storedProcedure('createAsset').execute('', [version, asset, true]); - return result; - }); - if (result === 'already exists') { - log('Asset already exists!'); - } - else { - log('Asset successfully created: ', JSON.stringify(asset, undefined, 2)); - } - }); - log(`Successfully released lease for: ${friendlyFileName}`); -} -// It is VERY important that we don't download artifacts too much too fast from AZDO. -// AZDO throttles us SEVERELY if we do. Not just that, but they also close open -// sockets, so the whole thing grinds to a halt. So, downloading and extracting -// happen serially in the main thread, making sure the downloads are spaced out -// properly. For each extracted artifact, we spawn a worker thread to upload it to -// the CDN and finally update the build in Cosmos DB.
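The comment above captures the architecture of this script: artifacts are downloaded and extracted serially on the main thread to avoid AZDO throttling, while each extracted artifact is handed off to a worker thread for publishing. As a rough illustration of that single-file worker pattern (not the repository's actual code; the job shape and names here are invented), a minimal sketch with node:worker_threads could look like the following, using the same import.meta.filename re-entry that the updated publish.ts relies on:

```ts
// A minimal sketch of the pattern described in the comment above, not the
// repository's actual implementation. Assumes a recent Node.js (>= 22) running
// this file as an ES module with TypeScript type stripping, so that
// import.meta.filename can be handed straight to the Worker constructor.
import { Worker, isMainThread, workerData } from 'node:worker_threads';

interface ArtifactJob { // illustrative shape, not the real workerData payload
	readonly name: string;
	readonly filePath: string;
}

async function processOne(job: ArtifactJob): Promise<void> {
	// Stand-in for the expensive per-artifact work (upload to the CDN, update Cosmos DB).
	console.log(`publishing ${job.name} from ${job.filePath}`);
}

function runInWorker(job: ArtifactJob): Promise<void> {
	return new Promise<void>((resolve, reject) => {
		// Re-run this same file on a worker thread; the job travels via workerData.
		const worker = new Worker(import.meta.filename, { workerData: job });
		worker.on('error', reject);
		worker.on('exit', code => code === 0 ? resolve() : reject(new Error(`worker exited with code ${code}`)));
	});
}

if (!isMainThread) {
	// Worker entry point: do the heavy lifting, then let the thread exit.
	processOne(workerData as ArtifactJob).then(() => process.exit(0), err => {
		console.error(err);
		process.exit(1);
	});
} else {
	// Main thread: downloads would stay serial here, while publishing fans out to workers.
	runInWorker({ name: 'vscode_example_artifact', filePath: '/tmp/example.zip' }).catch(console.error);
}
```

The design point the comment makes is that only cheap coordination (polling, pacing the downloads) stays on the main thread; the network-heavy publishing work runs off-thread, so a slow upload never delays the next download.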
-async function main() { - if (!node_worker_threads_1.isMainThread) { - const { artifact, artifactFilePath } = node_worker_threads_1.workerData; - await processArtifact(artifact, artifactFilePath); - return; - } - const done = new State(); - const processing = new Set(); - for (const name of done) { - console.log(`\u2705 ${name}`); - } - const stages = new Set(['Compile']); - if (e('VSCODE_BUILD_STAGE_LINUX') === 'True' || - e('VSCODE_BUILD_STAGE_ALPINE') === 'True' || - e('VSCODE_BUILD_STAGE_MACOS') === 'True' || - e('VSCODE_BUILD_STAGE_WINDOWS') === 'True') { - stages.add('CompileCLI'); - } - if (e('VSCODE_BUILD_STAGE_WINDOWS') === 'True') { - stages.add('Windows'); - } - if (e('VSCODE_BUILD_STAGE_LINUX') === 'True') { - stages.add('Linux'); - } - if (e('VSCODE_BUILD_STAGE_ALPINE') === 'True') { - stages.add('Alpine'); - } - if (e('VSCODE_BUILD_STAGE_MACOS') === 'True') { - stages.add('macOS'); - } - if (e('VSCODE_BUILD_STAGE_WEB') === 'True') { - stages.add('Web'); - } - let timeline; - let artifacts; - let resultPromise = Promise.resolve([]); - const operations = []; - while (true) { - [timeline, artifacts] = await Promise.all([(0, retry_1.retry)(() => getPipelineTimeline()), (0, retry_1.retry)(() => getPipelineArtifacts())]); - const stagesCompleted = new Set(timeline.records.filter(r => r.type === 'Stage' && r.state === 'completed' && stages.has(r.name)).map(r => r.name)); - const stagesInProgress = [...stages].filter(s => !stagesCompleted.has(s)); - const artifactsInProgress = artifacts.filter(a => processing.has(a.name)); - if (stagesInProgress.length === 0 && artifacts.length === done.size + processing.size) { - break; - } - else if (stagesInProgress.length > 0) { - console.log('Stages in progress:', stagesInProgress.join(', ')); - } - else if (artifactsInProgress.length > 0) { - console.log('Artifacts in progress:', artifactsInProgress.map(a => a.name).join(', ')); - } - else { - console.log(`Waiting for a total of ${artifacts.length}, ${done.size} done, ${processing.size} in progress...`); - } - for (const artifact of artifacts) { - if (done.has(artifact.name) || processing.has(artifact.name)) { - continue; - } - console.log(`[${artifact.name}] Found new artifact`); - const artifactZipPath = path_1.default.join(e('AGENT_TEMPDIRECTORY'), `${artifact.name}.zip`); - await (0, retry_1.retry)(async (attempt) => { - const start = Date.now(); - console.log(`[${artifact.name}] Downloading (attempt ${attempt})...`); - await downloadArtifact(artifact, artifactZipPath); - const archiveSize = fs_1.default.statSync(artifactZipPath).size; - const downloadDurationS = (Date.now() - start) / 1000; - const downloadSpeedKBS = Math.round((archiveSize / 1024) / downloadDurationS); - console.log(`[${artifact.name}] Successfully downloaded after ${Math.floor(downloadDurationS)} seconds(${downloadSpeedKBS} KB/s).`); - }); - const artifactFilePaths = await unzip(artifactZipPath, e('AGENT_TEMPDIRECTORY')); - const artifactFilePath = artifactFilePaths.filter(p => !/_manifest/.test(p))[0]; - processing.add(artifact.name); - const promise = new Promise((resolve, reject) => { - const worker = new node_worker_threads_1.Worker(__filename, { workerData: { artifact, artifactFilePath } }); - worker.on('error', reject); - worker.on('exit', code => { - if (code === 0) { - resolve(); - } - else { - reject(new Error(`[${artifact.name}] Worker stopped with exit code ${code}`)); - } - }); - }); - const operation = promise.then(() => { - processing.delete(artifact.name); - done.add(artifact.name); - console.log(`\u2705 
${artifact.name} `); - }); - operations.push({ name: artifact.name, operation }); - resultPromise = Promise.allSettled(operations.map(o => o.operation)); - } - await new Promise(c => setTimeout(c, 10_000)); - } - console.log(`Found all ${done.size + processing.size} artifacts, waiting for ${processing.size} artifacts to finish publishing...`); - const artifactsInProgress = operations.filter(o => processing.has(o.name)); - if (artifactsInProgress.length > 0) { - console.log('Artifacts in progress:', artifactsInProgress.map(a => a.name).join(', ')); - } - const results = await resultPromise; - for (let i = 0; i < operations.length; i++) { - const result = results[i]; - if (result.status === 'rejected') { - console.error(`[${operations[i].name}]`, result.reason); - } - } - // Fail the job if any of the artifacts failed to publish - if (results.some(r => r.status === 'rejected')) { - throw new Error('Some artifacts failed to publish'); - } - // Also fail the job if any of the stages did not succeed - let shouldFail = false; - for (const stage of stages) { - const record = timeline.records.find(r => r.name === stage && r.type === 'Stage'); - if (record.result !== 'succeeded' && record.result !== 'succeededWithIssues') { - shouldFail = true; - console.error(`Stage ${stage} did not succeed: ${record.result}`); - } - } - if (shouldFail) { - throw new Error('Some stages did not succeed'); - } - console.log(`All ${done.size} artifacts published!`); -} -if (require.main === module) { - main().then(() => { - process.exit(0); - }, err => { - console.error(err); - process.exit(1); - }); -} -//# sourceMappingURL=publish.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/publish.ts b/build/azure-pipelines/common/publish.ts index e8a6776ceb1..5761c0d06df 100644 --- a/build/azure-pipelines/common/publish.ts +++ b/build/azure-pipelines/common/publish.ts @@ -10,7 +10,7 @@ import type { ReadableStream } from 'stream/web'; import { pipeline } from 'node:stream/promises'; import yauzl from 'yauzl'; import crypto from 'crypto'; -import { retry } from './retry'; +import { retry } from './retry.ts'; import { CosmosClient } from '@azure/cosmos'; import cp from 'child_process'; import os from 'os'; @@ -73,15 +73,16 @@ interface ReleaseError { errorMessages: string[]; } -const enum StatusCode { - Pass = 'pass', - Aborted = 'aborted', - Inprogress = 'inprogress', - FailCanRetry = 'failCanRetry', - FailDoNotRetry = 'failDoNotRetry', - PendingAnalysis = 'pendingAnalysis', - Cancelled = 'cancelled' -} +const StatusCode = Object.freeze({ + Pass: 'pass', + Aborted: 'aborted', + Inprogress: 'inprogress', + FailCanRetry: 'failCanRetry', + FailDoNotRetry: 'failDoNotRetry', + PendingAnalysis: 'pendingAnalysis', + Cancelled: 'cancelled' +}); +type StatusCode = typeof StatusCode[keyof typeof StatusCode]; interface ReleaseResultMessage { activities: ReleaseActivityInfo[]; @@ -349,15 +350,31 @@ class ESRPReleaseService { private static API_URL = 'https://api.esrp.microsoft.com/api/v3/releaseservices/clients/'; + private readonly log: (...args: unknown[]) => void; + private readonly clientId: string; + private readonly accessToken: string; + private readonly requestSigningCertificates: string[]; + private readonly requestSigningKey: string; + private readonly containerClient: ContainerClient; + private readonly stagingSasToken: string; + private constructor( - private readonly log: (...args: unknown[]) => void, - private readonly clientId: string, - private readonly accessToken: string, - private readonly 
requestSigningCertificates: string[], - private readonly requestSigningKey: string, - private readonly containerClient: ContainerClient, - private readonly stagingSasToken: string - ) { } + log: (...args: unknown[]) => void, + clientId: string, + accessToken: string, + requestSigningCertificates: string[], + requestSigningKey: string, + containerClient: ContainerClient, + stagingSasToken: string + ) { + this.log = log; + this.clientId = clientId; + this.accessToken = accessToken; + this.requestSigningCertificates = requestSigningCertificates; + this.requestSigningKey = requestSigningKey; + this.containerClient = containerClient; + this.stagingSasToken = stagingSasToken; + } async createRelease(version: string, filePath: string, friendlyFileName: string) { const correlationId = crypto.randomUUID(); @@ -1009,7 +1026,7 @@ async function main() { processing.add(artifact.name); const promise = new Promise((resolve, reject) => { - const worker = new Worker(__filename, { workerData: { artifact, artifactFilePath } }); + const worker = new Worker(import.meta.filename, { workerData: { artifact, artifactFilePath } }); worker.on('error', reject); worker.on('exit', code => { if (code === 0) { @@ -1075,7 +1092,7 @@ async function main() { console.log(`All ${done.size} artifacts published!`); } -if (require.main === module) { +if (import.meta.main) { main().then(() => { process.exit(0); }, err => { diff --git a/build/azure-pipelines/common/releaseBuild.js b/build/azure-pipelines/common/releaseBuild.js deleted file mode 100644 index b74e2847cbc..00000000000 --- a/build/azure-pipelines/common/releaseBuild.js +++ /dev/null @@ -1,56 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -const cosmos_1 = require("@azure/cosmos"); -const retry_1 = require("./retry"); -function getEnv(name) { - const result = process.env[name]; - if (typeof result === 'undefined') { - throw new Error('Missing env: ' + name); - } - return result; -} -function createDefaultConfig(quality) { - return { - id: quality, - frozen: false - }; -} -async function getConfig(client, quality) { - const query = `SELECT TOP 1 * FROM c WHERE c.id = "${quality}"`; - const res = await client.database('builds').container('config').items.query(query).fetchAll(); - if (res.resources.length === 0) { - return createDefaultConfig(quality); - } - return res.resources[0]; -} -async function main(force) { - const commit = getEnv('BUILD_SOURCEVERSION'); - const quality = getEnv('VSCODE_QUALITY'); - const { cosmosDBAccessToken } = JSON.parse(getEnv('PUBLISH_AUTH_TOKENS')); - const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], tokenProvider: () => Promise.resolve(`type=aad&ver=1.0&sig=${cosmosDBAccessToken.token}`) }); - if (!force) { - const config = await getConfig(client, quality); - console.log('Quality config:', config); - if (config.frozen) { - console.log(`Skipping release because quality ${quality} is frozen.`); - return; - } - } - console.log(`Releasing build ${commit}...`); - const scripts = client.database('builds').container(quality).scripts; - await (0, retry_1.retry)(() => scripts.storedProcedure('releaseBuild').execute('', [commit])); -} -const [, , force] = process.argv; -console.log(process.argv); -main(/^true$/i.test(force)).then(() => { - console.log('Build successfully released'); - process.exit(0); -}, err => { - console.error(err); - process.exit(1); -}); -//# sourceMappingURL=releaseBuild.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/releaseBuild.ts b/build/azure-pipelines/common/releaseBuild.ts index d60701c2fac..32ea596ff64 100644 --- a/build/azure-pipelines/common/releaseBuild.ts +++ b/build/azure-pipelines/common/releaseBuild.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { CosmosClient } from '@azure/cosmos'; -import { retry } from './retry'; +import { retry } from './retry.ts'; function getEnv(name: string): string { const result = process.env[name]; diff --git a/build/azure-pipelines/common/retry.js b/build/azure-pipelines/common/retry.js deleted file mode 100644 index 91f60bf24b2..00000000000 --- a/build/azure-pipelines/common/retry.js +++ /dev/null @@ -1,27 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.retry = retry; -async function retry(fn) { - let lastError; - for (let run = 1; run <= 10; run++) { - try { - return await fn(run); - } - catch (err) { - if (!/fetch failed|terminated|aborted|timeout|TimeoutError|Timeout Error|RestError|Client network socket disconnected|socket hang up|ECONNRESET|CredentialUnavailableError|endpoints_resolution_error|Audience validation failed|end of central directory record signature not found/i.test(err.message)) { - throw err; - } - lastError = err; - // maximum delay is 10th retry: ~3 seconds - const millis = Math.floor((Math.random() * 200) + (50 * Math.pow(1.5, run))); - await new Promise(c => setTimeout(c, millis)); - } - } - console.error(`Too many retries, aborting.`); - throw lastError; -} -//# sourceMappingURL=retry.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/sign-win32.js b/build/azure-pipelines/common/sign-win32.js deleted file mode 100644 index f4e3f27c1f2..00000000000 --- a/build/azure-pipelines/common/sign-win32.js +++ /dev/null @@ -1,18 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const sign_1 = require("./sign"); -const path_1 = __importDefault(require("path")); -(0, sign_1.main)([ - process.env['EsrpCliDllPath'], - 'sign-windows', - path_1.default.dirname(process.argv[2]), - path_1.default.basename(process.argv[2]) -]); -//# sourceMappingURL=sign-win32.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/sign-win32.ts b/build/azure-pipelines/common/sign-win32.ts index ad88435b5a3..677c2024b9c 100644 --- a/build/azure-pipelines/common/sign-win32.ts +++ b/build/azure-pipelines/common/sign-win32.ts @@ -3,7 +3,7 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import { main } from './sign'; +import { main } from './sign.ts'; import path from 'path'; main([ diff --git a/build/azure-pipelines/common/sign.js b/build/azure-pipelines/common/sign.js deleted file mode 100644 index 47c034dea1c..00000000000 --- a/build/azure-pipelines/common/sign.js +++ /dev/null @@ -1,209 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Temp = void 0; -exports.main = main; -const child_process_1 = __importDefault(require("child_process")); -const fs_1 = __importDefault(require("fs")); -const crypto_1 = __importDefault(require("crypto")); -const path_1 = __importDefault(require("path")); -const os_1 = __importDefault(require("os")); -class Temp { - _files = []; - tmpNameSync() { - const file = path_1.default.join(os_1.default.tmpdir(), crypto_1.default.randomBytes(20).toString('hex')); - this._files.push(file); - return file; - } - dispose() { - for (const file of this._files) { - try { - fs_1.default.unlinkSync(file); - } - catch (err) { - // noop - } - } - } -} -exports.Temp = Temp; -function getParams(type) { - switch (type) { - case 'sign-windows': - return [ - { - keyCode: 'CP-230012', - operationSetCode: 'SigntoolSign', - parameters: [ - { parameterName: 'OpusName', parameterValue: 'VS Code' }, - { parameterName: 'OpusInfo', parameterValue: 'https://code.visualstudio.com/' }, - { parameterName: 'Append', parameterValue: '/as' }, - { parameterName: 'FileDigest', parameterValue: '/fd "SHA256"' }, - { parameterName: 'PageHash', parameterValue: '/NPH' }, - { parameterName: 'TimeStamp', parameterValue: '/tr "http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer" /td sha256' } - ], - toolName: 'sign', - toolVersion: '1.0' - }, - { - keyCode: 'CP-230012', - operationSetCode: 'SigntoolVerify', - parameters: [ - { parameterName: 'VerifyAll', parameterValue: '/all' } - ], - toolName: 'sign', - toolVersion: '1.0' - } - ]; - case 'sign-windows-appx': - return [ - { - keyCode: 'CP-229979', - operationSetCode: 'SigntoolSign', - parameters: [ - { parameterName: 'OpusName', parameterValue: 'VS Code' }, - { parameterName: 'OpusInfo', parameterValue: 'https://code.visualstudio.com/' }, - { parameterName: 'FileDigest', parameterValue: '/fd "SHA256"' }, - { parameterName: 'PageHash', parameterValue: '/NPH' }, - { parameterName: 'TimeStamp', parameterValue: '/tr "http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer" /td sha256' } - ], - toolName: 'sign', - toolVersion: '1.0' - }, - { - keyCode: 'CP-229979', - operationSetCode: 'SigntoolVerify', - parameters: [], - toolName: 'sign', - toolVersion: '1.0' - } - ]; - case 'sign-pgp': - return [{ - keyCode: 'CP-450779-Pgp', - operationSetCode: 'LinuxSign', - parameters: [], - toolName: 'sign', - toolVersion: '1.0' - }]; - case 'sign-darwin': - return [{ - keyCode: 'CP-401337-Apple', - operationSetCode: 'MacAppDeveloperSign', - parameters: [{ parameterName: 'Hardening', parameterValue: '--options=runtime' }], - toolName: 'sign', - toolVersion: '1.0' - }]; - case 'notarize-darwin': - return [{ - keyCode: 'CP-401337-Apple', - operationSetCode: 'MacAppNotarize', - parameters: [], - toolName: 'sign', - toolVersion: '1.0' - }]; - case 'nuget': - return [{ - keyCode: 'CP-401405', - operationSetCode: 'NuGetSign', - parameters: [], - toolName: 'sign', - toolVersion: '1.0' - }, { - keyCode: 'CP-401405', - operationSetCode: 'NuGetVerify', - parameters: [], - toolName: 'sign', - toolVersion: '1.0' - }]; - default: - throw new Error(`Sign type ${type} not found`); - } -} -function main([esrpCliPath, type, folderPath, pattern]) { - const tmp = new Temp(); - process.on('exit', () => tmp.dispose()); - const key = crypto_1.default.randomBytes(32); - const iv = crypto_1.default.randomBytes(16); - const cipher = crypto_1.default.createCipheriv('aes-256-cbc', key, iv); - const encryptedToken = 
cipher.update(process.env['SYSTEM_ACCESSTOKEN'].trim(), 'utf8', 'hex') + cipher.final('hex'); - const encryptionDetailsPath = tmp.tmpNameSync(); - fs_1.default.writeFileSync(encryptionDetailsPath, JSON.stringify({ key: key.toString('hex'), iv: iv.toString('hex') })); - const encryptedTokenPath = tmp.tmpNameSync(); - fs_1.default.writeFileSync(encryptedTokenPath, encryptedToken); - const patternPath = tmp.tmpNameSync(); - fs_1.default.writeFileSync(patternPath, pattern); - const paramsPath = tmp.tmpNameSync(); - fs_1.default.writeFileSync(paramsPath, JSON.stringify(getParams(type))); - const dotnetVersion = child_process_1.default.execSync('dotnet --version', { encoding: 'utf8' }).trim(); - const adoTaskVersion = path_1.default.basename(path_1.default.dirname(path_1.default.dirname(esrpCliPath))); - const federatedTokenData = { - jobId: process.env['SYSTEM_JOBID'], - planId: process.env['SYSTEM_PLANID'], - projectId: process.env['SYSTEM_TEAMPROJECTID'], - hub: process.env['SYSTEM_HOSTTYPE'], - uri: process.env['SYSTEM_COLLECTIONURI'], - managedIdentityId: process.env['VSCODE_ESRP_CLIENT_ID'], - managedIdentityTenantId: process.env['VSCODE_ESRP_TENANT_ID'], - serviceConnectionId: process.env['VSCODE_ESRP_SERVICE_CONNECTION_ID'], - tempDirectory: os_1.default.tmpdir(), - systemAccessToken: encryptedTokenPath, - encryptionKey: encryptionDetailsPath - }; - const args = [ - esrpCliPath, - 'vsts.sign', - '-a', - process.env['ESRP_CLIENT_ID'], - '-d', - process.env['ESRP_TENANT_ID'], - '-k', JSON.stringify({ akv: 'vscode-esrp' }), - '-z', JSON.stringify({ akv: 'vscode-esrp', cert: 'esrp-sign' }), - '-f', folderPath, - '-p', patternPath, - '-u', 'false', - '-x', 'regularSigning', - '-b', 'input.json', - '-l', 'AzSecPack_PublisherPolicyProd.xml', - '-y', 'inlineSignParams', - '-j', paramsPath, - '-c', '9997', - '-t', '120', - '-g', '10', - '-v', 'Tls12', - '-s', 'https://api.esrp.microsoft.com/api/v1', - '-m', '0', - '-o', 'Microsoft', - '-i', 'https://www.microsoft.com', - '-n', '5', - '-r', 'true', - '-w', dotnetVersion, - '-skipAdoReportAttachment', 'false', - '-pendingAnalysisWaitTimeoutMinutes', '5', - '-adoTaskVersion', adoTaskVersion, - '-resourceUri', 'https://msazurecloud.onmicrosoft.com/api.esrp.microsoft.com', - '-esrpClientId', - process.env['ESRP_CLIENT_ID'], - '-useMSIAuthentication', 'true', - '-federatedTokenData', JSON.stringify(federatedTokenData) - ]; - try { - child_process_1.default.execFileSync('dotnet', args, { stdio: 'inherit' }); - } - catch (err) { - console.error('ESRP failed'); - console.error(err); - process.exit(1); - } -} -if (require.main === module) { - main(process.argv.slice(2)); - process.exit(0); -} -//# sourceMappingURL=sign.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/sign.ts b/build/azure-pipelines/common/sign.ts index 19a288483c8..d93f752eeeb 100644 --- a/build/azure-pipelines/common/sign.ts +++ b/build/azure-pipelines/common/sign.ts @@ -216,7 +216,7 @@ export function main([esrpCliPath, type, folderPath, pattern]: string[]) { } } -if (require.main === module) { +if (import.meta.main) { main(process.argv.slice(2)); process.exit(0); } diff --git a/build/azure-pipelines/common/waitForArtifacts.js b/build/azure-pipelines/common/waitForArtifacts.js deleted file mode 100644 index b9ffb73962d..00000000000 --- a/build/azure-pipelines/common/waitForArtifacts.js +++ /dev/null @@ -1,46 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft 
Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -const publish_1 = require("../common/publish"); -const retry_1 = require("../common/retry"); -async function getPipelineArtifacts() { - const result = await (0, publish_1.requestAZDOAPI)('artifacts'); - return result.value.filter(a => !/sbom$/.test(a.name)); -} -async function main(artifacts) { - if (artifacts.length === 0) { - throw new Error(`Usage: node waitForArtifacts.js ...`); - } - // This loop will run for 30 minutes and waits to the x64 and arm64 artifacts - // to be uploaded to the pipeline by the `macOS` and `macOSARM64` jobs. As soon - // as these artifacts are found, the loop completes and the `macOSUnivesrsal` - // job resumes. - for (let index = 0; index < 60; index++) { - try { - console.log(`Waiting for artifacts (${artifacts.join(', ')}) to be uploaded (${index + 1}/60)...`); - const allArtifacts = await (0, retry_1.retry)(() => getPipelineArtifacts()); - console.log(` * Artifacts attached to the pipelines: ${allArtifacts.length > 0 ? allArtifacts.map(a => a.name).join(', ') : 'none'}`); - const foundArtifacts = allArtifacts.filter(a => artifacts.includes(a.name)); - console.log(` * Found artifacts: ${foundArtifacts.length > 0 ? foundArtifacts.map(a => a.name).join(', ') : 'none'}`); - if (foundArtifacts.length === artifacts.length) { - console.log(` * All artifacts were found`); - return; - } - } - catch (err) { - console.error(`ERROR: Failed to get pipeline artifacts: ${err}`); - } - await new Promise(c => setTimeout(c, 30_000)); - } - throw new Error(`ERROR: Artifacts (${artifacts.join(', ')}) were not uploaded within 30 minutes.`); -} -main(process.argv.splice(2)).then(() => { - process.exit(0); -}, err => { - console.error(err); - process.exit(1); -}); -//# sourceMappingURL=waitForArtifacts.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/waitForArtifacts.ts b/build/azure-pipelines/common/waitForArtifacts.ts index 3fed6cd38d2..1b48a70d994 100644 --- a/build/azure-pipelines/common/waitForArtifacts.ts +++ b/build/azure-pipelines/common/waitForArtifacts.ts @@ -3,8 +3,8 @@ * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ -import { Artifact, requestAZDOAPI } from '../common/publish'; -import { retry } from '../common/retry'; +import { type Artifact, requestAZDOAPI } from '../common/publish.ts'; +import { retry } from '../common/retry.ts'; async function getPipelineArtifacts(): Promise<Artifact[]> { const result = await requestAZDOAPI<{ readonly value: Artifact[] }>('artifacts'); @@ -13,7 +13,7 @@ async function getPipelineArtifacts(): Promise<Artifact[]> { async function main(artifacts: string[]): Promise<void> { if (artifacts.length === 0) { - throw new Error(`Usage: node waitForArtifacts.js ...`); + throw new Error(`Usage: node waitForArtifacts.ts ...`); } // This loop will run for 30 minutes and waits to the x64 and arm64 artifacts diff --git a/build/azure-pipelines/darwin/codesign.js b/build/azure-pipelines/darwin/codesign.js deleted file mode 100644 index 30a3bdc332b..00000000000 --- a/build/azure-pipelines/darwin/codesign.js +++ /dev/null @@ -1,30 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -const codesign_1 = require("../common/codesign"); -const publish_1 = require("../common/publish"); -async function main() { - const arch = (0, publish_1.e)('VSCODE_ARCH'); - const esrpCliDLLPath = (0, publish_1.e)('EsrpCliDllPath'); - const pipelineWorkspace = (0, publish_1.e)('PIPELINE_WORKSPACE'); - const folder = `${pipelineWorkspace}/vscode_client_darwin_${arch}_archive`; - const glob = `VSCode-darwin-${arch}.zip`; - // Codesign - (0, codesign_1.printBanner)('Codesign'); - const codeSignTask = (0, codesign_1.spawnCodesignProcess)(esrpCliDLLPath, 'sign-darwin', folder, glob); - await (0, codesign_1.streamProcessOutputAndCheckResult)('Codesign', codeSignTask); - // Notarize - (0, codesign_1.printBanner)('Notarize'); - const notarizeTask = (0, codesign_1.spawnCodesignProcess)(esrpCliDLLPath, 'notarize-darwin', folder, glob); - await (0, codesign_1.streamProcessOutputAndCheckResult)('Notarize', notarizeTask); -} -main().then(() => { - process.exit(0); -}, err => { - console.error(`ERROR: ${err}`); - process.exit(1); -}); -//# sourceMappingURL=codesign.js.map \ No newline at end of file diff --git a/build/azure-pipelines/darwin/codesign.ts b/build/azure-pipelines/darwin/codesign.ts index e6f6a5ce754..848fb0f4647 100644 --- a/build/azure-pipelines/darwin/codesign.ts +++ b/build/azure-pipelines/darwin/codesign.ts @@ -3,8 +3,8 @@ * Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ -import { printBanner, spawnCodesignProcess, streamProcessOutputAndCheckResult } from '../common/codesign'; -import { e } from '../common/publish'; +import { printBanner, spawnCodesignProcess, streamProcessOutputAndCheckResult } from '../common/codesign.ts'; +import { e } from '../common/publish.ts'; async function main() { const arch = e('VSCODE_ARCH'); diff --git a/build/azure-pipelines/darwin/product-build-darwin-node-modules.yml b/build/azure-pipelines/darwin/product-build-darwin-node-modules.yml index 4151d30b06c..8b3f9c9305a 100644 --- a/build/azure-pipelines/darwin/product-build-darwin-node-modules.yml +++ b/build/azure-pipelines/darwin/product-build-darwin-node-modules.yml @@ -32,7 +32,7 @@ jobs: condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) displayName: Setup NPM Registry - - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js darwin $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash + - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts darwin $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash displayName: Prepare node_modules cache key - task: Cache@2 @@ -85,13 +85,13 @@ jobs: displayName: Install dependencies condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - script: node build/azure-pipelines/distro/mixin-npm + - script: node build/azure-pipelines/distro/mixin-npm.ts condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) displayName: Mixin distro node modules - script: | set -e - node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt + node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt mkdir -p .build/node_modules_cache tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) diff --git a/build/azure-pipelines/darwin/product-build-darwin-universal.yml b/build/azure-pipelines/darwin/product-build-darwin-universal.yml index 23c85dc714a..5938c13dde2 100644 --- a/build/azure-pipelines/darwin/product-build-darwin-universal.yml +++ b/build/azure-pipelines/darwin/product-build-darwin-universal.yml @@ -67,7 +67,7 @@ jobs: GITHUB_TOKEN: "$(github-distro-mixin-password)" displayName: Install build dependencies - - pwsh: node build/azure-pipelines/common/waitForArtifacts.js unsigned_vscode_client_darwin_x64_archive unsigned_vscode_client_darwin_arm64_archive + - pwsh: node -- build/azure-pipelines/common/waitForArtifacts.ts unsigned_vscode_client_darwin_x64_archive unsigned_vscode_client_darwin_arm64_archive env: SYSTEM_ACCESSTOKEN: $(System.AccessToken) displayName: Wait for x64 and arm64 artifacts @@ -80,7 +80,7 @@ jobs: artifact: unsigned_vscode_client_darwin_arm64_archive displayName: Download arm64 artifact - - script: node build/azure-pipelines/distro/mixin-quality + - script: node build/azure-pipelines/distro/mixin-quality.ts displayName: Mixin distro quality - script: | @@ -88,14 +88,14 @@ jobs: unzip $(Pipeline.Workspace)/unsigned_vscode_client_darwin_x64_archive/VSCode-darwin-x64.zip -d $(agent.builddirectory)/VSCode-darwin-x64 & unzip $(Pipeline.Workspace)/unsigned_vscode_client_darwin_arm64_archive/VSCode-darwin-arm64.zip -d $(agent.builddirectory)/VSCode-darwin-arm64 & wait - DEBUG=* node build/darwin/create-universal-app.js $(agent.builddirectory) + DEBUG=* 
node build/darwin/create-universal-app.ts $(agent.builddirectory) displayName: Create Universal App - script: | set -e APP_ROOT="$(Agent.BuildDirectory)/VSCode-darwin-$(VSCODE_ARCH)" APP_NAME="`ls $APP_ROOT | head -n 1`" - APP_PATH="$APP_ROOT/$APP_NAME" node build/darwin/verify-macho.js universal + APP_PATH="$APP_ROOT/$APP_NAME" node build/darwin/verify-macho.ts universal displayName: Verify arch of Mach-O objects - script: | @@ -107,7 +107,7 @@ jobs: security import $(agent.tempdirectory)/cert.p12 -k $(agent.tempdirectory)/buildagent.keychain -P "$(macos-developer-certificate-key)" -T /usr/bin/codesign export CODESIGN_IDENTITY=$(security find-identity -v -p codesigning $(agent.tempdirectory)/buildagent.keychain | grep -oEi "([0-9A-F]{40})" | head -n 1) security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k pwd $(agent.tempdirectory)/buildagent.keychain - DEBUG=electron-osx-sign* node build/darwin/sign.js $(agent.builddirectory) + DEBUG=electron-osx-sign* node build/darwin/sign.ts $(agent.builddirectory) displayName: Set Hardened Entitlements - script: | @@ -132,12 +132,12 @@ jobs: Pattern: noop displayName: 'Install ESRP Tooling' - - script: node build/azure-pipelines/common/sign $(Agent.RootDirectory)/_tasks/EsrpCodeSigning_*/*/net6.0/esrpcli.dll sign-darwin $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive VSCode-darwin-$(VSCODE_ARCH).zip + - script: node build/azure-pipelines/common/sign.ts $(Agent.RootDirectory)/_tasks/EsrpCodeSigning_*/*/net6.0/esrpcli.dll sign-darwin $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive VSCode-darwin-$(VSCODE_ARCH).zip env: SYSTEM_ACCESSTOKEN: $(System.AccessToken) displayName: ✍️ Codesign - - script: node build/azure-pipelines/common/sign $(Agent.RootDirectory)/_tasks/EsrpCodeSigning_*/*/net6.0/esrpcli.dll notarize-darwin $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive VSCode-darwin-$(VSCODE_ARCH).zip + - script: node build/azure-pipelines/common/sign.ts $(Agent.RootDirectory)/_tasks/EsrpCodeSigning_*/*/net6.0/esrpcli.dll notarize-darwin $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive VSCode-darwin-$(VSCODE_ARCH).zip env: SYSTEM_ACCESSTOKEN: $(System.AccessToken) displayName: ✍️ Notarize diff --git a/build/azure-pipelines/darwin/steps/product-build-darwin-cli-sign.yml b/build/azure-pipelines/darwin/steps/product-build-darwin-cli-sign.yml index 883645aec69..1cd0fe2a824 100644 --- a/build/azure-pipelines/darwin/steps/product-build-darwin-cli-sign.yml +++ b/build/azure-pipelines/darwin/steps/product-build-darwin-cli-sign.yml @@ -33,12 +33,12 @@ steps: archiveFilePatterns: $(Build.ArtifactStagingDirectory)/pkg/${{ target }}/*.zip destinationFolder: $(Build.ArtifactStagingDirectory)/sign/${{ target }} - - script: node build/azure-pipelines/common/sign $(Agent.RootDirectory)/_tasks/EsrpCodeSigning_*/*/net6.0/esrpcli.dll sign-darwin $(Build.ArtifactStagingDirectory)/pkg "*.zip" + - script: node build/azure-pipelines/common/sign.ts $(Agent.RootDirectory)/_tasks/EsrpCodeSigning_*/*/net6.0/esrpcli.dll sign-darwin $(Build.ArtifactStagingDirectory)/pkg "*.zip" env: SYSTEM_ACCESSTOKEN: $(System.AccessToken) displayName: ✍️ Codesign - - script: node build/azure-pipelines/common/sign $(Agent.RootDirectory)/_tasks/EsrpCodeSigning_*/*/net6.0/esrpcli.dll notarize-darwin $(Build.ArtifactStagingDirectory)/pkg "*.zip" + - script: node build/azure-pipelines/common/sign.ts $(Agent.RootDirectory)/_tasks/EsrpCodeSigning_*/*/net6.0/esrpcli.dll notarize-darwin 
$(Build.ArtifactStagingDirectory)/pkg "*.zip" env: SYSTEM_ACCESSTOKEN: $(System.AccessToken) displayName: ✍️ Notarize diff --git a/build/azure-pipelines/darwin/steps/product-build-darwin-compile.yml b/build/azure-pipelines/darwin/steps/product-build-darwin-compile.yml index d1d431505f6..50ef7bd6158 100644 --- a/build/azure-pipelines/darwin/steps/product-build-darwin-compile.yml +++ b/build/azure-pipelines/darwin/steps/product-build-darwin-compile.yml @@ -43,7 +43,7 @@ steps: condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) displayName: Setup NPM Registry - - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js darwin $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash + - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts darwin $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash displayName: Prepare node_modules cache key - task: Cache@2 @@ -100,25 +100,25 @@ steps: displayName: Install dependencies condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - script: node build/azure-pipelines/distro/mixin-npm + - script: node build/azure-pipelines/distro/mixin-npm.ts condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) displayName: Mixin distro node modules - script: | set -e - node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt + node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt mkdir -p .build/node_modules_cache tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) displayName: Create node_modules archive - - script: node build/azure-pipelines/distro/mixin-quality + - script: node build/azure-pipelines/distro/mixin-quality.ts displayName: Mixin distro quality - template: ../../common/install-builtin-extensions.yml@self - ${{ if ne(parameters.VSCODE_CIBUILD, true) }}: - - script: node build/lib/policies/policyGenerator build/lib/policies/policyData.jsonc darwin + - script: npm run copy-policy-dto --prefix build && node build/lib/policies/policyGenerator.ts build/lib/policies/policyData.jsonc darwin displayName: Generate policy definitions retryCountOnTaskFailure: 3 @@ -178,8 +178,8 @@ steps: set -e APP_ROOT="$(Agent.BuildDirectory)/VSCode-darwin-$(VSCODE_ARCH)" APP_NAME="`ls $APP_ROOT | head -n 1`" - APP_PATH="$APP_ROOT/$APP_NAME" node build/darwin/verify-macho.js $(VSCODE_ARCH) - APP_PATH="$(Agent.BuildDirectory)/vscode-server-darwin-$(VSCODE_ARCH)" node build/darwin/verify-macho.js $(VSCODE_ARCH) + APP_PATH="$APP_ROOT/$APP_NAME" node build/darwin/verify-macho.ts $(VSCODE_ARCH) + APP_PATH="$(Agent.BuildDirectory)/vscode-server-darwin-$(VSCODE_ARCH)" node build/darwin/verify-macho.ts $(VSCODE_ARCH) displayName: Verify arch of Mach-O objects - script: | @@ -191,7 +191,7 @@ steps: condition: eq(variables['BUILT_CLIENT'], 'true') displayName: Package client - - pwsh: node build/azure-pipelines/common/checkForArtifact.js CLIENT_ARCHIVE_UPLOADED unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive + - pwsh: node build/azure-pipelines/common/checkForArtifact.ts CLIENT_ARCHIVE_UPLOADED unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive env: SYSTEM_ACCESSTOKEN: $(System.AccessToken) displayName: Check for client artifact @@ -221,7 +221,7 @@ steps: security import $(agent.tempdirectory)/cert.p12 -k $(agent.tempdirectory)/buildagent.keychain -P "$(macos-developer-certificate-key)" 
-T /usr/bin/codesign export CODESIGN_IDENTITY=$(security find-identity -v -p codesigning $(agent.tempdirectory)/buildagent.keychain | grep -oEi "([0-9A-F]{40})" | head -n 1) security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k pwd $(agent.tempdirectory)/buildagent.keychain - DEBUG=electron-osx-sign* node build/darwin/sign.js $(agent.builddirectory) + DEBUG=electron-osx-sign* node build/darwin/sign.ts $(agent.builddirectory) displayName: Set Hardened Entitlements - script: | @@ -257,7 +257,7 @@ steps: echo "##vso[task.setvariable variable=EsrpCliDllPath]$Version/net6.0/esrpcli.dll" displayName: Find ESRP CLI - - script: npx deemon --detach --wait node build/azure-pipelines/darwin/codesign.js + - script: npx deemon --detach --wait node build/azure-pipelines/darwin/codesign.ts env: EsrpCliDllPath: $(EsrpCliDllPath) SYSTEM_ACCESSTOKEN: $(System.AccessToken) @@ -271,7 +271,7 @@ steps: VSCODE_RUN_REMOTE_TESTS: ${{ parameters.VSCODE_RUN_REMOTE_TESTS }} - ${{ if ne(parameters.VSCODE_CIBUILD, true) }}: - - script: npx deemon --attach node build/azure-pipelines/darwin/codesign.js + - script: npx deemon --attach node build/azure-pipelines/darwin/codesign.ts condition: succeededOrFailed() displayName: "Post-job: ✍️ Codesign & Notarize" diff --git a/build/azure-pipelines/distro/mixin-npm.js b/build/azure-pipelines/distro/mixin-npm.js deleted file mode 100644 index 87958a5d449..00000000000 --- a/build/azure-pipelines/distro/mixin-npm.js +++ /dev/null @@ -1,38 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const { dirs } = require('../../npm/dirs'); -function log(...args) { - console.log(`[${new Date().toLocaleTimeString('en', { hour12: false })}]`, '[distro]', ...args); -} -function mixin(mixinPath) { - if (!fs_1.default.existsSync(`${mixinPath}/node_modules`)) { - log(`Skipping distro npm dependencies: ${mixinPath} (no node_modules)`); - return; - } - log(`Mixing in distro npm dependencies: ${mixinPath}`); - const distroPackageJson = JSON.parse(fs_1.default.readFileSync(`${mixinPath}/package.json`, 'utf8')); - const targetPath = path_1.default.relative('.build/distro/npm', mixinPath); - for (const dependency of Object.keys(distroPackageJson.dependencies)) { - fs_1.default.rmSync(`./${targetPath}/node_modules/${dependency}`, { recursive: true, force: true }); - fs_1.default.cpSync(`${mixinPath}/node_modules/${dependency}`, `./${targetPath}/node_modules/${dependency}`, { recursive: true, force: true, dereference: true }); - } - log(`Mixed in distro npm dependencies: ${mixinPath} ✔︎`); -} -function main() { - log(`Mixing in distro npm dependencies...`); - const mixinPaths = dirs.filter(d => /^.build\/distro\/npm/.test(d)); - for (const mixinPath of mixinPaths) { - mixin(mixinPath); - } -} -main(); -//# sourceMappingURL=mixin-npm.js.map \ No newline at end of file diff --git a/build/azure-pipelines/distro/mixin-npm.ts b/build/azure-pipelines/distro/mixin-npm.ts index f98f6e6b55d..ce0441c9b72 100644 --- a/build/azure-pipelines/distro/mixin-npm.ts +++ b/build/azure-pipelines/distro/mixin-npm.ts @@ -5,7 +5,7 @@ import fs from 'fs'; import path from 'path'; -const { dirs } = require('../../npm/dirs') as { dirs: string[] }; +import { dirs } from '../../npm/dirs.js'; function log(...args: unknown[]): void { console.log(`[${new Date().toLocaleTimeString('en', { hour12: false })}]`, '[distro]', ...args); diff --git a/build/azure-pipelines/distro/mixin-quality.js b/build/azure-pipelines/distro/mixin-quality.js deleted file mode 100644 index 335f63ca1fc..00000000000 --- a/build/azure-pipelines/distro/mixin-quality.js +++ /dev/null @@ -1,56 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -function log(...args) { - console.log(`[${new Date().toLocaleTimeString('en', { hour12: false })}]`, '[distro]', ...args); -} -function main() { - const quality = process.env['VSCODE_QUALITY']; - if (!quality) { - throw new Error('Missing VSCODE_QUALITY, skipping mixin'); - } - log(`Mixing in distro quality...`); - const basePath = `.build/distro/mixin/${quality}`; - for (const name of fs_1.default.readdirSync(basePath)) { - const distroPath = path_1.default.join(basePath, name); - const ossPath = path_1.default.relative(basePath, distroPath); - if (ossPath === 'product.json') { - const distro = JSON.parse(fs_1.default.readFileSync(distroPath, 'utf8')); - const oss = JSON.parse(fs_1.default.readFileSync(ossPath, 'utf8')); - let builtInExtensions = oss.builtInExtensions; - if (Array.isArray(distro.builtInExtensions)) { - log('Overwriting built-in extensions:', distro.builtInExtensions.map(e => e.name)); - builtInExtensions = distro.builtInExtensions; - } - else if (distro.builtInExtensions) { - const include = distro.builtInExtensions['include'] ?? []; - const exclude = distro.builtInExtensions['exclude'] ?? []; - log('OSS built-in extensions:', builtInExtensions.map(e => e.name)); - log('Including built-in extensions:', include.map(e => e.name)); - log('Excluding built-in extensions:', exclude); - builtInExtensions = builtInExtensions.filter(ext => !include.find(e => e.name === ext.name) && !exclude.find(name => name === ext.name)); - builtInExtensions = [...builtInExtensions, ...include]; - log('Final built-in extensions:', builtInExtensions.map(e => e.name)); - } - else { - log('Inheriting OSS built-in extensions', builtInExtensions.map(e => e.name)); - } - const result = { webBuiltInExtensions: oss.webBuiltInExtensions, ...distro, builtInExtensions }; - fs_1.default.writeFileSync(ossPath, JSON.stringify(result, null, '\t'), 'utf8'); - } - else { - fs_1.default.cpSync(distroPath, ossPath, { force: true, recursive: true }); - } - log(distroPath, '✔︎'); - } -} -main(); -//# sourceMappingURL=mixin-quality.js.map \ No newline at end of file diff --git a/build/azure-pipelines/linux/codesign.js b/build/azure-pipelines/linux/codesign.js deleted file mode 100644 index 98b97db5666..00000000000 --- a/build/azure-pipelines/linux/codesign.js +++ /dev/null @@ -1,29 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -const codesign_1 = require("../common/codesign"); -const publish_1 = require("../common/publish"); -async function main() { - const esrpCliDLLPath = (0, publish_1.e)('EsrpCliDllPath'); - // Start the code sign processes in parallel - // 1. Codesign deb package - // 2. 
Codesign rpm package - const codesignTask1 = (0, codesign_1.spawnCodesignProcess)(esrpCliDLLPath, 'sign-pgp', '.build/linux/deb', '*.deb'); - const codesignTask2 = (0, codesign_1.spawnCodesignProcess)(esrpCliDLLPath, 'sign-pgp', '.build/linux/rpm', '*.rpm'); - // Codesign deb package - (0, codesign_1.printBanner)('Codesign deb package'); - await (0, codesign_1.streamProcessOutputAndCheckResult)('Codesign deb package', codesignTask1); - // Codesign rpm package - (0, codesign_1.printBanner)('Codesign rpm package'); - await (0, codesign_1.streamProcessOutputAndCheckResult)('Codesign rpm package', codesignTask2); -} -main().then(() => { - process.exit(0); -}, err => { - console.error(`ERROR: ${err}`); - process.exit(1); -}); -//# sourceMappingURL=codesign.js.map \ No newline at end of file diff --git a/build/azure-pipelines/linux/codesign.ts b/build/azure-pipelines/linux/codesign.ts index 1f74cc21ee9..67a34d9e7a1 100644 --- a/build/azure-pipelines/linux/codesign.ts +++ b/build/azure-pipelines/linux/codesign.ts @@ -3,8 +3,8 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import { printBanner, spawnCodesignProcess, streamProcessOutputAndCheckResult } from '../common/codesign'; -import { e } from '../common/publish'; +import { printBanner, spawnCodesignProcess, streamProcessOutputAndCheckResult } from '../common/codesign.ts'; +import { e } from '../common/publish.ts'; async function main() { const esrpCliDLLPath = e('EsrpCliDllPath'); diff --git a/build/azure-pipelines/linux/product-build-linux-node-modules.yml b/build/azure-pipelines/linux/product-build-linux-node-modules.yml index e3fd5c35173..cfbdae8d55f 100644 --- a/build/azure-pipelines/linux/product-build-linux-node-modules.yml +++ b/build/azure-pipelines/linux/product-build-linux-node-modules.yml @@ -52,7 +52,7 @@ jobs: condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) displayName: Setup NPM Registry - - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js linux $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash + - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts linux $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash displayName: Prepare node_modules cache key - task: Cache@2 @@ -104,7 +104,7 @@ jobs: SYSROOT_ARCH="amd64" fi export VSCODE_SYSROOT_DIR=$(Build.SourcesDirectory)/.build/sysroots/glibc-2.28-gcc-8.5.0 - SYSROOT_ARCH="$SYSROOT_ARCH" VSCODE_SYSROOT_PREFIX="-glibc-2.28-gcc-8.5.0" node -e '(async () => { const { getVSCodeSysroot } = require("./build/linux/debian/install-sysroot.js"); await getVSCodeSysroot(process.env["SYSROOT_ARCH"]); })()' + SYSROOT_ARCH="$SYSROOT_ARCH" VSCODE_SYSROOT_PREFIX="-glibc-2.28-gcc-8.5.0" node -e 'import { getVSCodeSysroot } from "./build/linux/debian/install-sysroot.ts"; (async () => { await getVSCodeSysroot(process.env["SYSROOT_ARCH"]); })()' env: VSCODE_ARCH: $(VSCODE_ARCH) GITHUB_TOKEN: "$(github-distro-mixin-password)" @@ -137,13 +137,13 @@ jobs: displayName: Install dependencies condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - script: node build/azure-pipelines/distro/mixin-npm + - script: node build/azure-pipelines/distro/mixin-npm.ts condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) displayName: Mixin distro node modules - script: | set -e - node 
build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt + node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt mkdir -p .build/node_modules_cache tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) diff --git a/build/azure-pipelines/linux/setup-env.sh b/build/azure-pipelines/linux/setup-env.sh index b7804545b6b..48274a8d38e 100755 --- a/build/azure-pipelines/linux/setup-env.sh +++ b/build/azure-pipelines/linux/setup-env.sh @@ -13,14 +13,14 @@ if [ -d "$VSCODE_CLIENT_SYSROOT_DIR" ]; then echo "Using cached client sysroot" else echo "Downloading client sysroot" - SYSROOT_ARCH="$SYSROOT_ARCH" VSCODE_SYSROOT_DIR="$VSCODE_CLIENT_SYSROOT_DIR" node -e '(async () => { const { getVSCodeSysroot } = require("./build/linux/debian/install-sysroot.js"); await getVSCodeSysroot(process.env["SYSROOT_ARCH"]); })()' + SYSROOT_ARCH="$SYSROOT_ARCH" VSCODE_SYSROOT_DIR="$VSCODE_CLIENT_SYSROOT_DIR" node -e 'import { getVSCodeSysroot } from "./build/linux/debian/install-sysroot.ts"; (async () => { await getVSCodeSysroot(process.env["SYSROOT_ARCH"]); })()' fi if [ -d "$VSCODE_REMOTE_SYSROOT_DIR" ]; then echo "Using cached remote sysroot" else echo "Downloading remote sysroot" - SYSROOT_ARCH="$SYSROOT_ARCH" VSCODE_SYSROOT_DIR="$VSCODE_REMOTE_SYSROOT_DIR" VSCODE_SYSROOT_PREFIX="-glibc-2.28-gcc-8.5.0" node -e '(async () => { const { getVSCodeSysroot } = require("./build/linux/debian/install-sysroot.js"); await getVSCodeSysroot(process.env["SYSROOT_ARCH"]); })()' + SYSROOT_ARCH="$SYSROOT_ARCH" VSCODE_SYSROOT_DIR="$VSCODE_REMOTE_SYSROOT_DIR" VSCODE_SYSROOT_PREFIX="-glibc-2.28-gcc-8.5.0" node -e 'import { getVSCodeSysroot } from "./build/linux/debian/install-sysroot.ts"; (async () => { await getVSCodeSysroot(process.env["SYSROOT_ARCH"]); })()' fi if [ "$npm_config_arch" == "x64" ]; then @@ -33,7 +33,7 @@ if [ "$npm_config_arch" == "x64" ]; then VSCODE_LIBCXX_HEADERS_DIR=$PWD/.build/libcxx_headers \ VSCODE_LIBCXXABI_HEADERS_DIR=$PWD/.build/libcxxabi_headers \ VSCODE_ARCH="$npm_config_arch" \ - node build/linux/libcxx-fetcher.js + node build/linux/libcxx-fetcher.ts # Set compiler toolchain # Flags for the client build are based on diff --git a/build/azure-pipelines/linux/steps/product-build-linux-compile.yml b/build/azure-pipelines/linux/steps/product-build-linux-compile.yml index 9dc3f9e120b..7548c0498d0 100644 --- a/build/azure-pipelines/linux/steps/product-build-linux-compile.yml +++ b/build/azure-pipelines/linux/steps/product-build-linux-compile.yml @@ -65,7 +65,7 @@ steps: condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) displayName: Setup NPM Registry - - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js linux $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash + - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts linux $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash displayName: Prepare node_modules cache key - task: Cache@2 @@ -121,7 +121,7 @@ steps: SYSROOT_ARCH="amd64" fi export VSCODE_SYSROOT_DIR=$(Build.SourcesDirectory)/.build/sysroots/glibc-2.28-gcc-8.5.0 - SYSROOT_ARCH="$SYSROOT_ARCH" VSCODE_SYSROOT_PREFIX="-glibc-2.28-gcc-8.5.0" node -e '(async () => { const { getVSCodeSysroot } = require("./build/linux/debian/install-sysroot.js"); await getVSCodeSysroot(process.env["SYSROOT_ARCH"]); })()' + SYSROOT_ARCH="$SYSROOT_ARCH" 
VSCODE_SYSROOT_PREFIX="-glibc-2.28-gcc-8.5.0" node -e 'import { getVSCodeSysroot } from "./build/linux/debian/install-sysroot.ts"; (async () => { await getVSCodeSysroot(process.env["SYSROOT_ARCH"]); })()' env: VSCODE_ARCH: $(VSCODE_ARCH) GITHUB_TOKEN: "$(github-distro-mixin-password)" @@ -153,25 +153,25 @@ steps: displayName: Install dependencies condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - script: node build/azure-pipelines/distro/mixin-npm + - script: node build/azure-pipelines/distro/mixin-npm.ts condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) displayName: Mixin distro node modules - script: | set -e - node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt + node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt mkdir -p .build/node_modules_cache tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) displayName: Create node_modules archive - - script: node build/azure-pipelines/distro/mixin-quality + - script: node build/azure-pipelines/distro/mixin-quality.ts displayName: Mixin distro quality - template: ../../common/install-builtin-extensions.yml@self - ${{ if ne(parameters.VSCODE_CIBUILD, true) }}: - - script: node build/lib/policies/policyGenerator build/lib/policies/policyData.jsonc linux + - script: npm run copy-policy-dto --prefix build && node build/lib/policies/policyGenerator.ts build/lib/policies/policyData.jsonc linux displayName: Generate policy definitions retryCountOnTaskFailure: 3 @@ -365,7 +365,7 @@ steps: echo "##vso[task.setvariable variable=EsrpCliDllPath]$Version/net6.0/esrpcli.dll" displayName: Find ESRP CLI - - script: npx deemon --detach --wait node build/azure-pipelines/linux/codesign.js + - script: npx deemon --detach --wait node build/azure-pipelines/linux/codesign.ts env: EsrpCliDllPath: $(EsrpCliDllPath) SYSTEM_ACCESSTOKEN: $(System.AccessToken) @@ -379,7 +379,7 @@ steps: VSCODE_RUN_REMOTE_TESTS: ${{ parameters.VSCODE_RUN_REMOTE_TESTS }} - ${{ if ne(parameters.VSCODE_CIBUILD, true) }}: - - script: npx deemon --attach node build/azure-pipelines/linux/codesign.js + - script: npx deemon --attach node build/azure-pipelines/linux/codesign.ts condition: succeededOrFailed() displayName: "✍️ Post-job: Codesign deb & rpm" diff --git a/build/azure-pipelines/product-compile.yml b/build/azure-pipelines/product-compile.yml index e025e84f911..caa539c67cb 100644 --- a/build/azure-pipelines/product-compile.yml +++ b/build/azure-pipelines/product-compile.yml @@ -33,7 +33,7 @@ jobs: condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) displayName: Setup NPM Registry - - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js compile $(node -p process.arch) > .build/packagelockhash + - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts compile $(node -p process.arch) > .build/packagelockhash displayName: Prepare node_modules cache key - task: Cache@2 @@ -81,19 +81,19 @@ jobs: displayName: Install dependencies condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - script: node build/azure-pipelines/distro/mixin-npm + - script: node build/azure-pipelines/distro/mixin-npm.ts condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) displayName: Mixin distro node modules - script: | set -e - node build/azure-pipelines/common/listNodeModules.js 
.build/node_modules_list.txt + node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt mkdir -p .build/node_modules_cache tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) displayName: Create node_modules archive - - script: node build/azure-pipelines/distro/mixin-quality + - script: node build/azure-pipelines/distro/mixin-quality.ts displayName: Mixin distro quality - template: common/install-builtin-extensions.yml@self @@ -135,7 +135,7 @@ jobs: AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \ AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \ AZURE_ID_TOKEN="$(AZURE_ID_TOKEN)" \ - node build/azure-pipelines/upload-sourcemaps + node build/azure-pipelines/upload-sourcemaps.ts displayName: Upload sourcemaps to Azure - script: ./build/azure-pipelines/common/extract-telemetry.sh diff --git a/build/azure-pipelines/product-publish.yml b/build/azure-pipelines/product-publish.yml index aa0727a1988..165fb177a9a 100644 --- a/build/azure-pipelines/product-publish.yml +++ b/build/azure-pipelines/product-publish.yml @@ -82,7 +82,7 @@ jobs: $VERSION = node -p "require('./package.json').version" Write-Host "Creating build with version: $VERSION" - exec { node build/azure-pipelines/common/createBuild.js $VERSION } + exec { node build/azure-pipelines/common/createBuild.ts $VERSION } env: AZURE_TENANT_ID: "$(AZURE_TENANT_ID)" AZURE_CLIENT_ID: "$(AZURE_CLIENT_ID)" @@ -90,7 +90,7 @@ jobs: displayName: Create build if it hasn't been created before - pwsh: | - $publishAuthTokens = (node build/azure-pipelines/common/getPublishAuthTokens) + $publishAuthTokens = (node build/azure-pipelines/common/getPublishAuthTokens.ts) Write-Host "##vso[task.setvariable variable=PUBLISH_AUTH_TOKENS;issecret=true]$publishAuthTokens" env: AZURE_TENANT_ID: "$(AZURE_TENANT_ID)" @@ -98,7 +98,7 @@ jobs: AZURE_ID_TOKEN: "$(AZURE_ID_TOKEN)" displayName: Get publish auth tokens - - pwsh: node build/azure-pipelines/common/publish.js + - pwsh: node build/azure-pipelines/common/publish.ts env: SYSTEM_ACCESSTOKEN: $(System.AccessToken) PUBLISH_AUTH_TOKENS: "$(PUBLISH_AUTH_TOKENS)" @@ -110,7 +110,7 @@ jobs: retryCountOnTaskFailure: 3 - ${{ if and(in(parameters.VSCODE_QUALITY, 'insider', 'exploration'), eq(parameters.VSCODE_SCHEDULEDBUILD, true)) }}: - - script: node build/azure-pipelines/common/releaseBuild.js + - script: node build/azure-pipelines/common/releaseBuild.ts env: PUBLISH_AUTH_TOKENS: "$(PUBLISH_AUTH_TOKENS)" displayName: Release build diff --git a/build/azure-pipelines/product-release.yml b/build/azure-pipelines/product-release.yml index bac4d0e53fa..72b33a78ad1 100644 --- a/build/azure-pipelines/product-release.yml +++ b/build/azure-pipelines/product-release.yml @@ -27,7 +27,7 @@ steps: displayName: Install build dependencies - pwsh: | - $publishAuthTokens = (node build/azure-pipelines/common/getPublishAuthTokens) + $publishAuthTokens = (node build/azure-pipelines/common/getPublishAuthTokens.ts) Write-Host "##vso[task.setvariable variable=PUBLISH_AUTH_TOKENS;issecret=true]$publishAuthTokens" env: AZURE_TENANT_ID: "$(AZURE_TENANT_ID)" @@ -35,7 +35,7 @@ steps: AZURE_ID_TOKEN: "$(AZURE_ID_TOKEN)" displayName: Get publish auth tokens - - script: node build/azure-pipelines/common/releaseBuild.js ${{ parameters.VSCODE_RELEASE }} + - script: node build/azure-pipelines/common/releaseBuild.ts ${{ parameters.VSCODE_RELEASE }} displayName: Release build env: PUBLISH_AUTH_TOKENS: "$(PUBLISH_AUTH_TOKENS)" diff --git 
a/build/azure-pipelines/publish-types/check-version.js b/build/azure-pipelines/publish-types/check-version.js deleted file mode 100644 index 5bd80a69bbf..00000000000 --- a/build/azure-pipelines/publish-types/check-version.js +++ /dev/null @@ -1,40 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const child_process_1 = __importDefault(require("child_process")); -let tag = ''; -try { - tag = child_process_1.default - .execSync('git describe --tags `git rev-list --tags --max-count=1`') - .toString() - .trim(); - if (!isValidTag(tag)) { - throw Error(`Invalid tag ${tag}`); - } -} -catch (err) { - console.error(err); - console.error('Failed to update types'); - process.exit(1); -} -function isValidTag(t) { - if (t.split('.').length !== 3) { - return false; - } - const [major, minor, bug] = t.split('.'); - // Only release for tags like 1.34.0 - if (bug !== '0') { - return false; - } - if (isNaN(parseInt(major, 10)) || isNaN(parseInt(minor, 10))) { - return false; - } - return true; -} -//# sourceMappingURL=check-version.js.map \ No newline at end of file diff --git a/build/azure-pipelines/publish-types/publish-types.yml b/build/azure-pipelines/publish-types/publish-types.yml index 65882ce1971..25dbf1f185a 100644 --- a/build/azure-pipelines/publish-types/publish-types.yml +++ b/build/azure-pipelines/publish-types/publish-types.yml @@ -34,7 +34,7 @@ steps: - bash: | # Install build dependencies (cd build && npm ci) - node build/azure-pipelines/publish-types/check-version.js + node build/azure-pipelines/publish-types/check-version.ts displayName: Check version - bash: | @@ -42,7 +42,7 @@ steps: git config --global user.name "VSCode" git clone https://$(GITHUB_TOKEN)@github.com/DefinitelyTyped/DefinitelyTyped.git --depth=1 - node build/azure-pipelines/publish-types/update-types.js + node build/azure-pipelines/publish-types/update-types.ts TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`) diff --git a/build/azure-pipelines/publish-types/update-types.js b/build/azure-pipelines/publish-types/update-types.js deleted file mode 100644 index 6638de99c29..00000000000 --- a/build/azure-pipelines/publish-types/update-types.js +++ /dev/null @@ -1,80 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs_1 = __importDefault(require("fs")); -const child_process_1 = __importDefault(require("child_process")); -const path_1 = __importDefault(require("path")); -let tag = ''; -try { - tag = child_process_1.default - .execSync('git describe --tags `git rev-list --tags --max-count=1`') - .toString() - .trim(); - const [major, minor] = tag.split('.'); - const shorttag = `${major}.${minor}`; - const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vscode-dts/vscode.d.ts`; - const outDtsPath = path_1.default.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts'); - child_process_1.default.execSync(`curl ${dtsUri} --output ${outDtsPath}`); - updateDTSFile(outDtsPath, shorttag); - const outPackageJsonPath = path_1.default.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/package.json'); - const packageJson = JSON.parse(fs_1.default.readFileSync(outPackageJsonPath, 'utf-8')); - packageJson.version = shorttag + '.9999'; - fs_1.default.writeFileSync(outPackageJsonPath, JSON.stringify(packageJson, null, 2) + '\n'); - console.log(`Done updating vscode.d.ts at ${outDtsPath} and package.json to version ${packageJson.version}`); -} -catch (err) { - console.error(err); - console.error('Failed to update types'); - process.exit(1); -} -function updateDTSFile(outPath, shorttag) { - const oldContent = fs_1.default.readFileSync(outPath, 'utf-8'); - const newContent = getNewFileContent(oldContent, shorttag); - fs_1.default.writeFileSync(outPath, newContent); -} -function repeat(str, times) { - const result = new Array(times); - for (let i = 0; i < times; i++) { - result[i] = str; - } - return result.join(''); -} -function convertTabsToSpaces(str) { - return str.replace(/\t/gm, value => repeat(' ', value.length)); -} -function getNewFileContent(content, shorttag) { - const oldheader = [ - `/*---------------------------------------------------------------------------------------------`, - ` * Copyright (c) Microsoft Corporation. All rights reserved.`, - ` * Licensed under the MIT License. See License.txt in the project root for license information.`, - ` *--------------------------------------------------------------------------------------------*/` - ].join('\n'); - return convertTabsToSpaces(getNewFileHeader(shorttag) + content.slice(oldheader.length)); -} -function getNewFileHeader(shorttag) { - const header = [ - `// Type definitions for Visual Studio Code ${shorttag}`, - `// Project: https://github.com/microsoft/vscode`, - `// Definitions by: Visual Studio Code Team, Microsoft `, - `// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`, - ``, - `/*---------------------------------------------------------------------------------------------`, - ` * Copyright (c) Microsoft Corporation. 
All rights reserved.`, - ` * Licensed under the MIT License.`, - ` * See https://github.com/microsoft/vscode/blob/main/LICENSE.txt for license information.`, - ` *--------------------------------------------------------------------------------------------*/`, - ``, - `/**`, - ` * Type Definition for Visual Studio Code ${shorttag} Extension API`, - ` * See https://code.visualstudio.com/api for more information`, - ` */` - ].join('\n'); - return header; -} -//# sourceMappingURL=update-types.js.map \ No newline at end of file diff --git a/build/azure-pipelines/upload-cdn.js b/build/azure-pipelines/upload-cdn.js deleted file mode 100644 index 14108297ed4..00000000000 --- a/build/azure-pipelines/upload-cdn.js +++ /dev/null @@ -1,121 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const event_stream_1 = __importDefault(require("event-stream")); -const vinyl_1 = __importDefault(require("vinyl")); -const vinyl_fs_1 = __importDefault(require("vinyl-fs")); -const gulp_filter_1 = __importDefault(require("gulp-filter")); -const gulp_gzip_1 = __importDefault(require("gulp-gzip")); -const mime_1 = __importDefault(require("mime")); -const identity_1 = require("@azure/identity"); -const util_1 = require("../lib/util"); -const gulp_azure_storage_1 = __importDefault(require("gulp-azure-storage")); -const commit = process.env['BUILD_SOURCEVERSION']; -const credential = new identity_1.ClientAssertionCredential(process.env['AZURE_TENANT_ID'], process.env['AZURE_CLIENT_ID'], () => Promise.resolve(process.env['AZURE_ID_TOKEN'])); -mime_1.default.define({ - 'application/typescript': ['ts'], - 'application/json': ['code-snippets'], -}); -// From default AFD configuration -const MimeTypesToCompress = new Set([ - 'application/eot', - 'application/font', - 'application/font-sfnt', - 'application/javascript', - 'application/json', - 'application/opentype', - 'application/otf', - 'application/pkcs7-mime', - 'application/truetype', - 'application/ttf', - 'application/typescript', - 'application/vnd.ms-fontobject', - 'application/xhtml+xml', - 'application/xml', - 'application/xml+rss', - 'application/x-font-opentype', - 'application/x-font-truetype', - 'application/x-font-ttf', - 'application/x-httpd-cgi', - 'application/x-javascript', - 'application/x-mpegurl', - 'application/x-opentype', - 'application/x-otf', - 'application/x-perl', - 'application/x-ttf', - 'font/eot', - 'font/ttf', - 'font/otf', - 'font/opentype', - 'image/svg+xml', - 'text/css', - 'text/csv', - 'text/html', - 'text/javascript', - 'text/js', - 'text/markdown', - 'text/plain', - 'text/richtext', - 'text/tab-separated-values', - 'text/xml', - 'text/x-script', - 'text/x-component', - 'text/x-java-source' -]); -function wait(stream) { - return new Promise((c, e) => { - stream.on('end', () => c()); - stream.on('error', (err) => e(err)); - }); -} -async function main() { - const files = []; - const options = (compressed) => ({ - account: process.env.AZURE_STORAGE_ACCOUNT, - credential, - container: '$web', - prefix: 
`${process.env.VSCODE_QUALITY}/${commit}/`, - contentSettings: { - contentEncoding: compressed ? 'gzip' : undefined, - cacheControl: 'max-age=31536000, public' - } - }); - const all = vinyl_fs_1.default.src('**', { cwd: '../vscode-web', base: '../vscode-web', dot: true }) - .pipe((0, gulp_filter_1.default)(f => !f.isDirectory())); - const compressed = all - .pipe((0, gulp_filter_1.default)(f => MimeTypesToCompress.has(mime_1.default.lookup(f.path)))) - .pipe((0, gulp_gzip_1.default)({ append: false })) - .pipe(gulp_azure_storage_1.default.upload(options(true))); - const uncompressed = all - .pipe((0, gulp_filter_1.default)(f => !MimeTypesToCompress.has(mime_1.default.lookup(f.path)))) - .pipe(gulp_azure_storage_1.default.upload(options(false))); - const out = event_stream_1.default.merge(compressed, uncompressed) - .pipe(event_stream_1.default.through(function (f) { - console.log('Uploaded:', f.relative); - files.push(f.relative); - this.emit('data', f); - })); - console.log(`Uploading files to CDN...`); // debug - await wait(out); - const listing = new vinyl_1.default({ - path: 'files.txt', - contents: Buffer.from(files.join('\n')), - stat: new util_1.VinylStat({ mode: 0o666 }) - }); - const filesOut = event_stream_1.default.readArray([listing]) - .pipe((0, gulp_gzip_1.default)({ append: false })) - .pipe(gulp_azure_storage_1.default.upload(options(true))); - console.log(`Uploading: files.txt (${files.length} files)`); // debug - await wait(filesOut); -} -main().catch(err => { - console.error(err); - process.exit(1); -}); -//# sourceMappingURL=upload-cdn.js.map \ No newline at end of file diff --git a/build/azure-pipelines/upload-cdn.ts b/build/azure-pipelines/upload-cdn.ts index d589c423522..dbc11ddbebd 100644 --- a/build/azure-pipelines/upload-cdn.ts +++ b/build/azure-pipelines/upload-cdn.ts @@ -10,7 +10,7 @@ import filter from 'gulp-filter'; import gzip from 'gulp-gzip'; import mime from 'mime'; import { ClientAssertionCredential } from '@azure/identity'; -import { VinylStat } from '../lib/util'; +import { VinylStat } from '../lib/util.ts'; import azure from 'gulp-azure-storage'; const commit = process.env['BUILD_SOURCEVERSION']; diff --git a/build/azure-pipelines/upload-nlsmetadata.js b/build/azure-pipelines/upload-nlsmetadata.js deleted file mode 100644 index 65386797fc9..00000000000 --- a/build/azure-pipelines/upload-nlsmetadata.js +++ /dev/null @@ -1,127 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const event_stream_1 = __importDefault(require("event-stream")); -const vinyl_fs_1 = __importDefault(require("vinyl-fs")); -const gulp_merge_json_1 = __importDefault(require("gulp-merge-json")); -const gulp_gzip_1 = __importDefault(require("gulp-gzip")); -const identity_1 = require("@azure/identity"); -const path = require("path"); -const fs_1 = require("fs"); -const gulp_azure_storage_1 = __importDefault(require("gulp-azure-storage")); -const commit = process.env['BUILD_SOURCEVERSION']; -const credential = new identity_1.ClientAssertionCredential(process.env['AZURE_TENANT_ID'], process.env['AZURE_CLIENT_ID'], () => Promise.resolve(process.env['AZURE_ID_TOKEN'])); -function main() { - return new Promise((c, e) => { - const combinedMetadataJson = event_stream_1.default.merge( - // vscode: we are not using `out-build/nls.metadata.json` here because - // it includes metadata for translators for `keys`. but for our purpose - // we want only the `keys` and `messages` as `string`. - event_stream_1.default.merge(vinyl_fs_1.default.src('out-build/nls.keys.json', { base: 'out-build' }), vinyl_fs_1.default.src('out-build/nls.messages.json', { base: 'out-build' })) - .pipe((0, gulp_merge_json_1.default)({ - fileName: 'vscode.json', - jsonSpace: '', - concatArrays: true, - edit: (parsedJson, file) => { - if (file.base === 'out-build') { - if (file.basename === 'nls.keys.json') { - return { keys: parsedJson }; - } - else { - return { messages: parsedJson }; - } - } - } - })), - // extensions - vinyl_fs_1.default.src('.build/extensions/**/nls.metadata.json', { base: '.build/extensions' }), vinyl_fs_1.default.src('.build/extensions/**/nls.metadata.header.json', { base: '.build/extensions' }), vinyl_fs_1.default.src('.build/extensions/**/package.nls.json', { base: '.build/extensions' })).pipe((0, gulp_merge_json_1.default)({ - fileName: 'combined.nls.metadata.json', - jsonSpace: '', - concatArrays: true, - edit: (parsedJson, file) => { - if (file.basename === 'vscode.json') { - return { vscode: parsedJson }; - } - // Handle extensions and follow the same structure as the Core nls file. - switch (file.basename) { - case 'package.nls.json': - // put package.nls.json content in Core NlsMetadata format - // language packs use the key "package" to specify that - // translations are for the package.json file - parsedJson = { - messages: { - package: Object.values(parsedJson) - }, - keys: { - package: Object.keys(parsedJson) - }, - bundles: { - main: ['package'] - } - }; - break; - case 'nls.metadata.header.json': - parsedJson = { header: parsedJson }; - break; - case 'nls.metadata.json': { - // put nls.metadata.json content in Core NlsMetadata format - const modules = Object.keys(parsedJson); - const json = { - keys: {}, - messages: {}, - bundles: { - main: [] - } - }; - for (const module of modules) { - json.messages[module] = parsedJson[module].messages; - json.keys[module] = parsedJson[module].keys; - json.bundles.main.push(module); - } - parsedJson = json; - break; - } - } - // Get extension id and use that as the key - const folderPath = path.join(file.base, file.relative.split('/')[0]); - const manifest = (0, fs_1.readFileSync)(path.join(folderPath, 'package.json'), 'utf-8'); - const manifestJson = JSON.parse(manifest); - const key = manifestJson.publisher + '.' 
+ manifestJson.name; - return { [key]: parsedJson }; - }, - })); - const nlsMessagesJs = vinyl_fs_1.default.src('out-build/nls.messages.js', { base: 'out-build' }); - event_stream_1.default.merge(combinedMetadataJson, nlsMessagesJs) - .pipe((0, gulp_gzip_1.default)({ append: false })) - .pipe(vinyl_fs_1.default.dest('./nlsMetadata')) - .pipe(event_stream_1.default.through(function (data) { - console.log(`Uploading ${data.path}`); - // trigger artifact upload - console.log(`##vso[artifact.upload containerfolder=nlsmetadata;artifactname=${data.basename}]${data.path}`); - this.emit('data', data); - })) - .pipe(gulp_azure_storage_1.default.upload({ - account: process.env.AZURE_STORAGE_ACCOUNT, - credential, - container: '$web', - prefix: `nlsmetadata/${commit}/`, - contentSettings: { - contentEncoding: 'gzip', - cacheControl: 'max-age=31536000, public' - } - })) - .on('end', () => c()) - .on('error', (err) => e(err)); - }); -} -main().catch(err => { - console.error(err); - process.exit(1); -}); -//# sourceMappingURL=upload-nlsmetadata.js.map \ No newline at end of file diff --git a/build/azure-pipelines/upload-nlsmetadata.ts b/build/azure-pipelines/upload-nlsmetadata.ts index f1388556249..9d6a803e169 100644 --- a/build/azure-pipelines/upload-nlsmetadata.ts +++ b/build/azure-pipelines/upload-nlsmetadata.ts @@ -9,7 +9,7 @@ import vfs from 'vinyl-fs'; import merge from 'gulp-merge-json'; import gzip from 'gulp-gzip'; import { ClientAssertionCredential } from '@azure/identity'; -import path = require('path'); +import path from 'path'; import { readFileSync } from 'fs'; import azure from 'gulp-azure-storage'; diff --git a/build/azure-pipelines/upload-sourcemaps.js b/build/azure-pipelines/upload-sourcemaps.js deleted file mode 100644 index 525943c2c3d..00000000000 --- a/build/azure-pipelines/upload-sourcemaps.js +++ /dev/null @@ -1,101 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || (function () { - var ownKeys = function(o) { - ownKeys = Object.getOwnPropertyNames || function (o) { - var ar = []; - for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys(o); - }; - return function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); - __setModuleDefault(result, mod); - return result; - }; -})(); -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const path_1 = __importDefault(require("path")); -const event_stream_1 = __importDefault(require("event-stream")); -const vinyl_fs_1 = __importDefault(require("vinyl-fs")); -const util = __importStar(require("../lib/util")); -const dependencies_1 = require("../lib/dependencies"); -const identity_1 = require("@azure/identity"); -const gulp_azure_storage_1 = __importDefault(require("gulp-azure-storage")); -const root = path_1.default.dirname(path_1.default.dirname(__dirname)); -const commit = process.env['BUILD_SOURCEVERSION']; -const credential = new identity_1.ClientAssertionCredential(process.env['AZURE_TENANT_ID'], process.env['AZURE_CLIENT_ID'], () => Promise.resolve(process.env['AZURE_ID_TOKEN'])); -// optionally allow to pass in explicit base/maps to upload -const [, , base, maps] = process.argv; -function src(base, maps = `${base}/**/*.map`) { - return vinyl_fs_1.default.src(maps, { base }) - .pipe(event_stream_1.default.mapSync((f) => { - f.path = `${f.base}/core/${f.relative}`; - return f; - })); -} -function main() { - const sources = []; - // vscode client maps (default) - if (!base) { - const vs = src('out-vscode-min'); // client source-maps only - sources.push(vs); - const productionDependencies = (0, dependencies_1.getProductionDependencies)(root); - const productionDependenciesSrc = productionDependencies.map((d) => path_1.default.relative(root, d)).map((d) => `./${d}/**/*.map`); - const nodeModules = vinyl_fs_1.default.src(productionDependenciesSrc, { base: '.' 
}) - .pipe(util.cleanNodeModules(path_1.default.join(root, 'build', '.moduleignore'))) - .pipe(util.cleanNodeModules(path_1.default.join(root, 'build', `.moduleignore.${process.platform}`))); - sources.push(nodeModules); - const extensionsOut = vinyl_fs_1.default.src(['.build/extensions/**/*.js.map', '!**/node_modules/**'], { base: '.build' }); - sources.push(extensionsOut); - } - // specific client base/maps - else { - sources.push(src(base, maps)); - } - return new Promise((c, e) => { - event_stream_1.default.merge(...sources) - .pipe(event_stream_1.default.through(function (data) { - console.log('Uploading Sourcemap', data.relative); // debug - this.emit('data', data); - })) - .pipe(gulp_azure_storage_1.default.upload({ - account: process.env.AZURE_STORAGE_ACCOUNT, - credential, - container: '$web', - prefix: `sourcemaps/${commit}/` - })) - .on('end', () => c()) - .on('error', (err) => e(err)); - }); -} -main().catch(err => { - console.error(err); - process.exit(1); -}); -//# sourceMappingURL=upload-sourcemaps.js.map \ No newline at end of file diff --git a/build/azure-pipelines/upload-sourcemaps.ts b/build/azure-pipelines/upload-sourcemaps.ts index b63d213d559..9fcba829adc 100644 --- a/build/azure-pipelines/upload-sourcemaps.ts +++ b/build/azure-pipelines/upload-sourcemaps.ts @@ -7,13 +7,13 @@ import path from 'path'; import es from 'event-stream'; import Vinyl from 'vinyl'; import vfs from 'vinyl-fs'; -import * as util from '../lib/util'; -import { getProductionDependencies } from '../lib/dependencies'; +import * as util from '../lib/util.ts'; +import { getProductionDependencies } from '../lib/dependencies.ts'; import { ClientAssertionCredential } from '@azure/identity'; import Stream from 'stream'; import azure from 'gulp-azure-storage'; -const root = path.dirname(path.dirname(__dirname)); +const root = path.dirname(path.dirname(import.meta.dirname)); const commit = process.env['BUILD_SOURCEVERSION']; const credential = new ClientAssertionCredential(process.env['AZURE_TENANT_ID']!, process.env['AZURE_CLIENT_ID']!, () => Promise.resolve(process.env['AZURE_ID_TOKEN']!)); diff --git a/build/azure-pipelines/web/product-build-web-node-modules.yml b/build/azure-pipelines/web/product-build-web-node-modules.yml index 1aea6719de3..6a98a9f79ad 100644 --- a/build/azure-pipelines/web/product-build-web-node-modules.yml +++ b/build/azure-pipelines/web/product-build-web-node-modules.yml @@ -26,7 +26,7 @@ jobs: condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) displayName: Setup NPM Registry - - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js web $(node -p process.arch) > .build/packagelockhash + - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts web $(node -p process.arch) > .build/packagelockhash displayName: Prepare node_modules cache key - task: Cache@2 @@ -77,13 +77,13 @@ jobs: displayName: Install dependencies condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - script: node build/azure-pipelines/distro/mixin-npm + - script: node build/azure-pipelines/distro/mixin-npm.ts condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) displayName: Mixin distro node modules - script: | set -e - node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt + node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt mkdir -p .build/node_modules_cache tar -czf .build/node_modules_cache/cache.tgz --files-from 
.build/node_modules_list.txt condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) diff --git a/build/azure-pipelines/web/product-build-web.yml b/build/azure-pipelines/web/product-build-web.yml index d4f1af2d0e0..74b84fc9fef 100644 --- a/build/azure-pipelines/web/product-build-web.yml +++ b/build/azure-pipelines/web/product-build-web.yml @@ -46,7 +46,7 @@ jobs: condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) displayName: Setup NPM Registry - - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js web $(node -p process.arch) > .build/packagelockhash + - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts web $(node -p process.arch) > .build/packagelockhash displayName: Prepare node_modules cache key - task: Cache@2 @@ -101,19 +101,19 @@ jobs: displayName: Install dependencies condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - script: node build/azure-pipelines/distro/mixin-npm + - script: node build/azure-pipelines/distro/mixin-npm.ts condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) displayName: Mixin distro node modules - script: | set -e - node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt + node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt mkdir -p .build/node_modules_cache tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) displayName: Create node_modules archive - - script: node build/azure-pipelines/distro/mixin-quality + - script: node build/azure-pipelines/distro/mixin-quality.ts displayName: Mixin distro quality - template: ../common/install-builtin-extensions.yml@self @@ -147,7 +147,7 @@ jobs: AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \ AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \ AZURE_ID_TOKEN="$(AZURE_ID_TOKEN)" \ - node build/azure-pipelines/upload-cdn + node build/azure-pipelines/upload-cdn.ts displayName: Upload to CDN - script: | @@ -156,7 +156,7 @@ jobs: AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \ AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \ AZURE_ID_TOKEN="$(AZURE_ID_TOKEN)" \ - node build/azure-pipelines/upload-sourcemaps out-vscode-web-min out-vscode-web-min/vs/workbench/workbench.web.main.js.map + node build/azure-pipelines/upload-sourcemaps.ts out-vscode-web-min out-vscode-web-min/vs/workbench/workbench.web.main.js.map displayName: Upload sourcemaps (Web Main) - script: | @@ -165,7 +165,7 @@ jobs: AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \ AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \ AZURE_ID_TOKEN="$(AZURE_ID_TOKEN)" \ - node build/azure-pipelines/upload-sourcemaps out-vscode-web-min out-vscode-web-min/vs/workbench/workbench.web.main.internal.js.map + node build/azure-pipelines/upload-sourcemaps.ts out-vscode-web-min out-vscode-web-min/vs/workbench/workbench.web.main.internal.js.map displayName: Upload sourcemaps (Web Internal) - script: | @@ -174,5 +174,5 @@ jobs: AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \ AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \ AZURE_ID_TOKEN="$(AZURE_ID_TOKEN)" \ - node build/azure-pipelines/upload-nlsmetadata + node build/azure-pipelines/upload-nlsmetadata.ts displayName: Upload NLS Metadata diff --git a/build/azure-pipelines/win32/codesign.js b/build/azure-pipelines/win32/codesign.js deleted file mode 100644 index 630f9a64ba1..00000000000 --- a/build/azure-pipelines/win32/codesign.js +++ /dev/null @@ -1,73 +0,0 @@ -"use strict"; 
-/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -const zx_1 = require("zx"); -const codesign_1 = require("../common/codesign"); -const publish_1 = require("../common/publish"); -async function main() { - (0, zx_1.usePwsh)(); - const arch = (0, publish_1.e)('VSCODE_ARCH'); - const esrpCliDLLPath = (0, publish_1.e)('EsrpCliDllPath'); - const codeSigningFolderPath = (0, publish_1.e)('CodeSigningFolderPath'); - // Start the code sign processes in parallel - // 1. Codesign executables and shared libraries - // 2. Codesign Powershell scripts - // 3. Codesign context menu appx package (insiders only) - const codesignTask1 = (0, codesign_1.spawnCodesignProcess)(esrpCliDLLPath, 'sign-windows', codeSigningFolderPath, '*.dll,*.exe,*.node'); - const codesignTask2 = (0, codesign_1.spawnCodesignProcess)(esrpCliDLLPath, 'sign-windows-appx', codeSigningFolderPath, '*.ps1'); - const codesignTask3 = process.env['VSCODE_QUALITY'] === 'insider' - ? (0, codesign_1.spawnCodesignProcess)(esrpCliDLLPath, 'sign-windows-appx', codeSigningFolderPath, '*.appx') - : undefined; - // Codesign executables and shared libraries - (0, codesign_1.printBanner)('Codesign executables and shared libraries'); - await (0, codesign_1.streamProcessOutputAndCheckResult)('Codesign executables and shared libraries', codesignTask1); - // Codesign Powershell scripts - (0, codesign_1.printBanner)('Codesign Powershell scripts'); - await (0, codesign_1.streamProcessOutputAndCheckResult)('Codesign Powershell scripts', codesignTask2); - if (codesignTask3) { - // Codesign context menu appx package - (0, codesign_1.printBanner)('Codesign context menu appx package'); - await (0, codesign_1.streamProcessOutputAndCheckResult)('Codesign context menu appx package', codesignTask3); - } - // Create build artifact directory - await (0, zx_1.$) `New-Item -ItemType Directory -Path .build/win32-${arch} -Force`; - // Package client - if (process.env['BUILT_CLIENT']) { - // Product version - const version = await (0, zx_1.$) `node -p "require('../VSCode-win32-${arch}/resources/app/package.json').version"`; - (0, codesign_1.printBanner)('Package client'); - const clientArchivePath = `.build/win32-${arch}/VSCode-win32-${arch}-${version}.zip`; - await (0, zx_1.$) `7z.exe a -tzip ${clientArchivePath} ../VSCode-win32-${arch}/* "-xr!CodeSignSummary*.md"`.pipe(process.stdout); - await (0, zx_1.$) `7z.exe l ${clientArchivePath}`.pipe(process.stdout); - } - // Package server - if (process.env['BUILT_SERVER']) { - (0, codesign_1.printBanner)('Package server'); - const serverArchivePath = `.build/win32-${arch}/vscode-server-win32-${arch}.zip`; - await (0, zx_1.$) `7z.exe a -tzip ${serverArchivePath} ../vscode-server-win32-${arch}`.pipe(process.stdout); - await (0, zx_1.$) `7z.exe l ${serverArchivePath}`.pipe(process.stdout); - } - // Package server (web) - if (process.env['BUILT_WEB']) { - (0, codesign_1.printBanner)('Package server (web)'); - const webArchivePath = `.build/win32-${arch}/vscode-server-win32-${arch}-web.zip`; - await (0, zx_1.$) `7z.exe a -tzip ${webArchivePath} ../vscode-server-win32-${arch}-web`.pipe(process.stdout); - await (0, zx_1.$) `7z.exe l ${webArchivePath}`.pipe(process.stdout); 
- } - // Sign setup - if (process.env['BUILT_CLIENT']) { - (0, codesign_1.printBanner)('Sign setup packages (system, user)'); - const task = (0, zx_1.$) `npm exec -- npm-run-all -lp "gulp vscode-win32-${arch}-system-setup -- --sign" "gulp vscode-win32-${arch}-user-setup -- --sign"`; - await (0, codesign_1.streamProcessOutputAndCheckResult)('Sign setup packages (system, user)', task); - } -} -main().then(() => { - process.exit(0); -}, err => { - console.error(`ERROR: ${err}`); - process.exit(1); -}); -//# sourceMappingURL=codesign.js.map \ No newline at end of file diff --git a/build/azure-pipelines/win32/codesign.ts b/build/azure-pipelines/win32/codesign.ts index 7e7170709b5..ccb30309e12 100644 --- a/build/azure-pipelines/win32/codesign.ts +++ b/build/azure-pipelines/win32/codesign.ts @@ -4,8 +4,8 @@ *--------------------------------------------------------------------------------------------*/ import { $, usePwsh } from 'zx'; -import { printBanner, spawnCodesignProcess, streamProcessOutputAndCheckResult } from '../common/codesign'; -import { e } from '../common/publish'; +import { printBanner, spawnCodesignProcess, streamProcessOutputAndCheckResult } from '../common/codesign.ts'; +import { e } from '../common/publish.ts'; async function main() { usePwsh(); diff --git a/build/azure-pipelines/win32/product-build-win32-node-modules.yml b/build/azure-pipelines/win32/product-build-win32-node-modules.yml index ba30f67ee33..f59cb84181f 100644 --- a/build/azure-pipelines/win32/product-build-win32-node-modules.yml +++ b/build/azure-pipelines/win32/product-build-win32-node-modules.yml @@ -39,7 +39,7 @@ jobs: - pwsh: | mkdir .build -ea 0 - node build/azure-pipelines/common/computeNodeModulesCacheKey.js win32 $(VSCODE_ARCH) $(node -p process.arch) > .build/packagelockhash + node build/azure-pipelines/common/computeNodeModulesCacheKey.ts win32 $(VSCODE_ARCH) $(node -p process.arch) > .build/packagelockhash displayName: Prepare node_modules cache key - task: Cache@2 @@ -81,14 +81,14 @@ jobs: displayName: Install dependencies condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - powershell: node build/azure-pipelines/distro/mixin-npm + - powershell: node build/azure-pipelines/distro/mixin-npm.ts condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) displayName: Mixin distro node modules - powershell: | . 
build/azure-pipelines/win32/exec.ps1 $ErrorActionPreference = "Stop" - exec { node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt } + exec { node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt } exec { mkdir -Force .build/node_modules_cache } exec { 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt } condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) diff --git a/build/azure-pipelines/win32/sdl-scan-win32.yml b/build/azure-pipelines/win32/sdl-scan-win32.yml index dba656eff53..f7d8849dbcb 100644 --- a/build/azure-pipelines/win32/sdl-scan-win32.yml +++ b/build/azure-pipelines/win32/sdl-scan-win32.yml @@ -92,10 +92,10 @@ steps: retryCountOnTaskFailure: 5 displayName: Install dependencies - - script: node build/azure-pipelines/distro/mixin-npm + - script: node build/azure-pipelines/distro/mixin-npm.ts displayName: Mixin distro node modules - - script: node build/azure-pipelines/distro/mixin-quality + - script: node build/azure-pipelines/distro/mixin-quality.ts displayName: Mixin distro quality env: VSCODE_QUALITY: ${{ parameters.VSCODE_QUALITY }} diff --git a/build/azure-pipelines/win32/steps/product-build-win32-cli-sign.yml b/build/azure-pipelines/win32/steps/product-build-win32-cli-sign.yml index e75581bea77..0caba3d1a2b 100644 --- a/build/azure-pipelines/win32/steps/product-build-win32-cli-sign.yml +++ b/build/azure-pipelines/win32/steps/product-build-win32-cli-sign.yml @@ -41,7 +41,7 @@ steps: archiveFilePatterns: $(Build.BinariesDirectory)/pkg/${{ target }}/*.zip destinationFolder: $(Build.BinariesDirectory)/sign/${{ target }} - - powershell: node build\azure-pipelines\common\sign $env:EsrpCliDllPath sign-windows $(Build.BinariesDirectory)/sign "*.exe" + - powershell: node build\azure-pipelines\common\sign.ts $env:EsrpCliDllPath sign-windows $(Build.BinariesDirectory)/sign "*.exe" env: SYSTEM_ACCESSTOKEN: $(System.AccessToken) displayName: ✍️ Codesign diff --git a/build/azure-pipelines/win32/steps/product-build-win32-compile.yml b/build/azure-pipelines/win32/steps/product-build-win32-compile.yml index bdc807fdae5..fa5acee1c6c 100644 --- a/build/azure-pipelines/win32/steps/product-build-win32-compile.yml +++ b/build/azure-pipelines/win32/steps/product-build-win32-compile.yml @@ -55,7 +55,7 @@ steps: - pwsh: | mkdir .build -ea 0 - node build/azure-pipelines/common/computeNodeModulesCacheKey.js win32 $(VSCODE_ARCH) $(node -p process.arch) > .build/packagelockhash + node build/azure-pipelines/common/computeNodeModulesCacheKey.ts win32 $(VSCODE_ARCH) $(node -p process.arch) > .build/packagelockhash displayName: Prepare node_modules cache key - task: Cache@2 @@ -101,31 +101,33 @@ steps: displayName: Install dependencies condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - powershell: node build/azure-pipelines/distro/mixin-npm + - powershell: node build/azure-pipelines/distro/mixin-npm.ts condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) displayName: Mixin distro node modules - powershell: | . 
build/azure-pipelines/win32/exec.ps1 $ErrorActionPreference = "Stop" - exec { node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt } + exec { node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt } exec { mkdir -Force .build/node_modules_cache } exec { 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt } condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) displayName: Create node_modules archive - - powershell: node build/azure-pipelines/distro/mixin-quality + - powershell: node build/azure-pipelines/distro/mixin-quality.ts displayName: Mixin distro quality - template: ../../common/install-builtin-extensions.yml@self - ${{ if ne(parameters.VSCODE_CIBUILD, true) }}: - - powershell: node build\lib\policies\policyGenerator build\lib\policies\policyData.jsonc win32 + - powershell: | + npm run copy-policy-dto --prefix build + node build\lib\policies\policyGenerator.ts build\lib\policies\policyData.jsonc win32 displayName: Generate Group Policy definitions retryCountOnTaskFailure: 3 - ${{ if and(ne(parameters.VSCODE_CIBUILD, true), ne(parameters.VSCODE_QUALITY, 'exploration')) }}: - - powershell: node build/win32/explorer-dll-fetcher .build/win32/appx + - powershell: node build/win32/explorer-dll-fetcher.ts .build/win32/appx displayName: Download Explorer dll - powershell: | @@ -223,7 +225,7 @@ steps: - powershell: | . build/azure-pipelines/win32/exec.ps1 $ErrorActionPreference = "Stop" - exec { npx deemon --detach --wait -- npx zx build/azure-pipelines/win32/codesign.js } + exec { npx deemon --detach --wait -- npx zx build/azure-pipelines/win32/codesign.ts } env: SYSTEM_ACCESSTOKEN: $(System.AccessToken) displayName: ✍️ Codesign @@ -240,7 +242,7 @@ steps: - powershell: | . 
build/azure-pipelines/win32/exec.ps1 $ErrorActionPreference = "Stop" - exec { npx deemon --attach -- npx zx build/azure-pipelines/win32/codesign.js } + exec { npx deemon --attach -- npx zx build/azure-pipelines/win32/codesign.ts } condition: succeededOrFailed() displayName: "✍️ Post-job: Codesign" diff --git a/build/buildfile.js b/build/buildfile.js index 83f84563275..9b5d07dec45 100644 --- a/build/buildfile.js +++ b/build/buildfile.js @@ -6,24 +6,24 @@ /** * @param {string} name - * @returns {import('./lib/bundle').IEntryPoint} + * @returns {import('./lib/bundle.js').IEntryPoint} */ -function createModuleDescription(name) { +export function createModuleDescription(name) { return { name }; } -exports.workerEditor = createModuleDescription('vs/editor/common/services/editorWebWorkerMain'); -exports.workerExtensionHost = createModuleDescription('vs/workbench/api/worker/extensionHostWorkerMain'); -exports.workerNotebook = createModuleDescription('vs/workbench/contrib/notebook/common/services/notebookWebWorkerMain'); -exports.workerLanguageDetection = createModuleDescription('vs/workbench/services/languageDetection/browser/languageDetectionWebWorkerMain'); -exports.workerLocalFileSearch = createModuleDescription('vs/workbench/services/search/worker/localFileSearchMain'); -exports.workerProfileAnalysis = createModuleDescription('vs/platform/profiling/electron-browser/profileAnalysisWorkerMain'); -exports.workerOutputLinks = createModuleDescription('vs/workbench/contrib/output/common/outputLinkComputerMain'); -exports.workerBackgroundTokenization = createModuleDescription('vs/workbench/services/textMate/browser/backgroundTokenization/worker/textMateTokenizationWorker.workerMain'); +export const workerEditor = createModuleDescription('vs/editor/common/services/editorWebWorkerMain'); +export const workerExtensionHost = createModuleDescription('vs/workbench/api/worker/extensionHostWorkerMain'); +export const workerNotebook = createModuleDescription('vs/workbench/contrib/notebook/common/services/notebookWebWorkerMain'); +export const workerLanguageDetection = createModuleDescription('vs/workbench/services/languageDetection/browser/languageDetectionWebWorkerMain'); +export const workerLocalFileSearch = createModuleDescription('vs/workbench/services/search/worker/localFileSearchMain'); +export const workerProfileAnalysis = createModuleDescription('vs/platform/profiling/electron-browser/profileAnalysisWorkerMain'); +export const workerOutputLinks = createModuleDescription('vs/workbench/contrib/output/common/outputLinkComputerMain'); +export const workerBackgroundTokenization = createModuleDescription('vs/workbench/services/textMate/browser/backgroundTokenization/worker/textMateTokenizationWorker.workerMain'); -exports.workbenchDesktop = [ +export const workbenchDesktop = [ createModuleDescription('vs/workbench/contrib/debug/node/telemetryApp'), createModuleDescription('vs/platform/files/node/watcher/watcherMain'), createModuleDescription('vs/platform/terminal/node/ptyHostMain'), @@ -31,15 +31,15 @@ exports.workbenchDesktop = [ createModuleDescription('vs/workbench/workbench.desktop.main') ]; -exports.workbenchWeb = createModuleDescription('vs/workbench/workbench.web.main'); +export const workbenchWeb = createModuleDescription('vs/workbench/workbench.web.main'); -exports.keyboardMaps = [ +export const keyboardMaps = [ createModuleDescription('vs/workbench/services/keybinding/browser/keyboardLayouts/layout.contribution.linux'), 
createModuleDescription('vs/workbench/services/keybinding/browser/keyboardLayouts/layout.contribution.darwin'), createModuleDescription('vs/workbench/services/keybinding/browser/keyboardLayouts/layout.contribution.win') ]; -exports.code = [ +export const code = [ // 'vs/code/electron-main/main' is not included here because it comes in via ./src/main.js // 'vs/code/node/cli' is not included here because it comes in via ./src/cli.js createModuleDescription('vs/code/node/cliProcessMain'), @@ -47,9 +47,9 @@ exports.code = [ createModuleDescription('vs/code/electron-browser/workbench/workbench'), ]; -exports.codeWeb = createModuleDescription('vs/code/browser/workbench/workbench'); +export const codeWeb = createModuleDescription('vs/code/browser/workbench/workbench'); -exports.codeServer = [ +export const codeServer = [ // 'vs/server/node/server.main' is not included here because it gets inlined via ./src/server-main.js // 'vs/server/node/server.cli' is not included here because it gets inlined via ./src/server-cli.js createModuleDescription('vs/workbench/api/node/extensionHostProcess'), @@ -57,4 +57,24 @@ exports.codeServer = [ createModuleDescription('vs/platform/terminal/node/ptyHostMain') ]; -exports.entrypoint = createModuleDescription; +export const entrypoint = createModuleDescription; + +const buildfile = { + workerEditor, + workerExtensionHost, + workerNotebook, + workerLanguageDetection, + workerLocalFileSearch, + workerProfileAnalysis, + workerOutputLinks, + workerBackgroundTokenization, + workbenchDesktop, + workbenchWeb, + keyboardMaps, + code, + codeWeb, + codeServer, + entrypoint: createModuleDescription +}; + +export default buildfile; diff --git a/build/checker/layersChecker.js b/build/checker/layersChecker.js deleted file mode 100644 index ae84e8ffeb9..00000000000 --- a/build/checker/layersChecker.js +++ /dev/null @@ -1,136 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const typescript_1 = __importDefault(require("typescript")); -const fs_1 = require("fs"); -const path_1 = require("path"); -const minimatch_1 = require("minimatch"); -// -// ############################################################################################# -// -// A custom typescript checker for the specific task of detecting the use of certain types in a -// layer that does not allow such use. -// -// Make changes to below RULES to lift certain files from these checks only if absolutely needed -// -// NOTE: Most layer checks are done via tsconfig..json files. 
-// -// ############################################################################################# -// -// Types that are defined in a common layer but are known to be only -// available in native environments should not be allowed in browser -const NATIVE_TYPES = [ - 'NativeParsedArgs', - 'INativeEnvironmentService', - 'AbstractNativeEnvironmentService', - 'INativeWindowConfiguration', - 'ICommonNativeHostService', - 'INativeHostService', - 'IMainProcessService', - 'INativeBrowserElementsService', -]; -const RULES = [ - // Tests: skip - { - target: '**/vs/**/test/**', - skip: true // -> skip all test files - }, - // Common: vs/platform services that can access native types - { - target: `**/vs/platform/{${[ - 'environment/common/*.ts', - 'window/common/window.ts', - 'native/common/native.ts', - 'native/common/nativeHostService.ts', - 'browserElements/common/browserElements.ts', - 'browserElements/common/nativeBrowserElementsService.ts' - ].join(',')}}`, - disallowedTypes: [ /* Ignore native types that are defined from here */ /* Ignore native types that are defined from here */], - }, - // Common: vs/base/parts/sandbox/electron-browser/preload{,-aux}.ts - { - target: '**/vs/base/parts/sandbox/electron-browser/preload{,-aux}.ts', - disallowedTypes: NATIVE_TYPES, - }, - // Common - { - target: '**/vs/**/common/**', - disallowedTypes: NATIVE_TYPES, - }, - // Common - { - target: '**/vs/**/worker/**', - disallowedTypes: NATIVE_TYPES, - }, - // Browser - { - target: '**/vs/**/browser/**', - disallowedTypes: NATIVE_TYPES, - }, - // Electron (main, utility) - { - target: '**/vs/**/{electron-main,electron-utility}/**', - disallowedTypes: [ - 'ipcMain' // not allowed, use validatedIpcMain instead - ] - } -]; -const TS_CONFIG_PATH = (0, path_1.join)(__dirname, '../../', 'src', 'tsconfig.json'); -let hasErrors = false; -function checkFile(program, sourceFile, rule) { - checkNode(sourceFile); - function checkNode(node) { - if (node.kind !== typescript_1.default.SyntaxKind.Identifier) { - return typescript_1.default.forEachChild(node, checkNode); // recurse down - } - const checker = program.getTypeChecker(); - const symbol = checker.getSymbolAtLocation(node); - if (!symbol) { - return; - } - let text = symbol.getName(); - let _parentSymbol = symbol; - while (_parentSymbol.parent) { - _parentSymbol = _parentSymbol.parent; - } - const parentSymbol = _parentSymbol; - text = parentSymbol.getName(); - if (rule.disallowedTypes?.some(disallowed => disallowed === text)) { - const { line, character } = sourceFile.getLineAndCharacterOfPosition(node.getStart()); - console.log(`[build/checker/layersChecker.ts]: Reference to type '${text}' violates layer '${rule.target}' (${sourceFile.fileName} (${line + 1},${character + 1}). 
Learn more about our source code organization at https://github.com/microsoft/vscode/wiki/Source-Code-Organization.`); - hasErrors = true; - return; - } - } -} -function createProgram(tsconfigPath) { - const tsConfig = typescript_1.default.readConfigFile(tsconfigPath, typescript_1.default.sys.readFile); - const configHostParser = { fileExists: fs_1.existsSync, readDirectory: typescript_1.default.sys.readDirectory, readFile: file => (0, fs_1.readFileSync)(file, 'utf8'), useCaseSensitiveFileNames: process.platform === 'linux' }; - const tsConfigParsed = typescript_1.default.parseJsonConfigFileContent(tsConfig.config, configHostParser, (0, path_1.resolve)((0, path_1.dirname)(tsconfigPath)), { noEmit: true }); - const compilerHost = typescript_1.default.createCompilerHost(tsConfigParsed.options, true); - return typescript_1.default.createProgram(tsConfigParsed.fileNames, tsConfigParsed.options, compilerHost); -} -// -// Create program and start checking -// -const program = createProgram(TS_CONFIG_PATH); -for (const sourceFile of program.getSourceFiles()) { - for (const rule of RULES) { - if ((0, minimatch_1.match)([sourceFile.fileName], rule.target).length > 0) { - if (!rule.skip) { - checkFile(program, sourceFile, rule); - } - break; - } - } -} -if (hasErrors) { - process.exit(1); -} -//# sourceMappingURL=layersChecker.js.map \ No newline at end of file diff --git a/build/checker/layersChecker.ts b/build/checker/layersChecker.ts index 68e12e61c40..87341dcffd0 100644 --- a/build/checker/layersChecker.ts +++ b/build/checker/layersChecker.ts @@ -6,7 +6,7 @@ import ts from 'typescript'; import { readFileSync, existsSync } from 'fs'; import { resolve, dirname, join } from 'path'; -import { match } from 'minimatch'; +import minimatch from 'minimatch'; // // ############################################################################################# @@ -88,7 +88,7 @@ const RULES: IRule[] = [ } ]; -const TS_CONFIG_PATH = join(__dirname, '../../', 'src', 'tsconfig.json'); +const TS_CONFIG_PATH = join(import.meta.dirname, '../../', 'src', 'tsconfig.json'); interface IRule { target: string; @@ -151,7 +151,7 @@ const program = createProgram(TS_CONFIG_PATH); for (const sourceFile of program.getSourceFiles()) { for (const rule of RULES) { - if (match([sourceFile.fileName], rule.target).length > 0) { + if (minimatch.match([sourceFile.fileName], rule.target).length > 0) { if (!rule.skip) { checkFile(program, sourceFile, rule); } diff --git a/build/darwin/create-universal-app.js b/build/darwin/create-universal-app.js deleted file mode 100644 index 98e14ef2160..00000000000 --- a/build/darwin/create-universal-app.js +++ /dev/null @@ -1,63 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const path_1 = __importDefault(require("path")); -const fs_1 = __importDefault(require("fs")); -const minimatch_1 = __importDefault(require("minimatch")); -const vscode_universal_bundler_1 = require("vscode-universal-bundler"); -const root = path_1.default.dirname(path_1.default.dirname(__dirname)); -async function main(buildDir) { - const arch = process.env['VSCODE_ARCH']; - if (!buildDir) { - throw new Error('Build dir not provided'); - } - const product = JSON.parse(fs_1.default.readFileSync(path_1.default.join(root, 'product.json'), 'utf8')); - const appName = product.nameLong + '.app'; - const x64AppPath = path_1.default.join(buildDir, 'VSCode-darwin-x64', appName); - const arm64AppPath = path_1.default.join(buildDir, 'VSCode-darwin-arm64', appName); - const asarRelativePath = path_1.default.join('Contents', 'Resources', 'app', 'node_modules.asar'); - const outAppPath = path_1.default.join(buildDir, `VSCode-darwin-${arch}`, appName); - const productJsonPath = path_1.default.resolve(outAppPath, 'Contents', 'Resources', 'app', 'product.json'); - const filesToSkip = [ - '**/CodeResources', - '**/Credits.rtf', - '**/policies/{*.mobileconfig,**/*.plist}', - // TODO: Should we consider expanding this to other files in this area? - '**/node_modules/@parcel/node-addon-api/nothing.target.mk', - ]; - await (0, vscode_universal_bundler_1.makeUniversalApp)({ - x64AppPath, - arm64AppPath, - asarPath: asarRelativePath, - outAppPath, - force: true, - mergeASARs: true, - x64ArchFiles: '{*/kerberos.node,**/extensions/microsoft-authentication/dist/libmsalruntime.dylib,**/extensions/microsoft-authentication/dist/msal-node-runtime.node}', - filesToSkipComparison: (file) => { - for (const expected of filesToSkip) { - if ((0, minimatch_1.default)(file, expected)) { - return true; - } - } - return false; - } - }); - const productJson = JSON.parse(fs_1.default.readFileSync(productJsonPath, 'utf8')); - Object.assign(productJson, { - darwinUniversalAssetId: 'darwin-universal' - }); - fs_1.default.writeFileSync(productJsonPath, JSON.stringify(productJson, null, '\t')); -} -if (require.main === module) { - main(process.argv[2]).catch(err => { - console.error(err); - process.exit(1); - }); -} -//# sourceMappingURL=create-universal-app.js.map \ No newline at end of file diff --git a/build/darwin/create-universal-app.ts b/build/darwin/create-universal-app.ts index 41bae77cd12..4faa838f924 100644 --- a/build/darwin/create-universal-app.ts +++ b/build/darwin/create-universal-app.ts @@ -8,7 +8,7 @@ import fs from 'fs'; import minimatch from 'minimatch'; import { makeUniversalApp } from 'vscode-universal-bundler'; -const root = path.dirname(path.dirname(__dirname)); +const root = path.dirname(path.dirname(import.meta.dirname)); async function main(buildDir?: string) { const arch = process.env['VSCODE_ARCH']; @@ -58,7 +58,7 @@ async function main(buildDir?: string) { fs.writeFileSync(productJsonPath, JSON.stringify(productJson, null, '\t')); } -if (require.main === module) { +if (import.meta.main) { main(process.argv[2]).catch(err => { console.error(err); process.exit(1); diff --git a/build/darwin/sign.js b/build/darwin/sign.js deleted file mode 100644 index d640e94fbf5..00000000000 --- a/build/darwin/sign.js +++ /dev/null @@ -1,128 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. 
- * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const osx_sign_1 = require("@electron/osx-sign"); -const cross_spawn_promise_1 = require("@malept/cross-spawn-promise"); -const root = path_1.default.dirname(path_1.default.dirname(__dirname)); -const baseDir = path_1.default.dirname(__dirname); -const product = JSON.parse(fs_1.default.readFileSync(path_1.default.join(root, 'product.json'), 'utf8')); -const helperAppBaseName = product.nameShort; -const gpuHelperAppName = helperAppBaseName + ' Helper (GPU).app'; -const rendererHelperAppName = helperAppBaseName + ' Helper (Renderer).app'; -const pluginHelperAppName = helperAppBaseName + ' Helper (Plugin).app'; -function getElectronVersion() { - const npmrc = fs_1.default.readFileSync(path_1.default.join(root, '.npmrc'), 'utf8'); - const target = /^target="(.*)"$/m.exec(npmrc)[1]; - return target; -} -function getEntitlementsForFile(filePath) { - if (filePath.includes(gpuHelperAppName)) { - return path_1.default.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist'); - } - else if (filePath.includes(rendererHelperAppName)) { - return path_1.default.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist'); - } - else if (filePath.includes(pluginHelperAppName)) { - return path_1.default.join(baseDir, 'azure-pipelines', 'darwin', 'helper-plugin-entitlements.plist'); - } - return path_1.default.join(baseDir, 'azure-pipelines', 'darwin', 'app-entitlements.plist'); -} -async function retrySignOnKeychainError(fn, maxRetries = 3) { - let lastError; - for (let attempt = 1; attempt <= maxRetries; attempt++) { - try { - return await fn(); - } - catch (error) { - lastError = error; - // Check if this is the specific keychain error we want to retry - const errorMessage = error instanceof Error ? 
error.message : String(error); - const isKeychainError = errorMessage.includes('The specified item could not be found in the keychain.'); - if (!isKeychainError || attempt === maxRetries) { - throw error; - } - console.log(`Signing attempt ${attempt} failed with keychain error, retrying...`); - console.log(`Error: ${errorMessage}`); - const delay = 1000 * Math.pow(2, attempt - 1); - console.log(`Waiting ${Math.round(delay)}ms before retry ${attempt}/${maxRetries}...`); - await new Promise(resolve => setTimeout(resolve, delay)); - } - } - throw lastError; -} -async function main(buildDir) { - const tempDir = process.env['AGENT_TEMPDIRECTORY']; - const arch = process.env['VSCODE_ARCH']; - const identity = process.env['CODESIGN_IDENTITY']; - if (!buildDir) { - throw new Error('$AGENT_BUILDDIRECTORY not set'); - } - if (!tempDir) { - throw new Error('$AGENT_TEMPDIRECTORY not set'); - } - const appRoot = path_1.default.join(buildDir, `VSCode-darwin-${arch}`); - const appName = product.nameLong + '.app'; - const infoPlistPath = path_1.default.resolve(appRoot, appName, 'Contents', 'Info.plist'); - const appOpts = { - app: path_1.default.join(appRoot, appName), - platform: 'darwin', - optionsForFile: (filePath) => ({ - entitlements: getEntitlementsForFile(filePath), - hardenedRuntime: true, - }), - preAutoEntitlements: false, - preEmbedProvisioningProfile: false, - keychain: path_1.default.join(tempDir, 'buildagent.keychain'), - version: getElectronVersion(), - identity, - }; - // Only overwrite plist entries for x64 and arm64 builds, - // universal will get its copy from the x64 build. - if (arch !== 'universal') { - await (0, cross_spawn_promise_1.spawn)('plutil', [ - '-insert', - 'NSAppleEventsUsageDescription', - '-string', - 'An application in Visual Studio Code wants to use AppleScript.', - `${infoPlistPath}` - ]); - await (0, cross_spawn_promise_1.spawn)('plutil', [ - '-replace', - 'NSMicrophoneUsageDescription', - '-string', - 'An application in Visual Studio Code wants to use the Microphone.', - `${infoPlistPath}` - ]); - await (0, cross_spawn_promise_1.spawn)('plutil', [ - '-replace', - 'NSCameraUsageDescription', - '-string', - 'An application in Visual Studio Code wants to use the Camera.', - `${infoPlistPath}` - ]); - } - await retrySignOnKeychainError(() => (0, osx_sign_1.sign)(appOpts)); -} -if (require.main === module) { - main(process.argv[2]).catch(async err => { - console.error(err); - const tempDir = process.env['AGENT_TEMPDIRECTORY']; - if (tempDir) { - const keychain = path_1.default.join(tempDir, 'buildagent.keychain'); - const identities = await (0, cross_spawn_promise_1.spawn)('security', ['find-identity', '-p', 'codesigning', '-v', keychain]); - console.error(`Available identities:\n${identities}`); - const dump = await (0, cross_spawn_promise_1.spawn)('security', ['dump-keychain', keychain]); - console.error(`Keychain dump:\n${dump}`); - } - process.exit(1); - }); -} -//# sourceMappingURL=sign.js.map \ No newline at end of file diff --git a/build/darwin/sign.ts b/build/darwin/sign.ts index ca3ced9138a..fcdcb2b2d45 100644 --- a/build/darwin/sign.ts +++ b/build/darwin/sign.ts @@ -5,11 +5,11 @@ import fs from 'fs'; import path from 'path'; -import { sign, SignOptions } from '@electron/osx-sign'; +import { sign, type SignOptions } from '@electron/osx-sign'; import { spawn } from '@malept/cross-spawn-promise'; -const root = path.dirname(path.dirname(__dirname)); -const baseDir = path.dirname(__dirname); +const root = path.dirname(path.dirname(import.meta.dirname)); +const 
baseDir = path.dirname(import.meta.dirname); const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8')); const helperAppBaseName = product.nameShort; const gpuHelperAppName = helperAppBaseName + ' Helper (GPU).app'; @@ -122,7 +122,7 @@ async function main(buildDir?: string): Promise { await retrySignOnKeychainError(() => sign(appOpts)); } -if (require.main === module) { +if (import.meta.main) { main(process.argv[2]).catch(async err => { console.error(err); const tempDir = process.env['AGENT_TEMPDIRECTORY']; diff --git a/build/darwin/verify-macho.js b/build/darwin/verify-macho.js deleted file mode 100644 index 8202e8d7b76..00000000000 --- a/build/darwin/verify-macho.js +++ /dev/null @@ -1,136 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert_1 = __importDefault(require("assert")); -const path_1 = __importDefault(require("path")); -const promises_1 = require("fs/promises"); -const cross_spawn_promise_1 = require("@malept/cross-spawn-promise"); -const minimatch_1 = __importDefault(require("minimatch")); -const MACHO_PREFIX = 'Mach-O '; -const MACHO_64_MAGIC_LE = 0xfeedfacf; -const MACHO_UNIVERSAL_MAGIC_LE = 0xbebafeca; -const MACHO_ARM64_CPU_TYPE = new Set([ - 0x0c000001, - 0x0100000c, -]); -const MACHO_X86_64_CPU_TYPE = new Set([ - 0x07000001, - 0x01000007, -]); -// Files to skip during architecture validation -const FILES_TO_SKIP = [ - // MSAL runtime files are only present in ARM64 builds - '**/extensions/microsoft-authentication/dist/libmsalruntime.dylib', - '**/extensions/microsoft-authentication/dist/msal-node-runtime.node', -]; -function isFileSkipped(file) { - return FILES_TO_SKIP.some(pattern => (0, minimatch_1.default)(file, pattern)); -} -async function read(file, buf, offset, length, position) { - let filehandle; - try { - filehandle = await (0, promises_1.open)(file); - await filehandle.read(buf, offset, length, position); - } - finally { - await filehandle?.close(); - } -} -async function checkMachOFiles(appPath, arch) { - const visited = new Set(); - const invalidFiles = []; - const header = Buffer.alloc(8); - const file_header_entry_size = 20; - const checkx86_64Arch = (arch === 'x64'); - const checkArm64Arch = (arch === 'arm64'); - const checkUniversalArch = (arch === 'universal'); - const traverse = async (p) => { - p = await (0, promises_1.realpath)(p); - if (visited.has(p)) { - return; - } - visited.add(p); - const info = await (0, promises_1.stat)(p); - if (info.isSymbolicLink()) { - return; - } - if (info.isFile()) { - let fileOutput = ''; - try { - fileOutput = await (0, cross_spawn_promise_1.spawn)('file', ['--brief', '--no-pad', p]); - } - catch (e) { - if (e instanceof cross_spawn_promise_1.ExitCodeError) { - /* silently accept error codes from "file" */ - } - else { - throw e; - } - } - if (fileOutput.startsWith(MACHO_PREFIX)) { - console.log(`Verifying architecture of ${p}`); - read(p, header, 0, 8, 0).then(_ => { - const header_magic = header.readUInt32LE(); - if (header_magic === MACHO_64_MAGIC_LE) { - 
const cpu_type = header.readUInt32LE(4); - if (checkUniversalArch) { - invalidFiles.push(p); - } - else if (checkArm64Arch && !MACHO_ARM64_CPU_TYPE.has(cpu_type)) { - invalidFiles.push(p); - } - else if (checkx86_64Arch && !MACHO_X86_64_CPU_TYPE.has(cpu_type)) { - invalidFiles.push(p); - } - } - else if (header_magic === MACHO_UNIVERSAL_MAGIC_LE) { - const num_binaries = header.readUInt32BE(4); - assert_1.default.equal(num_binaries, 2); - const file_entries_size = file_header_entry_size * num_binaries; - const file_entries = Buffer.alloc(file_entries_size); - read(p, file_entries, 0, file_entries_size, 8).then(_ => { - for (let i = 0; i < num_binaries; i++) { - const cpu_type = file_entries.readUInt32LE(file_header_entry_size * i); - if (!MACHO_ARM64_CPU_TYPE.has(cpu_type) && !MACHO_X86_64_CPU_TYPE.has(cpu_type)) { - invalidFiles.push(p); - } - } - }); - } - }); - } - } - if (info.isDirectory()) { - for (const child of await (0, promises_1.readdir)(p)) { - await traverse(path_1.default.resolve(p, child)); - } - } - }; - await traverse(appPath); - return invalidFiles; -} -const archToCheck = process.argv[2]; -(0, assert_1.default)(process.env['APP_PATH'], 'APP_PATH not set'); -(0, assert_1.default)(archToCheck === 'x64' || archToCheck === 'arm64' || archToCheck === 'universal', `Invalid architecture ${archToCheck} to check`); -checkMachOFiles(process.env['APP_PATH'], archToCheck).then(invalidFiles => { - // Filter out files that should be skipped - const actualInvalidFiles = invalidFiles.filter(file => !isFileSkipped(file)); - if (actualInvalidFiles.length > 0) { - console.error('\x1b[31mThese files are built for the wrong architecture:\x1b[0m'); - actualInvalidFiles.forEach(file => console.error(`\x1b[31m${file}\x1b[0m`)); - process.exit(1); - } - else { - console.log('\x1b[32mAll files are valid\x1b[0m'); - } -}).catch(err => { - console.error(err); - process.exit(1); -}); -//# sourceMappingURL=verify-macho.js.map \ No newline at end of file diff --git a/build/eslint.mjs b/build/eslint.mjs index ca1c987a111..228a3fd9d8c 100644 --- a/build/eslint.mjs +++ b/build/eslint.mjs @@ -4,12 +4,13 @@ *--------------------------------------------------------------------------------------------*/ // @ts-check import eventStream from 'event-stream'; -import { src } from 'vinyl-fs'; +import vfs from 'vinyl-fs'; import { eslintFilter } from './filters.js'; import gulpEslint from './gulp-eslint.js'; function eslint() { - return src(eslintFilter, { base: '.', follow: true, allowEmpty: true }) + return vfs + .src(eslintFilter, { base: '.', follow: true, allowEmpty: true }) .pipe( gulpEslint((results) => { if (results.warningCount > 0 || results.errorCount > 0) { @@ -19,8 +20,7 @@ function eslint() { ).pipe(eventStream.through(function () { /* noop, important for the stream to end */ })); } -const normalizeScriptPath = (/** @type {string} */ p) => p.replace(/\.(js|ts)$/, ''); -if (normalizeScriptPath(import.meta.filename) === normalizeScriptPath(process.argv[1])) { +if (import.meta.main) { eslint().on('error', (err) => { console.error(); console.error(err); diff --git a/build/filters.js b/build/filters.js index 0e485164892..7161395cd42 100644 --- a/build/filters.js +++ b/build/filters.js @@ -13,10 +13,11 @@ * all ⊃ eol ⊇ indentation ⊃ copyright ⊃ typescript */ -const { readFileSync } = require('fs'); -const { join } = require('path'); +import { readFileSync } from 'fs'; +import { join } from 'path'; -module.exports.all = [ + +export const all = [ '*', 'build/**/*', 'extensions/**/*', @@ -31,7 +32,7 @@ 
module.exports.all = [ '!**/*.js.map', ]; -module.exports.unicodeFilter = [ +export const unicodeFilter = [ '**', '!**/ThirdPartyNotices.txt', @@ -68,7 +69,7 @@ module.exports.unicodeFilter = [ '!src/vs/workbench/contrib/terminal/common/scripts/psreadline/**', ]; -module.exports.indentationFilter = [ +export const indentationFilter = [ '**', // except specific files @@ -151,7 +152,7 @@ module.exports.indentationFilter = [ '!extensions/simple-browser/media/*.js', ]; -module.exports.copyrightFilter = [ +export const copyrightFilter = [ '**', '!**/*.desktop', '!**/*.json', @@ -193,7 +194,7 @@ module.exports.copyrightFilter = [ '!src/vs/workbench/contrib/terminal/common/scripts/psreadline/**', ]; -module.exports.tsFormattingFilter = [ +export const tsFormattingFilter = [ 'src/**/*.ts', 'test/**/*.ts', 'extensions/**/*.ts', @@ -212,19 +213,19 @@ module.exports.tsFormattingFilter = [ '!extensions/terminal-suggest/src/shell/fishBuiltinsCache.ts', ]; -module.exports.eslintFilter = [ +export const eslintFilter = [ '**/*.js', '**/*.cjs', '**/*.mjs', '**/*.ts', '.eslint-plugin-local/**/*.ts', - ...readFileSync(join(__dirname, '..', '.eslint-ignore')) + ...readFileSync(join(import.meta.dirname, '..', '.eslint-ignore')) .toString() .split(/\r\n|\n/) .filter(line => line && !line.startsWith('#')) .map(line => line.startsWith('!') ? line.slice(1) : `!${line}`) ]; -module.exports.stylelintFilter = [ +export const stylelintFilter = [ 'src/**/*.css' ]; diff --git a/build/gulp-eslint.js b/build/gulp-eslint.js index 793c16c2f30..9e543741de3 100644 --- a/build/gulp-eslint.js +++ b/build/gulp-eslint.js @@ -2,13 +2,10 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ - -'use strict'; - -const { ESLint } = require('eslint'); -const { Transform, default: Stream } = require('stream'); -const { relative } = require('path'); -const fancyLog = require('fancy-log'); +import { ESLint } from 'eslint'; +import fancyLog from 'fancy-log'; +import { relative } from 'path'; +import Stream, { Transform } from 'stream'; /** * @typedef {ESLint.LintResult[] & { errorCount: number, warningCount: number}} ESLintResults @@ -17,7 +14,7 @@ const fancyLog = require('fancy-log'); /** * @param {(results: ESLintResults) => void} action - A function to handle all ESLint results */ -function eslint(action) { +export default function eslint(action) { const linter = new ESLint({}); const formatter = linter.loadFormatter('compact'); @@ -82,5 +79,3 @@ function transform(transform, flush) { flush }); } - -module.exports = eslint; diff --git a/build/gulpfile.cli.mjs b/build/gulpfile.cli.mjs index 2d54cc024fd..efad44e17a5 100644 --- a/build/gulpfile.cli.mjs +++ b/build/gulpfile.cli.mjs @@ -11,20 +11,19 @@ import ansiColors from 'ansi-colors'; import * as cp from 'child_process'; import { tmpdir } from 'os'; import { existsSync, mkdirSync, rmSync } from 'fs'; -import task from './lib/task.js'; -import watcher from './lib/watch/index.js'; -import utilModule from './lib/util.js'; -import reporterModule from './lib/reporter.js'; +import * as task from './lib/task.ts'; +import * as watcher from './lib/watch/index.ts'; +import * as utilModule from './lib/util.ts'; +import * as reporterModule from './lib/reporter.ts'; import untar from 'gulp-untar'; import gunzip from 'gulp-gunzip'; import { fileURLToPath } from 'url'; const { debounce } = utilModule; const { createReporter } = reporterModule; -const __dirname = import.meta.dirname; const root = 'cli'; -const rootAbs = path.resolve(__dirname, '..', root); +const rootAbs = path.resolve(import.meta.dirname, '..', root); const src = `${root}/src`; const platformOpensslDirName = @@ -148,7 +147,7 @@ const compileCliTask = task.define('compile-cli', () => { const watchCliTask = task.define('watch-cli', () => { warnIfRustNotInstalled(); - return watcher.default(`${src}/**`, { read: false }) + return watcher(`${src}/**`, { read: false }) .pipe(debounce(compileCliTask)); }); diff --git a/build/gulpfile.compile.mjs b/build/gulpfile.compile.mjs index 0a55cd26d13..e79a61b0f43 100644 --- a/build/gulpfile.compile.mjs +++ b/build/gulpfile.compile.mjs @@ -4,10 +4,10 @@ *--------------------------------------------------------------------------------------------*/ //@ts-check import gulp from 'gulp'; -import util from './lib/util.js'; -import date from './lib/date.js'; -import task from './lib/task.js'; -import compilation from './lib/compilation.js'; +import * as util from './lib/util.ts'; +import * as date from './lib/date.ts'; +import * as task from './lib/task.ts'; +import * as compilation from './lib/compilation.ts'; /** * @param {boolean} disableMangle diff --git a/build/gulpfile.editor.mjs b/build/gulpfile.editor.mjs index bbd67316333..b5ff549fc36 100644 --- a/build/gulpfile.editor.mjs +++ b/build/gulpfile.editor.mjs @@ -4,26 +4,25 @@ *--------------------------------------------------------------------------------------------*/ //@ts-check import gulp from 'gulp'; -import * as path from 'path'; -import util from './lib/util.js'; -import getVersionModule from './lib/getVersion.js'; -import task from './lib/task.js'; +import path from 'path'; +import * as util 
from './lib/util.ts'; +import * as getVersionModule from './lib/getVersion.ts'; +import * as task from './lib/task.ts'; import es from 'event-stream'; import File from 'vinyl'; -import i18n from './lib/i18n.js'; -import standalone from './lib/standalone.js'; +import * as i18n from './lib/i18n.ts'; +import * as standalone from './lib/standalone.ts'; import * as cp from 'child_process'; -import compilation from './lib/compilation.js'; -import monacoapi from './lib/monaco-api.js'; +import * as compilation from './lib/compilation.ts'; +import * as monacoapi from './lib/monaco-api.ts'; import * as fs from 'fs'; import filter from 'gulp-filter'; -import reporterModule from './lib/reporter.js'; +import * as reporterModule from './lib/reporter.ts'; import monacoPackage from './monaco/package.json' with { type: 'json' }; -const __dirname = import.meta.dirname; const { getVersion } = getVersionModule; const { createReporter } = reporterModule; -const root = path.dirname(__dirname); +const root = path.dirname(import.meta.dirname); const sha1 = getVersion(root); const semver = monacoPackage.version; const headerVersion = semver + '(' + sha1 + ')'; @@ -242,7 +241,7 @@ function createTscCompileTask(watch) { args.push('-w'); } const child = cp.spawn(`node`, args, { - cwd: path.join(__dirname, '..'), + cwd: path.join(import.meta.dirname, '..'), // stdio: [null, 'pipe', 'inherit'] }); const errors = []; diff --git a/build/gulpfile.extensions.mjs b/build/gulpfile.extensions.mjs index c77c1275ea6..a31584f187a 100644 --- a/build/gulpfile.extensions.mjs +++ b/build/gulpfile.extensions.mjs @@ -12,22 +12,21 @@ import * as path from 'path'; import * as nodeUtil from 'util'; import es from 'event-stream'; import filter from 'gulp-filter'; -import util from './lib/util.js'; -import getVersionModule from './lib/getVersion.js'; -import task from './lib/task.js'; -import watcher from './lib/watch/index.js'; -import reporterModule from './lib/reporter.js'; +import * as util from './lib/util.ts'; +import * as getVersionModule from './lib/getVersion.ts'; +import * as task from './lib/task.ts'; +import watcher from './lib/watch/index.ts'; +import * as reporterModule from './lib/reporter.ts'; import glob from 'glob'; import plumber from 'gulp-plumber'; -import ext from './lib/extensions.js'; -import tsb from './lib/tsb/index.js'; +import * as ext from './lib/extensions.ts'; +import * as tsb from './lib/tsb/index.ts'; import sourcemaps from 'gulp-sourcemaps'; import { fileURLToPath } from 'url'; -const __dirname = import.meta.dirname; const { getVersion } = getVersionModule; const { createReporter } = reporterModule; -const root = path.dirname(__dirname); +const root = path.dirname(import.meta.dirname); const commit = getVersion(root); // To save 250ms for each gulp startup, we are caching the result here @@ -168,7 +167,7 @@ const tasks = compilations.map(function (tsconfigFile) { const pipeline = createPipeline(false); const nonts = gulp.src(src, srcOpts).pipe(filter(['**', '!**/*.ts'])); const input = es.merge(nonts, pipeline.tsProjectSrc()); - const watchInput = watcher.default(src, { ...srcOpts, ...{ readDelay: 200 } }); + const watchInput = watcher(src, { ...srcOpts, ...{ readDelay: 200 } }); return watchInput .pipe(util.incremental(pipeline, input)) diff --git a/build/gulpfile.hygiene.mjs b/build/gulpfile.hygiene.mjs index fb0a7408118..8c4da9471b8 100644 --- a/build/gulpfile.hygiene.mjs +++ b/build/gulpfile.hygiene.mjs @@ -6,7 +6,7 @@ import gulp from 'gulp'; import es from 'event-stream'; import path from 'path'; 
import fs from 'fs'; -import task from './lib/task.js'; +import * as task from './lib/task.ts'; import { hygiene } from './hygiene.mjs'; const dirName = path.dirname(new URL(import.meta.url).pathname); diff --git a/build/gulpfile.reh.mjs b/build/gulpfile.reh.mjs index 837bcd3d5ee..24114225c04 100644 --- a/build/gulpfile.reh.mjs +++ b/build/gulpfile.reh.mjs @@ -6,17 +6,17 @@ import gulp from 'gulp'; import * as path from 'path'; import es from 'event-stream'; -import * as util from './lib/util.js'; -import * as getVersionModule from './lib/getVersion.js'; -import * as task from './lib/task.js'; -import optimize from './lib/optimize.js'; -import * as inlineMetaModule from './lib/inlineMeta.js'; +import * as util from './lib/util.ts'; +import * as getVersionModule from './lib/getVersion.ts'; +import * as task from './lib/task.ts'; +import * as optimize from './lib/optimize.ts'; +import * as inlineMetaModule from './lib/inlineMeta.ts'; import product from '../product.json' with { type: 'json' }; import rename from 'gulp-rename'; import replace from 'gulp-replace'; import filter from 'gulp-filter'; -import * as dependenciesModule from './lib/dependencies.js'; -import * as dateModule from './lib/date.js'; +import * as dependenciesModule from './lib/dependencies.ts'; +import * as dateModule from './lib/date.ts'; import vfs from 'vinyl-fs'; import packageJson from '../package.json' with { type: 'json' }; import flatmap from 'gulp-flatmap'; @@ -32,7 +32,7 @@ import * as cp from 'child_process'; import log from 'fancy-log'; import buildfile from './buildfile.js'; import { fileURLToPath } from 'url'; -import * as fetchModule from './lib/fetch.js'; +import * as fetchModule from './lib/fetch.ts'; import jsonEditor from 'gulp-json-editor'; const { inlineMeta } = inlineMetaModule; @@ -40,9 +40,8 @@ const { getVersion } = getVersionModule; const { getProductionDependencies } = dependenciesModule; const { readISODate } = dateModule; const { fetchUrls, fetchGithub } = fetchModule; -const __dirname = import.meta.dirname; -const REPO_ROOT = path.dirname(__dirname); +const REPO_ROOT = path.dirname(import.meta.dirname); const commit = getVersion(REPO_ROOT); const BUILD_ROOT = path.dirname(REPO_ROOT); const REMOTE_FOLDER = path.join(REPO_ROOT, 'remote'); @@ -340,8 +339,8 @@ function packageTask(type, platform, arch, sourceFolderName, destinationFolderNa const deps = gulp.src(dependenciesSrc, { base: 'remote', dot: true }) // filter out unnecessary files, no source maps in server build .pipe(filter(['**', '!**/package-lock.json', '!**/*.{js,css}.map'])) - .pipe(util.cleanNodeModules(path.join(__dirname, '.moduleignore'))) - .pipe(util.cleanNodeModules(path.join(__dirname, `.moduleignore.${process.platform}`))) + .pipe(util.cleanNodeModules(path.join(import.meta.dirname, '.moduleignore'))) + .pipe(util.cleanNodeModules(path.join(import.meta.dirname, `.moduleignore.${process.platform}`))) .pipe(jsFilter) .pipe(util.stripSourceMappingURL()) .pipe(jsFilter.restore); diff --git a/build/gulpfile.scan.mjs b/build/gulpfile.scan.mjs index af6aa0b150b..0f6b9d13b72 100644 --- a/build/gulpfile.scan.mjs +++ b/build/gulpfile.scan.mjs @@ -5,19 +5,18 @@ import gulp from 'gulp'; import * as path from 'path'; -import task from './lib/task.js'; -import util from './lib/util.js'; +import * as task from './lib/task.ts'; +import * as util from './lib/util.ts'; import electron from '@vscode/gulp-electron'; -import electronConfigModule from './lib/electron.js'; +import * as electronConfigModule from './lib/electron.ts'; import filter 
from 'gulp-filter'; -import deps from './lib/dependencies.js'; +import * as deps from './lib/dependencies.ts'; import { existsSync, readdirSync } from 'fs'; import { fileURLToPath } from 'url'; const { config } = electronConfigModule; -const __dirname = import.meta.dirname; -const root = path.dirname(__dirname); +const root = path.dirname(import.meta.dirname); const BUILD_TARGETS = [ { platform: 'win32', arch: 'x64' }, diff --git a/build/gulpfile.mjs b/build/gulpfile.ts similarity index 89% rename from build/gulpfile.mjs rename to build/gulpfile.ts index 1b14c7edd5f..f8d65580ce7 100644 --- a/build/gulpfile.mjs +++ b/build/gulpfile.ts @@ -2,22 +2,19 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ - import { EventEmitter } from 'events'; import glob from 'glob'; import gulp from 'gulp'; import { createRequire } from 'node:module'; -import { fileURLToPath } from 'url'; import { monacoTypecheckTask /* , monacoTypecheckWatchTask */ } from './gulpfile.editor.mjs'; import { compileExtensionMediaTask, compileExtensionsTask, watchExtensionsTask } from './gulpfile.extensions.mjs'; -import compilation from './lib/compilation.js'; -import task from './lib/task.js'; -import util from './lib/util.js'; +import * as compilation from './lib/compilation.ts'; +import * as task from './lib/task.ts'; +import * as util from './lib/util.ts'; EventEmitter.defaultMaxListeners = 100; const require = createRequire(import.meta.url); -const __dirname = import.meta.dirname; const { transpileTask, compileTask, watchTask, compileApiProposalNamesTask, watchApiProposalNamesTask } = compilation; @@ -55,5 +52,7 @@ process.on('unhandledRejection', (reason, p) => { }); // Load all the gulpfiles only if running tasks other than the editor tasks -glob.sync('gulpfile.*.{mjs,js}', { cwd: __dirname }) - .forEach(f => require(`./${f}`)); +glob.sync('gulpfile.*.{mjs,js}', { cwd: import.meta.dirname }) + .forEach(f => { + return require(`./${f}`); + }); diff --git a/build/gulpfile.vscode.linux.mjs b/build/gulpfile.vscode.linux.mjs index 315c29091a0..5f341526389 100644 --- a/build/gulpfile.vscode.linux.mjs +++ b/build/gulpfile.vscode.linux.mjs @@ -8,13 +8,13 @@ import replace from 'gulp-replace'; import rename from 'gulp-rename'; import es from 'event-stream'; import vfs from 'vinyl-fs'; -import * as utilModule from './lib/util.js'; -import * as getVersionModule from './lib/getVersion.js'; -import * as task from './lib/task.js'; +import * as utilModule from './lib/util.ts'; +import * as getVersionModule from './lib/getVersion.ts'; +import * as task from './lib/task.ts'; import packageJson from '../package.json' with { type: 'json' }; import product from '../product.json' with { type: 'json' }; -import { getDependencies } from './linux/dependencies-generator.js'; -import * as depLists from './linux/debian/dep-lists.js'; +import { getDependencies } from './linux/dependencies-generator.ts'; +import * as depLists from './linux/debian/dep-lists.ts'; import * as path from 'path'; import * as cp from 'child_process'; import { promisify } from 'util'; @@ -23,9 +23,8 @@ import { fileURLToPath } from 'url'; const { rimraf } = utilModule; const { getVersion } = getVersionModule; const { recommendedDeps: debianRecommendedDependencies } = depLists; -const __dirname = import.meta.dirname; const exec = promisify(cp.exec); -const root = 
path.dirname(__dirname); +const root = path.dirname(import.meta.dirname); const commit = getVersion(root); const linuxPackageRevision = Math.floor(new Date().getTime() / 1000); diff --git a/build/gulpfile.vscode.mjs b/build/gulpfile.vscode.mjs index 8f5a7b0d516..1536bb114a6 100644 --- a/build/gulpfile.vscode.mjs +++ b/build/gulpfile.vscode.mjs @@ -13,20 +13,20 @@ import replace from 'gulp-replace'; import filter from 'gulp-filter'; import electron from '@vscode/gulp-electron'; import jsonEditor from 'gulp-json-editor'; -import * as util from './lib/util.js'; -import * as getVersionModule from './lib/getVersion.js'; -import * as dateModule from './lib/date.js'; -import * as task from './lib/task.js'; +import * as util from './lib/util.ts'; +import * as getVersionModule from './lib/getVersion.ts'; +import * as dateModule from './lib/date.ts'; +import * as task from './lib/task.ts'; import buildfile from './buildfile.js'; -import optimize from './lib/optimize.js'; -import * as inlineMetaModule from './lib/inlineMeta.js'; +import * as optimize from './lib/optimize.ts'; +import * as inlineMetaModule from './lib/inlineMeta.ts'; import packageJson from '../package.json' with { type: 'json' }; import product from '../product.json' with { type: 'json' }; import * as crypto from 'crypto'; -import i18n from './lib/i18n.js'; -import * as dependenciesModule from './lib/dependencies.js'; -import electronModule from './lib/electron.js'; -import asarModule from './lib/asar.js'; +import * as i18n from './lib/i18n.ts'; +import * as dependenciesModule from './lib/dependencies.ts'; +import * as electronModule from './lib/electron.ts'; +import * as asarModule from './lib/asar.ts'; import minimist from 'minimist'; import { compileBuildWithoutManglingTask, compileBuildWithManglingTask } from './gulpfile.compile.mjs'; import { compileNonNativeExtensionsBuildTask, compileNativeExtensionsBuildTask, compileAllExtensionsBuildTask, compileExtensionMediaBuildTask, cleanExtensionsBuildTask } from './gulpfile.extensions.mjs'; @@ -43,8 +43,7 @@ const { config } = electronModule; const { createAsar } = asarModule; const glob = promisify(globCallback); const rcedit = promisify(rceditCallback); -const __dirname = import.meta.dirname; -const root = path.dirname(__dirname); +const root = path.dirname(import.meta.dirname); const commit = getVersion(root); // Build @@ -292,14 +291,14 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op const telemetry = gulp.src('.build/telemetry/**', { base: '.build/telemetry', dot: true }); const jsFilter = util.filter(data => !data.isDirectory() && /\.js$/.test(data.path)); - const root = path.resolve(path.join(__dirname, '..')); + const root = path.resolve(path.join(import.meta.dirname, '..')); const productionDependencies = getProductionDependencies(root); const dependenciesSrc = productionDependencies.map(d => path.relative(root, d)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`]).flat().concat('!**/*.mk'); const deps = gulp.src(dependenciesSrc, { base: '.', dot: true }) .pipe(filter(['**', `!**/${config.version}/**`, '!**/bin/darwin-arm64-87/**', '!**/package-lock.json', '!**/yarn.lock', '!**/*.{js,css}.map'])) - .pipe(util.cleanNodeModules(path.join(__dirname, '.moduleignore'))) - .pipe(util.cleanNodeModules(path.join(__dirname, `.moduleignore.${process.platform}`))) + .pipe(util.cleanNodeModules(path.join(import.meta.dirname, '.moduleignore'))) + .pipe(util.cleanNodeModules(path.join(import.meta.dirname, `.moduleignore.${process.platform}`))) 
.pipe(jsFilter) .pipe(util.rewriteSourceMappingURL(sourceMappingURLBase)) .pipe(jsFilter.restore) diff --git a/build/gulpfile.vscode.web.mjs b/build/gulpfile.vscode.web.mjs index e976ed77a61..76a92c72aa8 100644 --- a/build/gulpfile.vscode.web.mjs +++ b/build/gulpfile.vscode.web.mjs @@ -6,19 +6,19 @@ import gulp from 'gulp'; import * as path from 'path'; import es from 'event-stream'; -import * as util from './lib/util.js'; -import * as getVersionModule from './lib/getVersion.js'; -import * as task from './lib/task.js'; -import optimize from './lib/optimize.js'; -import * as dateModule from './lib/date.js'; +import * as util from './lib/util.ts'; +import * as getVersionModule from './lib/getVersion.ts'; +import * as task from './lib/task.ts'; +import * as optimize from './lib/optimize.ts'; +import * as dateModule from './lib/date.ts'; import product from '../product.json' with { type: 'json' }; import rename from 'gulp-rename'; import filter from 'gulp-filter'; -import * as dependenciesModule from './lib/dependencies.js'; +import * as dependenciesModule from './lib/dependencies.ts'; import vfs from 'vinyl-fs'; import packageJson from '../package.json' with { type: 'json' }; import { compileBuildWithManglingTask } from './gulpfile.compile.mjs'; -import extensions from './lib/extensions.js'; +import * as extensions from './lib/extensions.ts'; import VinylFile from 'vinyl'; import jsonEditor from 'gulp-json-editor'; import buildfile from './buildfile.js'; @@ -27,9 +27,8 @@ import { fileURLToPath } from 'url'; const { getVersion } = getVersionModule; const { readISODate } = dateModule; const { getProductionDependencies } = dependenciesModule; -const __dirname = import.meta.dirname; -const REPO_ROOT = path.dirname(__dirname); +const REPO_ROOT = path.dirname(import.meta.dirname); const BUILD_ROOT = path.dirname(REPO_ROOT); const WEB_FOLDER = path.join(REPO_ROOT, 'remote', 'web'); @@ -184,7 +183,7 @@ function packageTask(sourceFolderName, destinationFolderName) { const deps = gulp.src(dependenciesSrc, { base: 'remote/web', dot: true }) .pipe(filter(['**', '!**/package-lock.json'])) - .pipe(util.cleanNodeModules(path.join(__dirname, '.webignore'))); + .pipe(util.cleanNodeModules(path.join(import.meta.dirname, '.webignore'))); const favicon = gulp.src('resources/server/favicon.ico', { base: 'resources/server' }); const manifest = gulp.src('resources/server/manifest.json', { base: 'resources/server' }); diff --git a/build/gulpfile.vscode.win32.mjs b/build/gulpfile.vscode.win32.mjs index c10201dfc10..66e324d1832 100644 --- a/build/gulpfile.vscode.win32.mjs +++ b/build/gulpfile.vscode.win32.mjs @@ -7,8 +7,8 @@ import * as path from 'path'; import * as fs from 'fs'; import assert from 'assert'; import * as cp from 'child_process'; -import * as util from './lib/util.js'; -import * as task from './lib/task.js'; +import * as util from './lib/util.ts'; +import * as task from './lib/task.ts'; import pkg from '../package.json' with { type: 'json' }; import product from '../product.json' with { type: 'json' }; import vfs from 'vinyl-fs'; @@ -16,13 +16,12 @@ import rcedit from 'rcedit'; import { createRequire } from 'module'; const require = createRequire(import.meta.url); -const __dirname = import.meta.dirname; -const repoPath = path.dirname(__dirname); +const repoPath = path.dirname(import.meta.dirname); const buildPath = (/** @type {string} */ arch) => path.join(path.dirname(repoPath), `VSCode-win32-${arch}`); const setupDir = (/** @type {string} */ arch, /** @type {string} */ target) => path.join(repoPath, 
'.build', `win32-${arch}`, `${target}-setup`); -const issPath = path.join(__dirname, 'win32', 'code.iss'); +const issPath = path.join(import.meta.dirname, 'win32', 'code.iss'); const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup'))), 'bin', 'ISCC.exe'); -const signWin32Path = path.join(repoPath, 'build', 'azure-pipelines', 'common', 'sign-win32'); +const signWin32Path = path.join(repoPath, 'build', 'azure-pipelines', 'common', 'sign-win32.ts'); function packageInnoSetup(iss, options, cb) { options = options || {}; diff --git a/build/hygiene.mjs b/build/hygiene.mjs index 3497cafdcc8..f3e37913405 100644 --- a/build/hygiene.mjs +++ b/build/hygiene.mjs @@ -13,7 +13,7 @@ import VinylFile from 'vinyl'; import vfs from 'vinyl-fs'; import { all, copyrightFilter, eslintFilter, indentationFilter, stylelintFilter, tsFormattingFilter, unicodeFilter } from './filters.js'; import eslint from './gulp-eslint.js'; -import formatter from './lib/formatter.js'; +import * as formatter from './lib/formatter.ts'; import gulpstylelint from './stylelint.mjs'; const copyrightHeaderLines = [ @@ -117,7 +117,7 @@ export function hygiene(some, runEslint = true) { this.emit('data', file); }); - const formatting = es.map(function (file, cb) { + const formatting = es.map(function (/** @type {any} */ file, cb) { try { const rawInput = file.contents.toString('utf8'); const rawOutput = formatter.format(file.path, rawInput); @@ -269,7 +269,7 @@ function createGitIndexVinyls(paths) { } // this allows us to run hygiene as a git pre-commit hook -if (import.meta.filename === process.argv[1]) { +if (import.meta.main) { process.on('unhandledRejection', (reason, p) => { console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); process.exit(1); diff --git a/build/lib/asar.js b/build/lib/asar.js deleted file mode 100644 index d08070a4fdc..00000000000 --- a/build/lib/asar.js +++ /dev/null @@ -1,156 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createAsar = createAsar; -const path_1 = __importDefault(require("path")); -const event_stream_1 = __importDefault(require("event-stream")); -const chromium_pickle_js_1 = __importDefault(require("chromium-pickle-js")); -const filesystem_js_1 = __importDefault(require("asar/lib/filesystem.js")); -const vinyl_1 = __importDefault(require("vinyl")); -const minimatch_1 = __importDefault(require("minimatch")); -function createAsar(folderPath, unpackGlobs, skipGlobs, duplicateGlobs, destFilename) { - const shouldUnpackFile = (file) => { - for (let i = 0; i < unpackGlobs.length; i++) { - if ((0, minimatch_1.default)(file.relative, unpackGlobs[i])) { - return true; - } - } - return false; - }; - const shouldSkipFile = (file) => { - for (const skipGlob of skipGlobs) { - if ((0, minimatch_1.default)(file.relative, skipGlob)) { - return true; - } - } - return false; - }; - // Files that should be duplicated between - // node_modules.asar and node_modules - const shouldDuplicateFile = (file) => { - for (const duplicateGlob of duplicateGlobs) { - if ((0, minimatch_1.default)(file.relative, duplicateGlob)) { - return true; - } - } - return false; - }; - const filesystem = new filesystem_js_1.default(folderPath); - const out = []; - // Keep track of pending inserts - let pendingInserts = 0; - let onFileInserted = () => { pendingInserts--; }; - // Do not insert twice the same directory - const seenDir = {}; - const insertDirectoryRecursive = (dir) => { - if (seenDir[dir]) { - return; - } - let lastSlash = dir.lastIndexOf('/'); - if (lastSlash === -1) { - lastSlash = dir.lastIndexOf('\\'); - } - if (lastSlash !== -1) { - insertDirectoryRecursive(dir.substring(0, lastSlash)); - } - seenDir[dir] = true; - filesystem.insertDirectory(dir); - }; - const insertDirectoryForFile = (file) => { - let lastSlash = file.lastIndexOf('/'); - if (lastSlash === -1) { - lastSlash = file.lastIndexOf('\\'); - } - if (lastSlash !== -1) { - insertDirectoryRecursive(file.substring(0, lastSlash)); - } - }; - const insertFile = (relativePath, stat, shouldUnpack) => { - insertDirectoryForFile(relativePath); - pendingInserts++; - // Do not pass `onFileInserted` directly because it gets overwritten below. - // Create a closure capturing `onFileInserted`. 
- filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}).then(() => onFileInserted(), () => onFileInserted()); - }; - return event_stream_1.default.through(function (file) { - if (file.stat.isDirectory()) { - return; - } - if (!file.stat.isFile()) { - throw new Error(`unknown item in stream!`); - } - if (shouldSkipFile(file)) { - this.queue(new vinyl_1.default({ - base: '.', - path: file.path, - stat: file.stat, - contents: file.contents - })); - return; - } - if (shouldDuplicateFile(file)) { - this.queue(new vinyl_1.default({ - base: '.', - path: file.path, - stat: file.stat, - contents: file.contents - })); - } - const shouldUnpack = shouldUnpackFile(file); - insertFile(file.relative, { size: file.contents.length, mode: file.stat.mode }, shouldUnpack); - if (shouldUnpack) { - // The file goes outside of xx.asar, in a folder xx.asar.unpacked - const relative = path_1.default.relative(folderPath, file.path); - this.queue(new vinyl_1.default({ - base: '.', - path: path_1.default.join(destFilename + '.unpacked', relative), - stat: file.stat, - contents: file.contents - })); - } - else { - // The file goes inside of xx.asar - out.push(file.contents); - } - }, function () { - const finish = () => { - { - const headerPickle = chromium_pickle_js_1.default.createEmpty(); - headerPickle.writeString(JSON.stringify(filesystem.header)); - const headerBuf = headerPickle.toBuffer(); - const sizePickle = chromium_pickle_js_1.default.createEmpty(); - sizePickle.writeUInt32(headerBuf.length); - const sizeBuf = sizePickle.toBuffer(); - out.unshift(headerBuf); - out.unshift(sizeBuf); - } - const contents = Buffer.concat(out); - out.length = 0; - this.queue(new vinyl_1.default({ - base: '.', - path: destFilename, - contents: contents - })); - this.queue(null); - }; - // Call finish() only when all file inserts have finished... - if (pendingInserts === 0) { - finish(); - } - else { - onFileInserted = () => { - pendingInserts--; - if (pendingInserts === 0) { - finish(); - } - }; - } - }); -} -//# sourceMappingURL=asar.js.map \ No newline at end of file diff --git a/build/lib/builtInExtensions.js b/build/lib/builtInExtensions.js deleted file mode 100644 index 249777c4458..00000000000 --- a/build/lib/builtInExtensions.js +++ /dev/null @@ -1,179 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || (function () { - var ownKeys = function(o) { - ownKeys = Object.getOwnPropertyNames || function (o) { - var ar = []; - for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys(o); - }; - return function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); - __setModuleDefault(result, mod); - return result; - }; -})(); -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getExtensionStream = getExtensionStream; -exports.getBuiltInExtensions = getBuiltInExtensions; -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const os_1 = __importDefault(require("os")); -const rimraf_1 = __importDefault(require("rimraf")); -const event_stream_1 = __importDefault(require("event-stream")); -const gulp_rename_1 = __importDefault(require("gulp-rename")); -const vinyl_fs_1 = __importDefault(require("vinyl-fs")); -const ext = __importStar(require("./extensions")); -const fancy_log_1 = __importDefault(require("fancy-log")); -const ansi_colors_1 = __importDefault(require("ansi-colors")); -const root = path_1.default.dirname(path_1.default.dirname(__dirname)); -const productjson = JSON.parse(fs_1.default.readFileSync(path_1.default.join(__dirname, '../../product.json'), 'utf8')); -const builtInExtensions = productjson.builtInExtensions || []; -const webBuiltInExtensions = productjson.webBuiltInExtensions || []; -const controlFilePath = path_1.default.join(os_1.default.homedir(), '.vscode-oss-dev', 'extensions', 'control.json'); -const ENABLE_LOGGING = !process.env['VSCODE_BUILD_BUILTIN_EXTENSIONS_SILENCE_PLEASE']; -function log(...messages) { - if (ENABLE_LOGGING) { - (0, fancy_log_1.default)(...messages); - } -} -function getExtensionPath(extension) { - return path_1.default.join(root, '.build', 'builtInExtensions', extension.name); -} -function isUpToDate(extension) { - const packagePath = path_1.default.join(getExtensionPath(extension), 'package.json'); - if (!fs_1.default.existsSync(packagePath)) { - return false; - } - const packageContents = fs_1.default.readFileSync(packagePath, { encoding: 'utf8' }); - try { - const diskVersion = JSON.parse(packageContents).version; - return (diskVersion === extension.version); - } - catch (err) { - return false; - } -} -function getExtensionDownloadStream(extension) { - let input; - if (extension.vsix) { - input = ext.fromVsix(path_1.default.join(root, extension.vsix), extension); - } - else if (productjson.extensionsGallery?.serviceUrl) { - input = ext.fromMarketplace(productjson.extensionsGallery.serviceUrl, extension); - } - else { - input = ext.fromGithub(extension); - } - return input.pipe((0, gulp_rename_1.default)(p => p.dirname = `${extension.name}/${p.dirname}`)); -} -function getExtensionStream(extension) { - // if the extension exists on disk, use those files instead of downloading anew - if (isUpToDate(extension)) { - log('[extensions]', `${extension.name}@${extension.version} up to date`, ansi_colors_1.default.green('✔︎')); - return vinyl_fs_1.default.src(['**'], { 
cwd: getExtensionPath(extension), dot: true }) - .pipe((0, gulp_rename_1.default)(p => p.dirname = `${extension.name}/${p.dirname}`)); - } - return getExtensionDownloadStream(extension); -} -function syncMarketplaceExtension(extension) { - const galleryServiceUrl = productjson.extensionsGallery?.serviceUrl; - const source = ansi_colors_1.default.blue(galleryServiceUrl ? '[marketplace]' : '[github]'); - if (isUpToDate(extension)) { - log(source, `${extension.name}@${extension.version}`, ansi_colors_1.default.green('✔︎')); - return event_stream_1.default.readArray([]); - } - rimraf_1.default.sync(getExtensionPath(extension)); - return getExtensionDownloadStream(extension) - .pipe(vinyl_fs_1.default.dest('.build/builtInExtensions')) - .on('end', () => log(source, extension.name, ansi_colors_1.default.green('✔︎'))); -} -function syncExtension(extension, controlState) { - if (extension.platforms) { - const platforms = new Set(extension.platforms); - if (!platforms.has(process.platform)) { - log(ansi_colors_1.default.gray('[skip]'), `${extension.name}@${extension.version}: Platform '${process.platform}' not supported: [${extension.platforms}]`, ansi_colors_1.default.green('✔︎')); - return event_stream_1.default.readArray([]); - } - } - switch (controlState) { - case 'disabled': - log(ansi_colors_1.default.blue('[disabled]'), ansi_colors_1.default.gray(extension.name)); - return event_stream_1.default.readArray([]); - case 'marketplace': - return syncMarketplaceExtension(extension); - default: - if (!fs_1.default.existsSync(controlState)) { - log(ansi_colors_1.default.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`)); - return event_stream_1.default.readArray([]); - } - else if (!fs_1.default.existsSync(path_1.default.join(controlState, 'package.json'))) { - log(ansi_colors_1.default.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`)); - return event_stream_1.default.readArray([]); - } - log(ansi_colors_1.default.blue('[local]'), `${extension.name}: ${ansi_colors_1.default.cyan(controlState)}`, ansi_colors_1.default.green('✔︎')); - return event_stream_1.default.readArray([]); - } -} -function readControlFile() { - try { - return JSON.parse(fs_1.default.readFileSync(controlFilePath, 'utf8')); - } - catch (err) { - return {}; - } -} -function writeControlFile(control) { - fs_1.default.mkdirSync(path_1.default.dirname(controlFilePath), { recursive: true }); - fs_1.default.writeFileSync(controlFilePath, JSON.stringify(control, null, 2)); -} -function getBuiltInExtensions() { - log('Synchronizing built-in extensions...'); - log(`You can manage built-in extensions with the ${ansi_colors_1.default.cyan('--builtin')} flag`); - const control = readControlFile(); - const streams = []; - for (const extension of [...builtInExtensions, ...webBuiltInExtensions]) { - const controlState = control[extension.name] || 'marketplace'; - control[extension.name] = controlState; - streams.push(syncExtension(extension, controlState)); - } - writeControlFile(control); - return new Promise((resolve, reject) => { - event_stream_1.default.merge(streams) - .on('error', reject) - .on('end', resolve); - }); -} -if (require.main === module) { - getBuiltInExtensions().then(() => process.exit(0)).catch(err => { - console.error(err); - process.exit(1); - }); -} -//# sourceMappingURL=builtInExtensions.js.map \ No newline at end of file diff --git 
a/build/lib/builtInExtensions.ts b/build/lib/builtInExtensions.ts index e9a1180ce35..d52567b17d1 100644 --- a/build/lib/builtInExtensions.ts +++ b/build/lib/builtInExtensions.ts @@ -10,7 +10,7 @@ import rimraf from 'rimraf'; import es from 'event-stream'; import rename from 'gulp-rename'; import vfs from 'vinyl-fs'; -import * as ext from './extensions'; +import * as ext from './extensions.ts'; import fancyLog from 'fancy-log'; import ansiColors from 'ansi-colors'; import { Stream } from 'stream'; @@ -34,10 +34,10 @@ export interface IExtensionDefinition { }; } -const root = path.dirname(path.dirname(__dirname)); -const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8')); -const builtInExtensions = productjson.builtInExtensions || []; -const webBuiltInExtensions = productjson.webBuiltInExtensions || []; +const root = path.dirname(path.dirname(import.meta.dirname)); +const productjson = JSON.parse(fs.readFileSync(path.join(import.meta.dirname, '../../product.json'), 'utf8')); +const builtInExtensions = productjson.builtInExtensions as IExtensionDefinition[] || []; +const webBuiltInExtensions = productjson.webBuiltInExtensions as IExtensionDefinition[] || []; const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json'); const ENABLE_LOGGING = !process.env['VSCODE_BUILD_BUILTIN_EXTENSIONS_SILENCE_PLEASE']; @@ -181,7 +181,7 @@ export function getBuiltInExtensions(): Promise { }); } -if (require.main === module) { +if (import.meta.main) { getBuiltInExtensions().then(() => process.exit(0)).catch(err => { console.error(err); process.exit(1); diff --git a/build/lib/builtInExtensionsCG.js b/build/lib/builtInExtensionsCG.js deleted file mode 100644 index 3dc0ae27f0a..00000000000 --- a/build/lib/builtInExtensionsCG.js +++ /dev/null @@ -1,81 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const url_1 = __importDefault(require("url")); -const ansi_colors_1 = __importDefault(require("ansi-colors")); -const root = path_1.default.dirname(path_1.default.dirname(__dirname)); -const rootCG = path_1.default.join(root, 'extensionsCG'); -const productjson = JSON.parse(fs_1.default.readFileSync(path_1.default.join(__dirname, '../../product.json'), 'utf8')); -const builtInExtensions = productjson.builtInExtensions || []; -const webBuiltInExtensions = productjson.webBuiltInExtensions || []; -const token = process.env['GITHUB_TOKEN']; -const contentBasePath = 'raw.githubusercontent.com'; -const contentFileNames = ['package.json', 'package-lock.json']; -async function downloadExtensionDetails(extension) { - const extensionLabel = `${extension.name}@${extension.version}`; - const repository = url_1.default.parse(extension.repo).path.substr(1); - const repositoryContentBaseUrl = `https://${token ? 
`${token}@` : ''}${contentBasePath}/${repository}/v${extension.version}`; - async function getContent(fileName) { - try { - const response = await fetch(`${repositoryContentBaseUrl}/${fileName}`); - if (response.ok) { - return { fileName, body: Buffer.from(await response.arrayBuffer()) }; - } - else if (response.status === 404) { - return { fileName, body: undefined }; - } - else { - return { fileName, body: null }; - } - } - catch (e) { - return { fileName, body: null }; - } - } - const promises = contentFileNames.map(getContent); - console.log(extensionLabel); - const results = await Promise.all(promises); - for (const result of results) { - if (result.body) { - const extensionFolder = path_1.default.join(rootCG, extension.name); - fs_1.default.mkdirSync(extensionFolder, { recursive: true }); - fs_1.default.writeFileSync(path_1.default.join(extensionFolder, result.fileName), result.body); - console.log(` - ${result.fileName} ${ansi_colors_1.default.green('✔︎')}`); - } - else if (result.body === undefined) { - console.log(` - ${result.fileName} ${ansi_colors_1.default.yellow('⚠️')}`); - } - else { - console.log(` - ${result.fileName} ${ansi_colors_1.default.red('🛑')}`); - } - } - // Validation - if (!results.find(r => r.fileName === 'package.json')?.body) { - // throw new Error(`The "package.json" file could not be found for the built-in extension - ${extensionLabel}`); - } - if (!results.find(r => r.fileName === 'package-lock.json')?.body) { - // throw new Error(`The "package-lock.json" could not be found for the built-in extension - ${extensionLabel}`); - } -} -async function main() { - for (const extension of [...builtInExtensions, ...webBuiltInExtensions]) { - await downloadExtensionDetails(extension); - } -} -main().then(() => { - console.log(`Built-in extensions component data downloaded ${ansi_colors_1.default.green('✔︎')}`); - process.exit(0); -}, err => { - console.log(`Built-in extensions component data could not be downloaded ${ansi_colors_1.default.red('🛑')}`); - console.error(err); - process.exit(1); -}); -//# sourceMappingURL=builtInExtensionsCG.js.map \ No newline at end of file diff --git a/build/lib/builtInExtensionsCG.ts b/build/lib/builtInExtensionsCG.ts index 4628b365a2e..1c4ce609c3d 100644 --- a/build/lib/builtInExtensionsCG.ts +++ b/build/lib/builtInExtensionsCG.ts @@ -7,13 +7,13 @@ import fs from 'fs'; import path from 'path'; import url from 'url'; import ansiColors from 'ansi-colors'; -import { IExtensionDefinition } from './builtInExtensions'; +import type { IExtensionDefinition } from './builtInExtensions.ts'; -const root = path.dirname(path.dirname(__dirname)); +const root = path.dirname(path.dirname(import.meta.dirname)); const rootCG = path.join(root, 'extensionsCG'); -const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8')); -const builtInExtensions = productjson.builtInExtensions || []; -const webBuiltInExtensions = productjson.webBuiltInExtensions || []; +const productjson = JSON.parse(fs.readFileSync(path.join(import.meta.dirname, '../../product.json'), 'utf8')); +const builtInExtensions = productjson.builtInExtensions as IExtensionDefinition[] || []; +const webBuiltInExtensions = productjson.webBuiltInExtensions as IExtensionDefinition[] || []; const token = process.env['GITHUB_TOKEN']; const contentBasePath = 'raw.githubusercontent.com'; diff --git a/build/lib/bundle.js b/build/lib/bundle.js deleted file mode 100644 index 382b648defb..00000000000 --- a/build/lib/bundle.js +++ /dev/null @@ -1,62 +0,0 @@ -"use 
strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.removeAllTSBoilerplate = removeAllTSBoilerplate; -function removeAllTSBoilerplate(source) { - const seen = new Array(BOILERPLATE.length).fill(true, 0, BOILERPLATE.length); - return removeDuplicateTSBoilerplate(source, seen); -} -// Taken from typescript compiler => emitFiles -const BOILERPLATE = [ - { start: /^var __extends/, end: /^}\)\(\);$/ }, - { start: /^var __assign/, end: /^};$/ }, - { start: /^var __decorate/, end: /^};$/ }, - { start: /^var __metadata/, end: /^};$/ }, - { start: /^var __param/, end: /^};$/ }, - { start: /^var __awaiter/, end: /^};$/ }, - { start: /^var __generator/, end: /^};$/ }, - { start: /^var __createBinding/, end: /^}\)\);$/ }, - { start: /^var __setModuleDefault/, end: /^}\);$/ }, - { start: /^var __importStar/, end: /^};$/ }, - { start: /^var __addDisposableResource/, end: /^};$/ }, - { start: /^var __disposeResources/, end: /^}\);$/ }, -]; -function removeDuplicateTSBoilerplate(source, SEEN_BOILERPLATE = []) { - const lines = source.split(/\r\n|\n|\r/); - const newLines = []; - let IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE; - for (let i = 0; i < lines.length; i++) { - const line = lines[i]; - if (IS_REMOVING_BOILERPLATE) { - newLines.push(''); - if (END_BOILERPLATE.test(line)) { - IS_REMOVING_BOILERPLATE = false; - } - } - else { - for (let j = 0; j < BOILERPLATE.length; j++) { - const boilerplate = BOILERPLATE[j]; - if (boilerplate.start.test(line)) { - if (SEEN_BOILERPLATE[j]) { - IS_REMOVING_BOILERPLATE = true; - END_BOILERPLATE = boilerplate.end; - } - else { - SEEN_BOILERPLATE[j] = true; - } - } - } - if (IS_REMOVING_BOILERPLATE) { - newLines.push(''); - } - else { - newLines.push(line); - } - } - } - return newLines.join('\n'); -} -//# sourceMappingURL=bundle.js.map \ No newline at end of file diff --git a/build/lib/compilation.js b/build/lib/compilation.js deleted file mode 100644 index 5d4fd4a90b2..00000000000 --- a/build/lib/compilation.js +++ /dev/null @@ -1,340 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || (function () { - var ownKeys = function(o) { - ownKeys = Object.getOwnPropertyNames || function (o) { - var ar = []; - for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys(o); - }; - return function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); - __setModuleDefault(result, mod); - return result; - }; -})(); -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.watchApiProposalNamesTask = exports.compileApiProposalNamesTask = void 0; -exports.createCompile = createCompile; -exports.transpileTask = transpileTask; -exports.compileTask = compileTask; -exports.watchTask = watchTask; -const event_stream_1 = __importDefault(require("event-stream")); -const fs_1 = __importDefault(require("fs")); -const gulp_1 = __importDefault(require("gulp")); -const path_1 = __importDefault(require("path")); -const monacodts = __importStar(require("./monaco-api")); -const nls = __importStar(require("./nls")); -const reporter_1 = require("./reporter"); -const util = __importStar(require("./util")); -const fancy_log_1 = __importDefault(require("fancy-log")); -const ansi_colors_1 = __importDefault(require("ansi-colors")); -const os_1 = __importDefault(require("os")); -const vinyl_1 = __importDefault(require("vinyl")); -const task = __importStar(require("./task")); -const index_1 = require("./mangle/index"); -const typescript_1 = __importDefault(require("typescript")); -const watch_1 = __importDefault(require("./watch")); -const gulp_bom_1 = __importDefault(require("gulp-bom")); -// --- gulp-tsb: compile and transpile -------------------------------- -const reporter = (0, reporter_1.createReporter)(); -function getTypeScriptCompilerOptions(src) { - const rootDir = path_1.default.join(__dirname, `../../${src}`); - const options = {}; - options.verbose = false; - options.sourceMap = true; - if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry - options.sourceMap = false; - } - options.rootDir = rootDir; - options.baseUrl = rootDir; - options.sourceRoot = util.toFileUri(rootDir); - options.newLine = /\r\n/.test(fs_1.default.readFileSync(__filename, 'utf8')) ? 
0 : 1; - return options; -} -function createCompile(src, { build, emitError, transpileOnly, preserveEnglish }) { - const tsb = require('./tsb'); - const sourcemaps = require('gulp-sourcemaps'); - const projectPath = path_1.default.join(__dirname, '../../', src, 'tsconfig.json'); - const overrideOptions = { ...getTypeScriptCompilerOptions(src), inlineSources: Boolean(build) }; - if (!build) { - overrideOptions.inlineSourceMap = true; - } - const compilation = tsb.create(projectPath, overrideOptions, { - verbose: false, - transpileOnly: Boolean(transpileOnly), - transpileWithEsbuild: typeof transpileOnly !== 'boolean' && transpileOnly.esbuild - }, err => reporter(err)); - function pipeline(token) { - const tsFilter = util.filter(data => /\.ts$/.test(data.path)); - const isUtf8Test = (f) => /(\/|\\)test(\/|\\).*utf8/.test(f.path); - const isRuntimeJs = (f) => f.path.endsWith('.js') && !f.path.includes('fixtures'); - const noDeclarationsFilter = util.filter(data => !(/\.d\.ts$/.test(data.path))); - const input = event_stream_1.default.through(); - const output = input - .pipe(util.$if(isUtf8Test, (0, gulp_bom_1.default)())) // this is required to preserve BOM in test files that loose it otherwise - .pipe(util.$if(!build && isRuntimeJs, util.appendOwnPathSourceURL())) - .pipe(tsFilter) - .pipe(util.loadSourcemaps()) - .pipe(compilation(token)) - .pipe(noDeclarationsFilter) - .pipe(util.$if(build, nls.nls({ preserveEnglish }))) - .pipe(noDeclarationsFilter.restore) - .pipe(util.$if(!transpileOnly, sourcemaps.write('.', { - addComment: false, - includeContent: !!build, - sourceRoot: overrideOptions.sourceRoot - }))) - .pipe(tsFilter.restore) - .pipe(reporter.end(!!emitError)); - return event_stream_1.default.duplex(input, output); - } - pipeline.tsProjectSrc = () => { - return compilation.src({ base: src }); - }; - pipeline.projectPath = projectPath; - return pipeline; -} -function transpileTask(src, out, esbuild) { - const task = () => { - const transpile = createCompile(src, { build: false, emitError: true, transpileOnly: { esbuild: !!esbuild }, preserveEnglish: false }); - const srcPipe = gulp_1.default.src(`${src}/**`, { base: `${src}` }); - return srcPipe - .pipe(transpile()) - .pipe(gulp_1.default.dest(out)); - }; - task.taskName = `transpile-${path_1.default.basename(src)}`; - return task; -} -function compileTask(src, out, build, options = {}) { - const task = () => { - if (os_1.default.totalmem() < 4_000_000_000) { - throw new Error('compilation requires 4GB of RAM'); - } - const compile = createCompile(src, { build, emitError: true, transpileOnly: false, preserveEnglish: !!options.preserveEnglish }); - const srcPipe = gulp_1.default.src(`${src}/**`, { base: `${src}` }); - const generator = new MonacoGenerator(false); - if (src === 'src') { - generator.execute(); - } - // mangle: TypeScript to TypeScript - let mangleStream = event_stream_1.default.through(); - if (build && !options.disableMangle) { - let ts2tsMangler = new index_1.Mangler(compile.projectPath, (...data) => (0, fancy_log_1.default)(ansi_colors_1.default.blue('[mangler]'), ...data), { mangleExports: true, manglePrivateFields: true }); - const newContentsByFileName = ts2tsMangler.computeNewFileContents(new Set(['saveState'])); - mangleStream = event_stream_1.default.through(async function write(data) { - const tsNormalPath = typescript_1.default.normalizePath(data.path); - const newContents = (await newContentsByFileName).get(tsNormalPath); - if (newContents !== undefined) { - data.contents = Buffer.from(newContents.out); - 
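Both the compiled output being deleted here and the compilation.ts hunks further down drive their gulp pipelines through event-stream's through(write, end), emitting 'data' and 'end' by hand. For orientation, a minimal sketch of that transform shape; the transform itself (upper-casing file contents) is purely illustrative and not part of this patch:

	import es from 'event-stream';
	import type File from 'vinyl';

	// Hypothetical transform: upper-cases the contents of every vinyl file in the pipeline.
	const upperCase = es.through(
		function write(this: any, file: File) {
			if (Buffer.isBuffer(file.contents)) {
				file.contents = Buffer.from(file.contents.toString('utf8').toUpperCase());
			}
			this.emit('data', file); // forward the (possibly modified) file downstream
		},
		function end(this: any) {
			this.emit('end'); // no more files will follow
		}
	);

	// Assumed usage: gulp.src('src/**/*.txt').pipe(upperCase).pipe(gulp.dest('out'));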
data.sourceMap = newContents.sourceMap && JSON.parse(newContents.sourceMap); - } - this.push(data); - }, async function end() { - // free resources - (await newContentsByFileName).clear(); - this.push(null); - ts2tsMangler = undefined; - }); - } - return srcPipe - .pipe(mangleStream) - .pipe(generator.stream) - .pipe(compile()) - .pipe(gulp_1.default.dest(out)); - }; - task.taskName = `compile-${path_1.default.basename(src)}`; - return task; -} -function watchTask(out, build, srcPath = 'src') { - const task = () => { - const compile = createCompile(srcPath, { build, emitError: false, transpileOnly: false, preserveEnglish: false }); - const src = gulp_1.default.src(`${srcPath}/**`, { base: srcPath }); - const watchSrc = (0, watch_1.default)(`${srcPath}/**`, { base: srcPath, readDelay: 200 }); - const generator = new MonacoGenerator(true); - generator.execute(); - return watchSrc - .pipe(generator.stream) - .pipe(util.incremental(compile, src, true)) - .pipe(gulp_1.default.dest(out)); - }; - task.taskName = `watch-${path_1.default.basename(out)}`; - return task; -} -const REPO_SRC_FOLDER = path_1.default.join(__dirname, '../../src'); -class MonacoGenerator { - _isWatch; - stream; - _watchedFiles; - _fsProvider; - _declarationResolver; - constructor(isWatch) { - this._isWatch = isWatch; - this.stream = event_stream_1.default.through(); - this._watchedFiles = {}; - const onWillReadFile = (moduleId, filePath) => { - if (!this._isWatch) { - return; - } - if (this._watchedFiles[filePath]) { - return; - } - this._watchedFiles[filePath] = true; - fs_1.default.watchFile(filePath, () => { - this._declarationResolver.invalidateCache(moduleId); - this._executeSoon(); - }); - }; - this._fsProvider = new class extends monacodts.FSProvider { - readFileSync(moduleId, filePath) { - onWillReadFile(moduleId, filePath); - return super.readFileSync(moduleId, filePath); - } - }; - this._declarationResolver = new monacodts.DeclarationResolver(this._fsProvider); - if (this._isWatch) { - fs_1.default.watchFile(monacodts.RECIPE_PATH, () => { - this._executeSoon(); - }); - } - } - _executeSoonTimer = null; - _executeSoon() { - if (this._executeSoonTimer !== null) { - clearTimeout(this._executeSoonTimer); - this._executeSoonTimer = null; - } - this._executeSoonTimer = setTimeout(() => { - this._executeSoonTimer = null; - this.execute(); - }, 20); - } - _run() { - const r = monacodts.run3(this._declarationResolver); - if (!r && !this._isWatch) { - // The build must always be able to generate the monaco.d.ts - throw new Error(`monaco.d.ts generation error - Cannot continue`); - } - return r; - } - _log(message, ...rest) { - (0, fancy_log_1.default)(ansi_colors_1.default.cyan('[monaco.d.ts]'), message, ...rest); - } - execute() { - const startTime = Date.now(); - const result = this._run(); - if (!result) { - // nothing really changed - return; - } - if (result.isTheSame) { - return; - } - fs_1.default.writeFileSync(result.filePath, result.content); - fs_1.default.writeFileSync(path_1.default.join(REPO_SRC_FOLDER, 'vs/editor/common/standalone/standaloneEnums.ts'), result.enums); - this._log(`monaco.d.ts is changed - total time took ${Date.now() - startTime} ms`); - if (!this._isWatch) { - this.stream.emit('error', 'monaco.d.ts is no longer up to date. 
Please run gulp watch and commit the new file.'); - } - } -} -function generateApiProposalNames() { - let eol; - try { - const src = fs_1.default.readFileSync('src/vs/platform/extensions/common/extensionsApiProposals.ts', 'utf-8'); - const match = /\r?\n/m.exec(src); - eol = match ? match[0] : os_1.default.EOL; - } - catch { - eol = os_1.default.EOL; - } - const pattern = /vscode\.proposed\.([a-zA-Z\d]+)\.d\.ts$/; - const versionPattern = /^\s*\/\/\s*version\s*:\s*(\d+)\s*$/mi; - const proposals = new Map(); - const input = event_stream_1.default.through(); - const output = input - .pipe(util.filter((f) => pattern.test(f.path))) - .pipe(event_stream_1.default.through((f) => { - const name = path_1.default.basename(f.path); - const match = pattern.exec(name); - if (!match) { - return; - } - const proposalName = match[1]; - const contents = f.contents.toString('utf8'); - const versionMatch = versionPattern.exec(contents); - const version = versionMatch ? versionMatch[1] : undefined; - proposals.set(proposalName, { - proposal: `https://raw.githubusercontent.com/microsoft/vscode/main/src/vscode-dts/vscode.proposed.${proposalName}.d.ts`, - version: version ? parseInt(version) : undefined - }); - }, function () { - const names = [...proposals.keys()].sort(); - const contents = [ - '/*---------------------------------------------------------------------------------------------', - ' * Copyright (c) Microsoft Corporation. All rights reserved.', - ' * Licensed under the MIT License. See License.txt in the project root for license information.', - ' *--------------------------------------------------------------------------------------------*/', - '', - '// THIS IS A GENERATED FILE. DO NOT EDIT DIRECTLY.', - '', - 'const _allApiProposals = {', - `${names.map(proposalName => { - const proposal = proposals.get(proposalName); - return `\t${proposalName}: {${eol}\t\tproposal: '${proposal.proposal}',${eol}${proposal.version ? 
`\t\tversion: ${proposal.version}${eol}` : ''}\t}`; - }).join(`,${eol}`)}`, - '};', - 'export const allApiProposals = Object.freeze<{ [proposalName: string]: Readonly<{ proposal: string; version?: number }> }>(_allApiProposals);', - 'export type ApiProposalName = keyof typeof _allApiProposals;', - '', - ].join(eol); - this.emit('data', new vinyl_1.default({ - path: 'vs/platform/extensions/common/extensionsApiProposals.ts', - contents: Buffer.from(contents) - })); - this.emit('end'); - })); - return event_stream_1.default.duplex(input, output); -} -const apiProposalNamesReporter = (0, reporter_1.createReporter)('api-proposal-names'); -exports.compileApiProposalNamesTask = task.define('compile-api-proposal-names', () => { - return gulp_1.default.src('src/vscode-dts/**') - .pipe(generateApiProposalNames()) - .pipe(gulp_1.default.dest('src')) - .pipe(apiProposalNamesReporter.end(true)); -}); -exports.watchApiProposalNamesTask = task.define('watch-api-proposal-names', () => { - const task = () => gulp_1.default.src('src/vscode-dts/**') - .pipe(generateApiProposalNames()) - .pipe(apiProposalNamesReporter.end(true)); - return (0, watch_1.default)('src/vscode-dts/**', { readDelay: 200 }) - .pipe(util.debounce(task)) - .pipe(gulp_1.default.dest('src')); -}); -//# sourceMappingURL=compilation.js.map \ No newline at end of file diff --git a/build/lib/compilation.ts b/build/lib/compilation.ts index 53e37d82aa4..89f4b6a89d2 100644 --- a/build/lib/compilation.ts +++ b/build/lib/compilation.ts @@ -7,20 +7,22 @@ import es from 'event-stream'; import fs from 'fs'; import gulp from 'gulp'; import path from 'path'; -import * as monacodts from './monaco-api'; -import * as nls from './nls'; -import { createReporter } from './reporter'; -import * as util from './util'; +import * as monacodts from './monaco-api.ts'; +import * as nls from './nls.ts'; +import { createReporter } from './reporter.ts'; +import * as util from './util.ts'; import fancyLog from 'fancy-log'; import ansiColors from 'ansi-colors'; import os from 'os'; import File from 'vinyl'; -import * as task from './task'; -import { Mangler } from './mangle/index'; -import { RawSourceMap } from 'source-map'; +import * as task from './task.ts'; +import { Mangler } from './mangle/index.ts'; +import type { RawSourceMap } from 'source-map'; import ts from 'typescript'; -import watch from './watch'; +import watch from './watch/index.ts'; import bom from 'gulp-bom'; +import * as tsb from './tsb/index.ts'; +import sourcemaps from 'gulp-sourcemaps'; // --- gulp-tsb: compile and transpile -------------------------------- @@ -28,7 +30,7 @@ import bom from 'gulp-bom'; const reporter = createReporter(); function getTypeScriptCompilerOptions(src: string): ts.CompilerOptions { - const rootDir = path.join(__dirname, `../../${src}`); + const rootDir = path.join(import.meta.dirname, `../../${src}`); const options: ts.CompilerOptions = {}; options.verbose = false; options.sourceMap = true; @@ -38,7 +40,7 @@ function getTypeScriptCompilerOptions(src: string): ts.CompilerOptions { options.rootDir = rootDir; options.baseUrl = rootDir; options.sourceRoot = util.toFileUri(rootDir); - options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 0 : 1; + options.newLine = /\r\n/.test(fs.readFileSync(import.meta.filename, 'utf8')) ? 
0 : 1; return options; } @@ -50,11 +52,7 @@ interface ICompileTaskOptions { } export function createCompile(src: string, { build, emitError, transpileOnly, preserveEnglish }: ICompileTaskOptions) { - const tsb = require('./tsb') as typeof import('./tsb'); - const sourcemaps = require('gulp-sourcemaps') as typeof import('gulp-sourcemaps'); - - - const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json'); + const projectPath = path.join(import.meta.dirname, '../../', src, 'tsconfig.json'); const overrideOptions = { ...getTypeScriptCompilerOptions(src), inlineSources: Boolean(build) }; if (!build) { overrideOptions.inlineSourceMap = true; @@ -138,7 +136,7 @@ export function compileTask(src: string, out: string, build: boolean, options: { const newContentsByFileName = ts2tsMangler.computeNewFileContents(new Set(['saveState'])); mangleStream = es.through(async function write(data: File & { sourceMap?: RawSourceMap }) { type TypeScriptExt = typeof ts & { normalizePath(path: string): string }; - const tsNormalPath = (ts).normalizePath(data.path); + const tsNormalPath = (ts as TypeScriptExt).normalizePath(data.path); const newContents = (await newContentsByFileName).get(tsNormalPath); if (newContents !== undefined) { data.contents = Buffer.from(newContents.out); @@ -185,7 +183,7 @@ export function watchTask(out: string, build: boolean, srcPath: string = 'src'): return task; } -const REPO_SRC_FOLDER = path.join(__dirname, '../../src'); +const REPO_SRC_FOLDER = path.join(import.meta.dirname, '../../src'); class MonacoGenerator { private readonly _isWatch: boolean; diff --git a/build/lib/date.js b/build/lib/date.js deleted file mode 100644 index 1ed884fb7ee..00000000000 --- a/build/lib/date.js +++ /dev/null @@ -1,35 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.writeISODate = writeISODate; -exports.readISODate = readISODate; -const path_1 = __importDefault(require("path")); -const fs_1 = __importDefault(require("fs")); -const root = path_1.default.join(__dirname, '..', '..'); -/** - * Writes a `outDir/date` file with the contents of the build - * so that other tasks during the build process can use it and - * all use the same date. 
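The compilation.ts hunk above also collects the syntax-level changes that recur throughout this patch: lazy require() calls become top-level imports, relative imports carry explicit .ts extensions, type-only imports are marked with import type, and angle-bracket casts become as-casts, presumably so that every remaining annotation can be stripped without transformation. A small sketch of those patterns; it is illustrative only, and the path passed to the internal normalizePath helper (which the build already relies on) is made up:

	import type { RawSourceMap } from 'source-map'; // type-only import, erased before the code runs
	import ts from 'typescript';
	// relative imports now name their extension explicitly, e.g. import * as util from './util.ts';

	// <T>expr casts are replaced with as-casts, which survive type stripping:
	type TypeScriptExt = typeof ts & { normalizePath(path: string): string };
	const tsNormalPath = (ts as TypeScriptExt).normalizePath('src//vs/./code.ts');

	const map: RawSourceMap | undefined = undefined; // placeholder so the type-only import is used
	console.log(tsNormalPath, map);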
- */ -function writeISODate(outDir) { - const result = () => new Promise((resolve, _) => { - const outDirectory = path_1.default.join(root, outDir); - fs_1.default.mkdirSync(outDirectory, { recursive: true }); - const date = new Date().toISOString(); - fs_1.default.writeFileSync(path_1.default.join(outDirectory, 'date'), date, 'utf8'); - resolve(); - }); - result.taskName = 'build-date-file'; - return result; -} -function readISODate(outDir) { - const outDirectory = path_1.default.join(root, outDir); - return fs_1.default.readFileSync(path_1.default.join(outDirectory, 'date'), 'utf8'); -} -//# sourceMappingURL=date.js.map \ No newline at end of file diff --git a/build/lib/date.ts b/build/lib/date.ts index 8a933178952..9c20c9eeb22 100644 --- a/build/lib/date.ts +++ b/build/lib/date.ts @@ -6,7 +6,7 @@ import path from 'path'; import fs from 'fs'; -const root = path.join(__dirname, '..', '..'); +const root = path.join(import.meta.dirname, '..', '..'); /** * Writes a `outDir/date` file with the contents of the build diff --git a/build/lib/dependencies.js b/build/lib/dependencies.js deleted file mode 100644 index 04a09f98708..00000000000 --- a/build/lib/dependencies.js +++ /dev/null @@ -1,57 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getProductionDependencies = getProductionDependencies; -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const child_process_1 = __importDefault(require("child_process")); -const root = fs_1.default.realpathSync(path_1.default.dirname(path_1.default.dirname(__dirname))); -function getNpmProductionDependencies(folder) { - let raw; - try { - raw = child_process_1.default.execSync('npm ls --all --omit=dev --parseable', { cwd: folder, encoding: 'utf8', env: { ...process.env, NODE_ENV: 'production' }, stdio: [null, null, null] }); - } - catch (err) { - const regex = /^npm ERR! 
.*$/gm; - let match; - while (match = regex.exec(err.message)) { - if (/ELSPROBLEMS/.test(match[0])) { - continue; - } - else if (/invalid: xterm/.test(match[0])) { - continue; - } - else if (/A complete log of this run/.test(match[0])) { - continue; - } - else { - throw err; - } - } - raw = err.stdout; - } - return raw.split(/\r?\n/).filter(line => { - return !!line.trim() && path_1.default.relative(root, line) !== path_1.default.relative(root, folder); - }); -} -function getProductionDependencies(folderPath) { - const result = getNpmProductionDependencies(folderPath); - // Account for distro npm dependencies - const realFolderPath = fs_1.default.realpathSync(folderPath); - const relativeFolderPath = path_1.default.relative(root, realFolderPath); - const distroFolderPath = `${root}/.build/distro/npm/${relativeFolderPath}`; - if (fs_1.default.existsSync(distroFolderPath)) { - result.push(...getNpmProductionDependencies(distroFolderPath)); - } - return [...new Set(result)]; -} -if (require.main === module) { - console.log(JSON.stringify(getProductionDependencies(root), null, ' ')); -} -//# sourceMappingURL=dependencies.js.map \ No newline at end of file diff --git a/build/lib/dependencies.ts b/build/lib/dependencies.ts index a5bc70088a7..ed7cbfbef02 100644 --- a/build/lib/dependencies.ts +++ b/build/lib/dependencies.ts @@ -6,7 +6,7 @@ import fs from 'fs'; import path from 'path'; import cp from 'child_process'; -const root = fs.realpathSync(path.dirname(path.dirname(__dirname))); +const root = fs.realpathSync(path.dirname(path.dirname(import.meta.dirname))); function getNpmProductionDependencies(folder: string): string[] { let raw: string; @@ -51,6 +51,6 @@ export function getProductionDependencies(folderPath: string): string[] { return [...new Set(result)]; } -if (require.main === module) { +if (import.meta.main) { console.log(JSON.stringify(getProductionDependencies(root), null, ' ')); } diff --git a/build/lib/electron.js b/build/lib/electron.js deleted file mode 100644 index 79f6d515636..00000000000 --- a/build/lib/electron.js +++ /dev/null @@ -1,258 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || (function () { - var ownKeys = function(o) { - ownKeys = Object.getOwnPropertyNames || function (o) { - var ar = []; - for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys(o); - }; - return function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); - __setModuleDefault(result, mod); - return result; - }; -})(); -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.config = void 0; -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const vinyl_fs_1 = __importDefault(require("vinyl-fs")); -const gulp_filter_1 = __importDefault(require("gulp-filter")); -const util = __importStar(require("./util")); -const getVersion_1 = require("./getVersion"); -function isDocumentSuffix(str) { - return str === 'document' || str === 'script' || str === 'file' || str === 'source code'; -} -const root = path_1.default.dirname(path_1.default.dirname(__dirname)); -const product = JSON.parse(fs_1.default.readFileSync(path_1.default.join(root, 'product.json'), 'utf8')); -const commit = (0, getVersion_1.getVersion)(root); -function createTemplate(input) { - return (params) => { - return input.replace(/<%=\s*([^\s]+)\s*%>/g, (match, key) => { - return params[key] || match; - }); - }; -} -const darwinCreditsTemplate = product.darwinCredits && createTemplate(fs_1.default.readFileSync(path_1.default.join(root, product.darwinCredits), 'utf8')); -/** - * Generate a `DarwinDocumentType` given a list of file extensions, an icon name, and an optional suffix or file type name. - * @param extensions A list of file extensions, such as `['bat', 'cmd']` - * @param icon A sentence-cased file type name that matches the lowercase name of a darwin icon resource. - * For example, `'HTML'` instead of `'html'`, or `'Java'` instead of `'java'`. - * This parameter is lowercased before it is used to reference an icon file. - * @param nameOrSuffix An optional suffix or a string to use as the file type. If a suffix is provided, - * it is used with the icon parameter to generate a file type string. If nothing is provided, - * `'document'` is used with the icon parameter to generate file type string. - * - * For example, if you call `darwinBundleDocumentType(..., 'HTML')`, the resulting file type is `"HTML document"`, - * and the `'html'` darwin icon is used. - * - * If you call `darwinBundleDocumentType(..., 'Javascript', 'file')`, the resulting file type is `"Javascript file"`. - * and the `'javascript'` darwin icon is used. - * - * If you call `darwinBundleDocumentType(..., 'bat', 'Windows command script')`, the file type is `"Windows command script"`, - * and the `'bat'` darwin icon is used. - */ -function darwinBundleDocumentType(extensions, icon, nameOrSuffix, utis) { - // If given a suffix, generate a name from it. If not given anything, default to 'document' - if (isDocumentSuffix(nameOrSuffix) || !nameOrSuffix) { - nameOrSuffix = icon.charAt(0).toUpperCase() + icon.slice(1) + ' ' + (nameOrSuffix ?? 
'document'); - } - return { - name: nameOrSuffix, - role: 'Editor', - ostypes: ['TEXT', 'utxt', 'TUTX', '****'], - extensions, - iconFile: 'resources/darwin/' + icon.toLowerCase() + '.icns', - utis - }; -} -/** - * Generate several `DarwinDocumentType`s with unique names and a shared icon. - * @param types A map of file type names to their associated file extensions. - * @param icon A darwin icon resource to use. For example, `'HTML'` would refer to `resources/darwin/html.icns` - * - * Examples: - * ``` - * darwinBundleDocumentTypes({ 'C header file': 'h', 'C source code': 'c' },'c') - * darwinBundleDocumentTypes({ 'React source code': ['jsx', 'tsx'] }, 'react') - * ``` - */ -function darwinBundleDocumentTypes(types, icon) { - return Object.keys(types).map((name) => { - const extensions = types[name]; - return { - name, - role: 'Editor', - ostypes: ['TEXT', 'utxt', 'TUTX', '****'], - extensions: Array.isArray(extensions) ? extensions : [extensions], - iconFile: 'resources/darwin/' + icon + '.icns' - }; - }); -} -const { electronVersion, msBuildId } = util.getElectronVersion(); -exports.config = { - version: electronVersion, - tag: product.electronRepository ? `v${electronVersion}-${msBuildId}` : undefined, - productAppName: product.nameLong, - companyName: 'Microsoft Corporation', - copyright: 'Copyright (C) 2025 Microsoft. All rights reserved', - darwinIcon: 'resources/darwin/code.icns', - darwinBundleIdentifier: product.darwinBundleIdentifier, - darwinApplicationCategoryType: 'public.app-category.developer-tools', - darwinHelpBookFolder: 'VS Code HelpBook', - darwinHelpBookName: 'VS Code HelpBook', - darwinBundleDocumentTypes: [ - ...darwinBundleDocumentTypes({ 'C header file': 'h', 'C source code': 'c' }, 'c'), - ...darwinBundleDocumentTypes({ 'Git configuration file': ['gitattributes', 'gitconfig', 'gitignore'] }, 'config'), - ...darwinBundleDocumentTypes({ 'HTML template document': ['asp', 'aspx', 'cshtml', 'jshtm', 'jsp', 'phtml', 'shtml'] }, 'html'), - darwinBundleDocumentType(['bat', 'cmd'], 'bat', 'Windows command script'), - darwinBundleDocumentType(['bowerrc'], 'Bower'), - darwinBundleDocumentType(['config', 'editorconfig', 'ini', 'cfg'], 'config', 'Configuration file'), - darwinBundleDocumentType(['hh', 'hpp', 'hxx', 'h++'], 'cpp', 'C++ header file'), - darwinBundleDocumentType(['cc', 'cpp', 'cxx', 'c++'], 'cpp', 'C++ source code'), - darwinBundleDocumentType(['m'], 'default', 'Objective-C source code'), - darwinBundleDocumentType(['mm'], 'cpp', 'Objective-C++ source code'), - darwinBundleDocumentType(['cs', 'csx'], 'csharp', 'C# source code'), - darwinBundleDocumentType(['css'], 'css', 'CSS'), - darwinBundleDocumentType(['go'], 'go', 'Go source code'), - darwinBundleDocumentType(['htm', 'html', 'xhtml'], 'HTML'), - darwinBundleDocumentType(['jade'], 'Jade'), - darwinBundleDocumentType(['jav', 'java'], 'Java'), - darwinBundleDocumentType(['js', 'jscsrc', 'jshintrc', 'mjs', 'cjs'], 'Javascript', 'file'), - darwinBundleDocumentType(['json'], 'JSON'), - darwinBundleDocumentType(['less'], 'Less'), - darwinBundleDocumentType(['markdown', 'md', 'mdoc', 'mdown', 'mdtext', 'mdtxt', 'mdwn', 'mkd', 'mkdn'], 'Markdown'), - darwinBundleDocumentType(['php'], 'PHP', 'source code'), - darwinBundleDocumentType(['ps1', 'psd1', 'psm1'], 'Powershell', 'script'), - darwinBundleDocumentType(['py', 'pyi'], 'Python', 'script'), - darwinBundleDocumentType(['gemspec', 'rb', 'erb'], 'Ruby', 'source code'), - darwinBundleDocumentType(['scss', 'sass'], 'SASS', 'file'), - darwinBundleDocumentType(['sql'], 
'SQL', 'script'), - darwinBundleDocumentType(['ts'], 'TypeScript', 'file'), - darwinBundleDocumentType(['tsx', 'jsx'], 'React', 'source code'), - darwinBundleDocumentType(['vue'], 'Vue', 'source code'), - darwinBundleDocumentType(['ascx', 'csproj', 'dtd', 'plist', 'wxi', 'wxl', 'wxs', 'xml', 'xaml'], 'XML'), - darwinBundleDocumentType(['eyaml', 'eyml', 'yaml', 'yml'], 'YAML'), - darwinBundleDocumentType([ - 'bash', 'bash_login', 'bash_logout', 'bash_profile', 'bashrc', - 'profile', 'rhistory', 'rprofile', 'sh', 'zlogin', 'zlogout', - 'zprofile', 'zsh', 'zshenv', 'zshrc' - ], 'Shell', 'script'), - // Default icon with specified names - ...darwinBundleDocumentTypes({ - 'Clojure source code': ['clj', 'cljs', 'cljx', 'clojure'], - 'VS Code workspace file': 'code-workspace', - 'CoffeeScript source code': 'coffee', - 'Comma Separated Values': 'csv', - 'CMake script': 'cmake', - 'Dart script': 'dart', - 'Diff file': 'diff', - 'Dockerfile': 'dockerfile', - 'Gradle file': 'gradle', - 'Groovy script': 'groovy', - 'Makefile': ['makefile', 'mk'], - 'Lua script': 'lua', - 'Pug document': 'pug', - 'Jupyter': 'ipynb', - 'Lockfile': 'lock', - 'Log file': 'log', - 'Plain Text File': 'txt', - 'Xcode project file': 'xcodeproj', - 'Xcode workspace file': 'xcworkspace', - 'Visual Basic script': 'vb', - 'R source code': 'r', - 'Rust source code': 'rs', - 'Restructured Text document': 'rst', - 'LaTeX document': ['tex', 'cls'], - 'F# source code': 'fs', - 'F# signature file': 'fsi', - 'F# script': ['fsx', 'fsscript'], - 'SVG document': ['svg'], - 'TOML document': 'toml', - 'Swift source code': 'swift', - }, 'default'), - // Default icon with default name - darwinBundleDocumentType([ - 'containerfile', 'ctp', 'dot', 'edn', 'handlebars', 'hbs', 'ml', 'mli', - 'pl', 'pl6', 'pm', 'pm6', 'pod', 'pp', 'properties', 'psgi', 'rt', 't' - ], 'default', product.nameLong + ' document'), - // Folder support () - darwinBundleDocumentType([], 'default', 'Folder', ['public.folder']) - ], - darwinBundleURLTypes: [{ - role: 'Viewer', - name: product.nameLong, - urlSchemes: [product.urlProtocol] - }], - darwinForceDarkModeSupport: true, - darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined, - linuxExecutableName: product.applicationName, - winIcon: 'resources/win32/code.ico', - token: process.env['GITHUB_TOKEN'], - repo: product.electronRepository || undefined, - validateChecksum: true, - checksumFile: path_1.default.join(root, 'build', 'checksums', 'electron.txt'), -}; -function getElectron(arch) { - return () => { - const electron = require('@vscode/gulp-electron'); - const json = require('gulp-json-editor'); - const electronOpts = { - ...exports.config, - platform: process.platform, - arch: arch === 'armhf' ? 
'arm' : arch, - ffmpegChromium: false, - keepDefaultApp: true - }; - return vinyl_fs_1.default.src('package.json') - .pipe(json({ name: product.nameShort })) - .pipe(electron(electronOpts)) - .pipe((0, gulp_filter_1.default)(['**', '!**/app/package.json'])) - .pipe(vinyl_fs_1.default.dest('.build/electron')); - }; -} -async function main(arch = process.arch) { - const version = electronVersion; - const electronPath = path_1.default.join(root, '.build', 'electron'); - const versionFile = path_1.default.join(electronPath, 'version'); - const isUpToDate = fs_1.default.existsSync(versionFile) && fs_1.default.readFileSync(versionFile, 'utf8') === `${version}`; - if (!isUpToDate) { - await util.rimraf(electronPath)(); - await util.streamToPromise(getElectron(arch)()); - } -} -if (require.main === module) { - main(process.argv[2]).catch(err => { - console.error(err); - process.exit(1); - }); -} -//# sourceMappingURL=electron.js.map \ No newline at end of file diff --git a/build/lib/electron.ts b/build/lib/electron.ts index 08ba68e1b89..8cc36de49ea 100644 --- a/build/lib/electron.ts +++ b/build/lib/electron.ts @@ -7,8 +7,10 @@ import fs from 'fs'; import path from 'path'; import vfs from 'vinyl-fs'; import filter from 'gulp-filter'; -import * as util from './util'; -import { getVersion } from './getVersion'; +import * as util from './util.ts'; +import { getVersion } from './getVersion.ts'; +import electron from '@vscode/gulp-electron'; +import json from 'gulp-json-editor'; type DarwinDocumentSuffix = 'document' | 'script' | 'file' | 'source code'; type DarwinDocumentType = { @@ -24,7 +26,7 @@ function isDocumentSuffix(str?: string): str is DarwinDocumentSuffix { return str === 'document' || str === 'script' || str === 'file' || str === 'source code'; } -const root = path.dirname(path.dirname(__dirname)); +const root = path.dirname(path.dirname(import.meta.dirname)); const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8')); const commit = getVersion(root); @@ -205,9 +207,6 @@ export const config = { function getElectron(arch: string): () => NodeJS.ReadWriteStream { return () => { - const electron = require('@vscode/gulp-electron'); - const json = require('gulp-json-editor') as typeof import('gulp-json-editor'); - const electronOpts = { ...config, platform: process.platform, @@ -236,7 +235,7 @@ async function main(arch: string = process.arch): Promise { } } -if (require.main === module) { +if (import.meta.main) { main(process.argv[2]).catch(err => { console.error(err); process.exit(1); diff --git a/build/lib/extensions.js b/build/lib/extensions.js deleted file mode 100644 index e3736888924..00000000000 --- a/build/lib/extensions.js +++ /dev/null @@ -1,621 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || (function () { - var ownKeys = function(o) { - ownKeys = Object.getOwnPropertyNames || function (o) { - var ar = []; - for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys(o); - }; - return function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); - __setModuleDefault(result, mod); - return result; - }; -})(); -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.fromMarketplace = fromMarketplace; -exports.fromVsix = fromVsix; -exports.fromGithub = fromGithub; -exports.packageNonNativeLocalExtensionsStream = packageNonNativeLocalExtensionsStream; -exports.packageNativeLocalExtensionsStream = packageNativeLocalExtensionsStream; -exports.packageAllLocalExtensionsStream = packageAllLocalExtensionsStream; -exports.packageMarketplaceExtensionsStream = packageMarketplaceExtensionsStream; -exports.scanBuiltinExtensions = scanBuiltinExtensions; -exports.translatePackageJSON = translatePackageJSON; -exports.webpackExtensions = webpackExtensions; -exports.buildExtensionMedia = buildExtensionMedia; -const event_stream_1 = __importDefault(require("event-stream")); -const fs_1 = __importDefault(require("fs")); -const child_process_1 = __importDefault(require("child_process")); -const glob_1 = __importDefault(require("glob")); -const gulp_1 = __importDefault(require("gulp")); -const path_1 = __importDefault(require("path")); -const crypto_1 = __importDefault(require("crypto")); -const vinyl_1 = __importDefault(require("vinyl")); -const stats_1 = require("./stats"); -const util2 = __importStar(require("./util")); -const gulp_filter_1 = __importDefault(require("gulp-filter")); -const gulp_rename_1 = __importDefault(require("gulp-rename")); -const fancy_log_1 = __importDefault(require("fancy-log")); -const ansi_colors_1 = __importDefault(require("ansi-colors")); -const gulp_buffer_1 = __importDefault(require("gulp-buffer")); -const jsoncParser = __importStar(require("jsonc-parser")); -const dependencies_1 = require("./dependencies"); -const builtInExtensions_1 = require("./builtInExtensions"); -const getVersion_1 = require("./getVersion"); -const fetch_1 = require("./fetch"); -const vzip = require('gulp-vinyl-zip'); -const root = path_1.default.dirname(path_1.default.dirname(__dirname)); -const commit = (0, getVersion_1.getVersion)(root); -const sourceMappingURLBase = `https://main.vscode-cdn.net/sourcemaps/${commit}`; -function minifyExtensionResources(input) { - const jsonFilter = (0, gulp_filter_1.default)(['**/*.json', '**/*.code-snippets'], { restore: true }); - return input - .pipe(jsonFilter) - .pipe((0, gulp_buffer_1.default)()) - .pipe(event_stream_1.default.mapSync((f) => { - const errors = []; - const value = 
jsoncParser.parse(f.contents.toString('utf8'), errors, { allowTrailingComma: true }); - if (errors.length === 0) { - // file parsed OK => just stringify to drop whitespace and comments - f.contents = Buffer.from(JSON.stringify(value)); - } - return f; - })) - .pipe(jsonFilter.restore); -} -function updateExtensionPackageJSON(input, update) { - const packageJsonFilter = (0, gulp_filter_1.default)('extensions/*/package.json', { restore: true }); - return input - .pipe(packageJsonFilter) - .pipe((0, gulp_buffer_1.default)()) - .pipe(event_stream_1.default.mapSync((f) => { - const data = JSON.parse(f.contents.toString('utf8')); - f.contents = Buffer.from(JSON.stringify(update(data))); - return f; - })) - .pipe(packageJsonFilter.restore); -} -function fromLocal(extensionPath, forWeb, disableMangle) { - const webpackConfigFileName = forWeb - ? `extension-browser.webpack.config.js` - : `extension.webpack.config.js`; - const isWebPacked = fs_1.default.existsSync(path_1.default.join(extensionPath, webpackConfigFileName)); - let input = isWebPacked - ? fromLocalWebpack(extensionPath, webpackConfigFileName, disableMangle) - : fromLocalNormal(extensionPath); - if (isWebPacked) { - input = updateExtensionPackageJSON(input, (data) => { - delete data.scripts; - delete data.dependencies; - delete data.devDependencies; - if (data.main) { - data.main = data.main.replace('/out/', '/dist/'); - } - return data; - }); - } - return input; -} -function fromLocalWebpack(extensionPath, webpackConfigFileName, disableMangle) { - const vsce = require('@vscode/vsce'); - const webpack = require('webpack'); - const webpackGulp = require('webpack-stream'); - const result = event_stream_1.default.through(); - const packagedDependencies = []; - const packageJsonConfig = require(path_1.default.join(extensionPath, 'package.json')); - if (packageJsonConfig.dependencies) { - const webpackRootConfig = require(path_1.default.join(extensionPath, webpackConfigFileName)).default; - for (const key in webpackRootConfig.externals) { - if (key in packageJsonConfig.dependencies) { - packagedDependencies.push(key); - } - } - } - // TODO: add prune support based on packagedDependencies to vsce.PackageManager.Npm similar - // to vsce.PackageManager.Yarn. - // A static analysis showed there are no webpack externals that are dependencies of the current - // local extensions so we can use the vsce.PackageManager.None config to ignore dependencies list - // as a temporary workaround. - vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.None, packagedDependencies }).then(fileNames => { - const files = fileNames - .map(fileName => path_1.default.join(extensionPath, fileName)) - .map(filePath => new vinyl_1.default({ - path: filePath, - stat: fs_1.default.statSync(filePath), - base: extensionPath, - contents: fs_1.default.createReadStream(filePath) - })); - // check for a webpack configuration files, then invoke webpack - // and merge its output with the files stream. 
- const webpackConfigLocations = glob_1.default.sync(path_1.default.join(extensionPath, '**', webpackConfigFileName), { ignore: ['**/node_modules'] }); - const webpackStreams = webpackConfigLocations.flatMap(webpackConfigPath => { - const webpackDone = (err, stats) => { - (0, fancy_log_1.default)(`Bundled extension: ${ansi_colors_1.default.yellow(path_1.default.join(path_1.default.basename(extensionPath), path_1.default.relative(extensionPath, webpackConfigPath)))}...`); - if (err) { - result.emit('error', err); - } - const { compilation } = stats; - if (compilation.errors.length > 0) { - result.emit('error', compilation.errors.join('\n')); - } - if (compilation.warnings.length > 0) { - result.emit('error', compilation.warnings.join('\n')); - } - }; - const exportedConfig = require(webpackConfigPath).default; - return (Array.isArray(exportedConfig) ? exportedConfig : [exportedConfig]).map(config => { - const webpackConfig = { - ...config, - ...{ mode: 'production' } - }; - if (disableMangle) { - if (Array.isArray(config.module.rules)) { - for (const rule of config.module.rules) { - if (Array.isArray(rule.use)) { - for (const use of rule.use) { - if (String(use.loader).endsWith('mangle-loader.js')) { - use.options.disabled = true; - } - } - } - } - } - } - const relativeOutputPath = path_1.default.relative(extensionPath, webpackConfig.output.path); - return webpackGulp(webpackConfig, webpack, webpackDone) - .pipe(event_stream_1.default.through(function (data) { - data.stat = data.stat || {}; - data.base = extensionPath; - this.emit('data', data); - })) - .pipe(event_stream_1.default.through(function (data) { - // source map handling: - // * rewrite sourceMappingURL - // * save to disk so that upload-task picks this up - if (path_1.default.extname(data.basename) === '.js') { - const contents = data.contents.toString('utf8'); - data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) { - return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path_1.default.basename(extensionPath)}/${relativeOutputPath}/${g1}`; - }), 'utf8'); - } - this.emit('data', data); - })); - }); - }); - event_stream_1.default.merge(...webpackStreams, event_stream_1.default.readArray(files)) - // .pipe(es.through(function (data) { - // // debug - // console.log('out', data.path, data.contents.length); - // this.emit('data', data); - // })) - .pipe(result); - }).catch(err => { - console.error(extensionPath); - console.error(packagedDependencies); - result.emit('error', err); - }); - return result.pipe((0, stats_1.createStatsStream)(path_1.default.basename(extensionPath))); -} -function fromLocalNormal(extensionPath) { - const vsce = require('@vscode/vsce'); - const result = event_stream_1.default.through(); - vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Npm }) - .then(fileNames => { - const files = fileNames - .map(fileName => path_1.default.join(extensionPath, fileName)) - .map(filePath => new vinyl_1.default({ - path: filePath, - stat: fs_1.default.statSync(filePath), - base: extensionPath, - contents: fs_1.default.createReadStream(filePath) - })); - event_stream_1.default.readArray(files).pipe(result); - }) - .catch(err => result.emit('error', err)); - return result.pipe((0, stats_1.createStatsStream)(path_1.default.basename(extensionPath))); -} -const userAgent = 'VSCode Build'; -const baseHeaders = { - 'X-Market-Client-Id': 'VSCode Build', - 'User-Agent': userAgent, - 'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2', -}; 
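One pattern in this file is easy to miss when read in flattened form: a gulp-filter created with { restore: true } narrows the stream to a subset of files (package.json, *.json, and so on), those files are buffered and rewritten, and .restore then merges every other file back in unchanged. A hedged sketch of that round-trip, with illustrative globs and an illustrative rewrite:

	import gulp from 'gulp';
	import filter from 'gulp-filter';
	import buffer from 'gulp-buffer';
	import es from 'event-stream';
	import type File from 'vinyl';

	const packageJsonFilter = filter('**/package.json', { restore: true });

	gulp.src('extensions/**', { base: '.' })
		.pipe(packageJsonFilter) // only package.json files continue past this point
		.pipe(buffer()) // make sure contents are a Buffer, not a stream
		.pipe(es.mapSync((f: File) => {
			const data = JSON.parse(f.contents!.toString('utf8'));
			delete data.scripts; // the same kind of rewrite the build applies
			f.contents = Buffer.from(JSON.stringify(data));
			return f;
		}))
		.pipe(packageJsonFilter.restore) // everything filtered out rejoins the stream here
		.pipe(gulp.dest('out'));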
-function fromMarketplace(serviceUrl, { name: extensionName, version, sha256, metadata }) { - const json = require('gulp-json-editor'); - const [publisher, name] = extensionName.split('.'); - const url = `${serviceUrl}/publishers/${publisher}/vsextensions/${name}/${version}/vspackage`; - (0, fancy_log_1.default)('Downloading extension:', ansi_colors_1.default.yellow(`${extensionName}@${version}`), '...'); - const packageJsonFilter = (0, gulp_filter_1.default)('package.json', { restore: true }); - return (0, fetch_1.fetchUrls)('', { - base: url, - nodeFetchOptions: { - headers: baseHeaders - }, - checksumSha256: sha256 - }) - .pipe(vzip.src()) - .pipe((0, gulp_filter_1.default)('extension/**')) - .pipe((0, gulp_rename_1.default)(p => p.dirname = p.dirname.replace(/^extension\/?/, ''))) - .pipe(packageJsonFilter) - .pipe((0, gulp_buffer_1.default)()) - .pipe(json({ __metadata: metadata })) - .pipe(packageJsonFilter.restore); -} -function fromVsix(vsixPath, { name: extensionName, version, sha256, metadata }) { - const json = require('gulp-json-editor'); - (0, fancy_log_1.default)('Using local VSIX for extension:', ansi_colors_1.default.yellow(`${extensionName}@${version}`), '...'); - const packageJsonFilter = (0, gulp_filter_1.default)('package.json', { restore: true }); - return gulp_1.default.src(vsixPath) - .pipe((0, gulp_buffer_1.default)()) - .pipe(event_stream_1.default.mapSync((f) => { - const hash = crypto_1.default.createHash('sha256'); - hash.update(f.contents); - const checksum = hash.digest('hex'); - if (checksum !== sha256) { - throw new Error(`Checksum mismatch for ${vsixPath} (expected ${sha256}, actual ${checksum}))`); - } - return f; - })) - .pipe(vzip.src()) - .pipe((0, gulp_filter_1.default)('extension/**')) - .pipe((0, gulp_rename_1.default)(p => p.dirname = p.dirname.replace(/^extension\/?/, ''))) - .pipe(packageJsonFilter) - .pipe((0, gulp_buffer_1.default)()) - .pipe(json({ __metadata: metadata })) - .pipe(packageJsonFilter.restore); -} -function fromGithub({ name, version, repo, sha256, metadata }) { - const json = require('gulp-json-editor'); - (0, fancy_log_1.default)('Downloading extension from GH:', ansi_colors_1.default.yellow(`${name}@${version}`), '...'); - const packageJsonFilter = (0, gulp_filter_1.default)('package.json', { restore: true }); - return (0, fetch_1.fetchGithub)(new URL(repo).pathname, { - version, - name: name => name.endsWith('.vsix'), - checksumSha256: sha256 - }) - .pipe((0, gulp_buffer_1.default)()) - .pipe(vzip.src()) - .pipe((0, gulp_filter_1.default)('extension/**')) - .pipe((0, gulp_rename_1.default)(p => p.dirname = p.dirname.replace(/^extension\/?/, ''))) - .pipe(packageJsonFilter) - .pipe((0, gulp_buffer_1.default)()) - .pipe(json({ __metadata: metadata })) - .pipe(packageJsonFilter.restore); -} -/** - * All extensions that are known to have some native component and thus must be built on the - * platform that is being built. 
- */ -const nativeExtensions = [ - 'microsoft-authentication', -]; -const excludedExtensions = [ - 'vscode-api-tests', - 'vscode-colorize-tests', - 'vscode-colorize-perf-tests', - 'vscode-test-resolver', - 'ms-vscode.node-debug', - 'ms-vscode.node-debug2', -]; -const marketplaceWebExtensionsExclude = new Set([ - 'ms-vscode.node-debug', - 'ms-vscode.node-debug2', - 'ms-vscode.js-debug-companion', - 'ms-vscode.js-debug', - 'ms-vscode.vscode-js-profile-table' -]); -const productJson = JSON.parse(fs_1.default.readFileSync(path_1.default.join(__dirname, '../../product.json'), 'utf8')); -const builtInExtensions = productJson.builtInExtensions || []; -const webBuiltInExtensions = productJson.webBuiltInExtensions || []; -/** - * Loosely based on `getExtensionKind` from `src/vs/workbench/services/extensions/common/extensionManifestPropertiesService.ts` - */ -function isWebExtension(manifest) { - if (Boolean(manifest.browser)) { - return true; - } - if (Boolean(manifest.main)) { - return false; - } - // neither browser nor main - if (typeof manifest.extensionKind !== 'undefined') { - const extensionKind = Array.isArray(manifest.extensionKind) ? manifest.extensionKind : [manifest.extensionKind]; - if (extensionKind.indexOf('web') >= 0) { - return true; - } - } - if (typeof manifest.contributes !== 'undefined') { - for (const id of ['debuggers', 'terminal', 'typescriptServerPlugins']) { - if (manifest.contributes.hasOwnProperty(id)) { - return false; - } - } - } - return true; -} -/** - * Package local extensions that are known to not have native dependencies. Mutually exclusive to {@link packageNativeLocalExtensionsStream}. - * @param forWeb build the extensions that have web targets - * @param disableMangle disable the mangler - * @returns a stream - */ -function packageNonNativeLocalExtensionsStream(forWeb, disableMangle) { - return doPackageLocalExtensionsStream(forWeb, disableMangle, false); -} -/** - * Package local extensions that are known to have native dependencies. Mutually exclusive to {@link packageNonNativeLocalExtensionsStream}. - * @note it's possible that the extension does not have native dependencies for the current platform, especially if building for the web, - * but we simplify the logic here by having a flat list of extensions (See {@link nativeExtensions}) that are known to have native - * dependencies on some platform and thus should be packaged on the platform that they are building for. - * @param forWeb build the extensions that have web targets - * @param disableMangle disable the mangler - * @returns a stream - */ -function packageNativeLocalExtensionsStream(forWeb, disableMangle) { - return doPackageLocalExtensionsStream(forWeb, disableMangle, true); -} -/** - * Package all the local extensions... both those that are known to have native dependencies and those that are not. 
- * @param forWeb build the extensions that have web targets - * @param disableMangle disable the mangler - * @returns a stream - */ -function packageAllLocalExtensionsStream(forWeb, disableMangle) { - return event_stream_1.default.merge([ - packageNonNativeLocalExtensionsStream(forWeb, disableMangle), - packageNativeLocalExtensionsStream(forWeb, disableMangle) - ]); -} -/** - * @param forWeb build the extensions that have web targets - * @param disableMangle disable the mangler - * @param native build the extensions that are marked as having native dependencies - */ -function doPackageLocalExtensionsStream(forWeb, disableMangle, native) { - const nativeExtensionsSet = new Set(nativeExtensions); - const localExtensionsDescriptions = (glob_1.default.sync('extensions/*/package.json') - .map(manifestPath => { - const absoluteManifestPath = path_1.default.join(root, manifestPath); - const extensionPath = path_1.default.dirname(path_1.default.join(root, manifestPath)); - const extensionName = path_1.default.basename(extensionPath); - return { name: extensionName, path: extensionPath, manifestPath: absoluteManifestPath }; - }) - .filter(({ name }) => native ? nativeExtensionsSet.has(name) : !nativeExtensionsSet.has(name)) - .filter(({ name }) => excludedExtensions.indexOf(name) === -1) - .filter(({ name }) => builtInExtensions.every(b => b.name !== name)) - .filter(({ manifestPath }) => (forWeb ? isWebExtension(require(manifestPath)) : true))); - const localExtensionsStream = minifyExtensionResources(event_stream_1.default.merge(...localExtensionsDescriptions.map(extension => { - return fromLocal(extension.path, forWeb, disableMangle) - .pipe((0, gulp_rename_1.default)(p => p.dirname = `extensions/${extension.name}/${p.dirname}`)); - }))); - let result; - if (forWeb) { - result = localExtensionsStream; - } - else { - // also include shared production node modules - const productionDependencies = (0, dependencies_1.getProductionDependencies)('extensions/'); - const dependenciesSrc = productionDependencies.map(d => path_1.default.relative(root, d)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`]).flat(); - result = event_stream_1.default.merge(localExtensionsStream, gulp_1.default.src(dependenciesSrc, { base: '.' }) - .pipe(util2.cleanNodeModules(path_1.default.join(root, 'build', '.moduleignore'))) - .pipe(util2.cleanNodeModules(path_1.default.join(root, 'build', `.moduleignore.${process.platform}`)))); - } - return (result - .pipe(util2.setExecutableBit(['**/*.sh']))); -} -function packageMarketplaceExtensionsStream(forWeb) { - const marketplaceExtensionsDescriptions = [ - ...builtInExtensions.filter(({ name }) => (forWeb ? !marketplaceWebExtensionsExclude.has(name) : true)), - ...(forWeb ? 
webBuiltInExtensions : []) - ]; - const marketplaceExtensionsStream = minifyExtensionResources(event_stream_1.default.merge(...marketplaceExtensionsDescriptions - .map(extension => { - const src = (0, builtInExtensions_1.getExtensionStream)(extension).pipe((0, gulp_rename_1.default)(p => p.dirname = `extensions/${p.dirname}`)); - return updateExtensionPackageJSON(src, (data) => { - delete data.scripts; - delete data.dependencies; - delete data.devDependencies; - return data; - }); - }))); - return (marketplaceExtensionsStream - .pipe(util2.setExecutableBit(['**/*.sh']))); -} -function scanBuiltinExtensions(extensionsRoot, exclude = []) { - const scannedExtensions = []; - try { - const extensionsFolders = fs_1.default.readdirSync(extensionsRoot); - for (const extensionFolder of extensionsFolders) { - if (exclude.indexOf(extensionFolder) >= 0) { - continue; - } - const packageJSONPath = path_1.default.join(extensionsRoot, extensionFolder, 'package.json'); - if (!fs_1.default.existsSync(packageJSONPath)) { - continue; - } - const packageJSON = JSON.parse(fs_1.default.readFileSync(packageJSONPath).toString('utf8')); - if (!isWebExtension(packageJSON)) { - continue; - } - const children = fs_1.default.readdirSync(path_1.default.join(extensionsRoot, extensionFolder)); - const packageNLSPath = children.filter(child => child === 'package.nls.json')[0]; - const packageNLS = packageNLSPath ? JSON.parse(fs_1.default.readFileSync(path_1.default.join(extensionsRoot, extensionFolder, packageNLSPath)).toString()) : undefined; - const readme = children.filter(child => /^readme(\.txt|\.md|)$/i.test(child))[0]; - const changelog = children.filter(child => /^changelog(\.txt|\.md|)$/i.test(child))[0]; - scannedExtensions.push({ - extensionPath: extensionFolder, - packageJSON, - packageNLS, - readmePath: readme ? path_1.default.join(extensionFolder, readme) : undefined, - changelogPath: changelog ? path_1.default.join(extensionFolder, changelog) : undefined, - }); - } - return scannedExtensions; - } - catch (ex) { - return scannedExtensions; - } -} -function translatePackageJSON(packageJSON, packageNLSPath) { - const CharCode_PC = '%'.charCodeAt(0); - const packageNls = JSON.parse(fs_1.default.readFileSync(packageNLSPath).toString()); - const translate = (obj) => { - for (const key in obj) { - const val = obj[key]; - if (Array.isArray(val)) { - val.forEach(translate); - } - else if (val && typeof val === 'object') { - translate(val); - } - else if (typeof val === 'string' && val.charCodeAt(0) === CharCode_PC && val.charCodeAt(val.length - 1) === CharCode_PC) { - const translated = packageNls[val.substr(1, val.length - 2)]; - if (translated) { - obj[key] = typeof translated === 'string' ? translated : (typeof translated.message === 'string' ? translated.message : val); - } - } - } - }; - translate(packageJSON); - return packageJSON; -} -const extensionsPath = path_1.default.join(root, 'extensions'); -// Additional projects to run esbuild on. 
These typically build code for webviews -const esbuildMediaScripts = [ - 'ipynb/esbuild.mjs', - 'markdown-language-features/esbuild-notebook.mjs', - 'markdown-language-features/esbuild-preview.mjs', - 'markdown-math/esbuild.mjs', - 'mermaid-chat-features/esbuild-chat-webview.mjs', - 'notebook-renderers/esbuild.mjs', - 'simple-browser/esbuild-preview.mjs', -]; -async function webpackExtensions(taskName, isWatch, webpackConfigLocations) { - const webpack = require('webpack'); - const webpackConfigs = []; - for (const { configPath, outputRoot } of webpackConfigLocations) { - const configOrFnOrArray = require(configPath).default; - function addConfig(configOrFnOrArray) { - for (const configOrFn of Array.isArray(configOrFnOrArray) ? configOrFnOrArray : [configOrFnOrArray]) { - const config = typeof configOrFn === 'function' ? configOrFn({}, {}) : configOrFn; - if (outputRoot) { - config.output.path = path_1.default.join(outputRoot, path_1.default.relative(path_1.default.dirname(configPath), config.output.path)); - } - webpackConfigs.push(config); - } - } - addConfig(configOrFnOrArray); - } - function reporter(fullStats) { - if (Array.isArray(fullStats.children)) { - for (const stats of fullStats.children) { - const outputPath = stats.outputPath; - if (outputPath) { - const relativePath = path_1.default.relative(extensionsPath, outputPath).replace(/\\/g, '/'); - const match = relativePath.match(/[^\/]+(\/server|\/client)?/); - (0, fancy_log_1.default)(`Finished ${ansi_colors_1.default.green(taskName)} ${ansi_colors_1.default.cyan(match[0])} with ${stats.errors.length} errors.`); - } - if (Array.isArray(stats.errors)) { - stats.errors.forEach((error) => { - fancy_log_1.default.error(error); - }); - } - if (Array.isArray(stats.warnings)) { - stats.warnings.forEach((warning) => { - fancy_log_1.default.warn(warning); - }); - } - } - } - } - return new Promise((resolve, reject) => { - if (isWatch) { - webpack(webpackConfigs).watch({}, (err, stats) => { - if (err) { - reject(); - } - else { - reporter(stats?.toJson()); - } - }); - } - else { - webpack(webpackConfigs).run((err, stats) => { - if (err) { - fancy_log_1.default.error(err); - reject(); - } - else { - reporter(stats?.toJson()); - resolve(); - } - }); - } - }); -} -async function esbuildExtensions(taskName, isWatch, scripts) { - function reporter(stdError, script) { - const matches = (stdError || '').match(/\> (.+): error: (.+)?/g); - (0, fancy_log_1.default)(`Finished ${ansi_colors_1.default.green(taskName)} ${script} with ${matches ? matches.length : 0} errors.`); - for (const match of matches || []) { - fancy_log_1.default.error(match); - } - } - const tasks = scripts.map(({ script, outputRoot }) => { - return new Promise((resolve, reject) => { - const args = [script]; - if (isWatch) { - args.push('--watch'); - } - if (outputRoot) { - args.push('--outputRoot', outputRoot); - } - const proc = child_process_1.default.execFile(process.argv[0], args, {}, (error, _stdout, stderr) => { - if (error) { - return reject(error); - } - reporter(stderr, script); - return resolve(); - }); - proc.stdout.on('data', (data) => { - (0, fancy_log_1.default)(`${ansi_colors_1.default.green(taskName)}: ${data.toString('utf8')}`); - }); - }); - }); - return Promise.all(tasks); -} -async function buildExtensionMedia(isWatch, outputRoot) { - return esbuildExtensions('esbuilding extension media', isWatch, esbuildMediaScripts.map(p => ({ - script: path_1.default.join(extensionsPath, p), - outputRoot: outputRoot ? 
path_1.default.join(root, outputRoot, path_1.default.dirname(p)) : undefined - }))); -} -//# sourceMappingURL=extensions.js.map \ No newline at end of file diff --git a/build/lib/extensions.ts b/build/lib/extensions.ts index 4779ddba03a..b8a601bf506 100644 --- a/build/lib/extensions.ts +++ b/build/lib/extensions.ts @@ -12,8 +12,8 @@ import path from 'path'; import crypto from 'crypto'; import { Stream } from 'stream'; import File from 'vinyl'; -import { createStatsStream } from './stats'; -import * as util2 from './util'; +import { createStatsStream } from './stats.ts'; +import * as util2 from './util.ts'; import filter from 'gulp-filter'; import rename from 'gulp-rename'; import fancyLog from 'fancy-log'; @@ -21,13 +21,16 @@ import ansiColors from 'ansi-colors'; import buffer from 'gulp-buffer'; import * as jsoncParser from 'jsonc-parser'; import webpack from 'webpack'; -import { getProductionDependencies } from './dependencies'; -import { IExtensionDefinition, getExtensionStream } from './builtInExtensions'; -import { getVersion } from './getVersion'; -import { fetchUrls, fetchGithub } from './fetch'; -const vzip = require('gulp-vinyl-zip'); +import { getProductionDependencies } from './dependencies.ts'; +import { type IExtensionDefinition, getExtensionStream } from './builtInExtensions.ts'; +import { getVersion } from './getVersion.ts'; +import { fetchUrls, fetchGithub } from './fetch.ts'; +import vzip from 'gulp-vinyl-zip'; -const root = path.dirname(path.dirname(__dirname)); +import { createRequire } from 'module'; +const require = createRequire(import.meta.url); + +const root = path.dirname(path.dirname(import.meta.dirname)); const commit = getVersion(root); const sourceMappingURLBase = `https://main.vscode-cdn.net/sourcemaps/${commit}`; @@ -122,11 +125,10 @@ function fromLocalWebpack(extensionPath: string, webpackConfigFileName: string, // check for a webpack configuration files, then invoke webpack // and merge its output with the files stream. 
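// The imports hunk above swaps the CommonJS idioms of the old compiled output for
// ESM: explicit `.ts` specifiers, `import.meta.dirname` instead of `__dirname`,
// and `createRequire(import.meta.url)` for packages that are still loaded lazily
// via `require()`. A minimal standalone sketch of that pattern follows; the
// `loadJsonEditor` helper and `repoRoot` name are illustrative, not part of this diff.
import path from 'path';
import { createRequire } from 'module';

const require = createRequire(import.meta.url);

// `__dirname` does not exist in ESM; Node >= 20.11 exposes `import.meta.dirname` instead.
const repoRoot = path.dirname(path.dirname(import.meta.dirname));

export function loadJsonEditor() {
	// Deferred require of a CommonJS-only gulp plugin, mirroring the lazy
	// `require('gulp-json-editor')` calls in the code above.
	return require('gulp-json-editor');
}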
- const webpackConfigLocations = (glob.sync( + const webpackConfigLocations = (glob.sync( path.join(extensionPath, '**', webpackConfigFileName), { ignore: ['**/node_modules'] } - )); - + ) as string[]); const webpackStreams = webpackConfigLocations.flatMap(webpackConfigPath => { const webpackDone = (err: any, stats: any) => { @@ -175,7 +177,7 @@ function fromLocalWebpack(extensionPath: string, webpackConfigFileName: string, // * rewrite sourceMappingURL // * save to disk so that upload-task picks this up if (path.extname(data.basename) === '.js') { - const contents = (data.contents).toString('utf8'); + const contents = (data.contents as Buffer).toString('utf8'); data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) { return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`; }), 'utf8'); @@ -333,7 +335,7 @@ const marketplaceWebExtensionsExclude = new Set([ 'ms-vscode.vscode-js-profile-table' ]); -const productJson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8')); +const productJson = JSON.parse(fs.readFileSync(path.join(import.meta.dirname, '../../product.json'), 'utf8')); const builtInExtensions: IExtensionDefinition[] = productJson.builtInExtensions || []; const webBuiltInExtensions: IExtensionDefinition[] = productJson.webBuiltInExtensions || []; @@ -417,7 +419,7 @@ export function packageAllLocalExtensionsStream(forWeb: boolean, disableMangle: function doPackageLocalExtensionsStream(forWeb: boolean, disableMangle: boolean, native: boolean): Stream { const nativeExtensionsSet = new Set(nativeExtensions); const localExtensionsDescriptions = ( - (glob.sync('extensions/*/package.json')) + (glob.sync('extensions/*/package.json') as string[]) .map(manifestPath => { const absoluteManifestPath = path.join(root, manifestPath); const extensionPath = path.dirname(path.join(root, manifestPath)); diff --git a/build/lib/fetch.js b/build/lib/fetch.js deleted file mode 100644 index b0876cda75a..00000000000 --- a/build/lib/fetch.js +++ /dev/null @@ -1,141 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.fetchUrls = fetchUrls; -exports.fetchUrl = fetchUrl; -exports.fetchGithub = fetchGithub; -const event_stream_1 = __importDefault(require("event-stream")); -const vinyl_1 = __importDefault(require("vinyl")); -const fancy_log_1 = __importDefault(require("fancy-log")); -const ansi_colors_1 = __importDefault(require("ansi-colors")); -const crypto_1 = __importDefault(require("crypto")); -const through2_1 = __importDefault(require("through2")); -function fetchUrls(urls, options) { - if (options === undefined) { - options = {}; - } - if (typeof options.base !== 'string' && options.base !== null) { - options.base = '/'; - } - if (!Array.isArray(urls)) { - urls = [urls]; - } - return event_stream_1.default.readArray(urls).pipe(event_stream_1.default.map((data, cb) => { - const url = [options.base, data].join(''); - fetchUrl(url, options).then(file => { - cb(undefined, file); - }, error => { - cb(error); - }); - })); -} -async function fetchUrl(url, options, retries = 10, retryDelay = 1000) { - const verbose = !!options.verbose || !!process.env['CI'] || !!process.env['BUILD_ARTIFACTSTAGINGDIRECTORY'] || !!process.env['GITHUB_WORKSPACE']; - try { - let startTime = 0; - if (verbose) { - (0, fancy_log_1.default)(`Start fetching ${ansi_colors_1.default.magenta(url)}${retries !== 10 ? ` (${10 - retries} retry)` : ''}`); - startTime = new Date().getTime(); - } - const controller = new AbortController(); - const timeout = setTimeout(() => controller.abort(), 30 * 1000); - try { - const response = await fetch(url, { - ...options.nodeFetchOptions, - signal: controller.signal - }); - if (verbose) { - (0, fancy_log_1.default)(`Fetch completed: Status ${response.status}. 
Took ${ansi_colors_1.default.magenta(`${new Date().getTime() - startTime} ms`)}`); - } - if (response.ok && (response.status >= 200 && response.status < 300)) { - const contents = Buffer.from(await response.arrayBuffer()); - if (options.checksumSha256) { - const actualSHA256Checksum = crypto_1.default.createHash('sha256').update(contents).digest('hex'); - if (actualSHA256Checksum !== options.checksumSha256) { - throw new Error(`Checksum mismatch for ${ansi_colors_1.default.cyan(url)} (expected ${options.checksumSha256}, actual ${actualSHA256Checksum}))`); - } - else if (verbose) { - (0, fancy_log_1.default)(`Verified SHA256 checksums match for ${ansi_colors_1.default.cyan(url)}`); - } - } - else if (verbose) { - (0, fancy_log_1.default)(`Skipping checksum verification for ${ansi_colors_1.default.cyan(url)} because no expected checksum was provided`); - } - if (verbose) { - (0, fancy_log_1.default)(`Fetched response body buffer: ${ansi_colors_1.default.magenta(`${contents.byteLength} bytes`)}`); - } - return new vinyl_1.default({ - cwd: '/', - base: options.base, - path: url, - contents - }); - } - let err = `Request ${ansi_colors_1.default.magenta(url)} failed with status code: ${response.status}`; - if (response.status === 403) { - err += ' (you may be rate limited)'; - } - throw new Error(err); - } - finally { - clearTimeout(timeout); - } - } - catch (e) { - if (verbose) { - (0, fancy_log_1.default)(`Fetching ${ansi_colors_1.default.cyan(url)} failed: ${e}`); - } - if (retries > 0) { - await new Promise(resolve => setTimeout(resolve, retryDelay)); - return fetchUrl(url, options, retries - 1, retryDelay); - } - throw e; - } -} -const ghApiHeaders = { - Accept: 'application/vnd.github.v3+json', - 'User-Agent': 'VSCode Build', -}; -if (process.env.GITHUB_TOKEN) { - ghApiHeaders.Authorization = 'Basic ' + Buffer.from(process.env.GITHUB_TOKEN).toString('base64'); -} -const ghDownloadHeaders = { - ...ghApiHeaders, - Accept: 'application/octet-stream', -}; -/** - * @param repo for example `Microsoft/vscode` - * @param version for example `16.17.1` - must be a valid releases tag - * @param assetName for example (name) => name === `win-x64-node.exe` - must be an asset that exists - * @returns a stream with the asset as file - */ -function fetchGithub(repo, options) { - return fetchUrls(`/repos/${repo.replace(/^\/|\/$/g, '')}/releases/tags/v${options.version}`, { - base: 'https://api.github.com', - verbose: options.verbose, - nodeFetchOptions: { headers: ghApiHeaders } - }).pipe(through2_1.default.obj(async function (file, _enc, callback) { - const assetFilter = typeof options.name === 'string' ? (name) => name === options.name : options.name; - const asset = JSON.parse(file.contents.toString()).assets.find((a) => assetFilter(a.name)); - if (!asset) { - return callback(new Error(`Could not find asset in release of ${repo} @ ${options.version}`)); - } - try { - callback(null, await fetchUrl(asset.url, { - nodeFetchOptions: { headers: ghDownloadHeaders }, - verbose: options.verbose, - checksumSha256: options.checksumSha256 - })); - } - catch (error) { - callback(error); - } - })); -} -//# sourceMappingURL=fetch.js.map \ No newline at end of file diff --git a/build/lib/formatter.js b/build/lib/formatter.js deleted file mode 100644 index 1085ea8f488..00000000000 --- a/build/lib/formatter.js +++ /dev/null @@ -1,79 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.format = format; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const typescript_1 = __importDefault(require("typescript")); -class LanguageServiceHost { - files = {}; - addFile(fileName, text) { - this.files[fileName] = typescript_1.default.ScriptSnapshot.fromString(text); - } - fileExists(path) { - return !!this.files[path]; - } - readFile(path) { - return this.files[path]?.getText(0, this.files[path].getLength()); - } - // for ts.LanguageServiceHost - getCompilationSettings = () => typescript_1.default.getDefaultCompilerOptions(); - getScriptFileNames = () => Object.keys(this.files); - getScriptVersion = (_fileName) => '0'; - getScriptSnapshot = (fileName) => this.files[fileName]; - getCurrentDirectory = () => process.cwd(); - getDefaultLibFileName = (options) => typescript_1.default.getDefaultLibFilePath(options); -} -const defaults = { - baseIndentSize: 0, - indentSize: 4, - tabSize: 4, - indentStyle: typescript_1.default.IndentStyle.Smart, - newLineCharacter: '\r\n', - convertTabsToSpaces: false, - insertSpaceAfterCommaDelimiter: true, - insertSpaceAfterSemicolonInForStatements: true, - insertSpaceBeforeAndAfterBinaryOperators: true, - insertSpaceAfterConstructor: false, - insertSpaceAfterKeywordsInControlFlowStatements: true, - insertSpaceAfterFunctionKeywordForAnonymousFunctions: false, - insertSpaceAfterOpeningAndBeforeClosingNonemptyParenthesis: false, - insertSpaceAfterOpeningAndBeforeClosingNonemptyBrackets: false, - insertSpaceAfterOpeningAndBeforeClosingNonemptyBraces: true, - insertSpaceAfterOpeningAndBeforeClosingTemplateStringBraces: false, - insertSpaceAfterOpeningAndBeforeClosingJsxExpressionBraces: false, - insertSpaceAfterTypeAssertion: false, - insertSpaceBeforeFunctionParenthesis: false, - placeOpenBraceOnNewLineForFunctions: false, - placeOpenBraceOnNewLineForControlBlocks: false, - insertSpaceBeforeTypeAnnotation: false, -}; -const getOverrides = (() => { - let value; - return () => { - value ??= JSON.parse(fs_1.default.readFileSync(path_1.default.join(__dirname, '..', '..', 'tsfmt.json'), 'utf8')); - return value; - }; -})(); -function format(fileName, text) { - const host = new LanguageServiceHost(); - host.addFile(fileName, text); - const languageService = typescript_1.default.createLanguageService(host); - const edits = languageService.getFormattingEditsForDocument(fileName, { ...defaults, ...getOverrides() }); - edits - .sort((a, b) => a.span.start - b.span.start) - .reverse() - .forEach(edit => { - const head = text.slice(0, edit.span.start); - const tail = text.slice(edit.span.start + edit.span.length); - text = `${head}${edit.newText}${tail}`; - }); - return text; -} -//# sourceMappingURL=formatter.js.map \ No newline at end of file diff --git a/build/lib/formatter.ts b/build/lib/formatter.ts index 993722e5f92..09c1de929ba 100644 --- a/build/lib/formatter.ts +++ b/build/lib/formatter.ts @@ -59,7 +59,7 @@ const defaults: ts.FormatCodeSettings = { const getOverrides = (() => { let value: ts.FormatCodeSettings | undefined; return () => { - value ??= 
JSON.parse(fs.readFileSync(path.join(__dirname, '..', '..', 'tsfmt.json'), 'utf8')); + value ??= JSON.parse(fs.readFileSync(path.join(import.meta.dirname, '..', '..', 'tsfmt.json'), 'utf8')); return value; }; })(); diff --git a/build/lib/getVersion.js b/build/lib/getVersion.js deleted file mode 100644 index 7606c17ab14..00000000000 --- a/build/lib/getVersion.js +++ /dev/null @@ -1,49 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || (function () { - var ownKeys = function(o) { - ownKeys = Object.getOwnPropertyNames || function (o) { - var ar = []; - for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys(o); - }; - return function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); - __setModuleDefault(result, mod); - return result; - }; -})(); -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getVersion = getVersion; -const git = __importStar(require("./git")); -function getVersion(root) { - let version = process.env['BUILD_SOURCEVERSION']; - if (!version || !/^[0-9a-f]{40}$/i.test(version.trim())) { - version = git.getVersion(root); - } - return version; -} -//# sourceMappingURL=getVersion.js.map \ No newline at end of file diff --git a/build/lib/getVersion.ts b/build/lib/getVersion.ts index 2fddb309f83..1dc4600dadf 100644 --- a/build/lib/getVersion.ts +++ b/build/lib/getVersion.ts @@ -3,7 +3,7 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import * as git from './git'; +import * as git from './git.ts'; export function getVersion(root: string): string | undefined { let version = process.env['BUILD_SOURCEVERSION']; diff --git a/build/lib/git.js b/build/lib/git.js deleted file mode 100644 index 30de97ed6e3..00000000000 --- a/build/lib/git.js +++ /dev/null @@ -1,57 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getVersion = getVersion; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. 
- * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -const path_1 = __importDefault(require("path")); -const fs_1 = __importDefault(require("fs")); -/** - * Returns the sha1 commit version of a repository or undefined in case of failure. - */ -function getVersion(repo) { - const git = path_1.default.join(repo, '.git'); - const headPath = path_1.default.join(git, 'HEAD'); - let head; - try { - head = fs_1.default.readFileSync(headPath, 'utf8').trim(); - } - catch (e) { - return undefined; - } - if (/^[0-9a-f]{40}$/i.test(head)) { - return head; - } - const refMatch = /^ref: (.*)$/.exec(head); - if (!refMatch) { - return undefined; - } - const ref = refMatch[1]; - const refPath = path_1.default.join(git, ref); - try { - return fs_1.default.readFileSync(refPath, 'utf8').trim(); - } - catch (e) { - // noop - } - const packedRefsPath = path_1.default.join(git, 'packed-refs'); - let refsRaw; - try { - refsRaw = fs_1.default.readFileSync(packedRefsPath, 'utf8').trim(); - } - catch (e) { - return undefined; - } - const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm; - let refsMatch; - const refs = {}; - while (refsMatch = refsRegex.exec(refsRaw)) { - refs[refsMatch[2]] = refsMatch[1]; - } - return refs[ref]; -} -//# sourceMappingURL=git.js.map \ No newline at end of file diff --git a/build/lib/i18n.js b/build/lib/i18n.js deleted file mode 100644 index 0b371c8b812..00000000000 --- a/build/lib/i18n.js +++ /dev/null @@ -1,785 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.EXTERNAL_EXTENSIONS = exports.XLF = exports.Line = exports.extraLanguages = exports.defaultLanguages = void 0; -exports.processNlsFiles = processNlsFiles; -exports.getResource = getResource; -exports.createXlfFilesForCoreBundle = createXlfFilesForCoreBundle; -exports.createXlfFilesForExtensions = createXlfFilesForExtensions; -exports.createXlfFilesForIsl = createXlfFilesForIsl; -exports.prepareI18nPackFiles = prepareI18nPackFiles; -exports.prepareIslFiles = prepareIslFiles; -const path_1 = __importDefault(require("path")); -const fs_1 = __importDefault(require("fs")); -const event_stream_1 = require("event-stream"); -const gulp_merge_json_1 = __importDefault(require("gulp-merge-json")); -const vinyl_1 = __importDefault(require("vinyl")); -const xml2js_1 = __importDefault(require("xml2js")); -const gulp_1 = __importDefault(require("gulp")); -const fancy_log_1 = __importDefault(require("fancy-log")); -const ansi_colors_1 = __importDefault(require("ansi-colors")); -const iconv_lite_umd_1 = __importDefault(require("@vscode/iconv-lite-umd")); -const l10n_dev_1 = require("@vscode/l10n-dev"); -const REPO_ROOT_PATH = path_1.default.join(__dirname, '../..'); -function log(message, ...rest) { - (0, fancy_log_1.default)(ansi_colors_1.default.green('[i18n]'), message, ...rest); -} -exports.defaultLanguages = [ - { id: 'zh-tw', folderName: 'cht', translationId: 'zh-hant' }, - { id: 'zh-cn', folderName: 'chs', translationId: 'zh-hans' }, - { id: 'ja', folderName: 'jpn' }, - { id: 'ko', folderName: 'kor' }, - { id: 'de', folderName: 'deu' }, - { id: 'fr', folderName: 'fra' }, - { id: 'es', folderName: 'esn' }, - { id: 'ru', folderName: 'rus' }, - { id: 'it', folderName: 'ita' } -]; -// languages requested by the community -exports.extraLanguages = [ - { id: 'pt-br', folderName: 'ptb' }, - { id: 'tr', folderName: 'trk' }, - { id: 'cs' }, - { id: 'pl' } -]; -var LocalizeInfo; -(function (LocalizeInfo) { - function is(value) { - const candidate = value; - return candidate && typeof candidate.key === 'string' && (candidate.comment === undefined || (Array.isArray(candidate.comment) && candidate.comment.every(element => typeof element === 'string'))); - } - LocalizeInfo.is = is; -})(LocalizeInfo || (LocalizeInfo = {})); -var BundledFormat; -(function (BundledFormat) { - function is(value) { - if (value === undefined) { - return false; - } - const candidate = value; - const length = Object.keys(value).length; - return length === 3 && !!candidate.keys && !!candidate.messages && !!candidate.bundles; - } - BundledFormat.is = is; -})(BundledFormat || (BundledFormat = {})); -var NLSKeysFormat; -(function (NLSKeysFormat) { - function is(value) { - if (value === undefined) { - return false; - } - const candidate = value; - return Array.isArray(candidate) && Array.isArray(candidate[1]); - } - NLSKeysFormat.is = is; -})(NLSKeysFormat || (NLSKeysFormat = {})); -class Line { - buffer = []; - constructor(indent = 0) { - if (indent > 0) { - this.buffer.push(new Array(indent + 1).join(' ')); - } - } - append(value) { - this.buffer.push(value); - return this; - } - toString() { - return this.buffer.join(''); - } -} -exports.Line = Line; -class TextModel { - _lines; - constructor(contents) { - this._lines = contents.split(/\r\n|\r|\n/); - } - get lines() { - return this._lines; - } -} -class XLF { - project; - buffer; - files; - numberOfMessages; - constructor(project) { - this.project = project; - this.buffer = []; - 
this.files = Object.create(null); - this.numberOfMessages = 0; - } - toString() { - this.appendHeader(); - const files = Object.keys(this.files).sort(); - for (const file of files) { - this.appendNewLine(``, 2); - const items = this.files[file].sort((a, b) => { - return a.id < b.id ? -1 : a.id > b.id ? 1 : 0; - }); - for (const item of items) { - this.addStringItem(file, item); - } - this.appendNewLine(''); - } - this.appendFooter(); - return this.buffer.join('\r\n'); - } - addFile(original, keys, messages) { - if (keys.length === 0) { - console.log('No keys in ' + original); - return; - } - if (keys.length !== messages.length) { - throw new Error(`Unmatching keys(${keys.length}) and messages(${messages.length}).`); - } - this.numberOfMessages += keys.length; - this.files[original] = []; - const existingKeys = new Set(); - for (let i = 0; i < keys.length; i++) { - const key = keys[i]; - let realKey; - let comment; - if (typeof key === 'string') { - realKey = key; - comment = undefined; - } - else if (LocalizeInfo.is(key)) { - realKey = key.key; - if (key.comment && key.comment.length > 0) { - comment = key.comment.map(comment => encodeEntities(comment)).join('\r\n'); - } - } - if (!realKey || existingKeys.has(realKey)) { - continue; - } - existingKeys.add(realKey); - const message = encodeEntities(messages[i]); - this.files[original].push({ id: realKey, message: message, comment: comment }); - } - } - addStringItem(file, item) { - if (!item.id || item.message === undefined || item.message === null) { - throw new Error(`No item ID or value specified: ${JSON.stringify(item)}. File: ${file}`); - } - if (item.message.length === 0) { - log(`Item with id ${item.id} in file ${file} has an empty message.`); - } - this.appendNewLine(``, 4); - this.appendNewLine(`${item.message}`, 6); - if (item.comment) { - this.appendNewLine(`${item.comment}`, 6); - } - this.appendNewLine('', 4); - } - appendHeader() { - this.appendNewLine('', 0); - this.appendNewLine('', 0); - } - appendFooter() { - this.appendNewLine('', 0); - } - appendNewLine(content, indent) { - const line = new Line(indent); - line.append(content); - this.buffer.push(line.toString()); - } - static parse = function (xlfString) { - return new Promise((resolve, reject) => { - const parser = new xml2js_1.default.Parser(); - const files = []; - parser.parseString(xlfString, function (err, result) { - if (err) { - reject(new Error(`XLF parsing error: Failed to parse XLIFF string. ${err}`)); - } - const fileNodes = result['xliff']['file']; - if (!fileNodes) { - reject(new Error(`XLF parsing error: XLIFF file does not contain "xliff" or "file" node(s) required for parsing.`)); - } - fileNodes.forEach((file) => { - const name = file.$.original; - if (!name) { - reject(new Error(`XLF parsing error: XLIFF file node does not contain original attribute to determine the original location of the resource file.`)); - } - const language = file.$['target-language']; - if (!language) { - reject(new Error(`XLF parsing error: XLIFF file node does not contain target-language attribute to determine translated language.`)); - } - const messages = {}; - const transUnits = file.body[0]['trans-unit']; - if (transUnits) { - transUnits.forEach((unit) => { - const key = unit.$.id; - if (!unit.target) { - return; // No translation available - } - let val = unit.target[0]; - if (typeof val !== 'string') { - // We allow empty source values so support them for translations as well. - val = val._ ? 
val._ : ''; - } - if (!key) { - reject(new Error(`XLF parsing error: trans-unit ${JSON.stringify(unit, undefined, 0)} defined in file ${name} is missing the ID attribute.`)); - return; - } - messages[key] = decodeEntities(val); - }); - files.push({ messages, name, language: language.toLowerCase() }); - } - }); - resolve(files); - }); - }); - }; -} -exports.XLF = XLF; -function sortLanguages(languages) { - return languages.sort((a, b) => { - return a.id < b.id ? -1 : (a.id > b.id ? 1 : 0); - }); -} -function stripComments(content) { - // Copied from stripComments.js - // - // First group matches a double quoted string - // Second group matches a single quoted string - // Third group matches a multi line comment - // Forth group matches a single line comment - // Fifth group matches a trailing comma - const regexp = /("[^"\\]*(?:\\.[^"\\]*)*")|('[^'\\]*(?:\\.[^'\\]*)*')|(\/\*[^\/\*]*(?:(?:\*|\/)[^\/\*]*)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))|(,\s*[}\]])/g; - const result = content.replace(regexp, (match, _m1, _m2, m3, m4, m5) => { - // Only one of m1, m2, m3, m4, m5 matches - if (m3) { - // A block comment. Replace with nothing - return ''; - } - else if (m4) { - // Since m4 is a single line comment is is at least of length 2 (e.g. //) - // If it ends in \r?\n then keep it. - const length = m4.length; - if (m4[length - 1] === '\n') { - return m4[length - 2] === '\r' ? '\r\n' : '\n'; - } - else { - return ''; - } - } - else if (m5) { - // Remove the trailing comma - return match.substring(1); - } - else { - // We match a string - return match; - } - }); - return result; -} -function processCoreBundleFormat(base, fileHeader, languages, json, emitter) { - const languageDirectory = path_1.default.join(REPO_ROOT_PATH, '..', 'vscode-loc', 'i18n'); - if (!fs_1.default.existsSync(languageDirectory)) { - log(`No VS Code localization repository found. 
Looking at ${languageDirectory}`); - log(`To bundle translations please check out the vscode-loc repository as a sibling of the vscode repository.`); - } - const sortedLanguages = sortLanguages(languages); - sortedLanguages.forEach((language) => { - if (process.env['VSCODE_BUILD_VERBOSE']) { - log(`Generating nls bundles for: ${language.id}`); - } - const languageFolderName = language.translationId || language.id; - const i18nFile = path_1.default.join(languageDirectory, `vscode-language-pack-${languageFolderName}`, 'translations', 'main.i18n.json'); - let allMessages; - if (fs_1.default.existsSync(i18nFile)) { - const content = stripComments(fs_1.default.readFileSync(i18nFile, 'utf8')); - allMessages = JSON.parse(content); - } - let nlsIndex = 0; - const nlsResult = []; - for (const [moduleId, nlsKeys] of json) { - const moduleTranslations = allMessages?.contents[moduleId]; - for (const nlsKey of nlsKeys) { - nlsResult.push(moduleTranslations?.[nlsKey]); // pushing `undefined` is fine, as we keep english strings as fallback for monaco editor in the build - nlsIndex++; - } - } - emitter.queue(new vinyl_1.default({ - contents: Buffer.from(`${fileHeader} -globalThis._VSCODE_NLS_MESSAGES=${JSON.stringify(nlsResult)}; -globalThis._VSCODE_NLS_LANGUAGE=${JSON.stringify(language.id)};`), - base, - path: `${base}/nls.messages.${language.id}.js` - })); - }); -} -function processNlsFiles(opts) { - return (0, event_stream_1.through)(function (file) { - const fileName = path_1.default.basename(file.path); - if (fileName === 'nls.keys.json') { - try { - const contents = file.contents.toString('utf8'); - const json = JSON.parse(contents); - if (NLSKeysFormat.is(json)) { - processCoreBundleFormat(file.base, opts.fileHeader, opts.languages, json, this); - } - } - catch (error) { - this.emit('error', `Failed to read component file: ${error}`); - } - } - this.queue(file); - }); -} -const editorProject = 'vscode-editor', workbenchProject = 'vscode-workbench', extensionsProject = 'vscode-extensions', setupProject = 'vscode-setup', serverProject = 'vscode-server'; -function getResource(sourceFile) { - let resource; - if (/^vs\/platform/.test(sourceFile)) { - return { name: 'vs/platform', project: editorProject }; - } - else if (/^vs\/editor\/contrib/.test(sourceFile)) { - return { name: 'vs/editor/contrib', project: editorProject }; - } - else if (/^vs\/editor/.test(sourceFile)) { - return { name: 'vs/editor', project: editorProject }; - } - else if (/^vs\/base/.test(sourceFile)) { - return { name: 'vs/base', project: editorProject }; - } - else if (/^vs\/code/.test(sourceFile)) { - return { name: 'vs/code', project: workbenchProject }; - } - else if (/^vs\/server/.test(sourceFile)) { - return { name: 'vs/server', project: serverProject }; - } - else if (/^vs\/workbench\/contrib/.test(sourceFile)) { - resource = sourceFile.split('/', 4).join('/'); - return { name: resource, project: workbenchProject }; - } - else if (/^vs\/workbench\/services/.test(sourceFile)) { - resource = sourceFile.split('/', 4).join('/'); - return { name: resource, project: workbenchProject }; - } - else if (/^vs\/workbench/.test(sourceFile)) { - return { name: 'vs/workbench', project: workbenchProject }; - } - throw new Error(`Could not identify the XLF bundle for ${sourceFile}`); -} -function createXlfFilesForCoreBundle() { - return (0, event_stream_1.through)(function (file) { - const basename = path_1.default.basename(file.path); - if (basename === 'nls.metadata.json') { - if (file.isBuffer()) { - const xlfs = Object.create(null); - 
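// The core-bundle logic above (processCoreBundleFormat) flattens translations into a
// single array ordered exactly like the [moduleId, keys[]] pairs, so a message is
// addressed purely by index and a missing translation falls back to the built-in
// English string. A rough sketch of that shape; the type names and helper below are
// illustrative assumptions, not the build's actual API.
type NLSKeyPairs = [moduleId: string, keys: string[]][];
interface MainI18nJson { contents: Record<string, Record<string, string>> }

function buildFlatNlsMessages(pairs: NLSKeyPairs, translations?: MainI18nJson): (string | undefined)[] {
	const result: (string | undefined)[] = [];
	for (const [moduleId, keys] of pairs) {
		const moduleTranslations = translations?.contents[moduleId];
		for (const key of keys) {
			// Pushing `undefined` is deliberate: the English string remains the fallback.
			result.push(moduleTranslations?.[key]);
		}
	}
	return result;
}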
const json = JSON.parse(file.contents.toString('utf8')); - for (const coreModule in json.keys) { - const projectResource = getResource(coreModule); - const resource = projectResource.name; - const project = projectResource.project; - const keys = json.keys[coreModule]; - const messages = json.messages[coreModule]; - if (keys.length !== messages.length) { - this.emit('error', `There is a mismatch between keys and messages in ${file.relative} for module ${coreModule}`); - return; - } - else { - let xlf = xlfs[resource]; - if (!xlf) { - xlf = new XLF(project); - xlfs[resource] = xlf; - } - xlf.addFile(`src/${coreModule}`, keys, messages); - } - } - for (const resource in xlfs) { - const xlf = xlfs[resource]; - const filePath = `${xlf.project}/${resource.replace(/\//g, '_')}.xlf`; - const xlfFile = new vinyl_1.default({ - path: filePath, - contents: Buffer.from(xlf.toString(), 'utf8') - }); - this.queue(xlfFile); - } - } - else { - this.emit('error', new Error(`File ${file.relative} is not using a buffer content`)); - return; - } - } - else { - this.emit('error', new Error(`File ${file.relative} is not a core meta data file.`)); - return; - } - }); -} -function createL10nBundleForExtension(extensionFolderName, prefixWithBuildFolder) { - const prefix = prefixWithBuildFolder ? '.build/' : ''; - return gulp_1.default - .src([ - // For source code of extensions - `${prefix}extensions/${extensionFolderName}/{src,client,server}/**/*.{ts,tsx}`, - // // For any dependencies pulled in (think vscode-css-languageservice or @vscode/emmet-helper) - `${prefix}extensions/${extensionFolderName}/**/node_modules/{@vscode,vscode-*}/**/*.{js,jsx}`, - // // For any dependencies pulled in that bundle @vscode/l10n. They needed to export the bundle - `${prefix}extensions/${extensionFolderName}/**/bundle.l10n.json`, - ]) - .pipe((0, event_stream_1.map)(function (data, callback) { - const file = data; - if (!file.isBuffer()) { - // Not a buffer so we drop it - callback(); - return; - } - const extension = path_1.default.extname(file.relative); - if (extension !== '.json') { - const contents = file.contents.toString('utf8'); - (0, l10n_dev_1.getL10nJson)([{ contents, extension }]) - .then((json) => { - callback(undefined, new vinyl_1.default({ - path: `extensions/${extensionFolderName}/bundle.l10n.json`, - contents: Buffer.from(JSON.stringify(json), 'utf8') - })); - }) - .catch((err) => { - callback(new Error(`File ${file.relative} threw an error when parsing: ${err}`)); - }); - // signal pause? - return false; - } - // for bundle.l10n.jsons - let bundleJson; - try { - bundleJson = JSON.parse(file.contents.toString('utf8')); - } - catch (err) { - callback(new Error(`File ${file.relative} threw an error when parsing: ${err}`)); - return; - } - // some validation of the bundle.l10n.json format - for (const key in bundleJson) { - if (typeof bundleJson[key] !== 'string' && - (typeof bundleJson[key].message !== 'string' || !Array.isArray(bundleJson[key].comment))) { - callback(new Error(`Invalid bundle.l10n.json file. 
The value for key ${key} is not in the expected format.`)); - return; - } - } - callback(undefined, file); - })) - .pipe((0, gulp_merge_json_1.default)({ - fileName: `extensions/${extensionFolderName}/bundle.l10n.json`, - jsonSpace: '', - concatArrays: true - })); -} -exports.EXTERNAL_EXTENSIONS = [ - 'ms-vscode.js-debug', - 'ms-vscode.js-debug-companion', - 'ms-vscode.vscode-js-profile-table', -]; -function createXlfFilesForExtensions() { - let counter = 0; - let folderStreamEnded = false; - let folderStreamEndEmitted = false; - return (0, event_stream_1.through)(function (extensionFolder) { - const folderStream = this; - const stat = fs_1.default.statSync(extensionFolder.path); - if (!stat.isDirectory()) { - return; - } - const extensionFolderName = path_1.default.basename(extensionFolder.path); - if (extensionFolderName === 'node_modules') { - return; - } - // Get extension id and use that as the id - const manifest = fs_1.default.readFileSync(path_1.default.join(extensionFolder.path, 'package.json'), 'utf-8'); - const manifestJson = JSON.parse(manifest); - const extensionId = manifestJson.publisher + '.' + manifestJson.name; - counter++; - let _l10nMap; - function getL10nMap() { - if (!_l10nMap) { - _l10nMap = new Map(); - } - return _l10nMap; - } - (0, event_stream_1.merge)(gulp_1.default.src([`.build/extensions/${extensionFolderName}/package.nls.json`, `.build/extensions/${extensionFolderName}/**/nls.metadata.json`], { allowEmpty: true }), createL10nBundleForExtension(extensionFolderName, exports.EXTERNAL_EXTENSIONS.includes(extensionId))).pipe((0, event_stream_1.through)(function (file) { - if (file.isBuffer()) { - const buffer = file.contents; - const basename = path_1.default.basename(file.path); - if (basename === 'package.nls.json') { - const json = JSON.parse(buffer.toString('utf8')); - getL10nMap().set(`extensions/${extensionId}/package`, json); - } - else if (basename === 'nls.metadata.json') { - const json = JSON.parse(buffer.toString('utf8')); - const relPath = path_1.default.relative(`.build/extensions/${extensionFolderName}`, path_1.default.dirname(file.path)); - for (const file in json) { - const fileContent = json[file]; - const info = Object.create(null); - for (let i = 0; i < fileContent.messages.length; i++) { - const message = fileContent.messages[i]; - const { key, comment } = LocalizeInfo.is(fileContent.keys[i]) - ? fileContent.keys[i] - : { key: fileContent.keys[i], comment: undefined }; - info[key] = comment ? 
{ message, comment } : message; - } - getL10nMap().set(`extensions/${extensionId}/${relPath}/${file}`, info); - } - } - else if (basename === 'bundle.l10n.json') { - const json = JSON.parse(buffer.toString('utf8')); - getL10nMap().set(`extensions/${extensionId}/bundle`, json); - } - else { - this.emit('error', new Error(`${file.path} is not a valid extension nls file`)); - return; - } - } - }, function () { - if (_l10nMap?.size > 0) { - const xlfFile = new vinyl_1.default({ - path: path_1.default.join(extensionsProject, extensionId + '.xlf'), - contents: Buffer.from((0, l10n_dev_1.getL10nXlf)(_l10nMap), 'utf8') - }); - folderStream.queue(xlfFile); - } - this.queue(null); - counter--; - if (counter === 0 && folderStreamEnded && !folderStreamEndEmitted) { - folderStreamEndEmitted = true; - folderStream.queue(null); - } - })); - }, function () { - folderStreamEnded = true; - if (counter === 0) { - folderStreamEndEmitted = true; - this.queue(null); - } - }); -} -function createXlfFilesForIsl() { - return (0, event_stream_1.through)(function (file) { - let projectName, resourceFile; - if (path_1.default.basename(file.path) === 'messages.en.isl') { - projectName = setupProject; - resourceFile = 'messages.xlf'; - } - else { - throw new Error(`Unknown input file ${file.path}`); - } - const xlf = new XLF(projectName), keys = [], messages = []; - const model = new TextModel(file.contents.toString()); - let inMessageSection = false; - model.lines.forEach(line => { - if (line.length === 0) { - return; - } - const firstChar = line.charAt(0); - switch (firstChar) { - case ';': - // Comment line; - return; - case '[': - inMessageSection = '[Messages]' === line || '[CustomMessages]' === line; - return; - } - if (!inMessageSection) { - return; - } - const sections = line.split('='); - if (sections.length !== 2) { - throw new Error(`Badly formatted message found: ${line}`); - } - else { - const key = sections[0]; - const value = sections[1]; - if (key.length > 0 && value.length > 0) { - keys.push(key); - messages.push(value); - } - } - }); - const originalPath = file.path.substring(file.cwd.length + 1, file.path.split('.')[0].length).replace(/\\/g, '/'); - xlf.addFile(originalPath, keys, messages); - // Emit only upon all ISL files combined into single XLF instance - const newFilePath = path_1.default.join(projectName, resourceFile); - const xlfFile = new vinyl_1.default({ path: newFilePath, contents: Buffer.from(xlf.toString(), 'utf-8') }); - this.queue(xlfFile); - }); -} -function createI18nFile(name, messages) { - const result = Object.create(null); - result[''] = [ - '--------------------------------------------------------------------------------------------', - 'Copyright (c) Microsoft Corporation. All rights reserved.', - 'Licensed under the MIT License. See License.txt in the project root for license information.', - '--------------------------------------------------------------------------------------------', - 'Do not edit this file. It is machine generated.' 
- ]; - for (const key of Object.keys(messages)) { - result[key] = messages[key]; - } - let content = JSON.stringify(result, null, '\t'); - if (process.platform === 'win32') { - content = content.replace(/\n/g, '\r\n'); - } - return new vinyl_1.default({ - path: path_1.default.join(name + '.i18n.json'), - contents: Buffer.from(content, 'utf8') - }); -} -const i18nPackVersion = '1.0.0'; -function getRecordFromL10nJsonFormat(l10nJsonFormat) { - const record = {}; - for (const key of Object.keys(l10nJsonFormat).sort()) { - const value = l10nJsonFormat[key]; - record[key] = typeof value === 'string' ? value : value.message; - } - return record; -} -function prepareI18nPackFiles(resultingTranslationPaths) { - const parsePromises = []; - const mainPack = { version: i18nPackVersion, contents: {} }; - const extensionsPacks = {}; - const errors = []; - return (0, event_stream_1.through)(function (xlf) { - let project = path_1.default.basename(path_1.default.dirname(path_1.default.dirname(xlf.relative))); - // strip `-new` since vscode-extensions-loc uses the `-new` suffix to indicate that it's from the new loc pipeline - const resource = path_1.default.basename(path_1.default.basename(xlf.relative, '.xlf'), '-new'); - if (exports.EXTERNAL_EXTENSIONS.find(e => e === resource)) { - project = extensionsProject; - } - const contents = xlf.contents.toString(); - log(`Found ${project}: ${resource}`); - const parsePromise = (0, l10n_dev_1.getL10nFilesFromXlf)(contents); - parsePromises.push(parsePromise); - parsePromise.then(resolvedFiles => { - resolvedFiles.forEach(file => { - const path = file.name; - const firstSlash = path.indexOf('/'); - if (project === extensionsProject) { - // resource will be the extension id - let extPack = extensionsPacks[resource]; - if (!extPack) { - extPack = extensionsPacks[resource] = { version: i18nPackVersion, contents: {} }; - } - // remove 'extensions/extensionId/' segment - const secondSlash = path.indexOf('/', firstSlash + 1); - extPack.contents[path.substring(secondSlash + 1)] = getRecordFromL10nJsonFormat(file.messages); - } - else { - mainPack.contents[path.substring(firstSlash + 1)] = getRecordFromL10nJsonFormat(file.messages); - } - }); - }).catch(reason => { - errors.push(reason); - }); - }, function () { - Promise.all(parsePromises) - .then(() => { - if (errors.length > 0) { - throw errors; - } - const translatedMainFile = createI18nFile('./main', mainPack); - resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' }); - this.queue(translatedMainFile); - for (const extensionId in extensionsPacks) { - const translatedExtFile = createI18nFile(`extensions/${extensionId}`, extensionsPacks[extensionId]); - this.queue(translatedExtFile); - resultingTranslationPaths.push({ id: extensionId, resourceName: `extensions/${extensionId}.i18n.json` }); - } - this.queue(null); - }) - .catch((reason) => { - this.emit('error', reason); - }); - }); -} -function prepareIslFiles(language, innoSetupConfig) { - const parsePromises = []; - return (0, event_stream_1.through)(function (xlf) { - const stream = this; - const parsePromise = XLF.parse(xlf.contents.toString()); - parsePromises.push(parsePromise); - parsePromise.then(resolvedFiles => { - resolvedFiles.forEach(file => { - const translatedFile = createIslFile(file.name, file.messages, language, innoSetupConfig); - stream.queue(translatedFile); - }); - }).catch(reason => { - this.emit('error', reason); - }); - }, function () { - Promise.all(parsePromises) - .then(() => { this.queue(null); }) - .catch(reason 
=> { - this.emit('error', reason); - }); - }); -} -function createIslFile(name, messages, language, innoSetup) { - const content = []; - let originalContent; - if (path_1.default.basename(name) === 'Default') { - originalContent = new TextModel(fs_1.default.readFileSync(name + '.isl', 'utf8')); - } - else { - originalContent = new TextModel(fs_1.default.readFileSync(name + '.en.isl', 'utf8')); - } - originalContent.lines.forEach(line => { - if (line.length > 0) { - const firstChar = line.charAt(0); - if (firstChar === '[' || firstChar === ';') { - content.push(line); - } - else { - const sections = line.split('='); - const key = sections[0]; - let translated = line; - if (key) { - const translatedMessage = messages[key]; - if (translatedMessage) { - translated = `${key}=${translatedMessage}`; - } - } - content.push(translated); - } - } - }); - const basename = path_1.default.basename(name); - const filePath = `${basename}.${language.id}.isl`; - const encoded = iconv_lite_umd_1.default.encode(Buffer.from(content.join('\r\n'), 'utf8').toString(), innoSetup.codePage); - return new vinyl_1.default({ - path: filePath, - contents: Buffer.from(encoded), - }); -} -function encodeEntities(value) { - const result = []; - for (let i = 0; i < value.length; i++) { - const ch = value[i]; - switch (ch) { - case '<': - result.push('<'); - break; - case '>': - result.push('>'); - break; - case '&': - result.push('&'); - break; - default: - result.push(ch); - } - } - return result.join(''); -} -function decodeEntities(value) { - return value.replace(/</g, '<').replace(/>/g, '>').replace(/&/g, '&'); -} -//# sourceMappingURL=i18n.js.map \ No newline at end of file diff --git a/build/lib/i18n.ts b/build/lib/i18n.ts index 4506b2e3cd0..3845bc807f1 100644 --- a/build/lib/i18n.ts +++ b/build/lib/i18n.ts @@ -5,8 +5,7 @@ import path from 'path'; import fs from 'fs'; - -import { map, merge, through, ThroughStream } from 'event-stream'; +import eventStream from 'event-stream'; import jsonMerge from 'gulp-merge-json'; import File from 'vinyl'; import xml2js from 'xml2js'; @@ -14,9 +13,9 @@ import gulp from 'gulp'; import fancyLog from 'fancy-log'; import ansiColors from 'ansi-colors'; import iconv from '@vscode/iconv-lite-umd'; -import { l10nJsonFormat, getL10nXlf, l10nJsonDetails, getL10nFilesFromXlf, getL10nJson } from '@vscode/l10n-dev'; +import { type l10nJsonFormat, getL10nXlf, type l10nJsonDetails, getL10nFilesFromXlf, getL10nJson } from '@vscode/l10n-dev'; -const REPO_ROOT_PATH = path.join(__dirname, '../..'); +const REPO_ROOT_PATH = path.join(import.meta.dirname, '../..'); function log(message: any, ...rest: unknown[]): void { fancyLog(ansiColors.green('[i18n]'), message, ...rest); @@ -68,11 +67,9 @@ interface LocalizeInfo { comment: string[]; } -module LocalizeInfo { - export function is(value: unknown): value is LocalizeInfo { - const candidate = value as LocalizeInfo; - return candidate && typeof candidate.key === 'string' && (candidate.comment === undefined || (Array.isArray(candidate.comment) && candidate.comment.every(element => typeof element === 'string'))); - } +function isLocalizeInfo(value: unknown): value is LocalizeInfo { + const candidate = value as LocalizeInfo; + return candidate && typeof candidate.key === 'string' && (candidate.comment === undefined || (Array.isArray(candidate.comment) && candidate.comment.every(element => typeof element === 'string'))); } interface BundledFormat { @@ -81,30 +78,15 @@ interface BundledFormat { bundles: Record; } -module BundledFormat { - export function 
is(value: any): value is BundledFormat { - if (value === undefined) { - return false; - } - - const candidate = value as BundledFormat; - const length = Object.keys(value).length; - - return length === 3 && !!candidate.keys && !!candidate.messages && !!candidate.bundles; - } -} - type NLSKeysFormat = [string /* module ID */, string[] /* keys */]; -module NLSKeysFormat { - export function is(value: any): value is NLSKeysFormat { - if (value === undefined) { - return false; - } - - const candidate = value as NLSKeysFormat; - return Array.isArray(candidate) && Array.isArray(candidate[1]); +function isNLSKeysFormat(value: any): value is NLSKeysFormat { + if (value === undefined) { + return false; } + + const candidate = value as NLSKeysFormat; + return Array.isArray(candidate) && Array.isArray(candidate[1]); } interface BundledExtensionFormat { @@ -158,8 +140,10 @@ export class XLF { private buffer: string[]; private files: Record; public numberOfMessages: number; + public project: string; - constructor(public project: string) { + constructor(project: string) { + this.project = project; this.buffer = []; this.files = Object.create(null); this.numberOfMessages = 0; @@ -201,7 +185,7 @@ export class XLF { if (typeof key === 'string') { realKey = key; comment = undefined; - } else if (LocalizeInfo.is(key)) { + } else if (isLocalizeInfo(key)) { realKey = key.key; if (key.comment && key.comment.length > 0) { comment = key.comment.map(comment => encodeEntities(comment)).join('\r\n'); @@ -345,7 +329,7 @@ function stripComments(content: string): string { return result; } -function processCoreBundleFormat(base: string, fileHeader: string, languages: Language[], json: NLSKeysFormat, emitter: ThroughStream) { +function processCoreBundleFormat(base: string, fileHeader: string, languages: Language[], json: NLSKeysFormat, emitter: eventStream.ThroughStream) { const languageDirectory = path.join(REPO_ROOT_PATH, '..', 'vscode-loc', 'i18n'); if (!fs.existsSync(languageDirectory)) { log(`No VS Code localization repository found. Looking at ${languageDirectory}`); @@ -385,14 +369,14 @@ globalThis._VSCODE_NLS_LANGUAGE=${JSON.stringify(language.id)};`), }); } -export function processNlsFiles(opts: { out: string; fileHeader: string; languages: Language[] }): ThroughStream { - return through(function (this: ThroughStream, file: File) { +export function processNlsFiles(opts: { out: string; fileHeader: string; languages: Language[] }): eventStream.ThroughStream { + return eventStream.through(function (this: eventStream.ThroughStream, file: File) { const fileName = path.basename(file.path); if (fileName === 'nls.keys.json') { try { const contents = file.contents!.toString('utf8'); const json = JSON.parse(contents); - if (NLSKeysFormat.is(json)) { + if (isNLSKeysFormat(json)) { processCoreBundleFormat(file.base, opts.fileHeader, opts.languages, json, this); } } catch (error) { @@ -438,8 +422,8 @@ export function getResource(sourceFile: string): Resource { } -export function createXlfFilesForCoreBundle(): ThroughStream { - return through(function (this: ThroughStream, file: File) { +export function createXlfFilesForCoreBundle(): eventStream.ThroughStream { + return eventStream.through(function (this: eventStream.ThroughStream, file: File) { const basename = path.basename(file.path); if (basename === 'nls.metadata.json') { if (file.isBuffer()) { @@ -495,7 +479,7 @@ function createL10nBundleForExtension(extensionFolderName: string, prefixWithBui // // For any dependencies pulled in that bundle @vscode/l10n. 
They needed to export the bundle `${prefix}extensions/${extensionFolderName}/**/bundle.l10n.json`, ]) - .pipe(map(function (data, callback) { + .pipe(eventStream.map(function (data, callback) { const file = data as File; if (!file.isBuffer()) { // Not a buffer so we drop it @@ -554,11 +538,11 @@ export const EXTERNAL_EXTENSIONS = [ 'ms-vscode.vscode-js-profile-table', ]; -export function createXlfFilesForExtensions(): ThroughStream { +export function createXlfFilesForExtensions(): eventStream.ThroughStream { let counter: number = 0; let folderStreamEnded: boolean = false; let folderStreamEndEmitted: boolean = false; - return through(function (this: ThroughStream, extensionFolder: File) { + return eventStream.through(function (this: eventStream.ThroughStream, extensionFolder: File) { const folderStream = this; const stat = fs.statSync(extensionFolder.path); if (!stat.isDirectory()) { @@ -581,10 +565,10 @@ export function createXlfFilesForExtensions(): ThroughStream { } return _l10nMap; } - merge( + eventStream.merge( gulp.src([`.build/extensions/${extensionFolderName}/package.nls.json`, `.build/extensions/${extensionFolderName}/**/nls.metadata.json`], { allowEmpty: true }), createL10nBundleForExtension(extensionFolderName, EXTERNAL_EXTENSIONS.includes(extensionId)) - ).pipe(through(function (file: File) { + ).pipe(eventStream.through(function (file: File) { if (file.isBuffer()) { const buffer: Buffer = file.contents as Buffer; const basename = path.basename(file.path); @@ -599,7 +583,7 @@ export function createXlfFilesForExtensions(): ThroughStream { const info: l10nJsonFormat = Object.create(null); for (let i = 0; i < fileContent.messages.length; i++) { const message = fileContent.messages[i]; - const { key, comment } = LocalizeInfo.is(fileContent.keys[i]) + const { key, comment } = isLocalizeInfo(fileContent.keys[i]) ? 
fileContent.keys[i] as LocalizeInfo : { key: fileContent.keys[i] as string, comment: undefined }; @@ -639,8 +623,8 @@ export function createXlfFilesForExtensions(): ThroughStream { }); } -export function createXlfFilesForIsl(): ThroughStream { - return through(function (this: ThroughStream, file: File) { +export function createXlfFilesForIsl(): eventStream.ThroughStream { + return eventStream.through(function (this: eventStream.ThroughStream, file: File) { let projectName: string, resourceFile: string; if (path.basename(file.path) === 'messages.en.isl') { @@ -746,7 +730,7 @@ export function prepareI18nPackFiles(resultingTranslationPaths: TranslationPath[ const mainPack: I18nPack = { version: i18nPackVersion, contents: {} }; const extensionsPacks: Record = {}; const errors: unknown[] = []; - return through(function (this: ThroughStream, xlf: File) { + return eventStream.through(function (this: eventStream.ThroughStream, xlf: File) { let project = path.basename(path.dirname(path.dirname(xlf.relative))); // strip `-new` since vscode-extensions-loc uses the `-new` suffix to indicate that it's from the new loc pipeline const resource = path.basename(path.basename(xlf.relative, '.xlf'), '-new'); @@ -804,10 +788,10 @@ export function prepareI18nPackFiles(resultingTranslationPaths: TranslationPath[ }); } -export function prepareIslFiles(language: Language, innoSetupConfig: InnoSetup): ThroughStream { +export function prepareIslFiles(language: Language, innoSetupConfig: InnoSetup): eventStream.ThroughStream { const parsePromises: Promise[] = []; - return through(function (this: ThroughStream, xlf: File) { + return eventStream.through(function (this: eventStream.ThroughStream, xlf: File) { const stream = this; const parsePromise = XLF.parse(xlf.contents!.toString()); parsePromises.push(parsePromise); diff --git a/build/lib/inlineMeta.js b/build/lib/inlineMeta.js deleted file mode 100644 index 3b473ae091e..00000000000 --- a/build/lib/inlineMeta.js +++ /dev/null @@ -1,51 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.inlineMeta = inlineMeta; -const event_stream_1 = __importDefault(require("event-stream")); -const path_1 = require("path"); -const packageJsonMarkerId = 'BUILD_INSERT_PACKAGE_CONFIGURATION'; -// TODO in order to inline `product.json`, more work is -// needed to ensure that we cover all cases where modifications -// are done to the product configuration during build. 
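For reference, the marker replacement that the removed compiled inlineMeta.js performed (and which presumably remains in its TypeScript source) can be sketched as follows; the helper name and the sample strings are invented, only the marker format and the brace-trimming come from the code shown here:

// Minimal sketch of the esbuild-marker inlining, assuming the marker format shown above.
const packageJsonMarkerId = 'BUILD_INSERT_PACKAGE_CONFIGURATION';

function inlinePackageJson(bundledSource: string, packageJsonText: string): string {
	// After esbuild the placeholder appears as `MARKER:"MARKER"` (double quotes).
	const marker = `${packageJsonMarkerId}:"${packageJsonMarkerId}"`;
	if (!bundledSource.includes(marker)) {
		return bundledSource;
	}
	// Re-serialize and trim the outer braces so the key/value pairs splice into the surrounding object literal.
	const inlined = JSON.stringify(JSON.parse(packageJsonText)).slice(1, -1);
	return bundledSource.replace(marker, inlined);
}

console.log(inlinePackageJson('const pkg = {BUILD_INSERT_PACKAGE_CONFIGURATION:"BUILD_INSERT_PACKAGE_CONFIGURATION"};', '{ "name": "code-oss-dev" }'));
// -> const pkg = {"name":"code-oss-dev"};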
There are -// at least 2 more changes that kick in very late: -// - a `darwinUniversalAssetId` is added in`create-universal-app.ts` -// - a `target` is added in `gulpfile.vscode.win32.js` -// const productJsonMarkerId = 'BUILD_INSERT_PRODUCT_CONFIGURATION'; -function inlineMeta(result, ctx) { - return result.pipe(event_stream_1.default.through(function (file) { - if (matchesFile(file, ctx)) { - let content = file.contents.toString(); - let markerFound = false; - const packageMarker = `${packageJsonMarkerId}:"${packageJsonMarkerId}"`; // this needs to be the format after esbuild has processed the file (e.g. double quotes) - if (content.includes(packageMarker)) { - content = content.replace(packageMarker, JSON.stringify(JSON.parse(ctx.packageJsonFn())).slice(1, -1) /* trim braces */); - markerFound = true; - } - // const productMarker = `${productJsonMarkerId}:"${productJsonMarkerId}"`; // this needs to be the format after esbuild has processed the file (e.g. double quotes) - // if (content.includes(productMarker)) { - // content = content.replace(productMarker, JSON.stringify(JSON.parse(ctx.productJsonFn())).slice(1, -1) /* trim braces */); - // markerFound = true; - // } - if (markerFound) { - file.contents = Buffer.from(content); - } - } - this.emit('data', file); - })); -} -function matchesFile(file, ctx) { - for (const targetPath of ctx.targetPaths) { - if (file.basename === (0, path_1.basename)(targetPath)) { // TODO would be nicer to figure out root relative path to not match on false positives - return true; - } - } - return false; -} -//# sourceMappingURL=inlineMeta.js.map \ No newline at end of file diff --git a/build/lib/mangle/index.js b/build/lib/mangle/index.js deleted file mode 100644 index fa729052f7c..00000000000 --- a/build/lib/mangle/index.js +++ /dev/null @@ -1,661 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Mangler = void 0; -const node_v8_1 = __importDefault(require("node:v8")); -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const process_1 = require("process"); -const source_map_1 = require("source-map"); -const typescript_1 = __importDefault(require("typescript")); -const url_1 = require("url"); -const workerpool_1 = __importDefault(require("workerpool")); -const staticLanguageServiceHost_1 = require("./staticLanguageServiceHost"); -const buildfile = require('../../buildfile'); -class ShortIdent { - prefix; - static _keywords = new Set(['await', 'break', 'case', 'catch', 'class', 'const', 'continue', 'debugger', - 'default', 'delete', 'do', 'else', 'export', 'extends', 'false', 'finally', 'for', 'function', 'if', - 'import', 'in', 'instanceof', 'let', 'new', 'null', 'return', 'static', 'super', 'switch', 'this', 'throw', - 'true', 'try', 'typeof', 'var', 'void', 'while', 'with', 'yield']); - static _alphabet = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890$_'.split(''); - _value = 0; - constructor(prefix) { - this.prefix = prefix; - } - next(isNameTaken) { - const candidate = this.prefix + ShortIdent.convert(this._value); - this._value++; - if (ShortIdent._keywords.has(candidate) || /^[_0-9]/.test(candidate) || isNameTaken?.(candidate)) { - // try again - return this.next(isNameTaken); - } - return candidate; - } - static convert(n) { - const base = this._alphabet.length; - let result = ''; - do { - const rest = n % base; - result += this._alphabet[rest]; - n = (n / base) | 0; - } while (n > 0); - return result; - } -} -var FieldType; -(function (FieldType) { - FieldType[FieldType["Public"] = 0] = "Public"; - FieldType[FieldType["Protected"] = 1] = "Protected"; - FieldType[FieldType["Private"] = 2] = "Private"; -})(FieldType || (FieldType = {})); -class ClassData { - fileName; - node; - fields = new Map(); - replacements; - parent; - children; - constructor(fileName, node) { - // analyse all fields (properties and methods). Find usages of all protected and - // private ones and keep track of all public ones (to prevent naming collisions) - this.fileName = fileName; - this.node = node; - const candidates = []; - for (const member of node.members) { - if (typescript_1.default.isMethodDeclaration(member)) { - // method `foo() {}` - candidates.push(member); - } - else if (typescript_1.default.isPropertyDeclaration(member)) { - // property `foo = 234` - candidates.push(member); - } - else if (typescript_1.default.isGetAccessor(member)) { - // getter: `get foo() { ... }` - candidates.push(member); - } - else if (typescript_1.default.isSetAccessor(member)) { - // setter: `set foo() { ... 
}` - candidates.push(member); - } - else if (typescript_1.default.isConstructorDeclaration(member)) { - // constructor-prop:`constructor(private foo) {}` - for (const param of member.parameters) { - if (hasModifier(param, typescript_1.default.SyntaxKind.PrivateKeyword) - || hasModifier(param, typescript_1.default.SyntaxKind.ProtectedKeyword) - || hasModifier(param, typescript_1.default.SyntaxKind.PublicKeyword) - || hasModifier(param, typescript_1.default.SyntaxKind.ReadonlyKeyword)) { - candidates.push(param); - } - } - } - } - for (const member of candidates) { - const ident = ClassData._getMemberName(member); - if (!ident) { - continue; - } - const type = ClassData._getFieldType(member); - this.fields.set(ident, { type, pos: member.name.getStart() }); - } - } - static _getMemberName(node) { - if (!node.name) { - return undefined; - } - const { name } = node; - let ident = name.getText(); - if (name.kind === typescript_1.default.SyntaxKind.ComputedPropertyName) { - if (name.expression.kind !== typescript_1.default.SyntaxKind.StringLiteral) { - // unsupported: [Symbol.foo] or [abc + 'field'] - return; - } - // ['foo'] - ident = name.expression.getText().slice(1, -1); - } - return ident; - } - static _getFieldType(node) { - if (hasModifier(node, typescript_1.default.SyntaxKind.PrivateKeyword)) { - return 2 /* FieldType.Private */; - } - else if (hasModifier(node, typescript_1.default.SyntaxKind.ProtectedKeyword)) { - return 1 /* FieldType.Protected */; - } - else { - return 0 /* FieldType.Public */; - } - } - static _shouldMangle(type) { - return type === 2 /* FieldType.Private */ - || type === 1 /* FieldType.Protected */; - } - static makeImplicitPublicActuallyPublic(data, reportViolation) { - // TS-HACK - // A subtype can make an inherited protected field public. To prevent accidential - // mangling of public fields we mark the original (protected) fields as public... 
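A concrete, made-up pair of classes makes the TS-HACK above easier to follow: a subclass may widen an inherited protected field to public, and once that happens the base field can no longer be safely renamed, so the mangler flags it and treats it as public.

// Hypothetical illustration of the case handled here; not taken from the codebase.
class Widget {
	protected size = 0;            // looks manglable: protected, only used internally
}
class PublicWidget extends Widget {
	public override size = 1;      // widens the inherited field to public
}
// External code can now rely on the original name, so renaming `size` in Widget would break it.
console.log(new PublicWidget().size);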
- for (const [name, info] of data.fields) { - if (info.type !== 0 /* FieldType.Public */) { - continue; - } - let parent = data.parent; - while (parent) { - if (parent.fields.get(name)?.type === 1 /* FieldType.Protected */) { - const parentPos = parent.node.getSourceFile().getLineAndCharacterOfPosition(parent.fields.get(name).pos); - const infoPos = data.node.getSourceFile().getLineAndCharacterOfPosition(info.pos); - reportViolation(name, `'${name}' from ${parent.fileName}:${parentPos.line + 1}`, `${data.fileName}:${infoPos.line + 1}`); - parent.fields.get(name).type = 0 /* FieldType.Public */; - } - parent = parent.parent; - } - } - } - static fillInReplacement(data) { - if (data.replacements) { - // already done - return; - } - // fill in parents first - if (data.parent) { - ClassData.fillInReplacement(data.parent); - } - data.replacements = new Map(); - const isNameTaken = (name) => { - // locally taken - if (data._isNameTaken(name)) { - return true; - } - // parents - let parent = data.parent; - while (parent) { - if (parent._isNameTaken(name)) { - return true; - } - parent = parent.parent; - } - // children - if (data.children) { - const stack = [...data.children]; - while (stack.length) { - const node = stack.pop(); - if (node._isNameTaken(name)) { - return true; - } - if (node.children) { - stack.push(...node.children); - } - } - } - return false; - }; - const identPool = new ShortIdent(''); - for (const [name, info] of data.fields) { - if (ClassData._shouldMangle(info.type)) { - const shortName = identPool.next(isNameTaken); - data.replacements.set(name, shortName); - } - } - } - // a name is taken when a field that doesn't get mangled exists or - // when the name is already in use for replacement - _isNameTaken(name) { - if (this.fields.has(name) && !ClassData._shouldMangle(this.fields.get(name).type)) { - // public field - return true; - } - if (this.replacements) { - for (const shortName of this.replacements.values()) { - if (shortName === name) { - // replaced already (happens wih super types) - return true; - } - } - } - if (isNameTakenInFile(this.node, name)) { - return true; - } - return false; - } - lookupShortName(name) { - let value = this.replacements.get(name); - let parent = this.parent; - while (parent) { - if (parent.replacements.has(name) && parent.fields.get(name)?.type === 1 /* FieldType.Protected */) { - value = parent.replacements.get(name) ?? 
value; - } - parent = parent.parent; - } - return value; - } - // --- parent chaining - addChild(child) { - this.children ??= []; - this.children.push(child); - child.parent = this; - } -} -function isNameTakenInFile(node, name) { - const identifiers = node.getSourceFile().identifiers; - if (identifiers instanceof Map) { - if (identifiers.has(name)) { - return true; - } - } - return false; -} -const skippedExportMangledFiles = [ - // Monaco - 'editorCommon', - 'editorOptions', - 'editorZoom', - 'standaloneEditor', - 'standaloneEnums', - 'standaloneLanguages', - // Generated - 'extensionsApiProposals', - // Module passed around as type - 'pfs', - // entry points - ...[ - buildfile.workerEditor, - buildfile.workerExtensionHost, - buildfile.workerNotebook, - buildfile.workerLanguageDetection, - buildfile.workerLocalFileSearch, - buildfile.workerProfileAnalysis, - buildfile.workerOutputLinks, - buildfile.workerBackgroundTokenization, - buildfile.workbenchDesktop, - buildfile.workbenchWeb, - buildfile.code, - buildfile.codeWeb - ].flat().map(x => x.name), -]; -const skippedExportMangledProjects = [ - // Test projects - 'vscode-api-tests', - // These projects use webpack to dynamically rewrite imports, which messes up our mangling - 'configuration-editing', - 'microsoft-authentication', - 'github-authentication', - 'html-language-features/server', -]; -const skippedExportMangledSymbols = [ - // Don't mangle extension entry points - 'activate', - 'deactivate', -]; -class DeclarationData { - fileName; - node; - replacementName; - constructor(fileName, node, fileIdents) { - this.fileName = fileName; - this.node = node; - // Todo: generate replacement names based on usage count, with more used names getting shorter identifiers - this.replacementName = fileIdents.next(); - } - getLocations(service) { - if (typescript_1.default.isVariableDeclaration(this.node)) { - // If the const aliases any types, we need to rename those too - const definitionResult = service.getDefinitionAndBoundSpan(this.fileName, this.node.name.getStart()); - if (definitionResult?.definitions && definitionResult.definitions.length > 1) { - return definitionResult.definitions.map(x => ({ fileName: x.fileName, offset: x.textSpan.start })); - } - } - return [{ - fileName: this.fileName, - offset: this.node.name.getStart() - }]; - } - shouldMangle(newName) { - const currentName = this.node.name.getText(); - if (currentName.startsWith('$') || skippedExportMangledSymbols.includes(currentName)) { - return false; - } - // New name is longer the existing one :'( - if (newName.length >= currentName.length) { - return false; - } - // Don't mangle functions we've explicitly opted out - if (this.node.getFullText().includes('@skipMangle')) { - return false; - } - return true; - } -} -/** - * TypeScript2TypeScript transformer that mangles all private and protected fields - * - * 1. Collect all class fields (properties, methods) - * 2. Collect all sub and super-type relations between classes - * 3. Compute replacement names for each field - * 4. Lookup rename locations for these fields - * 5. 
Prepare and apply edits - */ -class Mangler { - projectPath; - log; - config; - allClassDataByKey = new Map(); - allExportedSymbols = new Set(); - renameWorkerPool; - constructor(projectPath, log = () => { }, config) { - this.projectPath = projectPath; - this.log = log; - this.config = config; - this.renameWorkerPool = workerpool_1.default.pool(path_1.default.join(__dirname, 'renameWorker.js'), { - maxWorkers: 4, - minWorkers: 'max' - }); - } - async computeNewFileContents(strictImplicitPublicHandling) { - const service = typescript_1.default.createLanguageService(new staticLanguageServiceHost_1.StaticLanguageServiceHost(this.projectPath)); - // STEP: - // - Find all classes and their field info. - // - Find exported symbols. - const fileIdents = new ShortIdent('$'); - const visit = (node) => { - if (this.config.manglePrivateFields) { - if (typescript_1.default.isClassDeclaration(node) || typescript_1.default.isClassExpression(node)) { - const anchor = node.name ?? node; - const key = `${node.getSourceFile().fileName}|${anchor.getStart()}`; - if (this.allClassDataByKey.has(key)) { - throw new Error('DUPE?'); - } - this.allClassDataByKey.set(key, new ClassData(node.getSourceFile().fileName, node)); - } - } - if (this.config.mangleExports) { - // Find exported classes, functions, and vars - if (( - // Exported class - typescript_1.default.isClassDeclaration(node) - && hasModifier(node, typescript_1.default.SyntaxKind.ExportKeyword) - && node.name) || ( - // Exported function - typescript_1.default.isFunctionDeclaration(node) - && typescript_1.default.isSourceFile(node.parent) - && hasModifier(node, typescript_1.default.SyntaxKind.ExportKeyword) - && node.name && node.body // On named function and not on the overload - ) || ( - // Exported variable - typescript_1.default.isVariableDeclaration(node) - && hasModifier(node.parent.parent, typescript_1.default.SyntaxKind.ExportKeyword) // Variable statement is exported - && typescript_1.default.isSourceFile(node.parent.parent.parent)) - // Disabled for now because we need to figure out how to handle - // enums that are used in monaco or extHost interfaces. - /* || ( - // Exported enum - ts.isEnumDeclaration(node) - && ts.isSourceFile(node.parent) - && hasModifier(node, ts.SyntaxKind.ExportKeyword) - && !hasModifier(node, ts.SyntaxKind.ConstKeyword) // Don't bother mangling const enums because these are inlined - && node.name - */ - ) { - if (isInAmbientContext(node)) { - return; - } - this.allExportedSymbols.add(new DeclarationData(node.getSourceFile().fileName, node, fileIdents)); - } - } - typescript_1.default.forEachChild(node, visit); - }; - for (const file of service.getProgram().getSourceFiles()) { - if (!file.isDeclarationFile) { - typescript_1.default.forEachChild(file, visit); - } - } - this.log(`Done collecting. Classes: ${this.allClassDataByKey.size}. 
Exported symbols: ${this.allExportedSymbols.size}`); - // STEP: connect sub and super-types - const setupParents = (data) => { - const extendsClause = data.node.heritageClauses?.find(h => h.token === typescript_1.default.SyntaxKind.ExtendsKeyword); - if (!extendsClause) { - // no EXTENDS-clause - return; - } - const info = service.getDefinitionAtPosition(data.fileName, extendsClause.types[0].expression.getEnd()); - if (!info || info.length === 0) { - // throw new Error('SUPER type not found'); - return; - } - if (info.length !== 1) { - // inherits from declared/library type - return; - } - const [definition] = info; - const key = `${definition.fileName}|${definition.textSpan.start}`; - const parent = this.allClassDataByKey.get(key); - if (!parent) { - // throw new Error(`SUPER type not found: ${key}`); - return; - } - parent.addChild(data); - }; - for (const data of this.allClassDataByKey.values()) { - setupParents(data); - } - // STEP: make implicit public (actually protected) field really public - const violations = new Map(); - let violationsCauseFailure = false; - for (const data of this.allClassDataByKey.values()) { - ClassData.makeImplicitPublicActuallyPublic(data, (name, what, why) => { - const arr = violations.get(what); - if (arr) { - arr.push(why); - } - else { - violations.set(what, [why]); - } - if (strictImplicitPublicHandling && !strictImplicitPublicHandling.has(name)) { - violationsCauseFailure = true; - } - }); - } - for (const [why, whys] of violations) { - this.log(`WARN: ${why} became PUBLIC because of: ${whys.join(' , ')}`); - } - if (violationsCauseFailure) { - const message = 'Protected fields have been made PUBLIC. This hurts minification and is therefore not allowed. Review the WARN messages further above'; - this.log(`ERROR: ${message}`); - throw new Error(message); - } - // STEP: compute replacement names for each class - for (const data of this.allClassDataByKey.values()) { - ClassData.fillInReplacement(data); - } - this.log(`Done creating class replacements`); - // STEP: prepare rename edits - this.log(`Starting prepare rename edits`); - const editsByFile = new Map(); - const appendEdit = (fileName, edit) => { - const edits = editsByFile.get(fileName); - if (!edits) { - editsByFile.set(fileName, [edit]); - } - else { - edits.push(edit); - } - }; - const appendRename = (newText, loc) => { - appendEdit(loc.fileName, { - newText: (loc.prefixText || '') + newText + (loc.suffixText || ''), - offset: loc.textSpan.start, - length: loc.textSpan.length - }); - }; - const renameResults = []; - const queueRename = (fileName, pos, newName) => { - renameResults.push(Promise.resolve(this.renameWorkerPool.exec('findRenameLocations', [this.projectPath, fileName, pos])) - .then((locations) => ({ newName, locations }))); - }; - for (const data of this.allClassDataByKey.values()) { - if (hasModifier(data.node, typescript_1.default.SyntaxKind.DeclareKeyword)) { - continue; - } - fields: for (const [name, info] of data.fields) { - if (!ClassData._shouldMangle(info.type)) { - continue fields; - } - // TS-HACK: protected became public via 'some' child - // and because of that we might need to ignore this now - let parent = data.parent; - while (parent) { - if (parent.fields.get(name)?.type === 0 /* FieldType.Public */) { - continue fields; - } - parent = parent.parent; - } - const newName = data.lookupShortName(name); - queueRename(data.fileName, info.pos, newName); - } - } - for (const data of this.allExportedSymbols.values()) { - if (data.fileName.endsWith('.d.ts') - || 
skippedExportMangledProjects.some(proj => data.fileName.includes(proj)) - || skippedExportMangledFiles.some(file => data.fileName.endsWith(file + '.ts'))) { - continue; - } - if (!data.shouldMangle(data.replacementName)) { - continue; - } - const newText = data.replacementName; - for (const { fileName, offset } of data.getLocations(service)) { - queueRename(fileName, offset, newText); - } - } - await Promise.all(renameResults).then((result) => { - for (const { newName, locations } of result) { - for (const loc of locations) { - appendRename(newName, loc); - } - } - }); - await this.renameWorkerPool.terminate(); - this.log(`Done preparing edits: ${editsByFile.size} files`); - // STEP: apply all rename edits (per file) - const result = new Map(); - let savedBytes = 0; - for (const item of service.getProgram().getSourceFiles()) { - const { mapRoot, sourceRoot } = service.getProgram().getCompilerOptions(); - const projectDir = path_1.default.dirname(this.projectPath); - const sourceMapRoot = mapRoot ?? (0, url_1.pathToFileURL)(sourceRoot ?? projectDir).toString(); - // source maps - let generator; - let newFullText; - const edits = editsByFile.get(item.fileName); - if (!edits) { - // just copy - newFullText = item.getFullText(); - } - else { - // source map generator - const relativeFileName = normalize(path_1.default.relative(projectDir, item.fileName)); - const mappingsByLine = new Map(); - // apply renames - edits.sort((a, b) => b.offset - a.offset); - const characters = item.getFullText().split(''); - let lastEdit; - for (const edit of edits) { - if (lastEdit && lastEdit.offset === edit.offset) { - // - if (lastEdit.length !== edit.length || lastEdit.newText !== edit.newText) { - this.log('ERROR: Overlapping edit', item.fileName, edit.offset, edits); - throw new Error('OVERLAPPING edit'); - } - else { - continue; - } - } - lastEdit = edit; - const mangledName = characters.splice(edit.offset, edit.length, edit.newText).join(''); - savedBytes += mangledName.length - edit.newText.length; - // source maps - const pos = item.getLineAndCharacterOfPosition(edit.offset); - let mappings = mappingsByLine.get(pos.line); - if (!mappings) { - mappings = []; - mappingsByLine.set(pos.line, mappings); - } - mappings.unshift({ - source: relativeFileName, - original: { line: pos.line + 1, column: pos.character }, - generated: { line: pos.line + 1, column: pos.character }, - name: mangledName - }, { - source: relativeFileName, - original: { line: pos.line + 1, column: pos.character + edit.length }, - generated: { line: pos.line + 1, column: pos.character + edit.newText.length }, - }); - } - // source map generation, make sure to get mappings per line correct - generator = new source_map_1.SourceMapGenerator({ file: path_1.default.basename(item.fileName), sourceRoot: sourceMapRoot }); - generator.setSourceContent(relativeFileName, item.getFullText()); - for (const [, mappings] of mappingsByLine) { - let lineDelta = 0; - for (const mapping of mappings) { - generator.addMapping({ - ...mapping, - generated: { line: mapping.generated.line, column: mapping.generated.column - lineDelta } - }); - lineDelta += mapping.original.column - mapping.generated.column; - } - } - newFullText = characters.join(''); - } - result.set(item.fileName, { out: newFullText, sourceMap: generator?.toString() }); - } - service.dispose(); - this.renameWorkerPool.terminate(); - this.log(`Done: ${savedBytes / 1000}kb saved, memory-usage: ${JSON.stringify(node_v8_1.default.getHeapStatistics())}`); - return result; - } -} -exports.Mangler = 
Mangler; -// --- ast utils -function hasModifier(node, kind) { - const modifiers = typescript_1.default.canHaveModifiers(node) ? typescript_1.default.getModifiers(node) : undefined; - return Boolean(modifiers?.find(mode => mode.kind === kind)); -} -function isInAmbientContext(node) { - for (let p = node.parent; p; p = p.parent) { - if (typescript_1.default.isModuleDeclaration(p)) { - return true; - } - } - return false; -} -function normalize(path) { - return path.replace(/\\/g, '/'); -} -async function _run() { - const root = path_1.default.join(__dirname, '..', '..', '..'); - const projectBase = path_1.default.join(root, 'src'); - const projectPath = path_1.default.join(projectBase, 'tsconfig.json'); - const newProjectBase = path_1.default.join(path_1.default.dirname(projectBase), path_1.default.basename(projectBase) + '2'); - fs_1.default.cpSync(projectBase, newProjectBase, { recursive: true }); - const mangler = new Mangler(projectPath, console.log, { - mangleExports: true, - manglePrivateFields: true, - }); - for (const [fileName, contents] of await mangler.computeNewFileContents(new Set(['saveState']))) { - const newFilePath = path_1.default.join(newProjectBase, path_1.default.relative(projectBase, fileName)); - await fs_1.default.promises.mkdir(path_1.default.dirname(newFilePath), { recursive: true }); - await fs_1.default.promises.writeFile(newFilePath, contents.out); - if (contents.sourceMap) { - await fs_1.default.promises.writeFile(newFilePath + '.map', contents.sourceMap); - } - } -} -if (__filename === process_1.argv[1]) { - _run(); -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/build/lib/mangle/index.ts b/build/lib/mangle/index.ts index 02050d2e6a2..e20f37f4cbb 100644 --- a/build/lib/mangle/index.ts +++ b/build/lib/mangle/index.ts @@ -6,13 +6,12 @@ import v8 from 'node:v8'; import fs from 'fs'; import path from 'path'; -import { argv } from 'process'; -import { Mapping, SourceMapGenerator } from 'source-map'; +import { type Mapping, SourceMapGenerator } from 'source-map'; import ts from 'typescript'; import { pathToFileURL } from 'url'; import workerpool from 'workerpool'; -import { StaticLanguageServiceHost } from './staticLanguageServiceHost'; -const buildfile = require('../../buildfile'); +import { StaticLanguageServiceHost } from './staticLanguageServiceHost.ts'; +import * as buildfile from '../../buildfile.js'; class ShortIdent { @@ -24,10 +23,13 @@ class ShortIdent { private static _alphabet = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890$_'.split(''); private _value = 0; + private readonly prefix: string; constructor( - private readonly prefix: string - ) { } + prefix: string + ) { + this.prefix = prefix; + } next(isNameTaken?: (name: string) => boolean): string { const candidate = this.prefix + ShortIdent.convert(this._value); @@ -51,11 +53,12 @@ class ShortIdent { } } -const enum FieldType { - Public, - Protected, - Private -} +const FieldType = Object.freeze({ + Public: 0, + Protected: 1, + Private: 2 +}); +type FieldType = typeof FieldType[keyof typeof FieldType]; class ClassData { @@ -66,10 +69,15 @@ class ClassData { parent: ClassData | undefined; children: ClassData[] | undefined; + readonly fileName: string; + readonly node: ts.ClassDeclaration | ts.ClassExpression; + constructor( - readonly fileName: string, - readonly node: ts.ClassDeclaration | ts.ClassExpression, + fileName: string, + node: ts.ClassDeclaration | ts.ClassExpression, ) { + this.fileName = fileName; + this.node = node; // analyse all fields 
(properties and methods). Find usages of all protected and // private ones and keep track of all public ones (to prevent naming collisions) @@ -338,12 +346,16 @@ const skippedExportMangledSymbols = [ class DeclarationData { readonly replacementName: string; + readonly fileName: string; + readonly node: ts.FunctionDeclaration | ts.ClassDeclaration | ts.EnumDeclaration | ts.VariableDeclaration; constructor( - readonly fileName: string, - readonly node: ts.FunctionDeclaration | ts.ClassDeclaration | ts.EnumDeclaration | ts.VariableDeclaration, + fileName: string, + node: ts.FunctionDeclaration | ts.ClassDeclaration | ts.EnumDeclaration | ts.VariableDeclaration, fileIdents: ShortIdent, ) { + this.fileName = fileName; + this.node = node; // Todo: generate replacement names based on usage count, with more used names getting shorter identifiers this.replacementName = fileIdents.next(); } @@ -404,13 +416,20 @@ export class Mangler { private readonly renameWorkerPool: workerpool.WorkerPool; - constructor( - private readonly projectPath: string, - private readonly log: typeof console.log = () => { }, - private readonly config: { readonly manglePrivateFields: boolean; readonly mangleExports: boolean }, - ) { + private readonly projectPath: string; + private readonly log: typeof console.log; + private readonly config: { readonly manglePrivateFields: boolean; readonly mangleExports: boolean }; - this.renameWorkerPool = workerpool.pool(path.join(__dirname, 'renameWorker.js'), { + constructor( + projectPath: string, + log: typeof console.log = () => { }, + config: { readonly manglePrivateFields: boolean; readonly mangleExports: boolean }, + ) { + this.projectPath = projectPath; + this.log = log; + this.config = config; + + this.renameWorkerPool = workerpool.pool(path.join(import.meta.dirname, 'renameWorker.ts'), { maxWorkers: 4, minWorkers: 'max' }); @@ -753,7 +772,7 @@ function normalize(path: string): string { } async function _run() { - const root = path.join(__dirname, '..', '..', '..'); + const root = path.join(import.meta.dirname, '..', '..', '..'); const projectBase = path.join(root, 'src'); const projectPath = path.join(projectBase, 'tsconfig.json'); const newProjectBase = path.join(path.dirname(projectBase), path.basename(projectBase) + '2'); @@ -774,6 +793,6 @@ async function _run() { } } -if (__filename === argv[1]) { +if (import.meta.main) { _run(); } diff --git a/build/lib/mangle/renameWorker.js b/build/lib/mangle/renameWorker.js deleted file mode 100644 index 8bd59a4e2d5..00000000000 --- a/build/lib/mangle/renameWorker.js +++ /dev/null @@ -1,25 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const typescript_1 = __importDefault(require("typescript")); -const workerpool_1 = __importDefault(require("workerpool")); -const staticLanguageServiceHost_1 = require("./staticLanguageServiceHost"); -let service; -function findRenameLocations(projectPath, fileName, position) { - if (!service) { - service = typescript_1.default.createLanguageService(new staticLanguageServiceHost_1.StaticLanguageServiceHost(projectPath)); - } - return service.findRenameLocations(fileName, position, false, false, { - providePrefixAndSuffixTextForRename: true, - }) ?? []; -} -workerpool_1.default.worker({ - findRenameLocations -}); -//# sourceMappingURL=renameWorker.js.map \ No newline at end of file diff --git a/build/lib/mangle/renameWorker.ts b/build/lib/mangle/renameWorker.ts index 0cce5677593..b7bfb539398 100644 --- a/build/lib/mangle/renameWorker.ts +++ b/build/lib/mangle/renameWorker.ts @@ -5,7 +5,7 @@ import ts from 'typescript'; import workerpool from 'workerpool'; -import { StaticLanguageServiceHost } from './staticLanguageServiceHost'; +import { StaticLanguageServiceHost } from './staticLanguageServiceHost.ts'; let service: ts.LanguageService | undefined; diff --git a/build/lib/mangle/staticLanguageServiceHost.js b/build/lib/mangle/staticLanguageServiceHost.js deleted file mode 100644 index 7777888dd06..00000000000 --- a/build/lib/mangle/staticLanguageServiceHost.js +++ /dev/null @@ -1,68 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.StaticLanguageServiceHost = void 0; -const typescript_1 = __importDefault(require("typescript")); -const path_1 = __importDefault(require("path")); -class StaticLanguageServiceHost { - projectPath; - _cmdLine; - _scriptSnapshots = new Map(); - constructor(projectPath) { - this.projectPath = projectPath; - const existingOptions = {}; - const parsed = typescript_1.default.readConfigFile(projectPath, typescript_1.default.sys.readFile); - if (parsed.error) { - throw parsed.error; - } - this._cmdLine = typescript_1.default.parseJsonConfigFileContent(parsed.config, typescript_1.default.sys, path_1.default.dirname(projectPath), existingOptions); - if (this._cmdLine.errors.length > 0) { - throw parsed.error; - } - } - getCompilationSettings() { - return this._cmdLine.options; - } - getScriptFileNames() { - return this._cmdLine.fileNames; - } - getScriptVersion(_fileName) { - return '1'; - } - getProjectVersion() { - return '1'; - } - getScriptSnapshot(fileName) { - let result = this._scriptSnapshots.get(fileName); - if (result === undefined) { - const content = typescript_1.default.sys.readFile(fileName); - if (content === undefined) { - return undefined; - } - result = typescript_1.default.ScriptSnapshot.fromString(content); - this._scriptSnapshots.set(fileName, result); - } - return result; - } - getCurrentDirectory() { - return path_1.default.dirname(this.projectPath); - } - getDefaultLibFileName(options) { - return typescript_1.default.getDefaultLibFilePath(options); - } - directoryExists = typescript_1.default.sys.directoryExists; - getDirectories = typescript_1.default.sys.getDirectories; - fileExists = typescript_1.default.sys.fileExists; - readFile = typescript_1.default.sys.readFile; - readDirectory = typescript_1.default.sys.readDirectory; - // this is necessary to make source references work. - realpath = typescript_1.default.sys.realpath; -} -exports.StaticLanguageServiceHost = StaticLanguageServiceHost; -//# sourceMappingURL=staticLanguageServiceHost.js.map \ No newline at end of file diff --git a/build/lib/mangle/staticLanguageServiceHost.ts b/build/lib/mangle/staticLanguageServiceHost.ts index b41b4e52133..4fcf107f716 100644 --- a/build/lib/mangle/staticLanguageServiceHost.ts +++ b/build/lib/mangle/staticLanguageServiceHost.ts @@ -10,8 +10,10 @@ export class StaticLanguageServiceHost implements ts.LanguageServiceHost { private readonly _cmdLine: ts.ParsedCommandLine; private readonly _scriptSnapshots: Map = new Map(); + readonly projectPath: string; - constructor(readonly projectPath: string) { + constructor(projectPath: string) { + this.projectPath = projectPath; const existingOptions: Partial = {}; const parsed = ts.readConfigFile(projectPath, ts.sys.readFile); if (parsed.error) { diff --git a/build/lib/monaco-api.js b/build/lib/monaco-api.js deleted file mode 100644 index 1112b47370d..00000000000 --- a/build/lib/monaco-api.js +++ /dev/null @@ -1,578 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.DeclarationResolver = exports.FSProvider = exports.RECIPE_PATH = void 0; -exports.run3 = run3; -exports.execute = execute; -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const fancy_log_1 = __importDefault(require("fancy-log")); -const ansi_colors_1 = __importDefault(require("ansi-colors")); -const typeScriptLanguageServiceHost_1 = require("./typeScriptLanguageServiceHost"); -const dtsv = '3'; -const tsfmt = require('../../tsfmt.json'); -const SRC = path_1.default.join(__dirname, '../../src'); -exports.RECIPE_PATH = path_1.default.join(__dirname, '../monaco/monaco.d.ts.recipe'); -const DECLARATION_PATH = path_1.default.join(__dirname, '../../src/vs/monaco.d.ts'); -function logErr(message, ...rest) { - (0, fancy_log_1.default)(ansi_colors_1.default.yellow(`[monaco.d.ts]`), message, ...rest); -} -function isDeclaration(ts, a) { - return (a.kind === ts.SyntaxKind.InterfaceDeclaration - || a.kind === ts.SyntaxKind.EnumDeclaration - || a.kind === ts.SyntaxKind.ClassDeclaration - || a.kind === ts.SyntaxKind.TypeAliasDeclaration - || a.kind === ts.SyntaxKind.FunctionDeclaration - || a.kind === ts.SyntaxKind.ModuleDeclaration); -} -function visitTopLevelDeclarations(ts, sourceFile, visitor) { - let stop = false; - const visit = (node) => { - if (stop) { - return; - } - switch (node.kind) { - case ts.SyntaxKind.InterfaceDeclaration: - case ts.SyntaxKind.EnumDeclaration: - case ts.SyntaxKind.ClassDeclaration: - case ts.SyntaxKind.VariableStatement: - case ts.SyntaxKind.TypeAliasDeclaration: - case ts.SyntaxKind.FunctionDeclaration: - case ts.SyntaxKind.ModuleDeclaration: - stop = visitor(node); - } - if (stop) { - return; - } - ts.forEachChild(node, visit); - }; - visit(sourceFile); -} -function getAllTopLevelDeclarations(ts, sourceFile) { - const all = []; - visitTopLevelDeclarations(ts, sourceFile, (node) => { - if (node.kind === ts.SyntaxKind.InterfaceDeclaration || node.kind === ts.SyntaxKind.ClassDeclaration || node.kind === ts.SyntaxKind.ModuleDeclaration) { - const interfaceDeclaration = node; - const triviaStart = interfaceDeclaration.pos; - const triviaEnd = interfaceDeclaration.name.pos; - const triviaText = getNodeText(sourceFile, { pos: triviaStart, end: triviaEnd }); - if (triviaText.indexOf('@internal') === -1) { - all.push(node); - } - } - else { - const nodeText = getNodeText(sourceFile, node); - if (nodeText.indexOf('@internal') === -1) { - all.push(node); - } - } - return false /*continue*/; - }); - return all; -} -function getTopLevelDeclaration(ts, sourceFile, typeName) { - let result = null; - visitTopLevelDeclarations(ts, sourceFile, (node) => { - if (isDeclaration(ts, node) && node.name) { - if (node.name.text === typeName) { - result = node; - return true /*stop*/; - } - return false /*continue*/; - } - // node is ts.VariableStatement - if (getNodeText(sourceFile, node).indexOf(typeName) >= 0) { - result = node; - return true /*stop*/; - } - return false /*continue*/; - }); - return result; -} -function getNodeText(sourceFile, node) { - return sourceFile.getFullText().substring(node.pos, node.end); -} -function hasModifier(modifiers, kind) { - if (modifiers) { - for (let i = 0; i < modifiers.length; i++) { - const mod = modifiers[i]; - if (mod.kind === kind) { - return true; - } - } - } - return false; -} -function isStatic(ts, member) { - if (ts.canHaveModifiers(member)) { - return hasModifier(ts.getModifiers(member), 
ts.SyntaxKind.StaticKeyword); - } - return false; -} -function isDefaultExport(ts, declaration) { - return (hasModifier(declaration.modifiers, ts.SyntaxKind.DefaultKeyword) - && hasModifier(declaration.modifiers, ts.SyntaxKind.ExportKeyword)); -} -function getMassagedTopLevelDeclarationText(ts, sourceFile, declaration, importName, usage, enums) { - let result = getNodeText(sourceFile, declaration); - if (declaration.kind === ts.SyntaxKind.InterfaceDeclaration || declaration.kind === ts.SyntaxKind.ClassDeclaration) { - const interfaceDeclaration = declaration; - const staticTypeName = (isDefaultExport(ts, interfaceDeclaration) - ? `${importName}.default` - : `${importName}.${declaration.name.text}`); - let instanceTypeName = staticTypeName; - const typeParametersCnt = (interfaceDeclaration.typeParameters ? interfaceDeclaration.typeParameters.length : 0); - if (typeParametersCnt > 0) { - const arr = []; - for (let i = 0; i < typeParametersCnt; i++) { - arr.push('any'); - } - instanceTypeName = `${instanceTypeName}<${arr.join(',')}>`; - } - const members = interfaceDeclaration.members; - members.forEach((member) => { - try { - const memberText = getNodeText(sourceFile, member); - if (memberText.indexOf('@internal') >= 0 || memberText.indexOf('private') >= 0) { - result = result.replace(memberText, ''); - } - else { - const memberName = member.name.text; - const memberAccess = (memberName.indexOf('.') >= 0 ? `['${memberName}']` : `.${memberName}`); - if (isStatic(ts, member)) { - usage.push(`a = ${staticTypeName}${memberAccess};`); - } - else { - usage.push(`a = (<${instanceTypeName}>b)${memberAccess};`); - } - } - } - catch (err) { - // life.. - } - }); - } - result = result.replace(/export default /g, 'export '); - result = result.replace(/export declare /g, 'export '); - result = result.replace(/declare /g, ''); - const lines = result.split(/\r\n|\r|\n/); - for (let i = 0; i < lines.length; i++) { - if (/\s*\*/.test(lines[i])) { - // very likely a comment - continue; - } - lines[i] = lines[i].replace(/"/g, '\''); - } - result = lines.join('\n'); - if (declaration.kind === ts.SyntaxKind.EnumDeclaration) { - result = result.replace(/const enum/, 'enum'); - enums.push({ - enumName: declaration.name.getText(sourceFile), - text: result - }); - } - return result; -} -function format(ts, text, endl) { - const REALLY_FORMAT = false; - text = preformat(text, endl); - if (!REALLY_FORMAT) { - return text; - } - // Parse the source text - const sourceFile = ts.createSourceFile('file.ts', text, ts.ScriptTarget.Latest, /*setParentPointers*/ true); - // Get the formatting edits on the input sources - const edits = ts.formatting.formatDocument(sourceFile, getRuleProvider(tsfmt), tsfmt); - // Apply the edits on the input code - return applyEdits(text, edits); - function countParensCurly(text) { - let cnt = 0; - for (let i = 0; i < text.length; i++) { - if (text.charAt(i) === '(' || text.charAt(i) === '{') { - cnt++; - } - if (text.charAt(i) === ')' || text.charAt(i) === '}') { - cnt--; - } - } - return cnt; - } - function repeatStr(s, cnt) { - let r = ''; - for (let i = 0; i < cnt; i++) { - r += s; - } - return r; - } - function preformat(text, endl) { - const lines = text.split(endl); - let inComment = false; - let inCommentDeltaIndent = 0; - let indent = 0; - for (let i = 0; i < lines.length; i++) { - let line = lines[i].replace(/\s$/, ''); - let repeat = false; - let lineIndent = 0; - do { - repeat = false; - if (line.substring(0, 4) === ' ') { - line = line.substring(4); - lineIndent++; - repeat = true; 
- } - if (line.charAt(0) === '\t') { - line = line.substring(1); - lineIndent++; - repeat = true; - } - } while (repeat); - if (line.length === 0) { - continue; - } - if (inComment) { - if (/\*\//.test(line)) { - inComment = false; - } - lines[i] = repeatStr('\t', lineIndent + inCommentDeltaIndent) + line; - continue; - } - if (/\/\*/.test(line)) { - inComment = true; - inCommentDeltaIndent = indent - lineIndent; - lines[i] = repeatStr('\t', indent) + line; - continue; - } - const cnt = countParensCurly(line); - let shouldUnindentAfter = false; - let shouldUnindentBefore = false; - if (cnt < 0) { - if (/[({]/.test(line)) { - shouldUnindentAfter = true; - } - else { - shouldUnindentBefore = true; - } - } - else if (cnt === 0) { - shouldUnindentBefore = /^\}/.test(line); - } - let shouldIndentAfter = false; - if (cnt > 0) { - shouldIndentAfter = true; - } - else if (cnt === 0) { - shouldIndentAfter = /{$/.test(line); - } - if (shouldUnindentBefore) { - indent--; - } - lines[i] = repeatStr('\t', indent) + line; - if (shouldUnindentAfter) { - indent--; - } - if (shouldIndentAfter) { - indent++; - } - } - return lines.join(endl); - } - function getRuleProvider(options) { - // Share this between multiple formatters using the same options. - // This represents the bulk of the space the formatter uses. - return ts.formatting.getFormatContext(options); - } - function applyEdits(text, edits) { - // Apply edits in reverse on the existing text - let result = text; - for (let i = edits.length - 1; i >= 0; i--) { - const change = edits[i]; - const head = result.slice(0, change.span.start); - const tail = result.slice(change.span.start + change.span.length); - result = head + change.newText + tail; - } - return result; - } -} -function createReplacerFromDirectives(directives) { - return (str) => { - for (let i = 0; i < directives.length; i++) { - str = str.replace(directives[i][0], directives[i][1]); - } - return str; - }; -} -function createReplacer(data) { - data = data || ''; - const rawDirectives = data.split(';'); - const directives = []; - rawDirectives.forEach((rawDirective) => { - if (rawDirective.length === 0) { - return; - } - const pieces = rawDirective.split('=>'); - let findStr = pieces[0]; - const replaceStr = pieces[1]; - findStr = findStr.replace(/[\-\\\{\}\*\+\?\|\^\$\.\,\[\]\(\)\#\s]/g, '\\$&'); - findStr = '\\b' + findStr + '\\b'; - directives.push([new RegExp(findStr, 'g'), replaceStr]); - }); - return createReplacerFromDirectives(directives); -} -function generateDeclarationFile(ts, recipe, sourceFileGetter) { - const endl = /\r\n/.test(recipe) ? 
'\r\n' : '\n'; - const lines = recipe.split(endl); - const result = []; - let usageCounter = 0; - const usageImports = []; - const usage = []; - let failed = false; - usage.push(`var a: any;`); - usage.push(`var b: any;`); - const generateUsageImport = (moduleId) => { - const importName = 'm' + (++usageCounter); - usageImports.push(`import * as ${importName} from './${moduleId}';`); - return importName; - }; - const enums = []; - let version = null; - lines.forEach(line => { - if (failed) { - return; - } - const m0 = line.match(/^\/\/dtsv=(\d+)$/); - if (m0) { - version = m0[1]; - } - const m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/); - if (m1) { - const moduleId = m1[1]; - const sourceFile = sourceFileGetter(moduleId); - if (!sourceFile) { - logErr(`While handling ${line}`); - logErr(`Cannot find ${moduleId}`); - failed = true; - return; - } - const importName = generateUsageImport(moduleId); - const replacer = createReplacer(m1[2]); - const typeNames = m1[3].split(/,/); - typeNames.forEach((typeName) => { - typeName = typeName.trim(); - if (typeName.length === 0) { - return; - } - const declaration = getTopLevelDeclaration(ts, sourceFile, typeName); - if (!declaration) { - logErr(`While handling ${line}`); - logErr(`Cannot find ${typeName}`); - failed = true; - return; - } - result.push(replacer(getMassagedTopLevelDeclarationText(ts, sourceFile, declaration, importName, usage, enums))); - }); - return; - } - const m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/); - if (m2) { - const moduleId = m2[1]; - const sourceFile = sourceFileGetter(moduleId); - if (!sourceFile) { - logErr(`While handling ${line}`); - logErr(`Cannot find ${moduleId}`); - failed = true; - return; - } - const importName = generateUsageImport(moduleId); - const replacer = createReplacer(m2[2]); - const typeNames = m2[3].split(/,/); - const typesToExcludeMap = {}; - const typesToExcludeArr = []; - typeNames.forEach((typeName) => { - typeName = typeName.trim(); - if (typeName.length === 0) { - return; - } - typesToExcludeMap[typeName] = true; - typesToExcludeArr.push(typeName); - }); - getAllTopLevelDeclarations(ts, sourceFile).forEach((declaration) => { - if (isDeclaration(ts, declaration) && declaration.name) { - if (typesToExcludeMap[declaration.name.text]) { - return; - } - } - else { - // node is ts.VariableStatement - const nodeText = getNodeText(sourceFile, declaration); - for (let i = 0; i < typesToExcludeArr.length; i++) { - if (nodeText.indexOf(typesToExcludeArr[i]) >= 0) { - return; - } - } - } - result.push(replacer(getMassagedTopLevelDeclarationText(ts, sourceFile, declaration, importName, usage, enums))); - }); - return; - } - result.push(line); - }); - if (failed) { - return null; - } - if (version !== dtsv) { - if (!version) { - logErr(`gulp watch restart required. 'monaco.d.ts.recipe' is written before versioning was introduced.`); - } - else { - logErr(`gulp watch restart required. 'monaco.d.ts.recipe' v${version} does not match runtime v${dtsv}.`); - } - return null; - } - let resultTxt = result.join(endl); - resultTxt = resultTxt.replace(/\bURI\b/g, 'Uri'); - resultTxt = resultTxt.replace(/\bEvent { - if (e1.enumName < e2.enumName) { - return -1; - } - if (e1.enumName > e2.enumName) { - return 1; - } - return 0; - }); - let resultEnums = [ - '/*---------------------------------------------------------------------------------------------', - ' * Copyright (c) Microsoft Corporation. All rights reserved.', - ' * Licensed under the MIT License. 
See License.txt in the project root for license information.', - ' *--------------------------------------------------------------------------------------------*/', - '', - '// THIS IS A GENERATED FILE. DO NOT EDIT DIRECTLY.', - '' - ].concat(enums.map(e => e.text)).join(endl); - resultEnums = resultEnums.split(/\r\n|\n|\r/).join(endl); - resultEnums = format(ts, resultEnums, endl); - resultEnums = resultEnums.split(/\r\n|\n|\r/).join(endl); - return { - result: resultTxt, - usageContent: `${usageImports.join('\n')}\n\n${usage.join('\n')}`, - enums: resultEnums - }; -} -function _run(ts, sourceFileGetter) { - const recipe = fs_1.default.readFileSync(exports.RECIPE_PATH).toString(); - const t = generateDeclarationFile(ts, recipe, sourceFileGetter); - if (!t) { - return null; - } - const result = t.result; - const usageContent = t.usageContent; - const enums = t.enums; - const currentContent = fs_1.default.readFileSync(DECLARATION_PATH).toString(); - const one = currentContent.replace(/\r\n/gm, '\n'); - const other = result.replace(/\r\n/gm, '\n'); - const isTheSame = (one === other); - return { - content: result, - usageContent: usageContent, - enums: enums, - filePath: DECLARATION_PATH, - isTheSame - }; -} -class FSProvider { - existsSync(filePath) { - return fs_1.default.existsSync(filePath); - } - statSync(filePath) { - return fs_1.default.statSync(filePath); - } - readFileSync(_moduleId, filePath) { - return fs_1.default.readFileSync(filePath); - } -} -exports.FSProvider = FSProvider; -class CacheEntry { - sourceFile; - mtime; - constructor(sourceFile, mtime) { - this.sourceFile = sourceFile; - this.mtime = mtime; - } -} -class DeclarationResolver { - _fsProvider; - ts; - _sourceFileCache; - constructor(_fsProvider) { - this._fsProvider = _fsProvider; - this.ts = require('typescript'); - this._sourceFileCache = Object.create(null); - } - invalidateCache(moduleId) { - this._sourceFileCache[moduleId] = null; - } - getDeclarationSourceFile(moduleId) { - if (this._sourceFileCache[moduleId]) { - // Since we cannot trust file watching to invalidate the cache, check also the mtime - const fileName = this._getFileName(moduleId); - const mtime = this._fsProvider.statSync(fileName).mtime.getTime(); - if (this._sourceFileCache[moduleId].mtime !== mtime) { - this._sourceFileCache[moduleId] = null; - } - } - if (!this._sourceFileCache[moduleId]) { - this._sourceFileCache[moduleId] = this._getDeclarationSourceFile(moduleId); - } - return this._sourceFileCache[moduleId] ? 
this._sourceFileCache[moduleId].sourceFile : null; - } - _getFileName(moduleId) { - if (/\.d\.ts$/.test(moduleId)) { - return path_1.default.join(SRC, moduleId); - } - if (/\.js$/.test(moduleId)) { - return path_1.default.join(SRC, moduleId.replace(/\.js$/, '.ts')); - } - return path_1.default.join(SRC, `${moduleId}.ts`); - } - _getDeclarationSourceFile(moduleId) { - const fileName = this._getFileName(moduleId); - if (!this._fsProvider.existsSync(fileName)) { - return null; - } - const mtime = this._fsProvider.statSync(fileName).mtime.getTime(); - if (/\.d\.ts$/.test(moduleId)) { - // const mtime = this._fsProvider.statFileSync() - const fileContents = this._fsProvider.readFileSync(moduleId, fileName).toString(); - return new CacheEntry(this.ts.createSourceFile(fileName, fileContents, this.ts.ScriptTarget.ES5), mtime); - } - const fileContents = this._fsProvider.readFileSync(moduleId, fileName).toString(); - const fileMap = new Map([ - ['file.ts', fileContents] - ]); - const service = this.ts.createLanguageService(new typeScriptLanguageServiceHost_1.TypeScriptLanguageServiceHost(this.ts, fileMap, {})); - const text = service.getEmitOutput('file.ts', true, true).outputFiles[0].text; - return new CacheEntry(this.ts.createSourceFile(fileName, text, this.ts.ScriptTarget.ES5), mtime); - } -} -exports.DeclarationResolver = DeclarationResolver; -function run3(resolver) { - const sourceFileGetter = (moduleId) => resolver.getDeclarationSourceFile(moduleId); - return _run(resolver.ts, sourceFileGetter); -} -function execute() { - const r = run3(new DeclarationResolver(new FSProvider())); - if (!r) { - throw new Error(`monaco.d.ts generation error - Cannot continue`); - } - return r; -} -//# sourceMappingURL=monaco-api.js.map \ No newline at end of file diff --git a/build/lib/monaco-api.ts b/build/lib/monaco-api.ts index e0622bcd336..fa6c2a28c91 100644 --- a/build/lib/monaco-api.ts +++ b/build/lib/monaco-api.ts @@ -4,19 +4,19 @@ *--------------------------------------------------------------------------------------------*/ import fs from 'fs'; -import type * as ts from 'typescript'; import path from 'path'; import fancyLog from 'fancy-log'; import ansiColors from 'ansi-colors'; -import { IFileMap, TypeScriptLanguageServiceHost } from './typeScriptLanguageServiceHost'; +import { type IFileMap, TypeScriptLanguageServiceHost } from './typeScriptLanguageServiceHost.ts'; +import ts from 'typescript'; + +import tsfmt from '../../tsfmt.json' with { type: 'json' }; const dtsv = '3'; -const tsfmt = require('../../tsfmt.json'); - -const SRC = path.join(__dirname, '../../src'); -export const RECIPE_PATH = path.join(__dirname, '../monaco/monaco.d.ts.recipe'); -const DECLARATION_PATH = path.join(__dirname, '../../src/vs/monaco.d.ts'); +const SRC = path.join(import.meta.dirname, '../../src'); +export const RECIPE_PATH = path.join(import.meta.dirname, '../monaco/monaco.d.ts.recipe'); +const DECLARATION_PATH = path.join(import.meta.dirname, '../../src/vs/monaco.d.ts'); function logErr(message: any, ...rest: unknown[]): void { fancyLog(ansiColors.yellow(`[monaco.d.ts]`), message, ...rest); @@ -54,7 +54,7 @@ function visitTopLevelDeclarations(ts: typeof import('typescript'), sourceFile: case ts.SyntaxKind.TypeAliasDeclaration: case ts.SyntaxKind.FunctionDeclaration: case ts.SyntaxKind.ModuleDeclaration: - stop = visitor(node); + stop = visitor(node as TSTopLevelDeclare); } if (stop) { @@ -71,7 +71,7 @@ function getAllTopLevelDeclarations(ts: typeof import('typescript'), sourceFile: const all: TSTopLevelDeclare[] = []; 
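The import changes in the monaco-api.ts hunk above follow the same ESM pattern used elsewhere in this diff: JSON config is loaded through an import attribute instead of require(), and import.meta.dirname replaces __dirname. A small sketch under the assumption that a sibling example.config.json exists (the file name is invented; this needs TypeScript 5.3+ and Node 20.11+ or later):

import path from 'path';
import config from './example.config.json' with { type: 'json' };  // hypothetical JSON file

const HERE = import.meta.dirname;                   // was __dirname in the CommonJS build
const SOME_RECIPE_PATH = path.join(HERE, '..', 'some.recipe');      // made-up path for illustration
console.log(Object.keys(config).length, SOME_RECIPE_PATH);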
visitTopLevelDeclarations(ts, sourceFile, (node) => { if (node.kind === ts.SyntaxKind.InterfaceDeclaration || node.kind === ts.SyntaxKind.ClassDeclaration || node.kind === ts.SyntaxKind.ModuleDeclaration) { - const interfaceDeclaration = node; + const interfaceDeclaration = node as ts.InterfaceDeclaration; const triviaStart = interfaceDeclaration.pos; const triviaEnd = interfaceDeclaration.name.pos; const triviaText = getNodeText(sourceFile, { pos: triviaStart, end: triviaEnd }); @@ -145,7 +145,7 @@ function isDefaultExport(ts: typeof import('typescript'), declaration: ts.Interf function getMassagedTopLevelDeclarationText(ts: typeof import('typescript'), sourceFile: ts.SourceFile, declaration: TSTopLevelDeclare, importName: string, usage: string[], enums: IEnumEntry[]): string { let result = getNodeText(sourceFile, declaration); if (declaration.kind === ts.SyntaxKind.InterfaceDeclaration || declaration.kind === ts.SyntaxKind.ClassDeclaration) { - const interfaceDeclaration = declaration; + const interfaceDeclaration = declaration as ts.InterfaceDeclaration | ts.ClassDeclaration; const staticTypeName = ( isDefaultExport(ts, interfaceDeclaration) @@ -170,7 +170,7 @@ function getMassagedTopLevelDeclarationText(ts: typeof import('typescript'), sou if (memberText.indexOf('@internal') >= 0 || memberText.indexOf('private') >= 0) { result = result.replace(memberText, ''); } else { - const memberName = (member.name).text; + const memberName = (member.name as ts.Identifier | ts.StringLiteral).text; const memberAccess = (memberName.indexOf('.') >= 0 ? `['${memberName}']` : `.${memberName}`); if (isStatic(ts, member)) { usage.push(`a = ${staticTypeName}${memberAccess};`); @@ -602,19 +602,27 @@ export class FSProvider { } class CacheEntry { + public readonly sourceFile: ts.SourceFile; + public readonly mtime: number; + constructor( - public readonly sourceFile: ts.SourceFile, - public readonly mtime: number - ) { } + sourceFile: ts.SourceFile, + mtime: number + ) { + this.sourceFile = sourceFile; + this.mtime = mtime; + } } export class DeclarationResolver { public readonly ts: typeof import('typescript'); private _sourceFileCache: { [moduleId: string]: CacheEntry | null }; + private readonly _fsProvider: FSProvider; - constructor(private readonly _fsProvider: FSProvider) { - this.ts = require('typescript') as typeof import('typescript'); + constructor(fsProvider: FSProvider) { + this._fsProvider = fsProvider; + this.ts = ts; this._sourceFileCache = Object.create(null); } diff --git a/build/lib/nls.js b/build/lib/nls.js deleted file mode 100644 index 55984151ddb..00000000000 --- a/build/lib/nls.js +++ /dev/null @@ -1,411 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.nls = nls; -const lazy_js_1 = __importDefault(require("lazy.js")); -const event_stream_1 = require("event-stream"); -const vinyl_1 = __importDefault(require("vinyl")); -const source_map_1 = __importDefault(require("source-map")); -const path_1 = __importDefault(require("path")); -const gulp_sort_1 = __importDefault(require("gulp-sort")); -var CollectStepResult; -(function (CollectStepResult) { - CollectStepResult[CollectStepResult["Yes"] = 0] = "Yes"; - CollectStepResult[CollectStepResult["YesAndRecurse"] = 1] = "YesAndRecurse"; - CollectStepResult[CollectStepResult["No"] = 2] = "No"; - CollectStepResult[CollectStepResult["NoAndRecurse"] = 3] = "NoAndRecurse"; -})(CollectStepResult || (CollectStepResult = {})); -function collect(ts, node, fn) { - const result = []; - function loop(node) { - const stepResult = fn(node); - if (stepResult === CollectStepResult.Yes || stepResult === CollectStepResult.YesAndRecurse) { - result.push(node); - } - if (stepResult === CollectStepResult.YesAndRecurse || stepResult === CollectStepResult.NoAndRecurse) { - ts.forEachChild(node, loop); - } - } - loop(node); - return result; -} -function clone(object) { - const result = {}; - for (const id in object) { - result[id] = object[id]; - } - return result; -} -/** - * Returns a stream containing the patched JavaScript and source maps. - */ -function nls(options) { - let base; - const input = (0, event_stream_1.through)(); - const output = input - .pipe((0, gulp_sort_1.default)()) // IMPORTANT: to ensure stable NLS metadata generation, we must sort the files because NLS messages are globally extracted and indexed across all files - .pipe((0, event_stream_1.through)(function (f) { - if (!f.sourceMap) { - return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`)); - } - let source = f.sourceMap.sources[0]; - if (!source) { - return this.emit('error', new Error(`File ${f.relative} does not have a source in the source map.`)); - } - const root = f.sourceMap.sourceRoot; - if (root) { - source = path_1.default.join(root, source); - } - const typescript = f.sourceMap.sourcesContent[0]; - if (!typescript) { - return this.emit('error', new Error(`File ${f.relative} does not have the original content in the source map.`)); - } - base = f.base; - this.emit('data', _nls.patchFile(f, typescript, options)); - }, function () { - for (const file of [ - new vinyl_1.default({ - contents: Buffer.from(JSON.stringify({ - keys: _nls.moduleToNLSKeys, - messages: _nls.moduleToNLSMessages, - }, null, '\t')), - base, - path: `${base}/nls.metadata.json` - }), - new vinyl_1.default({ - contents: Buffer.from(JSON.stringify(_nls.allNLSMessages)), - base, - path: `${base}/nls.messages.json` - }), - new vinyl_1.default({ - contents: Buffer.from(JSON.stringify(_nls.allNLSModulesAndKeys)), - base, - path: `${base}/nls.keys.json` - }), - new vinyl_1.default({ - contents: Buffer.from(`/*--------------------------------------------------------- - * Copyright (C) Microsoft Corporation. All rights reserved. 
- *--------------------------------------------------------*/ -globalThis._VSCODE_NLS_MESSAGES=${JSON.stringify(_nls.allNLSMessages)};`), - base, - path: `${base}/nls.messages.js` - }) - ]) { - this.emit('data', file); - } - this.emit('end'); - })); - return (0, event_stream_1.duplex)(input, output); -} -function isImportNode(ts, node) { - return node.kind === ts.SyntaxKind.ImportDeclaration || node.kind === ts.SyntaxKind.ImportEqualsDeclaration; -} -var _nls; -(function (_nls) { - _nls.moduleToNLSKeys = {}; - _nls.moduleToNLSMessages = {}; - _nls.allNLSMessages = []; - _nls.allNLSModulesAndKeys = []; - let allNLSMessagesIndex = 0; - function fileFrom(file, contents, path = file.path) { - return new vinyl_1.default({ - contents: Buffer.from(contents), - base: file.base, - cwd: file.cwd, - path: path - }); - } - function mappedPositionFrom(source, lc) { - return { source, line: lc.line + 1, column: lc.character }; - } - function lcFrom(position) { - return { line: position.line - 1, character: position.column }; - } - class SingleFileServiceHost { - options; - filename; - file; - lib; - constructor(ts, options, filename, contents) { - this.options = options; - this.filename = filename; - this.file = ts.ScriptSnapshot.fromString(contents); - this.lib = ts.ScriptSnapshot.fromString(''); - } - getCompilationSettings = () => this.options; - getScriptFileNames = () => [this.filename]; - getScriptVersion = () => '1'; - getScriptSnapshot = (name) => name === this.filename ? this.file : this.lib; - getCurrentDirectory = () => ''; - getDefaultLibFileName = () => 'lib.d.ts'; - readFile(path, _encoding) { - if (path === this.filename) { - return this.file.getText(0, this.file.getLength()); - } - return undefined; - } - fileExists(path) { - return path === this.filename; - } - } - function isCallExpressionWithinTextSpanCollectStep(ts, textSpan, node) { - if (!ts.textSpanContainsTextSpan({ start: node.pos, length: node.end - node.pos }, textSpan)) { - return CollectStepResult.No; - } - return node.kind === ts.SyntaxKind.CallExpression ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse; - } - function analyze(ts, contents, functionName, options = {}) { - const filename = 'file.ts'; - const serviceHost = new SingleFileServiceHost(ts, Object.assign(clone(options), { noResolve: true }), filename, contents); - const service = ts.createLanguageService(serviceHost); - const sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true); - // all imports - const imports = (0, lazy_js_1.default)(collect(ts, sourceFile, n => isImportNode(ts, n) ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse)); - // import nls = require('vs/nls'); - const importEqualsDeclarations = imports - .filter(n => n.kind === ts.SyntaxKind.ImportEqualsDeclaration) - .map(n => n) - .filter(d => d.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) - .filter(d => d.moduleReference.expression.getText().endsWith(`/nls.js'`)); - // import ... 
from 'vs/nls'; - const importDeclarations = imports - .filter(n => n.kind === ts.SyntaxKind.ImportDeclaration) - .map(n => n) - .filter(d => d.moduleSpecifier.kind === ts.SyntaxKind.StringLiteral) - .filter(d => d.moduleSpecifier.getText().endsWith(`/nls.js'`)) - .filter(d => !!d.importClause && !!d.importClause.namedBindings); - // `nls.localize(...)` calls - const nlsLocalizeCallExpressions = importDeclarations - .filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport)) - .map(d => d.importClause.namedBindings.name) - .concat(importEqualsDeclarations.map(d => d.name)) - // find read-only references to `nls` - .map(n => service.getReferencesAtPosition(filename, n.pos + 1) ?? []) - .flatten() - .filter(r => !r.isWriteAccess) - // find the deepest call expressions AST nodes that contain those references - .map(r => collect(ts, sourceFile, n => isCallExpressionWithinTextSpanCollectStep(ts, r.textSpan, n))) - .map(a => (0, lazy_js_1.default)(a).last()) - .filter(n => !!n) - .map(n => n) - // only `localize` calls - .filter(n => n.expression.kind === ts.SyntaxKind.PropertyAccessExpression && n.expression.name.getText() === functionName); - // `localize` named imports - const allLocalizeImportDeclarations = importDeclarations - .filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports)) - .map(d => [].concat(d.importClause.namedBindings.elements)) - .flatten(); - // `localize` read-only references - const localizeReferences = allLocalizeImportDeclarations - .filter(d => d.name.getText() === functionName) - .map(n => service.getReferencesAtPosition(filename, n.pos + 1) ?? []) - .flatten() - .filter(r => !r.isWriteAccess); - // custom named `localize` read-only references - const namedLocalizeReferences = allLocalizeImportDeclarations - .filter(d => d.propertyName && d.propertyName.getText() === functionName) - .map(n => service.getReferencesAtPosition(filename, n.name.pos + 1) ?? 
[]) - .flatten() - .filter(r => !r.isWriteAccess); - // find the deepest call expressions AST nodes that contain those references - const localizeCallExpressions = localizeReferences - .concat(namedLocalizeReferences) - .map(r => collect(ts, sourceFile, n => isCallExpressionWithinTextSpanCollectStep(ts, r.textSpan, n))) - .map(a => (0, lazy_js_1.default)(a).last()) - .filter(n => !!n) - .map(n => n); - // collect everything - const localizeCalls = nlsLocalizeCallExpressions - .concat(localizeCallExpressions) - .map(e => e.arguments) - .filter(a => a.length > 1) - .sort((a, b) => a[0].getStart() - b[0].getStart()) - .map(a => ({ - keySpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getEnd()) }, - key: a[0].getText(), - valueSpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getEnd()) }, - value: a[1].getText() - })); - return { - localizeCalls: localizeCalls.toArray() - }; - } - class TextModel { - lines; - lineEndings; - constructor(contents) { - const regex = /\r\n|\r|\n/g; - let index = 0; - let match; - this.lines = []; - this.lineEndings = []; - while (match = regex.exec(contents)) { - this.lines.push(contents.substring(index, match.index)); - this.lineEndings.push(match[0]); - index = regex.lastIndex; - } - if (contents.length > 0) { - this.lines.push(contents.substring(index, contents.length)); - this.lineEndings.push(''); - } - } - get(index) { - return this.lines[index]; - } - set(index, line) { - this.lines[index] = line; - } - get lineCount() { - return this.lines.length; - } - /** - * Applies patch(es) to the model. - * Multiple patches must be ordered. - * Does not support patches spanning multiple lines. 
- */ - apply(patch) { - const startLineNumber = patch.span.start.line; - const endLineNumber = patch.span.end.line; - const startLine = this.lines[startLineNumber] || ''; - const endLine = this.lines[endLineNumber] || ''; - this.lines[startLineNumber] = [ - startLine.substring(0, patch.span.start.character), - patch.content, - endLine.substring(patch.span.end.character) - ].join(''); - for (let i = startLineNumber + 1; i <= endLineNumber; i++) { - this.lines[i] = ''; - } - } - toString() { - return (0, lazy_js_1.default)(this.lines).zip(this.lineEndings) - .flatten().toArray().join(''); - } - } - function patchJavascript(patches, contents) { - const model = new TextModel(contents); - // patch the localize calls - (0, lazy_js_1.default)(patches).reverse().each(p => model.apply(p)); - return model.toString(); - } - function patchSourcemap(patches, rsm, smc) { - const smg = new source_map_1.default.SourceMapGenerator({ - file: rsm.file, - sourceRoot: rsm.sourceRoot - }); - patches = patches.reverse(); - let currentLine = -1; - let currentLineDiff = 0; - let source = null; - smc.eachMapping(m => { - const patch = patches[patches.length - 1]; - const original = { line: m.originalLine, column: m.originalColumn }; - const generated = { line: m.generatedLine, column: m.generatedColumn }; - if (currentLine !== generated.line) { - currentLineDiff = 0; - } - currentLine = generated.line; - generated.column += currentLineDiff; - if (patch && m.generatedLine - 1 === patch.span.end.line && m.generatedColumn === patch.span.end.character) { - const originalLength = patch.span.end.character - patch.span.start.character; - const modifiedLength = patch.content.length; - const lengthDiff = modifiedLength - originalLength; - currentLineDiff += lengthDiff; - generated.column += lengthDiff; - patches.pop(); - } - source = rsm.sourceRoot ? path_1.default.relative(rsm.sourceRoot, m.source) : m.source; - source = source.replace(/\\/g, '/'); - smg.addMapping({ source, name: m.name, original, generated }); - }, null, source_map_1.default.SourceMapConsumer.GENERATED_ORDER); - if (source) { - smg.setSourceContent(source, smc.sourceContentFor(source)); - } - return JSON.parse(smg.toString()); - } - function parseLocalizeKeyOrValue(sourceExpression) { - // sourceValue can be "foo", 'foo', `foo` or { .... 
} - // in its evalulated form - // we want to return either the string or the object - // eslint-disable-next-line no-eval - return eval(`(${sourceExpression})`); - } - function patch(ts, typescript, javascript, sourcemap, options) { - const { localizeCalls } = analyze(ts, typescript, 'localize'); - const { localizeCalls: localize2Calls } = analyze(ts, typescript, 'localize2'); - if (localizeCalls.length === 0 && localize2Calls.length === 0) { - return { javascript, sourcemap }; - } - const nlsKeys = localizeCalls.map(lc => parseLocalizeKeyOrValue(lc.key)).concat(localize2Calls.map(lc => parseLocalizeKeyOrValue(lc.key))); - const nlsMessages = localizeCalls.map(lc => parseLocalizeKeyOrValue(lc.value)).concat(localize2Calls.map(lc => parseLocalizeKeyOrValue(lc.value))); - const smc = new source_map_1.default.SourceMapConsumer(sourcemap); - const positionFrom = mappedPositionFrom.bind(null, sourcemap.sources[0]); - // build patches - const toPatch = (c) => { - const start = lcFrom(smc.generatedPositionFor(positionFrom(c.range.start))); - const end = lcFrom(smc.generatedPositionFor(positionFrom(c.range.end))); - return { span: { start, end }, content: c.content }; - }; - const localizePatches = (0, lazy_js_1.default)(localizeCalls) - .map(lc => (options.preserveEnglish ? [ - { range: lc.keySpan, content: `${allNLSMessagesIndex++}` } // localize('key', "message") => localize(, "message") - ] : [ - { range: lc.keySpan, content: `${allNLSMessagesIndex++}` }, // localize('key', "message") => localize(, null) - { range: lc.valueSpan, content: 'null' } - ])) - .flatten() - .map(toPatch); - const localize2Patches = (0, lazy_js_1.default)(localize2Calls) - .map(lc => ({ range: lc.keySpan, content: `${allNLSMessagesIndex++}` } // localize2('key', "message") => localize(, "message") - )) - .map(toPatch); - // Sort patches by their start position - const patches = localizePatches.concat(localize2Patches).toArray().sort((a, b) => { - if (a.span.start.line < b.span.start.line) { - return -1; - } - else if (a.span.start.line > b.span.start.line) { - return 1; - } - else if (a.span.start.character < b.span.start.character) { - return -1; - } - else if (a.span.start.character > b.span.start.character) { - return 1; - } - else { - return 0; - } - }); - javascript = patchJavascript(patches, javascript); - sourcemap = patchSourcemap(patches, sourcemap, smc); - return { javascript, sourcemap, nlsKeys, nlsMessages }; - } - function patchFile(javascriptFile, typescript, options) { - const ts = require('typescript'); - // hack? - const moduleId = javascriptFile.relative - .replace(/\.js$/, '') - .replace(/\\/g, '/'); - const { javascript, sourcemap, nlsKeys, nlsMessages } = patch(ts, typescript, javascriptFile.contents.toString(), javascriptFile.sourceMap, options); - const result = fileFrom(javascriptFile, javascript); - result.sourceMap = sourcemap; - if (nlsKeys) { - _nls.moduleToNLSKeys[moduleId] = nlsKeys; - _nls.allNLSModulesAndKeys.push([moduleId, nlsKeys.map(nlsKey => typeof nlsKey === 'string' ? nlsKey : nlsKey.key)]); - } - if (nlsMessages) { - _nls.moduleToNLSMessages[moduleId] = nlsMessages; - _nls.allNLSMessages.push(...nlsMessages); - } - return result; - } - _nls.patchFile = patchFile; -})(_nls || (_nls = {})); -//# sourceMappingURL=nls.js.map \ No newline at end of file diff --git a/build/lib/nls.ts b/build/lib/nls.ts index 1cfb1cbd580..2dfdf988c47 100644 --- a/build/lib/nls.ts +++ b/build/lib/nls.ts @@ -3,9 +3,9 @@ * Licensed under the MIT License. 
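The nls.js build step deleted above (and its nls.ts source, whose diff follows) rewrites every localize('key', "message") call so the string key becomes a globally assigned numeric index; the patches it computes simply replace character ranges in the emitted JavaScript. A rough sketch of that idea, using a plain string-offset patch shape rather than the real line/character spans derived from source maps:

// Each patch replaces one character range; applying patches from the end of the
// string backwards keeps the earlier offsets valid.
interface Patch { start: number; end: number; content: string }

function applyPatches(source: string, patches: Patch[]): string {
	let result = source;
	for (const p of [...patches].sort((a, b) => b.start - a.start)) {
		result = result.slice(0, p.start) + p.content + result.slice(p.end);
	}
	return result;
}

// localize('my.key', "Hello") => localize(0, "Hello")
const code = `localize('my.key', "Hello")`;
console.log(applyPatches(code, [{ start: 9, end: 17, content: '0' }]));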
See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import type * as ts from 'typescript'; +import * as ts from 'typescript'; import lazy from 'lazy.js'; -import { duplex, through } from 'event-stream'; +import eventStream from 'event-stream'; import File from 'vinyl'; import sm from 'source-map'; import path from 'path'; @@ -13,12 +13,14 @@ import sort from 'gulp-sort'; type FileWithSourcemap = File & { sourceMap: sm.RawSourceMap }; -enum CollectStepResult { - Yes, - YesAndRecurse, - No, - NoAndRecurse -} +const CollectStepResult = Object.freeze({ + Yes: 'Yes', + YesAndRecurse: 'YesAndRecurse', + No: 'No', + NoAndRecurse: 'NoAndRecurse' +}); + +type CollectStepResult = typeof CollectStepResult[keyof typeof CollectStepResult]; function collect(ts: typeof import('typescript'), node: ts.Node, fn: (node: ts.Node) => CollectStepResult): ts.Node[] { const result: ts.Node[] = []; @@ -52,10 +54,10 @@ function clone(object: T): T { */ export function nls(options: { preserveEnglish: boolean }): NodeJS.ReadWriteStream { let base: string; - const input = through(); + const input = eventStream.through(); const output = input .pipe(sort()) // IMPORTANT: to ensure stable NLS metadata generation, we must sort the files because NLS messages are globally extracted and indexed across all files - .pipe(through(function (f: FileWithSourcemap) { + .pipe(eventStream.through(function (f: FileWithSourcemap) { if (!f.sourceMap) { return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`)); } @@ -112,19 +114,19 @@ globalThis._VSCODE_NLS_MESSAGES=${JSON.stringify(_nls.allNLSMessages)};`), this.emit('end'); })); - return duplex(input, output); + return eventStream.duplex(input, output); } function isImportNode(ts: typeof import('typescript'), node: ts.Node): boolean { return node.kind === ts.SyntaxKind.ImportDeclaration || node.kind === ts.SyntaxKind.ImportEqualsDeclaration; } -module _nls { +const _nls = (() => { - export const moduleToNLSKeys: { [name: string /* module ID */]: ILocalizeKey[] /* keys */ } = {}; - export const moduleToNLSMessages: { [name: string /* module ID */]: string[] /* messages */ } = {}; - export const allNLSMessages: string[] = []; - export const allNLSModulesAndKeys: Array<[string /* module ID */, string[] /* keys */]> = []; + const moduleToNLSKeys: { [name: string /* module ID */]: ILocalizeKey[] /* keys */ } = {}; + const moduleToNLSMessages: { [name: string /* module ID */]: string[] /* messages */ } = {}; + const allNLSMessages: string[] = []; + const allNLSModulesAndKeys: Array<[string /* module ID */, string[] /* keys */]> = []; let allNLSMessagesIndex = 0; type ILocalizeKey = string | { key: string }; // key might contain metadata for translators and then is not just a string @@ -178,8 +180,12 @@ module _nls { private file: ts.IScriptSnapshot; private lib: ts.IScriptSnapshot; + private options: ts.CompilerOptions; + private filename: string; - constructor(ts: typeof import('typescript'), private options: ts.CompilerOptions, private filename: string, contents: string) { + constructor(ts: typeof import('typescript'), options: ts.CompilerOptions, filename: string, contents: string) { + this.options = options; + this.filename = filename; this.file = ts.ScriptSnapshot.fromString(contents); this.lib = ts.ScriptSnapshot.fromString(''); } @@ -227,14 +233,14 @@ module _nls { // import nls = require('vs/nls'); const importEqualsDeclarations = imports .filter(n => 
n.kind === ts.SyntaxKind.ImportEqualsDeclaration) - .map(n => n) + .map(n => n as ts.ImportEqualsDeclaration) .filter(d => d.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) - .filter(d => (d.moduleReference).expression.getText().endsWith(`/nls.js'`)); + .filter(d => (d.moduleReference as ts.ExternalModuleReference).expression.getText().endsWith(`/nls.js'`)); // import ... from 'vs/nls'; const importDeclarations = imports .filter(n => n.kind === ts.SyntaxKind.ImportDeclaration) - .map(n => n) + .map(n => n as ts.ImportDeclaration) .filter(d => d.moduleSpecifier.kind === ts.SyntaxKind.StringLiteral) .filter(d => d.moduleSpecifier.getText().endsWith(`/nls.js'`)) .filter(d => !!d.importClause && !!d.importClause.namedBindings); @@ -242,7 +248,7 @@ module _nls { // `nls.localize(...)` calls const nlsLocalizeCallExpressions = importDeclarations .filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport)) - .map(d => (d.importClause!.namedBindings).name) + .map(d => (d.importClause!.namedBindings as ts.NamespaceImport).name) .concat(importEqualsDeclarations.map(d => d.name)) // find read-only references to `nls` @@ -254,15 +260,15 @@ module _nls { .map(r => collect(ts, sourceFile, n => isCallExpressionWithinTextSpanCollectStep(ts, r.textSpan, n))) .map(a => lazy(a).last()) .filter(n => !!n) - .map(n => n) + .map(n => n as ts.CallExpression) // only `localize` calls - .filter(n => n.expression.kind === ts.SyntaxKind.PropertyAccessExpression && (n.expression).name.getText() === functionName); + .filter(n => n.expression.kind === ts.SyntaxKind.PropertyAccessExpression && (n.expression as ts.PropertyAccessExpression).name.getText() === functionName); // `localize` named imports const allLocalizeImportDeclarations = importDeclarations .filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports)) - .map(d => ([] as any[]).concat((d.importClause!.namedBindings!).elements)) + .map(d => ([] as any[]).concat((d.importClause!.namedBindings! as ts.NamedImports).elements)) .flatten(); // `localize` read-only references @@ -285,7 +291,7 @@ module _nls { .map(r => collect(ts, sourceFile, n => isCallExpressionWithinTextSpanCollectStep(ts, r.textSpan, n))) .map(a => lazy(a).last()) .filter(n => !!n) - .map(n => n); + .map(n => n as ts.CallExpression); // collect everything const localizeCalls = nlsLocalizeCallExpressions @@ -492,8 +498,7 @@ module _nls { return { javascript, sourcemap, nlsKeys, nlsMessages }; } - export function patchFile(javascriptFile: File, typescript: string, options: { preserveEnglish: boolean }): File { - const ts = require('typescript') as typeof import('typescript'); + function patchFile(javascriptFile: File, typescript: string, options: { preserveEnglish: boolean }): File { // hack? const moduleId = javascriptFile.relative .replace(/\.js$/, '') @@ -522,4 +527,12 @@ module _nls { return result; } -} + + return { + moduleToNLSKeys, + moduleToNLSMessages, + allNLSMessages, + allNLSModulesAndKeys, + patchFile + }; +})(); diff --git a/build/lib/node.js b/build/lib/node.js index 01a381183ff..0b07708c698 100644 --- a/build/lib/node.js +++ b/build/lib/node.js @@ -1,21 +1,20 @@ -"use strict"; /*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. 
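The nls.ts hunks above avoid TypeScript constructs that emit runtime code: the CollectStepResult enum becomes a frozen object plus a union type derived from its values, and the _nls namespace becomes an immediately-invoked function that returns only its public members. A minimal sketch of the enum replacement, using a hypothetical Status value:

// The frozen object supplies the runtime values; the type alias derives the
// union of those values, so nothing beyond plain JavaScript is emitted.
const Status = Object.freeze({
	Ok: 'Ok',
	Failed: 'Failed'
});

type Status = typeof Status[keyof typeof Status];

function describe(status: Status): string {
	return status === Status.Ok ? 'succeeded' : 'failed';
}

console.log(describe(Status.Failed));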
See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const path_1 = __importDefault(require("path")); -const fs_1 = __importDefault(require("fs")); -const root = path_1.default.dirname(path_1.default.dirname(__dirname)); -const npmrcPath = path_1.default.join(root, 'remote', '.npmrc'); -const npmrc = fs_1.default.readFileSync(npmrcPath, 'utf8'); + +import path from 'path'; +import fs from 'fs'; + +const root = path.dirname(path.dirname(import.meta.dirname)); +const npmrcPath = path.join(root, 'remote', '.npmrc'); +const npmrc = fs.readFileSync(npmrcPath, 'utf8'); const version = /^target="(.*)"$/m.exec(npmrc)[1]; + const platform = process.platform; const arch = process.arch; + const node = platform === 'win32' ? 'node.exe' : 'node'; -const nodePath = path_1.default.join(root, '.build', 'node', `v${version}`, `${platform}-${arch}`, node); +const nodePath = path.join(root, '.build', 'node', `v${version}`, `${platform}-${arch}`, node); + console.log(nodePath); -//# sourceMappingURL=node.js.map \ No newline at end of file diff --git a/build/lib/node.ts b/build/lib/node.ts deleted file mode 100644 index a2fdc361aa1..00000000000 --- a/build/lib/node.ts +++ /dev/null @@ -1,20 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -import path from 'path'; -import fs from 'fs'; - -const root = path.dirname(path.dirname(__dirname)); -const npmrcPath = path.join(root, 'remote', '.npmrc'); -const npmrc = fs.readFileSync(npmrcPath, 'utf8'); -const version = /^target="(.*)"$/m.exec(npmrc)![1]; - -const platform = process.platform; -const arch = process.arch; - -const node = platform === 'win32' ? 'node.exe' : 'node'; -const nodePath = path.join(root, '.build', 'node', `v${version}`, `${platform}-${arch}`, node); - -console.log(nodePath); diff --git a/build/lib/optimize.js b/build/lib/optimize.js deleted file mode 100644 index 2ba72a97159..00000000000 --- a/build/lib/optimize.js +++ /dev/null @@ -1,231 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || (function () { - var ownKeys = function(o) { - ownKeys = Object.getOwnPropertyNames || function (o) { - var ar = []; - for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys(o); - }; - return function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); - __setModuleDefault(result, mod); - return result; - }; -})(); -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.bundleTask = bundleTask; -exports.minifyTask = minifyTask; -const event_stream_1 = __importDefault(require("event-stream")); -const gulp_1 = __importDefault(require("gulp")); -const gulp_filter_1 = __importDefault(require("gulp-filter")); -const path_1 = __importDefault(require("path")); -const fs_1 = __importDefault(require("fs")); -const pump_1 = __importDefault(require("pump")); -const vinyl_1 = __importDefault(require("vinyl")); -const bundle = __importStar(require("./bundle")); -const esbuild_1 = __importDefault(require("esbuild")); -const gulp_sourcemaps_1 = __importDefault(require("gulp-sourcemaps")); -const fancy_log_1 = __importDefault(require("fancy-log")); -const ansi_colors_1 = __importDefault(require("ansi-colors")); -const tsconfigUtils_1 = require("./tsconfigUtils"); -const REPO_ROOT_PATH = path_1.default.join(__dirname, '../..'); -const DEFAULT_FILE_HEADER = [ - '/*!--------------------------------------------------------', - ' * Copyright (C) Microsoft Corporation. All rights reserved.', - ' *--------------------------------------------------------*/' -].join('\n'); -function bundleESMTask(opts) { - const resourcesStream = event_stream_1.default.through(); // this stream will contain the resources - const bundlesStream = event_stream_1.default.through(); // this stream will contain the bundled files - const target = getBuildTarget(); - const entryPoints = opts.entryPoints.map(entryPoint => { - if (typeof entryPoint === 'string') { - return { name: path_1.default.parse(entryPoint).name }; - } - return entryPoint; - }); - const bundleAsync = async () => { - const files = []; - const tasks = []; - for (const entryPoint of entryPoints) { - (0, fancy_log_1.default)(`Bundled entry point: ${ansi_colors_1.default.yellow(entryPoint.name)}...`); - // support for 'dest' via esbuild#in/out - const dest = entryPoint.dest?.replace(/\.[^/.]+$/, '') ?? 
entryPoint.name; - // banner contents - const banner = { - js: DEFAULT_FILE_HEADER, - css: DEFAULT_FILE_HEADER - }; - // TS Boilerplate - if (!opts.skipTSBoilerplateRemoval?.(entryPoint.name)) { - const tslibPath = path_1.default.join(require.resolve('tslib'), '../tslib.es6.js'); - banner.js += await fs_1.default.promises.readFile(tslibPath, 'utf-8'); - } - const contentsMapper = { - name: 'contents-mapper', - setup(build) { - build.onLoad({ filter: /\.js$/ }, async ({ path }) => { - const contents = await fs_1.default.promises.readFile(path, 'utf-8'); - // TS Boilerplate - let newContents; - if (!opts.skipTSBoilerplateRemoval?.(entryPoint.name)) { - newContents = bundle.removeAllTSBoilerplate(contents); - } - else { - newContents = contents; - } - // File Content Mapper - const mapper = opts.fileContentMapper?.(path.replace(/\\/g, '/')); - if (mapper) { - newContents = await mapper(newContents); - } - return { contents: newContents }; - }); - } - }; - const externalOverride = { - name: 'external-override', - setup(build) { - // We inline selected modules that are we depend on on startup without - // a conditional `await import(...)` by hooking into the resolution. - build.onResolve({ filter: /^minimist$/ }, () => { - return { path: path_1.default.join(REPO_ROOT_PATH, 'node_modules', 'minimist', 'index.js'), external: false }; - }); - }, - }; - const task = esbuild_1.default.build({ - bundle: true, - packages: 'external', // "external all the things", see https://esbuild.github.io/api/#packages - platform: 'neutral', // makes esm - format: 'esm', - sourcemap: 'external', - plugins: [contentsMapper, externalOverride], - target: [target], - loader: { - '.ttf': 'file', - '.svg': 'file', - '.png': 'file', - '.sh': 'file', - }, - assetNames: 'media/[name]', // moves media assets into a sub-folder "media" - banner: entryPoint.name === 'vs/workbench/workbench.web.main' ? undefined : banner, // TODO@esm remove line when we stop supporting web-amd-esm-bridge - entryPoints: [ - { - in: path_1.default.join(REPO_ROOT_PATH, opts.src, `${entryPoint.name}.js`), - out: dest, - } - ], - outdir: path_1.default.join(REPO_ROOT_PATH, opts.src), - write: false, // enables res.outputFiles - metafile: true, // enables res.metafile - // minify: NOT enabled because we have a separate minify task that takes care of the TSLib banner as well - }).then(res => { - for (const file of res.outputFiles) { - let sourceMapFile = undefined; - if (file.path.endsWith('.js')) { - sourceMapFile = res.outputFiles.find(f => f.path === `${file.path}.map`); - } - const fileProps = { - contents: Buffer.from(file.contents), - sourceMap: sourceMapFile ? JSON.parse(sourceMapFile.text) : undefined, // support gulp-sourcemaps - path: file.path, - base: path_1.default.join(REPO_ROOT_PATH, opts.src) - }; - files.push(new vinyl_1.default(fileProps)); - } - }); - tasks.push(task); - } - await Promise.all(tasks); - return { files }; - }; - bundleAsync().then((output) => { - // bundle output (JS, CSS, SVG...) - event_stream_1.default.readArray(output.files).pipe(bundlesStream); - // forward all resources - gulp_1.default.src(opts.resources ?? 
[], { base: `${opts.src}`, allowEmpty: true }).pipe(resourcesStream); - }); - const result = event_stream_1.default.merge(bundlesStream, resourcesStream); - return result - .pipe(gulp_sourcemaps_1.default.write('./', { - sourceRoot: undefined, - addComment: true, - includeContent: true - })); -} -function bundleTask(opts) { - return function () { - return bundleESMTask(opts.esm).pipe(gulp_1.default.dest(opts.out)); - }; -} -function minifyTask(src, sourceMapBaseUrl) { - const sourceMappingURL = sourceMapBaseUrl ? ((f) => `${sourceMapBaseUrl}/${f.relative}.map`) : undefined; - const target = getBuildTarget(); - return cb => { - const svgmin = require('gulp-svgmin'); - const esbuildFilter = (0, gulp_filter_1.default)('**/*.{js,css}', { restore: true }); - const svgFilter = (0, gulp_filter_1.default)('**/*.svg', { restore: true }); - (0, pump_1.default)(gulp_1.default.src([src + '/**', '!' + src + '/**/*.map']), esbuildFilter, gulp_sourcemaps_1.default.init({ loadMaps: true }), event_stream_1.default.map((f, cb) => { - esbuild_1.default.build({ - entryPoints: [f.path], - minify: true, - sourcemap: 'external', - outdir: '.', - packages: 'external', // "external all the things", see https://esbuild.github.io/api/#packages - platform: 'neutral', // makes esm - target: [target], - write: false, - }).then(res => { - const jsOrCSSFile = res.outputFiles.find(f => /\.(js|css)$/.test(f.path)); - const sourceMapFile = res.outputFiles.find(f => /\.(js|css)\.map$/.test(f.path)); - const contents = Buffer.from(jsOrCSSFile.contents); - const unicodeMatch = contents.toString().match(/[^\x00-\xFF]+/g); - if (unicodeMatch) { - cb(new Error(`Found non-ascii character ${unicodeMatch[0]} in the minified output of ${f.path}. Non-ASCII characters in the output can cause performance problems when loading. 
Please review if you have introduced a regular expression that esbuild is not automatically converting and convert it to using unicode escape sequences.`)); - } - else { - f.contents = contents; - f.sourceMap = JSON.parse(sourceMapFile.text); - cb(undefined, f); - } - }, cb); - }), esbuildFilter.restore, svgFilter, svgmin(), svgFilter.restore, gulp_sourcemaps_1.default.write('./', { - sourceMappingURL, - sourceRoot: undefined, - includeContent: true, - addComment: true - }), gulp_1.default.dest(src + '-min'), (err) => cb(err)); - }; -} -function getBuildTarget() { - const tsconfigPath = path_1.default.join(REPO_ROOT_PATH, 'src', 'tsconfig.base.json'); - return (0, tsconfigUtils_1.getTargetStringFromTsConfig)(tsconfigPath); -} -//# sourceMappingURL=optimize.js.map \ No newline at end of file diff --git a/build/lib/optimize.ts b/build/lib/optimize.ts index 1e824a54106..2e6756eba3f 100644 --- a/build/lib/optimize.ts +++ b/build/lib/optimize.ts @@ -10,12 +10,16 @@ import path from 'path'; import fs from 'fs'; import pump from 'pump'; import VinylFile from 'vinyl'; -import * as bundle from './bundle'; +import * as bundle from './bundle.ts'; import esbuild from 'esbuild'; import sourcemaps from 'gulp-sourcemaps'; import fancyLog from 'fancy-log'; import ansiColors from 'ansi-colors'; -import { getTargetStringFromTsConfig } from './tsconfigUtils'; +import { getTargetStringFromTsConfig } from './tsconfigUtils.ts'; +import svgmin from 'gulp-svgmin'; +import { createRequire } from 'module'; + +const require = createRequire(import.meta.url); declare module 'gulp-sourcemaps' { interface WriteOptions { @@ -28,7 +32,7 @@ declare module 'gulp-sourcemaps' { } } -const REPO_ROOT_PATH = path.join(__dirname, '../..'); +const REPO_ROOT_PATH = path.join(import.meta.dirname, '../..'); export interface IBundleESMTaskOpts { /** @@ -227,7 +231,6 @@ export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) => const target = getBuildTarget(); return cb => { - const svgmin = require('gulp-svgmin') as typeof import('gulp-svgmin'); const esbuildFilter = filter('**/*.{js,css}', { restore: true }); const svgFilter = filter('**/*.svg', { restore: true }); diff --git a/build/lib/policies/basePolicy.js b/build/lib/policies/basePolicy.js deleted file mode 100644 index 5c1b919d428..00000000000 --- a/build/lib/policies/basePolicy.js +++ /dev/null @@ -1,57 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
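In the optimize.ts hunk above, the lazy require('gulp-svgmin') becomes a static import, while a module-local require created with createRequire stays available for the spots that still need CommonJS resolution. A short sketch of that escape hatch, reusing the tslib lookup visible in the bundling task above:

import path from 'path';
import { createRequire } from 'module';

// createRequire builds a require() bound to this module's location, keeping
// require()/require.resolve() usable from ESM code.
const require = createRequire(import.meta.url);

// e.g. resolving tslib's ES build on disk, as the bundle banner logic above does
const tslibPath = path.join(require.resolve('tslib'), '../tslib.es6.js');
console.log(tslibPath);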
- *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.BasePolicy = void 0; -const render_1 = require("./render"); -class BasePolicy { - type; - name; - category; - minimumVersion; - description; - moduleName; - constructor(type, name, category, minimumVersion, description, moduleName) { - this.type = type; - this.name = name; - this.category = category; - this.minimumVersion = minimumVersion; - this.description = description; - this.moduleName = moduleName; - } - renderADMLString(nlsString, translations) { - return (0, render_1.renderADMLString)(this.name, this.moduleName, nlsString, translations); - } - renderADMX(regKey) { - return [ - ``, - ` `, - ` `, - ` `, - ...this.renderADMXElements(), - ` `, - `` - ]; - } - renderADMLStrings(translations) { - return [ - `${this.name}`, - this.renderADMLString(this.description, translations) - ]; - } - renderADMLPresentation() { - return `${this.renderADMLPresentationContents()}`; - } - renderProfile() { - return [`${this.name}`, this.renderProfileValue()]; - } - renderProfileManifest(translations) { - return ` -${this.renderProfileManifestValue(translations)} -`; - } -} -exports.BasePolicy = BasePolicy; -//# sourceMappingURL=basePolicy.js.map \ No newline at end of file diff --git a/build/lib/policies/basePolicy.ts b/build/lib/policies/basePolicy.ts index f0477d244f0..7f650ba7b2e 100644 --- a/build/lib/policies/basePolicy.ts +++ b/build/lib/policies/basePolicy.ts @@ -3,18 +3,32 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import { renderADMLString } from './render'; -import { Category, LanguageTranslations, NlsString, Policy, PolicyType } from './types'; +import { renderADMLString } from './render.ts'; +import type { Category, LanguageTranslations, NlsString, Policy, PolicyType } from './types.ts'; export abstract class BasePolicy implements Policy { + readonly type: PolicyType; + readonly name: string; + readonly category: Category; + readonly minimumVersion: string; + protected description: NlsString; + protected moduleName: string; + constructor( - readonly type: PolicyType, - readonly name: string, - readonly category: Category, - readonly minimumVersion: string, - protected description: NlsString, - protected moduleName: string, - ) { } + type: PolicyType, + name: string, + category: Category, + minimumVersion: string, + description: NlsString, + moduleName: string, + ) { + this.type = type; + this.name = name; + this.category = category; + this.minimumVersion = minimumVersion; + this.description = description; + this.moduleName = moduleName; + } protected renderADMLString(nlsString: NlsString, translations?: LanguageTranslations): string { return renderADMLString(this.name, this.moduleName, nlsString, translations); diff --git a/build/lib/policies/booleanPolicy.js b/build/lib/policies/booleanPolicy.js deleted file mode 100644 index 77ea3d9a42e..00000000000 --- a/build/lib/policies/booleanPolicy.js +++ /dev/null @@ -1,52 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
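BasePolicy above, like CacheEntry and DeclarationResolver earlier, swaps constructor parameter properties for explicitly declared fields plus manual assignment; parameter properties generate an implicit assignment at runtime, which is presumably why they are avoided throughout this change. A minimal before/after sketch with hypothetical names:

// Instead of `constructor(public readonly name: string) {}`, which implies an
// emitted assignment, the field is declared and assigned explicitly.
class Labeled {
	public readonly name: string;

	constructor(name: string) {
		this.name = name;
	}
}

console.log(new Labeled('example').name);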
- *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.BooleanPolicy = void 0; -const basePolicy_1 = require("./basePolicy"); -const render_1 = require("./render"); -const types_1 = require("./types"); -class BooleanPolicy extends basePolicy_1.BasePolicy { - static from(category, policy) { - const { name, minimumVersion, localization, type } = policy; - if (type !== 'boolean') { - return undefined; - } - return new BooleanPolicy(name, { moduleName: '', name: { nlsKey: category.name.key, value: category.name.value } }, minimumVersion, { nlsKey: localization.description.key, value: localization.description.value }, ''); - } - constructor(name, category, minimumVersion, description, moduleName) { - super(types_1.PolicyType.Boolean, name, category, minimumVersion, description, moduleName); - } - renderADMXElements() { - return [ - ``, - ` `, - `` - ]; - } - renderADMLPresentationContents() { - return `${this.name}`; - } - renderJsonValue() { - return false; - } - renderProfileValue() { - return ``; - } - renderProfileManifestValue(translations) { - return `pfm_default - -pfm_description -${(0, render_1.renderProfileString)(this.name, this.moduleName, this.description, translations)} -pfm_name -${this.name} -pfm_title -${this.name} -pfm_type -boolean`; - } -} -exports.BooleanPolicy = BooleanPolicy; -//# sourceMappingURL=booleanPolicy.js.map \ No newline at end of file diff --git a/build/lib/policies/booleanPolicy.ts b/build/lib/policies/booleanPolicy.ts index 538140b3db2..59e2402eb3c 100644 --- a/build/lib/policies/booleanPolicy.ts +++ b/build/lib/policies/booleanPolicy.ts @@ -3,10 +3,10 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import { BasePolicy } from './basePolicy'; -import { CategoryDto, PolicyDto } from './policyDto'; -import { renderProfileString } from './render'; -import { Category, NlsString, PolicyType, LanguageTranslations } from './types'; +import { BasePolicy } from './basePolicy.ts'; +import type { CategoryDto, PolicyDto } from './policyDto.ts'; +import { renderProfileString } from './render.ts'; +import { type Category, type NlsString, PolicyType, type LanguageTranslations } from './types.ts'; export class BooleanPolicy extends BasePolicy { diff --git a/build/lib/policies/copyPolicyDto.js b/build/lib/policies/copyPolicyDto.js deleted file mode 100644 index a223bb4c0ef..00000000000 --- a/build/lib/policies/copyPolicyDto.js +++ /dev/null @@ -1,58 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || (function () { - var ownKeys = function(o) { - ownKeys = Object.getOwnPropertyNames || function (o) { - var ar = []; - for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys(o); - }; - return function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); - __setModuleDefault(result, mod); - return result; - }; -})(); -Object.defineProperty(exports, "__esModule", { value: true }); -const fs = __importStar(require("fs")); -const path = __importStar(require("path")); -const sourceFile = path.join(__dirname, '../../../src/vs/workbench/contrib/policyExport/common/policyDto.ts'); -const destFile = path.join(__dirname, 'policyDto.ts'); -try { - // Check if source file exists - if (!fs.existsSync(sourceFile)) { - console.error(`Error: Source file not found: ${sourceFile}`); - console.error('Please ensure policyDto.ts exists in src/vs/workbench/contrib/policyExport/common/'); - process.exit(1); - } - // Copy the file - fs.copyFileSync(sourceFile, destFile); -} -catch (error) { - console.error(`Error copying policyDto.ts: ${error.message}`); - process.exit(1); -} -//# sourceMappingURL=copyPolicyDto.js.map \ No newline at end of file diff --git a/build/lib/policies/copyPolicyDto.ts b/build/lib/policies/copyPolicyDto.ts index 4fb74456837..6bf8cd88802 100644 --- a/build/lib/policies/copyPolicyDto.ts +++ b/build/lib/policies/copyPolicyDto.ts @@ -6,8 +6,8 @@ import * as fs from 'fs'; import * as path from 'path'; -const sourceFile = path.join(__dirname, '../../../src/vs/workbench/contrib/policyExport/common/policyDto.ts'); -const destFile = path.join(__dirname, 'policyDto.ts'); +const sourceFile = path.join(import.meta.dirname, '../../../src/vs/workbench/contrib/policyExport/common/policyDto.ts'); +const destFile = path.join(import.meta.dirname, 'policyDto.ts'); try { // Check if source file exists diff --git a/build/lib/policies/numberPolicy.js b/build/lib/policies/numberPolicy.js deleted file mode 100644 index 3bc0b98d19a..00000000000 --- a/build/lib/policies/numberPolicy.js +++ /dev/null @@ -1,56 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.NumberPolicy = void 0; -const basePolicy_1 = require("./basePolicy"); -const render_1 = require("./render"); -const types_1 = require("./types"); -class NumberPolicy extends basePolicy_1.BasePolicy { - defaultValue; - static from(category, policy) { - const { type, default: defaultValue, name, minimumVersion, localization } = policy; - if (type !== 'number') { - return undefined; - } - if (typeof defaultValue !== 'number') { - throw new Error(`Missing required 'default' property.`); - } - return new NumberPolicy(name, { moduleName: '', name: { nlsKey: category.name.key, value: category.name.value } }, minimumVersion, { nlsKey: localization.description.key, value: localization.description.value }, '', defaultValue); - } - constructor(name, category, minimumVersion, description, moduleName, defaultValue) { - super(types_1.PolicyType.Number, name, category, minimumVersion, description, moduleName); - this.defaultValue = defaultValue; - } - renderADMXElements() { - return [ - `` - // `` - ]; - } - renderADMLPresentationContents() { - return `${this.name}`; - } - renderJsonValue() { - return this.defaultValue; - } - renderProfileValue() { - return `${this.defaultValue}`; - } - renderProfileManifestValue(translations) { - return `pfm_default -${this.defaultValue} -pfm_description -${(0, render_1.renderProfileString)(this.name, this.moduleName, this.description, translations)} -pfm_name -${this.name} -pfm_title -${this.name} -pfm_type -integer`; - } -} -exports.NumberPolicy = NumberPolicy; -//# sourceMappingURL=numberPolicy.js.map \ No newline at end of file diff --git a/build/lib/policies/numberPolicy.ts b/build/lib/policies/numberPolicy.ts index db4143e1f7f..3091e004677 100644 --- a/build/lib/policies/numberPolicy.ts +++ b/build/lib/policies/numberPolicy.ts @@ -3,10 +3,10 @@ * Licensed under the MIT License. See License.txt in the project root for license information. 
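BooleanPolicy.from and NumberPolicy.from above share one shape: return undefined when the policy type does not match, validate required fields, and otherwise construct the concrete policy, so a caller can try each policy class in turn. A stripped-down sketch of that dispatch, with a hypothetical, simplified DTO:

// Hypothetical DTO for illustration; the real PolicyDto carries more fields.
interface Dto { type: string; name: string; default?: unknown }

class NumberSetting {
	readonly name: string;
	readonly defaultValue: number;

	private constructor(name: string, defaultValue: number) {
		this.name = name;
		this.defaultValue = defaultValue;
	}

	// Returns undefined for non-matching types so other factories can be tried.
	static from(dto: Dto): NumberSetting | undefined {
		if (dto.type !== 'number') {
			return undefined;
		}
		if (typeof dto.default !== 'number') {
			throw new Error(`Missing required 'default' property.`);
		}
		return new NumberSetting(dto.name, dto.default);
	}
}

console.log(NumberSetting.from({ type: 'number', name: 'limit', default: 10 }));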
*--------------------------------------------------------------------------------------------*/ -import { BasePolicy } from './basePolicy'; -import { CategoryDto, PolicyDto } from './policyDto'; -import { renderProfileString } from './render'; -import { Category, NlsString, PolicyType, LanguageTranslations } from './types'; +import { BasePolicy } from './basePolicy.ts'; +import type { CategoryDto, PolicyDto } from './policyDto.ts'; +import { renderProfileString } from './render.ts'; +import { type Category, type NlsString, PolicyType, type LanguageTranslations } from './types.ts'; export class NumberPolicy extends BasePolicy { @@ -24,15 +24,18 @@ export class NumberPolicy extends BasePolicy { return new NumberPolicy(name, { moduleName: '', name: { nlsKey: category.name.key, value: category.name.value } }, minimumVersion, { nlsKey: localization.description.key, value: localization.description.value }, '', defaultValue); } + protected readonly defaultValue: number; + private constructor( name: string, category: Category, minimumVersion: string, description: NlsString, moduleName: string, - protected readonly defaultValue: number, + defaultValue: number, ) { super(PolicyType.Number, name, category, minimumVersion, description, moduleName); + this.defaultValue = defaultValue; } protected renderADMXElements(): string[] { diff --git a/build/lib/policies/objectPolicy.js b/build/lib/policies/objectPolicy.js deleted file mode 100644 index 43a7aaa3fc9..00000000000 --- a/build/lib/policies/objectPolicy.js +++ /dev/null @@ -1,49 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
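The numberPolicy.ts imports above, like the other policy modules, mark purely-type names with an inline type modifier and spell out the .ts extension on relative specifiers, so type-only imports can be erased while the remaining specifiers name the files that actually exist on disk. A small sketch of that style, assuming the Category and NlsString shapes used by the constructors above:

// PolicyType is needed at runtime, so it stays a value import; Category and
// NlsString are types only and are erased from the emitted JavaScript.
import { PolicyType, type Category, type NlsString } from './types.ts';

export function describePolicy(category: Category, description: NlsString): string {
	return `${category.name.value}: ${description.value} (${PolicyType.Number})`;
}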
- *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ObjectPolicy = void 0; -const basePolicy_1 = require("./basePolicy"); -const render_1 = require("./render"); -const types_1 = require("./types"); -class ObjectPolicy extends basePolicy_1.BasePolicy { - static from(category, policy) { - const { type, name, minimumVersion, localization } = policy; - if (type !== 'object' && type !== 'array') { - return undefined; - } - return new ObjectPolicy(name, { moduleName: '', name: { nlsKey: category.name.key, value: category.name.value } }, minimumVersion, { nlsKey: localization.description.key, value: localization.description.value }, ''); - } - constructor(name, category, minimumVersion, description, moduleName) { - super(types_1.PolicyType.Object, name, category, minimumVersion, description, moduleName); - } - renderADMXElements() { - return [``]; - } - renderADMLPresentationContents() { - return ``; - } - renderJsonValue() { - return ''; - } - renderProfileValue() { - return ``; - } - renderProfileManifestValue(translations) { - return `pfm_default - -pfm_description -${(0, render_1.renderProfileString)(this.name, this.moduleName, this.description, translations)} -pfm_name -${this.name} -pfm_title -${this.name} -pfm_type -string -`; - } -} -exports.ObjectPolicy = ObjectPolicy; -//# sourceMappingURL=objectPolicy.js.map \ No newline at end of file diff --git a/build/lib/policies/objectPolicy.ts b/build/lib/policies/objectPolicy.ts index 3bbc916636f..b565b06e8bb 100644 --- a/build/lib/policies/objectPolicy.ts +++ b/build/lib/policies/objectPolicy.ts @@ -3,10 +3,10 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import { BasePolicy } from './basePolicy'; -import { CategoryDto, PolicyDto } from './policyDto'; -import { renderProfileString } from './render'; -import { Category, NlsString, PolicyType, LanguageTranslations } from './types'; +import { BasePolicy } from './basePolicy.ts'; +import type { CategoryDto, PolicyDto } from './policyDto.ts'; +import { renderProfileString } from './render.ts'; +import { type Category, type NlsString, PolicyType, type LanguageTranslations } from './types.ts'; export class ObjectPolicy extends BasePolicy { diff --git a/build/lib/policies/policyGenerator.js b/build/lib/policies/policyGenerator.js deleted file mode 100644 index 132e55873da..00000000000 --- a/build/lib/policies/policyGenerator.js +++ /dev/null @@ -1,243 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || (function () { - var ownKeys = function(o) { - ownKeys = Object.getOwnPropertyNames || function (o) { - var ar = []; - for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys(o); - }; - return function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); - __setModuleDefault(result, mod); - return result; - }; -})(); -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const minimist_1 = __importDefault(require("minimist")); -const fs_1 = require("fs"); -const path_1 = __importDefault(require("path")); -const JSONC = __importStar(require("jsonc-parser")); -const booleanPolicy_1 = require("./booleanPolicy"); -const numberPolicy_1 = require("./numberPolicy"); -const objectPolicy_1 = require("./objectPolicy"); -const stringEnumPolicy_1 = require("./stringEnumPolicy"); -const stringPolicy_1 = require("./stringPolicy"); -const types_1 = require("./types"); -const render_1 = require("./render"); -const product = require('../../../product.json'); -const packageJson = require('../../../package.json'); -async function getSpecificNLS(resourceUrlTemplate, languageId, version) { - const resource = { - publisher: 'ms-ceintl', - name: `vscode-language-pack-${languageId}`, - version: `${version[0]}.${version[1]}.${version[2]}`, - path: 'extension/translations/main.i18n.json' - }; - const url = resourceUrlTemplate.replace(/\{([^}]+)\}/g, (_, key) => resource[key]); - const res = await fetch(url); - if (res.status !== 200) { - throw new Error(`[${res.status}] Error downloading language pack ${languageId}@${version}`); - } - const { contents: result } = await res.json(); - // TODO: support module namespacing - // Flatten all moduleName keys to empty string - const flattened = { '': {} }; - for (const moduleName in result) { - for (const nlsKey in result[moduleName]) { - flattened[''][nlsKey] = result[moduleName][nlsKey]; - } - } - return flattened; -} -function parseVersion(version) { - const [, major, minor, patch] = /^(\d+)\.(\d+)\.(\d+)/.exec(version); - return [parseInt(major), parseInt(minor), parseInt(patch)]; -} -function compareVersions(a, b) { - if (a[0] !== b[0]) { - return a[0] - b[0]; - } - if (a[1] !== b[1]) { - return a[1] - b[1]; - } - return a[2] - b[2]; -} -async function queryVersions(serviceUrl, languageId) { - const res = await fetch(`${serviceUrl}/extensionquery`, { - method: 'POST', - headers: { - 'Accept': 'application/json;api-version=3.0-preview.1', - 'Content-Type': 'application/json', - 'User-Agent': 'VS Code Build', - }, - body: JSON.stringify({ - filters: [{ criteria: [{ filterType: 7, value: `ms-ceintl.vscode-language-pack-${languageId}` }] }], - flags: 0x1 - }) - }); - if (res.status !== 200) { - throw new 
Error(`[${res.status}] Error querying for extension: ${languageId}`); - } - const result = await res.json(); - return result.results[0].extensions[0].versions.map(v => parseVersion(v.version)).sort(compareVersions); -} -async function getNLS(extensionGalleryServiceUrl, resourceUrlTemplate, languageId, version) { - const versions = await queryVersions(extensionGalleryServiceUrl, languageId); - const nextMinor = [version[0], version[1] + 1, 0]; - const compatibleVersions = versions.filter(v => compareVersions(v, nextMinor) < 0); - const latestCompatibleVersion = compatibleVersions.at(-1); // order is newest to oldest - if (!latestCompatibleVersion) { - throw new Error(`No compatible language pack found for ${languageId} for version ${version}`); - } - return await getSpecificNLS(resourceUrlTemplate, languageId, latestCompatibleVersion); -} -// TODO: add more policy types -const PolicyTypes = [ - booleanPolicy_1.BooleanPolicy, - numberPolicy_1.NumberPolicy, - stringEnumPolicy_1.StringEnumPolicy, - stringPolicy_1.StringPolicy, - objectPolicy_1.ObjectPolicy -]; -async function parsePolicies(policyDataFile) { - const contents = JSONC.parse(await fs_1.promises.readFile(policyDataFile, { encoding: 'utf8' })); - const categories = new Map(); - for (const category of contents.categories) { - categories.set(category.key, category); - } - const policies = []; - for (const policy of contents.policies) { - const category = categories.get(policy.category); - if (!category) { - throw new Error(`Unknown category: ${policy.category}`); - } - let result; - for (const policyType of PolicyTypes) { - if (result = policyType.from(category, policy)) { - break; - } - } - if (!result) { - throw new Error(`Unsupported policy type: ${policy.type} for policy ${policy.name}`); - } - policies.push(result); - } - // Sort policies first by category name, then by policy name - policies.sort((a, b) => { - const categoryCompare = a.category.name.value.localeCompare(b.category.name.value); - if (categoryCompare !== 0) { - return categoryCompare; - } - return a.name.localeCompare(b.name); - }); - return policies; -} -async function getTranslations() { - const extensionGalleryServiceUrl = product.extensionsGallery?.serviceUrl; - if (!extensionGalleryServiceUrl) { - console.warn(`Skipping policy localization: No 'extensionGallery.serviceUrl' found in 'product.json'.`); - return []; - } - const resourceUrlTemplate = product.extensionsGallery?.resourceUrlTemplate; - if (!resourceUrlTemplate) { - console.warn(`Skipping policy localization: No 'resourceUrlTemplate' found in 'product.json'.`); - return []; - } - const version = parseVersion(packageJson.version); - const languageIds = Object.keys(types_1.Languages); - return await Promise.all(languageIds.map(languageId => getNLS(extensionGalleryServiceUrl, resourceUrlTemplate, languageId, version) - .then(languageTranslations => ({ languageId, languageTranslations })))); -} -async function windowsMain(policies, translations) { - const root = '.build/policies/win32'; - const { admx, adml } = (0, render_1.renderGP)(product, policies, translations); - await fs_1.promises.rm(root, { recursive: true, force: true }); - await fs_1.promises.mkdir(root, { recursive: true }); - await fs_1.promises.writeFile(path_1.default.join(root, `${product.win32RegValueName}.admx`), admx.replace(/\r?\n/g, '\n')); - for (const { languageId, contents } of adml) { - const languagePath = path_1.default.join(root, languageId === 'en-us' ? 
'en-us' : types_1.Languages[languageId]); - await fs_1.promises.mkdir(languagePath, { recursive: true }); - await fs_1.promises.writeFile(path_1.default.join(languagePath, `${product.win32RegValueName}.adml`), contents.replace(/\r?\n/g, '\n')); - } -} -async function darwinMain(policies, translations) { - const bundleIdentifier = product.darwinBundleIdentifier; - if (!bundleIdentifier || !product.darwinProfilePayloadUUID || !product.darwinProfileUUID) { - throw new Error(`Missing required product information.`); - } - const root = '.build/policies/darwin'; - const { profile, manifests } = (0, render_1.renderMacOSPolicy)(product, policies, translations); - await fs_1.promises.rm(root, { recursive: true, force: true }); - await fs_1.promises.mkdir(root, { recursive: true }); - await fs_1.promises.writeFile(path_1.default.join(root, `${bundleIdentifier}.mobileconfig`), profile.replace(/\r?\n/g, '\n')); - for (const { languageId, contents } of manifests) { - const languagePath = path_1.default.join(root, languageId === 'en-us' ? 'en-us' : types_1.Languages[languageId]); - await fs_1.promises.mkdir(languagePath, { recursive: true }); - await fs_1.promises.writeFile(path_1.default.join(languagePath, `${bundleIdentifier}.plist`), contents.replace(/\r?\n/g, '\n')); - } -} -async function linuxMain(policies) { - const root = '.build/policies/linux'; - const policyFileContents = JSON.stringify((0, render_1.renderJsonPolicies)(policies), undefined, 4); - await fs_1.promises.rm(root, { recursive: true, force: true }); - await fs_1.promises.mkdir(root, { recursive: true }); - const jsonPath = path_1.default.join(root, `policy.json`); - await fs_1.promises.writeFile(jsonPath, policyFileContents.replace(/\r?\n/g, '\n')); -} -async function main() { - const args = (0, minimist_1.default)(process.argv.slice(2)); - if (args._.length !== 2) { - console.error(`Usage: node build/lib/policies `); - process.exit(1); - } - const policyDataFile = args._[0]; - const platform = args._[1]; - const [policies, translations] = await Promise.all([parsePolicies(policyDataFile), getTranslations()]); - if (platform === 'darwin') { - await darwinMain(policies, translations); - } - else if (platform === 'win32') { - await windowsMain(policies, translations); - } - else if (platform === 'linux') { - await linuxMain(policies); - } - else { - console.error(`Usage: node build/lib/policies `); - process.exit(1); - } -} -if (require.main === module) { - main().catch(err => { - console.error(err); - process.exit(1); - }); -} -//# sourceMappingURL=policyGenerator.js.map \ No newline at end of file diff --git a/build/lib/policies/policyGenerator.ts b/build/lib/policies/policyGenerator.ts index 50ea96b1280..e0de81f4d32 100644 --- a/build/lib/policies/policyGenerator.ts +++ b/build/lib/policies/policyGenerator.ts @@ -4,20 +4,20 @@ *--------------------------------------------------------------------------------------------*/ import minimist from 'minimist'; -import { promises as fs } from 'fs'; +import * as fs from 'fs'; import path from 'path'; -import { CategoryDto, ExportedPolicyDataDto } from './policyDto'; +import { type CategoryDto, type ExportedPolicyDataDto } from './policyDto.ts'; import * as JSONC from 'jsonc-parser'; -import { BooleanPolicy } from './booleanPolicy'; -import { NumberPolicy } from './numberPolicy'; -import { ObjectPolicy } from './objectPolicy'; -import { StringEnumPolicy } from './stringEnumPolicy'; -import { StringPolicy } from './stringPolicy'; -import { Version, LanguageTranslations, Policy, Translations, 
Languages, ProductJson } from './types'; -import { renderGP, renderJsonPolicies, renderMacOSPolicy } from './render'; +import { BooleanPolicy } from './booleanPolicy.ts'; +import { NumberPolicy } from './numberPolicy.ts'; +import { ObjectPolicy } from './objectPolicy.ts'; +import { StringEnumPolicy } from './stringEnumPolicy.ts'; +import { StringPolicy } from './stringPolicy.ts'; +import { type Version, type LanguageTranslations, type Policy, type Translations, Languages, type ProductJson } from './types.ts'; +import { renderGP, renderJsonPolicies, renderMacOSPolicy } from './render.ts'; -const product = require('../../../product.json') as ProductJson; -const packageJson = require('../../../package.json'); +const product: ProductJson = JSON.parse(fs.readFileSync(path.join(import.meta.dirname, '../../../product.json'), 'utf8')); +const packageJson = JSON.parse(fs.readFileSync(path.join(import.meta.dirname, '../../../package.json'), 'utf8')); async function getSpecificNLS(resourceUrlTemplate: string, languageId: string, version: Version): Promise { const resource = { @@ -104,7 +104,7 @@ const PolicyTypes = [ ]; async function parsePolicies(policyDataFile: string): Promise { - const contents = JSONC.parse(await fs.readFile(policyDataFile, { encoding: 'utf8' })) as ExportedPolicyDataDto; + const contents = JSONC.parse(await fs.promises.readFile(policyDataFile, { encoding: 'utf8' })) as ExportedPolicyDataDto; const categories = new Map(); for (const category of contents.categories) { categories.set(category.key, category); @@ -171,15 +171,15 @@ async function windowsMain(policies: Policy[], translations: Translations) { const root = '.build/policies/win32'; const { admx, adml } = renderGP(product, policies, translations); - await fs.rm(root, { recursive: true, force: true }); - await fs.mkdir(root, { recursive: true }); + await fs.promises.rm(root, { recursive: true, force: true }); + await fs.promises.mkdir(root, { recursive: true }); - await fs.writeFile(path.join(root, `${product.win32RegValueName}.admx`), admx.replace(/\r?\n/g, '\n')); + await fs.promises.writeFile(path.join(root, `${product.win32RegValueName}.admx`), admx.replace(/\r?\n/g, '\n')); for (const { languageId, contents } of adml) { const languagePath = path.join(root, languageId === 'en-us' ? 'en-us' : Languages[languageId as keyof typeof Languages]); - await fs.mkdir(languagePath, { recursive: true }); - await fs.writeFile(path.join(languagePath, `${product.win32RegValueName}.adml`), contents.replace(/\r?\n/g, '\n')); + await fs.promises.mkdir(languagePath, { recursive: true }); + await fs.promises.writeFile(path.join(languagePath, `${product.win32RegValueName}.adml`), contents.replace(/\r?\n/g, '\n')); } } @@ -191,14 +191,14 @@ async function darwinMain(policies: Policy[], translations: Translations) { const root = '.build/policies/darwin'; const { profile, manifests } = renderMacOSPolicy(product, policies, translations); - await fs.rm(root, { recursive: true, force: true }); - await fs.mkdir(root, { recursive: true }); - await fs.writeFile(path.join(root, `${bundleIdentifier}.mobileconfig`), profile.replace(/\r?\n/g, '\n')); + await fs.promises.rm(root, { recursive: true, force: true }); + await fs.promises.mkdir(root, { recursive: true }); + await fs.promises.writeFile(path.join(root, `${bundleIdentifier}.mobileconfig`), profile.replace(/\r?\n/g, '\n')); for (const { languageId, contents } of manifests) { const languagePath = path.join(root, languageId === 'en-us' ? 
'en-us' : Languages[languageId as keyof typeof Languages]); - await fs.mkdir(languagePath, { recursive: true }); - await fs.writeFile(path.join(languagePath, `${bundleIdentifier}.plist`), contents.replace(/\r?\n/g, '\n')); + await fs.promises.mkdir(languagePath, { recursive: true }); + await fs.promises.writeFile(path.join(languagePath, `${bundleIdentifier}.plist`), contents.replace(/\r?\n/g, '\n')); } } @@ -206,11 +206,11 @@ async function linuxMain(policies: Policy[]) { const root = '.build/policies/linux'; const policyFileContents = JSON.stringify(renderJsonPolicies(policies), undefined, 4); - await fs.rm(root, { recursive: true, force: true }); - await fs.mkdir(root, { recursive: true }); + await fs.promises.rm(root, { recursive: true, force: true }); + await fs.promises.mkdir(root, { recursive: true }); const jsonPath = path.join(root, `policy.json`); - await fs.writeFile(jsonPath, policyFileContents.replace(/\r?\n/g, '\n')); + await fs.promises.writeFile(jsonPath, policyFileContents.replace(/\r?\n/g, '\n')); } async function main() { @@ -236,7 +236,7 @@ async function main() { } } -if (require.main === module) { +if (import.meta.main) { main().catch(err => { console.error(err); process.exit(1); diff --git a/build/lib/policies/render.js b/build/lib/policies/render.js deleted file mode 100644 index 8661dab9154..00000000000 --- a/build/lib/policies/render.js +++ /dev/null @@ -1,283 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.renderADMLString = renderADMLString; -exports.renderProfileString = renderProfileString; -exports.renderADMX = renderADMX; -exports.renderADML = renderADML; -exports.renderProfileManifest = renderProfileManifest; -exports.renderMacOSPolicy = renderMacOSPolicy; -exports.renderGP = renderGP; -exports.renderJsonPolicies = renderJsonPolicies; -function renderADMLString(prefix, moduleName, nlsString, translations) { - let value; - if (translations) { - const moduleTranslations = translations[moduleName]; - if (moduleTranslations) { - value = moduleTranslations[nlsString.nlsKey]; - } - } - if (!value) { - value = nlsString.value; - } - return `${value}`; -} -function renderProfileString(_prefix, moduleName, nlsString, translations) { - let value; - if (translations) { - const moduleTranslations = translations[moduleName]; - if (moduleTranslations) { - value = moduleTranslations[nlsString.nlsKey]; - } - } - if (!value) { - value = nlsString.value; - } - return value; -} -function renderADMX(regKey, versions, categories, policies) { - versions = versions.map(v => v.replace(/\./g, '_')); - return ` - - - - - - - - ${versions.map(v => ``).join(`\n `)} - - - - - ${categories.map(c => ``).join(`\n `)} - - - ${policies.map(p => p.renderADMX(regKey)).flat().join(`\n `)} - - -`; -} -function renderADML(appName, versions, categories, policies, translations) { - return ` - - - - - - ${appName} - ${versions.map(v => `${appName} >= ${v}`).join(`\n `)} - ${categories.map(c => renderADMLString('Category', c.moduleName, c.name, translations)).join(`\n `)} - ${policies.map(p => p.renderADMLStrings(translations)).flat().join(`\n `)} - - - ${policies.map(p => 
p.renderADMLPresentation()).join(`\n `)} - - - -`; -} -function renderProfileManifest(appName, bundleIdentifier, _versions, _categories, policies, translations) { - const requiredPayloadFields = ` - - pfm_default - Configure ${appName} - pfm_name - PayloadDescription - pfm_title - Payload Description - pfm_type - string - - - pfm_default - ${appName} - pfm_name - PayloadDisplayName - pfm_require - always - pfm_title - Payload Display Name - pfm_type - string - - - pfm_default - ${bundleIdentifier} - pfm_name - PayloadIdentifier - pfm_require - always - pfm_title - Payload Identifier - pfm_type - string - - - pfm_default - ${bundleIdentifier} - pfm_name - PayloadType - pfm_require - always - pfm_title - Payload Type - pfm_type - string - - - pfm_default - - pfm_name - PayloadUUID - pfm_require - always - pfm_title - Payload UUID - pfm_type - string - - - pfm_default - 1 - pfm_name - PayloadVersion - pfm_range_list - - 1 - - pfm_require - always - pfm_title - Payload Version - pfm_type - integer - - - pfm_default - Microsoft - pfm_name - PayloadOrganization - pfm_title - Payload Organization - pfm_type - string - `; - const profileManifestSubkeys = policies.map(policy => { - return policy.renderProfileManifest(translations); - }).join(''); - return ` - - - - pfm_app_url - https://code.visualstudio.com/ - pfm_description - ${appName} Managed Settings - pfm_documentation_url - https://code.visualstudio.com/docs/setup/enterprise - pfm_domain - ${bundleIdentifier} - pfm_format_version - 1 - pfm_interaction - combined - pfm_last_modified - ${new Date().toISOString().replace(/\.\d+Z$/, 'Z')} - pfm_platforms - - macOS - - pfm_subkeys - - ${requiredPayloadFields} - ${profileManifestSubkeys} - - pfm_title - ${appName} - pfm_unique - - pfm_version - 1 - -`; -} -function renderMacOSPolicy(product, policies, translations) { - const appName = product.nameLong; - const bundleIdentifier = product.darwinBundleIdentifier; - const payloadUUID = product.darwinProfilePayloadUUID; - const UUID = product.darwinProfileUUID; - const versions = [...new Set(policies.map(p => p.minimumVersion)).values()].sort(); - const categories = [...new Set(policies.map(p => p.category))]; - const policyEntries = policies.map(policy => policy.renderProfile()) - .flat() - .map(entry => `\t\t\t\t${entry}`) - .join('\n'); - return { - profile: ` - - - - PayloadContent - - - PayloadDisplayName - ${appName} - PayloadIdentifier - ${bundleIdentifier}.${UUID} - PayloadType - ${bundleIdentifier} - PayloadUUID - ${UUID} - PayloadVersion - 1 -${policyEntries} - - - PayloadDescription - This profile manages ${appName}. 
For more information see https://code.visualstudio.com/docs/setup/enterprise - PayloadDisplayName - ${appName} - PayloadIdentifier - ${bundleIdentifier} - PayloadOrganization - Microsoft - PayloadType - Configuration - PayloadUUID - ${payloadUUID} - PayloadVersion - 1 - TargetDeviceType - 5 - -`, - manifests: [{ languageId: 'en-us', contents: renderProfileManifest(appName, bundleIdentifier, versions, categories, policies) }, - ...translations.map(({ languageId, languageTranslations }) => ({ languageId, contents: renderProfileManifest(appName, bundleIdentifier, versions, categories, policies, languageTranslations) })) - ] - }; -} -function renderGP(product, policies, translations) { - const appName = product.nameLong; - const regKey = product.win32RegValueName; - const versions = [...new Set(policies.map(p => p.minimumVersion)).values()].sort(); - const categories = [...Object.values(policies.reduce((acc, p) => ({ ...acc, [p.category.name.nlsKey]: p.category }), {}))]; - return { - admx: renderADMX(regKey, versions, categories, policies), - adml: [ - { languageId: 'en-us', contents: renderADML(appName, versions, categories, policies) }, - ...translations.map(({ languageId, languageTranslations }) => ({ languageId, contents: renderADML(appName, versions, categories, policies, languageTranslations) })) - ] - }; -} -function renderJsonPolicies(policies) { - const policyObject = {}; - for (const policy of policies) { - policyObject[policy.name] = policy.renderJsonValue(); - } - return policyObject; -} -//# sourceMappingURL=render.js.map \ No newline at end of file diff --git a/build/lib/policies/render.ts b/build/lib/policies/render.ts index 8aa4181753d..47b485d1bf0 100644 --- a/build/lib/policies/render.ts +++ b/build/lib/policies/render.ts @@ -3,7 +3,7 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import { NlsString, LanguageTranslations, Category, Policy, Translations, ProductJson } from './types'; +import type { NlsString, LanguageTranslations, Category, Policy, Translations, ProductJson } from './types.ts'; export function renderADMLString(prefix: string, moduleName: string, nlsString: NlsString, translations?: LanguageTranslations): string { let value: string | undefined; diff --git a/build/lib/policies/stringEnumPolicy.js b/build/lib/policies/stringEnumPolicy.js deleted file mode 100644 index 20403b3590a..00000000000 --- a/build/lib/policies/stringEnumPolicy.js +++ /dev/null @@ -1,74 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.StringEnumPolicy = void 0; -const basePolicy_1 = require("./basePolicy"); -const render_1 = require("./render"); -const types_1 = require("./types"); -class StringEnumPolicy extends basePolicy_1.BasePolicy { - enum_; - enumDescriptions; - static from(category, policy) { - const { type, name, minimumVersion, enum: enumValue, localization } = policy; - if (type !== 'string') { - return undefined; - } - const enum_ = enumValue; - if (!enum_) { - return undefined; - } - if (!localization.enumDescriptions || !Array.isArray(localization.enumDescriptions) || localization.enumDescriptions.length !== enum_.length) { - throw new Error(`Invalid policy data: enumDescriptions must exist and have the same length as enum_ for policy "${name}".`); - } - const enumDescriptions = localization.enumDescriptions.map((e) => ({ nlsKey: e.key, value: e.value })); - return new StringEnumPolicy(name, { moduleName: '', name: { nlsKey: category.name.key, value: category.name.value } }, minimumVersion, { nlsKey: localization.description.key, value: localization.description.value }, '', enum_, enumDescriptions); - } - constructor(name, category, minimumVersion, description, moduleName, enum_, enumDescriptions) { - super(types_1.PolicyType.StringEnum, name, category, minimumVersion, description, moduleName); - this.enum_ = enum_; - this.enumDescriptions = enumDescriptions; - } - renderADMXElements() { - return [ - ``, - ...this.enum_.map((value, index) => ` ${value}`), - `` - ]; - } - renderADMLStrings(translations) { - return [ - ...super.renderADMLStrings(translations), - ...this.enumDescriptions.map(e => this.renderADMLString(e, translations)) - ]; - } - renderADMLPresentationContents() { - return ``; - } - renderJsonValue() { - return this.enum_[0]; - } - renderProfileValue() { - return `${this.enum_[0]}`; - } - renderProfileManifestValue(translations) { - return `pfm_default -${this.enum_[0]} -pfm_description -${(0, render_1.renderProfileString)(this.name, this.moduleName, this.description, translations)} -pfm_name -${this.name} -pfm_title -${this.name} -pfm_type -string -pfm_range_list - - ${this.enum_.map(e => `${e}`).join('\n ')} -`; - } -} -exports.StringEnumPolicy = StringEnumPolicy; -//# sourceMappingURL=stringEnumPolicy.js.map \ No newline at end of file diff --git a/build/lib/policies/stringEnumPolicy.ts b/build/lib/policies/stringEnumPolicy.ts index c4adabdace7..be1312fa256 100644 --- a/build/lib/policies/stringEnumPolicy.ts +++ b/build/lib/policies/stringEnumPolicy.ts @@ -3,10 +3,10 @@ * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ -import { BasePolicy } from './basePolicy'; -import { CategoryDto, PolicyDto } from './policyDto'; -import { renderProfileString } from './render'; -import { Category, NlsString, PolicyType, LanguageTranslations } from './types'; +import { BasePolicy } from './basePolicy.ts'; +import type { CategoryDto, PolicyDto } from './policyDto.ts'; +import { renderProfileString } from './render.ts'; +import { type Category, type NlsString, PolicyType, type LanguageTranslations } from './types.ts'; export class StringEnumPolicy extends BasePolicy { @@ -38,16 +38,21 @@ export class StringEnumPolicy extends BasePolicy { ); } + protected enum_: string[]; + protected enumDescriptions: NlsString[]; + private constructor( name: string, category: Category, minimumVersion: string, description: NlsString, moduleName: string, - protected enum_: string[], - protected enumDescriptions: NlsString[], + enum_: string[], + enumDescriptions: NlsString[], ) { super(PolicyType.StringEnum, name, category, minimumVersion, description, moduleName); + this.enum_ = enum_; + this.enumDescriptions = enumDescriptions; } protected renderADMXElements(): string[] { diff --git a/build/lib/policies/stringPolicy.js b/build/lib/policies/stringPolicy.js deleted file mode 100644 index 1db9e53649b..00000000000 --- a/build/lib/policies/stringPolicy.js +++ /dev/null @@ -1,48 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.StringPolicy = void 0; -const basePolicy_1 = require("./basePolicy"); -const render_1 = require("./render"); -const types_1 = require("./types"); -class StringPolicy extends basePolicy_1.BasePolicy { - static from(category, policy) { - const { type, name, minimumVersion, localization } = policy; - if (type !== 'string') { - return undefined; - } - return new StringPolicy(name, { moduleName: '', name: { nlsKey: category.name.key, value: category.name.value } }, minimumVersion, { nlsKey: localization.description.key, value: localization.description.value }, ''); - } - constructor(name, category, minimumVersion, description, moduleName) { - super(types_1.PolicyType.String, name, category, minimumVersion, description, moduleName); - } - renderADMXElements() { - return [``]; - } - renderJsonValue() { - return ''; - } - renderADMLPresentationContents() { - return ``; - } - renderProfileValue() { - return ``; - } - renderProfileManifestValue(translations) { - return `pfm_default - -pfm_description -${(0, render_1.renderProfileString)(this.name, this.moduleName, this.description, translations)} -pfm_name -${this.name} -pfm_title -${this.name} -pfm_type -string`; - } -} -exports.StringPolicy = StringPolicy; -//# sourceMappingURL=stringPolicy.js.map \ No newline at end of file diff --git a/build/lib/policies/stringPolicy.ts b/build/lib/policies/stringPolicy.ts index e318a6165d8..e4e07e42c69 100644 --- a/build/lib/policies/stringPolicy.ts +++ b/build/lib/policies/stringPolicy.ts @@ -3,10 +3,10 @@ * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ -import { BasePolicy } from './basePolicy'; -import { CategoryDto, PolicyDto } from './policyDto'; -import { renderProfileString } from './render'; -import { Category, NlsString, PolicyType, LanguageTranslations } from './types'; +import { BasePolicy } from './basePolicy.ts'; +import type { CategoryDto, PolicyDto } from './policyDto.ts'; +import { renderProfileString } from './render.ts'; +import { PolicyType, type Category, type LanguageTranslations, type NlsString } from './types.ts'; export class StringPolicy extends BasePolicy { diff --git a/build/lib/policies/types.js b/build/lib/policies/types.js deleted file mode 100644 index 9eab676dec5..00000000000 --- a/build/lib/policies/types.js +++ /dev/null @@ -1,31 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Languages = exports.PolicyType = void 0; -var PolicyType; -(function (PolicyType) { - PolicyType["Boolean"] = "boolean"; - PolicyType["Number"] = "number"; - PolicyType["Object"] = "object"; - PolicyType["String"] = "string"; - PolicyType["StringEnum"] = "stringEnum"; -})(PolicyType || (exports.PolicyType = PolicyType = {})); -exports.Languages = { - 'fr': 'fr-fr', - 'it': 'it-it', - 'de': 'de-de', - 'es': 'es-es', - 'ru': 'ru-ru', - 'zh-hans': 'zh-cn', - 'zh-hant': 'zh-tw', - 'ja': 'ja-jp', - 'ko': 'ko-kr', - 'cs': 'cs-cz', - 'pt-br': 'pt-br', - 'tr': 'tr-tr', - 'pl': 'pl-pl', -}; -//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/build/lib/policies/types.ts b/build/lib/policies/types.ts index 861b5205f69..4fe801c23d6 100644 --- a/build/lib/policies/types.ts +++ b/build/lib/policies/types.ts @@ -36,13 +36,14 @@ export interface Category { readonly name: NlsString; } -export enum PolicyType { - Boolean = 'boolean', - Number = 'number', - Object = 'object', - String = 'string', - StringEnum = 'stringEnum', -} +export const PolicyType = Object.freeze({ + Boolean: 'boolean', + Number: 'number', + Object: 'object', + String: 'string', + StringEnum: 'stringEnum', +}); +export type PolicyType = typeof PolicyType[keyof typeof PolicyType]; export const Languages = { 'fr': 'fr-fr', diff --git a/build/lib/preLaunch.js b/build/lib/preLaunch.js deleted file mode 100644 index 75207fe50c0..00000000000 --- a/build/lib/preLaunch.js +++ /dev/null @@ -1,59 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -// @ts-check -const path_1 = __importDefault(require("path")); -const child_process_1 = require("child_process"); -const fs_1 = require("fs"); -const npm = process.platform === 'win32' ? 
'npm.cmd' : 'npm'; -const rootDir = path_1.default.resolve(__dirname, '..', '..'); -function runProcess(command, args = []) { - return new Promise((resolve, reject) => { - const child = (0, child_process_1.spawn)(command, args, { cwd: rootDir, stdio: 'inherit', env: process.env, shell: process.platform === 'win32' }); - child.on('exit', err => !err ? resolve() : process.exit(err ?? 1)); - child.on('error', reject); - }); -} -async function exists(subdir) { - try { - await fs_1.promises.stat(path_1.default.join(rootDir, subdir)); - return true; - } - catch { - return false; - } -} -async function ensureNodeModules() { - if (!(await exists('node_modules'))) { - await runProcess(npm, ['ci']); - } -} -async function getElectron() { - await runProcess(npm, ['run', 'electron']); -} -async function ensureCompiled() { - if (!(await exists('out'))) { - await runProcess(npm, ['run', 'compile']); - } -} -async function main() { - await ensureNodeModules(); - await getElectron(); - await ensureCompiled(); - // Can't require this until after dependencies are installed - const { getBuiltInExtensions } = require('./builtInExtensions'); - await getBuiltInExtensions(); -} -if (require.main === module) { - main().catch(err => { - console.error(err); - process.exit(1); - }); -} -//# sourceMappingURL=preLaunch.js.map \ No newline at end of file diff --git a/build/lib/preLaunch.ts b/build/lib/preLaunch.ts index 0c178afcb59..5e175afde28 100644 --- a/build/lib/preLaunch.ts +++ b/build/lib/preLaunch.ts @@ -2,15 +2,12 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ - -// @ts-check - import path from 'path'; import { spawn } from 'child_process'; import { promises as fs } from 'fs'; const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm'; -const rootDir = path.resolve(__dirname, '..', '..'); +const rootDir = path.resolve(import.meta.dirname, '..', '..'); function runProcess(command: string, args: ReadonlyArray = []) { return new Promise((resolve, reject) => { @@ -51,11 +48,11 @@ async function main() { await ensureCompiled(); // Can't require this until after dependencies are installed - const { getBuiltInExtensions } = require('./builtInExtensions'); + const { getBuiltInExtensions } = await import('./builtInExtensions.ts'); await getBuiltInExtensions(); } -if (require.main === module) { +if (import.meta.main) { main().catch(err => { console.error(err); process.exit(1); diff --git a/build/lib/propertyInitOrderChecker.js b/build/lib/propertyInitOrderChecker.js deleted file mode 100644 index 58921645599..00000000000 --- a/build/lib/propertyInitOrderChecker.js +++ /dev/null @@ -1,249 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || (function () { - var ownKeys = function(o) { - ownKeys = Object.getOwnPropertyNames || function (o) { - var ar = []; - for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys(o); - }; - return function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); - __setModuleDefault(result, mod); - return result; - }; -})(); -Object.defineProperty(exports, "__esModule", { value: true }); -const ts = __importStar(require("typescript")); -const path = __importStar(require("path")); -const fs = __importStar(require("fs")); -const TS_CONFIG_PATH = path.join(__dirname, '../../', 'src', 'tsconfig.json'); -// -// ############################################################################################# -// -// A custom typescript checker that ensure constructor properties are NOT used to initialize -// defined properties. This is needed for the times when `useDefineForClassFields` is gone. -// -// see https://github.com/microsoft/vscode/issues/243049, https://github.com/microsoft/vscode/issues/186726, -// https://github.com/microsoft/vscode/pull/241544 -// -// ############################################################################################# -// -var EntryKind; -(function (EntryKind) { - EntryKind[EntryKind["Span"] = 0] = "Span"; - EntryKind[EntryKind["Node"] = 1] = "Node"; - EntryKind[EntryKind["StringLiteral"] = 2] = "StringLiteral"; - EntryKind[EntryKind["SearchedLocalFoundProperty"] = 3] = "SearchedLocalFoundProperty"; - EntryKind[EntryKind["SearchedPropertyFoundLocal"] = 4] = "SearchedPropertyFoundLocal"; -})(EntryKind || (EntryKind = {})); -const cancellationToken = { - isCancellationRequested: () => false, - throwIfCancellationRequested: () => { }, -}; -const seenFiles = new Set(); -let errorCount = 0; -function createProgram(tsconfigPath) { - const tsConfig = ts.readConfigFile(tsconfigPath, ts.sys.readFile); - const configHostParser = { fileExists: fs.existsSync, readDirectory: ts.sys.readDirectory, readFile: file => fs.readFileSync(file, 'utf8'), useCaseSensitiveFileNames: process.platform === 'linux' }; - const tsConfigParsed = ts.parseJsonConfigFileContent(tsConfig.config, configHostParser, path.resolve(path.dirname(tsconfigPath)), { noEmit: true }); - const compilerHost = ts.createCompilerHost(tsConfigParsed.options, true); - return ts.createProgram(tsConfigParsed.fileNames, tsConfigParsed.options, compilerHost); -} -const program = createProgram(TS_CONFIG_PATH); -program.getTypeChecker(); -for (const file of program.getSourceFiles()) { - if (!file || file.isDeclarationFile) { - continue; - } - visit(file); -} -if (seenFiles.size) { - console.log(); - console.log(`Found ${errorCount} error${errorCount === 1 ? '' : 's'} in ${seenFiles.size} file${seenFiles.size === 1 ? 
'' : 's'}.`); - process.exit(errorCount); -} -function visit(node) { - if (ts.isParameter(node) && ts.isParameterPropertyDeclaration(node, node.parent)) { - checkParameterPropertyDeclaration(node); - } - ts.forEachChild(node, visit); -} -function checkParameterPropertyDeclaration(param) { - const uses = [...collectReferences(param.name, [])]; - if (!uses.length) { - return; - } - const sourceFile = param.getSourceFile(); - if (!seenFiles.has(sourceFile)) { - if (seenFiles.size) { - console.log(``); - } - console.log(`${formatFileName(param)}:`); - seenFiles.add(sourceFile); - } - else { - console.log(``); - } - console.log(` Parameter property '${param.name.getText()}' is used before its declaration.`); - for (const { stack, container } of uses) { - const use = stack[stack.length - 1]; - console.log(` at ${formatLocation(use)}: ${formatMember(container)} -> ${formatStack(stack)}`); - errorCount++; - } -} -function* collectReferences(node, stack, requiresInvocationDepth = 0, seen = new Set()) { - for (const use of findAllReferencesInClass(node)) { - const container = findContainer(use); - if (!container || seen.has(container) || ts.isConstructorDeclaration(container)) { - continue; - } - seen.add(container); - const nextStack = [...stack, use]; - let nextRequiresInvocationDepth = requiresInvocationDepth; - if (isInvocation(use) && nextRequiresInvocationDepth > 0) { - nextRequiresInvocationDepth--; - } - if (ts.isPropertyDeclaration(container) && nextRequiresInvocationDepth === 0) { - yield { stack: nextStack, container }; - } - else if (requiresInvocation(container)) { - nextRequiresInvocationDepth++; - } - yield* collectReferences(container.name ?? container, nextStack, nextRequiresInvocationDepth, seen); - } -} -function requiresInvocation(definition) { - return ts.isMethodDeclaration(definition) || ts.isFunctionDeclaration(definition) || ts.isFunctionExpression(definition) || ts.isArrowFunction(definition); -} -function isInvocation(use) { - let location = use; - if (ts.isPropertyAccessExpression(location.parent) && location.parent.name === location) { - location = location.parent; - } - else if (ts.isElementAccessExpression(location.parent) && location.parent.argumentExpression === location) { - location = location.parent; - } - return ts.isCallExpression(location.parent) && location.parent.expression === location - || ts.isTaggedTemplateExpression(location.parent) && location.parent.tag === location; -} -function formatFileName(node) { - const sourceFile = node.getSourceFile(); - return path.resolve(sourceFile.fileName); -} -function formatLocation(node) { - const sourceFile = node.getSourceFile(); - const { line, character } = ts.getLineAndCharacterOfPosition(sourceFile, node.pos); - return `${formatFileName(sourceFile)}(${line + 1},${character + 1})`; -} -function formatStack(stack) { - return stack.slice().reverse().map((use) => formatUse(use)).join(' -> '); -} -function formatMember(container) { - const name = container.name?.getText(); - if (name) { - const className = findClass(container)?.name?.getText(); - if (className) { - return `${className}.${name}`; - } - return name; - } - return ''; -} -function formatUse(use) { - let text = use.getText(); - if (use.parent && ts.isPropertyAccessExpression(use.parent) && use.parent.name === use) { - if (use.parent.expression.kind === ts.SyntaxKind.ThisKeyword) { - text = `this.${text}`; - } - use = use.parent; - } - else if (use.parent && ts.isElementAccessExpression(use.parent) && use.parent.argumentExpression === use) { - if 
(use.parent.expression.kind === ts.SyntaxKind.ThisKeyword) { - text = `this['${text}']`; - } - use = use.parent; - } - if (ts.isCallExpression(use.parent)) { - text = `${text}(...)`; - } - return text; -} -function findContainer(node) { - return ts.findAncestor(node, ancestor => { - switch (ancestor.kind) { - case ts.SyntaxKind.PropertyDeclaration: - case ts.SyntaxKind.MethodDeclaration: - case ts.SyntaxKind.GetAccessor: - case ts.SyntaxKind.SetAccessor: - case ts.SyntaxKind.Constructor: - case ts.SyntaxKind.ClassStaticBlockDeclaration: - case ts.SyntaxKind.ArrowFunction: - case ts.SyntaxKind.FunctionExpression: - case ts.SyntaxKind.FunctionDeclaration: - case ts.SyntaxKind.Parameter: - return true; - } - return false; - }); -} -function findClass(node) { - return ts.findAncestor(node, ts.isClassLike); -} -function* findAllReferencesInClass(node) { - const classDecl = findClass(node); - if (!classDecl) { - return []; - } - for (const ref of findAllReferences(node)) { - for (const entry of ref.references) { - if (entry.kind !== EntryKind.Node || entry.node === node) { - continue; - } - if (findClass(entry.node) === classDecl) { - yield entry.node; - } - } - } -} -function findAllReferences(node) { - const sourceFile = node.getSourceFile(); - const position = node.getStart(); - const tsInternal = ts; - const name = tsInternal.getTouchingPropertyName(sourceFile, position); - const options = { use: tsInternal.FindAllReferences.FindReferencesUse.References }; - return tsInternal.FindAllReferences.Core.getReferencedSymbolsForNode(position, name, program, [sourceFile], cancellationToken, options) ?? []; -} -var DefinitionKind; -(function (DefinitionKind) { - DefinitionKind[DefinitionKind["Symbol"] = 0] = "Symbol"; - DefinitionKind[DefinitionKind["Label"] = 1] = "Label"; - DefinitionKind[DefinitionKind["Keyword"] = 2] = "Keyword"; - DefinitionKind[DefinitionKind["This"] = 3] = "This"; - DefinitionKind[DefinitionKind["String"] = 4] = "String"; - DefinitionKind[DefinitionKind["TripleSlashReference"] = 5] = "TripleSlashReference"; -})(DefinitionKind || (DefinitionKind = {})); -//# sourceMappingURL=propertyInitOrderChecker.js.map \ No newline at end of file diff --git a/build/lib/propertyInitOrderChecker.ts b/build/lib/propertyInitOrderChecker.ts index eab53477e11..2c07f9c8757 100644 --- a/build/lib/propertyInitOrderChecker.ts +++ b/build/lib/propertyInitOrderChecker.ts @@ -8,7 +8,7 @@ import * as ts from 'typescript'; import * as path from 'path'; import * as fs from 'fs'; -const TS_CONFIG_PATH = path.join(__dirname, '../../', 'src', 'tsconfig.json'); +const TS_CONFIG_PATH = path.join(import.meta.dirname, '../../', 'src', 'tsconfig.json'); // // ############################################################################################# @@ -22,13 +22,15 @@ const TS_CONFIG_PATH = path.join(__dirname, '../../', 'src', 'tsconfig.json'); // ############################################################################################# // -enum EntryKind { - Span, - Node, - StringLiteral, - SearchedLocalFoundProperty, - SearchedPropertyFoundLocal, -} +const EntryKind = Object.freeze({ + Span: 'Span', + Node: 'Node', + StringLiteral: 'StringLiteral', + SearchedLocalFoundProperty: 'SearchedLocalFoundProperty', + SearchedPropertyFoundLocal: 'SearchedPropertyFoundLocal' +}); + +type EntryKind = typeof EntryKind[keyof typeof EntryKind]; const cancellationToken: ts.CancellationToken = { isCancellationRequested: () => false, @@ -281,24 +283,25 @@ interface SymbolAndEntries { readonly references: readonly Entry[]; 
} -const enum DefinitionKind { - Symbol, - Label, - Keyword, - This, - String, - TripleSlashReference, -} +const DefinitionKind = Object.freeze({ + Symbol: 0, + Label: 1, + Keyword: 2, + This: 3, + String: 4, + TripleSlashReference: 5, +}); +type DefinitionKind = typeof DefinitionKind[keyof typeof DefinitionKind]; type Definition = - | { readonly type: DefinitionKind.Symbol; readonly symbol: ts.Symbol } - | { readonly type: DefinitionKind.Label; readonly node: ts.Identifier } - | { readonly type: DefinitionKind.Keyword; readonly node: ts.Node } - | { readonly type: DefinitionKind.This; readonly node: ts.Node } - | { readonly type: DefinitionKind.String; readonly node: ts.StringLiteralLike } - | { readonly type: DefinitionKind.TripleSlashReference; readonly reference: ts.FileReference; readonly file: ts.SourceFile }; + | { readonly type: DefinitionKind; readonly symbol: ts.Symbol } + | { readonly type: DefinitionKind; readonly node: ts.Identifier } + | { readonly type: DefinitionKind; readonly node: ts.Node } + | { readonly type: DefinitionKind; readonly node: ts.Node } + | { readonly type: DefinitionKind; readonly node: ts.StringLiteralLike } + | { readonly type: DefinitionKind; readonly reference: ts.FileReference; readonly file: ts.SourceFile }; -type NodeEntryKind = EntryKind.Node | EntryKind.StringLiteral | EntryKind.SearchedLocalFoundProperty | EntryKind.SearchedPropertyFoundLocal; +type NodeEntryKind = typeof EntryKind.Node | typeof EntryKind.StringLiteral | typeof EntryKind.SearchedLocalFoundProperty | typeof EntryKind.SearchedPropertyFoundLocal; type Entry = NodeEntry | SpanEntry; interface ContextWithStartAndEndNode { start: ts.Node; @@ -311,7 +314,7 @@ interface NodeEntry { readonly context?: ContextNode; } interface SpanEntry { - readonly kind: EntryKind.Span; + readonly kind: typeof EntryKind.Span; readonly fileName: string; readonly textSpan: ts.TextSpan; } diff --git a/build/lib/reporter.js b/build/lib/reporter.js deleted file mode 100644 index cb7fd272d5d..00000000000 --- a/build/lib/reporter.js +++ /dev/null @@ -1,107 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createReporter = createReporter; -const event_stream_1 = __importDefault(require("event-stream")); -const fancy_log_1 = __importDefault(require("fancy-log")); -const ansi_colors_1 = __importDefault(require("ansi-colors")); -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -class ErrorLog { - id; - constructor(id) { - this.id = id; - } - allErrors = []; - startTime = null; - count = 0; - onStart() { - if (this.count++ > 0) { - return; - } - this.startTime = new Date().getTime(); - (0, fancy_log_1.default)(`Starting ${ansi_colors_1.default.green('compilation')}${this.id ? 
ansi_colors_1.default.blue(` ${this.id}`) : ''}...`); - } - onEnd() { - if (--this.count > 0) { - return; - } - this.log(); - } - log() { - const errors = this.allErrors.flat(); - const seen = new Set(); - errors.map(err => { - if (!seen.has(err)) { - seen.add(err); - (0, fancy_log_1.default)(`${ansi_colors_1.default.red('Error')}: ${err}`); - } - }); - (0, fancy_log_1.default)(`Finished ${ansi_colors_1.default.green('compilation')}${this.id ? ansi_colors_1.default.blue(` ${this.id}`) : ''} with ${errors.length} errors after ${ansi_colors_1.default.magenta((new Date().getTime() - this.startTime) + ' ms')}`); - const regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/s; - const messages = errors - .map(err => regex.exec(err)) - .filter(match => !!match) - .map(x => x) - .map(([, path, line, column, message]) => ({ path, line: parseInt(line), column: parseInt(column), message })); - try { - const logFileName = 'log' + (this.id ? `_${this.id}` : ''); - fs_1.default.writeFileSync(path_1.default.join(buildLogFolder, logFileName), JSON.stringify(messages)); - } - catch (err) { - //noop - } - } -} -const errorLogsById = new Map(); -function getErrorLog(id = '') { - let errorLog = errorLogsById.get(id); - if (!errorLog) { - errorLog = new ErrorLog(id); - errorLogsById.set(id, errorLog); - } - return errorLog; -} -const buildLogFolder = path_1.default.join(path_1.default.dirname(path_1.default.dirname(__dirname)), '.build'); -try { - fs_1.default.mkdirSync(buildLogFolder); -} -catch (err) { - // ignore -} -class ReporterError extends Error { - __reporter__ = true; -} -function createReporter(id) { - const errorLog = getErrorLog(id); - const errors = []; - errorLog.allErrors.push(errors); - const result = (err) => errors.push(err); - result.hasErrors = () => errors.length > 0; - result.end = (emitError) => { - errors.length = 0; - errorLog.onStart(); - return event_stream_1.default.through(undefined, function () { - errorLog.onEnd(); - if (emitError && errors.length > 0) { - if (!errors.__logged__) { - errorLog.log(); - } - errors.__logged__ = true; - const err = new ReporterError(`Found ${errors.length} errors`); - this.emit('error', err); - } - else { - this.emit('end'); - } - }); - }; - return result; -} -//# sourceMappingURL=reporter.js.map \ No newline at end of file diff --git a/build/lib/reporter.ts b/build/lib/reporter.ts index 5ea8cb14e74..31a0cb3945d 100644 --- a/build/lib/reporter.ts +++ b/build/lib/reporter.ts @@ -10,7 +10,10 @@ import fs from 'fs'; import path from 'path'; class ErrorLog { - constructor(public id: string) { + public id: string; + + constructor(id: string) { + this.id = id; } allErrors: string[][] = []; startTime: number | null = null; @@ -73,7 +76,7 @@ function getErrorLog(id: string = '') { return errorLog; } -const buildLogFolder = path.join(path.dirname(path.dirname(__dirname)), '.build'); +const buildLogFolder = path.join(path.dirname(path.dirname(import.meta.dirname)), '.build'); try { fs.mkdirSync(buildLogFolder); diff --git a/build/lib/snapshotLoader.js b/build/lib/snapshotLoader.js deleted file mode 100644 index 7d9b3f154f1..00000000000 --- a/build/lib/snapshotLoader.js +++ /dev/null @@ -1,58 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.snaps = void 0; -var snaps; -(function (snaps) { - const fs = require('fs'); - const path = require('path'); - const os = require('os'); - const cp = require('child_process'); - const mksnapshot = path.join(__dirname, `../../node_modules/.bin/${process.platform === 'win32' ? 'mksnapshot.cmd' : 'mksnapshot'}`); - const product = require('../../product.json'); - const arch = (process.argv.join('').match(/--arch=(.*)/) || [])[1]; - // - let loaderFilepath; - let startupBlobFilepath; - switch (process.platform) { - case 'darwin': - loaderFilepath = `VSCode-darwin/${product.nameLong}.app/Contents/Resources/app/out/vs/loader.js`; - startupBlobFilepath = `VSCode-darwin/${product.nameLong}.app/Contents/Frameworks/Electron Framework.framework/Resources/snapshot_blob.bin`; - break; - case 'win32': - case 'linux': - loaderFilepath = `VSCode-${process.platform}-${arch}/resources/app/out/vs/loader.js`; - startupBlobFilepath = `VSCode-${process.platform}-${arch}/snapshot_blob.bin`; - break; - default: - throw new Error('Unknown platform'); - } - loaderFilepath = path.join(__dirname, '../../../', loaderFilepath); - startupBlobFilepath = path.join(__dirname, '../../../', startupBlobFilepath); - snapshotLoader(loaderFilepath, startupBlobFilepath); - function snapshotLoader(loaderFilepath, startupBlobFilepath) { - const inputFile = fs.readFileSync(loaderFilepath); - const wrappedInputFile = ` - var Monaco_Loader_Init; - (function() { - var doNotInitLoader = true; - ${inputFile.toString()}; - Monaco_Loader_Init = function() { - AMDLoader.init(); - CSSLoaderPlugin.init(); - NLSLoaderPlugin.init(); - - return { define, require }; - } - })(); - `; - const wrappedInputFilepath = path.join(os.tmpdir(), 'wrapped-loader.js'); - console.log(wrappedInputFilepath); - fs.writeFileSync(wrappedInputFilepath, wrappedInputFile); - cp.execFileSync(mksnapshot, [wrappedInputFilepath, `--startup_blob`, startupBlobFilepath]); - } -})(snaps || (exports.snaps = snaps = {})); -//# sourceMappingURL=snapshotLoader.js.map \ No newline at end of file diff --git a/build/lib/snapshotLoader.ts b/build/lib/snapshotLoader.ts index 3cb2191144d..3df83f73447 100644 --- a/build/lib/snapshotLoader.ts +++ b/build/lib/snapshotLoader.ts @@ -3,14 +3,14 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -export namespace snaps { +export const snaps = (() => { const fs = require('fs'); const path = require('path'); const os = require('os'); const cp = require('child_process'); - const mksnapshot = path.join(__dirname, `../../node_modules/.bin/${process.platform === 'win32' ? 'mksnapshot.cmd' : 'mksnapshot'}`); + const mksnapshot = path.join(import.meta.dirname, `../../node_modules/.bin/${process.platform === 'win32' ? 
'mksnapshot.cmd' : 'mksnapshot'}`); const product = require('../../product.json'); const arch = (process.argv.join('').match(/--arch=(.*)/) || [])[1]; @@ -34,8 +34,8 @@ export namespace snaps { throw new Error('Unknown platform'); } - loaderFilepath = path.join(__dirname, '../../../', loaderFilepath); - startupBlobFilepath = path.join(__dirname, '../../../', startupBlobFilepath); + loaderFilepath = path.join(import.meta.dirname, '../../../', loaderFilepath); + startupBlobFilepath = path.join(import.meta.dirname, '../../../', startupBlobFilepath); snapshotLoader(loaderFilepath, startupBlobFilepath); @@ -62,4 +62,6 @@ export namespace snaps { cp.execFileSync(mksnapshot, [wrappedInputFilepath, `--startup_blob`, startupBlobFilepath]); } -} + + return {}; +})(); diff --git a/build/lib/standalone.js b/build/lib/standalone.js deleted file mode 100644 index e8f81f92dea..00000000000 --- a/build/lib/standalone.js +++ /dev/null @@ -1,212 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || (function () { - var ownKeys = function(o) { - ownKeys = Object.getOwnPropertyNames || function (o) { - var ar = []; - for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys(o); - }; - return function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); - __setModuleDefault(result, mod); - return result; - }; -})(); -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.extractEditor = extractEditor; -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const tss = __importStar(require("./treeshaking")); -const dirCache = {}; -function writeFile(filePath, contents) { - function ensureDirs(dirPath) { - if (dirCache[dirPath]) { - return; - } - dirCache[dirPath] = true; - ensureDirs(path_1.default.dirname(dirPath)); - if (fs_1.default.existsSync(dirPath)) { - return; - } - fs_1.default.mkdirSync(dirPath); - } - ensureDirs(path_1.default.dirname(filePath)); - fs_1.default.writeFileSync(filePath, contents); -} -function extractEditor(options) { - const ts = require('typescript'); - const tsConfig = JSON.parse(fs_1.default.readFileSync(path_1.default.join(options.sourcesRoot, 'tsconfig.monaco.json')).toString()); - let compilerOptions; - if (tsConfig.extends) { - compilerOptions = Object.assign({}, require(path_1.default.join(options.sourcesRoot, tsConfig.extends)).compilerOptions, tsConfig.compilerOptions); - delete tsConfig.extends; - } - else { - compilerOptions = tsConfig.compilerOptions; - } - tsConfig.compilerOptions = compilerOptions; - tsConfig.compilerOptions.sourceMap = true; - tsConfig.compilerOptions.outDir = options.tsOutDir; - compilerOptions.noEmit = false; - compilerOptions.noUnusedLocals = false; - compilerOptions.preserveConstEnums = false; - compilerOptions.declaration = false; - options.compilerOptions = compilerOptions; - console.log(`Running tree shaker with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`); - // Take the extra included .d.ts files from `tsconfig.monaco.json` - options.typings = tsConfig.include.filter(includedFile => /\.d\.ts$/.test(includedFile)); - const result = tss.shake(options); - for (const fileName in result) { - if (result.hasOwnProperty(fileName)) { - let fileContents = result[fileName]; - // Replace .ts? with .js? in new URL() patterns - fileContents = fileContents.replace(/(new\s+URL\s*\(\s*['"`][^'"`]*?)\.ts(\?[^'"`]*['"`])/g, '$1.js$2'); - const relativePath = path_1.default.relative(options.sourcesRoot, fileName); - writeFile(path_1.default.join(options.destRoot, relativePath), fileContents); - } - } - const copied = {}; - const copyFile = (fileName, toFileName) => { - if (copied[fileName]) { - return; - } - copied[fileName] = true; - if (path_1.default.isAbsolute(fileName)) { - const relativePath = path_1.default.relative(options.sourcesRoot, fileName); - const dstPath = path_1.default.join(options.destRoot, toFileName ?? relativePath); - writeFile(dstPath, fs_1.default.readFileSync(fileName)); - } - else { - const srcPath = path_1.default.join(options.sourcesRoot, fileName); - const dstPath = path_1.default.join(options.destRoot, toFileName ?? fileName); - writeFile(dstPath, fs_1.default.readFileSync(srcPath)); - } - }; - const writeOutputFile = (fileName, contents) => { - const relativePath = path_1.default.isAbsolute(fileName) ? 
path_1.default.relative(options.sourcesRoot, fileName) : fileName; - writeFile(path_1.default.join(options.destRoot, relativePath), contents); - }; - for (const fileName in result) { - if (result.hasOwnProperty(fileName)) { - const fileContents = result[fileName]; - const info = ts.preProcessFile(fileContents); - for (let i = info.importedFiles.length - 1; i >= 0; i--) { - const importedFileName = info.importedFiles[i].fileName; - let importedFilePath = importedFileName; - if (/(^\.\/)|(^\.\.\/)/.test(importedFilePath)) { - importedFilePath = path_1.default.join(path_1.default.dirname(fileName), importedFilePath); - } - if (/\.css$/.test(importedFilePath)) { - transportCSS(importedFilePath, copyFile, writeOutputFile); - } - else { - const pathToCopy = path_1.default.join(options.sourcesRoot, importedFilePath); - if (fs_1.default.existsSync(pathToCopy) && !fs_1.default.statSync(pathToCopy).isDirectory()) { - copyFile(importedFilePath); - } - } - } - } - } - delete tsConfig.compilerOptions.moduleResolution; - writeOutputFile('tsconfig.json', JSON.stringify(tsConfig, null, '\t')); - options.additionalFilesToCopyOut?.forEach((file) => { - copyFile(file); - }); - copyFile('vs/loader.js'); - copyFile('typings/css.d.ts'); - copyFile('../node_modules/@vscode/tree-sitter-wasm/wasm/web-tree-sitter.d.ts', '@vscode/tree-sitter-wasm.d.ts'); -} -function transportCSS(module, enqueue, write) { - if (!/\.css/.test(module)) { - return false; - } - const fileContents = fs_1.default.readFileSync(module).toString(); - const inlineResources = 'base64'; // see https://github.com/microsoft/monaco-editor/issues/148 - const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64'); - write(module, newContents); - return true; - function _rewriteOrInlineUrls(contents, forceBase64) { - return _replaceURL(contents, (url) => { - const fontMatch = url.match(/^(.*).ttf\?(.*)$/); - if (fontMatch) { - const relativeFontPath = `${fontMatch[1]}.ttf`; // trim the query parameter - const fontPath = path_1.default.join(path_1.default.dirname(module), relativeFontPath); - enqueue(fontPath); - return relativeFontPath; - } - const imagePath = path_1.default.join(path_1.default.dirname(module), url); - const fileContents = fs_1.default.readFileSync(imagePath); - const MIME = /\.svg$/.test(url) ? 
'image/svg+xml' : 'image/png'; - let DATA = ';base64,' + fileContents.toString('base64'); - if (!forceBase64 && /\.svg$/.test(url)) { - // .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris - const newText = fileContents.toString() - .replace(/"/g, '\'') - .replace(/</g, '%3C') - .replace(/>/g, '%3E') - .replace(/&/g, '%26') - .replace(/#/g, '%23') - .replace(/\s+/g, ' '); - const encodedData = ',' + newText; - if (encodedData.length < DATA.length) { - DATA = encodedData; - } - } - return '"data:' + MIME + DATA + '"'; - }); - } - function _replaceURL(contents, replacer) { - // Use ")" as the terminator as quotes are oftentimes not used at all - return contents.replace(/url\(\s*([^\)]+)\s*\)?/g, (_, ...matches) => { - let url = matches[0]; - // Eliminate starting quotes (the initial whitespace is not captured) - if (url.charAt(0) === '"' || url.charAt(0) === '\'') { - url = url.substring(1); - } - // The ending whitespace is captured - while (url.length > 0 && (url.charAt(url.length - 1) === ' ' || url.charAt(url.length - 1) === '\t')) { - url = url.substring(0, url.length - 1); - } - // Eliminate ending quotes - if (url.charAt(url.length - 1) === '"' || url.charAt(url.length - 1) === '\'') { - url = url.substring(0, url.length - 1); - } - if (!_startsWith(url, 'data:') && !_startsWith(url, 'http://') && !_startsWith(url, 'https://')) { - url = replacer(url); - } - return 'url(' + url + ')'; - }); - } - function _startsWith(haystack, needle) { - return haystack.length >= needle.length && haystack.substr(0, needle.length) === needle; - } -} -//# sourceMappingURL=standalone.js.map \ No newline at end of file diff --git a/build/lib/standalone.ts b/build/lib/standalone.ts index bd2971b9894..3e1006fce12 100644 --- a/build/lib/standalone.ts +++ b/build/lib/standalone.ts @@ -5,7 +5,8 @@ import fs from 'fs'; import path from 'path'; -import * as tss from './treeshaking'; +import * as tss from './treeshaking.ts'; +import ts from 'typescript'; const dirCache: { [dir: string]: boolean } = {}; @@ -27,12 +28,11 @@ function writeFile(filePath: string, contents: Buffer | string): void { } export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: string; tsOutDir: string; additionalFilesToCopyOut?: string[] }): void { - const ts = require('typescript') as typeof import('typescript'); - const tsConfig = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, 'tsconfig.monaco.json')).toString()); let compilerOptions: { [key: string]: any }; if (tsConfig.extends) { - compilerOptions = Object.assign({}, require(path.join(options.sourcesRoot, tsConfig.extends)).compilerOptions, tsConfig.compilerOptions); + const extendedConfig = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, tsConfig.extends)).toString()); + compilerOptions = Object.assign({}, extendedConfig.compilerOptions, tsConfig.compilerOptions); delete tsConfig.extends; } else { compilerOptions = tsConfig.compilerOptions; @@ -52,7 +52,7 @@ export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: str console.log(`Running tree shaker with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`); // Take the extra included .d.ts files from `tsconfig.monaco.json` - options.typings = (tsConfig.include).filter(includedFile => /\.d\.ts$/.test(includedFile)); + options.typings = (tsConfig.include as string[]).filter(includedFile => /\.d\.ts$/.test(includedFile)); const result = tss.shake(options); for (const fileName in result) { diff --git a/build/lib/stats.js b/build/lib/stats.js deleted
file mode 100644 index 3f6d953ae40..00000000000 --- a/build/lib/stats.js +++ /dev/null @@ -1,79 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createStatsStream = createStatsStream; -const event_stream_1 = __importDefault(require("event-stream")); -const fancy_log_1 = __importDefault(require("fancy-log")); -const ansi_colors_1 = __importDefault(require("ansi-colors")); -class Entry { - name; - totalCount; - totalSize; - constructor(name, totalCount, totalSize) { - this.name = name; - this.totalCount = totalCount; - this.totalSize = totalSize; - } - toString(pretty) { - if (!pretty) { - if (this.totalCount === 1) { - return `${this.name}: ${this.totalSize} bytes`; - } - else { - return `${this.name}: ${this.totalCount} files with ${this.totalSize} bytes`; - } - } - else { - if (this.totalCount === 1) { - return `Stats for '${ansi_colors_1.default.grey(this.name)}': ${Math.round(this.totalSize / 1204)}KB`; - } - else { - const count = this.totalCount < 100 - ? ansi_colors_1.default.green(this.totalCount.toString()) - : ansi_colors_1.default.red(this.totalCount.toString()); - return `Stats for '${ansi_colors_1.default.grey(this.name)}': ${count} files, ${Math.round(this.totalSize / 1204)}KB`; - } - } - } -} -const _entries = new Map(); -function createStatsStream(group, log) { - const entry = new Entry(group, 0, 0); - _entries.set(entry.name, entry); - return event_stream_1.default.through(function (data) { - const file = data; - if (typeof file.path === 'string') { - entry.totalCount += 1; - if (Buffer.isBuffer(file.contents)) { - entry.totalSize += file.contents.length; - } - else if (file.stat && typeof file.stat.size === 'number') { - entry.totalSize += file.stat.size; - } - else { - // funky file... - } - } - this.emit('data', data); - }, function () { - if (log) { - if (entry.totalCount === 1) { - (0, fancy_log_1.default)(`Stats for '${ansi_colors_1.default.grey(entry.name)}': ${Math.round(entry.totalSize / 1204)}KB`); - } - else { - const count = entry.totalCount < 100 - ? 
ansi_colors_1.default.green(entry.totalCount.toString()) - : ansi_colors_1.default.red(entry.totalCount.toString()); - (0, fancy_log_1.default)(`Stats for '${ansi_colors_1.default.grey(entry.name)}': ${count} files, ${Math.round(entry.totalSize / 1204)}KB`); - } - } - this.emit('end'); - }); -} -//# sourceMappingURL=stats.js.map \ No newline at end of file diff --git a/build/lib/stats.ts b/build/lib/stats.ts index 8db55d3e777..83bf0a4a7ae 100644 --- a/build/lib/stats.ts +++ b/build/lib/stats.ts @@ -9,7 +9,15 @@ import ansiColors from 'ansi-colors'; import File from 'vinyl'; class Entry { - constructor(readonly name: string, public totalCount: number, public totalSize: number) { } + readonly name: string; + public totalCount: number; + public totalSize: number; + + constructor(name: string, totalCount: number, totalSize: number) { + this.name = name; + this.totalCount = totalCount; + this.totalSize = totalSize; + } toString(pretty?: boolean): string { if (!pretty) { diff --git a/build/lib/stylelint/validateVariableNames.js b/build/lib/stylelint/validateVariableNames.js deleted file mode 100644 index b0e064e7b56..00000000000 --- a/build/lib/stylelint/validateVariableNames.js +++ /dev/null @@ -1,37 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getVariableNameValidator = getVariableNameValidator; -const fs_1 = require("fs"); -const path_1 = __importDefault(require("path")); -const RE_VAR_PROP = /var\(\s*(--([\w\-\.]+))/g; -let knownVariables; -function getKnownVariableNames() { - if (!knownVariables) { - const knownVariablesFileContent = (0, fs_1.readFileSync)(path_1.default.join(__dirname, './vscode-known-variables.json'), 'utf8').toString(); - const knownVariablesInfo = JSON.parse(knownVariablesFileContent); - knownVariables = new Set([...knownVariablesInfo.colors, ...knownVariablesInfo.others]); - } - return knownVariables; -} -const iconVariable = /^--vscode-icon-.+-(content|font-family)$/; -function getVariableNameValidator() { - const allVariables = getKnownVariableNames(); - return (value, report) => { - RE_VAR_PROP.lastIndex = 0; // reset lastIndex just to be sure - let match; - while (match = RE_VAR_PROP.exec(value)) { - const variableName = match[1]; - if (variableName && !allVariables.has(variableName) && !iconVariable.test(variableName)) { - report(variableName); - } - } - }; -} -//# sourceMappingURL=validateVariableNames.js.map \ No newline at end of file diff --git a/build/lib/stylelint/validateVariableNames.ts b/build/lib/stylelint/validateVariableNames.ts index b28aed13f4b..0d11cafaa5b 100644 --- a/build/lib/stylelint/validateVariableNames.ts +++ b/build/lib/stylelint/validateVariableNames.ts @@ -11,7 +11,7 @@ const RE_VAR_PROP = /var\(\s*(--([\w\-\.]+))/g; let knownVariables: Set<string> | undefined; function getKnownVariableNames() { if (!knownVariables) { - const knownVariablesFileContent =
readFileSync(path.join(import.meta.dirname, './vscode-known-variables.json'), 'utf8').toString(); const knownVariablesInfo = JSON.parse(knownVariablesFileContent); knownVariables = new Set([...knownVariablesInfo.colors, ...knownVariablesInfo.others] as string[]); } diff --git a/build/lib/task.js b/build/lib/task.js deleted file mode 100644 index a30b65b288c..00000000000 --- a/build/lib/task.js +++ /dev/null @@ -1,97 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.series = series; -exports.parallel = parallel; -exports.define = define; -const fancy_log_1 = __importDefault(require("fancy-log")); -const ansi_colors_1 = __importDefault(require("ansi-colors")); -function _isPromise(p) { - return typeof p.then === 'function'; -} -function _renderTime(time) { - return `${Math.round(time)} ms`; -} -async function _execute(task) { - const name = task.taskName || task.displayName || ``; - if (!task._tasks) { - (0, fancy_log_1.default)('Starting', ansi_colors_1.default.cyan(name), '...'); - } - const startTime = process.hrtime(); - await _doExecute(task); - const elapsedArr = process.hrtime(startTime); - const elapsedNanoseconds = (elapsedArr[0] * 1e9 + elapsedArr[1]); - if (!task._tasks) { - (0, fancy_log_1.default)(`Finished`, ansi_colors_1.default.cyan(name), 'after', ansi_colors_1.default.magenta(_renderTime(elapsedNanoseconds / 1e6))); - } -} -async function _doExecute(task) { - // Always invoke as if it were a callback task - return new Promise((resolve, reject) => { - if (task.length === 1) { - // this is a callback task - task((err) => { - if (err) { - return reject(err); - } - resolve(); - }); - return; - } - const taskResult = task(); - if (typeof taskResult === 'undefined') { - // this is a sync task - resolve(); - return; - } - if (_isPromise(taskResult)) { - // this is a promise returning task - taskResult.then(resolve, reject); - return; - } - // this is a stream returning task - taskResult.on('end', _ => resolve()); - taskResult.on('error', err => reject(err)); - }); -} -function series(...tasks) { - const result = async () => { - for (let i = 0; i < tasks.length; i++) { - await _execute(tasks[i]); - } - }; - result._tasks = tasks; - return result; -} -function parallel(...tasks) { - const result = async () => { - await Promise.all(tasks.map(t => _execute(t))); - }; - result._tasks = tasks; - return result; -} -function define(name, task) { - if (task._tasks) { - // This is a composite task - const lastTask = task._tasks[task._tasks.length - 1]; - if (lastTask._tasks || lastTask.taskName) { - // This is a composite task without a real task function - // => generate a fake task function - return define(name, series(task, () => Promise.resolve())); - } - lastTask.taskName = name; - task.displayName = name; - return task; - } - // This is a simple task - task.taskName = name; - task.displayName = name; - return task; -} -//# sourceMappingURL=task.js.map \ No newline at end of file diff --git a/build/lib/test/booleanPolicy.test.js 
b/build/lib/test/booleanPolicy.test.js deleted file mode 100644 index 944916c3d76..00000000000 --- a/build/lib/test/booleanPolicy.test.js +++ /dev/null @@ -1,126 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert_1 = __importDefault(require("assert")); -const booleanPolicy_js_1 = require("../policies/booleanPolicy.js"); -const types_js_1 = require("../policies/types.js"); -suite('BooleanPolicy', () => { - const mockCategory = { - key: 'test.category', - name: { value: 'Category1', key: 'test.category' }, - }; - const mockPolicy = { - key: 'test.boolean.policy', - name: 'TestBooleanPolicy', - category: 'Category1', - minimumVersion: '1.0', - type: 'boolean', - localization: { - description: { key: 'test.policy.description', value: 'Test policy description' } - } - }; - test('should create BooleanPolicy from factory method', () => { - const policy = booleanPolicy_js_1.BooleanPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - assert_1.default.strictEqual(policy.name, 'TestBooleanPolicy'); - assert_1.default.strictEqual(policy.minimumVersion, '1.0'); - assert_1.default.strictEqual(policy.category.name.nlsKey, mockCategory.name.key); - assert_1.default.strictEqual(policy.category.name.value, mockCategory.name.value); - assert_1.default.strictEqual(policy.type, types_js_1.PolicyType.Boolean); - }); - test('should render ADMX elements correctly', () => { - const policy = booleanPolicy_js_1.BooleanPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const admx = policy.renderADMX('TestKey'); - assert_1.default.deepStrictEqual(admx, [ - '', - '\t', - '\t', - '\t', - '', - '\t', - '', - '\t', - '' - ]); - }); - test('should render ADML strings correctly', () => { - const policy = booleanPolicy_js_1.BooleanPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const admlStrings = policy.renderADMLStrings(); - assert_1.default.deepStrictEqual(admlStrings, [ - 'TestBooleanPolicy', - 'Test policy description' - ]); - }); - test('should render ADML strings with translations', () => { - const policy = booleanPolicy_js_1.BooleanPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const translations = { - '': { - 'test.policy.description': 'Translated description' - } - }; - const admlStrings = policy.renderADMLStrings(translations); - assert_1.default.deepStrictEqual(admlStrings, [ - 'TestBooleanPolicy', - 'Translated description' - ]); - }); - test('should render ADML presentation correctly', () => { - const policy = booleanPolicy_js_1.BooleanPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const presentation = policy.renderADMLPresentation(); - assert_1.default.strictEqual(presentation, 'TestBooleanPolicy'); - }); - test('should render JSON value correctly', () => { - const policy = booleanPolicy_js_1.BooleanPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const jsonValue = policy.renderJsonValue(); - assert_1.default.strictEqual(jsonValue, false); - 
}); - test('should render profile value correctly', () => { - const policy = booleanPolicy_js_1.BooleanPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const profileValue = policy.renderProfileValue(); - assert_1.default.strictEqual(profileValue, ''); - }); - test('should render profile correctly', () => { - const policy = booleanPolicy_js_1.BooleanPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const profile = policy.renderProfile(); - assert_1.default.strictEqual(profile.length, 2); - assert_1.default.strictEqual(profile[0], 'TestBooleanPolicy'); - assert_1.default.strictEqual(profile[1], ''); - }); - test('should render profile manifest value correctly', () => { - const policy = booleanPolicy_js_1.BooleanPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const manifestValue = policy.renderProfileManifestValue(); - assert_1.default.strictEqual(manifestValue, 'pfm_default\n\npfm_description\nTest policy description\npfm_name\nTestBooleanPolicy\npfm_title\nTestBooleanPolicy\npfm_type\nboolean'); - }); - test('should render profile manifest value with translations', () => { - const policy = booleanPolicy_js_1.BooleanPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const translations = { - '': { - 'test.policy.description': 'Translated manifest description' - } - }; - const manifestValue = policy.renderProfileManifestValue(translations); - assert_1.default.strictEqual(manifestValue, 'pfm_default\n\npfm_description\nTranslated manifest description\npfm_name\nTestBooleanPolicy\npfm_title\nTestBooleanPolicy\npfm_type\nboolean'); - }); - test('should render profile manifest correctly', () => { - const policy = booleanPolicy_js_1.BooleanPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const manifest = policy.renderProfileManifest(); - assert_1.default.strictEqual(manifest, '\npfm_default\n\npfm_description\nTest policy description\npfm_name\nTestBooleanPolicy\npfm_title\nTestBooleanPolicy\npfm_type\nboolean\n'); - }); -}); -//# sourceMappingURL=booleanPolicy.test.js.map \ No newline at end of file diff --git a/build/lib/test/booleanPolicy.test.ts b/build/lib/test/booleanPolicy.test.ts index 8da223530b9..d64f9fff646 100644 --- a/build/lib/test/booleanPolicy.test.ts +++ b/build/lib/test/booleanPolicy.test.ts @@ -4,9 +4,9 @@ *--------------------------------------------------------------------------------------------*/ import assert from 'assert'; -import { BooleanPolicy } from '../policies/booleanPolicy.js'; -import { LanguageTranslations, PolicyType } from '../policies/types.js'; -import { CategoryDto, PolicyDto } from '../policies/policyDto.js'; +import { BooleanPolicy } from '../policies/booleanPolicy.ts'; +import { type LanguageTranslations, PolicyType } from '../policies/types.ts'; +import type { CategoryDto, PolicyDto } from '../policies/policyDto.ts'; suite('BooleanPolicy', () => { const mockCategory: CategoryDto = { diff --git a/build/lib/test/i18n.test.js b/build/lib/test/i18n.test.js deleted file mode 100644 index 41aa8a7f668..00000000000 --- a/build/lib/test/i18n.test.js +++ /dev/null @@ -1,77 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || (function () { - var ownKeys = function(o) { - ownKeys = Object.getOwnPropertyNames || function (o) { - var ar = []; - for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys(o); - }; - return function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); - __setModuleDefault(result, mod); - return result; - }; -})(); -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert_1 = __importDefault(require("assert")); -const i18n = __importStar(require("../i18n")); -suite('XLF Parser Tests', () => { - const sampleXlf = 'Key #1Key #2 &'; - const sampleTranslatedXlf = 'Key #1Кнопка #1Key #2 &Кнопка #2 &'; - const name = 'vs/base/common/keybinding'; - const keys = ['key1', 'key2']; - const messages = ['Key #1', 'Key #2 &']; - const translatedMessages = { key1: 'Кнопка #1', key2: 'Кнопка #2 &' }; - test('Keys & messages to XLF conversion', () => { - const xlf = new i18n.XLF('vscode-workbench'); - xlf.addFile(name, keys, messages); - const xlfString = xlf.toString(); - assert_1.default.strictEqual(xlfString.replace(/\s{2,}/g, ''), sampleXlf); - }); - test('XLF to keys & messages conversion', () => { - i18n.XLF.parse(sampleTranslatedXlf).then(function (resolvedFiles) { - assert_1.default.deepStrictEqual(resolvedFiles[0].messages, translatedMessages); - assert_1.default.strictEqual(resolvedFiles[0].name, name); - }); - }); - test('JSON file source path to Transifex resource match', () => { - const editorProject = 'vscode-editor', workbenchProject = 'vscode-workbench'; - const platform = { name: 'vs/platform', project: editorProject }, editorContrib = { name: 'vs/editor/contrib', project: editorProject }, editor = { name: 'vs/editor', project: editorProject }, base = { name: 'vs/base', project: editorProject }, code = { name: 'vs/code', project: workbenchProject }, workbenchParts = { name: 'vs/workbench/contrib/html', project: workbenchProject }, workbenchServices = { name: 'vs/workbench/services/textfile', project: workbenchProject }, workbench = { name: 'vs/workbench', project: workbenchProject }; - assert_1.default.deepStrictEqual(i18n.getResource('vs/platform/actions/browser/menusExtensionPoint'), platform); - assert_1.default.deepStrictEqual(i18n.getResource('vs/editor/contrib/clipboard/browser/clipboard'), editorContrib); - assert_1.default.deepStrictEqual(i18n.getResource('vs/editor/common/modes/modesRegistry'), editor); 
- assert_1.default.deepStrictEqual(i18n.getResource('vs/base/common/errorMessage'), base); - assert_1.default.deepStrictEqual(i18n.getResource('vs/code/electron-main/window'), code); - assert_1.default.deepStrictEqual(i18n.getResource('vs/workbench/contrib/html/browser/webview'), workbenchParts); - assert_1.default.deepStrictEqual(i18n.getResource('vs/workbench/services/textfile/node/testFileService'), workbenchServices); - assert_1.default.deepStrictEqual(i18n.getResource('vs/workbench/browser/parts/panel/panelActions'), workbench); - }); -}); -//# sourceMappingURL=i18n.test.js.map \ No newline at end of file diff --git a/build/lib/test/i18n.test.ts b/build/lib/test/i18n.test.ts index 4e4545548b8..7d5bb0433fe 100644 --- a/build/lib/test/i18n.test.ts +++ b/build/lib/test/i18n.test.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import assert from 'assert'; -import * as i18n from '../i18n'; +import * as i18n from '../i18n.ts'; suite('XLF Parser Tests', () => { const sampleXlf = 'Key #1Key #2 &'; diff --git a/build/lib/test/numberPolicy.test.js b/build/lib/test/numberPolicy.test.js deleted file mode 100644 index 312ec7587ee..00000000000 --- a/build/lib/test/numberPolicy.test.js +++ /dev/null @@ -1,125 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert_1 = __importDefault(require("assert")); -const numberPolicy_js_1 = require("../policies/numberPolicy.js"); -const types_js_1 = require("../policies/types.js"); -suite('NumberPolicy', () => { - const mockCategory = { - key: 'test.category', - name: { value: 'Category1', key: 'test.category' }, - }; - const mockPolicy = { - key: 'test.number.policy', - name: 'TestNumberPolicy', - category: 'Category1', - minimumVersion: '1.0', - type: 'number', - default: 42, - localization: { - description: { key: 'test.policy.description', value: 'Test number policy description' } - } - }; - test('should create NumberPolicy from factory method', () => { - const policy = numberPolicy_js_1.NumberPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - assert_1.default.strictEqual(policy.name, 'TestNumberPolicy'); - assert_1.default.strictEqual(policy.minimumVersion, '1.0'); - assert_1.default.strictEqual(policy.category.name.nlsKey, mockCategory.name.key); - assert_1.default.strictEqual(policy.category.name.value, mockCategory.name.value); - assert_1.default.strictEqual(policy.type, types_js_1.PolicyType.Number); - }); - test('should render ADMX elements correctly', () => { - const policy = numberPolicy_js_1.NumberPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const admx = policy.renderADMX('TestKey'); - assert_1.default.deepStrictEqual(admx, [ - '', - '\t', - '\t', - '\t', - '', - '\t', - '' - ]); - }); - test('should render ADML strings correctly', () => { - const policy = numberPolicy_js_1.NumberPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const admlStrings = policy.renderADMLStrings(); - 
assert_1.default.deepStrictEqual(admlStrings, [ - 'TestNumberPolicy', - 'Test number policy description' - ]); - }); - test('should render ADML strings with translations', () => { - const policy = numberPolicy_js_1.NumberPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const translations = { - '': { - 'test.policy.description': 'Translated description' - } - }; - const admlStrings = policy.renderADMLStrings(translations); - assert_1.default.deepStrictEqual(admlStrings, [ - 'TestNumberPolicy', - 'Translated description' - ]); - }); - test('should render ADML presentation correctly', () => { - const policy = numberPolicy_js_1.NumberPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const presentation = policy.renderADMLPresentation(); - assert_1.default.strictEqual(presentation, 'TestNumberPolicy'); - }); - test('should render JSON value correctly', () => { - const policy = numberPolicy_js_1.NumberPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const jsonValue = policy.renderJsonValue(); - assert_1.default.strictEqual(jsonValue, 42); - }); - test('should render profile value correctly', () => { - const policy = numberPolicy_js_1.NumberPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const profileValue = policy.renderProfileValue(); - assert_1.default.strictEqual(profileValue, '42'); - }); - test('should render profile correctly', () => { - const policy = numberPolicy_js_1.NumberPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const profile = policy.renderProfile(); - assert_1.default.strictEqual(profile.length, 2); - assert_1.default.strictEqual(profile[0], 'TestNumberPolicy'); - assert_1.default.strictEqual(profile[1], '42'); - }); - test('should render profile manifest value correctly', () => { - const policy = numberPolicy_js_1.NumberPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const manifestValue = policy.renderProfileManifestValue(); - assert_1.default.strictEqual(manifestValue, 'pfm_default\n42\npfm_description\nTest number policy description\npfm_name\nTestNumberPolicy\npfm_title\nTestNumberPolicy\npfm_type\ninteger'); - }); - test('should render profile manifest value with translations', () => { - const policy = numberPolicy_js_1.NumberPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const translations = { - '': { - 'test.policy.description': 'Translated manifest description' - } - }; - const manifestValue = policy.renderProfileManifestValue(translations); - assert_1.default.strictEqual(manifestValue, 'pfm_default\n42\npfm_description\nTranslated manifest description\npfm_name\nTestNumberPolicy\npfm_title\nTestNumberPolicy\npfm_type\ninteger'); - }); - test('should render profile manifest correctly', () => { - const policy = numberPolicy_js_1.NumberPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const manifest = policy.renderProfileManifest(); - assert_1.default.strictEqual(manifest, '\npfm_default\n42\npfm_description\nTest number policy description\npfm_name\nTestNumberPolicy\npfm_title\nTestNumberPolicy\npfm_type\ninteger\n'); - }); -}); -//# sourceMappingURL=numberPolicy.test.js.map \ No newline at end of file diff --git a/build/lib/test/numberPolicy.test.ts b/build/lib/test/numberPolicy.test.ts index dfb6276e34e..503403ca5c0 100644 --- a/build/lib/test/numberPolicy.test.ts +++ b/build/lib/test/numberPolicy.test.ts @@ -4,9 +4,9 @@ 
*--------------------------------------------------------------------------------------------*/ import assert from 'assert'; -import { NumberPolicy } from '../policies/numberPolicy.js'; -import { LanguageTranslations, PolicyType } from '../policies/types.js'; -import { CategoryDto, PolicyDto } from '../policies/policyDto.js'; +import { NumberPolicy } from '../policies/numberPolicy.ts'; +import { type LanguageTranslations, PolicyType } from '../policies/types.ts'; +import type { CategoryDto, PolicyDto } from '../policies/policyDto.ts'; suite('NumberPolicy', () => { const mockCategory: CategoryDto = { diff --git a/build/lib/test/objectPolicy.test.js b/build/lib/test/objectPolicy.test.js deleted file mode 100644 index a34d71383d2..00000000000 --- a/build/lib/test/objectPolicy.test.js +++ /dev/null @@ -1,124 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert_1 = __importDefault(require("assert")); -const objectPolicy_js_1 = require("../policies/objectPolicy.js"); -const types_js_1 = require("../policies/types.js"); -suite('ObjectPolicy', () => { - const mockCategory = { - key: 'test.category', - name: { value: 'Category1', key: 'test.category' }, - }; - const mockPolicy = { - key: 'test.object.policy', - name: 'TestObjectPolicy', - category: 'Category1', - minimumVersion: '1.0', - type: 'object', - localization: { - description: { key: 'test.policy.description', value: 'Test policy description' } - } - }; - test('should create ObjectPolicy from factory method', () => { - const policy = objectPolicy_js_1.ObjectPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - assert_1.default.strictEqual(policy.name, 'TestObjectPolicy'); - assert_1.default.strictEqual(policy.minimumVersion, '1.0'); - assert_1.default.strictEqual(policy.category.name.nlsKey, mockCategory.name.key); - assert_1.default.strictEqual(policy.category.name.value, mockCategory.name.value); - assert_1.default.strictEqual(policy.type, types_js_1.PolicyType.Object); - }); - test('should render ADMX elements correctly', () => { - const policy = objectPolicy_js_1.ObjectPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const admx = policy.renderADMX('TestKey'); - assert_1.default.deepStrictEqual(admx, [ - '', - '\t', - '\t', - '\t', - '', - '\t', - '' - ]); - }); - test('should render ADML strings correctly', () => { - const policy = objectPolicy_js_1.ObjectPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const admlStrings = policy.renderADMLStrings(); - assert_1.default.deepStrictEqual(admlStrings, [ - 'TestObjectPolicy', - 'Test policy description' - ]); - }); - test('should render ADML strings with translations', () => { - const policy = objectPolicy_js_1.ObjectPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const translations = { - '': { - 'test.policy.description': 'Translated description' - } - }; - const admlStrings = policy.renderADMLStrings(translations); - assert_1.default.deepStrictEqual(admlStrings, [ - 
'TestObjectPolicy', - 'Translated description' - ]); - }); - test('should render ADML presentation correctly', () => { - const policy = objectPolicy_js_1.ObjectPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const presentation = policy.renderADMLPresentation(); - assert_1.default.strictEqual(presentation, ''); - }); - test('should render JSON value correctly', () => { - const policy = objectPolicy_js_1.ObjectPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const jsonValue = policy.renderJsonValue(); - assert_1.default.strictEqual(jsonValue, ''); - }); - test('should render profile value correctly', () => { - const policy = objectPolicy_js_1.ObjectPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const profileValue = policy.renderProfileValue(); - assert_1.default.strictEqual(profileValue, ''); - }); - test('should render profile correctly', () => { - const policy = objectPolicy_js_1.ObjectPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const profile = policy.renderProfile(); - assert_1.default.strictEqual(profile.length, 2); - assert_1.default.strictEqual(profile[0], 'TestObjectPolicy'); - assert_1.default.strictEqual(profile[1], ''); - }); - test('should render profile manifest value correctly', () => { - const policy = objectPolicy_js_1.ObjectPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const manifestValue = policy.renderProfileManifestValue(); - assert_1.default.strictEqual(manifestValue, 'pfm_default\n\npfm_description\nTest policy description\npfm_name\nTestObjectPolicy\npfm_title\nTestObjectPolicy\npfm_type\nstring\n'); - }); - test('should render profile manifest value with translations', () => { - const policy = objectPolicy_js_1.ObjectPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const translations = { - '': { - 'test.policy.description': 'Translated manifest description' - } - }; - const manifestValue = policy.renderProfileManifestValue(translations); - assert_1.default.strictEqual(manifestValue, 'pfm_default\n\npfm_description\nTranslated manifest description\npfm_name\nTestObjectPolicy\npfm_title\nTestObjectPolicy\npfm_type\nstring\n'); - }); - test('should render profile manifest correctly', () => { - const policy = objectPolicy_js_1.ObjectPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const manifest = policy.renderProfileManifest(); - assert_1.default.strictEqual(manifest, '\npfm_default\n\npfm_description\nTest policy description\npfm_name\nTestObjectPolicy\npfm_title\nTestObjectPolicy\npfm_type\nstring\n\n'); - }); -}); -//# sourceMappingURL=objectPolicy.test.js.map \ No newline at end of file diff --git a/build/lib/test/objectPolicy.test.ts b/build/lib/test/objectPolicy.test.ts index 6012b8012da..8e688d19b8f 100644 --- a/build/lib/test/objectPolicy.test.ts +++ b/build/lib/test/objectPolicy.test.ts @@ -4,9 +4,9 @@ *--------------------------------------------------------------------------------------------*/ import assert from 'assert'; -import { ObjectPolicy } from '../policies/objectPolicy.js'; -import { LanguageTranslations, PolicyType } from '../policies/types.js'; -import { CategoryDto, PolicyDto } from '../policies/policyDto.js'; +import { ObjectPolicy } from '../policies/objectPolicy.ts'; +import { type LanguageTranslations, PolicyType } from '../policies/types.ts'; +import type { CategoryDto, PolicyDto } from '../policies/policyDto.ts'; suite('ObjectPolicy', () => { const mockCategory: CategoryDto 
= { diff --git a/build/lib/test/policyConversion.test.js b/build/lib/test/policyConversion.test.js deleted file mode 100644 index 6fc735f1127..00000000000 --- a/build/lib/test/policyConversion.test.js +++ /dev/null @@ -1,465 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert_1 = __importDefault(require("assert")); -const fs_1 = require("fs"); -const path_1 = __importDefault(require("path")); -const booleanPolicy_1 = require("../policies/booleanPolicy"); -const numberPolicy_1 = require("../policies/numberPolicy"); -const objectPolicy_1 = require("../policies/objectPolicy"); -const stringEnumPolicy_1 = require("../policies/stringEnumPolicy"); -const stringPolicy_1 = require("../policies/stringPolicy"); -const render_1 = require("../policies/render"); -const PolicyTypes = [ - booleanPolicy_1.BooleanPolicy, - numberPolicy_1.NumberPolicy, - stringEnumPolicy_1.StringEnumPolicy, - stringPolicy_1.StringPolicy, - objectPolicy_1.ObjectPolicy -]; -function parsePolicies(policyData) { - const categories = new Map(); - for (const category of policyData.categories) { - categories.set(category.key, category); - } - const policies = []; - for (const policy of policyData.policies) { - const category = categories.get(policy.category); - if (!category) { - throw new Error(`Unknown category: ${policy.category}`); - } - let result; - for (const policyType of PolicyTypes) { - if (result = policyType.from(category, policy)) { - break; - } - } - if (!result) { - throw new Error(`Unsupported policy type: ${policy.type} for policy ${policy.name}`); - } - policies.push(result); - } - // Sort policies first by category name, then by policy name - policies.sort((a, b) => { - const categoryCompare = a.category.name.value.localeCompare(b.category.name.value); - if (categoryCompare !== 0) { - return categoryCompare; - } - return a.name.localeCompare(b.name); - }); - return policies; -} -/** - * This is a snapshot of the data taken on Oct. 20 2025 as part of the - * policy refactor effort. Let's make sure that nothing has regressed. 
- */ -const policies = { - categories: [ - { - key: 'Extensions', - name: { - key: 'extensionsConfigurationTitle', - value: 'Extensions' - } - }, - { - key: 'IntegratedTerminal', - name: { - key: 'terminalIntegratedConfigurationTitle', - value: 'Integrated Terminal' - } - }, - { - key: 'InteractiveSession', - name: { - key: 'interactiveSessionConfigurationTitle', - value: 'Chat' - } - }, - { - key: 'Telemetry', - name: { - key: 'telemetryConfigurationTitle', - value: 'Telemetry' - } - }, - { - key: 'Update', - name: { - key: 'updateConfigurationTitle', - value: 'Update' - } - } - ], - policies: [ - { - key: 'chat.mcp.gallery.serviceUrl', - name: 'McpGalleryServiceUrl', - category: 'InteractiveSession', - minimumVersion: '1.101', - localization: { - description: { - key: 'mcp.gallery.serviceUrl', - value: 'Configure the MCP Gallery service URL to connect to' - } - }, - type: 'string', - default: '' - }, - { - key: 'extensions.gallery.serviceUrl', - name: 'ExtensionGalleryServiceUrl', - category: 'Extensions', - minimumVersion: '1.99', - localization: { - description: { - key: 'extensions.gallery.serviceUrl', - value: 'Configure the Marketplace service URL to connect to' - } - }, - type: 'string', - default: '' - }, - { - key: 'extensions.allowed', - name: 'AllowedExtensions', - category: 'Extensions', - minimumVersion: '1.96', - localization: { - description: { - key: 'extensions.allowed.policy', - value: 'Specify a list of extensions that are allowed to use. This helps maintain a secure and consistent development environment by restricting the use of unauthorized extensions. More information: https://code.visualstudio.com/docs/setup/enterprise#_configure-allowed-extensions' - } - }, - type: 'object', - default: '*' - }, - { - key: 'chat.tools.global.autoApprove', - name: 'ChatToolsAutoApprove', - category: 'InteractiveSession', - minimumVersion: '1.99', - localization: { - description: { - key: 'autoApprove2.description', - value: 'Global auto approve also known as "YOLO mode" disables manual approval completely for all tools in all workspaces, allowing the agent to act fully autonomously. This is extremely dangerous and is *never* recommended, even containerized environments like Codespaces and Dev Containers have user keys forwarded into the container that could be compromised.\n\nThis feature disables critical security protections and makes it much easier for an attacker to compromise the machine.' - } - }, - type: 'boolean', - default: false - }, - { - key: 'chat.mcp.access', - name: 'ChatMCP', - category: 'InteractiveSession', - minimumVersion: '1.99', - localization: { - description: { - key: 'chat.mcp.access', - value: 'Controls access to installed Model Context Protocol servers.' - }, - enumDescriptions: [ - { - key: 'chat.mcp.access.none', - value: 'No access to MCP servers.' - }, - { - key: 'chat.mcp.access.registry', - value: 'Allows access to MCP servers installed from the registry that VS Code is connected to.' - }, - { - key: 'chat.mcp.access.any', - value: 'Allow access to any installed MCP server.' - } - ] - }, - type: 'string', - default: 'all', - enum: [ - 'none', - 'registry', - 'all' - ] - }, - { - key: 'chat.extensionTools.enabled', - name: 'ChatAgentExtensionTools', - category: 'InteractiveSession', - minimumVersion: '1.99', - localization: { - description: { - key: 'chat.extensionToolsEnabled', - value: 'Enable using tools contributed by third-party extensions.' 
- } - }, - type: 'boolean', - default: true - }, - { - key: 'chat.agent.enabled', - name: 'ChatAgentMode', - category: 'InteractiveSession', - minimumVersion: '1.99', - localization: { - description: { - key: 'chat.agent.enabled.description', - value: 'Enable agent mode for chat. When this is enabled, agent mode can be activated via the dropdown in the view.' - } - }, - type: 'boolean', - default: true - }, - { - key: 'chat.promptFiles', - name: 'ChatPromptFiles', - category: 'InteractiveSession', - minimumVersion: '1.99', - localization: { - description: { - key: 'chat.promptFiles.policy', - value: 'Enables reusable prompt and instruction files in Chat sessions.' - } - }, - type: 'boolean', - default: true - }, - { - key: 'chat.tools.terminal.enableAutoApprove', - name: 'ChatToolsTerminalEnableAutoApprove', - category: 'IntegratedTerminal', - minimumVersion: '1.104', - localization: { - description: { - key: 'autoApproveMode.description', - value: 'Controls whether to allow auto approval in the run in terminal tool.' - } - }, - type: 'boolean', - default: true - }, - { - key: 'update.mode', - name: 'UpdateMode', - category: 'Update', - minimumVersion: '1.67', - localization: { - description: { - key: 'updateMode', - value: 'Configure whether you receive automatic updates. Requires a restart after change. The updates are fetched from a Microsoft online service.' - }, - enumDescriptions: [ - { - key: 'none', - value: 'Disable updates.' - }, - { - key: 'manual', - value: 'Disable automatic background update checks. Updates will be available if you manually check for updates.' - }, - { - key: 'start', - value: 'Check for updates only on startup. Disable automatic background update checks.' - }, - { - key: 'default', - value: 'Enable automatic update checks. Code will check for updates automatically and periodically.' - } - ] - }, - type: 'string', - default: 'default', - enum: [ - 'none', - 'manual', - 'start', - 'default' - ] - }, - { - key: 'telemetry.telemetryLevel', - name: 'TelemetryLevel', - category: 'Telemetry', - minimumVersion: '1.99', - localization: { - description: { - key: 'telemetry.telemetryLevel.policyDescription', - value: 'Controls the level of telemetry.' - }, - enumDescriptions: [ - { - key: 'telemetry.telemetryLevel.default', - value: 'Sends usage data, errors, and crash reports.' - }, - { - key: 'telemetry.telemetryLevel.error', - value: 'Sends general error telemetry and crash reports.' - }, - { - key: 'telemetry.telemetryLevel.crash', - value: 'Sends OS level crash reports.' - }, - { - key: 'telemetry.telemetryLevel.off', - value: 'Disables all product telemetry.' - } - ] - }, - type: 'string', - default: 'all', - enum: [ - 'all', - 'error', - 'crash', - 'off' - ] - }, - { - key: 'telemetry.feedback.enabled', - name: 'EnableFeedback', - category: 'Telemetry', - minimumVersion: '1.99', - localization: { - description: { - key: 'telemetry.feedback.enabled', - value: 'Enable feedback mechanisms such as the issue reporter, surveys, and other feedback options.' 
- } - }, - type: 'boolean', - default: true - } - ] -}; -const mockProduct = { - nameLong: 'Code - OSS', - darwinBundleIdentifier: 'com.visualstudio.code.oss', - darwinProfilePayloadUUID: 'CF808BE7-53F3-46C6-A7E2-7EDB98A5E959', - darwinProfileUUID: '47827DD9-4734-49A0-AF80-7E19B11495CC', - win32RegValueName: 'CodeOSS' -}; -const frenchTranslations = [ - { - languageId: 'fr-fr', - languageTranslations: { - '': { - 'interactiveSessionConfigurationTitle': 'Session interactive', - 'extensionsConfigurationTitle': 'Extensions', - 'terminalIntegratedConfigurationTitle': 'Terminal intégré', - 'telemetryConfigurationTitle': 'Télémétrie', - 'updateConfigurationTitle': 'Mettre à jour', - 'chat.extensionToolsEnabled': 'Autorisez l’utilisation d’outils fournis par des extensions tierces.', - 'chat.agent.enabled.description': 'Activez le mode Assistant pour la conversation. Lorsque cette option est activée, le mode Assistant peut être activé via la liste déroulante de la vue.', - 'chat.mcp.access': 'Contrôle l’accès aux serveurs de protocole de contexte du modèle.', - 'chat.mcp.access.none': 'Aucun accès aux serveurs MCP.', - 'chat.mcp.access.registry': `Autorise l’accès aux serveurs MCP installés à partir du registre auquel VS Code est connecté.`, - 'chat.mcp.access.any': 'Autorisez l’accès à tout serveur MCP installé.', - 'chat.promptFiles.policy': 'Active les fichiers d’instruction et de requête réutilisables dans les sessions Conversation.', - 'autoApprove2.description': `L’approbation automatique globale, également appelée « mode YOLO », désactive complètement l’approbation manuelle pour tous les outils dans tous les espaces de travail, permettant à l’agent d’agir de manière totalement autonome. Ceci est extrêmement dangereux et est *jamais* recommandé, même dans des environnements conteneurisés comme [Codespaces](https://github.com/features/codespaces) et [Dev Containers](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers), où des clés utilisateur sont transférées dans le conteneur et pourraient être compromises. - -Cette fonctionnalité désactive [les protections de sécurité critiques](https://code.visualstudio.com/docs/copilot/security) et facilite considérablement la compromission de la machine par un attaquant.`, - 'mcp.gallery.serviceUrl': 'Configurer l’URL du service de la galerie MCP à laquelle se connecter', - 'extensions.allowed.policy': 'Spécifiez une liste d’extensions autorisées. Cela permet de maintenir un environnement de développement sécurisé et cohérent en limitant l’utilisation d’extensions non autorisées. 
Plus d’informations : https://code.visualstudio.com/docs/setup/enterprise#_configure-allowed-extensions', - 'extensions.gallery.serviceUrl': 'Configurer l’URL du service Place de marché à laquelle se connecter', - 'autoApproveMode.description': 'Contrôle s’il faut autoriser l’approbation automatique lors de l’exécution dans l’outil terminal.', - 'telemetry.feedback.enabled': 'Activez les mécanismes de commentaires tels que le système de rapport de problèmes, les sondages et autres options de commentaires.', - 'telemetry.telemetryLevel.policyDescription': 'Contrôle le niveau de télémétrie.', - 'telemetry.telemetryLevel.default': `Envoie les données d'utilisation, les erreurs et les rapports d'erreur.`, - 'telemetry.telemetryLevel.error': `Envoie la télémétrie d'erreur générale et les rapports de plantage.`, - 'telemetry.telemetryLevel.crash': `Envoie des rapports de plantage au niveau du système d'exploitation.`, - 'telemetry.telemetryLevel.off': 'Désactive toutes les données de télémétrie du produit.', - 'updateMode': `Choisissez si vous voulez recevoir des mises à jour automatiques. Nécessite un redémarrage après le changement. Les mises à jour sont récupérées auprès d'un service en ligne Microsoft.`, - 'none': 'Aucun', - 'manual': 'Désactivez la recherche de mises à jour automatique en arrière-plan. Les mises à jour sont disponibles si vous les rechercher manuellement.', - 'start': 'Démarrer', - 'default': 'Système' - } - } - } -]; -suite('Policy E2E conversion', () => { - test('should render macOS policy profile from policies list', async () => { - const parsedPolicies = parsePolicies(policies); - const result = (0, render_1.renderMacOSPolicy)(mockProduct, parsedPolicies, []); - // Load the expected fixture file - const fixturePath = path_1.default.join(__dirname, 'fixtures', 'policies', 'darwin', 'com.visualstudio.code.oss.mobileconfig'); - const expectedContent = await fs_1.promises.readFile(fixturePath, 'utf-8'); - // Compare the rendered profile with the fixture - assert_1.default.strictEqual(result.profile, expectedContent, 'macOS policy profile should match the fixture'); - }); - test('should render macOS manifest from policies list', async () => { - const parsedPolicies = parsePolicies(policies); - const result = (0, render_1.renderMacOSPolicy)(mockProduct, parsedPolicies, []); - // Load the expected fixture file - const fixturePath = path_1.default.join(__dirname, 'fixtures', 'policies', 'darwin', 'en-us', 'com.visualstudio.code.oss.plist'); - const expectedContent = await fs_1.promises.readFile(fixturePath, 'utf-8'); - // Find the en-us manifest - const enUsManifest = result.manifests.find(m => m.languageId === 'en-us'); - assert_1.default.ok(enUsManifest, 'en-us manifest should exist'); - // Compare the rendered manifest with the fixture, ignoring the timestamp - // The pfm_last_modified field contains a timestamp that will differ each time - const normalizeTimestamp = (content) => content.replace(/.*?<\/date>/, 'TIMESTAMP'); - assert_1.default.strictEqual(normalizeTimestamp(enUsManifest.contents), normalizeTimestamp(expectedContent), 'macOS manifest should match the fixture (ignoring timestamp)'); - }); - test('should render Windows ADMX from policies list', async () => { - const parsedPolicies = parsePolicies(policies); - const result = (0, render_1.renderGP)(mockProduct, parsedPolicies, []); - // Load the expected fixture file - const fixturePath = path_1.default.join(__dirname, 'fixtures', 'policies', 'win32', 'CodeOSS.admx'); - const expectedContent = await 
fs_1.promises.readFile(fixturePath, 'utf-8'); - // Compare the rendered ADMX with the fixture - assert_1.default.strictEqual(result.admx, expectedContent, 'Windows ADMX should match the fixture'); - }); - test('should render Windows ADML from policies list', async () => { - const parsedPolicies = parsePolicies(policies); - const result = (0, render_1.renderGP)(mockProduct, parsedPolicies, []); - // Load the expected fixture file - const fixturePath = path_1.default.join(__dirname, 'fixtures', 'policies', 'win32', 'en-us', 'CodeOSS.adml'); - const expectedContent = await fs_1.promises.readFile(fixturePath, 'utf-8'); - // Find the en-us ADML - const enUsAdml = result.adml.find(a => a.languageId === 'en-us'); - assert_1.default.ok(enUsAdml, 'en-us ADML should exist'); - // Compare the rendered ADML with the fixture - assert_1.default.strictEqual(enUsAdml.contents, expectedContent, 'Windows ADML should match the fixture'); - }); - test('should render macOS manifest with fr-fr locale', async () => { - const parsedPolicies = parsePolicies(policies); - const result = (0, render_1.renderMacOSPolicy)(mockProduct, parsedPolicies, frenchTranslations); - // Load the expected fixture file - const fixturePath = path_1.default.join(__dirname, 'fixtures', 'policies', 'darwin', 'fr-fr', 'com.visualstudio.code.oss.plist'); - const expectedContent = await fs_1.promises.readFile(fixturePath, 'utf-8'); - // Find the fr-fr manifest - const frFrManifest = result.manifests.find(m => m.languageId === 'fr-fr'); - assert_1.default.ok(frFrManifest, 'fr-fr manifest should exist'); - // Compare the rendered manifest with the fixture, ignoring the timestamp - const normalizeTimestamp = (content) => content.replace(/.*?<\/date>/, 'TIMESTAMP'); - assert_1.default.strictEqual(normalizeTimestamp(frFrManifest.contents), normalizeTimestamp(expectedContent), 'macOS fr-fr manifest should match the fixture (ignoring timestamp)'); - }); - test('should render Windows ADML with fr-fr locale', async () => { - const parsedPolicies = parsePolicies(policies); - const result = (0, render_1.renderGP)(mockProduct, parsedPolicies, frenchTranslations); - // Load the expected fixture file - const fixturePath = path_1.default.join(__dirname, 'fixtures', 'policies', 'win32', 'fr-fr', 'CodeOSS.adml'); - const expectedContent = await fs_1.promises.readFile(fixturePath, 'utf-8'); - // Find the fr-fr ADML - const frFrAdml = result.adml.find(a => a.languageId === 'fr-fr'); - assert_1.default.ok(frFrAdml, 'fr-fr ADML should exist'); - // Compare the rendered ADML with the fixture - assert_1.default.strictEqual(frFrAdml.contents, expectedContent, 'Windows fr-fr ADML should match the fixture'); - }); - test('should render Linux policy JSON from policies list', async () => { - const parsedPolicies = parsePolicies(policies); - const result = (0, render_1.renderJsonPolicies)(parsedPolicies); - // Load the expected fixture file - const fixturePath = path_1.default.join(__dirname, 'fixtures', 'policies', 'linux', 'policy.json'); - const expectedContent = await fs_1.promises.readFile(fixturePath, 'utf-8'); - const expectedJson = JSON.parse(expectedContent); - // Compare the rendered JSON with the fixture - assert_1.default.deepStrictEqual(result, expectedJson, 'Linux policy JSON should match the fixture'); - }); -}); -//# sourceMappingURL=policyConversion.test.js.map \ No newline at end of file diff --git a/build/lib/test/policyConversion.test.ts b/build/lib/test/policyConversion.test.ts index 0610b0cd980..bb4036a7ab9 100644 --- 
a/build/lib/test/policyConversion.test.ts +++ b/build/lib/test/policyConversion.test.ts @@ -6,14 +6,14 @@ import assert from 'assert'; import { promises as fs } from 'fs'; import path from 'path'; -import { ExportedPolicyDataDto, CategoryDto } from '../policies/policyDto'; -import { BooleanPolicy } from '../policies/booleanPolicy'; -import { NumberPolicy } from '../policies/numberPolicy'; -import { ObjectPolicy } from '../policies/objectPolicy'; -import { StringEnumPolicy } from '../policies/stringEnumPolicy'; -import { StringPolicy } from '../policies/stringPolicy'; -import { Policy, ProductJson } from '../policies/types'; -import { renderGP, renderMacOSPolicy, renderJsonPolicies } from '../policies/render'; +import type { ExportedPolicyDataDto, CategoryDto } from '../policies/policyDto.ts'; +import { BooleanPolicy } from '../policies/booleanPolicy.ts'; +import { NumberPolicy } from '../policies/numberPolicy.ts'; +import { ObjectPolicy } from '../policies/objectPolicy.ts'; +import { StringEnumPolicy } from '../policies/stringEnumPolicy.ts'; +import { StringPolicy } from '../policies/stringPolicy.ts'; +import type { Policy, ProductJson } from '../policies/types.ts'; +import { renderGP, renderMacOSPolicy, renderJsonPolicies } from '../policies/render.ts'; const PolicyTypes = [ BooleanPolicy, @@ -398,7 +398,7 @@ suite('Policy E2E conversion', () => { const result = renderMacOSPolicy(mockProduct, parsedPolicies, []); // Load the expected fixture file - const fixturePath = path.join(__dirname, 'fixtures', 'policies', 'darwin', 'com.visualstudio.code.oss.mobileconfig'); + const fixturePath = path.join(import.meta.dirname, 'fixtures', 'policies', 'darwin', 'com.visualstudio.code.oss.mobileconfig'); const expectedContent = await fs.readFile(fixturePath, 'utf-8'); // Compare the rendered profile with the fixture @@ -410,7 +410,7 @@ suite('Policy E2E conversion', () => { const result = renderMacOSPolicy(mockProduct, parsedPolicies, []); // Load the expected fixture file - const fixturePath = path.join(__dirname, 'fixtures', 'policies', 'darwin', 'en-us', 'com.visualstudio.code.oss.plist'); + const fixturePath = path.join(import.meta.dirname, 'fixtures', 'policies', 'darwin', 'en-us', 'com.visualstudio.code.oss.plist'); const expectedContent = await fs.readFile(fixturePath, 'utf-8'); // Find the en-us manifest @@ -432,7 +432,7 @@ suite('Policy E2E conversion', () => { const result = renderGP(mockProduct, parsedPolicies, []); // Load the expected fixture file - const fixturePath = path.join(__dirname, 'fixtures', 'policies', 'win32', 'CodeOSS.admx'); + const fixturePath = path.join(import.meta.dirname, 'fixtures', 'policies', 'win32', 'CodeOSS.admx'); const expectedContent = await fs.readFile(fixturePath, 'utf-8'); // Compare the rendered ADMX with the fixture @@ -444,7 +444,7 @@ suite('Policy E2E conversion', () => { const result = renderGP(mockProduct, parsedPolicies, []); // Load the expected fixture file - const fixturePath = path.join(__dirname, 'fixtures', 'policies', 'win32', 'en-us', 'CodeOSS.adml'); + const fixturePath = path.join(import.meta.dirname, 'fixtures', 'policies', 'win32', 'en-us', 'CodeOSS.adml'); const expectedContent = await fs.readFile(fixturePath, 'utf-8'); // Find the en-us ADML @@ -460,7 +460,7 @@ suite('Policy E2E conversion', () => { const result = renderMacOSPolicy(mockProduct, parsedPolicies, frenchTranslations); // Load the expected fixture file - const fixturePath = path.join(__dirname, 'fixtures', 'policies', 'darwin', 'fr-fr', 'com.visualstudio.code.oss.plist'); + 
const fixturePath = path.join(import.meta.dirname, 'fixtures', 'policies', 'darwin', 'fr-fr', 'com.visualstudio.code.oss.plist'); const expectedContent = await fs.readFile(fixturePath, 'utf-8'); // Find the fr-fr manifest @@ -481,7 +481,7 @@ suite('Policy E2E conversion', () => { const result = renderGP(mockProduct, parsedPolicies, frenchTranslations); // Load the expected fixture file - const fixturePath = path.join(__dirname, 'fixtures', 'policies', 'win32', 'fr-fr', 'CodeOSS.adml'); + const fixturePath = path.join(import.meta.dirname, 'fixtures', 'policies', 'win32', 'fr-fr', 'CodeOSS.adml'); const expectedContent = await fs.readFile(fixturePath, 'utf-8'); // Find the fr-fr ADML @@ -497,7 +497,7 @@ suite('Policy E2E conversion', () => { const result = renderJsonPolicies(parsedPolicies); // Load the expected fixture file - const fixturePath = path.join(__dirname, 'fixtures', 'policies', 'linux', 'policy.json'); + const fixturePath = path.join(import.meta.dirname, 'fixtures', 'policies', 'linux', 'policy.json'); const expectedContent = await fs.readFile(fixturePath, 'utf-8'); const expectedJson = JSON.parse(expectedContent); diff --git a/build/lib/test/render.test.js b/build/lib/test/render.test.js deleted file mode 100644 index 87c7fa14621..00000000000 --- a/build/lib/test/render.test.js +++ /dev/null @@ -1,855 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert_1 = __importDefault(require("assert")); -const render_js_1 = require("../policies/render.js"); -const types_js_1 = require("../policies/types.js"); -suite('Render Functions', () => { - suite('renderADMLString', () => { - test('should render ADML string without translations', () => { - const nlsString = { - value: 'Test description', - nlsKey: 'test.description' - }; - const result = (0, render_js_1.renderADMLString)('TestPrefix', 'testModule', nlsString); - assert_1.default.strictEqual(result, 'Test description'); - }); - test('should replace dots with underscores in nls key', () => { - const nlsString = { - value: 'Test value', - nlsKey: 'my.test.nls.key' - }; - const result = (0, render_js_1.renderADMLString)('Prefix', 'testModule', nlsString); - assert_1.default.ok(result.includes('id="Prefix_my_test_nls_key"')); - }); - test('should use translation when available', () => { - const nlsString = { - value: 'Original value', - nlsKey: 'test.key' - }; - const translations = { - 'testModule': { - 'test.key': 'Translated value' - } - }; - const result = (0, render_js_1.renderADMLString)('TestPrefix', 'testModule', nlsString, translations); - assert_1.default.ok(result.includes('>Translated value')); - }); - test('should fallback to original value when translation not found', () => { - const nlsString = { - value: 'Original value', - nlsKey: 'test.key' - }; - const translations = { - 'testModule': { - 'other.key': 'Other translation' - } - }; - const result = (0, render_js_1.renderADMLString)('TestPrefix', 'testModule', nlsString, translations); - assert_1.default.ok(result.includes('>Original value')); - }); - }); - suite('renderProfileString', () => { - test('should render profile string without translations', () => { - const nlsString = { - value: 'Profile description', - nlsKey: 'profile.description' - }; - const result = (0, render_js_1.renderProfileString)('ProfilePrefix', 'testModule', nlsString); - assert_1.default.strictEqual(result, 'Profile description'); - }); - test('should use translation when available', () => { - const nlsString = { - value: 'Original profile value', - nlsKey: 'profile.key' - }; - const translations = { - 'testModule': { - 'profile.key': 'Translated profile value' - } - }; - const result = (0, render_js_1.renderProfileString)('ProfilePrefix', 'testModule', nlsString, translations); - assert_1.default.strictEqual(result, 'Translated profile value'); - }); - test('should fallback to original value when translation not found', () => { - const nlsString = { - value: 'Original profile value', - nlsKey: 'profile.key' - }; - const translations = { - 'testModule': { - 'other.key': 'Other translation' - } - }; - const result = (0, render_js_1.renderProfileString)('ProfilePrefix', 'testModule', nlsString, translations); - assert_1.default.strictEqual(result, 'Original profile value'); - }); - }); - suite('renderADMX', () => { - const mockCategory = { - moduleName: 'testModule', - name: { value: 'Test Category', nlsKey: 'test.category' } - }; - const mockPolicy = { - name: 'TestPolicy', - type: types_js_1.PolicyType.Boolean, - category: mockCategory, - minimumVersion: '1.85', - renderADMX: (regKey) => [ - ``, - ` `, - `` - ], - renderADMLStrings: () => ['Test Policy'], - renderADMLPresentation: () => '', - renderProfile: () => ['TestPolicy', ''], - renderProfileManifest: () => 'pfm_nameTestPolicy', - renderJsonValue: () => null - }; - test('should render ADMX with 
correct XML structure', () => { - const result = (0, render_js_1.renderADMX)('VSCode', ['1.85'], [mockCategory], [mockPolicy]); - assert_1.default.ok(result.includes('')); - assert_1.default.ok(result.includes('')); - }); - test('should include policy namespaces with regKey', () => { - const result = (0, render_js_1.renderADMX)('TestApp', ['1.0'], [mockCategory], [mockPolicy]); - assert_1.default.ok(result.includes(' { - const result = (0, render_js_1.renderADMX)('VSCode', ['1.85.0', '1.90.1'], [mockCategory], [mockPolicy]); - assert_1.default.ok(result.includes('Supported_1_85_0')); - assert_1.default.ok(result.includes('Supported_1_90_1')); - assert_1.default.ok(!result.includes('Supported_1.85.0')); - }); - test('should include categories in correct structure', () => { - const result = (0, render_js_1.renderADMX)('VSCode', ['1.0'], [mockCategory], [mockPolicy]); - assert_1.default.ok(result.includes('')); - assert_1.default.ok(result.includes('')); - }); - test('should include policies section', () => { - const result = (0, render_js_1.renderADMX)('VSCode', ['1.0'], [mockCategory], [mockPolicy]); - assert_1.default.ok(result.includes('')); - assert_1.default.ok(result.includes('TestPolicy')); - assert_1.default.ok(result.includes('')); - }); - test('should handle multiple versions', () => { - const result = (0, render_js_1.renderADMX)('VSCode', ['1.0', '1.5', '2.0'], [mockCategory], [mockPolicy]); - assert_1.default.ok(result.includes('Supported_1_0')); - assert_1.default.ok(result.includes('Supported_1_5')); - assert_1.default.ok(result.includes('Supported_2_0')); - }); - test('should handle multiple categories', () => { - const category1 = { moduleName: 'testModule', name: { value: 'Cat1', nlsKey: 'cat1' } }; - const category2 = { moduleName: 'testModule', name: { value: 'Cat2', nlsKey: 'cat2' } }; - const result = (0, render_js_1.renderADMX)('VSCode', ['1.0'], [category1, category2], [mockPolicy]); - assert_1.default.ok(result.includes('Category_cat1')); - assert_1.default.ok(result.includes('Category_cat2')); - }); - test('should handle multiple policies', () => { - const policy2 = { - name: 'TestPolicy2', - type: types_js_1.PolicyType.String, - category: mockCategory, - minimumVersion: '1.85', - renderADMX: (regKey) => [ - ``, - ` `, - `` - ], - renderADMLStrings: () => ['Test Policy 2'], - renderADMLPresentation: () => '', - renderProfile: () => ['TestPolicy2', ''], - renderProfileManifest: () => 'pfm_nameTestPolicy2', - renderJsonValue: () => null - }; - const result = (0, render_js_1.renderADMX)('VSCode', ['1.0'], [mockCategory], [mockPolicy, policy2]); - assert_1.default.ok(result.includes('TestPolicy')); - assert_1.default.ok(result.includes('TestPolicy2')); - }); - }); - suite('renderADML', () => { - const mockCategory = { - moduleName: 'testModule', - name: { value: 'Test Category', nlsKey: 'test.category' } - }; - const mockPolicy = { - name: 'TestPolicy', - type: types_js_1.PolicyType.String, - category: mockCategory, - minimumVersion: '1.85', - renderADMX: () => [], - renderADMLStrings: (translations) => [ - `Test Policy ${translations?.['testModule']?.['test.policy'] || 'Default'}` - ], - renderADMLPresentation: () => '', - renderProfile: () => [], - renderProfileManifest: () => '', - renderJsonValue: () => null - }; - test('should render ADML with correct XML structure', () => { - const result = (0, render_js_1.renderADML)('VS Code', ['1.85'], [mockCategory], [mockPolicy]); - assert_1.default.ok(result.includes('')); - assert_1.default.ok(result.includes('')); - }); - 
test('should include application name', () => { - const result = (0, render_js_1.renderADML)('My Application', ['1.0'], [mockCategory], [mockPolicy]); - assert_1.default.ok(result.includes('My Application')); - }); - test('should include supported versions with escaped greater-than', () => { - const result = (0, render_js_1.renderADML)('VS Code', ['1.85', '1.90'], [mockCategory], [mockPolicy]); - assert_1.default.ok(result.includes('VS Code >= 1.85')); - assert_1.default.ok(result.includes('VS Code >= 1.90')); - }); - test('should include category strings', () => { - const result = (0, render_js_1.renderADML)('VS Code', ['1.0'], [mockCategory], [mockPolicy]); - assert_1.default.ok(result.includes('Category_test_category')); - }); - test('should include policy strings', () => { - const result = (0, render_js_1.renderADML)('VS Code', ['1.0'], [mockCategory], [mockPolicy]); - assert_1.default.ok(result.includes('TestPolicy')); - assert_1.default.ok(result.includes('Test Policy Default')); - }); - test('should include policy presentations', () => { - const result = (0, render_js_1.renderADML)('VS Code', ['1.0'], [mockCategory], [mockPolicy]); - assert_1.default.ok(result.includes('')); - assert_1.default.ok(result.includes('')); - assert_1.default.ok(result.includes('')); - }); - test('should pass translations to policy strings', () => { - const translations = { - 'testModule': { - 'test.policy': 'Translated' - } - }; - const result = (0, render_js_1.renderADML)('VS Code', ['1.0'], [mockCategory], [mockPolicy], translations); - assert_1.default.ok(result.includes('Test Policy Translated')); - }); - test('should handle multiple categories', () => { - const category1 = { moduleName: 'testModule', name: { value: 'Cat1', nlsKey: 'cat1' } }; - const category2 = { moduleName: 'testModule', name: { value: 'Cat2', nlsKey: 'cat2' } }; - const result = (0, render_js_1.renderADML)('VS Code', ['1.0'], [category1, category2], [mockPolicy]); - assert_1.default.ok(result.includes('Category_cat1')); - assert_1.default.ok(result.includes('Category_cat2')); - }); - }); - suite('renderProfileManifest', () => { - const mockCategory = { - moduleName: 'testModule', - name: { value: 'Test Category', nlsKey: 'test.category' } - }; - const mockPolicy = { - name: 'TestPolicy', - type: types_js_1.PolicyType.Boolean, - category: mockCategory, - minimumVersion: '1.0', - renderADMX: () => [], - renderADMLStrings: () => [], - renderADMLPresentation: () => '', - renderProfile: () => [], - renderProfileManifest: (translations) => ` -pfm_name -TestPolicy -pfm_description -${translations?.['testModule']?.['test.desc'] || 'Default Desc'} -`, - renderJsonValue: () => null - }; - test('should render profile manifest with correct XML structure', () => { - const result = (0, render_js_1.renderProfileManifest)('VS Code', 'com.microsoft.vscode', ['1.0'], [mockCategory], [mockPolicy]); - assert_1.default.ok(result.includes('')); - assert_1.default.ok(result.includes('')); - assert_1.default.ok(result.includes('')); - }); - test('should include app name', () => { - const result = (0, render_js_1.renderProfileManifest)('My App', 'com.example.myapp', ['1.0'], [mockCategory], [mockPolicy]); - assert_1.default.ok(result.includes('My App Managed Settings')); - assert_1.default.ok(result.includes('My App')); - }); - test('should include bundle identifier', () => { - const result = (0, render_js_1.renderProfileManifest)('VS Code', 'com.microsoft.vscode', ['1.0'], [mockCategory], [mockPolicy]); - 
assert_1.default.ok(result.includes('com.microsoft.vscode')); - }); - test('should include required payload fields', () => { - const result = (0, render_js_1.renderProfileManifest)('VS Code', 'com.microsoft.vscode', ['1.0'], [mockCategory], [mockPolicy]); - assert_1.default.ok(result.includes('PayloadDescription')); - assert_1.default.ok(result.includes('PayloadDisplayName')); - assert_1.default.ok(result.includes('PayloadIdentifier')); - assert_1.default.ok(result.includes('PayloadType')); - assert_1.default.ok(result.includes('PayloadUUID')); - assert_1.default.ok(result.includes('PayloadVersion')); - assert_1.default.ok(result.includes('PayloadOrganization')); - }); - test('should include policy manifests in subkeys', () => { - const result = (0, render_js_1.renderProfileManifest)('VS Code', 'com.microsoft.vscode', ['1.0'], [mockCategory], [mockPolicy]); - assert_1.default.ok(result.includes('pfm_subkeys')); - assert_1.default.ok(result.includes('TestPolicy')); - assert_1.default.ok(result.includes('Default Desc')); - }); - test('should pass translations to policy manifests', () => { - const translations = { - 'testModule': { - 'test.desc': 'Translated Description' - } - }; - const result = (0, render_js_1.renderProfileManifest)('VS Code', 'com.microsoft.vscode', ['1.0'], [mockCategory], [mockPolicy], translations); - assert_1.default.ok(result.includes('Translated Description')); - }); - test('should include VS Code specific URLs', () => { - const result = (0, render_js_1.renderProfileManifest)('VS Code', 'com.microsoft.vscode', ['1.0'], [mockCategory], [mockPolicy]); - assert_1.default.ok(result.includes('https://code.visualstudio.com/')); - assert_1.default.ok(result.includes('https://code.visualstudio.com/docs/setup/enterprise')); - }); - test('should include last modified date', () => { - const result = (0, render_js_1.renderProfileManifest)('VS Code', 'com.microsoft.vscode', ['1.0'], [mockCategory], [mockPolicy]); - assert_1.default.ok(result.includes('pfm_last_modified')); - assert_1.default.ok(result.includes('')); - }); - test('should mark manifest as unique', () => { - const result = (0, render_js_1.renderProfileManifest)('VS Code', 'com.microsoft.vscode', ['1.0'], [mockCategory], [mockPolicy]); - assert_1.default.ok(result.includes('pfm_unique')); - assert_1.default.ok(result.includes('')); - }); - test('should handle multiple policies', () => { - const policy2 = { - ...mockPolicy, - name: 'TestPolicy2', - renderProfileManifest: () => ` -pfm_name -TestPolicy2 -` - }; - const result = (0, render_js_1.renderProfileManifest)('VS Code', 'com.microsoft.vscode', ['1.0'], [mockCategory], [mockPolicy, policy2]); - assert_1.default.ok(result.includes('TestPolicy')); - assert_1.default.ok(result.includes('TestPolicy2')); - }); - test('should set format version to 1', () => { - const result = (0, render_js_1.renderProfileManifest)('VS Code', 'com.microsoft.vscode', ['1.0'], [mockCategory], [mockPolicy]); - assert_1.default.ok(result.includes('pfm_format_version')); - assert_1.default.ok(result.includes('1')); - }); - test('should set interaction to combined', () => { - const result = (0, render_js_1.renderProfileManifest)('VS Code', 'com.microsoft.vscode', ['1.0'], [mockCategory], [mockPolicy]); - assert_1.default.ok(result.includes('pfm_interaction')); - assert_1.default.ok(result.includes('combined')); - }); - test('should set platform to macOS', () => { - const result = (0, render_js_1.renderProfileManifest)('VS Code', 'com.microsoft.vscode', ['1.0'], [mockCategory], [mockPolicy]); - 
assert_1.default.ok(result.includes('pfm_platforms')); - assert_1.default.ok(result.includes('macOS')); - }); - }); - suite('renderMacOSPolicy', () => { - const mockCategory = { - moduleName: 'testModule', - name: { value: 'Test Category', nlsKey: 'test.category' } - }; - const mockPolicy = { - name: 'TestPolicy', - type: types_js_1.PolicyType.Boolean, - category: mockCategory, - minimumVersion: '1.0', - renderADMX: () => [], - renderADMLStrings: () => [], - renderADMLPresentation: () => '', - renderProfile: () => ['TestPolicy', ''], - renderProfileManifest: (translations) => ` -pfm_name -TestPolicy -pfm_description -${translations?.['testModule']?.['test.desc'] || 'Default Desc'} -`, - renderJsonValue: () => null - }; - test('should render complete macOS policy profile', () => { - const product = { - nameLong: 'VS Code', - darwinBundleIdentifier: 'com.microsoft.vscode', - darwinProfilePayloadUUID: 'payload-uuid', - darwinProfileUUID: 'uuid', - win32RegValueName: 'VSCode' - }; - const result = (0, render_js_1.renderMacOSPolicy)(product, [mockPolicy], []); - const expected = ` - - - - PayloadContent - - - PayloadDisplayName - VS Code - PayloadIdentifier - com.microsoft.vscode.uuid - PayloadType - com.microsoft.vscode - PayloadUUID - uuid - PayloadVersion - 1 - TestPolicy - - - - PayloadDescription - This profile manages VS Code. For more information see https://code.visualstudio.com/docs/setup/enterprise - PayloadDisplayName - VS Code - PayloadIdentifier - com.microsoft.vscode - PayloadOrganization - Microsoft - PayloadType - Configuration - PayloadUUID - payload-uuid - PayloadVersion - 1 - TargetDeviceType - 5 - -`; - assert_1.default.strictEqual(result.profile, expected); - }); - test('should include en-us manifest by default', () => { - const product = { - nameLong: 'VS Code', - darwinBundleIdentifier: 'com.microsoft.vscode', - darwinProfilePayloadUUID: 'payload-uuid', - darwinProfileUUID: 'uuid', - win32RegValueName: 'VSCode' - }; - const result = (0, render_js_1.renderMacOSPolicy)(product, [mockPolicy], []); - assert_1.default.strictEqual(result.manifests.length, 1); - assert_1.default.strictEqual(result.manifests[0].languageId, 'en-us'); - assert_1.default.ok(result.manifests[0].contents.includes('VS Code Managed Settings')); - }); - test('should include translations', () => { - const product = { - nameLong: 'VS Code', - darwinBundleIdentifier: 'com.microsoft.vscode', - darwinProfilePayloadUUID: 'payload-uuid', - darwinProfileUUID: 'uuid', - win32RegValueName: 'VSCode' - }; - const translations = [ - { languageId: 'fr-fr', languageTranslations: { 'testModule': { 'test.desc': 'Description Française' } } }, - { languageId: 'de-de', languageTranslations: { 'testModule': { 'test.desc': 'Deutsche Beschreibung' } } } - ]; - const result = (0, render_js_1.renderMacOSPolicy)(product, [mockPolicy], translations); - assert_1.default.strictEqual(result.manifests.length, 3); // en-us + 2 translations - assert_1.default.strictEqual(result.manifests[0].languageId, 'en-us'); - assert_1.default.strictEqual(result.manifests[1].languageId, 'fr-fr'); - assert_1.default.strictEqual(result.manifests[2].languageId, 'de-de'); - assert_1.default.ok(result.manifests[1].contents.includes('Description Française')); - assert_1.default.ok(result.manifests[2].contents.includes('Deutsche Beschreibung')); - }); - test('should handle multiple policies with correct indentation', () => { - const policy2 = { - ...mockPolicy, - name: 'TestPolicy2', - renderProfile: () => ['TestPolicy2', 'test value'] - }; - const product 
= { - nameLong: 'VS Code', - darwinBundleIdentifier: 'com.microsoft.vscode', - darwinProfilePayloadUUID: 'payload-uuid', - darwinProfileUUID: 'uuid', - win32RegValueName: 'VSCode' - }; - const result = (0, render_js_1.renderMacOSPolicy)(product, [mockPolicy, policy2], []); - assert_1.default.ok(result.profile.includes('TestPolicy')); - assert_1.default.ok(result.profile.includes('')); - assert_1.default.ok(result.profile.includes('TestPolicy2')); - assert_1.default.ok(result.profile.includes('test value')); - }); - test('should use provided UUIDs in profile', () => { - const product = { - nameLong: 'My App', - darwinBundleIdentifier: 'com.example.app', - darwinProfilePayloadUUID: 'custom-payload-uuid', - darwinProfileUUID: 'custom-uuid', - win32RegValueName: 'VSCode' - }; - const result = (0, render_js_1.renderMacOSPolicy)(product, [mockPolicy], []); - assert_1.default.ok(result.profile.includes('custom-payload-uuid')); - assert_1.default.ok(result.profile.includes('custom-uuid')); - assert_1.default.ok(result.profile.includes('com.example.app.custom-uuid')); - }); - test('should include enterprise documentation link', () => { - const product = { - nameLong: 'VS Code', - darwinBundleIdentifier: 'com.microsoft.vscode', - darwinProfilePayloadUUID: 'payload-uuid', - darwinProfileUUID: 'uuid', - win32RegValueName: 'VSCode' - }; - const result = (0, render_js_1.renderMacOSPolicy)(product, [mockPolicy], []); - assert_1.default.ok(result.profile.includes('https://code.visualstudio.com/docs/setup/enterprise')); - }); - test('should set TargetDeviceType to 5', () => { - const product = { - nameLong: 'VS Code', - darwinBundleIdentifier: 'com.microsoft.vscode', - darwinProfilePayloadUUID: 'payload-uuid', - darwinProfileUUID: 'uuid', - win32RegValueName: 'VSCode' - }; - const result = (0, render_js_1.renderMacOSPolicy)(product, [mockPolicy], []); - assert_1.default.ok(result.profile.includes('TargetDeviceType')); - assert_1.default.ok(result.profile.includes('5')); - }); - }); - suite('renderGP', () => { - const mockCategory = { - moduleName: 'testModule', - name: { value: 'Test Category', nlsKey: 'test.category' } - }; - const mockPolicy = { - name: 'TestPolicy', - type: types_js_1.PolicyType.Boolean, - category: mockCategory, - minimumVersion: '1.85', - renderADMX: (regKey) => [ - ``, - ` `, - `` - ], - renderADMLStrings: (translations) => [ - `${translations?.['testModule']?.['test.policy'] || 'Test Policy'}` - ], - renderADMLPresentation: () => '', - renderProfile: () => [], - renderProfileManifest: () => '', - renderJsonValue: () => null - }; - test('should render complete GP with ADMX and ADML', () => { - const product = { - nameLong: 'VS Code', - darwinBundleIdentifier: 'com.microsoft.vscode', - darwinProfilePayloadUUID: 'payload-uuid', - darwinProfileUUID: 'uuid', - win32RegValueName: 'VSCode' - }; - const result = (0, render_js_1.renderGP)(product, [mockPolicy], []); - assert_1.default.ok(result.admx); - assert_1.default.ok(result.adml); - assert_1.default.ok(Array.isArray(result.adml)); - }); - test('should include regKey in ADMX', () => { - const product = { - nameLong: 'VS Code', - darwinBundleIdentifier: 'com.microsoft.vscode', - darwinProfilePayloadUUID: 'payload-uuid', - darwinProfileUUID: 'uuid', - win32RegValueName: 'CustomRegKey' - }; - const result = (0, render_js_1.renderGP)(product, [mockPolicy], []); - assert_1.default.ok(result.admx.includes('CustomRegKey')); - assert_1.default.ok(result.admx.includes('Software\\Policies\\Microsoft\\CustomRegKey')); - }); - test('should include 
en-us ADML by default', () => { - const product = { - nameLong: 'VS Code', - darwinBundleIdentifier: 'com.microsoft.vscode', - darwinProfilePayloadUUID: 'payload-uuid', - darwinProfileUUID: 'uuid', - win32RegValueName: 'VSCode' - }; - const result = (0, render_js_1.renderGP)(product, [mockPolicy], []); - assert_1.default.strictEqual(result.adml.length, 1); - assert_1.default.strictEqual(result.adml[0].languageId, 'en-us'); - assert_1.default.ok(result.adml[0].contents.includes('VS Code')); - }); - test('should include translations in ADML', () => { - const product = { - nameLong: 'VS Code', - darwinBundleIdentifier: 'com.microsoft.vscode', - darwinProfilePayloadUUID: 'payload-uuid', - darwinProfileUUID: 'uuid', - win32RegValueName: 'VSCode' - }; - const translations = [ - { languageId: 'fr-fr', languageTranslations: { 'testModule': { 'test.policy': 'Politique de test' } } }, - { languageId: 'de-de', languageTranslations: { 'testModule': { 'test.policy': 'Testrichtlinie' } } } - ]; - const result = (0, render_js_1.renderGP)(product, [mockPolicy], translations); - assert_1.default.strictEqual(result.adml.length, 3); // en-us + 2 translations - assert_1.default.strictEqual(result.adml[0].languageId, 'en-us'); - assert_1.default.strictEqual(result.adml[1].languageId, 'fr-fr'); - assert_1.default.strictEqual(result.adml[2].languageId, 'de-de'); - assert_1.default.ok(result.adml[1].contents.includes('Politique de test')); - assert_1.default.ok(result.adml[2].contents.includes('Testrichtlinie')); - }); - test('should pass versions to ADMX', () => { - const product = { - nameLong: 'VS Code', - darwinBundleIdentifier: 'com.microsoft.vscode', - darwinProfilePayloadUUID: 'payload-uuid', - darwinProfileUUID: 'uuid', - win32RegValueName: 'VSCode' - }; - const result = (0, render_js_1.renderGP)(product, [mockPolicy], []); - assert_1.default.ok(result.admx.includes('Supported_1_85')); - }); - test('should pass versions to ADML', () => { - const product = { - nameLong: 'VS Code', - darwinBundleIdentifier: 'com.microsoft.vscode', - darwinProfilePayloadUUID: 'payload-uuid', - darwinProfileUUID: 'uuid', - win32RegValueName: 'VSCode' - }; - const result = (0, render_js_1.renderGP)(product, [mockPolicy], []); - assert_1.default.ok(result.adml[0].contents.includes('VS Code >= 1.85')); - }); - test('should pass categories to ADMX', () => { - const product = { - nameLong: 'VS Code', - darwinBundleIdentifier: 'com.microsoft.vscode', - darwinProfilePayloadUUID: 'payload-uuid', - darwinProfileUUID: 'uuid', - win32RegValueName: 'VSCode' - }; - const result = (0, render_js_1.renderGP)(product, [mockPolicy], []); - assert_1.default.ok(result.admx.includes('test.category')); - }); - test('should pass categories to ADML', () => { - const product = { - nameLong: 'VS Code', - darwinBundleIdentifier: 'com.microsoft.vscode', - darwinProfilePayloadUUID: 'payload-uuid', - darwinProfileUUID: 'uuid', - win32RegValueName: 'VSCode' - }; - const result = (0, render_js_1.renderGP)(product, [mockPolicy], []); - assert_1.default.ok(result.adml[0].contents.includes('Category_test_category')); - }); - test('should handle multiple policies', () => { - const policy2 = { - ...mockPolicy, - name: 'TestPolicy2', - renderADMX: (regKey) => [ - ``, - ` `, - `` - ], - renderADMLStrings: () => ['Test Policy 2'] - }; - const product = { - nameLong: 'VS Code', - darwinBundleIdentifier: 'com.microsoft.vscode', - darwinProfilePayloadUUID: 'payload-uuid', - darwinProfileUUID: 'uuid', - win32RegValueName: 'VSCode' - }; - const result = (0, 
render_js_1.renderGP)(product, [mockPolicy, policy2], []); - assert_1.default.ok(result.admx.includes('TestPolicy')); - assert_1.default.ok(result.admx.includes('TestPolicy2')); - assert_1.default.ok(result.adml[0].contents.includes('TestPolicy')); - assert_1.default.ok(result.adml[0].contents.includes('TestPolicy2')); - }); - test('should include app name in ADML', () => { - const product = { - nameLong: 'My Custom App', - darwinBundleIdentifier: 'com.microsoft.vscode', - darwinProfilePayloadUUID: 'payload-uuid', - darwinProfileUUID: 'uuid', - win32RegValueName: 'VSCode' - }; - const result = (0, render_js_1.renderGP)(product, [mockPolicy], []); - assert_1.default.ok(result.adml[0].contents.includes('My Custom App')); - }); - test('should return structured result with admx and adml properties', () => { - const product = { - nameLong: 'VS Code', - darwinBundleIdentifier: 'com.microsoft.vscode', - darwinProfilePayloadUUID: 'payload-uuid', - darwinProfileUUID: 'uuid', - win32RegValueName: 'VSCode' - }; - const result = (0, render_js_1.renderGP)(product, [mockPolicy], []); - assert_1.default.ok('admx' in result); - assert_1.default.ok('adml' in result); - assert_1.default.strictEqual(typeof result.admx, 'string'); - assert_1.default.ok(Array.isArray(result.adml)); - }); - }); - suite('renderJsonPolicies', () => { - const mockCategory = { - moduleName: 'testModule', - name: { value: 'Test Category', nlsKey: 'test.category' } - }; - test('should render boolean policy JSON value', () => { - const booleanPolicy = { - name: 'BooleanPolicy', - type: types_js_1.PolicyType.Boolean, - category: mockCategory, - minimumVersion: '1.0', - renderADMX: () => [], - renderADMLStrings: () => [], - renderADMLPresentation: () => '', - renderProfile: () => [], - renderProfileManifest: () => '', - renderJsonValue: () => false - }; - const result = (0, render_js_1.renderJsonPolicies)([booleanPolicy]); - assert_1.default.deepStrictEqual(result, { BooleanPolicy: false }); - }); - test('should render number policy JSON value', () => { - const numberPolicy = { - name: 'NumberPolicy', - type: types_js_1.PolicyType.Number, - category: mockCategory, - minimumVersion: '1.0', - renderADMX: () => [], - renderADMLStrings: () => [], - renderADMLPresentation: () => '', - renderProfile: () => [], - renderProfileManifest: () => '', - renderJsonValue: () => 42 - }; - const result = (0, render_js_1.renderJsonPolicies)([numberPolicy]); - assert_1.default.deepStrictEqual(result, { NumberPolicy: 42 }); - }); - test('should render string policy JSON value', () => { - const stringPolicy = { - name: 'StringPolicy', - type: types_js_1.PolicyType.String, - category: mockCategory, - minimumVersion: '1.0', - renderADMX: () => [], - renderADMLStrings: () => [], - renderADMLPresentation: () => '', - renderProfile: () => [], - renderProfileManifest: () => '', - renderJsonValue: () => '' - }; - const result = (0, render_js_1.renderJsonPolicies)([stringPolicy]); - assert_1.default.deepStrictEqual(result, { StringPolicy: '' }); - }); - test('should render string enum policy JSON value', () => { - const stringEnumPolicy = { - name: 'StringEnumPolicy', - type: types_js_1.PolicyType.StringEnum, - category: mockCategory, - minimumVersion: '1.0', - renderADMX: () => [], - renderADMLStrings: () => [], - renderADMLPresentation: () => '', - renderProfile: () => [], - renderProfileManifest: () => '', - renderJsonValue: () => 'auto' - }; - const result = (0, render_js_1.renderJsonPolicies)([stringEnumPolicy]); - assert_1.default.deepStrictEqual(result, { 
StringEnumPolicy: 'auto' }); - }); - test('should render object policy JSON value', () => { - const objectPolicy = { - name: 'ObjectPolicy', - type: types_js_1.PolicyType.Object, - category: mockCategory, - minimumVersion: '1.0', - renderADMX: () => [], - renderADMLStrings: () => [], - renderADMLPresentation: () => '', - renderProfile: () => [], - renderProfileManifest: () => '', - renderJsonValue: () => '' - }; - const result = (0, render_js_1.renderJsonPolicies)([objectPolicy]); - assert_1.default.deepStrictEqual(result, { ObjectPolicy: '' }); - }); - test('should render multiple policies', () => { - const booleanPolicy = { - name: 'BooleanPolicy', - type: types_js_1.PolicyType.Boolean, - category: mockCategory, - minimumVersion: '1.0', - renderADMX: () => [], - renderADMLStrings: () => [], - renderADMLPresentation: () => '', - renderProfile: () => [], - renderProfileManifest: () => '', - renderJsonValue: () => true - }; - const numberPolicy = { - name: 'NumberPolicy', - type: types_js_1.PolicyType.Number, - category: mockCategory, - minimumVersion: '1.0', - renderADMX: () => [], - renderADMLStrings: () => [], - renderADMLPresentation: () => '', - renderProfile: () => [], - renderProfileManifest: () => '', - renderJsonValue: () => 100 - }; - const stringPolicy = { - name: 'StringPolicy', - type: types_js_1.PolicyType.String, - category: mockCategory, - minimumVersion: '1.0', - renderADMX: () => [], - renderADMLStrings: () => [], - renderADMLPresentation: () => '', - renderProfile: () => [], - renderProfileManifest: () => '', - renderJsonValue: () => 'test-value' - }; - const result = (0, render_js_1.renderJsonPolicies)([booleanPolicy, numberPolicy, stringPolicy]); - assert_1.default.deepStrictEqual(result, { - BooleanPolicy: true, - NumberPolicy: 100, - StringPolicy: 'test-value' - }); - }); - test('should handle empty policies array', () => { - const result = (0, render_js_1.renderJsonPolicies)([]); - assert_1.default.deepStrictEqual(result, {}); - }); - test('should handle null JSON value', () => { - const nullPolicy = { - name: 'NullPolicy', - type: types_js_1.PolicyType.String, - category: mockCategory, - minimumVersion: '1.0', - renderADMX: () => [], - renderADMLStrings: () => [], - renderADMLPresentation: () => '', - renderProfile: () => [], - renderProfileManifest: () => '', - renderJsonValue: () => null - }; - const result = (0, render_js_1.renderJsonPolicies)([nullPolicy]); - assert_1.default.deepStrictEqual(result, { NullPolicy: null }); - }); - test('should handle object JSON value', () => { - const objectPolicy = { - name: 'ComplexObjectPolicy', - type: types_js_1.PolicyType.Object, - category: mockCategory, - minimumVersion: '1.0', - renderADMX: () => [], - renderADMLStrings: () => [], - renderADMLPresentation: () => '', - renderProfile: () => [], - renderProfileManifest: () => '', - renderJsonValue: () => ({ nested: { value: 123 } }) - }; - const result = (0, render_js_1.renderJsonPolicies)([objectPolicy]); - assert_1.default.deepStrictEqual(result, { ComplexObjectPolicy: { nested: { value: 123 } } }); - }); - }); -}); -//# sourceMappingURL=render.test.js.map \ No newline at end of file diff --git a/build/lib/test/render.test.ts b/build/lib/test/render.test.ts index 325831247c4..130bbc78132 100644 --- a/build/lib/test/render.test.ts +++ b/build/lib/test/render.test.ts @@ -4,8 +4,8 @@ *--------------------------------------------------------------------------------------------*/ import assert from 'assert'; -import { renderADMLString, renderProfileString, renderADMX, 
renderADML, renderProfileManifest, renderMacOSPolicy, renderGP, renderJsonPolicies } from '../policies/render.js'; -import { NlsString, LanguageTranslations, Category, Policy, PolicyType } from '../policies/types.js'; +import { renderADMLString, renderProfileString, renderADMX, renderADML, renderProfileManifest, renderMacOSPolicy, renderGP, renderJsonPolicies } from '../policies/render.ts'; +import { type NlsString, type LanguageTranslations, type Category, type Policy, PolicyType } from '../policies/types.ts'; suite('Render Functions', () => { diff --git a/build/lib/test/stringEnumPolicy.test.js b/build/lib/test/stringEnumPolicy.test.js deleted file mode 100644 index d1700730544..00000000000 --- a/build/lib/test/stringEnumPolicy.test.js +++ /dev/null @@ -1,142 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert_1 = __importDefault(require("assert")); -const stringEnumPolicy_js_1 = require("../policies/stringEnumPolicy.js"); -const types_js_1 = require("../policies/types.js"); -suite('StringEnumPolicy', () => { - const mockCategory = { - key: 'test.category', - name: { value: 'Category1', key: 'test.category' }, - }; - const mockPolicy = { - key: 'test.stringenum.policy', - name: 'TestStringEnumPolicy', - category: 'Category1', - minimumVersion: '1.0', - type: 'string', - localization: { - description: { key: 'test.policy.description', value: 'Test policy description' }, - enumDescriptions: [ - { key: 'test.option.one', value: 'Option One' }, - { key: 'test.option.two', value: 'Option Two' }, - { key: 'test.option.three', value: 'Option Three' } - ] - }, - enum: ['auto', 'manual', 'disabled'] - }; - test('should create StringEnumPolicy from factory method', () => { - const policy = stringEnumPolicy_js_1.StringEnumPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - assert_1.default.strictEqual(policy.name, 'TestStringEnumPolicy'); - assert_1.default.strictEqual(policy.minimumVersion, '1.0'); - assert_1.default.strictEqual(policy.category.name.nlsKey, mockCategory.name.key); - assert_1.default.strictEqual(policy.category.name.value, mockCategory.name.value); - assert_1.default.strictEqual(policy.type, types_js_1.PolicyType.StringEnum); - }); - test('should render ADMX elements correctly', () => { - const policy = stringEnumPolicy_js_1.StringEnumPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const admx = policy.renderADMX('TestKey'); - assert_1.default.deepStrictEqual(admx, [ - '', - '\t', - '\t', - '\t', - '', - '\tauto', - '\tmanual', - '\tdisabled', - '', - '\t', - '' - ]); - }); - test('should render ADML strings correctly', () => { - const policy = stringEnumPolicy_js_1.StringEnumPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const admlStrings = policy.renderADMLStrings(); - assert_1.default.deepStrictEqual(admlStrings, [ - 'TestStringEnumPolicy', - 'Test policy description', - 'Option One', - 'Option Two', - 'Option Three' - ]); - }); - test('should render ADML strings with 
translations', () => { - const policy = stringEnumPolicy_js_1.StringEnumPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const translations = { - '': { - 'test.policy.description': 'Translated description', - 'test.option.one': 'Translated Option One', - 'test.option.two': 'Translated Option Two' - } - }; - const admlStrings = policy.renderADMLStrings(translations); - assert_1.default.deepStrictEqual(admlStrings, [ - 'TestStringEnumPolicy', - 'Translated description', - 'Translated Option One', - 'Translated Option Two', - 'Option Three' - ]); - }); - test('should render ADML presentation correctly', () => { - const policy = stringEnumPolicy_js_1.StringEnumPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const presentation = policy.renderADMLPresentation(); - assert_1.default.strictEqual(presentation, ''); - }); - test('should render JSON value correctly', () => { - const policy = stringEnumPolicy_js_1.StringEnumPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const jsonValue = policy.renderJsonValue(); - assert_1.default.strictEqual(jsonValue, 'auto'); - }); - test('should render profile value correctly', () => { - const policy = stringEnumPolicy_js_1.StringEnumPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const profileValue = policy.renderProfileValue(); - assert_1.default.strictEqual(profileValue, 'auto'); - }); - test('should render profile correctly', () => { - const policy = stringEnumPolicy_js_1.StringEnumPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const profile = policy.renderProfile(); - assert_1.default.strictEqual(profile.length, 2); - assert_1.default.strictEqual(profile[0], 'TestStringEnumPolicy'); - assert_1.default.strictEqual(profile[1], 'auto'); - }); - test('should render profile manifest value correctly', () => { - const policy = stringEnumPolicy_js_1.StringEnumPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const manifestValue = policy.renderProfileManifestValue(); - assert_1.default.strictEqual(manifestValue, 'pfm_default\nauto\npfm_description\nTest policy description\npfm_name\nTestStringEnumPolicy\npfm_title\nTestStringEnumPolicy\npfm_type\nstring\npfm_range_list\n\n\tauto\n\tmanual\n\tdisabled\n'); - }); - test('should render profile manifest value with translations', () => { - const policy = stringEnumPolicy_js_1.StringEnumPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const translations = { - '': { - 'test.policy.description': 'Translated manifest description' - } - }; - const manifestValue = policy.renderProfileManifestValue(translations); - assert_1.default.strictEqual(manifestValue, 'pfm_default\nauto\npfm_description\nTranslated manifest description\npfm_name\nTestStringEnumPolicy\npfm_title\nTestStringEnumPolicy\npfm_type\nstring\npfm_range_list\n\n\tauto\n\tmanual\n\tdisabled\n'); - }); - test('should render profile manifest correctly', () => { - const policy = stringEnumPolicy_js_1.StringEnumPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const manifest = policy.renderProfileManifest(); - assert_1.default.strictEqual(manifest, '\npfm_default\nauto\npfm_description\nTest policy description\npfm_name\nTestStringEnumPolicy\npfm_title\nTestStringEnumPolicy\npfm_type\nstring\npfm_range_list\n\n\tauto\n\tmanual\n\tdisabled\n\n'); - }); -}); -//# sourceMappingURL=stringEnumPolicy.test.js.map \ No newline at end of file diff --git a/build/lib/test/stringEnumPolicy.test.ts 
b/build/lib/test/stringEnumPolicy.test.ts index 3ee3856afd7..db36ce6a316 100644 --- a/build/lib/test/stringEnumPolicy.test.ts +++ b/build/lib/test/stringEnumPolicy.test.ts @@ -4,9 +4,9 @@ *--------------------------------------------------------------------------------------------*/ import assert from 'assert'; -import { StringEnumPolicy } from '../policies/stringEnumPolicy.js'; -import { LanguageTranslations, PolicyType } from '../policies/types.js'; -import { CategoryDto, PolicyDto } from '../policies/policyDto.js'; +import { StringEnumPolicy } from '../policies/stringEnumPolicy.ts'; +import { PolicyType, type LanguageTranslations } from '../policies/types.ts'; +import type { CategoryDto, PolicyDto } from '../policies/policyDto.ts'; suite('StringEnumPolicy', () => { const mockCategory: CategoryDto = { diff --git a/build/lib/test/stringPolicy.test.js b/build/lib/test/stringPolicy.test.js deleted file mode 100644 index 6919da78f88..00000000000 --- a/build/lib/test/stringPolicy.test.js +++ /dev/null @@ -1,125 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert_1 = __importDefault(require("assert")); -const stringPolicy_js_1 = require("../policies/stringPolicy.js"); -const types_js_1 = require("../policies/types.js"); -suite('StringPolicy', () => { - const mockCategory = { - key: 'test.category', - name: { value: 'Category1', key: 'test.category' }, - }; - const mockPolicy = { - key: 'test.string.policy', - name: 'TestStringPolicy', - category: 'Category1', - minimumVersion: '1.0', - type: 'string', - default: '', - localization: { - description: { key: 'test.policy.description', value: 'Test string policy description' } - } - }; - test('should create StringPolicy from factory method', () => { - const policy = stringPolicy_js_1.StringPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - assert_1.default.strictEqual(policy.name, 'TestStringPolicy'); - assert_1.default.strictEqual(policy.minimumVersion, '1.0'); - assert_1.default.strictEqual(policy.category.name.nlsKey, mockCategory.name.key); - assert_1.default.strictEqual(policy.category.name.value, mockCategory.name.value); - assert_1.default.strictEqual(policy.type, types_js_1.PolicyType.String); - }); - test('should render ADMX elements correctly', () => { - const policy = stringPolicy_js_1.StringPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const admx = policy.renderADMX('TestKey'); - assert_1.default.deepStrictEqual(admx, [ - '', - '\t', - '\t', - '\t', - '', - '\t', - '' - ]); - }); - test('should render ADML strings correctly', () => { - const policy = stringPolicy_js_1.StringPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const admlStrings = policy.renderADMLStrings(); - assert_1.default.deepStrictEqual(admlStrings, [ - 'TestStringPolicy', - 'Test string policy description' - ]); - }); - test('should render ADML strings with translations', () => { - const policy = stringPolicy_js_1.StringPolicy.from(mockCategory, mockPolicy); - 
assert_1.default.ok(policy); - const translations = { - '': { - 'test.policy.description': 'Translated description' - } - }; - const admlStrings = policy.renderADMLStrings(translations); - assert_1.default.deepStrictEqual(admlStrings, [ - 'TestStringPolicy', - 'Translated description' - ]); - }); - test('should render ADML presentation correctly', () => { - const policy = stringPolicy_js_1.StringPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const presentation = policy.renderADMLPresentation(); - assert_1.default.strictEqual(presentation, ''); - }); - test('should render JSON value correctly', () => { - const policy = stringPolicy_js_1.StringPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const jsonValue = policy.renderJsonValue(); - assert_1.default.strictEqual(jsonValue, ''); - }); - test('should render profile value correctly', () => { - const policy = stringPolicy_js_1.StringPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const profileValue = policy.renderProfileValue(); - assert_1.default.strictEqual(profileValue, ''); - }); - test('should render profile correctly', () => { - const policy = stringPolicy_js_1.StringPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const profile = policy.renderProfile(); - assert_1.default.strictEqual(profile.length, 2); - assert_1.default.strictEqual(profile[0], 'TestStringPolicy'); - assert_1.default.strictEqual(profile[1], ''); - }); - test('should render profile manifest value correctly', () => { - const policy = stringPolicy_js_1.StringPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const manifestValue = policy.renderProfileManifestValue(); - assert_1.default.strictEqual(manifestValue, 'pfm_default\n\npfm_description\nTest string policy description\npfm_name\nTestStringPolicy\npfm_title\nTestStringPolicy\npfm_type\nstring'); - }); - test('should render profile manifest value with translations', () => { - const policy = stringPolicy_js_1.StringPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const translations = { - '': { - 'test.policy.description': 'Translated manifest description' - } - }; - const manifestValue = policy.renderProfileManifestValue(translations); - assert_1.default.strictEqual(manifestValue, 'pfm_default\n\npfm_description\nTranslated manifest description\npfm_name\nTestStringPolicy\npfm_title\nTestStringPolicy\npfm_type\nstring'); - }); - test('should render profile manifest correctly', () => { - const policy = stringPolicy_js_1.StringPolicy.from(mockCategory, mockPolicy); - assert_1.default.ok(policy); - const manifest = policy.renderProfileManifest(); - assert_1.default.strictEqual(manifest, '\npfm_default\n\npfm_description\nTest string policy description\npfm_name\nTestStringPolicy\npfm_title\nTestStringPolicy\npfm_type\nstring\n'); - }); -}); -//# sourceMappingURL=stringPolicy.test.js.map \ No newline at end of file diff --git a/build/lib/test/stringPolicy.test.ts b/build/lib/test/stringPolicy.test.ts index a76c38c7dcb..7f69da33869 100644 --- a/build/lib/test/stringPolicy.test.ts +++ b/build/lib/test/stringPolicy.test.ts @@ -4,9 +4,9 @@ *--------------------------------------------------------------------------------------------*/ import assert from 'assert'; -import { StringPolicy } from '../policies/stringPolicy.js'; -import { LanguageTranslations, PolicyType } from '../policies/types.js'; -import { CategoryDto, PolicyDto } from '../policies/policyDto.js'; +import { StringPolicy } 
from '../policies/stringPolicy.ts'; +import { PolicyType, type LanguageTranslations } from '../policies/types.ts'; +import type { CategoryDto, PolicyDto } from '../policies/policyDto.ts'; suite('StringPolicy', () => { const mockCategory: CategoryDto = { diff --git a/build/lib/treeshaking.js b/build/lib/treeshaking.js deleted file mode 100644 index feca811d9f9..00000000000 --- a/build/lib/treeshaking.js +++ /dev/null @@ -1,778 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.toStringShakeLevel = toStringShakeLevel; -exports.shake = shake; -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const typeScriptLanguageServiceHost_1 = require("./typeScriptLanguageServiceHost"); -var ShakeLevel; -(function (ShakeLevel) { - ShakeLevel[ShakeLevel["Files"] = 0] = "Files"; - ShakeLevel[ShakeLevel["InnerFile"] = 1] = "InnerFile"; - ShakeLevel[ShakeLevel["ClassMembers"] = 2] = "ClassMembers"; -})(ShakeLevel || (ShakeLevel = {})); -function toStringShakeLevel(shakeLevel) { - switch (shakeLevel) { - case ShakeLevel.Files: - return 'Files (0)'; - case ShakeLevel.InnerFile: - return 'InnerFile (1)'; - case ShakeLevel.ClassMembers: - return 'ClassMembers (2)'; - } -} -function printDiagnostics(options, diagnostics) { - for (const diag of diagnostics) { - let result = ''; - if (diag.file) { - result += `${path_1.default.join(options.sourcesRoot, diag.file.fileName)}`; - } - if (diag.file && diag.start) { - const location = diag.file.getLineAndCharacterOfPosition(diag.start); - result += `:${location.line + 1}:${location.character}`; - } - result += ` - ` + JSON.stringify(diag.messageText); - console.log(result); - } -} -function shake(options) { - const ts = require('typescript'); - const languageService = createTypeScriptLanguageService(ts, options); - const program = languageService.getProgram(); - const globalDiagnostics = program.getGlobalDiagnostics(); - if (globalDiagnostics.length > 0) { - printDiagnostics(options, globalDiagnostics); - throw new Error(`Compilation Errors encountered.`); - } - const syntacticDiagnostics = program.getSyntacticDiagnostics(); - if (syntacticDiagnostics.length > 0) { - printDiagnostics(options, syntacticDiagnostics); - throw new Error(`Compilation Errors encountered.`); - } - const semanticDiagnostics = program.getSemanticDiagnostics(); - if (semanticDiagnostics.length > 0) { - printDiagnostics(options, semanticDiagnostics); - throw new Error(`Compilation Errors encountered.`); - } - markNodes(ts, languageService, options); - return generateResult(ts, languageService, options.shakeLevel); -} -//#region Discovery, LanguageService & Setup -function createTypeScriptLanguageService(ts, options) { - // Discover referenced files - const FILES = new Map(); - // Add entrypoints - options.entryPoints.forEach(entryPoint => { - const filePath = path_1.default.join(options.sourcesRoot, entryPoint); - FILES.set(path_1.default.normalize(filePath), fs_1.default.readFileSync(filePath).toString()); - }); - // Add 
fake usage files - options.inlineEntryPoints.forEach((inlineEntryPoint, index) => { - FILES.set(path_1.default.normalize(path_1.default.join(options.sourcesRoot, `inlineEntryPoint.${index}.ts`)), inlineEntryPoint); - }); - // Add additional typings - options.typings.forEach((typing) => { - const filePath = path_1.default.join(options.sourcesRoot, typing); - FILES.set(path_1.default.normalize(filePath), fs_1.default.readFileSync(filePath).toString()); - }); - const basePath = path_1.default.join(options.sourcesRoot, '..'); - const compilerOptions = ts.convertCompilerOptionsFromJson(options.compilerOptions, basePath).options; - const host = new typeScriptLanguageServiceHost_1.TypeScriptLanguageServiceHost(ts, FILES, compilerOptions); - return ts.createLanguageService(host); -} -//#endregion -//#region Tree Shaking -var NodeColor; -(function (NodeColor) { - NodeColor[NodeColor["White"] = 0] = "White"; - NodeColor[NodeColor["Gray"] = 1] = "Gray"; - NodeColor[NodeColor["Black"] = 2] = "Black"; -})(NodeColor || (NodeColor = {})); -function getColor(node) { - return node.$$$color || 0 /* NodeColor.White */; -} -function setColor(node, color) { - node.$$$color = color; -} -function markNeededSourceFile(node) { - node.$$$neededSourceFile = true; -} -function isNeededSourceFile(node) { - return Boolean(node.$$$neededSourceFile); -} -function nodeOrParentIsBlack(node) { - while (node) { - const color = getColor(node); - if (color === 2 /* NodeColor.Black */) { - return true; - } - node = node.parent; - } - return false; -} -function nodeOrChildIsBlack(node) { - if (getColor(node) === 2 /* NodeColor.Black */) { - return true; - } - for (const child of node.getChildren()) { - if (nodeOrChildIsBlack(child)) { - return true; - } - } - return false; -} -function isSymbolWithDeclarations(symbol) { - return !!(symbol && symbol.declarations); -} -function isVariableStatementWithSideEffects(ts, node) { - if (!ts.isVariableStatement(node)) { - return false; - } - let hasSideEffects = false; - const visitNode = (node) => { - if (hasSideEffects) { - // no need to go on - return; - } - if (ts.isCallExpression(node) || ts.isNewExpression(node)) { - // TODO: assuming `createDecorator` and `refineServiceDecorator` calls are side-effect free - const isSideEffectFree = /(createDecorator|refineServiceDecorator)/.test(node.expression.getText()); - if (!isSideEffectFree) { - hasSideEffects = true; - } - } - node.forEachChild(visitNode); - }; - node.forEachChild(visitNode); - return hasSideEffects; -} -function isStaticMemberWithSideEffects(ts, node) { - if (!ts.isPropertyDeclaration(node)) { - return false; - } - if (!node.modifiers) { - return false; - } - if (!node.modifiers.some(mod => mod.kind === ts.SyntaxKind.StaticKeyword)) { - return false; - } - let hasSideEffects = false; - const visitNode = (node) => { - if (hasSideEffects) { - // no need to go on - return; - } - if (ts.isCallExpression(node) || ts.isNewExpression(node)) { - hasSideEffects = true; - } - node.forEachChild(visitNode); - }; - node.forEachChild(visitNode); - return hasSideEffects; -} -function markNodes(ts, languageService, options) { - const program = languageService.getProgram(); - if (!program) { - throw new Error('Could not get program from language service'); - } - if (options.shakeLevel === ShakeLevel.Files) { - // Mark all source files Black - program.getSourceFiles().forEach((sourceFile) => { - setColor(sourceFile, 2 /* NodeColor.Black */); - }); - return; - } - const black_queue = []; - const gray_queue = []; - const export_import_queue = 
[]; - const sourceFilesLoaded = {}; - function enqueueTopLevelModuleStatements(sourceFile) { - sourceFile.forEachChild((node) => { - if (ts.isImportDeclaration(node)) { - if (!node.importClause && ts.isStringLiteral(node.moduleSpecifier)) { - setColor(node, 2 /* NodeColor.Black */); - enqueueImport(node, node.moduleSpecifier.text); - } - return; - } - if (ts.isExportDeclaration(node)) { - if (!node.exportClause && node.moduleSpecifier && ts.isStringLiteral(node.moduleSpecifier)) { - // export * from "foo"; - setColor(node, 2 /* NodeColor.Black */); - enqueueImport(node, node.moduleSpecifier.text); - } - if (node.exportClause && ts.isNamedExports(node.exportClause)) { - for (const exportSpecifier of node.exportClause.elements) { - export_import_queue.push(exportSpecifier); - } - } - return; - } - if (isVariableStatementWithSideEffects(ts, node)) { - enqueue_black(node); - } - if (ts.isExpressionStatement(node) - || ts.isIfStatement(node) - || ts.isIterationStatement(node, true) - || ts.isExportAssignment(node)) { - enqueue_black(node); - } - if (ts.isImportEqualsDeclaration(node)) { - if (/export/.test(node.getFullText(sourceFile))) { - // e.g. "export import Severity = BaseSeverity;" - enqueue_black(node); - } - } - }); - } - /** - * Return the parent of `node` which is an ImportDeclaration - */ - function findParentImportDeclaration(node) { - let _node = node; - do { - if (ts.isImportDeclaration(_node)) { - return _node; - } - _node = _node.parent; - } while (_node); - return null; - } - function enqueue_gray(node) { - if (nodeOrParentIsBlack(node) || getColor(node) === 1 /* NodeColor.Gray */) { - return; - } - setColor(node, 1 /* NodeColor.Gray */); - gray_queue.push(node); - } - function enqueue_black(node) { - const previousColor = getColor(node); - if (previousColor === 2 /* NodeColor.Black */) { - return; - } - if (previousColor === 1 /* NodeColor.Gray */) { - // remove from gray queue - gray_queue.splice(gray_queue.indexOf(node), 1); - setColor(node, 0 /* NodeColor.White */); - // add to black queue - enqueue_black(node); - // move from one queue to the other - // black_queue.push(node); - // setColor(node, NodeColor.Black); - return; - } - if (nodeOrParentIsBlack(node)) { - return; - } - const fileName = node.getSourceFile().fileName; - if (/^defaultLib:/.test(fileName) || /\.d\.ts$/.test(fileName)) { - setColor(node, 2 /* NodeColor.Black */); - return; - } - const sourceFile = node.getSourceFile(); - if (!sourceFilesLoaded[sourceFile.fileName]) { - sourceFilesLoaded[sourceFile.fileName] = true; - enqueueTopLevelModuleStatements(sourceFile); - } - if (ts.isSourceFile(node)) { - return; - } - setColor(node, 2 /* NodeColor.Black */); - black_queue.push(node); - if (options.shakeLevel === ShakeLevel.ClassMembers && (ts.isMethodDeclaration(node) || ts.isMethodSignature(node) || ts.isPropertySignature(node) || ts.isPropertyDeclaration(node) || ts.isGetAccessor(node) || ts.isSetAccessor(node))) { - const references = languageService.getReferencesAtPosition(node.getSourceFile().fileName, node.name.pos + node.name.getLeadingTriviaWidth()); - if (references) { - for (let i = 0, len = references.length; i < len; i++) { - const reference = references[i]; - const referenceSourceFile = program.getSourceFile(reference.fileName); - if (!referenceSourceFile) { - continue; - } - const referenceNode = getTokenAtPosition(ts, referenceSourceFile, reference.textSpan.start, false, false); - if (ts.isMethodDeclaration(referenceNode.parent) - || ts.isPropertyDeclaration(referenceNode.parent) - || 
ts.isGetAccessor(referenceNode.parent) - || ts.isSetAccessor(referenceNode.parent)) { - enqueue_gray(referenceNode.parent); - } - } - } - } - } - function enqueueFile(filename) { - const sourceFile = program.getSourceFile(filename); - if (!sourceFile) { - console.warn(`Cannot find source file ${filename}`); - return; - } - // This source file should survive even if it is empty - markNeededSourceFile(sourceFile); - enqueue_black(sourceFile); - } - function enqueueImport(node, importText) { - if (options.importIgnorePattern.test(importText)) { - // this import should be ignored - return; - } - const nodeSourceFile = node.getSourceFile(); - let fullPath; - if (/(^\.\/)|(^\.\.\/)/.test(importText)) { - if (importText.endsWith('.js')) { // ESM: code imports require to be relative and to have a '.js' file extension - importText = importText.substr(0, importText.length - 3); - } - fullPath = path_1.default.join(path_1.default.dirname(nodeSourceFile.fileName), importText); - } - else { - fullPath = importText; - } - if (fs_1.default.existsSync(fullPath + '.ts')) { - fullPath = fullPath + '.ts'; - } - else { - fullPath = fullPath + '.js'; - } - enqueueFile(fullPath); - } - options.entryPoints.forEach(moduleId => enqueueFile(path_1.default.join(options.sourcesRoot, moduleId))); - // Add fake usage files - options.inlineEntryPoints.forEach((_, index) => enqueueFile(path_1.default.join(options.sourcesRoot, `inlineEntryPoint.${index}.ts`))); - let step = 0; - const checker = program.getTypeChecker(); - while (black_queue.length > 0 || gray_queue.length > 0) { - ++step; - let node; - if (step % 100 === 0) { - console.log(`Treeshaking - ${Math.floor(100 * step / (step + black_queue.length + gray_queue.length))}% - ${step}/${step + black_queue.length + gray_queue.length} (${black_queue.length}, ${gray_queue.length})`); - } - if (black_queue.length === 0) { - for (let i = 0; i < gray_queue.length; i++) { - const node = gray_queue[i]; - const nodeParent = node.parent; - if ((ts.isClassDeclaration(nodeParent) || ts.isInterfaceDeclaration(nodeParent)) && nodeOrChildIsBlack(nodeParent)) { - gray_queue.splice(i, 1); - black_queue.push(node); - setColor(node, 2 /* NodeColor.Black */); - i--; - } - } - } - if (black_queue.length > 0) { - node = black_queue.shift(); - } - else { - // only gray nodes remaining... 
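The deleted build/lib/treeshaking.js above (compiled from build/lib/treeshaking.ts) drives tree shaking with a White/Gray/Black worklist: entry points and hard references are enqueued Black right away, while class and interface members discovered via find-references are parked Gray and only promoted once their declaring container itself turns Black. The following standalone TypeScript sketch shows that scheme in isolation; DemoNode, refs, members and markReachable are invented names for illustration and are not part of the build.

// Hypothetical node shape for the sketch: `refs` are hard references
// (followed eagerly); `members` are only kept if their container survives.
interface DemoNode {
	id: string;
	refs: DemoNode[];
	members: DemoNode[];
	parent?: DemoNode;
}

type Color = 'white' | 'gray' | 'black';

function markReachable(roots: DemoNode[]): Map<DemoNode, Color> {
	const color = new Map<DemoNode, Color>();
	const colorOf = (n: DemoNode): Color => color.get(n) ?? 'white';
	const blackQueue: DemoNode[] = [];
	const grayQueue: DemoNode[] = [];

	const enqueueBlack = (n: DemoNode) => {
		if (colorOf(n) === 'black') { return; }
		color.set(n, 'black');
		blackQueue.push(n);
	};
	const enqueueGray = (n: DemoNode) => {
		if (colorOf(n) !== 'white') { return; }
		color.set(n, 'gray');
		grayQueue.push(n);
	};

	roots.forEach(enqueueBlack);

	while (blackQueue.length > 0 || grayQueue.length > 0) {
		if (blackQueue.length === 0) {
			// Only gray nodes remain: keep those whose parent survived, drop the rest.
			const survivors = grayQueue.filter(n => n.parent && colorOf(n.parent) === 'black');
			grayQueue.length = 0;
			if (survivors.length === 0) { break; }
			survivors.forEach(enqueueBlack);
			continue;
		}
		const node = blackQueue.shift()!;
		node.refs.forEach(enqueueBlack);    // hard references are definitely needed
		node.members.forEach(enqueueGray);  // members are only provisionally needed
	}
	return color;
}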
- break; - } - const nodeSourceFile = node.getSourceFile(); - const loop = (node) => { - const symbols = getRealNodeSymbol(ts, checker, node); - for (const { symbol, symbolImportNode } of symbols) { - if (symbolImportNode) { - setColor(symbolImportNode, 2 /* NodeColor.Black */); - const importDeclarationNode = findParentImportDeclaration(symbolImportNode); - if (importDeclarationNode && ts.isStringLiteral(importDeclarationNode.moduleSpecifier)) { - enqueueImport(importDeclarationNode, importDeclarationNode.moduleSpecifier.text); - } - } - if (isSymbolWithDeclarations(symbol) && !nodeIsInItsOwnDeclaration(nodeSourceFile, node, symbol)) { - for (let i = 0, len = symbol.declarations.length; i < len; i++) { - const declaration = symbol.declarations[i]; - if (ts.isSourceFile(declaration)) { - // Do not enqueue full source files - // (they can be the declaration of a module import) - continue; - } - if (options.shakeLevel === ShakeLevel.ClassMembers && (ts.isClassDeclaration(declaration) || ts.isInterfaceDeclaration(declaration)) && !isLocalCodeExtendingOrInheritingFromDefaultLibSymbol(ts, program, checker, declaration)) { - enqueue_black(declaration.name); - for (let j = 0; j < declaration.members.length; j++) { - const member = declaration.members[j]; - const memberName = member.name ? member.name.getText() : null; - if (ts.isConstructorDeclaration(member) - || ts.isConstructSignatureDeclaration(member) - || ts.isIndexSignatureDeclaration(member) - || ts.isCallSignatureDeclaration(member) - || memberName === '[Symbol.iterator]' - || memberName === '[Symbol.toStringTag]' - || memberName === 'toJSON' - || memberName === 'toString' - || memberName === 'dispose' // TODO: keeping all `dispose` methods - || /^_(.*)Brand$/.test(memberName || '') // TODO: keeping all members ending with `Brand`... 
- ) { - enqueue_black(member); - } - if (isStaticMemberWithSideEffects(ts, member)) { - enqueue_black(member); - } - } - // queue the heritage clauses - if (declaration.heritageClauses) { - for (const heritageClause of declaration.heritageClauses) { - enqueue_black(heritageClause); - } - } - } - else { - enqueue_black(declaration); - } - } - } - } - node.forEachChild(loop); - }; - node.forEachChild(loop); - } - while (export_import_queue.length > 0) { - const node = export_import_queue.shift(); - if (nodeOrParentIsBlack(node)) { - continue; - } - if (!node.symbol) { - continue; - } - const aliased = checker.getAliasedSymbol(node.symbol); - if (aliased.declarations && aliased.declarations.length > 0) { - if (nodeOrParentIsBlack(aliased.declarations[0]) || nodeOrChildIsBlack(aliased.declarations[0])) { - setColor(node, 2 /* NodeColor.Black */); - } - } - } -} -function nodeIsInItsOwnDeclaration(nodeSourceFile, node, symbol) { - for (let i = 0, len = symbol.declarations.length; i < len; i++) { - const declaration = symbol.declarations[i]; - const declarationSourceFile = declaration.getSourceFile(); - if (nodeSourceFile === declarationSourceFile) { - if (declaration.pos <= node.pos && node.end <= declaration.end) { - return true; - } - } - } - return false; -} -function generateResult(ts, languageService, shakeLevel) { - const program = languageService.getProgram(); - if (!program) { - throw new Error('Could not get program from language service'); - } - const result = {}; - const writeFile = (filePath, contents) => { - result[filePath] = contents; - }; - program.getSourceFiles().forEach((sourceFile) => { - const fileName = sourceFile.fileName; - if (/^defaultLib:/.test(fileName)) { - return; - } - const destination = fileName; - if (/\.d\.ts$/.test(fileName)) { - if (nodeOrChildIsBlack(sourceFile)) { - writeFile(destination, sourceFile.text); - } - return; - } - const text = sourceFile.text; - let result = ''; - function keep(node) { - result += text.substring(node.pos, node.end); - } - function write(data) { - result += data; - } - function writeMarkedNodes(node) { - if (getColor(node) === 2 /* NodeColor.Black */) { - return keep(node); - } - // Always keep certain top-level statements - if (ts.isSourceFile(node.parent)) { - if (ts.isExpressionStatement(node) && ts.isStringLiteral(node.expression) && node.expression.text === 'use strict') { - return keep(node); - } - if (ts.isVariableStatement(node) && nodeOrChildIsBlack(node)) { - return keep(node); - } - } - // Keep the entire import in import * as X cases - if (ts.isImportDeclaration(node)) { - if (node.importClause && node.importClause.namedBindings) { - if (ts.isNamespaceImport(node.importClause.namedBindings)) { - if (getColor(node.importClause.namedBindings) === 2 /* NodeColor.Black */) { - return keep(node); - } - } - else { - const survivingImports = []; - for (const importNode of node.importClause.namedBindings.elements) { - if (getColor(importNode) === 2 /* NodeColor.Black */) { - survivingImports.push(importNode.getFullText(sourceFile)); - } - } - const leadingTriviaWidth = node.getLeadingTriviaWidth(); - const leadingTrivia = sourceFile.text.substr(node.pos, leadingTriviaWidth); - if (survivingImports.length > 0) { - if (node.importClause && node.importClause.name && getColor(node.importClause) === 2 /* NodeColor.Black */) { - return write(`${leadingTrivia}import ${node.importClause.name.text}, {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`); - } - return write(`${leadingTrivia}import 
{${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`); - } - else { - if (node.importClause && node.importClause.name && getColor(node.importClause) === 2 /* NodeColor.Black */) { - return write(`${leadingTrivia}import ${node.importClause.name.text} from${node.moduleSpecifier.getFullText(sourceFile)};`); - } - } - } - } - else { - if (node.importClause && getColor(node.importClause) === 2 /* NodeColor.Black */) { - return keep(node); - } - } - } - if (ts.isExportDeclaration(node)) { - if (node.exportClause && node.moduleSpecifier && ts.isNamedExports(node.exportClause)) { - const survivingExports = []; - for (const exportSpecifier of node.exportClause.elements) { - if (getColor(exportSpecifier) === 2 /* NodeColor.Black */) { - survivingExports.push(exportSpecifier.getFullText(sourceFile)); - } - } - const leadingTriviaWidth = node.getLeadingTriviaWidth(); - const leadingTrivia = sourceFile.text.substr(node.pos, leadingTriviaWidth); - if (survivingExports.length > 0) { - return write(`${leadingTrivia}export {${survivingExports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`); - } - } - } - if (shakeLevel === ShakeLevel.ClassMembers && (ts.isClassDeclaration(node) || ts.isInterfaceDeclaration(node)) && nodeOrChildIsBlack(node)) { - let toWrite = node.getFullText(); - for (let i = node.members.length - 1; i >= 0; i--) { - const member = node.members[i]; - if (getColor(member) === 2 /* NodeColor.Black */ || !member.name) { - // keep method - continue; - } - const pos = member.pos - node.pos; - const end = member.end - node.pos; - toWrite = toWrite.substring(0, pos) + toWrite.substring(end); - } - return write(toWrite); - } - if (ts.isFunctionDeclaration(node)) { - // Do not go inside functions if they haven't been marked - return; - } - node.forEachChild(writeMarkedNodes); - } - if (getColor(sourceFile) !== 2 /* NodeColor.Black */) { - if (!nodeOrChildIsBlack(sourceFile)) { - // none of the elements are reachable - if (isNeededSourceFile(sourceFile)) { - // this source file must be written, even if nothing is used from it - // because there is an import somewhere for it. - // However, TS complains with empty files with the error "x" is not a module, - // so we will export a dummy variable - result = 'export const __dummy = 0;'; - } - else { - // don't write this file at all! - return; - } - } - else { - sourceFile.forEachChild(writeMarkedNodes); - result += sourceFile.endOfFileToken.getFullText(sourceFile); - } - } - else { - result = text; - } - writeFile(destination, result); - }); - return result; -} -//#endregion -//#region Utils -function isLocalCodeExtendingOrInheritingFromDefaultLibSymbol(ts, program, checker, declaration) { - if (!program.isSourceFileDefaultLibrary(declaration.getSourceFile()) && declaration.heritageClauses) { - for (const heritageClause of declaration.heritageClauses) { - for (const type of heritageClause.types) { - const symbol = findSymbolFromHeritageType(ts, checker, type); - if (symbol) { - const decl = symbol.valueDeclaration || (symbol.declarations && symbol.declarations[0]); - if (decl && program.isSourceFileDefaultLibrary(decl.getSourceFile())) { - return true; - } - } - } - } - } - return false; -} -function findSymbolFromHeritageType(ts, checker, type) { - if (ts.isExpressionWithTypeArguments(type)) { - return findSymbolFromHeritageType(ts, checker, type.expression); - } - if (ts.isIdentifier(type)) { - const tmp = getRealNodeSymbol(ts, checker, type); - return (tmp.length > 0 ? 
tmp[0].symbol : null); - } - if (ts.isPropertyAccessExpression(type)) { - return findSymbolFromHeritageType(ts, checker, type.name); - } - return null; -} -class SymbolImportTuple { - symbol; - symbolImportNode; - constructor(symbol, symbolImportNode) { - this.symbol = symbol; - this.symbolImportNode = symbolImportNode; - } -} -/** - * Returns the node's symbol and the `import` node (if the symbol resolved from a different module) - */ -function getRealNodeSymbol(ts, checker, node) { - // Go to the original declaration for cases: - // - // (1) when the aliased symbol was declared in the location(parent). - // (2) when the aliased symbol is originating from an import. - // - function shouldSkipAlias(node, declaration) { - if (!ts.isShorthandPropertyAssignment(node) && node.kind !== ts.SyntaxKind.Identifier) { - return false; - } - if (node.parent === declaration) { - return true; - } - switch (declaration.kind) { - case ts.SyntaxKind.ImportClause: - case ts.SyntaxKind.ImportEqualsDeclaration: - return true; - case ts.SyntaxKind.ImportSpecifier: - return declaration.parent.kind === ts.SyntaxKind.NamedImports; - default: - return false; - } - } - if (!ts.isShorthandPropertyAssignment(node)) { - if (node.getChildCount() !== 0) { - return []; - } - } - const { parent } = node; - let symbol = (ts.isShorthandPropertyAssignment(node) - ? checker.getShorthandAssignmentValueSymbol(node) - : checker.getSymbolAtLocation(node)); - let importNode = null; - // If this is an alias, and the request came at the declaration location - // get the aliased symbol instead. This allows for goto def on an import e.g. - // import {A, B} from "mod"; - // to jump to the implementation directly. - if (symbol && symbol.flags & ts.SymbolFlags.Alias && symbol.declarations && shouldSkipAlias(node, symbol.declarations[0])) { - const aliased = checker.getAliasedSymbol(symbol); - if (aliased.declarations) { - // We should mark the import as visited - importNode = symbol.declarations[0]; - symbol = aliased; - } - } - if (symbol) { - // Because name in short-hand property assignment has two different meanings: property name and property value, - // using go-to-definition at such position should go to the variable declaration of the property value rather than - // go to the declaration of the property name (in this case stay at the same position). However, if go-to-definition - // is performed at the location of property access, we would like to go to definition of the property in the short-hand - // assignment. This case and others are handled by the following code. - if (node.parent.kind === ts.SyntaxKind.ShorthandPropertyAssignment) { - symbol = checker.getShorthandAssignmentValueSymbol(symbol.valueDeclaration); - } - // If the node is the name of a BindingElement within an ObjectBindingPattern instead of just returning the - // declaration the symbol (which is itself), we should try to get to the original type of the ObjectBindingPattern - // and return the property declaration for the referenced property. 
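getRealNodeSymbol above leans on two type-checker calls: getSymbolAtLocation to find the symbol for the node at hand, and getAliasedSymbol to step from an import specifier to the declaration it refers to. A condensed, hedged sketch of just that aliasing step, assuming a ts.TypeChecker obtained from program.getTypeChecker(); resolveSymbol is an invented helper name:

import * as ts from 'typescript';

// Resolve a node to its symbol and, when that symbol is an import alias
// (e.g. `import { A } from './mod'`), follow it to the aliased declaration,
// the way goto-definition does.
function resolveSymbol(checker: ts.TypeChecker, node: ts.Node): ts.Symbol | undefined {
	let symbol = checker.getSymbolAtLocation(node);
	if (symbol && (symbol.flags & ts.SymbolFlags.Alias) && symbol.declarations?.length) {
		symbol = checker.getAliasedSymbol(symbol);
	}
	return symbol;
}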
- // For example: - // import('./foo').then(({ b/*goto*/ar }) => undefined); => should get use to the declaration in file "./foo" - // - // function bar(onfulfilled: (value: T) => void) { //....} - // interface Test { - // pr/*destination*/op1: number - // } - // bar(({pr/*goto*/op1})=>{}); - if (ts.isPropertyName(node) && ts.isBindingElement(parent) && ts.isObjectBindingPattern(parent.parent) && - (node === (parent.propertyName || parent.name))) { - const name = ts.getNameFromPropertyName(node); - const type = checker.getTypeAtLocation(parent.parent); - if (name && type) { - if (type.isUnion()) { - return generateMultipleSymbols(type, name, importNode); - } - else { - const prop = type.getProperty(name); - if (prop) { - symbol = prop; - } - } - } - } - // If the current location we want to find its definition is in an object literal, try to get the contextual type for the - // object literal, lookup the property symbol in the contextual type, and use this for goto-definition. - // For example - // interface Props{ - // /*first*/prop1: number - // prop2: boolean - // } - // function Foo(arg: Props) {} - // Foo( { pr/*1*/op1: 10, prop2: false }) - const element = ts.getContainingObjectLiteralElement(node); - if (element) { - const contextualType = element && checker.getContextualType(element.parent); - if (contextualType) { - const propertySymbols = ts.getPropertySymbolsFromContextualType(element, checker, contextualType, /*unionSymbolOk*/ false); - if (propertySymbols) { - symbol = propertySymbols[0]; - } - } - } - } - if (symbol && symbol.declarations) { - return [new SymbolImportTuple(symbol, importNode)]; - } - return []; - function generateMultipleSymbols(type, name, importNode) { - const result = []; - for (const t of type.types) { - const prop = t.getProperty(name); - if (prop && prop.declarations) { - result.push(new SymbolImportTuple(prop, importNode)); - } - } - return result; - } -} -/** Get the token whose text contains the position */ -function getTokenAtPosition(ts, sourceFile, position, allowPositionInLeadingTrivia, includeEndPosition) { - let current = sourceFile; - outer: while (true) { - // find the child that contains 'position' - for (const child of current.getChildren()) { - const start = allowPositionInLeadingTrivia ? child.getFullStart() : child.getStart(sourceFile, /*includeJsDoc*/ true); - if (start > position) { - // If this child begins after position, then all subsequent children will as well. 
- break; - } - const end = child.getEnd(); - if (position < end || (position === end && (child.kind === ts.SyntaxKind.EndOfFileToken || includeEndPosition))) { - current = child; - continue outer; - } - } - return current; - } -} -//#endregion -//# sourceMappingURL=treeshaking.js.map \ No newline at end of file diff --git a/build/lib/treeshaking.ts b/build/lib/treeshaking.ts index 3d1e785e073..463e701f73f 100644 --- a/build/lib/treeshaking.ts +++ b/build/lib/treeshaking.ts @@ -5,14 +5,16 @@ import fs from 'fs'; import path from 'path'; -import type * as ts from 'typescript'; -import { IFileMap, TypeScriptLanguageServiceHost } from './typeScriptLanguageServiceHost'; +import * as ts from 'typescript'; +import { type IFileMap, TypeScriptLanguageServiceHost } from './typeScriptLanguageServiceHost.ts'; -enum ShakeLevel { - Files = 0, - InnerFile = 1, - ClassMembers = 2 -} +const ShakeLevel = Object.freeze({ + Files: 0, + InnerFile: 1, + ClassMembers: 2 +}); + +type ShakeLevel = typeof ShakeLevel[keyof typeof ShakeLevel]; export function toStringShakeLevel(shakeLevel: ShakeLevel): string { switch (shakeLevel) { @@ -77,7 +79,6 @@ function printDiagnostics(options: ITreeShakingOptions, diagnostics: ReadonlyArr } export function shake(options: ITreeShakingOptions): ITreeShakingResult { - const ts = require('typescript') as typeof import('typescript'); const languageService = createTypeScriptLanguageService(ts, options); const program = languageService.getProgram()!; @@ -136,11 +137,12 @@ function createTypeScriptLanguageService(ts: typeof import('typescript'), option //#region Tree Shaking -const enum NodeColor { - White = 0, - Gray = 1, - Black = 2 -} +const NodeColor = Object.freeze({ + White: 0, + Gray: 1, + Black: 2 +}); +type NodeColor = typeof NodeColor[keyof typeof NodeColor]; type ObjectLiteralElementWithName = ts.ObjectLiteralElement & { name: ts.PropertyName; parent: ts.ObjectLiteralExpression | ts.JsxAttributes }; @@ -755,10 +757,16 @@ function findSymbolFromHeritageType(ts: typeof import('typescript'), checker: ts } class SymbolImportTuple { + public readonly symbol: ts.Symbol | null; + public readonly symbolImportNode: ts.Declaration | null; + constructor( - public readonly symbol: ts.Symbol | null, - public readonly symbolImportNode: ts.Declaration | null - ) { } + symbol: ts.Symbol | null, + symbolImportNode: ts.Declaration | null + ) { + this.symbol = symbol; + this.symbolImportNode = symbolImportNode; + } } /** diff --git a/build/lib/tsb/builder.js b/build/lib/tsb/builder.js deleted file mode 100644 index eb8e7bca1b3..00000000000 --- a/build/lib/tsb/builder.js +++ /dev/null @@ -1,664 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || (function () { - var ownKeys = function(o) { - ownKeys = Object.getOwnPropertyNames || function (o) { - var ar = []; - for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys(o); - }; - return function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); - __setModuleDefault(result, mod); - return result; - }; -})(); -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.CancellationToken = void 0; -exports.createTypeScriptBuilder = createTypeScriptBuilder; -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const crypto_1 = __importDefault(require("crypto")); -const utils = __importStar(require("./utils")); -const ansi_colors_1 = __importDefault(require("ansi-colors")); -const typescript_1 = __importDefault(require("typescript")); -const vinyl_1 = __importDefault(require("vinyl")); -const source_map_1 = require("source-map"); -var CancellationToken; -(function (CancellationToken) { - CancellationToken.None = { - isCancellationRequested() { return false; } - }; -})(CancellationToken || (exports.CancellationToken = CancellationToken = {})); -function normalize(path) { - return path.replace(/\\/g, '/'); -} -function createTypeScriptBuilder(config, projectFile, cmd) { - const _log = config.logFn; - const host = new LanguageServiceHost(cmd, projectFile, _log); - const outHost = new LanguageServiceHost({ ...cmd, options: { ...cmd.options, sourceRoot: cmd.options.outDir } }, cmd.options.outDir ?? 
'', _log); - const toBeCheckedForCycles = []; - const service = typescript_1.default.createLanguageService(host, typescript_1.default.createDocumentRegistry()); - const lastBuildVersion = Object.create(null); - const lastDtsHash = Object.create(null); - const userWantsDeclarations = cmd.options.declaration; - let oldErrors = Object.create(null); - let headUsed = process.memoryUsage().heapUsed; - let emitSourceMapsInStream = true; - // always emit declaraction files - host.getCompilationSettings().declaration = true; - function file(file) { - // support gulp-sourcemaps - if (file.sourceMap) { - emitSourceMapsInStream = false; - } - if (!file.contents) { - host.removeScriptSnapshot(file.path); - delete lastBuildVersion[normalize(file.path)]; - } - else { - host.addScriptSnapshot(file.path, new VinylScriptSnapshot(file)); - } - } - function baseFor(snapshot) { - if (snapshot instanceof VinylScriptSnapshot) { - return cmd.options.outDir || snapshot.getBase(); - } - else { - return ''; - } - } - function isExternalModule(sourceFile) { - return !!sourceFile.externalModuleIndicator - || /declare\s+module\s+('|")(.+)\1/.test(sourceFile.getText()); - } - function build(out, onError, token = CancellationToken.None) { - function checkSyntaxSoon(fileName) { - return new Promise(resolve => { - process.nextTick(function () { - if (!host.getScriptSnapshot(fileName, false)) { - resolve([]); // no script, no problems - } - else { - resolve(service.getSyntacticDiagnostics(fileName)); - } - }); - }); - } - function checkSemanticsSoon(fileName) { - return new Promise(resolve => { - process.nextTick(function () { - if (!host.getScriptSnapshot(fileName, false)) { - resolve([]); // no script, no problems - } - else { - resolve(service.getSemanticDiagnostics(fileName)); - } - }); - }); - } - function emitSoon(fileName) { - return new Promise(resolve => { - process.nextTick(function () { - if (/\.d\.ts$/.test(fileName)) { - // if it's already a d.ts file just emit it signature - const snapshot = host.getScriptSnapshot(fileName); - const signature = crypto_1.default.createHash('sha256') - .update(snapshot.getText(0, snapshot.getLength())) - .digest('base64'); - return resolve({ - fileName, - signature, - files: [] - }); - } - const output = service.getEmitOutput(fileName); - const files = []; - let signature; - for (const file of output.outputFiles) { - if (!emitSourceMapsInStream && /\.js\.map$/.test(file.name)) { - continue; - } - if (/\.d\.ts$/.test(file.name)) { - signature = crypto_1.default.createHash('sha256') - .update(file.text) - .digest('base64'); - if (!userWantsDeclarations) { - // don't leak .d.ts files if users don't want them - continue; - } - } - const vinyl = new vinyl_1.default({ - path: file.name, - contents: Buffer.from(file.text), - base: !config._emitWithoutBasePath && baseFor(host.getScriptSnapshot(fileName)) || undefined - }); - if (!emitSourceMapsInStream && /\.js$/.test(file.name)) { - const sourcemapFile = output.outputFiles.filter(f => /\.js\.map$/.test(f.name))[0]; - if (sourcemapFile) { - const extname = path_1.default.extname(vinyl.relative); - const basename = path_1.default.basename(vinyl.relative, extname); - const dirname = path_1.default.dirname(vinyl.relative); - const tsname = (dirname === '.' ? 
'' : dirname + '/') + basename + '.ts'; - let sourceMap = JSON.parse(sourcemapFile.text); - sourceMap.sources[0] = tsname.replace(/\\/g, '/'); - // check for an "input source" map and combine them - // in step 1 we extract all line edit from the input source map, and - // in step 2 we apply the line edits to the typescript source map - const snapshot = host.getScriptSnapshot(fileName); - if (snapshot instanceof VinylScriptSnapshot && snapshot.sourceMap) { - const inputSMC = new source_map_1.SourceMapConsumer(snapshot.sourceMap); - const tsSMC = new source_map_1.SourceMapConsumer(sourceMap); - let didChange = false; - const smg = new source_map_1.SourceMapGenerator({ - file: sourceMap.file, - sourceRoot: sourceMap.sourceRoot - }); - // step 1 - const lineEdits = new Map(); - inputSMC.eachMapping(m => { - if (m.originalLine === m.generatedLine) { - // same line mapping - let array = lineEdits.get(m.originalLine); - if (!array) { - array = []; - lineEdits.set(m.originalLine, array); - } - array.push([m.originalColumn, m.generatedColumn]); - } - else { - // NOT SUPPORTED - } - }); - // step 2 - tsSMC.eachMapping(m => { - didChange = true; - const edits = lineEdits.get(m.originalLine); - let originalColumnDelta = 0; - if (edits) { - for (const [from, to] of edits) { - if (to >= m.originalColumn) { - break; - } - originalColumnDelta = from - to; - } - } - smg.addMapping({ - source: m.source, - name: m.name, - generated: { line: m.generatedLine, column: m.generatedColumn }, - original: { line: m.originalLine, column: m.originalColumn + originalColumnDelta } - }); - }); - if (didChange) { - [tsSMC, inputSMC].forEach((consumer) => { - consumer.sources.forEach((sourceFile) => { - smg._sources.add(sourceFile); - const sourceContent = consumer.sourceContentFor(sourceFile); - if (sourceContent !== null) { - smg.setSourceContent(sourceFile, sourceContent); - } - }); - }); - sourceMap = JSON.parse(smg.toString()); - // const filename = '/Users/jrieken/Code/vscode/src2/' + vinyl.relative + '.map'; - // fs.promises.mkdir(path.dirname(filename), { recursive: true }).then(async () => { - // await fs.promises.writeFile(filename, smg.toString()); - // await fs.promises.writeFile('/Users/jrieken/Code/vscode/src2/' + vinyl.relative, vinyl.contents); - // }); - } - } - vinyl.sourceMap = sourceMap; - } - } - files.push(vinyl); - } - resolve({ - fileName, - signature, - files - }); - }); - }); - } - const newErrors = Object.create(null); - const t1 = Date.now(); - const toBeEmitted = []; - const toBeCheckedSyntactically = []; - const toBeCheckedSemantically = []; - const filesWithChangedSignature = []; - const dependentFiles = []; - const newLastBuildVersion = new Map(); - for (const fileName of host.getScriptFileNames()) { - if (lastBuildVersion[fileName] !== host.getScriptVersion(fileName)) { - toBeEmitted.push(fileName); - toBeCheckedSyntactically.push(fileName); - toBeCheckedSemantically.push(fileName); - } - } - return new Promise(resolve => { - const semanticCheckInfo = new Map(); - const seenAsDependentFile = new Set(); - function workOnNext() { - let promise; - // let fileName: string; - // someone told us to stop this - if (token.isCancellationRequested()) { - _log('[CANCEL]', '>>This compile run was cancelled<<'); - newLastBuildVersion.clear(); - resolve(); - return; - } - // (1st) emit code - else if (toBeEmitted.length) { - const fileName = toBeEmitted.pop(); - promise = emitSoon(fileName).then(value => { - for (const file of value.files) { - _log('[emit code]', file.path); - out(file); - } - // 
remember when this was build - newLastBuildVersion.set(fileName, host.getScriptVersion(fileName)); - // remeber the signature - if (value.signature && lastDtsHash[fileName] !== value.signature) { - lastDtsHash[fileName] = value.signature; - filesWithChangedSignature.push(fileName); - } - // line up for cycle check - const jsValue = value.files.find(candidate => candidate.basename.endsWith('.js')); - if (jsValue) { - outHost.addScriptSnapshot(jsValue.path, new ScriptSnapshot(String(jsValue.contents), new Date())); - toBeCheckedForCycles.push(normalize(jsValue.path)); - } - }).catch(e => { - // can't just skip this or make a result up.. - host.error(`ERROR emitting ${fileName}`); - host.error(e); - }); - } - // (2nd) check syntax - else if (toBeCheckedSyntactically.length) { - const fileName = toBeCheckedSyntactically.pop(); - _log('[check syntax]', fileName); - promise = checkSyntaxSoon(fileName).then(diagnostics => { - delete oldErrors[fileName]; - if (diagnostics.length > 0) { - diagnostics.forEach(d => onError(d)); - newErrors[fileName] = diagnostics; - // stop the world when there are syntax errors - toBeCheckedSyntactically.length = 0; - toBeCheckedSemantically.length = 0; - filesWithChangedSignature.length = 0; - } - }); - } - // (3rd) check semantics - else if (toBeCheckedSemantically.length) { - let fileName = toBeCheckedSemantically.pop(); - while (fileName && semanticCheckInfo.has(fileName)) { - fileName = toBeCheckedSemantically.pop(); - } - if (fileName) { - _log('[check semantics]', fileName); - promise = checkSemanticsSoon(fileName).then(diagnostics => { - delete oldErrors[fileName]; - semanticCheckInfo.set(fileName, diagnostics.length); - if (diagnostics.length > 0) { - diagnostics.forEach(d => onError(d)); - newErrors[fileName] = diagnostics; - } - }); - } - } - // (4th) check dependents - else if (filesWithChangedSignature.length) { - while (filesWithChangedSignature.length) { - const fileName = filesWithChangedSignature.pop(); - if (!isExternalModule(service.getProgram().getSourceFile(fileName))) { - _log('[check semantics*]', fileName + ' is an internal module and it has changed shape -> check whatever hasn\'t been checked yet'); - toBeCheckedSemantically.push(...host.getScriptFileNames()); - filesWithChangedSignature.length = 0; - dependentFiles.length = 0; - break; - } - host.collectDependents(fileName, dependentFiles); - } - } - // (5th) dependents contd - else if (dependentFiles.length) { - let fileName = dependentFiles.pop(); - while (fileName && seenAsDependentFile.has(fileName)) { - fileName = dependentFiles.pop(); - } - if (fileName) { - seenAsDependentFile.add(fileName); - const value = semanticCheckInfo.get(fileName); - if (value === 0) { - // already validated successfully -> look at dependents next - host.collectDependents(fileName, dependentFiles); - } - else if (typeof value === 'undefined') { - // first validate -> look at dependents next - dependentFiles.push(fileName); - toBeCheckedSemantically.push(fileName); - } - } - } - // (last) done - else { - resolve(); - return; - } - if (!promise) { - promise = Promise.resolve(); - } - promise.then(function () { - // change to change - process.nextTick(workOnNext); - }).catch(err => { - console.error(err); - }); - } - workOnNext(); - }).then(() => { - // check for cyclic dependencies - const cycles = outHost.getCyclicDependencies(toBeCheckedForCycles); - toBeCheckedForCycles.length = 0; - for (const [filename, error] of cycles) { - const cyclicDepErrors = []; - if (error) { - cyclicDepErrors.push({ - category: 
typescript_1.default.DiagnosticCategory.Error, - code: 1, - file: undefined, - start: undefined, - length: undefined, - messageText: `CYCLIC dependency: ${error}` - }); - } - delete oldErrors[filename]; - newErrors[filename] = cyclicDepErrors; - cyclicDepErrors.forEach(d => onError(d)); - } - }).then(() => { - // store the build versions to not rebuilt the next time - newLastBuildVersion.forEach((value, key) => { - lastBuildVersion[key] = value; - }); - // print old errors and keep them - for (const [key, value] of Object.entries(oldErrors)) { - value.forEach(diag => onError(diag)); - newErrors[key] = value; - } - oldErrors = newErrors; - // print stats - const headNow = process.memoryUsage().heapUsed; - const MB = 1024 * 1024; - _log('[tsb]', `time: ${ansi_colors_1.default.yellow((Date.now() - t1) + 'ms')} + \nmem: ${ansi_colors_1.default.cyan(Math.ceil(headNow / MB) + 'MB')} ${ansi_colors_1.default.bgCyan('delta: ' + Math.ceil((headNow - headUsed) / MB))}`); - headUsed = headNow; - }); - } - return { - file, - build, - languageService: service - }; -} -class ScriptSnapshot { - _text; - _mtime; - constructor(text, mtime) { - this._text = text; - this._mtime = mtime; - } - getVersion() { - return this._mtime.toUTCString(); - } - getText(start, end) { - return this._text.substring(start, end); - } - getLength() { - return this._text.length; - } - getChangeRange(_oldSnapshot) { - return undefined; - } -} -class VinylScriptSnapshot extends ScriptSnapshot { - _base; - sourceMap; - constructor(file) { - super(file.contents.toString(), file.stat.mtime); - this._base = file.base; - this.sourceMap = file.sourceMap; - } - getBase() { - return this._base; - } -} -class LanguageServiceHost { - _cmdLine; - _projectPath; - _log; - _snapshots; - _filesInProject; - _filesAdded; - _dependencies; - _dependenciesRecomputeList; - _fileNameToDeclaredModule; - _projectVersion; - constructor(_cmdLine, _projectPath, _log) { - this._cmdLine = _cmdLine; - this._projectPath = _projectPath; - this._log = _log; - this._snapshots = Object.create(null); - this._filesInProject = new Set(_cmdLine.fileNames); - this._filesAdded = new Set(); - this._dependencies = new utils.graph.Graph(); - this._dependenciesRecomputeList = []; - this._fileNameToDeclaredModule = Object.create(null); - this._projectVersion = 1; - } - log(_s) { - // console.log(s); - } - trace(_s) { - // console.log(s); - } - error(s) { - console.error(s); - } - getCompilationSettings() { - return this._cmdLine.options; - } - getProjectVersion() { - return String(this._projectVersion); - } - getScriptFileNames() { - const res = Object.keys(this._snapshots).filter(path => this._filesInProject.has(path) || this._filesAdded.has(path)); - return res; - } - getScriptVersion(filename) { - filename = normalize(filename); - const result = this._snapshots[filename]; - if (result) { - return result.getVersion(); - } - return 'UNKNWON_FILE_' + Math.random().toString(16).slice(2); - } - getScriptSnapshot(filename, resolve = true) { - filename = normalize(filename); - let result = this._snapshots[filename]; - if (!result && resolve) { - try { - result = new VinylScriptSnapshot(new vinyl_1.default({ - path: filename, - contents: fs_1.default.readFileSync(filename), - base: this.getCompilationSettings().outDir, - stat: fs_1.default.statSync(filename) - })); - this.addScriptSnapshot(filename, result); - } - catch (e) { - // ignore - } - } - return result; - } - static _declareModule = /declare\s+module\s+('|")(.+)\1/g; - addScriptSnapshot(filename, snapshot) { - 
this._projectVersion++; - filename = normalize(filename); - const old = this._snapshots[filename]; - if (!old && !this._filesInProject.has(filename) && !filename.endsWith('.d.ts')) { - // ^^^^^^^^^^^^^^^^^^^^^^^^^^ - // not very proper! - this._filesAdded.add(filename); - } - if (!old || old.getVersion() !== snapshot.getVersion()) { - this._dependenciesRecomputeList.push(filename); - // (cheap) check for declare module - LanguageServiceHost._declareModule.lastIndex = 0; - let match; - while ((match = LanguageServiceHost._declareModule.exec(snapshot.getText(0, snapshot.getLength())))) { - let declaredModules = this._fileNameToDeclaredModule[filename]; - if (!declaredModules) { - this._fileNameToDeclaredModule[filename] = declaredModules = []; - } - declaredModules.push(match[2]); - } - } - this._snapshots[filename] = snapshot; - return old; - } - removeScriptSnapshot(filename) { - filename = normalize(filename); - this._log('removeScriptSnapshot', filename); - this._filesInProject.delete(filename); - this._filesAdded.delete(filename); - this._projectVersion++; - delete this._fileNameToDeclaredModule[filename]; - return delete this._snapshots[filename]; - } - getCurrentDirectory() { - return path_1.default.dirname(this._projectPath); - } - getDefaultLibFileName(options) { - return typescript_1.default.getDefaultLibFilePath(options); - } - directoryExists = typescript_1.default.sys.directoryExists; - getDirectories = typescript_1.default.sys.getDirectories; - fileExists = typescript_1.default.sys.fileExists; - readFile = typescript_1.default.sys.readFile; - readDirectory = typescript_1.default.sys.readDirectory; - // ---- dependency management - collectDependents(filename, target) { - while (this._dependenciesRecomputeList.length) { - this._processFile(this._dependenciesRecomputeList.pop()); - } - filename = normalize(filename); - const node = this._dependencies.lookup(filename); - if (node) { - node.incoming.forEach(entry => target.push(entry.data)); - } - } - getCyclicDependencies(filenames) { - // Ensure dependencies are up to date - while (this._dependenciesRecomputeList.length) { - this._processFile(this._dependenciesRecomputeList.pop()); - } - const cycles = this._dependencies.findCycles(filenames.sort((a, b) => a.localeCompare(b))); - const result = new Map(); - for (const [key, value] of cycles) { - result.set(key, value?.join(' -> ')); - } - return result; - } - _processFile(filename) { - if (filename.match(/.*\.d\.ts$/)) { - return; - } - filename = normalize(filename); - const snapshot = this.getScriptSnapshot(filename); - if (!snapshot) { - this._log('processFile', `Missing snapshot for: ${filename}`); - return; - } - const info = typescript_1.default.preProcessFile(snapshot.getText(0, snapshot.getLength()), true); - // (0) clear out old dependencies - this._dependencies.resetNode(filename); - // (1) ///-references - info.referencedFiles.forEach(ref => { - const resolvedPath = path_1.default.resolve(path_1.default.dirname(filename), ref.fileName); - const normalizedPath = normalize(resolvedPath); - this._dependencies.inertEdge(filename, normalizedPath); - }); - // (2) import-require statements - info.importedFiles.forEach(ref => { - if (!ref.fileName.startsWith('.')) { - // node module? 
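The dependency bookkeeping in _processFile above is built on ts.preProcessFile, which returns a file's triple-slash references and import specifiers without type checking it. A small hedged sketch of that call in isolation; collectEdges and the sample paths are invented for the example:

import * as ts from 'typescript';
import path from 'path';

// List the files a module pulls in: ///-references plus relative imports.
// Bare specifiers such as 'fs' are skipped, mirroring the "node module?" check above.
function collectEdges(fileName: string, sourceText: string): string[] {
	const info = ts.preProcessFile(sourceText, /*readImportFiles*/ true);
	const edges: string[] = [];
	for (const ref of info.referencedFiles) {
		edges.push(path.resolve(path.dirname(fileName), ref.fileName));
	}
	for (const imp of info.importedFiles) {
		if (imp.fileName.startsWith('.')) {
			edges.push(path.resolve(path.dirname(fileName), imp.fileName));
		}
	}
	return edges;
}

// collectEdges('/repo/src/a.ts', `import { b } from './b.js'; import fs from 'fs';`)
// -> ['/repo/src/b.js']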
- return; - } - if (ref.fileName.endsWith('.css')) { - return; - } - const stopDirname = normalize(this.getCurrentDirectory()); - let dirname = filename; - let found = false; - while (!found && dirname.indexOf(stopDirname) === 0) { - dirname = path_1.default.dirname(dirname); - let resolvedPath = path_1.default.resolve(dirname, ref.fileName); - if (resolvedPath.endsWith('.js')) { - resolvedPath = resolvedPath.slice(0, -3); - } - const normalizedPath = normalize(resolvedPath); - if (this.getScriptSnapshot(normalizedPath + '.ts')) { - this._dependencies.inertEdge(filename, normalizedPath + '.ts'); - found = true; - } - else if (this.getScriptSnapshot(normalizedPath + '.d.ts')) { - this._dependencies.inertEdge(filename, normalizedPath + '.d.ts'); - found = true; - } - else if (this.getScriptSnapshot(normalizedPath + '.js')) { - this._dependencies.inertEdge(filename, normalizedPath + '.js'); - found = true; - } - } - if (!found) { - for (const key in this._fileNameToDeclaredModule) { - if (this._fileNameToDeclaredModule[key] && ~this._fileNameToDeclaredModule[key].indexOf(ref.fileName)) { - this._dependencies.inertEdge(filename, key); - } - } - } - }); - } -} -//# sourceMappingURL=builder.js.map \ No newline at end of file diff --git a/build/lib/tsb/builder.ts b/build/lib/tsb/builder.ts index 64081ac4797..628afc05427 100644 --- a/build/lib/tsb/builder.ts +++ b/build/lib/tsb/builder.ts @@ -6,11 +6,11 @@ import fs from 'fs'; import path from 'path'; import crypto from 'crypto'; -import * as utils from './utils'; +import * as utils from './utils.ts'; import colors from 'ansi-colors'; import ts from 'typescript'; import Vinyl from 'vinyl'; -import { RawSourceMap, SourceMapConsumer, SourceMapGenerator } from 'source-map'; +import { type RawSourceMap, SourceMapConsumer, SourceMapGenerator } from 'source-map'; export interface IConfiguration { logFn: (topic: string, message: string) => void; @@ -21,11 +21,11 @@ export interface CancellationToken { isCancellationRequested(): boolean; } -export namespace CancellationToken { - export const None: CancellationToken = { +export const CancellationToken = new class { + None: CancellationToken = { isCancellationRequested() { return false; } }; -} +}; export interface ITypeScriptBuilder { build(out: (file: Vinyl) => void, onError: (err: ts.Diagnostic) => void, token?: CancellationToken): Promise; @@ -167,7 +167,7 @@ export function createTypeScriptBuilder(config: IConfiguration, projectFile: str const dirname = path.dirname(vinyl.relative); const tsname = (dirname === '.' ? 
'' : dirname + '/') + basename + '.ts'; - let sourceMap = JSON.parse(sourcemapFile.text); + let sourceMap = JSON.parse(sourcemapFile.text) as RawSourceMap; sourceMap.sources[0] = tsname.replace(/\\/g, '/'); // check for an "input source" map and combine them @@ -227,7 +227,7 @@ export function createTypeScriptBuilder(config: IConfiguration, projectFile: str } [tsSMC, inputSMC].forEach((consumer) => { - (consumer).sources.forEach((sourceFile: string) => { + (consumer as SourceMapConsumer & { sources: string[] }).sources.forEach((sourceFile: string) => { (smg as SourceMapGeneratorWithSources)._sources.add(sourceFile); const sourceContent = consumer.sourceContentFor(sourceFile); if (sourceContent !== null) { @@ -529,19 +529,25 @@ class LanguageServiceHost implements ts.LanguageServiceHost { private readonly _snapshots: { [path: string]: ScriptSnapshot }; private readonly _filesInProject: Set; private readonly _filesAdded: Set; - private readonly _dependencies: utils.graph.Graph; + private readonly _dependencies: InstanceType>; private readonly _dependenciesRecomputeList: string[]; private readonly _fileNameToDeclaredModule: { [path: string]: string[] }; private _projectVersion: number; + private readonly _cmdLine: ts.ParsedCommandLine; + private readonly _projectPath: string; + private readonly _log: (topic: string, message: string) => void; constructor( - private readonly _cmdLine: ts.ParsedCommandLine, - private readonly _projectPath: string, - private readonly _log: (topic: string, message: string) => void + cmdLine: ts.ParsedCommandLine, + projectPath: string, + log: (topic: string, message: string) => void ) { + this._cmdLine = cmdLine; + this._projectPath = projectPath; + this._log = log; this._snapshots = Object.create(null); - this._filesInProject = new Set(_cmdLine.fileNames); + this._filesInProject = new Set(this._cmdLine.fileNames); this._filesAdded = new Set(); this._dependencies = new utils.graph.Graph(); this._dependenciesRecomputeList = []; @@ -665,7 +671,7 @@ class LanguageServiceHost implements ts.LanguageServiceHost { filename = normalize(filename); const node = this._dependencies.lookup(filename); if (node) { - node.incoming.forEach(entry => target.push(entry.data)); + node.incoming.forEach((entry: any) => target.push(entry.data)); } } diff --git a/build/lib/tsb/index.js b/build/lib/tsb/index.js deleted file mode 100644 index 552eea5014f..00000000000 --- a/build/lib/tsb/index.js +++ /dev/null @@ -1,171 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || (function () { - var ownKeys = function(o) { - ownKeys = Object.getOwnPropertyNames || function (o) { - var ar = []; - for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys(o); - }; - return function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); - __setModuleDefault(result, mod); - return result; - }; -})(); -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.create = create; -const vinyl_1 = __importDefault(require("vinyl")); -const through_1 = __importDefault(require("through")); -const builder = __importStar(require("./builder")); -const typescript_1 = __importDefault(require("typescript")); -const stream_1 = require("stream"); -const path_1 = require("path"); -const utils_1 = require("./utils"); -const fs_1 = require("fs"); -const fancy_log_1 = __importDefault(require("fancy-log")); -const transpiler_1 = require("./transpiler"); -const colors = require("ansi-colors"); -class EmptyDuplex extends stream_1.Duplex { - _write(_chunk, _encoding, callback) { callback(); } - _read() { this.push(null); } -} -function createNullCompiler() { - const result = function () { return new EmptyDuplex(); }; - result.src = () => new EmptyDuplex(); - return result; -} -const _defaultOnError = (err) => console.log(JSON.stringify(err, null, 4)); -function create(projectPath, existingOptions, config, onError = _defaultOnError) { - function printDiagnostic(diag) { - if (diag instanceof Error) { - onError(diag.message); - } - else if (!diag.file || !diag.start) { - onError(typescript_1.default.flattenDiagnosticMessageText(diag.messageText, '\n')); - } - else { - const lineAndCh = diag.file.getLineAndCharacterOfPosition(diag.start); - onError(utils_1.strings.format('{0}({1},{2}): {3}', diag.file.fileName, lineAndCh.line + 1, lineAndCh.character + 1, typescript_1.default.flattenDiagnosticMessageText(diag.messageText, '\n'))); - } - } - const parsed = typescript_1.default.readConfigFile(projectPath, typescript_1.default.sys.readFile); - if (parsed.error) { - printDiagnostic(parsed.error); - return createNullCompiler(); - } - const cmdLine = typescript_1.default.parseJsonConfigFileContent(parsed.config, typescript_1.default.sys, (0, path_1.dirname)(projectPath), existingOptions); - if (cmdLine.errors.length > 0) { - cmdLine.errors.forEach(printDiagnostic); - return createNullCompiler(); - } - function logFn(topic, message) { - if (config.verbose) { - (0, fancy_log_1.default)(colors.cyan(topic), message); - } - } - // FULL COMPILE stream doing transpile, syntax and semantic diagnostics - function createCompileStream(builder, token) { - return (0, through_1.default)(function (file) { - // give the file to the compiler - if (file.isStream()) { - this.emit('error', 'no support for streams'); - return; - } - builder.file(file); - }, function () { - // start the compilation process - builder.build(file => this.queue(file), printDiagnostic, token).catch(e => console.error(e)).then(() => this.queue(null)); - }); - } - // TRANSPILE ONLY stream doing just TS to JS 
conversion - function createTranspileStream(transpiler) { - return (0, through_1.default)(function (file) { - // give the file to the compiler - if (file.isStream()) { - this.emit('error', 'no support for streams'); - return; - } - if (!file.contents) { - return; - } - if (!config.transpileOnlyIncludesDts && file.path.endsWith('.d.ts')) { - return; - } - if (!transpiler.onOutfile) { - transpiler.onOutfile = file => this.queue(file); - } - transpiler.transpile(file); - }, function () { - transpiler.join().then(() => { - this.queue(null); - transpiler.onOutfile = undefined; - }); - }); - } - let result; - if (config.transpileOnly) { - const transpiler = !config.transpileWithEsbuild - ? new transpiler_1.TscTranspiler(logFn, printDiagnostic, projectPath, cmdLine) - : new transpiler_1.ESBuildTranspiler(logFn, printDiagnostic, projectPath, cmdLine); - result = (() => createTranspileStream(transpiler)); - } - else { - const _builder = builder.createTypeScriptBuilder({ logFn }, projectPath, cmdLine); - result = ((token) => createCompileStream(_builder, token)); - } - result.src = (opts) => { - let _pos = 0; - const _fileNames = cmdLine.fileNames.slice(0); - return new class extends stream_1.Readable { - constructor() { - super({ objectMode: true }); - } - _read() { - let more = true; - let path; - for (; more && _pos < _fileNames.length; _pos++) { - path = _fileNames[_pos]; - more = this.push(new vinyl_1.default({ - path, - contents: (0, fs_1.readFileSync)(path), - stat: (0, fs_1.statSync)(path), - cwd: opts && opts.cwd, - base: opts && opts.base || (0, path_1.dirname)(projectPath) - })); - } - if (_pos >= _fileNames.length) { - this.push(null); - } - } - }; - }; - return result; -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/build/lib/tsb/index.ts b/build/lib/tsb/index.ts index 165ad1ce3b8..31c1c3f15f8 100644 --- a/build/lib/tsb/index.ts +++ b/build/lib/tsb/index.ts @@ -5,15 +5,15 @@ import Vinyl from 'vinyl'; import through from 'through'; -import * as builder from './builder'; +import * as builder from './builder.ts'; import ts from 'typescript'; import { Readable, Writable, Duplex } from 'stream'; import { dirname } from 'path'; -import { strings } from './utils'; +import { strings } from './utils.ts'; import { readFileSync, statSync } from 'fs'; import log from 'fancy-log'; -import { ESBuildTranspiler, ITranspiler, TscTranspiler } from './transpiler'; -import colors = require('ansi-colors'); +import { ESBuildTranspiler, type ITranspiler, TscTranspiler } from './transpiler.ts'; +import colors from 'ansi-colors'; export interface IncrementalCompiler { (token?: any): Readable & Writable; @@ -164,5 +164,5 @@ export function create( }; }; - return result; + return result as IncrementalCompiler; } diff --git a/build/lib/tsb/transpiler.js b/build/lib/tsb/transpiler.js deleted file mode 100644 index 07c19c5bae2..00000000000 --- a/build/lib/tsb/transpiler.js +++ /dev/null @@ -1,306 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ESBuildTranspiler = exports.TscTranspiler = void 0; -const esbuild_1 = __importDefault(require("esbuild")); -const typescript_1 = __importDefault(require("typescript")); -const node_worker_threads_1 = __importDefault(require("node:worker_threads")); -const vinyl_1 = __importDefault(require("vinyl")); -const node_os_1 = require("node:os"); -const tsconfigUtils_1 = require("../tsconfigUtils"); -function transpile(tsSrc, options) { - const isAmd = /\n(import|export)/m.test(tsSrc); - if (!isAmd && options.compilerOptions?.module === typescript_1.default.ModuleKind.AMD) { - // enforce NONE module-system for not-amd cases - options = { ...options, ...{ compilerOptions: { ...options.compilerOptions, module: typescript_1.default.ModuleKind.None } } }; - } - const out = typescript_1.default.transpileModule(tsSrc, options); - return { - jsSrc: out.outputText, - diag: out.diagnostics ?? [] - }; -} -if (!node_worker_threads_1.default.isMainThread) { - // WORKER - node_worker_threads_1.default.parentPort?.addListener('message', (req) => { - const res = { - jsSrcs: [], - diagnostics: [] - }; - for (const tsSrc of req.tsSrcs) { - const out = transpile(tsSrc, req.options); - res.jsSrcs.push(out.jsSrc); - res.diagnostics.push(out.diag); - } - node_worker_threads_1.default.parentPort.postMessage(res); - }); -} -class OutputFileNameOracle { - getOutputFileName; - constructor(cmdLine, configFilePath) { - this.getOutputFileName = (file) => { - try { - // windows: path-sep normalizing - file = typescript_1.default.normalizePath(file); - if (!cmdLine.options.configFilePath) { - // this is needed for the INTERNAL getOutputFileNames-call below... - cmdLine.options.configFilePath = configFilePath; - } - const isDts = file.endsWith('.d.ts'); - if (isDts) { - file = file.slice(0, -5) + '.ts'; - cmdLine.fileNames.push(file); - } - const outfile = typescript_1.default.getOutputFileNames(cmdLine, file, true)[0]; - if (isDts) { - cmdLine.fileNames.pop(); - } - return outfile; - } - catch (err) { - console.error(file, cmdLine.fileNames); - console.error(err); - throw err; - } - }; - } -} -class TranspileWorker { - static pool = 1; - id = TranspileWorker.pool++; - _worker = new node_worker_threads_1.default.Worker(__filename); - _pending; - _durations = []; - constructor(outFileFn) { - this._worker.addListener('message', (res) => { - if (!this._pending) { - console.error('RECEIVING data WITHOUT request'); - return; - } - const [resolve, reject, files, options, t1] = this._pending; - const outFiles = []; - const diag = []; - for (let i = 0; i < res.jsSrcs.length; i++) { - // inputs and outputs are aligned across the arrays - const file = files[i]; - const jsSrc = res.jsSrcs[i]; - const diag = res.diagnostics[i]; - if (diag.length > 0) { - diag.push(...diag); - continue; - } - let SuffixTypes; - (function (SuffixTypes) { - SuffixTypes[SuffixTypes["Dts"] = 5] = "Dts"; - SuffixTypes[SuffixTypes["Ts"] = 3] = "Ts"; - SuffixTypes[SuffixTypes["Unknown"] = 0] = "Unknown"; - })(SuffixTypes || (SuffixTypes = {})); - const suffixLen = file.path.endsWith('.d.ts') ? 5 /* SuffixTypes.Dts */ : file.path.endsWith('.ts') ? 3 /* SuffixTypes.Ts */ : 0 /* SuffixTypes.Unknown */; - // check if output of a DTS-files isn't just "empty" and iff so - // skip this file - if (suffixLen === 5 /* SuffixTypes.Dts */ && _isDefaultEmpty(jsSrc)) { - continue; - } - const outBase = options.compilerOptions?.outDir ?? 
file.base; - const outPath = outFileFn(file.path); - outFiles.push(new vinyl_1.default({ - path: outPath, - base: outBase, - contents: Buffer.from(jsSrc), - })); - } - this._pending = undefined; - this._durations.push(Date.now() - t1); - if (diag.length > 0) { - reject(diag); - } - else { - resolve(outFiles); - } - }); - } - terminate() { - // console.log(`Worker#${this.id} ENDS after ${this._durations.length} jobs (total: ${this._durations.reduce((p, c) => p + c, 0)}, avg: ${this._durations.reduce((p, c) => p + c, 0) / this._durations.length})`); - this._worker.terminate(); - } - get isBusy() { - return this._pending !== undefined; - } - next(files, options) { - if (this._pending !== undefined) { - throw new Error('BUSY'); - } - return new Promise((resolve, reject) => { - this._pending = [resolve, reject, files, options, Date.now()]; - const req = { - options, - tsSrcs: files.map(file => String(file.contents)) - }; - this._worker.postMessage(req); - }); - } -} -class TscTranspiler { - _onError; - _cmdLine; - static P = Math.floor((0, node_os_1.cpus)().length * .5); - _outputFileNames; - onOutfile; - _workerPool = []; - _queue = []; - _allJobs = []; - constructor(logFn, _onError, configFilePath, _cmdLine) { - this._onError = _onError; - this._cmdLine = _cmdLine; - logFn('Transpile', `will use ${TscTranspiler.P} transpile worker`); - this._outputFileNames = new OutputFileNameOracle(_cmdLine, configFilePath); - } - async join() { - // wait for all penindg jobs - this._consumeQueue(); - await Promise.allSettled(this._allJobs); - this._allJobs.length = 0; - // terminate all worker - this._workerPool.forEach(w => w.terminate()); - this._workerPool.length = 0; - } - transpile(file) { - if (this._cmdLine.options.noEmit) { - // not doing ANYTHING here - return; - } - const newLen = this._queue.push(file); - if (newLen > TscTranspiler.P ** 2) { - this._consumeQueue(); - } - } - _consumeQueue() { - if (this._queue.length === 0) { - // no work... 
- return; - } - // kinda LAZYily create workers - if (this._workerPool.length === 0) { - for (let i = 0; i < TscTranspiler.P; i++) { - this._workerPool.push(new TranspileWorker(file => this._outputFileNames.getOutputFileName(file))); - } - } - const freeWorker = this._workerPool.filter(w => !w.isBusy); - if (freeWorker.length === 0) { - // OK, they will pick up work themselves - return; - } - for (const worker of freeWorker) { - if (this._queue.length === 0) { - break; - } - const job = new Promise(resolve => { - const consume = () => { - const files = this._queue.splice(0, TscTranspiler.P); - if (files.length === 0) { - // DONE - resolve(undefined); - return; - } - // work on the NEXT file - // const [inFile, outFn] = req; - worker.next(files, { compilerOptions: this._cmdLine.options }).then(outFiles => { - if (this.onOutfile) { - outFiles.map(this.onOutfile, this); - } - consume(); - }).catch(err => { - this._onError(err); - }); - }; - consume(); - }); - this._allJobs.push(job); - } - } -} -exports.TscTranspiler = TscTranspiler; -class ESBuildTranspiler { - _logFn; - _onError; - _cmdLine; - _outputFileNames; - _jobs = []; - onOutfile; - _transformOpts; - constructor(_logFn, _onError, configFilePath, _cmdLine) { - this._logFn = _logFn; - this._onError = _onError; - this._cmdLine = _cmdLine; - _logFn('Transpile', `will use ESBuild to transpile source files`); - this._outputFileNames = new OutputFileNameOracle(_cmdLine, configFilePath); - const isExtension = configFilePath.includes('extensions'); - const target = (0, tsconfigUtils_1.getTargetStringFromTsConfig)(configFilePath); - this._transformOpts = { - target: [target], - format: isExtension ? 'cjs' : 'esm', - platform: isExtension ? 'node' : undefined, - loader: 'ts', - sourcemap: 'inline', - tsconfigRaw: JSON.stringify({ - compilerOptions: { - ...this._cmdLine.options, - ...{ - module: isExtension ? typescript_1.default.ModuleKind.CommonJS : undefined - } - } - }), - supported: { - 'class-static-blocks': false, // SEE https://github.com/evanw/esbuild/issues/3823, - 'dynamic-import': !isExtension, // see https://github.com/evanw/esbuild/issues/1281 - 'class-field': !isExtension - } - }; - } - async join() { - const jobs = this._jobs.slice(); - this._jobs.length = 0; - await Promise.allSettled(jobs); - } - transpile(file) { - if (!(file.contents instanceof Buffer)) { - throw Error('file.contents must be a Buffer'); - } - const t1 = Date.now(); - this._jobs.push(esbuild_1.default.transform(file.contents, { - ...this._transformOpts, - sourcefile: file.path, - }).then(result => { - // check if output of a DTS-files isn't just "empty" and iff so - // skip this file - if (file.path.endsWith('.d.ts') && _isDefaultEmpty(result.code)) { - return; - } - const outBase = this._cmdLine.options.outDir ?? 
file.base; - const outPath = this._outputFileNames.getOutputFileName(file.path); - this.onOutfile(new vinyl_1.default({ - path: outPath, - base: outBase, - contents: Buffer.from(result.code), - })); - this._logFn('Transpile', `esbuild took ${Date.now() - t1}ms for ${file.path}`); - }).catch(err => { - this._onError(err); - })); - } -} -exports.ESBuildTranspiler = ESBuildTranspiler; -function _isDefaultEmpty(src) { - return src - .replace('"use strict";', '') - .replace(/\/\/# sourceMappingURL.*^/, '') - .replace(/\/\*[\s\S]*?\*\/|([^\\:]|^)\/\/.*$/gm, '$1') - .trim().length === 0; -} -//# sourceMappingURL=transpiler.js.map \ No newline at end of file diff --git a/build/lib/tsb/transpiler.ts b/build/lib/tsb/transpiler.ts index f81039d70b6..72883a2ab0c 100644 --- a/build/lib/tsb/transpiler.ts +++ b/build/lib/tsb/transpiler.ts @@ -8,7 +8,7 @@ import ts from 'typescript'; import threads from 'node:worker_threads'; import Vinyl from 'vinyl'; import { cpus } from 'node:os'; -import { getTargetStringFromTsConfig } from '../tsconfigUtils'; +import { getTargetStringFromTsConfig } from '../tsconfigUtils.ts'; interface TranspileReq { readonly tsSrcs: string[]; @@ -65,7 +65,7 @@ class OutputFileNameOracle { try { // windows: path-sep normalizing - file = (ts).normalizePath(file); + file = (ts as InternalTsApi).normalizePath(file); if (!cmdLine.options.configFilePath) { // this is needed for the INTERNAL getOutputFileNames-call below... @@ -76,7 +76,7 @@ class OutputFileNameOracle { file = file.slice(0, -5) + '.ts'; cmdLine.fileNames.push(file); } - const outfile = (ts).getOutputFileNames(cmdLine, file, true)[0]; + const outfile = (ts as InternalTsApi).getOutputFileNames(cmdLine, file, true)[0]; if (isDts) { cmdLine.fileNames.pop(); } @@ -97,7 +97,7 @@ class TranspileWorker { readonly id = TranspileWorker.pool++; - private _worker = new threads.Worker(__filename); + private _worker = new threads.Worker(import.meta.filename); private _pending?: [resolve: Function, reject: Function, file: Vinyl[], options: ts.TranspileOptions, t1: number]; private _durations: number[] = []; @@ -124,11 +124,11 @@ class TranspileWorker { diag.push(...diag); continue; } - const enum SuffixTypes { - Dts = 5, - Ts = 3, - Unknown = 0 - } + const SuffixTypes = { + Dts: 5, + Ts: 3, + Unknown: 0 + } as const; const suffixLen = file.path.endsWith('.d.ts') ? SuffixTypes.Dts : file.path.endsWith('.ts') ? 
SuffixTypes.Ts : SuffixTypes.Unknown; @@ -203,14 +203,21 @@ export class TscTranspiler implements ITranspiler { private _queue: Vinyl[] = []; private _allJobs: Promise[] = []; + private readonly _logFn: (topic: string, message: string) => void; + private readonly _onError: (err: any) => void; + private readonly _cmdLine: ts.ParsedCommandLine; + constructor( logFn: (topic: string, message: string) => void, - private readonly _onError: (err: any) => void, + onError: (err: any) => void, configFilePath: string, - private readonly _cmdLine: ts.ParsedCommandLine + cmdLine: ts.ParsedCommandLine ) { - logFn('Transpile', `will use ${TscTranspiler.P} transpile worker`); - this._outputFileNames = new OutputFileNameOracle(_cmdLine, configFilePath); + this._logFn = logFn; + this._onError = onError; + this._cmdLine = cmdLine; + this._logFn('Transpile', `will use ${TscTranspiler.P} transpile worker`); + this._outputFileNames = new OutputFileNameOracle(this._cmdLine, configFilePath); } async join() { @@ -300,15 +307,21 @@ export class ESBuildTranspiler implements ITranspiler { onOutfile?: ((file: Vinyl) => void) | undefined; private readonly _transformOpts: esbuild.TransformOptions; + private readonly _logFn: (topic: string, message: string) => void; + private readonly _onError: (err: any) => void; + private readonly _cmdLine: ts.ParsedCommandLine; constructor( - private readonly _logFn: (topic: string, message: string) => void, - private readonly _onError: (err: any) => void, + logFn: (topic: string, message: string) => void, + onError: (err: any) => void, configFilePath: string, - private readonly _cmdLine: ts.ParsedCommandLine + cmdLine: ts.ParsedCommandLine ) { - _logFn('Transpile', `will use ESBuild to transpile source files`); - this._outputFileNames = new OutputFileNameOracle(_cmdLine, configFilePath); + this._logFn = logFn; + this._onError = onError; + this._cmdLine = cmdLine; + this._logFn('Transpile', `will use ESBuild to transpile source files`); + this._outputFileNames = new OutputFileNameOracle(this._cmdLine, configFilePath); const isExtension = configFilePath.includes('extensions'); diff --git a/build/lib/tsb/utils.js b/build/lib/tsb/utils.js deleted file mode 100644 index 2ea820c6e6b..00000000000 --- a/build/lib/tsb/utils.js +++ /dev/null @@ -1,96 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
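The transpiler.ts hunks above swap a const enum for an 'as const' object and replace constructor parameter properties with explicit field declarations; neither construct is erasable syntax, so both would break when the build scripts run directly under Node's type stripping. A small sketch of the same two patterns, using made-up names rather than anything from the repository:

// A const enum is not erasable; a plain object marked `as const` keeps the
// same literal values and can be type-stripped.
const Suffix = {
	Dts: 5,
	Ts: 3,
	Unknown: 0
} as const;
type Suffix = (typeof Suffix)[keyof typeof Suffix];

function suffixLen(path: string): Suffix {
	return path.endsWith('.d.ts') ? Suffix.Dts : path.endsWith('.ts') ? Suffix.Ts : Suffix.Unknown;
}

// Constructor parameter properties are also not erasable; declare the field
// explicitly and assign it in the constructor body instead.
class Logger {
	private readonly _logFn: (topic: string, message: string) => void;

	constructor(logFn: (topic: string, message: string) => void) {
		this._logFn = logFn;
	}

	log(message: string): void {
		this._logFn('Transpile', message);
	}
}

const logger = new Logger((topic, message) => console.log(`[${topic}] ${message}`));
logger.log(`suffix marker for index.d.ts: ${suffixLen('index.d.ts')}`);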
- *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.graph = exports.strings = void 0; -var strings; -(function (strings) { - function format(value, ...rest) { - return value.replace(/({\d+})/g, function (match) { - const index = Number(match.substring(1, match.length - 1)); - return String(rest[index]) || match; - }); - } - strings.format = format; -})(strings || (exports.strings = strings = {})); -var graph; -(function (graph) { - class Node { - data; - incoming = new Map(); - outgoing = new Map(); - constructor(data) { - this.data = data; - } - } - graph.Node = Node; - class Graph { - _nodes = new Map(); - inertEdge(from, to) { - const fromNode = this.lookupOrInsertNode(from); - const toNode = this.lookupOrInsertNode(to); - fromNode.outgoing.set(toNode.data, toNode); - toNode.incoming.set(fromNode.data, fromNode); - } - resetNode(data) { - const node = this._nodes.get(data); - if (!node) { - return; - } - for (const outDep of node.outgoing.values()) { - outDep.incoming.delete(node.data); - } - node.outgoing.clear(); - } - lookupOrInsertNode(data) { - let node = this._nodes.get(data); - if (!node) { - node = new Node(data); - this._nodes.set(data, node); - } - return node; - } - lookup(data) { - return this._nodes.get(data) ?? null; - } - findCycles(allData) { - const result = new Map(); - const checked = new Set(); - for (const data of allData) { - const node = this.lookup(data); - if (!node) { - continue; - } - const r = this._findCycle(node, checked, new Set()); - result.set(node.data, r); - } - return result; - } - _findCycle(node, checked, seen) { - if (checked.has(node.data)) { - return undefined; - } - let result; - for (const child of node.outgoing.values()) { - if (seen.has(child.data)) { - const seenArr = Array.from(seen); - const idx = seenArr.indexOf(child.data); - seenArr.push(child.data); - return idx > 0 ? seenArr.slice(idx) : seenArr; - } - seen.add(child.data); - result = this._findCycle(child, checked, seen); - seen.delete(child.data); - if (result) { - break; - } - } - checked.add(node.data); - return result; - } - } - graph.Graph = Graph; -})(graph || (exports.graph = graph = {})); -//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/build/lib/tsb/utils.ts b/build/lib/tsb/utils.ts index 7f0bbdd5f23..4c5abb3e9c6 100644 --- a/build/lib/tsb/utils.ts +++ b/build/lib/tsb/utils.ts @@ -3,29 +3,32 @@ * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ -export namespace strings { +export const strings = (() => { - export function format(value: string, ...rest: unknown[]): string { - return value.replace(/({\d+})/g, function (match) { + function format(value: string, ...rest: unknown[]): string { + return value.replace(/(\{\d+\})/g, function (match) { const index = Number(match.substring(1, match.length - 1)); return String(rest[index]) || match; }); } -} -export namespace graph { + return { format }; +})(); - export class Node { +export const graph = (() => { + + class Node { readonly incoming = new Map>(); readonly outgoing = new Map>(); + readonly data: T; - constructor(readonly data: T) { - + constructor(data: T) { + this.data = data; } } - export class Graph { + class Graph { private _nodes = new Map>(); @@ -103,4 +106,5 @@ export namespace graph { } } -} + return { Node, Graph }; +})(); diff --git a/build/lib/tsconfigUtils.js b/build/lib/tsconfigUtils.js deleted file mode 100644 index a20e2d6f77d..00000000000 --- a/build/lib/tsconfigUtils.js +++ /dev/null @@ -1,28 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getTargetStringFromTsConfig = getTargetStringFromTsConfig; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -const path_1 = require("path"); -const typescript_1 = __importDefault(require("typescript")); -/** - * Get the target (e.g. 'ES2024') from a tsconfig.json file. - */ -function getTargetStringFromTsConfig(configFilePath) { - const parsed = typescript_1.default.readConfigFile(configFilePath, typescript_1.default.sys.readFile); - if (parsed.error) { - throw new Error(`Cannot determine target from ${configFilePath}. TS error: ${parsed.error.messageText}`); - } - const cmdLine = typescript_1.default.parseJsonConfigFileContent(parsed.config, typescript_1.default.sys, (0, path_1.dirname)(configFilePath), {}); - const resolved = typeof cmdLine.options.target !== 'undefined' ? typescript_1.default.ScriptTarget[cmdLine.options.target] : undefined; - if (!resolved) { - throw new Error(`Could not resolve target in ${configFilePath}`); - } - return resolved; -} -//# sourceMappingURL=tsconfigUtils.js.map \ No newline at end of file diff --git a/build/lib/typeScriptLanguageServiceHost.js b/build/lib/typeScriptLanguageServiceHost.js deleted file mode 100644 index 6ba0802102d..00000000000 --- a/build/lib/typeScriptLanguageServiceHost.js +++ /dev/null @@ -1,79 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
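In the utils.ts hunk above, the value-carrying 'export namespace' blocks become const bindings built from immediately invoked function expressions, again because namespaces are not erasable syntax. Shown in isolation, reusing the strings.format helper from the diff, the transformation looks like this (a sketch, not the full module):

// Before (not erasable):
//   export namespace strings {
//     export function format(value: string, ...rest: unknown[]): string { ... }
//   }

// After: the same public surface as a plain ESM value.
export const strings = (() => {

	function format(value: string, ...rest: unknown[]): string {
		return value.replace(/(\{\d+\})/g, function (match) {
			const index = Number(match.substring(1, match.length - 1));
			return String(rest[index]) || match;
		});
	}

	return { format };
})();

// Callers are unchanged: strings.format('{0} of {1}', 3, 10) -> '3 of 10'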
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TypeScriptLanguageServiceHost = void 0; -const typescript_1 = __importDefault(require("typescript")); -const node_fs_1 = __importDefault(require("node:fs")); -const node_path_1 = require("node:path"); -function normalizePath(filePath) { - return (0, node_path_1.normalize)(filePath); -} -/** - * A TypeScript language service host - */ -class TypeScriptLanguageServiceHost { - ts; - topLevelFiles; - compilerOptions; - constructor(ts, topLevelFiles, compilerOptions) { - this.ts = ts; - this.topLevelFiles = topLevelFiles; - this.compilerOptions = compilerOptions; - } - // --- language service host --------------- - getCompilationSettings() { - return this.compilerOptions; - } - getScriptFileNames() { - return [ - ...this.topLevelFiles.keys(), - this.ts.getDefaultLibFilePath(this.compilerOptions) - ]; - } - getScriptVersion(_fileName) { - return '1'; - } - getProjectVersion() { - return '1'; - } - getScriptSnapshot(fileName) { - fileName = normalizePath(fileName); - if (this.topLevelFiles.has(fileName)) { - return this.ts.ScriptSnapshot.fromString(this.topLevelFiles.get(fileName)); - } - else { - return typescript_1.default.ScriptSnapshot.fromString(node_fs_1.default.readFileSync(fileName).toString()); - } - } - getScriptKind(_fileName) { - return this.ts.ScriptKind.TS; - } - getCurrentDirectory() { - return ''; - } - getDefaultLibFileName(options) { - return this.ts.getDefaultLibFilePath(options); - } - readFile(path, encoding) { - path = normalizePath(path); - if (this.topLevelFiles.get(path)) { - return this.topLevelFiles.get(path); - } - return typescript_1.default.sys.readFile(path, encoding); - } - fileExists(path) { - path = normalizePath(path); - if (this.topLevelFiles.has(path)) { - return true; - } - return typescript_1.default.sys.fileExists(path); - } -} -exports.TypeScriptLanguageServiceHost = TypeScriptLanguageServiceHost; -//# sourceMappingURL=typeScriptLanguageServiceHost.js.map \ No newline at end of file diff --git a/build/lib/typeScriptLanguageServiceHost.ts b/build/lib/typeScriptLanguageServiceHost.ts index f3bacd617d5..94c304fe094 100644 --- a/build/lib/typeScriptLanguageServiceHost.ts +++ b/build/lib/typeScriptLanguageServiceHost.ts @@ -18,11 +18,19 @@ function normalizePath(filePath: string): string { */ export class TypeScriptLanguageServiceHost implements ts.LanguageServiceHost { + private readonly ts: typeof import('typescript'); + private readonly topLevelFiles: IFileMap; + private readonly compilerOptions: ts.CompilerOptions; + constructor( - private readonly ts: typeof import('typescript'), - private readonly topLevelFiles: IFileMap, - private readonly compilerOptions: ts.CompilerOptions, - ) { } + ts: typeof import('typescript'), + topLevelFiles: IFileMap, + compilerOptions: ts.CompilerOptions, + ) { + this.ts = ts; + this.topLevelFiles = topLevelFiles; + this.compilerOptions = compilerOptions; + } // --- language service host --------------- getCompilationSettings(): ts.CompilerOptions { diff --git a/build/lib/util.js b/build/lib/util.js deleted file mode 100644 index 9d2f3b13a06..00000000000 --- a/build/lib/util.js +++ /dev/null @@ -1,364 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.VinylStat = void 0; -exports.incremental = incremental; -exports.debounce = debounce; -exports.fixWin32DirectoryPermissions = fixWin32DirectoryPermissions; -exports.setExecutableBit = setExecutableBit; -exports.toFileUri = toFileUri; -exports.skipDirectories = skipDirectories; -exports.cleanNodeModules = cleanNodeModules; -exports.loadSourcemaps = loadSourcemaps; -exports.stripSourceMappingURL = stripSourceMappingURL; -exports.$if = $if; -exports.appendOwnPathSourceURL = appendOwnPathSourceURL; -exports.rewriteSourceMappingURL = rewriteSourceMappingURL; -exports.rimraf = rimraf; -exports.rreddir = rreddir; -exports.ensureDir = ensureDir; -exports.rebase = rebase; -exports.filter = filter; -exports.streamToPromise = streamToPromise; -exports.getElectronVersion = getElectronVersion; -const event_stream_1 = __importDefault(require("event-stream")); -const debounce_1 = __importDefault(require("debounce")); -const gulp_filter_1 = __importDefault(require("gulp-filter")); -const gulp_rename_1 = __importDefault(require("gulp-rename")); -const path_1 = __importDefault(require("path")); -const fs_1 = __importDefault(require("fs")); -const rimraf_1 = __importDefault(require("rimraf")); -const url_1 = require("url"); -const ternary_stream_1 = __importDefault(require("ternary-stream")); -const root = path_1.default.dirname(path_1.default.dirname(__dirname)); -const NoCancellationToken = { isCancellationRequested: () => false }; -function incremental(streamProvider, initial, supportsCancellation) { - const input = event_stream_1.default.through(); - const output = event_stream_1.default.through(); - let state = 'idle'; - let buffer = Object.create(null); - const token = !supportsCancellation ? undefined : { isCancellationRequested: () => Object.keys(buffer).length > 0 }; - const run = (input, isCancellable) => { - state = 'running'; - const stream = !supportsCancellation ? streamProvider() : streamProvider(isCancellable ? 
token : NoCancellationToken); - input - .pipe(stream) - .pipe(event_stream_1.default.through(undefined, () => { - state = 'idle'; - eventuallyRun(); - })) - .pipe(output); - }; - if (initial) { - run(initial, false); - } - const eventuallyRun = (0, debounce_1.default)(() => { - const paths = Object.keys(buffer); - if (paths.length === 0) { - return; - } - const data = paths.map(path => buffer[path]); - buffer = Object.create(null); - run(event_stream_1.default.readArray(data), true); - }, 500); - input.on('data', (f) => { - buffer[f.path] = f; - if (state === 'idle') { - eventuallyRun(); - } - }); - return event_stream_1.default.duplex(input, output); -} -function debounce(task, duration = 500) { - const input = event_stream_1.default.through(); - const output = event_stream_1.default.through(); - let state = 'idle'; - const run = () => { - state = 'running'; - task() - .pipe(event_stream_1.default.through(undefined, () => { - const shouldRunAgain = state === 'stale'; - state = 'idle'; - if (shouldRunAgain) { - eventuallyRun(); - } - })) - .pipe(output); - }; - run(); - const eventuallyRun = (0, debounce_1.default)(() => run(), duration); - input.on('data', () => { - if (state === 'idle') { - eventuallyRun(); - } - else { - state = 'stale'; - } - }); - return event_stream_1.default.duplex(input, output); -} -function fixWin32DirectoryPermissions() { - if (!/win32/.test(process.platform)) { - return event_stream_1.default.through(); - } - return event_stream_1.default.mapSync(f => { - if (f.stat && f.stat.isDirectory && f.stat.isDirectory()) { - f.stat.mode = 16877; - } - return f; - }); -} -function setExecutableBit(pattern) { - const setBit = event_stream_1.default.mapSync(f => { - if (!f.stat) { - const stat = { isFile() { return true; }, mode: 0 }; - f.stat = stat; - } - f.stat.mode = /* 100755 */ 33261; - return f; - }); - if (!pattern) { - return setBit; - } - const input = event_stream_1.default.through(); - const filter = (0, gulp_filter_1.default)(pattern, { restore: true }); - const output = input - .pipe(filter) - .pipe(setBit) - .pipe(filter.restore); - return event_stream_1.default.duplex(input, output); -} -function toFileUri(filePath) { - const match = filePath.match(/^([a-z])\:(.*)$/i); - if (match) { - filePath = '/' + match[1].toUpperCase() + ':' + match[2]; - } - return 'file://' + filePath.replace(/\\/g, '/'); -} -function skipDirectories() { - return event_stream_1.default.mapSync(f => { - if (!f.isDirectory()) { - return f; - } - }); -} -function cleanNodeModules(rulePath) { - const rules = fs_1.default.readFileSync(rulePath, 'utf8') - .split(/\r?\n/g) - .map(line => line.trim()) - .filter(line => line && !/^#/.test(line)); - const excludes = rules.filter(line => !/^!/.test(line)).map(line => `!**/node_modules/${line}`); - const includes = rules.filter(line => /^!/.test(line)).map(line => `**/node_modules/${line.substr(1)}`); - const input = event_stream_1.default.through(); - const output = event_stream_1.default.merge(input.pipe((0, gulp_filter_1.default)(['**', ...excludes])), input.pipe((0, gulp_filter_1.default)(includes))); - return event_stream_1.default.duplex(input, output); -} -function loadSourcemaps() { - const input = event_stream_1.default.through(); - const output = input - .pipe(event_stream_1.default.map((f, cb) => { - if (f.sourceMap) { - cb(undefined, f); - return; - } - if (!f.contents) { - cb(undefined, f); - return; - } - const contents = f.contents.toString('utf8'); - const reg = /\/\/# sourceMappingURL=(.*)$/g; - let lastMatch = null; - let 
match = null; - while (match = reg.exec(contents)) { - lastMatch = match; - } - if (!lastMatch) { - f.sourceMap = { - version: '3', - names: [], - mappings: '', - sources: [f.relative.replace(/\\/g, '/')], - sourcesContent: [contents] - }; - cb(undefined, f); - return; - } - f.contents = Buffer.from(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8'); - fs_1.default.readFile(path_1.default.join(path_1.default.dirname(f.path), lastMatch[1]), 'utf8', (err, contents) => { - if (err) { - return cb(err); - } - f.sourceMap = JSON.parse(contents); - cb(undefined, f); - }); - })); - return event_stream_1.default.duplex(input, output); -} -function stripSourceMappingURL() { - const input = event_stream_1.default.through(); - const output = input - .pipe(event_stream_1.default.mapSync(f => { - const contents = f.contents.toString('utf8'); - f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8'); - return f; - })); - return event_stream_1.default.duplex(input, output); -} -/** Splits items in the stream based on the predicate, sending them to onTrue if true, or onFalse otherwise */ -function $if(test, onTrue, onFalse = event_stream_1.default.through()) { - if (typeof test === 'boolean') { - return test ? onTrue : onFalse; - } - return (0, ternary_stream_1.default)(test, onTrue, onFalse); -} -/** Operator that appends the js files' original path a sourceURL, so debug locations map */ -function appendOwnPathSourceURL() { - const input = event_stream_1.default.through(); - const output = input - .pipe(event_stream_1.default.mapSync(f => { - if (!(f.contents instanceof Buffer)) { - throw new Error(`contents of ${f.path} are not a buffer`); - } - f.contents = Buffer.concat([f.contents, Buffer.from(`\n//# sourceURL=${(0, url_1.pathToFileURL)(f.path)}`)]); - return f; - })); - return event_stream_1.default.duplex(input, output); -} -function rewriteSourceMappingURL(sourceMappingURLBase) { - const input = event_stream_1.default.through(); - const output = input - .pipe(event_stream_1.default.mapSync(f => { - const contents = f.contents.toString('utf8'); - const str = `//# sourceMappingURL=${sourceMappingURLBase}/${path_1.default.dirname(f.relative).replace(/\\/g, '/')}/$1`; - f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, str)); - return f; - })); - return event_stream_1.default.duplex(input, output); -} -function rimraf(dir) { - const result = () => new Promise((c, e) => { - let retries = 0; - const retry = () => { - (0, rimraf_1.default)(dir, { maxBusyTries: 1 }, (err) => { - if (!err) { - return c(); - } - if (err.code === 'ENOTEMPTY' && ++retries < 5) { - return setTimeout(() => retry(), 10); - } - return e(err); - }); - }; - retry(); - }); - result.taskName = `clean-${path_1.default.basename(dir).toLowerCase()}`; - return result; -} -function _rreaddir(dirPath, prepend, result) { - const entries = fs_1.default.readdirSync(dirPath, { withFileTypes: true }); - for (const entry of entries) { - if (entry.isDirectory()) { - _rreaddir(path_1.default.join(dirPath, entry.name), `${prepend}/${entry.name}`, result); - } - else { - result.push(`${prepend}/${entry.name}`); - } - } -} -function rreddir(dirPath) { - const result = []; - _rreaddir(dirPath, '', result); - return result; -} -function ensureDir(dirPath) { - if (fs_1.default.existsSync(dirPath)) { - return; - } - ensureDir(path_1.default.dirname(dirPath)); - fs_1.default.mkdirSync(dirPath); -} -function rebase(count) { - return (0, gulp_rename_1.default)(f => { - const parts = 
f.dirname ? f.dirname.split(/[\/\\]/) : []; - f.dirname = parts.slice(count).join(path_1.default.sep); - }); -} -function filter(fn) { - const result = event_stream_1.default.through(function (data) { - if (fn(data)) { - this.emit('data', data); - } - else { - result.restore.push(data); - } - }); - result.restore = event_stream_1.default.through(); - return result; -} -function streamToPromise(stream) { - return new Promise((c, e) => { - stream.on('error', err => e(err)); - stream.on('end', () => c()); - }); -} -function getElectronVersion() { - const npmrc = fs_1.default.readFileSync(path_1.default.join(root, '.npmrc'), 'utf8'); - const electronVersion = /^target="(.*)"$/m.exec(npmrc)[1]; - const msBuildId = /^ms_build_id="(.*)"$/m.exec(npmrc)[1]; - return { electronVersion, msBuildId }; -} -class VinylStat { - dev; - ino; - mode; - nlink; - uid; - gid; - rdev; - size; - blksize; - blocks; - atimeMs; - mtimeMs; - ctimeMs; - birthtimeMs; - atime; - mtime; - ctime; - birthtime; - constructor(stat) { - this.dev = stat.dev ?? 0; - this.ino = stat.ino ?? 0; - this.mode = stat.mode ?? 0; - this.nlink = stat.nlink ?? 0; - this.uid = stat.uid ?? 0; - this.gid = stat.gid ?? 0; - this.rdev = stat.rdev ?? 0; - this.size = stat.size ?? 0; - this.blksize = stat.blksize ?? 0; - this.blocks = stat.blocks ?? 0; - this.atimeMs = stat.atimeMs ?? 0; - this.mtimeMs = stat.mtimeMs ?? 0; - this.ctimeMs = stat.ctimeMs ?? 0; - this.birthtimeMs = stat.birthtimeMs ?? 0; - this.atime = stat.atime ?? new Date(0); - this.mtime = stat.mtime ?? new Date(0); - this.ctime = stat.ctime ?? new Date(0); - this.birthtime = stat.birthtime ?? new Date(0); - } - isFile() { return true; } - isDirectory() { return false; } - isBlockDevice() { return false; } - isCharacterDevice() { return false; } - isSymbolicLink() { return false; } - isFIFO() { return false; } - isSocket() { return false; } -} -exports.VinylStat = VinylStat; -//# sourceMappingURL=util.js.map \ No newline at end of file diff --git a/build/lib/util.ts b/build/lib/util.ts index 5f3b2f67333..f1354b858c9 100644 --- a/build/lib/util.ts +++ b/build/lib/util.ts @@ -11,12 +11,12 @@ import path from 'path'; import fs from 'fs'; import _rimraf from 'rimraf'; import VinylFile from 'vinyl'; -import { ThroughStream } from 'through'; +import through from 'through'; import sm from 'source-map'; import { pathToFileURL } from 'url'; import ternaryStream from 'ternary-stream'; -const root = path.dirname(path.dirname(__dirname)); +const root = path.dirname(path.dirname(import.meta.dirname)); export interface ICancellationToken { isCancellationRequested(): boolean; @@ -203,8 +203,7 @@ export function loadSourcemaps(): NodeJS.ReadWriteStream { return; } - const contents = (f.contents).toString('utf8'); - + const contents = (f.contents as Buffer).toString('utf8'); const reg = /\/\/# sourceMappingURL=(.*)$/g; let lastMatch: RegExpExecArray | null = null; let match: RegExpExecArray | null = null; @@ -244,7 +243,7 @@ export function stripSourceMappingURL(): NodeJS.ReadWriteStream { const output = input .pipe(es.mapSync(f => { - const contents = (f.contents).toString('utf8'); + const contents = (f.contents as Buffer).toString('utf8'); f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8'); return f; })); @@ -283,7 +282,7 @@ export function rewriteSourceMappingURL(sourceMappingURLBase: string): NodeJS.Re const output = input .pipe(es.mapSync(f => { - const contents = (f.contents).toString('utf8'); + const contents = (f.contents as 
Buffer).toString('utf8'); const str = `//# sourceMappingURL=${sourceMappingURLBase}/${path.dirname(f.relative).replace(/\\/g, '/')}/$1`; f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, str)); return f; @@ -350,7 +349,7 @@ export function rebase(count: number): NodeJS.ReadWriteStream { } export interface FilterStream extends NodeJS.ReadWriteStream { - restore: ThroughStream; + restore: through.ThroughStream; } export function filter(fn: (data: any) => boolean): FilterStream { diff --git a/build/lib/watch/index.js b/build/lib/watch/index.js deleted file mode 100644 index 84b9f96fb97..00000000000 --- a/build/lib/watch/index.js +++ /dev/null @@ -1,12 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = default_1; -const watch = process.platform === 'win32' ? require('./watch-win32') : require('vscode-gulp-watch'); -function default_1(...args) { - return watch.apply(null, args); -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/build/lib/watch/index.ts b/build/lib/watch/index.ts index c43d3f1f83e..763cacc6d89 100644 --- a/build/lib/watch/index.ts +++ b/build/lib/watch/index.ts @@ -2,9 +2,11 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +import { createRequire } from 'node:module'; -const watch = process.platform === 'win32' ? require('./watch-win32') : require('vscode-gulp-watch'); +const require = createRequire(import.meta.url); +const watch = process.platform === 'win32' ? require('./watch-win32.ts').default : require('vscode-gulp-watch'); -export default function (...args: any[]): any { +export default function (...args: any[]): ReturnType { return watch.apply(null, args); } diff --git a/build/lib/watch/watch-win32.js b/build/lib/watch/watch-win32.js deleted file mode 100644 index 7b77981d620..00000000000 --- a/build/lib/watch/watch-win32.js +++ /dev/null @@ -1,104 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
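The watch/index.ts change above recreates require() with createRequire for the CommonJS 'vscode-gulp-watch' dependency, while util.ts above and watch-win32.ts just below swap __dirname and __filename for their import.meta equivalents. A condensed sketch of the combined pattern, assuming Node.js 20.11+ and a matching @types/node for import.meta.dirname:

import { createRequire } from 'node:module';
import path from 'node:path';

// ES modules have no built-in require(); createRequire provides one scoped to this module.
const require = createRequire(import.meta.url);

// The Windows watcher stays a local module; everything else goes through the
// CommonJS 'vscode-gulp-watch' package, as in the diff.
const watch = process.platform === 'win32'
	? require('./watch-win32.ts').default
	: require('vscode-gulp-watch');

// __dirname is not defined in ESM; import.meta.dirname carries the same information.
export const watcherPath = path.join(import.meta.dirname, 'watcher.exe');

export default function (...args: any[]): ReturnType<typeof watch> {
	return watch.apply(null, args);
}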
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const path_1 = __importDefault(require("path")); -const child_process_1 = __importDefault(require("child_process")); -const fs_1 = __importDefault(require("fs")); -const vinyl_1 = __importDefault(require("vinyl")); -const event_stream_1 = __importDefault(require("event-stream")); -const gulp_filter_1 = __importDefault(require("gulp-filter")); -const watcherPath = path_1.default.join(__dirname, 'watcher.exe'); -function toChangeType(type) { - switch (type) { - case '0': return 'change'; - case '1': return 'add'; - default: return 'unlink'; - } -} -function watch(root) { - const result = event_stream_1.default.through(); - let child = child_process_1.default.spawn(watcherPath, [root]); - child.stdout.on('data', function (data) { - const lines = data.toString('utf8').split('\n'); - for (let i = 0; i < lines.length; i++) { - const line = lines[i].trim(); - if (line.length === 0) { - continue; - } - const changeType = line[0]; - const changePath = line.substr(2); - // filter as early as possible - if (/^\.git/.test(changePath) || /(^|\\)out($|\\)/.test(changePath)) { - continue; - } - const changePathFull = path_1.default.join(root, changePath); - const file = new vinyl_1.default({ - path: changePathFull, - base: root - }); - file.event = toChangeType(changeType); - result.emit('data', file); - } - }); - child.stderr.on('data', function (data) { - result.emit('error', data); - }); - child.on('exit', function (code) { - result.emit('error', 'Watcher died with code ' + code); - child = null; - }); - process.once('SIGTERM', function () { process.exit(0); }); - process.once('SIGTERM', function () { process.exit(0); }); - process.once('exit', function () { if (child) { - child.kill(); - } }); - return result; -} -const cache = Object.create(null); -module.exports = function (pattern, options) { - options = options || {}; - const cwd = path_1.default.normalize(options.cwd || process.cwd()); - let watcher = cache[cwd]; - if (!watcher) { - watcher = cache[cwd] = watch(cwd); - } - const rebase = !options.base ? 
event_stream_1.default.through() : event_stream_1.default.mapSync(function (f) { - f.base = options.base; - return f; - }); - return watcher - .pipe((0, gulp_filter_1.default)(['**', '!.git{,/**}'], { dot: options.dot })) // ignore all things git - .pipe((0, gulp_filter_1.default)(pattern, { dot: options.dot })) - .pipe(event_stream_1.default.map(function (file, cb) { - fs_1.default.stat(file.path, function (err, stat) { - if (err && err.code === 'ENOENT') { - return cb(undefined, file); - } - if (err) { - return cb(); - } - if (!stat.isFile()) { - return cb(); - } - fs_1.default.readFile(file.path, function (err, contents) { - if (err && err.code === 'ENOENT') { - return cb(undefined, file); - } - if (err) { - return cb(); - } - file.contents = contents; - file.stat = stat; - cb(undefined, file); - }); - }); - })) - .pipe(rebase); -}; -//# sourceMappingURL=watch-win32.js.map \ No newline at end of file diff --git a/build/lib/watch/watch-win32.ts b/build/lib/watch/watch-win32.ts index 38cbdea80b2..12b8ffc0ac3 100644 --- a/build/lib/watch/watch-win32.ts +++ b/build/lib/watch/watch-win32.ts @@ -11,7 +11,7 @@ import es from 'event-stream'; import filter from 'gulp-filter'; import { Stream } from 'stream'; -const watcherPath = path.join(__dirname, 'watcher.exe'); +const watcherPath = path.join(import.meta.dirname, 'watcher.exe'); function toChangeType(type: '0' | '1' | '2'): 'change' | 'add' | 'unlink' { switch (type) { @@ -33,7 +33,7 @@ function watch(root: string): Stream { continue; } - const changeType = <'0' | '1' | '2'>line[0]; + const changeType = line[0] as '0' | '1' | '2'; const changePath = line.substr(2); // filter as early as possible @@ -70,7 +70,7 @@ function watch(root: string): Stream { const cache: { [cwd: string]: Stream } = Object.create(null); -module.exports = function (pattern: string | string[] | filter.FileFunction, options?: { cwd?: string; base?: string; dot?: boolean }) { +export default function (pattern: string | string[] | filter.FileFunction, options?: { cwd?: string; base?: string; dot?: boolean }) { options = options || {}; const cwd = path.normalize(options.cwd || process.cwd()); @@ -105,4 +105,4 @@ module.exports = function (pattern: string | string[] | filter.FileFunction, opt }); })) .pipe(rebase); -}; +} diff --git a/build/linux/debian/calculate-deps.js b/build/linux/debian/calculate-deps.js deleted file mode 100644 index 34276ce7705..00000000000 --- a/build/linux/debian/calculate-deps.js +++ /dev/null @@ -1,89 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.generatePackageDeps = generatePackageDeps; -const child_process_1 = require("child_process"); -const fs_1 = require("fs"); -const os_1 = require("os"); -const path_1 = __importDefault(require("path")); -const cgmanifest_json_1 = __importDefault(require("../../../cgmanifest.json")); -const dep_lists_1 = require("./dep-lists"); -function generatePackageDeps(files, arch, chromiumSysroot, vscodeSysroot) { - const dependencies = files.map(file => calculatePackageDeps(file, arch, chromiumSysroot, vscodeSysroot)); - const additionalDepsSet = new Set(dep_lists_1.additionalDeps); - dependencies.push(additionalDepsSet); - return dependencies; -} -// Based on https://source.chromium.org/chromium/chromium/src/+/main:chrome/installer/linux/debian/calculate_package_deps.py. -function calculatePackageDeps(binaryPath, arch, chromiumSysroot, vscodeSysroot) { - try { - if (!((0, fs_1.statSync)(binaryPath).mode & fs_1.constants.S_IXUSR)) { - throw new Error(`Binary ${binaryPath} needs to have an executable bit set.`); - } - } - catch (e) { - // The package might not exist. Don't re-throw the error here. - console.error('Tried to stat ' + binaryPath + ' but failed.'); - } - // Get the Chromium dpkg-shlibdeps file. - const chromiumManifest = cgmanifest_json_1.default.registrations.filter(registration => { - return registration.component.type === 'git' && registration.component.git.name === 'chromium'; - }); - const dpkgShlibdepsUrl = `https://raw.githubusercontent.com/chromium/chromium/${chromiumManifest[0].version}/third_party/dpkg-shlibdeps/dpkg-shlibdeps.pl`; - const dpkgShlibdepsScriptLocation = `${(0, os_1.tmpdir)()}/dpkg-shlibdeps.pl`; - const result = (0, child_process_1.spawnSync)('curl', [dpkgShlibdepsUrl, '-o', dpkgShlibdepsScriptLocation]); - if (result.status !== 0) { - throw new Error('Cannot retrieve dpkg-shlibdeps. Stderr:\n' + result.stderr); - } - const cmd = [dpkgShlibdepsScriptLocation, '--ignore-weak-undefined']; - switch (arch) { - case 'amd64': - cmd.push(`-l${chromiumSysroot}/usr/lib/x86_64-linux-gnu`, `-l${chromiumSysroot}/lib/x86_64-linux-gnu`, `-l${vscodeSysroot}/usr/lib/x86_64-linux-gnu`, `-l${vscodeSysroot}/lib/x86_64-linux-gnu`); - break; - case 'armhf': - cmd.push(`-l${chromiumSysroot}/usr/lib/arm-linux-gnueabihf`, `-l${chromiumSysroot}/lib/arm-linux-gnueabihf`, `-l${vscodeSysroot}/usr/lib/arm-linux-gnueabihf`, `-l${vscodeSysroot}/lib/arm-linux-gnueabihf`); - break; - case 'arm64': - cmd.push(`-l${chromiumSysroot}/usr/lib/aarch64-linux-gnu`, `-l${chromiumSysroot}/lib/aarch64-linux-gnu`, `-l${vscodeSysroot}/usr/lib/aarch64-linux-gnu`, `-l${vscodeSysroot}/lib/aarch64-linux-gnu`); - break; - } - cmd.push(`-l${chromiumSysroot}/usr/lib`); - cmd.push(`-L${vscodeSysroot}/debian/libxkbfile1/DEBIAN/shlibs`); - cmd.push('-O', '-e', path_1.default.resolve(binaryPath)); - const dpkgShlibdepsResult = (0, child_process_1.spawnSync)('perl', cmd, { cwd: chromiumSysroot }); - if (dpkgShlibdepsResult.status !== 0) { - throw new Error(`dpkg-shlibdeps failed with exit code ${dpkgShlibdepsResult.status}. 
stderr:\n${dpkgShlibdepsResult.stderr} `); - } - const shlibsDependsPrefix = 'shlibs:Depends='; - const requiresList = dpkgShlibdepsResult.stdout.toString('utf-8').trimEnd().split('\n'); - let depsStr = ''; - for (const line of requiresList) { - if (line.startsWith(shlibsDependsPrefix)) { - depsStr = line.substring(shlibsDependsPrefix.length); - } - } - // Refs https://chromium-review.googlesource.com/c/chromium/src/+/3572926 - // Chromium depends on libgcc_s, is from the package libgcc1. However, in - // Bullseye, the package was renamed to libgcc-s1. To avoid adding a dep - // on the newer package, this hack skips the dep. This is safe because - // libgcc-s1 is a dependency of libc6. This hack can be removed once - // support for Debian Buster and Ubuntu Bionic are dropped. - // - // Remove kerberos native module related dependencies as the versions - // computed from sysroot will not satisfy the minimum supported distros - // Refs https://github.com/microsoft/vscode/issues/188881. - // TODO(deepak1556): remove this workaround in favor of computing the - // versions from build container for native modules. - const filteredDeps = depsStr.split(', ').filter(dependency => { - return !dependency.startsWith('libgcc-s1'); - }).sort(); - const requires = new Set(filteredDeps); - return requires; -} -//# sourceMappingURL=calculate-deps.js.map \ No newline at end of file diff --git a/build/linux/debian/calculate-deps.ts b/build/linux/debian/calculate-deps.ts index addc38696a8..98a96302e19 100644 --- a/build/linux/debian/calculate-deps.ts +++ b/build/linux/debian/calculate-deps.ts @@ -7,9 +7,9 @@ import { spawnSync } from 'child_process'; import { constants, statSync } from 'fs'; import { tmpdir } from 'os'; import path from 'path'; -import manifests from '../../../cgmanifest.json'; -import { additionalDeps } from './dep-lists'; -import { DebianArchString } from './types'; +import manifests from '../../../cgmanifest.json' with { type: 'json' }; +import { additionalDeps } from './dep-lists.ts'; +import type { DebianArchString } from './types.ts'; export function generatePackageDeps(files: string[], arch: DebianArchString, chromiumSysroot: string, vscodeSysroot: string): Set[] { const dependencies: Set[] = files.map(file => calculatePackageDeps(file, arch, chromiumSysroot, vscodeSysroot)); diff --git a/build/linux/debian/dep-lists.js b/build/linux/debian/dep-lists.js deleted file mode 100644 index 6282d354736..00000000000 --- a/build/linux/debian/dep-lists.js +++ /dev/null @@ -1,143 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.referenceGeneratedDepsByArch = exports.recommendedDeps = exports.additionalDeps = void 0; -// Based on https://source.chromium.org/chromium/chromium/src/+/main:chrome/installer/linux/debian/additional_deps -// Additional dependencies not in the dpkg-shlibdeps output. -exports.additionalDeps = [ - 'ca-certificates', // Make sure users have SSL certificates. - 'libgtk-3-0 (>= 3.9.10) | libgtk-4-1', - 'libnss3 (>= 3.26)', - 'libcurl3-gnutls | libcurl3-nss | libcurl4 | libcurl3', // For Breakpad crash reports. 
- 'xdg-utils (>= 1.0.2)', // OS integration -]; -// Based on https://source.chromium.org/chromium/chromium/src/+/main:chrome/installer/linux/debian/manual_recommends -// Dependencies that we can only recommend -// for now since some of the older distros don't support them. -exports.recommendedDeps = [ - 'libvulkan1' // Move to additionalDeps once support for Trusty and Jessie are dropped. -]; -exports.referenceGeneratedDepsByArch = { - 'amd64': [ - 'ca-certificates', - 'libasound2 (>= 1.0.17)', - 'libatk-bridge2.0-0 (>= 2.5.3)', - 'libatk1.0-0 (>= 2.11.90)', - 'libatspi2.0-0 (>= 2.9.90)', - 'libc6 (>= 2.14)', - 'libc6 (>= 2.16)', - 'libc6 (>= 2.17)', - 'libc6 (>= 2.2.5)', - 'libc6 (>= 2.25)', - 'libc6 (>= 2.28)', - 'libcairo2 (>= 1.6.0)', - 'libcurl3-gnutls | libcurl3-nss | libcurl4 | libcurl3', - 'libdbus-1-3 (>= 1.9.14)', - 'libexpat1 (>= 2.1~beta3)', - 'libgbm1 (>= 17.1.0~rc2)', - 'libglib2.0-0 (>= 2.39.4)', - 'libgtk-3-0 (>= 3.9.10)', - 'libgtk-3-0 (>= 3.9.10) | libgtk-4-1', - 'libnspr4 (>= 2:4.9-2~)', - 'libnss3 (>= 2:3.30)', - 'libnss3 (>= 3.26)', - 'libpango-1.0-0 (>= 1.14.0)', - 'libudev1 (>= 183)', - 'libx11-6', - 'libx11-6 (>= 2:1.4.99.1)', - 'libxcb1 (>= 1.9.2)', - 'libxcomposite1 (>= 1:0.4.4-1)', - 'libxdamage1 (>= 1:1.1)', - 'libxext6', - 'libxfixes3', - 'libxkbcommon0 (>= 0.5.0)', - 'libxkbfile1 (>= 1:1.1.0)', - 'libxrandr2', - 'xdg-utils (>= 1.0.2)' - ], - 'armhf': [ - 'ca-certificates', - 'libasound2 (>= 1.0.17)', - 'libatk-bridge2.0-0 (>= 2.5.3)', - 'libatk1.0-0 (>= 2.11.90)', - 'libatspi2.0-0 (>= 2.9.90)', - 'libc6 (>= 2.16)', - 'libc6 (>= 2.17)', - 'libc6 (>= 2.25)', - 'libc6 (>= 2.28)', - 'libc6 (>= 2.4)', - 'libc6 (>= 2.9)', - 'libcairo2 (>= 1.6.0)', - 'libcurl3-gnutls | libcurl3-nss | libcurl4 | libcurl3', - 'libdbus-1-3 (>= 1.9.14)', - 'libexpat1 (>= 2.1~beta3)', - 'libgbm1 (>= 17.1.0~rc2)', - 'libglib2.0-0 (>= 2.39.4)', - 'libgtk-3-0 (>= 3.9.10)', - 'libgtk-3-0 (>= 3.9.10) | libgtk-4-1', - 'libnspr4 (>= 2:4.9-2~)', - 'libnss3 (>= 2:3.30)', - 'libnss3 (>= 3.26)', - 'libpango-1.0-0 (>= 1.14.0)', - 'libstdc++6 (>= 4.1.1)', - 'libstdc++6 (>= 5)', - 'libstdc++6 (>= 5.2)', - 'libstdc++6 (>= 6)', - 'libstdc++6 (>= 9)', - 'libudev1 (>= 183)', - 'libx11-6', - 'libx11-6 (>= 2:1.4.99.1)', - 'libxcb1 (>= 1.9.2)', - 'libxcomposite1 (>= 1:0.4.4-1)', - 'libxdamage1 (>= 1:1.1)', - 'libxext6', - 'libxfixes3', - 'libxkbcommon0 (>= 0.5.0)', - 'libxkbfile1 (>= 1:1.1.0)', - 'libxrandr2', - 'xdg-utils (>= 1.0.2)' - ], - 'arm64': [ - 'ca-certificates', - 'libasound2 (>= 1.0.17)', - 'libatk-bridge2.0-0 (>= 2.5.3)', - 'libatk1.0-0 (>= 2.11.90)', - 'libatspi2.0-0 (>= 2.9.90)', - 'libc6 (>= 2.17)', - 'libc6 (>= 2.25)', - 'libc6 (>= 2.28)', - 'libcairo2 (>= 1.6.0)', - 'libcurl3-gnutls | libcurl3-nss | libcurl4 | libcurl3', - 'libdbus-1-3 (>= 1.9.14)', - 'libexpat1 (>= 2.1~beta3)', - 'libgbm1 (>= 17.1.0~rc2)', - 'libglib2.0-0 (>= 2.39.4)', - 'libgtk-3-0 (>= 3.9.10)', - 'libgtk-3-0 (>= 3.9.10) | libgtk-4-1', - 'libnspr4 (>= 2:4.9-2~)', - 'libnss3 (>= 2:3.30)', - 'libnss3 (>= 3.26)', - 'libpango-1.0-0 (>= 1.14.0)', - 'libstdc++6 (>= 4.1.1)', - 'libstdc++6 (>= 5)', - 'libstdc++6 (>= 5.2)', - 'libstdc++6 (>= 6)', - 'libstdc++6 (>= 9)', - 'libudev1 (>= 183)', - 'libx11-6', - 'libx11-6 (>= 2:1.4.99.1)', - 'libxcb1 (>= 1.9.2)', - 'libxcomposite1 (>= 1:0.4.4-1)', - 'libxdamage1 (>= 1:1.1)', - 'libxext6', - 'libxfixes3', - 'libxkbcommon0 (>= 0.5.0)', - 'libxkbfile1 (>= 1:1.1.0)', - 'libxrandr2', - 'xdg-utils (>= 1.0.2)' - ] -}; -//# sourceMappingURL=dep-lists.js.map \ No newline at end of file diff 
--git a/build/linux/debian/install-sysroot.js b/build/linux/debian/install-sysroot.js deleted file mode 100644 index 4a9a46e6bd6..00000000000 --- a/build/linux/debian/install-sysroot.js +++ /dev/null @@ -1,227 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getVSCodeSysroot = getVSCodeSysroot; -exports.getChromiumSysroot = getChromiumSysroot; -const child_process_1 = require("child_process"); -const os_1 = require("os"); -const fs_1 = __importDefault(require("fs")); -const https_1 = __importDefault(require("https")); -const path_1 = __importDefault(require("path")); -const crypto_1 = require("crypto"); -// Based on https://source.chromium.org/chromium/chromium/src/+/main:build/linux/sysroot_scripts/install-sysroot.py. -const URL_PREFIX = 'https://msftelectronbuild.z5.web.core.windows.net'; -const URL_PATH = 'sysroots/toolchain'; -const REPO_ROOT = path_1.default.dirname(path_1.default.dirname(path_1.default.dirname(__dirname))); -const ghApiHeaders = { - Accept: 'application/vnd.github.v3+json', - 'User-Agent': 'VSCode Build', -}; -if (process.env.GITHUB_TOKEN) { - ghApiHeaders.Authorization = 'Basic ' + Buffer.from(process.env.GITHUB_TOKEN).toString('base64'); -} -const ghDownloadHeaders = { - ...ghApiHeaders, - Accept: 'application/octet-stream', -}; -function getElectronVersion() { - const npmrc = fs_1.default.readFileSync(path_1.default.join(REPO_ROOT, '.npmrc'), 'utf8'); - const electronVersion = /^target="(.*)"$/m.exec(npmrc)[1]; - const msBuildId = /^ms_build_id="(.*)"$/m.exec(npmrc)[1]; - return { electronVersion, msBuildId }; -} -function getSha(filename) { - const hash = (0, crypto_1.createHash)('sha256'); - // Read file 1 MB at a time - const fd = fs_1.default.openSync(filename, 'r'); - const buffer = Buffer.alloc(1024 * 1024); - let position = 0; - let bytesRead = 0; - while ((bytesRead = fs_1.default.readSync(fd, buffer, 0, buffer.length, position)) === buffer.length) { - hash.update(buffer); - position += bytesRead; - } - hash.update(buffer.slice(0, bytesRead)); - return hash.digest('hex'); -} -function getVSCodeSysrootChecksum(expectedName) { - const checksums = fs_1.default.readFileSync(path_1.default.join(REPO_ROOT, 'build', 'checksums', 'vscode-sysroot.txt'), 'utf8'); - for (const line of checksums.split('\n')) { - const [checksum, name] = line.split(/\s+/); - if (name === expectedName) { - return checksum; - } - } - return undefined; -} -/* - * Do not use the fetch implementation from build/lib/fetch as it relies on vinyl streams - * and vinyl-fs breaks the symlinks in the compiler toolchain sysroot. We use the native - * tar implementation for that reason. 
- */ -async function fetchUrl(options, retries = 10, retryDelay = 1000) { - try { - const controller = new AbortController(); - const timeout = setTimeout(() => controller.abort(), 30 * 1000); - const version = '20250407-330404'; - try { - const response = await fetch(`https://api.github.com/repos/Microsoft/vscode-linux-build-agent/releases/tags/v${version}`, { - headers: ghApiHeaders, - signal: controller.signal - }); - if (response.ok && (response.status >= 200 && response.status < 300)) { - console.log(`Fetch completed: Status ${response.status}.`); - const contents = Buffer.from(await response.arrayBuffer()); - const asset = JSON.parse(contents.toString()).assets.find((a) => a.name === options.assetName); - if (!asset) { - throw new Error(`Could not find asset in release of Microsoft/vscode-linux-build-agent @ ${version}`); - } - console.log(`Found asset ${options.assetName} @ ${asset.url}.`); - const assetResponse = await fetch(asset.url, { - headers: ghDownloadHeaders - }); - if (assetResponse.ok && (assetResponse.status >= 200 && assetResponse.status < 300)) { - const assetContents = Buffer.from(await assetResponse.arrayBuffer()); - console.log(`Fetched response body buffer: ${assetContents.byteLength} bytes`); - if (options.checksumSha256) { - const actualSHA256Checksum = (0, crypto_1.createHash)('sha256').update(assetContents).digest('hex'); - if (actualSHA256Checksum !== options.checksumSha256) { - throw new Error(`Checksum mismatch for ${asset.url} (expected ${options.checksumSha256}, actual ${actualSHA256Checksum}))`); - } - } - console.log(`Verified SHA256 checksums match for ${asset.url}`); - const tarCommand = `tar -xz -C ${options.dest}`; - (0, child_process_1.execSync)(tarCommand, { input: assetContents }); - console.log(`Fetch complete!`); - return; - } - throw new Error(`Request ${asset.url} failed with status code: ${assetResponse.status}`); - } - throw new Error(`Request https://api.github.com failed with status code: ${response.status}`); - } - finally { - clearTimeout(timeout); - } - } - catch (e) { - if (retries > 0) { - console.log(`Fetching failed: ${e}`); - await new Promise(resolve => setTimeout(resolve, retryDelay)); - return fetchUrl(options, retries - 1, retryDelay); - } - throw e; - } -} -async function getVSCodeSysroot(arch, isMusl = false) { - let expectedName; - let triple; - const prefix = process.env['VSCODE_SYSROOT_PREFIX'] ?? '-glibc-2.28-gcc-10.5.0'; - switch (arch) { - case 'amd64': - expectedName = `x86_64-linux-gnu${prefix}.tar.gz`; - triple = 'x86_64-linux-gnu'; - break; - case 'arm64': - if (isMusl) { - expectedName = 'aarch64-linux-musl-gcc-10.3.0.tar.gz'; - triple = 'aarch64-linux-musl'; - } - else { - expectedName = `aarch64-linux-gnu${prefix}.tar.gz`; - triple = 'aarch64-linux-gnu'; - } - break; - case 'armhf': - expectedName = `arm-rpi-linux-gnueabihf${prefix}.tar.gz`; - triple = 'arm-rpi-linux-gnueabihf'; - break; - } - console.log(`Fetching ${expectedName} for ${triple}`); - const checksumSha256 = getVSCodeSysrootChecksum(expectedName); - if (!checksumSha256) { - throw new Error(`Could not find checksum for ${expectedName}`); - } - const sysroot = process.env['VSCODE_SYSROOT_DIR'] ?? 
path_1.default.join((0, os_1.tmpdir)(), `vscode-${arch}-sysroot`); - const stamp = path_1.default.join(sysroot, '.stamp'); - let result = `${sysroot}/${triple}/${triple}/sysroot`; - if (isMusl) { - result = `${sysroot}/output/${triple}`; - } - if (fs_1.default.existsSync(stamp) && fs_1.default.readFileSync(stamp).toString() === expectedName) { - return result; - } - console.log(`Installing ${arch} root image: ${sysroot}`); - fs_1.default.rmSync(sysroot, { recursive: true, force: true }); - fs_1.default.mkdirSync(sysroot, { recursive: true }); - await fetchUrl({ - checksumSha256, - assetName: expectedName, - dest: sysroot - }); - fs_1.default.writeFileSync(stamp, expectedName); - return result; -} -async function getChromiumSysroot(arch) { - const sysrootJSONUrl = `https://raw.githubusercontent.com/electron/electron/v${getElectronVersion().electronVersion}/script/sysroots.json`; - const sysrootDictLocation = `${(0, os_1.tmpdir)()}/sysroots.json`; - const result = (0, child_process_1.spawnSync)('curl', [sysrootJSONUrl, '-o', sysrootDictLocation]); - if (result.status !== 0) { - throw new Error('Cannot retrieve sysroots.json. Stderr:\n' + result.stderr); - } - const sysrootInfo = require(sysrootDictLocation); - const sysrootArch = `bullseye_${arch}`; - const sysrootDict = sysrootInfo[sysrootArch]; - const tarballFilename = sysrootDict['Tarball']; - const tarballSha = sysrootDict['Sha256Sum']; - const sysroot = path_1.default.join((0, os_1.tmpdir)(), sysrootDict['SysrootDir']); - const url = [URL_PREFIX, URL_PATH, tarballSha].join('/'); - const stamp = path_1.default.join(sysroot, '.stamp'); - if (fs_1.default.existsSync(stamp) && fs_1.default.readFileSync(stamp).toString() === url) { - return sysroot; - } - console.log(`Installing Debian ${arch} root image: ${sysroot}`); - fs_1.default.rmSync(sysroot, { recursive: true, force: true }); - fs_1.default.mkdirSync(sysroot); - const tarball = path_1.default.join(sysroot, tarballFilename); - console.log(`Downloading ${url}`); - let downloadSuccess = false; - for (let i = 0; i < 3 && !downloadSuccess; i++) { - fs_1.default.writeFileSync(tarball, ''); - await new Promise((c) => { - https_1.default.get(url, (res) => { - res.on('data', (chunk) => { - fs_1.default.appendFileSync(tarball, chunk); - }); - res.on('end', () => { - downloadSuccess = true; - c(); - }); - }).on('error', (err) => { - console.error('Encountered an error during the download attempt: ' + err.message); - c(); - }); - }); - } - if (!downloadSuccess) { - fs_1.default.rmSync(tarball); - throw new Error('Failed to download ' + url); - } - const sha = getSha(tarball); - if (sha !== tarballSha) { - throw new Error(`Tarball sha1sum is wrong. 
Expected ${tarballSha}, actual ${sha}`); - } - const proc = (0, child_process_1.spawnSync)('tar', ['xf', tarball, '-C', sysroot]); - if (proc.status) { - throw new Error('Tarball extraction failed with code ' + proc.status); - } - fs_1.default.rmSync(tarball); - fs_1.default.writeFileSync(stamp, url); - return sysroot; -} -//# sourceMappingURL=install-sysroot.js.map \ No newline at end of file diff --git a/build/linux/debian/install-sysroot.ts b/build/linux/debian/install-sysroot.ts index 4b7ebd1b846..2cab657c1b7 100644 --- a/build/linux/debian/install-sysroot.ts +++ b/build/linux/debian/install-sysroot.ts @@ -9,12 +9,12 @@ import fs from 'fs'; import https from 'https'; import path from 'path'; import { createHash } from 'crypto'; -import { DebianArchString } from './types'; +import type { DebianArchString } from './types.ts'; // Based on https://source.chromium.org/chromium/chromium/src/+/main:build/linux/sysroot_scripts/install-sysroot.py. const URL_PREFIX = 'https://msftelectronbuild.z5.web.core.windows.net'; const URL_PATH = 'sysroots/toolchain'; -const REPO_ROOT = path.dirname(path.dirname(path.dirname(__dirname))); +const REPO_ROOT = path.dirname(path.dirname(path.dirname(import.meta.dirname))); const ghApiHeaders: Record = { Accept: 'application/vnd.github.v3+json', @@ -188,7 +188,7 @@ export async function getChromiumSysroot(arch: DebianArchString): Promise { - return !bundledDeps.some(bundledDep => dependency.startsWith(bundledDep)); - }).sort(); - const referenceGeneratedDeps = packageType === 'deb' ? - dep_lists_1.referenceGeneratedDepsByArch[arch] : - dep_lists_2.referenceGeneratedDepsByArch[arch]; - if (JSON.stringify(sortedDependencies) !== JSON.stringify(referenceGeneratedDeps)) { - const failMessage = 'The dependencies list has changed.' - + '\nOld:\n' + referenceGeneratedDeps.join('\n') - + '\nNew:\n' + sortedDependencies.join('\n'); - if (FAIL_BUILD_FOR_NEW_DEPENDENCIES) { - throw new Error(failMessage); - } - else { - console.warn(failMessage); - } - } - return sortedDependencies; -} -// Based on https://source.chromium.org/chromium/chromium/src/+/main:chrome/installer/linux/rpm/merge_package_deps.py. -function mergePackageDeps(inputDeps) { - const requires = new Set(); - for (const depSet of inputDeps) { - for (const dep of depSet) { - const trimmedDependency = dep.trim(); - if (trimmedDependency.length && !trimmedDependency.startsWith('#')) { - requires.add(trimmedDependency); - } - } - } - return requires; -} -//# sourceMappingURL=dependencies-generator.js.map \ No newline at end of file diff --git a/build/linux/dependencies-generator.ts b/build/linux/dependencies-generator.ts index abb01b9e49d..8f307b21942 100644 --- a/build/linux/dependencies-generator.ts +++ b/build/linux/dependencies-generator.ts @@ -2,19 +2,16 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ - -'use strict'; - import { spawnSync } from 'child_process'; import path from 'path'; -import { getChromiumSysroot, getVSCodeSysroot } from './debian/install-sysroot'; -import { generatePackageDeps as generatePackageDepsDebian } from './debian/calculate-deps'; -import { generatePackageDeps as generatePackageDepsRpm } from './rpm/calculate-deps'; -import { referenceGeneratedDepsByArch as debianGeneratedDeps } from './debian/dep-lists'; -import { referenceGeneratedDepsByArch as rpmGeneratedDeps } from './rpm/dep-lists'; -import { DebianArchString, isDebianArchString } from './debian/types'; -import { isRpmArchString, RpmArchString } from './rpm/types'; -import product = require('../../product.json'); +import { getChromiumSysroot, getVSCodeSysroot } from './debian/install-sysroot.ts'; +import { generatePackageDeps as generatePackageDepsDebian } from './debian/calculate-deps.ts'; +import { generatePackageDeps as generatePackageDepsRpm } from './rpm/calculate-deps.ts'; +import { referenceGeneratedDepsByArch as debianGeneratedDeps } from './debian/dep-lists.ts'; +import { referenceGeneratedDepsByArch as rpmGeneratedDeps } from './rpm/dep-lists.ts'; +import { type DebianArchString, isDebianArchString } from './debian/types.ts'; +import { isRpmArchString, type RpmArchString } from './rpm/types.ts'; +import product from '../../product.json' with { type: 'json' }; // A flag that can easily be toggled. // Make sure to compile the build directory after toggling the value. diff --git a/build/linux/libcxx-fetcher.js b/build/linux/libcxx-fetcher.js deleted file mode 100644 index d6c998e5aea..00000000000 --- a/build/linux/libcxx-fetcher.js +++ /dev/null @@ -1,73 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.downloadLibcxxHeaders = downloadLibcxxHeaders; -exports.downloadLibcxxObjects = downloadLibcxxObjects; -// Can be removed once https://github.com/electron/electron-rebuild/pull/703 is available. 
-const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const debug_1 = __importDefault(require("debug")); -const extract_zip_1 = __importDefault(require("extract-zip")); -const get_1 = require("@electron/get"); -const root = path_1.default.dirname(path_1.default.dirname(__dirname)); -const d = (0, debug_1.default)('libcxx-fetcher'); -async function downloadLibcxxHeaders(outDir, electronVersion, lib_name) { - if (await fs_1.default.existsSync(path_1.default.resolve(outDir, 'include'))) { - return; - } - if (!await fs_1.default.existsSync(outDir)) { - await fs_1.default.mkdirSync(outDir, { recursive: true }); - } - d(`downloading ${lib_name}_headers`); - const headers = await (0, get_1.downloadArtifact)({ - version: electronVersion, - isGeneric: true, - artifactName: `${lib_name}_headers.zip`, - }); - d(`unpacking ${lib_name}_headers from ${headers}`); - await (0, extract_zip_1.default)(headers, { dir: outDir }); -} -async function downloadLibcxxObjects(outDir, electronVersion, targetArch = 'x64') { - if (await fs_1.default.existsSync(path_1.default.resolve(outDir, 'libc++.a'))) { - return; - } - if (!await fs_1.default.existsSync(outDir)) { - await fs_1.default.mkdirSync(outDir, { recursive: true }); - } - d(`downloading libcxx-objects-linux-${targetArch}`); - const objects = await (0, get_1.downloadArtifact)({ - version: electronVersion, - platform: 'linux', - artifactName: 'libcxx-objects', - arch: targetArch, - }); - d(`unpacking libcxx-objects from ${objects}`); - await (0, extract_zip_1.default)(objects, { dir: outDir }); -} -async function main() { - const libcxxObjectsDirPath = process.env['VSCODE_LIBCXX_OBJECTS_DIR']; - const libcxxHeadersDownloadDir = process.env['VSCODE_LIBCXX_HEADERS_DIR']; - const libcxxabiHeadersDownloadDir = process.env['VSCODE_LIBCXXABI_HEADERS_DIR']; - const arch = process.env['VSCODE_ARCH']; - const packageJSON = JSON.parse(fs_1.default.readFileSync(path_1.default.join(root, 'package.json'), 'utf8')); - const electronVersion = packageJSON.devDependencies.electron; - if (!libcxxObjectsDirPath || !libcxxHeadersDownloadDir || !libcxxabiHeadersDownloadDir) { - throw new Error('Required build env not set'); - } - await downloadLibcxxObjects(libcxxObjectsDirPath, electronVersion, arch); - await downloadLibcxxHeaders(libcxxHeadersDownloadDir, electronVersion, 'libcxx'); - await downloadLibcxxHeaders(libcxxabiHeadersDownloadDir, electronVersion, 'libcxxabi'); -} -if (require.main === module) { - main().catch(err => { - console.error(err); - process.exit(1); - }); -} -//# sourceMappingURL=libcxx-fetcher.js.map \ No newline at end of file diff --git a/build/linux/libcxx-fetcher.ts b/build/linux/libcxx-fetcher.ts index 6bdbd8a4f30..981fbd3392e 100644 --- a/build/linux/libcxx-fetcher.ts +++ b/build/linux/libcxx-fetcher.ts @@ -11,7 +11,7 @@ import debug from 'debug'; import extract from 'extract-zip'; import { downloadArtifact } from '@electron/get'; -const root = path.dirname(path.dirname(__dirname)); +const root = path.dirname(path.dirname(import.meta.dirname)); const d = debug('libcxx-fetcher'); @@ -71,7 +71,7 @@ async function main(): Promise { await downloadLibcxxHeaders(libcxxabiHeadersDownloadDir, electronVersion, 'libcxxabi'); } -if (require.main === module) { +if (import.meta.main) { main().catch(err => { console.error(err); process.exit(1); diff --git a/build/linux/rpm/calculate-deps.js b/build/linux/rpm/calculate-deps.js deleted file mode 100644 index b19e26f1854..00000000000 --- a/build/linux/rpm/calculate-deps.js 
+++ /dev/null @@ -1,35 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.generatePackageDeps = generatePackageDeps; -const child_process_1 = require("child_process"); -const fs_1 = require("fs"); -const dep_lists_1 = require("./dep-lists"); -function generatePackageDeps(files) { - const dependencies = files.map(file => calculatePackageDeps(file)); - const additionalDepsSet = new Set(dep_lists_1.additionalDeps); - dependencies.push(additionalDepsSet); - return dependencies; -} -// Based on https://source.chromium.org/chromium/chromium/src/+/main:chrome/installer/linux/rpm/calculate_package_deps.py. -function calculatePackageDeps(binaryPath) { - try { - if (!((0, fs_1.statSync)(binaryPath).mode & fs_1.constants.S_IXUSR)) { - throw new Error(`Binary ${binaryPath} needs to have an executable bit set.`); - } - } - catch (e) { - // The package might not exist. Don't re-throw the error here. - console.error('Tried to stat ' + binaryPath + ' but failed.'); - } - const findRequiresResult = (0, child_process_1.spawnSync)('/usr/lib/rpm/find-requires', { input: binaryPath + '\n' }); - if (findRequiresResult.status !== 0) { - throw new Error(`find-requires failed with exit code ${findRequiresResult.status}.\nstderr: ${findRequiresResult.stderr}`); - } - const requires = new Set(findRequiresResult.stdout.toString('utf-8').trimEnd().split('\n')); - return requires; -} -//# sourceMappingURL=calculate-deps.js.map \ No newline at end of file diff --git a/build/linux/rpm/calculate-deps.ts b/build/linux/rpm/calculate-deps.ts index 4be2200c018..0a1f0107594 100644 --- a/build/linux/rpm/calculate-deps.ts +++ b/build/linux/rpm/calculate-deps.ts @@ -5,7 +5,7 @@ import { spawnSync } from 'child_process'; import { constants, statSync } from 'fs'; -import { additionalDeps } from './dep-lists'; +import { additionalDeps } from './dep-lists.ts'; export function generatePackageDeps(files: string[]): Set[] { const dependencies: Set[] = files.map(file => calculatePackageDeps(file)); diff --git a/build/linux/rpm/dep-lists.js b/build/linux/rpm/dep-lists.js deleted file mode 100644 index 1bbef8a3261..00000000000 --- a/build/linux/rpm/dep-lists.js +++ /dev/null @@ -1,320 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.referenceGeneratedDepsByArch = exports.additionalDeps = void 0; -// Based on https://source.chromium.org/chromium/chromium/src/+/main:chrome/installer/linux/rpm/additional_deps -// Additional dependencies not in the rpm find-requires output. -exports.additionalDeps = [ - 'ca-certificates', // Make sure users have SSL certificates. 
- 'libgtk-3.so.0()(64bit)', - 'libnss3.so(NSS_3.22)(64bit)', - 'libssl3.so(NSS_3.28)(64bit)', - 'rpmlib(FileDigests) <= 4.6.0-1', - 'libvulkan.so.1()(64bit)', - 'libcurl.so.4()(64bit)', - 'xdg-utils' // OS integration -]; -exports.referenceGeneratedDepsByArch = { - 'x86_64': [ - 'ca-certificates', - 'ld-linux-x86-64.so.2()(64bit)', - 'ld-linux-x86-64.so.2(GLIBC_2.2.5)(64bit)', - 'ld-linux-x86-64.so.2(GLIBC_2.3)(64bit)', - 'libX11.so.6()(64bit)', - 'libXcomposite.so.1()(64bit)', - 'libXdamage.so.1()(64bit)', - 'libXext.so.6()(64bit)', - 'libXfixes.so.3()(64bit)', - 'libXrandr.so.2()(64bit)', - 'libasound.so.2()(64bit)', - 'libasound.so.2(ALSA_0.9)(64bit)', - 'libasound.so.2(ALSA_0.9.0rc4)(64bit)', - 'libatk-1.0.so.0()(64bit)', - 'libatk-bridge-2.0.so.0()(64bit)', - 'libatspi.so.0()(64bit)', - 'libc.so.6()(64bit)', - 'libc.so.6(GLIBC_2.10)(64bit)', - 'libc.so.6(GLIBC_2.11)(64bit)', - 'libc.so.6(GLIBC_2.12)(64bit)', - 'libc.so.6(GLIBC_2.14)(64bit)', - 'libc.so.6(GLIBC_2.15)(64bit)', - 'libc.so.6(GLIBC_2.16)(64bit)', - 'libc.so.6(GLIBC_2.17)(64bit)', - 'libc.so.6(GLIBC_2.18)(64bit)', - 'libc.so.6(GLIBC_2.2.5)(64bit)', - 'libc.so.6(GLIBC_2.25)(64bit)', - 'libc.so.6(GLIBC_2.27)(64bit)', - 'libc.so.6(GLIBC_2.28)(64bit)', - 'libc.so.6(GLIBC_2.3)(64bit)', - 'libc.so.6(GLIBC_2.3.2)(64bit)', - 'libc.so.6(GLIBC_2.3.3)(64bit)', - 'libc.so.6(GLIBC_2.3.4)(64bit)', - 'libc.so.6(GLIBC_2.4)(64bit)', - 'libc.so.6(GLIBC_2.6)(64bit)', - 'libc.so.6(GLIBC_2.7)(64bit)', - 'libc.so.6(GLIBC_2.8)(64bit)', - 'libc.so.6(GLIBC_2.9)(64bit)', - 'libcairo.so.2()(64bit)', - 'libcurl.so.4()(64bit)', - 'libdbus-1.so.3()(64bit)', - 'libdbus-1.so.3(LIBDBUS_1_3)(64bit)', - 'libdl.so.2()(64bit)', - 'libdl.so.2(GLIBC_2.2.5)(64bit)', - 'libexpat.so.1()(64bit)', - 'libgbm.so.1()(64bit)', - 'libgcc_s.so.1()(64bit)', - 'libgcc_s.so.1(GCC_3.0)(64bit)', - 'libgcc_s.so.1(GCC_3.3)(64bit)', - 'libgcc_s.so.1(GCC_4.0.0)(64bit)', - 'libgcc_s.so.1(GCC_4.2.0)(64bit)', - 'libgio-2.0.so.0()(64bit)', - 'libglib-2.0.so.0()(64bit)', - 'libgobject-2.0.so.0()(64bit)', - 'libgtk-3.so.0()(64bit)', - 'libm.so.6()(64bit)', - 'libm.so.6(GLIBC_2.2.5)(64bit)', - 'libnspr4.so()(64bit)', - 'libnss3.so()(64bit)', - 'libnss3.so(NSS_3.11)(64bit)', - 'libnss3.so(NSS_3.12)(64bit)', - 'libnss3.so(NSS_3.12.1)(64bit)', - 'libnss3.so(NSS_3.2)(64bit)', - 'libnss3.so(NSS_3.22)(64bit)', - 'libnss3.so(NSS_3.3)(64bit)', - 'libnss3.so(NSS_3.30)(64bit)', - 'libnss3.so(NSS_3.4)(64bit)', - 'libnss3.so(NSS_3.5)(64bit)', - 'libnss3.so(NSS_3.6)(64bit)', - 'libnss3.so(NSS_3.9.2)(64bit)', - 'libnssutil3.so()(64bit)', - 'libnssutil3.so(NSSUTIL_3.12.3)(64bit)', - 'libpango-1.0.so.0()(64bit)', - 'libpthread.so.0()(64bit)', - 'libpthread.so.0(GLIBC_2.12)(64bit)', - 'libpthread.so.0(GLIBC_2.2.5)(64bit)', - 'libpthread.so.0(GLIBC_2.3.2)(64bit)', - 'libpthread.so.0(GLIBC_2.3.3)(64bit)', - 'libpthread.so.0(GLIBC_2.3.4)(64bit)', - 'librt.so.1()(64bit)', - 'librt.so.1(GLIBC_2.2.5)(64bit)', - 'libsmime3.so()(64bit)', - 'libsmime3.so(NSS_3.10)(64bit)', - 'libsmime3.so(NSS_3.2)(64bit)', - 'libssl3.so(NSS_3.28)(64bit)', - 'libudev.so.1()(64bit)', - 'libudev.so.1(LIBUDEV_183)(64bit)', - 'libutil.so.1()(64bit)', - 'libutil.so.1(GLIBC_2.2.5)(64bit)', - 'libxcb.so.1()(64bit)', - 'libxkbcommon.so.0()(64bit)', - 'libxkbcommon.so.0(V_0.5.0)(64bit)', - 'libxkbfile.so.1()(64bit)', - 'rpmlib(FileDigests) <= 4.6.0-1', - 'rtld(GNU_HASH)', - 'xdg-utils' - ], - 'armv7hl': [ - 'ca-certificates', - 'ld-linux-armhf.so.3', - 'ld-linux-armhf.so.3(GLIBC_2.4)', - 'libX11.so.6', - 'libXcomposite.so.1', - 
'libXdamage.so.1', - 'libXext.so.6', - 'libXfixes.so.3', - 'libXrandr.so.2', - 'libasound.so.2', - 'libasound.so.2(ALSA_0.9)', - 'libasound.so.2(ALSA_0.9.0rc4)', - 'libatk-1.0.so.0', - 'libatk-bridge-2.0.so.0', - 'libatspi.so.0', - 'libc.so.6', - 'libc.so.6(GLIBC_2.10)', - 'libc.so.6(GLIBC_2.11)', - 'libc.so.6(GLIBC_2.12)', - 'libc.so.6(GLIBC_2.14)', - 'libc.so.6(GLIBC_2.15)', - 'libc.so.6(GLIBC_2.16)', - 'libc.so.6(GLIBC_2.17)', - 'libc.so.6(GLIBC_2.18)', - 'libc.so.6(GLIBC_2.25)', - 'libc.so.6(GLIBC_2.27)', - 'libc.so.6(GLIBC_2.28)', - 'libc.so.6(GLIBC_2.4)', - 'libc.so.6(GLIBC_2.6)', - 'libc.so.6(GLIBC_2.7)', - 'libc.so.6(GLIBC_2.8)', - 'libc.so.6(GLIBC_2.9)', - 'libcairo.so.2', - 'libcurl.so.4()(64bit)', - 'libdbus-1.so.3', - 'libdbus-1.so.3(LIBDBUS_1_3)', - 'libdl.so.2', - 'libdl.so.2(GLIBC_2.4)', - 'libexpat.so.1', - 'libgbm.so.1', - 'libgcc_s.so.1', - 'libgcc_s.so.1(GCC_3.0)', - 'libgcc_s.so.1(GCC_3.5)', - 'libgcc_s.so.1(GCC_4.3.0)', - 'libgio-2.0.so.0', - 'libglib-2.0.so.0', - 'libgobject-2.0.so.0', - 'libgtk-3.so.0', - 'libgtk-3.so.0()(64bit)', - 'libm.so.6', - 'libm.so.6(GLIBC_2.4)', - 'libnspr4.so', - 'libnss3.so', - 'libnss3.so(NSS_3.11)', - 'libnss3.so(NSS_3.12)', - 'libnss3.so(NSS_3.12.1)', - 'libnss3.so(NSS_3.2)', - 'libnss3.so(NSS_3.22)', - 'libnss3.so(NSS_3.22)(64bit)', - 'libnss3.so(NSS_3.3)', - 'libnss3.so(NSS_3.30)', - 'libnss3.so(NSS_3.4)', - 'libnss3.so(NSS_3.5)', - 'libnss3.so(NSS_3.6)', - 'libnss3.so(NSS_3.9.2)', - 'libnssutil3.so', - 'libnssutil3.so(NSSUTIL_3.12.3)', - 'libpango-1.0.so.0', - 'libpthread.so.0', - 'libpthread.so.0(GLIBC_2.12)', - 'libpthread.so.0(GLIBC_2.4)', - 'librt.so.1', - 'librt.so.1(GLIBC_2.4)', - 'libsmime3.so', - 'libsmime3.so(NSS_3.10)', - 'libsmime3.so(NSS_3.2)', - 'libssl3.so(NSS_3.28)(64bit)', - 'libstdc++.so.6', - 'libstdc++.so.6(CXXABI_1.3)', - 'libstdc++.so.6(CXXABI_1.3.5)', - 'libstdc++.so.6(CXXABI_1.3.8)', - 'libstdc++.so.6(CXXABI_1.3.9)', - 'libstdc++.so.6(CXXABI_ARM_1.3.3)', - 'libstdc++.so.6(GLIBCXX_3.4)', - 'libstdc++.so.6(GLIBCXX_3.4.11)', - 'libstdc++.so.6(GLIBCXX_3.4.14)', - 'libstdc++.so.6(GLIBCXX_3.4.15)', - 'libstdc++.so.6(GLIBCXX_3.4.18)', - 'libstdc++.so.6(GLIBCXX_3.4.19)', - 'libstdc++.so.6(GLIBCXX_3.4.20)', - 'libstdc++.so.6(GLIBCXX_3.4.21)', - 'libstdc++.so.6(GLIBCXX_3.4.22)', - 'libstdc++.so.6(GLIBCXX_3.4.26)', - 'libstdc++.so.6(GLIBCXX_3.4.5)', - 'libstdc++.so.6(GLIBCXX_3.4.9)', - 'libudev.so.1', - 'libudev.so.1(LIBUDEV_183)', - 'libutil.so.1', - 'libutil.so.1(GLIBC_2.4)', - 'libxcb.so.1', - 'libxkbcommon.so.0', - 'libxkbcommon.so.0(V_0.5.0)', - 'libxkbfile.so.1', - 'rpmlib(FileDigests) <= 4.6.0-1', - 'rtld(GNU_HASH)', - 'xdg-utils' - ], - 'aarch64': [ - 'ca-certificates', - 'ld-linux-aarch64.so.1()(64bit)', - 'ld-linux-aarch64.so.1(GLIBC_2.17)(64bit)', - 'libX11.so.6()(64bit)', - 'libXcomposite.so.1()(64bit)', - 'libXdamage.so.1()(64bit)', - 'libXext.so.6()(64bit)', - 'libXfixes.so.3()(64bit)', - 'libXrandr.so.2()(64bit)', - 'libasound.so.2()(64bit)', - 'libasound.so.2(ALSA_0.9)(64bit)', - 'libasound.so.2(ALSA_0.9.0rc4)(64bit)', - 'libatk-1.0.so.0()(64bit)', - 'libatk-bridge-2.0.so.0()(64bit)', - 'libatspi.so.0()(64bit)', - 'libc.so.6()(64bit)', - 'libc.so.6(GLIBC_2.17)(64bit)', - 'libc.so.6(GLIBC_2.18)(64bit)', - 'libc.so.6(GLIBC_2.25)(64bit)', - 'libc.so.6(GLIBC_2.27)(64bit)', - 'libc.so.6(GLIBC_2.28)(64bit)', - 'libcairo.so.2()(64bit)', - 'libcurl.so.4()(64bit)', - 'libdbus-1.so.3()(64bit)', - 'libdbus-1.so.3(LIBDBUS_1_3)(64bit)', - 'libdl.so.2()(64bit)', - 'libdl.so.2(GLIBC_2.17)(64bit)', - 
'libexpat.so.1()(64bit)', - 'libgbm.so.1()(64bit)', - 'libgcc_s.so.1()(64bit)', - 'libgcc_s.so.1(GCC_3.0)(64bit)', - 'libgcc_s.so.1(GCC_3.3)(64bit)', - 'libgcc_s.so.1(GCC_4.2.0)(64bit)', - 'libgcc_s.so.1(GCC_4.5.0)(64bit)', - 'libgio-2.0.so.0()(64bit)', - 'libglib-2.0.so.0()(64bit)', - 'libgobject-2.0.so.0()(64bit)', - 'libgtk-3.so.0()(64bit)', - 'libm.so.6()(64bit)', - 'libm.so.6(GLIBC_2.17)(64bit)', - 'libnspr4.so()(64bit)', - 'libnss3.so()(64bit)', - 'libnss3.so(NSS_3.11)(64bit)', - 'libnss3.so(NSS_3.12)(64bit)', - 'libnss3.so(NSS_3.12.1)(64bit)', - 'libnss3.so(NSS_3.2)(64bit)', - 'libnss3.so(NSS_3.22)(64bit)', - 'libnss3.so(NSS_3.3)(64bit)', - 'libnss3.so(NSS_3.30)(64bit)', - 'libnss3.so(NSS_3.4)(64bit)', - 'libnss3.so(NSS_3.5)(64bit)', - 'libnss3.so(NSS_3.6)(64bit)', - 'libnss3.so(NSS_3.9.2)(64bit)', - 'libnssutil3.so()(64bit)', - 'libnssutil3.so(NSSUTIL_3.12.3)(64bit)', - 'libpango-1.0.so.0()(64bit)', - 'libpthread.so.0()(64bit)', - 'libpthread.so.0(GLIBC_2.17)(64bit)', - 'libsmime3.so()(64bit)', - 'libsmime3.so(NSS_3.10)(64bit)', - 'libsmime3.so(NSS_3.2)(64bit)', - 'libssl3.so(NSS_3.28)(64bit)', - 'libstdc++.so.6()(64bit)', - 'libstdc++.so.6(CXXABI_1.3)(64bit)', - 'libstdc++.so.6(CXXABI_1.3.5)(64bit)', - 'libstdc++.so.6(CXXABI_1.3.8)(64bit)', - 'libstdc++.so.6(CXXABI_1.3.9)(64bit)', - 'libstdc++.so.6(GLIBCXX_3.4)(64bit)', - 'libstdc++.so.6(GLIBCXX_3.4.11)(64bit)', - 'libstdc++.so.6(GLIBCXX_3.4.14)(64bit)', - 'libstdc++.so.6(GLIBCXX_3.4.15)(64bit)', - 'libstdc++.so.6(GLIBCXX_3.4.18)(64bit)', - 'libstdc++.so.6(GLIBCXX_3.4.19)(64bit)', - 'libstdc++.so.6(GLIBCXX_3.4.20)(64bit)', - 'libstdc++.so.6(GLIBCXX_3.4.21)(64bit)', - 'libstdc++.so.6(GLIBCXX_3.4.22)(64bit)', - 'libstdc++.so.6(GLIBCXX_3.4.26)(64bit)', - 'libstdc++.so.6(GLIBCXX_3.4.5)(64bit)', - 'libstdc++.so.6(GLIBCXX_3.4.9)(64bit)', - 'libudev.so.1()(64bit)', - 'libudev.so.1(LIBUDEV_183)(64bit)', - 'libutil.so.1()(64bit)', - 'libutil.so.1(GLIBC_2.17)(64bit)', - 'libxcb.so.1()(64bit)', - 'libxkbcommon.so.0()(64bit)', - 'libxkbcommon.so.0(V_0.5.0)(64bit)', - 'libxkbfile.so.1()(64bit)', - 'rpmlib(FileDigests) <= 4.6.0-1', - 'rtld(GNU_HASH)', - 'xdg-utils' - ] -}; -//# sourceMappingURL=dep-lists.js.map \ No newline at end of file diff --git a/build/npm/dirs.js b/build/npm/dirs.js index 935d8a8a529..46666c12248 100644 --- a/build/npm/dirs.js +++ b/build/npm/dirs.js @@ -3,13 +3,13 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -const fs = require('fs'); +import { existsSync } from 'fs'; // Complete list of directories where npm should be executed to install node modules -const dirs = [ +export const dirs = [ '', 'build', - 'build/monaco-editor-playground', + 'build/vite', 'extensions', 'extensions/configuration-editing', 'extensions/css-language-features', @@ -60,10 +60,8 @@ const dirs = [ '.vscode/extensions/vscode-selfhost-test-provider', ]; -if (fs.existsSync(`${__dirname}/../../.build/distro/npm`)) { +if (existsSync(`${import.meta.dirname}/../../.build/distro/npm`)) { dirs.push('.build/distro/npm'); dirs.push('.build/distro/npm/remote'); dirs.push('.build/distro/npm/remote/web'); } - -exports.dirs = dirs; diff --git a/build/npm/postinstall.js b/build/npm/postinstall.js index fa8da7d08c6..9bfdf17a391 100644 --- a/build/npm/postinstall.js +++ b/build/npm/postinstall.js @@ -3,13 +3,14 @@ * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ -const fs = require('fs'); -const path = require('path'); -const os = require('os'); -const cp = require('child_process'); -const { dirs } = require('./dirs'); +import * as fs from 'fs'; +import path from 'path'; +import * as os from 'os'; +import * as child_process from 'child_process'; +import { dirs } from './dirs.js'; + const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm'; -const root = path.dirname(path.dirname(__dirname)); +const root = path.dirname(path.dirname(import.meta.dirname)); function log(dir, message) { if (process.stdout.isTTY) { @@ -22,7 +23,7 @@ function log(dir, message) { function run(command, args, opts) { log(opts.cwd || '.', '$ ' + command + ' ' + args.join(' ')); - const result = cp.spawnSync(command, args, opts); + const result = child_process.spawnSync(command, args, opts); if (result.error) { console.error(`ERR Failed to spawn process: ${result.error}`); @@ -89,8 +90,8 @@ function setNpmrcConfig(dir, env) { // Use our bundled node-gyp version env['npm_config_node_gyp'] = process.platform === 'win32' - ? path.join(__dirname, 'gyp', 'node_modules', '.bin', 'node-gyp.cmd') - : path.join(__dirname, 'gyp', 'node_modules', '.bin', 'node-gyp'); + ? path.join(import.meta.dirname, 'gyp', 'node_modules', '.bin', 'node-gyp.cmd') + : path.join(import.meta.dirname, 'gyp', 'node_modules', '.bin', 'node-gyp'); // Force node-gyp to use process.config on macOS // which defines clang variable as expected. Otherwise we @@ -185,5 +186,5 @@ for (let dir of dirs) { npmInstall(dir, opts); } -cp.execSync('git config pull.rebase merges'); -cp.execSync('git config blame.ignoreRevsFile .git-blame-ignore-revs'); +child_process.execSync('git config pull.rebase merges'); +child_process.execSync('git config blame.ignoreRevsFile .git-blame-ignore-revs'); diff --git a/build/npm/preinstall.js b/build/npm/preinstall.js index 79ce65dfd9a..1ec3da4ef50 100644 --- a/build/npm/preinstall.js +++ b/build/npm/preinstall.js @@ -2,9 +2,10 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -// @ts-check -const path = require('path'); -const fs = require('fs'); +import path from 'path'; +import * as fs from 'fs'; +import * as child_process from 'child_process'; +import * as os from 'os'; if (!process.env['VSCODE_SKIP_NODE_VERSION_CHECK']) { // Get the running Node.js version @@ -14,7 +15,7 @@ if (!process.env['VSCODE_SKIP_NODE_VERSION_CHECK']) { const patchNodeVersion = parseInt(nodeVersion[3]); // Get the required Node.js version from .nvmrc - const nvmrcPath = path.join(__dirname, '..', '..', '.nvmrc'); + const nvmrcPath = path.join(import.meta.dirname, '..', '..', '.nvmrc'); const requiredVersion = fs.readFileSync(nvmrcPath, 'utf8').trim(); const requiredVersionMatch = /^(\d+)\.(\d+)\.(\d+)/.exec(requiredVersion); @@ -40,9 +41,6 @@ if (process.env.npm_execpath?.includes('yarn')) { throw new Error(); } -const cp = require('child_process'); -const os = require('os'); - if (process.platform === 'win32') { if (!hasSupportedVisualStudioVersion()) { console.error('\x1b[1;31m*** Invalid C/C++ Compiler Toolchain. 
Please check https://github.com/microsoft/vscode/wiki/How-to-Contribute#prerequisites.\x1b[0;0m'); @@ -60,8 +58,6 @@ if (process.arch !== os.arch()) { } function hasSupportedVisualStudioVersion() { - const fs = require('fs'); - const path = require('path'); // Translated over from // https://source.chromium.org/chromium/chromium/src/+/master:build/vs_toolchain.py;l=140-175 const supportedVersions = ['2022', '2019']; @@ -102,9 +98,9 @@ function hasSupportedVisualStudioVersion() { function installHeaders() { const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm'; - cp.execSync(`${npm} ${process.env.npm_command || 'ci'}`, { + child_process.execSync(`${npm} ${process.env.npm_command || 'ci'}`, { env: process.env, - cwd: path.join(__dirname, 'gyp'), + cwd: path.join(import.meta.dirname, 'gyp'), stdio: 'inherit' }); @@ -112,20 +108,20 @@ function installHeaders() { // file checked into our repository. So from that point it is safe to construct the path // to that executable const node_gyp = process.platform === 'win32' - ? path.join(__dirname, 'gyp', 'node_modules', '.bin', 'node-gyp.cmd') - : path.join(__dirname, 'gyp', 'node_modules', '.bin', 'node-gyp'); + ? path.join(import.meta.dirname, 'gyp', 'node_modules', '.bin', 'node-gyp.cmd') + : path.join(import.meta.dirname, 'gyp', 'node_modules', '.bin', 'node-gyp'); - const local = getHeaderInfo(path.join(__dirname, '..', '..', '.npmrc')); - const remote = getHeaderInfo(path.join(__dirname, '..', '..', 'remote', '.npmrc')); + const local = getHeaderInfo(path.join(import.meta.dirname, '..', '..', '.npmrc')); + const remote = getHeaderInfo(path.join(import.meta.dirname, '..', '..', 'remote', '.npmrc')); if (local !== undefined) { // Both disturl and target come from a file checked into our repository - cp.execFileSync(node_gyp, ['install', '--dist-url', local.disturl, local.target], { shell: true }); + child_process.execFileSync(node_gyp, ['install', '--dist-url', local.disturl, local.target], { shell: true }); } if (remote !== undefined) { // Both disturl and target come from a file checked into our repository - cp.execFileSync(node_gyp, ['install', '--dist-url', remote.disturl, remote.target], { shell: true }); + child_process.execFileSync(node_gyp, ['install', '--dist-url', remote.disturl, remote.target], { shell: true }); } // On Linux, apply a patch to the downloaded headers @@ -139,7 +135,7 @@ function installHeaders() { if (fs.existsSync(localHeaderPath)) { console.log('Applying v8-source-location.patch to', localHeaderPath); try { - cp.execFileSync('patch', ['-p0', '-i', path.join(__dirname, 'gyp', 'custom-headers', 'v8-source-location.patch')], { + child_process.execFileSync('patch', ['-p0', '-i', path.join(import.meta.dirname, 'gyp', 'custom-headers', 'v8-source-location.patch')], { cwd: localHeaderPath }); } catch (error) { diff --git a/build/package.json b/build/package.json index 0948204b038..3e2b16aa73d 100644 --- a/build/package.json +++ b/build/package.json @@ -62,15 +62,14 @@ "workerpool": "^6.4.0", "yauzl": "^2.10.0" }, - "type": "commonjs", + "type": "module", "scripts": { - "copy-policy-dto": "node lib/policies/copyPolicyDto.js", + "copy-policy-dto": "node lib/policies/copyPolicyDto.ts", "prebuild-ts": "npm run copy-policy-dto", - "build-ts": "cd .. && npx tsgo --project build/tsconfig.build.json", - "compile": "npm run build-ts", - "watch": "npm run build-ts -- --watch", - "npmCheckJs": "npm run build-ts -- --noEmit", - "test": "npx mocha --ui tdd 'lib/**/*.test.js'" + "typecheck": "cd .. 
&& npx tsgo --project build/tsconfig.json", + "compile": "npm run copy-policy-dto && npm run typecheck", + "watch": "npm run typecheck -- --watch", + "test": "mocha --ui tdd 'lib/**/*.test.ts'" }, "optionalDependencies": { "tree-sitter-typescript": "^0.23.2", diff --git a/build/setup-npm-registry.js b/build/setup-npm-registry.js index 07bcf2296fa..cd6ba54e73f 100644 --- a/build/setup-npm-registry.js +++ b/build/setup-npm-registry.js @@ -3,10 +3,8 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ // @ts-check -'use strict'; - -const fs = require('fs').promises; -const path = require('path'); +import { promises as fs } from 'fs'; +import path from 'path'; /** * @param {string} dir diff --git a/build/stylelint.mjs b/build/stylelint.mjs index 767fa28c2fe..f4080ca13e0 100644 --- a/build/stylelint.mjs +++ b/build/stylelint.mjs @@ -7,12 +7,12 @@ import es from 'event-stream'; import vfs from 'vinyl-fs'; import { stylelintFilter } from './filters.js'; -import { getVariableNameValidator } from './lib/stylelint/validateVariableNames.js'; +import { getVariableNameValidator } from './lib/stylelint/validateVariableNames.ts'; /** * use regex on lines * - * @param {function(string, boolean):void} reporter + * @param {(arg0: string, arg1: boolean) => void} reporter */ export default function gulpstylelint(reporter) { const variableValidator = getVariableNameValidator(); @@ -66,8 +66,7 @@ function stylelint() { .pipe(es.through(function () { /* noop, important for the stream to end */ })); } -const normalizeScriptPath = (/** @type {string} */ p) => p.replace(/\.(js|ts)$/, ''); -if (normalizeScriptPath(import.meta.filename) === normalizeScriptPath(process.argv[1])) { +if (import.meta.main) { stylelint().on('error', (err) => { console.error(); console.error(err); diff --git a/build/tsconfig.build.json b/build/tsconfig.build.json deleted file mode 100644 index dc3305690bc..00000000000 --- a/build/tsconfig.build.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "allowJs": false, - "checkJs": false, - "noEmit": false, - "skipLibCheck": true - }, - "include": [ - "**/*.ts" - ] -} diff --git a/build/tsconfig.json b/build/tsconfig.json index a3cf3fbe89d..209a6e3897d 100644 --- a/build/tsconfig.json +++ b/build/tsconfig.json @@ -5,8 +5,10 @@ "ES2024" ], "module": "nodenext", - "alwaysStrict": true, - "removeComments": false, + "noEmit": true, + "erasableSyntaxOnly": true, + "verbatimModuleSyntax": true, + "allowImportingTsExtensions": true, "preserveConstEnums": true, "sourceMap": true, "resolveJsonModule": true, @@ -14,21 +16,18 @@ // use the tsconfig.build.json for compiling which disable JavaScript // type checking so that JavaScript file are not transpiled "allowJs": true, + "checkJs": false, + "skipLibCheck": true, "strict": true, "exactOptionalPropertyTypes": false, "useUnknownInCatchVariables": false, "noUnusedLocals": true, - "noUnusedParameters": true, - "newLine": "lf", - "noEmit": true + "noUnusedParameters": true }, - "include": [ - "**/*.ts", - "**/*.js", - "**/*.mjs", - ], "exclude": [ "node_modules/**", - "monaco-editor-playground/**" + "monaco-editor-playground/**", + "builtin/**", + "vite/**" ] } diff --git a/build/monaco-editor-playground/index-workbench.ts b/build/vite/index-workbench.ts similarity index 100% rename from build/monaco-editor-playground/index-workbench.ts rename to build/vite/index-workbench.ts 
index 2f63c6b4c6e..e237f661f5d 100644 --- a/build/monaco-editor-playground/index-workbench.ts +++ b/build/vite/index-workbench.ts @@ -3,6 +3,6 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import './setup-dev'; import '../../src/vs/code/browser/workbench/workbench'; +import './setup-dev'; diff --git a/build/monaco-editor-playground/index.html b/build/vite/index.html similarity index 100% rename from build/monaco-editor-playground/index.html rename to build/vite/index.html diff --git a/build/monaco-editor-playground/index.ts b/build/vite/index.ts similarity index 100% rename from build/monaco-editor-playground/index.ts rename to build/vite/index.ts diff --git a/build/monaco-editor-playground/package-lock.json b/build/vite/package-lock.json similarity index 100% rename from build/monaco-editor-playground/package-lock.json rename to build/vite/package-lock.json diff --git a/build/monaco-editor-playground/package.json b/build/vite/package.json similarity index 100% rename from build/monaco-editor-playground/package.json rename to build/vite/package.json diff --git a/build/monaco-editor-playground/rollup-url-to-module-plugin/index.mjs b/build/vite/rollup-url-to-module-plugin/index.mjs similarity index 100% rename from build/monaco-editor-playground/rollup-url-to-module-plugin/index.mjs rename to build/vite/rollup-url-to-module-plugin/index.mjs diff --git a/build/monaco-editor-playground/setup-dev.ts b/build/vite/setup-dev.ts similarity index 91% rename from build/monaco-editor-playground/setup-dev.ts rename to build/vite/setup-dev.ts index 87505545b71..c1df4861082 100644 --- a/build/monaco-editor-playground/setup-dev.ts +++ b/build/vite/setup-dev.ts @@ -13,3 +13,6 @@ import { StandaloneWebWorkerService } from '../../src/vs/editor/standalone/brows enableHotReload(); registerSingleton(IWebWorkerService, StandaloneWebWorkerService, InstantiationType.Eager); + +globalThis._VSCODE_DISABLE_CSS_IMPORT_MAP = true; +globalThis._VSCODE_USE_RELATIVE_IMPORTS = true; diff --git a/build/monaco-editor-playground/style.css b/build/vite/style.css similarity index 100% rename from build/monaco-editor-playground/style.css rename to build/vite/style.css diff --git a/build/monaco-editor-playground/tsconfig.json b/build/vite/tsconfig.json similarity index 100% rename from build/monaco-editor-playground/tsconfig.json rename to build/vite/tsconfig.json diff --git a/build/monaco-editor-playground/vite.config.ts b/build/vite/vite.config.ts similarity index 82% rename from build/monaco-editor-playground/vite.config.ts rename to build/vite/vite.config.ts index ac1536cf578..2824b717cca 100644 --- a/build/monaco-editor-playground/vite.config.ts +++ b/build/vite/vite.config.ts @@ -3,7 +3,7 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import { defineConfig, Plugin } from 'vite'; +import { createLogger, defineConfig, Plugin } from 'vite'; import path, { join } from 'path'; /// @ts-ignore import { urlToEsmPlugin } from './rollup-url-to-module-plugin/index.mjs'; @@ -114,14 +114,43 @@ if (import.meta.hot) { }; } +const logger = createLogger(); +const loggerWarn = logger.warn; + +logger.warn = (msg, options) => { + // amdX and the baseUrl code cannot be analyzed by vite. 
+ // However, they are not needed, so it is okay to silence the warning. + if (msg.indexOf('vs/amdX.ts') !== -1) { + return; + } + if (msg.indexOf('await import(new URL(`vs/workbench/workbench.desktop.main.js`, baseUrl).href)') !== -1) { + return; + } + if (msg.indexOf('const result2 = await import(workbenchUrl);') !== -1) { + return; + } + + // See https://github.com/microsoft/vscode/issues/278153 + if (msg.indexOf('marked.esm.js.map') !== -1 || msg.indexOf('purify.es.mjs.map') !== -1) { + return; + } + + loggerWarn(msg, options); +}; + export default defineConfig({ plugins: [ urlToEsmPlugin(), injectBuiltinExtensionsPlugin(), createHotClassSupport() ], + customLogger: logger, esbuild: { - target: 'es6', // to fix property initialization issues, not needed when loading monaco-editor from npm package + tsconfigRaw: { + compilerOptions: { + experimentalDecorators: true, + } + } }, root: '../..', // To support /out/... paths server: { diff --git a/build/linux/rpm/types.js b/build/vite/workbench-electron.ts similarity index 59% rename from build/linux/rpm/types.js rename to build/vite/workbench-electron.ts index a20b9c2fe02..49578ca4948 100644 --- a/build/linux/rpm/types.js +++ b/build/vite/workbench-electron.ts @@ -1,11 +1,8 @@ -"use strict"; /*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.isRpmArchString = isRpmArchString; -function isRpmArchString(s) { - return ['x86_64', 'armv7hl', 'aarch64'].includes(s); -} -//# sourceMappingURL=types.js.map \ No newline at end of file + +import './setup-dev'; +import '../../src/vs/code/electron-browser/workbench/workbench'; + diff --git a/build/vite/workbench-vite-electron.html b/build/vite/workbench-vite-electron.html new file mode 100644 index 00000000000..87019c6c01a --- /dev/null +++ b/build/vite/workbench-vite-electron.html @@ -0,0 +1,13 @@ + + + + + + + + + + + + + diff --git a/build/monaco-editor-playground/workbench-vite.html b/build/vite/workbench-vite.html similarity index 100% rename from build/monaco-editor-playground/workbench-vite.html rename to build/vite/workbench-vite.html diff --git a/build/win32/explorer-dll-fetcher.js b/build/win32/explorer-dll-fetcher.js deleted file mode 100644 index 1b160974324..00000000000 --- a/build/win32/explorer-dll-fetcher.js +++ /dev/null @@ -1,65 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -'use strict'; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.downloadExplorerDll = downloadExplorerDll; -const fs_1 = __importDefault(require("fs")); -const debug_1 = __importDefault(require("debug")); -const path_1 = __importDefault(require("path")); -const get_1 = require("@electron/get"); -const product_json_1 = __importDefault(require("../../product.json")); -const product = product_json_1.default; -const d = (0, debug_1.default)('explorer-dll-fetcher'); -async function downloadExplorerDll(outDir, quality = 'stable', targetArch = 'x64') { - const fileNamePrefix = quality === 'insider' ? 'code_insider' : 'code'; - const fileName = `${fileNamePrefix}_explorer_command_${targetArch}.dll`; - if (!await fs_1.default.existsSync(outDir)) { - await fs_1.default.mkdirSync(outDir, { recursive: true }); - } - // Read and parse checksums file - const checksumsFilePath = path_1.default.join(path_1.default.dirname(__dirname), 'checksums', 'explorer-dll.txt'); - const checksumsContent = fs_1.default.readFileSync(checksumsFilePath, 'utf8'); - const checksums = {}; - checksumsContent.split('\n').forEach(line => { - const trimmedLine = line.trim(); - if (trimmedLine) { - const [checksum, filename] = trimmedLine.split(/\s+/); - if (checksum && filename) { - checksums[filename] = checksum; - } - } - }); - d(`downloading ${fileName}`); - const artifact = await (0, get_1.downloadArtifact)({ - isGeneric: true, - version: 'v4.0.0-350164', - artifactName: fileName, - checksums, - mirrorOptions: { - mirror: 'https://github.com/microsoft/vscode-explorer-command/releases/download/', - customDir: 'v4.0.0-350164', - customFilename: fileName - } - }); - d(`moving ${artifact} to ${outDir}`); - await fs_1.default.copyFileSync(artifact, path_1.default.join(outDir, fileName)); -} -async function main(outputDir) { - const arch = process.env['VSCODE_ARCH']; - if (!outputDir) { - throw new Error('Required build env not set'); - } - await downloadExplorerDll(outputDir, product.quality, arch); -} -if (require.main === module) { - main(process.argv[2]).catch(err => { - console.error(err); - process.exit(1); - }); -} -//# sourceMappingURL=explorer-dll-fetcher.js.map \ No newline at end of file diff --git a/build/win32/explorer-dll-fetcher.ts b/build/win32/explorer-dll-fetcher.ts index 33e21b4e4a8..d5eac8a128d 100644 --- a/build/win32/explorer-dll-fetcher.ts +++ b/build/win32/explorer-dll-fetcher.ts @@ -2,14 +2,11 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ - -'use strict'; - import fs from 'fs'; import debug from 'debug'; import path from 'path'; import { downloadArtifact } from '@electron/get'; -import productJson from '../../product.json'; +import productJson from '../../product.json' with { type: 'json' }; interface ProductConfiguration { quality?: string; @@ -29,7 +26,7 @@ export async function downloadExplorerDll(outDir: string, quality: string = 'sta } // Read and parse checksums file - const checksumsFilePath = path.join(path.dirname(__dirname), 'checksums', 'explorer-dll.txt'); + const checksumsFilePath = path.join(path.dirname(import.meta.dirname), 'checksums', 'explorer-dll.txt'); const checksumsContent = fs.readFileSync(checksumsFilePath, 'utf8'); const checksums: Record = {}; @@ -70,7 +67,7 @@ async function main(outputDir?: string): Promise { await downloadExplorerDll(outputDir, product.quality, arch); } -if (require.main === module) { +if (import.meta.main) { main(process.argv[2]).catch(err => { console.error(err); process.exit(1); diff --git a/eslint.config.js b/eslint.config.js index a2c242bcc19..6549496d91d 100644 --- a/eslint.config.js +++ b/eslint.config.js @@ -461,7 +461,6 @@ export default tseslint.config( 'src/vs/platform/keybinding/common/keybindingResolver.ts', 'src/vs/platform/keybinding/common/keybindingsRegistry.ts', 'src/vs/platform/keybinding/common/resolvedKeybindingItem.ts', - 'src/vs/platform/keyboardLayout/common/keyboardConfig.ts', 'src/vs/platform/languagePacks/node/languagePacks.ts', 'src/vs/platform/list/browser/listService.ts', 'src/vs/platform/log/browser/log.ts', @@ -525,9 +524,6 @@ export default tseslint.config( 'src/vs/editor/contrib/dropOrPasteInto/browser/dropIntoEditorContribution.ts', 'src/vs/editor/contrib/find/browser/findController.ts', 'src/vs/editor/contrib/find/browser/findModel.ts', - 'src/vs/editor/contrib/find/browser/findWidgetSearchHistory.ts', - 'src/vs/editor/contrib/find/browser/replaceWidgetHistory.ts', - 'src/vs/editor/contrib/folding/browser/folding.ts', 'src/vs/editor/contrib/gotoSymbol/browser/goToCommands.ts', 'src/vs/editor/contrib/gotoSymbol/browser/symbolNavigation.ts', 'src/vs/editor/contrib/hover/browser/hoverActions.ts', @@ -609,7 +605,6 @@ export default tseslint.config( 'src/vs/workbench/contrib/authentication/browser/actions/manageTrustedMcpServersForAccountAction.ts', 'src/vs/workbench/contrib/bulkEdit/browser/bulkCellEdits.ts', 'src/vs/workbench/contrib/bulkEdit/browser/bulkTextEdits.ts', - 'src/vs/workbench/contrib/bulkEdit/browser/opaqueEdits.ts', 'src/vs/workbench/contrib/bulkEdit/browser/preview/bulkEditPane.ts', 'src/vs/workbench/contrib/bulkEdit/browser/preview/bulkEditPreview.ts', 'src/vs/workbench/contrib/chat/browser/actions/chatCodeblockActions.ts', @@ -641,7 +636,6 @@ export default tseslint.config( 'src/vs/workbench/contrib/codeEditor/browser/outline/documentSymbolsOutline.ts', 'src/vs/workbench/contrib/codeEditor/electron-browser/selectionClipboard.ts', 'src/vs/workbench/contrib/commands/common/commands.contribution.ts', - 'src/vs/workbench/contrib/comments/browser/commentNode.ts', 'src/vs/workbench/contrib/comments/browser/commentsAccessibleView.ts', 'src/vs/workbench/contrib/comments/browser/commentsTreeViewer.ts', 'src/vs/workbench/contrib/comments/browser/commentsView.ts', @@ -684,7 +678,6 @@ export default tseslint.config( 'src/vs/workbench/contrib/inlineChat/browser/inlineChatActions.ts', 
'src/vs/workbench/contrib/inlineChat/browser/inlineChatController.ts', 'src/vs/workbench/contrib/inlineChat/browser/inlineChatStrategies.ts', - 'src/vs/workbench/contrib/issue/browser/issueReporterModel.ts', 'src/vs/workbench/contrib/markdown/browser/markdownDocumentRenderer.ts', 'src/vs/workbench/contrib/markdown/browser/markdownSettingRenderer.ts', 'src/vs/workbench/contrib/markers/browser/markers.contribution.ts', @@ -717,7 +710,6 @@ export default tseslint.config( 'src/vs/workbench/contrib/notebook/browser/viewParts/notebookEditorStickyScroll.ts', 'src/vs/workbench/contrib/notebook/browser/viewParts/notebookHorizontalTracker.ts', 'src/vs/workbench/contrib/notebook/browser/viewParts/notebookKernelQuickPickStrategy.ts', - 'src/vs/workbench/contrib/notebook/browser/viewParts/notebookViewZones.ts', 'src/vs/workbench/contrib/notebook/common/model/notebookCellTextModel.ts', 'src/vs/workbench/contrib/notebook/common/model/notebookMetadataTextModel.ts', 'src/vs/workbench/contrib/notebook/common/model/notebookTextModel.ts', @@ -726,7 +718,6 @@ export default tseslint.config( 'src/vs/workbench/contrib/notebook/common/notebookRange.ts', 'src/vs/workbench/contrib/notebook/test/browser/testNotebookEditor.ts', 'src/vs/workbench/contrib/performance/electron-browser/startupProfiler.ts', - 'src/vs/workbench/contrib/preferences/browser/keybindingsEditor.ts', 'src/vs/workbench/contrib/preferences/browser/preferences.contribution.ts', 'src/vs/workbench/contrib/preferences/browser/preferencesRenderers.ts', 'src/vs/workbench/contrib/preferences/browser/settingsEditor2.ts', @@ -763,7 +754,6 @@ export default tseslint.config( 'src/vs/workbench/contrib/searchEditor/browser/searchEditorInput.ts', 'src/vs/workbench/contrib/snippets/browser/commands/configureSnippets.ts', 'src/vs/workbench/contrib/snippets/browser/commands/insertSnippet.ts', - 'src/vs/workbench/contrib/snippets/browser/snippetsFile.ts', 'src/vs/workbench/contrib/snippets/browser/snippetsService.ts', 'src/vs/workbench/contrib/tasks/browser/abstractTaskService.ts', 'src/vs/workbench/contrib/tasks/browser/runAutomaticTasks.ts', @@ -786,9 +776,7 @@ export default tseslint.config( 'src/vs/workbench/contrib/webviewPanel/browser/webviewEditor.ts', 'src/vs/workbench/contrib/webviewPanel/browser/webviewEditorInputSerializer.ts', 'src/vs/workbench/contrib/webviewPanel/browser/webviewWorkbenchService.ts', - 'src/vs/workbench/contrib/webviewView/browser/webviewViewPane.ts', 'src/vs/workbench/contrib/welcomeGettingStarted/browser/gettingStarted.ts', - 'src/vs/workbench/contrib/welcomeGettingStarted/browser/gettingStartedAccessibleView.ts', 'src/vs/workbench/contrib/welcomeGettingStarted/browser/gettingStartedService.ts', 'src/vs/workbench/contrib/welcomeWalkthrough/browser/walkThroughPart.ts', 'src/vs/workbench/services/authentication/common/authentication.ts', diff --git a/extensions/git/src/blame.ts b/extensions/git/src/blame.ts index cb0c00204e3..96f5dec14a1 100644 --- a/extensions/git/src/blame.ts +++ b/extensions/git/src/blame.ts @@ -15,7 +15,7 @@ import { getWorkingTreeAndIndexDiffInformation, getWorkingTreeDiffInformation } import { provideSourceControlHistoryItemAvatar, provideSourceControlHistoryItemHoverCommands, provideSourceControlHistoryItemMessageLinks } from './historyItemDetailsProvider'; import { AvatarQuery, AvatarQueryCommit } from './api/git'; import { LRUCache } from './cache'; -import { AVATAR_SIZE, getHistoryItemHover, getHistoryItemHoverCommitHashCommands, processHistoryItemRemoteHoverCommands } from './historyProvider'; +import { 
AVATAR_SIZE, getCommitHover, getHoverCommitHashCommands, processHoverRemoteCommands } from './hover'; function lineRangesContainLine(changes: readonly TextEditorChange[], lineNumber: number): boolean { return changes.some(c => c.modified.startLineNumber <= lineNumber && lineNumber < c.modified.endLineNumberExclusive); @@ -251,8 +251,8 @@ export class GitBlameController { // Commands const commands: Command[][] = [ - getHistoryItemHoverCommitHashCommands(documentUri, hash), - processHistoryItemRemoteHoverCommands(remoteHoverCommands, hash) + getHoverCommitHashCommands(documentUri, hash), + processHoverRemoteCommands(remoteHoverCommands, hash) ]; commands.push([{ @@ -262,7 +262,7 @@ export class GitBlameController { arguments: ['git.blame'] }] satisfies Command[]); - return getHistoryItemHover(commitAvatar, authorName, authorEmail, authorDate, message, commitInformation?.shortStat, undefined, commands); + return getCommitHover(commitAvatar, authorName, authorEmail, authorDate, message, commitInformation?.shortStat, commands); } private _onDidChangeConfiguration(e?: ConfigurationChangeEvent): void { diff --git a/extensions/git/src/historyProvider.ts b/extensions/git/src/historyProvider.ts index c5ba7b2883d..a1b02953fe5 100644 --- a/extensions/git/src/historyProvider.ts +++ b/extensions/git/src/historyProvider.ts @@ -4,29 +4,26 @@ *--------------------------------------------------------------------------------------------*/ -import { CancellationToken, Disposable, Event, EventEmitter, FileDecoration, FileDecorationProvider, SourceControlHistoryItem, SourceControlHistoryItemChange, SourceControlHistoryOptions, SourceControlHistoryProvider, ThemeIcon, Uri, window, LogOutputChannel, SourceControlHistoryItemRef, l10n, SourceControlHistoryItemRefsChangeEvent, workspace, ConfigurationChangeEvent, MarkdownString, Command, commands } from 'vscode'; +import { CancellationToken, Disposable, Event, EventEmitter, FileDecoration, FileDecorationProvider, SourceControlHistoryItem, SourceControlHistoryItemChange, SourceControlHistoryOptions, SourceControlHistoryProvider, ThemeIcon, Uri, window, LogOutputChannel, SourceControlHistoryItemRef, l10n, SourceControlHistoryItemRefsChangeEvent, workspace, ConfigurationChangeEvent, Command, commands } from 'vscode'; import { Repository, Resource } from './repository'; -import { IDisposable, deltaHistoryItemRefs, dispose, filterEvent, fromNow, getCommitShortHash, subject, truncate } from './util'; +import { IDisposable, deltaHistoryItemRefs, dispose, filterEvent, subject, truncate } from './util'; import { toMultiFileDiffEditorUris } from './uri'; import { AvatarQuery, AvatarQueryCommit, Branch, LogOptions, Ref, RefType } from './api/git'; import { emojify, ensureEmojis } from './emoji'; -import { Commit, CommitShortStat } from './git'; +import { Commit } from './git'; import { OperationKind, OperationResult } from './operation'; import { ISourceControlHistoryItemDetailsProviderRegistry, provideSourceControlHistoryItemAvatar, provideSourceControlHistoryItemHoverCommands, provideSourceControlHistoryItemMessageLinks } from './historyItemDetailsProvider'; import { throttle } from './decorators'; +import { getHistoryItemHover, getHoverCommitHashCommands, processHoverRemoteCommands } from './hover'; -type SourceControlHistoryItemRefWithRenderOptions = SourceControlHistoryItemRef & { - backgroundColor?: string; -}; - -function compareSourceControlHistoryItemRef(ref1: SourceControlHistoryItemRefWithRenderOptions, ref2: SourceControlHistoryItemRefWithRenderOptions): number { 
- const getOrder = (ref: SourceControlHistoryItemRefWithRenderOptions): number => { +function compareSourceControlHistoryItemRef(ref1: SourceControlHistoryItemRef, ref2: SourceControlHistoryItemRef): number { + const getOrder = (ref: SourceControlHistoryItemRef): number => { if (ref.id.startsWith('refs/heads/')) { - return ref.backgroundColor ? 1 : 5; + return 1; } else if (ref.id.startsWith('refs/remotes/')) { - return ref.backgroundColor ? 2 : 15; + return 2; } else if (ref.id.startsWith('refs/tags/')) { - return ref.backgroundColor ? 3 : 25; + return 3; } return 99; @@ -308,11 +305,11 @@ export class GitHistoryProvider implements SourceControlHistoryProvider, FileDec const references = this._resolveHistoryItemRefs(commit); const commands: Command[][] = [ - getHistoryItemHoverCommitHashCommands(Uri.file(this.repository.root), commit.hash), - processHistoryItemRemoteHoverCommands(remoteHoverCommands, commit.hash) + getHoverCommitHashCommands(Uri.file(this.repository.root), commit.hash), + processHoverRemoteCommands(remoteHoverCommands, commit.hash) ]; - const tooltip = getHistoryItemHover(avatarUrl, commit.authorName, commit.authorEmail, commit.authorDate ?? commit.commitDate, messageWithLinks, commit.shortStat, references, commands); + const tooltip = getHistoryItemHover(avatarUrl, commit.authorName, commit.authorEmail, commit.authorDate ?? commit.commitDate, messageWithLinks, commit.shortStat, commands); historyItems.push({ id: commit.hash, @@ -489,8 +486,8 @@ export class GitHistoryProvider implements SourceControlHistoryProvider, FileDec return this.historyItemDecorations.get(uri.toString()); } - private _resolveHistoryItemRefs(commit: Commit): SourceControlHistoryItemRefWithRenderOptions[] { - const references: SourceControlHistoryItemRefWithRenderOptions[] = []; + private _resolveHistoryItemRefs(commit: Commit): SourceControlHistoryItemRef[] { + const references: SourceControlHistoryItemRef[] = []; for (const ref of commit.refNames) { if (ref === 'refs/remotes/origin/HEAD') { @@ -504,8 +501,7 @@ export class GitHistoryProvider implements SourceControlHistoryProvider, FileDec name: ref.substring('HEAD -> refs/heads/'.length), revision: commit.hash, category: l10n.t('branches'), - icon: new ThemeIcon('target'), - backgroundColor: `--vscode-scmGraph-historyItemRefColor` + icon: new ThemeIcon('target') }); break; case ref.startsWith('refs/heads/'): @@ -523,12 +519,7 @@ export class GitHistoryProvider implements SourceControlHistoryProvider, FileDec name: ref.substring('refs/remotes/'.length), revision: commit.hash, category: l10n.t('remote branches'), - icon: new ThemeIcon('cloud'), - backgroundColor: ref === this.currentHistoryItemRemoteRef?.id - ? `--vscode-scmGraph-historyItemRemoteRefColor` - : ref === this.currentHistoryItemBaseRef?.id - ? `--vscode-scmGraph-historyItemBaseRefColor` - : undefined + icon: new ThemeIcon('cloud') }); break; case ref.startsWith('tag: refs/tags/'): @@ -537,10 +528,7 @@ export class GitHistoryProvider implements SourceControlHistoryProvider, FileDec name: ref.substring('tag: refs/tags/'.length), revision: commit.hash, category: l10n.t('tags'), - icon: new ThemeIcon('tag'), - backgroundColor: ref === this.currentHistoryItemRef?.id - ? 
`--vscode-scmGraph-historyItemRefColor` - : undefined + icon: new ThemeIcon('tag') }); break; } @@ -621,127 +609,3 @@ export class GitHistoryProvider implements SourceControlHistoryProvider, FileDec dispose(this.disposables); } } - -export const AVATAR_SIZE = 20; - -export function getHistoryItemHoverCommitHashCommands(documentUri: Uri, hash: string): Command[] { - return [{ - title: `$(git-commit) ${getCommitShortHash(documentUri, hash)}`, - tooltip: l10n.t('Open Commit'), - command: 'git.viewCommit', - arguments: [documentUri, hash, documentUri] - }, { - title: `$(copy)`, - tooltip: l10n.t('Copy Commit Hash'), - command: 'git.copyContentToClipboard', - arguments: [hash] - }] satisfies Command[]; -} - -export function processHistoryItemRemoteHoverCommands(commands: Command[], hash: string): Command[] { - return commands.map(command => ({ - ...command, - arguments: [...command.arguments ?? [], hash] - } satisfies Command)); -} - -export function getHistoryItemHover(authorAvatar: string | undefined, authorName: string | undefined, authorEmail: string | undefined, authorDate: Date | number | undefined, message: string, shortStats: CommitShortStat | undefined, references: SourceControlHistoryItemRefWithRenderOptions[] | undefined, commands: Command[][] | undefined): MarkdownString { - const markdownString = new MarkdownString('', true); - markdownString.isTrusted = { - enabledCommands: commands?.flat().map(c => c.command) ?? [] - }; - - // Author - if (authorName) { - // Avatar - if (authorAvatar) { - markdownString.appendMarkdown('!['); - markdownString.appendText(authorName); - markdownString.appendMarkdown(']('); - markdownString.appendText(authorAvatar); - markdownString.appendMarkdown(`|width=${AVATAR_SIZE},height=${AVATAR_SIZE})`); - } else { - markdownString.appendMarkdown('$(account)'); - } - - // Email - if (authorEmail) { - markdownString.appendMarkdown(' [**'); - markdownString.appendText(authorName); - markdownString.appendMarkdown('**](mailto:'); - markdownString.appendText(authorEmail); - markdownString.appendMarkdown(')'); - } else { - markdownString.appendMarkdown(' **'); - markdownString.appendText(authorName); - markdownString.appendMarkdown('**'); - } - - // Date - if (authorDate && !isNaN(new Date(authorDate).getTime())) { - const dateString = new Date(authorDate).toLocaleString(undefined, { - year: 'numeric', month: 'long', day: 'numeric', hour: 'numeric', minute: 'numeric' - }); - - markdownString.appendMarkdown(', $(history)'); - markdownString.appendText(` ${fromNow(authorDate, true, true)} (${dateString})`); - } - - markdownString.appendMarkdown('\n\n'); - } - - // Subject | Message (escape image syntax) - markdownString.appendMarkdown(`${emojify(message.replace(/!\[/g, '![').replace(/\r\n|\r|\n/g, '\n\n'))}\n\n`); - markdownString.appendMarkdown(`---\n\n`); - - // Short stats - if (shortStats) { - markdownString.appendMarkdown(`${shortStats.files === 1 ? - l10n.t('{0} file changed', shortStats.files) : - l10n.t('{0} files changed', shortStats.files)}`); - - if (shortStats.insertions) { - markdownString.appendMarkdown(`, ${shortStats.insertions === 1 ? - l10n.t('{0} insertion{1}', shortStats.insertions, '(+)') : - l10n.t('{0} insertions{1}', shortStats.insertions, '(+)')}`); - } - - if (shortStats.deletions) { - markdownString.appendMarkdown(`, ${shortStats.deletions === 1 ? 
- l10n.t('{0} deletion{1}', shortStats.deletions, '(-)') : - l10n.t('{0} deletions{1}', shortStats.deletions, '(-)')}`); - } - - markdownString.appendMarkdown(`\n\n---\n\n`); - } - - // References - if (references && references.length > 0) { - for (const reference of references) { - const labelIconId = reference.icon instanceof ThemeIcon ? reference.icon.id : ''; - const backgroundColor = `var(${reference.backgroundColor ?? '--vscode-scmGraph-historyItemHoverDefaultLabelBackground'})`; - const color = reference.backgroundColor ? `var(--vscode-scmGraph-historyItemHoverLabelForeground)` : `var(--vscode-scmGraph-historyItemHoverDefaultLabelForeground)`; - - markdownString.appendMarkdown(` $(${labelIconId}) `); - markdownString.appendText(reference.name); - markdownString.appendMarkdown(`  `); - } - - markdownString.appendMarkdown(`\n\n---\n\n`); - } - - // Commands - if (commands && commands.length > 0) { - for (let index = 0; index < commands.length; index++) { - if (index !== 0) { - markdownString.appendMarkdown('  |  '); - } - - const commandsMarkdown = commands[index] - .map(command => `[${command.title}](command:${command.command}?${encodeURIComponent(JSON.stringify(command.arguments))} "${command.tooltip}")`); - markdownString.appendMarkdown(commandsMarkdown.join(' ')); - } - } - - return markdownString; -} diff --git a/extensions/git/src/hover.ts b/extensions/git/src/hover.ts new file mode 100644 index 00000000000..7d33893a348 --- /dev/null +++ b/extensions/git/src/hover.ts @@ -0,0 +1,161 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + +import { Command, l10n, MarkdownString, Uri } from 'vscode'; +import { fromNow, getCommitShortHash } from './util'; +import { emojify } from './emoji'; +import { CommitShortStat } from './git'; + +export const AVATAR_SIZE = 20; + +export function getCommitHover(authorAvatar: string | undefined, authorName: string | undefined, authorEmail: string | undefined, authorDate: Date | number | undefined, message: string, shortStats: CommitShortStat | undefined, commands: Command[][] | undefined): MarkdownString { + const markdownString = new MarkdownString('', true); + markdownString.isTrusted = { + enabledCommands: commands?.flat().map(c => c.command) ?? 
[] + }; + + // Author, Subject | Message (escape image syntax) + appendContent(markdownString, authorAvatar, authorName, authorEmail, authorDate, message); + + // Short stats + if (shortStats) { + appendShortStats(markdownString, shortStats); + } + + // Commands + if (commands && commands.length > 0) { + appendCommands(markdownString, commands); + } + + return markdownString; +} + +export function getHistoryItemHover(authorAvatar: string | undefined, authorName: string | undefined, authorEmail: string | undefined, authorDate: Date | number | undefined, message: string, shortStats: CommitShortStat | undefined, commands: Command[][] | undefined): MarkdownString[] { + const hoverContent: MarkdownString[] = []; + + // Author, Subject | Message (escape image syntax) + const authorMarkdownString = new MarkdownString('', true); + appendContent(authorMarkdownString, authorAvatar, authorName, authorEmail, authorDate, message); + hoverContent.push(authorMarkdownString); + + // Short stats + if (shortStats) { + const shortStatsMarkdownString = new MarkdownString('', true); + shortStatsMarkdownString.supportHtml = true; + appendShortStats(shortStatsMarkdownString, shortStats); + hoverContent.push(shortStatsMarkdownString); + } + + // Commands + if (commands && commands.length > 0) { + const commandsMarkdownString = new MarkdownString('', true); + commandsMarkdownString.isTrusted = { + enabledCommands: commands?.flat().map(c => c.command) ?? [] + }; + appendCommands(commandsMarkdownString, commands); + hoverContent.push(commandsMarkdownString); + } + + return hoverContent; +} + +function appendContent(markdownString: MarkdownString, authorAvatar: string | undefined, authorName: string | undefined, authorEmail: string | undefined, authorDate: Date | number | undefined, message: string): void { + // Author + if (authorName) { + // Avatar + if (authorAvatar) { + markdownString.appendMarkdown('!['); + markdownString.appendText(authorName); + markdownString.appendMarkdown(']('); + markdownString.appendText(authorAvatar); + markdownString.appendMarkdown(`|width=${AVATAR_SIZE},height=${AVATAR_SIZE})`); + } else { + markdownString.appendMarkdown('$(account)'); + } + + // Email + if (authorEmail) { + markdownString.appendMarkdown(' [**'); + markdownString.appendText(authorName); + markdownString.appendMarkdown('**](mailto:'); + markdownString.appendText(authorEmail); + markdownString.appendMarkdown(')'); + } else { + markdownString.appendMarkdown(' **'); + markdownString.appendText(authorName); + markdownString.appendMarkdown('**'); + } + + // Date + if (authorDate && !isNaN(new Date(authorDate).getTime())) { + const dateString = new Date(authorDate).toLocaleString(undefined, { + year: 'numeric', month: 'long', day: 'numeric', hour: 'numeric', minute: 'numeric' + }); + + markdownString.appendMarkdown(', $(history)'); + markdownString.appendText(` ${fromNow(authorDate, true, true)} (${dateString})`); + } + + markdownString.appendMarkdown('\n\n'); + } + + // Subject | Message (escape image syntax) + markdownString.appendMarkdown(`${emojify(message.replace(/!\[/g, '![').replace(/\r\n|\r|\n/g, '\n\n'))}`); + markdownString.appendMarkdown(`\n\n---\n\n`); +} + +function appendShortStats(markdownString: MarkdownString, shortStats: { files: number; insertions: number; deletions: number }): void { + // Short stats + markdownString.appendMarkdown(`${shortStats.files === 1 ? 
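Note: splitting the hover into several `MarkdownString` sections means only the commands section needs to be trusted. Below is a minimal sketch of that scoping against the public `vscode` API; the command id is the one used by the Git hover helpers in the hunks below, and the helper name is illustrative.

```ts
import { MarkdownString } from 'vscode';

// Illustrative helper: only the commands section of the hover carries
// isTrusted.enabledCommands, so command links cannot run arbitrary commands
// and the author/message sections stay untrusted.
function buildCommandsSection(hash: string): MarkdownString {
	const md = new MarkdownString('', true /* supportThemeIcons */);
	md.isTrusted = { enabledCommands: ['git.copyContentToClipboard'] };

	const args = encodeURIComponent(JSON.stringify([hash]));
	md.appendMarkdown(`[$(copy)](command:git.copyContentToClipboard?${args} "Copy Commit Hash")`);
	return md;
}
```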
+ l10n.t('{0} file changed', shortStats.files) : + l10n.t('{0} files changed', shortStats.files)}`); + + if (shortStats.insertions) { + markdownString.appendMarkdown(`, ${shortStats.insertions === 1 ? + l10n.t('{0} insertion{1}', shortStats.insertions, '(+)') : + l10n.t('{0} insertions{1}', shortStats.insertions, '(+)')}`); + } + + if (shortStats.deletions) { + markdownString.appendMarkdown(`, ${shortStats.deletions === 1 ? + l10n.t('{0} deletion{1}', shortStats.deletions, '(-)') : + l10n.t('{0} deletions{1}', shortStats.deletions, '(-)')}`); + } + + markdownString.appendMarkdown(`\n\n---\n\n`); +} + +function appendCommands(markdownString: MarkdownString, commands: Command[][]): void { + for (let index = 0; index < commands.length; index++) { + if (index !== 0) { + markdownString.appendMarkdown('  |  '); + } + + const commandsMarkdown = commands[index] + .map(command => `[${command.title}](command:${command.command}?${encodeURIComponent(JSON.stringify(command.arguments))} "${command.tooltip}")`); + markdownString.appendMarkdown(commandsMarkdown.join(' ')); + } +} + +export function getHoverCommitHashCommands(documentUri: Uri, hash: string): Command[] { + return [{ + title: `$(git-commit) ${getCommitShortHash(documentUri, hash)}`, + tooltip: l10n.t('Open Commit'), + command: 'git.viewCommit', + arguments: [documentUri, hash, documentUri] + }, { + title: `$(copy)`, + tooltip: l10n.t('Copy Commit Hash'), + command: 'git.copyContentToClipboard', + arguments: [hash] + }] satisfies Command[]; +} + +export function processHoverRemoteCommands(commands: Command[], hash: string): Command[] { + return commands.map(command => ({ + ...command, + arguments: [...command.arguments ?? [], hash] + } satisfies Command)); +} diff --git a/extensions/git/src/timelineProvider.ts b/extensions/git/src/timelineProvider.ts index 52452c4c94e..1ccf04a423d 100644 --- a/extensions/git/src/timelineProvider.ts +++ b/extensions/git/src/timelineProvider.ts @@ -13,7 +13,7 @@ import { OperationKind, OperationResult } from './operation'; import { truncate } from './util'; import { provideSourceControlHistoryItemAvatar, provideSourceControlHistoryItemHoverCommands, provideSourceControlHistoryItemMessageLinks } from './historyItemDetailsProvider'; import { AvatarQuery, AvatarQueryCommit } from './api/git'; -import { getHistoryItemHover, getHistoryItemHoverCommitHashCommands, processHistoryItemRemoteHoverCommands } from './historyProvider'; +import { getCommitHover, getHoverCommitHashCommands, processHoverRemoteCommands } from './hover'; export class GitTimelineItem extends TimelineItem { static is(item: TimelineItem): item is GitTimelineItem { @@ -198,11 +198,11 @@ export class GitTimelineProvider implements TimelineProvider { const messageWithLinks = await provideSourceControlHistoryItemMessageLinks(this.model, repo, message) ?? 
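Note: the exported helpers above are meant to be composed by callers such as the timeline provider in the next hunk. A sketch of the call shape, with the author name and the remote commands as illustrative placeholders for provider-supplied data:

```ts
import { Command, MarkdownString, Uri } from 'vscode';
import { getCommitHover, getHoverCommitHashCommands, processHoverRemoteCommands } from './hover';

// Sketch of how a caller composes the new hover helpers; mirrors the
// timelineProvider change below. 'Jane Doe' and remoteCommands are placeholders.
function buildTooltip(root: Uri, hash: string, message: string, remoteCommands: Command[]): MarkdownString {
	const commands: Command[][] = [
		getHoverCommitHashCommands(root, hash),          // open / copy the commit hash
		processHoverRemoteCommands(remoteCommands, hash) // provider commands, hash appended
	];
	// Avatar, email and short stats omitted for brevity.
	return getCommitHover(undefined, 'Jane Doe', undefined, Date.now(), message, undefined, commands);
}
```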
message; const commands: Command[][] = [ - getHistoryItemHoverCommitHashCommands(uri, c.hash), - processHistoryItemRemoteHoverCommands(commitRemoteSourceCommands, c.hash) + getHoverCommitHashCommands(uri, c.hash), + processHoverRemoteCommands(commitRemoteSourceCommands, c.hash) ]; - item.tooltip = getHistoryItemHover(avatars?.get(c.hash), c.authorName, c.authorEmail, date, messageWithLinks, c.shortStat, undefined, commands); + item.tooltip = getCommitHover(avatars?.get(c.hash), c.authorName, c.authorEmail, date, messageWithLinks, c.shortStat, commands); const cmd = this.commands.resolveTimelineOpenDiffCommand(item, uri); if (cmd) { @@ -227,7 +227,7 @@ export class GitTimelineProvider implements TimelineProvider { // TODO@eamodio: Replace with a better icon -- reflecting its status maybe? item.iconPath = new ThemeIcon('git-commit'); item.description = ''; - item.tooltip = getHistoryItemHover(undefined, you, undefined, date, Resource.getStatusText(index.type), undefined, undefined, undefined); + item.tooltip = getCommitHover(undefined, you, undefined, date, Resource.getStatusText(index.type), undefined, undefined); const cmd = this.commands.resolveTimelineOpenDiffCommand(item, uri); if (cmd) { @@ -249,7 +249,7 @@ export class GitTimelineProvider implements TimelineProvider { const item = new GitTimelineItem('', index ? '~' : 'HEAD', l10n.t('Uncommitted Changes'), date.getTime(), 'working', 'git:file:working'); item.iconPath = new ThemeIcon('circle-outline'); item.description = ''; - item.tooltip = getHistoryItemHover(undefined, you, undefined, date, Resource.getStatusText(working.type), undefined, undefined, undefined); + item.tooltip = getCommitHover(undefined, you, undefined, date, Resource.getStatusText(working.type), undefined, undefined); const cmd = this.commands.resolveTimelineOpenDiffCommand(item, uri); if (cmd) { diff --git a/extensions/mangle-loader.js b/extensions/mangle-loader.js index 016d0f69033..ed32a85e633 100644 --- a/extensions/mangle-loader.js +++ b/extensions/mangle-loader.js @@ -8,7 +8,7 @@ const fs = require('fs'); const webpack = require('webpack'); const fancyLog = require('fancy-log'); const ansiColors = require('ansi-colors'); -const { Mangler } = require('../build/lib/mangle/index'); +const { Mangler } = require('../build/lib/mangle/index.js'); /** * Map of project paths to mangled file contents diff --git a/extensions/shared.webpack.config.mjs b/extensions/shared.webpack.config.mjs index f54499dc227..12b1ea522a4 100644 --- a/extensions/shared.webpack.config.mjs +++ b/extensions/shared.webpack.config.mjs @@ -42,17 +42,21 @@ function withNodeDefaults(/**@type WebpackConfig & { context: string }*/extConfi rules: [{ test: /\.ts$/, exclude: /node_modules/, - use: [{ - // configure TypeScript loader: - // * enable sources maps for end-to-end source maps - loader: 'ts-loader', - options: tsLoaderOptions - }, { - loader: path.resolve(import.meta.dirname, 'mangle-loader.js'), - options: { - configFile: path.join(extConfig.context, 'tsconfig.json') + use: [ + { + // configure TypeScript loader: + // * enable sources maps for end-to-end source maps + loader: 'ts-loader', + options: tsLoaderOptions }, - },] + // disable mangling for now, SEE https://github.com/microsoft/vscode/issues/204692 + // { + // loader: path.resolve(import.meta.dirname, 'mangle-loader.js'), + // options: { + // configFile: path.join(extConfig.context, 'tsconfig.json') + // }, + // }, + ] }] }, externals: { @@ -135,12 +139,13 @@ function withBrowserDefaults(/**@type WebpackConfig & { context: string 
}*/extCo // ...(additionalOptions ? {} : { configFile: additionalOptions.configFile }), } }, - { - loader: path.resolve(import.meta.dirname, 'mangle-loader.js'), - options: { - configFile: path.join(extConfig.context, additionalOptions?.configFile ?? 'tsconfig.json') - }, - }, + // disable mangling for now, SEE https://github.com/microsoft/vscode/issues/204692 + // { + // loader: path.resolve(import.meta.dirname, 'mangle-loader.js'), + // options: { + // configFile: path.join(extConfig.context, additionalOptions?.configFile ?? 'tsconfig.json') + // }, + // }, ] }, { test: /\.wasm$/, diff --git a/extensions/terminal-suggest/package.json b/extensions/terminal-suggest/package.json index 66e99b20e11..734e3e91c82 100644 --- a/extensions/terminal-suggest/package.json +++ b/extensions/terminal-suggest/package.json @@ -36,8 +36,8 @@ "scripts": { "compile": "npx gulp compile-extension:terminal-suggest", "watch": "npx gulp watch-extension:terminal-suggest", - "pull-zshbuiltins": "ts-node ./scripts/pullZshBuiltins.ts", - "pull-fishbuiltins": "ts-node ./scripts/pullFishBuiltins.ts" + "pull-zshbuiltins": "node ./scripts/pullZshBuiltins.ts", + "pull-fishbuiltins": "node ./scripts/pullFishBuiltins.ts" }, "main": "./out/terminalSuggestMain", "activationEvents": [ diff --git a/gulpfile.mjs b/gulpfile.mjs index 21d7757da7d..5acdbee578a 100644 --- a/gulpfile.mjs +++ b/gulpfile.mjs @@ -2,4 +2,4 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import './build/gulpfile.mjs'; +import './build/gulpfile.ts'; diff --git a/package-lock.json b/package-lock.json index 5351efd3098..ecc4bbe2216 100644 --- a/package-lock.json +++ b/package-lock.json @@ -149,7 +149,6 @@ "source-map-support": "^0.3.2", "style-loader": "^3.3.2", "ts-loader": "^9.5.1", - "ts-node": "^10.9.1", "tsec": "0.2.7", "tslib": "^2.6.3", "typescript": "^6.0.0-dev.20251110", @@ -791,28 +790,6 @@ "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", "dev": true }, - "node_modules/@cspotcode/source-map-support": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", - "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", - "dev": true, - "dependencies": { - "@jridgewell/trace-mapping": "0.3.9" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping": { - "version": "0.3.9", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", - "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", - "dev": true, - "dependencies": { - "@jridgewell/resolve-uri": "^3.0.3", - "@jridgewell/sourcemap-codec": "^1.4.10" - } - }, "node_modules/@discoveryjs/json-ext": { "version": "0.5.3", "resolved": "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.3.tgz", @@ -1900,30 +1877,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/@tsconfig/node10": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz", - "integrity": "sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==", - "dev": true - }, - 
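Note: the script changes in this and the surrounding hunks replace `ts-node` and pre-compiled `.js` entry points with direct `node <file>.ts` invocations, and `ts-node` is removed from the dev dependencies further down. This presumably relies on a Node.js version that strips erasable TypeScript syntax at load time, so such scripts have to avoid non-erasable constructs. A hedged sketch of what a script written for that mode looks like:

```ts
// build/scripts/example.ts — illustrative only. Assumes a Node.js version with
// native TypeScript type stripping, so it can be run as `node example.ts`.
// Only erasable syntax is used: interfaces, type annotations, `as` casts.

interface CacheKeyInput {
	readonly platform: string;
	readonly arch: string;
}

function computeKey(input: CacheKeyInput): string {
	return `${input.platform}-${input.arch}`;
}

// Non-erasable constructs (enums, namespaces, parameter properties) are
// rejected by plain type stripping and must be avoided in these scripts.
console.log(computeKey({ platform: process.platform, arch: process.arch }));
```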
"node_modules/@tsconfig/node12": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", - "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", - "dev": true - }, - "node_modules/@tsconfig/node14": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", - "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", - "dev": true - }, - "node_modules/@tsconfig/node16": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.3.tgz", - "integrity": "sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ==", - "dev": true - }, "node_modules/@types/cacheable-request": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.3.tgz", @@ -3787,15 +3740,6 @@ "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, - "node_modules/acorn-walk": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz", - "integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==", - "dev": true, - "engines": { - "node": ">=0.4.0" - } - }, "node_modules/agent-base": { "version": "7.1.1", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", @@ -4005,12 +3949,6 @@ "node": ">=14" } }, - "node_modules/arg": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", - "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", - "dev": true - }, "node_modules/argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", @@ -5606,12 +5544,6 @@ "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ==", "dev": true }, - "node_modules/create-require": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", - "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", - "dev": true - }, "node_modules/cross-spawn": { "version": "7.0.6", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", @@ -11906,12 +11838,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/make-error": { - "version": "1.3.6", - "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", - "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", - "dev": true - }, "node_modules/make-iterator": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/make-iterator/-/make-iterator-1.0.1.tgz", @@ -17122,63 +17048,12 @@ "code-block-writer": "^12.0.0" } }, - "node_modules/ts-node": { - "version": "10.9.1", - "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.1.tgz", - "integrity": "sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==", - "dev": true, - "dependencies": { - "@cspotcode/source-map-support": "^0.8.0", - "@tsconfig/node10": "^1.0.7", - "@tsconfig/node12": "^1.0.7", - "@tsconfig/node14": "^1.0.0", - "@tsconfig/node16": "^1.0.2", - "acorn": "^8.4.1", - "acorn-walk": "^8.1.1", - "arg": "^4.1.0", - "create-require": "^1.1.0", - 
"diff": "^4.0.1", - "make-error": "^1.1.1", - "v8-compile-cache-lib": "^3.0.1", - "yn": "3.1.1" - }, - "bin": { - "ts-node": "dist/bin.js", - "ts-node-cwd": "dist/bin-cwd.js", - "ts-node-esm": "dist/bin-esm.js", - "ts-node-script": "dist/bin-script.js", - "ts-node-transpile-only": "dist/bin-transpile.js", - "ts-script": "dist/bin-script-deprecated.js" - }, - "peerDependencies": { - "@swc/core": ">=1.2.50", - "@swc/wasm": ">=1.2.50", - "@types/node": "*", - "typescript": ">=2.7" - }, - "peerDependenciesMeta": { - "@swc/core": { - "optional": true - }, - "@swc/wasm": { - "optional": true - } - } - }, - "node_modules/ts-node/node_modules/diff": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", - "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", - "dev": true, - "engines": { - "node": ">=0.3.1" - } - }, "node_modules/tsec": { "version": "0.2.7", "resolved": "https://registry.npmjs.org/tsec/-/tsec-0.2.7.tgz", "integrity": "sha512-Pj9DuBBWLEo8p7QsbrEdXzW/u6QJBcib0ZGOTXkeSDx+PLXFY7hwyZE9Tfhp3TA3LQNpYouyT0WmzXRyUW4otQ==", "dev": true, + "license": "Apache-2.0", "dependencies": { "glob": "^7.1.1", "minimatch": "^3.0.3" @@ -17193,16 +17068,17 @@ } }, "node_modules/tsec/node_modules/glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", "deprecated": "Glob versions prior to v9 are no longer supported", "dev": true, + "license": "ISC", "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", - "minimatch": "^3.0.4", + "minimatch": "^3.1.1", "once": "^1.3.0", "path-is-absolute": "^1.0.0" }, @@ -17633,12 +17509,6 @@ "uuid": "dist/bin/uuid" } }, - "node_modules/v8-compile-cache-lib": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", - "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", - "dev": true - }, "node_modules/v8-inspect-profiler": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/v8-inspect-profiler/-/v8-inspect-profiler-0.1.1.tgz", @@ -18472,15 +18342,6 @@ "node": ">= 4.0.0" } }, - "node_modules/yn": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", - "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", - "dev": true, - "engines": { - "node": ">=6" - } - }, "node_modules/yocto-queue": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", diff --git a/package.json b/package.json index ccc9212544e..5f3c09c187e 100644 --- a/package.json +++ b/package.json @@ -18,7 +18,7 @@ "test-build-scripts": "cd build && npm run test", "preinstall": "node build/npm/preinstall.js", "postinstall": "node build/npm/postinstall.js", - "compile": "node ./node_modules/gulp/bin/gulp.js compile", + "compile": "npm run gulp compile", "compile-check-ts-native": "tsgo --project ./src/tsconfig.json --noEmit --skipLibCheck", "watch": "npm-run-all -lp watch-client watch-extensions", "watchd": "deemon npm run watch", @@ -35,20 +35,20 @@ "kill-watch-extensionsd": "deemon 
--kill npm run watch-extensions", "precommit": "node build/hygiene.mjs", "gulp": "node --max-old-space-size=8192 ./node_modules/gulp/bin/gulp.js", - "electron": "node build/lib/electron", + "electron": "node build/lib/electron.ts", "7z": "7z", "update-grammars": "node build/npm/update-all-grammars.mjs", "update-localization-extension": "node build/npm/update-localization-extension.js", "mixin-telemetry-docs": "node build/npm/mixin-telemetry-docs.mjs", - "smoketest": "node build/lib/preLaunch.js && cd test/smoke && npm run compile && node test/index.js", + "smoketest": "node build/lib/preLaunch.ts && cd test/smoke && npm run compile && node test/index.js", "smoketest-no-compile": "cd test/smoke && node test/index.js", - "download-builtin-extensions": "node build/lib/builtInExtensions.js", - "download-builtin-extensions-cg": "node build/lib/builtInExtensionsCG.js", + "download-builtin-extensions": "node build/lib/builtInExtensions.ts", + "download-builtin-extensions-cg": "node build/lib/builtInExtensionsCG.ts", "monaco-compile-check": "tsgo --project src/tsconfig.monaco.json --noEmit", "tsec-compile-check": "node node_modules/tsec/bin/tsec -p src/tsconfig.tsec.json", "vscode-dts-compile-check": "tsgo --project src/tsconfig.vscode-dts.json && tsgo --project src/tsconfig.vscode-proposed-dts.json", - "valid-layers-check": "node build/checker/layersChecker.js && tsgo --project build/checker/tsconfig.browser.json && tsgo --project build/checker/tsconfig.worker.json && tsgo --project build/checker/tsconfig.node.json && tsgo --project build/checker/tsconfig.electron-browser.json && tsgo --project build/checker/tsconfig.electron-main.json && tsgo --project build/checker/tsconfig.electron-utility.json", - "define-class-fields-check": "node build/lib/propertyInitOrderChecker.js && tsgo --project src/tsconfig.defineClassFields.json", + "valid-layers-check": "node build/checker/layersChecker.ts && tsgo --project build/checker/tsconfig.browser.json && tsgo --project build/checker/tsconfig.worker.json && tsgo --project build/checker/tsconfig.node.json && tsgo --project build/checker/tsconfig.electron-browser.json && tsgo --project build/checker/tsconfig.electron-main.json && tsgo --project build/checker/tsconfig.electron-utility.json", + "define-class-fields-check": "node build/lib/propertyInitOrderChecker.ts && tsgo --project src/tsconfig.defineClassFields.json", "update-distro": "node build/npm/update-distro.mjs", "web": "echo 'npm run web' is replaced by './scripts/code-server' or './scripts/code-web'", "compile-cli": "gulp compile-cli", @@ -211,7 +211,6 @@ "source-map-support": "^0.3.2", "style-loader": "^3.3.2", "ts-loader": "^9.5.1", - "ts-node": "^10.9.1", "tsec": "0.2.7", "tslib": "^2.6.3", "typescript": "^6.0.0-dev.20251110", diff --git a/scripts/code-cli.bat b/scripts/code-cli.bat index f450801965a..e28f03f6cdc 100644 --- a/scripts/code-cli.bat +++ b/scripts/code-cli.bat @@ -6,7 +6,7 @@ title VSCode Dev pushd %~dp0.. 
:: Get electron, compile, built-in extensions -if "%VSCODE_SKIP_PRELAUNCH%"=="" node build/lib/preLaunch.js +if "%VSCODE_SKIP_PRELAUNCH%"=="" node build/lib/preLaunch.ts for /f "tokens=2 delims=:," %%a in ('findstr /R /C:"\"nameShort\":.*" product.json') do set NAMESHORT=%%~a set NAMESHORT=%NAMESHORT: "=% diff --git a/scripts/code-cli.sh b/scripts/code-cli.sh index 3bf8793980d..220c34d1a7e 100755 --- a/scripts/code-cli.sh +++ b/scripts/code-cli.sh @@ -20,7 +20,7 @@ function code() { # Get electron, compile, built-in extensions if [[ -z "${VSCODE_SKIP_PRELAUNCH}" ]]; then - node build/lib/preLaunch.js + node build/lib/preLaunch.ts fi # Manage built-in extensions diff --git a/scripts/code-server.bat b/scripts/code-server.bat index 940926c88ec..4dbc83c0873 100644 --- a/scripts/code-server.bat +++ b/scripts/code-server.bat @@ -12,7 +12,9 @@ set NODE_ENV=development set VSCODE_DEV=1 :: Get electron, compile, built-in extensions -if "%VSCODE_SKIP_PRELAUNCH%"=="" node build/lib/preLaunch.js +if "%VSCODE_SKIP_PRELAUNCH%"=="" ( + node build/lib/preLaunch.ts +) :: Node executable FOR /F "tokens=*" %%g IN ('node build/lib/node.js') do (SET NODE=%%g) diff --git a/scripts/code-server.sh b/scripts/code-server.sh index 6070edf8cd1..59d53726240 100755 --- a/scripts/code-server.sh +++ b/scripts/code-server.sh @@ -12,7 +12,7 @@ function code() { # Get electron, compile, built-in extensions if [[ -z "${VSCODE_SKIP_PRELAUNCH}" ]]; then - node build/lib/preLaunch.js + node build/lib/preLaunch.ts fi NODE=$(node build/lib/node.js) diff --git a/scripts/code.bat b/scripts/code.bat index f102c6e881d..784efeaecaf 100644 --- a/scripts/code.bat +++ b/scripts/code.bat @@ -6,7 +6,9 @@ title VSCode Dev pushd %~dp0\.. :: Get electron, compile, built-in extensions -if "%VSCODE_SKIP_PRELAUNCH%"=="" node build/lib/preLaunch.js +if "%VSCODE_SKIP_PRELAUNCH%"=="" ( + node build/lib/preLaunch.ts +) for /f "tokens=2 delims=:," %%a in ('findstr /R /C:"\"nameShort\":.*" product.json') do set NAMESHORT=%%~a set NAMESHORT=%NAMESHORT: "=% diff --git a/scripts/code.sh b/scripts/code.sh index c29b632cbcb..1ddbfce7d1a 100755 --- a/scripts/code.sh +++ b/scripts/code.sh @@ -26,7 +26,7 @@ function code() { # Get electron, compile, built-in extensions if [[ -z "${VSCODE_SKIP_PRELAUNCH}" ]]; then - node build/lib/preLaunch.js + node build/lib/preLaunch.ts fi # Manage built-in extensions diff --git a/src/typings/vscode-globals-product.d.ts b/src/typings/vscode-globals-product.d.ts index 2cd632e77a0..ab169bd82d0 100644 --- a/src/typings/vscode-globals-product.d.ts +++ b/src/typings/vscode-globals-product.d.ts @@ -27,6 +27,20 @@ declare global { */ var _VSCODE_PACKAGE_JSON: Record; + /** + * Used to disable CSS import map loading during development. Needed + * when a bundler is used that loads the css directly. + * @deprecated Avoid using this variable. + */ + var _VSCODE_DISABLE_CSS_IMPORT_MAP: boolean | undefined; + + /** + * If this variable is set, and the source code references another module + * via import, the (relative) module should be referenced (instead of the + * JS module in the out folder). + * @deprecated Avoid using this variable. 
+ */ + var _VSCODE_USE_RELATIVE_IMPORTS: boolean | undefined; } // fake export to make global work diff --git a/src/vs/base/browser/ui/codicons/codicon/codicon.ttf b/src/vs/base/browser/ui/codicons/codicon/codicon.ttf index 9669eb4807c..d4a3a260215 100644 Binary files a/src/vs/base/browser/ui/codicons/codicon/codicon.ttf and b/src/vs/base/browser/ui/codicons/codicon/codicon.ttf differ diff --git a/src/vs/base/parts/ipc/electron-main/ipcMain.ts b/src/vs/base/parts/ipc/electron-main/ipcMain.ts index ace40529015..0137b8924eb 100644 --- a/src/vs/base/parts/ipc/electron-main/ipcMain.ts +++ b/src/vs/base/parts/ipc/electron-main/ipcMain.ts @@ -128,6 +128,12 @@ class ValidatedIpcMain implements Event.NodeEventEmitter { return false; // unexpected URL } + if (process.env.VSCODE_DEV) { + if (url === process.env.DEV_WINDOW_SRC && (host === 'localhost' || host.startsWith('localhost:'))) { + return true; // development support where the window is served from localhost + } + } + if (host !== VSCODE_AUTHORITY) { onUnexpectedError(`Refused to handle ipcMain event for channel '${channel}' because of a bad origin of '${host}'.`); return false; // unexpected sender diff --git a/src/vs/code/electron-browser/workbench/workbench-dev.html b/src/vs/code/electron-browser/workbench/workbench-dev.html index 1121fc7c047..13ff778a58c 100644 --- a/src/vs/code/electron-browser/workbench/workbench-dev.html +++ b/src/vs/code/electron-browser/workbench/workbench-dev.html @@ -73,5 +73,5 @@ - + diff --git a/src/vs/code/electron-browser/workbench/workbench.ts b/src/vs/code/electron-browser/workbench/workbench.ts index 7d6c8fac0c7..da8713718c7 100644 --- a/src/vs/code/electron-browser/workbench/workbench.ts +++ b/src/vs/code/electron-browser/workbench/workbench.ts @@ -273,7 +273,7 @@ //#region Window Helpers - async function load(esModule: string, options: ILoadOptions): Promise> { + async function load(options: ILoadOptions): Promise> { // Window Configuration from Preload Script const configuration = await resolveWindowConfiguration(); @@ -296,8 +296,14 @@ // ESM Import try { - const result = await import(new URL(`${esModule}.js`, baseUrl).href); + let workbenchUrl: string; + if (!!safeProcess.env['VSCODE_DEV'] && globalThis._VSCODE_USE_RELATIVE_IMPORTS) { + workbenchUrl = '../../../workbench/workbench.desktop.main.js'; // for dev purposes only + } else { + workbenchUrl = new URL(`vs/workbench/workbench.desktop.main.js`, baseUrl).href; + } + const result = await import(workbenchUrl); if (developerDeveloperKeybindingsDisposable && removeDeveloperKeybindingsAfterLoad) { developerDeveloperKeybindingsDisposable(); } @@ -449,6 +455,10 @@ // DEV: a blob URL that loads the CSS via a dynamic @import-rule. 
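Note: the two globals declared above are dev-only escape hatches, so any consumer has to tolerate them being entirely absent in production builds; the guards added to workbench.ts below do exactly that. A small illustrative sketch (the helper name is hypothetical):

```ts
// Hypothetical helper showing the usage pattern for a dev-only global:
// declared as possibly undefined, checked with a truthiness guard, and
// effectively a no-op in built/production code paths.
declare global {
	// eslint-disable-next-line no-var
	var _VSCODE_DISABLE_CSS_IMPORT_MAP: boolean | undefined;
}

export function shouldInstallCssImportMap(): boolean {
	return !globalThis._VSCODE_DISABLE_CSS_IMPORT_MAP;
}
```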
// DEV --------------------------------------------------------------------------------------- + if (globalThis._VSCODE_DISABLE_CSS_IMPORT_MAP) { + return; // disabled in certain development setups + } + if (Array.isArray(configuration.cssModules) && configuration.cssModules.length > 0) { performance.mark('code/willAddCssLoader'); @@ -484,7 +494,7 @@ //#endregion - const { result, configuration } = await load('vs/workbench/workbench.desktop.main', + const { result, configuration } = await load( { configureDeveloperSettings: function (windowConfig) { return { diff --git a/src/vs/editor/browser/services/editorWorkerService.ts b/src/vs/editor/browser/services/editorWorkerService.ts index 0a221ebefaf..211ba9ccbfb 100644 --- a/src/vs/editor/browser/services/editorWorkerService.ts +++ b/src/vs/editor/browser/services/editorWorkerService.ts @@ -75,7 +75,7 @@ export class EditorWorkerService extends Disposable implements IEditorWorkerServ const workerDescriptor = new WebWorkerDescriptor({ esmModuleLocation: () => FileAccess.asBrowserUri('vs/editor/common/services/editorWebWorkerMain.js'), - esmModuleLocationBundler: () => new URL('../../common/services/editorWebWorkerMain.ts?worker', import.meta.url), + esmModuleLocationBundler: () => new URL('../../common/services/editorWebWorkerMain.ts?workerModule', import.meta.url), label: 'editorWorkerService' }); diff --git a/src/vs/editor/contrib/find/browser/findWidgetSearchHistory.ts b/src/vs/editor/contrib/find/browser/findWidgetSearchHistory.ts index c065053c4f5..414d3f51bc4 100644 --- a/src/vs/editor/contrib/find/browser/findWidgetSearchHistory.ts +++ b/src/vs/editor/contrib/find/browser/findWidgetSearchHistory.ts @@ -53,7 +53,7 @@ export class FindWidgetSearchHistory implements IHistory { this.save(); } - forEach(callbackfn: (value: string, value2: string, set: Set) => void, thisArg?: any): void { + forEach(callbackfn: (value: string, value2: string, set: Set) => void, thisArg?: unknown): void { // fetch latest from storage this.load(); return this.inMemoryValues.forEach(callbackfn); diff --git a/src/vs/editor/contrib/find/browser/replaceWidgetHistory.ts b/src/vs/editor/contrib/find/browser/replaceWidgetHistory.ts index a570cc7b9e2..45440ed2909 100644 --- a/src/vs/editor/contrib/find/browser/replaceWidgetHistory.ts +++ b/src/vs/editor/contrib/find/browser/replaceWidgetHistory.ts @@ -53,7 +53,7 @@ export class ReplaceWidgetHistory implements IHistory { this.save(); } - forEach(callbackfn: (value: string, value2: string, set: Set) => void, thisArg?: any): void { + forEach(callbackfn: (value: string, value2: string, set: Set) => void, thisArg?: unknown): void { // fetch latest from storage this.load(); return this.inMemoryValues.forEach(callbackfn); diff --git a/src/vs/editor/contrib/folding/browser/folding.ts b/src/vs/editor/contrib/folding/browser/folding.ts index 6c4a72cc721..c42c01a6bc3 100644 --- a/src/vs/editor/contrib/folding/browser/folding.ts +++ b/src/vs/editor/contrib/folding/browser/folding.ts @@ -613,7 +613,7 @@ interface FoldingArguments { selectionLines?: number[]; } -function foldingArgumentsConstraint(args: any) { +function foldingArgumentsConstraint(args: unknown) { if (!types.isUndefined(args)) { if (!types.isObject(args)) { return false; diff --git a/src/vs/editor/contrib/inlineCompletions/browser/view/inlineEdits/inlineEditsViews/longDistanceHint/inlineEditsLongDistanceHint.ts b/src/vs/editor/contrib/inlineCompletions/browser/view/inlineEdits/inlineEditsViews/longDistanceHint/inlineEditsLongDistanceHint.ts index 
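Note: several hunks above move signatures from `any` to `unknown`, which forces callers to narrow a value before using it instead of silently accepting anything. A sketch of that narrowing pattern for the folding arguments constraint; only `selectionLines` is visible in the hunk, the other field is an assumption for the example.

```ts
// Simplified stand-in for the folding command arguments; only selectionLines
// is taken from the hunk above, the other field is an assumption.
interface FoldingArgumentsSketch {
	levels?: number;
	selectionLines?: number[];
}

// With `unknown`, the compiler forces an explicit narrowing step before any
// property access — which is exactly what a constraint function should do.
function isFoldingArguments(args: unknown): args is FoldingArgumentsSketch | undefined {
	if (args === undefined) {
		return true; // invoking the command without arguments is allowed
	}
	if (typeof args !== 'object' || args === null) {
		return false;
	}
	const candidate = args as Partial<FoldingArgumentsSketch>;
	return (candidate.levels === undefined || typeof candidate.levels === 'number')
		&& (candidate.selectionLines === undefined || Array.isArray(candidate.selectionLines));
}
```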
e1678c6a1e1..bd183afbacc 100644 --- a/src/vs/editor/contrib/inlineCompletions/browser/view/inlineEdits/inlineEditsViews/longDistanceHint/inlineEditsLongDistanceHint.ts +++ b/src/vs/editor/contrib/inlineCompletions/browser/view/inlineEdits/inlineEditsViews/longDistanceHint/inlineEditsLongDistanceHint.ts @@ -28,14 +28,16 @@ import { Point } from '../../../../../../../common/core/2d/point.js'; import { Size2D } from '../../../../../../../common/core/2d/size.js'; import { getMaxTowerHeightInAvailableArea } from '../../utils/towersLayout.js'; import { IThemeService } from '../../../../../../../../platform/theme/common/themeService.js'; +import { IKeybindingService } from '../../../../../../../../platform/keybinding/common/keybinding.js'; import { getEditorBlendedColor, inlineEditIndicatorPrimaryBackground, inlineEditIndicatorSecondaryBackground, inlineEditIndicatorsuccessfulBackground } from '../../theme.js'; -import { asCssVariable, descriptionForeground, editorBackground } from '../../../../../../../../platform/theme/common/colorRegistry.js'; +import { asCssVariable, descriptionForeground, editorBackground, editorWidgetBackground } from '../../../../../../../../platform/theme/common/colorRegistry.js'; import { ILongDistancePreviewProps, LongDistancePreviewEditor } from './longDistancePreviewEditor.js'; import { InlineSuggestionGutterMenuData, SimpleInlineSuggestModel } from '../../components/gutterIndicatorView.js'; +import { jumpToNextInlineEditId } from '../../../../controller/commandIds.js'; const BORDER_RADIUS = 4; -const MAX_WIDGET_WIDTH = 400; -const MIN_WIDGET_WIDTH = 200; +const MAX_WIDGET_WIDTH = { EMPTY_SPACE: 425, OVERLAY: 375 }; +const MIN_WIDGET_WIDTH = 250; export class InlineEditsLongDistanceHint extends Disposable implements IInlineEditsView { @@ -52,6 +54,7 @@ export class InlineEditsLongDistanceHint extends Disposable implements IInlineEd private readonly _tabAction: IObservable, @IInstantiationService private readonly _instantiationService: IInstantiationService, @IThemeService private readonly _themeService: IThemeService, + @IKeybindingService private readonly _keybindingService: IKeybindingService, ) { super(); @@ -191,7 +194,7 @@ export class InlineEditsLongDistanceHint extends Disposable implements IInlineEd const lineNumber = lineSizes.lineRange.startLineNumber + idx; let linePaddingLeft = 20; if (lineNumber === viewState.hint.lineNumber) { - linePaddingLeft = 100; + linePaddingLeft = 40; } return new Size2D(Math.max(0, editorTrueContentWidth - s.width - linePaddingLeft), s.height); }); @@ -230,9 +233,13 @@ export class InlineEditsLongDistanceHint extends Disposable implements IInlineEd const horizontalWidgetRange = OffsetRange.ofStartAndLength(editorTrueContentRight - maxWidth, maxWidth); return { horizontalWidgetRange, verticalWidgetRange }; }); + + let position: 'overlay' | 'empty-space' = 'empty-space'; if (!possibleWidgetOutline) { + position = 'overlay'; + const maxAvailableWidth = Math.min(editorLayout.width - editorLayout.contentLeft, MAX_WIDGET_WIDTH.OVERLAY); possibleWidgetOutline = { - horizontalWidgetRange: OffsetRange.ofStartAndLength(editorTrueContentRight - MAX_WIDGET_WIDTH, MAX_WIDGET_WIDTH), + horizontalWidgetRange: OffsetRange.ofStartAndLength(editorTrueContentRight - maxAvailableWidth, maxAvailableWidth), verticalWidgetRange: getWidgetVerticalOutline(viewState.hint.lineNumber + 2).delta(10), }; } @@ -251,12 +258,12 @@ export class InlineEditsLongDistanceHint extends Disposable implements IInlineEd debugView(debugLogRects({ rectAvailableSpace }, 
this._editor.getDomNode()!), reader); } - const maxWidgetWidth = Math.min(MAX_WIDGET_WIDTH, previewEditorContentLayout.maxEditorWidth + previewEditorMargin + widgetPadding); + const maxWidgetWidth = Math.min(position === 'overlay' ? MAX_WIDGET_WIDTH.OVERLAY : MAX_WIDGET_WIDTH.EMPTY_SPACE, previewEditorContentLayout.maxEditorWidth + previewEditorMargin + widgetPadding); const layout = distributeFlexBoxLayout(rectAvailableSpace.width, { spaceBefore: { min: 0, max: 10, priority: 1 }, content: { min: 50, rules: [{ max: 150, priority: 2 }, { max: maxWidgetWidth, priority: 1 }] }, - spaceAfter: { min: 20 }, + spaceAfter: { min: 10 }, }); if (!layout) { @@ -318,13 +325,14 @@ export class InlineEditsLongDistanceHint extends Disposable implements IInlineEd derived(this, _reader => [this._widgetContent]), ]); - private readonly _widgetContent = derived(this, reader => // TODO how to not use derived but not move into constructor? + private readonly _widgetContent = derived(this, reader => // TODO@hediet: remove when n.div lazily creates previewEditor.element node n.div({ + class: 'inline-edits-long-distance-hint-widget', style: { position: 'absolute', overflow: 'hidden', cursor: 'pointer', - background: 'var(--vscode-editorWidget-background)', + background: asCssVariable(editorWidgetBackground), padding: this._previewEditorLayoutInfo.map(i => i?.widgetPadding), boxSizing: 'border-box', borderRadius: BORDER_RADIUS, @@ -347,7 +355,7 @@ export class InlineEditsLongDistanceHint extends Disposable implements IInlineEd style: { overflow: 'hidden', padding: this._previewEditorLayoutInfo.map(i => i?.previewEditorMargin), - background: 'var(--vscode-editor-background)', + background: asCssVariable(editorBackground), pointerEvents: 'none', }, }, [ @@ -371,7 +379,7 @@ export class InlineEditsLongDistanceHint extends Disposable implements IInlineEd const icon = SymbolKinds.toIcon(item.kind); outlineElements.push(n.div({ class: 'breadcrumb-item', - style: { display: 'flex', alignItems: 'center', flex: '1 1 auto', overflow: 'hidden', textOverflow: 'ellipsis', whiteSpace: 'nowrap' }, + style: { display: 'flex', alignItems: 'center', flex: '1 1 auto', whiteSpace: 'nowrap', overflow: 'hidden', textOverflow: 'ellipsis' }, }, [ renderIcon(icon), '\u00a0', @@ -383,15 +391,20 @@ export class InlineEditsLongDistanceHint extends Disposable implements IInlineEd ])); } } - children.push(n.div({ class: 'outline-elements' }, outlineElements)); + children.push(n.div({ class: 'outline-elements', style: { overflow: 'hidden', textOverflow: 'ellipsis', whiteSpace: 'nowrap' } }, outlineElements)); // Show Edit Direction const arrowIcon = isEditBelowHint(viewState) ? 
Codicon.arrowDown : Codicon.arrowUp; + const keybinding = this._keybindingService.lookupKeybinding(jumpToNextInlineEditId); + let label = 'Go to suggestion'; + if (keybinding && keybinding.getLabel() === 'Tab') { + label = 'Tab to suggestion'; + } children.push(n.div({ class: 'go-to-label', - style: { display: 'flex', alignItems: 'center', flex: '0 0 auto', marginLeft: '14px' }, + style: { position: 'relative', display: 'flex', alignItems: 'center', flex: '0 0 auto', paddingLeft: '6px' }, }, [ - 'Go To Edit', + label, '\u00a0', renderIcon(arrowIcon), ])); diff --git a/src/vs/editor/contrib/inlineCompletions/browser/view/inlineEdits/inlineEditsViews/longDistanceHint/longDistancePreviewEditor.ts b/src/vs/editor/contrib/inlineCompletions/browser/view/inlineEdits/inlineEditsViews/longDistanceHint/longDistancePreviewEditor.ts index f15c0fa1d1f..c09a8537846 100644 --- a/src/vs/editor/contrib/inlineCompletions/browser/view/inlineEdits/inlineEditsViews/longDistanceHint/longDistancePreviewEditor.ts +++ b/src/vs/editor/contrib/inlineCompletions/browser/view/inlineEdits/inlineEditsViews/longDistanceHint/longDistancePreviewEditor.ts @@ -61,6 +61,17 @@ export class LongDistancePreviewEditor extends Disposable { return (state?.mode === 'original' ? decorations?.originalDecorations : decorations?.modifiedDecorations) ?? []; }))); + // Mirror the cursor position. Allows the gutter arrow to point in the correct direction. + this._register(autorun((reader) => { + if (!this._properties.read(reader)) { + return; + } + const cursorPosition = this._parentEditorObs.cursorPosition.read(reader); + if (cursorPosition) { + this.previewEditor.setPosition(this._previewTextModel.validatePosition(cursorPosition), 'longDistanceHintPreview'); + } + })); + this._register(autorun(reader => { const state = this._properties.read(reader); if (!state) { @@ -208,14 +219,13 @@ export class LongDistancePreviewEditor extends Disposable { const firstCharacterChange = state.mode === 'modified' ? diff[0].innerChanges[0].modifiedRange : diff[0].innerChanges[0].originalRange; - // find the horizontal range we want to show. 
- // use 5 characters before the first change, at most 1 indentation - const left = this._previewEditorObs.getLeftOfPosition(firstCharacterChange.getStartPosition(), reader); - const right = this._previewEditorObs.getLeftOfPosition(firstCharacterChange.getEndPosition(), reader); + const preferredRange = growUntilVariableBoundaries(editor.getModel()!, firstCharacterChange, 5); + const left = this._previewEditorObs.getLeftOfPosition(preferredRange.getStartPosition(), reader); + const right = this._previewEditorObs.getLeftOfPosition(preferredRange.getEndPosition(), reader); - const indentCol = editor.getModel()!.getLineFirstNonWhitespaceColumn(firstCharacterChange.startLineNumber); - const indentationEnd = this._previewEditorObs.getLeftOfPosition(new Position(firstCharacterChange.startLineNumber, indentCol), reader); + const indentCol = editor.getModel()!.getLineFirstNonWhitespaceColumn(preferredRange.startLineNumber); + const indentationEnd = this._previewEditorObs.getLeftOfPosition(new Position(preferredRange.startLineNumber, indentCol), reader); const preferredRangeToReveal = new OffsetRange(left, right); @@ -303,3 +313,36 @@ export class LongDistancePreviewEditor extends Disposable { return { originalDecorations, modifiedDecorations }; }); } + +/* + * Grows the range on each ends until it includes a none-variable-name character + * or the next character would be a whitespace character + * or the maxGrow limit is reached + */ +function growUntilVariableBoundaries(textModel: ITextModel, range: Range, maxGrow: number): Range { + const startPosition = range.getStartPosition(); + const endPosition = range.getEndPosition(); + const line = textModel.getLineContent(startPosition.lineNumber); + + function isVariableNameCharacter(col: number): boolean { + const char = line.charAt(col - 1); + return (/[a-zA-Z0-9_]/).test(char); + } + + function isWhitespace(col: number): boolean { + const char = line.charAt(col - 1); + return char === ' ' || char === '\t'; + } + + let startColumn = startPosition.column; + while (startColumn > 1 && isVariableNameCharacter(startColumn) && !isWhitespace(startColumn - 1) && startPosition.column - startColumn < maxGrow) { + startColumn--; + } + + let endColumn = endPosition.column - 1; + while (endColumn <= line.length && isVariableNameCharacter(endColumn) && !isWhitespace(endColumn + 1) && endColumn - endPosition.column < maxGrow) { + endColumn++; + } + + return new Range(startPosition.lineNumber, startPosition.column, endPosition.lineNumber, endColumn + 1); +} diff --git a/src/vs/editor/contrib/inlineCompletions/browser/view/inlineEdits/view.css b/src/vs/editor/contrib/inlineCompletions/browser/view/inlineEdits/view.css index f93e4862603..d8642a02b8c 100644 --- a/src/vs/editor/contrib/inlineCompletions/browser/view/inlineEdits/view.css +++ b/src/vs/editor/contrib/inlineCompletions/browser/view/inlineEdits/view.css @@ -217,3 +217,13 @@ } } } + +.go-to-label::before { + content: ''; + position: absolute; + left: -12px; + top: 0; + width: 12px; + height: 100%; + background: linear-gradient(to left, var(--vscode-editorWidget-background) 0, transparent 12px); +} diff --git a/src/vs/editor/standalone/browser/services/standaloneWebWorkerService.ts b/src/vs/editor/standalone/browser/services/standaloneWebWorkerService.ts index 25e3b9c3231..9e60c93dfde 100644 --- a/src/vs/editor/standalone/browser/services/standaloneWebWorkerService.ts +++ b/src/vs/editor/standalone/browser/services/standaloneWebWorkerService.ts @@ -35,7 +35,7 @@ export class StandaloneWebWorkerService extends 
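Note: `growUntilVariableBoundaries` above widens the changed range so the preview reveals a whole identifier rather than a character fragment. A string-based sketch of the same idea, using 0-based indices instead of the 1-based editor columns and without the `ITextModel` dependency:

```ts
// String-based sketch of the range-growing idea above. Columns are 0-based
// string indices here, unlike the 1-based editor columns in the real helper.
function growToWordBoundaries(line: string, start: number, end: number, maxGrow: number): [number, number] {
	const isWordChar = (i: number) => /[a-zA-Z0-9_]/.test(line.charAt(i));

	let newStart = start;
	while (newStart > 0 && isWordChar(newStart - 1) && start - newStart < maxGrow) {
		newStart--;
	}

	let newEnd = end;
	while (newEnd < line.length && isWordChar(newEnd) && newEnd - end < maxGrow) {
		newEnd++;
	}

	return [newStart, newEnd];
}

// A change covering only 'Var' inside 'someVariableName' grows toward the
// identifier boundaries, capped at maxGrow characters per side: [0, 12].
console.log(growToWordBoundaries('someVariableName = 1', 4, 7, 5));
```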
WebWorkerService { } if (!descriptor.esmModuleLocationBundler) { - throw new Error(`You must define a function MonacoEnvironment.getWorkerUrl or MonacoEnvironment.getWorker`); + throw new Error(`You must define a function MonacoEnvironment.getWorkerUrl or MonacoEnvironment.getWorker for the worker label: ${descriptor.label}`); } const url = typeof descriptor.esmModuleLocationBundler === 'function' ? descriptor.esmModuleLocationBundler() : descriptor.esmModuleLocationBundler; diff --git a/src/vs/platform/keyboardLayout/common/keyboardConfig.ts b/src/vs/platform/keyboardLayout/common/keyboardConfig.ts index 67be262ced3..1cfcb863d90 100644 --- a/src/vs/platform/keyboardLayout/common/keyboardConfig.ts +++ b/src/vs/platform/keyboardLayout/common/keyboardConfig.ts @@ -20,7 +20,7 @@ export interface IKeyboardConfig { } export function readKeyboardConfig(configurationService: IConfigurationService): IKeyboardConfig { - const keyboard = configurationService.getValue<{ dispatch: any; mapAltGrToCtrlAlt: any } | undefined>('keyboard'); + const keyboard = configurationService.getValue<{ dispatch: string; mapAltGrToCtrlAlt: boolean } | undefined>('keyboard'); const dispatch = (keyboard?.dispatch === 'keyCode' ? DispatchConfig.KeyCode : DispatchConfig.Code); const mapAltGrToCtrlAlt = Boolean(keyboard?.mapAltGrToCtrlAlt); return { dispatch, mapAltGrToCtrlAlt }; diff --git a/src/vs/platform/webWorker/browser/webWorkerDescriptor.ts b/src/vs/platform/webWorker/browser/webWorkerDescriptor.ts index 5deeaeba084..dae19f87750 100644 --- a/src/vs/platform/webWorker/browser/webWorkerDescriptor.ts +++ b/src/vs/platform/webWorker/browser/webWorkerDescriptor.ts @@ -13,7 +13,7 @@ export class WebWorkerDescriptor { constructor(args: { /** The location of the esm module after transpilation */ esmModuleLocation?: URI | (() => URI); - /** The location of the esm module when used in a bundler environment. Refer to the typescript file in the src folder and use `?worker`. */ + /** The location of the esm module when used in a bundler environment. Refer to the typescript file in the src folder and use `?workerModule`. */ esmModuleLocationBundler?: URL | (() => URL); label: string; }) { diff --git a/src/vs/platform/windows/electron-main/windowImpl.ts b/src/vs/platform/windows/electron-main/windowImpl.ts index 67c832b0ad1..7a5a81088b5 100644 --- a/src/vs/platform/windows/electron-main/windowImpl.ts +++ b/src/vs/platform/windows/electron-main/windowImpl.ts @@ -1158,7 +1158,13 @@ export class CodeWindow extends BaseWindow implements ICodeWindow { this.readyState = ReadyState.NAVIGATING; // Load URL - this._win.loadURL(FileAccess.asBrowserUri(`vs/code/electron-browser/workbench/workbench${this.environmentMainService.isBuilt ? '' : '-dev'}.html`).toString(true)); + let windowUrl: string; + if (process.env.VSCODE_DEV && process.env.VSCODE_DEV_SERVER_URL) { + windowUrl = process.env.VSCODE_DEV_SERVER_URL; // support URL override for development + } else { + windowUrl = FileAccess.asBrowserUri(`vs/code/electron-browser/workbench/workbench${this.environmentMainService.isBuilt ? 
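Note: the keyboard configuration hunk above tightens the `getValue` generic from `any` to concrete types, but the generic is a compile-time assertion only — settings values are not validated at runtime — so the explicit normalization (`=== 'keyCode'`, `Boolean(...)`) still matters. A sketch with a simplified service interface:

```ts
// Simplified stand-in for IConfigurationService; only the piece needed here.
interface ConfigurationReader {
	getValue<T>(key: string): T;
}

type Dispatch = 'code' | 'keyCode'; // simplified stand-in for DispatchConfig

function readKeyboardConfigSketch(configuration: ConfigurationReader): { dispatch: Dispatch; mapAltGrToCtrlAlt: boolean } {
	const keyboard = configuration.getValue<{ dispatch: string; mapAltGrToCtrlAlt: boolean } | undefined>('keyboard');
	// The generic does not validate user settings at runtime, so normalize:
	// anything other than the literal 'keyCode' falls back to 'code', and
	// mapAltGrToCtrlAlt is coerced to a real boolean.
	const dispatch: Dispatch = keyboard?.dispatch === 'keyCode' ? 'keyCode' : 'code';
	const mapAltGrToCtrlAlt = Boolean(keyboard?.mapAltGrToCtrlAlt);
	return { dispatch, mapAltGrToCtrlAlt };
}
```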
'' : '-dev'}.html`).toString(true); + } + this._win.loadURL(windowUrl); // Remember that we did load const wasLoaded = this.wasLoaded; diff --git a/src/vs/workbench/api/browser/mainThreadChatCodeMapper.ts b/src/vs/workbench/api/browser/mainThreadChatCodeMapper.ts index 4f15773352b..dad9a26c43a 100644 --- a/src/vs/workbench/api/browser/mainThreadChatCodeMapper.ts +++ b/src/vs/workbench/api/browser/mainThreadChatCodeMapper.ts @@ -38,7 +38,7 @@ export class MainThreadChatCodemapper extends Disposable implements MainThreadCo codeBlocks: uiRequest.codeBlocks, chatRequestId: uiRequest.chatRequestId, chatRequestModel: uiRequest.chatRequestModel, - chatSessionId: uiRequest.chatSessionId, + chatSessionResource: uiRequest.chatSessionResource, location: uiRequest.location }; try { diff --git a/src/vs/workbench/api/common/extHost.protocol.ts b/src/vs/workbench/api/common/extHost.protocol.ts index a816b1dbbbe..0e79e14f5b0 100644 --- a/src/vs/workbench/api/common/extHost.protocol.ts +++ b/src/vs/workbench/api/common/extHost.protocol.ts @@ -1713,7 +1713,7 @@ export interface SCMHistoryItemDto { readonly deletions: number; }; readonly references?: SCMHistoryItemRefDto[]; - readonly tooltip?: string | IMarkdownString | undefined; + readonly tooltip?: IMarkdownString | Array | undefined; } export interface SCMHistoryItemChangeDto { diff --git a/src/vs/workbench/api/common/extHostCodeMapper.ts b/src/vs/workbench/api/common/extHostCodeMapper.ts index 5e22a066b8e..179defae065 100644 --- a/src/vs/workbench/api/common/extHostCodeMapper.ts +++ b/src/vs/workbench/api/common/extHostCodeMapper.ts @@ -11,6 +11,7 @@ import * as extHostProtocol from './extHost.protocol.js'; import { NotebookEdit, TextEdit } from './extHostTypeConverters.js'; import { URI } from '../../../base/common/uri.js'; import { asArray } from '../../../base/common/arrays.js'; +import { LocalChatSessionUri } from '../../contrib/chat/common/chatUri.js'; export class ExtHostCodeMapper implements extHostProtocol.ExtHostCodeMapperShape { @@ -53,7 +54,7 @@ export class ExtHostCodeMapper implements extHostProtocol.ExtHostCodeMapperShape location: internalRequest.location, chatRequestId: internalRequest.chatRequestId, chatRequestModel: internalRequest.chatRequestModel, - chatSessionId: internalRequest.chatSessionId, + chatSessionId: internalRequest.chatSessionResource ? LocalChatSessionUri.parseLocalSessionId(URI.revive(internalRequest.chatSessionResource)) : undefined, codeBlocks: internalRequest.codeBlocks.map(block => { return { code: block.code, diff --git a/src/vs/workbench/api/common/extHostSCM.ts b/src/vs/workbench/api/common/extHostSCM.ts index c69172d978f..c435fdc2f8b 100644 --- a/src/vs/workbench/api/common/extHostSCM.ts +++ b/src/vs/workbench/api/common/extHostSCM.ts @@ -75,7 +75,9 @@ function getHistoryItemIconDto(icon: vscode.Uri | { light: vscode.Uri; dark: vsc function toSCMHistoryItemDto(historyItem: vscode.SourceControlHistoryItem): SCMHistoryItemDto { const authorIcon = getHistoryItemIconDto(historyItem.authorIcon); - const tooltip = MarkdownString.fromStrict(historyItem.tooltip); + const tooltip = Array.isArray(historyItem.tooltip) + ? MarkdownString.fromMany(historyItem.tooltip) + : historyItem.tooltip ? 
MarkdownString.from(historyItem.tooltip) : undefined; const references = historyItem.references?.map(r => ({ ...r, icon: getHistoryItemIconDto(r.icon) diff --git a/src/vs/workbench/browser/parts/editor/editorConfiguration.ts b/src/vs/workbench/browser/parts/editor/editorConfiguration.ts index 924011d6f3c..20a5b9d355a 100644 --- a/src/vs/workbench/browser/parts/editor/editorConfiguration.ts +++ b/src/vs/workbench/browser/parts/editor/editorConfiguration.ts @@ -25,7 +25,6 @@ export class DynamicEditorConfigurations extends Disposable implements IWorkbenc 'terminalEditor', 'mainThreadWebview-simpleBrowser.view', 'mainThreadWebview-browserPreview', - 'workbench.editor.chatSession', 'workbench.editor.processExplorer' ]); diff --git a/src/vs/workbench/contrib/bulkEdit/browser/opaqueEdits.ts b/src/vs/workbench/contrib/bulkEdit/browser/opaqueEdits.ts index a8615ee859e..21e16a8e7a8 100644 --- a/src/vs/workbench/contrib/bulkEdit/browser/opaqueEdits.ts +++ b/src/vs/workbench/contrib/bulkEdit/browser/opaqueEdits.ts @@ -13,7 +13,7 @@ import { IUndoRedoService, UndoRedoElementType, UndoRedoGroup, UndoRedoSource } export class ResourceAttachmentEdit extends ResourceEdit implements ICustomEdit { - static is(candidate: any): candidate is ICustomEdit { + static is(candidate: unknown): candidate is ICustomEdit { if (candidate instanceof ResourceAttachmentEdit) { return true; } else { diff --git a/src/vs/workbench/contrib/chat/browser/actions/chatActions.ts b/src/vs/workbench/contrib/chat/browser/actions/chatActions.ts index ab07d381d89..2ccecb2b96d 100644 --- a/src/vs/workbench/contrib/chat/browser/actions/chatActions.ts +++ b/src/vs/workbench/contrib/chat/browser/actions/chatActions.ts @@ -516,8 +516,8 @@ export function registerChatActions() { ContextKeyExpr.equals('view', ChatViewId), ChatContextKeys.inEmptyStateWithHistoryEnabled.negate() ), - group: 'navigation', - order: 2 + group: '2_history', + order: 1 }, { id: MenuId.EditorTitle, diff --git a/src/vs/workbench/contrib/chat/browser/actions/chatSessionActions.ts b/src/vs/workbench/contrib/chat/browser/actions/chatSessionActions.ts index 3beb481833c..c3b1f8195f6 100644 --- a/src/vs/workbench/contrib/chat/browser/actions/chatSessionActions.ts +++ b/src/vs/workbench/contrib/chat/browser/actions/chatSessionActions.ts @@ -441,7 +441,10 @@ MenuRegistry.appendMenuItem(MenuId.ChatSessionsMenu, { }, group: 'inline', order: 1, - when: ChatContextKeys.sessionType.isEqualTo(localChatSessionType) + when: ContextKeyExpr.and( + ChatContextKeys.sessionType.isEqualTo(localChatSessionType), + ChatContextKeys.isCombinedSessionViewer.negate() + ) }); // Register delete menu item - only show for non-active sessions (history items) diff --git a/src/vs/workbench/contrib/chat/browser/actions/codeBlockOperations.ts b/src/vs/workbench/contrib/chat/browser/actions/codeBlockOperations.ts index ba1f56252c9..fd6be070247 100644 --- a/src/vs/workbench/contrib/chat/browser/actions/codeBlockOperations.ts +++ b/src/vs/workbench/contrib/chat/browser/actions/codeBlockOperations.ts @@ -36,7 +36,6 @@ import { CellKind, ICellEditOperation, NOTEBOOK_EDITOR_ID } from '../../../noteb import { INotebookService } from '../../../notebook/common/notebookService.js'; import { ICodeMapperCodeBlock, ICodeMapperRequest, ICodeMapperResponse, ICodeMapperService } from '../../common/chatCodeMapperService.js'; import { ChatUserAction, IChatService } from '../../common/chatService.js'; -import { chatSessionResourceToId } from '../../common/chatUri.js'; import { IChatRequestViewModel, isRequestVM, 
isResponseVM } from '../../common/chatViewModel.js'; import { ICodeBlockActionContext } from '../codeBlockPart.js'; @@ -342,7 +341,7 @@ export class ApplyCodeBlockOperation { return new AsyncIterableObject(async executor => { const request: ICodeMapperRequest = { codeBlocks: [codeBlock], - chatSessionId: chatSessionResource && chatSessionResourceToId(chatSessionResource), + chatSessionResource, }; const response: ICodeMapperResponse = { textEdit: (target: URI, edit: TextEdit[]) => { @@ -363,7 +362,7 @@ export class ApplyCodeBlockOperation { return new AsyncIterableObject<[URI, TextEdit[]] | ICellEditOperation[]>(async executor => { const request: ICodeMapperRequest = { codeBlocks: [codeBlock], - chatSessionId: chatSessionResource && chatSessionResourceToId(chatSessionResource), + chatSessionResource, location: 'panel' }; const response: ICodeMapperResponse = { diff --git a/src/vs/workbench/contrib/chat/browser/agentSessions/agentSessionsView.ts b/src/vs/workbench/contrib/chat/browser/agentSessions/agentSessionsView.ts index 766335bd7f4..913bd2a6a7d 100644 --- a/src/vs/workbench/contrib/chat/browser/agentSessions/agentSessionsView.ts +++ b/src/vs/workbench/contrib/chat/browser/agentSessions/agentSessionsView.ts @@ -44,13 +44,14 @@ import { Event } from '../../../../../base/common/event.js'; import { MutableDisposable } from '../../../../../base/common/lifecycle.js'; import { ITreeContextMenuEvent } from '../../../../../base/browser/ui/tree/tree.js'; import { MarshalledId } from '../../../../../base/common/marshallingIds.js'; -import { getActionBarActions } from '../../../../../platform/actions/browser/menuEntryActionViewItem.js'; +import { getActionBarActions, getFlatActionBarActions } from '../../../../../platform/actions/browser/menuEntryActionViewItem.js'; import { IChatService } from '../../common/chatService.js'; import { IChatWidgetService } from '../chat.js'; import { AGENT_SESSIONS_VIEW_ID, AGENT_SESSIONS_VIEW_CONTAINER_ID, AgentSessionProviders } from './agentSessions.js'; import { TreeFindMode } from '../../../../../base/browser/ui/tree/abstractTree.js'; import { SIDE_GROUP } from '../../../../services/editor/common/editorService.js'; import { IMarshalledChatSessionContext } from '../actions/chatSessionActions.js'; +import { distinct } from '../../../../../base/common/arrays.js'; export class AgentSessionsView extends ViewPane { @@ -158,18 +159,13 @@ export class AgentSessionsView extends ViewPane { } const provider = await this.chatSessionsService.activateChatSessionItemProvider(session.providerType); - - const menu = this.menuService.createMenu(MenuId.ChatSessionsMenu, this.contextKeyService.createOverlay(getSessionItemContextOverlay( - session, - provider, - this.chatWidgetService, - this.chatService, - this.editorGroupsService - ))); + const contextOverlay = getSessionItemContextOverlay(session, provider, this.chatWidgetService, this.chatService, this.editorGroupsService); + contextOverlay.push([ChatContextKeys.isCombinedSessionViewer.key, true]); + const menu = this.menuService.createMenu(MenuId.ChatSessionsMenu, this.contextKeyService.createOverlay(contextOverlay)); const marshalledSession: IMarshalledChatSessionContext = { session, $mid: MarshalledId.ChatSessionContext }; - const { secondary } = getActionBarActions(menu.getActions({ arg: marshalledSession, shouldForwardArgs: true }), 'inline'); this.contextMenuService.showContextMenu({ - getActions: () => secondary, + this.contextMenuService.showContextMenu({ + getActions: () => 
distinct(getFlatActionBarActions(menu.getActions({ arg: marshalledSession, shouldForwardArgs: true })), action => action.id), getAnchor: () => anchor, getActionsContext: () => marshalledSession, }); diff --git a/src/vs/workbench/contrib/chat/browser/agentSessions/agentSessionsViewer.ts b/src/vs/workbench/contrib/chat/browser/agentSessions/agentSessionsViewer.ts index b9140f2095f..cbd6058efd6 100644 --- a/src/vs/workbench/contrib/chat/browser/agentSessions/agentSessionsViewer.ts +++ b/src/vs/workbench/contrib/chat/browser/agentSessions/agentSessionsViewer.ts @@ -164,15 +164,8 @@ export class AgentSessionRenderer implements ICompressibleTreeRenderer, template: IAgentSessionItemTemplate): void { - // In progress: show duration - if (session.element.status === ChatSessionStatus.InProgress) { - template.description.textContent = this.getInProgressDescription(session.element); - const timer = template.elementDisposable.add(new IntervalTimer()); - timer.cancelAndSet(() => template.description.textContent = this.getInProgressDescription(session.element), 1000 /* every second */); - } - - // Otherwise support description as string - else if (typeof session.element.description === 'string') { + // Support description as string + if (typeof session.element.description === 'string') { template.description.textContent = session.element.description; } @@ -191,7 +184,9 @@ export class AgentSessionRenderer implements ICompressibleTreeRenderer session.element.timing.inProgressTime @@ -209,17 +204,6 @@ export class AgentSessionRenderer implements ICompressibleTreeRenderer, template: IAgentSessionItemTemplate): void { - const getStatus = (session: IAgentSessionViewModel) => `${session.providerLabel} • ${fromNow(session.timing.endTime || session.timing.startTime)}`; + + const getStatus = (session: IAgentSessionViewModel) => { + let timeLabel: string | undefined; + if (session.status === ChatSessionStatus.InProgress && session.timing.inProgressTime) { + timeLabel = this.toDuration(session.timing.inProgressTime, Date.now()); + } + + if (!timeLabel) { + timeLabel = fromNow(session.timing.endTime || session.timing.startTime, true); + } + return `${session.providerLabel} • ${timeLabel}`; + }; template.status.textContent = getStatus(session.element); const timer = template.elementDisposable.add(new IntervalTimer()); - timer.cancelAndSet(() => template.status.textContent = getStatus(session.element), 60 * 1000 /* every minute */); + timer.cancelAndSet(() => template.status.textContent = getStatus(session.element), session.element.status === ChatSessionStatus.InProgress ? 
1000 /* every second */ : 60 * 1000 /* every minute */); } private renderHover(session: ITreeNode, template: IAgentSessionItemTemplate): void { diff --git a/src/vs/workbench/contrib/chat/browser/agentSessions/media/agentsessionsactions.css b/src/vs/workbench/contrib/chat/browser/agentSessions/media/agentsessionsactions.css index 9d5a967ce23..85e5adecffb 100644 --- a/src/vs/workbench/contrib/chat/browser/agentSessions/media/agentsessionsactions.css +++ b/src/vs/workbench/contrib/chat/browser/agentSessions/media/agentsessionsactions.css @@ -15,6 +15,7 @@ display: flex; gap: 4px; padding: 0 4px; /* to make space for hover effect */ + font-variant-numeric: tabular-nums; } .agent-session-diff-files { diff --git a/src/vs/workbench/contrib/chat/browser/agentSessions/media/agentsessionsviewer.css b/src/vs/workbench/contrib/chat/browser/agentSessions/media/agentsessionsviewer.css index 16017adb600..25a25fd83b1 100644 --- a/src/vs/workbench/contrib/chat/browser/agentSessions/media/agentsessionsviewer.css +++ b/src/vs/workbench/contrib/chat/browser/agentSessions/media/agentsessionsviewer.css @@ -90,6 +90,7 @@ .agent-session-status { padding: 0 4px 0 0; /* to align with diff area above */ + font-variant-numeric: tabular-nums; } } } diff --git a/src/vs/workbench/contrib/chat/browser/chat.contribution.ts b/src/vs/workbench/contrib/chat/browser/chat.contribution.ts index 5b8ac17f346..0251b256d13 100644 --- a/src/vs/workbench/contrib/chat/browser/chat.contribution.ts +++ b/src/vs/workbench/contrib/chat/browser/chat.contribution.ts @@ -529,7 +529,7 @@ configurationRegistry.registerConfiguration({ type: 'string', enum: ['disabled', 'view', 'single-view'], description: nls.localize('chat.sessionsViewLocation.description', "Controls where to show the agent sessions menu."), - default: 'view', + default: product.quality === 'stable' ? 'view' : 'single-view', tags: ['experimental'], experiment: { mode: 'auto' diff --git a/src/vs/workbench/contrib/chat/browser/chatAttachmentWidgets.ts b/src/vs/workbench/contrib/chat/browser/chatAttachmentWidgets.ts index 20aae8999bb..b13754a0914 100644 --- a/src/vs/workbench/contrib/chat/browser/chatAttachmentWidgets.ts +++ b/src/vs/workbench/contrib/chat/browser/chatAttachmentWidgets.ts @@ -42,6 +42,7 @@ import { FileKind, IFileService } from '../../../../platform/files/common/files. 
import { IHoverService } from '../../../../platform/hover/browser/hover.js'; import { IInstantiationService, ServicesAccessor } from '../../../../platform/instantiation/common/instantiation.js'; import { ILabelService } from '../../../../platform/label/common/label.js'; +import { IMarkdownRendererService } from '../../../../platform/markdown/browser/markdownRenderer.js'; import { IOpenerService, OpenInternalOptions } from '../../../../platform/opener/common/opener.js'; import { FolderThemeIcon, IThemeService } from '../../../../platform/theme/common/themeService.js'; import { fillEditorsDragData } from '../../../browser/dnd.js'; @@ -52,6 +53,7 @@ import { IPreferencesService } from '../../../services/preferences/common/prefer import { revealInSideBarCommand } from '../../files/browser/fileActions.contribution.js'; import { CellUri } from '../../notebook/common/notebookCommon.js'; import { INotebookService } from '../../notebook/common/notebookService.js'; +import { toHistoryItemHoverContent } from '../../scm/browser/scmHistory.js'; import { getHistoryItemEditorTitle } from '../../scm/browser/util.js'; import { ITerminalService } from '../../terminal/browser/terminal.js'; import { IChatContentReference } from '../common/chatService.js'; @@ -913,6 +915,7 @@ export class SCMHistoryItemAttachmentWidget extends AbstractChatAttachmentWidget container: HTMLElement, contextResourceLabels: ResourceLabels, @ICommandService commandService: ICommandService, + @IMarkdownRendererService markdownRendererService: IMarkdownRendererService, @IHoverService hoverService: IHoverService, @IOpenerService openerService: IOpenerService, @IThemeService themeService: IThemeService @@ -924,12 +927,12 @@ export class SCMHistoryItemAttachmentWidget extends AbstractChatAttachmentWidget this.element.style.cursor = 'pointer'; this.element.ariaLabel = localize('chat.attachment', "Attached context, {0}", attachment.name); - const historyItem = attachment.historyItem; - const hoverContent = historyItem.tooltip ?? historyItem.message; + const { content, disposables } = toHistoryItemHoverContent(markdownRendererService, attachment.historyItem, false); this._store.add(hoverService.setupDelayedHover(this.element, { ...commonHoverOptions, - content: hoverContent, + content, }, commonHoverLifecycleOptions)); + this._store.add(disposables); this._store.add(dom.addDisposableListener(this.element, dom.EventType.CLICK, (e: MouseEvent) => { dom.EventHelper.stop(e, true); @@ -963,6 +966,7 @@ export class SCMHistoryItemChangeAttachmentWidget extends AbstractChatAttachment contextResourceLabels: ResourceLabels, @ICommandService commandService: ICommandService, @IHoverService hoverService: IHoverService, + @IMarkdownRendererService markdownRendererService: IMarkdownRendererService, @IOpenerService openerService: IOpenerService, @IThemeService themeService: IThemeService, @IEditorService private readonly editorService: IEditorService, @@ -974,12 +978,11 @@ export class SCMHistoryItemChangeAttachmentWidget extends AbstractChatAttachment this.element.ariaLabel = localize('chat.attachment', "Attached context, {0}", attachment.name); - const historyItem = attachment.historyItem; - const hoverContent = historyItem.tooltip ?? 
historyItem.message; + const { content, disposables } = toHistoryItemHoverContent(markdownRendererService, attachment.historyItem, false); this._store.add(hoverService.setupDelayedHover(this.element, { - ...commonHoverOptions, - content: hoverContent, + ...commonHoverOptions, content, }, commonHoverLifecycleOptions)); + this._store.add(disposables); this.addResourceOpenHandlers(attachment.value, undefined); this.attachClearButton(); diff --git a/src/vs/workbench/contrib/chat/browser/chatContentParts/chatMarkdownContentPart.ts b/src/vs/workbench/contrib/chat/browser/chatContentParts/chatMarkdownContentPart.ts index 5da098ad61a..f933b7e83a2 100644 --- a/src/vs/workbench/contrib/chat/browser/chatContentParts/chatMarkdownContentPart.ts +++ b/src/vs/workbench/contrib/chat/browser/chatContentParts/chatMarkdownContentPart.ts @@ -79,6 +79,10 @@ export interface IChatMarkdownContentPartOptions { }; } +interface IMarkdownPartCodeBlockInfo extends IChatCodeBlockInfo { + isStreamingEdit: boolean; +} + export class ChatMarkdownContentPart extends Disposable implements IChatContentPart { private static ID_POOL = 0; @@ -91,7 +95,10 @@ export class ChatMarkdownContentPart extends Disposable implements IChatContentP private readonly _onDidChangeHeight = this._register(new Emitter()); readonly onDidChangeHeight = this._onDidChangeHeight.event; - readonly codeblocks: IChatCodeBlockInfo[] = []; + private readonly _codeblocks: IMarkdownPartCodeBlockInfo[] = []; + public get codeblocks(): IChatCodeBlockInfo[] { + return this._codeblocks; + } private readonly mathLayoutParticipants = new Set<() => void>(); @@ -247,12 +254,13 @@ export class ChatMarkdownContentPart extends Disposable implements IChatContentP this._register(ref.object.onDidChangeContentHeight(() => this._onDidChangeHeight.fire())); const ownerMarkdownPartId = this.codeblocksPartId; - const info: IChatCodeBlockInfo = new class implements IChatCodeBlockInfo { + const info: IMarkdownPartCodeBlockInfo = new class implements IMarkdownPartCodeBlockInfo { readonly ownerMarkdownPartId = ownerMarkdownPartId; readonly codeBlockIndex = globalIndex; readonly elementId = element.id; readonly chatSessionResource = element.sessionResource; readonly languageId = languageId; + readonly isStreamingEdit = false; readonly editDeltaInfo = EditDeltaInfo.fromText(text); codemapperUri = undefined; // will be set async get uri() { @@ -265,7 +273,7 @@ export class ChatMarkdownContentPart extends Disposable implements IChatContentP ref.object.focus(); } }(); - this.codeblocks.push(info); + this._codeblocks.push(info); orderedDisposablesList.push(ref); return ref.object.element; } else { @@ -275,18 +283,19 @@ export class ChatMarkdownContentPart extends Disposable implements IChatContentP // TODO@joyceerhl: remove this code when we change the codeblockUri API to make the URI available synchronously this.codeBlockModelCollection.update(codeBlockInfo.element.sessionResource, codeBlockInfo.element, codeBlockInfo.codeBlockIndex, { text, languageId: codeBlockInfo.languageId, isComplete: isCodeBlockComplete }).then((e) => { // Update the existing object's codemapperUri - this.codeblocks[codeBlockInfo.codeBlockPartIndex].codemapperUri = e.codemapperUri; + this._codeblocks[codeBlockInfo.codeBlockPartIndex].codemapperUri = e.codemapperUri; this._onDidChangeHeight.fire(); }); } this.allRefs.push(ref); const ownerMarkdownPartId = this.codeblocksPartId; - const info: IChatCodeBlockInfo = new class implements IChatCodeBlockInfo { + const info: IMarkdownPartCodeBlockInfo = new class 
implements IMarkdownPartCodeBlockInfo { readonly ownerMarkdownPartId = ownerMarkdownPartId; readonly codeBlockIndex = globalIndex; readonly elementId = element.id; readonly codemapperUri = codeblockEntry?.codemapperUri; readonly chatSessionResource = element.sessionResource; + readonly isStreamingEdit = !isCodeBlockComplete; get uri() { return undefined; } @@ -297,7 +306,7 @@ export class ChatMarkdownContentPart extends Disposable implements IChatContentP readonly languageId = languageId; readonly editDeltaInfo = EditDeltaInfo.fromText(text); }(); - this.codeblocks.push(info); + this._codeblocks.push(info); orderedDisposablesList.push(ref); return ref.object.element; } @@ -310,7 +319,7 @@ export class ChatMarkdownContentPart extends Disposable implements IChatContentP // Ideally this would happen earlier, but we need to parse the markdown. if (isResponseVM(element) && !element.model.codeBlockInfos && element.model.isComplete) { - element.model.initializeCodeBlockInfos(this.codeblocks.map(info => { + element.model.initializeCodeBlockInfos(this._codeblocks.map(info => { return { suggestionId: this.aiEditTelemetryService.createSuggestionId({ presentation: 'codeBlock', @@ -391,7 +400,7 @@ export class ChatMarkdownContentPart extends Disposable implements IChatContentP if (isResponseVM(data.element)) { this.codeBlockModelCollection.update(data.element.sessionResource, data.element, data.codeBlockIndex, { text, languageId: data.languageId, isComplete }).then((e) => { // Update the existing object's codemapperUri - this.codeblocks[data.codeBlockPartIndex].codemapperUri = e.codemapperUri; + this._codeblocks[data.codeBlockPartIndex].codemapperUri = e.codemapperUri; this._onDidChangeHeight.fire(); }); } @@ -404,8 +413,21 @@ export class ChatMarkdownContentPart extends Disposable implements IChatContentP } hasSameContent(other: IChatProgressRenderableResponseContent): boolean { - return other.kind === 'markdownContent' && !!(other.content.value === this.markdown.content.value - || this.codeblocks.at(-1)?.codemapperUri !== undefined && other.content.value.lastIndexOf('```') === this.markdown.content.value.lastIndexOf('```')); + if (other.kind !== 'markdownContent') { + return false; + } + + if (other.content.value === this.markdown.content.value) { + return true; + } + + // If we are streaming in code shown in an edit pill, do not re-render the entire content as long as it's coming in + const lastCodeblock = this._codeblocks.at(-1); + if (lastCodeblock && lastCodeblock.codemapperUri !== undefined && lastCodeblock.isStreamingEdit) { + return other.content.value.lastIndexOf('```') === this.markdown.content.value.lastIndexOf('```'); + } + + return false; } layout(width: number): void { @@ -415,7 +437,7 @@ export class ChatMarkdownContentPart extends Disposable implements IChatContentP } else if (ref.object instanceof MarkdownDiffBlockPart) { ref.object.layout(width); } else if (ref.object instanceof CollapsedCodeBlock) { - const codeblockModel = this.codeblocks[index]; + const codeblockModel = this._codeblocks[index]; if (codeblockModel.codemapperUri && ref.object.uri?.toString() !== codeblockModel.codemapperUri.toString()) { ref.object.render(codeblockModel.codemapperUri); } diff --git a/src/vs/workbench/contrib/chat/browser/chatEditing/chatEditingModifiedNotebookEntry.ts b/src/vs/workbench/contrib/chat/browser/chatEditing/chatEditingModifiedNotebookEntry.ts index 963ae5404f8..38b6b9b6f7a 100644 --- a/src/vs/workbench/contrib/chat/browser/chatEditing/chatEditingModifiedNotebookEntry.ts +++ 
b/src/vs/workbench/contrib/chat/browser/chatEditing/chatEditingModifiedNotebookEntry.ts @@ -389,16 +389,7 @@ export class ChatEditingModifiedNotebookEntry extends AbstractChatEditingModifie break; } case NotebookCellsChangeType.OutputItem: { - const index = getCorrespondingOriginalCellIndex(event.index, this._cellsDiffInfo.get()); - if (typeof index === 'number') { - const edit: ICellEditOperation = { - editType: CellEditType.OutputItems, - outputId: event.outputId, - append: event.append, - items: event.outputItems - }; - this.originalModel.applyEdits([edit], true, undefined, () => undefined, undefined, false); - } + // outputs are shared between original and modified model, so the original model is already updated. break; } case NotebookCellsChangeType.Move: { diff --git a/src/vs/workbench/contrib/chat/browser/chatInputPart.ts b/src/vs/workbench/contrib/chat/browser/chatInputPart.ts index 7b26251eccd..ebe5adda62a 100644 --- a/src/vs/workbench/contrib/chat/browser/chatInputPart.ts +++ b/src/vs/workbench/contrib/chat/browser/chatInputPart.ts @@ -529,6 +529,7 @@ export class ChatInputPart extends Disposable implements IHistoryNavigationWidge this._register(this.chatModeService.onDidChangeChatModes(() => this.validateCurrentChatMode())); this._register(autorun(r => { const mode = this._currentModeObservable.read(r); + this.chatModeKindKey.set(mode.kind); const model = mode.model?.read(r); if (model) { this.switchModelByQualifiedName(model); @@ -837,7 +838,6 @@ export class ChatInputPart extends Disposable implements IHistoryNavigationWidge } this._currentModeObservable.set(mode, undefined); - this.chatModeKindKey.set(mode.kind); this._onDidChangeCurrentChatMode.fire(); // Sync to model (mode is now persisted in the model's input state) diff --git a/src/vs/workbench/contrib/chat/browser/chatManagement/chatManagementEditor.ts b/src/vs/workbench/contrib/chat/browser/chatManagement/chatManagementEditor.ts index 2c3f76fd769..36355c7870b 100644 --- a/src/vs/workbench/contrib/chat/browser/chatManagement/chatManagementEditor.ts +++ b/src/vs/workbench/contrib/chat/browser/chatManagement/chatManagementEditor.ts @@ -71,6 +71,7 @@ export class ModelsManagementEditor extends EditorPane { if (this.dimension) { this.layout(this.dimension); } + this.modelsWidget?.render(); } override layout(dimension: Dimension): void { diff --git a/src/vs/workbench/contrib/chat/browser/chatManagement/chatModelsViewModel.ts b/src/vs/workbench/contrib/chat/browser/chatManagement/chatModelsViewModel.ts index ad145de5cd7..5857ff16e2d 100644 --- a/src/vs/workbench/contrib/chat/browser/chatManagement/chatModelsViewModel.ts +++ b/src/vs/workbench/contrib/chat/browser/chatManagement/chatModelsViewModel.ts @@ -6,12 +6,14 @@ import { distinct, coalesce } from '../../../../../base/common/arrays.js'; import { IMatch, IFilter, or, matchesCamelCase, matchesWords, matchesBaseContiguousSubString } from '../../../../../base/common/filters.js'; import { Emitter } from '../../../../../base/common/event.js'; -import { EditorModel } from '../../../../common/editor/editorModel.js'; import { ILanguageModelsService, ILanguageModelChatMetadata, IUserFriendlyLanguageModel } from '../../../chat/common/languageModels.js'; import { IChatEntitlementService } from '../../../../services/chat/common/chatEntitlementService.js'; +import { localize } from '../../../../../nls.js'; +import { Disposable } from '../../../../../base/common/lifecycle.js'; export const MODEL_ENTRY_TEMPLATE_ID = 'model.entry.template'; export const VENDOR_ENTRY_TEMPLATE_ID = 
'vendor.entry.template'; +export const GROUP_ENTRY_TEMPLATE_ID = 'group.entry.template'; const wordFilter = or(matchesBaseContiguousSubString, matchesWords); const CAPABILITY_REGEX = /@capability:\s*([^\s]+)/gi; @@ -67,11 +69,24 @@ export interface IVendorItemEntry { collapsed: boolean; } -export function isVendorEntry(entry: IModelItemEntry | IVendorItemEntry): entry is IVendorItemEntry { +export interface IGroupItemEntry { + type: 'group'; + id: string; + group: string; + label: string; + templateId: string; + collapsed: boolean; +} + +export function isVendorEntry(entry: IViewModelEntry): entry is IVendorItemEntry { return entry.type === 'vendor'; } -export type IViewModelEntry = IModelItemEntry | IVendorItemEntry; +export function isGroupEntry(entry: IViewModelEntry): entry is IGroupItemEntry { + return entry.type === 'group'; +} + +export type IViewModelEntry = IModelItemEntry | IVendorItemEntry | IGroupItemEntry; export interface IViewModelChangeEvent { at: number; @@ -79,14 +94,35 @@ export interface IViewModelChangeEvent { added: IViewModelEntry[]; } -export class ChatModelsViewModel extends EditorModel { +export const enum ChatModelGroup { + Vendor = 'vendor', + Visibility = 'visibility' +} + +export class ChatModelsViewModel extends Disposable { private readonly _onDidChange = this._register(new Emitter()); readonly onDidChange = this._onDidChange.event; + private readonly _onDidChangeGrouping = this._register(new Emitter()); + readonly onDidChangeGrouping = this._onDidChangeGrouping.event; + private modelEntries: IModelEntry[]; - private readonly collapsedVendors = new Set(); + private readonly collapsedGroups = new Set(); private searchValue: string = ''; + private modelsSorted: boolean = false; + + private _groupBy: ChatModelGroup = ChatModelGroup.Vendor; + get groupBy(): ChatModelGroup { return this._groupBy; } + set groupBy(groupBy: ChatModelGroup) { + if (this._groupBy !== groupBy) { + this._groupBy = groupBy; + this.collapsedGroups.clear(); + this.modelEntries = this.sortModels(this.modelEntries); + this.filter(this.searchValue); + this._onDidChangeGrouping.fire(groupBy); + } + } constructor( @ILanguageModelsService private readonly languageModelsService: ILanguageModelsService, @@ -111,14 +147,21 @@ export class ChatModelsViewModel extends EditorModel { selectedEntry: IViewModelEntry | undefined; + public shouldRefilter(): boolean { + return !this.modelsSorted; + } + filter(searchValue: string): readonly IViewModelEntry[] { this.searchValue = searchValue; + if (!this.modelsSorted) { + this.modelEntries = this.sortModels(this.modelEntries); + } const filtered = this.filterModels(this.modelEntries, searchValue); this.splice(0, this._viewModelEntries.length, filtered); return this.viewModelEntries; } - private filterModels(modelEntries: IModelEntry[], searchValue: string): (IVendorItemEntry | IModelItemEntry)[] { + private filterModels(modelEntries: IModelEntry[], searchValue: string): IViewModelEntry[] { let visible: boolean | undefined; const visibleMatches = VISIBLE_REGEX.exec(searchValue); @@ -161,33 +204,14 @@ export class ChatModelsViewModel extends EditorModel { const isFiltering = searchValue !== '' || capabilities.length > 0 || providerNames.length > 0 || visible !== undefined; - const result: (IVendorItemEntry | IModelItemEntry)[] = []; + const result: IViewModelEntry[] = []; const words = searchValue.split(' '); const allVendors = new Set(this.modelEntries.map(m => m.vendor)); const showHeaders = allVendors.size > 1; - const addedVendors = new Set(); + const 
addedGroups = new Set(); const lowerProviders = providerNames.map(p => p.toLowerCase().trim()); for (const modelEntry of modelEntries) { - if (!isFiltering && showHeaders && this.collapsedVendors.has(modelEntry.vendor)) { - if (!addedVendors.has(modelEntry.vendor)) { - const vendorInfo = this.languageModelsService.getVendors().find(v => v.vendor === modelEntry.vendor); - result.push({ - type: 'vendor', - id: `vendor-${modelEntry.vendor}`, - vendorEntry: { - vendor: modelEntry.vendor, - vendorDisplayName: modelEntry.vendorDisplayName, - managementCommand: vendorInfo?.managementCommand - }, - templateId: VENDOR_ENTRY_TEMPLATE_ID, - collapsed: true - }); - addedVendors.add(modelEntry.vendor); - } - continue; - } - if (visible !== undefined) { if ((modelEntry.metadata.isUserSelectable ?? false) !== visible) { continue; @@ -234,20 +258,48 @@ export class ChatModelsViewModel extends EditorModel { } } - if (showHeaders && !addedVendors.has(modelEntry.vendor)) { - const vendorInfo = this.languageModelsService.getVendors().find(v => v.vendor === modelEntry.vendor); - result.push({ - type: 'vendor', - id: `vendor-${modelEntry.vendor}`, - vendorEntry: { - vendor: modelEntry.vendor, - vendorDisplayName: modelEntry.vendorDisplayName, - managementCommand: vendorInfo?.managementCommand - }, - templateId: VENDOR_ENTRY_TEMPLATE_ID, - collapsed: false - }); - addedVendors.add(modelEntry.vendor); + if (this.groupBy === ChatModelGroup.Vendor) { + if (showHeaders) { + if (!addedGroups.has(modelEntry.vendor)) { + const isCollapsed = !isFiltering && this.collapsedGroups.has(modelEntry.vendor); + const vendorInfo = this.languageModelsService.getVendors().find(v => v.vendor === modelEntry.vendor); + result.push({ + type: 'vendor', + id: `vendor-${modelEntry.vendor}`, + vendorEntry: { + vendor: modelEntry.vendor, + vendorDisplayName: modelEntry.vendorDisplayName, + managementCommand: vendorInfo?.managementCommand + }, + templateId: VENDOR_ENTRY_TEMPLATE_ID, + collapsed: isCollapsed + }); + addedGroups.add(modelEntry.vendor); + } + + if (!isFiltering && this.collapsedGroups.has(modelEntry.vendor)) { + continue; + } + } + } else if (this.groupBy === ChatModelGroup.Visibility) { + const isVisible = modelEntry.metadata.isUserSelectable ?? false; + const groupKey = isVisible ? 'visible' : 'hidden'; + if (!addedGroups.has(groupKey)) { + const isCollapsed = !isFiltering && this.collapsedGroups.has(groupKey); + result.push({ + type: 'group', + id: `group-${groupKey}`, + group: groupKey, + label: isVisible ? localize('visible', "Visible") : localize('hidden', "Hidden"), + templateId: GROUP_ENTRY_TEMPLATE_ID, + collapsed: isCollapsed + }); + addedGroups.add(groupKey); + } + + if (!isFiltering && this.collapsedGroups.has(groupKey)) { + continue; + } } const modelId = ChatModelsViewModel.getId(modelEntry); @@ -303,6 +355,35 @@ export class ChatModelsViewModel extends EditorModel { return matchedCapabilities; } + private sortModels(modelEntries: IModelEntry[]): IModelEntry[] { + if (this.groupBy === ChatModelGroup.Visibility) { + modelEntries.sort((a, b) => { + const aVisible = a.metadata.isUserSelectable ?? false; + const bVisible = b.metadata.isUserSelectable ?? false; + if (aVisible === bVisible) { + if (a.vendor === b.vendor) { + return a.metadata.name.localeCompare(b.metadata.name); + } + if (a.vendor === 'copilot') { return -1; } + if (b.vendor === 'copilot') { return 1; } + return a.vendorDisplayName.localeCompare(b.vendorDisplayName); + } + return aVisible ? 
-1 : 1; + }); + } else if (this.groupBy === ChatModelGroup.Vendor) { + modelEntries.sort((a, b) => { + if (a.vendor === b.vendor) { + return a.metadata.name.localeCompare(b.metadata.name); + } + if (a.vendor === 'copilot') { return -1; } + if (b.vendor === 'copilot') { return 1; } + return a.vendorDisplayName.localeCompare(b.vendorDisplayName); + }); + } + this.modelsSorted = true; + return modelEntries; + } + getVendors(): IUserFriendlyLanguageModel[] { return [...this.languageModelsService.getVendors()].sort((a, b) => { if (a.vendor === 'copilot') { return -1; } @@ -311,11 +392,6 @@ export class ChatModelsViewModel extends EditorModel { }); } - override async resolve(): Promise { - await this.refresh(); - return super.resolve(); - } - async refresh(): Promise { this.modelEntries = []; for (const vendor of this.getVendors()) { @@ -339,7 +415,8 @@ export class ChatModelsViewModel extends EditorModel { this.modelEntries.push(...models.sort((a, b) => a.metadata.name.localeCompare(b.metadata.name))); } - this.modelEntries = distinct(this.modelEntries, modelEntry => ChatModelsViewModel.getId(modelEntry)); + const modelEntries = distinct(this.modelEntries, modelEntry => ChatModelsViewModel.getId(modelEntry)); + this.modelEntries = this._groupBy === ChatModelGroup.Visibility ? this.sortModels(modelEntries) : modelEntries; this.filter(this.searchValue); } @@ -349,9 +426,12 @@ export class ChatModelsViewModel extends EditorModel { this.languageModelsService.updateModelPickerPreference(model.modelEntry.identifier, newVisibility); const metadata = this.languageModelsService.lookupLanguageModel(model.modelEntry.identifier); const index = this.viewModelEntries.indexOf(model); - if (metadata) { + if (metadata && index !== -1) { model.id = ChatModelsViewModel.getId(model.modelEntry); model.modelEntry.metadata = metadata; + if (this.groupBy === ChatModelGroup.Visibility) { + this.modelsSorted = false; + } this.splice(index, 1, [model]); } } @@ -360,10 +440,14 @@ export class ChatModelsViewModel extends EditorModel { return `${modelEntry.identifier}.${modelEntry.metadata.version}-visible:${modelEntry.metadata.isUserSelectable}`; } - toggleVendorCollapsed(vendorEntry: IVendorItemEntry): void { - this.selectedEntry = vendorEntry; - if (!this.collapsedVendors.delete(vendorEntry.vendorEntry.vendor)) { - this.collapsedVendors.add(vendorEntry.vendorEntry.vendor); + toggleCollapsed(viewModelEntry: IViewModelEntry): void { + const id = isGroupEntry(viewModelEntry) ? viewModelEntry.group : isVendorEntry(viewModelEntry) ? 
viewModelEntry.vendorEntry.vendor : undefined; + if (!id) { + return; + } + this.selectedEntry = viewModelEntry; + if (!this.collapsedGroups.delete(id)) { + this.collapsedGroups.add(id); } this.filter(this.searchValue); } diff --git a/src/vs/workbench/contrib/chat/browser/chatManagement/chatModelsWidget.ts b/src/vs/workbench/contrib/chat/browser/chatManagement/chatModelsWidget.ts index a7608c3200c..10b1081ccb8 100644 --- a/src/vs/workbench/contrib/chat/browser/chatManagement/chatModelsWidget.ts +++ b/src/vs/workbench/contrib/chat/browser/chatManagement/chatModelsWidget.ts @@ -19,10 +19,10 @@ import { IHoverService } from '../../../../../platform/hover/browser/hover.js'; import { MarkdownString } from '../../../../../base/common/htmlContent.js'; import { IExtensionService } from '../../../../services/extensions/common/extensions.js'; import { IContextMenuService } from '../../../../../platform/contextview/browser/contextView.js'; -import { IAction, toAction, Action, Separator } from '../../../../../base/common/actions.js'; +import { IAction, toAction, Action, Separator, SubmenuAction } from '../../../../../base/common/actions.js'; import { ActionBar } from '../../../../../base/browser/ui/actionbar/actionbar.js'; import { Codicon } from '../../../../../base/common/codicons.js'; -import { ChatModelsViewModel, IModelEntry, IModelItemEntry, IVendorItemEntry, SEARCH_SUGGESTIONS, isVendorEntry } from './chatModelsViewModel.js'; +import { ChatModelsViewModel, IModelEntry, IModelItemEntry, IVendorItemEntry, IGroupItemEntry, SEARCH_SUGGESTIONS, isVendorEntry, isGroupEntry, ChatModelGroup } from './chatModelsViewModel.js'; import { HighlightedLabel } from '../../../../../base/browser/ui/highlightedlabel/highlightedLabel.js'; import { SuggestEnabledInput } from '../../../codeEditor/browser/suggestEnabledInput/suggestEnabledInput.js'; import { Delayer } from '../../../../../base/common/async.js'; @@ -44,7 +44,7 @@ const HEADER_HEIGHT = 30; const VENDOR_ROW_HEIGHT = 30; const MODEL_ROW_HEIGHT = 26; -type TableEntry = IModelItemEntry | IVendorItemEntry; +type TableEntry = IModelItemEntry | IVendorItemEntry | IGroupItemEntry; export function getModelHoverContent(model: IModelEntry): MarkdownString { const markdown = new MarkdownString('', { isTrusted: true, supportThemeIcons: true }); @@ -151,6 +151,20 @@ class ModelsSearchFilterDropdownMenuActionViewItem extends DropdownMenuActionVie ); } + private createGroupByAction(grouping: ChatModelGroup, label: string): IAction { + return { + id: `groupBy.${grouping}`, + label, + class: undefined, + enabled: true, + tooltip: localize('groupByTooltip', "Group by {0}", label), + checked: this.viewModel.groupBy === grouping, + run: () => { + this.viewModel.groupBy = grouping; + } + }; + } + private createProviderAction(vendor: string, displayName: string): IAction { const query = `@provider:"${displayName}"`; const currentQuery = this.searchWidget.getValue(); @@ -229,6 +243,13 @@ class ModelsSearchFilterDropdownMenuActionViewItem extends DropdownMenuActionVie actions.push(...configuredVendors.map(vendor => this.createProviderAction(vendor.vendor, vendor.vendorDisplayName))); } + // Group By + actions.push(new Separator()); + const groupByActions: IAction[] = []; + groupByActions.push(this.createGroupByAction(ChatModelGroup.Vendor, localize('groupBy.provider', 'Provider'))); + groupByActions.push(this.createGroupByAction(ChatModelGroup.Visibility, localize('groupBy.visibility', 'Visibility'))); + actions.push(new SubmenuAction('groupBy', localize('groupBy', "Group 
By"), groupByActions)); + return actions; } } @@ -236,7 +257,7 @@ class ModelsSearchFilterDropdownMenuActionViewItem extends DropdownMenuActionVie class Delegate implements ITableVirtualDelegate { readonly headerRowHeight = HEADER_HEIGHT; getHeight(element: TableEntry): number { - return isVendorEntry(element) ? VENDOR_ROW_HEIGHT : MODEL_ROW_HEIGHT; + return isVendorEntry(element) || isGroupEntry(element) ? VENDOR_ROW_HEIGHT : MODEL_ROW_HEIGHT; } } @@ -253,18 +274,22 @@ abstract class ModelsTableColumnRenderer { - this.viewModel.toggleVendorCollapsed(entry); - } + run: () => this.viewModel.toggleCollapsed(entry) }; } @@ -387,6 +414,10 @@ class ModelNameColumnRenderer extends ModelsTableColumnRenderer { + static readonly TEMPLATE_ID = 'provider'; + + readonly templateId: string = ProviderColumnRenderer.TEMPLATE_ID; + + renderTemplate(container: HTMLElement): IProviderColumnTemplateData { + const disposables = new DisposableStore(); + const elementDisposables = new DisposableStore(); + const providerElement = DOM.append(container, $('.model-provider')); + return { + container, + providerElement, + disposables, + elementDisposables + }; + } + + override renderVendorElement(entry: IVendorItemEntry, index: number, templateData: IProviderColumnTemplateData): void { + templateData.providerElement.textContent = ''; + } + + override renderGroupElement(entry: IGroupItemEntry, index: number, templateData: IProviderColumnTemplateData): void { + templateData.providerElement.textContent = ''; + } + + override renderModelElement(entry: IModelItemEntry, index: number, templateData: IProviderColumnTemplateData): void { + templateData.providerElement.textContent = entry.modelEntry.vendorDisplayName; + } +} + + + function formatTokenCount(count: number): string { if (count >= 1000000) { return `${(count / 1000000).toFixed(1)}M`; @@ -683,6 +763,8 @@ export class ChatModelsWidget extends Disposable { private readonly searchFocusContextKey: IContextKey; + private tableDisposables = this._register(new DisposableStore()); + constructor( @ILanguageModelsService private readonly languageModelsService: ILanguageModelsService, @IInstantiationService private readonly instantiationService: IInstantiationService, @@ -701,7 +783,7 @@ export class ChatModelsWidget extends Disposable { this.element = DOM.$('.models-widget'); this.create(this.element); - const loadingPromise = this.extensionService.whenInstalledExtensionsRegistered().then(() => this.viewModel.resolve()); + const loadingPromise = this.extensionService.whenInstalledExtensionsRegistered().then(() => this.viewModel.refresh()); this.editorProgressService.showWhile(loadingPromise, 300); } @@ -806,14 +888,24 @@ export class ChatModelsWidget extends Disposable { this.tableContainer = DOM.append(container, $('.models-table-container')); // Create table + this.createTable(); + this._register(this.viewModel.onDidChangeGrouping(() => this.createTable())); + return; + } + + private createTable(): void { + this.tableDisposables.clear(); + DOM.clearNode(this.tableContainer); + const gutterColumnRenderer = this.instantiationService.createInstance(GutterColumnRenderer, this.viewModel); const modelNameColumnRenderer = this.instantiationService.createInstance(ModelNameColumnRenderer); const costColumnRenderer = this.instantiationService.createInstance(MultiplierColumnRenderer); const tokenLimitsColumnRenderer = this.instantiationService.createInstance(TokenLimitsColumnRenderer); const capabilitiesColumnRenderer = 
this.instantiationService.createInstance(CapabilitiesColumnRenderer); const actionsColumnRenderer = this.instantiationService.createInstance(ActionsColumnRenderer, this.viewModel); + const providerColumnRenderer = this.instantiationService.createInstance(ProviderColumnRenderer); - this._register(capabilitiesColumnRenderer.onDidClickCapability(capability => { + this.tableDisposables.add(capabilitiesColumnRenderer.onDidClickCapability(capability => { const currentQuery = this.searchWidget.getValue(); const query = `@capability:${capability}`; const newQuery = toggleFilter(currentQuery, query); @@ -821,63 +913,79 @@ export class ChatModelsWidget extends Disposable { this.searchWidget.focus(); })); - this.table = this._register(this.instantiationService.createInstance( + const columns = [ + { + label: '', + tooltip: '', + weight: 0.05, + minimumWidth: 40, + maximumWidth: 40, + templateId: GutterColumnRenderer.TEMPLATE_ID, + project(row: TableEntry): TableEntry { return row; } + }, + { + label: localize('modelName', 'Name'), + tooltip: '', + weight: 0.35, + minimumWidth: 200, + templateId: ModelNameColumnRenderer.TEMPLATE_ID, + project(row: TableEntry): TableEntry { return row; } + } + ]; + + if (this.viewModel.groupBy === ChatModelGroup.Visibility) { + columns.push({ + label: localize('provider', 'Provider'), + tooltip: '', + weight: 0.15, + minimumWidth: 100, + templateId: ProviderColumnRenderer.TEMPLATE_ID, + project(row: TableEntry): TableEntry { return row; } + }); + } + + columns.push( + { + label: localize('capabilities', 'Capabilities'), + tooltip: '', + weight: 0.25, + minimumWidth: 180, + templateId: CapabilitiesColumnRenderer.TEMPLATE_ID, + project(row: TableEntry): TableEntry { return row; } + }, + { + label: localize('tokenLimits', 'Context Size'), + tooltip: '', + weight: 0.1, + minimumWidth: 140, + templateId: TokenLimitsColumnRenderer.TEMPLATE_ID, + project(row: TableEntry): TableEntry { return row; } + }, + { + label: localize('cost', 'Multiplier'), + tooltip: '', + weight: 0.05, + minimumWidth: 60, + templateId: MultiplierColumnRenderer.TEMPLATE_ID, + project(row: TableEntry): TableEntry { return row; } + }, + { + label: '', + tooltip: '', + weight: 0.05, + minimumWidth: 64, + maximumWidth: 64, + templateId: ActionsColumnRenderer.TEMPLATE_ID, + project(row: TableEntry): TableEntry { return row; } + } + ); + + this.table = this.tableDisposables.add(this.instantiationService.createInstance( WorkbenchTable, 'ModelsWidget', this.tableContainer, new Delegate(), - [ - { - label: '', - tooltip: '', - weight: 0.05, - minimumWidth: 40, - maximumWidth: 40, - templateId: GutterColumnRenderer.TEMPLATE_ID, - project(row: TableEntry): TableEntry { return row; } - }, - { - label: localize('modelName', 'Name'), - tooltip: '', - weight: 0.40, - minimumWidth: 200, - templateId: ModelNameColumnRenderer.TEMPLATE_ID, - project(row: TableEntry): TableEntry { return row; } - }, - { - label: localize('capabilities', 'Capabilities'), - tooltip: '', - weight: 0.30, - minimumWidth: 180, - templateId: CapabilitiesColumnRenderer.TEMPLATE_ID, - project(row: TableEntry): TableEntry { return row; } - }, - { - label: localize('tokenLimits', 'Context Size'), - tooltip: '', - weight: 0.1, - minimumWidth: 140, - templateId: TokenLimitsColumnRenderer.TEMPLATE_ID, - project(row: TableEntry): TableEntry { return row; } - }, - { - label: localize('cost', 'Multiplier'), - tooltip: '', - weight: 0.1, - minimumWidth: 60, - templateId: MultiplierColumnRenderer.TEMPLATE_ID, - project(row: TableEntry): TableEntry { return 
row; } - }, - { - label: '', - tooltip: '', - weight: 0.05, - minimumWidth: 64, - maximumWidth: 64, - templateId: ActionsColumnRenderer.TEMPLATE_ID, - project(row: TableEntry): TableEntry { return row; } - }, - ], + columns, [ gutterColumnRenderer, modelNameColumnRenderer, @@ -885,6 +993,7 @@ export class ChatModelsWidget extends Disposable { tokenLimitsColumnRenderer, capabilitiesColumnRenderer, actionsColumnRenderer, + providerColumnRenderer ], { identityProvider: { getId: (e: TableEntry) => e.id }, @@ -893,6 +1002,8 @@ export class ChatModelsWidget extends Disposable { getAriaLabel: (e: TableEntry) => { if (isVendorEntry(e)) { return localize('vendor.ariaLabel', '{0} provider', e.vendorEntry.vendorDisplayName); + } else if (isGroupEntry(e)) { + return e.label; } return localize('model.ariaLabel', '{0} from {1}', e.modelEntry.metadata.name, e.modelEntry.vendorDisplayName); }, @@ -905,7 +1016,7 @@ export class ChatModelsWidget extends Disposable { } )) as WorkbenchTable; - this._register(this.table.onContextMenu(e => { + this.tableDisposables.add(this.table.onContextMenu(e => { if (!e.element) { return; } @@ -917,7 +1028,7 @@ export class ChatModelsWidget extends Disposable { label: localize('models.manageProvider', 'Manage {0}...', entry.vendorEntry.vendorDisplayName), run: async () => { await this.commandService.executeCommand(entry.vendorEntry.managementCommand!, entry.vendorEntry.vendor); - await this.viewModel.resolve(); + await this.viewModel.refresh(); } }) ]; @@ -929,7 +1040,7 @@ export class ChatModelsWidget extends Disposable { })); this.table.splice(0, this.table.length, this.viewModel.viewModelEntries); - this._register(this.viewModel.onDidChange(({ at, removed, added }) => { + this.tableDisposables.add(this.viewModel.onDidChange(({ at, removed, added }) => { this.table.splice(at, removed, added); if (this.viewModel.selectedEntry) { const selectedEntryIndex = this.viewModel.viewModelEntries.indexOf(this.viewModel.selectedEntry); @@ -953,18 +1064,26 @@ export class ChatModelsWidget extends Disposable { })); })); - this._register(this.table.onDidOpen(async ({ element, browserEvent }) => { + this.tableDisposables.add(this.table.onDidOpen(async ({ element, browserEvent }) => { if (!element) { return; } - if (isVendorEntry(element)) { - this.viewModel.toggleVendorCollapsed(element); + if (isVendorEntry(element) || isGroupEntry(element)) { + this.viewModel.toggleCollapsed(element); } else if (!DOM.isMouseEvent(browserEvent) || browserEvent.detail === 2) { this.viewModel.toggleVisibility(element); } })); - this._register(this.table.onDidChangeSelection(e => this.viewModel.selectedEntry = e.elements[0])); + this.tableDisposables.add(this.table.onDidChangeSelection(e => this.viewModel.selectedEntry = e.elements[0])); + + this.tableDisposables.add(this.table.onDidBlur(() => { + if (this.viewModel.shouldRefilter()) { + this.viewModel.filter(this.searchWidget.getValue()); + } + })); + + this.layout(this.element.clientHeight, this.element.clientWidth); } private filterModels(): void { @@ -975,7 +1094,7 @@ export class ChatModelsWidget extends Disposable { private async enableProvider(vendorId: string): Promise { await this.languageModelsService.selectLanguageModels({ vendor: vendorId }, true); - await this.viewModel.resolve(); + await this.viewModel.refresh(); } public layout(height: number, width: number): void { @@ -999,4 +1118,10 @@ export class ChatModelsWidget extends Disposable { this.searchWidget.setValue(''); } + public render(): void { + if (this.viewModel.shouldRefilter()) { + 
this.viewModel.filter(this.searchWidget.getValue()); + } + } + } diff --git a/src/vs/workbench/contrib/chat/browser/chatManagement/media/chatModelsWidget.css b/src/vs/workbench/contrib/chat/browser/chatManagement/media/chatModelsWidget.css index a47f91c270e..95e9af06923 100644 --- a/src/vs/workbench/contrib/chat/browser/chatManagement/media/chatModelsWidget.css +++ b/src/vs/workbench/contrib/chat/browser/chatManagement/media/chatModelsWidget.css @@ -210,3 +210,11 @@ .models-widget .models-table-container .monaco-table .monaco-list:not(.focused) .monaco-list-row[data-parity=odd].focused:not(.selected):not(:hover) .monaco-table-tr:not(.models-vendor-row) { background-color: var(--vscode-editor-background); } + +/** Provider column styling **/ + +.models-widget .models-table-container .monaco-table-td .model-provider { + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; +} diff --git a/src/vs/workbench/contrib/chat/browser/chatSessions/localChatSessionsProvider.ts b/src/vs/workbench/contrib/chat/browser/chatSessions/localChatSessionsProvider.ts index 695fc5669ee..f80b264db43 100644 --- a/src/vs/workbench/contrib/chat/browser/chatSessions/localChatSessionsProvider.ts +++ b/src/vs/workbench/contrib/chat/browser/chatSessions/localChatSessionsProvider.ts @@ -224,14 +224,14 @@ export class LocalChatSessionsProvider extends Disposable implements IChatSessio private getSessionDescription(chatModel: IChatModel): string | undefined { const requests = chatModel.getRequests(); if (requests.length === 0) { - return undefined; + return ''; // signal Chat that has not started yet } // Get the last request to check its response status const lastRequest = requests[requests.length - 1]; const response = lastRequest?.response; if (!response) { - return undefined; + return ''; // signal Chat that has not started yet } // If the response is complete, show Finished diff --git a/src/vs/workbench/contrib/chat/browser/chatSessions/view/sessionsViewPane.ts b/src/vs/workbench/contrib/chat/browser/chatSessions/view/sessionsViewPane.ts index 34c92c5682c..f8dd87df066 100644 --- a/src/vs/workbench/contrib/chat/browser/chatSessions/view/sessionsViewPane.ts +++ b/src/vs/workbench/contrib/chat/browser/chatSessions/view/sessionsViewPane.ts @@ -512,7 +512,8 @@ export class SessionsViewPane extends ViewPane { // Get actions and filter for context menu (all actions that are NOT inline) const actions = menu.getActions({ arg: marshalledSession, shouldForwardArgs: true }); - const { secondary } = getActionBarActions(actions, 'inline'); this.contextMenuService.showContextMenu({ + const { secondary } = getActionBarActions(actions, 'inline'); + this.contextMenuService.showContextMenu({ getActions: () => secondary, getAnchor: () => e.anchor, getActionsContext: () => marshalledSession, diff --git a/src/vs/workbench/contrib/chat/browser/chatSetup.ts b/src/vs/workbench/contrib/chat/browser/chatSetup.ts index 9f504dd75b6..f275faa8289 100644 --- a/src/vs/workbench/contrib/chat/browser/chatSetup.ts +++ b/src/vs/workbench/contrib/chat/browser/chatSetup.ts @@ -87,6 +87,7 @@ import { IPosition } from '../../../../editor/common/core/position.js'; import { IMarker, IMarkerService, MarkerSeverity } from '../../../../platform/markers/common/markers.js'; import { EditorContextKeys } from '../../../../editor/common/editorContextKeys.js'; import { ICodeEditorService } from '../../../../editor/browser/services/codeEditorService.js'; +import { AGENT_SESSIONS_VIEW_CONTAINER_ID } from './agentSessions/agentSessions.js'; const defaultChat 
= { extensionId: product.defaultChatAgent?.extensionId ?? '', @@ -1672,9 +1673,12 @@ export class ChatTeardownContribution extends Disposable implements IWorkbenchCo const activeContainers = this.viewDescriptorService.getViewContainersByLocation(ViewContainerLocation.AuxiliaryBar).filter( container => this.viewDescriptorService.getViewContainerModel(container).activeViewDescriptors.length > 0 ); + const hasChatView = activeContainers.some(container => container.id === CHAT_SIDEBAR_PANEL_ID); + const hasAgentSessionsView = activeContainers.some(container => container.id === AGENT_SESSIONS_VIEW_CONTAINER_ID); if ( - (activeContainers.length === 0) || // chat view is already gone but we know it was there before - (activeContainers.length === 1 && activeContainers.at(0)?.id === CHAT_SIDEBAR_PANEL_ID) // chat view is the only view which is going to go away + (activeContainers.length === 0) || // chat view is already gone but we know it was there before + (activeContainers.length === 1 && (hasChatView || hasAgentSessionsView)) || // chat view or agent sessions is the only view which is going to go away + (activeContainers.length === 2 && hasChatView && hasAgentSessionsView) // both chat and agent sessions view are going to go away ) { this.layoutService.setPartHidden(true, Parts.AUXILIARYBAR_PART); // hide if there are no views in the secondary sidebar } diff --git a/src/vs/workbench/contrib/chat/browser/chatWidgetService.ts b/src/vs/workbench/contrib/chat/browser/chatWidgetService.ts index 604c9c1b102..683757a1d5f 100644 --- a/src/vs/workbench/contrib/chat/browser/chatWidgetService.ts +++ b/src/vs/workbench/contrib/chat/browser/chatWidgetService.ts @@ -94,7 +94,7 @@ export class ChatWidgetService extends Disposable implements IChatWidgetService openSession(sessionResource: URI, target?: typeof ChatViewPaneTarget): Promise; openSession(sessionResource: URI, target?: PreferredGroup, options?: IChatEditorOptions): Promise; async openSession(sessionResource: URI, target?: typeof ChatViewPaneTarget | PreferredGroup, options?: IChatEditorOptions): Promise { - const alreadyOpenWidget = await this.revealSessionIfAlreadyOpen(sessionResource); + const alreadyOpenWidget = await this.revealSessionIfAlreadyOpen(sessionResource, options?.preserveFocus); if (alreadyOpenWidget) { return alreadyOpenWidget; } @@ -104,7 +104,9 @@ export class ChatWidgetService extends Disposable implements IChatWidgetService const chatViewPane = await this.viewsService.openView(ChatViewId, true); if (chatViewPane) { await chatViewPane.loadSession(sessionResource); - chatViewPane.focusInput(); + if (!options?.preserveFocus) { + chatViewPane.focusInput(); + } } return chatViewPane?.widget; } @@ -141,7 +143,7 @@ export class ChatWidgetService extends Disposable implements IChatWidgetService ]); } - const pane = await this.editorService.openEditor(existingEditor.editor, existingEditor.group); + const pane = await this.editorService.openEditor(existingEditor.editor, { preserveFocus }, existingEditor.group); await ensureFocusTransfer; return pane instanceof ChatEditor ? 
pane.widget : undefined; } diff --git a/src/vs/workbench/contrib/chat/common/chatCodeMapperService.ts b/src/vs/workbench/contrib/chat/common/chatCodeMapperService.ts index 749850a7aee..25fe2146cb9 100644 --- a/src/vs/workbench/contrib/chat/common/chatCodeMapperService.ts +++ b/src/vs/workbench/contrib/chat/common/chatCodeMapperService.ts @@ -25,7 +25,7 @@ export interface ICodeMapperRequest { readonly codeBlocks: ICodeMapperCodeBlock[]; readonly chatRequestId?: string; readonly chatRequestModel?: string; - readonly chatSessionId?: string; + readonly chatSessionResource?: URI; readonly location?: string; } diff --git a/src/vs/workbench/contrib/chat/common/chatContextKeys.ts b/src/vs/workbench/contrib/chat/common/chatContextKeys.ts index d5ab4e32dda..68b89136a21 100644 --- a/src/vs/workbench/contrib/chat/common/chatContextKeys.ts +++ b/src/vs/workbench/contrib/chat/common/chatContextKeys.ts @@ -93,6 +93,7 @@ export namespace ChatContextKeys { export const sessionType = new RawContextKey('chatSessionType', '', { type: 'string', description: localize('chatSessionType', "The type of the current chat session item.") }); export const isArchivedItem = new RawContextKey('chatIsArchivedItem', false, { type: 'boolean', description: localize('chatIsArchivedItem', "True when the chat session item is archived.") }); + export const isCombinedSessionViewer = new RawContextKey('chatIsCombinedSessionViewer', false, { type: 'boolean', description: localize('chatIsCombinedSessionViewer', "True when the chat session viewer uses the new combined style.") }); // TODO@bpasero eventually retire this context key export const isActiveSession = new RawContextKey('chatIsActiveSession', false, { type: 'boolean', description: localize('chatIsActiveSession', "True when the chat session is currently active (not deletable).") }); export const isKatexMathElement = new RawContextKey('chatIsKatexMathElement', false, { type: 'boolean', description: localize('chatIsKatexMathElement', "True when focusing a KaTeX math element.") }); } diff --git a/src/vs/workbench/contrib/chat/common/tools/editFileTool.ts b/src/vs/workbench/contrib/chat/common/tools/editFileTool.ts index 8e1e3669b4c..d620262ad00 100644 --- a/src/vs/workbench/contrib/chat/common/tools/editFileTool.ts +++ b/src/vs/workbench/contrib/chat/common/tools/editFileTool.ts @@ -89,7 +89,7 @@ export class EditTool implements IToolImpl { location: 'tool', chatRequestId: invocation.chatRequestId, chatRequestModel: invocation.modelId, - chatSessionId: invocation.context.sessionId, + chatSessionResource: invocation.context.sessionResource, }, { textEdit: (target, edits) => { model.acceptResponseProgress(request, { kind: 'textEdit', uri: target, edits }); diff --git a/src/vs/workbench/contrib/chat/test/browser/chatModelsViewModel.test.ts b/src/vs/workbench/contrib/chat/test/browser/chatModelsViewModel.test.ts index 6afc0021d30..4db40bc1983 100644 --- a/src/vs/workbench/contrib/chat/test/browser/chatModelsViewModel.test.ts +++ b/src/vs/workbench/contrib/chat/test/browser/chatModelsViewModel.test.ts @@ -8,7 +8,7 @@ import { Emitter, Event } from '../../../../../base/common/event.js'; import { DisposableStore, IDisposable } from '../../../../../base/common/lifecycle.js'; import { ensureNoDisposablesAreLeakedInTestSuite } from '../../../../../base/test/common/utils.js'; import { ILanguageModelChatMetadata, ILanguageModelChatMetadataAndIdentifier, ILanguageModelChatProvider, ILanguageModelChatSelector, ILanguageModelsService, IUserFriendlyLanguageModel } from 
'../../common/languageModels.js'; -import { ChatModelsViewModel, IModelItemEntry, IVendorItemEntry, isVendorEntry } from '../../browser/chatManagement/chatModelsViewModel.js'; +import { ChatModelGroup, ChatModelsViewModel, IModelItemEntry, IVendorItemEntry, isVendorEntry, isGroupEntry, IGroupItemEntry } from '../../browser/chatManagement/chatModelsViewModel.js'; import { IChatEntitlementService, ChatEntitlement } from '../../../../services/chat/common/chatEntitlementService.js'; import { IObservable, observableValue } from '../../../../../base/common/observable.js'; import { ExtensionIdentifier } from '../../../../../platform/extensions/common/extensions.js'; @@ -40,7 +40,10 @@ class MockLanguageModelsService implements ILanguageModelsService { } updateModelPickerPreference(modelIdentifier: string, showInModelPicker: boolean): void { - throw new Error('Method not implemented.'); + const metadata = this.models.get(modelIdentifier); + if (metadata) { + this.models.set(modelIdentifier, { ...metadata, isUserSelectable: showInModelPicker }); + } } getVendors(): IUserFriendlyLanguageModel[] { @@ -240,7 +243,7 @@ suite('ChatModelsViewModel', () => { chatEntitlementService )); - await viewModel.resolve(); + await viewModel.refresh(); }); teardown(() => { @@ -258,14 +261,14 @@ suite('ChatModelsViewModel', () => { const vendors = results.filter(isVendorEntry); assert.strictEqual(vendors.length, 2); - const models = results.filter(r => !isVendorEntry(r)) as IModelItemEntry[]; + const models = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r)) as IModelItemEntry[]; assert.strictEqual(models.length, 4); }); test('should filter by provider name', () => { const results = viewModel.filter('@provider:copilot'); - const models = results.filter(r => !isVendorEntry(r)) as IModelItemEntry[]; + const models = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r)) as IModelItemEntry[]; assert.strictEqual(models.length, 2); assert.ok(models.every(m => m.modelEntry.vendor === 'copilot')); }); @@ -273,7 +276,7 @@ suite('ChatModelsViewModel', () => { test('should filter by provider display name', () => { const results = viewModel.filter('@provider:OpenAI'); - const models = results.filter(r => !isVendorEntry(r)) as IModelItemEntry[]; + const models = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r)) as IModelItemEntry[]; assert.strictEqual(models.length, 2); assert.ok(models.every(m => m.modelEntry.vendor === 'openai')); }); @@ -281,14 +284,14 @@ suite('ChatModelsViewModel', () => { test('should filter by multiple providers with OR logic', () => { const results = viewModel.filter('@provider:copilot @provider:openai'); - const models = results.filter(r => !isVendorEntry(r)) as IModelItemEntry[]; + const models = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r)) as IModelItemEntry[]; assert.strictEqual(models.length, 4); }); test('should filter by single capability - tools', () => { const results = viewModel.filter('@capability:tools'); - const models = results.filter(r => !isVendorEntry(r)) as IModelItemEntry[]; + const models = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r)) as IModelItemEntry[]; assert.strictEqual(models.length, 3); assert.ok(models.every(m => m.modelEntry.metadata.capabilities?.toolCalling === true)); }); @@ -296,7 +299,7 @@ suite('ChatModelsViewModel', () => { test('should filter by single capability - vision', () => { const results = viewModel.filter('@capability:vision'); - const models = results.filter(r => !isVendorEntry(r)) as IModelItemEntry[]; + 
const models = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r)) as IModelItemEntry[]; assert.strictEqual(models.length, 3); assert.ok(models.every(m => m.modelEntry.metadata.capabilities?.vision === true)); }); @@ -304,7 +307,7 @@ suite('ChatModelsViewModel', () => { test('should filter by single capability - agent', () => { const results = viewModel.filter('@capability:agent'); - const models = results.filter(r => !isVendorEntry(r)) as IModelItemEntry[]; + const models = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r)) as IModelItemEntry[]; assert.strictEqual(models.length, 1); assert.strictEqual(models[0].modelEntry.metadata.id, 'gpt-4o'); }); @@ -312,7 +315,7 @@ suite('ChatModelsViewModel', () => { test('should filter by multiple capabilities with AND logic', () => { const results = viewModel.filter('@capability:tools @capability:vision'); - const models = results.filter(r => !isVendorEntry(r)) as IModelItemEntry[]; + const models = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r)) as IModelItemEntry[]; // Should only return models that have BOTH tools and vision assert.strictEqual(models.length, 2); assert.ok(models.every(m => @@ -324,7 +327,7 @@ suite('ChatModelsViewModel', () => { test('should filter by three capabilities with AND logic', () => { const results = viewModel.filter('@capability:tools @capability:vision @capability:agent'); - const models = results.filter(r => !isVendorEntry(r)) as IModelItemEntry[]; + const models = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r)) as IModelItemEntry[]; // Should only return gpt-4o which has all three assert.strictEqual(models.length, 1); assert.strictEqual(models[0].modelEntry.metadata.id, 'gpt-4o'); @@ -333,7 +336,7 @@ suite('ChatModelsViewModel', () => { test('should return no results when filtering by incompatible capabilities', () => { const results = viewModel.filter('@capability:vision @capability:agent'); - const models = results.filter(r => !isVendorEntry(r)) as IModelItemEntry[]; + const models = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r)) as IModelItemEntry[]; // Only gpt-4o has both vision and agent, but gpt-4-vision doesn't have agent assert.strictEqual(models.length, 1); assert.strictEqual(models[0].modelEntry.metadata.id, 'gpt-4o'); @@ -342,7 +345,7 @@ suite('ChatModelsViewModel', () => { test('should filter by visibility - visible:true', () => { const results = viewModel.filter('@visible:true'); - const models = results.filter(r => !isVendorEntry(r)) as IModelItemEntry[]; + const models = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r)) as IModelItemEntry[]; assert.strictEqual(models.length, 3); assert.ok(models.every(m => m.modelEntry.metadata.isUserSelectable === true)); }); @@ -350,7 +353,7 @@ suite('ChatModelsViewModel', () => { test('should filter by visibility - visible:false', () => { const results = viewModel.filter('@visible:false'); - const models = results.filter(r => !isVendorEntry(r)) as IModelItemEntry[]; + const models = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r)) as IModelItemEntry[]; assert.strictEqual(models.length, 1); assert.strictEqual(models[0].modelEntry.metadata.isUserSelectable, false); }); @@ -358,7 +361,7 @@ suite('ChatModelsViewModel', () => { test('should combine provider and capability filters', () => { const results = viewModel.filter('@provider:copilot @capability:vision'); - const models = results.filter(r => !isVendorEntry(r)) as IModelItemEntry[]; + const models = results.filter(r => !isVendorEntry(r) && 
!isGroupEntry(r)) as IModelItemEntry[]; assert.strictEqual(models.length, 2); assert.ok(models.every(m => m.modelEntry.vendor === 'copilot' && @@ -369,7 +372,7 @@ suite('ChatModelsViewModel', () => { test('should combine provider, capability, and visibility filters', () => { const results = viewModel.filter('@provider:openai @capability:vision @visible:false'); - const models = results.filter(r => !isVendorEntry(r)) as IModelItemEntry[]; + const models = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r)) as IModelItemEntry[]; assert.strictEqual(models.length, 1); assert.strictEqual(models[0].modelEntry.metadata.id, 'gpt-4-vision'); }); @@ -377,7 +380,7 @@ suite('ChatModelsViewModel', () => { test('should filter by text matching model name', () => { const results = viewModel.filter('GPT-4o'); - const models = results.filter(r => !isVendorEntry(r)) as IModelItemEntry[]; + const models = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r)) as IModelItemEntry[]; assert.strictEqual(models.length, 1); assert.strictEqual(models[0].modelEntry.metadata.name, 'GPT-4o'); assert.ok(models[0].modelNameMatches); @@ -386,7 +389,7 @@ suite('ChatModelsViewModel', () => { test('should filter by text matching model id', () => { const results = viewModel.filter('copilot-gpt-4o'); - const models = results.filter(r => !isVendorEntry(r)) as IModelItemEntry[]; + const models = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r)) as IModelItemEntry[]; assert.strictEqual(models.length, 1); assert.strictEqual(models[0].modelEntry.identifier, 'copilot-gpt-4o'); assert.ok(models[0].modelIdMatches); @@ -395,7 +398,7 @@ suite('ChatModelsViewModel', () => { test('should filter by text matching vendor name', () => { const results = viewModel.filter('GitHub'); - const models = results.filter(r => !isVendorEntry(r)) as IModelItemEntry[]; + const models = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r)) as IModelItemEntry[]; assert.strictEqual(models.length, 2); assert.ok(models.every(m => m.modelEntry.vendorDisplayName === 'GitHub Copilot')); }); @@ -403,7 +406,7 @@ suite('ChatModelsViewModel', () => { test('should combine text search with capability filter', () => { const results = viewModel.filter('@capability:tools GPT'); - const models = results.filter(r => !isVendorEntry(r)) as IModelItemEntry[]; + const models = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r)) as IModelItemEntry[]; // Should match all models with tools capability and 'GPT' in name assert.strictEqual(models.length, 3); assert.ok(models.every(m => m.modelEntry.metadata.capabilities?.toolCalling === true)); @@ -426,7 +429,7 @@ suite('ChatModelsViewModel', () => { test('should match capability text in free text search', () => { const results = viewModel.filter('vision'); - const models = results.filter(r => !isVendorEntry(r)) as IModelItemEntry[]; + const models = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r)) as IModelItemEntry[]; // Should match models that have vision capability or "vision" in their name assert.ok(models.length > 0); assert.ok(models.every(m => @@ -437,7 +440,7 @@ suite('ChatModelsViewModel', () => { test('should toggle vendor collapsed state', () => { const vendorEntry = viewModel.viewModelEntries.find(r => isVendorEntry(r) && r.vendorEntry.vendor === 'copilot') as IVendorItemEntry; - viewModel.toggleVendorCollapsed(vendorEntry); + viewModel.toggleCollapsed(vendorEntry); const results = viewModel.filter(''); const copilotVendor = results.find(r => isVendorEntry(r) && (r as 
IVendorItemEntry).vendorEntry.vendor === 'copilot') as IVendorItemEntry; @@ -452,7 +455,7 @@ suite('ChatModelsViewModel', () => { assert.strictEqual(copilotModelsAfterCollapse.length, 0); // Toggle back - viewModel.toggleVendorCollapsed(vendorEntry); + viewModel.toggleCollapsed(vendorEntry); const resultsAfterExpand = viewModel.filter(''); const copilotModelsAfterExpand = resultsAfterExpand.filter(r => !isVendorEntry(r) && (r as IModelItemEntry).modelEntry.vendor === 'copilot' @@ -488,7 +491,7 @@ suite('ChatModelsViewModel', () => { test('should remove filter keywords from text search', () => { const results = viewModel.filter('@provider:copilot @capability:vision GPT'); - const models = results.filter(r => !isVendorEntry(r)) as IModelItemEntry[]; + const models = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r)) as IModelItemEntry[]; // Should only search 'GPT' in model names, not the filter keywords assert.strictEqual(models.length, 2); assert.ok(models.every(m => m.modelEntry.vendor === 'copilot')); @@ -530,7 +533,7 @@ suite('ChatModelsViewModel', () => { test('should include matched capabilities in results', () => { const results = viewModel.filter('@capability:tools @capability:vision'); - const models = results.filter(r => !isVendorEntry(r)) as IModelItemEntry[]; + const models = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r)) as IModelItemEntry[]; assert.ok(models.length > 0); for (const model of models) { @@ -595,7 +598,7 @@ suite('ChatModelsViewModel', () => { test('should not show vendor header when only one vendor exists', async () => { const { viewModel: singleVendorViewModel } = createSingleVendorViewModel(store, chatEntitlementService); - await singleVendorViewModel.resolve(); + await singleVendorViewModel.refresh(); const results = singleVendorViewModel.filter(''); @@ -603,7 +606,7 @@ suite('ChatModelsViewModel', () => { const vendors = results.filter(isVendorEntry); assert.strictEqual(vendors.length, 0, 'Should not show vendor header when only one vendor exists'); - const models = results.filter(r => !isVendorEntry(r)) as IModelItemEntry[]; + const models = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r)) as IModelItemEntry[]; assert.strictEqual(models.length, 2, 'Should show all models'); assert.ok(models.every(m => m.modelEntry.vendor === 'copilot')); }); @@ -616,13 +619,13 @@ suite('ChatModelsViewModel', () => { const vendors = results.filter(isVendorEntry); assert.strictEqual(vendors.length, 2, 'Should show vendor headers when multiple vendors exist'); - const models = results.filter(r => !isVendorEntry(r)) as IModelItemEntry[]; + const models = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r)) as IModelItemEntry[]; assert.strictEqual(models.length, 4); }); test('should filter single vendor models by capability', async () => { const { viewModel: singleVendorViewModel } = createSingleVendorViewModel(store, chatEntitlementService); - await singleVendorViewModel.resolve(); + await singleVendorViewModel.refresh(); const results = singleVendorViewModel.filter('@capability:agent'); @@ -631,7 +634,7 @@ suite('ChatModelsViewModel', () => { assert.strictEqual(vendors.length, 0, 'Should not show vendor header'); // Should only show the model with agent capability - const models = results.filter(r => !isVendorEntry(r)) as IModelItemEntry[]; + const models = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r)) as IModelItemEntry[]; assert.strictEqual(models.length, 1); assert.strictEqual(models[0].modelEntry.metadata.id, 'gpt-4o'); }); 
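Aside: the repeated change from results.filter(r => !isVendorEntry(r)) to results.filter(r => !isVendorEntry(r) && !isGroupEntry(r)) reflects that the view model now emits a third entry kind (group headers, used by the new groupBy = ChatModelGroup.Visibility mode) alongside vendor headers and model rows. The following is only an illustrative sketch of that union-plus-type-guard shape with simplified, hypothetical types; it is not the actual chatModelsViewModel.ts implementation.

// Sketch: simplified stand-ins for the vendor/group/model entry union the tests filter over.
interface IVendorItemEntrySketch { readonly kind: 'vendor'; readonly vendor: string; collapsed?: boolean; }
interface IGroupItemEntrySketch { readonly kind: 'group'; readonly group: 'visible' | 'hidden'; collapsed?: boolean; }
interface IModelItemEntrySketch { readonly kind: 'model'; readonly name: string; readonly vendor: string; readonly isUserSelectable: boolean; }
type ViewModelEntrySketch = IVendorItemEntrySketch | IGroupItemEntrySketch | IModelItemEntrySketch;

function isVendorEntrySketch(e: ViewModelEntrySketch): e is IVendorItemEntrySketch { return e.kind === 'vendor'; }
function isGroupEntrySketch(e: ViewModelEntrySketch): e is IGroupItemEntrySketch { return e.kind === 'group'; }

// The helper the tests effectively inline: keep only model rows, skipping both header kinds.
function modelEntriesOf(entries: readonly ViewModelEntrySketch[]): IModelItemEntrySketch[] {
	return entries.filter((e): e is IModelItemEntrySketch => !isVendorEntrySketch(e) && !isGroupEntrySketch(e));
}

// Usage with hypothetical data, mirroring the visible/hidden grouping asserted in these tests:
const sketchEntries: ViewModelEntrySketch[] = [
	{ kind: 'group', group: 'visible' },
	{ kind: 'model', name: 'GPT-4o', vendor: 'copilot', isUserSelectable: true },
	{ kind: 'group', group: 'hidden' },
	{ kind: 'model', name: 'GPT-4 Vision', vendor: 'openai', isUserSelectable: false },
];
console.log(modelEntriesOf(sketchEntries).length); // 2

Narrowing with an "e is ..." type predicate is what lets the filtering stay free of unchecked casts once a second header kind exists.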
@@ -698,7 +701,7 @@ suite('ChatModelsViewModel', () => { } }); - await viewModel.resolve(); + await viewModel.refresh(); const results = viewModel.filter(''); const vendors = results.filter(isVendorEntry) as IVendorItemEntry[]; @@ -749,10 +752,117 @@ suite('ChatModelsViewModel', () => { test('should not show vendor headers when filtered if only one vendor exists', async () => { const { viewModel: singleVendorViewModel } = createSingleVendorViewModel(store, chatEntitlementService); - await singleVendorViewModel.resolve(); + await singleVendorViewModel.refresh(); const results = singleVendorViewModel.filter('GPT'); const vendors = results.filter(isVendorEntry); assert.strictEqual(vendors.length, 0); }); + + test('should group by visibility', () => { + viewModel.groupBy = ChatModelGroup.Visibility; + const results = viewModel.filter(''); + + const groups = results.filter(isGroupEntry) as IGroupItemEntry[]; + assert.strictEqual(groups.length, 2); + assert.strictEqual(groups[0].group, 'visible'); + assert.strictEqual(groups[1].group, 'hidden'); + + const visibleModels = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r) && r.modelEntry.metadata.isUserSelectable) as IModelItemEntry[]; + const hiddenModels = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r) && !r.modelEntry.metadata.isUserSelectable) as IModelItemEntry[]; + + assert.strictEqual(visibleModels.length, 3); + assert.strictEqual(hiddenModels.length, 1); + }); + + test('should fire onDidChangeGrouping when grouping changes', () => { + let fired = false; + store.add(viewModel.onDidChangeGrouping(() => { + fired = true; + })); + + viewModel.groupBy = ChatModelGroup.Visibility; + assert.strictEqual(fired, true); + }); + + test('should reset collapsed state when grouping changes', () => { + const vendorEntry = viewModel.viewModelEntries.find(r => isVendorEntry(r) && r.vendorEntry.vendor === 'copilot') as IVendorItemEntry; + viewModel.toggleCollapsed(vendorEntry); + + viewModel.groupBy = ChatModelGroup.Visibility; + + const results = viewModel.filter(''); + const groups = results.filter(isGroupEntry) as IGroupItemEntry[]; + assert.ok(groups.every(v => !v.collapsed)); + }); + + test('should sort models within visibility groups', async () => { + languageModelsService.addVendor({ + vendor: 'anthropic', + displayName: 'Anthropic', + managementCommand: undefined, + when: undefined + }); + + languageModelsService.addModel('anthropic', 'anthropic-claude', { + extension: new ExtensionIdentifier('anthropic.api'), + id: 'claude-3', + name: 'Claude 3', + family: 'claude', + version: '1.0', + vendor: 'anthropic', + maxInputTokens: 100000, + maxOutputTokens: 4096, + modelPickerCategory: { label: 'Anthropic', order: 3 }, + isUserSelectable: true, + capabilities: { + toolCalling: true, + vision: false, + agentMode: false + } + }); + + await viewModel.refresh(); + + viewModel.groupBy = ChatModelGroup.Visibility; + const results = viewModel.filter(''); + + const visibleModels = results.filter(r => !isVendorEntry(r) && !isGroupEntry(r) && r.modelEntry.metadata.isUserSelectable) as IModelItemEntry[]; + + assert.strictEqual(visibleModels.length, 4); + assert.strictEqual(visibleModels[0].modelEntry.metadata.name, 'GPT-4'); + assert.strictEqual(visibleModels[0].modelEntry.vendor, 'copilot'); + + assert.strictEqual(visibleModels[1].modelEntry.metadata.name, 'GPT-4o'); + assert.strictEqual(visibleModels[1].modelEntry.vendor, 'copilot'); + + assert.strictEqual(visibleModels[2].modelEntry.metadata.name, 'Claude 3'); + 
assert.strictEqual(visibleModels[2].modelEntry.vendor, 'anthropic'); + + assert.strictEqual(visibleModels[3].modelEntry.metadata.name, 'GPT-3.5 Turbo'); + assert.strictEqual(visibleModels[3].modelEntry.vendor, 'openai'); + }); + + test('should not resort models when visibility is toggled', async () => { + viewModel.groupBy = ChatModelGroup.Visibility; + + // Initial state: + // Visible: GPT-4, GPT-4o, GPT-3.5 Turbo + // Hidden: GPT-4 Vision + + // Toggle GPT-4 Vision to visible + const hiddenModel = viewModel.viewModelEntries.find(r => !isVendorEntry(r) && !isGroupEntry(r) && r.modelEntry.identifier === 'openai-gpt-4-vision') as IModelItemEntry; + assert.ok(hiddenModel); + const initialIndex = viewModel.viewModelEntries.indexOf(hiddenModel); + + viewModel.toggleVisibility(hiddenModel); + + // Verify it is still at the same index + const newIndex = viewModel.viewModelEntries.indexOf(hiddenModel); + assert.strictEqual(newIndex, initialIndex); + + // Verify metadata is updated + assert.strictEqual(hiddenModel.modelEntry.metadata.isUserSelectable, true); + }); + }); diff --git a/src/vs/workbench/contrib/comments/browser/commentNode.ts b/src/vs/workbench/contrib/comments/browser/commentNode.ts index e0c772782f6..66ea363796f 100644 --- a/src/vs/workbench/contrib/comments/browser/commentNode.ts +++ b/src/vs/workbench/contrib/comments/browser/commentNode.ts @@ -52,7 +52,7 @@ import { IResolvedTextEditorModel, ITextModelService } from '../../../../editor/ import { Position } from '../../../../editor/common/core/position.js'; class CommentsActionRunner extends ActionRunner { - protected override async runAction(action: IAction, context: any[]): Promise { + protected override async runAction(action: IAction, context: unknown[]): Promise { await action.run(...context); } } @@ -279,7 +279,7 @@ export class CommentNode extends Disposable { return result; } - private get commentNodeContext(): [any, MarshalledCommentThread] { + private get commentNodeContext(): [{ thread: languages.CommentThread; commentUniqueId: number; $mid: MarshalledId.CommentNode }, MarshalledCommentThread] { return [{ thread: this.commentThread, commentUniqueId: this.comment.uniqueIdInThread, diff --git a/src/vs/workbench/contrib/issue/browser/issueReporterModel.ts b/src/vs/workbench/contrib/issue/browser/issueReporterModel.ts index 0bbd8acf09a..139ef875c36 100644 --- a/src/vs/workbench/contrib/issue/browser/issueReporterModel.ts +++ b/src/vs/workbench/contrib/issue/browser/issueReporterModel.ts @@ -7,13 +7,18 @@ import { mainWindow } from '../../../../base/browser/window.js'; import { isRemoteDiagnosticError, SystemInfo } from '../../../../platform/diagnostics/common/diagnostics.js'; import { ISettingSearchResult, IssueReporterExtensionData, IssueType } from '../common/issue.js'; +interface VersionInfo { + vscodeVersion: string; + os: string; +} + export interface IssueReporterData { issueType: IssueType; issueDescription?: string; issueTitle?: string; extensionData?: string; - versionInfo?: any; + versionInfo?: VersionInfo; systemInfo?: SystemInfo; systemInfoWeb?: string; processInfo?: string; diff --git a/src/vs/workbench/contrib/mcp/common/mcpLanguageModelToolContribution.ts b/src/vs/workbench/contrib/mcp/common/mcpLanguageModelToolContribution.ts index 00a6dde1cda..f75cb85b4d4 100644 --- a/src/vs/workbench/contrib/mcp/common/mcpLanguageModelToolContribution.ts +++ b/src/vs/workbench/contrib/mcp/common/mcpLanguageModelToolContribution.ts @@ -68,9 +68,11 @@ export class McpLanguageModelToolContribution extends Disposable 
implements IWor const rec: Rec = { dispose: () => store.dispose() }; const toolSet = new Lazy(() => { const source = rec.source = mcpServerToSourceData(server); + const referenceName = server.definition.label.toLowerCase().replace(/\s+/g, '-'); // see issue https://github.com/microsoft/vscode/issues/278152 const toolSet = store.add(this._toolsService.createToolSet( source, - server.definition.id, server.definition.label, + server.definition.id, + referenceName, { icon: Codicon.mcp, description: localize('mcp.toolset', "{0}: All Tools", server.definition.label) diff --git a/src/vs/workbench/contrib/notebook/browser/viewParts/notebookViewZones.ts b/src/vs/workbench/contrib/notebook/browser/viewParts/notebookViewZones.ts index b0ade6276a4..652fc736628 100644 --- a/src/vs/workbench/contrib/notebook/browser/viewParts/notebookViewZones.ts +++ b/src/vs/workbench/contrib/notebook/browser/viewParts/notebookViewZones.ts @@ -205,7 +205,7 @@ export class NotebookViewZones extends Disposable { } } -function safeInvoke1Arg(func: Function, arg1: any): void { +function safeInvoke1Arg(func: Function, arg1: unknown): void { try { func(arg1); } catch (e) { diff --git a/src/vs/workbench/contrib/preferences/browser/keybindingsEditor.ts b/src/vs/workbench/contrib/preferences/browser/keybindingsEditor.ts index 076761078d5..d8b8e1cc83a 100644 --- a/src/vs/workbench/contrib/preferences/browser/keybindingsEditor.ts +++ b/src/vs/workbench/contrib/preferences/browser/keybindingsEditor.ts @@ -821,7 +821,7 @@ export class KeybindingsEditor extends EditorPane imp }; } - private onKeybindingEditingError(error: any): void { + private onKeybindingEditingError(error: unknown): void { this.notificationService.error(typeof error === 'string' ? error : localize('error', "Error '{0}' while editing the keybinding. 
Please open 'keybindings.json' file and check for errors.", `${error}`)); } } diff --git a/src/vs/workbench/contrib/scm/browser/media/scm.css b/src/vs/workbench/contrib/scm/browser/media/scm.css index 2c38b64cc5d..7008508d1e2 100644 --- a/src/vs/workbench/contrib/scm/browser/media/scm.css +++ b/src/vs/workbench/contrib/scm/browser/media/scm.css @@ -587,62 +587,53 @@ /* History item hover */ -.monaco-hover.history-item-hover p:first-child { +.monaco-hover.history-item-hover .history-item-hover-container > .rendered-markdown:first-child > p { margin-top: 4px; } -.monaco-hover.history-item-hover p:last-child { +.monaco-hover.history-item-hover .history-item-hover-container > .rendered-markdown:last-child p { margin-bottom: 2px !important; } -.monaco-hover.history-item-hover p:last-child span:not(.codicon) { +.monaco-hover.history-item-hover .history-item-hover-container > .rendered-markdown:last-child p span:not(.codicon) { padding: 2px 0; } -.monaco-hover.history-item-hover hr { +.monaco-hover.history-item-hover .history-item-hover-container > .rendered-markdown hr { margin-top: 4px; margin-bottom: 4px; } -.monaco-hover.history-item-hover hr + p { +.monaco-hover.history-item-hover .history-item-hover-container > .rendered-markdown > p { margin: 4px 0; } -.monaco-hover.history-item-hover hr:nth-of-type(2):nth-last-of-type(2) + p { +.monaco-hover.history-item-hover .history-item-hover-container div:nth-of-type(3):nth-last-of-type(2) > p { display: flex; flex-wrap: wrap; gap: 4px; } -.monaco-hover.history-item-hover span:not(.codicon) { +.monaco-hover.history-item-hover .history-item-hover-container span:not(.codicon) { margin-bottom: 0 !important; } -.monaco-hover.history-item-hover p > span > span.codicon.codicon-git-branch { +.monaco-hover.history-item-hover .history-item-hover-container p > span > span.codicon.codicon-git-branch { font-size: 12px; margin-bottom: 2px !important; } -.monaco-hover.history-item-hover p > span > span.codicon.codicon-tag, -.monaco-hover.history-item-hover p > span > span.codicon.codicon-target { +.monaco-hover.history-item-hover .history-item-hover-container p > span > span.codicon.codicon-tag, +.monaco-hover.history-item-hover .history-item-hover-container p > span > span.codicon.codicon-target { font-size: 14px; margin-bottom: 2px !important; } -.monaco-hover.history-item-hover p > span > span.codicon.codicon-cloud { +.monaco-hover.history-item-hover .history-item-hover-container p > span > span.codicon.codicon-cloud { font-size: 14px; margin-bottom: 1px !important; } -.monaco-hover.history-item-hover .hover-row.status-bar .action { - display: flex; - align-items: center; -} - -.monaco-hover.history-item-hover .hover-row.status-bar .action .codicon { - color: inherit; -} - /* Graph */ .pane-header .scm-graph-view-badge-container { diff --git a/src/vs/workbench/contrib/scm/browser/scmHistory.ts b/src/vs/workbench/contrib/scm/browser/scmHistory.ts index 01123d797f4..a988e91cd31 100644 --- a/src/vs/workbench/contrib/scm/browser/scmHistory.ts +++ b/src/vs/workbench/contrib/scm/browser/scmHistory.ts @@ -9,8 +9,12 @@ import { badgeBackground, chartsBlue, chartsPurple, foreground } from '../../../ import { asCssVariable, ColorIdentifier, registerColor } from '../../../../platform/theme/common/colorUtils.js'; import { ISCMHistoryItem, ISCMHistoryItemGraphNode, ISCMHistoryItemRef, ISCMHistoryItemViewModel, SCMIncomingHistoryItemId, SCMOutgoingHistoryItemId } from '../common/history.js'; import { rot } from '../../../../base/common/numbers.js'; -import { svgElem } from 
 '../../../../base/browser/dom.js';
+import { $, svgElem } from '../../../../base/browser/dom.js';
 import { PANEL_BACKGROUND } from '../../../common/theme.js';
+import { DisposableStore, IDisposable } from '../../../../base/common/lifecycle.js';
+import { IMarkdownString, isEmptyMarkdownString, isMarkdownString, MarkdownString } from '../../../../base/common/htmlContent.js';
+import { ThemeIcon } from '../../../../base/common/themables.js';
+import { IMarkdownRendererService } from '../../../../platform/markdown/browser/markdownRenderer.js';
 export const SWIMLANE_HEIGHT = 22;
 export const SWIMLANE_WIDTH = 11;
@@ -528,3 +532,52 @@ export function compareHistoryItemRefs(
 	return ref1Order - ref2Order;
 }
+
+export function toHistoryItemHoverContent(markdownRendererService: IMarkdownRendererService, historyItem: ISCMHistoryItem, includeReferences: boolean): { content: string | IMarkdownString | HTMLElement; disposables: IDisposable } {
+	const disposables = new DisposableStore();
+
+	if (historyItem.tooltip === undefined) {
+		return { content: historyItem.message, disposables };
+	}
+
+	if (isMarkdownString(historyItem.tooltip)) {
+		return { content: historyItem.tooltip, disposables };
+	}
+
+	// References are "injected" into the hover here since the extension does
+	// not know the color used in the graph to render the history item that
+	// the reference points to. They are added before the last element of the
+	// array, which is assumed to contain the hover commands.
+	const tooltipSections = historyItem.tooltip.slice();
+
+	if (includeReferences && historyItem.references?.length) {
+		const markdownString = new MarkdownString('', { supportHtml: true, supportThemeIcons: true });
+
+		for (const reference of historyItem.references) {
+			const labelIconId = ThemeIcon.isThemeIcon(reference.icon) ? reference.icon.id : '';
+
+			const labelBackgroundColor = reference.color ? asCssVariable(reference.color) : asCssVariable(historyItemHoverDefaultLabelBackground);
+			const labelForegroundColor = reference.color ?
asCssVariable(historyItemHoverLabelForeground) : asCssVariable(historyItemHoverDefaultLabelForeground); + markdownString.appendMarkdown(` $(${labelIconId}) `); + markdownString.appendText(reference.name); + markdownString.appendMarkdown('  '); + } + + markdownString.appendMarkdown(`\n\n---\n\n`); + tooltipSections.splice(tooltipSections.length - 1, 0, markdownString); + } + + // Render tooltip content + const hoverContainer = $('.history-item-hover-container'); + for (const markdownString of tooltipSections) { + if (isEmptyMarkdownString(markdownString)) { + continue; + } + + const renderedContent = markdownRendererService.render(markdownString); + hoverContainer.appendChild(renderedContent.element); + disposables.add(renderedContent); + } + + return { content: hoverContainer, disposables }; +} diff --git a/src/vs/workbench/contrib/scm/browser/scmHistoryViewPane.ts b/src/vs/workbench/contrib/scm/browser/scmHistoryViewPane.ts index a67ececff33..c08e563cd61 100644 --- a/src/vs/workbench/contrib/scm/browser/scmHistoryViewPane.ts +++ b/src/vs/workbench/contrib/scm/browser/scmHistoryViewPane.ts @@ -5,7 +5,7 @@ import './media/scm.css'; import { $, append, h, reset } from '../../../../base/browser/dom.js'; -import { IHoverOptions, IManagedHoverTooltipMarkdownString } from '../../../../base/browser/ui/hover/hover.js'; +import { IHoverOptions, IManagedHoverContent } from '../../../../base/browser/ui/hover/hover.js'; import { IHoverDelegate } from '../../../../base/browser/ui/hover/hoverDelegate.js'; import { IconLabel } from '../../../../base/browser/ui/iconLabel/iconLabel.js'; import { IIdentityProvider, IListVirtualDelegate } from '../../../../base/browser/ui/list/list.js'; @@ -28,7 +28,7 @@ import { asCssVariable, ColorIdentifier, foreground } from '../../../../platform import { IFileIconTheme, IThemeService } from '../../../../platform/theme/common/themeService.js'; import { IViewPaneOptions, ViewAction, ViewPane, ViewPaneShowActions } from '../../../browser/parts/views/viewPane.js'; import { IViewDescriptorService, ViewContainerLocation } from '../../../common/views.js'; -import { renderSCMHistoryItemGraph, toISCMHistoryItemViewModelArray, SWIMLANE_WIDTH, renderSCMHistoryGraphPlaceholder, historyItemHoverLabelForeground, historyItemHoverDefaultLabelBackground, getHistoryItemIndex } from './scmHistory.js'; +import { renderSCMHistoryItemGraph, toISCMHistoryItemViewModelArray, SWIMLANE_WIDTH, renderSCMHistoryGraphPlaceholder, historyItemHoverLabelForeground, historyItemHoverDefaultLabelBackground, getHistoryItemIndex, toHistoryItemHoverContent } from './scmHistory.js'; import { getHistoryItemEditorTitle, getProviderKey, isSCMHistoryItemChangeNode, isSCMHistoryItemChangeViewModelTreeElement, isSCMHistoryItemLoadMoreTreeElement, isSCMHistoryItemViewModelTreeElement, isSCMRepository } from './util.js'; import { ISCMHistoryItem, ISCMHistoryItemChange, ISCMHistoryItemGraphNode, ISCMHistoryItemRef, ISCMHistoryItemViewModel, ISCMHistoryProvider, SCMHistoryItemChangeViewModelTreeElement, SCMHistoryItemLoadMoreTreeElement, SCMHistoryItemViewModelTreeElement, SCMIncomingHistoryItemId, SCMOutgoingHistoryItemId } from '../common/history.js'; import { HISTORY_VIEW_PANE_ID, ISCMProvider, ISCMRepository, ISCMService, ISCMViewService, ViewMode } from '../common/scm.js'; @@ -76,6 +76,8 @@ import { ElementsDragAndDropData, ListViewTargetSector } from '../../../../base/ import { CodeDataTransfers } from '../../../../platform/dnd/browser/dnd.js'; import { SCMHistoryItemTransferData } from 
'./scmHistoryChatContext.js'; import { CancellationToken } from '../../../../base/common/cancellation.js'; +import { IMarkdownRendererService } from '../../../../platform/markdown/browser/markdownRenderer.js'; +import { isMarkdownString } from '../../../../base/common/htmlContent.js'; const PICK_REPOSITORY_ACTION_ID = 'workbench.scm.action.graph.pickRepository'; const PICK_HISTORY_ITEM_REFS_ACTION_ID = 'workbench.scm.action.graph.pickHistoryItemRefs'; @@ -454,6 +456,7 @@ class HistoryItemRenderer implements ICompressibleTreeRenderer | undefined; } export interface ISCMHistoryItemGraphNode { diff --git a/src/vs/workbench/contrib/snippets/browser/snippetsFile.ts b/src/vs/workbench/contrib/snippets/browser/snippetsFile.ts index 98829a81bf0..fb981d5bbb7 100644 --- a/src/vs/workbench/contrib/snippets/browser/snippetsFile.ts +++ b/src/vs/workbench/contrib/snippets/browser/snippetsFile.ts @@ -149,7 +149,7 @@ interface JsonSerializedSnippet { description: string; } -function isJsonSerializedSnippet(thing: any): thing is JsonSerializedSnippet { +function isJsonSerializedSnippet(thing: unknown): thing is JsonSerializedSnippet { return isObject(thing) && Boolean((thing).body); } diff --git a/src/vs/workbench/contrib/terminalContrib/chatAgentTools/browser/tools/monitoring/outputMonitor.ts b/src/vs/workbench/contrib/terminalContrib/chatAgentTools/browser/tools/monitoring/outputMonitor.ts index f26d26a1c7e..49d6482b58d 100644 --- a/src/vs/workbench/contrib/terminalContrib/chatAgentTools/browser/tools/monitoring/outputMonitor.ts +++ b/src/vs/workbench/contrib/terminalContrib/chatAgentTools/browser/tools/monitoring/outputMonitor.ts @@ -525,27 +525,47 @@ export class OutputMonitor extends Disposable implements IOutputMonitor { ); let inputDataDisposable: IDisposable = Disposable.None; + let instanceDisposedDisposable: IDisposable = Disposable.None; const inputPromise = new Promise(resolve => { + let settled = false; + const settle = (value: boolean, state: OutputMonitorState) => { + if (settled) { + return; + } + settled = true; + part.hide(); + inputDataDisposable.dispose(); + instanceDisposedDisposable.dispose(); + this._state = state; + resolve(value); + }; inputDataDisposable = this._register(execution.instance.onDidInputData((data) => { if (!data || data === '\r' || data === '\n' || data === '\r\n') { - part.hide(); - inputDataDisposable.dispose(); - this._state = OutputMonitorState.PollingForIdle; this._outputMonitorTelemetryCounters.inputToolFreeFormInputCount++; - resolve(true); + settle(true, OutputMonitorState.PollingForIdle); } })); + instanceDisposedDisposable = this._register(execution.instance.onDisposed(() => { + settle(false, OutputMonitorState.Cancelled); + })); }); + const disposeListeners = () => { + inputDataDisposable.dispose(); + instanceDisposedDisposable.dispose(); + }; + const result = await Promise.race([userPrompt, inputPromise]); if (result === focusTerminalSelection) { + execution.instance.focus(true); return await inputPromise; } if (result === undefined) { - inputDataDisposable.dispose(); + disposeListeners(); // Prompt was dismissed without providing input return false; } + disposeListeners(); return !!result; } @@ -556,6 +576,7 @@ export class OutputMonitor extends Disposable implements IOutputMonitor { } const focusTerminalSelection = Symbol('focusTerminalSelection'); let inputDataDisposable: IDisposable = Disposable.None; + let instanceDisposedDisposable: IDisposable = Disposable.None; const { promise: userPrompt, part } = this._createElicitationPart( token, 
execution.sessionId, @@ -583,27 +604,47 @@ export class OutputMonitor extends Disposable implements IOutputMonitor { getMoreActions(suggestedOption, confirmationPrompt) ); const inputPromise = new Promise(resolve => { - inputDataDisposable = this._register(execution.instance.onDidInputData(() => { + let settled = false; + const settle = (value: boolean, state: OutputMonitorState) => { + if (settled) { + return; + } + settled = true; part.hide(); inputDataDisposable.dispose(); - this._state = OutputMonitorState.PollingForIdle; - resolve(true); + instanceDisposedDisposable.dispose(); + this._state = state; + resolve(value); + }; + inputDataDisposable = this._register(execution.instance.onDidInputData(() => { + settle(true, OutputMonitorState.PollingForIdle); + })); + instanceDisposedDisposable = this._register(execution.instance.onDisposed(() => { + settle(false, OutputMonitorState.Cancelled); })); }); + const disposeListeners = () => { + inputDataDisposable.dispose(); + instanceDisposedDisposable.dispose(); + }; + const optionToRun = await Promise.race([userPrompt, inputPromise]); if (optionToRun === focusTerminalSelection) { + execution.instance.focus(true); return await inputPromise; } if (optionToRun === true) { + disposeListeners(); return true; } if (typeof optionToRun === 'string' && optionToRun.length) { - inputDataDisposable.dispose(); + execution.instance.focus(true); + disposeListeners(); await execution.instance.sendText(optionToRun, true); return optionToRun; } - inputDataDisposable.dispose(); + disposeListeners(); return optionToRun; } diff --git a/src/vs/workbench/contrib/terminalContrib/chatAgentTools/browser/tools/monitoring/types.ts b/src/vs/workbench/contrib/terminalContrib/chatAgentTools/browser/tools/monitoring/types.ts index cd6510b14e7..d4a92506c1a 100644 --- a/src/vs/workbench/contrib/terminalContrib/chatAgentTools/browser/tools/monitoring/types.ts +++ b/src/vs/workbench/contrib/terminalContrib/chatAgentTools/browser/tools/monitoring/types.ts @@ -20,7 +20,7 @@ export interface IExecution { isActive?: () => Promise; task?: Task | Pick; dependencyTasks?: Task[]; - instance: Pick; + instance: Pick; sessionId: string | undefined; } diff --git a/src/vs/workbench/contrib/terminalContrib/chatAgentTools/test/browser/outputMonitor.test.ts b/src/vs/workbench/contrib/terminalContrib/chatAgentTools/test/browser/outputMonitor.test.ts index 72f093d7d22..e47aa5b86a9 100644 --- a/src/vs/workbench/contrib/terminalContrib/chatAgentTools/test/browser/outputMonitor.test.ts +++ b/src/vs/workbench/contrib/terminalContrib/chatAgentTools/test/browser/outputMonitor.test.ts @@ -23,7 +23,7 @@ import { isNumber } from '../../../../../../base/common/types.js'; suite('OutputMonitor', () => { const store = ensureNoDisposablesAreLeakedInTestSuite(); let monitor: OutputMonitor; - let execution: { getOutput: () => string; isActive?: () => Promise; instance: Pick; sessionId: string }; + let execution: { getOutput: () => string; isActive?: () => Promise; instance: Pick; sessionId: string }; let cts: CancellationTokenSource; let instantiationService: TestInstantiationService; let sendTextCalled: boolean; @@ -39,6 +39,7 @@ suite('OutputMonitor', () => { instanceId: 1, sendText: async () => { sendTextCalled = true; }, onDidInputData: dataEmitter.event, + onDisposed: Event.None, onData: dataEmitter.event, focus: () => { }, // eslint-disable-next-line local/code-no-any-casts diff --git a/src/vs/workbench/contrib/terminalContrib/zoom/browser/terminal.zoom.contribution.ts 
b/src/vs/workbench/contrib/terminalContrib/zoom/browser/terminal.zoom.contribution.ts index cf4d8e63508..23aa4a58037 100644 --- a/src/vs/workbench/contrib/terminalContrib/zoom/browser/terminal.zoom.contribution.ts +++ b/src/vs/workbench/contrib/terminalContrib/zoom/browser/terminal.zoom.contribution.ts @@ -7,7 +7,7 @@ import type { Terminal as RawXtermTerminal } from '@xterm/xterm'; import { Event } from '../../../../../base/common/event.js'; import { IMouseWheelEvent } from '../../../../../base/browser/mouseEvent.js'; import { MouseWheelClassifier } from '../../../../../base/browser/ui/scrollbar/scrollableElement.js'; -import { Disposable, MutableDisposable, toDisposable } from '../../../../../base/common/lifecycle.js'; +import { Disposable, MutableDisposable } from '../../../../../base/common/lifecycle.js'; import { isMacintosh } from '../../../../../base/common/platform.js'; import { TerminalSettingId } from '../../../../../platform/terminal/common/terminal.js'; import { IDetachedTerminalInstance, ITerminalContribution, ITerminalInstance, IXtermTerminal } from '../../../terminal/browser/terminal.js'; @@ -18,6 +18,7 @@ import { localize2 } from '../../../../../nls.js'; import { isNumber } from '../../../../../base/common/types.js'; import { defaultTerminalFontSize } from '../../../terminal/common/terminalConfiguration.js'; import { TerminalZoomCommandId, TerminalZoomSettingId } from '../common/terminal.zoom.js'; +import * as dom from '../../../../../base/browser/dom.js'; class TerminalMouseWheelZoomContribution extends Disposable implements ITerminalContribution { static readonly ID = 'terminal.mouseWheelZoom'; @@ -71,54 +72,48 @@ class TerminalMouseWheelZoomContribution extends Disposable implements ITerminal let gestureHasZoomModifiers = false; let gestureAccumulatedDelta = 0; - raw.attachCustomWheelEventHandler((browserEvent: WheelEvent) => { - function isWheelEvent(e: MouseEvent): e is IMouseWheelEvent { - return 'wheelDelta' in e && 'wheelDeltaX' in e && 'wheelDeltaY' in e; - } - if (isWheelEvent(browserEvent)) { - if (classifier.isPhysicalMouseWheel()) { - if (this._hasMouseWheelZoomModifiers(browserEvent)) { - const delta = browserEvent.deltaY > 0 ? -1 : 1; - const newFontSize = this._clampFontSize(this._getConfigFontSize() + delta); - this._configurationService.updateValue(TerminalSettingId.FontSize, newFontSize); - // EditorZoom.setZoomLevel(zoomLevel + delta); - browserEvent.preventDefault(); - browserEvent.stopPropagation(); - return false; - } - } else { - // we consider mousewheel events that occur within 50ms of each other to be part of the same gesture - // we don't want to consider mouse wheel events where ctrl/cmd is pressed during the inertia phase - // we also want to accumulate deltaY values from the same gesture and use that to set the zoom level - if (Date.now() - prevMouseWheelTime > 50) { - // reset if more than 50ms have passed - gestureStartFontSize = this._getConfigFontSize(); - gestureHasZoomModifiers = this._hasMouseWheelZoomModifiers(browserEvent); - gestureAccumulatedDelta = 0; - } + const wheelListener = (browserEvent: WheelEvent) => { + if (classifier.isPhysicalMouseWheel()) { + if (this._hasMouseWheelZoomModifiers(browserEvent)) { + const delta = browserEvent.deltaY > 0 ? 
-1 : 1; + const newFontSize = this._clampFontSize(this._getConfigFontSize() + delta); + this._configurationService.updateValue(TerminalSettingId.FontSize, newFontSize); + // EditorZoom.setZoomLevel(zoomLevel + delta); + browserEvent.preventDefault(); + browserEvent.stopPropagation(); + } + } else { + // we consider mousewheel events that occur within 50ms of each other to be part of the same gesture + // we don't want to consider mouse wheel events where ctrl/cmd is pressed during the inertia phase + // we also want to accumulate deltaY values from the same gesture and use that to set the zoom level + if (Date.now() - prevMouseWheelTime > 50) { + // reset if more than 50ms have passed + gestureStartFontSize = this._getConfigFontSize(); + gestureHasZoomModifiers = this._hasMouseWheelZoomModifiers(browserEvent); + gestureAccumulatedDelta = 0; + } - prevMouseWheelTime = Date.now(); + prevMouseWheelTime = Date.now(); + gestureAccumulatedDelta += browserEvent.deltaY; + + if (gestureHasZoomModifiers) { + const deltaAbs = Math.ceil(Math.abs(gestureAccumulatedDelta / 5)); + const deltaDirection = gestureAccumulatedDelta > 0 ? -1 : 1; + const delta = deltaAbs * deltaDirection; + const newFontSize = this._clampFontSize(gestureStartFontSize + delta); + this._configurationService.updateValue(TerminalSettingId.FontSize, newFontSize); gestureAccumulatedDelta += browserEvent.deltaY; - - if (gestureHasZoomModifiers) { - const deltaAbs = Math.ceil(Math.abs(gestureAccumulatedDelta / 5)); - const deltaDirection = gestureAccumulatedDelta > 0 ? -1 : 1; - const delta = deltaAbs * deltaDirection; - const newFontSize = this._clampFontSize(gestureStartFontSize + delta); - this._configurationService.updateValue(TerminalSettingId.FontSize, newFontSize); - gestureAccumulatedDelta += browserEvent.deltaY; - browserEvent.preventDefault(); - browserEvent.stopPropagation(); - return false; - } + browserEvent.preventDefault(); + browserEvent.stopPropagation(); } } - return true; - }); - this._listener.value = toDisposable(() => raw.attachCustomWheelEventHandler(() => true)); + }; + + // Use the capture phase to ensure we catch the event before the terminal's scrollable element consumes it + this._listener.value = dom.addDisposableListener(raw.element!, dom.EventType.MOUSE_WHEEL, wheelListener, { capture: true, passive: false }); } - private _hasMouseWheelZoomModifiers(browserEvent: IMouseWheelEvent): boolean { + private _hasMouseWheelZoomModifiers(browserEvent: WheelEvent | IMouseWheelEvent): boolean { return ( isMacintosh // on macOS we support cmd + two fingers scroll (`metaKey` set) diff --git a/src/vs/workbench/contrib/webviewView/browser/webviewViewPane.ts b/src/vs/workbench/contrib/webviewView/browser/webviewViewPane.ts index cfb107654c8..efa5a1b2b74 100644 --- a/src/vs/workbench/contrib/webviewView/browser/webviewViewPane.ts +++ b/src/vs/workbench/contrib/webviewView/browser/webviewViewPane.ts @@ -53,7 +53,7 @@ export class WebviewViewPane extends ViewPane { private _container?: HTMLElement; private _rootContainer?: HTMLElement; - private _resizeObserver?: any; + private _resizeObserver?: ResizeObserver; private readonly defaultTitle: string; private setTitle: string | undefined; @@ -138,7 +138,7 @@ export class WebviewViewPane extends ViewPane { }); this._register(toDisposable(() => { - this._resizeObserver.disconnect(); + this._resizeObserver?.disconnect(); })); this._resizeObserver.observe(container); } diff --git a/src/vs/workbench/contrib/welcomeGettingStarted/browser/gettingStarted.ts 
b/src/vs/workbench/contrib/welcomeGettingStarted/browser/gettingStarted.ts index 4801a2a06b5..83eb72be591 100644 --- a/src/vs/workbench/contrib/welcomeGettingStarted/browser/gettingStarted.ts +++ b/src/vs/workbench/contrib/welcomeGettingStarted/browser/gettingStarted.ts @@ -1533,7 +1533,7 @@ export class GettingStartedPage extends EditorPane { buildStepList(); this.detailsPageDisposables.add(this.contextService.onDidChangeContext(e => { - if (e.affectsSome(contextKeysToWatch) && this.currentWalkthrough) { + if (e.affectsSome(contextKeysToWatch) && this.currentWalkthrough && this.editorInput) { buildStepList(); this.registerDispatchListeners(); this.selectStep(this.editorInput.selectedStep, false); diff --git a/src/vs/workbench/contrib/welcomeGettingStarted/browser/gettingStartedAccessibleView.ts b/src/vs/workbench/contrib/welcomeGettingStarted/browser/gettingStartedAccessibleView.ts index e1d9858430b..1858285a566 100644 --- a/src/vs/workbench/contrib/welcomeGettingStarted/browser/gettingStartedAccessibleView.ts +++ b/src/vs/workbench/contrib/welcomeGettingStarted/browser/gettingStartedAccessibleView.ts @@ -92,7 +92,7 @@ class GettingStartedAccessibleProvider extends Disposable implements IAccessible if (isCommand) { const commandURI = URI.parse(command); - let args: any = []; + let args: unknown[] = []; try { args = parse(decodeURIComponent(commandURI.query)); } catch { diff --git a/src/vs/workbench/electron-browser/actions/windowActions.ts b/src/vs/workbench/electron-browser/actions/windowActions.ts index f710845a994..9ff3b8f6058 100644 --- a/src/vs/workbench/electron-browser/actions/windowActions.ts +++ b/src/vs/workbench/electron-browser/actions/windowActions.ts @@ -65,6 +65,32 @@ export class CloseWindowAction extends Action2 { } } +export class CloseOtherWindowsAction extends Action2 { + + private static readonly ID = 'workbench.action.closeOtherWindows'; + + constructor() { + super({ + id: CloseOtherWindowsAction.ID, + title: localize2('closeOtherWindows', "Close Other Windows"), + f1: true + }); + } + + override async run(accessor: ServicesAccessor): Promise { + const nativeHostService = accessor.get(INativeHostService); + + const currentWindowId = getActiveWindow().vscodeWindowId; + const windows = await nativeHostService.getWindows({ includeAuxiliaryWindows: false }); + + for (const window of windows) { + if (window.id !== currentWindowId) { + nativeHostService.closeWindow({ targetWindowId: window.id }); + } + } + } +} + abstract class BaseZoomAction extends Action2 { private static readonly ZOOM_LEVEL_SETTING_KEY = 'window.zoomLevel'; diff --git a/src/vs/workbench/electron-browser/desktop.contribution.ts b/src/vs/workbench/electron-browser/desktop.contribution.ts index d81319473b6..5f92a46fb0f 100644 --- a/src/vs/workbench/electron-browser/desktop.contribution.ts +++ b/src/vs/workbench/electron-browser/desktop.contribution.ts @@ -10,7 +10,7 @@ import { IConfigurationRegistry, Extensions as ConfigurationExtensions, Configur import { KeyMod, KeyCode } from '../../base/common/keyCodes.js'; import { isLinux, isMacintosh, isWindows } from '../../base/common/platform.js'; import { ConfigureRuntimeArgumentsAction, ToggleDevToolsAction, ReloadWindowWithExtensionsDisabledAction, OpenUserDataFolderAction, ShowGPUInfoAction, StopTracing } from './actions/developerActions.js'; -import { ZoomResetAction, ZoomOutAction, ZoomInAction, CloseWindowAction, SwitchWindowAction, QuickSwitchWindowAction, NewWindowTabHandler, ShowPreviousWindowTabHandler, ShowNextWindowTabHandler, 
MoveWindowTabToNewWindowHandler, MergeWindowTabsHandlerHandler, ToggleWindowTabsBarHandler, ToggleWindowAlwaysOnTopAction, DisableWindowAlwaysOnTopAction, EnableWindowAlwaysOnTopAction } from './actions/windowActions.js'; +import { ZoomResetAction, ZoomOutAction, ZoomInAction, CloseWindowAction, SwitchWindowAction, QuickSwitchWindowAction, NewWindowTabHandler, ShowPreviousWindowTabHandler, ShowNextWindowTabHandler, MoveWindowTabToNewWindowHandler, MergeWindowTabsHandlerHandler, ToggleWindowTabsBarHandler, ToggleWindowAlwaysOnTopAction, DisableWindowAlwaysOnTopAction, EnableWindowAlwaysOnTopAction, CloseOtherWindowsAction } from './actions/windowActions.js'; import { ContextKeyExpr } from '../../platform/contextkey/common/contextkey.js'; import { KeybindingsRegistry, KeybindingWeight } from '../../platform/keybinding/common/keybindingsRegistry.js'; import { CommandsRegistry } from '../../platform/commands/common/commands.js'; @@ -41,6 +41,7 @@ import { MAX_ZOOM_LEVEL, MIN_ZOOM_LEVEL } from '../../platform/window/electron-b registerAction2(SwitchWindowAction); registerAction2(QuickSwitchWindowAction); registerAction2(CloseWindowAction); + registerAction2(CloseOtherWindowsAction); registerAction2(ToggleWindowAlwaysOnTopAction); registerAction2(EnableWindowAlwaysOnTopAction); registerAction2(DisableWindowAlwaysOnTopAction); diff --git a/src/vs/workbench/services/extensions/browser/webWorkerExtensionHost.ts b/src/vs/workbench/services/extensions/browser/webWorkerExtensionHost.ts index 9352abdfb0f..6c9249b2d22 100644 --- a/src/vs/workbench/services/extensions/browser/webWorkerExtensionHost.ts +++ b/src/vs/workbench/services/extensions/browser/webWorkerExtensionHost.ts @@ -119,8 +119,13 @@ export class WebWorkerExtensionHost extends Disposable implements IExtensionHost console.warn(`The web worker extension host is started in a same-origin iframe!`); } - const relativeExtensionHostIframeSrc = FileAccess.asBrowserUri(iframeModulePath); - return `${relativeExtensionHostIframeSrc.toString(true)}${suffix}`; + const relativeExtensionHostIframeSrc = this._webWorkerService.getWorkerUrl(new WebWorkerDescriptor({ + esmModuleLocation: FileAccess.asBrowserUri(iframeModulePath), + esmModuleLocationBundler: new URL(`../worker/webWorkerExtensionHostIframe.html`, import.meta.url), + label: 'webWorkerExtensionHostIframe' + })); + + return `${relativeExtensionHostIframeSrc}${suffix}`; } public async start(): Promise { @@ -349,5 +354,5 @@ export class WebWorkerExtensionHost extends Disposable implements IExtensionHost const extensionHostWorkerMainDescriptor = new WebWorkerDescriptor({ label: 'extensionHostWorkerMain', esmModuleLocation: () => FileAccess.asBrowserUri('vs/workbench/api/worker/extensionHostWorkerMain.js'), - esmModuleLocationBundler: () => new URL('../../../api/worker/extensionHostWorkerMain.ts?worker', import.meta.url), + esmModuleLocationBundler: () => new URL('../../../api/worker/extensionHostWorkerMain.ts?workerModule', import.meta.url), }); diff --git a/src/vscode-dts/vscode.proposed.scmHistoryProvider.d.ts b/src/vscode-dts/vscode.proposed.scmHistoryProvider.d.ts index 5f0a8eb257c..ef9d93342cf 100644 --- a/src/vscode-dts/vscode.proposed.scmHistoryProvider.d.ts +++ b/src/vscode-dts/vscode.proposed.scmHistoryProvider.d.ts @@ -61,7 +61,7 @@ declare module 'vscode' { readonly timestamp?: number; readonly statistics?: SourceControlHistoryItemStatistics; readonly references?: SourceControlHistoryItemRef[]; - readonly tooltip?: string | MarkdownString | undefined; + readonly tooltip?: MarkdownString | 
Array | undefined;
 	}

 	export interface SourceControlHistoryItemRef {
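The last hunk widens SourceControlHistoryItem.tooltip in the proposed scmHistoryProvider API from a single string/MarkdownString to Markdown content that can be split into sections; toHistoryItemHoverContent in scmHistory.ts renders each section into the new .history-item-hover-container and injects reference labels before the final section, which is assumed to hold the hover commands. The snippet below is only a sketch of how an extension might supply such a sectioned tooltip, assuming the element type elided above is an array of MarkdownString and that the proposed API is enabled for the extension; the command id is hypothetical.

import * as vscode from 'vscode';

// Sketch: build a tooltip as separate Markdown sections. The workbench renders each
// section on its own, and treats the last one as the row of hover commands.
function toHistoryItemTooltip(authorName: string, message: string): vscode.MarkdownString[] {
	const header = new vscode.MarkdownString(undefined, true); // second arg enables $(...) theme icons
	header.appendMarkdown(`$(account) **${authorName}**\n\n`);
	header.appendText(message);

	const commands = new vscode.MarkdownString(undefined, true);
	commands.isTrusted = true; // needed for command: links to be clickable
	commands.appendMarkdown('[$(copy) Copy Message](command:hypothetical.copyCommitMessage)');

	return [header, commands];
}

// A history provider would then attach the result as the item's tooltip, e.g.
// { id, parentIds, message, tooltip: toHistoryItemTooltip(author, message) }.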