Merge branch 'main' into dev/dmitriv/eslint-map-delete

This commit is contained in:
Dmitriy Vasyura
2025-11-21 11:34:34 -08:00
committed by GitHub
329 changed files with 2198 additions and 16599 deletions

View File

@@ -8,13 +8,13 @@
"module": "esnext", "module": "esnext",
"allowImportingTsExtensions": true, "allowImportingTsExtensions": true,
"erasableSyntaxOnly": true, "erasableSyntaxOnly": true,
"verbatimModuleSyntax": true,
"noEmit": true, "noEmit": true,
"strict": true, "strict": true,
"exactOptionalPropertyTypes": false, "exactOptionalPropertyTypes": false,
"useUnknownInCatchVariables": false, "useUnknownInCatchVariables": false,
"noUnusedLocals": true, "noUnusedLocals": true,
"noUnusedParameters": true, "noUnusedParameters": true,
"newLine": "lf",
"typeRoots": [ "typeRoots": [
"." "."
] ]

View File

@@ -51,7 +51,7 @@ jobs:
sudo service xvfb start sudo service xvfb start
- name: Prepare node_modules cache key - name: Prepare node_modules cache key
run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js linux x64 $(node -p process.arch) > .build/packagelockhash run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts linux x64 $(node -p process.arch) > .build/packagelockhash
- name: Restore node_modules cache - name: Restore node_modules cache
id: cache-node-modules id: cache-node-modules
@@ -107,7 +107,7 @@ jobs:
if: steps.cache-node-modules.outputs.cache-hit != 'true' if: steps.cache-node-modules.outputs.cache-hit != 'true'
run: | run: |
set -e set -e
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt
mkdir -p .build/node_modules_cache mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
@@ -115,7 +115,7 @@ jobs:
run: mkdir -p .build run: mkdir -p .build
- name: Prepare built-in extensions cache key - name: Prepare built-in extensions cache key
run: node build/azure-pipelines/common/computeBuiltInDepsCacheKey.js > .build/builtindepshash run: node build/azure-pipelines/common/computeBuiltInDepsCacheKey.ts > .build/builtindepshash
- name: Restore built-in extensions cache - name: Restore built-in extensions cache
id: cache-builtin-extensions id: cache-builtin-extensions
@@ -127,7 +127,7 @@ jobs:
- name: Download built-in extensions - name: Download built-in extensions
if: steps.cache-builtin-extensions.outputs.cache-hit != 'true' if: steps.cache-builtin-extensions.outputs.cache-hit != 'true'
run: node build/lib/builtInExtensions.js run: node build/lib/builtInExtensions.ts
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -29,7 +29,7 @@ jobs:
- name: Compute node modules cache key - name: Compute node modules cache key
id: nodeModulesCacheKey id: nodeModulesCacheKey
run: echo "value=$(node build/azure-pipelines/common/computeNodeModulesCacheKey.js)" >> $GITHUB_OUTPUT run: echo "value=$(node build/azure-pipelines/common/computeNodeModulesCacheKey.ts)" >> $GITHUB_OUTPUT
- name: Cache node modules - name: Cache node modules
id: cacheNodeModules id: cacheNodeModules
uses: actions/cache@v4 uses: actions/cache@v4

View File

@@ -32,7 +32,7 @@ jobs:
node-version-file: .nvmrc node-version-file: .nvmrc
- name: Prepare node_modules cache key - name: Prepare node_modules cache key
run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js darwin $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts darwin $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash
- name: Restore node_modules cache - name: Restore node_modules cache
id: cache-node-modules id: cache-node-modules
@@ -77,7 +77,7 @@ jobs:
if: steps.cache-node-modules.outputs.cache-hit != 'true' if: steps.cache-node-modules.outputs.cache-hit != 'true'
run: | run: |
set -e set -e
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt
mkdir -p .build/node_modules_cache mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
@@ -85,7 +85,7 @@ jobs:
run: mkdir -p .build run: mkdir -p .build
- name: Prepare built-in extensions cache key - name: Prepare built-in extensions cache key
run: node build/azure-pipelines/common/computeBuiltInDepsCacheKey.js > .build/builtindepshash run: node build/azure-pipelines/common/computeBuiltInDepsCacheKey.ts > .build/builtindepshash
- name: Restore built-in extensions cache - name: Restore built-in extensions cache
id: cache-builtin-extensions id: cache-builtin-extensions
@@ -97,7 +97,7 @@ jobs:
- name: Download built-in extensions - name: Download built-in extensions
if: steps.cache-builtin-extensions.outputs.cache-hit != 'true' if: steps.cache-builtin-extensions.outputs.cache-hit != 'true'
run: node build/lib/builtInExtensions.js run: node build/lib/builtInExtensions.ts
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -49,7 +49,7 @@ jobs:
sudo service xvfb start sudo service xvfb start
- name: Prepare node_modules cache key - name: Prepare node_modules cache key
run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js linux $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts linux $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash
- name: Restore node_modules cache - name: Restore node_modules cache
id: cache-node-modules id: cache-node-modules
@@ -105,7 +105,7 @@ jobs:
if: steps.cache-node-modules.outputs.cache-hit != 'true' if: steps.cache-node-modules.outputs.cache-hit != 'true'
run: | run: |
set -e set -e
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt
mkdir -p .build/node_modules_cache mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
@@ -113,7 +113,7 @@ jobs:
run: mkdir -p .build run: mkdir -p .build
- name: Prepare built-in extensions cache key - name: Prepare built-in extensions cache key
run: node build/azure-pipelines/common/computeBuiltInDepsCacheKey.js > .build/builtindepshash run: node build/azure-pipelines/common/computeBuiltInDepsCacheKey.ts > .build/builtindepshash
- name: Restore built-in extensions cache - name: Restore built-in extensions cache
id: cache-builtin-extensions id: cache-builtin-extensions
@@ -125,7 +125,7 @@ jobs:
- name: Download built-in extensions - name: Download built-in extensions
if: steps.cache-builtin-extensions.outputs.cache-hit != 'true' if: steps.cache-builtin-extensions.outputs.cache-hit != 'true'
run: node build/lib/builtInExtensions.js run: node build/lib/builtInExtensions.ts
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -21,7 +21,7 @@ jobs:
node-version-file: .nvmrc node-version-file: .nvmrc
- name: Prepare node_modules cache key - name: Prepare node_modules cache key
run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js compile $(node -p process.arch) > .build/packagelockhash run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts compile $(node -p process.arch) > .build/packagelockhash
- name: Restore node_modules cache - name: Restore node_modules cache
id: cache-node-modules id: cache-node-modules
@@ -60,7 +60,7 @@ jobs:
if: steps.cache-node-modules.outputs.cache-hit != 'true' if: steps.cache-node-modules.outputs.cache-hit != 'true'
run: | run: |
set -e set -e
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt
mkdir -p .build/node_modules_cache mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
@@ -68,7 +68,7 @@ jobs:
run: | run: |
set -e set -e
mkdir -p .build mkdir -p .build
node build/azure-pipelines/common/computeBuiltInDepsCacheKey.js > .build/builtindepshash node build/azure-pipelines/common/computeBuiltInDepsCacheKey.ts > .build/builtindepshash
- name: Restore built-in extensions cache - name: Restore built-in extensions cache
id: cache-builtin-extensions id: cache-builtin-extensions
@@ -80,7 +80,7 @@ jobs:
- name: Download built-in extensions - name: Download built-in extensions
if: steps.cache-builtin-extensions.outputs.cache-hit != 'true' if: steps.cache-builtin-extensions.outputs.cache-hit != 'true'
run: node build/lib/builtInExtensions.js run: node build/lib/builtInExtensions.ts
env: env:
GITHUB_TOKEN: ${{ secrets.VSCODE_OSS }} GITHUB_TOKEN: ${{ secrets.VSCODE_OSS }}
@@ -100,7 +100,7 @@ jobs:
node-version-file: .nvmrc node-version-file: .nvmrc
- name: Prepare node_modules cache key - name: Prepare node_modules cache key
run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js linux $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts linux $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash
- name: Restore node_modules cache - name: Restore node_modules cache
id: cache-node-modules id: cache-node-modules
@@ -152,7 +152,7 @@ jobs:
if: steps.cache-node-modules.outputs.cache-hit != 'true' if: steps.cache-node-modules.outputs.cache-hit != 'true'
run: | run: |
set -e set -e
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt
mkdir -p .build/node_modules_cache mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
@@ -172,7 +172,7 @@ jobs:
node-version-file: .nvmrc node-version-file: .nvmrc
- name: Prepare node_modules cache key - name: Prepare node_modules cache key
run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js darwin $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts darwin $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash
- name: Restore node_modules cache - name: Restore node_modules cache
id: cache-node-modules id: cache-node-modules
@@ -213,7 +213,7 @@ jobs:
if: steps.cache-node-modules.outputs.cache-hit != 'true' if: steps.cache-node-modules.outputs.cache-hit != 'true'
run: | run: |
set -e set -e
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt
mkdir -p .build/node_modules_cache mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
@@ -236,7 +236,7 @@ jobs:
shell: pwsh shell: pwsh
run: | run: |
mkdir .build -ea 0 mkdir .build -ea 0
node build/azure-pipelines/common/computeNodeModulesCacheKey.js win32 ${{ env.VSCODE_ARCH }} $(node -p process.arch) > .build/packagelockhash node build/azure-pipelines/common/computeNodeModulesCacheKey.ts win32 ${{ env.VSCODE_ARCH }} $(node -p process.arch) > .build/packagelockhash
- name: Restore node_modules cache - name: Restore node_modules cache
uses: actions/cache@v4 uses: actions/cache@v4
@@ -280,6 +280,6 @@ jobs:
run: | run: |
. build/azure-pipelines/win32/exec.ps1 . build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop" $ErrorActionPreference = "Stop"
exec { node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt } exec { node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt }
exec { mkdir -Force .build/node_modules_cache } exec { mkdir -Force .build/node_modules_cache }
exec { 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt } exec { 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt }

View File

@@ -35,7 +35,7 @@ jobs:
shell: pwsh shell: pwsh
run: | run: |
mkdir .build -ea 0 mkdir .build -ea 0
node build/azure-pipelines/common/computeNodeModulesCacheKey.js win32 ${{ env.VSCODE_ARCH }} $(node -p process.arch) > .build/packagelockhash node build/azure-pipelines/common/computeNodeModulesCacheKey.ts win32 ${{ env.VSCODE_ARCH }} $(node -p process.arch) > .build/packagelockhash
- name: Restore node_modules cache - name: Restore node_modules cache
uses: actions/cache/restore@v4 uses: actions/cache/restore@v4
@@ -84,7 +84,7 @@ jobs:
run: | run: |
. build/azure-pipelines/win32/exec.ps1 . build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop" $ErrorActionPreference = "Stop"
exec { node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt } exec { node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt }
exec { mkdir -Force .build/node_modules_cache } exec { mkdir -Force .build/node_modules_cache }
exec { 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt } exec { 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt }
@@ -94,7 +94,7 @@ jobs:
- name: Prepare built-in extensions cache key - name: Prepare built-in extensions cache key
shell: pwsh shell: pwsh
run: node build/azure-pipelines/common/computeBuiltInDepsCacheKey.js > .build/builtindepshash run: node build/azure-pipelines/common/computeBuiltInDepsCacheKey.ts > .build/builtindepshash
- name: Restore built-in extensions cache - name: Restore built-in extensions cache
id: cache-builtin-extensions id: cache-builtin-extensions
@@ -106,7 +106,7 @@ jobs:
- name: Download built-in extensions - name: Download built-in extensions
if: steps.cache-builtin-extensions.outputs.cache-hit != 'true' if: steps.cache-builtin-extensions.outputs.cache-hit != 'true'
run: node build/lib/builtInExtensions.js run: node build/lib/builtInExtensions.ts
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -29,7 +29,7 @@ jobs:
node-version-file: .nvmrc node-version-file: .nvmrc
- name: Prepare node_modules cache key - name: Prepare node_modules cache key
run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js compile $(node -p process.arch) > .build/packagelockhash run: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts compile $(node -p process.arch) > .build/packagelockhash
- name: Restore node_modules cache - name: Restore node_modules cache
id: cache-node-modules id: cache-node-modules
@@ -68,7 +68,7 @@ jobs:
if: steps.cache-node-modules.outputs.cache-hit != 'true' if: steps.cache-node-modules.outputs.cache-hit != 'true'
run: | run: |
set -e set -e
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt
mkdir -p .build/node_modules_cache mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt

View File

@@ -8,6 +8,7 @@
"ms-vscode.vscode-github-issue-notebooks", "ms-vscode.vscode-github-issue-notebooks",
"ms-vscode.extension-test-runner", "ms-vscode.extension-test-runner",
"jrieken.vscode-pr-pinger", "jrieken.vscode-pr-pinger",
"typescriptteam.native-preview" "typescriptteam.native-preview",
"ms-vscode.ts-customized-language-service"
] ]
} }

16
.vscode/launch.json vendored
View File

@@ -282,7 +282,7 @@
// To debug observables you also need the extension "ms-vscode.debug-value-editor" // To debug observables you also need the extension "ms-vscode.debug-value-editor"
"type": "chrome", "type": "chrome",
"request": "launch", "request": "launch",
"name": "Launch VS Code Internal (Dev Debug)", "name": "Launch VS Code Internal (Hot Reload)",
"windows": { "windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/code.bat" "runtimeExecutable": "${workspaceFolder}/scripts/code.bat"
}, },
@@ -298,7 +298,10 @@
"VSCODE_EXTHOST_WILL_SEND_SOCKET": null, "VSCODE_EXTHOST_WILL_SEND_SOCKET": null,
"VSCODE_SKIP_PRELAUNCH": "1", "VSCODE_SKIP_PRELAUNCH": "1",
"VSCODE_DEV_DEBUG": "1", "VSCODE_DEV_DEBUG": "1",
"VSCODE_DEV_SERVER_URL": "http://localhost:5199/build/vite/workbench-vite-electron.html",
"DEV_WINDOW_SRC": "http://localhost:5199/build/vite/workbench-vite-electron.html",
"VSCODE_DEV_DEBUG_OBSERVABLES": "1", "VSCODE_DEV_DEBUG_OBSERVABLES": "1",
"VSCODE_DEV": "1"
}, },
"cleanUp": "wholeBrowser", "cleanUp": "wholeBrowser",
"runtimeArgs": [ "runtimeArgs": [
@@ -322,6 +325,7 @@
"presentation": { "presentation": {
"hidden": true, "hidden": true,
}, },
"preLaunchTask": "Launch Monaco Editor Vite"
}, },
{ {
"type": "node", "type": "node",
@@ -591,7 +595,7 @@
"name": "Monaco Editor - Playground", "name": "Monaco Editor - Playground",
"type": "chrome", "type": "chrome",
"request": "launch", "request": "launch",
"url": "https://microsoft.github.io/monaco-editor/playground.html?source=http%3A%2F%2Flocalhost%3A5199%2Fbuild%2Fmonaco-editor-playground%2Findex.ts%3Fesm#example-creating-the-editor-hello-world", "url": "https://microsoft.github.io/monaco-editor/playground.html?source=http%3A%2F%2Flocalhost%3A5199%2Fbuild%2Fvite%2Findex.ts%3Fesm#example-creating-the-editor-hello-world",
"preLaunchTask": "Launch Monaco Editor Vite", "preLaunchTask": "Launch Monaco Editor Vite",
"presentation": { "presentation": {
"group": "monaco", "group": "monaco",
@@ -602,7 +606,7 @@
"name": "Monaco Editor - Self Contained Diff Editor", "name": "Monaco Editor - Self Contained Diff Editor",
"type": "chrome", "type": "chrome",
"request": "launch", "request": "launch",
"url": "http://localhost:5199/build/monaco-editor-playground/index.html", "url": "http://localhost:5199/build/vite/index.html",
"preLaunchTask": "Launch Monaco Editor Vite", "preLaunchTask": "Launch Monaco Editor Vite",
"presentation": { "presentation": {
"group": "monaco", "group": "monaco",
@@ -613,7 +617,7 @@
"name": "Monaco Editor - Workbench", "name": "Monaco Editor - Workbench",
"type": "chrome", "type": "chrome",
"request": "launch", "request": "launch",
"url": "http://localhost:5199/build/monaco-editor-playground/workbench-vite.html", "url": "http://localhost:5199/build/vite/workbench-vite.html",
"preLaunchTask": "Launch Monaco Editor Vite", "preLaunchTask": "Launch Monaco Editor Vite",
"presentation": { "presentation": {
"group": "monaco", "group": "monaco",
@@ -638,10 +642,10 @@
} }
}, },
{ {
"name": "VS Code (Debug Observables)", "name": "VS Code (Hot Reload)",
"stopAll": true, "stopAll": true,
"configurations": [ "configurations": [
"Launch VS Code Internal (Dev Debug)", "Launch VS Code Internal (Hot Reload)",
"Attach to Main Process", "Attach to Main Process",
"Attach to Extension Host", "Attach to Extension Host",
"Attach to Shared Process", "Attach to Shared Process",

View File

@@ -214,5 +214,4 @@
"azureMcp.serverMode": "all", "azureMcp.serverMode": "all",
"azureMcp.readOnly": true, "azureMcp.readOnly": true,
"chat.tools.terminal.outputLocation": "none", "chat.tools.terminal.outputLocation": "none",
"chat.agentSessionsViewLocation": "single-view"
} }

4
.vscode/tasks.json vendored
View File

@@ -257,7 +257,7 @@
}, },
{ {
"type": "shell", "type": "shell",
"command": "node build/lib/preLaunch.js", "command": "node build/lib/preLaunch.ts",
"label": "Ensure Prelaunch Dependencies", "label": "Ensure Prelaunch Dependencies",
"presentation": { "presentation": {
"reveal": "silent", "reveal": "silent",
@@ -283,7 +283,7 @@
"type": "shell", "type": "shell",
"command": "npm run dev", "command": "npm run dev",
"options": { "options": {
"cwd": "./build/monaco-editor-playground/" "cwd": "./build/vite/"
}, },
"isBackground": true, "isBackground": true,
"problemMatcher": { "problemMatcher": {

View File

@@ -33,7 +33,7 @@ jobs:
condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none'))
displayName: Setup NPM Registry displayName: Setup NPM Registry
- script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js alpine $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts alpine $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash
displayName: Prepare node_modules cache key displayName: Prepare node_modules cache key
- task: Cache@2 - task: Cache@2
@@ -108,13 +108,13 @@ jobs:
displayName: Install dependencies displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- script: node build/azure-pipelines/distro/mixin-npm - script: node build/azure-pipelines/distro/mixin-npm.ts
displayName: Mixin distro node modules displayName: Mixin distro node modules
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- script: | - script: |
set -e set -e
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt
mkdir -p .build/node_modules_cache mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))

View File

@@ -77,7 +77,7 @@ jobs:
condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none'))
displayName: Setup NPM Registry displayName: Setup NPM Registry
- script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js alpine $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts alpine $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash
displayName: Prepare node_modules cache key displayName: Prepare node_modules cache key
- task: Cache@2 - task: Cache@2
@@ -156,19 +156,19 @@ jobs:
displayName: Install dependencies displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- script: node build/azure-pipelines/distro/mixin-npm - script: node build/azure-pipelines/distro/mixin-npm.ts
displayName: Mixin distro node modules displayName: Mixin distro node modules
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- script: | - script: |
set -e set -e
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt
mkdir -p .build/node_modules_cache mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Create node_modules archive displayName: Create node_modules archive
- script: node build/azure-pipelines/distro/mixin-quality - script: node build/azure-pipelines/distro/mixin-quality.ts
displayName: Mixin distro quality displayName: Mixin distro quality
- template: ../common/install-builtin-extensions.yml@self - template: ../common/install-builtin-extensions.yml@self

View File

@@ -1,7 +1,7 @@
steps: steps:
- template: ../distro/download-distro.yml@self - template: ../distro/download-distro.yml@self
- script: node build/azure-pipelines/distro/mixin-quality - script: node build/azure-pipelines/distro/mixin-quality.ts
displayName: Mixin distro quality displayName: Mixin distro quality
- script: node .build/distro/cli-patches/index.js - script: node .build/distro/cli-patches/index.js

View File

@@ -35,7 +35,7 @@ steps:
set -e set -e
if [ -n "$SYSROOT_ARCH" ]; then if [ -n "$SYSROOT_ARCH" ]; then
export VSCODE_SYSROOT_DIR=$(Build.SourcesDirectory)/.build/sysroots export VSCODE_SYSROOT_DIR=$(Build.SourcesDirectory)/.build/sysroots
node -e '(async () => { const { getVSCodeSysroot } = require("../build/linux/debian/install-sysroot.js"); await getVSCodeSysroot(process.env["SYSROOT_ARCH"], process.env["IS_MUSL"] === "1"); })()' node -e 'import { getVSCodeSysroot } from "../build/linux/debian/install-sysroot.ts"; (async () => { await getVSCodeSysroot(process.env["SYSROOT_ARCH"], process.env["IS_MUSL"] === "1"); })()'
if [ "$SYSROOT_ARCH" == "arm64" ]; then if [ "$SYSROOT_ARCH" == "arm64" ]; then
if [ -n "$IS_MUSL" ]; then if [ -n "$IS_MUSL" ]; then
export CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER="$VSCODE_SYSROOT_DIR/output/bin/aarch64-linux-musl-gcc" export CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER="$VSCODE_SYSROOT_DIR/output/bin/aarch64-linux-musl-gcc"

View File

@@ -1,34 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const publish_1 = require("./publish");
const retry_1 = require("./retry");
async function getPipelineArtifacts() {
const result = await (0, publish_1.requestAZDOAPI)('artifacts');
return result.value.filter(a => !/sbom$/.test(a.name));
}
async function main([variableName, artifactName]) {
if (!variableName || !artifactName) {
throw new Error(`Usage: node checkForArtifact.js <variableName> <artifactName>`);
}
try {
const artifacts = await (0, retry_1.retry)(() => getPipelineArtifacts());
const artifact = artifacts.find(a => a.name === artifactName);
console.log(`##vso[task.setvariable variable=${variableName}]${artifact ? 'true' : 'false'}`);
}
catch (err) {
console.error(`ERROR: Failed to get pipeline artifacts: ${err}`);
console.log(`##vso[task.setvariable variable=${variableName}]false`);
}
}
main(process.argv.slice(2))
.then(() => {
process.exit(0);
}, err => {
console.error(err);
process.exit(1);
});
//# sourceMappingURL=checkForArtifact.js.map

View File

@@ -3,8 +3,8 @@
* Licensed under the MIT License. See License.txt in the project root for license information. * Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
import { Artifact, requestAZDOAPI } from './publish'; import { type Artifact, requestAZDOAPI } from './publish.ts';
import { retry } from './retry'; import { retry } from './retry.ts';
async function getPipelineArtifacts(): Promise<Artifact[]> { async function getPipelineArtifacts(): Promise<Artifact[]> {
const result = await requestAZDOAPI<{ readonly value: Artifact[] }>('artifacts'); const result = await requestAZDOAPI<{ readonly value: Artifact[] }>('artifacts');
@@ -13,7 +13,7 @@ async function getPipelineArtifacts(): Promise<Artifact[]> {
async function main([variableName, artifactName]: string[]): Promise<void> { async function main([variableName, artifactName]: string[]): Promise<void> {
if (!variableName || !artifactName) { if (!variableName || !artifactName) {
throw new Error(`Usage: node checkForArtifact.js <variableName> <artifactName>`); throw new Error(`Usage: node checkForArtifact.ts <variableName> <artifactName>`);
} }
try { try {

View File

@@ -1,30 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.printBanner = printBanner;
exports.streamProcessOutputAndCheckResult = streamProcessOutputAndCheckResult;
exports.spawnCodesignProcess = spawnCodesignProcess;
const zx_1 = require("zx");
function printBanner(title) {
title = `${title} (${new Date().toISOString()})`;
console.log('\n');
console.log('#'.repeat(75));
console.log(`# ${title.padEnd(71)} #`);
console.log('#'.repeat(75));
console.log('\n');
}
async function streamProcessOutputAndCheckResult(name, promise) {
const result = await promise.pipe(process.stdout);
if (result.ok) {
console.log(`\n${name} completed successfully. Duration: ${result.duration} ms`);
return;
}
throw new Error(`${name} failed: ${result.stderr}`);
}
function spawnCodesignProcess(esrpCliDLLPath, type, folder, glob) {
return (0, zx_1.$) `node build/azure-pipelines/common/sign ${esrpCliDLLPath} ${type} ${folder} ${glob}`;
}
//# sourceMappingURL=codesign.js.map

View File

@@ -26,5 +26,5 @@ export async function streamProcessOutputAndCheckResult(name: string, promise: P
} }
export function spawnCodesignProcess(esrpCliDLLPath: string, type: 'sign-windows' | 'sign-windows-appx' | 'sign-pgp' | 'sign-darwin' | 'notarize-darwin', folder: string, glob: string): ProcessPromise { export function spawnCodesignProcess(esrpCliDLLPath: string, type: 'sign-windows' | 'sign-windows-appx' | 'sign-pgp' | 'sign-darwin' | 'notarize-darwin', folder: string, glob: string): ProcessPromise {
return $`node build/azure-pipelines/common/sign ${esrpCliDLLPath} ${type} ${folder} ${glob}`; return $`node build/azure-pipelines/common/sign.ts ${esrpCliDLLPath} ${type} ${folder} ${glob}`;
} }

View File

@@ -1,19 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
const crypto_1 = __importDefault(require("crypto"));
const productjson = JSON.parse(fs_1.default.readFileSync(path_1.default.join(__dirname, '../../../product.json'), 'utf8'));
const shasum = crypto_1.default.createHash('sha256');
for (const ext of productjson.builtInExtensions) {
shasum.update(`${ext.name}@${ext.version}`);
}
process.stdout.write(shasum.digest('hex'));
//# sourceMappingURL=computeBuiltInDepsCacheKey.js.map

View File

@@ -7,7 +7,7 @@ import fs from 'fs';
import path from 'path'; import path from 'path';
import crypto from 'crypto'; import crypto from 'crypto';
const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../../product.json'), 'utf8')); const productjson = JSON.parse(fs.readFileSync(path.join(import.meta.dirname, '../../../product.json'), 'utf8'));
const shasum = crypto.createHash('sha256'); const shasum = crypto.createHash('sha256');
for (const ext of productjson.builtInExtensions) { for (const ext of productjson.builtInExtensions) {

View File

@@ -1,40 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
const crypto_1 = __importDefault(require("crypto"));
const { dirs } = require('../../npm/dirs');
const ROOT = path_1.default.join(__dirname, '../../../');
const shasum = crypto_1.default.createHash('sha256');
shasum.update(fs_1.default.readFileSync(path_1.default.join(ROOT, 'build/.cachesalt')));
shasum.update(fs_1.default.readFileSync(path_1.default.join(ROOT, '.npmrc')));
shasum.update(fs_1.default.readFileSync(path_1.default.join(ROOT, 'build', '.npmrc')));
shasum.update(fs_1.default.readFileSync(path_1.default.join(ROOT, 'remote', '.npmrc')));
// Add `package.json` and `package-lock.json` files
for (const dir of dirs) {
const packageJsonPath = path_1.default.join(ROOT, dir, 'package.json');
const packageJson = JSON.parse(fs_1.default.readFileSync(packageJsonPath).toString());
const relevantPackageJsonSections = {
dependencies: packageJson.dependencies,
devDependencies: packageJson.devDependencies,
optionalDependencies: packageJson.optionalDependencies,
resolutions: packageJson.resolutions,
distro: packageJson.distro
};
shasum.update(JSON.stringify(relevantPackageJsonSections));
const packageLockPath = path_1.default.join(ROOT, dir, 'package-lock.json');
shasum.update(fs_1.default.readFileSync(packageLockPath));
}
// Add any other command line arguments
for (let i = 2; i < process.argv.length; i++) {
shasum.update(process.argv[i]);
}
process.stdout.write(shasum.digest('hex'));
//# sourceMappingURL=computeNodeModulesCacheKey.js.map

View File

@@ -2,13 +2,12 @@
* Copyright (c) Microsoft Corporation. All rights reserved. * Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information. * Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
import fs from 'fs'; import fs from 'fs';
import path from 'path'; import path from 'path';
import crypto from 'crypto'; import crypto from 'crypto';
const { dirs } = require('../../npm/dirs'); import { dirs } from '../../npm/dirs.js';
const ROOT = path.join(__dirname, '../../../'); const ROOT = path.join(import.meta.dirname, '../../../');
const shasum = crypto.createHash('sha256'); const shasum = crypto.createHash('sha256');

View File

@@ -1,55 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const identity_1 = require("@azure/identity");
const cosmos_1 = require("@azure/cosmos");
const retry_1 = require("./retry");
// Exactly one positional argument (the version) is required.
if (process.argv.length !== 3) {
    console.error('Usage: node createBuild.js VERSION');
    process.exit(-1);
}
/**
 * Reads a required environment variable, failing fast when it is unset.
 * @param {string} name - Environment variable name.
 * @returns {string} The variable's value.
 * @throws {Error} When the variable is not defined.
 */
function getEnv(name) {
    const value = process.env[name];
    if (value === undefined) {
        throw new Error('Missing env: ' + name);
    }
    return value;
}
/**
 * Creates a build document in the Cosmos DB `builds` database, in the container
 * named after VSCODE_QUALITY, via the `createBuild` stored procedure.
 * Non-stable qualities get a `-<quality>` version suffix.
 */
async function main() {
    const [, , _version] = process.argv;
    const quality = getEnv('VSCODE_QUALITY');
    const commit = getEnv('BUILD_SOURCEVERSION');
    const queuedBy = getEnv('BUILD_QUEUEDBY');
    const sourceBranch = getEnv('BUILD_SOURCEBRANCH');
    const version = _version + (quality === 'stable' ? '' : `-${quality}`);
    console.log('Creating build...');
    console.log('Quality:', quality);
    console.log('Version:', version);
    console.log('Commit:', commit);
    // The build document starts with no assets/updates; later pipeline stages fill them in.
    const build = {
        id: commit,
        timestamp: (new Date()).getTime(),
        version,
        isReleased: false,
        private: process.env['VSCODE_PRIVATE_BUILD']?.toLowerCase() === 'true',
        sourceBranch,
        queuedBy,
        assets: [],
        updates: {}
    };
    // Federated (workload identity) auth: the AZURE_ID_TOKEN acts as the client assertion.
    const aadCredentials = new identity_1.ClientAssertionCredential(process.env['AZURE_TENANT_ID'], process.env['AZURE_CLIENT_ID'], () => Promise.resolve(process.env['AZURE_ID_TOKEN']));
    const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], aadCredentials });
    const scripts = client.database('builds').container(quality).scripts;
    // retry() wraps the call because the stored procedure execution can fail transiently.
    await (0, retry_1.retry)(() => scripts.storedProcedure('createBuild').execute('', [{ ...build, _partitionKey: '' }]));
}
main().then(() => {
    console.log('Build successfully created');
    process.exit(0);
}, err => {
    console.error(err);
    process.exit(1);
});
//# sourceMappingURL=createBuild.js.map

View File

@@ -5,10 +5,10 @@
import { ClientAssertionCredential } from '@azure/identity'; import { ClientAssertionCredential } from '@azure/identity';
import { CosmosClient } from '@azure/cosmos'; import { CosmosClient } from '@azure/cosmos';
import { retry } from './retry'; import { retry } from './retry.ts';
if (process.argv.length !== 3) { if (process.argv.length !== 3) {
console.error('Usage: node createBuild.js VERSION'); console.error('Usage: node createBuild.ts VERSION');
process.exit(-1); process.exit(-1);
} }

View File

@@ -1,47 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.getAccessToken = getAccessToken;
const msal_node_1 = require("@azure/msal-node");
/**
 * Returns a required environment variable; throws when it is absent.
 * @param {string} name - Environment variable name.
 * @returns {string}
 * @throws {Error} When the variable is not set to a string.
 */
function e(name) {
    const value = process.env[name];
    if (typeof value === 'string') {
        return value;
    }
    throw new Error(`Missing env: ${name}`);
}
/**
 * Acquires an AAD access token for the given resource endpoint using MSAL
 * client-credential flow, with the pipeline's federated ID token as the
 * client assertion. Returns the token plus expiry/refresh timestamps (ms).
 */
async function getAccessToken(endpoint, tenantId, clientId, idToken) {
    const app = new msal_node_1.ConfidentialClientApplication({
        auth: {
            clientId,
            authority: `https://login.microsoftonline.com/${tenantId}`,
            clientAssertion: idToken
        }
    });
    // Scope is `<endpoint>.default` — endpoint is expected to end with '/'.
    const result = await app.acquireTokenByClientCredential({ scopes: [`${endpoint}.default`] });
    if (!result) {
        throw new Error('Failed to get access token');
    }
    return {
        token: result.accessToken,
        // NOTE(review): assumes MSAL always populates expiresOn for a successful
        // client-credential result — would throw here otherwise; confirm.
        expiresOnTimestamp: result.expiresOn.getTime(),
        refreshAfterTimestamp: result.refreshOn?.getTime()
    };
}
/**
 * Fetches the Cosmos DB and staging blob-storage access tokens and prints them
 * as a single JSON object on stdout for the pipeline to capture.
 */
async function main() {
    const cosmosDBAccessToken = await getAccessToken(e('AZURE_DOCUMENTDB_ENDPOINT'), e('AZURE_TENANT_ID'), e('AZURE_CLIENT_ID'), e('AZURE_ID_TOKEN'));
    const blobServiceAccessToken = await getAccessToken(`https://${e('VSCODE_STAGING_BLOB_STORAGE_ACCOUNT_NAME')}.blob.core.windows.net/`, process.env['AZURE_TENANT_ID'], process.env['AZURE_CLIENT_ID'], process.env['AZURE_ID_TOKEN']);
    console.log(JSON.stringify({ cosmosDBAccessToken, blobServiceAccessToken }));
}
// Run only when executed directly (the module also exports getAccessToken).
if (require.main === module) {
    main().then(() => {
        process.exit(0);
    }, err => {
        console.error(err);
        process.exit(1);
    });
}
//# sourceMappingURL=getPublishAuthTokens.js.map

View File

@@ -3,7 +3,7 @@
* Licensed under the MIT License. See License.txt in the project root for license information. * Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
import { AccessToken } from '@azure/core-auth'; import type { AccessToken } from '@azure/core-auth';
import { ConfidentialClientApplication } from '@azure/msal-node'; import { ConfidentialClientApplication } from '@azure/msal-node';
function e(name: string): string { function e(name: string): string {
@@ -44,7 +44,7 @@ async function main() {
console.log(JSON.stringify({ cosmosDBAccessToken, blobServiceAccessToken })); console.log(JSON.stringify({ cosmosDBAccessToken, blobServiceAccessToken }));
} }
if (require.main === module) { if (import.meta.main) {
main().then(() => { main().then(() => {
process.exit(0); process.exit(0);
}, err => { }, err => {

View File

@@ -7,7 +7,7 @@ steps:
condition: and(succeeded(), not(contains(variables['Agent.OS'], 'windows'))) condition: and(succeeded(), not(contains(variables['Agent.OS'], 'windows')))
displayName: Create .build folder displayName: Create .build folder
- script: node build/azure-pipelines/common/computeBuiltInDepsCacheKey.js > .build/builtindepshash - script: node build/azure-pipelines/common/computeBuiltInDepsCacheKey.ts > .build/builtindepshash
displayName: Prepare built-in extensions cache key displayName: Prepare built-in extensions cache key
- task: Cache@2 - task: Cache@2
@@ -17,7 +17,7 @@ steps:
cacheHitVar: BUILTIN_EXTENSIONS_RESTORED cacheHitVar: BUILTIN_EXTENSIONS_RESTORED
displayName: Restore built-in extensions cache displayName: Restore built-in extensions cache
- script: node build/lib/builtInExtensions.js - script: node build/lib/builtInExtensions.ts
env: env:
GITHUB_TOKEN: "$(github-distro-mixin-password)" GITHUB_TOKEN: "$(github-distro-mixin-password)"
condition: and(succeeded(), ne(variables.BUILTIN_EXTENSIONS_RESTORED, 'true')) condition: and(succeeded(), ne(variables.BUILTIN_EXTENSIONS_RESTORED, 'true'))

View File

@@ -1,44 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
if (process.argv.length !== 3) {
console.error('Usage: node listNodeModules.js OUTPUT_FILE');
process.exit(-1);
}
const ROOT = path_1.default.join(__dirname, '../../../');
/**
 * Recursively walks ROOT + location, appending to `result` the ROOT-relative
 * path of every file that lives underneath some node_modules directory.
 * @param {string} location - Current path relative to ROOT, '' at the top.
 * @param {boolean} inNodeModules - Whether an ancestor directory was node_modules.
 * @param {string[]} result - Output accumulator (mutated in place).
 */
function findNodeModulesFiles(location, inNodeModules, result) {
    for (const entry of fs_1.default.readdirSync(path_1.default.join(ROOT, location))) {
        const entryPath = `${location}/${entry}`;
        // Skip build output, sources and VCS/build metadata near the repo root.
        if (/(^\/out)|(^\/src$)|(^\/.git$)|(^\/.build$)/.test(entryPath)) {
            continue;
        }
        let stat;
        try {
            stat = fs_1.default.statSync(path_1.default.join(ROOT, entryPath));
        }
        catch (err) {
            // Broken symlink or concurrently-deleted entry — ignore it.
            continue;
        }
        if (stat.isDirectory()) {
            const nowInNodeModules = inNodeModules || entry === 'node_modules';
            findNodeModulesFiles(entryPath, nowInNodeModules, result);
        }
        else if (inNodeModules) {
            // Drop the leading '/' so paths are relative to ROOT.
            result.push(entryPath.substr(1));
        }
    }
}
// Collect all node_modules file paths and write them, newline-separated, to the
// output file named by the CLI argument (trailing newline included).
const result = [];
findNodeModulesFiles('', false, result);
fs_1.default.writeFileSync(process.argv[2], result.join('\n') + '\n');
//# sourceMappingURL=listNodeModules.js.map

View File

@@ -7,11 +7,11 @@ import fs from 'fs';
import path from 'path'; import path from 'path';
if (process.argv.length !== 3) { if (process.argv.length !== 3) {
console.error('Usage: node listNodeModules.js OUTPUT_FILE'); console.error('Usage: node listNodeModules.ts OUTPUT_FILE');
process.exit(-1); process.exit(-1);
} }
const ROOT = path.join(__dirname, '../../../'); const ROOT = path.join(import.meta.dirname, '../../../');
function findNodeModulesFiles(location: string, inNodeModules: boolean, result: string[]) { function findNodeModulesFiles(location: string, inNodeModules: boolean, result: string[]) {
const entries = fs.readdirSync(path.join(ROOT, location)); const entries = fs.readdirSync(path.join(ROOT, location));

View File

@@ -1,724 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.e = e;
exports.requestAZDOAPI = requestAZDOAPI;
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
const stream_1 = require("stream");
const promises_1 = require("node:stream/promises");
const yauzl_1 = __importDefault(require("yauzl"));
const crypto_1 = __importDefault(require("crypto"));
const retry_1 = require("./retry");
const cosmos_1 = require("@azure/cosmos");
const child_process_1 = __importDefault(require("child_process"));
const os_1 = __importDefault(require("os"));
const node_worker_threads_1 = require("node:worker_threads");
const msal_node_1 = require("@azure/msal-node");
const storage_blob_1 = require("@azure/storage-blob");
const jws_1 = __importDefault(require("jws"));
const node_timers_1 = require("node:timers");
/**
 * Reads a required environment variable; fails loudly when it is absent.
 * @param {string} name - Environment variable name.
 * @returns {string}
 * @throws {Error} When the variable is not set to a string.
 */
function e(name) {
    const value = process.env[name];
    if (typeof value !== 'string') {
        throw new Error(`Missing env: ${name}`);
    }
    return value;
}
/**
 * Hashes a readable stream's bytes with the named algorithm.
 * Resolves with the raw digest Buffer once the stream closes.
 * @param {string} hashName - e.g. 'sha1', 'sha256'.
 * @param {import('stream').Readable} stream
 * @returns {Promise<Buffer>}
 */
function hashStream(hashName, stream) {
    return new Promise((resolve, reject) => {
        const hasher = crypto_1.default.createHash(hashName);
        stream.on('data', chunk => hasher.update(chunk));
        stream.on('error', reject);
        stream.on('close', () => resolve(hasher.digest()));
    });
}
// ESRP release status codes (compiled form of a TypeScript string enum; the
// IIFE pattern must be preserved for identical runtime shape).
var StatusCode;
(function (StatusCode) {
    StatusCode["Pass"] = "pass";
    StatusCode["Aborted"] = "aborted";
    StatusCode["Inprogress"] = "inprogress";
    StatusCode["FailCanRetry"] = "failCanRetry";
    StatusCode["FailDoNotRetry"] = "failDoNotRetry";
    StatusCode["PendingAnalysis"] = "pendingAnalysis";
    StatusCode["Cancelled"] = "cancelled";
})(StatusCode || (StatusCode = {}));
/**
 * Decodes a PEM certificate into its raw DER bytes.
 * Strips the BEGIN/END armor lines and newlines, then base64-decodes the body.
 * @param {string} input - PEM-encoded certificate.
 * @returns {Buffer} DER bytes.
 */
function getCertificateBuffer(input) {
    const base64Body = input.replace(/-----BEGIN CERTIFICATE-----|-----END CERTIFICATE-----|\n/g, '');
    return Buffer.from(base64Body, 'base64');
}
/**
 * Computes a certificate thumbprint: the hash of the certificate's DER bytes.
 * @param {string} input - PEM-encoded certificate.
 * @param {string} algorithm - Hash algorithm, e.g. 'sha1' or 'sha256'.
 * @returns {Buffer} Raw digest bytes.
 */
function getThumbprint(input, algorithm) {
    const der = getCertificateBuffer(input);
    return crypto_1.default.createHash(algorithm).update(der).digest();
}
/**
 * Extracts the (passwordless) private key from a base64-encoded PFX by shelling
 * out to openssl via temp files; temp files are always removed afterwards.
 */
function getKeyFromPFX(pfx) {
    const pfxCertificatePath = path_1.default.join(os_1.default.tmpdir(), 'cert.pfx');
    const pemKeyPath = path_1.default.join(os_1.default.tmpdir(), 'key.pem');
    try {
        const pfxCertificate = Buffer.from(pfx, 'base64');
        fs_1.default.writeFileSync(pfxCertificatePath, pfxCertificate);
        // -nocerts -nodes: emit only the key, unencrypted; empty passphrase.
        child_process_1.default.execSync(`openssl pkcs12 -in "${pfxCertificatePath}" -nocerts -nodes -out "${pemKeyPath}" -passin pass:`);
        const raw = fs_1.default.readFileSync(pemKeyPath, 'utf-8');
        // NOTE(review): assumes the PEM always contains a PRIVATE KEY block —
        // match() returning null would throw a TypeError here; confirm inputs.
        const result = raw.match(/-----BEGIN PRIVATE KEY-----[\s\S]+?-----END PRIVATE KEY-----/g)[0];
        return result;
    }
    finally {
        fs_1.default.rmSync(pfxCertificatePath, { force: true });
        fs_1.default.rmSync(pemKeyPath, { force: true });
    }
}
/**
 * Extracts all certificates from a base64-encoded PFX via openssl, returned in
 * reverse order of appearance (leaf-first, per the .reverse() below); temp
 * files are always removed afterwards.
 */
function getCertificatesFromPFX(pfx) {
    const pfxCertificatePath = path_1.default.join(os_1.default.tmpdir(), 'cert.pfx');
    const pemCertificatePath = path_1.default.join(os_1.default.tmpdir(), 'cert.pem');
    try {
        const pfxCertificate = Buffer.from(pfx, 'base64');
        fs_1.default.writeFileSync(pfxCertificatePath, pfxCertificate);
        // -nokeys: emit only the certificate chain; empty passphrase.
        child_process_1.default.execSync(`openssl pkcs12 -in "${pfxCertificatePath}" -nokeys -out "${pemCertificatePath}" -passin pass:`);
        const raw = fs_1.default.readFileSync(pemCertificatePath, 'utf-8');
        const matches = raw.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g);
        return matches ? matches.reverse() : [];
    }
    finally {
        fs_1.default.rmSync(pfxCertificatePath, { force: true });
        fs_1.default.rmSync(pemCertificatePath, { force: true });
    }
}
/**
 * Client for the ESRP Release Service: uploads an artifact to a staging blob,
 * submits a signed release request, polls until the release completes, and
 * returns the resulting CDN download URL. Requests are signed with a JWS token
 * built from the request-signing certificate chain.
 */
class ESRPReleaseService {
    log;
    clientId;
    accessToken;
    requestSigningCertificates;
    requestSigningKey;
    containerClient;
    stagingSasToken;
    // Factory: authenticates against AAD with the auth certificate (MSAL
    // client-credential flow) before constructing the service.
    static async create(log, tenantId, clientId, authCertificatePfx, requestSigningCertificatePfx, containerClient, stagingSasToken) {
        const authKey = getKeyFromPFX(authCertificatePfx);
        const authCertificate = getCertificatesFromPFX(authCertificatePfx)[0];
        const requestSigningKey = getKeyFromPFX(requestSigningCertificatePfx);
        const requestSigningCertificates = getCertificatesFromPFX(requestSigningCertificatePfx);
        const app = new msal_node_1.ConfidentialClientApplication({
            auth: {
                clientId,
                authority: `https://login.microsoftonline.com/${tenantId}`,
                clientCertificate: {
                    thumbprintSha256: getThumbprint(authCertificate, 'sha256').toString('hex'),
                    privateKey: authKey,
                    x5c: authCertificate
                }
            }
        });
        const response = await app.acquireTokenByClientCredential({
            scopes: ['https://api.esrp.microsoft.com/.default']
        });
        return new ESRPReleaseService(log, clientId, response.accessToken, requestSigningCertificates, requestSigningKey, containerClient, stagingSasToken);
    }
    static API_URL = 'https://api.esrp.microsoft.com/api/v3/releaseservices/clients/';
    constructor(log, clientId, accessToken, requestSigningCertificates, requestSigningKey, containerClient, stagingSasToken) {
        this.log = log;
        this.clientId = clientId;
        this.accessToken = accessToken;
        this.requestSigningCertificates = requestSigningCertificates;
        this.requestSigningKey = requestSigningKey;
        this.containerClient = containerClient;
        this.stagingSasToken = stagingSasToken;
    }
    // Uploads the file to a correlation-id-named staging blob, submits the
    // release, polls for completion, and returns the download URL. The staging
    // blob is deleted in all cases (finally).
    async createRelease(version, filePath, friendlyFileName) {
        const correlationId = crypto_1.default.randomUUID();
        const blobClient = this.containerClient.getBlockBlobClient(correlationId);
        this.log(`Uploading ${filePath} to ${blobClient.url}`);
        await blobClient.uploadFile(filePath);
        this.log('Uploaded blob successfully');
        try {
            this.log(`Submitting release for ${version}: ${filePath}`);
            const submitReleaseResult = await this.submitRelease(version, filePath, friendlyFileName, correlationId, blobClient);
            this.log(`Successfully submitted release ${submitReleaseResult.operationId}. Polling for completion...`);
            // Poll every 5 seconds, wait 60 minutes max -> poll 60/5*60=720 times
            for (let i = 0; i < 720; i++) {
                await new Promise(c => setTimeout(c, 5000));
                const releaseStatus = await this.getReleaseStatus(submitReleaseResult.operationId);
                if (releaseStatus.status === 'pass') {
                    break;
                }
                else if (releaseStatus.status === 'aborted') {
                    this.log(JSON.stringify(releaseStatus));
                    throw new Error(`Release was aborted`);
                }
                else if (releaseStatus.status !== 'inprogress') {
                    this.log(JSON.stringify(releaseStatus));
                    throw new Error(`Unknown error when polling for release`);
                }
            }
            // Re-check via the details endpoint: if we fell out of the loop by
            // exhausting attempts (rather than 'pass'), this surfaces the timeout.
            const releaseDetails = await this.getReleaseDetails(submitReleaseResult.operationId);
            if (releaseDetails.status !== 'pass') {
                throw new Error(`Timed out waiting for release: ${JSON.stringify(releaseDetails)}`);
            }
            this.log('Successfully created release:', releaseDetails.files[0].fileDownloadDetails[0].downloadUrl);
            return releaseDetails.files[0].fileDownloadDetails[0].downloadUrl;
        }
        finally {
            this.log(`Deleting blob ${blobClient.url}`);
            await blobClient.delete();
            this.log('Deleted blob successfully');
        }
    }
    // Builds and POSTs the release request message (with sha256 hash, blob SAS
    // URL and a JWS signature) to the ESRP operations endpoint.
    async submitRelease(version, filePath, friendlyFileName, correlationId, blobClient) {
        const size = fs_1.default.statSync(filePath).size;
        const hash = await hashStream('sha256', fs_1.default.createReadStream(filePath));
        const blobUrl = `${blobClient.url}?${this.stagingSasToken}`;
        const message = {
            customerCorrelationId: correlationId,
            esrpCorrelationId: correlationId,
            driEmail: ['joao.moreno@microsoft.com'],
            createdBy: { userPrincipalName: 'jomo@microsoft.com' },
            owners: [{ owner: { userPrincipalName: 'jomo@microsoft.com' } }],
            approvers: [{ approver: { userPrincipalName: 'jomo@microsoft.com' }, isAutoApproved: true, isMandatory: false }],
            releaseInfo: {
                title: 'VS Code',
                properties: {
                    'ReleaseContentType': 'InstallPackage'
                },
                minimumNumberOfApprovers: 1
            },
            productInfo: {
                name: 'VS Code',
                version,
                description: 'VS Code'
            },
            accessPermissionsInfo: {
                mainPublisher: 'VSCode',
                channelDownloadEntityDetails: {
                    AllDownloadEntities: ['VSCode']
                }
            },
            routingInfo: {
                intent: 'filedownloadlinkgeneration'
            },
            files: [{
                    name: path_1.default.basename(filePath),
                    friendlyFileName,
                    tenantFileLocation: blobUrl,
                    tenantFileLocationType: 'AzureBlob',
                    sourceLocation: {
                        type: 'azureBlob',
                        blobUrl
                    },
                    hashType: 'sha256',
                    hash: Array.from(hash),
                    sizeInBytes: size
                }]
        };
        // The message itself is the JWS payload; the token rides along in the body.
        message.jwsToken = await this.generateJwsToken(message);
        const res = await fetch(`${ESRPReleaseService.API_URL}${this.clientId}/workflows/release/operations`, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'Authorization': `Bearer ${this.accessToken}`
            },
            body: JSON.stringify(message)
        });
        if (!res.ok) {
            const text = await res.text();
            throw new Error(`Failed to submit release: ${res.statusText}\n${text}`);
        }
        return await res.json();
    }
    // Fetches the release status ('grs' endpoint) with retries.
    async getReleaseStatus(releaseId) {
        const url = `${ESRPReleaseService.API_URL}${this.clientId}/workflows/release/operations/grs/${releaseId}`;
        const res = await (0, retry_1.retry)(() => fetch(url, {
            headers: {
                'Authorization': `Bearer ${this.accessToken}`
            }
        }));
        if (!res.ok) {
            const text = await res.text();
            throw new Error(`Failed to get release status: ${res.statusText}\n${text}`);
        }
        return await res.json();
    }
    // Fetches the release details ('grd' endpoint, includes download URLs) with retries.
    async getReleaseDetails(releaseId) {
        const url = `${ESRPReleaseService.API_URL}${this.clientId}/workflows/release/operations/grd/${releaseId}`;
        const res = await (0, retry_1.retry)(() => fetch(url, {
            headers: {
                'Authorization': `Bearer ${this.accessToken}`
            }
        }));
        if (!res.ok) {
            const text = await res.text();
            throw new Error(`Failed to get release status: ${res.statusText}\n${text}`);
        }
        return await res.json();
    }
    // Signs the release message as a JWS (RS256) using the request-signing key,
    // with the service's non-standard header quirks noted inline.
    async generateJwsToken(message) {
        // Create header with properly typed properties, then override x5c with the non-standard string format
        const header = {
            alg: 'RS256',
            crit: ['exp', 'x5t'],
            // Release service uses ticks, not seconds :roll_eyes: (https://stackoverflow.com/a/7968483)
            exp: ((Date.now() + (6 * 60 * 1000)) * 10000) + 621355968000000000,
            // Release service uses hex format, not base64url :roll_eyes:
            x5t: getThumbprint(this.requestSigningCertificates[0], 'sha1').toString('hex'),
        };
        // The Release service expects x5c as a '.' separated string, not the standard array format
        header['x5c'] = this.requestSigningCertificates.map(c => getCertificateBuffer(c).toString('base64url')).join('.');
        return jws_1.default.sign({
            header,
            payload: message,
            privateKey: this.requestSigningKey,
        });
    }
}
/**
 * Persistent set of already-processed artifact names, stored as one name per
 * line in a per-stage-attempt file under PIPELINE_WORKSPACE. On construction it
 * seeds itself from the newest previous attempt's file so a retried stage skips
 * artifacts that were already published.
 */
class State {
    statePath;
    set = new Set();
    constructor() {
        const pipelineWorkspacePath = e('PIPELINE_WORKSPACE');
        // Find the highest-numbered artifacts_processed_<attempt> directory, if any.
        const previousState = fs_1.default.readdirSync(pipelineWorkspacePath)
            .map(name => /^artifacts_processed_(\d+)$/.exec(name))
            .filter((match) => !!match)
            .map(match => ({ name: match[0], attempt: Number(match[1]) }))
            .sort((a, b) => b.attempt - a.attempt)[0];
        if (previousState) {
            const previousStatePath = path_1.default.join(pipelineWorkspacePath, previousState.name, previousState.name + '.txt');
            fs_1.default.readFileSync(previousStatePath, 'utf8').split(/\n/).filter(name => !!name).forEach(name => this.set.add(name));
        }
        // Start this attempt's state file pre-populated with the inherited names.
        const stageAttempt = e('SYSTEM_STAGEATTEMPT');
        this.statePath = path_1.default.join(pipelineWorkspacePath, `artifacts_processed_${stageAttempt}`, `artifacts_processed_${stageAttempt}.txt`);
        fs_1.default.mkdirSync(path_1.default.dirname(this.statePath), { recursive: true });
        fs_1.default.writeFileSync(this.statePath, [...this.set.values()].map(name => `${name}\n`).join(''));
    }
    get size() {
        return this.set.size;
    }
    has(name) {
        return this.set.has(name);
    }
    // Records a processed artifact both in memory and on disk (append-only).
    add(name) {
        this.set.add(name);
        fs_1.default.appendFileSync(this.statePath, `${name}\n`);
    }
    [Symbol.iterator]() {
        return this.set[Symbol.iterator]();
    }
}
// Shared fetch options for Azure DevOps API calls: browser-like headers plus
// the pipeline's system access token.
const azdoFetchOptions = {
    headers: {
        // Pretend we're a web browser to avoid download rate limits
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36 Edg/119.0.0.0',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
        'Accept-Encoding': 'gzip, deflate, br',
        'Accept-Language': 'en-US,en;q=0.9',
        'Referer': 'https://dev.azure.com',
        Authorization: `Bearer ${e('SYSTEM_ACCESSTOKEN')}`
    }
};
/**
 * GETs `<BUILDS_API_URL><path>?api-version=6.0` with retries and a 2-minute
 * abort timeout, returning the parsed JSON body.
 */
async function requestAZDOAPI(path) {
    const abortController = new AbortController();
    const timeout = setTimeout(() => abortController.abort(), 2 * 60 * 1000);
    try {
        const res = await (0, retry_1.retry)(() => fetch(`${e('BUILDS_API_URL')}${path}?api-version=6.0`, { ...azdoFetchOptions, signal: abortController.signal }));
        if (!res.ok) {
            throw new Error(`Unexpected status code: ${res.status}`);
        }
        return await res.json();
    }
    finally {
        // Always cancel the abort timer, success or failure.
        clearTimeout(timeout);
    }
}
/**
 * Lists this build's pipeline artifacts, keeping only VS Code artifacts
 * (names starting with 'vscode_') and excluding SBOM artifacts.
 */
async function getPipelineArtifacts() {
    const { value } = await requestAZDOAPI('artifacts');
    const isRelevant = (artifact) => /^vscode_/.test(artifact.name) && !/sbom$/.test(artifact.name);
    return value.filter(isRelevant);
}
/**
 * Fetches the pipeline timeline (per-stage/job records) for this build.
 */
async function getPipelineTimeline() {
    return requestAZDOAPI('timeline');
}
/**
 * Downloads one pipeline artifact to `downloadPath`, streaming the response
 * body to disk, with a 4-minute abort timeout.
 */
async function downloadArtifact(artifact, downloadPath) {
    const abortController = new AbortController();
    const timeout = setTimeout(() => abortController.abort(), 4 * 60 * 1000);
    try {
        const res = await fetch(artifact.resource.downloadUrl, { ...azdoFetchOptions, signal: abortController.signal });
        if (!res.ok) {
            throw new Error(`Unexpected status code: ${res.status}`);
        }
        // Bridge the WHATWG response stream into a Node writable via pipeline().
        await (0, promises_1.pipeline)(stream_1.Readable.fromWeb(res.body), fs_1.default.createWriteStream(downloadPath));
    }
    finally {
        // Always cancel the abort timer, success or failure.
        clearTimeout(timeout);
    }
}
/**
 * Extracts a zip archive into `outputPath`, resolving with the list of
 * extracted file paths. Directory entries are skipped; files are streamed to
 * disk one entry at a time (lazyEntries drives the readEntry() pump).
 */
async function unzip(packagePath, outputPath) {
    return new Promise((resolve, reject) => {
        yauzl_1.default.open(packagePath, { lazyEntries: true, autoClose: true }, (err, zipfile) => {
            if (err) {
                return reject(err);
            }
            const result = [];
            zipfile.on('entry', entry => {
                // Entries ending in '/' are directories — nothing to write.
                if (/\/$/.test(entry.fileName)) {
                    zipfile.readEntry();
                }
                else {
                    zipfile.openReadStream(entry, (err, istream) => {
                        if (err) {
                            return reject(err);
                        }
                        const filePath = path_1.default.join(outputPath, entry.fileName);
                        fs_1.default.mkdirSync(path_1.default.dirname(filePath), { recursive: true });
                        const ostream = fs_1.default.createWriteStream(filePath);
                        // Advance to the next entry only after this file is fully flushed.
                        ostream.on('finish', () => {
                            result.push(filePath);
                            zipfile.readEntry();
                        });
                        // NOTE(review): guarded with ?. here but dereferenced
                        // unconditionally on the next line — if istream could be
                        // undefined, .pipe would throw; confirm yauzl's contract.
                        istream?.on('error', err => reject(err));
                        istream.pipe(ostream);
                    });
                }
            });
            zipfile.on('close', () => resolve(result));
            zipfile.readEntry();
        });
    });
}
// Contains all of the logic for mapping details to our actual product names in CosmosDB
/**
 * Maps (product, os, arch, type) of a pipeline artifact to the platform
 * identifier used by the CosmosDB builds database.
 * @param {string} product - 'client' | 'server' | 'web' | 'cli'.
 * @param {string} os - 'win32' | 'alpine' | 'linux' | 'darwin'.
 * @param {string} arch - e.g. 'x64', 'arm64', or 'standalone' (web).
 * @param {string} type - artifact type, e.g. 'setup', 'archive-unsigned'.
 * @returns {string} Platform identifier.
 * @throws {Error} For any combination that has no known mapping.
 */
function getPlatform(product, os, arch, type) {
    const unrecognized = () => new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
    if (os === 'win32') {
        if (product === 'client') {
            if (type === 'archive') {
                return `win32-${arch}-archive`;
            }
            if (type === 'setup') {
                return `win32-${arch}`;
            }
            if (type === 'user-setup') {
                return `win32-${arch}-user`;
            }
            throw unrecognized();
        }
        if (product === 'server') {
            return `server-win32-${arch}`;
        }
        if (product === 'web') {
            return `server-win32-${arch}-web`;
        }
        if (product === 'cli') {
            return `cli-win32-${arch}`;
        }
        throw unrecognized();
    }
    if (os === 'alpine') {
        // Alpine ships no desktop client.
        if (product === 'server') {
            return `server-alpine-${arch}`;
        }
        if (product === 'web') {
            return `server-alpine-${arch}-web`;
        }
        if (product === 'cli') {
            return `cli-alpine-${arch}`;
        }
        throw unrecognized();
    }
    if (os === 'linux') {
        // Linux is keyed primarily by artifact type, not product.
        if (type === 'snap') {
            return `linux-snap-${arch}`;
        }
        if (type === 'archive-unsigned') {
            if (product === 'client') {
                return `linux-${arch}`;
            }
            if (product === 'server') {
                return `server-linux-${arch}`;
            }
            if (product === 'web') {
                // The standalone web build has its own platform id.
                return arch === 'standalone' ? 'web-standalone' : `server-linux-${arch}-web`;
            }
            throw unrecognized();
        }
        if (type === 'deb-package') {
            return `linux-deb-${arch}`;
        }
        if (type === 'rpm-package') {
            return `linux-rpm-${arch}`;
        }
        if (type === 'cli') {
            return `cli-linux-${arch}`;
        }
        throw unrecognized();
    }
    if (os === 'darwin') {
        // For historical reasons, x64 macOS identifiers omit the arch suffix.
        if (product === 'client') {
            return arch === 'x64' ? 'darwin' : `darwin-${arch}`;
        }
        if (product === 'server') {
            return arch === 'x64' ? 'server-darwin' : `server-darwin-${arch}`;
        }
        if (product === 'web') {
            return arch === 'x64' ? 'server-darwin-web' : `server-darwin-${arch}-web`;
        }
        if (product === 'cli') {
            return `cli-darwin-${arch}`;
        }
        throw unrecognized();
    }
    throw unrecognized();
}
// Contains all of the logic for mapping types to our actual types in CosmosDB
/**
 * Maps an artifact's raw type to the type stored in CosmosDB:
 * 'user-setup' is recorded as 'setup', deb/rpm both as 'package';
 * everything else passes through unchanged.
 * @param {string} type
 * @returns {string}
 */
function getRealType(type) {
    if (type === 'user-setup') {
        return 'setup';
    }
    if (type === 'deb-package' || type === 'rpm-package') {
        return 'package';
    }
    return type;
}
/**
 * Runs `fn` while holding a 60-second blob lease on `client`, renewing the
 * lease every 30 seconds in the background. Retries lease acquisition every
 * 5 seconds for up to 30 minutes when another agent holds it (HTTP 409/412);
 * any other error propagates. The lease is always released afterwards.
 */
async function withLease(client, fn) {
    const lease = client.getBlobLeaseClient();
    for (let i = 0; i < 360; i++) { // Try to get lease for 30 minutes
        try {
            await client.uploadData(new ArrayBuffer()); // blob needs to exist for lease to be acquired
            await lease.acquireLease(60);
            try {
                const abortController = new AbortController();
                // The refresher promise never resolves on its own while fn runs;
                // it rejects if a renewal fails, which aborts the race below.
                const refresher = new Promise((c, e) => {
                    abortController.signal.onabort = () => {
                        (0, node_timers_1.clearInterval)(interval);
                        c();
                    };
                    const interval = (0, node_timers_1.setInterval)(() => {
                        lease.renewLease().catch(err => {
                            (0, node_timers_1.clearInterval)(interval);
                            e(new Error('Failed to renew lease ' + err));
                        });
                    }, 30_000);
                });
                const result = await Promise.race([fn(), refresher]);
                // fn finished first: stop the renewal timer.
                abortController.abort();
                return result;
            }
            finally {
                await lease.releaseLease();
            }
        }
        catch (err) {
            // 409 Conflict / 412 Precondition Failed mean someone else holds the
            // lease (or the blob changed underneath us) — wait and retry.
            if (err.statusCode !== 409 && err.statusCode !== 412) {
                throw err;
            }
            await new Promise(c => setTimeout(c, 5000));
        }
    }
    throw new Error('Failed to acquire lease on blob after 30 minutes');
}
// Publishes one extracted pipeline artifact (runs inside a worker thread, see main()):
//   1. takes a blob lease keyed on the artifact's friendly name so concurrent
//      pipeline runs cannot double-publish the same file,
//   2. releases the file to the CDN via ESRP, unless the CDN URL already serves it,
//   3. computes hashes and records the asset via the `createAsset` stored
//      procedure in the builds Cosmos DB container.
async function processArtifact(artifact, filePath) {
    const log = (...args) => console.log(`[${artifact.name}]`, ...args);
    // Artifact names are expected to look like:
    //   vscode_<product>_<os>[_legacy]_<arch>_<type>
    const match = /^vscode_(?<product>[^_]+)_(?<os>[^_]+)(?:_legacy)?_(?<arch>[^_]+)_(?<unprocessedType>[^_]+)$/.exec(artifact.name);
    if (!match) {
        throw new Error(`Invalid artifact name: ${artifact.name}`);
    }
    const { cosmosDBAccessToken, blobServiceAccessToken } = JSON.parse(e('PUBLISH_AUTH_TOKENS'));
    const quality = e('VSCODE_QUALITY');
    const version = e('BUILD_SOURCEVERSION');
    // e.g. "stable/<commit>/VSCode-win32-x64.zip" — also used as the lease blob name
    const friendlyFileName = `${quality}/${version}/${path_1.default.basename(filePath)}`;
    const blobServiceClient = new storage_blob_1.BlobServiceClient(`https://${e('VSCODE_STAGING_BLOB_STORAGE_ACCOUNT_NAME')}.blob.core.windows.net/`, { getToken: async () => blobServiceAccessToken });
    // Leases live on zero-byte blobs in a dedicated 'leases' container.
    const leasesContainerClient = blobServiceClient.getContainerClient('leases');
    await leasesContainerClient.createIfNotExists();
    const leaseBlobClient = leasesContainerClient.getBlockBlobClient(friendlyFileName);
    log(`Acquiring lease for: ${friendlyFileName}`);
    await withLease(leaseBlobClient, async () => {
        log(`Successfully acquired lease for: ${friendlyFileName}`);
        const url = `${e('PRSS_CDN_URL')}/${friendlyFileName}`;
        // A 200 from the CDN means a previous run already released this file;
        // skip straight to recording the asset in Cosmos DB.
        const res = await (0, retry_1.retry)(() => fetch(url));
        if (res.status === 200) {
            log(`Already released and provisioned: ${url}`);
        }
        else {
            // Stage the file in the 'staging' container and hand ESRP a
            // read-only SAS valid from one hour ago to one hour from now
            // (the skew back-dates against clock drift).
            const stagingContainerClient = blobServiceClient.getContainerClient('staging');
            await stagingContainerClient.createIfNotExists();
            const now = new Date().valueOf();
            const oneHour = 60 * 60 * 1000;
            const oneHourAgo = new Date(now - oneHour);
            const oneHourFromNow = new Date(now + oneHour);
            const userDelegationKey = await blobServiceClient.getUserDelegationKey(oneHourAgo, oneHourFromNow);
            const sasOptions = { containerName: 'staging', permissions: storage_blob_1.ContainerSASPermissions.from({ read: true }), startsOn: oneHourAgo, expiresOn: oneHourFromNow };
            const stagingSasToken = (0, storage_blob_1.generateBlobSASQueryParameters)(sasOptions, userDelegationKey, e('VSCODE_STAGING_BLOB_STORAGE_ACCOUNT_NAME')).toString();
            const releaseService = await ESRPReleaseService.create(log, e('RELEASE_TENANT_ID'), e('RELEASE_CLIENT_ID'), e('RELEASE_AUTH_CERT'), e('RELEASE_REQUEST_SIGNING_CERT'), stagingContainerClient, stagingSasToken);
            await releaseService.createRelease(version, filePath, friendlyFileName);
        }
        const { product, os, arch, unprocessedType } = match.groups;
        const platform = getPlatform(product, os, arch, unprocessedType);
        const type = getRealType(unprocessedType);
        const size = fs_1.default.statSync(filePath).size;
        // NOTE(review): one read stream feeds both hash computations —
        // presumably hashStream pipes rather than exclusively consumes the
        // stream (a readable can pipe to multiple destinations); confirm in
        // hashStream's implementation.
        const stream = fs_1.default.createReadStream(filePath);
        const [hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]); // CodeQL [SM04514] Using SHA1 only for legacy reasons, we are actually only respecting SHA256
        const asset = { platform, type, url, hash: hash.toString('hex'), sha256hash: sha256hash.toString('hex'), size, supportsFastUpdate: true };
        log('Creating asset...');
        // The client is recreated per attempt so a retry starts from a fresh
        // connection; the stored procedure reports 'already exists' instead of
        // failing when the asset was recorded by an earlier run.
        const result = await (0, retry_1.retry)(async (attempt) => {
            log(`Creating asset in Cosmos DB (attempt ${attempt})...`);
            const client = new cosmos_1.CosmosClient({ endpoint: e('AZURE_DOCUMENTDB_ENDPOINT'), tokenProvider: () => Promise.resolve(`type=aad&ver=1.0&sig=${cosmosDBAccessToken.token}`) });
            const scripts = client.database('builds').container(quality).scripts;
            const { resource: result } = await scripts.storedProcedure('createAsset').execute('', [version, asset, true]);
            return result;
        });
        if (result === 'already exists') {
            log('Asset already exists!');
        }
        else {
            log('Asset successfully created: ', JSON.stringify(asset, undefined, 2));
        }
    });
    log(`Successfully released lease for: ${friendlyFileName}`);
}
// It is VERY important that we don't download artifacts too much too fast from AZDO.
// AZDO throttles us SEVERELY if we do. Not just that, but they also close open
// sockets, so the whole thing grinds to a halt. So, downloading and extracting
// happens serially in the main thread, ensuring the downloads are spaced out
// properly. For each extracted artifact, we spawn a worker thread to upload it to
// the CDN and finally update the build in Cosmos DB.
// Entry point. In the main thread: polls AZDO for pipeline stages/artifacts,
// downloads and extracts each new artifact serially, then publishes it from a
// worker thread. In a worker thread: publishes the single artifact passed via
// workerData and returns.
async function main() {
    if (!node_worker_threads_1.isMainThread) {
        // Worker-thread path: publish one artifact, then let the process exit.
        const { artifact, artifactFilePath } = node_worker_threads_1.workerData;
        await processArtifact(artifact, artifactFilePath);
        return;
    }
    // `done` persists already-published artifact names across runs (State);
    // `processing` tracks artifacts currently being published by workers.
    const done = new State();
    const processing = new Set();
    for (const name of done) {
        console.log(`\u2705 ${name}`);
    }
    // The set of pipeline stages whose completion we must wait for, derived
    // from which platform stages are enabled for this build.
    const stages = new Set(['Compile']);
    if (e('VSCODE_BUILD_STAGE_LINUX') === 'True' ||
        e('VSCODE_BUILD_STAGE_ALPINE') === 'True' ||
        e('VSCODE_BUILD_STAGE_MACOS') === 'True' ||
        e('VSCODE_BUILD_STAGE_WINDOWS') === 'True') {
        stages.add('CompileCLI');
    }
    if (e('VSCODE_BUILD_STAGE_WINDOWS') === 'True') {
        stages.add('Windows');
    }
    if (e('VSCODE_BUILD_STAGE_LINUX') === 'True') {
        stages.add('Linux');
    }
    if (e('VSCODE_BUILD_STAGE_ALPINE') === 'True') {
        stages.add('Alpine');
    }
    if (e('VSCODE_BUILD_STAGE_MACOS') === 'True') {
        stages.add('macOS');
    }
    if (e('VSCODE_BUILD_STAGE_WEB') === 'True') {
        stages.add('Web');
    }
    let timeline;
    let artifacts;
    // Settled results of all publish operations started so far; reassigned
    // each time a new operation is added so the final await sees every one.
    let resultPromise = Promise.resolve([]);
    const operations = [];
    // Poll every 10s until all watched stages are complete AND every artifact
    // is either done or being processed.
    while (true) {
        [timeline, artifacts] = await Promise.all([(0, retry_1.retry)(() => getPipelineTimeline()), (0, retry_1.retry)(() => getPipelineArtifacts())]);
        const stagesCompleted = new Set(timeline.records.filter(r => r.type === 'Stage' && r.state === 'completed' && stages.has(r.name)).map(r => r.name));
        const stagesInProgress = [...stages].filter(s => !stagesCompleted.has(s));
        const artifactsInProgress = artifacts.filter(a => processing.has(a.name));
        if (stagesInProgress.length === 0 && artifacts.length === done.size + processing.size) {
            break;
        }
        else if (stagesInProgress.length > 0) {
            console.log('Stages in progress:', stagesInProgress.join(', '));
        }
        else if (artifactsInProgress.length > 0) {
            console.log('Artifacts in progress:', artifactsInProgress.map(a => a.name).join(', '));
        }
        else {
            console.log(`Waiting for a total of ${artifacts.length}, ${done.size} done, ${processing.size} in progress...`);
        }
        for (const artifact of artifacts) {
            if (done.has(artifact.name) || processing.has(artifact.name)) {
                continue;
            }
            console.log(`[${artifact.name}] Found new artifact`);
            const artifactZipPath = path_1.default.join(e('AGENT_TEMPDIRECTORY'), `${artifact.name}.zip`);
            // Download serially in the main thread (see throttling note above).
            await (0, retry_1.retry)(async (attempt) => {
                const start = Date.now();
                console.log(`[${artifact.name}] Downloading (attempt ${attempt})...`);
                await downloadArtifact(artifact, artifactZipPath);
                const archiveSize = fs_1.default.statSync(artifactZipPath).size;
                const downloadDurationS = (Date.now() - start) / 1000;
                const downloadSpeedKBS = Math.round((archiveSize / 1024) / downloadDurationS);
                console.log(`[${artifact.name}] Successfully downloaded after ${Math.floor(downloadDurationS)} seconds(${downloadSpeedKBS} KB/s).`);
            });
            const artifactFilePaths = await unzip(artifactZipPath, e('AGENT_TEMPDIRECTORY'));
            // Ignore SBOM/_manifest entries; publish the first real payload file.
            const artifactFilePath = artifactFilePaths.filter(p => !/_manifest/.test(p))[0];
            processing.add(artifact.name);
            // Publish in a worker thread (this same file re-entered via workerData).
            const promise = new Promise((resolve, reject) => {
                const worker = new node_worker_threads_1.Worker(__filename, { workerData: { artifact, artifactFilePath } });
                worker.on('error', reject);
                worker.on('exit', code => {
                    if (code === 0) {
                        resolve();
                    }
                    else {
                        reject(new Error(`[${artifact.name}] Worker stopped with exit code ${code}`));
                    }
                });
            });
            const operation = promise.then(() => {
                processing.delete(artifact.name);
                done.add(artifact.name);
                console.log(`\u2705 ${artifact.name} `);
            });
            operations.push({ name: artifact.name, operation });
            resultPromise = Promise.allSettled(operations.map(o => o.operation));
        }
        await new Promise(c => setTimeout(c, 10_000));
    }
    console.log(`Found all ${done.size + processing.size} artifacts, waiting for ${processing.size} artifacts to finish publishing...`);
    const artifactsInProgress = operations.filter(o => processing.has(o.name));
    if (artifactsInProgress.length > 0) {
        console.log('Artifacts in progress:', artifactsInProgress.map(a => a.name).join(', '));
    }
    const results = await resultPromise;
    for (let i = 0; i < operations.length; i++) {
        const result = results[i];
        if (result.status === 'rejected') {
            console.error(`[${operations[i].name}]`, result.reason);
        }
    }
    // Fail the job if any of the artifacts failed to publish
    if (results.some(r => r.status === 'rejected')) {
        throw new Error('Some artifacts failed to publish');
    }
    // Also fail the job if any of the stages did not succeed
    let shouldFail = false;
    for (const stage of stages) {
        // NOTE(review): `record` is assumed to exist because the loop above only
        // exits once every watched stage reported completed — confirm `find`
        // cannot return undefined here.
        const record = timeline.records.find(r => r.name === stage && r.type === 'Stage');
        if (record.result !== 'succeeded' && record.result !== 'succeededWithIssues') {
            shouldFail = true;
            console.error(`Stage ${stage} did not succeed: ${record.result}`);
        }
    }
    if (shouldFail) {
        throw new Error('Some stages did not succeed');
    }
    console.log(`All ${done.size} artifacts published!`);
}
// Script entry point: exit 0 on success, log and exit 1 on failure.
// NOTE(review): worker threads spawned on __filename presumably re-enter here
// too — main() routes them via its isMainThread check; confirm.
if (require.main === module) {
    const onSuccess = () => process.exit(0);
    const onFailure = (err) => {
        console.error(err);
        process.exit(1);
    };
    main().then(onSuccess, onFailure);
}
//# sourceMappingURL=publish.js.map

View File

@@ -10,7 +10,7 @@ import type { ReadableStream } from 'stream/web';
import { pipeline } from 'node:stream/promises'; import { pipeline } from 'node:stream/promises';
import yauzl from 'yauzl'; import yauzl from 'yauzl';
import crypto from 'crypto'; import crypto from 'crypto';
import { retry } from './retry'; import { retry } from './retry.ts';
import { CosmosClient } from '@azure/cosmos'; import { CosmosClient } from '@azure/cosmos';
import cp from 'child_process'; import cp from 'child_process';
import os from 'os'; import os from 'os';
@@ -73,15 +73,16 @@ interface ReleaseError {
errorMessages: string[]; errorMessages: string[];
} }
const enum StatusCode { const StatusCode = Object.freeze({
Pass = 'pass', Pass: 'pass',
Aborted = 'aborted', Aborted: 'aborted',
Inprogress = 'inprogress', Inprogress: 'inprogress',
FailCanRetry = 'failCanRetry', FailCanRetry: 'failCanRetry',
FailDoNotRetry = 'failDoNotRetry', FailDoNotRetry: 'failDoNotRetry',
PendingAnalysis = 'pendingAnalysis', PendingAnalysis: 'pendingAnalysis',
Cancelled = 'cancelled' Cancelled: 'cancelled'
} });
type StatusCode = typeof StatusCode[keyof typeof StatusCode];
interface ReleaseResultMessage { interface ReleaseResultMessage {
activities: ReleaseActivityInfo[]; activities: ReleaseActivityInfo[];
@@ -349,15 +350,31 @@ class ESRPReleaseService {
private static API_URL = 'https://api.esrp.microsoft.com/api/v3/releaseservices/clients/'; private static API_URL = 'https://api.esrp.microsoft.com/api/v3/releaseservices/clients/';
private readonly log: (...args: unknown[]) => void;
private readonly clientId: string;
private readonly accessToken: string;
private readonly requestSigningCertificates: string[];
private readonly requestSigningKey: string;
private readonly containerClient: ContainerClient;
private readonly stagingSasToken: string;
private constructor( private constructor(
private readonly log: (...args: unknown[]) => void, log: (...args: unknown[]) => void,
private readonly clientId: string, clientId: string,
private readonly accessToken: string, accessToken: string,
private readonly requestSigningCertificates: string[], requestSigningCertificates: string[],
private readonly requestSigningKey: string, requestSigningKey: string,
private readonly containerClient: ContainerClient, containerClient: ContainerClient,
private readonly stagingSasToken: string stagingSasToken: string
) { } ) {
this.log = log;
this.clientId = clientId;
this.accessToken = accessToken;
this.requestSigningCertificates = requestSigningCertificates;
this.requestSigningKey = requestSigningKey;
this.containerClient = containerClient;
this.stagingSasToken = stagingSasToken;
}
async createRelease(version: string, filePath: string, friendlyFileName: string) { async createRelease(version: string, filePath: string, friendlyFileName: string) {
const correlationId = crypto.randomUUID(); const correlationId = crypto.randomUUID();
@@ -1009,7 +1026,7 @@ async function main() {
processing.add(artifact.name); processing.add(artifact.name);
const promise = new Promise<void>((resolve, reject) => { const promise = new Promise<void>((resolve, reject) => {
const worker = new Worker(__filename, { workerData: { artifact, artifactFilePath } }); const worker = new Worker(import.meta.filename, { workerData: { artifact, artifactFilePath } });
worker.on('error', reject); worker.on('error', reject);
worker.on('exit', code => { worker.on('exit', code => {
if (code === 0) { if (code === 0) {
@@ -1075,7 +1092,7 @@ async function main() {
console.log(`All ${done.size} artifacts published!`); console.log(`All ${done.size} artifacts published!`);
} }
if (require.main === module) { if (import.meta.main) {
main().then(() => { main().then(() => {
process.exit(0); process.exit(0);
}, err => { }, err => {

View File

@@ -1,56 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const cosmos_1 = require("@azure/cosmos");
const retry_1 = require("./retry");
/**
 * Read a required environment variable.
 * @param {string} name - Name of the variable to look up.
 * @returns {string} The variable's value (may be an empty string).
 * @throws {Error} When the variable is not set at all.
 */
function getEnv(name) {
    const value = process.env[name];
    if (value === undefined) {
        throw new Error(`Missing env: ${name}`);
    }
    return value;
}
/**
 * Default quality configuration used when no config document exists in
 * Cosmos DB. A fresh quality starts unfrozen, so releases are allowed.
 * @param {string} quality - Quality/channel id (doubles as the document id).
 */
function createDefaultConfig(quality) {
    return { id: quality, frozen: false };
}
/**
 * Fetch the configuration document for a quality from the `config` container
 * of the `builds` database, falling back to a default (unfrozen) config when
 * no document exists.
 *
 * Uses a parameterized query (SqlQuerySpec) instead of interpolating the
 * quality into the SQL text, so a value containing quotes cannot break or
 * inject into the query.
 *
 * @param client Cosmos DB client.
 * @param quality Quality/channel id, e.g. `stable` or `insider`.
 * @returns The stored config document, or a default `{ id, frozen: false }`.
 */
async function getConfig(client, quality) {
    const query = {
        query: 'SELECT TOP 1 * FROM c WHERE c.id = @quality',
        parameters: [{ name: '@quality', value: quality }]
    };
    const res = await client.database('builds').container('config').items.query(query).fetchAll();
    if (res.resources.length === 0) {
        return createDefaultConfig(quality);
    }
    return res.resources[0];
}
// Releases the current build (BUILD_SOURCEVERSION) for the current quality by
// invoking the `releaseBuild` stored procedure in Cosmos DB. Skipped when the
// quality's config document is frozen, unless `force` is true.
async function main(force) {
    const commit = getEnv('BUILD_SOURCEVERSION');
    const quality = getEnv('VSCODE_QUALITY');
    const { cosmosDBAccessToken } = JSON.parse(getEnv('PUBLISH_AUTH_TOKENS'));
    const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], tokenProvider: () => Promise.resolve(`type=aad&ver=1.0&sig=${cosmosDBAccessToken.token}`) });
    if (!force) {
        // A frozen quality blocks releases entirely (e.g. during endgame).
        const config = await getConfig(client, quality);
        console.log('Quality config:', config);
        if (config.frozen) {
            console.log(`Skipping release because quality ${quality} is frozen.`);
            return;
        }
    }
    console.log(`Releasing build ${commit}...`);
    const scripts = client.database('builds').container(quality).scripts;
    await (0, retry_1.retry)(() => scripts.storedProcedure('releaseBuild').execute('', [commit]));
}
const [, , force] = process.argv;
console.log(process.argv);
main(/^true$/i.test(force)).then(() => {
console.log('Build successfully released');
process.exit(0);
}, err => {
console.error(err);
process.exit(1);
});
//# sourceMappingURL=releaseBuild.js.map

View File

@@ -4,7 +4,7 @@
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
import { CosmosClient } from '@azure/cosmos'; import { CosmosClient } from '@azure/cosmos';
import { retry } from './retry'; import { retry } from './retry.ts';
function getEnv(name: string): string { function getEnv(name: string): string {
const result = process.env[name]; const result = process.env[name];

View File

@@ -1,27 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.retry = retry;
/**
 * Run `fn` up to 10 times, retrying only on errors whose message looks
 * transient (network hiccups, timeouts, throttling, truncated downloads).
 * Non-transient errors are rethrown immediately; after the final attempt
 * the last transient error is rethrown.
 * @param {(attempt: number) => Promise<any>} fn - Receives the 1-based attempt number.
 */
async function retry(fn) {
    // Error messages that indicate a failure worth retrying.
    const transientPattern = /fetch failed|terminated|aborted|timeout|TimeoutError|Timeout Error|RestError|Client network socket disconnected|socket hang up|ECONNRESET|CredentialUnavailableError|endpoints_resolution_error|Audience validation failed|end of central directory record signature not found/i;
    const maxAttempts = 10;
    let mostRecentError;
    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
        try {
            return await fn(attempt);
        }
        catch (err) {
            if (!transientPattern.test(err.message)) {
                throw err; // permanent failure: surface immediately
            }
            mostRecentError = err;
            // Exponential backoff with jitter; maximum delay is 10th retry: ~3 seconds
            const delayMs = Math.floor(Math.random() * 200 + 50 * 1.5 ** attempt);
            await new Promise(resolve => setTimeout(resolve, delayMs));
        }
    }
    console.error(`Too many retries, aborting.`);
    throw mostRecentError;
}
//# sourceMappingURL=retry.js.map

View File

@@ -1,18 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const sign_1 = require("./sign");
const path_1 = __importDefault(require("path"));
(0, sign_1.main)([
process.env['EsrpCliDllPath'],
'sign-windows',
path_1.default.dirname(process.argv[2]),
path_1.default.basename(process.argv[2])
]);
//# sourceMappingURL=sign-win32.js.map

View File

@@ -3,7 +3,7 @@
* Licensed under the MIT License. See License.txt in the project root for license information. * Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
import { main } from './sign'; import { main } from './sign.ts';
import path from 'path'; import path from 'path';
main([ main([

View File

@@ -1,209 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Temp = void 0;
exports.main = main;
const child_process_1 = __importDefault(require("child_process"));
const fs_1 = __importDefault(require("fs"));
const crypto_1 = __importDefault(require("crypto"));
const path_1 = __importDefault(require("path"));
const os_1 = __importDefault(require("os"));
// Hands out unique temp-file paths and deletes all of them on dispose().
class Temp {
    // Every path handed out so far; removed (best effort) in dispose().
    _files = [];
    // Return a unique random path under the OS temp directory and remember
    // it for cleanup. Note: the file itself is NOT created here.
    tmpNameSync() {
        const file = path_1.default.join(os_1.default.tmpdir(), crypto_1.default.randomBytes(20).toString('hex'));
        this._files.push(file);
        return file;
    }
    // Best-effort cleanup: unlink every path handed out, ignoring paths
    // that were never written or were already removed.
    dispose() {
        for (const file of this._files) {
            try {
                fs_1.default.unlinkSync(file);
            }
            catch (err) {
                // noop
            }
        }
    }
}
exports.Temp = Temp;
/**
 * Build the ESRP operation descriptors for a given signing type. Each
 * descriptor names the ESRP key code, the operation set and any
 * operation-specific parameters forwarded to the signing service.
 * @param {string} type - One of the known sign/notarize operation ids.
 * @throws {Error} When the type is not recognized.
 */
function getParams(type) {
    // All operations share the same tool identification.
    const tool = { toolName: 'sign', toolVersion: '1.0' };
    const operation = (keyCode, operationSetCode, parameters) => ({ keyCode, operationSetCode, parameters, ...tool });
    if (type === 'sign-windows') {
        return [
            operation('CP-230012', 'SigntoolSign', [
                { parameterName: 'OpusName', parameterValue: 'VS Code' },
                { parameterName: 'OpusInfo', parameterValue: 'https://code.visualstudio.com/' },
                { parameterName: 'Append', parameterValue: '/as' },
                { parameterName: 'FileDigest', parameterValue: '/fd "SHA256"' },
                { parameterName: 'PageHash', parameterValue: '/NPH' },
                { parameterName: 'TimeStamp', parameterValue: '/tr "http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer" /td sha256' }
            ]),
            operation('CP-230012', 'SigntoolVerify', [
                { parameterName: 'VerifyAll', parameterValue: '/all' }
            ])
        ];
    }
    if (type === 'sign-windows-appx') {
        return [
            operation('CP-229979', 'SigntoolSign', [
                { parameterName: 'OpusName', parameterValue: 'VS Code' },
                { parameterName: 'OpusInfo', parameterValue: 'https://code.visualstudio.com/' },
                { parameterName: 'FileDigest', parameterValue: '/fd "SHA256"' },
                { parameterName: 'PageHash', parameterValue: '/NPH' },
                { parameterName: 'TimeStamp', parameterValue: '/tr "http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer" /td sha256' }
            ]),
            operation('CP-229979', 'SigntoolVerify', [])
        ];
    }
    if (type === 'sign-pgp') {
        return [operation('CP-450779-Pgp', 'LinuxSign', [])];
    }
    if (type === 'sign-darwin') {
        return [operation('CP-401337-Apple', 'MacAppDeveloperSign', [{ parameterName: 'Hardening', parameterValue: '--options=runtime' }])];
    }
    if (type === 'notarize-darwin') {
        return [operation('CP-401337-Apple', 'MacAppNotarize', [])];
    }
    if (type === 'nuget') {
        return [
            operation('CP-401405', 'NuGetSign', []),
            operation('CP-401405', 'NuGetVerify', [])
        ];
    }
    throw new Error(`Sign type ${type} not found`);
}
// Invokes the ESRP CLI (a dotnet tool) to sign/notarize files matching
// `pattern` under `folderPath`. The AZDO access token is AES-256-CBC
// encrypted to a temp file (with the key/iv in a sibling temp file) and
// passed to the CLI by path; all temp files are removed on process exit.
function main([esrpCliPath, type, folderPath, pattern]) {
    const tmp = new Temp();
    process.on('exit', () => tmp.dispose());
    // Encrypt SYSTEM_ACCESSTOKEN so it never hits disk in plain text.
    const key = crypto_1.default.randomBytes(32);
    const iv = crypto_1.default.randomBytes(16);
    const cipher = crypto_1.default.createCipheriv('aes-256-cbc', key, iv);
    const encryptedToken = cipher.update(process.env['SYSTEM_ACCESSTOKEN'].trim(), 'utf8', 'hex') + cipher.final('hex');
    const encryptionDetailsPath = tmp.tmpNameSync();
    fs_1.default.writeFileSync(encryptionDetailsPath, JSON.stringify({ key: key.toString('hex'), iv: iv.toString('hex') }));
    const encryptedTokenPath = tmp.tmpNameSync();
    fs_1.default.writeFileSync(encryptedTokenPath, encryptedToken);
    // The file-match pattern and the ESRP operations are also passed by path.
    const patternPath = tmp.tmpNameSync();
    fs_1.default.writeFileSync(patternPath, pattern);
    const paramsPath = tmp.tmpNameSync();
    fs_1.default.writeFileSync(paramsPath, JSON.stringify(getParams(type)));
    const dotnetVersion = child_process_1.default.execSync('dotnet --version', { encoding: 'utf8' }).trim();
    // The ADO task version is encoded two directory levels above the CLI dll.
    const adoTaskVersion = path_1.default.basename(path_1.default.dirname(path_1.default.dirname(esrpCliPath)));
    // Federated (workload identity) auth data forwarded to the ESRP CLI.
    const federatedTokenData = {
        jobId: process.env['SYSTEM_JOBID'],
        planId: process.env['SYSTEM_PLANID'],
        projectId: process.env['SYSTEM_TEAMPROJECTID'],
        hub: process.env['SYSTEM_HOSTTYPE'],
        uri: process.env['SYSTEM_COLLECTIONURI'],
        managedIdentityId: process.env['VSCODE_ESRP_CLIENT_ID'],
        managedIdentityTenantId: process.env['VSCODE_ESRP_TENANT_ID'],
        serviceConnectionId: process.env['VSCODE_ESRP_SERVICE_CONNECTION_ID'],
        tempDirectory: os_1.default.tmpdir(),
        systemAccessToken: encryptedTokenPath,
        encryptionKey: encryptionDetailsPath
    };
    // ESRP CLI argument list; flags per the ESRP vsts.sign contract.
    const args = [
        esrpCliPath,
        'vsts.sign',
        '-a',
        process.env['ESRP_CLIENT_ID'],
        '-d',
        process.env['ESRP_TENANT_ID'],
        '-k', JSON.stringify({ akv: 'vscode-esrp' }),
        '-z', JSON.stringify({ akv: 'vscode-esrp', cert: 'esrp-sign' }),
        '-f', folderPath,
        '-p', patternPath,
        '-u', 'false',
        '-x', 'regularSigning',
        '-b', 'input.json',
        '-l', 'AzSecPack_PublisherPolicyProd.xml',
        '-y', 'inlineSignParams',
        '-j', paramsPath,
        '-c', '9997',
        '-t', '120',
        '-g', '10',
        '-v', 'Tls12',
        '-s', 'https://api.esrp.microsoft.com/api/v1',
        '-m', '0',
        '-o', 'Microsoft',
        '-i', 'https://www.microsoft.com',
        '-n', '5',
        '-r', 'true',
        '-w', dotnetVersion,
        '-skipAdoReportAttachment', 'false',
        '-pendingAnalysisWaitTimeoutMinutes', '5',
        '-adoTaskVersion', adoTaskVersion,
        '-resourceUri', 'https://msazurecloud.onmicrosoft.com/api.esrp.microsoft.com',
        '-esrpClientId',
        process.env['ESRP_CLIENT_ID'],
        '-useMSIAuthentication', 'true',
        '-federatedTokenData', JSON.stringify(federatedTokenData)
    ];
    try {
        // Inherit stdio so ESRP progress streams into the pipeline log.
        child_process_1.default.execFileSync('dotnet', args, { stdio: 'inherit' });
    }
    catch (err) {
        console.error('ESRP failed');
        console.error(err);
        process.exit(1);
    }
}
if (require.main === module) {
main(process.argv.slice(2));
process.exit(0);
}
//# sourceMappingURL=sign.js.map

View File

@@ -216,7 +216,7 @@ export function main([esrpCliPath, type, folderPath, pattern]: string[]) {
} }
} }
if (require.main === module) { if (import.meta.main) {
main(process.argv.slice(2)); main(process.argv.slice(2));
process.exit(0); process.exit(0);
} }

View File

@@ -1,46 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const publish_1 = require("../common/publish");
const retry_1 = require("../common/retry");
// Lists the artifacts currently attached to this pipeline run, excluding
// SBOM artifacts (names ending in "sbom").
async function getPipelineArtifacts() {
    const result = await (0, publish_1.requestAZDOAPI)('artifacts');
    return result.value.filter(a => !/sbom$/.test(a.name));
}
// Blocks until every named artifact is attached to the pipeline, polling
// every 30 seconds for up to 30 minutes; throws on timeout.
async function main(artifacts) {
    if (artifacts.length === 0) {
        throw new Error(`Usage: node waitForArtifacts.js <artifactName1> <artifactName2> ...`);
    }
    // This loop will run for 30 minutes and waits for the x64 and arm64 artifacts
    // to be uploaded to the pipeline by the `macOS` and `macOSARM64` jobs. As soon
    // as these artifacts are found, the loop completes and the `macOSUniversal`
    // job resumes.
    for (let index = 0; index < 60; index++) {
        try {
            console.log(`Waiting for artifacts (${artifacts.join(', ')}) to be uploaded (${index + 1}/60)...`);
            const allArtifacts = await (0, retry_1.retry)(() => getPipelineArtifacts());
            console.log(`  * Artifacts attached to the pipelines: ${allArtifacts.length > 0 ? allArtifacts.map(a => a.name).join(', ') : 'none'}`);
            const foundArtifacts = allArtifacts.filter(a => artifacts.includes(a.name));
            console.log(`  * Found artifacts: ${foundArtifacts.length > 0 ? foundArtifacts.map(a => a.name).join(', ') : 'none'}`);
            if (foundArtifacts.length === artifacts.length) {
                console.log(`  * All artifacts were found`);
                return;
            }
        }
        catch (err) {
            // Polling errors are logged but tolerated; the next iteration retries.
            console.error(`ERROR: Failed to get pipeline artifacts: ${err}`);
        }
        await new Promise(c => setTimeout(c, 30_000));
    }
    throw new Error(`ERROR: Artifacts (${artifacts.join(', ')}) were not uploaded within 30 minutes.`);
}
main(process.argv.splice(2)).then(() => {
process.exit(0);
}, err => {
console.error(err);
process.exit(1);
});
//# sourceMappingURL=waitForArtifacts.js.map

View File

@@ -3,8 +3,8 @@
* Licensed under the MIT License. See License.txt in the project root for license information. * Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
import { Artifact, requestAZDOAPI } from '../common/publish'; import { type Artifact, requestAZDOAPI } from '../common/publish.ts';
import { retry } from '../common/retry'; import { retry } from '../common/retry.ts';
async function getPipelineArtifacts(): Promise<Artifact[]> { async function getPipelineArtifacts(): Promise<Artifact[]> {
const result = await requestAZDOAPI<{ readonly value: Artifact[] }>('artifacts'); const result = await requestAZDOAPI<{ readonly value: Artifact[] }>('artifacts');
@@ -13,7 +13,7 @@ async function getPipelineArtifacts(): Promise<Artifact[]> {
async function main(artifacts: string[]): Promise<void> { async function main(artifacts: string[]): Promise<void> {
if (artifacts.length === 0) { if (artifacts.length === 0) {
throw new Error(`Usage: node waitForArtifacts.js <artifactName1> <artifactName2> ...`); throw new Error(`Usage: node waitForArtifacts.ts <artifactName1> <artifactName2> ...`);
} }
// This loop will run for 30 minutes and waits to the x64 and arm64 artifacts // This loop will run for 30 minutes and waits to the x64 and arm64 artifacts

View File

@@ -1,30 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const codesign_1 = require("../common/codesign");
const publish_1 = require("../common/publish");
// Codesigns and then notarizes the macOS client archive for the current
// architecture via the ESRP CLI, streaming the tool output into the log.
async function main() {
    const arch = (0, publish_1.e)('VSCODE_ARCH');
    const esrpCliDLLPath = (0, publish_1.e)('EsrpCliDllPath');
    const pipelineWorkspace = (0, publish_1.e)('PIPELINE_WORKSPACE');
    // Location and name of the downloaded archive to sign.
    const folder = `${pipelineWorkspace}/vscode_client_darwin_${arch}_archive`;
    const glob = `VSCode-darwin-${arch}.zip`;
    // Codesign
    (0, codesign_1.printBanner)('Codesign');
    const codeSignTask = (0, codesign_1.spawnCodesignProcess)(esrpCliDLLPath, 'sign-darwin', folder, glob);
    await (0, codesign_1.streamProcessOutputAndCheckResult)('Codesign', codeSignTask);
    // Notarize
    (0, codesign_1.printBanner)('Notarize');
    const notarizeTask = (0, codesign_1.spawnCodesignProcess)(esrpCliDLLPath, 'notarize-darwin', folder, glob);
    await (0, codesign_1.streamProcessOutputAndCheckResult)('Notarize', notarizeTask);
}
main().then(() => {
process.exit(0);
}, err => {
console.error(`ERROR: ${err}`);
process.exit(1);
});
//# sourceMappingURL=codesign.js.map

View File

@@ -3,8 +3,8 @@
* Licensed under the MIT License. See License.txt in the project root for license information. * Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
import { printBanner, spawnCodesignProcess, streamProcessOutputAndCheckResult } from '../common/codesign'; import { printBanner, spawnCodesignProcess, streamProcessOutputAndCheckResult } from '../common/codesign.ts';
import { e } from '../common/publish'; import { e } from '../common/publish.ts';
async function main() { async function main() {
const arch = e('VSCODE_ARCH'); const arch = e('VSCODE_ARCH');

View File

@@ -32,7 +32,7 @@ jobs:
condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none'))
displayName: Setup NPM Registry displayName: Setup NPM Registry
- script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js darwin $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts darwin $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash
displayName: Prepare node_modules cache key displayName: Prepare node_modules cache key
- task: Cache@2 - task: Cache@2
@@ -85,13 +85,13 @@ jobs:
displayName: Install dependencies displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- script: node build/azure-pipelines/distro/mixin-npm - script: node build/azure-pipelines/distro/mixin-npm.ts
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Mixin distro node modules displayName: Mixin distro node modules
- script: | - script: |
set -e set -e
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt
mkdir -p .build/node_modules_cache mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))

View File

@@ -67,7 +67,7 @@ jobs:
GITHUB_TOKEN: "$(github-distro-mixin-password)" GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: Install build dependencies displayName: Install build dependencies
- pwsh: node build/azure-pipelines/common/waitForArtifacts.js unsigned_vscode_client_darwin_x64_archive unsigned_vscode_client_darwin_arm64_archive - pwsh: node -- build/azure-pipelines/common/waitForArtifacts.ts unsigned_vscode_client_darwin_x64_archive unsigned_vscode_client_darwin_arm64_archive
env: env:
SYSTEM_ACCESSTOKEN: $(System.AccessToken) SYSTEM_ACCESSTOKEN: $(System.AccessToken)
displayName: Wait for x64 and arm64 artifacts displayName: Wait for x64 and arm64 artifacts
@@ -80,7 +80,7 @@ jobs:
artifact: unsigned_vscode_client_darwin_arm64_archive artifact: unsigned_vscode_client_darwin_arm64_archive
displayName: Download arm64 artifact displayName: Download arm64 artifact
- script: node build/azure-pipelines/distro/mixin-quality - script: node build/azure-pipelines/distro/mixin-quality.ts
displayName: Mixin distro quality displayName: Mixin distro quality
- script: | - script: |
@@ -88,14 +88,14 @@ jobs:
unzip $(Pipeline.Workspace)/unsigned_vscode_client_darwin_x64_archive/VSCode-darwin-x64.zip -d $(agent.builddirectory)/VSCode-darwin-x64 & unzip $(Pipeline.Workspace)/unsigned_vscode_client_darwin_x64_archive/VSCode-darwin-x64.zip -d $(agent.builddirectory)/VSCode-darwin-x64 &
unzip $(Pipeline.Workspace)/unsigned_vscode_client_darwin_arm64_archive/VSCode-darwin-arm64.zip -d $(agent.builddirectory)/VSCode-darwin-arm64 & unzip $(Pipeline.Workspace)/unsigned_vscode_client_darwin_arm64_archive/VSCode-darwin-arm64.zip -d $(agent.builddirectory)/VSCode-darwin-arm64 &
wait wait
DEBUG=* node build/darwin/create-universal-app.js $(agent.builddirectory) DEBUG=* node build/darwin/create-universal-app.ts $(agent.builddirectory)
displayName: Create Universal App displayName: Create Universal App
- script: | - script: |
set -e set -e
APP_ROOT="$(Agent.BuildDirectory)/VSCode-darwin-$(VSCODE_ARCH)" APP_ROOT="$(Agent.BuildDirectory)/VSCode-darwin-$(VSCODE_ARCH)"
APP_NAME="`ls $APP_ROOT | head -n 1`" APP_NAME="`ls $APP_ROOT | head -n 1`"
APP_PATH="$APP_ROOT/$APP_NAME" node build/darwin/verify-macho.js universal APP_PATH="$APP_ROOT/$APP_NAME" node build/darwin/verify-macho.ts universal
displayName: Verify arch of Mach-O objects displayName: Verify arch of Mach-O objects
- script: | - script: |
@@ -107,7 +107,7 @@ jobs:
security import $(agent.tempdirectory)/cert.p12 -k $(agent.tempdirectory)/buildagent.keychain -P "$(macos-developer-certificate-key)" -T /usr/bin/codesign security import $(agent.tempdirectory)/cert.p12 -k $(agent.tempdirectory)/buildagent.keychain -P "$(macos-developer-certificate-key)" -T /usr/bin/codesign
export CODESIGN_IDENTITY=$(security find-identity -v -p codesigning $(agent.tempdirectory)/buildagent.keychain | grep -oEi "([0-9A-F]{40})" | head -n 1) export CODESIGN_IDENTITY=$(security find-identity -v -p codesigning $(agent.tempdirectory)/buildagent.keychain | grep -oEi "([0-9A-F]{40})" | head -n 1)
security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k pwd $(agent.tempdirectory)/buildagent.keychain security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k pwd $(agent.tempdirectory)/buildagent.keychain
DEBUG=electron-osx-sign* node build/darwin/sign.js $(agent.builddirectory) DEBUG=electron-osx-sign* node build/darwin/sign.ts $(agent.builddirectory)
displayName: Set Hardened Entitlements displayName: Set Hardened Entitlements
- script: | - script: |
@@ -132,12 +132,12 @@ jobs:
Pattern: noop Pattern: noop
displayName: 'Install ESRP Tooling' displayName: 'Install ESRP Tooling'
- script: node build/azure-pipelines/common/sign $(Agent.RootDirectory)/_tasks/EsrpCodeSigning_*/*/net6.0/esrpcli.dll sign-darwin $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive VSCode-darwin-$(VSCODE_ARCH).zip - script: node build/azure-pipelines/common/sign.ts $(Agent.RootDirectory)/_tasks/EsrpCodeSigning_*/*/net6.0/esrpcli.dll sign-darwin $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive VSCode-darwin-$(VSCODE_ARCH).zip
env: env:
SYSTEM_ACCESSTOKEN: $(System.AccessToken) SYSTEM_ACCESSTOKEN: $(System.AccessToken)
displayName: ✍️ Codesign displayName: ✍️ Codesign
- script: node build/azure-pipelines/common/sign $(Agent.RootDirectory)/_tasks/EsrpCodeSigning_*/*/net6.0/esrpcli.dll notarize-darwin $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive VSCode-darwin-$(VSCODE_ARCH).zip - script: node build/azure-pipelines/common/sign.ts $(Agent.RootDirectory)/_tasks/EsrpCodeSigning_*/*/net6.0/esrpcli.dll notarize-darwin $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive VSCode-darwin-$(VSCODE_ARCH).zip
env: env:
SYSTEM_ACCESSTOKEN: $(System.AccessToken) SYSTEM_ACCESSTOKEN: $(System.AccessToken)
displayName: ✍️ Notarize displayName: ✍️ Notarize

View File

@@ -33,12 +33,12 @@ steps:
archiveFilePatterns: $(Build.ArtifactStagingDirectory)/pkg/${{ target }}/*.zip archiveFilePatterns: $(Build.ArtifactStagingDirectory)/pkg/${{ target }}/*.zip
destinationFolder: $(Build.ArtifactStagingDirectory)/sign/${{ target }} destinationFolder: $(Build.ArtifactStagingDirectory)/sign/${{ target }}
- script: node build/azure-pipelines/common/sign $(Agent.RootDirectory)/_tasks/EsrpCodeSigning_*/*/net6.0/esrpcli.dll sign-darwin $(Build.ArtifactStagingDirectory)/pkg "*.zip" - script: node build/azure-pipelines/common/sign.ts $(Agent.RootDirectory)/_tasks/EsrpCodeSigning_*/*/net6.0/esrpcli.dll sign-darwin $(Build.ArtifactStagingDirectory)/pkg "*.zip"
env: env:
SYSTEM_ACCESSTOKEN: $(System.AccessToken) SYSTEM_ACCESSTOKEN: $(System.AccessToken)
displayName: ✍️ Codesign displayName: ✍️ Codesign
- script: node build/azure-pipelines/common/sign $(Agent.RootDirectory)/_tasks/EsrpCodeSigning_*/*/net6.0/esrpcli.dll notarize-darwin $(Build.ArtifactStagingDirectory)/pkg "*.zip" - script: node build/azure-pipelines/common/sign.ts $(Agent.RootDirectory)/_tasks/EsrpCodeSigning_*/*/net6.0/esrpcli.dll notarize-darwin $(Build.ArtifactStagingDirectory)/pkg "*.zip"
env: env:
SYSTEM_ACCESSTOKEN: $(System.AccessToken) SYSTEM_ACCESSTOKEN: $(System.AccessToken)
displayName: ✍️ Notarize displayName: ✍️ Notarize

View File

@@ -43,7 +43,7 @@ steps:
condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none'))
displayName: Setup NPM Registry displayName: Setup NPM Registry
- script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js darwin $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts darwin $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash
displayName: Prepare node_modules cache key displayName: Prepare node_modules cache key
- task: Cache@2 - task: Cache@2
@@ -100,25 +100,25 @@ steps:
displayName: Install dependencies displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- script: node build/azure-pipelines/distro/mixin-npm - script: node build/azure-pipelines/distro/mixin-npm.ts
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Mixin distro node modules displayName: Mixin distro node modules
- script: | - script: |
set -e set -e
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt
mkdir -p .build/node_modules_cache mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Create node_modules archive displayName: Create node_modules archive
- script: node build/azure-pipelines/distro/mixin-quality - script: node build/azure-pipelines/distro/mixin-quality.ts
displayName: Mixin distro quality displayName: Mixin distro quality
- template: ../../common/install-builtin-extensions.yml@self - template: ../../common/install-builtin-extensions.yml@self
- ${{ if ne(parameters.VSCODE_CIBUILD, true) }}: - ${{ if ne(parameters.VSCODE_CIBUILD, true) }}:
- script: node build/lib/policies/policyGenerator build/lib/policies/policyData.jsonc darwin - script: npm run copy-policy-dto --prefix build && node build/lib/policies/policyGenerator.ts build/lib/policies/policyData.jsonc darwin
displayName: Generate policy definitions displayName: Generate policy definitions
retryCountOnTaskFailure: 3 retryCountOnTaskFailure: 3
@@ -178,8 +178,8 @@ steps:
set -e set -e
APP_ROOT="$(Agent.BuildDirectory)/VSCode-darwin-$(VSCODE_ARCH)" APP_ROOT="$(Agent.BuildDirectory)/VSCode-darwin-$(VSCODE_ARCH)"
APP_NAME="`ls $APP_ROOT | head -n 1`" APP_NAME="`ls $APP_ROOT | head -n 1`"
APP_PATH="$APP_ROOT/$APP_NAME" node build/darwin/verify-macho.js $(VSCODE_ARCH) APP_PATH="$APP_ROOT/$APP_NAME" node build/darwin/verify-macho.ts $(VSCODE_ARCH)
APP_PATH="$(Agent.BuildDirectory)/vscode-server-darwin-$(VSCODE_ARCH)" node build/darwin/verify-macho.js $(VSCODE_ARCH) APP_PATH="$(Agent.BuildDirectory)/vscode-server-darwin-$(VSCODE_ARCH)" node build/darwin/verify-macho.ts $(VSCODE_ARCH)
displayName: Verify arch of Mach-O objects displayName: Verify arch of Mach-O objects
- script: | - script: |
@@ -191,7 +191,7 @@ steps:
condition: eq(variables['BUILT_CLIENT'], 'true') condition: eq(variables['BUILT_CLIENT'], 'true')
displayName: Package client displayName: Package client
- pwsh: node build/azure-pipelines/common/checkForArtifact.js CLIENT_ARCHIVE_UPLOADED unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive - pwsh: node build/azure-pipelines/common/checkForArtifact.ts CLIENT_ARCHIVE_UPLOADED unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive
env: env:
SYSTEM_ACCESSTOKEN: $(System.AccessToken) SYSTEM_ACCESSTOKEN: $(System.AccessToken)
displayName: Check for client artifact displayName: Check for client artifact
@@ -221,7 +221,7 @@ steps:
security import $(agent.tempdirectory)/cert.p12 -k $(agent.tempdirectory)/buildagent.keychain -P "$(macos-developer-certificate-key)" -T /usr/bin/codesign security import $(agent.tempdirectory)/cert.p12 -k $(agent.tempdirectory)/buildagent.keychain -P "$(macos-developer-certificate-key)" -T /usr/bin/codesign
export CODESIGN_IDENTITY=$(security find-identity -v -p codesigning $(agent.tempdirectory)/buildagent.keychain | grep -oEi "([0-9A-F]{40})" | head -n 1) export CODESIGN_IDENTITY=$(security find-identity -v -p codesigning $(agent.tempdirectory)/buildagent.keychain | grep -oEi "([0-9A-F]{40})" | head -n 1)
security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k pwd $(agent.tempdirectory)/buildagent.keychain security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k pwd $(agent.tempdirectory)/buildagent.keychain
DEBUG=electron-osx-sign* node build/darwin/sign.js $(agent.builddirectory) DEBUG=electron-osx-sign* node build/darwin/sign.ts $(agent.builddirectory)
displayName: Set Hardened Entitlements displayName: Set Hardened Entitlements
- script: | - script: |
@@ -257,7 +257,7 @@ steps:
echo "##vso[task.setvariable variable=EsrpCliDllPath]$Version/net6.0/esrpcli.dll" echo "##vso[task.setvariable variable=EsrpCliDllPath]$Version/net6.0/esrpcli.dll"
displayName: Find ESRP CLI displayName: Find ESRP CLI
- script: npx deemon --detach --wait node build/azure-pipelines/darwin/codesign.js - script: npx deemon --detach --wait node build/azure-pipelines/darwin/codesign.ts
env: env:
EsrpCliDllPath: $(EsrpCliDllPath) EsrpCliDllPath: $(EsrpCliDllPath)
SYSTEM_ACCESSTOKEN: $(System.AccessToken) SYSTEM_ACCESSTOKEN: $(System.AccessToken)
@@ -271,7 +271,7 @@ steps:
VSCODE_RUN_REMOTE_TESTS: ${{ parameters.VSCODE_RUN_REMOTE_TESTS }} VSCODE_RUN_REMOTE_TESTS: ${{ parameters.VSCODE_RUN_REMOTE_TESTS }}
- ${{ if ne(parameters.VSCODE_CIBUILD, true) }}: - ${{ if ne(parameters.VSCODE_CIBUILD, true) }}:
- script: npx deemon --attach node build/azure-pipelines/darwin/codesign.js - script: npx deemon --attach node build/azure-pipelines/darwin/codesign.ts
condition: succeededOrFailed() condition: succeededOrFailed()
displayName: "Post-job: ✍️ Codesign & Notarize" displayName: "Post-job: ✍️ Codesign & Notarize"

View File

@@ -1,38 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
const { dirs } = require('../../npm/dirs');
function log(...args) {
console.log(`[${new Date().toLocaleTimeString('en', { hour12: false })}]`, '[distro]', ...args);
}
function mixin(mixinPath) {
if (!fs_1.default.existsSync(`${mixinPath}/node_modules`)) {
log(`Skipping distro npm dependencies: ${mixinPath} (no node_modules)`);
return;
}
log(`Mixing in distro npm dependencies: ${mixinPath}`);
const distroPackageJson = JSON.parse(fs_1.default.readFileSync(`${mixinPath}/package.json`, 'utf8'));
const targetPath = path_1.default.relative('.build/distro/npm', mixinPath);
for (const dependency of Object.keys(distroPackageJson.dependencies)) {
fs_1.default.rmSync(`./${targetPath}/node_modules/${dependency}`, { recursive: true, force: true });
fs_1.default.cpSync(`${mixinPath}/node_modules/${dependency}`, `./${targetPath}/node_modules/${dependency}`, { recursive: true, force: true, dereference: true });
}
log(`Mixed in distro npm dependencies: ${mixinPath} ✔︎`);
}
function main() {
log(`Mixing in distro npm dependencies...`);
const mixinPaths = dirs.filter(d => /^.build\/distro\/npm/.test(d));
for (const mixinPath of mixinPaths) {
mixin(mixinPath);
}
}
main();
//# sourceMappingURL=mixin-npm.js.map

View File

@@ -5,7 +5,7 @@
import fs from 'fs'; import fs from 'fs';
import path from 'path'; import path from 'path';
const { dirs } = require('../../npm/dirs') as { dirs: string[] }; import { dirs } from '../../npm/dirs.js';
function log(...args: unknown[]): void { function log(...args: unknown[]): void {
console.log(`[${new Date().toLocaleTimeString('en', { hour12: false })}]`, '[distro]', ...args); console.log(`[${new Date().toLocaleTimeString('en', { hour12: false })}]`, '[distro]', ...args);

View File

@@ -1,56 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
function log(...args) {
console.log(`[${new Date().toLocaleTimeString('en', { hour12: false })}]`, '[distro]', ...args);
}
function main() {
const quality = process.env['VSCODE_QUALITY'];
if (!quality) {
throw new Error('Missing VSCODE_QUALITY, skipping mixin');
}
log(`Mixing in distro quality...`);
const basePath = `.build/distro/mixin/${quality}`;
for (const name of fs_1.default.readdirSync(basePath)) {
const distroPath = path_1.default.join(basePath, name);
const ossPath = path_1.default.relative(basePath, distroPath);
if (ossPath === 'product.json') {
const distro = JSON.parse(fs_1.default.readFileSync(distroPath, 'utf8'));
const oss = JSON.parse(fs_1.default.readFileSync(ossPath, 'utf8'));
let builtInExtensions = oss.builtInExtensions;
if (Array.isArray(distro.builtInExtensions)) {
log('Overwriting built-in extensions:', distro.builtInExtensions.map(e => e.name));
builtInExtensions = distro.builtInExtensions;
}
else if (distro.builtInExtensions) {
const include = distro.builtInExtensions['include'] ?? [];
const exclude = distro.builtInExtensions['exclude'] ?? [];
log('OSS built-in extensions:', builtInExtensions.map(e => e.name));
log('Including built-in extensions:', include.map(e => e.name));
log('Excluding built-in extensions:', exclude);
builtInExtensions = builtInExtensions.filter(ext => !include.find(e => e.name === ext.name) && !exclude.find(name => name === ext.name));
builtInExtensions = [...builtInExtensions, ...include];
log('Final built-in extensions:', builtInExtensions.map(e => e.name));
}
else {
log('Inheriting OSS built-in extensions', builtInExtensions.map(e => e.name));
}
const result = { webBuiltInExtensions: oss.webBuiltInExtensions, ...distro, builtInExtensions };
fs_1.default.writeFileSync(ossPath, JSON.stringify(result, null, '\t'), 'utf8');
}
else {
fs_1.default.cpSync(distroPath, ossPath, { force: true, recursive: true });
}
log(distroPath, '✔︎');
}
}
main();
//# sourceMappingURL=mixin-quality.js.map

View File

@@ -1,29 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const codesign_1 = require("../common/codesign");
const publish_1 = require("../common/publish");
async function main() {
const esrpCliDLLPath = (0, publish_1.e)('EsrpCliDllPath');
// Start the code sign processes in parallel
// 1. Codesign deb package
// 2. Codesign rpm package
const codesignTask1 = (0, codesign_1.spawnCodesignProcess)(esrpCliDLLPath, 'sign-pgp', '.build/linux/deb', '*.deb');
const codesignTask2 = (0, codesign_1.spawnCodesignProcess)(esrpCliDLLPath, 'sign-pgp', '.build/linux/rpm', '*.rpm');
// Codesign deb package
(0, codesign_1.printBanner)('Codesign deb package');
await (0, codesign_1.streamProcessOutputAndCheckResult)('Codesign deb package', codesignTask1);
// Codesign rpm package
(0, codesign_1.printBanner)('Codesign rpm package');
await (0, codesign_1.streamProcessOutputAndCheckResult)('Codesign rpm package', codesignTask2);
}
main().then(() => {
process.exit(0);
}, err => {
console.error(`ERROR: ${err}`);
process.exit(1);
});
//# sourceMappingURL=codesign.js.map

View File

@@ -3,8 +3,8 @@
* Licensed under the MIT License. See License.txt in the project root for license information. * Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
import { printBanner, spawnCodesignProcess, streamProcessOutputAndCheckResult } from '../common/codesign'; import { printBanner, spawnCodesignProcess, streamProcessOutputAndCheckResult } from '../common/codesign.ts';
import { e } from '../common/publish'; import { e } from '../common/publish.ts';
async function main() { async function main() {
const esrpCliDLLPath = e('EsrpCliDllPath'); const esrpCliDLLPath = e('EsrpCliDllPath');

View File

@@ -52,7 +52,7 @@ jobs:
condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none'))
displayName: Setup NPM Registry displayName: Setup NPM Registry
- script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js linux $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts linux $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash
displayName: Prepare node_modules cache key displayName: Prepare node_modules cache key
- task: Cache@2 - task: Cache@2
@@ -104,7 +104,7 @@ jobs:
SYSROOT_ARCH="amd64" SYSROOT_ARCH="amd64"
fi fi
export VSCODE_SYSROOT_DIR=$(Build.SourcesDirectory)/.build/sysroots/glibc-2.28-gcc-8.5.0 export VSCODE_SYSROOT_DIR=$(Build.SourcesDirectory)/.build/sysroots/glibc-2.28-gcc-8.5.0
SYSROOT_ARCH="$SYSROOT_ARCH" VSCODE_SYSROOT_PREFIX="-glibc-2.28-gcc-8.5.0" node -e '(async () => { const { getVSCodeSysroot } = require("./build/linux/debian/install-sysroot.js"); await getVSCodeSysroot(process.env["SYSROOT_ARCH"]); })()' SYSROOT_ARCH="$SYSROOT_ARCH" VSCODE_SYSROOT_PREFIX="-glibc-2.28-gcc-8.5.0" node -e 'import { getVSCodeSysroot } from "./build/linux/debian/install-sysroot.ts"; (async () => { await getVSCodeSysroot(process.env["SYSROOT_ARCH"]); })()'
env: env:
VSCODE_ARCH: $(VSCODE_ARCH) VSCODE_ARCH: $(VSCODE_ARCH)
GITHUB_TOKEN: "$(github-distro-mixin-password)" GITHUB_TOKEN: "$(github-distro-mixin-password)"
@@ -137,13 +137,13 @@ jobs:
displayName: Install dependencies displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- script: node build/azure-pipelines/distro/mixin-npm - script: node build/azure-pipelines/distro/mixin-npm.ts
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Mixin distro node modules displayName: Mixin distro node modules
- script: | - script: |
set -e set -e
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt
mkdir -p .build/node_modules_cache mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))

View File

@@ -13,14 +13,14 @@ if [ -d "$VSCODE_CLIENT_SYSROOT_DIR" ]; then
echo "Using cached client sysroot" echo "Using cached client sysroot"
else else
echo "Downloading client sysroot" echo "Downloading client sysroot"
SYSROOT_ARCH="$SYSROOT_ARCH" VSCODE_SYSROOT_DIR="$VSCODE_CLIENT_SYSROOT_DIR" node -e '(async () => { const { getVSCodeSysroot } = require("./build/linux/debian/install-sysroot.js"); await getVSCodeSysroot(process.env["SYSROOT_ARCH"]); })()' SYSROOT_ARCH="$SYSROOT_ARCH" VSCODE_SYSROOT_DIR="$VSCODE_CLIENT_SYSROOT_DIR" node -e 'import { getVSCodeSysroot } from "./build/linux/debian/install-sysroot.ts"; (async () => { await getVSCodeSysroot(process.env["SYSROOT_ARCH"]); })()'
fi fi
if [ -d "$VSCODE_REMOTE_SYSROOT_DIR" ]; then if [ -d "$VSCODE_REMOTE_SYSROOT_DIR" ]; then
echo "Using cached remote sysroot" echo "Using cached remote sysroot"
else else
echo "Downloading remote sysroot" echo "Downloading remote sysroot"
SYSROOT_ARCH="$SYSROOT_ARCH" VSCODE_SYSROOT_DIR="$VSCODE_REMOTE_SYSROOT_DIR" VSCODE_SYSROOT_PREFIX="-glibc-2.28-gcc-8.5.0" node -e '(async () => { const { getVSCodeSysroot } = require("./build/linux/debian/install-sysroot.js"); await getVSCodeSysroot(process.env["SYSROOT_ARCH"]); })()' SYSROOT_ARCH="$SYSROOT_ARCH" VSCODE_SYSROOT_DIR="$VSCODE_REMOTE_SYSROOT_DIR" VSCODE_SYSROOT_PREFIX="-glibc-2.28-gcc-8.5.0" node -e 'import { getVSCodeSysroot } from "./build/linux/debian/install-sysroot.ts"; (async () => { await getVSCodeSysroot(process.env["SYSROOT_ARCH"]); })()'
fi fi
if [ "$npm_config_arch" == "x64" ]; then if [ "$npm_config_arch" == "x64" ]; then
@@ -33,7 +33,7 @@ if [ "$npm_config_arch" == "x64" ]; then
VSCODE_LIBCXX_HEADERS_DIR=$PWD/.build/libcxx_headers \ VSCODE_LIBCXX_HEADERS_DIR=$PWD/.build/libcxx_headers \
VSCODE_LIBCXXABI_HEADERS_DIR=$PWD/.build/libcxxabi_headers \ VSCODE_LIBCXXABI_HEADERS_DIR=$PWD/.build/libcxxabi_headers \
VSCODE_ARCH="$npm_config_arch" \ VSCODE_ARCH="$npm_config_arch" \
node build/linux/libcxx-fetcher.js node build/linux/libcxx-fetcher.ts
# Set compiler toolchain # Set compiler toolchain
# Flags for the client build are based on # Flags for the client build are based on

View File

@@ -65,7 +65,7 @@ steps:
condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none'))
displayName: Setup NPM Registry displayName: Setup NPM Registry
- script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js linux $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts linux $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash
displayName: Prepare node_modules cache key displayName: Prepare node_modules cache key
- task: Cache@2 - task: Cache@2
@@ -121,7 +121,7 @@ steps:
SYSROOT_ARCH="amd64" SYSROOT_ARCH="amd64"
fi fi
export VSCODE_SYSROOT_DIR=$(Build.SourcesDirectory)/.build/sysroots/glibc-2.28-gcc-8.5.0 export VSCODE_SYSROOT_DIR=$(Build.SourcesDirectory)/.build/sysroots/glibc-2.28-gcc-8.5.0
SYSROOT_ARCH="$SYSROOT_ARCH" VSCODE_SYSROOT_PREFIX="-glibc-2.28-gcc-8.5.0" node -e '(async () => { const { getVSCodeSysroot } = require("./build/linux/debian/install-sysroot.js"); await getVSCodeSysroot(process.env["SYSROOT_ARCH"]); })()' SYSROOT_ARCH="$SYSROOT_ARCH" VSCODE_SYSROOT_PREFIX="-glibc-2.28-gcc-8.5.0" node -e 'import { getVSCodeSysroot } from "./build/linux/debian/install-sysroot.ts"; (async () => { await getVSCodeSysroot(process.env["SYSROOT_ARCH"]); })()'
env: env:
VSCODE_ARCH: $(VSCODE_ARCH) VSCODE_ARCH: $(VSCODE_ARCH)
GITHUB_TOKEN: "$(github-distro-mixin-password)" GITHUB_TOKEN: "$(github-distro-mixin-password)"
@@ -153,25 +153,25 @@ steps:
displayName: Install dependencies displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- script: node build/azure-pipelines/distro/mixin-npm - script: node build/azure-pipelines/distro/mixin-npm.ts
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Mixin distro node modules displayName: Mixin distro node modules
- script: | - script: |
set -e set -e
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt
mkdir -p .build/node_modules_cache mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Create node_modules archive displayName: Create node_modules archive
- script: node build/azure-pipelines/distro/mixin-quality - script: node build/azure-pipelines/distro/mixin-quality.ts
displayName: Mixin distro quality displayName: Mixin distro quality
- template: ../../common/install-builtin-extensions.yml@self - template: ../../common/install-builtin-extensions.yml@self
- ${{ if ne(parameters.VSCODE_CIBUILD, true) }}: - ${{ if ne(parameters.VSCODE_CIBUILD, true) }}:
- script: node build/lib/policies/policyGenerator build/lib/policies/policyData.jsonc linux - script: npm run copy-policy-dto --prefix build && node build/lib/policies/policyGenerator.ts build/lib/policies/policyData.jsonc linux
displayName: Generate policy definitions displayName: Generate policy definitions
retryCountOnTaskFailure: 3 retryCountOnTaskFailure: 3
@@ -365,7 +365,7 @@ steps:
echo "##vso[task.setvariable variable=EsrpCliDllPath]$Version/net6.0/esrpcli.dll" echo "##vso[task.setvariable variable=EsrpCliDllPath]$Version/net6.0/esrpcli.dll"
displayName: Find ESRP CLI displayName: Find ESRP CLI
- script: npx deemon --detach --wait node build/azure-pipelines/linux/codesign.js - script: npx deemon --detach --wait node build/azure-pipelines/linux/codesign.ts
env: env:
EsrpCliDllPath: $(EsrpCliDllPath) EsrpCliDllPath: $(EsrpCliDllPath)
SYSTEM_ACCESSTOKEN: $(System.AccessToken) SYSTEM_ACCESSTOKEN: $(System.AccessToken)
@@ -379,7 +379,7 @@ steps:
VSCODE_RUN_REMOTE_TESTS: ${{ parameters.VSCODE_RUN_REMOTE_TESTS }} VSCODE_RUN_REMOTE_TESTS: ${{ parameters.VSCODE_RUN_REMOTE_TESTS }}
- ${{ if ne(parameters.VSCODE_CIBUILD, true) }}: - ${{ if ne(parameters.VSCODE_CIBUILD, true) }}:
- script: npx deemon --attach node build/azure-pipelines/linux/codesign.js - script: npx deemon --attach node build/azure-pipelines/linux/codesign.ts
condition: succeededOrFailed() condition: succeededOrFailed()
displayName: "✍️ Post-job: Codesign deb & rpm" displayName: "✍️ Post-job: Codesign deb & rpm"

View File

@@ -33,7 +33,7 @@ jobs:
condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none'))
displayName: Setup NPM Registry displayName: Setup NPM Registry
- script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js compile $(node -p process.arch) > .build/packagelockhash - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts compile $(node -p process.arch) > .build/packagelockhash
displayName: Prepare node_modules cache key displayName: Prepare node_modules cache key
- task: Cache@2 - task: Cache@2
@@ -81,19 +81,19 @@ jobs:
displayName: Install dependencies displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- script: node build/azure-pipelines/distro/mixin-npm - script: node build/azure-pipelines/distro/mixin-npm.ts
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Mixin distro node modules displayName: Mixin distro node modules
- script: | - script: |
set -e set -e
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt
mkdir -p .build/node_modules_cache mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Create node_modules archive displayName: Create node_modules archive
- script: node build/azure-pipelines/distro/mixin-quality - script: node build/azure-pipelines/distro/mixin-quality.ts
displayName: Mixin distro quality displayName: Mixin distro quality
- template: common/install-builtin-extensions.yml@self - template: common/install-builtin-extensions.yml@self
@@ -135,7 +135,7 @@ jobs:
AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \ AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \
AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \ AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \
AZURE_ID_TOKEN="$(AZURE_ID_TOKEN)" \ AZURE_ID_TOKEN="$(AZURE_ID_TOKEN)" \
node build/azure-pipelines/upload-sourcemaps node build/azure-pipelines/upload-sourcemaps.ts
displayName: Upload sourcemaps to Azure displayName: Upload sourcemaps to Azure
- script: ./build/azure-pipelines/common/extract-telemetry.sh - script: ./build/azure-pipelines/common/extract-telemetry.sh

View File

@@ -82,7 +82,7 @@ jobs:
$VERSION = node -p "require('./package.json').version" $VERSION = node -p "require('./package.json').version"
Write-Host "Creating build with version: $VERSION" Write-Host "Creating build with version: $VERSION"
exec { node build/azure-pipelines/common/createBuild.js $VERSION } exec { node build/azure-pipelines/common/createBuild.ts $VERSION }
env: env:
AZURE_TENANT_ID: "$(AZURE_TENANT_ID)" AZURE_TENANT_ID: "$(AZURE_TENANT_ID)"
AZURE_CLIENT_ID: "$(AZURE_CLIENT_ID)" AZURE_CLIENT_ID: "$(AZURE_CLIENT_ID)"
@@ -90,7 +90,7 @@ jobs:
displayName: Create build if it hasn't been created before displayName: Create build if it hasn't been created before
- pwsh: | - pwsh: |
$publishAuthTokens = (node build/azure-pipelines/common/getPublishAuthTokens) $publishAuthTokens = (node build/azure-pipelines/common/getPublishAuthTokens.ts)
Write-Host "##vso[task.setvariable variable=PUBLISH_AUTH_TOKENS;issecret=true]$publishAuthTokens" Write-Host "##vso[task.setvariable variable=PUBLISH_AUTH_TOKENS;issecret=true]$publishAuthTokens"
env: env:
AZURE_TENANT_ID: "$(AZURE_TENANT_ID)" AZURE_TENANT_ID: "$(AZURE_TENANT_ID)"
@@ -98,7 +98,7 @@ jobs:
AZURE_ID_TOKEN: "$(AZURE_ID_TOKEN)" AZURE_ID_TOKEN: "$(AZURE_ID_TOKEN)"
displayName: Get publish auth tokens displayName: Get publish auth tokens
- pwsh: node build/azure-pipelines/common/publish.js - pwsh: node build/azure-pipelines/common/publish.ts
env: env:
SYSTEM_ACCESSTOKEN: $(System.AccessToken) SYSTEM_ACCESSTOKEN: $(System.AccessToken)
PUBLISH_AUTH_TOKENS: "$(PUBLISH_AUTH_TOKENS)" PUBLISH_AUTH_TOKENS: "$(PUBLISH_AUTH_TOKENS)"
@@ -110,7 +110,7 @@ jobs:
retryCountOnTaskFailure: 3 retryCountOnTaskFailure: 3
- ${{ if and(in(parameters.VSCODE_QUALITY, 'insider', 'exploration'), eq(parameters.VSCODE_SCHEDULEDBUILD, true)) }}: - ${{ if and(in(parameters.VSCODE_QUALITY, 'insider', 'exploration'), eq(parameters.VSCODE_SCHEDULEDBUILD, true)) }}:
- script: node build/azure-pipelines/common/releaseBuild.js - script: node build/azure-pipelines/common/releaseBuild.ts
env: env:
PUBLISH_AUTH_TOKENS: "$(PUBLISH_AUTH_TOKENS)" PUBLISH_AUTH_TOKENS: "$(PUBLISH_AUTH_TOKENS)"
displayName: Release build displayName: Release build

View File

@@ -27,7 +27,7 @@ steps:
displayName: Install build dependencies displayName: Install build dependencies
- pwsh: | - pwsh: |
$publishAuthTokens = (node build/azure-pipelines/common/getPublishAuthTokens) $publishAuthTokens = (node build/azure-pipelines/common/getPublishAuthTokens.ts)
Write-Host "##vso[task.setvariable variable=PUBLISH_AUTH_TOKENS;issecret=true]$publishAuthTokens" Write-Host "##vso[task.setvariable variable=PUBLISH_AUTH_TOKENS;issecret=true]$publishAuthTokens"
env: env:
AZURE_TENANT_ID: "$(AZURE_TENANT_ID)" AZURE_TENANT_ID: "$(AZURE_TENANT_ID)"
@@ -35,7 +35,7 @@ steps:
AZURE_ID_TOKEN: "$(AZURE_ID_TOKEN)" AZURE_ID_TOKEN: "$(AZURE_ID_TOKEN)"
displayName: Get publish auth tokens displayName: Get publish auth tokens
- script: node build/azure-pipelines/common/releaseBuild.js ${{ parameters.VSCODE_RELEASE }} - script: node build/azure-pipelines/common/releaseBuild.ts ${{ parameters.VSCODE_RELEASE }}
displayName: Release build displayName: Release build
env: env:
PUBLISH_AUTH_TOKENS: "$(PUBLISH_AUTH_TOKENS)" PUBLISH_AUTH_TOKENS: "$(PUBLISH_AUTH_TOKENS)"

View File

@@ -1,40 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
// TypeScript-compiler-emitted interop helper: wraps a CommonJS module so it
// can be consumed as if it had an ES-module `default` export.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const child_process_1 = __importDefault(require("child_process"));
// Resolve the most recent tag in the repository and validate it. The publish
// pipeline only proceeds for proper minor-release tags (x.y.0); any git
// failure or invalid tag aborts the step with a non-zero exit code.
let tag = '';
try {
    // `git rev-list --tags --max-count=1` yields the commit of the newest tag;
    // `git describe --tags` maps that commit back to the tag name.
    tag = child_process_1.default
        .execSync('git describe --tags `git rev-list --tags --max-count=1`')
        .toString()
        .trim();
    if (!isValidTag(tag)) {
        throw Error(`Invalid tag ${tag}`);
    }
}
catch (err) {
    // Fail loudly so the pipeline step is marked as failed.
    console.error(err);
    console.error('Failed to update types');
    process.exit(1);
}
/**
 * Checks whether a git tag names a publishable release.
 *
 * A valid tag has exactly three dot-separated decimal parts and a patch
 * component of `0` (types are only published for full minor releases,
 * e.g. `1.34.0`).
 *
 * @param {string} t - The tag name to validate.
 * @returns {boolean} `true` when `t` looks like `x.y.0`.
 */
function isValidTag(t) {
    const parts = t.split('.');
    if (parts.length !== 3) {
        return false;
    }
    const [major, minor, bug] = parts;
    // Only release for tags like 1.34.0
    if (bug !== '0') {
        return false;
    }
    // Require pure decimal components. The previous parseInt-based check
    // accepted trailing garbage (parseInt('1a', 10) === 1), so a tag such as
    // "1a.2.0" would incorrectly validate.
    return /^\d+$/.test(major) && /^\d+$/.test(minor);
}
//# sourceMappingURL=check-version.js.map

View File

@@ -34,7 +34,7 @@ steps:
- bash: | - bash: |
# Install build dependencies # Install build dependencies
(cd build && npm ci) (cd build && npm ci)
node build/azure-pipelines/publish-types/check-version.js node build/azure-pipelines/publish-types/check-version.ts
displayName: Check version displayName: Check version
- bash: | - bash: |
@@ -42,7 +42,7 @@ steps:
git config --global user.name "VSCode" git config --global user.name "VSCode"
git clone https://$(GITHUB_TOKEN)@github.com/DefinitelyTyped/DefinitelyTyped.git --depth=1 git clone https://$(GITHUB_TOKEN)@github.com/DefinitelyTyped/DefinitelyTyped.git --depth=1
node build/azure-pipelines/publish-types/update-types.js node build/azure-pipelines/publish-types/update-types.ts
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`) TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)

View File

@@ -1,80 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
// TypeScript-compiler-emitted interop helper: wraps a CommonJS module so it
// can be consumed as if it had an ES-module `default` export.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs_1 = __importDefault(require("fs"));
const child_process_1 = __importDefault(require("child_process"));
const path_1 = __importDefault(require("path"));
// Top-level driver: finds the latest release tag, downloads the matching
// vscode.d.ts from GitHub, rewrites its header for DefinitelyTyped, and bumps
// the local DefinitelyTyped vscode package version. Any failure exits 1.
let tag = '';
try {
    tag = child_process_1.default
        .execSync('git describe --tags `git rev-list --tags --max-count=1`')
        .toString()
        .trim();
    // "1.34.0" -> "1.34"; DefinitelyTyped versions types by major.minor only.
    const [major, minor] = tag.split('.');
    const shorttag = `${major}.${minor}`;
    const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vscode-dts/vscode.d.ts`;
    const outDtsPath = path_1.default.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts');
    // NOTE(review): shells out to curl rather than using an HTTP client;
    // assumes curl is on PATH in the build agent image.
    child_process_1.default.execSync(`curl ${dtsUri} --output ${outDtsPath}`);
    updateDTSFile(outDtsPath, shorttag);
    const outPackageJsonPath = path_1.default.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/package.json');
    const packageJson = JSON.parse(fs_1.default.readFileSync(outPackageJsonPath, 'utf-8'));
    // ".9999" marks a pre-publish placeholder patch version.
    packageJson.version = shorttag + '.9999';
    fs_1.default.writeFileSync(outPackageJsonPath, JSON.stringify(packageJson, null, 2) + '\n');
    console.log(`Done updating vscode.d.ts at ${outDtsPath} and package.json to version ${packageJson.version}`);
}
catch (err) {
    console.error(err);
    console.error('Failed to update types');
    process.exit(1);
}
/**
 * Rewrites the downloaded vscode.d.ts in place: swaps in the
 * DefinitelyTyped-style header and normalizes indentation.
 *
 * @param {string} outPath - Path of the .d.ts file to rewrite.
 * @param {string} shorttag - "major.minor" version string for the header.
 */
function updateDTSFile(outPath, shorttag) {
    const oldContent = fs_1.default.readFileSync(outPath, 'utf-8');
    const newContent = getNewFileContent(oldContent, shorttag);
    fs_1.default.writeFileSync(outPath, newContent);
}
/**
 * Repeats a string a given number of times.
 *
 * Delegates to the built-in `String.prototype.repeat` instead of filling and
 * joining an array by hand; behavior is identical (returns '' for 0, throws
 * RangeError for negative counts, same as `new Array(times)` would).
 *
 * @param {string} str - The string to repeat.
 * @param {number} times - Number of repetitions.
 * @returns {string} `str` concatenated `times` times.
 */
function repeat(str, times) {
    return str.repeat(times);
}
/**
 * Expands every tab character into four spaces (DefinitelyTyped uses
 * space indentation).
 *
 * The previous implementation invoked a per-match callback that called a
 * repeat helper on a single-character match — always producing the same
 * four-space constant; a direct substitution is equivalent and simpler.
 *
 * @param {string} str - Input text, possibly tab-indented.
 * @returns {string} The text with each tab replaced by four spaces.
 */
function convertTabsToSpaces(str) {
    return str.replace(/\t/g, '    ');
}
/**
 * Produces the DefinitelyTyped variant of vscode.d.ts: strips the original
 * Microsoft license header, prepends the DT header, and expands tabs.
 *
 * NOTE(review): the old header is removed by slicing `oldheader.length`
 * characters without verifying the file actually starts with that exact
 * header — if the upstream header ever changes, this would silently corrupt
 * the output. Confirm against the current vscode.d.ts.
 *
 * @param {string} content - Original vscode.d.ts contents.
 * @param {string} shorttag - "major.minor" version string for the new header.
 * @returns {string} Rewritten, space-indented file contents.
 */
function getNewFileContent(content, shorttag) {
    const oldheader = [
        `/*---------------------------------------------------------------------------------------------`,
        ` * Copyright (c) Microsoft Corporation. All rights reserved.`,
        ` * Licensed under the MIT License. See License.txt in the project root for license information.`,
        ` *--------------------------------------------------------------------------------------------*/`
    ].join('\n');
    return convertTabsToSpaces(getNewFileHeader(shorttag) + content.slice(oldheader.length));
}
/**
 * Builds the DefinitelyTyped-style header that replaces the original license
 * banner in the published vscode typings.
 *
 * @param {string} shorttag - "major.minor" version string to embed.
 * @returns {string} The complete multi-line header (no trailing newline).
 */
function getNewFileHeader(shorttag) {
    return `// Type definitions for Visual Studio Code ${shorttag}
// Project: https://github.com/microsoft/vscode
// Definitions by: Visual Studio Code Team, Microsoft <https://github.com/microsoft>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped

/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 * See https://github.com/microsoft/vscode/blob/main/LICENSE.txt for license information.
 *--------------------------------------------------------------------------------------------*/

/**
 * Type Definition for Visual Studio Code ${shorttag} Extension API
 * See https://code.visualstudio.com/api for more information
 */`;
}
//# sourceMappingURL=update-types.js.map

View File

@@ -1,121 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
// TypeScript-compiler-emitted interop helper: wraps a CommonJS module so it
// can be consumed as if it had an ES-module `default` export.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const event_stream_1 = __importDefault(require("event-stream"));
const vinyl_1 = __importDefault(require("vinyl"));
const vinyl_fs_1 = __importDefault(require("vinyl-fs"));
const gulp_filter_1 = __importDefault(require("gulp-filter"));
const gulp_gzip_1 = __importDefault(require("gulp-gzip"));
const mime_1 = __importDefault(require("mime"));
const identity_1 = require("@azure/identity");
const util_1 = require("../lib/util");
const gulp_azure_storage_1 = __importDefault(require("gulp-azure-storage"));
// Commit being published; injected by Azure DevOps for the pipeline run.
const commit = process.env['BUILD_SOURCEVERSION'];
// Federated (workload-identity) credential for the blob-storage upload.
const credential = new identity_1.ClientAssertionCredential(process.env['AZURE_TENANT_ID'], process.env['AZURE_CLIENT_ID'], () => Promise.resolve(process.env['AZURE_ID_TOKEN']));
// Teach `mime` about extensions it does not know so their lookup can be
// matched against the compression list below.
mime_1.default.define({
    'application/typescript': ['ts'],
    'application/json': ['code-snippets'],
});
// From default AFD configuration
// MIME types that are uploaded pre-gzipped (Content-Encoding: gzip); all
// other types are uploaded uncompressed.
const MimeTypesToCompress = new Set([
    'application/eot',
    'application/font',
    'application/font-sfnt',
    'application/javascript',
    'application/json',
    'application/opentype',
    'application/otf',
    'application/pkcs7-mime',
    'application/truetype',
    'application/ttf',
    'application/typescript',
    'application/vnd.ms-fontobject',
    'application/xhtml+xml',
    'application/xml',
    'application/xml+rss',
    'application/x-font-opentype',
    'application/x-font-truetype',
    'application/x-font-ttf',
    'application/x-httpd-cgi',
    'application/x-javascript',
    'application/x-mpegurl',
    'application/x-opentype',
    'application/x-otf',
    'application/x-perl',
    'application/x-ttf',
    'font/eot',
    'font/ttf',
    'font/otf',
    'font/opentype',
    'image/svg+xml',
    'text/css',
    'text/csv',
    'text/html',
    'text/javascript',
    'text/js',
    'text/markdown',
    'text/plain',
    'text/richtext',
    'text/tab-separated-values',
    'text/xml',
    'text/x-script',
    'text/x-component',
    'text/x-java-source'
]);
/**
 * Adapts a stream's terminal events to a Promise.
 *
 * @param {NodeJS.ReadWriteStream} stream - Stream to observe.
 * @returns {Promise<void>} Resolves on 'end', rejects with the first 'error'.
 */
function wait(stream) {
    return new Promise((resolve, reject) => {
        stream.on('end', () => {
            resolve();
        });
        stream.on('error', reject);
    });
}
/**
 * Uploads the built `../vscode-web` tree to the `$web` blob container under
 * `<quality>/<commit>/`. Files whose MIME type is in `MimeTypesToCompress`
 * are gzipped first and uploaded with `Content-Encoding: gzip`; everything
 * else is uploaded as-is. Finally a gzipped `files.txt` manifest listing all
 * uploaded paths is written to the same prefix.
 */
async function main() {
    const files = [];
    // Upload options; the only difference between the two passes is whether
    // the blobs are marked as gzip-encoded.
    const options = (compressed) => ({
        account: process.env.AZURE_STORAGE_ACCOUNT,
        credential,
        container: '$web',
        prefix: `${process.env.VSCODE_QUALITY}/${commit}/`,
        contentSettings: {
            contentEncoding: compressed ? 'gzip' : undefined,
            cacheControl: 'max-age=31536000, public'
        }
    });
    // NOTE(review): `all` is a single source stream piped into two consumers;
    // this relies on gulp-filter/vinyl-fs multiplexing behavior — confirm both
    // branches receive every file.
    const all = vinyl_fs_1.default.src('**', { cwd: '../vscode-web', base: '../vscode-web', dot: true })
        .pipe((0, gulp_filter_1.default)(f => !f.isDirectory()));
    const compressed = all
        .pipe((0, gulp_filter_1.default)(f => MimeTypesToCompress.has(mime_1.default.lookup(f.path))))
        .pipe((0, gulp_gzip_1.default)({ append: false }))
        .pipe(gulp_azure_storage_1.default.upload(options(true)));
    const uncompressed = all
        .pipe((0, gulp_filter_1.default)(f => !MimeTypesToCompress.has(mime_1.default.lookup(f.path))))
        .pipe(gulp_azure_storage_1.default.upload(options(false)));
    // Record every uploaded relative path for the manifest below.
    const out = event_stream_1.default.merge(compressed, uncompressed)
        .pipe(event_stream_1.default.through(function (f) {
        console.log('Uploaded:', f.relative);
        files.push(f.relative);
        this.emit('data', f);
    }));
    console.log(`Uploading files to CDN...`); // debug
    await wait(out);
    // Manifest of everything uploaded, published as gzipped files.txt.
    const listing = new vinyl_1.default({
        path: 'files.txt',
        contents: Buffer.from(files.join('\n')),
        stat: new util_1.VinylStat({ mode: 0o666 })
    });
    const filesOut = event_stream_1.default.readArray([listing])
        .pipe((0, gulp_gzip_1.default)({ append: false }))
        .pipe(gulp_azure_storage_1.default.upload(options(true)));
    console.log(`Uploading: files.txt (${files.length} files)`); // debug
    await wait(filesOut);
}
// Entry point: any failure is logged and fails the pipeline step (exit 1).
main().catch(err => {
    console.error(err);
    process.exit(1);
});
//# sourceMappingURL=upload-cdn.js.map

View File

@@ -10,7 +10,7 @@ import filter from 'gulp-filter';
import gzip from 'gulp-gzip'; import gzip from 'gulp-gzip';
import mime from 'mime'; import mime from 'mime';
import { ClientAssertionCredential } from '@azure/identity'; import { ClientAssertionCredential } from '@azure/identity';
import { VinylStat } from '../lib/util'; import { VinylStat } from '../lib/util.ts';
import azure from 'gulp-azure-storage'; import azure from 'gulp-azure-storage';
const commit = process.env['BUILD_SOURCEVERSION']; const commit = process.env['BUILD_SOURCEVERSION'];

View File

@@ -1,127 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
// TypeScript-compiler-emitted interop helper: wraps a CommonJS module so it
// can be consumed as if it had an ES-module `default` export.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const event_stream_1 = __importDefault(require("event-stream"));
const vinyl_fs_1 = __importDefault(require("vinyl-fs"));
const gulp_merge_json_1 = __importDefault(require("gulp-merge-json"));
const gulp_gzip_1 = __importDefault(require("gulp-gzip"));
const identity_1 = require("@azure/identity");
const path = require("path");
const fs_1 = require("fs");
const gulp_azure_storage_1 = __importDefault(require("gulp-azure-storage"));
// Commit being published; injected by Azure DevOps for the pipeline run.
const commit = process.env['BUILD_SOURCEVERSION'];
// Federated (workload-identity) credential for the blob-storage upload.
const credential = new identity_1.ClientAssertionCredential(process.env['AZURE_TENANT_ID'], process.env['AZURE_CLIENT_ID'], () => Promise.resolve(process.env['AZURE_ID_TOKEN']));
/**
 * Merges all NLS (localization) metadata produced by the build — core keys
 * and messages plus every built-in extension's metadata — into a single
 * `combined.nls.metadata.json`, gzips it together with `nls.messages.js`,
 * publishes both as Azure DevOps artifacts, and uploads them to blob storage
 * under `nlsmetadata/<commit>/`.
 *
 * @returns {Promise<void>} resolves when the upload stream ends; rejects on
 *   the first stream error.
 */
function main() {
    return new Promise((c, e) => {
        const combinedMetadataJson = event_stream_1.default.merge(
        // vscode: we are not using `out-build/nls.metadata.json` here because
        // it includes metadata for translators for `keys`. but for our purpose
        // we want only the `keys` and `messages` as `string`.
        event_stream_1.default.merge(vinyl_fs_1.default.src('out-build/nls.keys.json', { base: 'out-build' }), vinyl_fs_1.default.src('out-build/nls.messages.json', { base: 'out-build' }))
            .pipe((0, gulp_merge_json_1.default)({
            fileName: 'vscode.json',
            jsonSpace: '',
            concatArrays: true,
            edit: (parsedJson, file) => {
                if (file.base === 'out-build') {
                    if (file.basename === 'nls.keys.json') {
                        return { keys: parsedJson };
                    }
                    else {
                        return { messages: parsedJson };
                    }
                }
            }
        })),
        // extensions
        vinyl_fs_1.default.src('.build/extensions/**/nls.metadata.json', { base: '.build/extensions' }), vinyl_fs_1.default.src('.build/extensions/**/nls.metadata.header.json', { base: '.build/extensions' }), vinyl_fs_1.default.src('.build/extensions/**/package.nls.json', { base: '.build/extensions' })).pipe((0, gulp_merge_json_1.default)({
        fileName: 'combined.nls.metadata.json',
        jsonSpace: '',
        concatArrays: true,
        edit: (parsedJson, file) => {
            if (file.basename === 'vscode.json') {
                return { vscode: parsedJson };
            }
            // Handle extensions and follow the same structure as the Core nls file.
            switch (file.basename) {
                case 'package.nls.json':
                    // put package.nls.json content in Core NlsMetadata format
                    // language packs use the key "package" to specify that
                    // translations are for the package.json file
                    parsedJson = {
                        messages: {
                            package: Object.values(parsedJson)
                        },
                        keys: {
                            package: Object.keys(parsedJson)
                        },
                        bundles: {
                            main: ['package']
                        }
                    };
                    break;
                case 'nls.metadata.header.json':
                    parsedJson = { header: parsedJson };
                    break;
                case 'nls.metadata.json': {
                    // put nls.metadata.json content in Core NlsMetadata format
                    const modules = Object.keys(parsedJson);
                    const json = {
                        keys: {},
                        messages: {},
                        bundles: {
                            main: []
                        }
                    };
                    for (const module of modules) {
                        json.messages[module] = parsedJson[module].messages;
                        json.keys[module] = parsedJson[module].keys;
                        json.bundles.main.push(module);
                    }
                    parsedJson = json;
                    break;
                }
            }
            // Get extension id and use that as the key
            // (publisher.name read from the extension's own package.json).
            const folderPath = path.join(file.base, file.relative.split('/')[0]);
            const manifest = (0, fs_1.readFileSync)(path.join(folderPath, 'package.json'), 'utf-8');
            const manifestJson = JSON.parse(manifest);
            const key = manifestJson.publisher + '.' + manifestJson.name;
            return { [key]: parsedJson };
        },
    }));
        const nlsMessagesJs = vinyl_fs_1.default.src('out-build/nls.messages.js', { base: 'out-build' });
        event_stream_1.default.merge(combinedMetadataJson, nlsMessagesJs)
            .pipe((0, gulp_gzip_1.default)({ append: false }))
            .pipe(vinyl_fs_1.default.dest('./nlsMetadata'))
            .pipe(event_stream_1.default.through(function (data) {
            console.log(`Uploading ${data.path}`);
            // trigger artifact upload
            console.log(`##vso[artifact.upload containerfolder=nlsmetadata;artifactname=${data.basename}]${data.path}`);
            this.emit('data', data);
        }))
            .pipe(gulp_azure_storage_1.default.upload({
            account: process.env.AZURE_STORAGE_ACCOUNT,
            credential,
            container: '$web',
            prefix: `nlsmetadata/${commit}/`,
            contentSettings: {
                contentEncoding: 'gzip',
                cacheControl: 'max-age=31536000, public'
            }
        }))
            .on('end', () => c())
            .on('error', (err) => e(err));
    });
}
// Entry point: any failure is logged and fails the pipeline step (exit 1).
main().catch(err => {
    console.error(err);
    process.exit(1);
});
//# sourceMappingURL=upload-nlsmetadata.js.map

View File

@@ -9,7 +9,7 @@ import vfs from 'vinyl-fs';
import merge from 'gulp-merge-json'; import merge from 'gulp-merge-json';
import gzip from 'gulp-gzip'; import gzip from 'gulp-gzip';
import { ClientAssertionCredential } from '@azure/identity'; import { ClientAssertionCredential } from '@azure/identity';
import path = require('path'); import path from 'path';
import { readFileSync } from 'fs'; import { readFileSync } from 'fs';
import azure from 'gulp-azure-storage'; import azure from 'gulp-azure-storage';

View File

@@ -1,101 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
// The following are TypeScript-compiler-emitted interop helpers for
// consuming CommonJS modules with ES-module import semantics.
// Re-exports property `k` of module `m` on `o` (as `k2`), preferring a live
// getter binding where property descriptors allow it.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Attaches module `v` as the `default` export of namespace object `o`.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Implements `import * as ns` over a CommonJS module: copies all own keys
// (except "default") onto a fresh namespace object.
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
// Wraps a CommonJS module so it can be consumed via a `default` export.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const path_1 = __importDefault(require("path"));
const event_stream_1 = __importDefault(require("event-stream"));
const vinyl_fs_1 = __importDefault(require("vinyl-fs"));
const util = __importStar(require("../lib/util"));
const dependencies_1 = require("../lib/dependencies");
const identity_1 = require("@azure/identity");
const gulp_azure_storage_1 = __importDefault(require("gulp-azure-storage"));
// Repository root: two levels up from this compiled script's directory.
const root = path_1.default.dirname(path_1.default.dirname(__dirname));
// Commit being published; injected by Azure DevOps for the pipeline run.
const commit = process.env['BUILD_SOURCEVERSION'];
// Federated (workload-identity) credential for the blob-storage upload.
const credential = new identity_1.ClientAssertionCredential(process.env['AZURE_TENANT_ID'], process.env['AZURE_CLIENT_ID'], () => Promise.resolve(process.env['AZURE_ID_TOKEN']));
// optionally allow to pass in explicit base/maps to upload
// (argv[2] = base directory, argv[3] = glob of maps; both optional).
const [, , base, maps] = process.argv;
/**
 * Streams all source maps under `base` (or a custom glob) and remaps each
 * file's path so it is uploaded under a `core/` folder relative to its base.
 *
 * @param {string} base - Base directory for the glob.
 * @param {string} [maps] - Glob of map files; defaults to all maps under base.
 * @returns {NodeJS.ReadWriteStream} Stream of remapped vinyl files.
 */
function src(base, maps = `${base}/**/*.map`) {
    const remap = (file) => {
        file.path = `${file.base}/core/${file.relative}`;
        return file;
    };
    return vinyl_fs_1.default.src(maps, { base }).pipe(event_stream_1.default.mapSync(remap));
}
/**
 * Uploads source maps to blob storage under `sourcemaps/<commit>/`.
 *
 * Without CLI arguments it uploads the default set: minified client maps,
 * maps from production node_modules (filtered by .moduleignore files), and
 * built-in extension maps. With explicit `base`/`maps` arguments it uploads
 * only that selection.
 *
 * @returns {Promise<void>} resolves when the upload stream ends; rejects on
 *   the first stream error.
 */
function main() {
    const sources = [];
    // vscode client maps (default)
    if (!base) {
        const vs = src('out-vscode-min'); // client source-maps only
        sources.push(vs);
        const productionDependencies = (0, dependencies_1.getProductionDependencies)(root);
        const productionDependenciesSrc = productionDependencies.map((d) => path_1.default.relative(root, d)).map((d) => `./${d}/**/*.map`);
        // Strip files excluded by the shared and platform-specific ignore lists.
        const nodeModules = vinyl_fs_1.default.src(productionDependenciesSrc, { base: '.' })
            .pipe(util.cleanNodeModules(path_1.default.join(root, 'build', '.moduleignore')))
            .pipe(util.cleanNodeModules(path_1.default.join(root, 'build', `.moduleignore.${process.platform}`)));
        sources.push(nodeModules);
        const extensionsOut = vinyl_fs_1.default.src(['.build/extensions/**/*.js.map', '!**/node_modules/**'], { base: '.build' });
        sources.push(extensionsOut);
    }
    // specific client base/maps
    else {
        sources.push(src(base, maps));
    }
    return new Promise((c, e) => {
        event_stream_1.default.merge(...sources)
            .pipe(event_stream_1.default.through(function (data) {
            console.log('Uploading Sourcemap', data.relative); // debug
            this.emit('data', data);
        }))
            .pipe(gulp_azure_storage_1.default.upload({
            account: process.env.AZURE_STORAGE_ACCOUNT,
            credential,
            container: '$web',
            prefix: `sourcemaps/${commit}/`
        }))
            .on('end', () => c())
            .on('error', (err) => e(err));
    });
}
// Entry point: any failure is logged and fails the pipeline step (exit 1).
main().catch(err => {
    console.error(err);
    process.exit(1);
});
//# sourceMappingURL=upload-sourcemaps.js.map

View File

@@ -7,13 +7,13 @@ import path from 'path';
import es from 'event-stream'; import es from 'event-stream';
import Vinyl from 'vinyl'; import Vinyl from 'vinyl';
import vfs from 'vinyl-fs'; import vfs from 'vinyl-fs';
import * as util from '../lib/util'; import * as util from '../lib/util.ts';
import { getProductionDependencies } from '../lib/dependencies'; import { getProductionDependencies } from '../lib/dependencies.ts';
import { ClientAssertionCredential } from '@azure/identity'; import { ClientAssertionCredential } from '@azure/identity';
import Stream from 'stream'; import Stream from 'stream';
import azure from 'gulp-azure-storage'; import azure from 'gulp-azure-storage';
const root = path.dirname(path.dirname(__dirname)); const root = path.dirname(path.dirname(import.meta.dirname));
const commit = process.env['BUILD_SOURCEVERSION']; const commit = process.env['BUILD_SOURCEVERSION'];
const credential = new ClientAssertionCredential(process.env['AZURE_TENANT_ID']!, process.env['AZURE_CLIENT_ID']!, () => Promise.resolve(process.env['AZURE_ID_TOKEN']!)); const credential = new ClientAssertionCredential(process.env['AZURE_TENANT_ID']!, process.env['AZURE_CLIENT_ID']!, () => Promise.resolve(process.env['AZURE_ID_TOKEN']!));

View File

@@ -26,7 +26,7 @@ jobs:
condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none'))
displayName: Setup NPM Registry displayName: Setup NPM Registry
- script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js web $(node -p process.arch) > .build/packagelockhash - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts web $(node -p process.arch) > .build/packagelockhash
displayName: Prepare node_modules cache key displayName: Prepare node_modules cache key
- task: Cache@2 - task: Cache@2
@@ -77,13 +77,13 @@ jobs:
displayName: Install dependencies displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- script: node build/azure-pipelines/distro/mixin-npm - script: node build/azure-pipelines/distro/mixin-npm.ts
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Mixin distro node modules displayName: Mixin distro node modules
- script: | - script: |
set -e set -e
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt
mkdir -p .build/node_modules_cache mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))

View File

@@ -46,7 +46,7 @@ jobs:
condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none'))
displayName: Setup NPM Registry displayName: Setup NPM Registry
- script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js web $(node -p process.arch) > .build/packagelockhash - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.ts web $(node -p process.arch) > .build/packagelockhash
displayName: Prepare node_modules cache key displayName: Prepare node_modules cache key
- task: Cache@2 - task: Cache@2
@@ -101,19 +101,19 @@ jobs:
displayName: Install dependencies displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- script: node build/azure-pipelines/distro/mixin-npm - script: node build/azure-pipelines/distro/mixin-npm.ts
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Mixin distro node modules displayName: Mixin distro node modules
- script: | - script: |
set -e set -e
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt
mkdir -p .build/node_modules_cache mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Create node_modules archive displayName: Create node_modules archive
- script: node build/azure-pipelines/distro/mixin-quality - script: node build/azure-pipelines/distro/mixin-quality.ts
displayName: Mixin distro quality displayName: Mixin distro quality
- template: ../common/install-builtin-extensions.yml@self - template: ../common/install-builtin-extensions.yml@self
@@ -147,7 +147,7 @@ jobs:
AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \ AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \
AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \ AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \
AZURE_ID_TOKEN="$(AZURE_ID_TOKEN)" \ AZURE_ID_TOKEN="$(AZURE_ID_TOKEN)" \
node build/azure-pipelines/upload-cdn node build/azure-pipelines/upload-cdn.ts
displayName: Upload to CDN displayName: Upload to CDN
- script: | - script: |
@@ -156,7 +156,7 @@ jobs:
AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \ AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \
AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \ AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \
AZURE_ID_TOKEN="$(AZURE_ID_TOKEN)" \ AZURE_ID_TOKEN="$(AZURE_ID_TOKEN)" \
node build/azure-pipelines/upload-sourcemaps out-vscode-web-min out-vscode-web-min/vs/workbench/workbench.web.main.js.map node build/azure-pipelines/upload-sourcemaps.ts out-vscode-web-min out-vscode-web-min/vs/workbench/workbench.web.main.js.map
displayName: Upload sourcemaps (Web Main) displayName: Upload sourcemaps (Web Main)
- script: | - script: |
@@ -165,7 +165,7 @@ jobs:
AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \ AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \
AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \ AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \
AZURE_ID_TOKEN="$(AZURE_ID_TOKEN)" \ AZURE_ID_TOKEN="$(AZURE_ID_TOKEN)" \
node build/azure-pipelines/upload-sourcemaps out-vscode-web-min out-vscode-web-min/vs/workbench/workbench.web.main.internal.js.map node build/azure-pipelines/upload-sourcemaps.ts out-vscode-web-min out-vscode-web-min/vs/workbench/workbench.web.main.internal.js.map
displayName: Upload sourcemaps (Web Internal) displayName: Upload sourcemaps (Web Internal)
- script: | - script: |
@@ -174,5 +174,5 @@ jobs:
AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \ AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \
AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \ AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \
AZURE_ID_TOKEN="$(AZURE_ID_TOKEN)" \ AZURE_ID_TOKEN="$(AZURE_ID_TOKEN)" \
node build/azure-pipelines/upload-nlsmetadata node build/azure-pipelines/upload-nlsmetadata.ts
displayName: Upload NLS Metadata displayName: Upload NLS Metadata

View File

@@ -1,73 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const zx_1 = require("zx");
const codesign_1 = require("../common/codesign");
const publish_1 = require("../common/publish");
/**
 * Windows codesign driver (compiled output of codesign.ts).
 *
 * Spawns the three ESRP codesign passes up front so they run in parallel,
 * then awaits them in order, packages the client/server/web builds with 7z,
 * and finally signs the setup packages. Fails (rejects) if any required
 * environment variable is missing or any child process reports an error.
 */
async function main() {
    // Switch zx to PowerShell so the commands below run under pwsh on Windows.
    (0, zx_1.usePwsh)();
    // e() throws when the variable is unset — these three are mandatory.
    const arch = (0, publish_1.e)('VSCODE_ARCH');
    const esrpCliDLLPath = (0, publish_1.e)('EsrpCliDllPath');
    const codeSigningFolderPath = (0, publish_1.e)('CodeSigningFolderPath');
    // Start the code sign processes in parallel
    // 1. Codesign executables and shared libraries
    // 2. Codesign Powershell scripts
    // 3. Codesign context menu appx package (insiders only)
    const codesignTask1 = (0, codesign_1.spawnCodesignProcess)(esrpCliDLLPath, 'sign-windows', codeSigningFolderPath, '*.dll,*.exe,*.node');
    const codesignTask2 = (0, codesign_1.spawnCodesignProcess)(esrpCliDLLPath, 'sign-windows-appx', codeSigningFolderPath, '*.ps1');
    const codesignTask3 = process.env['VSCODE_QUALITY'] === 'insider'
        ? (0, codesign_1.spawnCodesignProcess)(esrpCliDLLPath, 'sign-windows-appx', codeSigningFolderPath, '*.appx')
        : undefined;
    // Codesign executables and shared libraries
    (0, codesign_1.printBanner)('Codesign executables and shared libraries');
    await (0, codesign_1.streamProcessOutputAndCheckResult)('Codesign executables and shared libraries', codesignTask1);
    // Codesign Powershell scripts
    (0, codesign_1.printBanner)('Codesign Powershell scripts');
    await (0, codesign_1.streamProcessOutputAndCheckResult)('Codesign Powershell scripts', codesignTask2);
    if (codesignTask3) {
        // Codesign context menu appx package
        (0, codesign_1.printBanner)('Codesign context menu appx package');
        await (0, codesign_1.streamProcessOutputAndCheckResult)('Codesign context menu appx package', codesignTask3);
    }
    // Create build artifact directory
    await (0, zx_1.$) `New-Item -ItemType Directory -Path .build/win32-${arch} -Force`;
    // Package client
    if (process.env['BUILT_CLIENT']) {
        // Product version
        // NOTE(review): `version` is a zx ProcessOutput interpolated into the
        // archive name below — assumes zx's string conversion trims the
        // trailing newline; confirm against the zx version in use.
        const version = await (0, zx_1.$) `node -p "require('../VSCode-win32-${arch}/resources/app/package.json').version"`;
        (0, codesign_1.printBanner)('Package client');
        const clientArchivePath = `.build/win32-${arch}/VSCode-win32-${arch}-${version}.zip`;
        await (0, zx_1.$) `7z.exe a -tzip ${clientArchivePath} ../VSCode-win32-${arch}/* "-xr!CodeSignSummary*.md"`.pipe(process.stdout);
        await (0, zx_1.$) `7z.exe l ${clientArchivePath}`.pipe(process.stdout);
    }
    // Package server
    if (process.env['BUILT_SERVER']) {
        (0, codesign_1.printBanner)('Package server');
        const serverArchivePath = `.build/win32-${arch}/vscode-server-win32-${arch}.zip`;
        await (0, zx_1.$) `7z.exe a -tzip ${serverArchivePath} ../vscode-server-win32-${arch}`.pipe(process.stdout);
        await (0, zx_1.$) `7z.exe l ${serverArchivePath}`.pipe(process.stdout);
    }
    // Package server (web)
    if (process.env['BUILT_WEB']) {
        (0, codesign_1.printBanner)('Package server (web)');
        const webArchivePath = `.build/win32-${arch}/vscode-server-win32-${arch}-web.zip`;
        await (0, zx_1.$) `7z.exe a -tzip ${webArchivePath} ../vscode-server-win32-${arch}-web`.pipe(process.stdout);
        await (0, zx_1.$) `7z.exe l ${webArchivePath}`.pipe(process.stdout);
    }
    // Sign setup
    if (process.env['BUILT_CLIENT']) {
        (0, codesign_1.printBanner)('Sign setup packages (system, user)');
        const task = (0, zx_1.$) `npm exec -- npm-run-all -lp "gulp vscode-win32-${arch}-system-setup -- --sign" "gulp vscode-win32-${arch}-user-setup -- --sign"`;
        await (0, codesign_1.streamProcessOutputAndCheckResult)('Sign setup packages (system, user)', task);
    }
}
// Script entry point: exit 0 on success; log the failure and exit 1 otherwise.
// (process.exit(0) never returns, so the catch handler only sees main()'s errors.)
main()
    .then(() => process.exit(0))
    .catch(err => {
        console.error(`ERROR: ${err}`);
        process.exit(1);
    });
//# sourceMappingURL=codesign.js.map

View File

@@ -4,8 +4,8 @@
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
import { $, usePwsh } from 'zx'; import { $, usePwsh } from 'zx';
import { printBanner, spawnCodesignProcess, streamProcessOutputAndCheckResult } from '../common/codesign'; import { printBanner, spawnCodesignProcess, streamProcessOutputAndCheckResult } from '../common/codesign.ts';
import { e } from '../common/publish'; import { e } from '../common/publish.ts';
async function main() { async function main() {
usePwsh(); usePwsh();

View File

@@ -39,7 +39,7 @@ jobs:
- pwsh: | - pwsh: |
mkdir .build -ea 0 mkdir .build -ea 0
node build/azure-pipelines/common/computeNodeModulesCacheKey.js win32 $(VSCODE_ARCH) $(node -p process.arch) > .build/packagelockhash node build/azure-pipelines/common/computeNodeModulesCacheKey.ts win32 $(VSCODE_ARCH) $(node -p process.arch) > .build/packagelockhash
displayName: Prepare node_modules cache key displayName: Prepare node_modules cache key
- task: Cache@2 - task: Cache@2
@@ -81,14 +81,14 @@ jobs:
displayName: Install dependencies displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- powershell: node build/azure-pipelines/distro/mixin-npm - powershell: node build/azure-pipelines/distro/mixin-npm.ts
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Mixin distro node modules displayName: Mixin distro node modules
- powershell: | - powershell: |
. build/azure-pipelines/win32/exec.ps1 . build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop" $ErrorActionPreference = "Stop"
exec { node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt } exec { node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt }
exec { mkdir -Force .build/node_modules_cache } exec { mkdir -Force .build/node_modules_cache }
exec { 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt } exec { 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt }
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))

View File

@@ -92,10 +92,10 @@ steps:
retryCountOnTaskFailure: 5 retryCountOnTaskFailure: 5
displayName: Install dependencies displayName: Install dependencies
- script: node build/azure-pipelines/distro/mixin-npm - script: node build/azure-pipelines/distro/mixin-npm.ts
displayName: Mixin distro node modules displayName: Mixin distro node modules
- script: node build/azure-pipelines/distro/mixin-quality - script: node build/azure-pipelines/distro/mixin-quality.ts
displayName: Mixin distro quality displayName: Mixin distro quality
env: env:
VSCODE_QUALITY: ${{ parameters.VSCODE_QUALITY }} VSCODE_QUALITY: ${{ parameters.VSCODE_QUALITY }}

View File

@@ -41,7 +41,7 @@ steps:
archiveFilePatterns: $(Build.BinariesDirectory)/pkg/${{ target }}/*.zip archiveFilePatterns: $(Build.BinariesDirectory)/pkg/${{ target }}/*.zip
destinationFolder: $(Build.BinariesDirectory)/sign/${{ target }} destinationFolder: $(Build.BinariesDirectory)/sign/${{ target }}
- powershell: node build\azure-pipelines\common\sign $env:EsrpCliDllPath sign-windows $(Build.BinariesDirectory)/sign "*.exe" - powershell: node build\azure-pipelines\common\sign.ts $env:EsrpCliDllPath sign-windows $(Build.BinariesDirectory)/sign "*.exe"
env: env:
SYSTEM_ACCESSTOKEN: $(System.AccessToken) SYSTEM_ACCESSTOKEN: $(System.AccessToken)
displayName: ✍️ Codesign displayName: ✍️ Codesign

View File

@@ -55,7 +55,7 @@ steps:
- pwsh: | - pwsh: |
mkdir .build -ea 0 mkdir .build -ea 0
node build/azure-pipelines/common/computeNodeModulesCacheKey.js win32 $(VSCODE_ARCH) $(node -p process.arch) > .build/packagelockhash node build/azure-pipelines/common/computeNodeModulesCacheKey.ts win32 $(VSCODE_ARCH) $(node -p process.arch) > .build/packagelockhash
displayName: Prepare node_modules cache key displayName: Prepare node_modules cache key
- task: Cache@2 - task: Cache@2
@@ -101,31 +101,33 @@ steps:
displayName: Install dependencies displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- powershell: node build/azure-pipelines/distro/mixin-npm - powershell: node build/azure-pipelines/distro/mixin-npm.ts
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Mixin distro node modules displayName: Mixin distro node modules
- powershell: | - powershell: |
. build/azure-pipelines/win32/exec.ps1 . build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop" $ErrorActionPreference = "Stop"
exec { node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt } exec { node build/azure-pipelines/common/listNodeModules.ts .build/node_modules_list.txt }
exec { mkdir -Force .build/node_modules_cache } exec { mkdir -Force .build/node_modules_cache }
exec { 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt } exec { 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt }
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Create node_modules archive displayName: Create node_modules archive
- powershell: node build/azure-pipelines/distro/mixin-quality - powershell: node build/azure-pipelines/distro/mixin-quality.ts
displayName: Mixin distro quality displayName: Mixin distro quality
- template: ../../common/install-builtin-extensions.yml@self - template: ../../common/install-builtin-extensions.yml@self
- ${{ if ne(parameters.VSCODE_CIBUILD, true) }}: - ${{ if ne(parameters.VSCODE_CIBUILD, true) }}:
- powershell: node build\lib\policies\policyGenerator build\lib\policies\policyData.jsonc win32 - powershell: |
npm run copy-policy-dto --prefix build
node build\lib\policies\policyGenerator.ts build\lib\policies\policyData.jsonc win32
displayName: Generate Group Policy definitions displayName: Generate Group Policy definitions
retryCountOnTaskFailure: 3 retryCountOnTaskFailure: 3
- ${{ if and(ne(parameters.VSCODE_CIBUILD, true), ne(parameters.VSCODE_QUALITY, 'exploration')) }}: - ${{ if and(ne(parameters.VSCODE_CIBUILD, true), ne(parameters.VSCODE_QUALITY, 'exploration')) }}:
- powershell: node build/win32/explorer-dll-fetcher .build/win32/appx - powershell: node build/win32/explorer-dll-fetcher.ts .build/win32/appx
displayName: Download Explorer dll displayName: Download Explorer dll
- powershell: | - powershell: |
@@ -223,7 +225,7 @@ steps:
- powershell: | - powershell: |
. build/azure-pipelines/win32/exec.ps1 . build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop" $ErrorActionPreference = "Stop"
exec { npx deemon --detach --wait -- npx zx build/azure-pipelines/win32/codesign.js } exec { npx deemon --detach --wait -- npx zx build/azure-pipelines/win32/codesign.ts }
env: env:
SYSTEM_ACCESSTOKEN: $(System.AccessToken) SYSTEM_ACCESSTOKEN: $(System.AccessToken)
displayName: ✍️ Codesign displayName: ✍️ Codesign
@@ -240,7 +242,7 @@ steps:
- powershell: | - powershell: |
. build/azure-pipelines/win32/exec.ps1 . build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop" $ErrorActionPreference = "Stop"
exec { npx deemon --attach -- npx zx build/azure-pipelines/win32/codesign.js } exec { npx deemon --attach -- npx zx build/azure-pipelines/win32/codesign.ts }
condition: succeededOrFailed() condition: succeededOrFailed()
displayName: "✍️ Post-job: Codesign" displayName: "✍️ Post-job: Codesign"

View File

@@ -6,24 +6,24 @@
/** /**
* @param {string} name * @param {string} name
* @returns {import('./lib/bundle').IEntryPoint} * @returns {import('./lib/bundle.js').IEntryPoint}
*/ */
function createModuleDescription(name) { export function createModuleDescription(name) {
return { return {
name name
}; };
} }
exports.workerEditor = createModuleDescription('vs/editor/common/services/editorWebWorkerMain'); export const workerEditor = createModuleDescription('vs/editor/common/services/editorWebWorkerMain');
exports.workerExtensionHost = createModuleDescription('vs/workbench/api/worker/extensionHostWorkerMain'); export const workerExtensionHost = createModuleDescription('vs/workbench/api/worker/extensionHostWorkerMain');
exports.workerNotebook = createModuleDescription('vs/workbench/contrib/notebook/common/services/notebookWebWorkerMain'); export const workerNotebook = createModuleDescription('vs/workbench/contrib/notebook/common/services/notebookWebWorkerMain');
exports.workerLanguageDetection = createModuleDescription('vs/workbench/services/languageDetection/browser/languageDetectionWebWorkerMain'); export const workerLanguageDetection = createModuleDescription('vs/workbench/services/languageDetection/browser/languageDetectionWebWorkerMain');
exports.workerLocalFileSearch = createModuleDescription('vs/workbench/services/search/worker/localFileSearchMain'); export const workerLocalFileSearch = createModuleDescription('vs/workbench/services/search/worker/localFileSearchMain');
exports.workerProfileAnalysis = createModuleDescription('vs/platform/profiling/electron-browser/profileAnalysisWorkerMain'); export const workerProfileAnalysis = createModuleDescription('vs/platform/profiling/electron-browser/profileAnalysisWorkerMain');
exports.workerOutputLinks = createModuleDescription('vs/workbench/contrib/output/common/outputLinkComputerMain'); export const workerOutputLinks = createModuleDescription('vs/workbench/contrib/output/common/outputLinkComputerMain');
exports.workerBackgroundTokenization = createModuleDescription('vs/workbench/services/textMate/browser/backgroundTokenization/worker/textMateTokenizationWorker.workerMain'); export const workerBackgroundTokenization = createModuleDescription('vs/workbench/services/textMate/browser/backgroundTokenization/worker/textMateTokenizationWorker.workerMain');
exports.workbenchDesktop = [ export const workbenchDesktop = [
createModuleDescription('vs/workbench/contrib/debug/node/telemetryApp'), createModuleDescription('vs/workbench/contrib/debug/node/telemetryApp'),
createModuleDescription('vs/platform/files/node/watcher/watcherMain'), createModuleDescription('vs/platform/files/node/watcher/watcherMain'),
createModuleDescription('vs/platform/terminal/node/ptyHostMain'), createModuleDescription('vs/platform/terminal/node/ptyHostMain'),
@@ -31,15 +31,15 @@ exports.workbenchDesktop = [
createModuleDescription('vs/workbench/workbench.desktop.main') createModuleDescription('vs/workbench/workbench.desktop.main')
]; ];
exports.workbenchWeb = createModuleDescription('vs/workbench/workbench.web.main'); export const workbenchWeb = createModuleDescription('vs/workbench/workbench.web.main');
exports.keyboardMaps = [ export const keyboardMaps = [
createModuleDescription('vs/workbench/services/keybinding/browser/keyboardLayouts/layout.contribution.linux'), createModuleDescription('vs/workbench/services/keybinding/browser/keyboardLayouts/layout.contribution.linux'),
createModuleDescription('vs/workbench/services/keybinding/browser/keyboardLayouts/layout.contribution.darwin'), createModuleDescription('vs/workbench/services/keybinding/browser/keyboardLayouts/layout.contribution.darwin'),
createModuleDescription('vs/workbench/services/keybinding/browser/keyboardLayouts/layout.contribution.win') createModuleDescription('vs/workbench/services/keybinding/browser/keyboardLayouts/layout.contribution.win')
]; ];
exports.code = [ export const code = [
// 'vs/code/electron-main/main' is not included here because it comes in via ./src/main.js // 'vs/code/electron-main/main' is not included here because it comes in via ./src/main.js
// 'vs/code/node/cli' is not included here because it comes in via ./src/cli.js // 'vs/code/node/cli' is not included here because it comes in via ./src/cli.js
createModuleDescription('vs/code/node/cliProcessMain'), createModuleDescription('vs/code/node/cliProcessMain'),
@@ -47,9 +47,9 @@ exports.code = [
createModuleDescription('vs/code/electron-browser/workbench/workbench'), createModuleDescription('vs/code/electron-browser/workbench/workbench'),
]; ];
exports.codeWeb = createModuleDescription('vs/code/browser/workbench/workbench'); export const codeWeb = createModuleDescription('vs/code/browser/workbench/workbench');
exports.codeServer = [ export const codeServer = [
// 'vs/server/node/server.main' is not included here because it gets inlined via ./src/server-main.js // 'vs/server/node/server.main' is not included here because it gets inlined via ./src/server-main.js
// 'vs/server/node/server.cli' is not included here because it gets inlined via ./src/server-cli.js // 'vs/server/node/server.cli' is not included here because it gets inlined via ./src/server-cli.js
createModuleDescription('vs/workbench/api/node/extensionHostProcess'), createModuleDescription('vs/workbench/api/node/extensionHostProcess'),
@@ -57,4 +57,24 @@ exports.codeServer = [
createModuleDescription('vs/platform/terminal/node/ptyHostMain') createModuleDescription('vs/platform/terminal/node/ptyHostMain')
]; ];
exports.entrypoint = createModuleDescription; export const entrypoint = createModuleDescription;
const buildfile = {
workerEditor,
workerExtensionHost,
workerNotebook,
workerLanguageDetection,
workerLocalFileSearch,
workerProfileAnalysis,
workerOutputLinks,
workerBackgroundTokenization,
workbenchDesktop,
workbenchWeb,
keyboardMaps,
code,
codeWeb,
codeServer,
entrypoint: createModuleDescription
};
export default buildfile;

View File

@@ -1,136 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
// tsc interop helper: reuse an outer-scope implementation if present,
// otherwise wrap non-ES-module values in a `{ default: ... }` namespace.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const typescript_1 = __importDefault(require("typescript"));
const fs_1 = require("fs");
const path_1 = require("path");
const minimatch_1 = require("minimatch");
//
// #############################################################################################
//
// A custom typescript checker for the specific task of detecting the use of certain types in a
// layer that does not allow such use.
//
// Make changes to below RULES to lift certain files from these checks only if absolutely needed
//
// NOTE: Most layer checks are done via tsconfig.<layer>.json files.
//
// #############################################################################################
//
// Types that are defined in a common layer but are known to be only
// available in native environments should not be allowed in browser
// Types declared in common layers that are only implemented in native
// (Electron/Node) environments; browser/common/worker code must not use them.
const NATIVE_TYPES = [
    'NativeParsedArgs',
    'INativeEnvironmentService',
    'AbstractNativeEnvironmentService',
    'INativeWindowConfiguration',
    'ICommonNativeHostService',
    'INativeHostService',
    'IMainProcessService',
    'INativeBrowserElementsService',
];
// Layer rules, evaluated in order — the first rule whose `target` glob matches
// a file is the one applied (see the driver loop at the bottom of this file).
const RULES = [
    // Tests: skip
    {
        target: '**/vs/**/test/**',
        skip: true // -> skip all test files
    },
    // Common: vs/platform services that can access native types
    {
        target: `**/vs/platform/{${[
            'environment/common/*.ts',
            'window/common/window.ts',
            'native/common/native.ts',
            'native/common/nativeHostService.ts',
            'browserElements/common/browserElements.ts',
            'browserElements/common/nativeBrowserElementsService.ts'
        ].join(',')}}`,
        disallowedTypes: [ /* Ignore native types that are defined from here */],
    },
    // Common: vs/base/parts/sandbox/electron-browser/preload{,-aux}.ts
    {
        target: '**/vs/base/parts/sandbox/electron-browser/preload{,-aux}.ts',
        disallowedTypes: NATIVE_TYPES,
    },
    // Common
    {
        target: '**/vs/**/common/**',
        disallowedTypes: NATIVE_TYPES,
    },
    // Worker
    {
        target: '**/vs/**/worker/**',
        disallowedTypes: NATIVE_TYPES,
    },
    // Browser
    {
        target: '**/vs/**/browser/**',
        disallowedTypes: NATIVE_TYPES,
    },
    // Electron (main, utility)
    {
        target: '**/vs/**/{electron-main,electron-utility}/**',
        disallowedTypes: [
            'ipcMain' // not allowed, use validatedIpcMain instead
        ]
    }
];
// The root tsconfig describing the program to check.
const TS_CONFIG_PATH = (0, path_1.join)(__dirname, '../../', 'src', 'tsconfig.json');
// Set by checkFile() on any violation; drives the non-zero exit at the bottom.
let hasErrors = false;
/**
 * Walks every identifier in `sourceFile` and logs a violation (setting the
 * module-level `hasErrors` flag) whenever the identifier resolves to a symbol
 * whose root name appears in `rule.disallowedTypes`.
 *
 * Fix over the original: the initial `let text = symbol.getName()` was a dead
 * store — it was unconditionally overwritten with the root symbol's name
 * before any read — so it has been removed and `text` made const.
 *
 * @param {import('typescript').Program} program
 * @param {import('typescript').SourceFile} sourceFile
 * @param {{ target: string; disallowedTypes?: string[] }} rule
 */
function checkFile(program, sourceFile, rule) {
    checkNode(sourceFile);
    function checkNode(node) {
        if (node.kind !== typescript_1.default.SyntaxKind.Identifier) {
            return typescript_1.default.forEachChild(node, checkNode); // recurse down
        }
        const checker = program.getTypeChecker();
        const symbol = checker.getSymbolAtLocation(node);
        if (!symbol) {
            return;
        }
        // Walk up to the root symbol so aliases/re-exports are compared against
        // the disallowed list by their original declaration name.
        // NOTE(review): `symbol.parent` is an internal TS compiler API — confirm
        // it is still populated on the TypeScript version in use.
        let rootSymbol = symbol;
        while (rootSymbol.parent) {
            rootSymbol = rootSymbol.parent;
        }
        const text = rootSymbol.getName();
        if (rule.disallowedTypes?.some(disallowed => disallowed === text)) {
            const { line, character } = sourceFile.getLineAndCharacterOfPosition(node.getStart());
            console.log(`[build/checker/layersChecker.ts]: Reference to type '${text}' violates layer '${rule.target}' (${sourceFile.fileName} (${line + 1},${character + 1}). Learn more about our source code organization at https://github.com/microsoft/vscode/wiki/Source-Code-Organization.`);
            hasErrors = true;
            return;
        }
    }
}
/**
 * Reads and parses the given tsconfig, then builds a ts.Program covering the
 * file set it describes (with emit disabled — this checker only analyzes).
 *
 * @param {string} tsconfigPath absolute path to the tsconfig.json to load
 * @returns {import('typescript').Program}
 */
function createProgram(tsconfigPath) {
    const ts = typescript_1.default;
    const { config } = ts.readConfigFile(tsconfigPath, ts.sys.readFile);
    // Minimal parse host backed by the real filesystem; case sensitivity
    // mirrors the platform (only linux is case-sensitive here).
    const parseHost = {
        fileExists: fs_1.existsSync,
        readDirectory: ts.sys.readDirectory,
        readFile: file => (0, fs_1.readFileSync)(file, 'utf8'),
        useCaseSensitiveFileNames: process.platform === 'linux'
    };
    const parsed = ts.parseJsonConfigFileContent(config, parseHost, (0, path_1.resolve)((0, path_1.dirname)(tsconfigPath)), { noEmit: true });
    const host = ts.createCompilerHost(parsed.options, true);
    return ts.createProgram(parsed.fileNames, parsed.options, host);
}
//
// Create program and start checking
//
const program = createProgram(TS_CONFIG_PATH);
for (const sourceFile of program.getSourceFiles()) {
    // Rules are ordered: only the first rule whose glob matches a file applies.
    const rule = RULES.find(candidate => (0, minimatch_1.match)([sourceFile.fileName], candidate.target).length > 0);
    if (rule && !rule.skip) {
        checkFile(program, sourceFile, rule);
    }
}
// Any violation logged above flips hasErrors; signal failure to the build.
if (hasErrors) {
    process.exit(1);
}
//# sourceMappingURL=layersChecker.js.map

View File

@@ -6,7 +6,7 @@
import ts from 'typescript'; import ts from 'typescript';
import { readFileSync, existsSync } from 'fs'; import { readFileSync, existsSync } from 'fs';
import { resolve, dirname, join } from 'path'; import { resolve, dirname, join } from 'path';
import { match } from 'minimatch'; import minimatch from 'minimatch';
// //
// ############################################################################################# // #############################################################################################
@@ -88,7 +88,7 @@ const RULES: IRule[] = [
} }
]; ];
const TS_CONFIG_PATH = join(__dirname, '../../', 'src', 'tsconfig.json'); const TS_CONFIG_PATH = join(import.meta.dirname, '../../', 'src', 'tsconfig.json');
interface IRule { interface IRule {
target: string; target: string;
@@ -151,7 +151,7 @@ const program = createProgram(TS_CONFIG_PATH);
for (const sourceFile of program.getSourceFiles()) { for (const sourceFile of program.getSourceFiles()) {
for (const rule of RULES) { for (const rule of RULES) {
if (match([sourceFile.fileName], rule.target).length > 0) { if (minimatch.match([sourceFile.fileName], rule.target).length > 0) {
if (!rule.skip) { if (!rule.skip) {
checkFile(program, sourceFile, rule); checkFile(program, sourceFile, rule);
} }

View File

@@ -1,63 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const path_1 = __importDefault(require("path"));
const fs_1 = __importDefault(require("fs"));
const minimatch_1 = __importDefault(require("minimatch"));
const vscode_universal_bundler_1 = require("vscode-universal-bundler");
const root = path_1.default.dirname(path_1.default.dirname(__dirname));
/**
 * Merges the darwin x64 and arm64 builds into a single universal app bundle
 * using vscode-universal-bundler, then stamps the resulting product.json with
 * the universal asset id.
 *
 * @param {string} [buildDir] directory containing the per-arch app bundles
 * @throws {Error} when no build directory is provided
 */
async function main(buildDir) {
    const arch = process.env['VSCODE_ARCH'];
    if (!buildDir) {
        throw new Error('Build dir not provided');
    }
    const product = JSON.parse(fs_1.default.readFileSync(path_1.default.join(root, 'product.json'), 'utf8'));
    const appName = product.nameLong + '.app';
    const x64AppPath = path_1.default.join(buildDir, 'VSCode-darwin-x64', appName);
    const arm64AppPath = path_1.default.join(buildDir, 'VSCode-darwin-arm64', appName);
    const asarRelativePath = path_1.default.join('Contents', 'Resources', 'app', 'node_modules.asar');
    const outAppPath = path_1.default.join(buildDir, `VSCode-darwin-${arch}`, appName);
    const productJsonPath = path_1.default.resolve(outAppPath, 'Contents', 'Resources', 'app', 'product.json');
    // Files excluded from the x64-vs-arm64 content comparison.
    const comparisonIgnoreGlobs = [
        '**/CodeResources',
        '**/Credits.rtf',
        '**/policies/{*.mobileconfig,**/*.plist}',
        // TODO: Should we consider expanding this to other files in this area?
        '**/node_modules/@parcel/node-addon-api/nothing.target.mk',
    ];
    await (0, vscode_universal_bundler_1.makeUniversalApp)({
        x64AppPath,
        arm64AppPath,
        asarPath: asarRelativePath,
        outAppPath,
        force: true,
        mergeASARs: true,
        x64ArchFiles: '{*/kerberos.node,**/extensions/microsoft-authentication/dist/libmsalruntime.dylib,**/extensions/microsoft-authentication/dist/msal-node-runtime.node}',
        filesToSkipComparison: (file) => comparisonIgnoreGlobs.some(expected => (0, minimatch_1.default)(file, expected))
    });
    // Mark the merged bundle as the universal asset in its product.json.
    const productJson = JSON.parse(fs_1.default.readFileSync(productJsonPath, 'utf8'));
    productJson.darwinUniversalAssetId = 'darwin-universal';
    fs_1.default.writeFileSync(productJsonPath, JSON.stringify(productJson, null, '\t'));
}
// Run only when invoked directly (`node create-universal-app <buildDir>`),
// not when imported; any failure is logged and surfaced as exit code 1.
if (require.main === module) {
    main(process.argv[2]).catch(err => {
        console.error(err);
        process.exit(1);
    });
}
//# sourceMappingURL=create-universal-app.js.map

View File

@@ -8,7 +8,7 @@ import fs from 'fs';
import minimatch from 'minimatch'; import minimatch from 'minimatch';
import { makeUniversalApp } from 'vscode-universal-bundler'; import { makeUniversalApp } from 'vscode-universal-bundler';
const root = path.dirname(path.dirname(__dirname)); const root = path.dirname(path.dirname(import.meta.dirname));
async function main(buildDir?: string) { async function main(buildDir?: string) {
const arch = process.env['VSCODE_ARCH']; const arch = process.env['VSCODE_ARCH'];
@@ -58,7 +58,7 @@ async function main(buildDir?: string) {
fs.writeFileSync(productJsonPath, JSON.stringify(productJson, null, '\t')); fs.writeFileSync(productJsonPath, JSON.stringify(productJson, null, '\t'));
} }
if (require.main === module) { if (import.meta.main) {
main(process.argv[2]).catch(err => { main(process.argv[2]).catch(err => {
console.error(err); console.error(err);
process.exit(1); process.exit(1);

View File

@@ -1,128 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
const osx_sign_1 = require("@electron/osx-sign");
const cross_spawn_promise_1 = require("@malept/cross-spawn-promise");
// Repo root (two levels above this compiled file's directory) and build dir.
const root = path_1.default.dirname(path_1.default.dirname(__dirname));
const baseDir = path_1.default.dirname(__dirname);
// product.json supplies the app's long/short names used below.
const product = JSON.parse(fs_1.default.readFileSync(path_1.default.join(root, 'product.json'), 'utf8'));
const helperAppBaseName = product.nameShort;
// Electron helper bundle names; each gets its own entitlements plist.
const gpuHelperAppName = helperAppBaseName + ' Helper (GPU).app';
const rendererHelperAppName = helperAppBaseName + ' Helper (Renderer).app';
const pluginHelperAppName = helperAppBaseName + ' Helper (Plugin).app';
/**
 * Reads the Electron version from the repo root `.npmrc` (a line of the form
 * `target="x.y.z"`).
 *
 * Fix over the original: the regex result was dereferenced unchecked, so a
 * missing `target=` line crashed with an opaque TypeError; it now throws a
 * descriptive Error instead.
 *
 * @returns {string} the Electron version string
 * @throws {Error} when `.npmrc` contains no `target="..."` line
 */
function getElectronVersion() {
    const npmrc = fs_1.default.readFileSync(path_1.default.join(root, '.npmrc'), 'utf8');
    const match = /^target="(.*)"$/m.exec(npmrc);
    if (!match) {
        throw new Error(`Could not find electron target version in ${path_1.default.join(root, '.npmrc')}`);
    }
    return match[1];
}
/**
 * Chooses the entitlements plist for the file being signed: each Electron
 * helper app (GPU/Renderer/Plugin) has its own plist; everything else gets
 * the main app entitlements.
 *
 * @param {string} filePath path of the bundle/file being signed
 * @returns {string} absolute path to the matching entitlements plist
 */
function getEntitlementsForFile(filePath) {
    let plistName = 'app-entitlements.plist';
    if (filePath.includes(gpuHelperAppName)) {
        plistName = 'helper-gpu-entitlements.plist';
    }
    else if (filePath.includes(rendererHelperAppName)) {
        plistName = 'helper-renderer-entitlements.plist';
    }
    else if (filePath.includes(pluginHelperAppName)) {
        plistName = 'helper-plugin-entitlements.plist';
    }
    return path_1.default.join(baseDir, 'azure-pipelines', 'darwin', plistName);
}
/**
 * Runs `fn`, retrying (with exponential backoff: 1s, 2s, 4s, ...) only when it
 * fails with the macOS "item could not be found in the keychain" error. Any
 * other error — or exhausting the retry budget — is rethrown immediately.
 *
 * @param {() => Promise<any>} fn the operation to attempt
 * @param {number} [maxRetries=3] total number of attempts
 * @returns {Promise<any>} whatever `fn` resolves to
 */
async function retrySignOnKeychainError(fn, maxRetries = 3) {
    let lastError;
    let attempt = 0;
    while (attempt < maxRetries) {
        attempt += 1;
        try {
            return await fn();
        }
        catch (error) {
            lastError = error;
            // Only the specific transient keychain error is worth retrying.
            const errorMessage = error instanceof Error ? error.message : String(error);
            const isKeychainError = errorMessage.includes('The specified item could not be found in the keychain.');
            if (!isKeychainError || attempt === maxRetries) {
                throw error;
            }
            console.log(`Signing attempt ${attempt} failed with keychain error, retrying...`);
            console.log(`Error: ${errorMessage}`);
            const delay = 1000 * 2 ** (attempt - 1);
            console.log(`Waiting ${Math.round(delay)}ms before retry ${attempt}/${maxRetries}...`);
            await new Promise(resolve => setTimeout(resolve, delay));
        }
    }
    // Unreachable in practice (the last failed attempt rethrows above), kept
    // as a safety net mirroring the original control flow.
    throw lastError;
}
/**
 * Codesigns the darwin app bundle with @electron/osx-sign, using per-helper
 * entitlements and the build agent's temporary keychain. For non-universal
 * builds it first injects/updates the usage-description strings in Info.plist
 * via plutil (the universal build inherits them from the x64 bundle).
 *
 * @param {string} [buildDir] agent build directory containing VSCode-darwin-<arch>
 * @throws {Error} when buildDir or AGENT_TEMPDIRECTORY is not provided
 */
async function main(buildDir) {
    const tempDir = process.env['AGENT_TEMPDIRECTORY'];
    const arch = process.env['VSCODE_ARCH'];
    const identity = process.env['CODESIGN_IDENTITY'];
    if (!buildDir) {
        throw new Error('$AGENT_BUILDDIRECTORY not set');
    }
    if (!tempDir) {
        throw new Error('$AGENT_TEMPDIRECTORY not set');
    }
    const appRoot = path_1.default.join(buildDir, `VSCode-darwin-${arch}`);
    const appName = product.nameLong + '.app';
    const infoPlistPath = path_1.default.resolve(appRoot, appName, 'Contents', 'Info.plist');
    // Options passed to @electron/osx-sign; entitlements are chosen per file
    // so each helper app gets its dedicated plist.
    const appOpts = {
        app: path_1.default.join(appRoot, appName),
        platform: 'darwin',
        optionsForFile: (filePath) => ({
            entitlements: getEntitlementsForFile(filePath),
            hardenedRuntime: true,
        }),
        preAutoEntitlements: false,
        preEmbedProvisioningProfile: false,
        keychain: path_1.default.join(tempDir, 'buildagent.keychain'),
        version: getElectronVersion(),
        identity,
    };
    // Only overwrite plist entries for x64 and arm64 builds,
    // universal will get its copy from the x64 build.
    if (arch !== 'universal') {
        // -insert fails if the key already exists; -replace below upserts.
        await (0, cross_spawn_promise_1.spawn)('plutil', [
            '-insert',
            'NSAppleEventsUsageDescription',
            '-string',
            'An application in Visual Studio Code wants to use AppleScript.',
            `${infoPlistPath}`
        ]);
        await (0, cross_spawn_promise_1.spawn)('plutil', [
            '-replace',
            'NSMicrophoneUsageDescription',
            '-string',
            'An application in Visual Studio Code wants to use the Microphone.',
            `${infoPlistPath}`
        ]);
        await (0, cross_spawn_promise_1.spawn)('plutil', [
            '-replace',
            'NSCameraUsageDescription',
            '-string',
            'An application in Visual Studio Code wants to use the Camera.',
            `${infoPlistPath}`
        ]);
    }
    // Signing may transiently fail to find the keychain item; retry with backoff.
    await retrySignOnKeychainError(() => (0, osx_sign_1.sign)(appOpts));
}
// Run only when invoked directly. On failure, dump the available codesigning
// identities and the keychain contents to aid debugging, then exit non-zero.
if (require.main === module) {
    main(process.argv[2]).catch(async err => {
        console.error(err);
        const tempDir = process.env['AGENT_TEMPDIRECTORY'];
        if (tempDir) {
            const keychain = path_1.default.join(tempDir, 'buildagent.keychain');
            const identities = await (0, cross_spawn_promise_1.spawn)('security', ['find-identity', '-p', 'codesigning', '-v', keychain]);
            console.error(`Available identities:\n${identities}`);
            const dump = await (0, cross_spawn_promise_1.spawn)('security', ['dump-keychain', keychain]);
            console.error(`Keychain dump:\n${dump}`);
        }
        process.exit(1);
    });
}
//# sourceMappingURL=sign.js.map

View File

@@ -5,11 +5,11 @@
import fs from 'fs'; import fs from 'fs';
import path from 'path'; import path from 'path';
import { sign, SignOptions } from '@electron/osx-sign'; import { sign, type SignOptions } from '@electron/osx-sign';
import { spawn } from '@malept/cross-spawn-promise'; import { spawn } from '@malept/cross-spawn-promise';
const root = path.dirname(path.dirname(__dirname)); const root = path.dirname(path.dirname(import.meta.dirname));
const baseDir = path.dirname(__dirname); const baseDir = path.dirname(import.meta.dirname);
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8')); const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const helperAppBaseName = product.nameShort; const helperAppBaseName = product.nameShort;
const gpuHelperAppName = helperAppBaseName + ' Helper (GPU).app'; const gpuHelperAppName = helperAppBaseName + ' Helper (GPU).app';
@@ -122,7 +122,7 @@ async function main(buildDir?: string): Promise<void> {
await retrySignOnKeychainError(() => sign(appOpts)); await retrySignOnKeychainError(() => sign(appOpts));
} }
if (require.main === module) { if (import.meta.main) {
main(process.argv[2]).catch(async err => { main(process.argv[2]).catch(async err => {
console.error(err); console.error(err);
const tempDir = process.env['AGENT_TEMPDIRECTORY']; const tempDir = process.env['AGENT_TEMPDIRECTORY'];

View File

@@ -1,136 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const assert_1 = __importDefault(require("assert"));
const path_1 = __importDefault(require("path"));
const promises_1 = require("fs/promises");
const cross_spawn_promise_1 = require("@malept/cross-spawn-promise");
const minimatch_1 = __importDefault(require("minimatch"));
// Prefix emitted by the `file` utility when a path is a Mach-O binary.
const MACHO_PREFIX = 'Mach-O ';
// 64-bit Mach-O magic number as read little-endian from the first 4 bytes.
const MACHO_64_MAGIC_LE = 0xfeedfacf;
// Universal ("fat") binary magic number as read little-endian.
const MACHO_UNIVERSAL_MAGIC_LE = 0xbebafeca;
// CPU-type words identifying arm64 slices; both byte orders are listed
// because the word may be stored big- or little-endian in the header.
const MACHO_ARM64_CPU_TYPE = new Set([
    0x0c000001,
    0x0100000c,
]);
// CPU-type words identifying x86_64 slices (both byte orders, as above).
const MACHO_X86_64_CPU_TYPE = new Set([
    0x07000001,
    0x01000007,
]);
// Files to skip during architecture validation
const FILES_TO_SKIP = [
    // MSAL runtime files are only present in ARM64 builds
    '**/extensions/microsoft-authentication/dist/libmsalruntime.dylib',
    '**/extensions/microsoft-authentication/dist/msal-node-runtime.node',
];
/**
 * Returns true when `file` matches one of the glob patterns in
 * FILES_TO_SKIP and should therefore be excluded from validation.
 */
function isFileSkipped(file) {
    for (const pattern of FILES_TO_SKIP) {
        if ((0, minimatch_1.default)(file, pattern)) {
            return true;
        }
    }
    return false;
}
/**
 * Reads `length` bytes starting at `position` from `file` into `buf` at
 * `offset`. The file handle is always closed, even when the read throws;
 * if opening the file fails, the error propagates and no close is needed.
 */
async function read(file, buf, offset, length, position) {
    const filehandle = await (0, promises_1.open)(file);
    try {
        await filehandle.read(buf, offset, length, position);
    }
    finally {
        await filehandle.close();
    }
}
/**
 * Recursively scans `appPath` and returns the paths of Mach-O files whose
 * architecture does not match `arch` ('x64' | 'arm64' | 'universal').
 *
 * Bug fix: the header reads were previously fire-and-forget `.then()`
 * chains that were never awaited, so this function could resolve before
 * every file was inspected — invalid files could be silently dropped from
 * the result, and the shared `header` buffer was raced by overlapping
 * reads. The reads are now awaited, making the traversal fully sequential
 * and the result deterministic.
 */
async function checkMachOFiles(appPath, arch) {
    const visited = new Set();
    const invalidFiles = [];
    const header = Buffer.alloc(8);
    // Size in bytes of one slice entry in a universal ("fat") header.
    const file_header_entry_size = 20;
    const checkx86_64Arch = (arch === 'x64');
    const checkArm64Arch = (arch === 'arm64');
    const checkUniversalArch = (arch === 'universal');
    const traverse = async (p) => {
        // Resolve symlinks so each physical file is only checked once.
        p = await (0, promises_1.realpath)(p);
        if (visited.has(p)) {
            return;
        }
        visited.add(p);
        const info = await (0, promises_1.stat)(p);
        if (info.isSymbolicLink()) {
            return;
        }
        if (info.isFile()) {
            let fileOutput = '';
            try {
                fileOutput = await (0, cross_spawn_promise_1.spawn)('file', ['--brief', '--no-pad', p]);
            }
            catch (e) {
                if (e instanceof cross_spawn_promise_1.ExitCodeError) {
                    /* silently accept error codes from "file" */
                }
                else {
                    throw e;
                }
            }
            if (fileOutput.startsWith(MACHO_PREFIX)) {
                console.log(`Verifying architecture of ${p}`);
                await read(p, header, 0, 8, 0);
                const header_magic = header.readUInt32LE();
                if (header_magic === MACHO_64_MAGIC_LE) {
                    // Thin 64-bit binary: the CPU type follows the magic.
                    const cpu_type = header.readUInt32LE(4);
                    if (checkUniversalArch) {
                        // A thin binary inside a universal build is invalid.
                        invalidFiles.push(p);
                    }
                    else if (checkArm64Arch && !MACHO_ARM64_CPU_TYPE.has(cpu_type)) {
                        invalidFiles.push(p);
                    }
                    else if (checkx86_64Arch && !MACHO_X86_64_CPU_TYPE.has(cpu_type)) {
                        invalidFiles.push(p);
                    }
                }
                else if (header_magic === MACHO_UNIVERSAL_MAGIC_LE) {
                    // Fat binary: expect exactly two slices, one per architecture.
                    const num_binaries = header.readUInt32BE(4);
                    assert_1.default.equal(num_binaries, 2);
                    const file_entries_size = file_header_entry_size * num_binaries;
                    const file_entries = Buffer.alloc(file_entries_size);
                    await read(p, file_entries, 0, file_entries_size, 8);
                    for (let i = 0; i < num_binaries; i++) {
                        const cpu_type = file_entries.readUInt32LE(file_header_entry_size * i);
                        if (!MACHO_ARM64_CPU_TYPE.has(cpu_type) && !MACHO_X86_64_CPU_TYPE.has(cpu_type)) {
                            invalidFiles.push(p);
                        }
                    }
                }
            }
        }
        if (info.isDirectory()) {
            for (const child of await (0, promises_1.readdir)(p)) {
                await traverse(path_1.default.resolve(p, child));
            }
        }
    };
    await traverse(appPath);
    return invalidFiles;
}
// Entry point: validate CLI arguments, scan the bundle at APP_PATH, and
// exit non-zero if any file is built for the wrong architecture.
const archToCheck = process.argv[2];
const appPath = process.env['APP_PATH'];
(0, assert_1.default)(appPath, 'APP_PATH not set');
(0, assert_1.default)(['x64', 'arm64', 'universal'].includes(archToCheck), `Invalid architecture ${archToCheck} to check`);
checkMachOFiles(appPath, archToCheck)
    .then(invalidFiles => {
        // Filter out files that should be skipped
        const actualInvalidFiles = invalidFiles.filter(file => !isFileSkipped(file));
        if (actualInvalidFiles.length === 0) {
            console.log('\x1b[32mAll files are valid\x1b[0m');
            return;
        }
        console.error('\x1b[31mThese files are built for the wrong architecture:\x1b[0m');
        for (const file of actualInvalidFiles) {
            console.error(`\x1b[31m${file}\x1b[0m`);
        }
        process.exit(1);
    })
    .catch(err => {
        console.error(err);
        process.exit(1);
    });
//# sourceMappingURL=verify-macho.js.map

View File

@@ -4,12 +4,13 @@
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
// @ts-check // @ts-check
import eventStream from 'event-stream'; import eventStream from 'event-stream';
import { src } from 'vinyl-fs'; import vfs from 'vinyl-fs';
import { eslintFilter } from './filters.js'; import { eslintFilter } from './filters.js';
import gulpEslint from './gulp-eslint.js'; import gulpEslint from './gulp-eslint.js';
function eslint() { function eslint() {
return src(eslintFilter, { base: '.', follow: true, allowEmpty: true }) return vfs
.src(eslintFilter, { base: '.', follow: true, allowEmpty: true })
.pipe( .pipe(
gulpEslint((results) => { gulpEslint((results) => {
if (results.warningCount > 0 || results.errorCount > 0) { if (results.warningCount > 0 || results.errorCount > 0) {
@@ -19,8 +20,7 @@ function eslint() {
).pipe(eventStream.through(function () { /* noop, important for the stream to end */ })); ).pipe(eventStream.through(function () { /* noop, important for the stream to end */ }));
} }
const normalizeScriptPath = (/** @type {string} */ p) => p.replace(/\.(js|ts)$/, ''); if (import.meta.main) {
if (normalizeScriptPath(import.meta.filename) === normalizeScriptPath(process.argv[1])) {
eslint().on('error', (err) => { eslint().on('error', (err) => {
console.error(); console.error();
console.error(err); console.error(err);

View File

@@ -13,10 +13,11 @@
* all ⊃ eol ⊇ indentation ⊃ copyright ⊃ typescript * all ⊃ eol ⊇ indentation ⊃ copyright ⊃ typescript
*/ */
const { readFileSync } = require('fs'); import { readFileSync } from 'fs';
const { join } = require('path'); import { join } from 'path';
module.exports.all = [
export const all = [
'*', '*',
'build/**/*', 'build/**/*',
'extensions/**/*', 'extensions/**/*',
@@ -31,7 +32,7 @@ module.exports.all = [
'!**/*.js.map', '!**/*.js.map',
]; ];
module.exports.unicodeFilter = [ export const unicodeFilter = [
'**', '**',
'!**/ThirdPartyNotices.txt', '!**/ThirdPartyNotices.txt',
@@ -68,7 +69,7 @@ module.exports.unicodeFilter = [
'!src/vs/workbench/contrib/terminal/common/scripts/psreadline/**', '!src/vs/workbench/contrib/terminal/common/scripts/psreadline/**',
]; ];
module.exports.indentationFilter = [ export const indentationFilter = [
'**', '**',
// except specific files // except specific files
@@ -151,7 +152,7 @@ module.exports.indentationFilter = [
'!extensions/simple-browser/media/*.js', '!extensions/simple-browser/media/*.js',
]; ];
module.exports.copyrightFilter = [ export const copyrightFilter = [
'**', '**',
'!**/*.desktop', '!**/*.desktop',
'!**/*.json', '!**/*.json',
@@ -193,7 +194,7 @@ module.exports.copyrightFilter = [
'!src/vs/workbench/contrib/terminal/common/scripts/psreadline/**', '!src/vs/workbench/contrib/terminal/common/scripts/psreadline/**',
]; ];
module.exports.tsFormattingFilter = [ export const tsFormattingFilter = [
'src/**/*.ts', 'src/**/*.ts',
'test/**/*.ts', 'test/**/*.ts',
'extensions/**/*.ts', 'extensions/**/*.ts',
@@ -212,19 +213,19 @@ module.exports.tsFormattingFilter = [
'!extensions/terminal-suggest/src/shell/fishBuiltinsCache.ts', '!extensions/terminal-suggest/src/shell/fishBuiltinsCache.ts',
]; ];
module.exports.eslintFilter = [ export const eslintFilter = [
'**/*.js', '**/*.js',
'**/*.cjs', '**/*.cjs',
'**/*.mjs', '**/*.mjs',
'**/*.ts', '**/*.ts',
'.eslint-plugin-local/**/*.ts', '.eslint-plugin-local/**/*.ts',
...readFileSync(join(__dirname, '..', '.eslint-ignore')) ...readFileSync(join(import.meta.dirname, '..', '.eslint-ignore'))
.toString() .toString()
.split(/\r\n|\n/) .split(/\r\n|\n/)
.filter(line => line && !line.startsWith('#')) .filter(line => line && !line.startsWith('#'))
.map(line => line.startsWith('!') ? line.slice(1) : `!${line}`) .map(line => line.startsWith('!') ? line.slice(1) : `!${line}`)
]; ];
module.exports.stylelintFilter = [ export const stylelintFilter = [
'src/**/*.css' 'src/**/*.css'
]; ];

View File

@@ -2,13 +2,10 @@
* Copyright (c) Microsoft Corporation. All rights reserved. * Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information. * Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
import { ESLint } from 'eslint';
'use strict'; import fancyLog from 'fancy-log';
import { relative } from 'path';
const { ESLint } = require('eslint'); import Stream, { Transform } from 'stream';
const { Transform, default: Stream } = require('stream');
const { relative } = require('path');
const fancyLog = require('fancy-log');
/** /**
* @typedef {ESLint.LintResult[] & { errorCount: number, warningCount: number}} ESLintResults * @typedef {ESLint.LintResult[] & { errorCount: number, warningCount: number}} ESLintResults
@@ -17,7 +14,7 @@ const fancyLog = require('fancy-log');
/** /**
* @param {(results: ESLintResults) => void} action - A function to handle all ESLint results * @param {(results: ESLintResults) => void} action - A function to handle all ESLint results
*/ */
function eslint(action) { export default function eslint(action) {
const linter = new ESLint({}); const linter = new ESLint({});
const formatter = linter.loadFormatter('compact'); const formatter = linter.loadFormatter('compact');
@@ -82,5 +79,3 @@ function transform(transform, flush) {
flush flush
}); });
} }
module.exports = eslint;

View File

@@ -11,20 +11,19 @@ import ansiColors from 'ansi-colors';
import * as cp from 'child_process'; import * as cp from 'child_process';
import { tmpdir } from 'os'; import { tmpdir } from 'os';
import { existsSync, mkdirSync, rmSync } from 'fs'; import { existsSync, mkdirSync, rmSync } from 'fs';
import task from './lib/task.js'; import * as task from './lib/task.ts';
import watcher from './lib/watch/index.js'; import * as watcher from './lib/watch/index.ts';
import utilModule from './lib/util.js'; import * as utilModule from './lib/util.ts';
import reporterModule from './lib/reporter.js'; import * as reporterModule from './lib/reporter.ts';
import untar from 'gulp-untar'; import untar from 'gulp-untar';
import gunzip from 'gulp-gunzip'; import gunzip from 'gulp-gunzip';
import { fileURLToPath } from 'url'; import { fileURLToPath } from 'url';
const { debounce } = utilModule; const { debounce } = utilModule;
const { createReporter } = reporterModule; const { createReporter } = reporterModule;
const __dirname = import.meta.dirname;
const root = 'cli'; const root = 'cli';
const rootAbs = path.resolve(__dirname, '..', root); const rootAbs = path.resolve(import.meta.dirname, '..', root);
const src = `${root}/src`; const src = `${root}/src`;
const platformOpensslDirName = const platformOpensslDirName =
@@ -148,7 +147,7 @@ const compileCliTask = task.define('compile-cli', () => {
const watchCliTask = task.define('watch-cli', () => { const watchCliTask = task.define('watch-cli', () => {
warnIfRustNotInstalled(); warnIfRustNotInstalled();
return watcher.default(`${src}/**`, { read: false }) return watcher(`${src}/**`, { read: false })
.pipe(debounce(compileCliTask)); .pipe(debounce(compileCliTask));
}); });

View File

@@ -4,10 +4,10 @@
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
//@ts-check //@ts-check
import gulp from 'gulp'; import gulp from 'gulp';
import util from './lib/util.js'; import * as util from './lib/util.ts';
import date from './lib/date.js'; import * as date from './lib/date.ts';
import task from './lib/task.js'; import * as task from './lib/task.ts';
import compilation from './lib/compilation.js'; import * as compilation from './lib/compilation.ts';
/** /**
* @param {boolean} disableMangle * @param {boolean} disableMangle

View File

@@ -4,26 +4,25 @@
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
//@ts-check //@ts-check
import gulp from 'gulp'; import gulp from 'gulp';
import * as path from 'path'; import path from 'path';
import util from './lib/util.js'; import * as util from './lib/util.ts';
import getVersionModule from './lib/getVersion.js'; import * as getVersionModule from './lib/getVersion.ts';
import task from './lib/task.js'; import * as task from './lib/task.ts';
import es from 'event-stream'; import es from 'event-stream';
import File from 'vinyl'; import File from 'vinyl';
import i18n from './lib/i18n.js'; import * as i18n from './lib/i18n.ts';
import standalone from './lib/standalone.js'; import * as standalone from './lib/standalone.ts';
import * as cp from 'child_process'; import * as cp from 'child_process';
import compilation from './lib/compilation.js'; import * as compilation from './lib/compilation.ts';
import monacoapi from './lib/monaco-api.js'; import * as monacoapi from './lib/monaco-api.ts';
import * as fs from 'fs'; import * as fs from 'fs';
import filter from 'gulp-filter'; import filter from 'gulp-filter';
import reporterModule from './lib/reporter.js'; import * as reporterModule from './lib/reporter.ts';
import monacoPackage from './monaco/package.json' with { type: 'json' }; import monacoPackage from './monaco/package.json' with { type: 'json' };
const __dirname = import.meta.dirname;
const { getVersion } = getVersionModule; const { getVersion } = getVersionModule;
const { createReporter } = reporterModule; const { createReporter } = reporterModule;
const root = path.dirname(__dirname); const root = path.dirname(import.meta.dirname);
const sha1 = getVersion(root); const sha1 = getVersion(root);
const semver = monacoPackage.version; const semver = monacoPackage.version;
const headerVersion = semver + '(' + sha1 + ')'; const headerVersion = semver + '(' + sha1 + ')';
@@ -242,7 +241,7 @@ function createTscCompileTask(watch) {
args.push('-w'); args.push('-w');
} }
const child = cp.spawn(`node`, args, { const child = cp.spawn(`node`, args, {
cwd: path.join(__dirname, '..'), cwd: path.join(import.meta.dirname, '..'),
// stdio: [null, 'pipe', 'inherit'] // stdio: [null, 'pipe', 'inherit']
}); });
const errors = []; const errors = [];

View File

@@ -12,22 +12,21 @@ import * as path from 'path';
import * as nodeUtil from 'util'; import * as nodeUtil from 'util';
import es from 'event-stream'; import es from 'event-stream';
import filter from 'gulp-filter'; import filter from 'gulp-filter';
import util from './lib/util.js'; import * as util from './lib/util.ts';
import getVersionModule from './lib/getVersion.js'; import * as getVersionModule from './lib/getVersion.ts';
import task from './lib/task.js'; import * as task from './lib/task.ts';
import watcher from './lib/watch/index.js'; import watcher from './lib/watch/index.ts';
import reporterModule from './lib/reporter.js'; import * as reporterModule from './lib/reporter.ts';
import glob from 'glob'; import glob from 'glob';
import plumber from 'gulp-plumber'; import plumber from 'gulp-plumber';
import ext from './lib/extensions.js'; import * as ext from './lib/extensions.ts';
import tsb from './lib/tsb/index.js'; import * as tsb from './lib/tsb/index.ts';
import sourcemaps from 'gulp-sourcemaps'; import sourcemaps from 'gulp-sourcemaps';
import { fileURLToPath } from 'url'; import { fileURLToPath } from 'url';
const __dirname = import.meta.dirname;
const { getVersion } = getVersionModule; const { getVersion } = getVersionModule;
const { createReporter } = reporterModule; const { createReporter } = reporterModule;
const root = path.dirname(__dirname); const root = path.dirname(import.meta.dirname);
const commit = getVersion(root); const commit = getVersion(root);
// To save 250ms for each gulp startup, we are caching the result here // To save 250ms for each gulp startup, we are caching the result here
@@ -168,7 +167,7 @@ const tasks = compilations.map(function (tsconfigFile) {
const pipeline = createPipeline(false); const pipeline = createPipeline(false);
const nonts = gulp.src(src, srcOpts).pipe(filter(['**', '!**/*.ts'])); const nonts = gulp.src(src, srcOpts).pipe(filter(['**', '!**/*.ts']));
const input = es.merge(nonts, pipeline.tsProjectSrc()); const input = es.merge(nonts, pipeline.tsProjectSrc());
const watchInput = watcher.default(src, { ...srcOpts, ...{ readDelay: 200 } }); const watchInput = watcher(src, { ...srcOpts, ...{ readDelay: 200 } });
return watchInput return watchInput
.pipe(util.incremental(pipeline, input)) .pipe(util.incremental(pipeline, input))

View File

@@ -6,7 +6,7 @@ import gulp from 'gulp';
import es from 'event-stream'; import es from 'event-stream';
import path from 'path'; import path from 'path';
import fs from 'fs'; import fs from 'fs';
import task from './lib/task.js'; import * as task from './lib/task.ts';
import { hygiene } from './hygiene.mjs'; import { hygiene } from './hygiene.mjs';
const dirName = path.dirname(new URL(import.meta.url).pathname); const dirName = path.dirname(new URL(import.meta.url).pathname);

View File

@@ -6,17 +6,17 @@
import gulp from 'gulp'; import gulp from 'gulp';
import * as path from 'path'; import * as path from 'path';
import es from 'event-stream'; import es from 'event-stream';
import * as util from './lib/util.js'; import * as util from './lib/util.ts';
import * as getVersionModule from './lib/getVersion.js'; import * as getVersionModule from './lib/getVersion.ts';
import * as task from './lib/task.js'; import * as task from './lib/task.ts';
import optimize from './lib/optimize.js'; import * as optimize from './lib/optimize.ts';
import * as inlineMetaModule from './lib/inlineMeta.js'; import * as inlineMetaModule from './lib/inlineMeta.ts';
import product from '../product.json' with { type: 'json' }; import product from '../product.json' with { type: 'json' };
import rename from 'gulp-rename'; import rename from 'gulp-rename';
import replace from 'gulp-replace'; import replace from 'gulp-replace';
import filter from 'gulp-filter'; import filter from 'gulp-filter';
import * as dependenciesModule from './lib/dependencies.js'; import * as dependenciesModule from './lib/dependencies.ts';
import * as dateModule from './lib/date.js'; import * as dateModule from './lib/date.ts';
import vfs from 'vinyl-fs'; import vfs from 'vinyl-fs';
import packageJson from '../package.json' with { type: 'json' }; import packageJson from '../package.json' with { type: 'json' };
import flatmap from 'gulp-flatmap'; import flatmap from 'gulp-flatmap';
@@ -32,7 +32,7 @@ import * as cp from 'child_process';
import log from 'fancy-log'; import log from 'fancy-log';
import buildfile from './buildfile.js'; import buildfile from './buildfile.js';
import { fileURLToPath } from 'url'; import { fileURLToPath } from 'url';
import * as fetchModule from './lib/fetch.js'; import * as fetchModule from './lib/fetch.ts';
import jsonEditor from 'gulp-json-editor'; import jsonEditor from 'gulp-json-editor';
const { inlineMeta } = inlineMetaModule; const { inlineMeta } = inlineMetaModule;
@@ -40,9 +40,8 @@ const { getVersion } = getVersionModule;
const { getProductionDependencies } = dependenciesModule; const { getProductionDependencies } = dependenciesModule;
const { readISODate } = dateModule; const { readISODate } = dateModule;
const { fetchUrls, fetchGithub } = fetchModule; const { fetchUrls, fetchGithub } = fetchModule;
const __dirname = import.meta.dirname;
const REPO_ROOT = path.dirname(__dirname); const REPO_ROOT = path.dirname(import.meta.dirname);
const commit = getVersion(REPO_ROOT); const commit = getVersion(REPO_ROOT);
const BUILD_ROOT = path.dirname(REPO_ROOT); const BUILD_ROOT = path.dirname(REPO_ROOT);
const REMOTE_FOLDER = path.join(REPO_ROOT, 'remote'); const REMOTE_FOLDER = path.join(REPO_ROOT, 'remote');
@@ -340,8 +339,8 @@ function packageTask(type, platform, arch, sourceFolderName, destinationFolderNa
const deps = gulp.src(dependenciesSrc, { base: 'remote', dot: true }) const deps = gulp.src(dependenciesSrc, { base: 'remote', dot: true })
// filter out unnecessary files, no source maps in server build // filter out unnecessary files, no source maps in server build
.pipe(filter(['**', '!**/package-lock.json', '!**/*.{js,css}.map'])) .pipe(filter(['**', '!**/package-lock.json', '!**/*.{js,css}.map']))
.pipe(util.cleanNodeModules(path.join(__dirname, '.moduleignore'))) .pipe(util.cleanNodeModules(path.join(import.meta.dirname, '.moduleignore')))
.pipe(util.cleanNodeModules(path.join(__dirname, `.moduleignore.${process.platform}`))) .pipe(util.cleanNodeModules(path.join(import.meta.dirname, `.moduleignore.${process.platform}`)))
.pipe(jsFilter) .pipe(jsFilter)
.pipe(util.stripSourceMappingURL()) .pipe(util.stripSourceMappingURL())
.pipe(jsFilter.restore); .pipe(jsFilter.restore);

View File

@@ -5,19 +5,18 @@
import gulp from 'gulp'; import gulp from 'gulp';
import * as path from 'path'; import * as path from 'path';
import task from './lib/task.js'; import * as task from './lib/task.ts';
import util from './lib/util.js'; import * as util from './lib/util.ts';
import electron from '@vscode/gulp-electron'; import electron from '@vscode/gulp-electron';
import electronConfigModule from './lib/electron.js'; import * as electronConfigModule from './lib/electron.ts';
import filter from 'gulp-filter'; import filter from 'gulp-filter';
import deps from './lib/dependencies.js'; import * as deps from './lib/dependencies.ts';
import { existsSync, readdirSync } from 'fs'; import { existsSync, readdirSync } from 'fs';
import { fileURLToPath } from 'url'; import { fileURLToPath } from 'url';
const { config } = electronConfigModule; const { config } = electronConfigModule;
const __dirname = import.meta.dirname; const root = path.dirname(import.meta.dirname);
const root = path.dirname(__dirname);
const BUILD_TARGETS = [ const BUILD_TARGETS = [
{ platform: 'win32', arch: 'x64' }, { platform: 'win32', arch: 'x64' },

View File

@@ -2,22 +2,19 @@
* Copyright (c) Microsoft Corporation. All rights reserved. * Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information. * Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
import { EventEmitter } from 'events'; import { EventEmitter } from 'events';
import glob from 'glob'; import glob from 'glob';
import gulp from 'gulp'; import gulp from 'gulp';
import { createRequire } from 'node:module'; import { createRequire } from 'node:module';
import { fileURLToPath } from 'url';
import { monacoTypecheckTask /* , monacoTypecheckWatchTask */ } from './gulpfile.editor.mjs'; import { monacoTypecheckTask /* , monacoTypecheckWatchTask */ } from './gulpfile.editor.mjs';
import { compileExtensionMediaTask, compileExtensionsTask, watchExtensionsTask } from './gulpfile.extensions.mjs'; import { compileExtensionMediaTask, compileExtensionsTask, watchExtensionsTask } from './gulpfile.extensions.mjs';
import compilation from './lib/compilation.js'; import * as compilation from './lib/compilation.ts';
import task from './lib/task.js'; import * as task from './lib/task.ts';
import util from './lib/util.js'; import * as util from './lib/util.ts';
EventEmitter.defaultMaxListeners = 100; EventEmitter.defaultMaxListeners = 100;
const require = createRequire(import.meta.url); const require = createRequire(import.meta.url);
const __dirname = import.meta.dirname;
const { transpileTask, compileTask, watchTask, compileApiProposalNamesTask, watchApiProposalNamesTask } = compilation; const { transpileTask, compileTask, watchTask, compileApiProposalNamesTask, watchApiProposalNamesTask } = compilation;
@@ -55,5 +52,7 @@ process.on('unhandledRejection', (reason, p) => {
}); });
// Load all the gulpfiles only if running tasks other than the editor tasks // Load all the gulpfiles only if running tasks other than the editor tasks
glob.sync('gulpfile.*.{mjs,js}', { cwd: __dirname }) glob.sync('gulpfile.*.{mjs,js}', { cwd: import.meta.dirname })
.forEach(f => require(`./${f}`)); .forEach(f => {
return require(`./${f}`);
});

View File

@@ -8,13 +8,13 @@ import replace from 'gulp-replace';
import rename from 'gulp-rename'; import rename from 'gulp-rename';
import es from 'event-stream'; import es from 'event-stream';
import vfs from 'vinyl-fs'; import vfs from 'vinyl-fs';
import * as utilModule from './lib/util.js'; import * as utilModule from './lib/util.ts';
import * as getVersionModule from './lib/getVersion.js'; import * as getVersionModule from './lib/getVersion.ts';
import * as task from './lib/task.js'; import * as task from './lib/task.ts';
import packageJson from '../package.json' with { type: 'json' }; import packageJson from '../package.json' with { type: 'json' };
import product from '../product.json' with { type: 'json' }; import product from '../product.json' with { type: 'json' };
import { getDependencies } from './linux/dependencies-generator.js'; import { getDependencies } from './linux/dependencies-generator.ts';
import * as depLists from './linux/debian/dep-lists.js'; import * as depLists from './linux/debian/dep-lists.ts';
import * as path from 'path'; import * as path from 'path';
import * as cp from 'child_process'; import * as cp from 'child_process';
import { promisify } from 'util'; import { promisify } from 'util';
@@ -23,9 +23,8 @@ import { fileURLToPath } from 'url';
const { rimraf } = utilModule; const { rimraf } = utilModule;
const { getVersion } = getVersionModule; const { getVersion } = getVersionModule;
const { recommendedDeps: debianRecommendedDependencies } = depLists; const { recommendedDeps: debianRecommendedDependencies } = depLists;
const __dirname = import.meta.dirname;
const exec = promisify(cp.exec); const exec = promisify(cp.exec);
const root = path.dirname(__dirname); const root = path.dirname(import.meta.dirname);
const commit = getVersion(root); const commit = getVersion(root);
const linuxPackageRevision = Math.floor(new Date().getTime() / 1000); const linuxPackageRevision = Math.floor(new Date().getTime() / 1000);

View File

@@ -13,20 +13,20 @@ import replace from 'gulp-replace';
import filter from 'gulp-filter'; import filter from 'gulp-filter';
import electron from '@vscode/gulp-electron'; import electron from '@vscode/gulp-electron';
import jsonEditor from 'gulp-json-editor'; import jsonEditor from 'gulp-json-editor';
import * as util from './lib/util.js'; import * as util from './lib/util.ts';
import * as getVersionModule from './lib/getVersion.js'; import * as getVersionModule from './lib/getVersion.ts';
import * as dateModule from './lib/date.js'; import * as dateModule from './lib/date.ts';
import * as task from './lib/task.js'; import * as task from './lib/task.ts';
import buildfile from './buildfile.js'; import buildfile from './buildfile.js';
import optimize from './lib/optimize.js'; import * as optimize from './lib/optimize.ts';
import * as inlineMetaModule from './lib/inlineMeta.js'; import * as inlineMetaModule from './lib/inlineMeta.ts';
import packageJson from '../package.json' with { type: 'json' }; import packageJson from '../package.json' with { type: 'json' };
import product from '../product.json' with { type: 'json' }; import product from '../product.json' with { type: 'json' };
import * as crypto from 'crypto'; import * as crypto from 'crypto';
import i18n from './lib/i18n.js'; import * as i18n from './lib/i18n.ts';
import * as dependenciesModule from './lib/dependencies.js'; import * as dependenciesModule from './lib/dependencies.ts';
import electronModule from './lib/electron.js'; import * as electronModule from './lib/electron.ts';
import asarModule from './lib/asar.js'; import * as asarModule from './lib/asar.ts';
import minimist from 'minimist'; import minimist from 'minimist';
import { compileBuildWithoutManglingTask, compileBuildWithManglingTask } from './gulpfile.compile.mjs'; import { compileBuildWithoutManglingTask, compileBuildWithManglingTask } from './gulpfile.compile.mjs';
import { compileNonNativeExtensionsBuildTask, compileNativeExtensionsBuildTask, compileAllExtensionsBuildTask, compileExtensionMediaBuildTask, cleanExtensionsBuildTask } from './gulpfile.extensions.mjs'; import { compileNonNativeExtensionsBuildTask, compileNativeExtensionsBuildTask, compileAllExtensionsBuildTask, compileExtensionMediaBuildTask, cleanExtensionsBuildTask } from './gulpfile.extensions.mjs';
@@ -43,8 +43,7 @@ const { config } = electronModule;
const { createAsar } = asarModule; const { createAsar } = asarModule;
const glob = promisify(globCallback); const glob = promisify(globCallback);
const rcedit = promisify(rceditCallback); const rcedit = promisify(rceditCallback);
const __dirname = import.meta.dirname; const root = path.dirname(import.meta.dirname);
const root = path.dirname(__dirname);
const commit = getVersion(root); const commit = getVersion(root);
// Build // Build
@@ -292,14 +291,14 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
const telemetry = gulp.src('.build/telemetry/**', { base: '.build/telemetry', dot: true }); const telemetry = gulp.src('.build/telemetry/**', { base: '.build/telemetry', dot: true });
const jsFilter = util.filter(data => !data.isDirectory() && /\.js$/.test(data.path)); const jsFilter = util.filter(data => !data.isDirectory() && /\.js$/.test(data.path));
const root = path.resolve(path.join(__dirname, '..')); const root = path.resolve(path.join(import.meta.dirname, '..'));
const productionDependencies = getProductionDependencies(root); const productionDependencies = getProductionDependencies(root);
const dependenciesSrc = productionDependencies.map(d => path.relative(root, d)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`]).flat().concat('!**/*.mk'); const dependenciesSrc = productionDependencies.map(d => path.relative(root, d)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`]).flat().concat('!**/*.mk');
const deps = gulp.src(dependenciesSrc, { base: '.', dot: true }) const deps = gulp.src(dependenciesSrc, { base: '.', dot: true })
.pipe(filter(['**', `!**/${config.version}/**`, '!**/bin/darwin-arm64-87/**', '!**/package-lock.json', '!**/yarn.lock', '!**/*.{js,css}.map'])) .pipe(filter(['**', `!**/${config.version}/**`, '!**/bin/darwin-arm64-87/**', '!**/package-lock.json', '!**/yarn.lock', '!**/*.{js,css}.map']))
.pipe(util.cleanNodeModules(path.join(__dirname, '.moduleignore'))) .pipe(util.cleanNodeModules(path.join(import.meta.dirname, '.moduleignore')))
.pipe(util.cleanNodeModules(path.join(__dirname, `.moduleignore.${process.platform}`))) .pipe(util.cleanNodeModules(path.join(import.meta.dirname, `.moduleignore.${process.platform}`)))
.pipe(jsFilter) .pipe(jsFilter)
.pipe(util.rewriteSourceMappingURL(sourceMappingURLBase)) .pipe(util.rewriteSourceMappingURL(sourceMappingURLBase))
.pipe(jsFilter.restore) .pipe(jsFilter.restore)

View File

@@ -6,19 +6,19 @@
import gulp from 'gulp'; import gulp from 'gulp';
import * as path from 'path'; import * as path from 'path';
import es from 'event-stream'; import es from 'event-stream';
import * as util from './lib/util.js'; import * as util from './lib/util.ts';
import * as getVersionModule from './lib/getVersion.js'; import * as getVersionModule from './lib/getVersion.ts';
import * as task from './lib/task.js'; import * as task from './lib/task.ts';
import optimize from './lib/optimize.js'; import * as optimize from './lib/optimize.ts';
import * as dateModule from './lib/date.js'; import * as dateModule from './lib/date.ts';
import product from '../product.json' with { type: 'json' }; import product from '../product.json' with { type: 'json' };
import rename from 'gulp-rename'; import rename from 'gulp-rename';
import filter from 'gulp-filter'; import filter from 'gulp-filter';
import * as dependenciesModule from './lib/dependencies.js'; import * as dependenciesModule from './lib/dependencies.ts';
import vfs from 'vinyl-fs'; import vfs from 'vinyl-fs';
import packageJson from '../package.json' with { type: 'json' }; import packageJson from '../package.json' with { type: 'json' };
import { compileBuildWithManglingTask } from './gulpfile.compile.mjs'; import { compileBuildWithManglingTask } from './gulpfile.compile.mjs';
import extensions from './lib/extensions.js'; import * as extensions from './lib/extensions.ts';
import VinylFile from 'vinyl'; import VinylFile from 'vinyl';
import jsonEditor from 'gulp-json-editor'; import jsonEditor from 'gulp-json-editor';
import buildfile from './buildfile.js'; import buildfile from './buildfile.js';
@@ -27,9 +27,8 @@ import { fileURLToPath } from 'url';
const { getVersion } = getVersionModule; const { getVersion } = getVersionModule;
const { readISODate } = dateModule; const { readISODate } = dateModule;
const { getProductionDependencies } = dependenciesModule; const { getProductionDependencies } = dependenciesModule;
const __dirname = import.meta.dirname;
const REPO_ROOT = path.dirname(__dirname); const REPO_ROOT = path.dirname(import.meta.dirname);
const BUILD_ROOT = path.dirname(REPO_ROOT); const BUILD_ROOT = path.dirname(REPO_ROOT);
const WEB_FOLDER = path.join(REPO_ROOT, 'remote', 'web'); const WEB_FOLDER = path.join(REPO_ROOT, 'remote', 'web');
@@ -184,7 +183,7 @@ function packageTask(sourceFolderName, destinationFolderName) {
const deps = gulp.src(dependenciesSrc, { base: 'remote/web', dot: true }) const deps = gulp.src(dependenciesSrc, { base: 'remote/web', dot: true })
.pipe(filter(['**', '!**/package-lock.json'])) .pipe(filter(['**', '!**/package-lock.json']))
.pipe(util.cleanNodeModules(path.join(__dirname, '.webignore'))); .pipe(util.cleanNodeModules(path.join(import.meta.dirname, '.webignore')));
const favicon = gulp.src('resources/server/favicon.ico', { base: 'resources/server' }); const favicon = gulp.src('resources/server/favicon.ico', { base: 'resources/server' });
const manifest = gulp.src('resources/server/manifest.json', { base: 'resources/server' }); const manifest = gulp.src('resources/server/manifest.json', { base: 'resources/server' });

View File

@@ -7,8 +7,8 @@ import * as path from 'path';
import * as fs from 'fs'; import * as fs from 'fs';
import assert from 'assert'; import assert from 'assert';
import * as cp from 'child_process'; import * as cp from 'child_process';
import * as util from './lib/util.js'; import * as util from './lib/util.ts';
import * as task from './lib/task.js'; import * as task from './lib/task.ts';
import pkg from '../package.json' with { type: 'json' }; import pkg from '../package.json' with { type: 'json' };
import product from '../product.json' with { type: 'json' }; import product from '../product.json' with { type: 'json' };
import vfs from 'vinyl-fs'; import vfs from 'vinyl-fs';
@@ -16,13 +16,12 @@ import rcedit from 'rcedit';
import { createRequire } from 'module'; import { createRequire } from 'module';
const require = createRequire(import.meta.url); const require = createRequire(import.meta.url);
const __dirname = import.meta.dirname; const repoPath = path.dirname(import.meta.dirname);
const repoPath = path.dirname(__dirname);
const buildPath = (/** @type {string} */ arch) => path.join(path.dirname(repoPath), `VSCode-win32-${arch}`); const buildPath = (/** @type {string} */ arch) => path.join(path.dirname(repoPath), `VSCode-win32-${arch}`);
const setupDir = (/** @type {string} */ arch, /** @type {string} */ target) => path.join(repoPath, '.build', `win32-${arch}`, `${target}-setup`); const setupDir = (/** @type {string} */ arch, /** @type {string} */ target) => path.join(repoPath, '.build', `win32-${arch}`, `${target}-setup`);
const issPath = path.join(__dirname, 'win32', 'code.iss'); const issPath = path.join(import.meta.dirname, 'win32', 'code.iss');
const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup'))), 'bin', 'ISCC.exe'); const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup'))), 'bin', 'ISCC.exe');
const signWin32Path = path.join(repoPath, 'build', 'azure-pipelines', 'common', 'sign-win32'); const signWin32Path = path.join(repoPath, 'build', 'azure-pipelines', 'common', 'sign-win32.ts');
function packageInnoSetup(iss, options, cb) { function packageInnoSetup(iss, options, cb) {
options = options || {}; options = options || {};

View File

@@ -13,7 +13,7 @@ import VinylFile from 'vinyl';
import vfs from 'vinyl-fs'; import vfs from 'vinyl-fs';
import { all, copyrightFilter, eslintFilter, indentationFilter, stylelintFilter, tsFormattingFilter, unicodeFilter } from './filters.js'; import { all, copyrightFilter, eslintFilter, indentationFilter, stylelintFilter, tsFormattingFilter, unicodeFilter } from './filters.js';
import eslint from './gulp-eslint.js'; import eslint from './gulp-eslint.js';
import formatter from './lib/formatter.js'; import * as formatter from './lib/formatter.ts';
import gulpstylelint from './stylelint.mjs'; import gulpstylelint from './stylelint.mjs';
const copyrightHeaderLines = [ const copyrightHeaderLines = [
@@ -117,7 +117,7 @@ export function hygiene(some, runEslint = true) {
this.emit('data', file); this.emit('data', file);
}); });
const formatting = es.map(function (file, cb) { const formatting = es.map(function (/** @type {any} */ file, cb) {
try { try {
const rawInput = file.contents.toString('utf8'); const rawInput = file.contents.toString('utf8');
const rawOutput = formatter.format(file.path, rawInput); const rawOutput = formatter.format(file.path, rawInput);
@@ -269,7 +269,7 @@ function createGitIndexVinyls(paths) {
} }
// this allows us to run hygiene as a git pre-commit hook // this allows us to run hygiene as a git pre-commit hook
if (import.meta.filename === process.argv[1]) { if (import.meta.main) {
process.on('unhandledRejection', (reason, p) => { process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
process.exit(1); process.exit(1);

Some files were not shown because too many files have changed in this diff Show More