Move html extension: setup & scanner

This commit is contained in:
Martin Aeschlimann
2016-08-17 11:37:22 +02:00
parent 2ed511ea28
commit cb5a6e77db
25 changed files with 2428 additions and 862 deletions

View File

@@ -0,0 +1,152 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import {
createConnection, IConnection,
TextDocuments, TextDocument, InitializeParams, InitializeResult
} from 'vscode-languageserver';
import {HTMLDocument, LanguageSettings, getLanguageService} from './service/htmlLanguageService';
import * as nls from 'vscode-nls';
nls.config(process.env['VSCODE_NLS_CONFIG']);
// Create a connection for the server
let connection: IConnection = createConnection();

// Route console output through the LSP connection so messages reach the
// client's output channel instead of the server process's stdout/stderr.
console.log = connection.console.log.bind(connection.console);
console.error = connection.console.error.bind(connection.console);

// Create a simple text document manager. The text document manager
// supports full document sync only
let documents: TextDocuments = new TextDocuments();

// Make the text document manager listen on the connection
// for open, change and close text document events
documents.listen(connection);

// After the server has started the client sends an initialize request. The server receives
// in the passed params the rootPath of the workspace plus the client capabilities
connection.onInitialize((params: InitializeParams): InitializeResult => {
	return {
		capabilities: {
			// Tell the client that the server works in FULL text document sync mode
			textDocumentSync: documents.syncKind,
			// Completion is two-step (doComplete + doResolve); '"' and ':' trigger
			// completion inside attribute values.
			completionProvider: { resolveProvider: true, triggerCharacters: ['"', ':'] },
			hoverProvider: true,
			documentSymbolProvider: true,
			documentRangeFormattingProvider: true,
			documentFormattingProvider: true
		}
	};
});
// Create the HTML language service (the comment previously said "JSON",
// a leftover from the JSON server this file was adapted from).
const languageService = getLanguageService();

// The settings interface describes the server relevant settings part
interface Settings {
	html: LanguageSettings;
}

// Last configuration received from the client; applied in updateConfiguration.
let languageSettings: LanguageSettings;

// The settings have changed. Is sent on server activation as well.
connection.onDidChangeConfiguration((change) => {
	const settings = <Settings>change.settings;
	languageSettings = settings.html;
	updateConfiguration();
});

// Push the current settings into the language service and re-validate
// every open document, since diagnostics may depend on the settings.
function updateConfiguration() {
	languageService.configure(languageSettings);
	// Revalidate any open text documents
	documents.all().forEach(triggerValidation);
}
// Revalidate whenever a document is first opened or its content changes.
documents.onDidChangeContent(e => triggerValidation(e.document));

// When a document is closed, drop any queued validation for it and
// clear its diagnostics on the client side.
documents.onDidClose(e => {
	cleanPendingValidation(e.document);
	connection.sendDiagnostics({ uri: e.document.uri, diagnostics: [] });
});
// Debounce bookkeeping: pending validation timer handles, keyed by document URI.
let pendingValidationRequests : {[uri:string]:number} = {};
// Delay between the last change and the actual validation run.
const validationDelayMs = 200;

// Cancel a queued validation for the given document, if any.
function cleanPendingValidation(textDocument: TextDocument): void {
	let request = pendingValidationRequests[textDocument.uri];
	if (request) {
		clearTimeout(request);
		delete pendingValidationRequests[textDocument.uri];
	}
}

// Schedule a debounced validation: any previously queued run for the same
// document is cancelled before the new timer is armed.
function triggerValidation(textDocument: TextDocument): void {
	cleanPendingValidation(textDocument);
	pendingValidationRequests[textDocument.uri] = setTimeout(() => {
		delete pendingValidationRequests[textDocument.uri];
		validateTextDocument(textDocument);
	}, validationDelayMs);
}
function validateTextDocument(textDocument: TextDocument): void {
if (textDocument.getText().length === 0) {
// ignore empty documents
connection.sendDiagnostics({ uri: textDocument.uri, diagnostics: [] });
return;
}
let htmlDocument = getHTMLDocument(textDocument);
let diagnostics = languageService.doValidation(textDocument, htmlDocument);
// Send the computed diagnostics to VSCode.
connection.sendDiagnostics({ uri: textDocument.uri, diagnostics });
}
// Parse the document with the HTML language service.
// NOTE(review): parses on every request — presumably cheap enough for HTML;
// consider caching per document version if profiling says otherwise.
function getHTMLDocument(document: TextDocument): HTMLDocument {
	return languageService.parseHTMLDocument(document);
}

// Completion proposals at the given position.
connection.onCompletion(textDocumentPosition => {
	let document = documents.get(textDocumentPosition.textDocument.uri);
	let htmlDocument = getHTMLDocument(document);
	return languageService.doComplete(document, textDocumentPosition.position, htmlDocument);
});

// Second completion step: fill in details for a previously returned item.
connection.onCompletionResolve(completionItem => {
	return languageService.doResolve(completionItem);
});

// Hover information at the given position.
connection.onHover(textDocumentPositionParams => {
	let document = documents.get(textDocumentPositionParams.textDocument.uri);
	let htmlDocument = getHTMLDocument(document);
	return languageService.doHover(document, textDocumentPositionParams.position, htmlDocument);
});

// Document outline (symbol list).
connection.onDocumentSymbol(documentSymbolParams => {
	let document = documents.get(documentSymbolParams.textDocument.uri);
	let htmlDocument = getHTMLDocument(document);
	return languageService.findDocumentSymbols(document, htmlDocument);
});

// Format the whole document (null range means full document).
connection.onDocumentFormatting(formatParams => {
	let document = documents.get(formatParams.textDocument.uri);
	return languageService.format(document, null, formatParams.options);
});

// Format only the selected range.
connection.onDocumentRangeFormatting(formatParams => {
	let document = documents.get(formatParams.textDocument.uri);
	return languageService.format(document, formatParams.range, formatParams.options);
});

// Listen on the connection
connection.listen();

View File

@@ -0,0 +1,43 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import { TextDocument, Position, CompletionItem, CompletionList, Hover, Range, SymbolInformation, Diagnostic, TextEdit, FormattingOptions, MarkedString } from 'vscode-languageserver-types';
export { TextDocument, Position, CompletionItem, CompletionList, Hover, Range, SymbolInformation, Diagnostic, TextEdit, FormattingOptions, MarkedString };
// Options for the HTML formatter. The option names mirror the js-beautify
// HTML formatter settings — NOTE(review): exact semantics to be confirmed
// against the formatter implementation once it is moved over.
export interface HTMLFormatConfiguration {
	wrapLineLength: number;
	unformatted: string;
	indentInnerHtml: boolean;
	preserveNewLines: boolean;
	maxPreserveNewLines: number;
	indentHandlebars: boolean;
	endWithNewline: boolean;
	extraLiners: string;
}
// Server-relevant settings for the HTML language support.
export interface LanguageSettings {
	validate: boolean;   // presumably toggles validation — confirm against the service implementation
	format: HTMLFormatConfiguration;
}

// Opaque handle to a parsed HTML document: produced by parseHTMLDocument and
// passed back into the other LanguageService methods.
export declare type HTMLDocument = {};
// The HTML language service API. All methods that need a parsed document take
// the HTMLDocument handle returned by parseHTMLDocument.
export interface LanguageService {
	// Apply settings; affects subsequent calls (e.g. validation, formatting).
	configure(settings: LanguageSettings): void;
	// Compute diagnostics for the document.
	doValidation(document: TextDocument, htmlDocument: HTMLDocument): Diagnostic[];
	// Parse the document into the opaque HTMLDocument handle.
	parseHTMLDocument(document: TextDocument): HTMLDocument;
	// Second completion step: add details to a previously proposed item.
	doResolve(item: CompletionItem): CompletionItem;
	// Completion proposals at the given position.
	doComplete(document: TextDocument, position: Position, doc: HTMLDocument): CompletionList;
	// Document outline symbols.
	findDocumentSymbols(document: TextDocument, doc: HTMLDocument): SymbolInformation[];
	// Hover information at the given position.
	doHover(document: TextDocument, position: Position, doc: HTMLDocument): Hover;
	// Format the document; a null range means the whole document.
	format(document: TextDocument, range: Range, options: FormattingOptions): TextEdit[];
}
// Factory for the HTML language service.
// NOTE(review): stub — returns null, so any call on the result will throw at
// runtime until the real implementation is moved over in a follow-up commit.
export function getLanguageService() : LanguageService {
	return null;
}

View File

@@ -0,0 +1,408 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
// Token types produced by the HTML scanner.
export enum TokenType {
	CommentStart,     // '<!--'
	Comment,          // comment text up to (but not including) '-->'
	CommentEnd,       // '-->'
	TagStart,         // '<', and also the closing '>' or '/>' of a start tag
	Tag,              // the tag name itself
	TagEnd,           // '</', and also the closing '>' of an end tag
	DelimiterAssign,  // '=' between an attribute name and its value
	AttributeName,
	AttributeValue,   // quoted or unquoted attribute value
	DoctypeStart,     // '<!doctype'
	Doctype,          // doctype content up to '>'
	DoctypeEnd,       // '>'
	Content,          // text content between tags
	Whitespace,
	Unknown,          // unexpected character, consumed one at a time
	Script,           // raw content of a <script> element
	Styles,           // raw content of a <style> element
	EOS               // end of stream
}
// One scanned token: its type plus the [offset, offset + len) range in the source.
export interface IToken {
	type: TokenType;
	offset: number;
	len: number;
}
/**
 * Forward-only character stream over a source string, with the
 * peek/advance helpers used by the scanner.
 */
class MultiLineStream {

	private source: string;
	private len: number;
	private position: number;

	constructor(source: string) {
		this.source = source;
		this.len = source.length;
		this.position = 0;
	}

	// True when the whole source has been consumed.
	public eos(): boolean {
		return this.len <= this.position;
	}

	public pos(): number {
		return this.position;
	}

	public goBackTo(pos: number): void {
		this.position = pos;
	}

	public goBack(n: number): void {
		this.position -= n;
	}

	public advance(n: number): void {
		this.position += n;
	}

	public goToEnd(): void {
		this.position = this.source.length;
	}

	// Char code at the current position, advancing by one; 0 at/after EOS.
	public nextChar(): number {
		return this.source.charCodeAt(this.position++) || 0;
	}

	// Char code n characters ahead without advancing; 0 past EOS.
	public peekChar(n: number = 0): number {
		return this.source.charCodeAt(this.position + n) || 0;
	}

	// Consume the next character iff it equals ch.
	public advanceIfChar(ch: number): boolean {
		if (ch === this.source.charCodeAt(this.position)) {
			this.position++;
			return true;
		}
		return false;
	}

	// Consume the exact sequence ch iff it starts at the current position.
	public advanceIfChars(ch: number[]): boolean {
		let i: number;
		if (this.position + ch.length > this.source.length) {
			return false;
		}
		for (i = 0; i < ch.length; i++) {
			if (this.source.charCodeAt(this.position + i) !== ch[i]) {
				return false;
			}
		}
		this.advance(i);
		return true;
	}

	// Consume and return the match of regex (callers anchor it with '^');
	// returns '' and does not advance when there is no match.
	public advanceIfRegExp(regex: RegExp): string {
		let str = this.source.substr(this.position);
		let match = str.match(regex);
		if (match) {
			this.position = this.position + match.index + match[0].length;
			return match[0];
		}
		return '';
	}

	// Advance to the start of the next match of regex and return the match;
	// returns '' without advancing when there is no match.
	public advanceUntilRegExp(regex: RegExp): string {
		let str = this.source.substr(this.position);
		let match = str.match(regex);
		if (match) {
			this.position = this.position + match.index;
			return match[0];
		}
		return '';
	}

	// Advance until ch is found (stream is left ON the character), or to EOS.
	public advanceUntilChar(ch: number): boolean {
		while (this.position < this.source.length) {
			if (this.source.charCodeAt(this.position) === ch) {
				return true;
			}
			this.advance(1);
		}
		return false;
	}

	// Advance until the full sequence ch is found; the stream is left ON its
	// first character. Fixed: the previous version returned true as soon as the
	// FIRST character matched (without checking the rest or advancing — which
	// made the comment scanner loop forever on a stray '-'), and its '<' loop
	// bound missed a sequence sitting at the very end of the source. On failure
	// the stream is now consumed to the end so callers emit one token covering
	// the rest of the input.
	public advanceUntilChars(ch: number[]): boolean {
		while (this.position + ch.length <= this.source.length) {
			let i = 0;
			for (; i < ch.length && this.source.charCodeAt(this.position + i) === ch[i]; i++) {
			}
			if (i === ch.length) {
				return true;
			}
			this.advance(1);
		}
		this.goToEnd();
		return false;
	}

	// Skip HTML whitespace; true when at least one character was skipped.
	public skipWhitespace(): boolean {
		let n = this.advanceWhileChar(ch => {
			return ch === _WSP || ch === _TAB || ch === _NWL || ch === _LFD || ch === _CAR;
		});
		return n > 0;
	}

	// Advance while condition holds; returns the number of characters consumed.
	public advanceWhileChar(condition: (ch: number) => boolean): number {
		let posNow = this.position;
		while (this.position < this.len && condition(this.source.charCodeAt(this.position))) {
			this.position++;
		}
		return this.position - posNow;
	}
}

// Character codes used by the stream and the scanner.
const _BNG = '!'.charCodeAt(0);
const _MIN = '-'.charCodeAt(0);
const _LAN = '<'.charCodeAt(0);
const _RAN = '>'.charCodeAt(0);
const _FSL = '/'.charCodeAt(0);
const _EQS = '='.charCodeAt(0);
const _DQO = '"'.charCodeAt(0);
const _SQO = '\''.charCodeAt(0);
const _NWL = '\n'.charCodeAt(0);
const _CAR = '\r'.charCodeAt(0);
const _LFD = '\f'.charCodeAt(0);
const _WSP = ' '.charCodeAt(0);
const _TAB = '\t'.charCodeAt(0);
// Scanner states. The state is exposed by the scanner and can be passed back
// into setSource, so scanning can resume across document chunks.
export enum ScannerState {
	Content,              // outside any tag
	OpeningStartTag,      // just after '<'
	OpeningEndTag,        // just after '</'
	WithinDoctype,        // after '<!doctype', until '>'
	WithinTag,            // after the tag name of a start tag
	WithinComment,        // after '<!--', until '-->'
	WithinScriptContent,  // raw content inside <script> ... </script>
	WithinStyleContent,   // raw content inside <style> ... </style>
	AttributeName,        // after an attribute name
	AttributeValue        // after the '=' following an attribute name
}
/**
 * Hand-written HTML tokenizer. `scan()` returns one token at a time and is a
 * resumable state machine: the current `scannerState` can be read after a
 * chunk ends and fed into `setSource` for the next chunk.
 */
export class Scanner {

	private _stream: MultiLineStream;
	private _state: ScannerState;
	private _tokenType: TokenType;
	private _tokenOffset: number;
	// Set when whitespace was seen inside a tag; attribute names are only
	// scanned after whitespace (see the WithinTag case).
	private _hasSpaceAfterTag: boolean;
	// Name of the most recently opened element, used to switch into the
	// script/style raw-content states when its start tag is closed.
	private _lastTag: string;

	public setSource(input: string, initialState: ScannerState = ScannerState.Content): void {
		this._stream = new MultiLineStream(input);
		this._state = initialState;
	}

	public get position(): number {
		return this._stream.pos();
	}

	// Typed as ScannerState (previously plain number; numeric enums are
	// interchangeable with number, so this is backward compatible).
	public get scannerState(): ScannerState {
		return this._state;
	}

	// Typed as TokenType (previously plain number; backward compatible).
	public get tokenType(): TokenType {
		return this._tokenType;
	}

	public get tokenOffset(): number {
		return this._tokenOffset;
	}

	public get tokenLength(): number {
		return this._stream.pos() - this._tokenOffset;
	}

	// Scan an element name (lower-cased); '' when none is present.
	private nextElementName(): string {
		return this._stream.advanceIfRegExp(/^[_:\w][_:\w-.\d]*/).toLowerCase();
	}

	// Scan an attribute name (lower-cased); '' when none is present.
	private nextAttributeName(): string {
		return this._stream.advanceIfRegExp(/^[^\s"'>/=\x00-\x0F\x7F\x80-\x9F]*/).toLowerCase();
	}

	// Record type/offset for the token ending at the current stream position.
	private finishToken(offset: number, type: TokenType): IToken {
		this._tokenType = type;
		this._tokenOffset = offset;
		return {
			offset: offset,
			len: this._stream.pos() - offset,
			type: type
		};
	}

	public scan(): IToken {
		let offset = this._stream.pos();
		if (this._stream.eos()) {
			return this.finishToken(offset, TokenType.EOS);
		}

		switch (this._state) {
			case ScannerState.WithinComment:
				if (this._stream.advanceIfChars([_MIN, _MIN, _RAN])) { // -->
					this._state = ScannerState.Content;
					return this.finishToken(offset, TokenType.CommentEnd);
				}
				this._stream.advanceUntilChars([_MIN, _MIN, _RAN]); // -->
				return this.finishToken(offset, TokenType.Comment);
			case ScannerState.WithinDoctype:
				if (this._stream.advanceIfChar(_RAN)) {
					this._state = ScannerState.Content;
					return this.finishToken(offset, TokenType.DoctypeEnd);
				}
				this._stream.advanceUntilChar(_RAN); // >
				return this.finishToken(offset, TokenType.Doctype);
			case ScannerState.Content:
				if (this._stream.advanceIfChar(_LAN)) { // <
					if (!this._stream.eos() && this._stream.peekChar() === _BNG) { // !
						if (this._stream.advanceIfChars([_BNG, _MIN, _MIN])) { // <!--
							this._state = ScannerState.WithinComment;
							return this.finishToken(offset, TokenType.CommentStart);
						}
						if (this._stream.advanceIfRegExp(/^!doctype/i)) {
							this._state = ScannerState.WithinDoctype;
							return this.finishToken(offset, TokenType.DoctypeStart);
						}
					}
					if (this._stream.advanceIfChar(_FSL)) { // /
						this._state = ScannerState.OpeningEndTag;
						return this.finishToken(offset, TokenType.TagEnd);
					}
					this._state = ScannerState.OpeningStartTag;
					return this.finishToken(offset, TokenType.TagStart);
				}
				this._stream.advanceUntilChar(_LAN);
				return this.finishToken(offset, TokenType.Content);
			case ScannerState.OpeningEndTag:
				let tagName = this.nextElementName();
				if (tagName.length > 0) {
					return this.finishToken(offset, TokenType.Tag);
				} else if (this._stream.advanceIfChar(_RAN)) { // >
					this._state = ScannerState.Content;
					return this.finishToken(offset, TokenType.TagEnd);
				}
				// broken end tag: consume up to '>' as whitespace-ish filler
				this._stream.advanceUntilChar(_RAN);
				return this.finishToken(offset, TokenType.Whitespace);
			case ScannerState.OpeningStartTag:
				this._lastTag = this.nextElementName();
				if (this._lastTag.length > 0) {
					this._hasSpaceAfterTag = false;
					this._state = ScannerState.WithinTag;
					return this.finishToken(offset, TokenType.Tag);
				}
				break; // no tag name: fall through to the Unknown handling below
			case ScannerState.WithinTag:
				if (this._stream.skipWhitespace()) {
					this._hasSpaceAfterTag = true; // remember that we have seen a whitespace
					return this.finishToken(offset, TokenType.Whitespace);
				}
				if (this._hasSpaceAfterTag) {
					let name = this.nextAttributeName();
					if (name.length > 0) {
						this._state = ScannerState.AttributeName;
						this._hasSpaceAfterTag = false;
						return this.finishToken(offset, TokenType.AttributeName);
					}
				}
				if (this._stream.advanceIfChars([_FSL, _RAN])) { // />
					this._state = ScannerState.Content;
					return this.finishToken(offset, TokenType.TagStart);
				}
				if (this._stream.advanceIfChar(_RAN)) { // >
					// script/style elements switch to raw-content scanning
					if (this._lastTag === 'script') {
						this._state = ScannerState.WithinScriptContent;
					} else if (this._lastTag === 'style') {
						this._state = ScannerState.WithinStyleContent;
					} else {
						this._state = ScannerState.Content;
					}
					return this.finishToken(offset, TokenType.TagStart);
				}
				this._stream.advance(1);
				return this.finishToken(offset, TokenType.Unknown);
			case ScannerState.AttributeName:
				if (this._stream.skipWhitespace()) {
					this._hasSpaceAfterTag = true;
					return this.finishToken(offset, TokenType.Whitespace);
				}
				if (this._stream.advanceIfChar(_EQS)) {
					this._state = ScannerState.AttributeValue;
					return this.finishToken(offset, TokenType.DelimiterAssign);
				}
				this._state = ScannerState.WithinTag;
				return this.scan(); // no advance yet - jump to WithinTag
			case ScannerState.AttributeValue:
				if (this._stream.skipWhitespace()) {
					return this.finishToken(offset, TokenType.Whitespace);
				}
				// unquoted value
				let attributeValue = this._stream.advanceIfRegExp(/^[^\s"'`=<>]+/);
				if (attributeValue.length > 0) {
					this._state = ScannerState.WithinTag;
					this._hasSpaceAfterTag = false;
					return this.finishToken(offset, TokenType.AttributeValue);
				}
				// quoted value (single or double)
				let ch = this._stream.peekChar();
				if (ch === _SQO || ch === _DQO) {
					this._stream.advance(1); // consume quote
					if (this._stream.advanceUntilChar(ch)) {
						this._stream.advance(1); // consume quote
					}
					this._state = ScannerState.WithinTag;
					this._hasSpaceAfterTag = false;
					return this.finishToken(offset, TokenType.AttributeValue);
				}
				this._state = ScannerState.WithinTag;
				this._hasSpaceAfterTag = false;
				return this.scan(); // no advance yet - jump to WithinTag
			case ScannerState.WithinScriptContent:
				// see http://stackoverflow.com/questions/14574471/how-do-browsers-parse-a-script-tag-exactly
				// state 1: plain script data, 2: inside <!--, 3: inside a nested
				// <script> within the comment (its </script> does not end the element)
				let state = 1;
				while (!this._stream.eos()) {
					let match = this._stream.advanceIfRegExp(/<!--|-->|<\/?script\s*\/?>?/i);
					if (match.length === 0) {
						this._stream.goToEnd();
						return this.finishToken(offset, TokenType.Script);
					} else if (match === '<!--') {
						if (state === 1) {
							state = 2;
						}
					} else if (match === '-->') {
						state = 1;
					} else if (match[1] !== '/') { // <script
						if (state === 2) {
							state = 3;
						}
					} else { // </script
						if (state === 3) {
							state = 2;
						} else {
							this._stream.goBack(match.length); // to the beginning of the closing tag
							break;
						}
					}
				}
				this._state = ScannerState.Content;
				if (offset < this._stream.pos()) {
					return this.finishToken(offset, TokenType.Script);
				}
				return this.scan(); // no advance yet - jump to content
			case ScannerState.WithinStyleContent:
				// Fixed: this case was mistakenly labelled WithinScriptContent,
				// duplicating the case above — TypeScript never reaches a
				// duplicated case clause, so style content was unreachable.
				this._stream.advanceUntilRegExp(/<\/style/i);
				this._state = ScannerState.Content;
				if (offset < this._stream.pos()) {
					return this.finishToken(offset, TokenType.Styles);
				}
				return this.scan(); // no advance yet - jump to content
		}

		// Unexpected character: consume it and resynchronize on content.
		this._stream.advance(1);
		this._state = ScannerState.Content;
		return this.finishToken(offset, TokenType.Unknown);
	}
}

View File

@@ -0,0 +1,696 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as assert from 'assert';
import {Scanner, TokenType, ScannerState} from '../parser/htmlScanner';
suite('HTML Scanner', () => {
// Expected-token shape: offset/type always compared; content only for Tag tokens.
interface Token {
	offset: number;
	type: TokenType;
	content?: string;
}

/**
 * Runs the scanner over each input in sequence, carrying the scanner state
 * from one input to the next (so the embedded-content tests can split one
 * document across several inputs), and compares the produced tokens with
 * the expected ones.
 */
function assertTokens(tests: { input: string; tokens: Token[]; }[]) {
	let scanner = new Scanner();
	let scannerState = ScannerState.Content;
	for (let t of tests) {
		scanner.setSource(t.input, scannerState);
		let token = scanner.scan();
		let actual: Token[] = [];
		while (token.type !== TokenType.EOS) {
			let actualToken: Token = { offset: token.offset, type: token.type };
			if (token.type === TokenType.Tag) { // fixed: strict equality instead of '=='
				actualToken.content = t.input.substr(token.offset, token.len);
			}
			actual.push(actualToken);
			token = scanner.scan();
		}
		assert.deepEqual(actual, t.tokens);
		// resume the next input in the state the scanner ended in
		scannerState = scanner.scannerState;
	}
}
test('Open Start Tag #1', () => {
assertTokens([{
input: '<abc',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' }
]}
]);
});
test('Open Start Tag #2', () => {
assertTokens([{
input: '<input',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'input' }
]}
]);
});
test('Open Start Tag with Invalid Tag', () => {
assertTokens([{
input: '< abc',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Unknown },
{ offset:2, type: TokenType.Content }
]}
]);
});
test('Open Start Tag #3', () => {
assertTokens([{
input: '< abc>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Unknown },
{ offset:2, type: TokenType.Content }
]}
]);
});
test('Open Start Tag #4', () => {
assertTokens([{
input: 'i <len;',
tokens: [
{ offset:0, type: TokenType.Content },
{ offset:2, type: TokenType.TagStart },
{ offset:3, type: TokenType.Tag, content: 'len' },
{ offset:6, type: TokenType.Unknown }
]}
]);
});
test('Open Start Tag #5', () => {
assertTokens([{
input: '<',
tokens: [
{ offset:0, type: TokenType.TagStart }
]}
]);
});
test('Open End Tag', () => {
assertTokens([{
input: '</a',
tokens: [
{ offset:0, type: TokenType.TagEnd },
{ offset:2, type: TokenType.Tag, content: 'a' }
]}
]);
});
test('Complete Start Tag', () => {
assertTokens([{
input: '<abc>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.TagStart }
]}
]);
});
test('Complete Start Tag with Whitespace', () => {
assertTokens([{
input: '<abc >',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.TagStart }
]}
]);
});
test('bug 9809 - Complete Start Tag with Namespaceprefix', () => {
assertTokens([{
input: '<foo:bar>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'foo:bar' },
{ offset:8, type: TokenType.TagStart }
]}
]);
});
test('Complete End Tag', () => {
assertTokens([{
input: '</abc>',
tokens: [
{ offset:0, type: TokenType.TagEnd },
{ offset:2, type: TokenType.Tag, content: 'abc' },
{ offset:5, type: TokenType.TagEnd }
]}
]);
});
test('Complete End Tag with Whitespace', () => {
assertTokens([{
input: '</abc >',
tokens: [
{ offset:0, type: TokenType.TagEnd },
{ offset:2, type: TokenType.Tag, content: 'abc' },
{ offset:5, type: TokenType.Whitespace },
{ offset:7, type: TokenType.TagEnd }
]}
]);
});
test('Empty Tag', () => {
assertTokens([{
input: '<abc />',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.TagStart }
]}
]);
});
test('Embedded Content #1', () => {
assertTokens([{
input: '<script type="text/javascript">var i= 10;</script>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.Whitespace },
{ offset:8, type: TokenType.AttributeName },
{ offset:12, type: TokenType.DelimiterAssign },
{ offset:13, type: TokenType.AttributeValue },
{ offset:30, type: TokenType.TagStart },
{ offset:31, type: TokenType.Script },
{ offset:41, type: TokenType.TagEnd },
{ offset:43, type: TokenType.Tag, content: 'script' },
{ offset:49, type: TokenType.TagEnd }
]}
]);
});
test('Embedded Content #2', () => {
assertTokens([{
input: '<script type="text/javascript">',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.Whitespace },
{ offset:8, type: TokenType.AttributeName },
{ offset:12, type: TokenType.DelimiterAssign },
{ offset:13, type: TokenType.AttributeValue },
{ offset:30, type: TokenType.TagStart }
]}, {
input: 'var i= 10;',
tokens: [
{ offset:0, type: TokenType.Script }
]}, {
input: '</script>',
tokens: [
{ offset:0, type: TokenType.TagEnd },
{ offset:2, type: TokenType.Tag, content: 'script' },
{ offset:8, type: TokenType.TagEnd }
]}
]);
});
test('Embedded Content #3', () => {
assertTokens([{
input: '<script type="text/javascript">var i= 10;',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.Whitespace },
{ offset:8, type: TokenType.AttributeName },
{ offset:12, type: TokenType.DelimiterAssign },
{ offset:13, type: TokenType.AttributeValue },
{ offset:30, type: TokenType.TagStart },
{ offset:31, type: TokenType.Script }
]}, {
input: '</script>',
tokens: [
{ offset:0, type: TokenType.TagEnd },
{ offset:2, type: TokenType.Tag, content: 'script' },
{ offset:8, type: TokenType.TagEnd }
]}
]);
});
test('Embedded Content #4', () => {
assertTokens([{
input: '<script type="text/javascript">',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.Whitespace },
{ offset:8, type: TokenType.AttributeName },
{ offset:12, type: TokenType.DelimiterAssign },
{ offset:13, type: TokenType.AttributeValue },
{ offset:30, type: TokenType.TagStart }
]}, {
input: 'var i= 10;</script>',
tokens: [
{ offset:0, type: TokenType.Script },
{ offset:10, type: TokenType.TagEnd },
{ offset:12, type: TokenType.Tag, content: 'script' },
{ offset:18, type: TokenType.TagEnd }
]}
]);
});
test('Embedded Content #5', () => {
assertTokens([{
input: '<script type="text/plain">a\n<a</script>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.Whitespace },
{ offset:8, type: TokenType.AttributeName },
{ offset:12, type: TokenType.DelimiterAssign },
{ offset:13, type: TokenType.AttributeValue },
{ offset:25, type: TokenType.TagStart },
{ offset:26, type: TokenType.Script },
{ offset:30, type: TokenType.TagEnd },
{ offset:32, type: TokenType.Tag, content: 'script' },
{ offset:38, type: TokenType.TagEnd }
]}
]);
});
test('Embedded Content #6', () => {
assertTokens([{
input: '<script>a</script><script>b</script>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.TagStart },
{ offset:8, type: TokenType.Script },
{ offset:9, type: TokenType.TagEnd },
{ offset:11, type: TokenType.Tag, content: 'script' },
{ offset:17, type: TokenType.TagEnd },
{ offset:18, type: TokenType.TagStart },
{ offset:19, type: TokenType.Tag, content: 'script' },
{ offset:25, type: TokenType.TagStart },
{ offset:26, type: TokenType.Script },
{ offset:27, type: TokenType.TagEnd },
{ offset:29, type: TokenType.Tag, content: 'script' },
{ offset:35, type: TokenType.TagEnd }
]}
]);
});
test('Embedded Content #7', () => {
assertTokens([{
input: '<script type="text/javascript"></script>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.Whitespace },
{ offset:8, type: TokenType.AttributeName },
{ offset:12, type: TokenType.DelimiterAssign },
{ offset:13, type: TokenType.AttributeValue },
{ offset:30, type: TokenType.TagStart },
{ offset:31, type: TokenType.TagEnd },
{ offset:33, type: TokenType.Tag, content: 'script' },
{ offset:39, type: TokenType.TagEnd }
]}
]);
});
test('Embedded Content #8', () => {
assertTokens([{
input: '<script>var i= 10;</script>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.TagStart },
{ offset:8, type: TokenType.Script },
{ offset:18, type: TokenType.TagEnd },
{ offset:20, type: TokenType.Tag, content: 'script' },
{ offset:26, type: TokenType.TagEnd }
]}
]);
});
test('Embedded Content #9', () => {
assertTokens([{
input: '<script type="text/javascript" src="main.js"></script>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.Whitespace },
{ offset:8, type: TokenType.AttributeName },
{ offset:12, type: TokenType.DelimiterAssign },
{ offset:13, type: TokenType.AttributeValue },
{ offset:30, type: TokenType.Whitespace },
{ offset:31, type: TokenType.AttributeName },
{ offset:34, type: TokenType.DelimiterAssign },
{ offset:35, type: TokenType.AttributeValue },
{ offset:44, type: TokenType.TagStart },
{ offset:45, type: TokenType.TagEnd },
{ offset:47, type: TokenType.Tag, content: 'script' },
{ offset:53, type: TokenType.TagEnd }
]}
]);
});
test('Embedded Content #10', () => {
assertTokens([{
input: '<script><!-- alert("<script></script>"); --></script>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.TagStart },
{ offset:8, type: TokenType.Script },
{ offset:44, type: TokenType.TagEnd },
{ offset:46, type: TokenType.Tag, content: 'script' },
{ offset:52, type: TokenType.TagEnd }
]}
]);
});
test('Embedded Content #11', () => {
assertTokens([{
input: '<script><!-- alert("<script></script>"); </script>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.TagStart },
{ offset:8, type: TokenType.Script },
{ offset:41, type: TokenType.TagEnd },
{ offset:43, type: TokenType.Tag, content: 'script' },
{ offset:49, type: TokenType.TagEnd }
]}
]);
});
test('Embedded Content #12', () => {
assertTokens([{
input: '<script><!-- alert("</script>"); </script>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.TagStart },
{ offset:8, type: TokenType.Script },
{ offset:20, type: TokenType.TagEnd },
{ offset:22, type: TokenType.Tag, content: 'script' },
{ offset:28, type: TokenType.TagEnd },
{ offset:29, type: TokenType.Content },
{ offset:33, type: TokenType.TagEnd },
{ offset:35, type: TokenType.Tag, content: 'script' },
{ offset:41, type: TokenType.TagEnd }
]}
]);
});
test('Embedded Content #13', () => {
assertTokens([{
input: '<script> alert("<script></script>"); </script>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.TagStart },
{ offset:8, type: TokenType.Script },
{ offset:24, type: TokenType.TagEnd },
{ offset:26, type: TokenType.Tag, content: 'script' },
{ offset:32, type: TokenType.TagEnd },
{ offset:33, type: TokenType.Content },
{ offset:37, type: TokenType.TagEnd },
{ offset:39, type: TokenType.Tag, content: 'script' },
{ offset:45, type: TokenType.TagEnd }
]}
]);
});
test('Tag with Attribute', () => {
assertTokens([{
input: '<abc foo="bar">',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.AttributeName },
{ offset:8, type: TokenType.DelimiterAssign },
{ offset:9, type: TokenType.AttributeValue },
{ offset:14, type: TokenType.TagStart }
]}
]);
});
test('Tag with Empty Attribute Value', () => {
assertTokens([{
input: '<abc foo=\'bar\'>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.AttributeName },
{ offset:8, type: TokenType.DelimiterAssign },
{ offset:9, type: TokenType.AttributeValue },
{ offset:14, type: TokenType.TagStart }
]}
]);
});
test('Tag with empty attributes', () => {
assertTokens([{
input: '<abc foo="">',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.AttributeName },
{ offset:8, type: TokenType.DelimiterAssign },
{ offset:9, type: TokenType.AttributeValue },
{ offset:11, type: TokenType.TagStart }
]}
]);
});
test('Tag with Attributes', () => {
assertTokens([{
input: '<abc foo="bar" bar=\'foo\'>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.AttributeName },
{ offset:8, type: TokenType.DelimiterAssign },
{ offset:9, type: TokenType.AttributeValue },
{ offset:14, type: TokenType.Whitespace },
{ offset:15, type: TokenType.AttributeName },
{ offset:18, type: TokenType.DelimiterAssign },
{ offset:19, type: TokenType.AttributeValue },
{ offset:24, type: TokenType.TagStart }
]}
]);
});
test('Tag with Attributes, no quotes', () => {
assertTokens([{
input: '<abc foo=bar bar=help-me>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.AttributeName },
{ offset:8, type: TokenType.DelimiterAssign },
{ offset:9, type: TokenType.AttributeValue },
{ offset:12, type: TokenType.Whitespace },
{ offset:13, type: TokenType.AttributeName },
{ offset:16, type: TokenType.DelimiterAssign },
{ offset:17, type: TokenType.AttributeValue },
{ offset:24, type: TokenType.TagStart }
]}
]);
});
// '<abc foo=  "bar">' — TWO spaces between '=' and the value: the Whitespace
// token at 9 spans 9-10, AttributeValue "bar" spans 11-15, '>' closes at 16.
// FIX(review): the source as rendered had a single space after '=', which
// contradicts the asserted offsets (value at 11, '>' at 16 require an input
// of length 17). The sibling test '#2' below validates the offset convention
// with single spaces, so the input here must contain two spaces.
test('Tag with Attribute And Whitespace', () => {
assertTokens([{
input: '<abc foo=  "bar">',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.AttributeName },
{ offset:8, type: TokenType.DelimiterAssign },
{ offset:9, type: TokenType.Whitespace },
{ offset:11, type: TokenType.AttributeValue },
{ offset:16, type: TokenType.TagStart }
]}
]);
});
// '<abc foo = "bar">': whitespace on both sides of '=' is tokenized
// separately — ws at 8, '=' at 9, ws at 10, "bar" at 11-15, '>' at 16.
test('Tag with Attribute And Whitespace #2', () => {
assertTokens([{
input: '<abc foo = "bar">',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.AttributeName },
{ offset:8, type: TokenType.Whitespace },
{ offset:9, type: TokenType.DelimiterAssign },
{ offset:10, type: TokenType.Whitespace },
{ offset:11, type: TokenType.AttributeValue },
{ offset:16, type: TokenType.TagStart }
]}
]);
});
// Boolean (value-less) attribute: name at 5-7, then the tag closes at 8 —
// no DelimiterAssign or AttributeValue tokens are emitted.
test('Tag with Name-Only-Attribute #1', () => {
assertTokens([{
input: '<abc foo>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.AttributeName },
{ offset:8, type: TokenType.TagStart }
]}
]);
});
// Two consecutive value-less attributes separated by whitespace.
test('Tag with Name-Only-Attribute #2', () => {
assertTokens([{
input: '<abc foo bar>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.AttributeName },
{ offset:8, type: TokenType.Whitespace },
{ offset:9, type: TokenType.AttributeName },
{ offset:12, type: TokenType.TagStart }
]}
]);
});
// Punctuation is accepted inside an attribute name: 'foo!@#' is one
// AttributeName spanning 5-10, followed by '=' at 11.
test('Tag with Interesting Attribute Name', () => {
assertTokens([{
input: '<abc foo!@#="bar">',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.AttributeName },
{ offset:11, type: TokenType.DelimiterAssign },
{ offset:12, type: TokenType.AttributeValue },
{ offset:17, type: TokenType.TagStart }
]}
]);
});
// Angular-style binding syntax: '#myinput' (5-12), '(click)' (14-20),
// '[value]' (28-34) and '*ngIf' (51-55) each tokenize as a single
// AttributeName — the sigils and brackets are part of the name.
test('Tag with Angular Attribute Name', () => {
assertTokens([{
input: '<abc #myinput (click)="bar" [value]="someProperty" *ngIf="someCondition">',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.AttributeName },
{ offset:13, type: TokenType.Whitespace },
{ offset:14, type: TokenType.AttributeName },
{ offset:21, type: TokenType.DelimiterAssign },
{ offset:22, type: TokenType.AttributeValue },
{ offset:27, type: TokenType.Whitespace },
{ offset:28, type: TokenType.AttributeName },
{ offset:35, type: TokenType.DelimiterAssign },
{ offset:36, type: TokenType.AttributeValue },
{ offset:50, type: TokenType.Whitespace },
{ offset:51, type: TokenType.AttributeName },
{ offset:56, type: TokenType.DelimiterAssign },
{ offset:57, type: TokenType.AttributeValue },
{ offset:72, type: TokenType.TagStart }
]}
]);
});
// Unterminated quoted value: the opening '"' at 9 starts an AttributeValue
// that swallows the rest of the input (including '>'), so no closing tag
// token is expected in the stream.
test('Tag with Invalid Attribute Value', () => {
assertTokens([{
input: '<abc foo=">',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.AttributeName },
{ offset:8, type: TokenType.DelimiterAssign },
{ offset:9, type: TokenType.AttributeValue }
]}
]);
});
// Comments: CommentStart covers '<!--' (0-3), Comment is the raw content
// starting at 4, CommentEnd marks the offset of '-->'.
test('Simple Comment 1', () => {
assertTokens([{
input: '<!--a-->',
tokens: [
{ offset:0, type: TokenType.CommentStart },
{ offset:4, type: TokenType.Comment },
{ offset:5, type: TokenType.CommentEnd }
]}
]);
});
// '>' and '</a' inside a comment are plain content — the Comment token runs
// until the '-->' at offset 17.
test('Simple Comment 2', () => {
assertTokens([{
input: '<!--a>foo bar</a -->',
tokens: [
{ offset:0, type: TokenType.CommentStart },
{ offset:4, type: TokenType.Comment },
{ offset:17, type: TokenType.CommentEnd }
]}
]);
});
// Newlines are included in the single Comment token; '-->' lands at 19.
test('Multiline Comment', () => {
assertTokens([{
input: '<!--a>\nfoo \nbar</a -->',
tokens: [
{ offset:0, type: TokenType.CommentStart },
{ offset:4, type: TokenType.Comment },
{ offset:19, type: TokenType.CommentEnd }
]}
]);
});
// Doctypes: DoctypeStart covers the 9-char '<!doctype' prefix (0-8) in any
// letter case; Doctype is the content from 9 up to the closing '>'.
// (There is no '#3' in this revision of the suite.)
test('Simple Doctype', () => {
assertTokens([{
input: '<!Doctype a>',
tokens: [
{ offset:0, type: TokenType.DoctypeStart },
{ offset:9, type: TokenType.Doctype },
{ offset:11, type: TokenType.DoctypeEnd }
]}
]);
});
// Lower-case variant — same token offsets as above.
test('Simple Doctype #2', () => {
assertTokens([{
input: '<!doctype a>',
tokens: [
{ offset:0, type: TokenType.DoctypeStart },
{ offset:9, type: TokenType.Doctype },
{ offset:11, type: TokenType.DoctypeEnd }
]}
]);
});
// Doctype content may span newlines and quoted strings; the single Doctype
// token runs from 9 until '>' at 23.
test('Simple Doctype #4', () => {
assertTokens([{
input: '<!DOCTYPE a\n"foo" \'bar\'>',
tokens: [
{ offset:0, type: TokenType.DoctypeStart },
{ offset:9, type: TokenType.Doctype },
{ offset:23, type: TokenType.DoctypeEnd }
]}
]);
});
});

View File

@@ -0,0 +1,112 @@
/*! *****************************************************************************
Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
this file except in compliance with the License. You may obtain a copy of the
License at http://www.apache.org/licenses/LICENSE-2.0
THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
MERCHANTABLITY OR NON-INFRINGEMENT.
See the Apache Version 2.0 License for specific language governing permissions
and limitations under the License.
***************************************************************************** */
/**
* The Thenable (e.g. PromiseLike) and Promise declarations are taken from TypeScript's
* lib.core.es6.d.ts file. See above Copyright notice.
*/
/**
* Thenable is a common denominator between ES6 promises, Q, jquery.Deferred, WinJS.Promise,
* and others. This API makes no assumption about what promise library is being used, which
* enables reusing existing code without migrating to a specific promise implementation. Still,
* we recommend the use of native promises, which are available in VS Code.
*/
// Minimal then-able contract compatible with ES6 promises and other
// promise libraries (structural: anything with a conforming 'then').
interface Thenable<R> {
/**
 * Attaches callbacks for the resolution and/or rejection of the Promise.
 * @param onfulfilled The callback to execute when the Promise is resolved.
 * @param onrejected The callback to execute when the Promise is rejected.
 * @returns A Promise for the completion of whichever callback is executed.
 */
then<TResult>(onfulfilled?: (value: R) => TResult | Thenable<TResult>, onrejected?: (reason: any) => TResult | Thenable<TResult>): Thenable<TResult>;
// Overload for rejection handlers that do not produce a recovery value.
then<TResult>(onfulfilled?: (value: R) => TResult | Thenable<TResult>, onrejected?: (reason: any) => void): Thenable<TResult>;
}
/**
* Represents the completion of an asynchronous operation
*/
interface Promise<T> extends Thenable<T> {
/**
 * Attaches callbacks for the resolution and/or rejection of the Promise.
 * @param onfulfilled The callback to execute when the Promise is resolved.
 * @param onrejected The callback to execute when the Promise is rejected.
 * @returns A Promise for the completion of whichever callback is executed.
 */
then<TResult>(onfulfilled?: (value: T) => TResult | Thenable<TResult>, onrejected?: (reason: any) => TResult | Thenable<TResult>): Promise<TResult>;
// Overload for rejection handlers that do not produce a recovery value.
then<TResult>(onfulfilled?: (value: T) => TResult | Thenable<TResult>, onrejected?: (reason: any) => void): Promise<TResult>;
/**
 * Attaches a callback for only the rejection of the Promise.
 * @param onrejected The callback to execute when the Promise is rejected.
 * @returns A Promise for the completion of the callback.
 */
catch(onrejected?: (reason: any) => T | Thenable<T>): Promise<T>;
}
// Shape of the global Promise constructor object (static side of Promise).
interface PromiseConstructor {
/**
 * Creates a new Promise.
 * @param executor A callback used to initialize the promise. This callback is passed two arguments:
 * a resolve callback used to resolve the promise with a value or the result of another promise,
 * and a reject callback used to reject the promise with a provided reason or error.
 */
new <T>(executor: (resolve: (value?: T | Thenable<T>) => void, reject: (reason?: any) => void) => void): Promise<T>;
/**
 * Creates a Promise that is resolved with an array of results when all of the provided Promises
 * resolve, or rejected when any Promise is rejected.
 * @param values An array of Promises.
 * @returns A new Promise.
 */
all<T>(values: Array<T | Thenable<T>>): Promise<T[]>;
/**
 * Creates a Promise that is resolved or rejected when any of the provided Promises are resolved
 * or rejected.
 * @param values An array of Promises.
 * @returns A new Promise.
 */
race<T>(values: Array<T | Thenable<T>>): Promise<T>;
/**
 * Creates a new rejected promise for the provided reason.
 * @param reason The reason the promise was rejected.
 * @returns A new rejected Promise.
 */
reject(reason: any): Promise<void>;
/**
 * Creates a new rejected promise for the provided reason, typed for a
 * specific result type T (generic overload of the method above).
 * @param reason The reason the promise was rejected.
 * @returns A new rejected Promise.
 */
reject<T>(reason: any): Promise<T>;
/**
 * Creates a new resolved promise for the provided value.
 * @param value A promise.
 * @returns A promise whose internal state matches the provided promise.
 */
resolve<T>(value: T | Thenable<T>): Promise<T>;
/**
 * Creates a new resolved promise with no value.
 * @returns A resolved promise.
 */
resolve(): Promise<void>;
}
// Ambient global binding for the Promise constructor declared above.
declare var Promise: PromiseConstructor;

View File

@@ -0,0 +1,8 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
/// <reference path='../../../../../src/typings/mocha.d.ts'/>
/// <reference path='../../../../../extensions/node.d.ts'/>
/// <reference path='../../../../../extensions/lib.core.d.ts'/>
/// <reference path='../../../../../extensions/declares.d.ts'/>

View File

@@ -0,0 +1,37 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
/**
 * Determines if haystack starts with needle.
 * An empty needle is a prefix of every string.
 */
export function startsWith(haystack: string, needle: string): boolean {
	// A needle longer than the haystack can never be a prefix; otherwise
	// compare the leading slice of the haystack directly against the needle.
	return needle.length <= haystack.length
		&& haystack.substring(0, needle.length) === needle;
}
/**
 * Determines if haystack ends with needle.
 * An empty needle is a suffix of every string.
 */
export function endsWith(haystack: string, needle: string): boolean {
	// The only position at which a suffix can start.
	const offset = haystack.length - needle.length;
	if (offset < 0) {
		// Needle is longer than the haystack — cannot be a suffix.
		return false;
	}
	// True exactly when the needle occurs at that final position.
	return haystack.indexOf(needle, offset) === offset;
}
/**
 * Converts a simple pattern where '*' is the only wildcard into an
 * equivalent regular-expression source string: regex metacharacters and
 * whitespace are backslash-escaped, then each '*' becomes '.*'.
 */
export function convertSimple2RegExpPattern(pattern: string): string {
	// '$&' re-inserts the matched character after the escaping backslash.
	const escaped = pattern.replace(/[\-\\\{\}\+\?\|\^\$\.\,\[\]\(\)\#\s]/g, '\\$&');
	return escaped.replace(/\*/g, '.*');
}