Move html extension: setup & scanner

This commit is contained in:
Martin Aeschlimann
2016-08-17 11:37:22 +02:00
parent 2ed511ea28
commit cb5a6e77db
25 changed files with 2428 additions and 862 deletions

View File

@@ -0,0 +1,43 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import { TextDocument, Position, CompletionItem, CompletionList, Hover, Range, SymbolInformation, Diagnostic, TextEdit, FormattingOptions, MarkedString } from 'vscode-languageserver-types';
export { TextDocument, Position, CompletionItem, CompletionList, Hover, Range, SymbolInformation, Diagnostic, TextEdit, FormattingOptions, MarkedString };
/**
 * Options controlling the HTML formatter (consumed by LanguageService.format).
 * NOTE(review): field semantics are inferred from the names only — the
 * formatter implementation is not part of this file; confirm against it.
 */
export interface HTMLFormatConfiguration {
    wrapLineLength: number;       // maximum line length before wrapping — presumably 0 disables; verify
    unformatted: string;          // presumably a comma-separated list of tags to leave as-is; verify
    indentInnerHtml: boolean;     // indent the direct children of <html>
    preserveNewLines: boolean;    // keep existing blank lines
    maxPreserveNewLines: number;  // cap on consecutive preserved blank lines
    indentHandlebars: boolean;    // treat {{#...}}/{{/...}} as indent-affecting blocks
    endWithNewline: boolean;      // ensure a trailing newline
    extraLiners: string;          // presumably tags that get a blank line before them; verify
}
/** Settings accepted by LanguageService.configure. */
export interface LanguageSettings {
    validate: boolean;               // enable/disable doValidation
    format: HTMLFormatConfiguration; // options used by format()
}
// Opaque handle to a parsed HTML document: produced by parseHTMLDocument and
// passed back into the language-feature methods. The concrete shape is kept
// private to the implementation, hence the empty object type.
export declare type HTMLDocument = {};
/**
 * The HTML language service: parsing plus the language features (validation,
 * completion, document symbols, hover and formatting) that operate on the
 * parsed HTMLDocument handle.
 */
export interface LanguageService {
    /** Applies new settings; affects subsequent calls. */
    configure(settings: LanguageSettings): void;
    doValidation(document: TextDocument, htmlDocument: HTMLDocument): Diagnostic[];
    /** Parses the document into the opaque handle consumed by the other methods. */
    parseHTMLDocument(document: TextDocument): HTMLDocument;
    /** Resolves additional detail for a completion item returned by doComplete. */
    doResolve(item: CompletionItem): CompletionItem;
    doComplete(document: TextDocument, position: Position, doc: HTMLDocument): CompletionList;
    findDocumentSymbols(document: TextDocument, doc: HTMLDocument): SymbolInformation[];
    doHover(document: TextDocument, position: Position, doc: HTMLDocument): Hover;
    format(document: TextDocument, range: Range, options: FormattingOptions): TextEdit[];
}
/**
 * Factory for the HTML language service.
 * NOTE(review): stub — currently returns null; presumably the real
 * implementation is wired up in a later change. Callers must not assume a
 * non-null result yet.
 */
export function getLanguageService() : LanguageService {
    return null;
}

View File

@@ -0,0 +1,408 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
/**
 * Token kinds produced by the HTML Scanner.
 * The numeric values are positional (standard TS enum) — do not reorder.
 */
export enum TokenType {
    CommentStart,     // "<!--"
    Comment,          // comment text between the delimiters
    CommentEnd,       // "-->"
    TagStart,         // "<" (the scanner also emits this for ">" and "/>" that close an open tag)
    Tag,              // tag name
    TagEnd,           // "</" or the ">" of an end tag
    DelimiterAssign,  // "=" between attribute name and value
    AttributeName,
    AttributeValue,
    DoctypeStart,     // "<!doctype" (case-insensitive)
    Doctype,          // doctype body
    DoctypeEnd,       // ">"
    Content,          // text content between tags
    Whitespace,
    Unknown,          // unexpected character
    Script,           // raw <script> content
    Styles,           // raw <style> content
    EOS               // end of stream
}
/**
 * A single token produced by the scanner: its kind, start offset into the
 * source, and length in characters.
 */
export interface IToken {
    type: TokenType;
    offset: number;
    len: number;
}
/**
 * A forward-only cursor over a source string with helpers to advance over
 * single characters, character sequences and regular expressions. All
 * positions are absolute offsets into the source.
 */
class MultiLineStream {

    private source: string;
    private len: number;
    private position: number;

    constructor(source: string) {
        this.source = source;
        this.len = source.length;
        this.position = 0;
    }

    /** True when the cursor is at (or past) the end of the source. */
    public eos(): boolean {
        return this.len <= this.position;
    }

    /** Current absolute offset. */
    public pos(): number {
        return this.position;
    }

    public goBackTo(pos: number): void {
        this.position = pos;
    }

    public goBack(n: number): void {
        this.position -= n;
    }

    public advance(n: number): void {
        this.position += n;
    }

    public goToEnd(): void {
        this.position = this.source.length;
    }

    /** Returns the current character code and advances; 0 at end of stream. */
    public nextChar(): number {
        return this.source.charCodeAt(this.position++) || 0;
    }

    /** Peeks the character code n positions ahead without advancing; 0 past the end. */
    public peekChar(n: number = 0): number {
        return this.source.charCodeAt(this.position + n) || 0;
    }

    /** Advances over ch if it is the current character. */
    public advanceIfChar(ch: number): boolean {
        if (ch === this.source.charCodeAt(this.position)) {
            this.position++;
            return true;
        }
        return false;
    }

    /** Advances over the whole sequence ch if it starts at the current position. */
    public advanceIfChars(ch: number[]): boolean {
        let i: number;
        if (this.position + ch.length > this.source.length) {
            return false;
        }
        for (i = 0; i < ch.length; i++) {
            if (this.source.charCodeAt(this.position + i) !== ch[i]) {
                return false;
            }
        }
        this.advance(i);
        return true;
    }

    /** Advances over a match of regex anchored at the current position; returns the match ('' if none). */
    public advanceIfRegExp(regex: RegExp): string {
        let str = this.source.substr(this.position);
        let match = str.match(regex);
        if (match) {
            this.position = this.position + match.index + match[0].length;
            return match[0];
        }
        return '';
    }

    /** Advances up to (not over) the next match of regex; returns the match ('' if none). */
    public advanceUntilRegExp(regex: RegExp): string {
        let str = this.source.substr(this.position);
        let match = str.match(regex);
        if (match) {
            this.position = this.position + match.index;
            return match[0];
        }
        return '';
    }

    /** Advances up to (not over) the next occurrence of ch; false when not found. */
    public advanceUntilChar(ch: number): boolean {
        while (this.position < this.source.length) {
            if (this.source.charCodeAt(this.position) === ch) {
                return true;
            }
            this.advance(1);
        }
        return false;
    }

    /**
     * Advances up to (not over) the next occurrence of the full sequence ch.
     * When the sequence is not found, the stream is moved to the end and
     * false is returned.
     *
     * BUGFIX: the previous implementation (a) returned true as soon as the
     * FIRST character matched, without verifying the rest of the sequence
     * (so searching for "-->" stopped at any '-'), (b) used '<' instead of
     * '<=' in the loop bound, missing a match flush against the end of the
     * source, and (c) left the position short of the end on failure, which
     * made unterminated comments produce zero-length Comment tokens forever.
     */
    public advanceUntilChars(ch: number[]): boolean {
        while (this.position + ch.length <= this.source.length) {
            let i = 0;
            while (i < ch.length && this.source.charCodeAt(this.position + i) === ch[i]) {
                i++;
            }
            if (i === ch.length) {
                return true; // full sequence matched at the current position
            }
            this.advance(1);
        }
        this.goToEnd();
        return false;
    }

    /** Skips space, tab, newline, formfeed and carriage-return characters. */
    public skipWhitespace(): boolean {
        let n = this.advanceWhileChar(ch => {
            return ch === _WSP || ch === _TAB || ch === _NWL || ch === _LFD || ch === _CAR;
        });
        return n > 0;
    }

    /** Advances while condition holds; returns the number of characters consumed. */
    public advanceWhileChar(condition: (ch: number) => boolean): number {
        let posNow = this.position;
        while (this.position < this.len && condition(this.source.charCodeAt(this.position))) {
            this.position++;
        }
        return this.position - posNow;
    }
}

// Character codes used by the stream and the scanner below.
const _BNG = '!'.charCodeAt(0);
const _MIN = '-'.charCodeAt(0);
const _LAN = '<'.charCodeAt(0);
const _RAN = '>'.charCodeAt(0);
const _FSL = '/'.charCodeAt(0);
const _EQS = '='.charCodeAt(0);
const _DQO = '"'.charCodeAt(0);
const _SQO = '\''.charCodeAt(0);
const _NWL = '\n'.charCodeAt(0);
const _CAR = '\r'.charCodeAt(0);
const _LFD = '\f'.charCodeAt(0);
const _WSP = ' '.charCodeAt(0);
const _TAB = '\t'.charCodeAt(0);
/**
 * The scanner's internal states. Scanning can be resumed at any state via
 * Scanner.setSource, so these are part of the public API (see scannerState).
 */
export enum ScannerState {
    Content,             // regular text content, outside any tag
    OpeningStartTag,     // after '<'
    OpeningEndTag,       // after '</'
    WithinDoctype,       // after '<!doctype'
    WithinTag,           // after the tag name, scanning attributes
    WithinComment,       // after '<!--'
    WithinScriptContent, // raw content of a <script> element
    WithinStyleContent,  // raw content of a <style> element
    AttributeName,       // after an attribute name
    AttributeValue       // after '='
}
/**
 * A state-machine tokenizer for HTML.
 *
 * Usage: call setSource(), then scan() repeatedly; each call returns the next
 * token until TokenType.EOS. Scanning can be resumed from a previously saved
 * ScannerState (the initialState parameter of setSource), which is how a
 * document is tokenized in chunks.
 */
export class Scanner {

    private _stream: MultiLineStream;
    private _state: ScannerState;
    private _tokenType: TokenType;
    private _tokenOffset: number;
    // whether whitespace was seen since the tag name; an attribute name is
    // only scanned when this is set (e.g. '<a foo>' yes, '<afoo>' no)
    private _hasSpaceAfterTag: boolean;
    // name of the last opened tag; used to switch to the raw script/style
    // content states after its '>' is consumed
    private _lastTag: string;

    public setSource(input: string, initialState: ScannerState = ScannerState.Content): void {
        this._stream = new MultiLineStream(input);
        this._state = initialState;
    }

    public get position(): number {
        return this._stream.pos();
    }

    // Return types tightened from 'number' to the enums (backward compatible:
    // numeric enum values are numbers).
    public get scannerState(): ScannerState {
        return this._state;
    }

    public get tokenType(): TokenType {
        return this._tokenType;
    }

    public get tokenOffset(): number {
        return this._tokenOffset;
    }

    public get tokenLength(): number {
        return this._stream.pos() - this._tokenOffset;
    }

    // Scans a (lowercased) element name; '' when none at the current position.
    private nextElementName(): string {
        return this._stream.advanceIfRegExp(/^[_:\w][_:\w-.\d]*/).toLowerCase();
    }

    // Scans a (lowercased) attribute name; '' when none at the current position.
    private nextAttributeName(): string {
        return this._stream.advanceIfRegExp(/^[^\s"'>/=\x00-\x0F\x7F\x80-\x9F]*/).toLowerCase();
    }

    // Records the token's type/offset and builds the IToken ending at the
    // current stream position.
    private finishToken(offset: number, type: TokenType): IToken {
        this._tokenType = type;
        this._tokenOffset = offset;
        return {
            offset: offset,
            len: this._stream.pos() - offset,
            type: type
        };
    }

    /** Returns the next token; TokenType.EOS at the end of the input. */
    public scan(): IToken {
        let offset = this._stream.pos();
        if (this._stream.eos()) {
            return this.finishToken(offset, TokenType.EOS);
        }
        switch (this._state) {
            case ScannerState.WithinComment:
                if (this._stream.advanceIfChars([_MIN, _MIN, _RAN])) { // -->
                    this._state = ScannerState.Content;
                    return this.finishToken(offset, TokenType.CommentEnd);
                }
                this._stream.advanceUntilChars([_MIN, _MIN, _RAN]); // -->
                return this.finishToken(offset, TokenType.Comment);
            case ScannerState.WithinDoctype:
                if (this._stream.advanceIfChar(_RAN)) {
                    this._state = ScannerState.Content;
                    return this.finishToken(offset, TokenType.DoctypeEnd);
                }
                this._stream.advanceUntilChar(_RAN); // >
                return this.finishToken(offset, TokenType.Doctype);
            case ScannerState.Content:
                if (this._stream.advanceIfChar(_LAN)) { // <
                    if (!this._stream.eos() && this._stream.peekChar() === _BNG) { // !
                        if (this._stream.advanceIfChars([_BNG, _MIN, _MIN])) { // <!--
                            this._state = ScannerState.WithinComment;
                            return this.finishToken(offset, TokenType.CommentStart);
                        }
                        if (this._stream.advanceIfRegExp(/^!doctype/i)) {
                            this._state = ScannerState.WithinDoctype;
                            return this.finishToken(offset, TokenType.DoctypeStart);
                        }
                    }
                    if (this._stream.advanceIfChar(_FSL)) { // /
                        this._state = ScannerState.OpeningEndTag;
                        return this.finishToken(offset, TokenType.TagEnd);
                    }
                    this._state = ScannerState.OpeningStartTag;
                    return this.finishToken(offset, TokenType.TagStart);
                }
                this._stream.advanceUntilChar(_LAN);
                return this.finishToken(offset, TokenType.Content);
            case ScannerState.OpeningEndTag:
                let tagName = this.nextElementName();
                if (tagName.length > 0) {
                    return this.finishToken(offset, TokenType.Tag);
                } else if (this._stream.advanceIfChar(_RAN)) { // >
                    this._state = ScannerState.Content;
                    return this.finishToken(offset, TokenType.TagEnd);
                }
                // invalid content in an end tag: consume up to '>' as whitespace
                this._stream.advanceUntilChar(_RAN);
                return this.finishToken(offset, TokenType.Whitespace);
            case ScannerState.OpeningStartTag:
                this._lastTag = this.nextElementName();
                if (this._lastTag.length > 0) {
                    this._hasSpaceAfterTag = false;
                    this._state = ScannerState.WithinTag;
                    return this.finishToken(offset, TokenType.Tag);
                }
                break; // no tag name: fall through to the Unknown fallback below
            case ScannerState.WithinTag:
                if (this._stream.skipWhitespace()) {
                    this._hasSpaceAfterTag = true; // remember that we have seen a whitespace
                    return this.finishToken(offset, TokenType.Whitespace);
                }
                if (this._hasSpaceAfterTag) {
                    let name = this.nextAttributeName();
                    if (name.length > 0) {
                        this._state = ScannerState.AttributeName;
                        this._hasSpaceAfterTag = false;
                        return this.finishToken(offset, TokenType.AttributeName);
                    }
                }
                if (this._stream.advanceIfChars([_FSL, _RAN])) { // />
                    this._state = ScannerState.Content;
                    return this.finishToken(offset, TokenType.TagStart);
                }
                if (this._stream.advanceIfChar(_RAN)) { // >
                    // script/style content is scanned raw, not as markup
                    if (this._lastTag === 'script') {
                        this._state = ScannerState.WithinScriptContent;
                    } else if (this._lastTag === 'style') {
                        this._state = ScannerState.WithinStyleContent;
                    } else {
                        this._state = ScannerState.Content;
                    }
                    return this.finishToken(offset, TokenType.TagStart);
                }
                this._stream.advance(1);
                return this.finishToken(offset, TokenType.Unknown);
            case ScannerState.AttributeName:
                if (this._stream.skipWhitespace()) {
                    this._hasSpaceAfterTag = true;
                    return this.finishToken(offset, TokenType.Whitespace);
                }
                if (this._stream.advanceIfChar(_EQS)) {
                    this._state = ScannerState.AttributeValue;
                    return this.finishToken(offset, TokenType.DelimiterAssign);
                }
                this._state = ScannerState.WithinTag;
                return this.scan(); // no advance yet - jump to WithinTag
            case ScannerState.AttributeValue:
                if (this._stream.skipWhitespace()) {
                    return this.finishToken(offset, TokenType.Whitespace);
                }
                // unquoted value
                let attributeValue = this._stream.advanceIfRegExp(/^[^\s"'`=<>]+/);
                if (attributeValue.length > 0) {
                    this._state = ScannerState.WithinTag;
                    this._hasSpaceAfterTag = false;
                    return this.finishToken(offset, TokenType.AttributeValue);
                }
                // quoted value: everything up to the matching quote (or EOS)
                let ch = this._stream.peekChar();
                if (ch === _SQO || ch === _DQO) {
                    this._stream.advance(1); // consume quote
                    if (this._stream.advanceUntilChar(ch)) {
                        this._stream.advance(1); // consume quote
                    }
                    this._state = ScannerState.WithinTag;
                    this._hasSpaceAfterTag = false;
                    return this.finishToken(offset, TokenType.AttributeValue);
                }
                this._state = ScannerState.WithinTag;
                this._hasSpaceAfterTag = false;
                return this.scan(); // no advance yet - jump to WithinTag
            case ScannerState.WithinScriptContent:
                // see http://stackoverflow.com/questions/14574471/how-do-browsers-parse-a-script-tag-exactly
                // state: 1 = outside a comment, 2 = inside <!-- -->, 3 = inside
                // a nested <script> within the comment
                let state = 1;
                while (!this._stream.eos()) {
                    let match = this._stream.advanceIfRegExp(/<!--|-->|<\/?script\s*\/?>?/i);
                    if (match.length === 0) {
                        this._stream.goToEnd();
                        return this.finishToken(offset, TokenType.Script);
                    } else if (match === '<!--') {
                        if (state === 1) {
                            state = 2;
                        }
                    } else if (match === '-->') {
                        state = 1;
                    } else if (match[1] !== '/') { // <script
                        if (state === 2) {
                            state = 3;
                        }
                    } else { // </script
                        if (state === 3) {
                            state = 2; // closes the nested, commented-out script
                        } else {
                            this._stream.goBack(match.length); // to the beginning of the closing tag
                            break;
                        }
                    }
                }
                this._state = ScannerState.Content;
                if (offset < this._stream.pos()) {
                    return this.finishToken(offset, TokenType.Script);
                }
                return this.scan(); // no advance yet - jump to content
            case ScannerState.WithinStyleContent:
                // BUGFIX: this case was labeled WithinScriptContent — a duplicate
                // of the case above — making it unreachable; <style> content fell
                // through to the Unknown fallback below instead of being
                // tokenized as TokenType.Styles.
                this._stream.advanceUntilRegExp(/<\/style/i);
                this._state = ScannerState.Content;
                if (offset < this._stream.pos()) {
                    return this.finishToken(offset, TokenType.Styles);
                }
                return this.scan(); // no advance yet - jump to content
        }
        // fallback for unexpected input: consume one character as Unknown
        this._stream.advance(1);
        this._state = ScannerState.Content;
        return this.finishToken(offset, TokenType.Unknown);
    }
}

View File

@@ -0,0 +1,696 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as assert from 'assert';
import {Scanner, TokenType, ScannerState} from '../parser/htmlScanner';
suite('HTML Scanner', () => {
interface Token {
offset:number;
type:TokenType;
content?: string;
}
function assertTokens(tests: {input: string; tokens: Token[]; }[]) {
let scanner = new Scanner();
let scannerState = ScannerState.Content;
for (let t of tests) {
scanner.setSource(t.input, scannerState);
let token = scanner.scan();
let actual : Token[] = [];
while (token.type !== TokenType.EOS) {
let actualToken : Token= {offset: token.offset, type: token.type};
if (token.type == TokenType.Tag) {
actualToken.content = t.input.substr(token.offset, token.len);
}
actual.push(actualToken);
token = scanner.scan();
}
assert.deepEqual(actual, t.tokens);
scannerState = scanner.scannerState;
}
}
test('Open Start Tag #1', () => {
assertTokens([{
input: '<abc',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' }
]}
]);
});
test('Open Start Tag #2', () => {
assertTokens([{
input: '<input',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'input' }
]}
]);
});
test('Open Start Tag with Invalid Tag', () => {
assertTokens([{
input: '< abc',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Unknown },
{ offset:2, type: TokenType.Content }
]}
]);
});
test('Open Start Tag #3', () => {
assertTokens([{
input: '< abc>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Unknown },
{ offset:2, type: TokenType.Content }
]}
]);
});
test('Open Start Tag #4', () => {
assertTokens([{
input: 'i <len;',
tokens: [
{ offset:0, type: TokenType.Content },
{ offset:2, type: TokenType.TagStart },
{ offset:3, type: TokenType.Tag, content: 'len' },
{ offset:6, type: TokenType.Unknown }
]}
]);
});
test('Open Start Tag #5', () => {
assertTokens([{
input: '<',
tokens: [
{ offset:0, type: TokenType.TagStart }
]}
]);
});
test('Open End Tag', () => {
assertTokens([{
input: '</a',
tokens: [
{ offset:0, type: TokenType.TagEnd },
{ offset:2, type: TokenType.Tag, content: 'a' }
]}
]);
});
test('Complete Start Tag', () => {
assertTokens([{
input: '<abc>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.TagStart }
]}
]);
});
test('Complete Start Tag with Whitespace', () => {
assertTokens([{
input: '<abc >',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.TagStart }
]}
]);
});
test('bug 9809 - Complete Start Tag with Namespaceprefix', () => {
assertTokens([{
input: '<foo:bar>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'foo:bar' },
{ offset:8, type: TokenType.TagStart }
]}
]);
});
test('Complete End Tag', () => {
assertTokens([{
input: '</abc>',
tokens: [
{ offset:0, type: TokenType.TagEnd },
{ offset:2, type: TokenType.Tag, content: 'abc' },
{ offset:5, type: TokenType.TagEnd }
]}
]);
});
test('Complete End Tag with Whitespace', () => {
assertTokens([{
input: '</abc >',
tokens: [
{ offset:0, type: TokenType.TagEnd },
{ offset:2, type: TokenType.Tag, content: 'abc' },
{ offset:5, type: TokenType.Whitespace },
{ offset:7, type: TokenType.TagEnd }
]}
]);
});
test('Empty Tag', () => {
assertTokens([{
input: '<abc />',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.TagStart }
]}
]);
});
test('Embedded Content #1', () => {
assertTokens([{
input: '<script type="text/javascript">var i= 10;</script>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.Whitespace },
{ offset:8, type: TokenType.AttributeName },
{ offset:12, type: TokenType.DelimiterAssign },
{ offset:13, type: TokenType.AttributeValue },
{ offset:30, type: TokenType.TagStart },
{ offset:31, type: TokenType.Script },
{ offset:41, type: TokenType.TagEnd },
{ offset:43, type: TokenType.Tag, content: 'script' },
{ offset:49, type: TokenType.TagEnd }
]}
]);
});
test('Embedded Content #2', () => {
assertTokens([{
input: '<script type="text/javascript">',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.Whitespace },
{ offset:8, type: TokenType.AttributeName },
{ offset:12, type: TokenType.DelimiterAssign },
{ offset:13, type: TokenType.AttributeValue },
{ offset:30, type: TokenType.TagStart }
]}, {
input: 'var i= 10;',
tokens: [
{ offset:0, type: TokenType.Script }
]}, {
input: '</script>',
tokens: [
{ offset:0, type: TokenType.TagEnd },
{ offset:2, type: TokenType.Tag, content: 'script' },
{ offset:8, type: TokenType.TagEnd }
]}
]);
});
test('Embedded Content #3', () => {
assertTokens([{
input: '<script type="text/javascript">var i= 10;',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.Whitespace },
{ offset:8, type: TokenType.AttributeName },
{ offset:12, type: TokenType.DelimiterAssign },
{ offset:13, type: TokenType.AttributeValue },
{ offset:30, type: TokenType.TagStart },
{ offset:31, type: TokenType.Script }
]}, {
input: '</script>',
tokens: [
{ offset:0, type: TokenType.TagEnd },
{ offset:2, type: TokenType.Tag, content: 'script' },
{ offset:8, type: TokenType.TagEnd }
]}
]);
});
test('Embedded Content #4', () => {
assertTokens([{
input: '<script type="text/javascript">',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.Whitespace },
{ offset:8, type: TokenType.AttributeName },
{ offset:12, type: TokenType.DelimiterAssign },
{ offset:13, type: TokenType.AttributeValue },
{ offset:30, type: TokenType.TagStart }
]}, {
input: 'var i= 10;</script>',
tokens: [
{ offset:0, type: TokenType.Script },
{ offset:10, type: TokenType.TagEnd },
{ offset:12, type: TokenType.Tag, content: 'script' },
{ offset:18, type: TokenType.TagEnd }
]}
]);
});
test('Embedded Content #5', () => {
assertTokens([{
input: '<script type="text/plain">a\n<a</script>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.Whitespace },
{ offset:8, type: TokenType.AttributeName },
{ offset:12, type: TokenType.DelimiterAssign },
{ offset:13, type: TokenType.AttributeValue },
{ offset:25, type: TokenType.TagStart },
{ offset:26, type: TokenType.Script },
{ offset:30, type: TokenType.TagEnd },
{ offset:32, type: TokenType.Tag, content: 'script' },
{ offset:38, type: TokenType.TagEnd }
]}
]);
});
test('Embedded Content #6', () => {
assertTokens([{
input: '<script>a</script><script>b</script>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.TagStart },
{ offset:8, type: TokenType.Script },
{ offset:9, type: TokenType.TagEnd },
{ offset:11, type: TokenType.Tag, content: 'script' },
{ offset:17, type: TokenType.TagEnd },
{ offset:18, type: TokenType.TagStart },
{ offset:19, type: TokenType.Tag, content: 'script' },
{ offset:25, type: TokenType.TagStart },
{ offset:26, type: TokenType.Script },
{ offset:27, type: TokenType.TagEnd },
{ offset:29, type: TokenType.Tag, content: 'script' },
{ offset:35, type: TokenType.TagEnd }
]}
]);
});
test('Embedded Content #7', () => {
assertTokens([{
input: '<script type="text/javascript"></script>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.Whitespace },
{ offset:8, type: TokenType.AttributeName },
{ offset:12, type: TokenType.DelimiterAssign },
{ offset:13, type: TokenType.AttributeValue },
{ offset:30, type: TokenType.TagStart },
{ offset:31, type: TokenType.TagEnd },
{ offset:33, type: TokenType.Tag, content: 'script' },
{ offset:39, type: TokenType.TagEnd }
]}
]);
});
test('Embedded Content #8', () => {
assertTokens([{
input: '<script>var i= 10;</script>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.TagStart },
{ offset:8, type: TokenType.Script },
{ offset:18, type: TokenType.TagEnd },
{ offset:20, type: TokenType.Tag, content: 'script' },
{ offset:26, type: TokenType.TagEnd }
]}
]);
});
test('Embedded Content #9', () => {
assertTokens([{
input: '<script type="text/javascript" src="main.js"></script>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.Whitespace },
{ offset:8, type: TokenType.AttributeName },
{ offset:12, type: TokenType.DelimiterAssign },
{ offset:13, type: TokenType.AttributeValue },
{ offset:30, type: TokenType.Whitespace },
{ offset:31, type: TokenType.AttributeName },
{ offset:34, type: TokenType.DelimiterAssign },
{ offset:35, type: TokenType.AttributeValue },
{ offset:44, type: TokenType.TagStart },
{ offset:45, type: TokenType.TagEnd },
{ offset:47, type: TokenType.Tag, content: 'script' },
{ offset:53, type: TokenType.TagEnd }
]}
]);
});
test('Embedded Content #10', () => {
assertTokens([{
input: '<script><!-- alert("<script></script>"); --></script>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.TagStart },
{ offset:8, type: TokenType.Script },
{ offset:44, type: TokenType.TagEnd },
{ offset:46, type: TokenType.Tag, content: 'script' },
{ offset:52, type: TokenType.TagEnd }
]}
]);
});
test('Embedded Content #11', () => {
assertTokens([{
input: '<script><!-- alert("<script></script>"); </script>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.TagStart },
{ offset:8, type: TokenType.Script },
{ offset:41, type: TokenType.TagEnd },
{ offset:43, type: TokenType.Tag, content: 'script' },
{ offset:49, type: TokenType.TagEnd }
]}
]);
});
test('Embedded Content #12', () => {
assertTokens([{
input: '<script><!-- alert("</script>"); </script>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.TagStart },
{ offset:8, type: TokenType.Script },
{ offset:20, type: TokenType.TagEnd },
{ offset:22, type: TokenType.Tag, content: 'script' },
{ offset:28, type: TokenType.TagEnd },
{ offset:29, type: TokenType.Content },
{ offset:33, type: TokenType.TagEnd },
{ offset:35, type: TokenType.Tag, content: 'script' },
{ offset:41, type: TokenType.TagEnd }
]}
]);
});
test('Embedded Content #13', () => {
assertTokens([{
input: '<script> alert("<script></script>"); </script>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'script' },
{ offset:7, type: TokenType.TagStart },
{ offset:8, type: TokenType.Script },
{ offset:24, type: TokenType.TagEnd },
{ offset:26, type: TokenType.Tag, content: 'script' },
{ offset:32, type: TokenType.TagEnd },
{ offset:33, type: TokenType.Content },
{ offset:37, type: TokenType.TagEnd },
{ offset:39, type: TokenType.Tag, content: 'script' },
{ offset:45, type: TokenType.TagEnd }
]}
]);
});
test('Tag with Attribute', () => {
assertTokens([{
input: '<abc foo="bar">',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.AttributeName },
{ offset:8, type: TokenType.DelimiterAssign },
{ offset:9, type: TokenType.AttributeValue },
{ offset:14, type: TokenType.TagStart }
]}
]);
});
test('Tag with Empty Attribute Value', () => {
assertTokens([{
input: '<abc foo=\'bar\'>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.AttributeName },
{ offset:8, type: TokenType.DelimiterAssign },
{ offset:9, type: TokenType.AttributeValue },
{ offset:14, type: TokenType.TagStart }
]}
]);
});
test('Tag with empty attributes', () => {
assertTokens([{
input: '<abc foo="">',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.AttributeName },
{ offset:8, type: TokenType.DelimiterAssign },
{ offset:9, type: TokenType.AttributeValue },
{ offset:11, type: TokenType.TagStart }
]}
]);
});
test('Tag with Attributes', () => {
assertTokens([{
input: '<abc foo="bar" bar=\'foo\'>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.AttributeName },
{ offset:8, type: TokenType.DelimiterAssign },
{ offset:9, type: TokenType.AttributeValue },
{ offset:14, type: TokenType.Whitespace },
{ offset:15, type: TokenType.AttributeName },
{ offset:18, type: TokenType.DelimiterAssign },
{ offset:19, type: TokenType.AttributeValue },
{ offset:24, type: TokenType.TagStart }
]}
]);
});
test('Tag with Attributes, no quotes', () => {
assertTokens([{
input: '<abc foo=bar bar=help-me>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.AttributeName },
{ offset:8, type: TokenType.DelimiterAssign },
{ offset:9, type: TokenType.AttributeValue },
{ offset:12, type: TokenType.Whitespace },
{ offset:13, type: TokenType.AttributeName },
{ offset:16, type: TokenType.DelimiterAssign },
{ offset:17, type: TokenType.AttributeValue },
{ offset:24, type: TokenType.TagStart }
]}
]);
});
test('Tag with Attribute And Whitespace', () => {
assertTokens([{
input: '<abc foo= "bar">',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.AttributeName },
{ offset:8, type: TokenType.DelimiterAssign },
{ offset:9, type: TokenType.Whitespace },
{ offset:11, type: TokenType.AttributeValue },
{ offset:16, type: TokenType.TagStart }
]}
]);
});
test('Tag with Attribute And Whitespace #2', () => {
assertTokens([{
input: '<abc foo = "bar">',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.AttributeName },
{ offset:8, type: TokenType.Whitespace },
{ offset:9, type: TokenType.DelimiterAssign },
{ offset:10, type: TokenType.Whitespace },
{ offset:11, type: TokenType.AttributeValue },
{ offset:16, type: TokenType.TagStart }
]}
]);
});
test('Tag with Name-Only-Attribute #1', () => {
assertTokens([{
input: '<abc foo>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.AttributeName },
{ offset:8, type: TokenType.TagStart }
]}
]);
});
test('Tag with Name-Only-Attribute #2', () => {
assertTokens([{
input: '<abc foo bar>',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.AttributeName },
{ offset:8, type: TokenType.Whitespace },
{ offset:9, type: TokenType.AttributeName },
{ offset:12, type: TokenType.TagStart }
]}
]);
});
test('Tag with Interesting Attribute Name', () => {
assertTokens([{
input: '<abc foo!@#="bar">',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.AttributeName },
{ offset:11, type: TokenType.DelimiterAssign },
{ offset:12, type: TokenType.AttributeValue },
{ offset:17, type: TokenType.TagStart }
]}
]);
});
test('Tag with Angular Attribute Name', () => {
assertTokens([{
input: '<abc #myinput (click)="bar" [value]="someProperty" *ngIf="someCondition">',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.AttributeName },
{ offset:13, type: TokenType.Whitespace },
{ offset:14, type: TokenType.AttributeName },
{ offset:21, type: TokenType.DelimiterAssign },
{ offset:22, type: TokenType.AttributeValue },
{ offset:27, type: TokenType.Whitespace },
{ offset:28, type: TokenType.AttributeName },
{ offset:35, type: TokenType.DelimiterAssign },
{ offset:36, type: TokenType.AttributeValue },
{ offset:50, type: TokenType.Whitespace },
{ offset:51, type: TokenType.AttributeName },
{ offset:56, type: TokenType.DelimiterAssign },
{ offset:57, type: TokenType.AttributeValue },
{ offset:72, type: TokenType.TagStart }
]}
]);
});
test('Tag with Invalid Attribute Value', () => {
assertTokens([{
input: '<abc foo=">',
tokens: [
{ offset:0, type: TokenType.TagStart },
{ offset:1, type: TokenType.Tag, content: 'abc' },
{ offset:4, type: TokenType.Whitespace },
{ offset:5, type: TokenType.AttributeName },
{ offset:8, type: TokenType.DelimiterAssign },
{ offset:9, type: TokenType.AttributeValue }
]}
]);
});
test('Simple Comment 1', () => {
assertTokens([{
input: '<!--a-->',
tokens: [
{ offset:0, type: TokenType.CommentStart },
{ offset:4, type: TokenType.Comment },
{ offset:5, type: TokenType.CommentEnd }
]}
]);
});
test('Simple Comment 2', () => {
assertTokens([{
input: '<!--a>foo bar</a -->',
tokens: [
{ offset:0, type: TokenType.CommentStart },
{ offset:4, type: TokenType.Comment },
{ offset:17, type: TokenType.CommentEnd }
]}
]);
});
test('Multiline Comment', () => {
assertTokens([{
input: '<!--a>\nfoo \nbar</a -->',
tokens: [
{ offset:0, type: TokenType.CommentStart },
{ offset:4, type: TokenType.Comment },
{ offset:19, type: TokenType.CommentEnd }
]}
]);
});
test('Simple Doctype', () => {
assertTokens([{
input: '<!Doctype a>',
tokens: [
{ offset:0, type: TokenType.DoctypeStart },
{ offset:9, type: TokenType.Doctype },
{ offset:11, type: TokenType.DoctypeEnd }
]}
]);
});
test('Simple Doctype #2', () => {
assertTokens([{
input: '<!doctype a>',
tokens: [
{ offset:0, type: TokenType.DoctypeStart },
{ offset:9, type: TokenType.Doctype },
{ offset:11, type: TokenType.DoctypeEnd }
]}
]);
});
test('Simple Doctype #4', () => {
assertTokens([{
input: '<!DOCTYPE a\n"foo" \'bar\'>',
tokens: [
{ offset:0, type: TokenType.DoctypeStart },
{ offset:9, type: TokenType.Doctype },
{ offset:23, type: TokenType.DoctypeEnd }
]}
]);
});
});