/*
 * Copyright (c) 2024, Andrew Kaster <andrew@ladybird.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

import AK
import Testing
import Web

@Suite
struct TestHTMLTokenizerSwift {

    /// An HTMLToken's type can be reassigned after construction, and its
    /// debug description reflects the current type.
    @Test func tokenTypes() {
        let defaultToken = HTMLToken()
        defaultToken.type = .Character(codePoint: "a")
        #expect(defaultToken.isCharacter())

        #expect("\(defaultToken)" == "HTMLToken(type: Character(codePoint: a))")
    }

    /// isParserWhitespace() is true exactly for the HTML "ASCII whitespace"
    /// code points (tab, LF, CR, form feed, space) and false for other characters.
    @Test func parserWhitespace() {
        // The five code points the HTML spec treats as parser whitespace.
        for codePoint: Character in ["\t", "\n", "\r", "\u{000C}", " "] {
            let token = HTMLToken(type: .Character(codePoint: codePoint))
            #expect(token.isParserWhitespace())
        }

        // Ordinary letters must not be classified as parser whitespace.
        for codePoint: Character in ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j"] {
            let token = HTMLToken(type: .Character(codePoint: codePoint))
            #expect(!token.isParserWhitespace())
        }
    }

    /// Empty input: a single EndOfFile token, then nil forever, with the
    /// tokenizer remaining in the Data state.
    @Test func dataStateNoInput() {
        let tokenizer = HTMLTokenizer()
        #expect(tokenizer.state == .Data) // initial state

        let token = tokenizer.nextToken()
        #expect(token?.type == .EndOfFile)

        // Once exhausted, nextToken() keeps returning nil without changing state.
        let token2 = tokenizer.nextToken()
        #expect(token2 == nil)
        #expect(tokenizer.state == .Data)
    }

    /// A one-character input yields that character, then EndOfFile, then nil.
    @Test func dataStateSingleChar() throws {
        // #require records an issue and aborts the test if construction fails,
        // replacing the guard/Issue.record/return boilerplate.
        let tokenizer = try #require(HTMLTokenizer(input: "X"), "Failed to create tokenizer for 'X'")
        #expect(tokenizer.state == .Data) // initial state

        let token = tokenizer.nextToken()
        #expect(token?.type == .Character(codePoint: "X"))

        let token2 = tokenizer.nextToken()
        #expect(token2?.type == .EndOfFile)

        let token3 = tokenizer.nextToken()
        #expect(token3 == nil)
        #expect(tokenizer.state == .Data)
    }

    /// "&" switches the tokenizer into the CharacterReference state; with no
    /// further input the first token is EndOfFile.
    @Test func dataStateAmpersand() throws {
        let tokenizer = try #require(HTMLTokenizer(input: "&"), "Failed to create tokenizer for '&'")
        #expect(tokenizer.state == .Data) // initial state

        let token = tokenizer.nextToken()
        #expect(token?.type == .EndOfFile)
        #expect(tokenizer.state == .CharacterReference)

        let token2 = tokenizer.nextToken()
        #expect(token2 == nil)
    }

    /// "<" switches the tokenizer into the TagOpen state; with no further
    /// input the first token is EndOfFile.
    @Test func dataStateTagOpen() throws {
        let tokenizer = try #require(HTMLTokenizer(input: "<"), "Failed to create tokenizer for '<'")
        #expect(tokenizer.state == .Data) // initial state

        let token = tokenizer.nextToken()
        #expect(token?.type == .EndOfFile)
        #expect(tokenizer.state == .TagOpen)

        let token2 = tokenizer.nextToken()
        #expect(token2 == nil)
    }

    /// A NUL byte in the data state is emitted as U+FFFD REPLACEMENT CHARACTER;
    /// surrounding characters pass through unchanged.
    @Test func dataStateNulChar() throws {
        let tokenizer = try #require(HTMLTokenizer(input: "H\0I"), "Failed to create tokenizer for 'H\\0I'")
        #expect(tokenizer.state == .Data) // initial state

        let token = tokenizer.nextToken()
        #expect(token?.type == .Character(codePoint: "H"))

        // NUL is replaced, not dropped.
        let token2 = tokenizer.nextToken()
        #expect(token2?.type == .Character(codePoint: "\u{FFFD}"))

        let token3 = tokenizer.nextToken()
        #expect(token3?.type == .Character(codePoint: "I"))

        let token4 = tokenizer.nextToken()
        #expect(token4?.type == .EndOfFile)

        #expect(tokenizer.state == .Data)
    }
}
|