package parser

import (
	"fmt"
	"os"
	"strings"

	"github.com/goccy/go-yaml/ast"
	"github.com/goccy/go-yaml/internal/errors"
	"github.com/goccy/go-yaml/lexer"
	"github.com/goccy/go-yaml/token"
)
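
// parser walks the filtered token stream, tracking the current read position.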
type parser struct {
	idx    int
	size   int
	tokens token.Tokens
}
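
// newParser builds a parser for tokens. Unless the ParseComments mode is set,
// comment tokens are filtered out while the prev/next links between the
// remaining tokens are kept intact (see the issue referenced below).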
func newParser(tokens token.Tokens, mode Mode) *parser {
	filteredTokens := []*token.Token{}
	if mode&ParseComments != 0 {
		filteredTokens = tokens
	} else {
		for _, tk := range tokens {
			if tk.Type == token.CommentType {
				continue
			}
			// keep prev/next reference between tokens containing comments
			// https://github.com/goccy/go-yaml/issues/254
			filteredTokens = append(filteredTokens, tk)
		}
	}
	return &parser{
		idx:    0,
		size:   len(filteredTokens),
		tokens: token.Tokens(filteredTokens),
	}
}

func (p *parser) next() bool {
	return p.idx < p.size
}

func (p *parser) previousToken() *token.Token {
	if p.idx > 0 {
		return p.tokens[p.idx-1]
	}
	return nil
}
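
// insertToken inserts tk at position idx and links it to the token currently
// at that position (or to the last token when appending at the end).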
func (p *parser) insertToken(idx int, tk *token.Token) {
	if p.size < idx {
		return
	}
	if p.size == idx {
		curToken := p.tokens[p.size-1]
		tk.Next = curToken
		curToken.Prev = tk

		p.tokens = append(p.tokens, tk)
		p.size = len(p.tokens)
		return
	}

	curToken := p.tokens[idx]
	tk.Next = curToken
	curToken.Prev = tk

	p.tokens = append(p.tokens[:idx+1], p.tokens[idx:]...)
	p.tokens[idx] = tk
	p.size = len(p.tokens)
}

func (p *parser) currentToken() *token.Token {
	if p.idx >= p.size {
		return nil
	}
	return p.tokens[p.idx]
}

func (p *parser) nextToken() *token.Token {
	if p.idx+1 >= p.size {
		return nil
	}
	return p.tokens[p.idx+1]
}

func (p *parser) nextNotCommentToken() *token.Token {
	for i := p.idx + 1; i < p.size; i++ {
		tk := p.tokens[i]
		if tk.Type == token.CommentType {
			continue
		}
		return tk
	}
	return nil
}

func (p *parser) afterNextNotCommentToken() *token.Token {
	notCommentTokenCount := 0
	for i := p.idx + 1; i < p.size; i++ {
		tk := p.tokens[i]
		if tk.Type == token.CommentType {
			continue
		}
		notCommentTokenCount++
		if notCommentTokenCount == 2 {
			return tk
		}
	}
	return nil
}

func (p *parser) isCurrentCommentToken() bool {
	tk := p.currentToken()
	if tk == nil {
		return false
	}
	return tk.Type == token.CommentType
}

func (p *parser) progressIgnoreComment(num int) {
	if p.size <= p.idx+num {
		p.idx = p.size
	} else {
		p.idx += num
	}
}

func (p *parser) progress(num int) {
	if p.isCurrentCommentToken() {
		return
	}
	p.progressIgnoreComment(num)
}
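
// parseMapping parses a flow mapping such as "{ key: value, ... }" starting
// from the current MappingStart token.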
func (p *parser) parseMapping(ctx *context) (*ast.MappingNode, error) {
	mapTk := p.currentToken()
	node := ast.Mapping(mapTk, true)
	node.SetPath(ctx.path)
	p.progress(1) // skip MappingStart token

	isFirst := true
	for p.next() {
		tk := p.currentToken()
		if tk.Type == token.MappingEndType {
			node.End = tk
			break
		} else if tk.Type == token.CollectEntryType {
			p.progress(1)
		} else if !isFirst {
			return nil, errors.ErrSyntax("',' or '}' must be specified", tk)
		}

		if tk := p.currentToken(); tk != nil && tk.Type == token.MappingEndType {
			// This case occurs with a trailing comma, e.g. "{ elem, }".
			// Ignore the last (empty) element and stop parsing the mapping.
			node.End = tk
			break
		}

		value, err := p.parseMappingValue(ctx.withFlow(true))
		if err != nil {
			return nil, err
		}
		mvnode, ok := value.(*ast.MappingValueNode)
		if !ok {
			return nil, errors.ErrSyntax("failed to parse flow mapping node", value.GetToken())
		}
		node.Values = append(node.Values, mvnode)
		p.progress(1)
		isFirst = false
	}
	if node.End == nil || node.End.Type != token.MappingEndType {
		return nil, errors.ErrSyntax("could not find flow mapping end token '}'", node.Start)
	}
	return node, nil
}
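
// parseSequence parses a flow sequence such as "[ a, b, ... ]" starting from
// the current SequenceStart token.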
func (p *parser) parseSequence(ctx *context) (*ast.SequenceNode, error) {
	node := ast.Sequence(p.currentToken(), true)
	node.SetPath(ctx.path)
	p.progress(1) // skip SequenceStart token

	isFirst := true
	for p.next() {
		tk := p.currentToken()
		if tk.Type == token.SequenceEndType {
			node.End = tk
			break
		} else if tk.Type == token.CollectEntryType {
			p.progress(1)
		} else if !isFirst {
			return nil, errors.ErrSyntax("',' or ']' must be specified", tk)
		}

		if tk := p.currentToken(); tk != nil && tk.Type == token.SequenceEndType {
			// This case occurs with a trailing comma, e.g. "[ elem, ]".
			// Ignore the last (empty) element and stop parsing the sequence.
			node.End = tk
			break
		}

		value, err := p.parseToken(ctx.withIndex(uint(len(node.Values))).withFlow(true), p.currentToken())
		if err != nil {
			return nil, err
		}
		node.Values = append(node.Values, value)
		p.progress(1)
		isFirst = false
	}
	if node.End == nil || node.End.Type != token.SequenceEndType {
		return nil, errors.ErrSyntax("sequence end token ']' not found", node.Start)
	}
	return node, nil
}
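
// parseTag parses a tagged node (e.g. "!!str", "!!map", or a custom tag) and
// delegates to the parser that matches the tag kind.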
func (p *parser) parseTag(ctx *context) (*ast.TagNode, error) {
	tagToken := p.currentToken()
	node := ast.Tag(tagToken)
	node.SetPath(ctx.path)
	p.progress(1) // skip tag token
	var (
		value ast.Node
		err   error
	)
	switch token.ReservedTagKeyword(tagToken.Value) {
	case token.MappingTag,
		token.OrderedMapTag:
		value, err = p.parseMapping(ctx)
	case token.IntegerTag,
		token.FloatTag,
		token.StringTag,
		token.BinaryTag,
		token.TimestampTag,
		token.NullTag:
		typ := p.currentToken().Type
		if typ == token.LiteralType || typ == token.FoldedType {
			value, err = p.parseLiteral(ctx)
		} else {
			value = p.parseScalarValue(p.currentToken())
		}
	case token.SequenceTag,
		token.SetTag:
		err = errors.ErrSyntax(fmt.Sprintf("sorry, currently not supported %s tag", tagToken.Value), tagToken)
	default:
		// custom tag
		value, err = p.parseToken(ctx, p.currentToken())
	}
	if err != nil {
		return nil, err
	}
	node.Value = value
	return node, nil
}

func (p *parser) removeLeftSideNewLineCharacter(src string) string {
	// CR or LF or CRLF
	return strings.TrimLeft(strings.TrimLeft(strings.TrimLeft(src, "\r"), "\n"), "\r\n")
}

func (p *parser) existsNewLineCharacter(src string) bool {
	if strings.Index(src, "\n") > 0 {
		return true
	}
	if strings.Index(src, "\r") > 0 {
		return true
	}
	return false
}

func (p *parser) validateMapKey(tk *token.Token) error {
	if tk.Type != token.StringType {
		return nil
	}
	origin := p.removeLeftSideNewLineCharacter(tk.Origin)
	if p.existsNewLineCharacter(origin) {
		return errors.ErrSyntax("unexpected key name", tk)
	}
	return nil
}

func (p *parser) createNullToken(base *token.Token) *token.Token {
	pos := *(base.Position)
	pos.Column++
	return token.New("null", "null", &pos)
}

func (p *parser) parseMapValue(ctx *context, key ast.MapKeyNode, colonToken *token.Token) (ast.Node, error) {
	node, err := p.createMapValueNode(ctx, key, colonToken)
	if err != nil {
		return nil, err
	}
	if node != nil && node.GetPath() == "" {
		node.SetPath(ctx.path)
	}
	return node, nil
}
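
// createMapValueNode parses the value that follows a "key:" token. When no
// explicit value is present (end of stream, a shallower column, or a flow
// separator), an implicit null node is created instead.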
func (p *parser) createMapValueNode(ctx *context, key ast.MapKeyNode, colonToken *token.Token) (ast.Node, error) {
	tk := p.currentToken()
	if tk == nil {
		nullToken := p.createNullToken(colonToken)
		p.insertToken(p.idx, nullToken)
		return ast.Null(nullToken), nil
	} else if tk.Type == token.CollectEntryType {
		// implicit null value.
		return ast.Null(tk), nil
	}
	var comment *ast.CommentGroupNode
	if tk.Type == token.CommentType {
		comment = p.parseCommentOnly(ctx)
		if comment != nil {
			comment.SetPath(ctx.withChild(key.GetToken().Value).path)
		}
		tk = p.currentToken()
	}
	if tk.Position.Column == key.GetToken().Position.Column && tk.Type == token.StringType {
		// in this case,
		// ----
		// key: <value is not defined>
		// next

		nullToken := p.createNullToken(colonToken)
		p.insertToken(p.idx, nullToken)
		nullNode := ast.Null(nullToken)

		if comment != nil {
			_ = nullNode.SetComment(comment)
		} else {
			// If there is a comment, it is already bound to the key node,
			// so remove the comment from the key to bind it to the null value.
			keyComment := key.GetComment()
			if keyComment != nil {
				if err := key.SetComment(nil); err != nil {
					return nil, err
				}
				_ = nullNode.SetComment(keyComment)
			}
		}
		return nullNode, nil
	}

	if tk.Position.Column < key.GetToken().Position.Column {
		// in this case,
		// ----
		// key: <value is not defined>
		// next
		nullToken := p.createNullToken(colonToken)
		p.insertToken(p.idx, nullToken)
		nullNode := ast.Null(nullToken)
		if comment != nil {
			_ = nullNode.SetComment(comment)
		}
		return nullNode, nil
	}

	value, err := p.parseToken(ctx, p.currentToken())
	if err != nil {
		return nil, err
	}
	if comment != nil {
		_ = value.SetComment(comment)
	}
	return value, nil
}

func (p *parser) validateMapValue(ctx *context, key, value ast.Node) error {
	keyTk := key.GetToken()
	valueTk := value.GetToken()

	if keyTk.Position.Line == valueTk.Position.Line && valueTk.Type == token.SequenceEntryType {
		return errors.ErrSyntax("block sequence entries are not allowed in this context", valueTk)
	}
	if keyTk.Position.Column != valueTk.Position.Column {
		return nil
	}
	if value.Type() != ast.StringType {
		return nil
	}
	ntk := p.nextToken()
	if ntk == nil || (ntk.Type != token.MappingValueType && ntk.Type != token.SequenceEntryType) {
		return errors.ErrSyntax("could not find expected ':' token", valueTk)
	}
	return nil
}
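
// parseMappingValue parses a block-style "key: value" pair and merges any
// following pairs that start at the same column into a single mapping node.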
func (p *parser) parseMappingValue(ctx *context) (ast.Node, error) {
	key, err := p.parseMapKey(ctx)
	if err != nil {
		return nil, err
	}
	keyText := key.GetToken().Value
	key.SetPath(ctx.withChild(keyText).path)
	if err := p.validateMapKey(key.GetToken()); err != nil {
		return nil, err
	}
	p.progress(1) // progress to mapping value token
	if ctx.isFlow {
		// if "{key}" or "{key," style, returns MappingValueNode.
		node, err := p.parseFlowMapNullValue(ctx, key)
		if err != nil {
			return nil, err
		}
		if node != nil {
			return node, nil
		}
	}
	tk := p.currentToken() // get mapping value (':') token.
	if tk == nil {
		return nil, errors.ErrSyntax("unexpected map", key.GetToken())
	}
	p.progress(1) // progress to value token
	if ctx.isFlow {
		// if "{key:}" or "{key:," style, returns MappingValueNode.
		node, err := p.parseFlowMapNullValue(ctx, key)
		if err != nil {
			return nil, err
		}
		if node != nil {
			return node, nil
		}
	}
	if err := p.setSameLineCommentIfExists(ctx.withChild(keyText), key); err != nil {
		return nil, err
	}
	if key.GetComment() != nil {
		// if current token is comment, GetComment() is not nil.
		// then progress to value token
		p.progressIgnoreComment(1)
	}

	value, err := p.parseMapValue(ctx.withChild(keyText), key, tk)
	if err != nil {
		return nil, err
	}
	if err := p.validateMapValue(ctx, key, value); err != nil {
		return nil, err
	}

	mvnode := ast.MappingValue(tk, key, value)
	mvnode.SetPath(ctx.withChild(keyText).path)
	node := ast.Mapping(tk, false, mvnode)
	node.SetPath(ctx.withChild(keyText).path)

	ntk := p.nextNotCommentToken()
	antk := p.afterNextNotCommentToken()
	for ntk != nil && ntk.Position.Column == key.GetToken().Position.Column {
		if ntk.Type == token.DocumentHeaderType || ntk.Type == token.DocumentEndType {
			break
		}
		if antk == nil {
			return nil, errors.ErrSyntax("required ':' and map value", ntk)
		}
		if antk.Type != token.MappingValueType {
			return nil, errors.ErrSyntax("required ':' and map value", antk)
		}
		p.progressIgnoreComment(1)
		value, err := p.parseToken(ctx, p.currentToken())
		if err != nil {
			return nil, err
		}
		switch value.Type() {
		case ast.MappingType:
			c, _ := value.(*ast.MappingNode)
			comment := c.GetComment()
			for idx, v := range c.Values {
				if idx == 0 && comment != nil {
					if err := v.SetComment(comment); err != nil {
						return nil, err
					}
				}
				node.Values = append(node.Values, v)
			}
		case ast.MappingValueType:
			node.Values = append(node.Values, value.(*ast.MappingValueNode))
		default:
			return nil, fmt.Errorf("failed to parse mapping value node node is %s", value.Type())
		}
		ntk = p.nextNotCommentToken()
		antk = p.afterNextNotCommentToken()
	}
	if err := p.validateMapNextToken(ctx, node); err != nil {
		return nil, err
	}
	if len(node.Values) == 1 {
		mapKeyCol := mvnode.Key.GetToken().Position.Column
		commentTk := p.nextToken()
		if commentTk != nil && commentTk.Type == token.CommentType && mapKeyCol <= commentTk.Position.Column {
			// If the comment is in the same or deeper column as the last element column in map value,
			// treat it as a footer comment for the last element.
			comment := p.parseFootComment(ctx, mapKeyCol)
			mvnode.FootComment = comment
		}
		return mvnode, nil
	}
	mapCol := node.GetToken().Position.Column
	commentTk := p.nextToken()
	if commentTk != nil && commentTk.Type == token.CommentType && mapCol <= commentTk.Position.Column {
		// If the comment is in the same or deeper column as the last element column in map value,
		// treat it as a footer comment for the last element.
		comment := p.parseFootComment(ctx, mapCol)
		node.FootComment = comment
	}
	return node, nil
}

func (p *parser) validateMapNextToken(ctx *context, node *ast.MappingNode) error {
	keyTk := node.Start
	if len(node.Values) != 0 {
		keyTk = node.Values[len(node.Values)-1].Key.GetToken()
	}
	tk := p.nextNotCommentToken()
	if tk == nil {
		return nil
	}

	if ctx.isFlow && (tk.Type == token.CollectEntryType || tk.Type == token.SequenceEndType || tk.Type == token.MappingEndType) {
		// a: {
		//   key: value
		// } , <= if context is flow mode, "," or "]" or "}" is allowed.
		return nil
	}

	if tk.Position.Line > keyTk.Position.Line && tk.Position.Column > keyTk.Position.Column {
		// a: b
		//  c <= this token is invalid.
		return errors.ErrSyntax("value is not allowed in this context", tk)
	}
	return nil
}

func (p *parser) parseFlowMapNullValue(ctx *context, key ast.MapKeyNode) (*ast.MappingValueNode, error) {
	tk := p.currentToken()
	if tk == nil {
		return nil, errors.ErrSyntax("unexpected map", key.GetToken())
	}
	if tk.Type != token.MappingEndType && tk.Type != token.CollectEntryType {
		return nil, nil
	}
	nullTk := p.createNullToken(tk)
	p.insertToken(p.idx, nullTk)
	value, err := p.parseToken(ctx, nullTk)
	if err != nil {
		return nil, err
	}
	node := ast.MappingValue(tk, key, value)
	node.SetPath(ctx.withChild(key.GetToken().Value).path)
	return node, nil
}
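
// parseSequenceEntry parses a block-style sequence ("- item" entries),
// collecting consecutive entries at the same column into one sequence node.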
func (p *parser) parseSequenceEntry(ctx *context) (*ast.SequenceNode, error) {
	tk := p.currentToken()
	sequenceNode := ast.Sequence(tk, false)
	sequenceNode.SetPath(ctx.path)
	curColumn := tk.Position.Column
	for tk.Type == token.SequenceEntryType {
		p.progress(1) // skip sequence token
		entryTk := tk
		tk = p.currentToken()
		if tk == nil {
			sequenceNode.Values = append(sequenceNode.Values, ast.Null(p.createNullToken(entryTk)))
			break
		}
		var comment *ast.CommentGroupNode
		if tk.Type == token.CommentType {
			comment = p.parseCommentOnly(ctx)
			tk = p.currentToken()
			if tk.Type == token.SequenceEntryType {
				p.progress(1) // skip sequence token
			}
		}
		value, err := p.parseToken(ctx.withIndex(uint(len(sequenceNode.Values))), p.currentToken())
		if err != nil {
			return nil, err
		}
		if comment != nil {
			comment.SetPath(ctx.withIndex(uint(len(sequenceNode.Values))).path)
			sequenceNode.ValueHeadComments = append(sequenceNode.ValueHeadComments, comment)
		} else {
			sequenceNode.ValueHeadComments = append(sequenceNode.ValueHeadComments, nil)
		}
		sequenceNode.Values = append(sequenceNode.Values, value)
		tk = p.nextNotCommentToken()
		if tk == nil {
			break
		}
		if tk.Type != token.SequenceEntryType {
			break
		}
		if tk.Position.Column != curColumn {
			break
		}
		p.progressIgnoreComment(1)
	}
	commentTk := p.nextToken()
	if commentTk != nil && commentTk.Type == token.CommentType && curColumn <= commentTk.Position.Column {
		// If the comment is in the same or deeper column as the last element column in sequence value,
		// treat it as a footer comment for the last element.
		comment := p.parseFootComment(ctx, curColumn)
		sequenceNode.FootComment = comment
	}
	return sequenceNode, nil
}

func (p *parser) parseAnchor(ctx *context) (*ast.AnchorNode, error) {
	tk := p.currentToken()
	anchor := ast.Anchor(tk)
	anchor.SetPath(ctx.path)
	ntk := p.nextToken()
	if ntk == nil {
		return nil, errors.ErrSyntax("unexpected anchor. anchor name is undefined", tk)
	}
	p.progress(1) // skip anchor token
	name, err := p.parseToken(ctx, p.currentToken())
	if err != nil {
		return nil, err
	}
	anchor.Name = name
	ntk = p.nextToken()
	if ntk == nil {
		return nil, errors.ErrSyntax("unexpected anchor. anchor value is undefined", p.currentToken())
	}
	p.progress(1)
	value, err := p.parseToken(ctx, p.currentToken())
	if err != nil {
		return nil, err
	}
	anchor.Value = value
	return anchor, nil
}

func (p *parser) parseAlias(ctx *context) (*ast.AliasNode, error) {
	tk := p.currentToken()
	alias := ast.Alias(tk)
	alias.SetPath(ctx.path)
	ntk := p.nextToken()
	if ntk == nil {
		return nil, errors.ErrSyntax("unexpected alias. alias name is undefined", tk)
	}
	p.progress(1) // skip alias token
	name, err := p.parseToken(ctx, p.currentToken())
	if err != nil {
		return nil, err
	}
	alias.Value = name
	return alias, nil
}

func (p *parser) parseMapKey(ctx *context) (ast.MapKeyNode, error) {
	tk := p.currentToken()
	if value := p.parseScalarValue(tk); value != nil {
		return value, nil
	}
	switch tk.Type {
	case token.MergeKeyType:
		return ast.MergeKey(tk), nil
	case token.MappingKeyType:
		return p.parseMappingKey(ctx)
	}
	return nil, errors.ErrSyntax("unexpected mapping key", tk)
}

func (p *parser) parseStringValue(tk *token.Token) *ast.StringNode {
	switch tk.Type {
	case token.StringType,
		token.SingleQuoteType,
		token.DoubleQuoteType:
		return ast.String(tk)
	}
	return nil
}

func (p *parser) parseScalarValueWithComment(ctx *context, tk *token.Token) (ast.ScalarNode, error) {
	node := p.parseScalarValue(tk)
	if node == nil {
		return nil, nil
	}
	node.SetPath(ctx.path)
	if p.isSameLineComment(p.nextToken(), node) {
		p.progress(1)
		if err := p.setSameLineCommentIfExists(ctx, node); err != nil {
			return nil, err
		}
	}
	return node, nil
}

func (p *parser) parseScalarValue(tk *token.Token) ast.ScalarNode {
	if node := p.parseStringValue(tk); node != nil {
		return node
	}
	switch tk.Type {
	case token.NullType:
		return ast.Null(tk)
	case token.BoolType:
		return ast.Bool(tk)
	case token.IntegerType,
		token.BinaryIntegerType,
		token.OctetIntegerType,
		token.HexIntegerType:
		return ast.Integer(tk)
	case token.FloatType:
		return ast.Float(tk)
	case token.InfinityType:
		return ast.Infinity(tk)
	case token.NanType:
		return ast.Nan(tk)
	}
	return nil
}

func (p *parser) parseDirective(ctx *context) (*ast.DirectiveNode, error) {
	node := ast.Directive(p.currentToken())
	p.progress(1) // skip directive token
	value, err := p.parseToken(ctx, p.currentToken())
	if err != nil {
		return nil, err
	}
	node.Value = value
	p.progress(1)
	tk := p.currentToken()
	if tk == nil {
		// Since current token is nil, use the previous token to specify
		// the syntax error location.
		return nil, errors.ErrSyntax("unexpected directive value. document not started", p.previousToken())
	}
	if tk.Type != token.DocumentHeaderType {
		return nil, errors.ErrSyntax("unexpected directive value. document not started", p.currentToken())
	}
	return node, nil
}
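
// parseLiteral parses a literal ("|") or folded (">") block scalar, attaching
// any comment that appears between the indicator and the scalar body.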
func (p *parser) parseLiteral(ctx *context) (*ast.LiteralNode, error) {
	node := ast.Literal(p.currentToken())
	p.progress(1) // skip literal/folded token

	tk := p.currentToken()
	if tk == nil {
		node.Value = ast.String(token.New("", "", node.Start.Position))
		return node, nil
	}
	var comment *ast.CommentGroupNode
	if tk.Type == token.CommentType {
		comment = p.parseCommentOnly(ctx)
		comment.SetPath(ctx.path)
		if err := node.SetComment(comment); err != nil {
			return nil, err
		}
		tk = p.currentToken()
	}
	value, err := p.parseToken(ctx, tk)
	if err != nil {
		return nil, err
	}
	snode, ok := value.(*ast.StringNode)
	if !ok {
		return nil, errors.ErrSyntax("unexpected token. required string token", value.GetToken())
	}
	node.Value = snode
	return node, nil
}

func (p *parser) isSameLineComment(tk *token.Token, node ast.Node) bool {
	if tk == nil {
		return false
	}
	if tk.Type != token.CommentType {
		return false
	}
	return tk.Position.Line == node.GetToken().Position.Line
}

func (p *parser) setSameLineCommentIfExists(ctx *context, node ast.Node) error {
	tk := p.currentToken()
	if !p.isSameLineComment(tk, node) {
		return nil
	}
	comment := ast.CommentGroup([]*token.Token{tk})
	comment.SetPath(ctx.path)
	if err := node.SetComment(comment); err != nil {
		return err
	}
	return nil
}

func (p *parser) parseDocument(ctx *context) (*ast.DocumentNode, error) {
	startTk := p.currentToken()
	p.progress(1) // skip document header token
	body, err := p.parseToken(ctx, p.currentToken())
	if err != nil {
		return nil, err
	}
	node := ast.Document(startTk, body)
	if ntk := p.nextToken(); ntk != nil && ntk.Type == token.DocumentEndType {
		node.End = ntk
		p.progress(1)
	}
	return node, nil
}

func (p *parser) parseCommentOnly(ctx *context) *ast.CommentGroupNode {
	commentTokens := []*token.Token{}
	for {
		tk := p.currentToken()
		if tk == nil {
			break
		}
		if tk.Type != token.CommentType {
			break
		}
		commentTokens = append(commentTokens, tk)
		p.progressIgnoreComment(1) // skip comment token
	}
	return ast.CommentGroup(commentTokens)
}

func (p *parser) parseFootComment(ctx *context, col int) *ast.CommentGroupNode {
	commentTokens := []*token.Token{}
	for {
		p.progressIgnoreComment(1)
		commentTokens = append(commentTokens, p.currentToken())

		nextTk := p.nextToken()
		if nextTk == nil {
			break
		}
		if nextTk.Type != token.CommentType {
			break
		}
		if col > nextTk.Position.Column {
			break
		}
	}
	return ast.CommentGroup(commentTokens)
}

func (p *parser) parseComment(ctx *context) (ast.Node, error) {
	group := p.parseCommentOnly(ctx)
	node, err := p.parseToken(ctx, p.currentToken())
	if err != nil {
		return nil, err
	}
	if node == nil {
		return group, nil
	}
	group.SetPath(node.GetPath())
	if err := node.SetComment(group); err != nil {
		return nil, err
	}
	return node, nil
}

func (p *parser) parseMappingKey(ctx *context) (*ast.MappingKeyNode, error) {
	keyTk := p.currentToken()
	node := ast.MappingKey(keyTk)
	node.SetPath(ctx.path)
	p.progress(1) // skip mapping key token
	value, err := p.parseToken(ctx.withChild(keyTk.Value), p.currentToken())
	if err != nil {
		return nil, err
	}
	node.Value = value
	return node, nil
}

func (p *parser) parseToken(ctx *context, tk *token.Token) (ast.Node, error) {
	node, err := p.createNodeFromToken(ctx, tk)
	if err != nil {
		return nil, err
	}
	if node != nil && node.GetPath() == "" {
		node.SetPath(ctx.path)
	}
	return node, nil
}

func (p *parser) createNodeFromToken(ctx *context, tk *token.Token) (ast.Node, error) {
	if tk == nil {
		return nil, nil
	}
	if tk.NextType() == token.MappingValueType {
		node, err := p.parseMappingValue(ctx)
		return node, err
	}
	node, err := p.parseScalarValueWithComment(ctx, tk)
	if err != nil {
		return nil, err
	}
	if node != nil {
		return node, nil
	}
	switch tk.Type {
	case token.CommentType:
		return p.parseComment(ctx)
	case token.MappingKeyType:
		return p.parseMappingKey(ctx)
	case token.DocumentHeaderType:
		return p.parseDocument(ctx)
	case token.MappingStartType:
		return p.parseMapping(ctx)
	case token.SequenceStartType:
		return p.parseSequence(ctx)
	case token.SequenceEndType:
		// SequenceEndType is always validated in parseSequence.
		// Therefore, if this is found in other cases, it is treated as a syntax error.
		return nil, errors.ErrSyntax("could not find '[' character corresponding to ']'", tk)
	case token.SequenceEntryType:
		return p.parseSequenceEntry(ctx)
	case token.AnchorType:
		return p.parseAnchor(ctx)
	case token.AliasType:
		return p.parseAlias(ctx)
	case token.DirectiveType:
		return p.parseDirective(ctx)
	case token.TagType:
		return p.parseTag(ctx)
	case token.LiteralType, token.FoldedType:
		return p.parseLiteral(ctx)
	case token.MappingValueType:
		return nil, errors.ErrSyntax("found an invalid key for this map", tk)
	}
	return nil, nil
}

func (p *parser) parse(ctx *context) (*ast.File, error) {
	file := &ast.File{Docs: []*ast.DocumentNode{}}
	for p.next() {
		node, err := p.parseToken(ctx, p.currentToken())
		if err != nil {
			return nil, err
		}
		p.progressIgnoreComment(1)
		if node == nil {
			continue
		}
		if doc, ok := node.(*ast.DocumentNode); ok {
			file.Docs = append(file.Docs, doc)
		} else {
			file.Docs = append(file.Docs, ast.Document(nil, node))
		}
	}
	return file, nil
}
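
// A minimal usage sketch of the exported API below (ParseBytes with the
// ParseComments mode); error handling is abbreviated:
//
//	f, err := ParseBytes([]byte("a: 1\nb: [2, 3]\n"), ParseComments)
//	if err != nil {
//		// handle the syntax error
//	}
//	for _, doc := range f.Docs {
//		_ = doc // each entry holds the AST of one YAML document
//	}

// Mode is a set of bit flags that controls optional parser behavior.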
type Mode uint

const (
	ParseComments Mode = 1 << iota // parse comments and add them to AST
)

// ParseBytes parses the given byte slice and returns an ast.File.
func ParseBytes(bytes []byte, mode Mode) (*ast.File, error) {
	tokens := lexer.Tokenize(string(bytes))
	f, err := Parse(tokens, mode)
	if err != nil {
		return nil, err
	}
	return f, nil
}

// Parse parses the given token instances and returns an ast.File.
func Parse(tokens token.Tokens, mode Mode) (*ast.File, error) {
	if tk := tokens.InvalidToken(); tk != nil {
		return nil, errors.ErrSyntax("found invalid token", tk)
	}
	f, err := newParser(tokens, mode).parse(newContext())
	if err != nil {
		return nil, err
	}
	return f, nil
}

// ParseFile parses the file at the given path and returns an ast.File.
func ParseFile(filename string, mode Mode) (*ast.File, error) {
	file, err := os.ReadFile(filename)
	if err != nil {
		return nil, err
	}
	f, err := ParseBytes(file, mode)
	if err != nil {
		return nil, err
	}
	f.Name = filename
	return f, nil
}