2019-10-16 18:21:20 +09:00
|
|
|
package parser
|
|
|
|
|
|
|
|
import (
|
2019-11-05 17:02:55 +09:00
|
|
|
"io/ioutil"
|
2019-10-23 03:21:42 +09:00
|
|
|
"strings"
|
|
|
|
|
2019-10-16 18:21:20 +09:00
|
|
|
"github.com/goccy/go-yaml/ast"
|
2019-10-24 11:07:40 +09:00
|
|
|
"github.com/goccy/go-yaml/internal/errors"
|
2019-11-05 17:02:55 +09:00
|
|
|
"github.com/goccy/go-yaml/lexer"
|
2019-10-16 18:21:20 +09:00
|
|
|
"github.com/goccy/go-yaml/token"
|
2019-10-31 12:38:44 +09:00
|
|
|
"golang.org/x/xerrors"
|
2019-10-16 18:21:20 +09:00
|
|
|
)
|
|
|
|
|
2019-11-05 17:02:55 +09:00
|
|
|
// parser converts a lexed token stream into an AST. It is stateless; all
// per-parse state is carried in the *context passed to each method.
type parser struct{}
|
2019-10-16 18:21:20 +09:00
|
|
|
|
2019-11-05 17:02:55 +09:00
|
|
|
func (p *parser) parseMapping(ctx *context) (ast.Node, error) {
|
2019-10-31 12:47:23 +09:00
|
|
|
node := ast.Mapping(ctx.currentToken(), true)
|
2019-10-16 18:21:20 +09:00
|
|
|
ctx.progress(1) // skip MappingStart token
|
|
|
|
for ctx.next() {
|
|
|
|
tk := ctx.currentToken()
|
|
|
|
if tk.Type == token.MappingEndType {
|
|
|
|
node.End = tk
|
|
|
|
break
|
|
|
|
} else if tk.Type == token.CollectEntryType {
|
|
|
|
ctx.progress(1)
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
value, err := p.parseToken(ctx, tk)
|
|
|
|
if err != nil {
|
2019-10-23 13:30:22 +09:00
|
|
|
return nil, errors.Wrapf(err, "failed to parse mapping value in mapping node")
|
2019-10-16 18:21:20 +09:00
|
|
|
}
|
|
|
|
mvnode, ok := value.(*ast.MappingValueNode)
|
|
|
|
if !ok {
|
2019-10-23 13:30:22 +09:00
|
|
|
return nil, errors.ErrSyntax("failed to parse flow mapping value node", value.GetToken())
|
2019-10-16 18:21:20 +09:00
|
|
|
}
|
|
|
|
node.Values = append(node.Values, mvnode)
|
|
|
|
ctx.progress(1)
|
|
|
|
}
|
|
|
|
return node, nil
|
|
|
|
}
|
|
|
|
|
2019-11-05 17:02:55 +09:00
|
|
|
func (p *parser) parseSequence(ctx *context) (ast.Node, error) {
|
2019-10-31 13:06:35 +09:00
|
|
|
node := ast.Sequence(ctx.currentToken(), true)
|
2019-10-16 18:21:20 +09:00
|
|
|
ctx.progress(1) // skip SequenceStart token
|
|
|
|
for ctx.next() {
|
|
|
|
tk := ctx.currentToken()
|
|
|
|
if tk.Type == token.SequenceEndType {
|
|
|
|
node.End = tk
|
|
|
|
break
|
|
|
|
} else if tk.Type == token.CollectEntryType {
|
|
|
|
ctx.progress(1)
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
value, err := p.parseToken(ctx, tk)
|
|
|
|
if err != nil {
|
2019-10-23 13:30:22 +09:00
|
|
|
return nil, errors.Wrapf(err, "failed to parse sequence value in flow sequence node")
|
2019-10-16 18:21:20 +09:00
|
|
|
}
|
|
|
|
node.Values = append(node.Values, value)
|
|
|
|
ctx.progress(1)
|
|
|
|
}
|
|
|
|
return node, nil
|
|
|
|
}
|
|
|
|
|
2019-11-05 17:02:55 +09:00
|
|
|
func (p *parser) parseTag(ctx *context) (ast.Node, error) {
|
2019-10-16 18:21:20 +09:00
|
|
|
node := &ast.TagNode{Start: ctx.currentToken()}
|
|
|
|
ctx.progress(1) // skip tag token
|
|
|
|
value, err := p.parseToken(ctx, ctx.currentToken())
|
|
|
|
if err != nil {
|
2019-10-23 13:30:22 +09:00
|
|
|
return nil, errors.Wrapf(err, "failed to parse tag value")
|
2019-10-16 18:21:20 +09:00
|
|
|
}
|
|
|
|
node.Value = value
|
|
|
|
return node, nil
|
|
|
|
}
|
|
|
|
|
2019-12-22 11:59:49 +09:00
|
|
|
func (p *parser) removeLeftSideNewLineCharacter(src string) string {
|
|
|
|
return strings.TrimLeft(strings.TrimLeft(src, "\r"), "\n")
|
|
|
|
}
|
|
|
|
|
|
|
|
func (p *parser) existsNewLineCharacter(src string) bool {
|
|
|
|
if strings.Index(src, "\n") > 0 {
|
|
|
|
return true
|
|
|
|
}
|
|
|
|
if strings.Index(src, "\r") > 0 {
|
|
|
|
return true
|
|
|
|
}
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
|
2019-11-05 17:02:55 +09:00
|
|
|
// validateMapKey rejects unquoted string map keys whose raw source text spans
// multiple lines. Non-string token types are accepted without inspection.
func (p *parser) validateMapKey(tk *token.Token) error {
	if tk.Type != token.StringType {
		return nil
	}
	// Origin holds the raw source text of the token. Newlines that merely
	// precede the key are legal, so strip them before checking for embedded
	// line breaks inside the key itself.
	origin := p.removeLeftSideNewLineCharacter(tk.Origin)
	if p.existsNewLineCharacter(origin) {
		return errors.ErrSyntax("unexpected key name", tk)
	}
	return nil
}
|
|
|
|
|
2019-11-05 17:02:55 +09:00
|
|
|
// parseMappingValue parses one "key: value" pair at the current token and then
// greedily collects any following pairs at the same indentation column into a
// block mapping. It returns a single *ast.MappingValueNode when exactly one
// pair was found, otherwise an *ast.MappingNode holding all collected pairs.
func (p *parser) parseMappingValue(ctx *context) (ast.Node, error) {
	key := p.parseMapKey(ctx.currentToken())
	if key == nil {
		return nil, errors.ErrSyntax("unexpected mapping 'key'. key is undefined", ctx.currentToken())
	}
	// Reject multi-line unquoted keys early.
	if err := p.validateMapKey(key.GetToken()); err != nil {
		return nil, errors.Wrapf(err, "validate mapping key error")
	}
	// Only scalar nodes may be used as mapping keys.
	if _, ok := key.(ast.ScalarNode); !ok {
		return nil, errors.ErrSyntax("unexpected mapping 'key', key is not scalar value", key.GetToken())
	}
	ctx.progress(1)          // progress to mapping value token
	tk := ctx.currentToken() // get mapping value token (the ':')
	ctx.progress(1)          // progress to value token
	// A comment sitting on the same line as the key is attached to the key.
	if err := p.setSameLineCommentIfExists(ctx, key); err != nil {
		return nil, errors.Wrapf(err, "failed to set same line comment to node")
	}
	if key.GetComment() != nil {
		// if current token is comment, GetComment() is not nil.
		// then progress to value token
		ctx.progressIgnoreComment(1)
	}
	var value ast.Node
	if vtk := ctx.currentToken(); vtk == nil {
		// "key:" with nothing after it — the value is an implicit null,
		// positioned at the ':' token.
		value = ast.Null(token.New("null", "null", tk.Position))
	} else {
		v, err := p.parseToken(ctx, ctx.currentToken())
		if err != nil {
			return nil, errors.Wrapf(err, "failed to parse mapping 'value' node")
		}
		value = v
	}
	// A string value starting in the same column as its key usually means the
	// author forgot the ':' on what was intended to be the next key. Accept it
	// only when the following token shows it really is a value.
	keyColumn := key.GetToken().Position.Column
	valueColumn := value.GetToken().Position.Column
	if keyColumn == valueColumn {
		if value.Type() == ast.StringType {
			ntk := ctx.nextToken()
			if ntk == nil || (ntk.Type != token.MappingValueType && ntk.Type != token.SequenceEntryType) {
				return nil, errors.ErrSyntax("could not found expected ':' token", value.GetToken())
			}
		}
	}
	mvnode := &ast.MappingValueNode{
		Start: tk,
		Key:   key,
		Value: value,
	}
	ntk := ctx.nextNotCommentToken()
	antk := ctx.afterNextNotCommentToken()
	node := &ast.MappingNode{
		Start:  tk,
		Values: []*ast.MappingValueNode{mvnode},
	}
	// Collect sibling pairs: the lookahead must be another "key:" (antk is a
	// ':' token) whose key starts in the same column as this pair's key.
	for antk != nil && antk.Type == token.MappingValueType &&
		ntk.Position.Column == key.GetToken().Position.Column {
		ctx.progressIgnoreComment(1)
		value, err := p.parseToken(ctx, ctx.currentToken())
		if err != nil {
			return nil, errors.Wrapf(err, "failed to parse mapping node")
		}
		switch value.Type() {
		case ast.MappingType:
			// The recursive call itself collected several pairs; flatten them
			// into this node, moving any comment onto the first pair.
			c := value.(*ast.MappingNode)
			comment := c.GetComment()
			for idx, v := range c.Values {
				if idx == 0 && comment != nil {
					if err := v.SetComment(comment); err != nil {
						return nil, errors.Wrapf(err, "failed to set comment token to node")
					}
				}
				node.Values = append(node.Values, v)
			}
		case ast.MappingValueType:
			node.Values = append(node.Values, value.(*ast.MappingValueNode))
		default:
			return nil, xerrors.Errorf("failed to parse mapping value node node is %s", value.Type())
		}
		ntk = ctx.nextNotCommentToken()
		antk = ctx.afterNextNotCommentToken()
	}
	// A lone pair is returned as a MappingValueNode rather than a one-element
	// MappingNode.
	if len(node.Values) == 1 {
		return mvnode, nil
	}
	return node, nil
}
|
|
|
|
|
2019-11-05 17:02:55 +09:00
|
|
|
// parseSequenceEntry parses a block sequence: consecutive "- value" entries
// whose '-' tokens all start in the same column as the first entry. It stops
// at end of input, at a non-entry token, or when the column changes.
func (p *parser) parseSequenceEntry(ctx *context) (ast.Node, error) {
	tk := ctx.currentToken()
	sequenceNode := &ast.SequenceNode{
		Start:  tk,
		Values: []ast.Node{},
	}
	// All entries of this sequence must share the first '-' token's column.
	curColumn := tk.Position.Column
	for tk.Type == token.SequenceEntryType {
		ctx.progress(1) // skip sequence token
		value, err := p.parseToken(ctx, ctx.currentToken())
		if err != nil {
			return nil, errors.Wrapf(err, "failed to parse sequence")
		}
		sequenceNode.Values = append(sequenceNode.Values, value)
		// Peek past comments at the next token; only continue when it is
		// another '-' at the same indentation level.
		tk = ctx.nextNotCommentToken()
		if tk == nil {
			break
		}
		if tk.Type != token.SequenceEntryType {
			break
		}
		if tk.Position.Column != curColumn {
			break
		}
		ctx.progressIgnoreComment(1)
	}
	return sequenceNode, nil
}
|
|
|
|
|
2019-11-05 17:02:55 +09:00
|
|
|
func (p *parser) parseAnchor(ctx *context) (ast.Node, error) {
|
2019-10-16 18:21:20 +09:00
|
|
|
tk := ctx.currentToken()
|
|
|
|
anchor := &ast.AnchorNode{Start: tk}
|
|
|
|
ntk := ctx.nextToken()
|
|
|
|
if ntk == nil {
|
2019-10-23 13:30:22 +09:00
|
|
|
return nil, errors.ErrSyntax("unexpected anchor. anchor name is undefined", tk)
|
2019-10-16 18:21:20 +09:00
|
|
|
}
|
|
|
|
ctx.progress(1) // skip anchor token
|
|
|
|
name, err := p.parseToken(ctx, ctx.currentToken())
|
|
|
|
if err != nil {
|
2019-10-23 13:30:22 +09:00
|
|
|
return nil, errors.Wrapf(err, "failed to parser anchor name node")
|
2019-10-16 18:21:20 +09:00
|
|
|
}
|
|
|
|
anchor.Name = name
|
|
|
|
ntk = ctx.nextToken()
|
|
|
|
if ntk == nil {
|
2019-10-23 13:30:22 +09:00
|
|
|
return nil, errors.ErrSyntax("unexpected anchor. anchor value is undefined", ctx.currentToken())
|
2019-10-16 18:21:20 +09:00
|
|
|
}
|
|
|
|
ctx.progress(1)
|
|
|
|
value, err := p.parseToken(ctx, ctx.currentToken())
|
|
|
|
if err != nil {
|
2019-10-23 13:30:22 +09:00
|
|
|
return nil, errors.Wrapf(err, "failed to parser anchor name node")
|
2019-10-16 18:21:20 +09:00
|
|
|
}
|
|
|
|
anchor.Value = value
|
|
|
|
return anchor, nil
|
|
|
|
}
|
|
|
|
|
2019-11-05 17:02:55 +09:00
|
|
|
func (p *parser) parseAlias(ctx *context) (ast.Node, error) {
|
2019-10-16 18:21:20 +09:00
|
|
|
tk := ctx.currentToken()
|
|
|
|
alias := &ast.AliasNode{Start: tk}
|
|
|
|
ntk := ctx.nextToken()
|
|
|
|
if ntk == nil {
|
2019-10-23 13:30:22 +09:00
|
|
|
return nil, errors.ErrSyntax("unexpected alias. alias name is undefined", tk)
|
2019-10-16 18:21:20 +09:00
|
|
|
}
|
|
|
|
ctx.progress(1) // skip alias token
|
|
|
|
name, err := p.parseToken(ctx, ctx.currentToken())
|
|
|
|
if err != nil {
|
2019-10-23 13:30:22 +09:00
|
|
|
return nil, errors.Wrapf(err, "failed to parser alias name node")
|
2019-10-16 18:21:20 +09:00
|
|
|
}
|
|
|
|
alias.Value = name
|
|
|
|
return alias, nil
|
|
|
|
}
|
|
|
|
|
2019-11-05 17:02:55 +09:00
|
|
|
func (p *parser) parseMapKey(tk *token.Token) ast.Node {
|
2019-10-16 18:21:20 +09:00
|
|
|
if node := p.parseStringValue(tk); node != nil {
|
|
|
|
return node
|
|
|
|
}
|
|
|
|
if tk.Type == token.MergeKeyType {
|
|
|
|
return ast.MergeKey(tk)
|
|
|
|
}
|
2019-10-30 16:57:59 +09:00
|
|
|
if tk.Type == token.NullType {
|
|
|
|
return ast.Null(tk)
|
|
|
|
}
|
2019-10-16 18:21:20 +09:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2019-11-05 17:02:55 +09:00
|
|
|
func (p *parser) parseStringValue(tk *token.Token) ast.Node {
|
2019-10-16 18:21:20 +09:00
|
|
|
switch tk.Type {
|
|
|
|
case token.StringType,
|
|
|
|
token.SingleQuoteType,
|
|
|
|
token.DoubleQuoteType:
|
|
|
|
return ast.String(tk)
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2020-05-29 15:32:25 +09:00
|
|
|
func (p *parser) parseScalarValueWithComment(ctx *context, tk *token.Token) (ast.Node, error) {
|
|
|
|
node := p.parseScalarValue(tk)
|
|
|
|
if node == nil {
|
|
|
|
return nil, nil
|
|
|
|
}
|
2020-05-29 16:05:43 +09:00
|
|
|
if p.isSameLineComment(ctx.nextToken(), node) {
|
2020-05-29 15:32:25 +09:00
|
|
|
ctx.progress(1)
|
2020-05-29 16:05:43 +09:00
|
|
|
if err := p.setSameLineCommentIfExists(ctx, node); err != nil {
|
|
|
|
return nil, errors.Wrapf(err, "failed to set same line comment to node")
|
2020-05-29 15:32:25 +09:00
|
|
|
}
|
|
|
|
}
|
|
|
|
return node, nil
|
|
|
|
}
|
|
|
|
|
2019-11-05 17:02:55 +09:00
|
|
|
func (p *parser) parseScalarValue(tk *token.Token) ast.Node {
|
2019-10-16 18:21:20 +09:00
|
|
|
if node := p.parseStringValue(tk); node != nil {
|
|
|
|
return node
|
|
|
|
}
|
|
|
|
switch tk.Type {
|
|
|
|
case token.NullType:
|
|
|
|
return ast.Null(tk)
|
|
|
|
case token.BoolType:
|
|
|
|
return ast.Bool(tk)
|
2019-10-29 19:08:02 +09:00
|
|
|
case token.IntegerType,
|
|
|
|
token.BinaryIntegerType,
|
|
|
|
token.OctetIntegerType,
|
|
|
|
token.HexIntegerType:
|
2019-10-16 18:21:20 +09:00
|
|
|
return ast.Integer(tk)
|
|
|
|
case token.FloatType:
|
|
|
|
return ast.Float(tk)
|
|
|
|
case token.InfinityType:
|
|
|
|
return ast.Infinity(tk)
|
|
|
|
case token.NanType:
|
|
|
|
return ast.Nan(tk)
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2019-11-05 17:02:55 +09:00
|
|
|
func (p *parser) parseDirective(ctx *context) (ast.Node, error) {
|
2019-10-16 18:21:20 +09:00
|
|
|
node := &ast.DirectiveNode{Start: ctx.currentToken()}
|
|
|
|
ctx.progress(1) // skip directive token
|
|
|
|
value, err := p.parseToken(ctx, ctx.currentToken())
|
|
|
|
if err != nil {
|
2019-10-23 13:30:22 +09:00
|
|
|
return nil, errors.Wrapf(err, "failed to parse directive value")
|
2019-10-16 18:21:20 +09:00
|
|
|
}
|
|
|
|
node.Value = value
|
|
|
|
ctx.progress(1)
|
|
|
|
if ctx.currentToken().Type != token.DocumentHeaderType {
|
2019-10-23 13:30:22 +09:00
|
|
|
return nil, errors.ErrSyntax("unexpected directive value. document not started", ctx.currentToken())
|
2019-10-16 18:21:20 +09:00
|
|
|
}
|
|
|
|
return node, nil
|
|
|
|
}
|
|
|
|
|
2019-11-05 17:02:55 +09:00
|
|
|
func (p *parser) parseLiteral(ctx *context) (ast.Node, error) {
|
2019-10-16 18:21:20 +09:00
|
|
|
node := &ast.LiteralNode{Start: ctx.currentToken()}
|
|
|
|
ctx.progress(1) // skip literal/folded token
|
|
|
|
value, err := p.parseToken(ctx, ctx.currentToken())
|
|
|
|
if err != nil {
|
2019-10-23 13:30:22 +09:00
|
|
|
return nil, errors.Wrapf(err, "failed to parse literal/folded value")
|
2019-10-16 18:21:20 +09:00
|
|
|
}
|
|
|
|
snode, ok := value.(*ast.StringNode)
|
|
|
|
if !ok {
|
2019-10-23 13:30:22 +09:00
|
|
|
return nil, errors.ErrSyntax("unexpected token. required string token", value.GetToken())
|
2019-10-16 18:21:20 +09:00
|
|
|
}
|
|
|
|
node.Value = snode
|
|
|
|
return node, nil
|
|
|
|
}
|
|
|
|
|
2020-05-29 16:05:43 +09:00
|
|
|
func (p *parser) isSameLineComment(tk *token.Token, node ast.Node) bool {
|
2020-05-28 21:39:06 +09:00
|
|
|
if tk == nil {
|
2020-05-29 16:05:43 +09:00
|
|
|
return false
|
2020-05-28 21:39:06 +09:00
|
|
|
}
|
|
|
|
if tk.Type != token.CommentType {
|
2020-05-29 16:05:43 +09:00
|
|
|
return false
|
2020-05-28 21:39:06 +09:00
|
|
|
}
|
2020-05-29 16:05:43 +09:00
|
|
|
return tk.Position.Line == node.GetToken().Position.Line
|
|
|
|
}
|
|
|
|
|
|
|
|
func (p *parser) setSameLineCommentIfExists(ctx *context, node ast.Node) error {
|
|
|
|
tk := ctx.currentToken()
|
|
|
|
if !p.isSameLineComment(tk, node) {
|
2020-05-28 21:39:06 +09:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
if err := node.SetComment(tk); err != nil {
|
|
|
|
return errors.Wrapf(err, "failed to set comment token to ast.Node")
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2019-11-05 17:02:55 +09:00
|
|
|
func (p *parser) parseDocument(ctx *context) (*ast.Document, error) {
|
|
|
|
node := &ast.Document{Start: ctx.currentToken()}
|
|
|
|
ctx.progress(1) // skip document header token
|
|
|
|
body, err := p.parseToken(ctx, ctx.currentToken())
|
|
|
|
if err != nil {
|
|
|
|
return nil, errors.Wrapf(err, "failed to parse document body")
|
|
|
|
}
|
|
|
|
node.Body = body
|
|
|
|
if ntk := ctx.nextToken(); ntk != nil && ntk.Type == token.DocumentEndType {
|
|
|
|
node.End = ntk
|
|
|
|
ctx.progress(1)
|
|
|
|
}
|
|
|
|
return node, nil
|
|
|
|
}
|
|
|
|
|
2020-05-28 21:39:06 +09:00
|
|
|
func (p *parser) parseComment(ctx *context) (ast.Node, error) {
|
2020-05-29 15:32:25 +09:00
|
|
|
commentTokens := []*token.Token{}
|
|
|
|
for {
|
|
|
|
tk := ctx.currentToken()
|
|
|
|
if tk == nil {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
if tk.Type != token.CommentType {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
commentTokens = append(commentTokens, tk)
|
|
|
|
ctx.progressIgnoreComment(1) // skip comment token
|
|
|
|
}
|
|
|
|
// TODO: support token group. currently merge tokens to one token
|
|
|
|
firstToken := commentTokens[0]
|
|
|
|
values := []string{}
|
|
|
|
origins := []string{}
|
|
|
|
for _, tk := range commentTokens {
|
|
|
|
values = append(values, tk.Value)
|
|
|
|
origins = append(origins, tk.Origin)
|
|
|
|
}
|
|
|
|
firstToken.Value = strings.Join(values, "")
|
|
|
|
firstToken.Value = strings.Join(origins, "")
|
2020-05-28 21:39:06 +09:00
|
|
|
node, err := p.parseToken(ctx, ctx.currentToken())
|
|
|
|
if err != nil {
|
|
|
|
return nil, errors.Wrapf(err, "failed to parse node after comment")
|
|
|
|
}
|
|
|
|
if node == nil {
|
2020-05-29 15:32:25 +09:00
|
|
|
return ast.Comment(firstToken), nil
|
2020-05-28 21:39:06 +09:00
|
|
|
}
|
2020-05-29 15:32:25 +09:00
|
|
|
if err := node.SetComment(firstToken); err != nil {
|
2020-05-28 21:39:06 +09:00
|
|
|
return nil, errors.Wrapf(err, "failed to set comment token to node")
|
|
|
|
}
|
|
|
|
return node, nil
|
|
|
|
}
|
|
|
|
|
2019-11-05 17:02:55 +09:00
|
|
|
// parseToken is the central dispatcher: it inspects tk (and one-token
// lookahead) and delegates to the appropriate parse method. It returns
// (nil, nil) for a nil token or a token no rule matches.
func (p *parser) parseToken(ctx *context, tk *token.Token) (ast.Node, error) {
	if tk == nil {
		return nil, nil
	}
	// A token followed by ':' starts a "key: value" pair regardless of its
	// own type.
	if tk.NextType() == token.MappingValueType {
		node, err := p.parseMappingValue(ctx)
		return node, err
	}
	// Scalars (with a possible trailing same-line comment) are the common
	// case; try them before the per-type switch.
	node, err := p.parseScalarValueWithComment(ctx, tk)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse scalar value")
	}
	if node != nil {
		return node, nil
	}
	switch tk.Type {
	case token.CommentType:
		return p.parseComment(ctx)
	case token.DocumentHeaderType:
		return p.parseDocument(ctx)
	case token.MappingStartType:
		return p.parseMapping(ctx)
	case token.SequenceStartType:
		return p.parseSequence(ctx)
	case token.SequenceEntryType:
		return p.parseSequenceEntry(ctx)
	case token.AnchorType:
		return p.parseAnchor(ctx)
	case token.AliasType:
		return p.parseAlias(ctx)
	case token.DirectiveType:
		return p.parseDirective(ctx)
	case token.TagType:
		return p.parseTag(ctx)
	case token.LiteralType, token.FoldedType:
		return p.parseLiteral(ctx)
	}
	return nil, nil
}
|
|
|
|
|
2019-11-05 17:02:55 +09:00
|
|
|
// parse drives the whole token stream, collecting each top-level node into an
// *ast.File. Nodes that are not already documents are wrapped in one.
func (p *parser) parse(tokens token.Tokens, mode Mode) (*ast.File, error) {
	ctx := newContext(tokens, mode)
	file := &ast.File{Docs: []*ast.Document{}}
	for ctx.next() {
		node, err := p.parseToken(ctx, ctx.currentToken())
		if err != nil {
			return nil, errors.Wrapf(err, "failed to parse")
		}
		ctx.progressIgnoreComment(1)
		// parseToken returns (nil, nil) for tokens that produce no node.
		if node == nil {
			continue
		}
		if doc, ok := node.(*ast.Document); ok {
			file.Docs = append(file.Docs, doc)
		} else {
			// wrap a bare top-level node in an implicit document
			file.Docs = append(file.Docs, &ast.Document{Body: node})
		}
	}
	return file, nil
}
|
|
|
|
|
|
|
|
// Mode is a bit set of flags controlling optional parser behavior; zero means
// default parsing.
type Mode uint

const (
	ParseComments Mode = 1 << iota // parse comments and add them to AST
)
|
|
|
|
|
|
|
|
// ParseBytes parse from byte slice, and returns ast.File
|
|
|
|
func ParseBytes(bytes []byte, mode Mode) (*ast.File, error) {
|
|
|
|
tokens := lexer.Tokenize(string(bytes))
|
|
|
|
f, err := Parse(tokens, mode)
|
|
|
|
if err != nil {
|
|
|
|
return nil, errors.Wrapf(err, "failed to parse")
|
|
|
|
}
|
|
|
|
return f, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// Parse parse from token instances, and returns ast.File
|
|
|
|
func Parse(tokens token.Tokens, mode Mode) (*ast.File, error) {
|
|
|
|
var p parser
|
|
|
|
f, err := p.parse(tokens, mode)
|
|
|
|
if err != nil {
|
|
|
|
return nil, errors.Wrapf(err, "failed to parse")
|
|
|
|
}
|
|
|
|
return f, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// Parse parse from filename, and returns ast.File
|
|
|
|
func ParseFile(filename string, mode Mode) (*ast.File, error) {
|
|
|
|
file, err := ioutil.ReadFile(filename)
|
|
|
|
if err != nil {
|
|
|
|
return nil, errors.Wrapf(err, "failed to read file: %s", filename)
|
|
|
|
}
|
|
|
|
f, err := ParseBytes(file, mode)
|
|
|
|
if err != nil {
|
|
|
|
return nil, errors.Wrapf(err, "failed to parse")
|
|
|
|
}
|
|
|
|
f.Name = filename
|
|
|
|
return f, nil
|
2019-10-16 18:21:20 +09:00
|
|
|
}
|