From e884cbc35ef1f0ebd9a5e49841f247f797af47db Mon Sep 17 00:00:00 2001 From: steve-ky Date: Fri, 24 May 2024 12:46:07 +0800 Subject: [PATCH 1/6] Add support for string unescape --- expressions/scanner.go | 92 ++++++++++++++++++-------------- expressions/scanner.rl | 18 +++++-- expressions/scanner_test.go | 12 +++++ filters/standard_filters_test.go | 6 +-- liquid_test.go | 24 +++++++++ 5 files changed, 105 insertions(+), 47 deletions(-) diff --git a/expressions/scanner.go b/expressions/scanner.go index 774a1682..d58551a6 100644 --- a/expressions/scanner.go +++ b/expressions/scanner.go @@ -1,9 +1,12 @@ //line scanner.rl:1 package expressions -import "strconv" +import ( + "fmt" + "strconv" +) -//line scanner.go:9 +//line scanner.go:12 var _expression_actions []byte = []byte{ 0, 1, 0, 1, 1, 1, 2, 1, 10, 1, 11, 1, 12, 1, 13, 1, 14, @@ -209,7 +212,7 @@ const expression_error int = -1 const expression_en_main int = 23 -//line scanner.rl:11 +//line scanner.rl:14 type lexer struct { parseValue @@ -228,7 +231,7 @@ func newLexer(data []byte) *lexer { pe: len(data), } -//line scanner.go:236 +//line scanner.go:239 { lex.cs = expression_start lex.ts = 0 @@ -236,7 +239,7 @@ func newLexer(data []byte) *lexer { lex.act = 0 } -//line scanner.rl:30 +//line scanner.rl:33 return lex } @@ -244,7 +247,7 @@ func (lex *lexer) Lex(out *yySymType) int { eof := lex.pe tok := 0 -//line scanner.go:253 +//line scanner.go:256 { var _klen int var _trans int @@ -265,7 +268,7 @@ func (lex *lexer) Lex(out *yySymType) int { //line NONE:1 lex.ts = (lex.p) -//line scanner.go:273 +//line scanner.go:276 } } @@ -341,28 +344,28 @@ func (lex *lexer) Lex(out *yySymType) int { lex.te = (lex.p) + 1 case 3: -//line scanner.rl:38 +//line scanner.rl:41 lex.act = 8 case 4: -//line scanner.rl:95 +//line scanner.rl:107 lex.act = 9 case 5: -//line scanner.rl:102 +//line scanner.rl:114 lex.act = 14 case 6: -//line scanner.rl:103 +//line scanner.rl:115 lex.act = 15 case 7: -//line scanner.rl:104 +//line scanner.rl:116 lex.act = 16 case 8: -//line scanner.rl:107 +//line scanner.rl:119 lex.act = 17 case 9: -//line scanner.rl:43 +//line scanner.rl:46 lex.act = 20 case 10: -//line scanner.rl:83 +//line scanner.rl:95 lex.te = (lex.p) + 1 { tok = ASSIGN @@ -370,7 +373,7 @@ func (lex *lexer) Lex(out *yySymType) int { goto _out } case 11: -//line scanner.rl:84 +//line scanner.rl:96 lex.te = (lex.p) + 1 { tok = CYCLE @@ -378,7 +381,7 @@ func (lex *lexer) Lex(out *yySymType) int { goto _out } case 12: -//line scanner.rl:85 +//line scanner.rl:97 lex.te = (lex.p) + 1 { tok = LOOP @@ -386,7 +389,7 @@ func (lex *lexer) Lex(out *yySymType) int { goto _out } case 13: -//line scanner.rl:86 +//line scanner.rl:98 lex.te = (lex.p) + 1 { tok = WHEN @@ -394,18 +397,27 @@ func (lex *lexer) Lex(out *yySymType) int { goto _out } case 14: -//line scanner.rl:66 +//line scanner.rl:69 lex.te = (lex.p) + 1 { tok = LITERAL - // TODO unescape \x - out.val = string(lex.data[lex.ts+1 : lex.te-1]) + // unescape double quoted string + if lex.data[lex.ts] == '"' { + qs := string(lex.data[lex.ts:lex.te]) + s, err := strconv.Unquote(qs) + if err != nil { + panic(SyntaxError(fmt.Sprintf("%s to unescape %s", err, qs))) + } + out.val = s + } else { + out.val = string(lex.data[lex.ts+1 : lex.te-1]) + } (lex.p)++ goto _out } case 15: -//line scanner.rl:98 +//line scanner.rl:110 lex.te = (lex.p) + 1 { tok = EQ @@ -413,7 +425,7 @@ func (lex *lexer) Lex(out *yySymType) int { goto _out } case 16: -//line scanner.rl:99 +//line scanner.rl:111 lex.te = (lex.p) + 1 { tok = NEQ @@ -421,7 +433,7 
@@ func (lex *lexer) Lex(out *yySymType) int { goto _out } case 17: -//line scanner.rl:100 +//line scanner.rl:112 lex.te = (lex.p) + 1 { tok = GE @@ -429,7 +441,7 @@ func (lex *lexer) Lex(out *yySymType) int { goto _out } case 18: -//line scanner.rl:101 +//line scanner.rl:113 lex.te = (lex.p) + 1 { tok = LE @@ -437,7 +449,7 @@ func (lex *lexer) Lex(out *yySymType) int { goto _out } case 19: -//line scanner.rl:108 +//line scanner.rl:120 lex.te = (lex.p) + 1 { tok = DOTDOT @@ -445,7 +457,7 @@ func (lex *lexer) Lex(out *yySymType) int { goto _out } case 20: -//line scanner.rl:110 +//line scanner.rl:122 lex.te = (lex.p) + 1 { tok = KEYWORD @@ -454,7 +466,7 @@ func (lex *lexer) Lex(out *yySymType) int { goto _out } case 21: -//line scanner.rl:112 +//line scanner.rl:124 lex.te = (lex.p) + 1 { tok = PROPERTY @@ -463,7 +475,7 @@ func (lex *lexer) Lex(out *yySymType) int { goto _out } case 22: -//line scanner.rl:115 +//line scanner.rl:127 lex.te = (lex.p) + 1 { tok = int(lex.data[lex.ts]) @@ -471,7 +483,7 @@ func (lex *lexer) Lex(out *yySymType) int { goto _out } case 23: -//line scanner.rl:48 +//line scanner.rl:51 lex.te = (lex.p) (lex.p)-- { @@ -486,7 +498,7 @@ func (lex *lexer) Lex(out *yySymType) int { } case 24: -//line scanner.rl:57 +//line scanner.rl:60 lex.te = (lex.p) (lex.p)-- { @@ -501,7 +513,7 @@ func (lex *lexer) Lex(out *yySymType) int { } case 25: -//line scanner.rl:43 +//line scanner.rl:46 lex.te = (lex.p) (lex.p)-- { @@ -512,7 +524,7 @@ func (lex *lexer) Lex(out *yySymType) int { } case 26: -//line scanner.rl:112 +//line scanner.rl:124 lex.te = (lex.p) (lex.p)-- { @@ -522,12 +534,12 @@ func (lex *lexer) Lex(out *yySymType) int { goto _out } case 27: -//line scanner.rl:114 +//line scanner.rl:126 lex.te = (lex.p) (lex.p)-- case 28: -//line scanner.rl:115 +//line scanner.rl:127 lex.te = (lex.p) (lex.p)-- { @@ -536,7 +548,7 @@ func (lex *lexer) Lex(out *yySymType) int { goto _out } case 29: -//line scanner.rl:48 +//line scanner.rl:51 (lex.p) = (lex.te) - 1 { tok = LITERAL @@ -550,7 +562,7 @@ func (lex *lexer) Lex(out *yySymType) int { } case 30: -//line scanner.rl:115 +//line scanner.rl:127 (lex.p) = (lex.te) - 1 { tok = int(lex.data[lex.ts]) @@ -618,7 +630,7 @@ func (lex *lexer) Lex(out *yySymType) int { } } -//line scanner.go:552 +//line scanner.go:564 } } @@ -633,7 +645,7 @@ func (lex *lexer) Lex(out *yySymType) int { //line NONE:1 lex.ts = 0 -//line scanner.go:566 +//line scanner.go:578 } } @@ -656,7 +668,7 @@ func (lex *lexer) Lex(out *yySymType) int { } } -//line scanner.rl:119 +//line scanner.rl:131 return tok } diff --git a/expressions/scanner.rl b/expressions/scanner.rl index 20a6e2ab..2c2390c6 100644 --- a/expressions/scanner.rl +++ b/expressions/scanner.rl @@ -1,6 +1,9 @@ package expressions -import "strconv" +import ( + "fmt" + "strconv" +) %%{ machine expression; @@ -65,8 +68,17 @@ func (lex *lexer) Lex(out *yySymType) int { } action String { tok = LITERAL - // TODO unescape \x - out.val = string(lex.data[lex.ts+1:lex.te-1]) + // unescape double quoted string + if lex.data[lex.ts] == '"' { + qs := string(lex.data[lex.ts:lex.te]) + s, err := strconv.Unquote(qs) + if err != nil { + panic(SyntaxError(fmt.Sprintf("%s to unescape %s", err, qs))) + } + out.val = s + } else { + out.val = string(lex.data[lex.ts+1:lex.te-1]) + } fbreak; } action Relation { tok = RELATION; out.name = lex.token(); fbreak; } diff --git a/expressions/scanner_test.go b/expressions/scanner_test.go index 9eaff530..5cbe1a98 100644 --- a/expressions/scanner_test.go +++ b/expressions/scanner_test.go @@ -92,4 
+92,16 @@ func TestLex(t *testing.T) { // ts, _ = scanExpression(`%loop i in (3..5)`) // require.Len(t, ts, 9) + + // string unescape + ts, _ = scanExpression(`"abc" 'abc' "ab\nc" 'ab\tc'`) + require.Len(t, ts, 4) + require.Equal(t, LITERAL, ts[0].tok) + require.Equal(t, LITERAL, ts[1].tok) + require.Equal(t, LITERAL, ts[2].tok) + require.Equal(t, LITERAL, ts[3].tok) + require.Equal(t, "abc", ts[0].typ.val) + require.Equal(t, "abc", ts[1].typ.val) + require.Equal(t, "ab\nc", ts[2].typ.val) + require.Equal(t, "ab\\tc", ts[3].typ.val) } diff --git a/filters/standard_filters_test.go b/filters/standard_filters_test.go index 23ffaf55..9c6af2e1 100644 --- a/filters/standard_filters_test.go +++ b/filters/standard_filters_test.go @@ -103,12 +103,10 @@ var filterTests = []struct { {`"I strained to see the train through the rain" | remove_first: "rain"`, "I sted to see the train through the rain"}, {`"Liquid" | slice: 0`, "L"}, - {`"Liquid -Liquid" | slice: 0`, "L"}, + {`"Liquid\nLiquid" | slice: 0`, "L"}, {`"Liquid" | slice: 2`, "q"}, {`"Liquid" | slice: 2, 5`, "quid"}, - {`"Liquid -Liquid" | slice: 2, 4`, "quid"}, + {`"Liquid\nLiquid" | slice: 2, 4`, "quid"}, {`"Liquid" | slice: -3, 2`, "ui"}, {`"a/b/c" | split: '/' | join: '-'`, "a-b-c"}, diff --git a/liquid_test.go b/liquid_test.go index 0316b6ad..8f91c2f6 100644 --- a/liquid_test.go +++ b/liquid_test.go @@ -41,3 +41,27 @@ func ExampleIterationKeyedMap() { // Output: a=1. // a=1. } + +func TestStringUnescape(t *testing.T) { + vars := map[string]interface{}{} + engine := NewEngine() + + out, err := engine.ParseAndRenderString(`{{ 'ab\nc' }}`, vars) + require.NoError(t, err) + require.Equal(t, "ab\\nc", out) + + out, err = engine.ParseAndRenderString(`{{ "ab\nc" }}`, vars) + require.NoError(t, err) + require.Equal(t, "ab\nc", out) + + out, err = engine.ParseAndRenderString(`{{ "ab\tc" }}`, vars) + require.NoError(t, err) + require.Equal(t, "ab\tc", out) + + _, err = engine.ParseAndRenderString(`{{ "ab\xc" }}`, vars) + require.Error(t, err) + + out, err = engine.ParseAndRenderString(`{{ 'ab\xc' }}`, vars) + require.NoError(t, err) + require.Equal(t, "ab\\xc", out) +} From f331da6c8d890b193a35921eede3d7008fe32f9e Mon Sep 17 00:00:00 2001 From: steve-ky Date: Fri, 24 May 2024 12:56:53 +0800 Subject: [PATCH 2/6] Use tab to keep code style --- expressions/scanner.rl | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/expressions/scanner.rl b/expressions/scanner.rl index 2c2390c6..4834f725 100644 --- a/expressions/scanner.rl +++ b/expressions/scanner.rl @@ -70,14 +70,14 @@ func (lex *lexer) Lex(out *yySymType) int { tok = LITERAL // unescape double quoted string if lex.data[lex.ts] == '"' { - qs := string(lex.data[lex.ts:lex.te]) - s, err := strconv.Unquote(qs) - if err != nil { - panic(SyntaxError(fmt.Sprintf("%s to unescape %s", err, qs))) - } - out.val = s + qs := string(lex.data[lex.ts:lex.te]) + s, err := strconv.Unquote(qs) + if err != nil { + panic(SyntaxError(fmt.Sprintf("%s to unescape %s", err, qs))) + } + out.val = s } else { - out.val = string(lex.data[lex.ts+1:lex.te-1]) + out.val = string(lex.data[lex.ts+1:lex.te-1]) } fbreak; } From d9f448ae5f2a137106ac80e51ea3d28221f15be9 Mon Sep 17 00:00:00 2001 From: steve-ky Date: Sat, 25 May 2024 21:44:42 +0800 Subject: [PATCH 3/6] fix trimLeft & trimRight --- .gitignore | 1 + engine_test.go | 2 +- parser/parser.go | 16 +++++++++-- render/nodes.go | 2 +- render/render.go | 45 +++++++++--------------------- render/trimwriter.go | 65 
-------------------------------------------- 6 files changed, 30 insertions(+), 101 deletions(-) delete mode 100644 render/trimwriter.go diff --git a/.gitignore b/.gitignore index 7abd1c9e..48dd609a 100644 --- a/.gitignore +++ b/.gitignore @@ -2,3 +2,4 @@ *.out /liquid *.test +.idea \ No newline at end of file diff --git a/engine_test.go b/engine_test.go index ca7de122..08577e2e 100644 --- a/engine_test.go +++ b/engine_test.go @@ -55,7 +55,7 @@ func TestEngine_ParseAndFRender(t *testing.T) { wr := capWriter{} err := engine.ParseAndFRender(&wr, []byte(test.in), testBindings) require.NoErrorf(t, err, test.in) - require.Equalf(t, strings.ToUpper(test.expected), wr.String(), test.in) + require.Equalf(t, test.expected, wr.String(), test.in) }) } } diff --git a/parser/parser.go b/parser/parser.go index 9211a15d..a211c4ec 100644 --- a/parser/parser.go +++ b/parser/parser.go @@ -4,6 +4,7 @@ package parser import ( "fmt" "strings" + "unicode" "github.com/osteele/liquid/expressions" ) @@ -33,7 +34,7 @@ func (c Config) parseTokens(tokens []Token) (ASTNode, Error) { // nolint: gocycl inComment = false inRaw = false ) - for _, tok := range tokens { + for i, tok := range tokens { switch { // The parser needs to know about comment and raw, because tags inside // needn't match each other e.g. {%comment%}{%if%}{%endcomment%} @@ -55,7 +56,7 @@ func (c Config) parseTokens(tokens []Token) (ASTNode, Error) { // nolint: gocycl } *ap = append(*ap, &ASTObject{tok, expr}) case tok.Type == TextTokenType: - *ap = append(*ap, &ASTText{Token: tok}) + *ap = append(*ap, trimAndNewTextNode(tokens, i)) case tok.Type == TagTokenType: if cs, ok := g.BlockSyntax(tok.Name); ok { switch { @@ -103,3 +104,14 @@ func (c Config) parseTokens(tokens []Token) (ASTNode, Error) { // nolint: gocycl } return root, nil } + +func trimAndNewTextNode(tokens []Token, i int) *ASTText { + ast := &ASTText{Token: tokens[i]} + if i > 0 && tokens[i-1].TrimRight { + ast.Source = strings.TrimLeftFunc(ast.Source, unicode.IsSpace) + } + if i+1 < len(tokens) && tokens[i+1].TrimLeft { + ast.Source = strings.TrimRightFunc(ast.Source, unicode.IsSpace) + } + return ast +} diff --git a/render/nodes.go b/render/nodes.go index 6695d2fa..bf66e294 100644 --- a/render/nodes.go +++ b/render/nodes.go @@ -11,7 +11,7 @@ import ( type Node interface { SourceLocation() parser.SourceLoc // for error reporting SourceText() string // for error reporting - render(*trimWriter, nodeContext) Error + render(io.Writer, nodeContext) Error } // BlockNode represents a {% tag %}…{% endtag %}. diff --git a/render/render.go b/render/render.go index c907dc35..cc93b319 100644 --- a/render/render.go +++ b/render/render.go @@ -13,31 +13,20 @@ import ( // Render renders the render tree. func Render(node Node, w io.Writer, vars map[string]interface{}, c Config) Error { - tw := trimWriter{w: w} - if err := node.render(&tw, newNodeContext(vars, c)); err != nil { - return err - } - if err := tw.Flush(); err != nil { - panic(err) - } - return nil + return node.render(w, newNodeContext(vars, c)) } -// RenderASTSequence renders a sequence of nodes. +// RenderSequence renders a sequence of nodes. 
func (c nodeContext) RenderSequence(w io.Writer, seq []Node) Error { - tw := trimWriter{w: w} for _, n := range seq { - if err := n.render(&tw, c); err != nil { + if err := n.render(w, c); err != nil { return err } } - if err := tw.Flush(); err != nil { - panic(err) - } return nil } -func (n *BlockNode) render(w *trimWriter, ctx nodeContext) Error { +func (n *BlockNode) render(w io.Writer, ctx nodeContext) Error { cd, ok := ctx.config.findBlockDef(n.Name) if !ok || cd.parser == nil { // this should have been detected during compilation; it's an implementation error if it happens here @@ -47,13 +36,11 @@ func (n *BlockNode) render(w *trimWriter, ctx nodeContext) Error { if renderer == nil { panic(fmt.Errorf("unset renderer for %v", n)) } - w.TrimLeft(n.TrimLeft) err := renderer(w, rendererContext{ctx, nil, n}) - w.TrimRight(n.TrimRight) return wrapRenderError(err, n) } -func (n *RawNode) render(w *trimWriter, ctx nodeContext) Error { +func (n *RawNode) render(w io.Writer, ctx nodeContext) Error { for _, s := range n.slices { _, err := io.WriteString(w, s) if err != nil { @@ -63,8 +50,7 @@ func (n *RawNode) render(w *trimWriter, ctx nodeContext) Error { return nil } -func (n *ObjectNode) render(w *trimWriter, ctx nodeContext) Error { - w.TrimLeft(n.TrimLeft) +func (n *ObjectNode) render(w io.Writer, ctx nodeContext) Error { value, err := ctx.Evaluate(n.expr) if err != nil { return wrapRenderError(err, n) @@ -72,14 +58,11 @@ func (n *ObjectNode) render(w *trimWriter, ctx nodeContext) Error { if value == nil && ctx.config.StrictVariables { return wrapRenderError(errors.New("undefined variable"), n) } - if err := wrapRenderError(writeObject(w, value), n); err != nil { - return err - } - w.TrimRight(n.TrimRight) - return nil + err = writeObject(w, value) + return wrapRenderError(err, n) } -func (n *SeqNode) render(w *trimWriter, ctx nodeContext) Error { +func (n *SeqNode) render(w io.Writer, ctx nodeContext) Error { for _, c := range n.Children { if err := c.render(w, ctx); err != nil { return err @@ -88,14 +71,12 @@ func (n *SeqNode) render(w *trimWriter, ctx nodeContext) Error { return nil } -func (n *TagNode) render(w *trimWriter, ctx nodeContext) Error { - w.TrimLeft(n.TrimLeft) - err := wrapRenderError(n.renderer(w, rendererContext{ctx, n, nil}), n) - w.TrimRight(n.TrimRight) - return err +func (n *TagNode) render(w io.Writer, ctx nodeContext) Error { + err := n.renderer(w, rendererContext{ctx, n, nil}) + return wrapRenderError(err, n) } -func (n *TextNode) render(w *trimWriter, ctx nodeContext) Error { +func (n *TextNode) render(w io.Writer, ctx nodeContext) Error { _, err := io.WriteString(w, n.Source) return wrapRenderError(err, n) } diff --git a/render/trimwriter.go b/render/trimwriter.go deleted file mode 100644 index 0e982e9c..00000000 --- a/render/trimwriter.go +++ /dev/null @@ -1,65 +0,0 @@ -package render - -import ( - "bytes" - "io" - "unicode" -) - -// A trimWriter provides whitespace control around a wrapped io.Writer. -// The caller should call TrimLeft(bool) and TrimRight(bool) respectively -// before and after processing a tag or expression, and Flush() at completion. -type trimWriter struct { - w io.Writer - buf bytes.Buffer - trimRight bool -} - -// This violates the letter of the protocol by returning the count of the -// bytes, rather than the actual number of bytes written. 
We can't know the -// number of bytes written until later, and it won't in general be the same -// as the argument length (that's the whole point of trimming), but speaking -// truthfully here would cause some callers to return io.ErrShortWrite, ruining -// this as an io.Writer. -func (tw *trimWriter) Write(b []byte) (int, error) { - n := len(b) - if tw.trimRight { - b = bytes.TrimLeftFunc(b, unicode.IsSpace) - if n != 0 { - tw.trimRight = false - } - } else if tw.buf.Len() > 0 { - if err := tw.Flush(); err != nil { - return 0, err - } - } - nonWS := bytes.TrimRightFunc(b, unicode.IsSpace) - if len(nonWS) < len(b) { - if _, err := tw.buf.Write(b[len(nonWS):]); err != nil { - return 0, err - } - } - _, err := tw.w.Write(nonWS) - return n, err -} -func (tw *trimWriter) Flush() (err error) { - if tw.buf.Len() > 0 { - _, err = tw.buf.WriteTo(tw.w) - tw.buf.Reset() - } - return -} - -func (tw *trimWriter) TrimLeft(f bool) { - if !f && tw.buf.Len() > 0 { - if err := tw.Flush(); err != nil { - panic(err) - } - } - tw.buf.Reset() - tw.trimRight = false -} - -func (tw *trimWriter) TrimRight(f bool) { - tw.trimRight = f -} From b2e1271cab3645d201e83bc31f249c9a20e27702 Mon Sep 17 00:00:00 2001 From: steve-ky Date: Sat, 25 May 2024 23:33:17 +0800 Subject: [PATCH 4/6] improve formTokenMatcher performance --- engine.go | 2 +- liquid_test.go | 17 +++++++++++++++++ parser/config.go | 21 +++++++++++++++++---- parser/parser.go | 8 ++++---- parser/scanner.go | 9 +-------- parser/scanner_test.go | 12 +++++++++--- render/compiler.go | 8 ++++---- 7 files changed, 53 insertions(+), 24 deletions(-) diff --git a/engine.go b/engine.go index c9a28662..8ceb512b 100644 --- a/engine.go +++ b/engine.go @@ -120,7 +120,7 @@ func (e *Engine) ParseAndRenderString(source string, b Bindings) (string, Source // ParseTemplate, ParseTemplateLocation, ParseAndRender, or ParseAndRenderString. An empty delimiter // stands for the corresponding default: objectLeft = {{, objectRight = }}, tagLeft = {% , tagRight = %} func (e *Engine) Delims(objectLeft, objectRight, tagLeft, tagRight string) *Engine { - e.cfg.Delims = []string{objectLeft, objectRight, tagLeft, tagRight} + e.cfg.Delims(objectLeft, objectRight, tagLeft, tagRight) return e } diff --git a/liquid_test.go b/liquid_test.go index 8f91c2f6..faa4481a 100644 --- a/liquid_test.go +++ b/liquid_test.go @@ -65,3 +65,20 @@ func TestStringUnescape(t *testing.T) { require.NoError(t, err) require.Equal(t, "ab\\xc", out) } + +func TestWhitespaceControl(t *testing.T) { + vars := map[string]interface{}{} + engine := NewEngine() + + out, err := engine.ParseAndRenderString(`t1 {%- if true -%} t2 {%- endif -%} t3`, vars) + require.NoError(t, err) + require.Equal(t, "t1t2t3", out) + + out, err = engine.ParseAndRenderString(`t1 + {%- if true -%} + t2 + {%- endif -%} + t3`, vars) + require.NoError(t, err) + require.Equal(t, "t1t2t3", out) +} diff --git a/parser/config.go b/parser/config.go index 162f1179..912779c8 100644 --- a/parser/config.go +++ b/parser/config.go @@ -1,15 +1,28 @@ package parser -import "github.com/osteele/liquid/expressions" +import ( + "github.com/osteele/liquid/expressions" + "regexp" +) // A Config holds configuration information for parsing and rendering. type Config struct { expressions.Config - Grammar Grammar - Delims []string + Grammar Grammar + delims []string + tokenMatcher *regexp.Regexp } // NewConfig creates a parser Config. 
func NewConfig(g Grammar) Config { - return Config{Grammar: g} + c := Config{Grammar: g} + // Apply defaults + c.delims = []string{"{{", "}}", "{%", "%}"} + c.tokenMatcher = formTokenMatcher(c.delims) + return c +} + +func (c *Config) Delims(objectLeft, objectRight, tagLeft, tagRight string) { + c.delims = []string{objectLeft, objectRight, tagLeft, tagRight} + c.tokenMatcher = formTokenMatcher(c.delims) } diff --git a/parser/parser.go b/parser/parser.go index a211c4ec..a79ac293 100644 --- a/parser/parser.go +++ b/parser/parser.go @@ -10,13 +10,13 @@ import ( ) // Parse parses a source template. It returns an AST root, that can be compiled and evaluated. -func (c Config) Parse(source string, loc SourceLoc) (ASTNode, Error) { - tokens := Scan(source, loc, c.Delims) +func (c *Config) Parse(source string, loc SourceLoc) (ASTNode, Error) { + tokens := Scan(source, loc, c.delims, c.tokenMatcher) return c.parseTokens(tokens) } -// Parse creates an AST from a sequence of tokens. -func (c Config) parseTokens(tokens []Token) (ASTNode, Error) { // nolint: gocyclo +// parseTokens creates an AST from a sequence of tokens. +func (c *Config) parseTokens(tokens []Token) (ASTNode, Error) { // nolint: gocyclo // a stack of control tag state, for matching nested {%if}{%endif%} etc. type frame struct { syntax BlockSyntax diff --git a/parser/scanner.go b/parser/scanner.go index a63b97e4..736ad27b 100644 --- a/parser/scanner.go +++ b/parser/scanner.go @@ -7,14 +7,7 @@ import ( ) // Scan breaks a string into a sequence of Tokens. -func Scan(data string, loc SourceLoc, delims []string) (tokens []Token) { - - // Apply defaults - if len(delims) != 4 { - delims = []string{"{{", "}}", "{%", "%}"} - } - tokenMatcher := formTokenMatcher(delims) - +func Scan(data string, loc SourceLoc, delims []string, tokenMatcher *regexp.Regexp) (tokens []Token) { // TODO error on unterminated {{ and {% // TODO probably an error when a tag contains a {{ or {%, at least outside of a string p, pe := 0, len(data) diff --git a/parser/scanner_test.go b/parser/scanner_test.go index ef52c133..1483b0ed 100644 --- a/parser/scanner_test.go +++ b/parser/scanner_test.go @@ -23,7 +23,9 @@ var scannerCountTests = []struct { } func TestScan(t *testing.T) { - scan := func(src string) []Token { return Scan(src, SourceLoc{}, nil) } + delims := []string{"{{", "}}", "{%", "%}"} + tokenMatcher := formTokenMatcher(delims) + scan := func(src string) []Token { return Scan(src, SourceLoc{}, delims, tokenMatcher) } tokens := scan("12") require.NotNil(t, tokens) require.Len(t, tokens, 1) @@ -68,8 +70,10 @@ func TestScan(t *testing.T) { } func TestScan_ws(t *testing.T) { + delims := []string{"{{", "}}", "{%", "%}"} + tokenMatcher := formTokenMatcher(delims) // whitespace control - scan := func(src string) []Token { return Scan(src, SourceLoc{}, nil) } + scan := func(src string) []Token { return Scan(src, SourceLoc{}, delims, tokenMatcher) } wsTests := []struct { in, expect string @@ -115,8 +119,10 @@ var scannerCountTestsDelims = []struct { } func TestScan_delims(t *testing.T) { + delims := []string{"OBJECT@LEFT", "OBJECT#RIGHT", "TAG*LEFT", "TAG!RIGHT"} + tokenMatcher := formTokenMatcher(delims) scan := func(src string) []Token { - return Scan(src, SourceLoc{}, []string{"OBJECT@LEFT", "OBJECT#RIGHT", "TAG*LEFT", "TAG!RIGHT"}) + return Scan(src, SourceLoc{}, delims, tokenMatcher) } tokens := scan("12") require.NotNil(t, tokens) diff --git a/render/compiler.go b/render/compiler.go index 745504cb..ae897935 100644 --- a/render/compiler.go +++ b/render/compiler.go 
@@ -7,7 +7,7 @@ import ( ) // Compile parses a source template. It returns an AST root, that can be evaluated. -func (c Config) Compile(source string, loc parser.SourceLoc) (Node, parser.Error) { +func (c *Config) Compile(source string, loc parser.SourceLoc) (Node, parser.Error) { root, err := c.Parse(source, loc) if err != nil { return nil, err @@ -16,7 +16,7 @@ func (c Config) Compile(source string, loc parser.SourceLoc) (Node, parser.Error } // nolint: gocyclo -func (c Config) compileNode(n parser.ASTNode) (Node, parser.Error) { +func (c *Config) compileNode(n parser.ASTNode) (Node, parser.Error) { switch n := n.(type) { case *parser.ASTBlock: body, err := c.compileNodes(n.Body) @@ -71,7 +71,7 @@ func (c Config) compileNode(n parser.ASTNode) (Node, parser.Error) { } } -func (c Config) compileBlocks(blocks []*parser.ASTBlock) ([]*BlockNode, parser.Error) { +func (c *Config) compileBlocks(blocks []*parser.ASTBlock) ([]*BlockNode, parser.Error) { out := make([]*BlockNode, 0, len(blocks)) for _, child := range blocks { compiled, err := c.compileNode(child) @@ -83,7 +83,7 @@ func (c Config) compileBlocks(blocks []*parser.ASTBlock) ([]*BlockNode, parser.E return out, nil } -func (c Config) compileNodes(nodes []parser.ASTNode) ([]Node, parser.Error) { +func (c *Config) compileNodes(nodes []parser.ASTNode) ([]Node, parser.Error) { out := make([]Node, 0, len(nodes)) for _, child := range nodes { compiled, err := c.compileNode(child) From 67ba9a50e920b816c961af049aa75d5c3d74f727 Mon Sep 17 00:00:00 2001 From: steve-ky Date: Sat, 25 May 2024 23:39:16 +0800 Subject: [PATCH 5/6] fix parser_test nil pointer issue --- parser/parser_test.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/parser/parser_test.go b/parser/parser_test.go index 78393f70..5d14d06d 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -49,7 +49,7 @@ var parserTests = []struct{ in string }{ } func TestParseErrors(t *testing.T) { - cfg := Config{Grammar: grammarFake{}} + cfg := NewConfig(grammarFake{}) for i, test := range parseErrorTests { t.Run(fmt.Sprintf("%02d", i+1), func(t *testing.T) { _, err := cfg.Parse(test.in, SourceLoc{}) @@ -60,7 +60,7 @@ func TestParseErrors(t *testing.T) { } func TestParser(t *testing.T) { - cfg := Config{Grammar: grammarFake{}} + cfg := NewConfig(grammarFake{}) for i, test := range parserTests { t.Run(fmt.Sprintf("%02d", i+1), func(t *testing.T) { _, err := cfg.Parse(test.in, SourceLoc{}) From f408f7092617d555e84d8a3a954eac48a3aeed31 Mon Sep 17 00:00:00 2001 From: steve-ky Date: Sun, 26 May 2024 22:09:52 +0800 Subject: [PATCH 6/6] improve number filters --- drops.go | 4 +- drops_test.go | 18 +-- engine.go | 2 +- engine_examples_test.go | 10 +- engine_test.go | 12 +- evaluator/evaluator.go | 34 ------ expressions/expressions_test.go | 4 +- expressions/parser_test.go | 2 +- expressions/scanner.go | 4 +- expressions/scanner.rl | 2 +- expressions/scanner_test.go | 4 +- filters/standard_filters.go | 202 +++++++++++++++++++++++-------- filters/standard_filters_test.go | 52 +++++--- go.mod | 4 +- go.sum | 4 +- liquid.go | 6 +- liquid_test.go | 14 +-- render/context.go | 30 ++--- tags/iteration_tags.go | 23 ++-- values/convert.go | 66 ++++++++-- values/drop.go | 6 +- values/value.go | 41 ++++++- 22 files changed, 358 insertions(+), 186 deletions(-) delete mode 100644 evaluator/evaluator.go diff --git a/drops.go b/drops.go index 6d00b68c..f6d637a8 100644 --- a/drops.go +++ b/drops.go @@ -2,12 +2,12 @@ package liquid // Drop indicates that the object will present to 
templates as its ToLiquid value. type Drop interface { - ToLiquid() interface{} + ToLiquid() any } // FromDrop returns returns object.ToLiquid() if object's type implement this function; // else the object itself. -func FromDrop(object interface{}) interface{} { +func FromDrop(object any) any { switch object := object.(type) { case Drop: return object.ToLiquid() diff --git a/drops_test.go b/drops_test.go index 44121548..d6f67cdc 100644 --- a/drops_test.go +++ b/drops_test.go @@ -10,7 +10,7 @@ import ( type dropTest struct{} -func (d dropTest) ToLiquid() interface{} { return "drop" } +func (d dropTest) ToLiquid() any { return "drop" } func TestDrops(t *testing.T) { require.Equal(t, "drop", FromDrop(dropTest{})) @@ -20,8 +20,8 @@ func TestDrops(t *testing.T) { type redConvertible struct{} -func (c redConvertible) ToLiquid() interface{} { - return map[string]interface{}{ +func (c redConvertible) ToLiquid() any { + return map[string]any{ "color": "red", } } @@ -29,13 +29,13 @@ func (c redConvertible) ToLiquid() interface{} { func ExampleDrop_map() { // type redConvertible struct{} // - // func (c redConvertible) ToLiquid() interface{} { - // return map[string]interface{}{ + // func (c redConvertible) ToLiquid() any { + // return map[string]any{ // "color": "red", // } // } engine := NewEngine() - bindings := map[string]interface{}{ + bindings := map[string]any{ "car": redConvertible{}, } template := `{{ car.color }}` @@ -49,7 +49,7 @@ func ExampleDrop_map() { type car struct{ color, model string } -func (c car) ToLiquid() interface{} { +func (c car) ToLiquid() any { return carDrop{c.model, c.color} } @@ -65,7 +65,7 @@ func (c carDrop) Drive() string { func ExampleDrop_struct() { // type car struct{ color, model string } // - // func (c car) ToLiquid() interface{} { + // func (c car) ToLiquid() any { // return carDrop{c.model, c.color} // } // @@ -79,7 +79,7 @@ func ExampleDrop_struct() { // } engine := NewEngine() - bindings := map[string]interface{}{ + bindings := map[string]any{ "car": car{"blue", "S85"}, } template := `{{ car.color }} {{ car.Drive }} Model {{ car.Model }}` diff --git a/engine.go b/engine.go index 8ceb512b..36455d87 100644 --- a/engine.go +++ b/engine.go @@ -43,7 +43,7 @@ func (e *Engine) RegisterBlock(name string, td Renderer) { // * https://github.com/osteele/liquid/blob/main/filters/standard_filters.go // // * https://github.com/osteele/gojekyll/blob/master/filters/filters.go -func (e *Engine) RegisterFilter(name string, fn interface{}) { +func (e *Engine) RegisterFilter(name string, fn any) { e.cfg.AddFilter(name, fn) } diff --git a/engine_examples_test.go b/engine_examples_test.go index 81607129..9369210a 100644 --- a/engine_examples_test.go +++ b/engine_examples_test.go @@ -11,7 +11,7 @@ import ( func Example() { engine := NewEngine() source := `
<h1>{{ page.title }}</h1>
` - bindings := map[string]interface{}{ + bindings := map[string]any{ "page": map[string]string{ "title": "Introduction", }, @@ -27,7 +27,7 @@ func Example() { func ExampleEngine_ParseAndRenderString() { engine := NewEngine() source := `{{ hello | capitalize | append: " Mundo" }}` - bindings := map[string]interface{}{"hello": "hola"} + bindings := map[string]any{"hello": "hola"} out, err := engine.ParseAndRenderString(source, bindings) if err != nil { log.Fatalln(err) @@ -38,7 +38,7 @@ func ExampleEngine_ParseAndRenderString() { func ExampleEngine_ParseTemplate() { engine := NewEngine() source := `{{ hello | capitalize | append: " Mundo" }}` - bindings := map[string]interface{}{"hello": "hola"} + bindings := map[string]any{"hello": "hola"} tpl, err := engine.ParseString(source) if err != nil { log.Fatalln(err) @@ -54,7 +54,7 @@ func ExampleEngine_RegisterFilter() { engine := NewEngine() engine.RegisterFilter("has_prefix", strings.HasPrefix) template := `{{ title | has_prefix: "Intro" }}` - bindings := map[string]interface{}{ + bindings := map[string]any{ "title": "Introduction", } out, err := engine.ParseAndRenderString(template, bindings) @@ -74,7 +74,7 @@ func ExampleEngine_RegisterFilter_optional_argument() { return a + b(1) }) template := `10 + 1 = {{ m | inc }}; 20 + 5 = {{ n | inc: 5 }}` - bindings := map[string]interface{}{ + bindings := map[string]any{ "m": 10, "n": "20", } diff --git a/engine_test.go b/engine_test.go index 08577e2e..44a3ebf1 100644 --- a/engine_test.go +++ b/engine_test.go @@ -11,7 +11,7 @@ import ( "github.com/stretchr/testify/require" ) -var emptyBindings = map[string]interface{}{} +var emptyBindings = map[string]any{} // There's a lot more tests in the filters and tags sub-packages. // This collects a minimal set for testing end-to-end. @@ -21,10 +21,10 @@ var liquidTests = []struct{ in, expected string }{ {`{{ "upper" | upcase }}`, "UPPER"}, } -var testBindings = map[string]interface{}{ +var testBindings = map[string]any{ "x": 123, "ar": []string{"first", "second", "third"}, - "page": map[string]interface{}{ + "page": map[string]any{ "title": "Introduction", }, } @@ -61,8 +61,8 @@ func TestEngine_ParseAndFRender(t *testing.T) { } func TestEngine_ParseAndRenderString_ptr_to_hash(t *testing.T) { - params := map[string]interface{}{ - "message": &map[string]interface{}{ + params := map[string]any{ + "message": &map[string]any{ "Text": "hello", "jsonNumber": json.Number("123"), }, @@ -77,7 +77,7 @@ func TestEngine_ParseAndRenderString_ptr_to_hash(t *testing.T) { type testStruct struct{ Text string } func TestEngine_ParseAndRenderString_struct(t *testing.T) { - params := map[string]interface{}{ + params := map[string]any{ "message": testStruct{ Text: "hello", }, diff --git a/evaluator/evaluator.go b/evaluator/evaluator.go deleted file mode 100644 index 497254d4..00000000 --- a/evaluator/evaluator.go +++ /dev/null @@ -1,34 +0,0 @@ -// Package evaluator is an interim internal package that forwards to package values. -package evaluator - -import ( - "reflect" - "time" - - "github.com/osteele/liquid/values" -) - -// Convert should be replaced by values.Convert. -func Convert(value interface{}, typ reflect.Type) (interface{}, error) { - return values.Convert(value, typ) -} - -// MustConvertItem should be replaced by values.Convert. -func MustConvertItem(item interface{}, array interface{}) interface{} { - return values.MustConvertItem(item, array) -} - -// Sort should be replaced by values. 
-func Sort(data []interface{}) { - values.Sort(data) -} - -// SortByProperty should be replaced by values.SortByProperty -func SortByProperty(data []interface{}, key string, nilFirst bool) { - values.SortByProperty(data, key, nilFirst) -} - -// ParseDate should be replaced by values.SortByProperty -func ParseDate(s string) (time.Time, error) { - return values.ParseDate(s) -} diff --git a/expressions/expressions_test.go b/expressions/expressions_test.go index ef4fd3c9..6f5f98d6 100644 --- a/expressions/expressions_test.go +++ b/expressions/expressions_test.go @@ -15,7 +15,7 @@ var evaluatorTests = []struct { expected interface{} }{ // Literals - {`12`, 12}, + {`12`, int64(12)}, {`12.3`, 12.3}, {`true`, true}, {`false`, false}, @@ -53,7 +53,7 @@ var evaluatorTests = []struct { {`(range.begin..range.end)`, values.NewRange(1, 5)}, // Expressions - {`(1)`, 1}, + {`(1)`, int64(1)}, {`(n)`, 123}, // Operators diff --git a/expressions/parser_test.go b/expressions/parser_test.go index 8517b272..78493393 100644 --- a/expressions/parser_test.go +++ b/expressions/parser_test.go @@ -14,7 +14,7 @@ var parseTests = []struct { {`true`, true}, {`false`, false}, {`nil`, nil}, - {`2`, 2}, + {`2`, int64(2)}, {`"s"`, "s"}, {`a`, 1}, {`obj.prop`, 2}, diff --git a/expressions/scanner.go b/expressions/scanner.go index d58551a6..aa8ffc7b 100644 --- a/expressions/scanner.go +++ b/expressions/scanner.go @@ -492,7 +492,7 @@ func (lex *lexer) Lex(out *yySymType) int { if err != nil { panic(err) } - out.val = int(n) + out.val = n (lex.p)++ goto _out @@ -556,7 +556,7 @@ func (lex *lexer) Lex(out *yySymType) int { if err != nil { panic(err) } - out.val = int(n) + out.val = n (lex.p)++ goto _out diff --git a/expressions/scanner.rl b/expressions/scanner.rl index 4834f725..70ff1017 100644 --- a/expressions/scanner.rl +++ b/expressions/scanner.rl @@ -54,7 +54,7 @@ func (lex *lexer) Lex(out *yySymType) int { if err != nil { panic(err) } - out.val = int(n) + out.val = n fbreak; } action Float { diff --git a/expressions/scanner_test.go b/expressions/scanner_test.go index 5cbe1a98..80490a0a 100644 --- a/expressions/scanner_test.go +++ b/expressions/scanner_test.go @@ -39,7 +39,7 @@ func TestLex(t *testing.T) { require.Equal(t, IDENTIFIER, ts[0].tok) require.Equal(t, "abc", ts[0].typ.name) require.Equal(t, LITERAL, ts[2].tok) - require.Equal(t, 123, ts[2].typ.val) + require.Equal(t, int64(123), ts[2].typ.val) // verify these don't match "for", "or", or "false" ts, _ = scanExpression("forage") @@ -67,7 +67,7 @@ func TestLex(t *testing.T) { require.Equal(t, true, ts[0].typ.val) require.Equal(t, false, ts[1].typ.val) require.Equal(t, nil, ts[2].typ.val) - require.Equal(t, 2, ts[3].typ.val) + require.Equal(t, int64(2), ts[3].typ.val) require.Equal(t, 2.3, ts[4].typ.val) require.Equal(t, "abc", ts[5].typ.val) require.Equal(t, "abc", ts[6].typ.val) diff --git a/filters/standard_filters.go b/filters/standard_filters.go index f6e71367..fca9ea2c 100644 --- a/filters/standard_filters.go +++ b/filters/standard_filters.go @@ -19,25 +19,25 @@ import ( // A FilterDictionary holds filters. type FilterDictionary interface { - AddFilter(string, interface{}) + AddFilter(string, any) } // AddStandardFilters defines the standard Liquid filters. 
func AddStandardFilters(fd FilterDictionary) { // nolint: gocyclo // value filters - fd.AddFilter("default", func(value, defaultValue interface{}) interface{} { + fd.AddFilter("default", func(value, defaultValue any) any { if value == nil || value == false || values.IsEmpty(value) { value = defaultValue } return value }) - fd.AddFilter("json", func(a interface{}) interface{} { + fd.AddFilter("json", func(a any) any { result, _ := json.Marshal(a) return result }) // array filters - fd.AddFilter("compact", func(a []interface{}) (result []interface{}) { + fd.AddFilter("compact", func(a []any) (result []any) { for _, item := range a { if item != nil { result = append(result, item) @@ -45,12 +45,12 @@ func AddStandardFilters(fd FilterDictionary) { // nolint: gocyclo } return }) - fd.AddFilter("concat", func(a, b []interface{}) (result []interface{}) { - result = make([]interface{}, 0, len(a)+len(b)) + fd.AddFilter("concat", func(a, b []any) (result []any) { + result = make([]any, 0, len(a)+len(b)) return append(append(result, a...), b...) }) fd.AddFilter("join", joinFilter) - fd.AddFilter("map", func(a []interface{}, key string) (result []interface{}) { + fd.AddFilter("map", func(a []any, key string) (result []any) { keyValue := values.ValueOf(key) for _, obj := range a { value := values.ValueOf(obj) @@ -62,13 +62,13 @@ func AddStandardFilters(fd FilterDictionary) { // nolint: gocyclo fd.AddFilter("sort", sortFilter) // https://shopify.github.io/liquid/ does not demonstrate first and last as filters, // but https://help.shopify.com/themes/liquid/filters/array-filters does - fd.AddFilter("first", func(a []interface{}) interface{} { + fd.AddFilter("first", func(a []any) any { if len(a) == 0 { return nil } return a[0] }) - fd.AddFilter("last", func(a []interface{}) interface{} { + fd.AddFilter("last", func(a []any) any { if len(a) == 0 { return nil } @@ -83,37 +83,133 @@ func AddStandardFilters(fd FilterDictionary) { // nolint: gocyclo }) // number filters - fd.AddFilter("abs", math.Abs) - fd.AddFilter("ceil", func(a float64) int { - return int(math.Ceil(a)) - }) - fd.AddFilter("floor", func(a float64) int { - return int(math.Floor(a)) - }) - fd.AddFilter("modulo", math.Mod) - fd.AddFilter("minus", func(a, b float64) float64 { - return a - b - }) - fd.AddFilter("plus", func(a, b float64) float64 { - return a + b - }) - fd.AddFilter("times", func(a, b float64) float64 { - return a * b - }) - fd.AddFilter("divided_by", func(a float64, b interface{}) interface{} { - switch q := b.(type) { - case int, int16, int32, int64: - return int(a) / q.(int) - case float32, float64: - return a / b.(float64) - default: - return nil + fd.AddFilter("abs", func(a any) any { + if ia, ok := values.ToInt64(a); ok { + if ia < 0 { + return -ia + } else { + return ia + } + } + if fa, ok := values.ToFloat64(a); ok { + return math.Abs(fa) + } + return math.NaN() + }) + fd.AddFilter("ceil", func(a any) any { + if ia, ok := values.ToInt64(a); ok { + return ia + } + if fa, ok := values.ToFloat64(a); ok { + return int64(math.Ceil(fa)) + } + return math.NaN() + }) + fd.AddFilter("floor", func(a any) any { + if ia, ok := values.ToInt64(a); ok { + return ia + } + if fa, ok := values.ToFloat64(a); ok { + return int64(math.Floor(fa)) + } + return math.NaN() + }) + fd.AddFilter("modulo", func(a, b any) any { + if fa, ok := values.ToFloat64(a); ok { + if fb, ok := values.ToFloat64(b); ok { + return math.Mod(fa, fb) + } + } + return math.NaN() + }) + fd.AddFilter("minus", func(a, b any) any { + if ia, ok := values.ToInt64(a); ok { + if 
ib, ok := values.ToInt64(b); ok { + return ia - ib + } + } + if fa, ok := values.ToFloat64(a); ok { + if fb, ok := values.ToFloat64(b); ok { + return fa - fb + } + } + return math.NaN() + }) + fd.AddFilter("plus", func(a, b any) any { + if ia, ok := values.ToInt64(a); ok { + if ib, ok := values.ToInt64(b); ok { + return ia + ib + } } + if fa, ok := values.ToFloat64(a); ok { + if fb, ok := values.ToFloat64(b); ok { + return fa + fb + } + } + return math.NaN() }) - fd.AddFilter("round", func(n float64, places func(int) int) float64 { - pl := places(0) - exp := math.Pow10(pl) - return math.Floor(n*exp+0.5) / exp + fd.AddFilter("times", func(a, b any) any { + if ia, ok := values.ToInt64(a); ok { + if ib, ok := values.ToInt64(b); ok { + return ia * ib + } + } + if fa, ok := values.ToFloat64(a); ok { + if fb, ok := values.ToFloat64(b); ok { + return fa * fb + } + } + return math.NaN() + }) + fd.AddFilter("divided_by", func(a any, b any) any { + if ia, ok := values.ToInt64(a); ok { + if ib, ok := values.ToInt64(b); ok { + if ib == 0 { + if ia == 0 { + return math.NaN() + } + return math.Inf(int(ia)) + } + return ia / ib + } + } + if fa, ok := values.ToFloat64(a); ok { + if fb, ok := values.ToFloat64(b); ok { + if fb == 0 { + if fa == 0 { + return math.NaN() + } + return math.Inf(sign(fa)) + } + return fa / fb + } + } + return math.NaN() + }) + //fd.AddFilter("round", func(a any, places func(int) int) float64 { + // if ia, ok := values.ToInt64(a); ok { + // return float64(ia) + // } + // if fa, ok := values.ToFloat64(a); ok { + // pl := places(0) + // exp := math.Pow10(pl) + // return math.Floor(fa*exp+0.5) / exp + // } + // return math.NaN() + //}) + fd.AddFilter("round", func(a any, places any) float64 { + pl, ok := values.ToInt64(places) + if !ok { + return math.NaN() + } + if ia, ok := values.ToInt64(a); ok { + return float64(ia) + } + if fa, ok := values.ToFloat64(a); ok { + exp := math.Pow10(int(pl)) + return math.Floor(fa*exp+0.5) / exp + } + return math.NaN() }) // sequence filters @@ -207,19 +303,19 @@ func AddStandardFilters(fd FilterDictionary) { // nolint: gocyclo // debugging filters // inspect is from Jekyll - fd.AddFilter("inspect", func(value interface{}) string { + fd.AddFilter("inspect", func(value any) string { s, err := json.Marshal(value) if err != nil { return fmt.Sprintf("%#v", value) } return string(s) }) - fd.AddFilter("type", func(value interface{}) string { + fd.AddFilter("type", func(value any) string { return fmt.Sprintf("%T", value) }) } -func joinFilter(a []interface{}, sep func(string) string) interface{} { +func joinFilter(a []any, sep func(string) string) any { ss := make([]string, 0, len(a)) s := sep(" ") for _, v := range a { @@ -230,8 +326,8 @@ func joinFilter(a []interface{}, sep func(string) string) interface{} { return strings.Join(ss, s) } -func reverseFilter(a []interface{}) interface{} { - result := make([]interface{}, len(a)) +func reverseFilter(a []any) any { + result := make([]any, len(a)) for i, x := range a { result[len(result)-1-i] = x } @@ -240,7 +336,7 @@ func reverseFilter(a []interface{}) interface{} { var wsre = regexp.MustCompile(`[[:space:]]+`) -func splitFilter(s, sep string) interface{} { +func splitFilter(s, sep string) any { result := strings.Split(s, sep) if sep == " " { // Special case for Ruby, therefore Liquid @@ -253,9 +349,9 @@ func splitFilter(s, sep string) interface{} { return result } -func uniqFilter(a []interface{}) (result []interface{}) { - seenMap := map[interface{}]bool{} - seen := func(item interface{}) bool { +func 
uniqFilter(a []any) (result []any) { + seenMap := map[any]bool{} + seen := func(item any) bool { if k := reflect.TypeOf(item).Kind(); k < reflect.Array || k == reflect.Ptr || k == reflect.UnsafePointer { if seenMap[item] { return true @@ -279,9 +375,19 @@ func uniqFilter(a []interface{}) (result []interface{}) { return } -func eqItems(a, b interface{}) bool { +func eqItems(a, b any) bool { if reflect.TypeOf(a).Comparable() && reflect.TypeOf(b).Comparable() { return a == b } return reflect.DeepEqual(a, b) } + +func sign(a float64) int { + if a > 0 { + return 1 + } else if a < 0 { + return -1 + } else { + return 0 + } +} diff --git a/filters/standard_filters_test.go b/filters/standard_filters_test.go index 9c6af2e1..b1c7953a 100644 --- a/filters/standard_filters_test.go +++ b/filters/standard_filters_test.go @@ -2,6 +2,7 @@ package filters import ( "fmt" + "math" "os" "testing" "time" @@ -151,39 +152,39 @@ var filterTests = []struct { {`"Tetsuro Takara" | url_encode`, "Tetsuro+Takara"}, // number filters - {`-17 | abs`, 17.0}, - {`4 | abs`, 4.0}, + {`-17 | abs`, int64(17)}, + {`4 | abs`, int64(4)}, {`"-19.86" | abs`, 19.86}, - {`1.2 | ceil`, 2}, - {`2.0 | ceil`, 2}, - {`183.357 | ceil`, 184}, - {`"3.5" | ceil`, 4}, + {`1.2 | ceil`, int64(2)}, + {`2.0 | ceil`, int64(2)}, + {`183.357 | ceil`, int64(184)}, + {`"3.5" | ceil`, int64(4)}, - {`1.2 | floor`, 1}, - {`2.0 | floor`, 2}, - {`183.357 | floor`, 183}, + {`1.2 | floor`, int64(1)}, + {`2.0 | floor`, int64(2)}, + {`183.357 | floor`, int64(183)}, - {`4 | plus: 2`, 6.0}, + {`4 | plus: 2`, int64(6)}, {`183.357 | plus: 12`, 195.357}, - {`4 | minus: 2`, 2.0}, - {`16 | minus: 4`, 12.0}, + {`4 | minus: 2`, int64(2)}, + {`16 | minus: 4`, int64(12)}, {`183.357 | minus: 12`, 171.357}, - {`3 | times: 2`, 6.0}, - {`24 | times: 7`, 168.0}, + {`3 | times: 2`, int64(6)}, + {`24 | times: 7`, int64(168)}, {`183.357 | times: 12`, 2200.284}, {`3 | modulo: 2`, 1.0}, {`24 | modulo: 7`, 3.0}, // {`183.357 | modulo: 12 | `, 3.357}, // TODO test suit use inexact - {`16 | divided_by: 4`, 4}, - {`5 | divided_by: 3`, 1}, - {`20 | divided_by: 7`, 2}, + {`16 | divided_by: 4`, int64(4)}, + {`5 | divided_by: 3`, int64(1)}, + {`20 | divided_by: 7`, int64(2)}, {`20 | divided_by: 7.0`, 2.857142857142857}, - {`20 | divided_by: 's'`, nil}, + {`20 | divided_by: 's'`, math.NaN()}, {`1.2 | round`, 1.0}, {`2.7 | round`, 3.0}, @@ -192,7 +193,7 @@ var filterTests = []struct { // Jekyll extensions; added here for convenient testing // TODO add this just to the test environment {`map | inspect`, `{"a":1}`}, - {`1 | type`, `int`}, + {`1 | type`, `int64`}, {`"1" | type`, `string`}, } @@ -271,7 +272,9 @@ func TestFilters(t *testing.T) { t.Run(fmt.Sprintf("%02d", i+1), func(t *testing.T) { actual, err := expressions.EvaluateString(test.in, context) require.NoErrorf(t, err, test.in) - require.Equalf(t, test.expected, actual, test.in) + if !bothNaN(test.expected, actual) { + require.Equalf(t, test.expected, actual, test.in) + } }) } } @@ -283,3 +286,12 @@ func timeMustParse(s string) time.Time { } return t } + +func bothNaN(a, b any) bool { + if fa, ok := a.(float64); ok { + if fb, ok := b.(float64); ok { + return math.IsNaN(fa) && math.IsNaN(fb) + } + } + return false +} diff --git a/go.mod b/go.mod index b0b73397..ffeb4a21 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/osteele/liquid -go 1.17 +go 1.18 require ( github.com/osteele/tuesday v1.0.3 @@ -11,5 +11,5 @@ require ( require ( github.com/davecgh/go-spew v1.1.1 // indirect github.com/pmezard/go-difflib v1.0.0 // 
indirect - gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index 8a4fe491..8b103e46 100644 --- a/go.sum +++ b/go.sum @@ -13,5 +13,5 @@ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/liquid.go b/liquid.go index 249492dc..addf3eb5 100644 --- a/liquid.go +++ b/liquid.go @@ -15,8 +15,8 @@ import ( // Bindings is a map of variable names to values. // // Clients need not use this type. It is used solely for documentation. Callers can use instances -// of map[string]interface{} itself as argument values to functions declared with this parameter type. -type Bindings map[string]interface{} +// of map[string]any itself as argument values to functions declared with this parameter type. +type Bindings map[string]any // A Renderer returns the rendered string for a block. This is the type of a tag definition. // @@ -35,6 +35,6 @@ type SourceError interface { // IterationKeyedMap returns a map whose {% for %} tag iteration values are its keys, instead of [key, value] pairs. // Use this to create a Go map with the semantics of a Ruby struct drop. 
-func IterationKeyedMap(m map[string]interface{}) tags.IterationKeyedMap { +func IterationKeyedMap(m map[string]any) tags.IterationKeyedMap { return m } diff --git a/liquid_test.go b/liquid_test.go index faa4481a..a50cbb64 100644 --- a/liquid_test.go +++ b/liquid_test.go @@ -9,8 +9,8 @@ import ( ) func TestIterationKeyedMap(t *testing.T) { - vars := map[string]interface{}{ - "keyed_map": IterationKeyedMap(map[string]interface{}{"a": 1, "b": 2}), + vars := map[string]any{ + "keyed_map": IterationKeyedMap(map[string]any{"a": 1, "b": 2}), } engine := NewEngine() tpl, err := engine.ParseTemplate([]byte(`{% for k in keyed_map %}{{ k }}={{ keyed_map[k] }}.{% endfor %}`)) @@ -21,9 +21,9 @@ func TestIterationKeyedMap(t *testing.T) { } func ExampleIterationKeyedMap() { - vars := map[string]interface{}{ - "map": map[string]interface{}{"a": 1}, - "keyed_map": IterationKeyedMap(map[string]interface{}{"a": 1}), + vars := map[string]any{ + "map": map[string]any{"a": 1}, + "keyed_map": IterationKeyedMap(map[string]any{"a": 1}), } engine := NewEngine() out, err := engine.ParseAndRenderString( @@ -43,7 +43,7 @@ func ExampleIterationKeyedMap() { } func TestStringUnescape(t *testing.T) { - vars := map[string]interface{}{} + vars := map[string]any{} engine := NewEngine() out, err := engine.ParseAndRenderString(`{{ 'ab\nc' }}`, vars) @@ -67,7 +67,7 @@ func TestStringUnescape(t *testing.T) { } func TestWhitespaceControl(t *testing.T) { - vars := map[string]interface{}{} + vars := map[string]any{} engine := NewEngine() out, err := engine.ParseAndRenderString(`t1 {%- if true -%} t2 {%- endif -%} t3`, vars) diff --git a/render/context.go b/render/context.go index aa365a24..057d6ce8 100644 --- a/render/context.go +++ b/render/context.go @@ -15,17 +15,17 @@ import ( // Context provides the rendering context for a tag renderer. type Context interface { // Bindings returns the current lexical environment. - Bindings() map[string]interface{} + Bindings() map[string]any // Get retrieves the value of a variable from the current lexical environment. - Get(name string) interface{} + Get(name string) any // Errorf creates a SourceError, that includes the source location. // Use this to distinguish errors in the template from implementation errors // in the template engine. - Errorf(format string, a ...interface{}) Error + Errorf(format string, a ...any) Error // Evaluate evaluates a compiled expression within the current lexical context. - Evaluate(expressions.Expression) (interface{}, error) + Evaluate(expressions.Expression) (any, error) // EvaluateString compiles and evaluates a string expression such as “x”, “x < 10", or “a.b | split | first | default: 10”, within the current lexical context. - EvaluateString(string) (interface{}, error) + EvaluateString(string) (any, error) // ExpandTagArg renders the current tag argument string as a Liquid template. // It enables the implementation of tags such as Jekyll's "{% include {{ page.my_variable }} %}" andjekyll-avatar's "{% avatar {{page.author}} %}". ExpandTagArg() (string, error) @@ -40,10 +40,10 @@ type Context interface { RenderChildren(io.Writer) Error // RenderFile parses and renders a template. It's used in the implementation of the {% include %} tag. // RenderFile does not cache the compiled template. - RenderFile(string, map[string]interface{}) (string, error) + RenderFile(string, map[string]any) (string, error) // Set updates the value of a variable in the current lexical environment. // It's used in the implementation of the {% assign %} and {% capture %} tags. 
- Set(name string, value interface{}) + Set(name string, value any) // SourceFile retrieves the value set by template.SetSourcePath. // It's used in the implementation of the {% include %} tag. SourceFile() string @@ -74,7 +74,7 @@ func (i invalidLocation) SourceText() string { var invalidLoc parser.Locatable = invalidLocation{} -func (c rendererContext) Errorf(format string, a ...interface{}) Error { +func (c rendererContext) Errorf(format string, a ...any) Error { switch { case c.node != nil: return renderErrorf(c.node, format, a...) @@ -97,22 +97,22 @@ func (c rendererContext) WrapError(err error) Error { } } -func (c rendererContext) Evaluate(expr expressions.Expression) (out interface{}, err error) { +func (c rendererContext) Evaluate(expr expressions.Expression) (out any, err error) { return c.ctx.Evaluate(expr) } // EvaluateString evaluates an expression within the template context. -func (c rendererContext) EvaluateString(source string) (out interface{}, err error) { +func (c rendererContext) EvaluateString(source string) (out any, err error) { return expressions.EvaluateString(source, expressions.NewContext(c.ctx.bindings, c.ctx.config.Config.Config)) } // Bindings returns the current lexical environment. -func (c rendererContext) Bindings() map[string]interface{} { +func (c rendererContext) Bindings() map[string]any { return c.ctx.bindings } // Get gets a variable value within an evaluation context. -func (c rendererContext) Get(name string) interface{} { +func (c rendererContext) Get(name string) any { return c.ctx.bindings[name] } @@ -146,7 +146,7 @@ func (c rendererContext) RenderChildren(w io.Writer) Error { return c.ctx.RenderSequence(w, c.cn.Body) } -func (c rendererContext) RenderFile(filename string, b map[string]interface{}) (string, error) { +func (c rendererContext) RenderFile(filename string, b map[string]any) (string, error) { source, err := ioutil.ReadFile(filename) if err != nil && os.IsNotExist(err) { // Is it cached? @@ -162,7 +162,7 @@ func (c rendererContext) RenderFile(filename string, b map[string]interface{}) ( if err != nil { return "", err } - bindings := map[string]interface{}{} + bindings := map[string]any{} for k, v := range c.ctx.bindings { bindings[k] = v } @@ -186,7 +186,7 @@ func (c rendererContext) InnerString() (string, error) { } // Set sets a variable value from an evaluation context. -func (c rendererContext) Set(name string, value interface{}) { +func (c rendererContext) Set(name string, value any) { c.ctx.bindings[name] = value } diff --git a/tags/iteration_tags.go b/tags/iteration_tags.go index 702e443f..0c10c702 100644 --- a/tags/iteration_tags.go +++ b/tags/iteration_tags.go @@ -7,16 +7,17 @@ import ( "reflect" "sort" - yaml "gopkg.in/yaml.v2" + "gopkg.in/yaml.v2" "github.com/osteele/liquid/expressions" "github.com/osteele/liquid/render" + "github.com/osteele/liquid/values" ) // An IterationKeyedMap is a map that yields its keys, instead of (key, value) pairs, when iterated. 
type IterationKeyedMap map[string]interface{} -const forloopVarName = "forloop" +const forLoopVarName = "forloop" var errLoopContinueLoop = fmt.Errorf("continue outside a loop") var errLoopBreak = fmt.Errorf("break outside a loop") @@ -45,7 +46,7 @@ func cycleTag(args string) (func(io.Writer, render.Context) error, error) { } cycle := stmt.Cycle return func(w io.Writer, ctx render.Context) error { - loopVar := ctx.Get(forloopVarName) + loopVar := ctx.Get(forLoopVarName) if loopVar == nil { return ctx.Errorf("cycle must be within a forloop") } @@ -98,14 +99,14 @@ func (loop loopRenderer) render(w io.Writer, ctx render.Context) error { // shallow-bind the loop variables; restore on exit defer func(index, forloop interface{}) { - ctx.Set(forloopVarName, index) + ctx.Set(forLoopVarName, index) ctx.Set(loop.Variable, forloop) - }(ctx.Get(forloopVarName), ctx.Get(loop.Variable)) + }(ctx.Get(forLoopVarName), ctx.Get(loop.Variable)) cycleMap := map[string]int{} loop: for i, len := 0, iter.Len(); i < len; i++ { ctx.Set(loop.Variable, iter.Index(i)) - ctx.Set(forloopVarName, map[string]interface{}{ + ctx.Set(forLoopVarName, map[string]interface{}{ "first": i == 0, "last": i == len-1, "index": i + 1, @@ -139,7 +140,7 @@ func makeLoopDecorator(loop loopRenderer, ctx render.Context) (loopDecorator, er if err != nil { return nil, err } - cols, ok := val.(int) + cols, ok := values.ToInt64(val) if !ok { return nil, ctx.Errorf("loop cols must be an integer") } @@ -199,12 +200,12 @@ func applyLoopModifiers(loop expressions.Loop, ctx render.Context, iter iterable if err != nil { return nil, err } - offset, ok := val.(int) + offset, ok := values.ToInt64(val) if !ok { return nil, ctx.Errorf("loop offset must be an integer") } if offset > 0 { - iter = offsetWrapper{iter, offset} + iter = offsetWrapper{iter, int(offset)} } } @@ -213,12 +214,12 @@ func applyLoopModifiers(loop expressions.Loop, ctx render.Context, iter iterable if err != nil { return nil, err } - limit, ok := val.(int) + limit, ok := values.ToInt64(val) if !ok { return nil, ctx.Errorf("loop limit must be an integer") } if limit >= 0 { - iter = limitWrapper{iter, limit} + iter = limitWrapper{iter, int(limit)} } } diff --git a/values/convert.go b/values/convert.go index a688de82..37631e50 100644 --- a/values/convert.go +++ b/values/convert.go @@ -15,13 +15,13 @@ type TypeError string func (e TypeError) Error() string { return string(e) } -func typeErrorf(format string, a ...interface{}) TypeError { +func typeErrorf(format string, a ...any) TypeError { return TypeError(fmt.Sprintf(format, a...)) } var timeType = reflect.TypeOf(time.Now()) -func conversionError(modifier string, value interface{}, typ reflect.Type) error { +func conversionError(modifier string, value any, typ reflect.Type) error { if modifier != "" { modifier += " " } @@ -32,7 +32,7 @@ func conversionError(modifier string, value interface{}, typ reflect.Type) error return typeErrorf("can't convert %s%T(%v) to type %s", modifier, value, value, typ) } -func convertValueToInt(value interface{}, typ reflect.Type) (int64, error) { +func convertValueToInt(value any, typ reflect.Type) (int64, error) { switch value := value.(type) { case bool: if value { @@ -51,12 +51,11 @@ func convertValueToInt(value interface{}, typ reflect.Type) (int64, error) { return 0, conversionError("", value, typ) } return v, nil - } return 0, conversionError("", value, typ) } -func convertValueToFloat(value interface{}, typ reflect.Type) (float64, error) { +func convertValueToFloat(value any, typ reflect.Type) 
 	switch value := value.(type) {
 	// case int is handled by rv.Convert(typ) in Convert function
 	case string:
@@ -78,7 +77,7 @@ func convertValueToFloat(value interface{}, typ reflect.Type) (float64, error) {
 // Convert value to the type. This is a more aggressive conversion, that will
 // recursively create new map and slice values as necessary. It doesn't
 // handle circular references.
-func Convert(value interface{}, typ reflect.Type) (interface{}, error) { // nolint: gocyclo
+func Convert(value any, typ reflect.Type) (any, error) { // nolint: gocyclo
 	value = ToLiquid(value)
 	rv := reflect.ValueOf(value)
 	// int.Convert(string) returns "\x01" not "1", so guard against that in the following test
@@ -241,7 +240,7 @@ func Convert(value interface{}, typ reflect.Type) (interface{}, error) { // noli
 }
 
 // MustConvert is like Convert, but panics if conversion fails.
-func MustConvert(value interface{}, t reflect.Type) interface{} {
+func MustConvert(value any, t reflect.Type) any {
 	out, err := Convert(value, t)
 	if err != nil {
 		panic(err)
@@ -251,10 +250,61 @@ func MustConvert(value interface{}, t reflect.Type) interface{} {
 
 // MustConvertItem converts item to conform to the type array's element, else panics.
 // Unlike MustConvert, the second argument is a value not a type.
-func MustConvertItem(item interface{}, array interface{}) interface{} {
+func MustConvertItem(item any, array any) any {
 	item, err := Convert(item, reflect.TypeOf(array).Elem())
 	if err != nil {
 		panic(typeErrorf("can't convert %#v to %s: %s", item, reflect.TypeOf(array).Elem(), err))
 	}
 	return item
 }
+
+func ToInt64(a any) (int64, bool) {
+	switch ia := a.(type) {
+	case int:
+		return int64(ia), true
+	case int64:
+		return ia, true
+	case int8:
+		return int64(ia), true
+	case int16:
+		return int64(ia), true
+	case int32:
+		return int64(ia), true
+	case uint:
+		return int64(ia), true
+	case uint64:
+		return int64(ia), true
+	case uint8:
+		return int64(ia), true
+	case uint16:
+		return int64(ia), true
+	case uint32:
+		return int64(ia), true
+	}
+	if a == nil {
+		return 0, true
+	}
+	if s, ok := a.(string); ok {
+		if s == "" {
+			return 0, true
+		}
+	}
+	return 0, false
+}
+
+func ToFloat64(a any) (float64, bool) {
+	switch v := a.(type) {
+	case float64:
+		return v, true
+	case float32:
+		return float64(v), true
+	}
+	if v, ok := ToInt64(a); ok {
+		return float64(v), true
+	}
+	out, err := Convert(a, float64Type)
+	if err != nil {
+		return 0, false
+	}
+	return out.(float64), true
+}
diff --git a/values/drop.go b/values/drop.go
index 6c1b3721..3ddab17d 100644
--- a/values/drop.go
+++ b/values/drop.go
@@ -5,11 +5,11 @@ import (
 )
 
 type drop interface {
-	ToLiquid() interface{}
+	ToLiquid() any
 }
 
 // ToLiquid converts an object to Liquid, if it implements the Drop interface.
-func ToLiquid(value interface{}) interface{} {
+func ToLiquid(value any) any {
 	switch value := value.(type) {
 	case drop:
 		return value.ToLiquid()
@@ -34,6 +34,6 @@ func (w *dropWrapper) Less(o Value) bool { return w.Resolve().Less(o)
 func (w *dropWrapper) IndexValue(i Value) Value { return w.Resolve().IndexValue(i) }
 func (w *dropWrapper) Contains(o Value) bool { return w.Resolve().Contains(o) }
 func (w *dropWrapper) Int() int { return w.Resolve().Int() }
-func (w *dropWrapper) Interface() interface{} { return w.Resolve().Interface() }
+func (w *dropWrapper) Interface() any { return w.Resolve().Interface() }
 func (w *dropWrapper) PropertyValue(k Value) Value { return w.Resolve().PropertyValue(k) }
 func (w *dropWrapper) Test() bool { return w.Resolve().Test() }
diff --git a/values/value.go b/values/value.go
index 6bee0be0..fced3aea 100644
--- a/values/value.go
+++ b/values/value.go
@@ -100,8 +100,27 @@ func (v wrapperValue) PropertyValue(Value) Value { return nilValue }
 func (v wrapperValue) Test() bool { return v.value != nil && v.value != false }
 
 func (v wrapperValue) Int() int {
-	if n, ok := v.value.(int); ok {
-		return n
+	switch ia := v.value.(type) {
+	case int:
+		return ia
+	case int64:
+		return int(ia)
+	case int8:
+		return int(ia)
+	case int16:
+		return int(ia)
+	case int32:
+		return int(ia)
+	case uint:
+		return int(ia)
+	case uint64:
+		return int(ia)
+	case uint8:
+		return int(ia)
+	case uint16:
+		return int(ia)
+	case uint32:
+		return int(ia)
 	}
 	panic(conversionError("", v.value, reflect.TypeOf(1)))
 }
@@ -135,6 +154,24 @@ func (av arrayValue) IndexValue(iv Value) Value {
 	switch ix := iv.Interface().(type) {
 	case int:
 		n = ix
+	case int64:
+		n = int(ix)
+	case int8:
+		n = int(ix)
+	case int16:
+		n = int(ix)
+	case int32:
+		n = int(ix)
+	case uint:
+		n = int(ix)
+	case uint64:
+		n = int(ix)
+	case uint8:
+		n = int(ix)
+	case uint16:
+		n = int(ix)
+	case uint32:
+		n = int(ix)
 	case float32:
 		// Ruby array indexing truncates floats
 		n = int(ix)