Mirror of https://github.com/danog/liquid.git, synced 2024-11-30 05:58:59 +01:00
Coverage

commit caca7a2b60
parent 983b9f50e5

.gitignore (vendored) | 1 +
@@ -1,3 +1,4 @@
 *.output
 liquid
+*.out
@@ -71,11 +71,11 @@ func (b tagBuilder) Governs(_ []string) tagBuilder {
 
 // SameSyntaxAs tells the parser that this tag has the same syntax as the named tag.
 func (b tagBuilder) SameSyntaxAs(name string) tagBuilder {
-	ot := b.s.controlTags[name]
-	if ot == nil {
+	rt := b.s.controlTags[name]
+	if rt == nil {
 		panic(fmt.Errorf("undefined: %s", name))
 	}
-	b.tag.syntaxModel = ot
+	b.tag.syntaxModel = rt
 	return b
 }
 
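
Usage sketch (illustrative, not part of the diff), assuming the Settings builder API exercised by this commit's parser tests; the tag names here are examples only:

    s := NewSettings()
    s.AddStartTag("if").Branch("else").Branch("elsif")
    // "unless" reuses the syntax rules registered for "if".
    // Passing a name that was never registered panics with "undefined: <name>".
    s.AddStartTag("unless").SameSyntaxAs("if")
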
@@ -23,7 +23,7 @@ func (c Chunk) MarshalYAML() (interface{}, error) {
 	case TagChunkType:
 		return map[string]interface{}{"tag": c.Name, "args": c.Args}, nil
 	case ObjChunkType:
-		return map[string]interface{}{"obj": c.Name}, nil
+		return map[string]interface{}{"obj": c.Args}, nil
 	default:
 		return nil, fmt.Errorf("unknown chunk tag type: %v", c.Type)
 	}
@@ -2,22 +2,29 @@ package chunks
 
 import (
 	"fmt"
+	"io"
 	"testing"
 
 	"github.com/stretchr/testify/require"
 )
 
-func addTestTags(s Settings) {
+func addParserTestTags(s Settings) {
 	s.AddStartTag("case").Branch("when")
 	s.AddStartTag("comment")
 	s.AddStartTag("for").Governs([]string{"break"})
 	s.AddStartTag("if").Branch("else").Branch("elsif")
+	s.AddStartTag("unless").SameSyntaxAs("if")
 	s.AddStartTag("raw")
+	s.AddStartTag("err1").Parser(func(c ASTControlTag) (func(io.Writer, RenderContext) error, error) {
+		return nil, fmt.Errorf("stage 1 error")
+	})
 }
 
 var parseErrorTests = []struct{ in, expected string }{
 	{"{%unknown_tag%}", "unknown tag"},
 	{"{%if test%}", "unterminated if tag"},
+	{"{%if test%}{% endunless %}", "not inside unless"},
+	{`{% err1 %}{% enderr1 %}`, "stage 1 error"},
 	// {"{%for syntax error%}{%endfor%}", "parse error"},
 }
 
@@ -25,13 +32,14 @@ var parserTests = []struct{ in string }{
 	{`{% for item in list %}{% endfor %}`},
 	{`{% if test %}{% else %}{% endif %}`},
 	{`{% if test %}{% if test %}{% endif %}{% endif %}`},
+	{`{% unless test %}{% else %}{% endunless %}`},
 	{`{% for item in list %}{% if test %}{% else %}{% endif x %}{% endfor %}`},
 	{`{% if true %}{% raw %}{% endraw %}{% endif %}`},
 }
 
 func TestParseErrors(t *testing.T) {
 	settings := NewSettings()
-	addTestTags(settings)
+	addParserTestTags(settings)
 	for i, test := range parseErrorTests {
 		t.Run(fmt.Sprintf("%02d", i+1), func(t *testing.T) {
 			ast, err := settings.Parse(test.in)
@@ -44,7 +52,7 @@ func TestParseErrors(t *testing.T) {
 
 func TestParser(t *testing.T) {
 	settings := NewSettings()
-	addTestTags(settings)
+	addParserTestTags(settings)
 	for i, test := range parserTests {
 		t.Run(fmt.Sprintf("%02d", i+1), func(t *testing.T) {
 			_, err := settings.Parse(test.in)
@@ -3,17 +3,39 @@ package chunks
 import (
 	"bytes"
 	"fmt"
+	"io"
+	"io/ioutil"
 	"testing"
 
 	"github.com/stretchr/testify/require"
 )
 
-var renderTests = []struct{ in, expected string }{
-	// {"{%if syntax error%}{%endif%}", "parse error"},
+func addRenderTestTags(s Settings) {
+	s.AddStartTag("parse").Parser(func(c ASTControlTag) (func(io.Writer, RenderContext) error, error) {
+		a := c.Args
+		return func(w io.Writer, c RenderContext) error {
+			_, err := w.Write([]byte(a))
+			return err
+		}, nil
+	})
+	s.AddStartTag("err2").Parser(func(c ASTControlTag) (func(io.Writer, RenderContext) error, error) {
+		return func(w io.Writer, c RenderContext) error {
+			return fmt.Errorf("stage 2 error")
+		}, nil
+	})
+}
+
+var renderTests = []struct{ in, out string }{
 	{`{{ 12 }}`, "12"},
 	{`{{ x }}`, "123"},
 	{`{{ page.title }}`, "Introduction"},
 	{`{{ ar[1] }}`, "second"},
+	{`{% parse args %}{% endparse %}`, "args"},
+}
+
+var renderErrorTests = []struct{ in, out string }{
+	// {"{%if syntax error%}{%endif%}", "parse error"},
+	{`{% err2 %}{% enderr2 %}`, "stage 2 error"},
 }
 
 var renderTestBindings = map[string]interface{}{
@@ -45,6 +67,7 @@ var renderTestBindings = map[string]interface{}{
 
 func TestRender(t *testing.T) {
 	settings := NewSettings()
+	addRenderTestTags(settings)
 	context := NewContext(renderTestBindings, settings)
 	for i, test := range renderTests {
 		t.Run(fmt.Sprintf("%02d", i+1), func(t *testing.T) {
@@ -53,7 +76,22 @@ func TestRender(t *testing.T) {
 			buf := new(bytes.Buffer)
 			err = ast.Render(buf, context)
 			require.NoErrorf(t, err, test.in)
-			require.Equalf(t, test.expected, buf.String(), test.in)
+			require.Equalf(t, test.out, buf.String(), test.in)
+		})
+	}
+}
+
+func TestRenderErrors(t *testing.T) {
+	settings := NewSettings()
+	addRenderTestTags(settings)
+	context := NewContext(renderTestBindings, settings)
+	for i, test := range renderErrorTests {
+		t.Run(fmt.Sprintf("%02d", i+1), func(t *testing.T) {
+			ast, err := settings.Parse(test.in)
+			require.NoErrorf(t, err, test.in)
+			err = ast.Render(ioutil.Discard, context)
+			require.Errorf(t, err, test.in)
+			require.Containsf(t, err.Error(), test.out, test.in)
 		})
 	}
 }
@@ -22,7 +22,7 @@ var scannerCountTests = []struct {
 	{`{{ expr arg }}{{ expr arg }}`, 2},
 }
 
-func TestScanner(t *testing.T) {
+func TestChunkScanner(t *testing.T) {
 	tokens := Scan("12", "")
 	require.NotNil(t, tokens)
 	require.Len(t, tokens, 1)
@@ -55,6 +55,10 @@ func TestScanner(t *testing.T) {
 	require.Equal(t, "tag", tokens[0].Name)
 	require.Equal(t, "args", tokens[0].Args)
 
+	tokens = Scan("pre{% tag args %}mid{{ object }}post", "")
+	require.Equal(t, `[TextChunkType{"pre"} TagChunkType{Tag:"tag", Args:"args"} TextChunkType{"mid"} ObjChunkType{"object"} TextChunkType{"post"}]`, fmt.Sprint(tokens))
+	require.Equal(t, "- text: pre\n- args: args\n  tag: tag\n- text: mid\n- obj: object\n- text: post\n", MustYAML(tokens))
+
 	for i, test := range scannerCountTests {
 		t.Run(fmt.Sprintf("%02d", i), func(t *testing.T) {
 			tokens := Scan(test.in, "")
|
@ -2,6 +2,7 @@ package expressions
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
@@ -91,9 +92,12 @@ var evaluatorTests = []struct {
 
 	{`"seafood" contains "foo"`, true},
 	{`"seafood" contains "bar"`, false},
+
+	// filters
+	{`"seafood" | length`, 8},
 }
 
-var evaluatorTestContext = NewContext(map[string]interface{}{
+var evaluatorTestBindings = (map[string]interface{}{
 	"n": 123,
 	"array": []string{"first", "second", "third"},
 	"empty_list": []interface{}{},
@@ -103,12 +107,15 @@ var evaluatorTestContext = NewContext(map[string]interface{}{
 		"b": map[string]interface{}{"c": "d"},
 		"c": []string{"r", "g", "b"},
 	},
-}, NewSettings())
+})
 
 func TestEvaluator(t *testing.T) {
+	settings := NewSettings()
+	settings.AddFilter("length", strings.Count)
+	context := NewContext(evaluatorTestBindings, settings)
 	for i, test := range evaluatorTests {
 		t.Run(fmt.Sprint(i), func(t *testing.T) {
-			val, err := EvaluateString(test.in, evaluatorTestContext)
+			val, err := EvaluateString(test.in, context)
 			require.NoErrorf(t, err, test.in)
 			require.Equalf(t, test.expected, val, test.in)
 		})
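
Aside (not part of the diff): the expected value 8 for the new `"seafood" | length` case follows from strings.Count, which for an empty substring returns one more than the number of runes in the string. A self-contained sketch:

    package main

    import (
    	"fmt"
    	"strings"
    )

    func main() {
    	// strings.Count(s, "") returns 1 + the number of runes in s,
    	// so a "length" filter backed by it maps "seafood" (7 runes) to 8.
    	fmt.Println(strings.Count("seafood", "")) // 8
    }
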
@@ -39,7 +39,7 @@ func Parse(source string) (expr Expression, err error) {
 	lexer := newLexer([]byte(source + ";"))
 	n := yyParse(lexer)
 	if n != 0 {
-		return nil, fmt.Errorf("parse error in %s", source)
+		return nil, fmt.Errorf("parse error in %q", source)
 	}
 	return &expression{lexer.val}, nil
 }
@@ -8,10 +8,10 @@ import (
 )
 
 var parseErrorTests = []struct{ in, expected string }{
-	// {"a | unknown_filter", "undefined filter: unknown_filter"},
+	{"a syntax error", "parse error"},
 }
 
-func TestParseErrors(t *testing.T) {
+func TestExpressionParseErrors(t *testing.T) {
 	for i, test := range parseErrorTests {
 		t.Run(fmt.Sprintf("%02d", i+1), func(t *testing.T) {
 			expr, err := Parse(test.in)
@@ -1,13 +1,12 @@
 
 //line scanner.rl:1
 package expressions
-// Adapted from https://github.com/mhamrah/thermostat
 
 import "fmt"
 import "strconv"
 
 
-//line scanner.go:11
+//line scanner.go:10
 var _expression_actions []byte = []byte{
 	0, 1, 0, 1, 1, 1, 2, 1, 12,
 	1, 13, 1, 14, 1, 15, 1, 16,
@@ -176,7 +175,7 @@ const expression_error int = -1
 const expression_en_main int = 11
 
 
-//line scanner.rl:13
+//line scanner.rl:12
 
 
 type lexer struct {
@@ -196,7 +195,7 @@ func newLexer(data []byte) *lexer {
 		pe: len(data),
 	}
 
-//line scanner.go:200
+//line scanner.go:199
 	{
 	lex.cs = expression_start
 	lex.ts = 0
@@ -204,7 +203,7 @@ func newLexer(data []byte) *lexer {
 	lex.act = 0
 	}
 
-//line scanner.rl:32
+//line scanner.rl:31
 	return lex
 }
 
@@ -213,7 +212,7 @@ func (lex *lexer) Lex(out *yySymType) int {
 	tok := 0
 
 
-//line scanner.go:217
+//line scanner.go:216
 	{
 	var _klen int
 	var _trans int
@@ -233,7 +232,7 @@ _resume:
 //line NONE:1
 	lex.ts = ( lex.p)
 
-//line scanner.go:237
+//line scanner.go:236
 		}
 	}
 
@@ -308,44 +307,44 @@ _eof_trans:
 lex.te = ( lex.p)+1
 
 	case 3:
-//line scanner.rl:59
+//line scanner.rl:58
 lex.act = 4;
 	case 4:
-//line scanner.rl:40
+//line scanner.rl:39
 lex.act = 6;
 	case 5:
-//line scanner.rl:95
+//line scanner.rl:94
 lex.act = 11;
 	case 6:
-//line scanner.rl:96
+//line scanner.rl:95
 lex.act = 12;
 	case 7:
-//line scanner.rl:97
+//line scanner.rl:96
 lex.act = 13;
 	case 8:
-//line scanner.rl:98
+//line scanner.rl:97
 lex.act = 14;
 	case 9:
-//line scanner.rl:99
+//line scanner.rl:98
 lex.act = 15;
 	case 10:
-//line scanner.rl:45
+//line scanner.rl:44
 lex.act = 17;
 	case 11:
-//line scanner.rl:103
+//line scanner.rl:102
 lex.act = 19;
 	case 12:
-//line scanner.rl:85
+//line scanner.rl:84
 lex.te = ( lex.p)+1
 { tok = ASSIGN; ( lex.p)++; goto _out
 }
 	case 13:
-//line scanner.rl:86
+//line scanner.rl:85
 lex.te = ( lex.p)+1
 { tok = LOOP; ( lex.p)++; goto _out
 }
 	case 14:
-//line scanner.rl:68
+//line scanner.rl:67
 lex.te = ( lex.p)+1
 {
 tok = LITERAL
@@ -355,37 +354,37 @@ _eof_trans:
 
 }
 	case 15:
-//line scanner.rl:91
+//line scanner.rl:90
 lex.te = ( lex.p)+1
 { tok = EQ; ( lex.p)++; goto _out
 }
 	case 16:
-//line scanner.rl:92
+//line scanner.rl:91
 lex.te = ( lex.p)+1
 { tok = NEQ; ( lex.p)++; goto _out
 }
 	case 17:
-//line scanner.rl:93
+//line scanner.rl:92
 lex.te = ( lex.p)+1
 { tok = GE; ( lex.p)++; goto _out
 }
 	case 18:
-//line scanner.rl:94
+//line scanner.rl:93
 lex.te = ( lex.p)+1
 { tok = LE; ( lex.p)++; goto _out
 }
 	case 19:
-//line scanner.rl:100
+//line scanner.rl:99
 lex.te = ( lex.p)+1
 { tok = KEYWORD; out.name = string(lex.data[lex.ts:lex.te-1]); ( lex.p)++; goto _out
 }
 	case 20:
-//line scanner.rl:103
+//line scanner.rl:102
 lex.te = ( lex.p)+1
 { tok = int(lex.data[lex.ts]); ( lex.p)++; goto _out
 }
 	case 21:
-//line scanner.rl:50
+//line scanner.rl:49
 lex.te = ( lex.p)
 ( lex.p)--
 {
@@ -399,7 +398,7 @@ _eof_trans:
 
 }
 	case 22:
-//line scanner.rl:45
+//line scanner.rl:44
 lex.te = ( lex.p)
 ( lex.p)--
 {
@@ -409,18 +408,18 @@ _eof_trans:
 
 }
 	case 23:
-//line scanner.rl:102
+//line scanner.rl:101
 lex.te = ( lex.p)
 ( lex.p)--
 
 	case 24:
-//line scanner.rl:103
+//line scanner.rl:102
 lex.te = ( lex.p)
 ( lex.p)--
 { tok = int(lex.data[lex.ts]); ( lex.p)++; goto _out
 }
 	case 25:
-//line scanner.rl:103
+//line scanner.rl:102
 ( lex.p) = ( lex.te) - 1
 { tok = int(lex.data[lex.ts]); ( lex.p)++; goto _out
 }
@@ -481,7 +480,7 @@ _eof_trans:
 		}
 	}
 
-//line scanner.go:485
+//line scanner.go:484
 		}
 	}
 
@@ -495,7 +494,7 @@ _again:
 //line NONE:1
 	lex.ts = 0
 
-//line scanner.go:499
+//line scanner.go:498
 		}
 	}
 
@@ -514,12 +513,12 @@ _again:
 	_out: {}
 	}
 
-//line scanner.rl:107
+//line scanner.rl:106
 
 
 	return tok
 }
 
 func (lex *lexer) Error(e string) {
-	fmt.Println("error:", e)
+	// fmt.Println("scan error:", e)
 }
@@ -109,5 +109,5 @@ func (lex *lexer) Lex(out *yySymType) int {
 }
 
 func (lex *lexer) Error(e string) {
-	fmt.Println("error:", e)
+	// fmt.Println("scan error:", e)
 }
@@ -32,4 +32,10 @@ func TestExpressionScanner(t *testing.T) {
 	tokens, err := scanExpression("abc > 123")
 	require.NoError(t, err)
 	require.Len(t, tokens, 3)
+
+	tokens, _ = scanExpression("forage")
+	require.Len(t, tokens, 1)
+
+	tokens, _ = scanExpression("orange")
+	require.Len(t, tokens, 1)
 }
@@ -25,7 +25,8 @@ func conversionError(modifier string, value interface{}, typ reflect.Type) error
 // handle circular references.
 func Convert(value interface{}, target reflect.Type) (interface{}, error) { // nolint: gocyclo
 	r := reflect.ValueOf(value)
-	if r.Type().ConvertibleTo(target) {
+	// convert int.Convert(string) yields "\x01" not "1"
+	if target.Kind() != reflect.String && r.Type().ConvertibleTo(target) {
 		return r.Convert(target).Interface(), nil
 	}
 	if reflect.PtrTo(r.Type()) == target {
@@ -97,6 +98,8 @@ func Convert(value interface{}, target reflect.Type) (interface{}, error) { // n
 		}
 		return out.Interface(), nil
 	}
+	case reflect.String:
+		return fmt.Sprint(value), nil
 	}
 	return nil, conversionError("", value, target)
 }
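
Aside (not part of the diff): the guard and comment added above reflect how reflect-based conversion treats integers: converting an int to a string yields the string containing that code point, not its decimal form, which is why string targets are now handled with fmt.Sprint. A standalone sketch of the underlying behavior:

    package main

    import (
    	"fmt"
    	"reflect"
    )

    func main() {
    	// Converting the int 1 to string via reflect produces "\x01" (code point 1),
    	// whereas fmt.Sprint produces the decimal text "1".
    	v := reflect.ValueOf(1).Convert(reflect.TypeOf("")).Interface()
    	fmt.Printf("%q %q\n", v, fmt.Sprint(1)) // "\x01" "1"
    }
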
@@ -16,6 +16,10 @@ var convertTests = []struct {
 	{"1.2", 1.0, float64(1.2)},
 	{true, 1, 1},
 	{false, 1, 0},
+	{1, "", "1"},
+	{false, "", "false"},
+	{true, "", "true"},
+	{"string", "", "string"},
 }
 
 var eqTests = []struct {
@@ -63,13 +67,23 @@ var lessTests = []struct {
 	{[]string{"a"}, []string{"a"}, false},
 }
 
+func TestCall(t *testing.T) {
+	fn := func(a, b string) string {
+		return a + "," + b + "."
+	}
+	args := []interface{}{5, 10}
+	value, err := Call(reflect.ValueOf(fn), args)
+	require.NoError(t, err)
+	require.Equal(t, "5,10.", value)
+}
 func TestConvert(t *testing.T) {
 	for i, test := range convertTests {
 		t.Run(fmt.Sprintf("%02d", i+1), func(t *testing.T) {
 			typ := reflect.TypeOf(test.proto)
 			value, err := Convert(test.value, typ)
-			require.NoError(t, err)
-			require.Equalf(t, test.expected, value, "Convert %#v -> %#v", test.value, test, typ)
+			name := fmt.Sprintf("Convert %#v -> %v", test.value, typ)
+			require.NoErrorf(t, err, name)
+			require.Equalf(t, test.expected, value, name)
 		})
 	}
 }
@@ -2,9 +2,12 @@ package liquid
 
 import (
 	"fmt"
+	"io"
 	"log"
+	"strings"
 	"testing"
 
+	"github.com/osteele/liquid/chunks"
 	"github.com/stretchr/testify/require"
 )
 
@@ -37,9 +40,9 @@ func TestLiquid(t *testing.T) {
 
 func Example() {
 	engine := NewEngine()
-	template := `<h1>{{page.title}}</h1>`
+	template := `<h1>{{ page.title }}</h1>`
 	bindings := map[string]interface{}{
-		"page": map[string]interface{}{
+		"page": map[string]string{
 			"title": "Introduction",
 		},
 	}
@@ -51,3 +54,59 @@ func Example() {
 	fmt.Println(out)
 	// Output: <h1>Introduction</h1>
 }
+
+func Example_filter() {
+	engine := NewEngine()
+	engine.DefineFilter("has_prefix", strings.HasPrefix)
+	template := `{{ title | has_prefix: "Intro" }}`
+
+	bindings := map[string]interface{}{
+		"title": "Introduction",
+	}
+	out, err := engine.ParseAndRenderString(template, NewContext(bindings))
+	if err != nil {
+		log.Fatalln(err)
+	}
+	fmt.Println(out)
+	// Output: true
+}
+
+func Example_tag() {
+	engine := NewEngine()
+	engine.DefineTag("echo", func(w io.Writer, c chunks.RenderContext) error {
+		args := c.TagArgs()
+		_, err := w.Write([]byte(args))
+		return err
+	})
+
+	template := `{% echo hello world %}`
+	bindings := map[string]interface{}{}
+	out, err := engine.ParseAndRenderString(template, NewContext(bindings))
+	if err != nil {
+		log.Fatalln(err)
+	}
+	fmt.Println(out)
+	// Output: hello world
+}
+
+func Example_tag_pair() {
+	engine := NewEngine()
+	engine.DefineStartTag("length", func(w io.Writer, c chunks.RenderContext) error {
+		s, err := c.InnerString()
+		if err != nil {
+			return err
+		}
+		n := len(s)
+		_, err = w.Write([]byte(fmt.Sprint(n)))
+		return err
+	})
+
+	template := `{% length %}abc{% endlength %}`
+	bindings := map[string]interface{}{}
+	out, err := engine.ParseAndRenderString(template, NewContext(bindings))
+	if err != nil {
+		log.Fatalln(err)
+	}
+	fmt.Println(out)
+	// Output: 3
+}

scripts/coverage (new executable file) | 12 +
@@ -0,0 +1,12 @@
+#!/usr/bin/env bash
+
+set -e
+
+echo 'mode: set' > coverage.out
+
+for p in $(go list -f '{{.ImportPath}}' ./...); do
+    rm -f package-coverage.out
+    go test -coverprofile=package-coverage.out $p
+    [[ -f package-coverage.out ]] && grep -v 'mode: set' package-coverage.out >> coverage.out
+    rm -f package-coverage.out
+done