pqarrays
pqarrays/lexer_test.go
Fix bug parsing empty arrays, make golint and go vet happy. Add comments to make golint happy. Also, because comments are a good thing to have. Turn += 1 and -= 1 into ++ and --, respectively, so golint will be happy. Fix an improperly formatted errorf, where a rune was being treated as a string. Thanks, go vet! Fix whitespace parsing, returning the parse functions again instead of just skipping the one character. Now if we have more than one whitespace character in a row, they'll all be skipped. Add a parseStringOrNullOrEnd parse function that will be called after the tokenArrayStart character, to fix a bug where empty arrays were expecting a string or null and getting the array end character. This is only valid after tokenArrayStart, however; in other places where parseSeparatorOrDelim is used, it wouldn't be appropriate. Add a parser test for an empty array.
| paddy@0 | 1 package pqarrays |
| paddy@0 | 2 |
| paddy@0 | 3 import ( |
| paddy@0 | 4 "testing" |
| paddy@0 | 5 ) |
| paddy@0 | 6 |
| paddy@0 | 7 var testInputs = map[string][]token{ |
| paddy@0 | 8 ``: []token{{typ: tokenError, val: "expected array to start before "}}, |
| paddy@0 | 9 `{}`: []token{{typ: tokenArrayStart, val: "{"}, {typ: tokenArrayEnd, val: "}"}}, |
| paddy@0 | 10 `{lions}`: []token{{typ: tokenArrayStart, val: "{"}, {typ: tokenString, val: "lions"}, {typ: tokenArrayEnd, val: "}"}}, |
| paddy@0 | 11 `{lions,tigers}`: []token{{typ: tokenArrayStart, val: "{"}, {typ: tokenString, val: "lions"}, {typ: tokenSeparator, val: ","}, {typ: tokenString, val: "tigers"}, {typ: tokenArrayEnd, val: "}"}}, |
| paddy@0 | 12 `{lions,tigers,bears}`: []token{{typ: tokenArrayStart, val: "{"}, {typ: tokenString, val: "lions"}, {typ: tokenSeparator, val: ","}, {typ: tokenString, val: "tigers"}, {typ: tokenSeparator, val: ","}, {typ: tokenString, val: "bears"}, {typ: tokenArrayEnd, val: "}"}}, |
| paddy@0 | 13 `{lions,tigers,bears,"oh my!"}`: []token{{typ: tokenArrayStart, val: "{"}, {typ: tokenString, val: "lions"}, {typ: tokenSeparator, val: ","}, {typ: tokenString, val: "tigers"}, {typ: tokenSeparator, val: ","}, {typ: tokenString, val: "bears"}, {typ: tokenSeparator, val: ","}, {typ: tokenString, val: "oh my!"}, {typ: tokenArrayEnd, val: "}"}}, |
| paddy@0 | 14 `{{two,dimensional},{array,"of items"}}`: []token{{typ: tokenArrayStart, val: "{"}, {typ: tokenArrayStart, val: "{"}, {typ: tokenString, val: "two"}, {typ: tokenSeparator, val: ","}, {typ: tokenString, val: "dimensional"}, {typ: tokenArrayEnd, val: "}"}, {typ: tokenSeparator, val: ","}, {typ: tokenArrayStart, val: "{"}, {typ: tokenString, val: "array"}, {typ: tokenSeparator, val: ","}, {typ: tokenString, val: "of items"}, {typ: tokenArrayEnd, val: "}"}, {typ: tokenArrayEnd, val: "}"}}, |
| paddy@0 | 15 } |
| paddy@0 | 16 |
| paddy@0 | 17 func TestInputsTable(t *testing.T) { |
| paddy@0 | 18 for input, expectedTokens := range testInputs { |
| paddy@0 | 19 l := lex(input) |
| paddy@0 | 20 var tokens []token |
| paddy@0 | 21 for { |
| paddy@0 | 22 tok := l.nextToken() |
| paddy@0 | 23 if tok.typ == tokenEOF { |
| paddy@0 | 24 break |
| paddy@0 | 25 } |
| paddy@0 | 26 tokens = append(tokens, tok) |
| paddy@0 | 27 if tok.typ == tokenError { |
| paddy@0 | 28 break |
| paddy@0 | 29 } |
| paddy@0 | 30 } |
| paddy@0 | 31 t.Logf("%#+v\n", tokens) |
| paddy@0 | 32 if len(tokens) != len(expectedTokens) { |
| paddy@0 | 33 t.Fatalf("Expected %d tokens, got %d\n", len(expectedTokens), len(tokens)) |
| paddy@0 | 34 } |
| paddy@0 | 35 for pos, tok := range tokens { |
| paddy@0 | 36 if expectedTokens[pos].typ != tok.typ { |
| paddy@0 | 37 t.Errorf("Expected token in pos %d to have type of %s, got %s instead.", pos, expectedTokens[pos].typ, tok.typ) |
| paddy@0 | 38 } |
| paddy@0 | 39 if expectedTokens[pos].val != tok.val { |
| paddy@0 | 40 t.Errorf("Expected token in pos %d to have value of `%s`, got `%s` instead.", pos, expectedTokens[pos].val, tok.val) |
| paddy@0 | 41 } |
| paddy@0 | 42 } |
| paddy@0 | 43 } |
| paddy@0 | 44 } |