// Copyright 2017 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.

package asm

import (
	"reflect"
	"testing"
)
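
// lexAll feeds src through Lex and drains the resulting token channel,
// returning every token emitted, including the trailing eof token.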
func lexAll(src string) []token {
	ch := Lex([]byte(src), false)

	var tokens []token
	for i := range ch {
		tokens = append(tokens, i)
	}
	return tokens
}
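
// TestLexer runs a set of assembler snippets through lexAll and checks the
// resulting token streams against hand-written expectations.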
func TestLexer(t *testing.T) {
	tests := []struct {
		input  string
		tokens []token
	}{
		{
			input:  ";; this is a comment",
			tokens: []token{{typ: lineStart}, {typ: eof}},
		},
		{
			input:  "0x12345678",
			tokens: []token{{typ: lineStart}, {typ: number, text: "0x12345678"}, {typ: eof}},
		},
		{
			input:  "0x123ggg",
			tokens: []token{{typ: lineStart}, {typ: number, text: "0x123"}, {typ: element, text: "ggg"}, {typ: eof}},
		},
		{
			input:  "12345678",
			tokens: []token{{typ: lineStart}, {typ: number, text: "12345678"}, {typ: eof}},
		},
		{
			input:  "123abc",
			tokens: []token{{typ: lineStart}, {typ: number, text: "123"}, {typ: element, text: "abc"}, {typ: eof}},
		},
		{
			input:  "0123abc",
			tokens: []token{{typ: lineStart}, {typ: number, text: "0123"}, {typ: element, text: "abc"}, {typ: eof}},
		},
		{
			input:  "00123abc",
			tokens: []token{{typ: lineStart}, {typ: number, text: "00123"}, {typ: element, text: "abc"}, {typ: eof}},
		},
		{
			input:  "@foo",
			tokens: []token{{typ: lineStart}, {typ: label, text: "foo"}, {typ: eof}},
		},
		{
			input:  "@label123",
			tokens: []token{{typ: lineStart}, {typ: label, text: "label123"}, {typ: eof}},
		},
		// Comment after label
		{
			input:  "@label123 ;; comment",
			tokens: []token{{typ: lineStart}, {typ: label, text: "label123"}, {typ: eof}},
		},
		// Comment after instruction
		{
			input:  "push 3 ;; comment\nadd",
			tokens: []token{{typ: lineStart}, {typ: element, text: "push"}, {typ: number, text: "3"}, {typ: lineEnd, text: "\n"}, {typ: lineStart, lineno: 1}, {typ: element, lineno: 1, text: "add"}, {typ: eof, lineno: 1}},
		},
	}
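
	// Run every case through lexAll and compare the full token stream,
	// including line-number metadata, against the expectation.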
	for _, test := range tests {
		tokens := lexAll(test.input)
		if !reflect.DeepEqual(tokens, test.tokens) {
			t.Errorf("input %q\ngot: %+v\nwant: %+v", test.input, tokens, test.tokens)
		}
	}
}