Mirror of https://github.com/DanilaFe/abacus — synced 2024-12-24 00:10:09 -08:00.

Write basic tokenizer tests.

This commit is contained in:
Danila Fedorin 2017-07-30 14:52:10 -07:00
parent 127644ea46
commit b553c05f49

View File

@ -0,0 +1,50 @@
package org.nwapw.abacus.tests;
import org.junit.Assert;
import org.junit.Test;
import org.nwapw.abacus.lexing.pattern.Match;
import org.nwapw.abacus.parsing.LexerTokenizer;
import org.nwapw.abacus.tree.TokenType;
import java.util.List;
/**
 * Basic tests for {@link LexerTokenizer}: number literals and simple
 * single-character tokens.
 */
public class TokenizerTests {

    /** Tokenizer under test; reused across tests (no visible state between calls). */
    private final LexerTokenizer lexerTokenizer = new LexerTokenizer();

    /**
     * Tokenizes {@code input} and asserts it yields exactly one token of type
     * {@link TokenType#NUM}.
     *
     * @param input the string to tokenize
     */
    private void assertSingleNumberToken(String input) {
        List<Match<TokenType>> tokens = lexerTokenizer.tokenizeString(input);
        Assert.assertNotNull(tokens);
        // JUnit contract is assertEquals(expected, actual) — expected first,
        // so failure messages report the values correctly.
        Assert.assertEquals(1, tokens.size());
        Assert.assertEquals(TokenType.NUM, tokens.get(0).getType());
    }

    @Test
    public void testInteger() {
        assertSingleNumberToken("11");
    }

    @Test
    public void testLeadingZeroDecimal() {
        assertSingleNumberToken("0.1");
    }

    @Test
    public void testNonLeadingDecimal() {
        // A decimal literal with no leading digit should still lex as one number.
        assertSingleNumberToken(".1");
    }

    @Test
    public void testSimpleChars() {
        // "( ,)" should produce four single-character tokens, in order.
        List<Match<TokenType>> tokens = lexerTokenizer.tokenizeString("( ,)");
        Assert.assertNotNull(tokens);
        Assert.assertEquals(4, tokens.size());
        Assert.assertEquals(TokenType.OPEN_PARENTH, tokens.get(0).getType());
        Assert.assertEquals(TokenType.WHITESPACE, tokens.get(1).getType());
        Assert.assertEquals(TokenType.COMMA, tokens.get(2).getType());
        Assert.assertEquals(TokenType.CLOSE_PARENTH, tokens.get(3).getType());
    }
}