1
0
mirror of https://github.com/DanilaFe/abacus synced 2024-11-16 15:43:08 -08:00
Abacus/src/test/java/org/nwapw/abacus/tests/TokenizerTests.java

51 lines
1.7 KiB
Java
Raw Normal View History

2017-07-30 14:52:10 -07:00
package org.nwapw.abacus.tests;
import org.junit.Assert;
import org.junit.Test;
import org.nwapw.abacus.lexing.pattern.Match;
import org.nwapw.abacus.parsing.LexerTokenizer;
import org.nwapw.abacus.tree.TokenType;
import java.util.List;
/**
 * Unit tests for {@link LexerTokenizer}, verifying that simple input strings
 * are tokenized into the expected number and types of tokens.
 */
public class TokenizerTests {

    /** Tokenizer under test, shared across test methods. */
    private final LexerTokenizer lexerTokenizer = new LexerTokenizer();

    /** A plain integer literal should lex to a single NUM token. */
    @Test
    public void testInteger(){
        List<Match<TokenType>> tokens = lexerTokenizer.tokenizeString("11");
        Assert.assertNotNull(tokens);
        // JUnit convention: expected value first, actual second, so that
        // failure messages ("expected:<...> but was:<...>") read correctly.
        Assert.assertEquals(1, tokens.size());
        Assert.assertEquals(TokenType.NUM, tokens.get(0).getType());
    }

    /** A decimal with a leading zero ("0.1") should lex to a single NUM token. */
    @Test
    public void testLeadingZeroDecimal(){
        List<Match<TokenType>> tokens = lexerTokenizer.tokenizeString("0.1");
        Assert.assertNotNull(tokens);
        Assert.assertEquals(1, tokens.size());
        Assert.assertEquals(TokenType.NUM, tokens.get(0).getType());
    }

    /** A decimal without a leading digit (".1") should lex to a single NUM token. */
    @Test
    public void testNonLeadingDecimal(){
        List<Match<TokenType>> tokens = lexerTokenizer.tokenizeString(".1");
        Assert.assertNotNull(tokens);
        Assert.assertEquals(1, tokens.size());
        Assert.assertEquals(TokenType.NUM, tokens.get(0).getType());
    }

    /** Single-character tokens — parentheses, whitespace, comma — should each lex individually. */
    @Test
    public void testSimpleChars(){
        List<Match<TokenType>> tokens = lexerTokenizer.tokenizeString("( ,)");
        Assert.assertNotNull(tokens);
        Assert.assertEquals(4, tokens.size());
        Assert.assertEquals(TokenType.OPEN_PARENTH, tokens.get(0).getType());
        Assert.assertEquals(TokenType.WHITESPACE, tokens.get(1).getType());
        Assert.assertEquals(TokenType.COMMA, tokens.get(2).getType());
        Assert.assertEquals(TokenType.CLOSE_PARENTH, tokens.get(3).getType());
    }
}