mirror of https://github.com/DanilaFe/abacus synced 2024-10-04 12:13:55 -07:00

Write tests involving plugin loading, and generalize token testing code.

Danila Fedorin 2017-07-30 19:21:26 -07:00
parent b35d3a3cd4
commit 6b8d8497e2

TokenizerTests.java

@@ -1,50 +1,124 @@
package org.nwapw.abacus.tests;

import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.nwapw.abacus.Abacus;
import org.nwapw.abacus.function.Function;
import org.nwapw.abacus.function.Operator;
import org.nwapw.abacus.function.OperatorAssociativity;
import org.nwapw.abacus.function.OperatorType;
import org.nwapw.abacus.lexing.pattern.Match;
import org.nwapw.abacus.number.NumberInterface;
import org.nwapw.abacus.parsing.LexerTokenizer;
import org.nwapw.abacus.plugin.Plugin;
import org.nwapw.abacus.tree.TokenType;

import java.util.List;

public class TokenizerTests {

    private static Abacus abacus = new Abacus();
    private static LexerTokenizer lexerTokenizer = new LexerTokenizer();

    // A two-argument function registered only so the lexer has something to
    // recognize; its arithmetic behavior is irrelevant to tokenizing.
    private static Function subtractFunction = new Function() {
        @Override
        protected boolean matchesParams(NumberInterface[] params) {
            return params.length == 2;
        }

        @Override
        protected NumberInterface applyInternal(NumberInterface[] params) {
            return params[0].subtract(params[1]);
        }
    };

    // A plugin added by hand (rather than discovered on disk) so the tests
    // can exercise plugin loading. Both operators reuse subtractFunction,
    // since only token recognition matters here.
    private static Plugin testPlugin = new Plugin(abacus.getPluginManager()) {
        @Override
        public void onEnable() {
            registerOperator("+", new Operator(OperatorAssociativity.LEFT, OperatorType.BINARY_INFIX,
                    0, subtractFunction));
            registerOperator("-", new Operator(OperatorAssociativity.LEFT, OperatorType.BINARY_INFIX,
                    0, subtractFunction));
            registerFunction("subtract", subtractFunction);
        }

        @Override
        public void onDisable() {
        }
    };

    // Generalized assertion: tokenizing must yield exactly the expected
    // token types, in order.
    private static void assertTokensMatch(List<Match<TokenType>> tokenList, TokenType[] expectedTypes) {
        Assert.assertNotNull(tokenList);
        Assert.assertEquals(expectedTypes.length, tokenList.size());
        for (int i = 0; i < expectedTypes.length; i++) {
            Assert.assertEquals(expectedTypes[i], tokenList.get(i).getType());
        }
    }

    @BeforeClass
    public static void prepareTests() {
        abacus.getPluginManager().addListener(lexerTokenizer);
        abacus.getPluginManager().addInstantiated(testPlugin);
        abacus.getPluginManager().load();
    }

    @Test
    public void testInteger() {
        assertTokensMatch(lexerTokenizer.tokenizeString("11"), new TokenType[]{TokenType.NUM});
    }

    @Test
    public void testLeadingZeroDecimal() {
        assertTokensMatch(lexerTokenizer.tokenizeString("0.1"), new TokenType[]{TokenType.NUM});
    }

    @Test
    public void testNonLeadingDecimal() {
        assertTokensMatch(lexerTokenizer.tokenizeString(".1"), new TokenType[]{TokenType.NUM});
    }

    @Test
    public void testSimpleChars() {
        TokenType[] types = {
                TokenType.OPEN_PARENTH,
                TokenType.WHITESPACE,
                TokenType.COMMA,
                TokenType.CLOSE_PARENTH
        };
        assertTokensMatch(lexerTokenizer.tokenizeString("( ,)"), types);
    }

    @Test
    public void testFunctionParsing() {
        TokenType[] types = {
                TokenType.FUNCTION,
                TokenType.OPEN_PARENTH,
                TokenType.NUM,
                TokenType.COMMA,
                TokenType.NUM,
                TokenType.CLOSE_PARENTH
        };
        assertTokensMatch(lexerTokenizer.tokenizeString("subtract(1,2)"), types);
    }

    @Test
    public void testOperatorParsing() {
        TokenType[] types = {
                TokenType.NUM,
                TokenType.OP,
                TokenType.NUM
        };
        assertTokensMatch(lexerTokenizer.tokenizeString("1-1"), types);
    }

    @Test
    public void testSanitizedOperators() {
        TokenType[] types = {
                TokenType.NUM,
                TokenType.OP,
                TokenType.NUM
        };
        assertTokensMatch(lexerTokenizer.tokenizeString("1+1"), types);
    }
}
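
To illustrate the generalized helper this commit introduces, a hypothetical further test (not part of this commit) combining a function call and an infix operator might look like the sketch below; the expected token stream for this input is an assumption about the lexer's behavior, inferred from the tests above.

    // Hypothetical extra test in the same class, shown only to illustrate
    // the assertTokensMatch pattern; the token stream for this input is an
    // assumption, not taken from the repository.
    @Test
    public void testMixedExpression() {
        TokenType[] types = {
                TokenType.FUNCTION,      // "subtract"
                TokenType.OPEN_PARENTH,
                TokenType.NUM,
                TokenType.COMMA,
                TokenType.NUM,
                TokenType.CLOSE_PARENTH,
                TokenType.OP,            // "-"
                TokenType.NUM
        };
        assertTokensMatch(lexerTokenizer.tokenizeString("subtract(1,2)-3"), types);
    }

Since these are plain JUnit 4 tests, they run under any JUnit 4 runner, e.g. java -cp <classpath> org.junit.runner.JUnitCore org.nwapw.abacus.tests.TokenizerTests (classpath elided).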