Mirror of https://github.com/DanilaFe/abacus

Format code.

2017-07-30 21:11:32 -07:00
parent 122874b97a
commit 3ce74303ed
39 changed files with 695 additions and 561 deletions
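The diff below is a pure formatting pass over the test sources: no logic changes, only whitespace. Method and for-statement headers gain a space before the opening brace, and the padding inside array-initializer arguments is removed. A condensed illustration of the rule being applied (the method and helper names here are made up for the example, not taken from the repository):

// Before the formatting pass (style seen on the removed lines):
public void testExample(){
    for(int i = 0; i < 3; i++){
        check(new TokenType[]{ TokenType.NUM } );
    }
}

// After the formatting pass (style seen on the added lines):
public void testExample() {
    for (int i = 0; i < 3; i++) {
        check(new TokenType[]{TokenType.NUM});
    }
}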

LexerTests.java

@@ -10,7 +10,7 @@ import java.util.List;
 public class LexerTests {
     @Test
-    public void testBasicSuccess(){
+    public void testBasicSuccess() {
         Lexer<Integer> lexer = new Lexer<>();
         lexer.register("abc", 0);
         lexer.register("def", 1);
@@ -22,7 +22,7 @@ public class LexerTests {
     }
     @Test
-    public void testBasicFailure(){
+    public void testBasicFailure() {
         Lexer<Integer> lexer = new Lexer<>();
         lexer.register("abc", 0);
         lexer.register("def", 1);
@@ -30,20 +30,20 @@ public class LexerTests {
     }
     @Test
-    public void testNoPatterns(){
+    public void testNoPatterns() {
         Lexer<Integer> lexer = new Lexer<>();
         Assert.assertNull(lexer.lexAll("abcdefabc", 0, Integer::compare));
     }
     @Test
-    public void testEmptyMatches(){
+    public void testEmptyMatches() {
         Lexer<Integer> lexer = new Lexer<>();
         lexer.register("a?", 0);
         Assert.assertNull(lexer.lexAll("", 0, Integer::compare));
     }
     @Test
-    public void testOneOrMore(){
+    public void testOneOrMore() {
         Lexer<Integer> lexer = new Lexer<>();
         lexer.register("a+", 0);
         List<Match<Integer>> tokens = lexer.lexAll("aaaa", 0, Integer::compare);
@@ -52,7 +52,7 @@ public class LexerTests {
     }
     @Test
-    public void testZeroOrMore(){
+    public void testZeroOrMore() {
         Lexer<Integer> lexer = new Lexer<>();
         lexer.register("a*", 0);
         List<Match<Integer>> tokens = lexer.lexAll("aaaa", 0, Integer::compare);
@@ -61,7 +61,7 @@ public class LexerTests {
     }
     @Test
-    public void testZeroOrOne(){
+    public void testZeroOrOne() {
         Lexer<Integer> lexer = new Lexer<>();
         lexer.register("a?", 0);
         List<Match<Integer>> tokens = lexer.lexAll("aaaa", 0, Integer::compare);
@@ -70,7 +70,7 @@ public class LexerTests {
     }
     @Test
-    public void testGreedyMatching(){
+    public void testGreedyMatching() {
         Lexer<Integer> lexer = new Lexer<>();
         lexer.register("a*a", 0);
         List<Match<Integer>> tokens = lexer.lexAll("aaaa", 0, Integer::compare);
@@ -79,20 +79,20 @@ public class LexerTests {
     }
     @Test
-    public void testAnyCharacter(){
+    public void testAnyCharacter() {
         String testString = "abcdef";
         Lexer<Integer> lexer = new Lexer<>();
         lexer.register(".", 0);
         List<Match<Integer>> tokens = lexer.lexAll(testString, 0, Integer::compare);
         Assert.assertNotNull(tokens);
         Assert.assertEquals(tokens.size(), testString.length());
-        for(int i = 0; i < tokens.size(); i++){
+        for (int i = 0; i < tokens.size(); i++) {
             Assert.assertEquals(testString.substring(i, i + 1), tokens.get(i).getContent());
         }
     }
     @Test
-    public void testBasicGroup(){
+    public void testBasicGroup() {
         Lexer<Integer> lexer = new Lexer<>();
         lexer.register("(abc)", 0);
         List<Match<Integer>> tokens = lexer.lexAll("abc", 0, Integer::compare);
@@ -102,27 +102,27 @@ public class LexerTests {
     }
     @Test
-    public void testBasicRangeSuccess(){
+    public void testBasicRangeSuccess() {
         String testString = "abcdef";
         Lexer<Integer> lexer = new Lexer<>();
         lexer.register("[a-f]", 0);
         List<Match<Integer>> tokens = lexer.lexAll(testString, 0, Integer::compare);
         Assert.assertNotNull(tokens);
         Assert.assertEquals(testString.length(), tokens.size());
-        for(int i = 0; i < tokens.size(); i++){
+        for (int i = 0; i < tokens.size(); i++) {
             Assert.assertEquals(testString.substring(i, i + 1), tokens.get(i).getContent());
         }
     }
     @Test
-    public void testBasicRangeFailure(){
+    public void testBasicRangeFailure() {
         Lexer<Integer> lexer = new Lexer<>();
         lexer.register("[a-f]", 0);
         Assert.assertNull(lexer.lexAll("g", 0, Integer::compare));
     }
     @Test
-    public void testGroupAndOperator(){
+    public void testGroupAndOperator() {
         Lexer<Integer> lexer = new Lexer<>();
         lexer.register("(abc)+", 0);
         List<Match<Integer>> tokens = lexer.lexAll("abcabc", 0, Integer::compare);

TokenizerTests.java

@@ -64,17 +64,17 @@ public class TokenizerTests {
     @Test
     public void testInteger() {
-        assertTokensMatch(lexerTokenizer.tokenizeString("11"), new TokenType[]{ TokenType.NUM } );
+        assertTokensMatch(lexerTokenizer.tokenizeString("11"), new TokenType[]{TokenType.NUM});
     }
     @Test
     public void testLeadingZeroDecimal() {
-        assertTokensMatch(lexerTokenizer.tokenizeString("0.1"), new TokenType[]{ TokenType.NUM } );
+        assertTokensMatch(lexerTokenizer.tokenizeString("0.1"), new TokenType[]{TokenType.NUM});
     }
     @Test
     public void testNonLeadingDecimal() {
-        assertTokensMatch(lexerTokenizer.tokenizeString(".1"), new TokenType[]{ TokenType.NUM } );
+        assertTokensMatch(lexerTokenizer.tokenizeString(".1"), new TokenType[]{TokenType.NUM});
     }
     @Test
@@ -102,7 +102,7 @@ public class TokenizerTests {
     }
     @Test
-    public void testOperatorParsing(){
+    public void testOperatorParsing() {
         TokenType[] types = {
                 TokenType.NUM,
                 TokenType.OP,
@@ -112,7 +112,7 @@ public class TokenizerTests {
     }
     @Test
-    public void testSanitizedOperators(){
+    public void testSanitizedOperators() {
         TokenType[] types = {
                 TokenType.NUM,
                 TokenType.OP,
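The assertTokensMatch helper used throughout the TokenizerTests hunks is defined elsewhere in that file and is not part of this diff. A hedged sketch of what such a helper plausibly checks, given how the tests call it; the parameter types and the Match#getType accessor are assumptions, not taken from the diff:

// Hypothetical reconstruction; the real helper lives in TokenizerTests and may differ.
private static void assertTokensMatch(List<Match<TokenType>> tokens, TokenType[] expectedTypes) {
    Assert.assertNotNull(tokens);                                // tokenizeString must have produced matches
    Assert.assertEquals(expectedTypes.length, tokens.size());    // one match per expected token type
    for (int i = 0; i < tokens.size(); i++) {
        Assert.assertEquals(expectedTypes[i], tokens.get(i).getType());  // getType() is assumed here
    }
}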