Mirror of https://github.com/DanilaFe/abacus (synced 2026-01-30 02:25:19 +00:00)

Commit: Format code.
@@ -29,7 +29,7 @@ public class LexerTokenizer implements Tokenizer<Match<TokenType>>, PluginListen
     /**
      * Creates a new lexer tokenizer.
      */
-    public LexerTokenizer(){
+    public LexerTokenizer() {
         lexer = new Lexer<TokenType>() {{
             register(" ", TokenType.WHITESPACE);
             register(",", TokenType.COMMA);
@@ -46,20 +46,20 @@ public class LexerTokenizer implements Tokenizer<Match<TokenType>>, PluginListen

     @Override
     public void onLoad(PluginManager manager) {
-        for(String operator : manager.getAllOperators()){
-            lexer.register(Pattern.sanitize(operator), TokenType.OP);
+        for (String operator : manager.getAllOperators()) {
+            lexer.register(Pattern.sanitize(operator), TokenType.OP);
         }
-        for(String function : manager.getAllFunctions()){
+        for (String function : manager.getAllFunctions()) {
             lexer.register(Pattern.sanitize(function), TokenType.FUNCTION);
         }
     }

     @Override
     public void onUnload(PluginManager manager) {
-        for(String operator : manager.getAllOperators()){
+        for (String operator : manager.getAllOperators()) {
             lexer.unregister(Pattern.sanitize(operator), TokenType.OP);
         }
-        for(String function : manager.getAllFunctions()){
+        for (String function : manager.getAllFunctions()) {
             lexer.unregister(Pattern.sanitize(function), TokenType.FUNCTION);
         }
     }
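For context (not part of the commit): these two hooks keep the lexer's patterns in sync with whatever the loaded plugins contribute. A minimal usage sketch, assuming the plugin manager is obtained from the surrounding Abacus instance and normally drives the hooks itself; only onLoad/onUnload and tokenizeString are taken from the code above:

// Sketch only; getPluginManager() is a hypothetical accessor, not shown in this diff.
LexerTokenizer tokenizer = new LexerTokenizer();
PluginManager manager = abacus.getPluginManager();    // hypothetical accessor
tokenizer.onLoad(manager);     // registers each operator ("+", ...) as OP, each function ("sin", ...) as FUNCTION
List<Match<TokenType>> tokens = tokenizer.tokenizeString("sin(1+2)");
tokenizer.onUnload(manager);   // unregisters the same patterns when the plugin goes away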
@@ -7,12 +7,14 @@ import java.util.List;
 /**
  * An interface that provides the ability to convert a list of tokens
  * into a parse tree.
  *
  * @param <T> the type of tokens accepted by this parser.
  */
 public interface Parser<T> {

     /**
      * Constructs a tree out of the given tokens.
      *
      * @param tokens the tokens to construct a tree from.
      * @return the constructed tree, or null on error.
      */

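The method this Javadoc describes falls outside the hunk; judging from its use in TreeBuilder.fromString further down (parser.constructTree(tokens) yielding a TreeNode), its declaration is presumably:

    TreeNode constructTree(List<T> tokens);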
@@ -36,9 +36,10 @@ public class ShuntingYardParser implements Parser<Match<TokenType>>, PluginListe

     /**
      * Creates a new Shunting Yard parser with the given Abacus instance.
      *
      * @param abacus the abacus instance.
      */
-    public ShuntingYardParser(Abacus abacus){
+    public ShuntingYardParser(Abacus abacus) {
         this.abacus = abacus;
         precedenceMap = new HashMap<>();
         associativityMap = new HashMap<>();
@@ -47,39 +48,40 @@ public class ShuntingYardParser implements Parser<Match<TokenType>>, PluginListe

     /**
      * Rearranges tokens into a postfix list, using Shunting Yard.
      *
      * @param from the tokens to be rearranged.
      * @return the resulting list of rearranged tokens.
      */
-    public List<Match<TokenType>> intoPostfix(List<Match<TokenType>> from){
+    public List<Match<TokenType>> intoPostfix(List<Match<TokenType>> from) {
         ArrayList<Match<TokenType>> output = new ArrayList<>();
         Stack<Match<TokenType>> tokenStack = new Stack<>();
-        while(!from.isEmpty()){
+        while (!from.isEmpty()) {
             Match<TokenType> match = from.remove(0);
             TokenType matchType = match.getType();
-            if(matchType == TokenType.NUM) {
+            if (matchType == TokenType.NUM) {
                 output.add(match);
-            } else if(matchType == TokenType.FUNCTION) {
-                output.add(new Match<>("" , TokenType.INTERNAL_FUNCTION_END));
+            } else if (matchType == TokenType.FUNCTION) {
+                output.add(new Match<>("", TokenType.INTERNAL_FUNCTION_END));
                 tokenStack.push(match);
-            } else if(matchType == TokenType.OP){
+            } else if (matchType == TokenType.OP) {
                 String tokenString = match.getContent();
                 OperatorType type = typeMap.get(tokenString);
                 int precedence = precedenceMap.get(tokenString);
                 OperatorAssociativity associativity = associativityMap.get(tokenString);

-                if(type == OperatorType.UNARY_POSTFIX){
+                if (type == OperatorType.UNARY_POSTFIX) {
                     output.add(match);
                     continue;
                 }

-                while(!tokenStack.empty()) {
+                while (!tokenStack.empty()) {
                     Match<TokenType> otherMatch = tokenStack.peek();
                     TokenType otherMatchType = otherMatch.getType();
-                    if(!(otherMatchType == TokenType.OP || otherMatchType == TokenType.FUNCTION)) break;
+                    if (!(otherMatchType == TokenType.OP || otherMatchType == TokenType.FUNCTION)) break;

-                    if(otherMatchType == TokenType.OP){
+                    if (otherMatchType == TokenType.OP) {
                         int otherPrecedence = precedenceMap.get(match.getContent());
-                        if(otherPrecedence < precedence ||
+                        if (otherPrecedence < precedence ||
                                 (associativity == OperatorAssociativity.RIGHT && otherPrecedence == precedence)) {
                             break;
                         }
@@ -87,22 +89,22 @@ public class ShuntingYardParser implements Parser<Match<TokenType>>, PluginListe
                     output.add(tokenStack.pop());
                 }
                 tokenStack.push(match);
-            } else if(matchType == TokenType.OPEN_PARENTH){
+            } else if (matchType == TokenType.OPEN_PARENTH) {
                 tokenStack.push(match);
-            } else if(matchType == TokenType.CLOSE_PARENTH || matchType == TokenType.COMMA){
-                while(!tokenStack.empty() && tokenStack.peek().getType() != TokenType.OPEN_PARENTH){
+            } else if (matchType == TokenType.CLOSE_PARENTH || matchType == TokenType.COMMA) {
+                while (!tokenStack.empty() && tokenStack.peek().getType() != TokenType.OPEN_PARENTH) {
                     output.add(tokenStack.pop());
                 }
-                if(tokenStack.empty()) return null;
-                if(matchType == TokenType.CLOSE_PARENTH){
+                if (tokenStack.empty()) return null;
+                if (matchType == TokenType.CLOSE_PARENTH) {
                     tokenStack.pop();
                 }
             }
         }
-        while(!tokenStack.empty()){
+        while (!tokenStack.empty()) {
             Match<TokenType> match = tokenStack.peek();
             TokenType matchType = match.getType();
-            if(!(matchType == TokenType.OP || matchType == TokenType.FUNCTION)) return null;
+            if (!(matchType == TokenType.OP || matchType == TokenType.FUNCTION)) return null;
             output.add(tokenStack.pop());
         }
         return output;
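A quick sanity check of the method above (not part of the commit): for the token stream of 1 + 2 * 3, and assuming the loaded plugins give * higher precedence than + with both left-associative, intoPostfix proceeds as follows:

    read 1  ->  output: 1          stack: (empty)
    read +  ->  output: 1          stack: +
    read 2  ->  output: 1 2        stack: +
    read *  ->  * outranks +, so nothing is popped; output: 1 2    stack: + *
    read 3  ->  output: 1 2 3      stack: + *
    end     ->  drain the stack:   output: 1 2 3 * +

Functions are handled slightly differently: a FUNCTION token first appends an INTERNAL_FUNCTION_END marker to the output and then goes on the stack, which is what later lets constructRecursive find the end of a function's argument list.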
@@ -110,37 +112,38 @@ public class ShuntingYardParser implements Parser<Match<TokenType>>, PluginListe

     /**
      * Constructs a tree recursively from a list of tokens.
      *
      * @param matches the list of tokens from the source string.
      * @return the constructed tree expression.
      */
-    public TreeNode constructRecursive(List<Match<TokenType>> matches){
-        if(matches.size() == 0) return null;
+    public TreeNode constructRecursive(List<Match<TokenType>> matches) {
+        if (matches.size() == 0) return null;
         Match<TokenType> match = matches.remove(0);
         TokenType matchType = match.getType();
-        if(matchType == TokenType.OP){
+        if (matchType == TokenType.OP) {
             String operator = match.getContent();
             OperatorType type = typeMap.get(operator);
-            if(type == OperatorType.BINARY_INFIX){
+            if (type == OperatorType.BINARY_INFIX) {
                 TreeNode right = constructRecursive(matches);
                 TreeNode left = constructRecursive(matches);
-                if(left == null || right == null) return null;
+                if (left == null || right == null) return null;
                 else return new BinaryInfixNode(operator, left, right);
             } else {
                 TreeNode applyTo = constructRecursive(matches);
-                if(applyTo == null) return null;
+                if (applyTo == null) return null;
                 else return new UnaryPrefixNode(operator, applyTo);
             }
-        } else if(matchType == TokenType.NUM){
+        } else if (matchType == TokenType.NUM) {
             return new NumberNode(abacus.numberFromString(match.getContent()));
-        } else if(matchType == TokenType.FUNCTION){
+        } else if (matchType == TokenType.FUNCTION) {
             String functionName = match.getContent();
             FunctionNode node = new FunctionNode(functionName);
-            while(!matches.isEmpty() && matches.get(0).getType() != TokenType.INTERNAL_FUNCTION_END){
+            while (!matches.isEmpty() && matches.get(0).getType() != TokenType.INTERNAL_FUNCTION_END) {
                 TreeNode argument = constructRecursive(matches);
-                if(argument == null) return null;
+                if (argument == null) return null;
                 node.prependChild(argument);
             }
-            if(matches.isEmpty()) return null;
+            if (matches.isEmpty()) return null;
             matches.remove(0);
             return node;
         }
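Note that constructRecursive consumes the operator or function first and then recurses for its operands, so the list it receives must be operator-first; presumably the caller (not shown in this diff) reverses the postfix list that intoPostfix produces. Tracing the 1 + 2 * 3 example under that assumption:

    postfix:   1 2 3 * +
    reversed:  + * 3 2 1
    read +  ->  right = constructRecursive(...) reads *, which builds (2 * 3)
                left  = constructRecursive(...) reads 1
    result:    new BinaryInfixNode("+", 1, (2 * 3))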
@@ -156,7 +159,7 @@ public class ShuntingYardParser implements Parser<Match<TokenType>>, PluginListe

     @Override
     public void onLoad(PluginManager manager) {
-        for(String operator : manager.getAllOperators()){
+        for (String operator : manager.getAllOperators()) {
             Operator operatorInstance = manager.operatorFor(operator);
             precedenceMap.put(operator, operatorInstance.getPrecedence());
             associativityMap.put(operator, operatorInstance.getAssociativity());

@@ -4,12 +4,14 @@ import java.util.List;

 /**
  * Interface that provides the ability to convert a string into a list of tokens.
  *
  * @param <T> the type of the tokens produced.
  */
 public interface Tokenizer<T> {

     /**
      * Converts a string into tokens.
      *
      * @param string the string to convert.
      * @return the list of tokens, or null on error.
      */

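As with Parser, the declaration under this Javadoc is not in the hunk; from tokenizer.tokenizeString(input) in TreeBuilder.fromString below, it is presumably:

    List<T> tokenizeString(String string);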
@@ -10,6 +10,7 @@ import java.util.List;
  * working with any parameters at all, and the generics
  * in this class are used only to ensure the tokenizer and parser
  * are of the same type.
  *
  * @param <T> the type of tokens created by the tokenizer and used by the parser.
  */
 public class TreeBuilder<T> {
@@ -25,22 +26,24 @@ public class TreeBuilder<T> {

     /**
      * Create a new Tree Builder with the given tokenizer and parser
      *
      * @param tokenizer the tokenizer to turn strings into tokens
-     * @param parser the parser to turn tokens into a tree
+     * @param parser the parser to turn tokens into a tree
      */
-    public TreeBuilder(Tokenizer<T> tokenizer, Parser<T> parser){
+    public TreeBuilder(Tokenizer<T> tokenizer, Parser<T> parser) {
         this.tokenizer = tokenizer;
         this.parser = parser;
     }

     /**
      * Parse the given string into a tree.
      *
      * @param input the string to parse into a tree.
      * @return the resulting tree.
      */
-    public TreeNode fromString(String input){
+    public TreeNode fromString(String input) {
         List<T> tokens = tokenizer.tokenizeString(input);
-        if(tokens == null) return null;
+        if (tokens == null) return null;
         return parser.constructTree(tokens);
     }

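Putting the classes touched by this commit together, a minimal end-to-end sketch; the construction of the Abacus instance is assumed (it is not part of this diff), everything else uses the signatures shown above:

// Sketch only: wire the tokenizer and parser above into a TreeBuilder.
Abacus abacus = new Abacus();                              // hypothetical constructor
LexerTokenizer tokenizer = new LexerTokenizer();
ShuntingYardParser parser = new ShuntingYardParser(abacus);
TreeBuilder<Match<TokenType>> builder = new TreeBuilder<>(tokenizer, parser);
TreeNode tree = builder.fromString("1 + 2 * 3");           // null if tokenizing or parsing fails

For this to recognize any operators or functions, the tokenizer and parser would also need to be registered with the plugin manager so their onLoad hooks run, as shown in the hunks above.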