Mirror of https://github.com/DanilaFe/abacus (synced 2024-12-22 15:30:09 -08:00)
Implement parsing functions.

parent c4eb70999b
commit d18e27bdb4
src/org/nwapw/abacus/tree/FunctionNode.java (new file, 47 lines)

@@ -0,0 +1,47 @@
+package org.nwapw.abacus.tree;
+
+import java.util.ArrayList;
+
+public class FunctionNode extends TreeNode {
+
+    private String function;
+    private ArrayList<TreeNode> children;
+
+    private FunctionNode() { }
+
+    public FunctionNode(String function){
+        this.function = function;
+        children = new ArrayList<>();
+    }
+
+    public String getFunction() {
+        return function;
+    }
+
+    public void addChild(TreeNode node){
+        children.add(node);
+    }
+
+    @Override
+    public <T> T reduce(Reducer<T> reducer) {
+        Object[] reducedChildren = new Object[children.size()];
+        for(int i = 0; i < reducedChildren.length; i++){
+            reducedChildren[i] = children.get(i).reduce(reducer);
+            if(reducedChildren[i] == null) return null;
+        }
+        return reducer.reduceNode(this, reducedChildren);
+    }
+
+    @Override
+    public String toString() {
+        StringBuilder buffer = new StringBuilder();
+        buffer.append(function);
+        buffer.append("(");
+        for(int i = 0; i < children.size(); i++){
+            buffer.append(children.get(i));
+            buffer.append(i == children.size() - 1 ? "" : ", ");
+        }
+        buffer.append(")");
+        return buffer.toString();
+    }
+}
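For orientation, a minimal usage sketch of the new node type (not part of the commit). The class name FunctionNodeSketch is hypothetical; the sketch assumes NumberNode has the double-argument constructor used later in this diff and that its toString() renders the stored value.

    // Hypothetical illustration: build the tree for a call such as max(1, 2) by hand,
    // bypassing the lexer/parser changes shown below.
    import org.nwapw.abacus.tree.FunctionNode;
    import org.nwapw.abacus.tree.NumberNode;

    public class FunctionNodeSketch {
        public static void main(String[] args) {
            FunctionNode call = new FunctionNode("max");   // function name stored via the public constructor
            call.addChild(new NumberNode(1));              // int literal widens to double (assumed NumberNode(double))
            call.addChild(new NumberNode(2));
            // FunctionNode.toString() joins the children with ", " inside parentheses, so this
            // should print something like "max(1.0, 2.0)", depending on NumberNode's formatting.
            System.out.println(call);
        }
    }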
src/org/nwapw/abacus/tree/TokenType.java

@@ -6,7 +6,7 @@ package org.nwapw.abacus.tree;
  */
 public enum TokenType {
 
-    ANY(0), OP(1), NUM(2), WORD(3), OPEN_PARENTH(4), CLOSE_PARENTH(5);
+    INTERNAL_FUNCTION_END(-1), INTERNAL_FUNCTION_START(-1), ANY(0), COMMA(1), OP(2), NUM(3), WORD(4), OPEN_PARENTH(5), CLOSE_PARENTH(6);
 
     /**
      * The priority by which this token gets sorted.
src/org/nwapw/abacus/tree/TreeNode.java

@@ -14,6 +14,7 @@ public abstract class TreeNode {
      * The lexer used to lex tokens.
      */
     protected static Lexer<TokenType> lexer = new Lexer<TokenType>(){{
+        register(",", TokenType.COMMA);
         register("\\+|-|\\*|/|^", TokenType.OP);
         register("[0-9]+(\\.[0-9]+)?", TokenType.NUM);
         register("[a-zA-Z]+", TokenType.WORD);
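As a rough illustration of the new COMMA rule (my reading of the lexer, not captured program output), an input such as max(1,2) should now lex into the token sequence

    WORD("max"), OPEN_PARENTH, NUM("1"), COMMA, NUM("2"), CLOSE_PARENTH

assuming the parenthesis patterns are registered elsewhere in this class, as the token types above suggest.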
@@ -67,9 +68,10 @@ public abstract class TreeNode {
         Stack<Match<TokenType>> tokenStack = new Stack<>();
         while(!from.isEmpty()){
             Match<TokenType> match = from.remove(0);
-            if(match.getType() == TokenType.NUM) {
+            TokenType matchType = match.getType();
+            if(matchType == TokenType.NUM || matchType == TokenType.WORD) {
                 output.add(match);
-            } else if(match.getType() == TokenType.OP){
+            } else if(matchType == TokenType.OP){
                 String tokenString = source.substring(match.getFrom(), match.getTo());
                 int precedence = precedenceMap.get(tokenString);
                 OperatorAssociativity associativity = associativityMap.get(tokenString);
@@ -86,14 +88,24 @@ public abstract class TreeNode {
                     output.add(tokenStack.pop());
                 }
                 tokenStack.push(match);
-            } else if(match.getType() == TokenType.OPEN_PARENTH){
+            } else if(matchType == TokenType.OPEN_PARENTH){
+                if(!output.isEmpty() && output.get(output.size() - 1).getType() == TokenType.WORD){
+                    tokenStack.push(output.remove(output.size() - 1));
+                    output.add(new Match<>(0, 0, TokenType.INTERNAL_FUNCTION_END));
+                }
                 tokenStack.push(match);
-            } else if(match.getType() == TokenType.CLOSE_PARENTH){
+            } else if(matchType == TokenType.CLOSE_PARENTH || matchType == TokenType.COMMA){
                 while(!tokenStack.empty() && tokenStack.peek().getType() != TokenType.OPEN_PARENTH){
                     output.add(tokenStack.pop());
                 }
                 if(tokenStack.empty()) return null;
-                tokenStack.pop();
+                if(matchType == TokenType.CLOSE_PARENTH){
+                    tokenStack.pop();
+                    if(!tokenStack.empty() && tokenStack.peek().getType() == TokenType.WORD) {
+                        output.add(tokenStack.pop());
+                        output.add(new Match<>(0, 0, TokenType.INTERNAL_FUNCTION_START));
+                    }
+                }
             }
         }
         while(!tokenStack.empty()){
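A sketch of how the changed shunting-yard loop treats a call, as I read the hunk above (not verified against the program). For the token stream of max(1,2):

    WORD "max"       -> emitted to output
    OPEN_PARENTH     -> the preceding WORD is pulled back onto the operator stack and an
                        INTERNAL_FUNCTION_END marker is emitted, then the parenthesis is pushed
    NUM "1", NUM "2" -> emitted to output
    COMMA            -> flushes pending operators down to the open parenthesis
    CLOSE_PARENTH    -> discards the parenthesis, then emits the WORD and an
                        INTERNAL_FUNCTION_START marker

so the output list ends up roughly as [FUNCTION_END, 1, 2, max, FUNCTION_START]. The next hunk shows how these markers are consumed when the tree is built.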
@@ -119,6 +131,19 @@ public abstract class TreeNode {
             else return new OpNode(source.substring(match.getFrom(), match.getTo()), left, right);
         } else if(match.getType() == TokenType.NUM){
             return new NumberNode(Double.parseDouble(source.substring(match.getFrom(), match.getTo())));
+        } else if(match.getType() == TokenType.INTERNAL_FUNCTION_START){
+            if(matches.isEmpty() || matches.get(0).getType() != TokenType.WORD) return null;
+            Match<TokenType> stringName = matches.remove(0);
+            String functionName = source.substring(stringName.getFrom(), stringName.getTo());
+            FunctionNode node = new FunctionNode(functionName);
+            while(!matches.isEmpty() && matches.get(0).getType() != TokenType.INTERNAL_FUNCTION_END){
+                TreeNode argument = fromStringRecursive(source, matches);
+                if(argument == null) return null;
+                node.addChild(argument);
+            }
+            if(matches.isEmpty()) return null;
+            matches.remove(0);
+            return node;
         }
         return null;
     }
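Tying the two pieces together (my interpretation, not program output): when the tree builder encounters INTERNAL_FUNCTION_START it expects the function's WORD token next, then builds one argument subtree per recursive call until it reaches INTERNAL_FUNCTION_END, producing a FunctionNode whose toString() renders, for the example above, something like max(1.0, 2.0).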