Use state machine to parse type assumptions

This commit is contained in:
Arne Keller 2021-07-05 12:21:55 +02:00
parent 93ec136970
commit df3f24548a
21 changed files with 574 additions and 269 deletions

View File

@ -3,8 +3,6 @@ package edu.kit.typicalc.model;
import edu.kit.typicalc.model.parser.ParseError;
import edu.kit.typicalc.util.Result;
import java.util.Map;
/**
* This interface accepts user input and returns a type inference result.
*/
@ -18,5 +16,5 @@ public interface Model {
* @return either an error or a TypeInfererInterface on success
*/
Result<TypeInfererInterface, ParseError> getTypeInferer(String lambdaTerm,
Map<String, String> typeAssumptions);
String typeAssumptions);
}

View File

@ -27,7 +27,7 @@ public class ModelImpl implements Model {
*/
@Override
public Result<TypeInfererInterface, ParseError> getTypeInferer(String lambdaTerm,
Map<String, String> typeAssumptions) {
String typeAssumptions) {
// Parse Lambda Term
LambdaParser parser = new LambdaParser(lambdaTerm);
Result<LambdaTerm, ParseError> result = parser.parse();

View File

@ -66,7 +66,7 @@ public class LambdaLexer {
Token token = tokens.removeFirst();
return new Result<>(token);
} else {
return new Result<>(new Token(TokenType.EOF, "", 0));
return new Result<>(new Token(TokenType.EOF, "", term, term.length()));
}
}
@ -76,7 +76,7 @@ public class LambdaLexer {
}
if (pos >= term.length()) {
// term ended, return EOF
return new Result<>(new Token(TokenType.EOF, "", pos));
return new Result<>(new Token(TokenType.EOF, "", term, pos));
}
Token t;
char c = term.charAt(pos);
@ -84,7 +84,7 @@ public class LambdaLexer {
case '-':
if (pos + 1 < term.length()) {
if (term.charAt(pos + 1) == '>') {
t = new Token(TokenType.ARROW, "->", pos);
t = new Token(TokenType.ARROW, "->", term, pos);
advance();
advance();
return new Result<>(t);
@ -97,24 +97,37 @@ public class LambdaLexer {
}
// bunch of single-character tokens
case '.':
t = new Token(TokenType.DOT, ".", pos);
t = new Token(TokenType.DOT, ".", term, pos);
advance();
return new Result<>(t);
case ',':
t = new Token(TokenType.COMMA, ",", term, pos);
advance();
return new Result<>(t);
case ':':
t = new Token(TokenType.COLON, ":", term, pos);
advance();
return new Result<>(t);
case '(':
t = new Token(TokenType.LEFT_PARENTHESIS, "(", pos);
t = new Token(TokenType.LEFT_PARENTHESIS, "(", term, pos);
advance();
return new Result<>(t);
case ')':
t = new Token(TokenType.RIGHT_PARENTHESIS, ")", pos);
t = new Token(TokenType.RIGHT_PARENTHESIS, ")", term, pos);
advance();
return new Result<>(t);
case '=':
t = new Token(TokenType.EQUALS, "=", pos);
t = new Token(TokenType.EQUALS, "=", term, pos);
advance();
return new Result<>(t);
case '\\':
case 'λ':
t = new Token(TokenType.LAMBDA, c + "", pos);
t = new Token(TokenType.LAMBDA, Character.toString(c), term, pos);
advance();
return new Result<>(t);
case '!':
case '∀':
t = new Token(TokenType.UNIVERSAL_QUANTIFIER, Character.toString(c), term, pos);
advance();
return new Result<>(t);
default:
@ -156,7 +169,7 @@ public class LambdaLexer {
type = TokenType.VARIABLE;
break;
}
return new Result<>(new Token(type, sb.toString(), startPos));
return new Result<>(new Token(type, sb.toString(), term, startPos));
} else if (Character.isDigit(c)) {
int startPos = pos;
// number literal
@ -165,7 +178,7 @@ public class LambdaLexer {
sb.append(term.charAt(pos));
advance();
} while (pos < term.length() && Character.isDigit(term.charAt(pos)));
return new Result<>(new Token(TokenType.NUMBER, sb.toString(), startPos));
return new Result<>(new Token(TokenType.NUMBER, sb.toString(), term, startPos));
} else {
return new Result<>(null, ParseError.UNEXPECTED_CHARACTER.withCharacter(c, pos, term));
}

View File

@ -30,7 +30,7 @@ public class LambdaParser {
* When calling a parseX method, token is the first token of X
* (as opposed to the last token of the previous construct).
*/
private Token token = new Token(TokenType.EOF, "", -1);
private Token token = new Token(TokenType.EOF, "", "", -1);
private static final Set<TokenType> ATOM_START_TOKENS
= EnumSet.of(TokenType.VARIABLE, TokenType.NUMBER, TokenType.TRUE,
@ -69,7 +69,7 @@ public class LambdaParser {
TokenType current = token.getType();
Optional<ParseError> error = nextToken();
if (current != type) {
return Optional.of(ParseError.UNEXPECTED_TOKEN.withToken(lastToken, lexer.getTerm()).expectedType(type));
return Optional.of(ParseError.UNEXPECTED_TOKEN.withToken(lastToken).expectedType(type));
}
return error;
}
@ -91,7 +91,7 @@ public class LambdaParser {
}
return new Result<>(null,
(last.getType() == TokenType.EOF ? ParseError.TOO_FEW_TOKENS : ParseError.UNEXPECTED_TOKEN)
.withToken(last, lexer.getTerm())
.withToken(last)
.expectedTypes(ATOM_START_TOKENS));
}
@ -207,7 +207,7 @@ public class LambdaParser {
try {
n = Integer.parseInt(number);
} catch (NumberFormatException e) {
return new Result<>(null, ParseError.UNEXPECTED_CHARACTER.withToken(token, lexer.getTerm()));
return new Result<>(null, ParseError.UNEXPECTED_CHARACTER.withToken(token));
}
error = nextToken();
if (error.isEmpty()) {

View File

@ -19,7 +19,10 @@ public enum ParseError {
/**
* some tokens were required, but not provided
*
* DEPRECATED: use UNEXPECTED_TOKEN with TokenType.EOF instead
*/
@Deprecated
TOO_FEW_TOKENS,
/**
@ -39,12 +42,11 @@ public enum ParseError {
* Attach a token to this error.
*
* @param cause the token that caused the error
* @param term the term that is parsed
* @return this object
*/
public ParseError withToken(Token cause, String term) {
public ParseError withToken(Token cause) {
this.cause = Optional.of(cause);
this.term = term;
this.term = cause.getSourceText();
this.position = cause.getPos();
return this;
}

View File

@ -3,7 +3,7 @@ package edu.kit.typicalc.model.parser;
import java.util.Objects;
/**
* A token of the Prolog language.
* A token of a lambda term or type assumption.
*/
public class Token {
/**
@ -13,6 +13,7 @@ public class Token {
* EOF is a special token to indicate that the end of file is reached.
*/
public enum TokenType {
UNIVERSAL_QUANTIFIER, // or exclamation mark
LAMBDA, // λ or a backslash
VARIABLE, // [a-z][a-zA-Z0-9]* except "let" or "in" or constants
LET, // let
@ -23,6 +24,8 @@ public class Token {
LEFT_PARENTHESIS, // (
RIGHT_PARENTHESIS, // )
DOT, // .
COMMA, // ,
COLON, // :
EQUALS, // =
ARROW, // ->
EOF // pseudo token if end of input is reached
@ -36,6 +39,7 @@ public class Token {
* the text of this token in the source code
*/
private final String text;
private final String sourceText;
private final int pos;
/**
@ -43,11 +47,13 @@ public class Token {
*
* @param type the token type
* @param text text of this token in the source code
* @param pos position this token begins
* @param sourceText context of this token
* @param pos position this token begins in sourceText
*/
public Token(TokenType type, String text, int pos) {
public Token(TokenType type, String text, String sourceText, int pos) {
this.type = type;
this.text = text;
this.sourceText = sourceText;
this.pos = pos;
}
@ -69,6 +75,13 @@ public class Token {
return text;
}
/**
* @return the surrounding text of this token
*/
public String getSourceText() {
return sourceText;
}
/**
* Returns the position this token is in
*
@ -92,11 +105,12 @@ public class Token {
return false;
}
Token token = (Token) o;
return pos == token.pos && type == token.type && Objects.equals(text, token.text);
return pos == token.pos && type == token.type
&& Objects.equals(text, token.text) && sourceText.equals(token.sourceText);
}
@Override
public int hashCode() {
return Objects.hash(type, text, pos);
return Objects.hash(type, text, sourceText, pos);
}
}

View File

@ -4,8 +4,6 @@ import edu.kit.typicalc.model.parser.Token.TokenType;
import edu.kit.typicalc.model.term.*;
import edu.kit.typicalc.model.type.*;
import edu.kit.typicalc.util.Result;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import java.util.*;
import java.util.regex.Matcher;
@ -16,70 +14,39 @@ import java.util.regex.Pattern;
*/
public class TypeAssumptionParser {
public static final Pattern TYPE_NAME_PATTERN = Pattern.compile("[a-zA-Z][a-zA-Z0-9]*");
private static final Pattern TYPE_VARIABLE_PATTERN = Pattern.compile("t(\\d+)");
private static final Set<TokenType> END_TOKENS
= EnumSet.of(TokenType.ARROW, TokenType.RIGHT_PARENTHESIS, TokenType.EOF);
/**
* Parse the given type assumptions.
*
* @param assumptions the type assumptions
* @return if successful, a map of the type assumptions, otherwise an error
*/
public Result<Map<VarTerm, TypeAbstraction>, ParseError> parse(Map<String, String> assumptions) {
Map<VarTerm, TypeAbstraction> typeAssumptions = new LinkedHashMap<>();
for (Map.Entry<String, String> entry : assumptions.entrySet()) {
String typeName = entry.getKey();
if (!TYPE_NAME_PATTERN.matcher(typeName).matches()) {
return new Result<>(null, ParseError.UNEXPECTED_CHARACTER);
public Result<Map<VarTerm, TypeAbstraction>, ParseError> parse(String assumptions) {
ParserState<Map<VarTerm, TypeAbstraction>> state = new InitialState(new LinkedHashMap<>());
LambdaLexer lexer = new LambdaLexer(cleanAssumptionText(assumptions));
Optional<Token> extraToken = Optional.empty();
while (true) {
Token token1;
if (extraToken.isPresent()) {
token1 = extraToken.get();
} else {
Result<Token, ParseError> token = lexer.nextToken();
if (token.isError()) {
return token.castError();
}
VarTerm var = new VarTerm(typeName);
Result<TypeAbstraction, ParseError> typeAbs = parseTypeDefinition(entry.getValue());
if (typeAbs.isError()) {
return new Result<>(typeAbs);
token1 = token.unwrap();
}
typeAssumptions.put(var, typeAbs.unwrap());
ParserResult<Map<VarTerm, TypeAbstraction>> result = state.handle(token1);
if (result.isResult()) {
return new Result<>(result.getResult());
} else if (result.isError()) {
return new Result<>(null, result.getError());
} else {
state = result.getState();
extraToken = result.extraToken();
}
return new Result<>(typeAssumptions);
}
/**
* Parse a type definition that may use the all quantor.
* @param definition type definition
* @return parsed type
*/
public Result<TypeAbstraction, ParseError> parseTypeDefinition(String definition) {
definition = cleanAssumptionText(definition);
Set<TypeVariable> allQuantified = new HashSet<>();
if (definition.startsWith("")) {
String[] parts = definition.split("\\.");
if (parts.length < 2) {
int colonIndex = definition.indexOf(':');
if (colonIndex >= 0) {
return new Result<>(null, ParseError.UNEXPECTED_CHARACTER.withCharacter(
':', colonIndex, definition).expectedCharacter('.')
);
}
return new Result<>(null, ParseError.TOO_FEW_TOKENS.expectedType(TokenType.DOT));
} else if (parts.length > 2) {
return new Result<>(null, ParseError.UNEXPECTED_CHARACTER.withCharacter(
'.', parts[0].length() + 1 + parts[1].length(), definition));
}
for (String quantified : parts[0].substring(1).split(",")) {
quantified = quantified.trim();
if (!quantified.matches("t\\d+")) {
return new Result<>(null, ParseError.UNEXPECTED_TOKEN.withToken(
new Token(TokenType.VARIABLE, quantified, parts[0].indexOf(quantified)), parts[0]
));
}
int i = Integer.parseInt(quantified.substring(1));
allQuantified.add(new TypeVariable(TypeVariableKind.USER_INPUT, i));
}
definition = parts[1];
}
return parseType(definition, allQuantified);
}
private String cleanAssumptionText(String text) {
@ -96,83 +63,307 @@ public class TypeAssumptionParser {
.replace('τ', 't');
}
private Result<TypeAbstraction, ParseError> parseType(String type, Set<TypeVariable> allQuantified) {
// this parser is using the same lexer as the LambdaParser (to avoid types named "let" etc.)
LambdaLexer lexer = new LambdaLexer(type);
Result<Pair<Type, Integer>, ParseError> parsedType = parseType(lexer, 0);
if (parsedType.isError()) {
return new Result<>(null, parsedType.unwrapError());
}
return new Result<>(new TypeAbstraction(parsedType.unwrap().getLeft(), allQuantified));
private static class ParserResult<T> {
private Optional<ParserState<T>> newState = Optional.empty();
private Optional<ParseError> error = Optional.empty();
private Optional<T> result = Optional.empty();
private Optional<Token> extraToken = Optional.empty();
ParserResult(ParseError e) {
this.error = Optional.of(e);
}
private Result<Pair<Type, Integer>, ParseError> parseType(LambdaLexer lexer, int parenCount) {
Type type = null;
int removedParens = 0;
while (true) {
Result<Token, ParseError> token = lexer.nextToken();
if (token.isError()) {
return new Result<>(token);
ParserResult(ParserState<T> state) {
this.newState = Optional.of(state);
}
Token t = token.unwrap();
Result<Type, ParseError> typeResult = null;
ParserResult(T result) {
this.result = Optional.of(result);
}
boolean isTransition() {
return newState.isPresent();
}
ParserState<T> getState() {
return newState.get();
}
boolean isError() {
return error.isPresent();
}
ParseError getError() {
return error.get();
}
<U> ParserResult<U> castError() {
return new ParserResult<>(error.get());
}
boolean isResult() {
return result.isPresent();
}
T getResult() {
return result.get();
}
ParserResult<T> attachToken(Token t) {
this.extraToken = Optional.of(t);
return this;
}
ParserResult<T> copyToken(ParserResult<?> other) {
this.extraToken = other.extraToken;
return this;
}
Optional<Token> extraToken() {
return this.extraToken;
}
}
private interface ParserState<T> {
ParserResult<T> handle(Token t);
}
private static class InitialState implements ParserState<Map<VarTerm, TypeAbstraction>> {
private Map<VarTerm, TypeAbstraction> alreadyParsed = new LinkedHashMap<>();
InitialState(Map<VarTerm, TypeAbstraction> alreadyParsed) {
this.alreadyParsed = alreadyParsed;
}
@Override
public ParserResult<Map<VarTerm, TypeAbstraction>> handle(Token t) {
switch (t.getType()) {
case LEFT_PARENTHESIS:
// recursive call, set number of open parentheses to 1
Result<Pair<Type, Integer>, ParseError> type2 = parseType(lexer, 1);
typeResult = type2.map(Pair::getLeft);
removedParens += type2.map(Pair::getRight).unwrapOr(1) - 1;
break;
case VARIABLE:
// named type or type variable
type = parseLiteral(t.getText());
break;
case RIGHT_PARENTHESIS:
removedParens += 1;
break;
case ARROW:
if (type == null) {
// there was no type in front of the arrow
return new Result<>(null, ParseError.UNEXPECTED_TOKEN.withToken(t, ""));
}
// recursive call, keep open parentheses count
Result<Pair<Type, Integer>, ParseError> nextType = parseType(lexer, parenCount);
final Type left = type;
typeResult = nextType.map(Pair::getLeft).map(right -> new FunctionType(left, right));
removedParens += nextType.map(Pair::getRight).unwrapOr(0);
break;
return new ParserResult<>(new ExpectingColon(alreadyParsed, new VarTerm(t.getText())));
case EOF:
break;
return new ParserResult<>(alreadyParsed);
default:
return new Result<>(null, ParseError.UNEXPECTED_TOKEN.withToken(t, ""));
}
// update type based on Result
if (typeResult != null && typeResult.isError()) {
return new Result<>(typeResult);
}
type = typeResult != null ? typeResult.unwrap() : type;
// after fully processing one token / a type in parenthesis,
// some type should have been parsed
if (type == null) {
return new Result<>(null, ParseError.TOO_FEW_TOKENS);
}
if (parenCount - removedParens < 0) {
// too many closing parenthesis
return new Result<>(null, ParseError.UNEXPECTED_TOKEN.withToken(t, ""));
} else if (END_TOKENS.contains(t.getType())) {
// potential end of type
if (parenCount - removedParens == 0) {
// opening and closing parentheses match
return new Result<>(new ImmutablePair<>(type, removedParens));
} else {
// missing closing parenthesis
return new Result<>(null, ParseError.TOO_FEW_TOKENS);
}
return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t));
}
}
}
private Type parseLiteral(String text) {
/**
 * State reached after a variable name was read: the next token must be the
 * colon separating the variable from its type in {@code var : type}.
 */
private static class ExpectingColon implements ParserState<Map<VarTerm, TypeAbstraction>> {
    /** Type assumptions completed so far (passed through unchanged). */
    private final Map<VarTerm, TypeAbstraction> alreadyParsed;
    /** The variable whose type assumption is currently being parsed. */
    private final VarTerm var;

    ExpectingColon(Map<VarTerm, TypeAbstraction> alreadyParsed, VarTerm var) {
        this.alreadyParsed = alreadyParsed;
        this.var = var;
    }

    @Override
    public ParserResult<Map<VarTerm, TypeAbstraction>> handle(Token t) {
        // only a colon is valid here; everything else is a parse error
        if (t.getType() == TokenType.COLON) {
            return new ParserResult<>(new ExpectingTypeDef(alreadyParsed, var));
        }
        return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t));
    }
}
/**
 * State that parses the right-hand side of a type assumption ({@code x: <typedef>}).
 * A type definition is either a plain type (delegated to ParseTypeState1) or a
 * universally quantified type introduced by a universal quantifier token.
 */
private static class ExpectingTypeDef implements ParserState<Map<VarTerm, TypeAbstraction>> {
    // assumptions completed so far; the current one is added when its type is done
    private final Map<VarTerm, TypeAbstraction> alreadyParsed;
    // quantified type variables of this assumption (empty unless the ∀ form is used)
    private final Set<TypeVariable> typeVariables;
    // the variable this type assumption belongs to
    private final VarTerm var;
    // inner state while a plain type is being parsed; empty before the first type token
    private Optional<ParserState<Type>> state = Optional.empty();

    /** Start parsing a type definition without quantified variables. */
    ExpectingTypeDef(Map<VarTerm, TypeAbstraction> alreadyParsed, VarTerm var) {
        this.alreadyParsed = alreadyParsed;
        this.typeVariables = new TreeSet<>();
        this.var = var;
    }

    /** Continue after "∀ t1, ... ." with the collected quantified variables. */
    ExpectingTypeDef(Map<VarTerm, TypeAbstraction> alreadyParsed, Set<TypeVariable> typeVariables, VarTerm var) {
        this.alreadyParsed = alreadyParsed;
        this.typeVariables = typeVariables;
        this.var = var;
    }

    @Override
    public ParserResult<Map<VarTerm, TypeAbstraction>> handle(Token t) {
        switch (t.getType()) {
            case UNIVERSAL_QUANTIFIER:
                // a quantifier is only legal once, before any variables were collected
                if (typeVariables.isEmpty()) {
                    return new ParserResult<>(new ExpectingTypeVariables(alreadyParsed, var));
                } else {
                    return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t));
                }
            default:
                if (state.isPresent()) {
                    ParserState<Type> status = state.get();
                    // already parsing type
                    ParserResult<Type> result = status.handle(t);
                    if (result.isResult()) {
                        // type finished: record the assumption and expect "," or EOF next
                        alreadyParsed.put(var, new TypeAbstraction(result.getResult(), typeVariables));
                        return new ParserResult<>(new ExpectingCommaBeforeNextType(alreadyParsed))
                                .copyToken(result);
                    } else if (result.isError()) {
                        return result.castError();
                    } else {
                        // inner state transitioned; stay in this state
                        state = Optional.of(result.getState());
                        return new ParserResult<>(this);
                    }
                }
                // attempt to parse as type
                ParserResult<Type> result = new ParseTypeState1().handle(t);
                if (result.isError()) {
                    return result.castError();
                }
                state = Optional.of(result.getState());
                return new ParserResult<>(this);
        }
    }
}
/**
 * State after a complete type assumption was parsed: either the input ends
 * here, or a comma introduces the next assumption.
 */
private static class ExpectingCommaBeforeNextType implements ParserState<Map<VarTerm, TypeAbstraction>> {
    /** Type assumptions completed so far. */
    private final Map<VarTerm, TypeAbstraction> alreadyParsed;

    ExpectingCommaBeforeNextType(Map<VarTerm, TypeAbstraction> alreadyParsed) {
        this.alreadyParsed = alreadyParsed;
    }

    @Override
    public ParserResult<Map<VarTerm, TypeAbstraction>> handle(Token token) {
        TokenType kind = token.getType();
        if (kind == TokenType.EOF) {
            // input exhausted: all assumptions parsed successfully
            return new ParserResult<>(alreadyParsed);
        }
        if (kind == TokenType.COMMA) {
            // another assumption follows; restart the per-assumption cycle
            return new ParserResult<>(new InitialState(alreadyParsed));
        }
        return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(token));
    }
}
/**
* Main type parsing state.
*/
private static class ParseTypeState1 implements ParserState<Type> {
private Optional<Type> parsedType = Optional.empty();
private Optional<ParserState<Type>> state = Optional.empty();
private Optional<ParserState<Type>> stateParenthesis = Optional.empty();
private int parenthesisInitial = 0;
private int openParens = 0;
/**
 * Handle one token of a type expression. Tracks three overlapping modes:
 * a literal already parsed ({@code parsedType}), the right side of a
 * function type being parsed ({@code state}), and a parenthesized subtype
 * being parsed ({@code stateParenthesis}); at most one delegate is active.
 */
@Override
public ParserResult<Type> handle(Token t) {
    switch (t.getType()) {
        case VARIABLE:
            // delegate to whichever inner parse is active, if any
            if (state.isPresent()) {
                return handleInner(t);
            }
            if (stateParenthesis.isPresent()) {
                return handleInnerParenthesis(t);
            }
            if (parsedType.isPresent()) {
                // two adjacent types without an arrow are not allowed
                return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t));
            }
            Type type = parseLiteral(t.getText());
            // try parsing function type (see below)
            this.parsedType = Optional.of(type);
            return new ParserResult<>(this);
        case ARROW:
            if (state.isPresent()) {
                return handleInner(t);
            }
            if (stateParenthesis.isPresent()) {
                return handleInnerParenthesis(t);
            }
            if (parsedType.isEmpty()) {
                // there is no left-hand side for this arrow
                return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t));
            }
            // parse function type
            state = Optional.of(new ParseTypeStateExpectArrow().handle(t).getState());
            return new ParserResult<>(this);
        case LEFT_PARENTHESIS:
            openParens += 1;
            if (state.isPresent()) {
                return handleInner(t);
            }
            if (stateParenthesis.isPresent()) {
                return handleInnerParenthesis(t);
            }
            // open a fresh sub-parse; remember the nesting depth it started at
            stateParenthesis = Optional.of(new ParseTypeState1());
            parenthesisInitial = openParens - 1;
            return new ParserResult<>(this);
        case RIGHT_PARENTHESIS:
            openParens -= 1;
            if (state.isPresent()) {
                return handleInner(t);
            }
            if (stateParenthesis.isPresent()) {
                if (openParens == parenthesisInitial) {
                    // inner state is done parsing
                    // flush it with a synthetic EOF token to obtain its result
                    ParserResult<Type> result = handleInnerParenthesis(
                            new Token(TokenType.EOF, "", "", -1));
                    if (result.isError()) {
                        return result.castError();
                    } else {
                        parsedType = Optional.of(result.getResult());
                        stateParenthesis = Optional.empty();
                    }
                } else {
                    return handleInnerParenthesis(t);
                }
            }
            if (parsedType.isPresent()) {
                return new ParserResult<>(this); // parenthesized part may be start of function
            }
            return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t));
        case COMMA:
        case EOF:
            // end of this type; attach the terminator so the caller can re-handle it
            if (state.isPresent()) {
                return handleInner(t).attachToken(t);
            }
            if (stateParenthesis.isPresent() && openParens == parenthesisInitial) {
                return handleInnerParenthesis(t).attachToken(t);
            }
            if (parsedType.isPresent() && openParens == parenthesisInitial) {
                return new ParserResult<>(parsedType.get()).attachToken(t);
            }
            return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t));
        default:
            if (state.isPresent()) {
                return handleInner(t);
            }
            return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t));
    }
}
/**
 * Forward a token to the inner state parsing the right-hand side of a
 * function type; on completion, combine it with the stored left-hand side.
 */
private ParserResult<Type> handleInner(Token t) {
    ParserState<Type> status = state.get();
    // already parsing right type of function
    ParserResult<Type> result = status.handle(t);
    if (result.isResult()) {
        // right side finished: build "left -> right", forwarding any extra token
        return new ParserResult<Type>(new FunctionType(parsedType.get(), result.getResult())).copyToken(result);
    } else if (result.isError()) {
        return result.castError();
    } else {
        // inner state transitioned; keep delegating
        state = Optional.of(result.getState());
        return new ParserResult<>(this);
    }
}
/**
 * Forward a token to the inner state parsing a parenthesized subtype;
 * on completion, pass its result through (with any extra token attached).
 */
private ParserResult<Type> handleInnerParenthesis(Token t) {
    ParserState<Type> status = stateParenthesis.get();
    // already parsing right type of function
    ParserResult<Type> result = status.handle(t);
    if (result.isResult()) {
        // subtype finished: return it unchanged, forwarding any extra token
        return new ParserResult<>(result.getResult()).copyToken(result);
    } else if (result.isError()) {
        return result.castError();
    } else {
        // inner state transitioned; keep delegating
        stateParenthesis = Optional.of(result.getState());
        return new ParserResult<>(this);
    }
}
private static Type parseLiteral(String text) {
Matcher typeVariableMatcher = TYPE_VARIABLE_PATTERN.matcher(text);
if (typeVariableMatcher.matches()) {
int typeVariableIndex = Integer.parseInt(typeVariableMatcher.group(1));
@ -181,4 +372,94 @@ public class TypeAssumptionParser {
return new NamedType(text);
}
}
}
/**
 * State entered on the arrow of a function type: consumes the arrow and then
 * delegates parsing of the right-hand side to a fresh ParseTypeState1.
 */
private static class ParseTypeStateExpectArrow implements ParserState<Type> {
    // delegate state for the type after the arrow; empty until the arrow is consumed
    private Optional<ParserState<Type>> state = Optional.empty();
    @Override
    public ParserResult<Type> handle(Token t) {
        switch (t.getType()) {
            case ARROW:
                if (state.isEmpty()) {
                    // try parsing remainder as type
                    state = Optional.of(new ParseTypeState1());
                    return new ParserResult<>(this);
                }
                // NOTE: intentional fall-through — an arrow seen while the inner
                // state is active is handed to it via the default branch
            default:
                if (state.isPresent()) {
                    ParserState<Type> status = state.get();
                    // already parsing type
                    ParserResult<Type> result = status.handle(t);
                    if (result.isResult()) {
                        return result;
                    } else if (result.isError()) {
                        return result.castError();
                    } else {
                        // inner state transitioned; keep delegating
                        state = Optional.of(result.getState());
                        return new ParserResult<>(this);
                    }
                } else {
                    // any token other than an arrow is invalid here
                    return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t));
                }
        }
    }
}
/**
 * State parsing the list of quantified type variables in
 * {@code ∀ t1, t2, ... . <type>}; entered after the universal quantifier token.
 * Variables must match TYPE_VARIABLE_PATTERN (i.e. {@code t<number>}).
 */
private static class ExpectingTypeVariables implements ParserState<Map<VarTerm, TypeAbstraction>> {
    /** Type assumptions completed so far. */
    private final Map<VarTerm, TypeAbstraction> alreadyParsed;
    /** The variable this quantified type assumption belongs to. */
    private final VarTerm var;
    /** Quantified type variables collected so far. */
    private final Set<TypeVariable> variables = new TreeSet<>();
    /** True right after a variable was read: next token must be ',' or '.'. */
    private boolean expectCommaOrDot = false;

    ExpectingTypeVariables(Map<VarTerm, TypeAbstraction> alreadyParsed, VarTerm var) {
        this.alreadyParsed = alreadyParsed;
        this.var = var;
    }

    @Override
    public ParserResult<Map<VarTerm, TypeAbstraction>> handle(Token t) {
        switch (t.getType()) {
            case VARIABLE:
                if (expectCommaOrDot) {
                    return new ParserResult<>(ParseError.UNEXPECTED_TOKEN
                            .withToken(t)
                            .expectedTypes(List.of(TokenType.COMMA, TokenType.DOT)));
                }
                String input = t.getText();
                if (!TYPE_VARIABLE_PATTERN.matcher(input).matches()) {
                    return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t));
                    // TODO: somehow convey the expected format
                }
                // pattern guarantees the form t<number>; extract the index
                int i = Integer.parseInt(input.substring(1));
                variables.add(new TypeVariable(TypeVariableKind.USER_INPUT, i));
                expectCommaOrDot = true;
                return new ParserResult<>(this);
            case COMMA:
                if (expectCommaOrDot) {
                    expectCommaOrDot = false;
                    return new ParserResult<>(this);
                } else {
                    return new ParserResult<>(ParseError.UNEXPECTED_TOKEN
                            .withToken(t)
                            .expectedType(TokenType.VARIABLE));
                }
            case DOT:
                if (expectCommaOrDot) {
                    // list of type variables is complete
                    // parse actual type
                    return new ParserResult<>(new ExpectingTypeDef(alreadyParsed, variables, var));
                } else {
                    return new ParserResult<>(ParseError.UNEXPECTED_TOKEN
                            .withToken(t)
                            .expectedType(TokenType.VARIABLE));
                }
            default:
                if (expectCommaOrDot) {
                    return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t)
                            .expectedTypes(List.of(TokenType.COMMA, TokenType.DOT)));
                }
                return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t));
        }
    }
}
}

View File

@ -1,7 +1,5 @@
package edu.kit.typicalc.presenter;
import java.util.Map;
import com.vaadin.flow.component.UI;
import edu.kit.typicalc.model.Model;
import edu.kit.typicalc.model.TypeInfererInterface;
@ -32,7 +30,7 @@ public class Presenter implements MainViewListener {
}
@Override
public void typeInferLambdaString(String lambdaTerm, Map<String, String> typeAssumptions) {
public void typeInferLambdaString(String lambdaTerm, String typeAssumptions) {
Result<TypeInfererInterface, ParseError> result = model.getTypeInferer(lambdaTerm, typeAssumptions);
if (result.isError()) {
view.displayError(result.unwrapError());

View File

@ -116,6 +116,10 @@ public class Result<T, E> {
return new Result<>(value != null ? mapping.apply(value) : null, error);
}
/**
 * Reinterpret this result with a different value type.
 * NOTE(review): only meaningful when this result holds an error — the value
 * is dropped (replaced by null); confirm callers invoke this on errors only.
 *
 * @param <U> the new value type
 * @return a result carrying the same error and no value
 */
public <U> Result<U, E> castError() {
    return new Result<>(null, this.error);
}
@Override
public String toString() {
if (isOk()) {

View File

@ -120,11 +120,8 @@ public class InputBar extends HorizontalLayout implements LocaleChangeObserver {
*
* @param typeAssumptions the type assumptions as a single string
*/
protected void setTypeAssumptions(Map<String, String> typeAssumptions) {
assumptionInputField.setValue(
typeAssumptions.entrySet().stream()
.map(entry -> entry.getKey().trim() + ": " + entry.getValue().trim())
.collect(Collectors.joining("; ")));
protected void setTypeAssumptions(String typeAssumptions) {
assumptionInputField.setValue(typeAssumptions);
}
protected void setTermAndClickTypeInfer(String term) {

View File

@ -3,8 +3,6 @@ package edu.kit.typicalc.view.main;
import edu.kit.typicalc.model.TypeInfererInterface;
import edu.kit.typicalc.model.parser.ParseError;
import java.util.Map;
/**
* Provides an interface for the presenter to interact with the view. The interaction consists of
* either passing a TypeInfererInterface or a ParseError to the view.
@ -37,6 +35,6 @@ public interface MainView {
* @param lambdaTerm the lambda term to type-infer
* @param typeAssumptions type assumptions to use during the calculation
*/
void typeInferLambdaString(String lambdaTerm, Map<String, String> typeAssumptions);
void typeInferLambdaString(String lambdaTerm, String typeAssumptions);
}
}

View File

@ -74,14 +74,13 @@ public class MainViewImpl extends AppLayout
if (url.getPath().matches(TypeInferenceView.ROUTE + "/.*")) {
List<String> segments = url.getSegments();
String term = segments.get(segments.size() - 1);
Map<String, String> types = url.getQueryParameters().getParameters().entrySet().stream().map(entry ->
Pair.of(entry.getKey(), entry.getValue().get(0))
).collect(Collectors.toMap(Pair::getLeft, Pair::getRight,
(existing, replacement) -> existing, LinkedHashMap::new));
String types = url.getQueryParameters().getParameters()
.entrySet().stream().map(kv -> kv.getKey() + ": " + kv.getValue().get(0))
.collect(Collectors.joining(", "));
upperBar.inferTerm(decodeURL(term), types);
} else if (url.getPath().equals(TypeInferenceView.ROUTE)) {
setContent(new StartPageView());
upperBar.inferTerm(StringUtils.EMPTY, Collections.emptyMap());
upperBar.inferTerm(StringUtils.EMPTY, "");
} else if (url.getPath().equals(StringUtils.EMPTY)) {
setContent(new StartPageView());
}

View File

@ -94,7 +94,7 @@ public class UpperBar extends VerticalLayout implements LocaleChangeObserver {
inputConsumer.accept(termAndAssumptions);
}
private void startInfer(String term, Map<String, String> typeAssumptions) {
private void startInfer(String term, String typeAssumptions) {
presenter.typeInferLambdaString(term, typeAssumptions);
}
@ -104,7 +104,7 @@ public class UpperBar extends VerticalLayout implements LocaleChangeObserver {
* @param term the provided string
* @param typeAssumptions type assumptions to use
*/
protected void inferTerm(String term, Map<String, String> typeAssumptions) {
protected void inferTerm(String term, String typeAssumptions) {
inputBar.setTypeAssumptions(typeAssumptions);
inputBar.setTerm(term);
startInfer(term, typeAssumptions);

View File

@ -23,14 +23,10 @@ class ModelImplTest {
@Test
void getTypeInferer() {
ModelImpl model = new ModelImpl();
Map<String, String> map = new LinkedHashMap<>();
map.put("x", "int");
Map<String, String> map2 = new LinkedHashMap<>();
map2.put("a.x", "3.int");
assertTrue(model.getTypeInferer("test.x.x.test", map2).isError());
assertTrue(model.getTypeInferer("x", map2).isError());
assertTrue(model.getTypeInferer("test.x.x.test", "a.x: 3.int").isError());
assertTrue(model.getTypeInferer("x", "a.x: 3.int").isError());
Result<TypeInfererInterface, ParseError> result = model.getTypeInferer("λy.x", map);
Result<TypeInfererInterface, ParseError> result = model.getTypeInferer("λy.x", "x: int");
VarTerm y = new VarTerm("y");
VarTerm x = new VarTerm("x");
LambdaTerm term = new AbsTerm(y, x);
@ -66,10 +62,9 @@ class ModelImplTest {
@Test
void quantifiedTypeAssumption() {
ModelImpl model = new ModelImpl();
Map<String, String> assumptions = new HashMap<>();
assumptions.put("id", "∀ t1 . t1 -> t1");
Result<TypeInfererInterface, ParseError> result = model.getTypeInferer("(id id) (id true)", assumptions);
Result<TypeInfererInterface, ParseError> result = model.getTypeInferer("(id id) (id true)",
"id: ∀ t1 . t1 -> t1");
if (result.isError()) {
System.out.println(result.unwrapError());
fail();
@ -93,7 +88,7 @@ class ModelImplTest {
@Test
void letTermTypeAssumptions() {
Model model = new ModelImpl();
TypeInfererInterface typer = model.getTypeInferer("(let g = λx.x in g) g", new HashMap<>()).unwrap();
TypeInfererInterface typer = model.getTypeInferer("(let g = λx.x in g) g", "").unwrap();
AppStep first = (AppStep) typer.getFirstInferenceStep();
LetStep second = (LetStep) first.getPremise1();
VarStep third = (VarStep) second.getPremise();

View File

@ -28,7 +28,7 @@ public class LambdaParserFuzzTest {
@Fuzz
public void testInference(@From(LambdaTermGenerator.class) String term) {
Model model = new ModelImpl();
Result<TypeInfererInterface, ParseError> typer = model.getTypeInferer(term, new HashMap<>());
Result<TypeInfererInterface, ParseError> typer = model.getTypeInferer(term, "");
InferenceStep first = typer.unwrap().getFirstInferenceStep();
}

View File

@ -98,7 +98,7 @@ class LambdaParserTest {
parser = new LambdaParser("x)");
ParseError error = parser.parse().unwrapError();
assertEquals(ParseError.UNEXPECTED_TOKEN, error);
assertEquals(new Token(TokenType.RIGHT_PARENTHESIS, ")", 1), error.getCause().get());
assertEquals(new Token(TokenType.RIGHT_PARENTHESIS, ")", "x)", 1), error.getCause().get());
parser = new LambdaParser("??");
assertEquals(ParseError.UNEXPECTED_CHARACTER, parser.parse().unwrapError());
parser = new LambdaParser("");
@ -110,39 +110,40 @@ class LambdaParserTest {
parser = new LambdaParser("x 123333333333333");
error = parser.parse().unwrapError();
assertEquals(ParseError.UNEXPECTED_CHARACTER, error);
assertEquals(new Token(TokenType.NUMBER, "123333333333333", 2), error.getCause().get());
assertEquals(new Token(TokenType.NUMBER, "123333333333333", "x 123333333333333", 2),
error.getCause().get());
parser = new LambdaParser("λ)");
error = parser.parse().unwrapError();
assertEquals(ParseError.UNEXPECTED_TOKEN, error);
assertEquals(new Token(TokenType.RIGHT_PARENTHESIS, ")", 1), error.getCause().get());
assertEquals(new Token(TokenType.RIGHT_PARENTHESIS, ")", "λ)", 1), error.getCause().get());
parser = new LambdaParser("λx=");
error = parser.parse().unwrapError();
assertEquals(ParseError.UNEXPECTED_TOKEN, error);
assertEquals(new Token(TokenType.EQUALS, "=", 2), error.getCause().get());
assertEquals(new Token(TokenType.EQUALS, "=", "λx=", 2), error.getCause().get());
parser = new LambdaParser("λx..");
error = parser.parse().unwrapError();
assertEquals(ParseError.UNEXPECTED_TOKEN, error);
assertEquals(new Token(TokenType.DOT, ".", 3), error.getCause().get());
assertEquals(new Token(TokenType.DOT, ".", "λx..", 3), error.getCause().get());
parser = new LambdaParser("let ) =");
error = parser.parse().unwrapError();
assertEquals(ParseError.UNEXPECTED_TOKEN, error);
assertEquals(new Token(TokenType.RIGHT_PARENTHESIS, ")", 4), error.getCause().get());
assertEquals(new Token(TokenType.RIGHT_PARENTHESIS, ")", "let ) =", 4), error.getCause().get());
parser = new LambdaParser("let x .");
error = parser.parse().unwrapError();
assertEquals(ParseError.UNEXPECTED_TOKEN, error);
assertEquals(new Token(TokenType.DOT, ".", 6), error.getCause().get());
assertEquals(new Token(TokenType.DOT, ".", "let x .", 6), error.getCause().get());
parser = new LambdaParser("let x = )");
error = parser.parse().unwrapError();
assertEquals(ParseError.UNEXPECTED_TOKEN, error);
assertEquals(new Token(TokenType.RIGHT_PARENTHESIS, ")", 8), error.getCause().get());
        assertEquals(new Token(TokenType.RIGHT_PARENTHESIS, ")", "let x = )", 8), error.getCause().get());
parser = new LambdaParser("let x = y )");
error = parser.parse().unwrapError();
assertEquals(ParseError.UNEXPECTED_TOKEN, error);
assertEquals(new Token(TokenType.RIGHT_PARENTHESIS, ")", 10), error.getCause().get());
assertEquals(new Token(TokenType.RIGHT_PARENTHESIS, ")", "let x = y )", 10), error.getCause().get());
parser = new LambdaParser("let x = y in )");
error = parser.parse().unwrapError();
assertEquals(ParseError.UNEXPECTED_TOKEN, error);
assertEquals(new Token(TokenType.RIGHT_PARENTHESIS, ")", 13), error.getCause().get());
assertEquals(new Token(TokenType.RIGHT_PARENTHESIS, ")", "let x = y in )", 13), error.getCause().get());
}
@Test

View File

@ -6,6 +6,7 @@ import edu.kit.typicalc.util.Result;
import org.junit.jupiter.api.Test;
import java.util.*;
import java.util.stream.Collectors;
import static edu.kit.typicalc.model.type.NamedType.BOOLEAN;
import static edu.kit.typicalc.model.type.NamedType.INT;
@ -16,9 +17,7 @@ class TypeAssumptionParserTest {
@Test
void simpleType() {
TypeAssumptionParser parser = new TypeAssumptionParser();
Map<String, String> assumptions = new LinkedHashMap<>();
assumptions.put("a", "int");
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(assumptions);
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("a: int");
assertTrue(type.isOk());
Map<VarTerm, TypeAbstraction> types = type.unwrap();
assertEquals(1, types.size());
@ -30,9 +29,7 @@ class TypeAssumptionParserTest {
@Test
void typeVariablesOneDigitIndex() {
TypeAssumptionParser parser = new TypeAssumptionParser();
Map<String, String> assumptions = new LinkedHashMap<>();
assumptions.put("x", "t1");
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(assumptions);
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("x: t1");
assertTrue(type.isOk());
Map<VarTerm, TypeAbstraction> types = type.unwrap();
assertEquals(1, types.size());
@ -40,23 +37,19 @@ class TypeAssumptionParserTest {
assertEquals(new VarTerm("x"), assumption.getKey());
assertEquals(new TypeAbstraction(new TypeVariable(TypeVariableKind.USER_INPUT, 1)), assumption.getValue());
Map<String, String> assumptions2 = new LinkedHashMap<>();
assumptions2.put("x", "t001");
Result<Map<VarTerm, TypeAbstraction>, ParseError> type2 = parser.parse(assumptions2);
Result<Map<VarTerm, TypeAbstraction>, ParseError> type2 = parser.parse("x: t009");
assertTrue(type.isOk());
Map<VarTerm, TypeAbstraction> types2 = type2.unwrap();
assertEquals(1, types2.size());
Map.Entry<VarTerm, TypeAbstraction> assumption2 = types2.entrySet().stream().findFirst().get();
assertEquals(new VarTerm("x"), assumption2.getKey());
assertEquals(new TypeAbstraction(new TypeVariable(TypeVariableKind.USER_INPUT, 1)), assumption2.getValue());
assertEquals(new TypeAbstraction(new TypeVariable(TypeVariableKind.USER_INPUT, 9)), assumption2.getValue());
}
@Test
void typeVariablesMultipleDigitIndex() {
TypeAssumptionParser parser = new TypeAssumptionParser();
Map<String, String> assumptions = new LinkedHashMap<>();
assumptions.put("x", "t123456");
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(assumptions);
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("x: t123456");
assertTrue(type.isOk());
Map<VarTerm, TypeAbstraction> types = type.unwrap();
assertEquals(1, types.size());
@ -68,9 +61,7 @@ class TypeAssumptionParserTest {
@Test
void namedTypeStartingWithT() {
TypeAssumptionParser parser = new TypeAssumptionParser();
Map<String, String> assumptions = new LinkedHashMap<>();
assumptions.put("x", "tau1");
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(assumptions);
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("x: tau1");
assertTrue(type.isOk());
Map<VarTerm, TypeAbstraction> types = type.unwrap();
assertEquals(1, types.size());
@ -82,27 +73,27 @@ class TypeAssumptionParserTest {
@Test
void functionType() {
TypeAssumptionParser parser = new TypeAssumptionParser();
Map<String, String> assumptions = new LinkedHashMap<>();
assumptions.put("id", "int -> int");
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(assumptions);
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("id: int -> int");
assertTrue(type.isOk());
Map<VarTerm, TypeAbstraction> types = type.unwrap();
assertEquals(1, types.size());
Map.Entry<VarTerm, TypeAbstraction> assumption = types.entrySet().stream().findFirst().get();
assertEquals(new VarTerm("id"), assumption.getKey());
assertEquals(new TypeAbstraction(new FunctionType(INT, INT)), assumption.getValue());
}
Map<String, String> assumptions2 = new LinkedHashMap<>();
assumptions2.put("f", "int -> int -> int");
type = parser.parse(assumptions2);
@Test
void functionType2() {
TypeAssumptionParser parser = new TypeAssumptionParser();
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("f: int -> int -> int");
if (type.isError()) {
System.err.println(type.unwrapError());
System.err.println(type.unwrapError().getCause());
}
assertTrue(type.isOk());
types = type.unwrap();
Map<VarTerm, TypeAbstraction> types = type.unwrap();
assertEquals(1, types.size());
assumption = types.entrySet().stream().findFirst().get();
Map.Entry<VarTerm, TypeAbstraction> assumption = types.entrySet().stream().findFirst().get();
assertEquals(new VarTerm("f"), assumption.getKey());
assertEquals(new TypeAbstraction(
new FunctionType(
@ -114,9 +105,7 @@ class TypeAssumptionParserTest {
@Test
void functionTypeWithVariables() {
TypeAssumptionParser parser = new TypeAssumptionParser();
Map<String, String> assumptions = new LinkedHashMap<>();
assumptions.put("fun", "t0 -> t0");
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(assumptions);
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("fun: t0 -> t0");
assertTrue(type.isOk());
Map<VarTerm, TypeAbstraction> types = type.unwrap();
assertEquals(1, types.size());
@ -129,9 +118,7 @@ class TypeAssumptionParserTest {
@Test
void complicatedTypes() {
TypeAssumptionParser parser = new TypeAssumptionParser();
Map<String, String> assumptions = new LinkedHashMap<>();
assumptions.put("id", "(int -> int) -> (boolean -> boolean)");
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(assumptions);
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("id: (int -> int) -> (boolean -> boolean)");
if (type.isError()) {
System.err.println(type.unwrapError());
System.err.println(type.unwrapError().getCause());
@ -147,9 +134,7 @@ class TypeAssumptionParserTest {
new FunctionType(BOOLEAN, BOOLEAN)
)), assumption.getValue());
parser = new TypeAssumptionParser();
Map<String, String> assumptions2 = new LinkedHashMap<>();
assumptions2.put("id", "((int -> int) -> (boolean -> boolean)) -> ((int2 -> boolean2) -> (boolean2 -> int2))");
type = parser.parse(assumptions2);
type = parser.parse("id: ((int -> int) -> (boolean -> boolean)) -> ((int2 -> boolean2) -> (boolean2 -> int2))");
if (type.isError()) {
System.err.println(type.unwrapError());
System.err.println(type.unwrapError().getCause());
@ -177,9 +162,7 @@ class TypeAssumptionParserTest {
@Test
void longFunction() {
TypeAssumptionParser parser = new TypeAssumptionParser();
Map<String, String> assumptions = new LinkedHashMap<>();
assumptions.put("fun", "(a -> b -> c) -> d");
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(assumptions);
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("fun: (a -> b -> c) -> d");
assertTrue(type.isOk());
Map<VarTerm, TypeAbstraction> types = type.unwrap();
assertEquals(1, types.size());
@ -195,9 +178,7 @@ class TypeAssumptionParserTest {
@Test
void allQuantified() {
TypeAssumptionParser parser = new TypeAssumptionParser();
Map<String, String> assumptions = new HashMap<>();
assumptions.put("id", "∀ t1 . t1 -> t1");
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(assumptions);
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("id: ∀ t1 . t1 -> t1");
assertTrue(type.isOk());
Map<VarTerm, TypeAbstraction> types = type.unwrap();
assertEquals(1, types.size());
@ -212,35 +193,59 @@ class TypeAssumptionParserTest {
), quantified), assumption.getValue());
}
@Test
void allQuantifiedCommaSeparation() {
TypeAssumptionParser parser = new TypeAssumptionParser();
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("transmute: ∀ t1, t2 . t1 -> t2, createAny: ∀ t1 . t1 ");
assertTrue(type.isOk());
Map<VarTerm, TypeAbstraction> types = type.unwrap();
assertEquals(2, types.size());
Set<TypeVariable> quantified = new HashSet<>();
quantified.add(new TypeVariable(TypeVariableKind.USER_INPUT, 1));
quantified.add(new TypeVariable(TypeVariableKind.USER_INPUT, 2));
assertEquals(new TypeAbstraction(
new FunctionType(
new TypeVariable(TypeVariableKind.USER_INPUT, 1),
new TypeVariable(TypeVariableKind.USER_INPUT, 2)
), quantified), types.get(new VarTerm("transmute")));
quantified.remove(new TypeVariable(TypeVariableKind.USER_INPUT, 2));
assertEquals(new TypeAbstraction(
new TypeVariable(TypeVariableKind.USER_INPUT, 1),
quantified), types.get(new VarTerm("createAny")));
}
@Test
void usefulErrors() {
TypeAssumptionParser parser = new TypeAssumptionParser();
Map<String, String> assumptions = new HashMap<>();
assumptions.put("id", "∀ t1");
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(assumptions);
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("id: ∀ t1");
assertTrue(type.isError());
ParseError error = type.unwrapError();
assertEquals(ParseError.TOO_FEW_TOKENS, error);
assertEquals(ParseError.UNEXPECTED_TOKEN, error);
assertEquals(Token.TokenType.EOF, error.getCause().get().getType());
Collection<Token.TokenType> expected = error.getExpected().get();
assertEquals(1, expected.size());
assertEquals(2, expected.size());
assertTrue(expected.contains(Token.TokenType.DOT));
// TODO: should also expect a comma, once the parser is fixed!
assertTrue(expected.contains(Token.TokenType.COMMA));
}
@Test
void errors() {
Map<String, ParseError> tests = new HashMap<>();
tests.put("", ParseError.TOO_FEW_TOKENS);
tests.put("",
ParseError.UNEXPECTED_TOKEN.withToken(new Token(Token.TokenType.EOF, "", "", 0)));
tests.put("ö", ParseError.UNEXPECTED_CHARACTER);
tests.put("(x", ParseError.TOO_FEW_TOKENS);
tests.put("-> x", ParseError.UNEXPECTED_TOKEN.withToken(new Token(Token.TokenType.ARROW, "->", 0), ""));
tests.put("x 11", ParseError.UNEXPECTED_TOKEN.withToken(new Token(Token.TokenType.NUMBER, "11", 2), ""));
tests.put("x )", ParseError.UNEXPECTED_TOKEN.withToken(new Token(Token.TokenType.RIGHT_PARENTHESIS, ")", 2), ""));
tests.put("(x",
ParseError.UNEXPECTED_TOKEN.withToken(new Token(Token.TokenType.EOF, "", "(x", 2)));
tests.put("-> x",
ParseError.UNEXPECTED_TOKEN.withToken(new Token(Token.TokenType.ARROW, "->", "-> x", 0)));
tests.put("x 11",
ParseError.UNEXPECTED_TOKEN.withToken(new Token(Token.TokenType.NUMBER, "11", "x 11", 2)));
tests.put("x )", ParseError.UNEXPECTED_TOKEN.withToken(new Token(Token.TokenType.RIGHT_PARENTHESIS, ")", "x )", 2)));
tests.put("x -> (x) )", ParseError.UNEXPECTED_TOKEN
.withToken(new Token(Token.TokenType.RIGHT_PARENTHESIS, ")", 9), ""));
.withToken(new Token(Token.TokenType.RIGHT_PARENTHESIS, ")", "x -> (x) )", 9)));
for (Map.Entry<String, ParseError> entry : tests.entrySet()) {
TypeAssumptionParser parser = new TypeAssumptionParser();
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(Map.of("type1", entry.getKey()));
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("type1:" + entry.getKey());
assertTrue(type.isError());
assertEquals(entry.getValue(), type.unwrapError());
if (entry.getValue().getCause().isPresent()) {
@ -248,13 +253,15 @@ class TypeAssumptionParserTest {
}
}
TypeAssumptionParser parser = new TypeAssumptionParser();
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(Map.of("föhn", "int"));
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("föhn: int");
assertTrue(type.isError());
assertEquals(ParseError.UNEXPECTED_CHARACTER, type.unwrapError());
assertEquals(1, type.unwrapError().getPosition());
parser = new TypeAssumptionParser();
type = parser.parse(Map.of("1typ", "int"));
type = parser.parse("1typ:int");
assertTrue(type.isError());
assertEquals(ParseError.UNEXPECTED_CHARACTER, type.unwrapError());
assertEquals(ParseError.UNEXPECTED_TOKEN, type.unwrapError());
assertEquals(0, type.unwrapError().getPosition());
}
@Test
@ -281,7 +288,7 @@ class TypeAssumptionParserTest {
tests.put("((x) -> ((x)) -> (x)) -> (x -> (x -> (x)))", new TypeAbstraction(xxxxxx));
for (Map.Entry<String, TypeAbstraction> entry : tests.entrySet()) {
TypeAssumptionParser parser = new TypeAssumptionParser();
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(Map.of("type1", entry.getKey()));
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("type1:" + entry.getKey());
assertTrue(type.isOk());
assertEquals(entry.getValue(), type.unwrap().get(new VarTerm("type1")));
}

View File

@ -25,7 +25,7 @@ class LatexCreatorConstraintsTest {
@Test
void singleVarDefaultConstraintTest() {
typeInferer = model.getTypeInferer("x", new HashMap<>()).unwrap();
typeInferer = model.getTypeInferer("x", "").unwrap();
List<String> actual = new LatexCreatorConstraints(typeInferer, Enum::toString, LatexCreatorMode.NORMAL).getEverything();
String constraintSet = AMPERSAND + SPLIT_BEGIN + CONSTRAINT_SET + EQUALS + LATEX_CURLY_LEFT + AMPERSAND
@ -55,7 +55,7 @@ class LatexCreatorConstraintsTest {
@Test
void singleAbsDefaultConstraintTest() {
typeInferer = model.getTypeInferer("λx.y", new HashMap<>()).unwrap();
typeInferer = model.getTypeInferer("λx.y", "").unwrap();
List<String> actual = new LatexCreatorConstraints(typeInferer, Enum::toString, LatexCreatorMode.NORMAL).getEverything();
String constraintSet1 = AMPERSAND + SPLIT_BEGIN + CONSTRAINT_SET + EQUALS + LATEX_CURLY_LEFT + AMPERSAND
@ -103,12 +103,12 @@ class LatexCreatorConstraintsTest {
@Test
void lineBreak() {
typeInferer = model.getTypeInferer("a b c d e f", new HashMap<>()).unwrap();
typeInferer = model.getTypeInferer("a b c d e f", "").unwrap();
List<String> actual = new LatexCreatorConstraints(typeInferer, Enum::toString, LatexCreatorMode.NORMAL).getEverything();
assertEquals("\\begin{aligned}&\\begin{split}C=\\{&\\alpha_{2}=\\alpha_{3} \\rightarrow \\alpha_{1},\\alpha_{4}=\\alpha_{5} \\rightarrow \\alpha_{2},\\alpha_{6}=\\alpha_{7} \\rightarrow \\alpha_{4},\\alpha_{8}=\\alpha_{9} \\rightarrow \\alpha_{6},\\alpha_{10}=\\alpha_{11} \\rightarrow \\alpha_{8},\\alpha_{10}=\\beta_{1},\\alpha_{11}=\\beta_{2},\\alpha_{9}=\\beta_{3},\\alpha_{7}=\\beta_{4},\\alpha_{5}=\\beta_{5},\\\\&\\alpha_{3}=\\beta_{6}\\}\\end{split}\\end{aligned}", actual.get(11));
typeInferer = model.getTypeInferer("let g = a b c d e f in g", new HashMap<>()).unwrap();
typeInferer = model.getTypeInferer("let g = a b c d e f in g", "").unwrap();
actual = new LatexCreatorConstraints(typeInferer, Enum::toString, LatexCreatorMode.NORMAL).getEverything();
assertEquals("\\begin{aligned}&\\begin{split}C=\\{&\\alpha_{1}=\\alpha_{13}\\}\\end{split}\\\\\n" +
@ -117,7 +117,7 @@ class LatexCreatorConstraintsTest {
@Test
void emptyLetTypeAssumptions() {
typeInferer = model.getTypeInferer("let g = 5 in g", new HashMap<>()).unwrap();
typeInferer = model.getTypeInferer("let g = 5 in g", "").unwrap();
List<String> actual = new LatexCreatorConstraints(typeInferer, Enum::toString, LatexCreatorMode.NORMAL).getEverything();
assertEquals("\\begin{aligned}&\\begin{split}C=\\{&\\alpha_{1}=\\alpha_{3}\\}\\end{split}\\\\\n" +
"&\\begin{split}C_{let}=\\{&\\alpha_{2}=\\texttt{int}\\}\\end{split}\\\\&\\begin{split}\\sigma_{let}:=\\textit{mgu}(C_{let})=[&\\alpha_{2}\\mathrel{\\unicode{x21E8}}\\texttt{int}]\\end{split}\\\\&\\sigma_{let}(\\alpha_{2})=\\texttt{int}\\\\&\\Gamma'=\\sigma_{let}(\\emptyset),\\ \\texttt{g}:ta(\\sigma_{let}(\\alpha_{2}),\\sigma_{let}(\\emptyset))=\\texttt{g}:\\texttt{int}\\end{aligned}", actual.get(6));
@ -126,7 +126,7 @@ class LatexCreatorConstraintsTest {
@Test
void excessiveMemoryUsageAvoided() {
try {
typeInferer = model.getTypeInferer("(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)", new HashMap<>()).unwrap();
typeInferer = model.getTypeInferer("(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)", "").unwrap();
List<String> actual = new LatexCreatorConstraints(typeInferer, Enum::toString, LatexCreatorMode.NORMAL).getEverything();
// should have thrown IllegalStateException by now
fail("did not throw exception");

View File

@ -17,7 +17,7 @@ class LatexCreatorTermTest {
@Test
void absTest() {
typeInferer = model.getTypeInferer("λx.y", new HashMap<>()).unwrap();
typeInferer = model.getTypeInferer("λx.y", "").unwrap();
assertEquals(LAMBDA + SPACE + MONO_TEXT + "{x}" + DOT_SIGN
+ LATEX_SPACE + MONO_TEXT + "{y}",
new LatexCreatorTerm(typeInferer.getFirstInferenceStep().getConclusion().getLambdaTerm(),
@ -26,7 +26,7 @@ class LatexCreatorTermTest {
@Test
void appTest() {
typeInferer = model.getTypeInferer("x y", new HashMap<>()).unwrap();
typeInferer = model.getTypeInferer("x y", "").unwrap();
assertEquals(MONO_TEXT + "{x}" + LATEX_SPACE + MONO_TEXT + "{y}",
new LatexCreatorTerm(typeInferer.getFirstInferenceStep().getConclusion().getLambdaTerm(),
LatexCreatorMode.NORMAL).getLatex());
@ -34,7 +34,7 @@ class LatexCreatorTermTest {
@Test
void varTest() {
typeInferer = model.getTypeInferer("x", new HashMap<>()).unwrap();
typeInferer = model.getTypeInferer("x", "").unwrap();
assertEquals(MONO_TEXT + "{x}",
new LatexCreatorTerm(typeInferer.getFirstInferenceStep().getConclusion().getLambdaTerm(),
LatexCreatorMode.NORMAL).getLatex());
@ -42,7 +42,7 @@ class LatexCreatorTermTest {
@Test
void integerTest() {
typeInferer = model.getTypeInferer("5", new HashMap<>()).unwrap();
typeInferer = model.getTypeInferer("5", "").unwrap();
assertEquals(MONO_TEXT + "{5}",
new LatexCreatorTerm(typeInferer.getFirstInferenceStep().getConclusion().getLambdaTerm(),
LatexCreatorMode.NORMAL).getLatex());
@ -50,7 +50,7 @@ class LatexCreatorTermTest {
@Test
void booleanTest() {
typeInferer = model.getTypeInferer("true", new HashMap<>()).unwrap();
typeInferer = model.getTypeInferer("true", "").unwrap();
assertEquals(MONO_TEXT + "{true}",
new LatexCreatorTerm(typeInferer.getFirstInferenceStep().getConclusion().getLambdaTerm(),
LatexCreatorMode.NORMAL).getLatex());
@ -58,7 +58,7 @@ class LatexCreatorTermTest {
@Test
void letTest() {
typeInferer = model.getTypeInferer("let f = x in f", new HashMap<>()).unwrap();
typeInferer = model.getTypeInferer("let f = x in f", "").unwrap();
assertEquals(MONO_TEXT + CURLY_LEFT + BOLD_TEXT + "{let}" + CURLY_RIGHT + LATEX_SPACE + MONO_TEXT + "{f}"
+ EQUALS + MONO_TEXT + "{x}" + LATEX_SPACE + MONO_TEXT + CURLY_LEFT + BOLD_TEXT + "{in}"
+ CURLY_RIGHT + LATEX_SPACE + MONO_TEXT + "{f}",
@ -68,7 +68,7 @@ class LatexCreatorTermTest {
@Test
void appWithParenthesesTest() {
typeInferer = model.getTypeInferer("x (y z)", new HashMap<>()).unwrap();
typeInferer = model.getTypeInferer("x (y z)", "").unwrap();
assertEquals(MONO_TEXT + "{x}" + LATEX_SPACE + PAREN_LEFT + MONO_TEXT + "{y}"
+ LATEX_SPACE + MONO_TEXT + "{z}" + PAREN_RIGHT,
new LatexCreatorTerm(typeInferer.getFirstInferenceStep().getConclusion().getLambdaTerm(),

View File

@ -14,7 +14,7 @@ class LatexCreatorTest {
@Test
void testFailedLet() {
TypeInfererInterface typeInferer = model.getTypeInferer("let fun = 5 5 in fun 42", new HashMap<>()).unwrap();
TypeInfererInterface typeInferer = model.getTypeInferer("let fun = 5 5 in fun 42", "").unwrap();
String latex = new LatexCreator(typeInferer, Enum::toString, LatexCreatorMode.NORMAL).getTree();
assertEquals("\\begin{prooftree}\n" +
"\\AxiomC{$\\texttt{5} \\in Const$}\n" +
@ -29,7 +29,7 @@ class LatexCreatorTest {
"\\LeftLabel{\\textrm L{\\small ET}}\n" +
"\\BinaryInfC{$\\vdash\\texttt{\\textbf{let}}\\ \\texttt{fun}=\\texttt{5}\\ \\texttt{5}\\ \\texttt{\\textbf{in}}\\ \\texttt{fun}\\ \\texttt{42}:\\alpha_{1}$}\n" +
"\\end{prooftree}", latex);
typeInferer = model.getTypeInferer("(let fun = 5 5 in fun) 42", new HashMap<>()).unwrap();
typeInferer = model.getTypeInferer("(let fun = 5 5 in fun) 42", "").unwrap();
latex = new LatexCreator(typeInferer, Enum::toString, LatexCreatorMode.NORMAL).getTree();
assertEquals("\\begin{prooftree}\n" +
"\\AxiomC{$\\texttt{5} \\in Const$}\n" +

View File

@ -17,30 +17,28 @@ class LatexCreatorTypeTest {
@Test
void identityTest() {
typeInferer = model.getTypeInferer("λx.x", new HashMap<>()).unwrap();
typeInferer = model.getTypeInferer("λx.x", "").unwrap();
assertEquals(TREE_VARIABLE + "_{2} " + RIGHT_ARROW + SPACE + TREE_VARIABLE + "_{2}",
new LatexCreatorType(typeInferer.getType().get(), LatexCreatorMode.NORMAL).getLatex());
}
@Test
void generatedTypeTest() {
typeInferer = model.getTypeInferer("x", new HashMap<>()).unwrap();
typeInferer = model.getTypeInferer("x", "").unwrap();
assertEquals(GENERATED_ASSUMPTION_VARIABLE + "_{1}",
new LatexCreatorType(typeInferer.getType().get(), LatexCreatorMode.NORMAL).getLatex());
}
@Test
void namedTypeTest() {
typeInferer = model.getTypeInferer("5", new HashMap<>()).unwrap();
typeInferer = model.getTypeInferer("5", "").unwrap();
assertEquals(MONO_TEXT + "{int}",
new LatexCreatorType(typeInferer.getType().get(), LatexCreatorMode.NORMAL).getLatex());
}
@Test
void userVariableTest() {
HashMap<String, String> map = new HashMap<>();
map.put("x", "t1");
typeInferer = model.getTypeInferer("x", map).unwrap();
typeInferer = model.getTypeInferer("x", "x: t1").unwrap();
assertEquals(USER_VARIABLE + "_{1}",
new LatexCreatorType(typeInferer.getType().get(), LatexCreatorMode.NORMAL).getLatex());
}