Use state machine to parse type assumptions

This commit is contained in:
Arne Keller 2021-07-05 12:21:55 +02:00
parent 93ec136970
commit df3f24548a
21 changed files with 574 additions and 269 deletions

View File

@ -3,8 +3,6 @@ package edu.kit.typicalc.model;
import edu.kit.typicalc.model.parser.ParseError; import edu.kit.typicalc.model.parser.ParseError;
import edu.kit.typicalc.util.Result; import edu.kit.typicalc.util.Result;
import java.util.Map;
/** /**
* This interface accepts user input and returns a type inference result. * This interface accepts user input and returns a type inference result.
*/ */
@ -18,5 +16,5 @@ public interface Model {
* @return either an error or a TypeInfererInterface on success * @return either an error or a TypeInfererInterface on success
*/ */
Result<TypeInfererInterface, ParseError> getTypeInferer(String lambdaTerm, Result<TypeInfererInterface, ParseError> getTypeInferer(String lambdaTerm,
Map<String, String> typeAssumptions); String typeAssumptions);
} }

View File

@ -27,7 +27,7 @@ public class ModelImpl implements Model {
*/ */
@Override @Override
public Result<TypeInfererInterface, ParseError> getTypeInferer(String lambdaTerm, public Result<TypeInfererInterface, ParseError> getTypeInferer(String lambdaTerm,
Map<String, String> typeAssumptions) { String typeAssumptions) {
// Parse Lambda Term // Parse Lambda Term
LambdaParser parser = new LambdaParser(lambdaTerm); LambdaParser parser = new LambdaParser(lambdaTerm);
Result<LambdaTerm, ParseError> result = parser.parse(); Result<LambdaTerm, ParseError> result = parser.parse();

View File

@ -66,7 +66,7 @@ public class LambdaLexer {
Token token = tokens.removeFirst(); Token token = tokens.removeFirst();
return new Result<>(token); return new Result<>(token);
} else { } else {
return new Result<>(new Token(TokenType.EOF, "", 0)); return new Result<>(new Token(TokenType.EOF, "", term, term.length()));
} }
} }
@ -76,7 +76,7 @@ public class LambdaLexer {
} }
if (pos >= term.length()) { if (pos >= term.length()) {
// term ended, return EOF // term ended, return EOF
return new Result<>(new Token(TokenType.EOF, "", pos)); return new Result<>(new Token(TokenType.EOF, "", term, pos));
} }
Token t; Token t;
char c = term.charAt(pos); char c = term.charAt(pos);
@ -84,7 +84,7 @@ public class LambdaLexer {
case '-': case '-':
if (pos + 1 < term.length()) { if (pos + 1 < term.length()) {
if (term.charAt(pos + 1) == '>') { if (term.charAt(pos + 1) == '>') {
t = new Token(TokenType.ARROW, "->", pos); t = new Token(TokenType.ARROW, "->", term, pos);
advance(); advance();
advance(); advance();
return new Result<>(t); return new Result<>(t);
@ -97,24 +97,37 @@ public class LambdaLexer {
} }
// bunch of single-character tokens // bunch of single-character tokens
case '.': case '.':
t = new Token(TokenType.DOT, ".", pos); t = new Token(TokenType.DOT, ".", term, pos);
advance();
return new Result<>(t);
case ',':
t = new Token(TokenType.COMMA, ",", term, pos);
advance();
return new Result<>(t);
case ':':
t = new Token(TokenType.COLON, ":", term, pos);
advance(); advance();
return new Result<>(t); return new Result<>(t);
case '(': case '(':
t = new Token(TokenType.LEFT_PARENTHESIS, "(", pos); t = new Token(TokenType.LEFT_PARENTHESIS, "(", term, pos);
advance(); advance();
return new Result<>(t); return new Result<>(t);
case ')': case ')':
t = new Token(TokenType.RIGHT_PARENTHESIS, ")", pos); t = new Token(TokenType.RIGHT_PARENTHESIS, ")", term, pos);
advance(); advance();
return new Result<>(t); return new Result<>(t);
case '=': case '=':
t = new Token(TokenType.EQUALS, "=", pos); t = new Token(TokenType.EQUALS, "=", term, pos);
advance(); advance();
return new Result<>(t); return new Result<>(t);
case '\\': case '\\':
case 'λ': case 'λ':
t = new Token(TokenType.LAMBDA, c + "", pos); t = new Token(TokenType.LAMBDA, Character.toString(c), term, pos);
advance();
return new Result<>(t);
case '!':
case '∀':
t = new Token(TokenType.UNIVERSAL_QUANTIFIER, Character.toString(c), term, pos);
advance(); advance();
return new Result<>(t); return new Result<>(t);
default: default:
@ -156,7 +169,7 @@ public class LambdaLexer {
type = TokenType.VARIABLE; type = TokenType.VARIABLE;
break; break;
} }
return new Result<>(new Token(type, sb.toString(), startPos)); return new Result<>(new Token(type, sb.toString(), term, startPos));
} else if (Character.isDigit(c)) { } else if (Character.isDigit(c)) {
int startPos = pos; int startPos = pos;
// number literal // number literal
@ -165,7 +178,7 @@ public class LambdaLexer {
sb.append(term.charAt(pos)); sb.append(term.charAt(pos));
advance(); advance();
} while (pos < term.length() && Character.isDigit(term.charAt(pos))); } while (pos < term.length() && Character.isDigit(term.charAt(pos)));
return new Result<>(new Token(TokenType.NUMBER, sb.toString(), startPos)); return new Result<>(new Token(TokenType.NUMBER, sb.toString(), term, startPos));
} else { } else {
return new Result<>(null, ParseError.UNEXPECTED_CHARACTER.withCharacter(c, pos, term)); return new Result<>(null, ParseError.UNEXPECTED_CHARACTER.withCharacter(c, pos, term));
} }

View File

@ -30,7 +30,7 @@ public class LambdaParser {
* When calling a parseX method, token is the first token of X * When calling a parseX method, token is the first token of X
* (as opposed to the last token of the previous construct). * (as opposed to the last token of the previous construct).
*/ */
private Token token = new Token(TokenType.EOF, "", -1); private Token token = new Token(TokenType.EOF, "", "", -1);
private static final Set<TokenType> ATOM_START_TOKENS private static final Set<TokenType> ATOM_START_TOKENS
= EnumSet.of(TokenType.VARIABLE, TokenType.NUMBER, TokenType.TRUE, = EnumSet.of(TokenType.VARIABLE, TokenType.NUMBER, TokenType.TRUE,
@ -69,7 +69,7 @@ public class LambdaParser {
TokenType current = token.getType(); TokenType current = token.getType();
Optional<ParseError> error = nextToken(); Optional<ParseError> error = nextToken();
if (current != type) { if (current != type) {
return Optional.of(ParseError.UNEXPECTED_TOKEN.withToken(lastToken, lexer.getTerm()).expectedType(type)); return Optional.of(ParseError.UNEXPECTED_TOKEN.withToken(lastToken).expectedType(type));
} }
return error; return error;
} }
@ -91,7 +91,7 @@ public class LambdaParser {
} }
return new Result<>(null, return new Result<>(null,
(last.getType() == TokenType.EOF ? ParseError.TOO_FEW_TOKENS : ParseError.UNEXPECTED_TOKEN) (last.getType() == TokenType.EOF ? ParseError.TOO_FEW_TOKENS : ParseError.UNEXPECTED_TOKEN)
.withToken(last, lexer.getTerm()) .withToken(last)
.expectedTypes(ATOM_START_TOKENS)); .expectedTypes(ATOM_START_TOKENS));
} }
@ -207,7 +207,7 @@ public class LambdaParser {
try { try {
n = Integer.parseInt(number); n = Integer.parseInt(number);
} catch (NumberFormatException e) { } catch (NumberFormatException e) {
return new Result<>(null, ParseError.UNEXPECTED_CHARACTER.withToken(token, lexer.getTerm())); return new Result<>(null, ParseError.UNEXPECTED_CHARACTER.withToken(token));
} }
error = nextToken(); error = nextToken();
if (error.isEmpty()) { if (error.isEmpty()) {

View File

@ -19,7 +19,10 @@ public enum ParseError {
/** /**
* some tokens were required, but not provided * some tokens were required, but not provided
*
* DEPRECATED: use UNEXPECTED_TOKEN with TokenType.EOF instead
*/ */
@Deprecated
TOO_FEW_TOKENS, TOO_FEW_TOKENS,
/** /**
@ -39,12 +42,11 @@ public enum ParseError {
* Attach a token to this error. * Attach a token to this error.
* *
* @param cause the token that caused the error * @param cause the token that caused the error
* @param term the term that is parsed
* @return this object * @return this object
*/ */
public ParseError withToken(Token cause, String term) { public ParseError withToken(Token cause) {
this.cause = Optional.of(cause); this.cause = Optional.of(cause);
this.term = term; this.term = cause.getSourceText();
this.position = cause.getPos(); this.position = cause.getPos();
return this; return this;
} }

View File

@ -3,7 +3,7 @@ package edu.kit.typicalc.model.parser;
import java.util.Objects; import java.util.Objects;
/** /**
* A token of the Prolog language. * A token of a lambda term or type assumption.
*/ */
public class Token { public class Token {
/** /**
@ -13,6 +13,7 @@ public class Token {
* EOF is a special token to indicate that the end of file is reached. * EOF is a special token to indicate that the end of file is reached.
*/ */
public enum TokenType { public enum TokenType {
UNIVERSAL_QUANTIFIER, // ∀ or exclamation mark
LAMBDA, // λ or a backslash LAMBDA, // λ or a backslash
VARIABLE, // [a-z][a-zA-Z0-9]* except "let" or "in" or constants VARIABLE, // [a-z][a-zA-Z0-9]* except "let" or "in" or constants
LET, // let LET, // let
@ -23,6 +24,8 @@ public class Token {
LEFT_PARENTHESIS, // ( LEFT_PARENTHESIS, // (
RIGHT_PARENTHESIS, // ) RIGHT_PARENTHESIS, // )
DOT, // . DOT, // .
COMMA, // ,
COLON, // :
EQUALS, // = EQUALS, // =
ARROW, // -> ARROW, // ->
EOF // pseudo token if end of input is reached EOF // pseudo token if end of input is reached
@ -36,6 +39,7 @@ public class Token {
* the text of this token in the source code * the text of this token in the source code
*/ */
private final String text; private final String text;
private final String sourceText;
private final int pos; private final int pos;
/** /**
@ -43,11 +47,13 @@ public class Token {
* *
* @param type the token type * @param type the token type
* @param text text of this token in the source code * @param text text of this token in the source code
* @param pos position this token begins * @param sourceText context of this token
* @param pos position this token begins in sourceText
*/ */
public Token(TokenType type, String text, int pos) { public Token(TokenType type, String text, String sourceText, int pos) {
this.type = type; this.type = type;
this.text = text; this.text = text;
this.sourceText = sourceText;
this.pos = pos; this.pos = pos;
} }
@ -69,6 +75,13 @@ public class Token {
return text; return text;
} }
/**
* @return the surrounding text of this token
*/
public String getSourceText() {
return sourceText;
}
/** /**
* Returns the position this token is in * Returns the position this token is in
* *
@ -92,11 +105,12 @@ public class Token {
return false; return false;
} }
Token token = (Token) o; Token token = (Token) o;
return pos == token.pos && type == token.type && Objects.equals(text, token.text); return pos == token.pos && type == token.type
&& Objects.equals(text, token.text) && sourceText.equals(token.sourceText);
} }
@Override @Override
public int hashCode() { public int hashCode() {
return Objects.hash(type, text, pos); return Objects.hash(type, text, sourceText, pos);
} }
} }

View File

@ -4,8 +4,6 @@ import edu.kit.typicalc.model.parser.Token.TokenType;
import edu.kit.typicalc.model.term.*; import edu.kit.typicalc.model.term.*;
import edu.kit.typicalc.model.type.*; import edu.kit.typicalc.model.type.*;
import edu.kit.typicalc.util.Result; import edu.kit.typicalc.util.Result;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import java.util.*; import java.util.*;
import java.util.regex.Matcher; import java.util.regex.Matcher;
@ -16,70 +14,39 @@ import java.util.regex.Pattern;
*/ */
public class TypeAssumptionParser { public class TypeAssumptionParser {
public static final Pattern TYPE_NAME_PATTERN = Pattern.compile("[a-zA-Z][a-zA-Z0-9]*");
private static final Pattern TYPE_VARIABLE_PATTERN = Pattern.compile("t(\\d+)"); private static final Pattern TYPE_VARIABLE_PATTERN = Pattern.compile("t(\\d+)");
private static final Set<TokenType> END_TOKENS
= EnumSet.of(TokenType.ARROW, TokenType.RIGHT_PARENTHESIS, TokenType.EOF);
/** /**
* Parse the given type assumptions. * Parse the given type assumptions.
* *
* @param assumptions the type assumptions * @param assumptions the type assumptions
* @return if successful, a map of the type assumptions, otherwise an error * @return if successful, a map of the type assumptions, otherwise an error
*/ */
public Result<Map<VarTerm, TypeAbstraction>, ParseError> parse(Map<String, String> assumptions) { public Result<Map<VarTerm, TypeAbstraction>, ParseError> parse(String assumptions) {
Map<VarTerm, TypeAbstraction> typeAssumptions = new LinkedHashMap<>(); ParserState<Map<VarTerm, TypeAbstraction>> state = new InitialState(new LinkedHashMap<>());
for (Map.Entry<String, String> entry : assumptions.entrySet()) { LambdaLexer lexer = new LambdaLexer(cleanAssumptionText(assumptions));
String typeName = entry.getKey(); Optional<Token> extraToken = Optional.empty();
if (!TYPE_NAME_PATTERN.matcher(typeName).matches()) { while (true) {
return new Result<>(null, ParseError.UNEXPECTED_CHARACTER); Token token1;
} if (extraToken.isPresent()) {
VarTerm var = new VarTerm(typeName); token1 = extraToken.get();
Result<TypeAbstraction, ParseError> typeAbs = parseTypeDefinition(entry.getValue()); } else {
if (typeAbs.isError()) { Result<Token, ParseError> token = lexer.nextToken();
return new Result<>(typeAbs); if (token.isError()) {
} return token.castError();
typeAssumptions.put(var, typeAbs.unwrap());
}
return new Result<>(typeAssumptions);
}
/**
* Parse a type definition that may use the all quantor.
* @param definition type definition
* @return parsed type
*/
public Result<TypeAbstraction, ParseError> parseTypeDefinition(String definition) {
definition = cleanAssumptionText(definition);
Set<TypeVariable> allQuantified = new HashSet<>();
if (definition.startsWith("∀")) { String[] parts = definition.split("\\.");
String[] parts = definition.split("\\.");
if (parts.length < 2) {
int colonIndex = definition.indexOf(':');
if (colonIndex >= 0) {
return new Result<>(null, ParseError.UNEXPECTED_CHARACTER.withCharacter(
':', colonIndex, definition).expectedCharacter('.')
);
} }
return new Result<>(null, ParseError.TOO_FEW_TOKENS.expectedType(TokenType.DOT)); token1 = token.unwrap();
} else if (parts.length > 2) {
return new Result<>(null, ParseError.UNEXPECTED_CHARACTER.withCharacter(
'.', parts[0].length() + 1 + parts[1].length(), definition));
} }
for (String quantified : parts[0].substring(1).split(",")) { ParserResult<Map<VarTerm, TypeAbstraction>> result = state.handle(token1);
quantified = quantified.trim(); if (result.isResult()) {
if (!quantified.matches("t\\d+")) { return new Result<>(result.getResult());
return new Result<>(null, ParseError.UNEXPECTED_TOKEN.withToken( } else if (result.isError()) {
new Token(TokenType.VARIABLE, quantified, parts[0].indexOf(quantified)), parts[0] return new Result<>(null, result.getError());
)); } else {
} state = result.getState();
int i = Integer.parseInt(quantified.substring(1)); extraToken = result.extraToken();
allQuantified.add(new TypeVariable(TypeVariableKind.USER_INPUT, i));
} }
definition = parts[1];
} }
return parseType(definition, allQuantified);
} }
private String cleanAssumptionText(String text) { private String cleanAssumptionText(String text) {
@ -96,89 +63,403 @@ public class TypeAssumptionParser {
.replace('τ', 't'); .replace('τ', 't');
} }
private Result<TypeAbstraction, ParseError> parseType(String type, Set<TypeVariable> allQuantified) { private static class ParserResult<T> {
// this parser is using the same lexer as the LambdaParser (to avoid types named "let" etc.) private Optional<ParserState<T>> newState = Optional.empty();
LambdaLexer lexer = new LambdaLexer(type); private Optional<ParseError> error = Optional.empty();
Result<Pair<Type, Integer>, ParseError> parsedType = parseType(lexer, 0); private Optional<T> result = Optional.empty();
if (parsedType.isError()) { private Optional<Token> extraToken = Optional.empty();
return new Result<>(null, parsedType.unwrapError());
ParserResult(ParseError e) {
this.error = Optional.of(e);
}
ParserResult(ParserState<T> state) {
this.newState = Optional.of(state);
}
ParserResult(T result) {
this.result = Optional.of(result);
}
boolean isTransition() {
return newState.isPresent();
}
ParserState<T> getState() {
return newState.get();
}
boolean isError() {
return error.isPresent();
}
ParseError getError() {
return error.get();
}
<U> ParserResult<U> castError() {
return new ParserResult<>(error.get());
}
boolean isResult() {
return result.isPresent();
}
T getResult() {
return result.get();
}
ParserResult<T> attachToken(Token t) {
this.extraToken = Optional.of(t);
return this;
}
ParserResult<T> copyToken(ParserResult<?> other) {
this.extraToken = other.extraToken;
return this;
}
Optional<Token> extraToken() {
return this.extraToken;
} }
return new Result<>(new TypeAbstraction(parsedType.unwrap().getLeft(), allQuantified));
} }
private Result<Pair<Type, Integer>, ParseError> parseType(LambdaLexer lexer, int parenCount) { private interface ParserState<T> {
Type type = null; ParserResult<T> handle(Token t);
int removedParens = 0; }
while (true) {
Result<Token, ParseError> token = lexer.nextToken(); private static class InitialState implements ParserState<Map<VarTerm, TypeAbstraction>> {
if (token.isError()) { private Map<VarTerm, TypeAbstraction> alreadyParsed = new LinkedHashMap<>();
return new Result<>(token);
} InitialState(Map<VarTerm, TypeAbstraction> alreadyParsed) {
Token t = token.unwrap(); this.alreadyParsed = alreadyParsed;
Result<Type, ParseError> typeResult = null; }
@Override
public ParserResult<Map<VarTerm, TypeAbstraction>> handle(Token t) {
switch (t.getType()) { switch (t.getType()) {
case LEFT_PARENTHESIS:
// recursive call, set number of open parentheses to 1
Result<Pair<Type, Integer>, ParseError> type2 = parseType(lexer, 1);
typeResult = type2.map(Pair::getLeft);
removedParens += type2.map(Pair::getRight).unwrapOr(1) - 1;
break;
case VARIABLE: case VARIABLE:
// named type or type variable return new ParserResult<>(new ExpectingColon(alreadyParsed, new VarTerm(t.getText())));
type = parseLiteral(t.getText());
break;
case RIGHT_PARENTHESIS:
removedParens += 1;
break;
case ARROW:
if (type == null) {
// there was no type in front of the arrow
return new Result<>(null, ParseError.UNEXPECTED_TOKEN.withToken(t, ""));
}
// recursive call, keep open parentheses count
Result<Pair<Type, Integer>, ParseError> nextType = parseType(lexer, parenCount);
final Type left = type;
typeResult = nextType.map(Pair::getLeft).map(right -> new FunctionType(left, right));
removedParens += nextType.map(Pair::getRight).unwrapOr(0);
break;
case EOF: case EOF:
break; return new ParserResult<>(alreadyParsed);
default: default:
return new Result<>(null, ParseError.UNEXPECTED_TOKEN.withToken(t, "")); return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t));
}
// update type based on Result
if (typeResult != null && typeResult.isError()) {
return new Result<>(typeResult);
}
type = typeResult != null ? typeResult.unwrap() : type;
// after fully processing one token / a type in parenthesis,
// some type should have been parsed
if (type == null) {
return new Result<>(null, ParseError.TOO_FEW_TOKENS);
}
if (parenCount - removedParens < 0) {
// too many closing parenthesis
return new Result<>(null, ParseError.UNEXPECTED_TOKEN.withToken(t, ""));
} else if (END_TOKENS.contains(t.getType())) {
// potential end of type
if (parenCount - removedParens == 0) {
// opening and closing parentheses match
return new Result<>(new ImmutablePair<>(type, removedParens));
} else {
// missing closing parenthesis
return new Result<>(null, ParseError.TOO_FEW_TOKENS);
}
} }
} }
} }
private Type parseLiteral(String text) { private static class ExpectingColon implements ParserState<Map<VarTerm, TypeAbstraction>> {
Matcher typeVariableMatcher = TYPE_VARIABLE_PATTERN.matcher(text); private Map<VarTerm, TypeAbstraction> alreadyParsed;
if (typeVariableMatcher.matches()) { private final VarTerm var;
int typeVariableIndex = Integer.parseInt(typeVariableMatcher.group(1)); ExpectingColon(Map<VarTerm, TypeAbstraction> alreadyParsed, VarTerm var) {
return new TypeVariable(TypeVariableKind.USER_INPUT, typeVariableIndex); this.alreadyParsed = alreadyParsed;
} else { this.var = var;
return new NamedType(text); }
@Override
public ParserResult<Map<VarTerm, TypeAbstraction>> handle(Token t) {
switch (t.getType()) {
case COLON:
return new ParserResult<>(new ExpectingTypeDef(alreadyParsed, var));
default:
return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t));
}
}
}
private static class ExpectingTypeDef implements ParserState<Map<VarTerm, TypeAbstraction>> {
private final Map<VarTerm, TypeAbstraction> alreadyParsed;
private final Set<TypeVariable> typeVariables;
private final VarTerm var;
private Optional<ParserState<Type>> state = Optional.empty();
ExpectingTypeDef(Map<VarTerm, TypeAbstraction> alreadyParsed, VarTerm var) {
this.alreadyParsed = alreadyParsed;
this.typeVariables = new TreeSet<>();
this.var = var;
}
ExpectingTypeDef(Map<VarTerm, TypeAbstraction> alreadyParsed, Set<TypeVariable> typeVariables, VarTerm var) {
this.alreadyParsed = alreadyParsed;
this.typeVariables = typeVariables;
this.var = var;
}
@Override
public ParserResult<Map<VarTerm, TypeAbstraction>> handle(Token t) {
switch (t.getType()) {
case UNIVERSAL_QUANTIFIER:
if (typeVariables.isEmpty()) {
return new ParserResult<>(new ExpectingTypeVariables(alreadyParsed, var));
} else {
return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t));
}
default:
if (state.isPresent()) {
ParserState<Type> status = state.get();
// already parsing type
ParserResult<Type> result = status.handle(t);
if (result.isResult()) {
alreadyParsed.put(var, new TypeAbstraction(result.getResult(), typeVariables));
return new ParserResult<>(new ExpectingCommaBeforeNextType(alreadyParsed))
.copyToken(result);
} else if (result.isError()) {
return result.castError();
} else {
state = Optional.of(result.getState());
return new ParserResult<>(this);
}
}
// attempt to parse as type
ParserResult<Type> result = new ParseTypeState1().handle(t);
if (result.isError()) {
return result.castError();
}
state = Optional.of(result.getState());
return new ParserResult<>(this);
}
}
}
private static class ExpectingCommaBeforeNextType implements ParserState<Map<VarTerm, TypeAbstraction>> {
private final Map<VarTerm, TypeAbstraction> alreadyParsed;
ExpectingCommaBeforeNextType(Map<VarTerm, TypeAbstraction> alreadyParsed) {
this.alreadyParsed = alreadyParsed;
}
@Override
public ParserResult<Map<VarTerm, TypeAbstraction>> handle(Token t) {
switch (t.getType()) {
case EOF:
return new ParserResult<>(alreadyParsed);
case COMMA:
return new ParserResult<>(new InitialState(alreadyParsed));
default:
return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t));
}
}
}
/**
* Main type parsing state.
*/
private static class ParseTypeState1 implements ParserState<Type> {
private Optional<Type> parsedType = Optional.empty();
private Optional<ParserState<Type>> state = Optional.empty();
private Optional<ParserState<Type>> stateParenthesis = Optional.empty();
private int parenthesisInitial = 0;
private int openParens = 0;
@Override
public ParserResult<Type> handle(Token t) {
switch (t.getType()) {
case VARIABLE:
if (state.isPresent()) {
return handleInner(t);
}
if (stateParenthesis.isPresent()) {
return handleInnerParenthesis(t);
}
if (parsedType.isPresent()) {
return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t));
}
Type type = parseLiteral(t.getText());
// try parsing function type (see below)
this.parsedType = Optional.of(type);
return new ParserResult<>(this);
case ARROW:
if (state.isPresent()) {
return handleInner(t);
}
if (stateParenthesis.isPresent()) {
return handleInnerParenthesis(t);
}
if (parsedType.isEmpty()) {
return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t));
}
// parse function type
state = Optional.of(new ParseTypeStateExpectArrow().handle(t).getState());
return new ParserResult<>(this);
case LEFT_PARENTHESIS:
openParens += 1;
if (state.isPresent()) {
return handleInner(t);
}
if (stateParenthesis.isPresent()) {
return handleInnerParenthesis(t);
}
stateParenthesis = Optional.of(new ParseTypeState1());
parenthesisInitial = openParens - 1;
return new ParserResult<>(this);
case RIGHT_PARENTHESIS:
openParens -= 1;
if (state.isPresent()) {
return handleInner(t);
}
if (stateParenthesis.isPresent()) {
if (openParens == parenthesisInitial) {
// inner state is done parsing
ParserResult<Type> result = handleInnerParenthesis(
new Token(TokenType.EOF, "", "", -1));
if (result.isError()) {
return result.castError();
} else {
parsedType = Optional.of(result.getResult());
stateParenthesis = Optional.empty();
}
} else {
return handleInnerParenthesis(t);
}
}
if (parsedType.isPresent()) {
return new ParserResult<>(this); // parenthesized part may be start of function
}
return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t));
case COMMA:
case EOF:
if (state.isPresent()) {
return handleInner(t).attachToken(t);
}
if (stateParenthesis.isPresent() && openParens == parenthesisInitial) {
return handleInnerParenthesis(t).attachToken(t);
}
if (parsedType.isPresent() && openParens == parenthesisInitial) {
return new ParserResult<>(parsedType.get()).attachToken(t);
}
return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t));
default:
if (state.isPresent()) {
return handleInner(t);
}
return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t));
}
}
private ParserResult<Type> handleInner(Token t) {
ParserState<Type> status = state.get();
// already parsing right type of function
ParserResult<Type> result = status.handle(t);
if (result.isResult()) {
return new ParserResult<Type>(new FunctionType(parsedType.get(), result.getResult())).copyToken(result);
} else if (result.isError()) {
return result.castError();
} else {
state = Optional.of(result.getState());
return new ParserResult<>(this);
}
}
private ParserResult<Type> handleInnerParenthesis(Token t) {
ParserState<Type> status = stateParenthesis.get();
// already parsing right type of function
ParserResult<Type> result = status.handle(t);
if (result.isResult()) {
return new ParserResult<>(result.getResult()).copyToken(result);
} else if (result.isError()) {
return result.castError();
} else {
stateParenthesis = Optional.of(result.getState());
return new ParserResult<>(this);
}
}
private static Type parseLiteral(String text) {
Matcher typeVariableMatcher = TYPE_VARIABLE_PATTERN.matcher(text);
if (typeVariableMatcher.matches()) {
int typeVariableIndex = Integer.parseInt(typeVariableMatcher.group(1));
return new TypeVariable(TypeVariableKind.USER_INPUT, typeVariableIndex);
} else {
return new NamedType(text);
}
}
}
private static class ParseTypeStateExpectArrow implements ParserState<Type> {
private Optional<ParserState<Type>> state = Optional.empty();
@Override
public ParserResult<Type> handle(Token t) {
switch (t.getType()) {
case ARROW:
if (state.isEmpty()) {
// try parsing remainder as type
state = Optional.of(new ParseTypeState1());
return new ParserResult<>(this);
}
default:
if (state.isPresent()) {
ParserState<Type> status = state.get();
// already parsing type
ParserResult<Type> result = status.handle(t);
if (result.isResult()) {
return result;
} else if (result.isError()) {
return result.castError();
} else {
state = Optional.of(result.getState());
return new ParserResult<>(this);
}
} else {
return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t));
}
}
}
}
private static class ExpectingTypeVariables implements ParserState<Map<VarTerm, TypeAbstraction>> {
private final Map<VarTerm, TypeAbstraction> alreadyParsed;
private final VarTerm var;
private final Set<TypeVariable> variables = new TreeSet<>();
private boolean expectCommaOrDot = false;
ExpectingTypeVariables(Map<VarTerm, TypeAbstraction> alreadyParsed, VarTerm var) {
this.alreadyParsed = alreadyParsed;
this.var = var;
}
@Override
public ParserResult<Map<VarTerm, TypeAbstraction>> handle(Token t) {
switch (t.getType()) {
case VARIABLE:
if (expectCommaOrDot) {
return new ParserResult<>(ParseError.UNEXPECTED_TOKEN
.withToken(t)
.expectedTypes(List.of(TokenType.COMMA, Token.TokenType.DOT)));
}
String input = t.getText();
if (!TYPE_VARIABLE_PATTERN.matcher(input).matches()) {
return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t));
// TODO: somehow convey the expected format
}
int i = Integer.parseInt(input.substring(1));
variables.add(new TypeVariable(TypeVariableKind.USER_INPUT, i));
expectCommaOrDot = true;
return new ParserResult<>(this);
case COMMA:
if (expectCommaOrDot) {
expectCommaOrDot = false;
return new ParserResult<>(this);
} else {
return new ParserResult<>(ParseError.UNEXPECTED_TOKEN
.withToken(t)
.expectedType(TokenType.VARIABLE));
}
case DOT:
if (expectCommaOrDot) {
// list of type variables is complete
// parse actual type
return new ParserResult<>(new ExpectingTypeDef(alreadyParsed, variables, var));
} else {
return new ParserResult<>(ParseError.UNEXPECTED_TOKEN
.withToken(t)
.expectedType(TokenType.VARIABLE));
}
default:
if (expectCommaOrDot) {
return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t)
.expectedTypes(List.of(TokenType.COMMA, TokenType.DOT)));
}
return new ParserResult<>(ParseError.UNEXPECTED_TOKEN.withToken(t));
}
} }
} }
} }

View File

@ -1,7 +1,5 @@
package edu.kit.typicalc.presenter; package edu.kit.typicalc.presenter;
import java.util.Map;
import com.vaadin.flow.component.UI; import com.vaadin.flow.component.UI;
import edu.kit.typicalc.model.Model; import edu.kit.typicalc.model.Model;
import edu.kit.typicalc.model.TypeInfererInterface; import edu.kit.typicalc.model.TypeInfererInterface;
@ -32,7 +30,7 @@ public class Presenter implements MainViewListener {
} }
@Override @Override
public void typeInferLambdaString(String lambdaTerm, Map<String, String> typeAssumptions) { public void typeInferLambdaString(String lambdaTerm, String typeAssumptions) {
Result<TypeInfererInterface, ParseError> result = model.getTypeInferer(lambdaTerm, typeAssumptions); Result<TypeInfererInterface, ParseError> result = model.getTypeInferer(lambdaTerm, typeAssumptions);
if (result.isError()) { if (result.isError()) {
view.displayError(result.unwrapError()); view.displayError(result.unwrapError());

View File

@ -116,6 +116,10 @@ public class Result<T, E> {
return new Result<>(value != null ? mapping.apply(value) : null, error); return new Result<>(value != null ? mapping.apply(value) : null, error);
} }
public <U> Result<U, E> castError() {
return new Result<>(null, this.error);
}
@Override @Override
public String toString() { public String toString() {
if (isOk()) { if (isOk()) {

View File

@ -120,11 +120,8 @@ public class InputBar extends HorizontalLayout implements LocaleChangeObserver {
* *
* @param typeAssumptions the type assumptions as a map * @param typeAssumptions the type assumptions as a map
*/ */
protected void setTypeAssumptions(Map<String, String> typeAssumptions) { protected void setTypeAssumptions(String typeAssumptions) {
assumptionInputField.setValue( assumptionInputField.setValue(typeAssumptions);
typeAssumptions.entrySet().stream()
.map(entry -> entry.getKey().trim() + ": " + entry.getValue().trim())
.collect(Collectors.joining("; ")));
} }
protected void setTermAndClickTypeInfer(String term) { protected void setTermAndClickTypeInfer(String term) {

View File

@ -3,8 +3,6 @@ package edu.kit.typicalc.view.main;
import edu.kit.typicalc.model.TypeInfererInterface; import edu.kit.typicalc.model.TypeInfererInterface;
import edu.kit.typicalc.model.parser.ParseError; import edu.kit.typicalc.model.parser.ParseError;
import java.util.Map;
/** /**
* Provides an interface for the presenter to interact with the view. The interaction consists of * Provides an interface for the presenter to interact with the view. The interaction consists of
* either passing a TypeInfererInterface or a ParseError to the view. * either passing a TypeInfererInterface or a ParseError to the view.
@ -37,6 +35,6 @@ public interface MainView {
* @param lambdaTerm the lambda term to type-infer * @param lambdaTerm the lambda term to type-infer
* @param typeAssumptions type assumptions to use during the calculation * @param typeAssumptions type assumptions to use during the calculation
*/ */
void typeInferLambdaString(String lambdaTerm, Map<String, String> typeAssumptions); void typeInferLambdaString(String lambdaTerm, String typeAssumptions);
} }
} }

View File

@ -74,14 +74,13 @@ public class MainViewImpl extends AppLayout
if (url.getPath().matches(TypeInferenceView.ROUTE + "/.*")) { if (url.getPath().matches(TypeInferenceView.ROUTE + "/.*")) {
List<String> segments = url.getSegments(); List<String> segments = url.getSegments();
String term = segments.get(segments.size() - 1); String term = segments.get(segments.size() - 1);
Map<String, String> types = url.getQueryParameters().getParameters().entrySet().stream().map(entry -> String types = url.getQueryParameters().getParameters()
Pair.of(entry.getKey(), entry.getValue().get(0)) .entrySet().stream().map(kv -> kv.getKey() + ": " + kv.getValue().get(0))
).collect(Collectors.toMap(Pair::getLeft, Pair::getRight, .collect(Collectors.joining(", "));
(existing, replacement) -> existing, LinkedHashMap::new));
upperBar.inferTerm(decodeURL(term), types); upperBar.inferTerm(decodeURL(term), types);
} else if (url.getPath().equals(TypeInferenceView.ROUTE)) { } else if (url.getPath().equals(TypeInferenceView.ROUTE)) {
setContent(new StartPageView()); setContent(new StartPageView());
upperBar.inferTerm(StringUtils.EMPTY, Collections.emptyMap()); upperBar.inferTerm(StringUtils.EMPTY, "");
} else if (url.getPath().equals(StringUtils.EMPTY)) { } else if (url.getPath().equals(StringUtils.EMPTY)) {
setContent(new StartPageView()); setContent(new StartPageView());
} }

View File

@ -94,7 +94,7 @@ public class UpperBar extends VerticalLayout implements LocaleChangeObserver {
inputConsumer.accept(termAndAssumptions); inputConsumer.accept(termAndAssumptions);
} }
private void startInfer(String term, Map<String, String> typeAssumptions) { private void startInfer(String term, String typeAssumptions) {
presenter.typeInferLambdaString(term, typeAssumptions); presenter.typeInferLambdaString(term, typeAssumptions);
} }
@ -104,7 +104,7 @@ public class UpperBar extends VerticalLayout implements LocaleChangeObserver {
* @param term the provided string * @param term the provided string
* @param typeAssumptions type assumptions to use * @param typeAssumptions type assumptions to use
*/ */
protected void inferTerm(String term, Map<String, String> typeAssumptions) { protected void inferTerm(String term, String typeAssumptions) {
inputBar.setTypeAssumptions(typeAssumptions); inputBar.setTypeAssumptions(typeAssumptions);
inputBar.setTerm(term); inputBar.setTerm(term);
startInfer(term, typeAssumptions); startInfer(term, typeAssumptions);

View File

@ -23,14 +23,10 @@ class ModelImplTest {
@Test @Test
void getTypeInferer() { void getTypeInferer() {
ModelImpl model = new ModelImpl(); ModelImpl model = new ModelImpl();
Map<String, String> map = new LinkedHashMap<>(); assertTrue(model.getTypeInferer("test.x.x.test", "a.x: 3.int").isError());
map.put("x", "int"); assertTrue(model.getTypeInferer("x", "a.x: 3.int").isError());
Map<String, String> map2 = new LinkedHashMap<>();
map2.put("a.x", "3.int");
assertTrue(model.getTypeInferer("test.x.x.test", map2).isError());
assertTrue(model.getTypeInferer("x", map2).isError());
Result<TypeInfererInterface, ParseError> result = model.getTypeInferer("λy.x", map); Result<TypeInfererInterface, ParseError> result = model.getTypeInferer("λy.x", "x: int");
VarTerm y = new VarTerm("y"); VarTerm y = new VarTerm("y");
VarTerm x = new VarTerm("x"); VarTerm x = new VarTerm("x");
LambdaTerm term = new AbsTerm(y, x); LambdaTerm term = new AbsTerm(y, x);
@ -66,10 +62,9 @@ class ModelImplTest {
@Test @Test
void quantifiedTypeAssumption() { void quantifiedTypeAssumption() {
ModelImpl model = new ModelImpl(); ModelImpl model = new ModelImpl();
Map<String, String> assumptions = new HashMap<>();
assumptions.put("id", "∀ t1 . t1 -> t1");
Result<TypeInfererInterface, ParseError> result = model.getTypeInferer("(id id) (id true)", assumptions); Result<TypeInfererInterface, ParseError> result = model.getTypeInferer("(id id) (id true)",
"id: ∀ t1 . t1 -> t1");
if (result.isError()) { if (result.isError()) {
System.out.println(result.unwrapError()); System.out.println(result.unwrapError());
fail(); fail();
@ -93,7 +88,7 @@ class ModelImplTest {
@Test @Test
void letTermTypeAssumptions() { void letTermTypeAssumptions() {
Model model = new ModelImpl(); Model model = new ModelImpl();
TypeInfererInterface typer = model.getTypeInferer("(let g = λx.x in g) g", new HashMap<>()).unwrap(); TypeInfererInterface typer = model.getTypeInferer("(let g = λx.x in g) g", "").unwrap();
AppStep first = (AppStep) typer.getFirstInferenceStep(); AppStep first = (AppStep) typer.getFirstInferenceStep();
LetStep second = (LetStep) first.getPremise1(); LetStep second = (LetStep) first.getPremise1();
VarStep third = (VarStep) second.getPremise(); VarStep third = (VarStep) second.getPremise();

View File

@ -28,7 +28,7 @@ public class LambdaParserFuzzTest {
@Fuzz @Fuzz
public void testInference(@From(LambdaTermGenerator.class) String term) { public void testInference(@From(LambdaTermGenerator.class) String term) {
Model model = new ModelImpl(); Model model = new ModelImpl();
Result<TypeInfererInterface, ParseError> typer = model.getTypeInferer(term, new HashMap<>()); Result<TypeInfererInterface, ParseError> typer = model.getTypeInferer(term, "");
InferenceStep first = typer.unwrap().getFirstInferenceStep(); InferenceStep first = typer.unwrap().getFirstInferenceStep();
} }

View File

@ -98,7 +98,7 @@ class LambdaParserTest {
parser = new LambdaParser("x)"); parser = new LambdaParser("x)");
ParseError error = parser.parse().unwrapError(); ParseError error = parser.parse().unwrapError();
assertEquals(ParseError.UNEXPECTED_TOKEN, error); assertEquals(ParseError.UNEXPECTED_TOKEN, error);
assertEquals(new Token(TokenType.RIGHT_PARENTHESIS, ")", 1), error.getCause().get()); assertEquals(new Token(TokenType.RIGHT_PARENTHESIS, ")", "x)", 1), error.getCause().get());
parser = new LambdaParser("??"); parser = new LambdaParser("??");
assertEquals(ParseError.UNEXPECTED_CHARACTER, parser.parse().unwrapError()); assertEquals(ParseError.UNEXPECTED_CHARACTER, parser.parse().unwrapError());
parser = new LambdaParser(""); parser = new LambdaParser("");
@ -110,39 +110,40 @@ class LambdaParserTest {
parser = new LambdaParser("x 123333333333333"); parser = new LambdaParser("x 123333333333333");
error = parser.parse().unwrapError(); error = parser.parse().unwrapError();
assertEquals(ParseError.UNEXPECTED_CHARACTER, error); assertEquals(ParseError.UNEXPECTED_CHARACTER, error);
assertEquals(new Token(TokenType.NUMBER, "123333333333333", 2), error.getCause().get()); assertEquals(new Token(TokenType.NUMBER, "123333333333333", "x 123333333333333", 2),
error.getCause().get());
parser = new LambdaParser("λ)"); parser = new LambdaParser("λ)");
error = parser.parse().unwrapError(); error = parser.parse().unwrapError();
assertEquals(ParseError.UNEXPECTED_TOKEN, error); assertEquals(ParseError.UNEXPECTED_TOKEN, error);
assertEquals(new Token(TokenType.RIGHT_PARENTHESIS, ")", 1), error.getCause().get()); assertEquals(new Token(TokenType.RIGHT_PARENTHESIS, ")", "λ)", 1), error.getCause().get());
parser = new LambdaParser("λx="); parser = new LambdaParser("λx=");
error = parser.parse().unwrapError(); error = parser.parse().unwrapError();
assertEquals(ParseError.UNEXPECTED_TOKEN, error); assertEquals(ParseError.UNEXPECTED_TOKEN, error);
assertEquals(new Token(TokenType.EQUALS, "=", 2), error.getCause().get()); assertEquals(new Token(TokenType.EQUALS, "=", "λx=", 2), error.getCause().get());
parser = new LambdaParser("λx.."); parser = new LambdaParser("λx..");
error = parser.parse().unwrapError(); error = parser.parse().unwrapError();
assertEquals(ParseError.UNEXPECTED_TOKEN, error); assertEquals(ParseError.UNEXPECTED_TOKEN, error);
assertEquals(new Token(TokenType.DOT, ".", 3), error.getCause().get()); assertEquals(new Token(TokenType.DOT, ".", "λx..", 3), error.getCause().get());
parser = new LambdaParser("let ) ="); parser = new LambdaParser("let ) =");
error = parser.parse().unwrapError(); error = parser.parse().unwrapError();
assertEquals(ParseError.UNEXPECTED_TOKEN, error); assertEquals(ParseError.UNEXPECTED_TOKEN, error);
assertEquals(new Token(TokenType.RIGHT_PARENTHESIS, ")", 4), error.getCause().get()); assertEquals(new Token(TokenType.RIGHT_PARENTHESIS, ")", "let ) =", 4), error.getCause().get());
parser = new LambdaParser("let x ."); parser = new LambdaParser("let x .");
error = parser.parse().unwrapError(); error = parser.parse().unwrapError();
assertEquals(ParseError.UNEXPECTED_TOKEN, error); assertEquals(ParseError.UNEXPECTED_TOKEN, error);
assertEquals(new Token(TokenType.DOT, ".", 6), error.getCause().get()); assertEquals(new Token(TokenType.DOT, ".", "let x .", 6), error.getCause().get());
parser = new LambdaParser("let x = )"); parser = new LambdaParser("let x = )");
error = parser.parse().unwrapError(); error = parser.parse().unwrapError();
assertEquals(ParseError.UNEXPECTED_TOKEN, error); assertEquals(ParseError.UNEXPECTED_TOKEN, error);
assertEquals(new Token(TokenType.RIGHT_PARENTHESIS, ")", 8), error.getCause().get()); assertEquals(new Token(TokenType.RIGHT_PARENTHESIS, ")", "let x = )",8), error.getCause().get());
parser = new LambdaParser("let x = y )"); parser = new LambdaParser("let x = y )");
error = parser.parse().unwrapError(); error = parser.parse().unwrapError();
assertEquals(ParseError.UNEXPECTED_TOKEN, error); assertEquals(ParseError.UNEXPECTED_TOKEN, error);
assertEquals(new Token(TokenType.RIGHT_PARENTHESIS, ")", 10), error.getCause().get()); assertEquals(new Token(TokenType.RIGHT_PARENTHESIS, ")", "let x = y )", 10), error.getCause().get());
parser = new LambdaParser("let x = y in )"); parser = new LambdaParser("let x = y in )");
error = parser.parse().unwrapError(); error = parser.parse().unwrapError();
assertEquals(ParseError.UNEXPECTED_TOKEN, error); assertEquals(ParseError.UNEXPECTED_TOKEN, error);
assertEquals(new Token(TokenType.RIGHT_PARENTHESIS, ")", 13), error.getCause().get()); assertEquals(new Token(TokenType.RIGHT_PARENTHESIS, ")", "let x = y in )", 13), error.getCause().get());
} }
@Test @Test

View File

@ -6,6 +6,7 @@ import edu.kit.typicalc.util.Result;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import java.util.*; import java.util.*;
import java.util.stream.Collectors;
import static edu.kit.typicalc.model.type.NamedType.BOOLEAN; import static edu.kit.typicalc.model.type.NamedType.BOOLEAN;
import static edu.kit.typicalc.model.type.NamedType.INT; import static edu.kit.typicalc.model.type.NamedType.INT;
@ -16,9 +17,7 @@ class TypeAssumptionParserTest {
@Test @Test
void simpleType() { void simpleType() {
TypeAssumptionParser parser = new TypeAssumptionParser(); TypeAssumptionParser parser = new TypeAssumptionParser();
Map<String, String> assumptions = new LinkedHashMap<>(); Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("a: int");
assumptions.put("a", "int");
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(assumptions);
assertTrue(type.isOk()); assertTrue(type.isOk());
Map<VarTerm, TypeAbstraction> types = type.unwrap(); Map<VarTerm, TypeAbstraction> types = type.unwrap();
assertEquals(1, types.size()); assertEquals(1, types.size());
@ -30,9 +29,7 @@ class TypeAssumptionParserTest {
@Test @Test
void typeVariablesOneDigitIndex() { void typeVariablesOneDigitIndex() {
TypeAssumptionParser parser = new TypeAssumptionParser(); TypeAssumptionParser parser = new TypeAssumptionParser();
Map<String, String> assumptions = new LinkedHashMap<>(); Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("x: t1");
assumptions.put("x", "t1");
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(assumptions);
assertTrue(type.isOk()); assertTrue(type.isOk());
Map<VarTerm, TypeAbstraction> types = type.unwrap(); Map<VarTerm, TypeAbstraction> types = type.unwrap();
assertEquals(1, types.size()); assertEquals(1, types.size());
@ -40,23 +37,19 @@ class TypeAssumptionParserTest {
assertEquals(new VarTerm("x"), assumption.getKey()); assertEquals(new VarTerm("x"), assumption.getKey());
assertEquals(new TypeAbstraction(new TypeVariable(TypeVariableKind.USER_INPUT, 1)), assumption.getValue()); assertEquals(new TypeAbstraction(new TypeVariable(TypeVariableKind.USER_INPUT, 1)), assumption.getValue());
Map<String, String> assumptions2 = new LinkedHashMap<>(); Result<Map<VarTerm, TypeAbstraction>, ParseError> type2 = parser.parse("x: t009");
assumptions2.put("x", "t001");
Result<Map<VarTerm, TypeAbstraction>, ParseError> type2 = parser.parse(assumptions2);
assertTrue(type.isOk()); assertTrue(type.isOk());
Map<VarTerm, TypeAbstraction> types2 = type2.unwrap(); Map<VarTerm, TypeAbstraction> types2 = type2.unwrap();
assertEquals(1, types2.size()); assertEquals(1, types2.size());
Map.Entry<VarTerm, TypeAbstraction> assumption2 = types2.entrySet().stream().findFirst().get(); Map.Entry<VarTerm, TypeAbstraction> assumption2 = types2.entrySet().stream().findFirst().get();
assertEquals(new VarTerm("x"), assumption2.getKey()); assertEquals(new VarTerm("x"), assumption2.getKey());
assertEquals(new TypeAbstraction(new TypeVariable(TypeVariableKind.USER_INPUT, 1)), assumption2.getValue()); assertEquals(new TypeAbstraction(new TypeVariable(TypeVariableKind.USER_INPUT, 9)), assumption2.getValue());
} }
@Test @Test
void typeVariablesMultipleDigitIndex() { void typeVariablesMultipleDigitIndex() {
TypeAssumptionParser parser = new TypeAssumptionParser(); TypeAssumptionParser parser = new TypeAssumptionParser();
Map<String, String> assumptions = new LinkedHashMap<>(); Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("x: t123456");
assumptions.put("x", "t123456");
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(assumptions);
assertTrue(type.isOk()); assertTrue(type.isOk());
Map<VarTerm, TypeAbstraction> types = type.unwrap(); Map<VarTerm, TypeAbstraction> types = type.unwrap();
assertEquals(1, types.size()); assertEquals(1, types.size());
@ -68,9 +61,7 @@ class TypeAssumptionParserTest {
@Test @Test
void namedTypeStartingWithT() { void namedTypeStartingWithT() {
TypeAssumptionParser parser = new TypeAssumptionParser(); TypeAssumptionParser parser = new TypeAssumptionParser();
Map<String, String> assumptions = new LinkedHashMap<>(); Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("x: tau1");
assumptions.put("x", "tau1");
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(assumptions);
assertTrue(type.isOk()); assertTrue(type.isOk());
Map<VarTerm, TypeAbstraction> types = type.unwrap(); Map<VarTerm, TypeAbstraction> types = type.unwrap();
assertEquals(1, types.size()); assertEquals(1, types.size());
@ -82,27 +73,27 @@ class TypeAssumptionParserTest {
@Test @Test
void functionType() { void functionType() {
TypeAssumptionParser parser = new TypeAssumptionParser(); TypeAssumptionParser parser = new TypeAssumptionParser();
Map<String, String> assumptions = new LinkedHashMap<>(); Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("id: int -> int");
assumptions.put("id", "int -> int");
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(assumptions);
assertTrue(type.isOk()); assertTrue(type.isOk());
Map<VarTerm, TypeAbstraction> types = type.unwrap(); Map<VarTerm, TypeAbstraction> types = type.unwrap();
assertEquals(1, types.size()); assertEquals(1, types.size());
Map.Entry<VarTerm, TypeAbstraction> assumption = types.entrySet().stream().findFirst().get(); Map.Entry<VarTerm, TypeAbstraction> assumption = types.entrySet().stream().findFirst().get();
assertEquals(new VarTerm("id"), assumption.getKey()); assertEquals(new VarTerm("id"), assumption.getKey());
assertEquals(new TypeAbstraction(new FunctionType(INT, INT)), assumption.getValue()); assertEquals(new TypeAbstraction(new FunctionType(INT, INT)), assumption.getValue());
}
Map<String, String> assumptions2 = new LinkedHashMap<>(); @Test
assumptions2.put("f", "int -> int -> int"); void functionType2() {
type = parser.parse(assumptions2); TypeAssumptionParser parser = new TypeAssumptionParser();
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("f: int -> int -> int");
if (type.isError()) { if (type.isError()) {
System.err.println(type.unwrapError()); System.err.println(type.unwrapError());
System.err.println(type.unwrapError().getCause()); System.err.println(type.unwrapError().getCause());
} }
assertTrue(type.isOk()); assertTrue(type.isOk());
types = type.unwrap(); Map<VarTerm, TypeAbstraction> types = type.unwrap();
assertEquals(1, types.size()); assertEquals(1, types.size());
assumption = types.entrySet().stream().findFirst().get(); Map.Entry<VarTerm, TypeAbstraction> assumption = types.entrySet().stream().findFirst().get();
assertEquals(new VarTerm("f"), assumption.getKey()); assertEquals(new VarTerm("f"), assumption.getKey());
assertEquals(new TypeAbstraction( assertEquals(new TypeAbstraction(
new FunctionType( new FunctionType(
@ -114,9 +105,7 @@ class TypeAssumptionParserTest {
@Test @Test
void functionTypeWithVariables() { void functionTypeWithVariables() {
TypeAssumptionParser parser = new TypeAssumptionParser(); TypeAssumptionParser parser = new TypeAssumptionParser();
Map<String, String> assumptions = new LinkedHashMap<>(); Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("fun: t0 -> t0");
assumptions.put("fun", "t0 -> t0");
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(assumptions);
assertTrue(type.isOk()); assertTrue(type.isOk());
Map<VarTerm, TypeAbstraction> types = type.unwrap(); Map<VarTerm, TypeAbstraction> types = type.unwrap();
assertEquals(1, types.size()); assertEquals(1, types.size());
@ -129,9 +118,7 @@ class TypeAssumptionParserTest {
@Test @Test
void complicatedTypes() { void complicatedTypes() {
TypeAssumptionParser parser = new TypeAssumptionParser(); TypeAssumptionParser parser = new TypeAssumptionParser();
Map<String, String> assumptions = new LinkedHashMap<>(); Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("id: (int -> int) -> (boolean -> boolean)");
assumptions.put("id", "(int -> int) -> (boolean -> boolean)");
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(assumptions);
if (type.isError()) { if (type.isError()) {
System.err.println(type.unwrapError()); System.err.println(type.unwrapError());
System.err.println(type.unwrapError().getCause()); System.err.println(type.unwrapError().getCause());
@ -147,9 +134,7 @@ class TypeAssumptionParserTest {
new FunctionType(BOOLEAN, BOOLEAN) new FunctionType(BOOLEAN, BOOLEAN)
)), assumption.getValue()); )), assumption.getValue());
parser = new TypeAssumptionParser(); parser = new TypeAssumptionParser();
Map<String, String> assumptions2 = new LinkedHashMap<>(); type = parser.parse("id: ((int -> int) -> (boolean -> boolean)) -> ((int2 -> boolean2) -> (boolean2 -> int2))");
assumptions2.put("id", "((int -> int) -> (boolean -> boolean)) -> ((int2 -> boolean2) -> (boolean2 -> int2))");
type = parser.parse(assumptions2);
if (type.isError()) { if (type.isError()) {
System.err.println(type.unwrapError()); System.err.println(type.unwrapError());
System.err.println(type.unwrapError().getCause()); System.err.println(type.unwrapError().getCause());
@ -177,9 +162,7 @@ class TypeAssumptionParserTest {
@Test @Test
void longFunction() { void longFunction() {
TypeAssumptionParser parser = new TypeAssumptionParser(); TypeAssumptionParser parser = new TypeAssumptionParser();
Map<String, String> assumptions = new LinkedHashMap<>(); Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("fun: (a -> b -> c) -> d");
assumptions.put("fun", "(a -> b -> c) -> d");
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(assumptions);
assertTrue(type.isOk()); assertTrue(type.isOk());
Map<VarTerm, TypeAbstraction> types = type.unwrap(); Map<VarTerm, TypeAbstraction> types = type.unwrap();
assertEquals(1, types.size()); assertEquals(1, types.size());
@ -195,9 +178,7 @@ class TypeAssumptionParserTest {
@Test @Test
void allQuantified() { void allQuantified() {
TypeAssumptionParser parser = new TypeAssumptionParser(); TypeAssumptionParser parser = new TypeAssumptionParser();
Map<String, String> assumptions = new HashMap<>(); Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("id: ∀ t1 . t1 -> t1");
assumptions.put("id", "∀ t1 . t1 -> t1");
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(assumptions);
assertTrue(type.isOk()); assertTrue(type.isOk());
Map<VarTerm, TypeAbstraction> types = type.unwrap(); Map<VarTerm, TypeAbstraction> types = type.unwrap();
assertEquals(1, types.size()); assertEquals(1, types.size());
@ -212,35 +193,59 @@ class TypeAssumptionParserTest {
), quantified), assumption.getValue()); ), quantified), assumption.getValue());
} }
@Test
void allQuantifiedCommaSeparation() {
TypeAssumptionParser parser = new TypeAssumptionParser();
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("transmute: ∀ t1, t2 . t1 -> t2, createAny: ∀ t1 . t1 ");
assertTrue(type.isOk());
Map<VarTerm, TypeAbstraction> types = type.unwrap();
assertEquals(2, types.size());
Set<TypeVariable> quantified = new HashSet<>();
quantified.add(new TypeVariable(TypeVariableKind.USER_INPUT, 1));
quantified.add(new TypeVariable(TypeVariableKind.USER_INPUT, 2));
assertEquals(new TypeAbstraction(
new FunctionType(
new TypeVariable(TypeVariableKind.USER_INPUT, 1),
new TypeVariable(TypeVariableKind.USER_INPUT, 2)
), quantified), types.get(new VarTerm("transmute")));
quantified.remove(new TypeVariable(TypeVariableKind.USER_INPUT, 2));
assertEquals(new TypeAbstraction(
new TypeVariable(TypeVariableKind.USER_INPUT, 1),
quantified), types.get(new VarTerm("createAny")));
}
@Test @Test
void usefulErrors() { void usefulErrors() {
TypeAssumptionParser parser = new TypeAssumptionParser(); TypeAssumptionParser parser = new TypeAssumptionParser();
Map<String, String> assumptions = new HashMap<>(); Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("id: ∀ t1");
assumptions.put("id", "∀ t1");
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(assumptions);
assertTrue(type.isError()); assertTrue(type.isError());
ParseError error = type.unwrapError(); ParseError error = type.unwrapError();
assertEquals(ParseError.TOO_FEW_TOKENS, error); assertEquals(ParseError.UNEXPECTED_TOKEN, error);
assertEquals(Token.TokenType.EOF, error.getCause().get().getType());
Collection<Token.TokenType> expected = error.getExpected().get(); Collection<Token.TokenType> expected = error.getExpected().get();
assertEquals(1, expected.size()); assertEquals(2, expected.size());
assertTrue(expected.contains(Token.TokenType.DOT)); assertTrue(expected.contains(Token.TokenType.DOT));
// TODO: should also expect a comma, once the parser is fixed! assertTrue(expected.contains(Token.TokenType.COMMA));
} }
@Test @Test
void errors() { void errors() {
Map<String, ParseError> tests = new HashMap<>(); Map<String, ParseError> tests = new HashMap<>();
tests.put("", ParseError.TOO_FEW_TOKENS); tests.put("",
ParseError.UNEXPECTED_TOKEN.withToken(new Token(Token.TokenType.EOF, "", "", 0)));
tests.put("ö", ParseError.UNEXPECTED_CHARACTER); tests.put("ö", ParseError.UNEXPECTED_CHARACTER);
tests.put("(x", ParseError.TOO_FEW_TOKENS); tests.put("(x",
tests.put("-> x", ParseError.UNEXPECTED_TOKEN.withToken(new Token(Token.TokenType.ARROW, "->", 0), "")); ParseError.UNEXPECTED_TOKEN.withToken(new Token(Token.TokenType.EOF, "", "(x", 2)));
tests.put("x 11", ParseError.UNEXPECTED_TOKEN.withToken(new Token(Token.TokenType.NUMBER, "11", 2), "")); tests.put("-> x",
tests.put("x )", ParseError.UNEXPECTED_TOKEN.withToken(new Token(Token.TokenType.RIGHT_PARENTHESIS, ")", 2), "")); ParseError.UNEXPECTED_TOKEN.withToken(new Token(Token.TokenType.ARROW, "->", "-> x", 0)));
tests.put("x 11",
ParseError.UNEXPECTED_TOKEN.withToken(new Token(Token.TokenType.NUMBER, "11", "x 11", 2)));
tests.put("x )", ParseError.UNEXPECTED_TOKEN.withToken(new Token(Token.TokenType.RIGHT_PARENTHESIS, ")", "x )", 2)));
tests.put("x -> (x) )", ParseError.UNEXPECTED_TOKEN tests.put("x -> (x) )", ParseError.UNEXPECTED_TOKEN
.withToken(new Token(Token.TokenType.RIGHT_PARENTHESIS, ")", 9), "")); .withToken(new Token(Token.TokenType.RIGHT_PARENTHESIS, ")", "x -> (x) )", 9)));
for (Map.Entry<String, ParseError> entry : tests.entrySet()) { for (Map.Entry<String, ParseError> entry : tests.entrySet()) {
TypeAssumptionParser parser = new TypeAssumptionParser(); TypeAssumptionParser parser = new TypeAssumptionParser();
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(Map.of("type1", entry.getKey())); Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("type1:" + entry.getKey());
assertTrue(type.isError()); assertTrue(type.isError());
assertEquals(entry.getValue(), type.unwrapError()); assertEquals(entry.getValue(), type.unwrapError());
if (entry.getValue().getCause().isPresent()) { if (entry.getValue().getCause().isPresent()) {
@ -248,13 +253,15 @@ class TypeAssumptionParserTest {
} }
} }
TypeAssumptionParser parser = new TypeAssumptionParser(); TypeAssumptionParser parser = new TypeAssumptionParser();
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(Map.of("föhn", "int")); Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("föhn: int");
assertTrue(type.isError()); assertTrue(type.isError());
assertEquals(ParseError.UNEXPECTED_CHARACTER, type.unwrapError()); assertEquals(ParseError.UNEXPECTED_CHARACTER, type.unwrapError());
assertEquals(1, type.unwrapError().getPosition());
parser = new TypeAssumptionParser(); parser = new TypeAssumptionParser();
type = parser.parse(Map.of("1typ", "int")); type = parser.parse("1typ:int");
assertTrue(type.isError()); assertTrue(type.isError());
assertEquals(ParseError.UNEXPECTED_CHARACTER, type.unwrapError()); assertEquals(ParseError.UNEXPECTED_TOKEN, type.unwrapError());
assertEquals(0, type.unwrapError().getPosition());
} }
@Test @Test
@ -281,7 +288,7 @@ class TypeAssumptionParserTest {
tests.put("((x) -> ((x)) -> (x)) -> (x -> (x -> (x)))", new TypeAbstraction(xxxxxx)); tests.put("((x) -> ((x)) -> (x)) -> (x -> (x -> (x)))", new TypeAbstraction(xxxxxx));
for (Map.Entry<String, TypeAbstraction> entry : tests.entrySet()) { for (Map.Entry<String, TypeAbstraction> entry : tests.entrySet()) {
TypeAssumptionParser parser = new TypeAssumptionParser(); TypeAssumptionParser parser = new TypeAssumptionParser();
Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse(Map.of("type1", entry.getKey())); Result<Map<VarTerm, TypeAbstraction>, ParseError> type = parser.parse("type1:" + entry.getKey());
assertTrue(type.isOk()); assertTrue(type.isOk());
assertEquals(entry.getValue(), type.unwrap().get(new VarTerm("type1"))); assertEquals(entry.getValue(), type.unwrap().get(new VarTerm("type1")));
} }

View File

@ -25,7 +25,7 @@ class LatexCreatorConstraintsTest {
@Test @Test
void singleVarDefaultConstraintTest() { void singleVarDefaultConstraintTest() {
typeInferer = model.getTypeInferer("x", new HashMap<>()).unwrap(); typeInferer = model.getTypeInferer("x", "").unwrap();
List<String> actual = new LatexCreatorConstraints(typeInferer, Enum::toString, LatexCreatorMode.NORMAL).getEverything(); List<String> actual = new LatexCreatorConstraints(typeInferer, Enum::toString, LatexCreatorMode.NORMAL).getEverything();
String constraintSet = AMPERSAND + SPLIT_BEGIN + CONSTRAINT_SET + EQUALS + LATEX_CURLY_LEFT + AMPERSAND String constraintSet = AMPERSAND + SPLIT_BEGIN + CONSTRAINT_SET + EQUALS + LATEX_CURLY_LEFT + AMPERSAND
@ -55,7 +55,7 @@ class LatexCreatorConstraintsTest {
@Test @Test
void singleAbsDefaultConstraintTest() { void singleAbsDefaultConstraintTest() {
typeInferer = model.getTypeInferer("λx.y", new HashMap<>()).unwrap(); typeInferer = model.getTypeInferer("λx.y", "").unwrap();
List<String> actual = new LatexCreatorConstraints(typeInferer, Enum::toString, LatexCreatorMode.NORMAL).getEverything(); List<String> actual = new LatexCreatorConstraints(typeInferer, Enum::toString, LatexCreatorMode.NORMAL).getEverything();
String constraintSet1 = AMPERSAND + SPLIT_BEGIN + CONSTRAINT_SET + EQUALS + LATEX_CURLY_LEFT + AMPERSAND String constraintSet1 = AMPERSAND + SPLIT_BEGIN + CONSTRAINT_SET + EQUALS + LATEX_CURLY_LEFT + AMPERSAND
@ -103,12 +103,12 @@ class LatexCreatorConstraintsTest {
@Test @Test
void lineBreak() { void lineBreak() {
typeInferer = model.getTypeInferer("a b c d e f", new HashMap<>()).unwrap(); typeInferer = model.getTypeInferer("a b c d e f", "").unwrap();
List<String> actual = new LatexCreatorConstraints(typeInferer, Enum::toString, LatexCreatorMode.NORMAL).getEverything(); List<String> actual = new LatexCreatorConstraints(typeInferer, Enum::toString, LatexCreatorMode.NORMAL).getEverything();
assertEquals("\\begin{aligned}&\\begin{split}C=\\{&\\alpha_{2}=\\alpha_{3} \\rightarrow \\alpha_{1},\\alpha_{4}=\\alpha_{5} \\rightarrow \\alpha_{2},\\alpha_{6}=\\alpha_{7} \\rightarrow \\alpha_{4},\\alpha_{8}=\\alpha_{9} \\rightarrow \\alpha_{6},\\alpha_{10}=\\alpha_{11} \\rightarrow \\alpha_{8},\\alpha_{10}=\\beta_{1},\\alpha_{11}=\\beta_{2},\\alpha_{9}=\\beta_{3},\\alpha_{7}=\\beta_{4},\\alpha_{5}=\\beta_{5},\\\\&\\alpha_{3}=\\beta_{6}\\}\\end{split}\\end{aligned}", actual.get(11)); assertEquals("\\begin{aligned}&\\begin{split}C=\\{&\\alpha_{2}=\\alpha_{3} \\rightarrow \\alpha_{1},\\alpha_{4}=\\alpha_{5} \\rightarrow \\alpha_{2},\\alpha_{6}=\\alpha_{7} \\rightarrow \\alpha_{4},\\alpha_{8}=\\alpha_{9} \\rightarrow \\alpha_{6},\\alpha_{10}=\\alpha_{11} \\rightarrow \\alpha_{8},\\alpha_{10}=\\beta_{1},\\alpha_{11}=\\beta_{2},\\alpha_{9}=\\beta_{3},\\alpha_{7}=\\beta_{4},\\alpha_{5}=\\beta_{5},\\\\&\\alpha_{3}=\\beta_{6}\\}\\end{split}\\end{aligned}", actual.get(11));
typeInferer = model.getTypeInferer("let g = a b c d e f in g", new HashMap<>()).unwrap(); typeInferer = model.getTypeInferer("let g = a b c d e f in g", "").unwrap();
actual = new LatexCreatorConstraints(typeInferer, Enum::toString, LatexCreatorMode.NORMAL).getEverything(); actual = new LatexCreatorConstraints(typeInferer, Enum::toString, LatexCreatorMode.NORMAL).getEverything();
assertEquals("\\begin{aligned}&\\begin{split}C=\\{&\\alpha_{1}=\\alpha_{13}\\}\\end{split}\\\\\n" + assertEquals("\\begin{aligned}&\\begin{split}C=\\{&\\alpha_{1}=\\alpha_{13}\\}\\end{split}\\\\\n" +
@ -117,7 +117,7 @@ class LatexCreatorConstraintsTest {
@Test @Test
void emptyLetTypeAssumptions() { void emptyLetTypeAssumptions() {
typeInferer = model.getTypeInferer("let g = 5 in g", new HashMap<>()).unwrap(); typeInferer = model.getTypeInferer("let g = 5 in g", "").unwrap();
List<String> actual = new LatexCreatorConstraints(typeInferer, Enum::toString, LatexCreatorMode.NORMAL).getEverything(); List<String> actual = new LatexCreatorConstraints(typeInferer, Enum::toString, LatexCreatorMode.NORMAL).getEverything();
assertEquals("\\begin{aligned}&\\begin{split}C=\\{&\\alpha_{1}=\\alpha_{3}\\}\\end{split}\\\\\n" + assertEquals("\\begin{aligned}&\\begin{split}C=\\{&\\alpha_{1}=\\alpha_{3}\\}\\end{split}\\\\\n" +
"&\\begin{split}C_{let}=\\{&\\alpha_{2}=\\texttt{int}\\}\\end{split}\\\\&\\begin{split}\\sigma_{let}:=\\textit{mgu}(C_{let})=[&\\alpha_{2}\\mathrel{\\unicode{x21E8}}\\texttt{int}]\\end{split}\\\\&\\sigma_{let}(\\alpha_{2})=\\texttt{int}\\\\&\\Gamma'=\\sigma_{let}(\\emptyset),\\ \\texttt{g}:ta(\\sigma_{let}(\\alpha_{2}),\\sigma_{let}(\\emptyset))=\\texttt{g}:\\texttt{int}\\end{aligned}", actual.get(6)); "&\\begin{split}C_{let}=\\{&\\alpha_{2}=\\texttt{int}\\}\\end{split}\\\\&\\begin{split}\\sigma_{let}:=\\textit{mgu}(C_{let})=[&\\alpha_{2}\\mathrel{\\unicode{x21E8}}\\texttt{int}]\\end{split}\\\\&\\sigma_{let}(\\alpha_{2})=\\texttt{int}\\\\&\\Gamma'=\\sigma_{let}(\\emptyset),\\ \\texttt{g}:ta(\\sigma_{let}(\\alpha_{2}),\\sigma_{let}(\\emptyset))=\\texttt{g}:\\texttt{int}\\end{aligned}", actual.get(6));
@ -126,7 +126,7 @@ class LatexCreatorConstraintsTest {
@Test @Test
void excessiveMemoryUsageAvoided() { void excessiveMemoryUsageAvoided() {
try { try {
typeInferer = model.getTypeInferer("(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)", new HashMap<>()).unwrap(); typeInferer = model.getTypeInferer("(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)(λx.x)", "").unwrap();
List<String> actual = new LatexCreatorConstraints(typeInferer, Enum::toString, LatexCreatorMode.NORMAL).getEverything(); List<String> actual = new LatexCreatorConstraints(typeInferer, Enum::toString, LatexCreatorMode.NORMAL).getEverything();
// should have thrown IllegalStateException by now // should have thrown IllegalStateException by now
fail("did not throw exception"); fail("did not throw exception");

View File

@ -17,7 +17,7 @@ class LatexCreatorTermTest {
@Test @Test
void absTest() { void absTest() {
typeInferer = model.getTypeInferer("λx.y", new HashMap<>()).unwrap(); typeInferer = model.getTypeInferer("λx.y", "").unwrap();
assertEquals(LAMBDA + SPACE + MONO_TEXT + "{x}" + DOT_SIGN assertEquals(LAMBDA + SPACE + MONO_TEXT + "{x}" + DOT_SIGN
+ LATEX_SPACE + MONO_TEXT + "{y}", + LATEX_SPACE + MONO_TEXT + "{y}",
new LatexCreatorTerm(typeInferer.getFirstInferenceStep().getConclusion().getLambdaTerm(), new LatexCreatorTerm(typeInferer.getFirstInferenceStep().getConclusion().getLambdaTerm(),
@ -26,7 +26,7 @@ class LatexCreatorTermTest {
@Test @Test
void appTest() { void appTest() {
typeInferer = model.getTypeInferer("x y", new HashMap<>()).unwrap(); typeInferer = model.getTypeInferer("x y", "").unwrap();
assertEquals(MONO_TEXT + "{x}" + LATEX_SPACE + MONO_TEXT + "{y}", assertEquals(MONO_TEXT + "{x}" + LATEX_SPACE + MONO_TEXT + "{y}",
new LatexCreatorTerm(typeInferer.getFirstInferenceStep().getConclusion().getLambdaTerm(), new LatexCreatorTerm(typeInferer.getFirstInferenceStep().getConclusion().getLambdaTerm(),
LatexCreatorMode.NORMAL).getLatex()); LatexCreatorMode.NORMAL).getLatex());
@ -34,7 +34,7 @@ class LatexCreatorTermTest {
@Test @Test
void varTest() { void varTest() {
typeInferer = model.getTypeInferer("x", new HashMap<>()).unwrap(); typeInferer = model.getTypeInferer("x", "").unwrap();
assertEquals(MONO_TEXT + "{x}", assertEquals(MONO_TEXT + "{x}",
new LatexCreatorTerm(typeInferer.getFirstInferenceStep().getConclusion().getLambdaTerm(), new LatexCreatorTerm(typeInferer.getFirstInferenceStep().getConclusion().getLambdaTerm(),
LatexCreatorMode.NORMAL).getLatex()); LatexCreatorMode.NORMAL).getLatex());
@ -42,7 +42,7 @@ class LatexCreatorTermTest {
@Test @Test
void integerTest() { void integerTest() {
typeInferer = model.getTypeInferer("5", new HashMap<>()).unwrap(); typeInferer = model.getTypeInferer("5", "").unwrap();
assertEquals(MONO_TEXT + "{5}", assertEquals(MONO_TEXT + "{5}",
new LatexCreatorTerm(typeInferer.getFirstInferenceStep().getConclusion().getLambdaTerm(), new LatexCreatorTerm(typeInferer.getFirstInferenceStep().getConclusion().getLambdaTerm(),
LatexCreatorMode.NORMAL).getLatex()); LatexCreatorMode.NORMAL).getLatex());
@ -50,7 +50,7 @@ class LatexCreatorTermTest {
@Test @Test
void booleanTest() { void booleanTest() {
typeInferer = model.getTypeInferer("true", new HashMap<>()).unwrap(); typeInferer = model.getTypeInferer("true", "").unwrap();
assertEquals(MONO_TEXT + "{true}", assertEquals(MONO_TEXT + "{true}",
new LatexCreatorTerm(typeInferer.getFirstInferenceStep().getConclusion().getLambdaTerm(), new LatexCreatorTerm(typeInferer.getFirstInferenceStep().getConclusion().getLambdaTerm(),
LatexCreatorMode.NORMAL).getLatex()); LatexCreatorMode.NORMAL).getLatex());
@ -58,7 +58,7 @@ class LatexCreatorTermTest {
@Test @Test
void letTest() { void letTest() {
typeInferer = model.getTypeInferer("let f = x in f", new HashMap<>()).unwrap(); typeInferer = model.getTypeInferer("let f = x in f", "").unwrap();
assertEquals(MONO_TEXT + CURLY_LEFT + BOLD_TEXT + "{let}" + CURLY_RIGHT + LATEX_SPACE + MONO_TEXT + "{f}" assertEquals(MONO_TEXT + CURLY_LEFT + BOLD_TEXT + "{let}" + CURLY_RIGHT + LATEX_SPACE + MONO_TEXT + "{f}"
+ EQUALS + MONO_TEXT + "{x}" + LATEX_SPACE + MONO_TEXT + CURLY_LEFT + BOLD_TEXT + "{in}" + EQUALS + MONO_TEXT + "{x}" + LATEX_SPACE + MONO_TEXT + CURLY_LEFT + BOLD_TEXT + "{in}"
+ CURLY_RIGHT + LATEX_SPACE + MONO_TEXT + "{f}", + CURLY_RIGHT + LATEX_SPACE + MONO_TEXT + "{f}",
@ -68,7 +68,7 @@ class LatexCreatorTermTest {
@Test @Test
void appWithParenthesesTest() { void appWithParenthesesTest() {
typeInferer = model.getTypeInferer("x (y z)", new HashMap<>()).unwrap(); typeInferer = model.getTypeInferer("x (y z)", "").unwrap();
assertEquals(MONO_TEXT + "{x}" + LATEX_SPACE + PAREN_LEFT + MONO_TEXT + "{y}" assertEquals(MONO_TEXT + "{x}" + LATEX_SPACE + PAREN_LEFT + MONO_TEXT + "{y}"
+ LATEX_SPACE + MONO_TEXT + "{z}" + PAREN_RIGHT, + LATEX_SPACE + MONO_TEXT + "{z}" + PAREN_RIGHT,
new LatexCreatorTerm(typeInferer.getFirstInferenceStep().getConclusion().getLambdaTerm(), new LatexCreatorTerm(typeInferer.getFirstInferenceStep().getConclusion().getLambdaTerm(),

View File

@ -14,7 +14,7 @@ class LatexCreatorTest {
@Test @Test
void testFailedLet() { void testFailedLet() {
TypeInfererInterface typeInferer = model.getTypeInferer("let fun = 5 5 in fun 42", new HashMap<>()).unwrap(); TypeInfererInterface typeInferer = model.getTypeInferer("let fun = 5 5 in fun 42", "").unwrap();
String latex = new LatexCreator(typeInferer, Enum::toString, LatexCreatorMode.NORMAL).getTree(); String latex = new LatexCreator(typeInferer, Enum::toString, LatexCreatorMode.NORMAL).getTree();
assertEquals("\\begin{prooftree}\n" + assertEquals("\\begin{prooftree}\n" +
"\\AxiomC{$\\texttt{5} \\in Const$}\n" + "\\AxiomC{$\\texttt{5} \\in Const$}\n" +
@ -29,7 +29,7 @@ class LatexCreatorTest {
"\\LeftLabel{\\textrm L{\\small ET}}\n" + "\\LeftLabel{\\textrm L{\\small ET}}\n" +
"\\BinaryInfC{$\\vdash\\texttt{\\textbf{let}}\\ \\texttt{fun}=\\texttt{5}\\ \\texttt{5}\\ \\texttt{\\textbf{in}}\\ \\texttt{fun}\\ \\texttt{42}:\\alpha_{1}$}\n" + "\\BinaryInfC{$\\vdash\\texttt{\\textbf{let}}\\ \\texttt{fun}=\\texttt{5}\\ \\texttt{5}\\ \\texttt{\\textbf{in}}\\ \\texttt{fun}\\ \\texttt{42}:\\alpha_{1}$}\n" +
"\\end{prooftree}", latex); "\\end{prooftree}", latex);
typeInferer = model.getTypeInferer("(let fun = 5 5 in fun) 42", new HashMap<>()).unwrap(); typeInferer = model.getTypeInferer("(let fun = 5 5 in fun) 42", "").unwrap();
latex = new LatexCreator(typeInferer, Enum::toString, LatexCreatorMode.NORMAL).getTree(); latex = new LatexCreator(typeInferer, Enum::toString, LatexCreatorMode.NORMAL).getTree();
assertEquals("\\begin{prooftree}\n" + assertEquals("\\begin{prooftree}\n" +
"\\AxiomC{$\\texttt{5} \\in Const$}\n" + "\\AxiomC{$\\texttt{5} \\in Const$}\n" +

View File

@ -17,30 +17,28 @@ class LatexCreatorTypeTest {
@Test @Test
void identityTest() { void identityTest() {
typeInferer = model.getTypeInferer("λx.x", new HashMap<>()).unwrap(); typeInferer = model.getTypeInferer("λx.x", "").unwrap();
assertEquals(TREE_VARIABLE + "_{2} " + RIGHT_ARROW + SPACE + TREE_VARIABLE + "_{2}", assertEquals(TREE_VARIABLE + "_{2} " + RIGHT_ARROW + SPACE + TREE_VARIABLE + "_{2}",
new LatexCreatorType(typeInferer.getType().get(), LatexCreatorMode.NORMAL).getLatex()); new LatexCreatorType(typeInferer.getType().get(), LatexCreatorMode.NORMAL).getLatex());
} }
@Test @Test
void generatedTypeTest() { void generatedTypeTest() {
typeInferer = model.getTypeInferer("x", new HashMap<>()).unwrap(); typeInferer = model.getTypeInferer("x", "").unwrap();
assertEquals(GENERATED_ASSUMPTION_VARIABLE + "_{1}", assertEquals(GENERATED_ASSUMPTION_VARIABLE + "_{1}",
new LatexCreatorType(typeInferer.getType().get(), LatexCreatorMode.NORMAL).getLatex()); new LatexCreatorType(typeInferer.getType().get(), LatexCreatorMode.NORMAL).getLatex());
} }
@Test @Test
void namedTypeTest() { void namedTypeTest() {
typeInferer = model.getTypeInferer("5", new HashMap<>()).unwrap(); typeInferer = model.getTypeInferer("5", "").unwrap();
assertEquals(MONO_TEXT + "{int}", assertEquals(MONO_TEXT + "{int}",
new LatexCreatorType(typeInferer.getType().get(), LatexCreatorMode.NORMAL).getLatex()); new LatexCreatorType(typeInferer.getType().get(), LatexCreatorMode.NORMAL).getLatex());
} }
@Test @Test
void userVariableTest() { void userVariableTest() {
HashMap<String, String> map = new HashMap<>(); typeInferer = model.getTypeInferer("x", "x: t1").unwrap();
map.put("x", "t1");
typeInferer = model.getTypeInferer("x", map).unwrap();
assertEquals(USER_VARIABLE + "_{1}", assertEquals(USER_VARIABLE + "_{1}",
new LatexCreatorType(typeInferer.getType().get(), LatexCreatorMode.NORMAL).getLatex()); new LatexCreatorType(typeInferer.getType().get(), LatexCreatorMode.NORMAL).getLatex());
} }