mirror of
https://github.com/lisk77/lambda.git
synced 2025-10-23 17:58:50 +00:00
feat: initial commit
This commit is contained in:
commit
0dd91da74f
17 changed files with 683 additions and 0 deletions
1
.gitignore
vendored
Normal file
1
.gitignore
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
build/
|
24
CMakeLists.txt
Normal file
24
CMakeLists.txt
Normal file
|
@ -0,0 +1,24 @@
|
|||
cmake_minimum_required(VERSION 3.10)
project(lambda VERSION 1.0 LANGUAGES CXX)

set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

# 1) gather all your .cpp files
# NOTE(review): GLOB runs at configure time only — newly added sources are not
# seen until cmake is re-run (CONFIGURE_DEPENDS would fix this, CMake >= 3.12).
# NOTE(review): this also compiles src/print.cpp standalone even though
# main.cpp #includes it; harmless today because print.cpp only contains
# static/inline definitions, but fragile.
file(GLOB_RECURSE PROJECT_SOURCES
    ${PROJECT_SOURCE_DIR}/src/*.cpp
)

# sanity check
if(NOT PROJECT_SOURCES)
    message(FATAL_ERROR "No .cpp files found in src/")
endif()

# 2) create the executable from those sources
add_executable(${PROJECT_NAME} ${PROJECT_SOURCES})

# 3) point it at your headers
target_include_directories(${PROJECT_NAME}
    PRIVATE ${PROJECT_SOURCE_DIR}/include
)
|
||||
|
9
examples/booleans.lambda
Normal file
9
examples/booleans.lambda
Normal file
|
@ -0,0 +1,9 @@
|
|||
true = \ x y . x;
|
||||
false = \ x y . y;
|
||||
|
||||
not = \ p . p false true;
|
||||
and = \ p q . p q p;
|
||||
or = \ p q . p p q;
|
||||
xor = \ p q . p ( not q ) q;
|
||||
|
||||
main = and true false;
|
13
examples/numbers.lambda
Normal file
13
examples/numbers.lambda
Normal file
|
@ -0,0 +1,13 @@
|
|||
0 = \ f x . x;
|
||||
1 = \ f x . f x;
|
||||
2 = \ f x . f ( f x );
|
||||
|
||||
isZero = \ n . n (\ x . false) true;
|
||||
succ = \ n f x . f ( n f x );
|
||||
pred = \ n f x . n (\ g h . h ( g f )) (\u . x) (\u . u);
|
||||
add = \ m n . n succ m;
|
||||
sub = \ m n . n pred m;
|
||||
mul = \ m n f . m ( n f );
|
||||
exp = \ b n . n b;
|
||||
|
||||
main = add 2 1;
|
12
examples/pair.lambda
Normal file
12
examples/pair.lambda
Normal file
|
@ -0,0 +1,12 @@
|
|||
pair = \ x y z . z x y;
|
||||
|
||||
first = \ p . p (\ x y . x);
|
||||
second = \ p . p (\ x y . y);
|
||||
|
||||
cons = pair;
|
||||
head = first;
|
||||
tail = second;
|
||||
nil = \ x y . y;
|
||||
isNil = \ l . l (\ h t d . (\ x y . y)) (\ x y . x);
|
||||
|
||||
main = first (pair a b);
|
39
include/ast.hpp
Normal file
39
include/ast.hpp
Normal file
|
@ -0,0 +1,39 @@
|
|||
#ifndef AST_HPP
#define AST_HPP

#include <memory>
#include <string>

// Abstract syntax tree for untyped lambda-calculus terms.
// Base class of all nodes; the virtual destructor enables safe
// delete-through-base and the dynamic_cast dispatch used throughout src/.
struct Expr {
    virtual ~Expr() = default;
};

// A named symbol such as "x" — multi-character names are allowed
// (the lexer treats any run of non-special characters as one VARIABLE).
struct Variable : Expr {
    std::string name;

    Variable(std::string n) : name(std::move(n)) {}
};

// A function literal, e.g. \ x . x — one parameter and an owned body.
// Multi-parameter lambdas are represented as nested Abstractions.
struct Abstraction : Expr {
    std::string param;
    std::unique_ptr<Expr> body;

    Abstraction(std::string p, std::unique_ptr<Expr> b) : param(std::move(p)), body(std::move(b)) {}
};

// Application of one term to another, e.g. (\ x . x) a.
// Parentheses matter: (\ x y . x) (\ x . x) b and
// (\ x y . x) ((\ x . x) b) are different terms.
struct Application : Expr {
    std::unique_ptr<Expr> left, right;

    Application(std::unique_ptr<Expr> l, std::unique_ptr<Expr> r) : left(std::move(l)), right(std::move(r)) {}
};

#endif // AST_HPP
|
14
include/evaluator.hpp
Normal file
14
include/evaluator.hpp
Normal file
|
@ -0,0 +1,14 @@
|
|||
#ifndef EVALUATOR_HPP
#define EVALUATOR_HPP

#include "ast.hpp"
#include <unordered_map>
#include <string>
#include <stdexcept>

// Reduce an expression to normal form, expanding free variables from the
// top-level definition environment (name -> definition).
std::unique_ptr<Expr> normalize(const std::unique_ptr<Expr>&, const std::unordered_map<std::string, std::unique_ptr<Expr>>&);
// Capture-avoiding substitution expr[var := val]; returns a fresh tree.
std::unique_ptr<Expr> substitute(const std::string&, const std::unique_ptr<Expr>&, const std::unique_ptr<Expr>&);
// NOTE(review): declared here but no definition is visible in src/evaluator.cpp
// (which only defines substitute/normalize/evaluateMain) — confirm whether
// evaluate() is implemented elsewhere or is dead API.
std::unique_ptr<Expr> evaluate(const std::unique_ptr<Expr>&, const std::unordered_map<std::string, std::unique_ptr<Expr>>&);
// Look up "main" in the definitions map and normalize it; throws
// std::runtime_error if no "main" was defined.
std::unique_ptr<Expr> evaluateMain(const std::unordered_map<std::string, std::unique_ptr<Expr>>&);

#endif // EVALUATOR_HPP
|
18
include/lexer.hpp
Normal file
18
include/lexer.hpp
Normal file
|
@ -0,0 +1,18 @@
|
|||
#ifndef LEXER_HPP
#define LEXER_HPP

#include <vector>
#include <string>
#include "token.hpp"

// Splits a lambda-calculus source string into a flat token stream.
class Lexer {
    std::string src;     // full source text being scanned
    bool error_flag;     // set on an unrecognized symbol; stops the scan
    size_t position;     // current scan offset into src

public:
    Lexer(std::string);
    // Scans src and returns all tokens, always terminated by an EOC token.
    // On error an ERROR token is appended and scanning stops early.
    std::vector<Token> tokenize();
};

#endif // LEXER_HPP
|
31
include/parser.hpp
Normal file
31
include/parser.hpp
Normal file
|
@ -0,0 +1,31 @@
|
|||
#ifndef PARSER_HPP
#define PARSER_HPP

#include <vector>
#include <unordered_map>

#include "token.hpp"
#include "ast.hpp"

// Recursive-descent parser for the grammar:
//   <program>     ::= { <definition> | <term> EOL } EOC
//   <definition>  ::= <variable> '=' <term> ';'
//   <term>        ::= <application>
//   <application> ::= <simple> { <simple> }
//   <simple>      ::= '\' <variable>+ '.' <term> | <variable> | '(' <term> ')'
class Parser {
    std::vector<Token> tokens;  // token stream from the Lexer
    size_t position;            // index of the next token to consume
    bool error_flag;            // NOTE(review): initialized but never set in parser.cpp — confirm if still needed
    std::unordered_map<std::string, std::unique_ptr<Expr>> defs;  // name -> parsed definition

public:
    Parser(std::vector<Token>);
    // read-only access to the definitions collected by parseProgram()
    const std::unordered_map<std::string, std::unique_ptr<Expr>>& definitions() const;
    // current token without consuming it (throws at end of stream)
    const Token& peek() const;
    // consume and return the current token (throws at end of stream)
    const Token& get();
    // consume the current token iff it has type t
    bool accept(TokenType t);
    // like accept, but throws on mismatch
    void expect(TokenType t);
    // parse "<variable> = <term> ;" into defs; false if not a definition
    bool tryParseDefinition();
    std::unique_ptr<Expr> parseSimple();
    std::unique_ptr<Expr> parseApplication();
    std::unique_ptr<Expr> parseTerm();
    // parse exactly one term followed by EOC
    std::unique_ptr<Expr> parse();
    // parse a whole program; returns the bare (non-definition) terms
    std::vector<std::unique_ptr<Expr>> parseProgram();
};

#endif // PARSER_HPP
|
31
include/token.hpp
Normal file
31
include/token.hpp
Normal file
|
@ -0,0 +1,31 @@
|
|||
#ifndef TOKEN_H
#define TOKEN_H

// FIX: this is a C++ header — use <cstddef> for size_t instead of the C
// headers <stdint.h>/<stdlib.h>, which were otherwise unused here.
#include <cstddef>
#include <string>

// Kinds of lexical tokens produced by the Lexer.
// NOTE: unscoped enum — the enumerators (ERROR, DOT, ...) live in the
// enclosing namespace; kept that way because existing code uses them
// unqualified.
typedef enum {
    ERROR,     // unrecognized symbol
    EOL,       // ';' — terminates a definition or bare term
    EOC,       // end of content (synthetic, appended by the lexer)
    LAMBDA,    // '\'
    DOT,       // '.'
    LPAREN,    // '('
    RPAREN,    // ')'
    VARIABLE,  // any maximal run of non-special, non-space characters
    EQUALS     // '='
} TokenType;

// One lexical token plus its [start, end) character span in the source.
typedef struct {
    TokenType type;
    std::string lexeme;
    size_t start;
    size_t end;
} Token;

// Debug printers (defined in src/token.cpp).
void print_lexeme(Token);
std::string display_tokentype(TokenType);
void print_token(Token);

#endif // TOKEN_H
|
0
src/ast.cpp
Normal file
0
src/ast.cpp
Normal file
138
src/evaluator.cpp
Normal file
138
src/evaluator.cpp
Normal file
|
@ -0,0 +1,138 @@
|
|||
#include "evaluator.hpp"
|
||||
#include "ast.hpp"
|
||||
#include <memory>
|
||||
#include <stdexcept>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <unordered_set>
|
||||
|
||||
// Deep-copies an expression tree. Nodes are unique_ptr-owned, so subtrees
// cannot be shared; every reuse of a term needs a fresh copy.
// TODO: optimize that later (structural sharing would avoid most copies)
static std::unique_ptr<Expr> clone(const std::unique_ptr<Expr>& e) {
    if (auto v = dynamic_cast<Variable*>(e.get()))
        return std::make_unique<Variable>(v->name);
    if (auto ab = dynamic_cast<Abstraction*>(e.get()))
        return std::make_unique<Abstraction>(ab->param, clone(ab->body));
    if (auto ap = dynamic_cast<Application*>(e.get()))
        return std::make_unique<Application>(clone(ap->left), clone(ap->right));
    // unreachable unless a new Expr subclass is added without updating this
    throw std::runtime_error("clone: unknown expression type");
}
|
||||
|
||||
// Picks a fresh name for alpha-conversion by priming the base name
// ("x" -> "x'" -> "x''" -> ...) until it collides with nothing in `avoid`.
static std::string newVar(const std::string& baseVar, const std::unordered_set<std::string>& avoid) {
    std::string candidate = baseVar;
    do {
        candidate += "'";
    } while (avoid.count(candidate) != 0);
    return candidate;
}
|
||||
|
||||
// Collects the free (unbound) variables of an expression:
//  - a variable is free in itself,
//  - an abstraction's free vars are its body's minus the parameter,
//  - an application's free vars are the union of both sides'.
static std::unordered_set<std::string> freeVars(const std::unique_ptr<Expr>& expr) {
    std::unordered_set<std::string> result;

    if (auto v = dynamic_cast<Variable*>(expr.get())) {
        result.insert(v->name);
    }

    else if (auto ab = dynamic_cast<Abstraction*>(expr.get())) {
        auto bodyFVs = freeVars(ab->body);
        bodyFVs.erase(ab->param);  // the parameter is bound inside the body
        result = bodyFVs;
    }

    else if (auto ap = dynamic_cast<Application*>(expr.get())) {
        auto leftFVs = freeVars(ap->left);
        auto rightFVs = freeVars(ap->right);
        result.insert(leftFVs.begin(), leftFVs.end());
        result.insert(rightFVs.begin(), rightFVs.end());
    }

    return result;
}
|
||||
|
||||
// Capture-avoiding substitution expr[var := val]; always returns a fresh tree.
// When descending under a lambda whose parameter occurs free in `val`, the
// parameter is alpha-renamed first so that val's free variables cannot be
// captured by the binder.
std::unique_ptr<Expr> substitute(const std::string& var, const std::unique_ptr<Expr>& val, const std::unique_ptr<Expr>& expr) {
    if (auto v = dynamic_cast<Variable*>(expr.get())) {
        if (v->name == var) {
            return clone(val);
        }

        else {
            return clone(expr);
        }
    }

    if (auto ab = dynamic_cast<Abstraction*>(expr.get())) {
        // the lambda's own parameter shadows var: nothing to substitute inside
        if (ab->param == var) {
            return clone(expr);
        }

        auto valFVs = freeVars(val);
        if (valFVs.count(ab->param)) {
            // capture risk: rename the parameter to a name unused by val,
            // the body, or var itself, then substitute into the renamed body
            auto exprFVs = freeVars(expr);
            auto allFVs = valFVs;
            allFVs.insert(exprFVs.begin(), exprFVs.end());
            allFVs.insert(var);
            allFVs.erase(ab->param);

            std::string newParam = newVar(ab->param, allFVs);
            std::unique_ptr<Expr> renamedBody = substitute(ab->param, std::make_unique<Variable>(newParam), ab->body);
            std::unique_ptr<Expr> newBody = substitute(var, val, renamedBody);
            return std::make_unique<Abstraction>(newParam, std::move(newBody));
        }

        else {
            std::unique_ptr<Expr> newBody = substitute(var, val, ab->body);
            return std::make_unique<Abstraction>(ab->param, std::move(newBody));
        }
    }

    if (auto ap = dynamic_cast<Application*>(expr.get())) {
        std::unique_ptr<Expr> newLeft = substitute(var, val, ap->left);
        std::unique_ptr<Expr> newRight = substitute(var, val, ap->right);
        return std::make_unique<Application>(std::move(newLeft), std::move(newRight));
    }

    throw std::runtime_error("substitute: unknown expression type");
}
|
||||
|
||||
// idk about you but i would like my expressions to be simpler than just "put in for x and done"
|
||||
std::unique_ptr<Expr> normalize(const std::unique_ptr<Expr>& expr, const std::unordered_map<std::string, std::unique_ptr<Expr>>& env) {
|
||||
if (auto v = dynamic_cast<Variable*>(expr.get())) {
|
||||
auto it = env.find(v->name);
|
||||
if (it != env.end()) {
|
||||
return normalize(it->second, env);
|
||||
}
|
||||
return clone(expr);
|
||||
}
|
||||
|
||||
if (auto ab = dynamic_cast<Abstraction*>(expr.get())) {
|
||||
std::unique_ptr<Expr> normalizedBody = normalize(ab->body, env);
|
||||
return std::make_unique<Abstraction>(ab->param, std::move(normalizedBody));
|
||||
}
|
||||
|
||||
if (auto ap = dynamic_cast<Application*>(expr.get())) {
|
||||
std::unique_ptr<Expr> func = normalize(ap->left, env);
|
||||
if (auto ab = dynamic_cast<Abstraction*>(func.get())) {
|
||||
std::unique_ptr<Expr> result = substitute(ab->param, ap->right, ab->body);
|
||||
return normalize(result, env);
|
||||
}
|
||||
|
||||
else {
|
||||
std::unique_ptr<Expr> arg = normalize(ap->right, env);
|
||||
return std::make_unique<Application>(std::move(func), std::move(arg));
|
||||
}
|
||||
}
|
||||
|
||||
throw std::runtime_error("normalize: unknown expression type");
|
||||
}
|
||||
|
||||
// i think that is self explanatory
|
||||
std::unique_ptr<Expr> evaluateMain(const std::unordered_map<std::string, std::unique_ptr<Expr>>& defs) {
|
||||
auto it = defs.find("main");
|
||||
if (it == defs.end()) throw std::runtime_error("evaluate: no main function found in file");
|
||||
return normalize(it->second, defs);
|
||||
}
|
72
src/lexer.cpp
Normal file
72
src/lexer.cpp
Normal file
|
@ -0,0 +1,72 @@
|
|||
#include <iostream>
|
||||
|
||||
#include "lexer.hpp"
|
||||
|
||||
#define IS_SPACE(c) (c == ' ' || c == '\t' || c == '\n' || c == '\r' || c == '\f' || c == '\v')
|
||||
|
||||
// Takes ownership of the source text and starts scanning at offset 0
// with no error recorded.
Lexer::Lexer(std::string src)
    : src(std::move(src))
    , error_flag(false)
    , position(0)
{}
|
||||
|
||||
// Scans the whole source, emitting one token per special character
// ('\', '.', '(', ')', '=', ';'), one VARIABLE token per maximal run of
// other non-space characters, and a final EOC sentinel. Whitespace is
// skipped. Each token records its [start, end) offsets into the source.
std::vector<Token> Lexer::tokenize() {
    std::vector<Token> tokens;
    // FIX: was `std::string src = this->src;` — copied the entire source
    // on every call; a reference is enough
    const std::string& src = this->src;
    size_t len = src.length();

    while (position < len && !error_flag) {
        char curr = src[position];

        if (curr == '\\') {
            tokens.push_back({ LAMBDA, "\\", position, position+1 });
            position++;
        }
        else if (curr == '.') {
            tokens.push_back({ DOT, ".", position, position+1 });
            position++;
        }
        else if (curr == '(') {
            tokens.push_back({ LPAREN, "(", position, position+1 });
            position++;
        }
        else if (curr == ')') {
            tokens.push_back({ RPAREN, ")", position, position+1 });
            position++;
        }
        else if (curr == '=') {
            tokens.push_back({ EQUALS, "=", position, position+1 });
            position++;
        }
        else if (curr == ';') {
            tokens.push_back({ EOL, ";", position, position+1 });
            position++;
        }
        else if (!IS_SPACE(curr)) {
            // identifier: consume until whitespace or any special character
            size_t start = position;
            while (position < len && !IS_SPACE(src[position])
                && src[position] != '\\'
                && src[position] != '('
                && src[position] != ')'
                && src[position] != '.'
                && src[position] != '='
                && src[position] != ';')
            {
                position++;
            }
            std::string lex = src.substr(start, position - start);
            tokens.push_back({ VARIABLE, lex, start, position });
        }
        else if (IS_SPACE(curr)) {
            position++;
        }
        else {
            // unreachable: the two branches above already cover space /
            // non-space, so every character is classified before this point;
            // kept as a defensive guard in case the classification changes
            error_flag = true;
            std::cerr << "Unknown symbol " << curr << std::endl;
            tokens.push_back({ ERROR, std::string (1, curr), position, position });
        }
    }

    // always terminate the stream so the parser can peek() safely
    tokens.push_back({ EOC, "", position, position });
    return tokens;
}
|
40
src/main.cpp
Normal file
40
src/main.cpp
Normal file
|
@ -0,0 +1,40 @@
|
|||
#include <iostream>
|
||||
#include <iterator>
|
||||
#include <sstream>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include "lexer.hpp"
|
||||
#include "parser.hpp"
|
||||
#include "evaluator.hpp"
|
||||
#include "print.cpp"
|
||||
|
||||
int main() {
    // read the entire program from stdin
    std::ostringstream ss;
    ss << std::cin.rdbuf();
    const std::string src = ss.str();

    // lex the source into a token stream
    Lexer lex(src);
    auto tokens = lex.tokenize();

    // parse: definitions land in the parser's map; bare terms come back in
    // `trees` (currently unused — only `main` is evaluated below)
    Parser parser(tokens);
    auto trees = parser.parseProgram();

    // evaluate the `main` definition and print its normal form
    try {
        std::unique_ptr<Expr> result = evaluateMain(parser.definitions());
        std::cout << toString(result) << "\n";
    }
    catch (const std::exception &e) {
        // covers both "no main defined" and evaluator failures
        std::cerr << "evaluation: " << e.what() << "\n";
        return 1;
    }

    return 0;
}
|
||||
|
158
src/parser.cpp
Normal file
158
src/parser.cpp
Normal file
|
@ -0,0 +1,158 @@
|
|||
// src/parser.cpp
|
||||
#include "parser.hpp"
|
||||
#include "ast.hpp"
|
||||
|
||||
#include <stdexcept>
|
||||
#include <vector>
|
||||
#include <string>
|
||||
#include <memory>
|
||||
#include <iostream>
|
||||
|
||||
// Takes ownership of the token stream; parsing starts at token 0.
Parser::Parser(std::vector<Token> toks)
    : tokens(std::move(toks))
    , position(0)
    , error_flag(false)
    , defs()
{}
|
||||
|
||||
// Read-only view of the top-level definitions gathered by parseProgram().
const std::unordered_map<std::string, std::unique_ptr<Expr>>& Parser::definitions() const {
    return defs;
}
|
||||
|
||||
// Current token without consuming it; throws past the end of the stream.
const Token& Parser::peek() const {
    if (position >= tokens.size())
        throw std::runtime_error("parser: unexpected end of file");
    return tokens[position];
}
|
||||
|
||||
// Consumes and returns the current token; throws past the end of the stream.
// (The returned reference stays valid — the tokens vector is never modified.)
const Token& Parser::get() {
    if (position >= tokens.size())
        throw std::runtime_error("parser: unexpected end of file");
    return tokens[position++];
}
|
||||
|
||||
// Consumes the current token when it matches `t`; reports whether it did.
// Safe at end of stream (simply returns false).
bool Parser::accept(TokenType t) {
    const bool matches = position < tokens.size() && tokens[position].type == t;
    if (matches)
        ++position;
    return matches;
}
|
||||
|
||||
// Consumes the current token, which must be of type `t`; otherwise throws.
// FIX: the old version leaked a stray debug print of the expected type to
// stdout on failure — that information now lives in the exception message.
void Parser::expect(TokenType t) {
    if (!accept(t)) {
        throw std::runtime_error("parser: unexpected token at position "
                                 + std::to_string(peek().start)
                                 + ", expected " + display_tokentype(t));
    }
}
|
||||
|
||||
// Try to parse a definition of the form:
//     <variable> '=' <term> ';'
// Returns true if a definition was parsed and stored in defs.
bool Parser::tryParseDefinition() {
    // two-token lookahead: only "<variable> =" starts a definition
    if (peek().type == TokenType::VARIABLE
        && position+1 < tokens.size()
        && tokens[position+1].type == TokenType::EQUALS)
    {
        std::string name = get().lexeme; // consume VARIABLE
        expect(TokenType::EQUALS); // consume '='

        // parse the right-hand term
        std::unique_ptr<Expr> value = parseTerm();

        // require a semicolon (EOL token)
        expect(TokenType::EOL);

        // store in definitions map
        // NOTE(review): emplace keeps the FIRST binding — redefining a name
        // later in the file is silently ignored; confirm that's intended
        defs.emplace(std::move(name), std::move(value));
        return true;
    }
    return false;
}
|
||||
|
||||
// <simple> ::= '\' <variable>+ '.' <term>
|
||||
// | <variable>
|
||||
// | '(' <term> ')'
|
||||
std::unique_ptr<Expr> Parser::parseSimple() {
|
||||
// abstraction with one-or-more parameters
|
||||
if (accept(TokenType::LAMBDA)) {
|
||||
std::vector<std::string> params;
|
||||
while (peek().type == TokenType::VARIABLE) {
|
||||
params.push_back(get().lexeme);
|
||||
}
|
||||
expect(TokenType::DOT);
|
||||
|
||||
std::unique_ptr<Expr> body = parseTerm();
|
||||
// right-nest them: \p1.\p2.…body
|
||||
for (auto it = params.rbegin(); it != params.rend(); ++it) {
|
||||
body = std::make_unique<Abstraction>(*it, std::move(body));
|
||||
}
|
||||
return body;
|
||||
}
|
||||
|
||||
// variable
|
||||
if (peek().type == TokenType::VARIABLE) {
|
||||
const auto &tok = get();
|
||||
return std::make_unique<Variable>(tok.lexeme);
|
||||
}
|
||||
|
||||
// parenthesized term
|
||||
if (accept(TokenType::LPAREN)) {
|
||||
std::unique_ptr<Expr> e = parseTerm();
|
||||
expect(TokenType::RPAREN);
|
||||
return e;
|
||||
}
|
||||
|
||||
throw std::runtime_error("parser: expected \\, variable, or '(' at position "
|
||||
+ std::to_string(peek().start));
|
||||
}
|
||||
|
||||
// <application> ::= <simple> { <simple> }
|
||||
std::unique_ptr<Expr> Parser::parseApplication() {
|
||||
std::unique_ptr<Expr> expr = parseSimple();
|
||||
while (true) {
|
||||
TokenType t = peek().type;
|
||||
if (t == TokenType::VARIABLE ||
|
||||
t == TokenType::LAMBDA ||
|
||||
t == TokenType::LPAREN)
|
||||
{
|
||||
std::unique_ptr<Expr> rhs = parseSimple();
|
||||
expr = std::make_unique<Application>(std::move(expr), std::move(rhs));
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
return expr;
|
||||
}
|
||||
|
||||
// <term> ::= <application>
// Top of the expression grammar; currently just delegates.
std::unique_ptr<Expr> Parser::parseTerm() {
    return parseApplication();
}
|
||||
|
||||
// parse exactly one term and expect EOC
// (single-expression entry point; whole programs use parseProgram instead)
std::unique_ptr<Expr> Parser::parse() {
    std::unique_ptr<Expr> root = parseTerm();
    expect(TokenType::EOC);
    return root;
}
|
||||
|
||||
// <program> ::= { <definition> | <term> EOL } EOC
// Definitions go into `defs`; bare terms are returned in source order.
std::vector<std::unique_ptr<Expr>> Parser::parseProgram() {
    std::vector<std::unique_ptr<Expr>> results;

    while (peek().type != TokenType::EOC) {
        // first try a definition
        if (tryParseDefinition())
            continue;

        // otherwise a bare term ending in EOL
        results.push_back(parseTerm());
        expect(TokenType::EOL);
    }

    // consume the trailing EOC sentinel
    expect(TokenType::EOC);
    return results;
}
|
||||
|
56
src/print.cpp
Normal file
56
src/print.cpp
Normal file
|
@ -0,0 +1,56 @@
|
|||
#include "ast.hpp"
|
||||
#include <string>
|
||||
#include <sstream>
|
||||
#include <vector>
|
||||
|
||||
// Forward‐declare
|
||||
static void _print(const Expr* e, std::ostream& out);
|
||||
|
||||
// Public API: render an expression tree as a string, e.g. "\x y. (x y)".
// Declared `inline` because this .cpp is #include'd by main.cpp and also
// globbed into the build by CMake, so the definition appears in two
// translation units.
inline std::string toString(const std::unique_ptr<Expr>& e) {
    std::ostringstream oss;
    _print(e.get(), oss);
    return oss.str();
}
|
||||
|
||||
// Internal recursive printer
static void _print(const Expr* e, std::ostream& out) {
    // 1) Abstraction(s): collect the run of nested parameters so that
    //    \x.\y.body prints compactly as "\x y. body"
    if (auto ab = dynamic_cast<const Abstraction*>(e)) {
        std::vector<std::string> params;
        const Expr* body = ab;
        while (auto a2 = dynamic_cast<const Abstraction*>(body)) {
            params.push_back(a2->param);
            body = a2->body.get();
        }
        // print "\x y z. " — params is non-empty since e itself is an Abstraction
        out << '\\' << params[0];
        for (size_t i = 1; i < params.size(); ++i) {
            out << ' ' << params[i];
        }
        out << ". ";
        // then the body
        _print(body, out);
        return;
    }

    // 2) Variable
    if (auto v = dynamic_cast<const Variable*>(e)) {
        out << v->name;
        return;
    }

    // 3) Application: always fully parenthesized
    if (auto ap = dynamic_cast<const Application*>(e)) {
        out << '(';
        _print(ap->left.get(), out);
        out << ' ';
        _print(ap->right.get(), out);
        out << ')';
        return;
    }

    // fallback for unknown node types (should not happen)
    out << "<??>";
}
|
||||
|
27
src/token.cpp
Normal file
27
src/token.cpp
Normal file
|
@ -0,0 +1,27 @@
|
|||
#include <string>
|
||||
#include <stdio.h>
|
||||
|
||||
#include "token.hpp"
|
||||
|
||||
// Human-readable name of a token type, used by debug printers and parser
// error paths; returns "" for values outside the enum.
std::string display_tokentype(TokenType type) {
    switch (type) {
        case ERROR : return "ERROR";
        case EOL : return "EOL";
        case EOC : return "EOC";
        case LAMBDA : return "LAMBDA";
        case DOT : return "DOT";
        case LPAREN : return "LPAREN";
        case RPAREN : return "RPAREN";
        case EQUALS : return "EQUALS";
        case VARIABLE : return "VARIABLE";
        default : return "";
    }
}
|
||||
|
||||
// Debug helper: prints the token's lexeme, clipped to the token's span width.
// FIX: passing a std::string through printf's "%.*s" is undefined behavior —
// the conversion expects a const char*, so go through c_str().
void print_lexeme(Token self) {
    printf("Lexeme: '%.*s'\n", (int)(self.end-self.start), self.lexeme.c_str());
}
|
||||
|
||||
// Debug helper: prints "[TYPE] lexeme start..end".
// FIX: both std::string arguments (display_tokentype's return and lexeme)
// must go through c_str() — passing std::string to %s/%.*s is undefined
// behavior — and size_t takes %zu, not %li.
void print_token(Token self) {
    printf("[%s] %.*s %zu..%zu\n", display_tokentype(self.type).c_str(), (int)(self.end-self.start), self.lexeme.c_str(), self.start, self.end);
}
|
Loading…
Add table
Add a link
Reference in a new issue