This commit is contained in:
2026-02-13 02:57:01 -05:00
commit 15e5ccb064
23 changed files with 11899 additions and 0 deletions

583
src/ast.rs Normal file
View File

@@ -0,0 +1,583 @@
//! Abstract Syntax Tree for the Lux language
#![allow(dead_code)]
use std::fmt;
/// Source location for error reporting
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Hash)]
pub struct Span {
    pub start: usize,
    pub end: usize,
}

impl Span {
    /// Build a span covering the byte range `start..end`.
    pub fn new(start: usize, end: usize) -> Self {
        Span { start, end }
    }

    /// Smallest span that covers both `self` and `other`.
    pub fn merge(self, other: Span) -> Span {
        Span::new(self.start.min(other.start), self.end.max(other.end))
    }
}
/// An identifier (variable or type name)
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Ident {
    pub name: String,
    pub span: Span,
}

impl Ident {
    /// Build an identifier from anything convertible to `String`.
    pub fn new(name: impl Into<String>, span: Span) -> Self {
        let name = name.into();
        Ident { name, span }
    }
}

impl fmt::Display for Ident {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.name)
    }
}
/// Visibility modifier
///
/// Declarations default to `Private` (see the `Default` derive); parsers
/// only need to set this when an explicit `pub` marker is present.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum Visibility {
    /// Public - exported from module
    Public,
    /// Private - only visible within module (default)
    #[default]
    Private,
}
// ============ Schema Evolution ============

/// A version number for schema evolution (e.g., @v1, @v2)
///
/// Identity is defined by `number` alone; `span` is source-location
/// metadata. `PartialEq`, `Eq`, and `Hash` are implemented by hand
/// (instead of derived) so they stay consistent with `Ord`, which
/// compares only `number`. Deriving them would also compare `span`,
/// making `a == b` disagree with `a.cmp(&b) == Ordering::Equal` and
/// violating the `Ord` contract.
#[derive(Debug, Clone, Copy)]
pub struct Version {
    pub number: u32,
    pub span: Span,
}

impl PartialEq for Version {
    fn eq(&self, other: &Self) -> bool {
        self.number == other.number
    }
}

impl Eq for Version {}

impl std::hash::Hash for Version {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        // Must hash exactly the fields that participate in equality.
        self.number.hash(state);
    }
}

impl PartialOrd for Version {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for Version {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.number.cmp(&other.number)
    }
}

impl Version {
    pub fn new(number: u32, span: Span) -> Self {
        Self { number, span }
    }
}

impl fmt::Display for Version {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "@v{}", self.number)
    }
}
/// Version constraint for type annotations
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum VersionConstraint {
    /// Exactly this version: @v2
    Exact(Version),
    /// This version or later: @v2+
    AtLeast(Version),
    /// Latest version: @latest
    Latest(Span),
}

/// Renders the constraint in source syntax (`@v2`, `@v2+`, `@latest`),
/// delegating to `Version`'s `Display` for the numeric forms.
impl fmt::Display for VersionConstraint {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            VersionConstraint::Exact(v) => write!(f, "{}", v),
            VersionConstraint::AtLeast(v) => write!(f, "{}+", v),
            VersionConstraint::Latest(_) => write!(f, "@latest"),
        }
    }
}
/// Migration from one version to another
///
/// Declared on a versioned type as `from @vN = { ... }`; the target
/// version is the version of the enclosing `TypeDecl`.
#[derive(Debug, Clone)]
pub struct Migration {
    /// Source version: from @v1
    pub from_version: Version,
    /// Migration body (expression that transforms old to new)
    pub body: Expr,
    pub span: Span,
}
/// Module path: foo/bar/baz
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ModulePath {
pub segments: Vec<Ident>,
pub span: Span,
}
impl ModulePath {
pub fn to_string(&self) -> String {
self.segments
.iter()
.map(|s| s.name.as_str())
.collect::<Vec<_>>()
.join("/")
}
}
/// Import declaration
///
/// `alias`, `items`, and `wildcard` encode the different import forms;
/// presumably at most one of them is set by the parser — TODO confirm.
#[derive(Debug, Clone)]
pub struct ImportDecl {
    /// The module path being imported
    pub path: ModulePath,
    /// Optional alias: import foo/bar as baz
    pub alias: Option<Ident>,
    /// Specific items to import: import foo.{a, b, c}
    pub items: Option<Vec<Ident>>,
    /// Import all items: import foo.*
    pub wildcard: bool,
    pub span: Span,
}

/// A complete program (or module)
#[derive(Debug, Clone)]
pub struct Program {
    /// Module imports
    pub imports: Vec<ImportDecl>,
    /// Top-level declarations
    pub declarations: Vec<Declaration>,
}
/// Top-level declarations
///
/// Each variant wraps its own dedicated struct so the payload can grow
/// without touching this enum.
#[derive(Debug, Clone)]
pub enum Declaration {
    /// Function definition: fn name(params): ReturnType with {Effects} = body
    Function(FunctionDecl),
    /// Effect declaration: effect Name { fn op1(...): T, ... }
    Effect(EffectDecl),
    /// Type alias or ADT: type Name = ...
    Type(TypeDecl),
    /// Handler definition: handler name: Effect { ... }
    Handler(HandlerDecl),
    /// Let binding at top level
    Let(LetDecl),
}
/// Function declaration
#[derive(Debug, Clone)]
pub struct FunctionDecl {
    pub visibility: Visibility,
    pub name: Ident,
    /// Generic type parameters, e.g. `T` in `fn id<T>(x: T): T`.
    pub type_params: Vec<Ident>,
    pub params: Vec<Parameter>,
    pub return_type: TypeExpr,
    /// Effect names from the `with {Effects}` clause; empty when pure.
    pub effects: Vec<Ident>,
    pub body: Expr,
    pub span: Span,
}

/// Function parameter
#[derive(Debug, Clone)]
pub struct Parameter {
    pub name: Ident,
    pub typ: TypeExpr,
    pub span: Span,
}
/// Effect declaration
#[derive(Debug, Clone)]
pub struct EffectDecl {
    pub name: Ident,
    pub type_params: Vec<Ident>,
    /// The operations handlers for this effect must implement.
    pub operations: Vec<EffectOp>,
    pub span: Span,
}

/// An operation within an effect
#[derive(Debug, Clone)]
pub struct EffectOp {
    pub name: Ident,
    pub params: Vec<Parameter>,
    pub return_type: TypeExpr,
    pub span: Span,
}
/// Type declaration (alias or ADT)
///
/// Versioned types carry a `version` and may list `migrations` from
/// earlier versions of the same type.
#[derive(Debug, Clone)]
pub struct TypeDecl {
    pub visibility: Visibility,
    pub name: Ident,
    pub type_params: Vec<Ident>,
    /// Optional version annotation: type User @v2 { ... }
    pub version: Option<Version>,
    pub definition: TypeDef,
    /// Migrations from previous versions: from @v1 = { ... }
    pub migrations: Vec<Migration>,
    pub span: Span,
}

/// Type definition
#[derive(Debug, Clone)]
pub enum TypeDef {
    /// Type alias: type Foo = Bar
    Alias(TypeExpr),
    /// Record type: type Foo { field: Type, ... }
    Record(Vec<RecordField>),
    /// Enum/ADT: type Foo = A | B(Int) | C { x: Int }
    Enum(Vec<Variant>),
}

/// Record field
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct RecordField {
    pub name: Ident,
    pub typ: TypeExpr,
    pub span: Span,
}

/// Enum variant
#[derive(Debug, Clone)]
pub struct Variant {
    pub name: Ident,
    pub fields: VariantFields,
    pub span: Span,
}

/// Variant field types
#[derive(Debug, Clone)]
pub enum VariantFields {
    /// Unit variant: A
    Unit,
    /// Tuple variant: A(Int, String)
    Tuple(Vec<TypeExpr>),
    /// Record variant: A { x: Int, y: String }
    Record(Vec<RecordField>),
}
/// Handler declaration
#[derive(Debug, Clone)]
pub struct HandlerDecl {
    pub name: Ident,
    /// Handler-level parameters (handler state), distinct from the
    /// per-operation parameters in each `HandlerImpl`.
    pub params: Vec<Parameter>,
    /// The effect this handler implements.
    pub effect: Ident,
    pub implementations: Vec<HandlerImpl>,
    pub span: Span,
}

/// Implementation of an effect operation in a handler
#[derive(Debug, Clone)]
pub struct HandlerImpl {
    pub op_name: Ident,
    pub params: Vec<Ident>,
    pub resume: Option<Ident>, // The continuation parameter
    pub body: Expr,
    pub span: Span,
}

/// Let declaration
#[derive(Debug, Clone)]
pub struct LetDecl {
    pub visibility: Visibility,
    pub name: Ident,
    /// Optional type annotation; `None` means the type is inferred.
    pub typ: Option<TypeExpr>,
    pub value: Expr,
    pub span: Span,
}
/// Type expressions
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum TypeExpr {
    /// Named type: Int, String, List
    Named(Ident),
    /// Generic type application: List<Int>, Map<String, Int>
    App(Box<TypeExpr>, Vec<TypeExpr>),
    /// Function type: fn(A, B): C
    Function {
        params: Vec<TypeExpr>,
        return_type: Box<TypeExpr>,
        /// Effects the function may perform; empty means pure.
        effects: Vec<Ident>,
    },
    /// Tuple type: (A, B, C)
    Tuple(Vec<TypeExpr>),
    /// Record type: { name: String, age: Int }
    Record(Vec<RecordField>),
    /// Unit type
    Unit,
    /// Versioned type: User @v2, User @v2+, User @latest
    Versioned {
        base: Box<TypeExpr>,
        constraint: VersionConstraint,
    },
}

impl TypeExpr {
    /// Convenience constructor for a bare named type with a default
    /// (zero) span — useful for synthesized/built-in types.
    pub fn named(name: &str) -> Self {
        TypeExpr::Named(Ident::new(name, Span::default()))
    }
}
/// Expressions
///
/// `Literal` and `Var` take their spans from their payloads; every other
/// variant carries an explicit `span` field (see `Expr::span`).
#[derive(Debug, Clone)]
pub enum Expr {
    /// Literal values
    Literal(Literal),
    /// Variable reference
    Var(Ident),
    /// Binary operation: a + b
    BinaryOp {
        op: BinaryOp,
        left: Box<Expr>,
        right: Box<Expr>,
        span: Span,
    },
    /// Unary operation: -a, !a
    UnaryOp {
        op: UnaryOp,
        operand: Box<Expr>,
        span: Span,
    },
    /// Function call: foo(a, b)
    Call {
        func: Box<Expr>,
        args: Vec<Expr>,
        span: Span,
    },
    /// Effect operation call: Effect.operation(args)
    EffectOp {
        effect: Ident,
        operation: Ident,
        args: Vec<Expr>,
        span: Span,
    },
    /// Field access: foo.bar
    Field {
        object: Box<Expr>,
        field: Ident,
        span: Span,
    },
    /// Lambda: fn(x, y) => x + y or fn(x: Int): Int => x + 1
    Lambda {
        params: Vec<Parameter>,
        return_type: Option<Box<TypeExpr>>,
        effects: Vec<Ident>,
        body: Box<Expr>,
        span: Span,
    },
    /// Let binding: let x = e1; e2
    Let {
        name: Ident,
        typ: Option<TypeExpr>,
        value: Box<Expr>,
        body: Box<Expr>,
        span: Span,
    },
    /// If expression: if cond then e1 else e2
    If {
        condition: Box<Expr>,
        then_branch: Box<Expr>,
        else_branch: Box<Expr>,
        span: Span,
    },
    /// Match expression
    Match {
        scrutinee: Box<Expr>,
        arms: Vec<MatchArm>,
        span: Span,
    },
    /// Block: { e1; e2; e3 }
    Block {
        statements: Vec<Statement>,
        result: Box<Expr>,
        span: Span,
    },
    /// Record literal: { name: "Alice", age: 30 }
    Record {
        fields: Vec<(Ident, Expr)>,
        span: Span,
    },
    /// Tuple literal: (1, "hello", true)
    Tuple { elements: Vec<Expr>, span: Span },
    /// List literal: [1, 2, 3]
    List { elements: Vec<Expr>, span: Span },
    /// Run with handlers: run expr with { Effect = handler, ... }
    Run {
        expr: Box<Expr>,
        handlers: Vec<(Ident, Expr)>,
        span: Span,
    },
    /// Resume continuation in handler (like calling the continuation)
    Resume { value: Box<Expr>, span: Span },
}
impl Expr {
    /// Source span covered by this expression.
    ///
    /// `Literal` and `Var` delegate to their payload's span; all other
    /// variants store a `span` field directly, so they share one arm.
    pub fn span(&self) -> Span {
        match self {
            Expr::Literal(lit) => lit.span,
            Expr::Var(ident) => ident.span,
            Expr::BinaryOp { span, .. }
            | Expr::UnaryOp { span, .. }
            | Expr::Call { span, .. }
            | Expr::EffectOp { span, .. }
            | Expr::Field { span, .. }
            | Expr::Lambda { span, .. }
            | Expr::Let { span, .. }
            | Expr::If { span, .. }
            | Expr::Match { span, .. }
            | Expr::Block { span, .. }
            | Expr::Record { span, .. }
            | Expr::Tuple { span, .. }
            | Expr::List { span, .. }
            | Expr::Run { span, .. }
            | Expr::Resume { span, .. } => *span,
        }
    }
}
/// Literal values
#[derive(Debug, Clone)]
pub struct Literal {
    pub kind: LiteralKind,
    pub span: Span,
}

/// The payload of a `Literal`, one variant per primitive type.
#[derive(Debug, Clone)]
pub enum LiteralKind {
    Int(i64),
    Float(f64),
    String(String),
    Char(char),
    Bool(bool),
    Unit,
}
/// Binary operators
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum BinaryOp {
    // Arithmetic
    Add,
    Sub,
    Mul,
    Div,
    Mod,
    // Comparison
    Eq,
    Ne,
    Lt,
    Le,
    Gt,
    Ge,
    // Logical
    And,
    Or,
    // Other
    Pipe, // |>
}

impl fmt::Display for BinaryOp {
    /// Writes the operator's surface syntax (e.g. `+`, `==`, `|>`).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let sym = match self {
            BinaryOp::Add => "+",
            BinaryOp::Sub => "-",
            BinaryOp::Mul => "*",
            BinaryOp::Div => "/",
            BinaryOp::Mod => "%",
            BinaryOp::Eq => "==",
            BinaryOp::Ne => "!=",
            BinaryOp::Lt => "<",
            BinaryOp::Le => "<=",
            BinaryOp::Gt => ">",
            BinaryOp::Ge => ">=",
            BinaryOp::And => "&&",
            BinaryOp::Or => "||",
            BinaryOp::Pipe => "|>",
        };
        f.write_str(sym)
    }
}
/// Unary operators
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum UnaryOp {
    Neg, // -
    Not, // !
}

impl fmt::Display for UnaryOp {
    /// Writes the operator's surface syntax (`-` or `!`).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(match self {
            UnaryOp::Neg => "-",
            UnaryOp::Not => "!",
        })
    }
}
/// Statement in a block
///
/// Unlike `Expr::Let`, a block-level `Let` has no body of its own: the
/// binding scopes over the remaining statements of the enclosing block.
#[derive(Debug, Clone)]
pub enum Statement {
    /// Expression statement
    Expr(Expr),
    /// Let binding without body (in blocks)
    Let {
        name: Ident,
        typ: Option<TypeExpr>,
        value: Expr,
        span: Span,
    },
}
/// Match arm
#[derive(Debug, Clone)]
pub struct MatchArm {
    pub pattern: Pattern,
    /// Optional `if` guard; the arm matches only when it evaluates true.
    pub guard: Option<Expr>,
    pub body: Expr,
    pub span: Span,
}

/// Patterns for matching
#[derive(Debug, Clone)]
pub enum Pattern {
    /// Wildcard: _
    Wildcard(Span),
    /// Variable binding: x
    Var(Ident),
    /// Literal: 42, "hello", true
    Literal(Literal),
    /// Constructor: Some(x), None, Ok(v)
    Constructor {
        name: Ident,
        fields: Vec<Pattern>,
        span: Span,
    },
    /// Record pattern: { name, age: a }
    Record {
        fields: Vec<(Ident, Pattern)>,
        span: Span,
    },
    /// Tuple pattern: (a, b, c)
    Tuple { elements: Vec<Pattern>, span: Span },
}
impl Pattern {
    /// Source span covered by this pattern.
    pub fn span(&self) -> Span {
        match self {
            Pattern::Wildcard(span) => *span,
            Pattern::Var(ident) => ident.span,
            Pattern::Literal(lit) => lit.span,
            // The remaining variants all carry an explicit span field.
            Pattern::Constructor { span, .. }
            | Pattern::Record { span, .. }
            | Pattern::Tuple { span, .. } => *span,
        }
    }
}

2202
src/interpreter.rs Normal file

File diff suppressed because it is too large Load Diff

633
src/lexer.rs Normal file
View File

@@ -0,0 +1,633 @@
//! Lexer for the Lux language
#![allow(dead_code)]
use crate::ast::Span;
use std::fmt;
use std::iter::Peekable;
use std::str::Chars;
/// Token types
#[derive(Debug, Clone, PartialEq)]
pub enum TokenKind {
    // Literals
    Int(i64),
    Float(f64),
    String(String),
    Char(char),
    Bool(bool),
    // Identifiers and keywords
    Ident(String),
    // Keywords
    Fn,
    Let,
    If,
    Then,
    Else,
    Match,
    With,
    Effect,
    Handler,
    Run,
    Resume,
    Type,
    True,
    False,
    Import,
    Pub,
    As,
    From,   // from (for migrations)
    Latest, // latest (for @latest version constraint)
    // Operators
    Plus,       // +
    Minus,      // -
    Star,       // *
    Slash,      // /
    Percent,    // %
    Eq,         // =
    EqEq,       // ==
    Ne,         // !=
    Lt,         // <
    Le,         // <=
    Gt,         // >
    Ge,         // >=
    And,        // &&
    Or,         // ||
    Not,        // !
    Pipe,       // |
    PipeGt,     // |>
    Arrow,      // =>
    ThinArrow,  // ->
    Dot,        // .
    Colon,      // :
    ColonColon, // ::
    Comma,      // ,
    Semi,       // ;
    At,         // @
    // Delimiters
    LParen,   // (
    RParen,   // )
    LBrace,   // {
    RBrace,   // }
    LBracket, // [
    RBracket, // ]
    // Special
    Underscore, // _
    Newline,
    Eof,
}

impl fmt::Display for TokenKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Payload-carrying tokens are formatted from their payload and
        // return early; every other token has a fixed textual form,
        // looked up in the table below and written in one call.
        let text = match self {
            TokenKind::Int(n) => return write!(f, "{}", n),
            TokenKind::Float(x) => return write!(f, "{}", x),
            TokenKind::String(s) => return write!(f, "\"{}\"", s),
            TokenKind::Char(c) => return write!(f, "'{}'", c),
            TokenKind::Bool(b) => return write!(f, "{}", b),
            TokenKind::Ident(s) => return f.write_str(s),
            TokenKind::Fn => "fn",
            TokenKind::Let => "let",
            TokenKind::If => "if",
            TokenKind::Then => "then",
            TokenKind::Else => "else",
            TokenKind::Match => "match",
            TokenKind::With => "with",
            TokenKind::Effect => "effect",
            TokenKind::Handler => "handler",
            TokenKind::Run => "run",
            TokenKind::Resume => "resume",
            TokenKind::Type => "type",
            TokenKind::Import => "import",
            TokenKind::Pub => "pub",
            TokenKind::As => "as",
            TokenKind::From => "from",
            TokenKind::Latest => "latest",
            TokenKind::True => "true",
            TokenKind::False => "false",
            TokenKind::Plus => "+",
            TokenKind::Minus => "-",
            TokenKind::Star => "*",
            TokenKind::Slash => "/",
            TokenKind::Percent => "%",
            TokenKind::Eq => "=",
            TokenKind::EqEq => "==",
            TokenKind::Ne => "!=",
            TokenKind::Lt => "<",
            TokenKind::Le => "<=",
            TokenKind::Gt => ">",
            TokenKind::Ge => ">=",
            TokenKind::And => "&&",
            TokenKind::Or => "||",
            TokenKind::Not => "!",
            TokenKind::Pipe => "|",
            TokenKind::PipeGt => "|>",
            TokenKind::Arrow => "=>",
            TokenKind::ThinArrow => "->",
            TokenKind::Dot => ".",
            TokenKind::Colon => ":",
            TokenKind::ColonColon => "::",
            TokenKind::Comma => ",",
            TokenKind::Semi => ";",
            TokenKind::At => "@",
            TokenKind::LParen => "(",
            TokenKind::RParen => ")",
            TokenKind::LBrace => "{",
            TokenKind::RBrace => "}",
            TokenKind::LBracket => "[",
            TokenKind::RBracket => "]",
            TokenKind::Underscore => "_",
            // A two-character escape sequence, matching the original
            // `write!(f, "\\n")` output.
            TokenKind::Newline => "\\n",
            TokenKind::Eof => "EOF",
        };
        f.write_str(text)
    }
}
/// A token with its source location
#[derive(Debug, Clone)]
pub struct Token {
    pub kind: TokenKind,
    /// Byte range in the original source string.
    pub span: Span,
}

impl Token {
    pub fn new(kind: TokenKind, span: Span) -> Self {
        Self { kind, span }
    }
}
/// Lexer error
#[derive(Debug, Clone)]
pub struct LexError {
    pub message: String,
    pub span: Span,
}

impl fmt::Display for LexError {
    /// Formats as `Lexer error at <start>-<end>: <message>`, where the
    /// positions are byte offsets into the source.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let LexError { message, span } = self;
        write!(f, "Lexer error at {}-{}: {}", span.start, span.end, message)
    }
}
/// The lexer
///
/// `pos` is the byte offset into `source` (UTF-8 aware); it is what
/// token `Span`s record.
pub struct Lexer<'a> {
    source: &'a str,
    chars: Peekable<Chars<'a>>,
    pos: usize,
}

impl<'a> Lexer<'a> {
    pub fn new(source: &'a str) -> Self {
        Self {
            source,
            chars: source.chars().peekable(),
            pos: 0,
        }
    }
/// Tokenize the entire source
pub fn tokenize(mut self) -> Result<Vec<Token>, LexError> {
let mut tokens = Vec::new();
loop {
let token = self.next_token()?;
let is_eof = token.kind == TokenKind::Eof;
tokens.push(token);
if is_eof {
break;
}
}
Ok(tokens)
}
    /// Scan and return the next token, skipping any leading whitespace
    /// and `//` comments first. At end of input this returns an `Eof`
    /// token (and keeps returning it on subsequent calls).
    fn next_token(&mut self) -> Result<Token, LexError> {
        self.skip_whitespace_and_comments();
        let start = self.pos;
        let Some(c) = self.advance() else {
            return Ok(Token::new(TokenKind::Eof, Span::new(start, start)));
        };
        let kind = match c {
            // Single-character tokens
            '+' => TokenKind::Plus,
            '*' => TokenKind::Star,
            '%' => TokenKind::Percent,
            '(' => TokenKind::LParen,
            ')' => TokenKind::RParen,
            '{' => TokenKind::LBrace,
            '}' => TokenKind::RBrace,
            '[' => TokenKind::LBracket,
            ']' => TokenKind::RBracket,
            ',' => TokenKind::Comma,
            ';' => TokenKind::Semi,
            '@' => TokenKind::At,
            // Newlines are significant and emitted as tokens.
            '\n' => TokenKind::Newline,
            // Multi-character tokens
            '-' => {
                if self.peek() == Some('>') {
                    self.advance();
                    TokenKind::ThinArrow
                } else {
                    TokenKind::Minus
                }
            }
            '/' => {
                if self.peek() == Some('/') {
                    // Line comment
                    // NOTE(review): defensive — skip_whitespace_and_comments
                    // above normally consumes comments before we get here.
                    self.skip_line_comment();
                    return self.next_token();
                } else {
                    TokenKind::Slash
                }
            }
            '=' => {
                if self.peek() == Some('=') {
                    self.advance();
                    TokenKind::EqEq
                } else if self.peek() == Some('>') {
                    self.advance();
                    TokenKind::Arrow
                } else {
                    TokenKind::Eq
                }
            }
            '!' => {
                if self.peek() == Some('=') {
                    self.advance();
                    TokenKind::Ne
                } else {
                    TokenKind::Not
                }
            }
            '<' => {
                if self.peek() == Some('=') {
                    self.advance();
                    TokenKind::Le
                } else {
                    TokenKind::Lt
                }
            }
            '>' => {
                if self.peek() == Some('=') {
                    self.advance();
                    TokenKind::Ge
                } else {
                    TokenKind::Gt
                }
            }
            '&' => {
                // A lone '&' is not a token in this language.
                if self.peek() == Some('&') {
                    self.advance();
                    TokenKind::And
                } else {
                    return Err(LexError {
                        message: "Expected '&&'".into(),
                        span: Span::new(start, self.pos),
                    });
                }
            }
            '|' => {
                if self.peek() == Some('|') {
                    self.advance();
                    TokenKind::Or
                } else if self.peek() == Some('>') {
                    self.advance();
                    TokenKind::PipeGt
                } else {
                    TokenKind::Pipe
                }
            }
            '.' => TokenKind::Dot,
            ':' => {
                if self.peek() == Some(':') {
                    self.advance();
                    TokenKind::ColonColon
                } else {
                    TokenKind::Colon
                }
            }
            '_' => {
                if self.peek().map_or(false, |c| c.is_alphanumeric()) {
                    // It's an identifier starting with _
                    self.scan_ident_rest(start)
                } else {
                    TokenKind::Underscore
                }
            }
            // String literals
            '"' => self.scan_string(start)?,
            // Char literals
            '\'' => self.scan_char(start)?,
            // Numbers
            c if c.is_ascii_digit() => self.scan_number(c, start)?,
            // Identifiers and keywords
            // NOTE(review): the `|| c == '_'` here is unreachable — '_' is
            // already handled by the dedicated arm above.
            c if c.is_alphabetic() || c == '_' => self.scan_ident_rest(start),
            _ => {
                return Err(LexError {
                    message: format!("Unexpected character: '{}'", c),
                    span: Span::new(start, self.pos),
                });
            }
        };
        Ok(Token::new(kind, Span::new(start, self.pos)))
    }
    /// Consume one character, advancing `pos` by its UTF-8 byte width.
    fn advance(&mut self) -> Option<char> {
        let c = self.chars.next()?;
        self.pos += c.len_utf8();
        Some(c)
    }

    /// Look at the next character without consuming it.
    fn peek(&mut self) -> Option<char> {
        self.chars.peek().copied()
    }
    /// Skip spaces, tabs, carriage returns, and `//` comments.
    ///
    /// Newlines are deliberately NOT skipped — they become `Newline`
    /// tokens. Two-character lookahead for `//` is done on a clone of
    /// the iterator so a lone `/` is left untouched.
    fn skip_whitespace_and_comments(&mut self) {
        while let Some(c) = self.peek() {
            if c == ' ' || c == '\t' || c == '\r' {
                self.advance();
            } else if c == '/' {
                // Check for comment
                let mut chars = self.chars.clone();
                chars.next(); // consume '/'
                if chars.peek() == Some(&'/') {
                    self.skip_line_comment();
                } else {
                    break;
                }
            } else {
                break;
            }
        }
    }
fn skip_line_comment(&mut self) {
while let Some(c) = self.peek() {
if c == '\n' {
break;
}
self.advance();
}
}
fn scan_string(&mut self, _start: usize) -> Result<TokenKind, LexError> {
let mut value = String::new();
loop {
match self.advance() {
Some('"') => break,
Some('\\') => {
let escaped = match self.advance() {
Some('n') => '\n',
Some('r') => '\r',
Some('t') => '\t',
Some('\\') => '\\',
Some('"') => '"',
Some(c) => c,
None => {
return Err(LexError {
message: "Unterminated string".into(),
span: Span::new(_start, self.pos),
});
}
};
value.push(escaped);
}
Some(c) => value.push(c),
None => {
return Err(LexError {
message: "Unterminated string".into(),
span: Span::new(_start, self.pos),
});
}
}
}
Ok(TokenKind::String(value))
}
    /// Scan a single-quoted character literal; the opening `'` has
    /// already been consumed. Supports the same escapes as strings
    /// (with `\'` instead of `\"`); unknown escapes pass through.
    fn scan_char(&mut self, start: usize) -> Result<TokenKind, LexError> {
        let c = match self.advance() {
            Some('\\') => match self.advance() {
                Some('n') => '\n',
                Some('r') => '\r',
                Some('t') => '\t',
                Some('\\') => '\\',
                Some('\'') => '\'',
                // Unknown escape: pass the character through.
                Some(c) => c,
                None => {
                    return Err(LexError {
                        message: "Unterminated character literal".into(),
                        span: Span::new(start, self.pos),
                    });
                }
            },
            Some(c) => c,
            None => {
                return Err(LexError {
                    message: "Unterminated character literal".into(),
                    span: Span::new(start, self.pos),
                });
            }
        };
        // Exactly one character is allowed before the closing quote.
        if self.advance() != Some('\'') {
            return Err(LexError {
                message: "Expected closing quote for character literal".into(),
                span: Span::new(start, self.pos),
            });
        }
        Ok(TokenKind::Char(c))
    }
    /// Scan an integer or float literal. `first` is the already-consumed
    /// leading digit; `start` is its byte offset.
    ///
    /// Underscores are accepted as digit separators and stripped before
    /// parsing. A `.` only begins a fractional part when followed by a
    /// digit, so `1.foo` lexes as `1` `.` `foo` (method-call syntax).
    fn scan_number(&mut self, first: char, start: usize) -> Result<TokenKind, LexError> {
        let mut num_str = String::new();
        num_str.push(first);
        while let Some(c) = self.peek() {
            if c.is_ascii_digit() || c == '_' {
                if c != '_' {
                    num_str.push(c);
                }
                self.advance();
            } else {
                break;
            }
        }
        // Check for float
        if self.peek() == Some('.') {
            // Look ahead to make sure it's not a method call
            let mut chars = self.chars.clone();
            chars.next(); // consume '.'
            if chars.peek().map_or(false, |c| c.is_ascii_digit()) {
                self.advance(); // consume '.'
                num_str.push('.');
                while let Some(c) = self.peek() {
                    if c.is_ascii_digit() || c == '_' {
                        if c != '_' {
                            num_str.push(c);
                        }
                        self.advance();
                    } else {
                        break;
                    }
                }
                let f: f64 = num_str.parse().map_err(|_| LexError {
                    message: "Invalid float literal".into(),
                    span: Span::new(start, self.pos),
                })?;
                return Ok(TokenKind::Float(f));
            }
        }
        // Parse can still fail, e.g. on i64 overflow.
        let n: i64 = num_str.parse().map_err(|_| LexError {
            message: "Invalid integer literal".into(),
            span: Span::new(start, self.pos),
        })?;
        Ok(TokenKind::Int(n))
    }
    /// Consume the remainder of an identifier whose first character (at
    /// byte offset `start`) is already consumed, then classify the whole
    /// lexeme as a keyword, boolean, or plain identifier.
    ///
    /// NOTE(review): "true"/"false" lex to `Bool` tokens here, so the
    /// `TokenKind::True`/`TokenKind::False` variants are never produced
    /// by the lexer — confirm whether anything else constructs them.
    fn scan_ident_rest(&mut self, start: usize) -> TokenKind {
        while let Some(c) = self.peek() {
            if c.is_alphanumeric() || c == '_' {
                self.advance();
            } else {
                break;
            }
        }
        // `start..pos` are byte offsets, so slicing the source is safe.
        let ident = &self.source[start..self.pos];
        match ident {
            "fn" => TokenKind::Fn,
            "let" => TokenKind::Let,
            "if" => TokenKind::If,
            "then" => TokenKind::Then,
            "else" => TokenKind::Else,
            "match" => TokenKind::Match,
            "with" => TokenKind::With,
            "effect" => TokenKind::Effect,
            "handler" => TokenKind::Handler,
            "run" => TokenKind::Run,
            "resume" => TokenKind::Resume,
            "type" => TokenKind::Type,
            "import" => TokenKind::Import,
            "pub" => TokenKind::Pub,
            "as" => TokenKind::As,
            "from" => TokenKind::From,
            "latest" => TokenKind::Latest,
            "true" => TokenKind::Bool(true),
            "false" => TokenKind::Bool(false),
            _ => TokenKind::Ident(ident.to_string()),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Lex `source` and return just the token kinds, dropping the
    /// newline tokens so tests can assert on content only.
    fn lex(source: &str) -> Vec<TokenKind> {
        Lexer::new(source)
            .tokenize()
            .unwrap()
            .into_iter()
            .map(|t| t.kind)
            .filter(|k| !matches!(k, TokenKind::Newline))
            .collect()
    }
    #[test]
    fn test_basic_tokens() {
        assert_eq!(
            lex("fn let if else"),
            vec![
                TokenKind::Fn,
                TokenKind::Let,
                TokenKind::If,
                TokenKind::Else,
                TokenKind::Eof
            ]
        );
    }
    #[test]
    fn test_operators() {
        assert_eq!(
            lex("+ - * / == != |>"),
            vec![
                TokenKind::Plus,
                TokenKind::Minus,
                TokenKind::Star,
                TokenKind::Slash,
                TokenKind::EqEq,
                TokenKind::Ne,
                TokenKind::PipeGt,
                TokenKind::Eof
            ]
        );
    }
    #[test]
    fn test_numbers() {
        assert_eq!(
            lex("42 3.14"),
            vec![TokenKind::Int(42), TokenKind::Float(3.14), TokenKind::Eof]
        );
    }
    #[test]
    fn test_strings() {
        assert_eq!(
            lex("\"hello\" \"world\""),
            vec![
                TokenKind::String("hello".into()),
                TokenKind::String("world".into()),
                TokenKind::Eof
            ]
        );
    }
    /// End-to-end check over a full function signature.
    #[test]
    fn test_function() {
        assert_eq!(
            lex("fn add(a: Int, b: Int): Int = a + b"),
            vec![
                TokenKind::Fn,
                TokenKind::Ident("add".into()),
                TokenKind::LParen,
                TokenKind::Ident("a".into()),
                TokenKind::Colon,
                TokenKind::Ident("Int".into()),
                TokenKind::Comma,
                TokenKind::Ident("b".into()),
                TokenKind::Colon,
                TokenKind::Ident("Int".into()),
                TokenKind::RParen,
                TokenKind::Colon,
                TokenKind::Ident("Int".into()),
                TokenKind::Eq,
                TokenKind::Ident("a".into()),
                TokenKind::Plus,
                TokenKind::Ident("b".into()),
                TokenKind::Eof
            ]
        );
    }
}

791
src/main.rs Normal file
View File

@@ -0,0 +1,791 @@
//! Lux - A functional programming language with first-class effects
mod ast;
mod interpreter;
mod lexer;
mod modules;
mod parser;
mod schema;
mod typechecker;
mod types;
use interpreter::Interpreter;
use parser::Parser;
use std::io::{self, Write};
use typechecker::TypeChecker;
/// Version string shown in the REPL banner.
const VERSION: &str = "0.1.0";

/// REPL help text.
///
/// Kept in sync with `handle_command`, which also accepts the short
/// aliases `:t` (for `:type`) and `:l` (for `:load`) — previously
/// undocumented here.
const HELP: &str = r#"
Lux - A functional language with first-class effects
Commands:
  :help, :h          Show this help
  :quit, :q          Exit the REPL
  :type, :t <expr>   Show the type of an expression
  :clear             Clear the environment
  :load, :l <file>   Load and execute a file
  :trace on/off      Enable/disable effect tracing
  :traces            Show recorded effect traces
Examples:
  > let x = 42
  > x + 1
  43
  > fn double(n: Int): Int = n * 2
  > double(21)
  42
  > Console.print("Hello, world!")
  Hello, world!
Debugging:
  > :trace on
  > Console.print("test")
  > :traces
  [  0.123ms] Console.print("test") → ()
"#;
/// Entry point: with a file argument, run that file; otherwise start
/// the interactive REPL.
fn main() {
    match std::env::args().nth(1) {
        Some(path) => run_file(&path),
        None => run_repl(),
    }
}
/// Load, type-check, and execute the Lux program at `path`.
///
/// The file's directory is added as a module search path so relative
/// imports resolve. Any error (I/O, module, type, runtime) is printed
/// to stderr and exits with status 1. A non-unit final value is printed.
fn run_file(path: &str) {
    use modules::ModuleLoader;
    use std::path::Path;
    let file_path = Path::new(path);
    let source = match std::fs::read_to_string(file_path) {
        Ok(s) => s,
        Err(e) => {
            eprintln!("Error reading file '{}': {}", path, e);
            std::process::exit(1);
        }
    };
    // Set up module loader with the file's directory as a search path
    let mut loader = ModuleLoader::new();
    if let Some(parent) = file_path.parent() {
        loader.add_search_path(parent.to_path_buf());
    }
    // Load and parse the program (including any imports)
    let program = match loader.load_source(&source, Some(file_path)) {
        Ok(p) => p,
        Err(e) => {
            eprintln!("Module error: {}", e);
            std::process::exit(1);
        }
    };
    let mut checker = TypeChecker::new();
    if let Err(errors) = checker.check_program_with_modules(&program, &loader) {
        for error in errors {
            eprintln!("Type error: {}", error);
        }
        std::process::exit(1);
    }
    let mut interp = Interpreter::new();
    match interp.run_with_modules(&program, &loader) {
        Ok(value) => {
            // Only print a result when it's not the unit value.
            if !matches!(value, interpreter::Value::Unit) {
                println!("{}", value);
            }
        }
        Err(e) => {
            eprintln!("Runtime error: {}", e);
            std::process::exit(1);
        }
    }
}
/// Interactive read-eval-print loop.
///
/// Multi-line input is detected by counting unbalanced `{}`/`()`.
/// NOTE(review): the counts include braces/parens inside string
/// literals and comments, so e.g. `"("` keeps the prompt in
/// continuation mode — acceptable for a REPL heuristic, but worth
/// knowing.
fn run_repl() {
    println!("Lux v{}", VERSION);
    println!("Type :help for help, :quit to exit\n");
    let mut interp = Interpreter::new();
    let mut checker = TypeChecker::new();
    let mut buffer = String::new();
    let mut continuation = false;
    loop {
        // Print prompt
        let prompt = if continuation { "... " } else { "lux> " };
        print!("{}", prompt);
        io::stdout().flush().unwrap();
        // Read input
        let mut line = String::new();
        match io::stdin().read_line(&mut line) {
            Ok(0) => break, // EOF
            Ok(_) => {}
            Err(e) => {
                eprintln!("Error reading input: {}", e);
                continue;
            }
        }
        let line = line.trim_end();
        // Handle commands
        if !continuation && line.starts_with(':') {
            handle_command(line, &mut interp, &mut checker);
            continue;
        }
        // Accumulate input
        buffer.push_str(line);
        buffer.push('\n');
        // Check for continuation (simple heuristic: unbalanced braces)
        let open_braces = buffer.chars().filter(|c| *c == '{').count();
        let close_braces = buffer.chars().filter(|c| *c == '}').count();
        let open_parens = buffer.chars().filter(|c| *c == '(').count();
        let close_parens = buffer.chars().filter(|c| *c == ')').count();
        if open_braces > close_braces || open_parens > close_parens {
            continuation = true;
            continue;
        }
        continuation = false;
        let input = std::mem::take(&mut buffer);
        if input.trim().is_empty() {
            continue;
        }
        eval_input(&input, &mut interp, &mut checker);
    }
    println!("\nGoodbye!");
}
/// Dispatch a REPL `:command` line (e.g. ":load foo.lux").
///
/// The line is split once on the first space: `cmd` is the command
/// itself, `arg` the (trimmed) remainder, if any.
fn handle_command(line: &str, interp: &mut Interpreter, checker: &mut TypeChecker) {
    let parts: Vec<&str> = line.splitn(2, ' ').collect();
    let cmd = parts[0];
    let arg = parts.get(1).map(|s| s.trim());
    match cmd {
        ":help" | ":h" => {
            println!("{}", HELP);
        }
        ":quit" | ":q" => {
            println!("Goodbye!");
            std::process::exit(0);
        }
        ":type" | ":t" => {
            if let Some(expr_str) = arg {
                show_type(expr_str, checker);
            } else {
                println!("Usage: :type <expression>");
            }
        }
        ":clear" => {
            // Drop all bindings by replacing both environments wholesale.
            *interp = Interpreter::new();
            *checker = TypeChecker::new();
            println!("Environment cleared.");
        }
        ":load" | ":l" => {
            if let Some(path) = arg {
                load_file(path, interp, checker);
            } else {
                println!("Usage: :load <filename>");
            }
        }
        ":trace" => match arg {
            Some("on") => {
                interp.enable_tracing();
                println!("Effect tracing enabled.");
            }
            Some("off") => {
                // NOTE(review): asymmetric with `enable_tracing()` above —
                // confirm disabling only needs the flag cleared.
                interp.trace_effects = false;
                println!("Effect tracing disabled.");
            }
            _ => {
                println!("Usage: :trace on|off");
            }
        },
        ":traces" => {
            if interp.get_traces().is_empty() {
                println!("No effect traces recorded. Use :trace on to enable tracing.");
            } else {
                interp.print_traces();
            }
        }
        _ => {
            println!("Unknown command: {}", cmd);
            println!("Type :help for help");
        }
    }
}
/// Handle `:type <expr>` by wrapping the expression in a synthetic
/// `let` so the program parser accepts it, then running the checker.
///
/// NOTE(review): despite its name this currently only reports whether
/// type checking passed — it does not print the inferred type, which
/// would need an API on `TypeChecker` to expose it.
fn show_type(expr_str: &str, checker: &mut TypeChecker) {
    // Wrap expression in a let to parse it
    let wrapped = format!("let _expr_ = {}", expr_str);
    match Parser::parse_source(&wrapped) {
        Ok(program) => {
            if let Err(errors) = checker.check_program(&program) {
                for error in errors {
                    println!("Type error: {}", error);
                }
            } else {
                println!("(type checking passed)");
            }
        }
        Err(e) => {
            println!("Parse error: {}", e);
        }
    }
}
/// Handle `:load <file>`: read, parse, type-check, and run a file in
/// the current REPL session, so its bindings persist afterwards.
/// Unlike `run_file`, errors are printed and the REPL keeps going, and
/// no module loader is involved (imports are not resolved here).
fn load_file(path: &str, interp: &mut Interpreter, checker: &mut TypeChecker) {
    let source = match std::fs::read_to_string(path) {
        Ok(s) => s,
        Err(e) => {
            println!("Error reading file '{}': {}", path, e);
            return;
        }
    };
    let program = match Parser::parse_source(&source) {
        Ok(p) => p,
        Err(e) => {
            println!("Parse error: {}", e);
            return;
        }
    };
    if let Err(errors) = checker.check_program(&program) {
        for error in errors {
            println!("Type error: {}", error);
        }
        return;
    }
    match interp.run(&program) {
        Ok(_) => println!("Loaded '{}'", path),
        Err(e) => println!("Runtime error: {}", e),
    }
}
/// Evaluate one chunk of REPL input.
///
/// First tries to parse it as a program (declarations). If that fails,
/// retries with the input wrapped in `let _result_ = ...` so bare
/// expressions also work; on success the expression's value is printed.
/// If both parses fail, the ORIGINAL parse error is reported, since the
/// wrapped error would reference synthetic code the user never typed.
fn eval_input(input: &str, interp: &mut Interpreter, checker: &mut TypeChecker) {
    // Try to parse as a program (declarations)
    match Parser::parse_source(input) {
        Ok(program) => {
            // Type check
            if let Err(errors) = checker.check_program(&program) {
                for error in errors {
                    println!("Type error: {}", error);
                }
                return;
            }
            // Execute
            match interp.run(&program) {
                Ok(value) => {
                    if !matches!(value, interpreter::Value::Unit) {
                        println!("{}", value);
                    }
                }
                Err(e) => {
                    println!("Runtime error: {}", e);
                }
            }
        }
        Err(parse_err) => {
            // Try wrapping as an expression
            let wrapped = format!("let _result_ = {}", input.trim());
            match Parser::parse_source(&wrapped) {
                Ok(program) => {
                    if let Err(errors) = checker.check_program(&program) {
                        for error in errors {
                            println!("Type error: {}", error);
                        }
                        return;
                    }
                    match interp.run(&program) {
                        Ok(value) => {
                            println!("{}", value);
                        }
                        Err(e) => {
                            println!("Runtime error: {}", e);
                        }
                    }
                }
                Err(_) => {
                    // Use original error
                    println!("Parse error: {}", parse_err);
                }
            }
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
    /// Test helper: parse, type-check, and run `source`, rendering the
    /// final value via `Display`. Any stage's failure becomes an `Err`
    /// with the (joined) error messages.
    fn eval(source: &str) -> Result<String, String> {
        let program = Parser::parse_source(source).map_err(|e| e.to_string())?;
        let mut checker = TypeChecker::new();
        checker.check_program(&program).map_err(|errors| {
            errors
                .iter()
                .map(|e| e.to_string())
                .collect::<Vec<_>>()
                .join("\n")
        })?;
        let mut interp = Interpreter::new();
        let value = interp.run(&program).map_err(|e| e.to_string())?;
        Ok(format!("{}", value))
    }
#[test]
fn test_arithmetic() {
assert_eq!(eval("let x = 1 + 2").unwrap(), "3");
assert_eq!(eval("let x = 10 - 3").unwrap(), "7");
assert_eq!(eval("let x = 4 * 5").unwrap(), "20");
assert_eq!(eval("let x = 15 / 3").unwrap(), "5");
}
#[test]
fn test_function() {
let source = r#"
fn add(a: Int, b: Int): Int = a + b
let result = add(3, 4)
"#;
assert_eq!(eval(source).unwrap(), "7");
}
#[test]
fn test_if_expr() {
let source = r#"
fn max(a: Int, b: Int): Int = if a > b then a else b
let result = max(5, 3)
"#;
assert_eq!(eval(source).unwrap(), "5");
}
#[test]
fn test_recursion() {
let source = r#"
fn factorial(n: Int): Int = if n <= 1 then 1 else n * factorial(n - 1)
let result = factorial(5)
"#;
assert_eq!(eval(source).unwrap(), "120");
}
#[test]
fn test_lambda() {
let source = r#"
let double = fn(x: Int): Int => x * 2
let result = double(21)
"#;
assert_eq!(eval(source).unwrap(), "42");
}
#[test]
fn test_records() {
let source = r#"
let person = { name: "Alice", age: 30 }
let result = person.age
"#;
assert_eq!(eval(source).unwrap(), "30");
}
#[test]
fn test_lists() {
let source = "let nums = [1, 2, 3]";
assert_eq!(eval(source).unwrap(), "[1, 2, 3]");
}
#[test]
fn test_tuples() {
let source = "let pair = (42, \"hello\")";
assert_eq!(eval(source).unwrap(), "(42, \"hello\")");
}
#[test]
fn test_block() {
let source = r#"
let result = {
let x = 10
let y = 20
x + y
}
"#;
assert_eq!(eval(source).unwrap(), "30");
}
#[test]
fn test_pipe() {
let source = r#"
fn double(x: Int): Int = x * 2
fn add_one(x: Int): Int = x + 1
let result = 5 |> double |> add_one
"#;
assert_eq!(eval(source).unwrap(), "11");
}
// ============ Standard Library Tests ============
// List tests
#[test]
fn test_list_length() {
assert_eq!(eval("let x = List.length([1, 2, 3])").unwrap(), "3");
assert_eq!(eval("let x = List.length([])").unwrap(), "0");
}
#[test]
fn test_list_reverse() {
assert_eq!(
eval("let x = List.reverse([1, 2, 3])").unwrap(),
"[3, 2, 1]"
);
assert_eq!(eval("let x = List.reverse([])").unwrap(), "[]");
}
#[test]
fn test_list_range() {
assert_eq!(eval("let x = List.range(0, 5)").unwrap(), "[0, 1, 2, 3, 4]");
assert_eq!(eval("let x = List.range(3, 3)").unwrap(), "[]");
assert_eq!(eval("let x = List.range(-2, 2)").unwrap(), "[-2, -1, 0, 1]");
}
#[test]
fn test_list_head() {
assert_eq!(eval("let x = List.head([1, 2, 3])").unwrap(), "Some(1)");
assert_eq!(eval("let x = List.head([])").unwrap(), "None");
}
#[test]
fn test_list_tail() {
assert_eq!(
eval("let x = List.tail([1, 2, 3])").unwrap(),
"Some([2, 3])"
);
assert_eq!(eval("let x = List.tail([1])").unwrap(), "Some([])");
assert_eq!(eval("let x = List.tail([])").unwrap(), "None");
}
#[test]
fn test_list_concat() {
assert_eq!(
eval("let x = List.concat([1, 2], [3, 4])").unwrap(),
"[1, 2, 3, 4]"
);
assert_eq!(eval("let x = List.concat([], [1])").unwrap(), "[1]");
assert_eq!(eval("let x = List.concat([1], [])").unwrap(), "[1]");
}
#[test]
fn test_list_get() {
assert_eq!(
eval("let x = List.get([10, 20, 30], 0)").unwrap(),
"Some(10)"
);
assert_eq!(
eval("let x = List.get([10, 20, 30], 2)").unwrap(),
"Some(30)"
);
assert_eq!(eval("let x = List.get([10, 20, 30], 5)").unwrap(), "None");
assert_eq!(eval("let x = List.get([10, 20, 30], -1)").unwrap(), "None");
}
#[test]
fn test_list_map() {
let source = r#"
fn double(x: Int): Int = x * 2
let result = List.map([1, 2, 3], double)
"#;
assert_eq!(eval(source).unwrap(), "[2, 4, 6]");
}
#[test]
fn test_list_map_lambda() {
let source = "let x = List.map([1, 2, 3], fn(x: Int): Int => x * x)";
assert_eq!(eval(source).unwrap(), "[1, 4, 9]");
}
#[test]
fn test_list_filter() {
let source = "let x = List.filter([1, 2, 3, 4, 5], fn(x: Int): Bool => x > 2)";
assert_eq!(eval(source).unwrap(), "[3, 4, 5]");
}
#[test]
fn test_list_filter_all() {
let source = "let x = List.filter([1, 2, 3], fn(x: Int): Bool => x > 10)";
assert_eq!(eval(source).unwrap(), "[]");
}
#[test]
fn test_list_fold() {
let source = "let x = List.fold([1, 2, 3, 4], 0, fn(acc: Int, x: Int): Int => acc + x)";
assert_eq!(eval(source).unwrap(), "10");
}
#[test]
fn test_list_fold_product() {
let source = "let x = List.fold([1, 2, 3, 4], 1, fn(acc: Int, x: Int): Int => acc * x)";
assert_eq!(eval(source).unwrap(), "24");
}
// String tests
#[test]
fn test_string_length() {
assert_eq!(eval(r#"let x = String.length("hello")"#).unwrap(), "5");
assert_eq!(eval(r#"let x = String.length("")"#).unwrap(), "0");
}
#[test]
fn test_string_split() {
assert_eq!(
eval(r#"let x = String.split("a,b,c", ",")"#).unwrap(),
r#"["a", "b", "c"]"#
);
assert_eq!(
eval(r#"let x = String.split("hello", ",")"#).unwrap(),
r#"["hello"]"#
);
}
#[test]
fn test_string_join() {
assert_eq!(
eval(r#"let x = String.join(["a", "b", "c"], "-")"#).unwrap(),
r#""a-b-c""#
);
assert_eq!(
eval(r#"let x = String.join(["hello"], ",")"#).unwrap(),
r#""hello""#
);
assert_eq!(eval(r#"let x = String.join([], ",")"#).unwrap(), r#""""#);
}
#[test]
fn test_string_trim() {
assert_eq!(
eval(r#"let x = String.trim(" hello ")"#).unwrap(),
r#""hello""#
);
assert_eq!(
eval(r#"let x = String.trim("hello")"#).unwrap(),
r#""hello""#
);
assert_eq!(eval(r#"let x = String.trim(" ")"#).unwrap(), r#""""#);
}
#[test]
fn test_string_contains() {
assert_eq!(
eval(r#"let x = String.contains("hello world", "world")"#).unwrap(),
"true"
);
assert_eq!(
eval(r#"let x = String.contains("hello", "xyz")"#).unwrap(),
"false"
);
assert_eq!(
eval(r#"let x = String.contains("hello", "")"#).unwrap(),
"true"
);
}
#[test]
fn test_string_replace() {
assert_eq!(
eval(r#"let x = String.replace("hello", "l", "L")"#).unwrap(),
r#""heLLo""#
);
assert_eq!(
eval(r#"let x = String.replace("aaa", "a", "b")"#).unwrap(),
r#""bbb""#
);
}
#[test]
fn test_string_chars() {
assert_eq!(eval(r#"let x = String.chars("hi")"#).unwrap(), "['h', 'i']");
assert_eq!(eval(r#"let x = String.chars("")"#).unwrap(), "[]");
}
#[test]
fn test_string_lines() {
// Note: Using actual newline in the string
let source = r#"let x = String.lines("a
b
c")"#;
assert_eq!(eval(source).unwrap(), r#"["a", "b", "c"]"#);
}
// Option tests
#[test]
fn test_option_constructors() {
assert_eq!(eval("let x = Some(42)").unwrap(), "Some(42)");
assert_eq!(eval("let x = None").unwrap(), "None");
}
#[test]
fn test_option_is_some() {
assert_eq!(eval("let x = Option.isSome(Some(42))").unwrap(), "true");
assert_eq!(eval("let x = Option.isSome(None)").unwrap(), "false");
}
#[test]
fn test_option_is_none() {
assert_eq!(eval("let x = Option.isNone(None)").unwrap(), "true");
assert_eq!(eval("let x = Option.isNone(Some(42))").unwrap(), "false");
}
#[test]
fn test_option_get_or_else() {
assert_eq!(eval("let x = Option.getOrElse(Some(42), 0)").unwrap(), "42");
assert_eq!(eval("let x = Option.getOrElse(None, 0)").unwrap(), "0");
}
#[test]
fn test_option_map() {
let source = "let x = Option.map(Some(5), fn(x: Int): Int => x * 2)";
assert_eq!(eval(source).unwrap(), "Some(10)");
}
#[test]
fn test_option_map_none() {
let source = "let x = Option.map(None, fn(x: Int): Int => x * 2)";
assert_eq!(eval(source).unwrap(), "None");
}
#[test]
fn test_option_flat_map() {
let source = "let x = Option.flatMap(Some(5), fn(x: Int): Option<Int> => Some(x * 2))";
assert_eq!(eval(source).unwrap(), "Some(10)");
}
#[test]
fn test_option_flat_map_to_none() {
let source = "let x = Option.flatMap(Some(5), fn(x: Int): Option<Int> => None)";
assert_eq!(eval(source).unwrap(), "None");
}
// Result tests
#[test]
fn test_result_constructors() {
assert_eq!(eval("let x = Ok(42)").unwrap(), "Ok(42)");
assert_eq!(eval(r#"let x = Err("error")"#).unwrap(), r#"Err("error")"#);
}
#[test]
fn test_result_is_ok() {
assert_eq!(eval("let x = Result.isOk(Ok(42))").unwrap(), "true");
assert_eq!(eval(r#"let x = Result.isOk(Err("e"))"#).unwrap(), "false");
}
#[test]
fn test_result_is_err() {
assert_eq!(eval(r#"let x = Result.isErr(Err("e"))"#).unwrap(), "true");
assert_eq!(eval("let x = Result.isErr(Ok(42))").unwrap(), "false");
}
#[test]
fn test_result_get_or_else() {
assert_eq!(eval("let x = Result.getOrElse(Ok(42), 0)").unwrap(), "42");
assert_eq!(
eval(r#"let x = Result.getOrElse(Err("e"), 0)"#).unwrap(),
"0"
);
}
#[test]
fn test_result_map() {
let source = "let x = Result.map(Ok(5), fn(x: Int): Int => x * 2)";
assert_eq!(eval(source).unwrap(), "Ok(10)");
}
#[test]
fn test_result_map_err() {
let source = r#"let x = Result.map(Err("e"), fn(x: Int): Int => x * 2)"#;
assert_eq!(eval(source).unwrap(), r#"Err("e")"#);
}
// Utility function tests
#[test]
fn test_to_string() {
assert_eq!(eval("let x = toString(42)").unwrap(), r#""42""#);
assert_eq!(eval("let x = toString(true)").unwrap(), r#""true""#);
assert_eq!(eval("let x = toString([1, 2])").unwrap(), r#""[1, 2]""#);
}
#[test]
fn test_type_of() {
assert_eq!(eval("let x = typeOf(42)").unwrap(), r#""Int""#);
assert_eq!(eval("let x = typeOf(true)").unwrap(), r#""Bool""#);
assert_eq!(eval("let x = typeOf([1, 2])").unwrap(), r#""List""#);
assert_eq!(eval(r#"let x = typeOf("hello")"#).unwrap(), r#""String""#);
}
// Pipe with stdlib tests
#[test]
fn test_pipe_with_list() {
assert_eq!(
eval("let x = [1, 2, 3] |> List.reverse").unwrap(),
"[3, 2, 1]"
);
assert_eq!(eval("let x = [1, 2, 3] |> List.length").unwrap(), "3");
}
#[test]
fn test_pipe_with_string() {
assert_eq!(
eval(r#"let x = " hello " |> String.trim"#).unwrap(),
r#""hello""#
);
}
// Combined stdlib usage tests
#[test]
fn test_list_filter_even() {
let source = r#"
fn isEven(x: Int): Bool = x % 2 == 0
let result = List.filter(List.range(1, 6), isEven)
"#;
assert_eq!(eval(source).unwrap(), "[2, 4]");
}
#[test]
fn test_option_chain() {
let source = r#"
fn times10(x: Int): Int = x * 10
let head = List.head([1, 2, 3])
let mapped = Option.map(head, times10)
let result = Option.getOrElse(mapped, 0)
"#;
assert_eq!(eval(source).unwrap(), "10");
}
#[test]
fn test_option_chain_empty() {
let source = r#"
fn times10(x: Int): Int = x * 10
let head = List.head([])
let mapped = Option.map(head, times10)
let result = Option.getOrElse(mapped, 0)
"#;
assert_eq!(eval(source).unwrap(), "0");
}
}

634
src/modules.rs Normal file
View File

@@ -0,0 +1,634 @@
//! Module system for the Lux language
//!
//! Handles loading, parsing, and resolving module imports.
use crate::ast::{Declaration, ImportDecl, Program, Visibility};
use crate::parser::Parser;
use std::collections::{HashMap, HashSet};
use std::fs;
use std::path::{Path, PathBuf};
/// Error raised while loading a module.
///
/// Carries both a human-readable `message` and the `module_path` that was
/// being loaded when the error occurred; `Display` combines the two.
#[derive(Debug, Clone)]
pub struct ModuleError {
    pub message: String,
    pub module_path: String,
}

impl std::fmt::Display for ModuleError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "Module error in '{}': {}", self.module_path, self.message)
    }
}

impl std::error::Error for ModuleError {}
/// A loaded and parsed module
#[derive(Debug, Clone)]
pub struct Module {
    /// The module's canonical path (e.g., "std/list")
    pub path: String,
    /// The parsed program
    pub program: Program,
    /// Names exported by this module (public declarations; effects and
    /// handlers are always exported)
    pub exports: HashSet<String>,
}
impl Module {
    /// All declarations in this module that are visible to importers.
    ///
    /// Functions, lets, and types count only when marked `pub`; effect and
    /// handler declarations are always considered public for now.
    pub fn public_declarations(&self) -> Vec<&Declaration> {
        let is_exported = |decl: &&Declaration| -> bool {
            match decl {
                Declaration::Function(f) => f.visibility == Visibility::Public,
                Declaration::Let(l) => l.visibility == Visibility::Public,
                Declaration::Type(t) => t.visibility == Visibility::Public,
                // Effects and handlers are always public for now.
                Declaration::Effect(_) | Declaration::Handler(_) => true,
            }
        };
        self.program
            .declarations
            .iter()
            .filter(is_exported)
            .collect()
    }
}
/// Module loader and resolver
///
/// Resolves import paths to `.lux` files under a set of search directories,
/// caches parsed modules, and tracks in-progress loads to detect circular
/// imports.
pub struct ModuleLoader {
    /// Base directories to search for modules
    search_paths: Vec<PathBuf>,
    /// Cache of loaded modules (path -> module)
    cache: HashMap<String, Module>,
    /// Modules currently being loaded (for circular dependency detection)
    loading: HashSet<String>,
}
impl ModuleLoader {
    /// Create a loader that searches the current directory.
    pub fn new() -> Self {
        Self::with_paths(vec![PathBuf::from(".")])
    }

    /// Create a loader with custom search paths
    pub fn with_paths(paths: Vec<PathBuf>) -> Self {
        Self {
            search_paths: paths,
            cache: HashMap::new(),
            loading: HashSet::new(),
        }
    }

    /// Add a search path
    pub fn add_search_path(&mut self, path: PathBuf) {
        self.search_paths.push(path);
    }

    /// Resolve a module path (e.g., "std/list") to the first matching
    /// `<search_path>/std/list.lux` file, or `None` if no search path has it.
    fn resolve_path(&self, module_path: &str) -> Option<PathBuf> {
        let relative_path = format!("{}.lux", module_path);
        self.search_paths
            .iter()
            .map(|base| base.join(&relative_path))
            .find(|candidate| candidate.exists())
    }

    /// Load a module by its import path, reusing the cache when possible.
    ///
    /// # Errors
    /// Fails on circular imports, unresolvable paths, unreadable files, and
    /// parse errors.
    ///
    /// BUGFIX: previously a failed load (file missing or parse error) left the
    /// path stuck in `self.loading`, so retrying the same module falsely
    /// reported "Circular dependency detected". The in-progress marker is now
    /// cleared on every exit path.
    pub fn load_module(&mut self, module_path: &str) -> Result<&Module, ModuleError> {
        // Fast path: already loaded and cached.
        if self.cache.contains_key(module_path) {
            return Ok(&self.cache[module_path]);
        }
        // `insert` returns false when the path is already present, i.e. we
        // re-entered a module that is still being loaded -> a cycle.
        if !self.loading.insert(module_path.to_string()) {
            return Err(ModuleError {
                message: "Circular dependency detected".to_string(),
                module_path: module_path.to_string(),
            });
        }
        let result = match self.resolve_path(module_path) {
            Some(file_path) => self.load_file(&file_path, module_path),
            None => Err(ModuleError {
                message: format!("Module not found. Searched in: {:?}", self.search_paths),
                module_path: module_path.to_string(),
            }),
        };
        // Always clear the in-progress marker, success or failure.
        self.loading.remove(module_path);
        let module = result?;
        self.cache.insert(module_path.to_string(), module);
        Ok(&self.cache[module_path])
    }

    /// Read, parse, and recursively load a module from `file_path`.
    fn load_file(&mut self, file_path: &Path, module_path: &str) -> Result<Module, ModuleError> {
        // Read the file
        let source = fs::read_to_string(file_path).map_err(|e| ModuleError {
            message: format!("Failed to read file: {}", e),
            module_path: module_path.to_string(),
        })?;
        // Parse the source
        let program = Parser::parse_source(&source).map_err(|e| ModuleError {
            message: format!("Parse error: {}", e),
            module_path: module_path.to_string(),
        })?;
        // Eagerly load this module's own imports so the cache is complete
        // before the module itself is considered loaded.
        for import in &program.imports {
            let import_path = import.path.to_string();
            self.load_module(&import_path)?;
        }
        // Collect exports
        let exports = self.collect_exports(&program);
        Ok(Module {
            path: module_path.to_string(),
            program,
            exports,
        })
    }

    /// Load a program from source (for REPL or direct execution).
    ///
    /// If `base_path` is given, its parent directory is added to the search
    /// paths so sibling modules resolve. All imports are loaded eagerly.
    pub fn load_source(
        &mut self,
        source: &str,
        base_path: Option<&Path>,
    ) -> Result<Program, ModuleError> {
        // Add base path to search paths if provided (deduplicated).
        if let Some(base) = base_path {
            if let Some(parent) = base.parent() {
                if !self.search_paths.contains(&parent.to_path_buf()) {
                    self.search_paths.push(parent.to_path_buf());
                }
            }
        }
        // Parse the source
        let program = Parser::parse_source(source).map_err(|e| ModuleError {
            message: format!("Parse error: {}", e),
            module_path: "<main>".to_string(),
        })?;
        // Load any imports
        for import in &program.imports {
            let import_path = import.path.to_string();
            self.load_module(&import_path)?;
        }
        Ok(program)
    }

    /// Collect exported names from a program: `pub` functions, lets, and
    /// types, plus every effect and handler (always exported).
    fn collect_exports(&self, program: &Program) -> HashSet<String> {
        let mut exports = HashSet::new();
        for decl in &program.declarations {
            match decl {
                Declaration::Function(f) if f.visibility == Visibility::Public => {
                    exports.insert(f.name.name.clone());
                }
                Declaration::Let(l) if l.visibility == Visibility::Public => {
                    exports.insert(l.name.name.clone());
                }
                Declaration::Type(t) if t.visibility == Visibility::Public => {
                    exports.insert(t.name.name.clone());
                }
                Declaration::Effect(e) => {
                    // Effects are always exported
                    exports.insert(e.name.name.clone());
                }
                Declaration::Handler(h) => {
                    // Handlers are always exported
                    exports.insert(h.name.name.clone());
                }
                // Private functions/lets/types fall through the guards above.
                _ => {}
            }
        }
        exports
    }

    /// Get a cached module
    pub fn get_module(&self, module_path: &str) -> Option<&Module> {
        self.cache.get(module_path)
    }

    /// Get all loaded modules
    pub fn loaded_modules(&self) -> impl Iterator<Item = (&String, &Module)> {
        self.cache.iter()
    }

    /// Clear the module cache
    pub fn clear_cache(&mut self) {
        self.cache.clear();
    }

    /// Resolve imports for a program and return the names to be imported.
    ///
    /// Every module referenced must already be loaded (see [`Self::load_module`]).
    /// The returned map goes from local binding name to the module/export it
    /// refers to.
    ///
    /// # Errors
    /// Fails if a referenced module is not loaded or a selectively imported
    /// item is not exported by its module.
    pub fn resolve_imports(
        &self,
        imports: &[ImportDecl],
    ) -> Result<HashMap<String, ResolvedImport>, ModuleError> {
        let mut resolved = HashMap::new();
        for import in imports {
            let module_path = import.path.to_string();
            let module = self.get_module(&module_path).ok_or_else(|| ModuleError {
                message: "Module not loaded".to_string(),
                module_path: module_path.clone(),
            })?;
            if import.wildcard {
                // import foo.* -> import all exports directly
                for export in &module.exports {
                    resolved.insert(
                        export.clone(),
                        ResolvedImport {
                            module_path: module_path.clone(),
                            name: export.clone(),
                            kind: ImportKind::Direct,
                        },
                    );
                }
            } else if let Some(ref items) = import.items {
                // import foo.{a, b, c} -> import specific items
                for item in items {
                    if !module.exports.contains(&item.name) {
                        return Err(ModuleError {
                            message: format!("'{}' is not exported from module", item.name),
                            module_path: module_path.clone(),
                        });
                    }
                    resolved.insert(
                        item.name.clone(),
                        ResolvedImport {
                            module_path: module_path.clone(),
                            name: item.name.clone(),
                            kind: ImportKind::Direct,
                        },
                    );
                }
            } else {
                // import foo/bar -> bind the whole module as an object, named
                // by the alias if given, else the last path segment.
                let import_name = if let Some(ref alias) = import.alias {
                    alias.name.clone()
                } else {
                    import
                        .path
                        .segments
                        .last()
                        .map(|s| s.name.clone())
                        .unwrap_or_else(|| module_path.clone())
                };
                resolved.insert(
                    import_name,
                    ResolvedImport {
                        module_path: module_path.clone(),
                        name: module_path.clone(),
                        kind: ImportKind::Module,
                    },
                );
            }
        }
        Ok(resolved)
    }
}
impl Default for ModuleLoader {
fn default() -> Self {
Self::new()
}
}
/// A resolved import
///
/// Produced by `ModuleLoader::resolve_imports`: binds a local name to the
/// module and exported item it refers to.
#[derive(Debug, Clone)]
pub struct ResolvedImport {
    /// The module path this import comes from
    pub module_path: String,
    /// The name being imported (for `ImportKind::Module` this is the module
    /// path itself)
    pub name: String,
    /// What kind of import this is
    pub kind: ImportKind,
}
/// Kind of import
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ImportKind {
    /// Import as a module object (import foo/bar) — members accessed via the
    /// module name
    Module,
    /// Direct import of a name into scope (import foo.{bar} or import foo.*)
    Direct,
}
#[cfg(test)]
mod tests {
    //! Tests for module loading: resolution, caching, cycle detection, and
    //! end-to-end import execution through the type checker and interpreter.
    use super::*;
    use std::io::Write;
    use tempfile::TempDir;

    /// Write `content` to `<dir>/<name>.lux`, creating parent directories
    /// as needed, and return the created path.
    fn create_test_module(dir: &Path, name: &str, content: &str) -> PathBuf {
        let path = dir.join(format!("{}.lux", name));
        if let Some(parent) = path.parent() {
            fs::create_dir_all(parent).unwrap();
        }
        let mut file = fs::File::create(&path).unwrap();
        file.write_all(content.as_bytes()).unwrap();
        path
    }

    #[test]
    fn test_load_simple_module() {
        let dir = TempDir::new().unwrap();
        create_test_module(
            dir.path(),
            "math",
            r#"
pub fn add(a: Int, b: Int): Int = a + b
pub fn sub(a: Int, b: Int): Int = a - b
fn private_fn(): Int = 42
"#,
        );
        let mut loader = ModuleLoader::with_paths(vec![dir.path().to_path_buf()]);
        let module = loader.load_module("math").unwrap();
        assert_eq!(module.path, "math");
        // Only `pub` declarations should appear in the export set.
        assert!(module.exports.contains("add"));
        assert!(module.exports.contains("sub"));
        assert!(!module.exports.contains("private_fn"));
    }

    #[test]
    fn test_load_nested_module() {
        let dir = TempDir::new().unwrap();
        create_test_module(
            dir.path(),
            "std/list",
            r#"
pub fn length(list: List<Int>): Int = 0
"#,
        );
        let mut loader = ModuleLoader::with_paths(vec![dir.path().to_path_buf()]);
        let module = loader.load_module("std/list").unwrap();
        assert_eq!(module.path, "std/list");
        assert!(module.exports.contains("length"));
    }

    #[test]
    fn test_module_not_found() {
        let dir = TempDir::new().unwrap();
        let mut loader = ModuleLoader::with_paths(vec![dir.path().to_path_buf()]);
        let result = loader.load_module("nonexistent");
        assert!(result.is_err());
        assert!(result.unwrap_err().message.contains("not found"));
    }

    #[test]
    fn test_circular_dependency_detection() {
        // Two modules importing each other must be rejected, not loop forever.
        let dir = TempDir::new().unwrap();
        create_test_module(
            dir.path(),
            "a",
            r#"
import b
pub fn foo(): Int = 1
"#,
        );
        create_test_module(
            dir.path(),
            "b",
            r#"
import a
pub fn bar(): Int = 2
"#,
        );
        let mut loader = ModuleLoader::with_paths(vec![dir.path().to_path_buf()]);
        let result = loader.load_module("a");
        assert!(result.is_err());
        assert!(result.unwrap_err().message.contains("Circular"));
    }

    #[test]
    fn test_module_caching() {
        let dir = TempDir::new().unwrap();
        create_test_module(
            dir.path(),
            "cached",
            r#"
pub fn foo(): Int = 42
"#,
        );
        let mut loader = ModuleLoader::with_paths(vec![dir.path().to_path_buf()]);
        // Load twice
        loader.load_module("cached").unwrap();
        loader.load_module("cached").unwrap();
        // Should only be in cache once
        assert_eq!(loader.cache.len(), 1);
    }

    #[test]
    fn test_end_to_end_module_import() {
        use crate::interpreter::Interpreter;
        use crate::typechecker::TypeChecker;
        let dir = TempDir::new().unwrap();
        // Create a utility module with public functions
        create_test_module(
            dir.path(),
            "utils",
            r#"
pub fn double(x: Int): Int = x * 2
pub fn square(x: Int): Int = x * x
fn private_helper(): Int = 0
"#,
        );
        // Create a main program that imports and uses the module
        let main_source = r#"
import utils
let result = utils.double(21)
"#;
        // Set up module loader
        let mut loader = ModuleLoader::with_paths(vec![dir.path().to_path_buf()]);
        // Load and parse the main program
        let main_path = dir.path().join("main.lux");
        let program = loader.load_source(main_source, Some(&main_path)).unwrap();
        // Type check with module support
        let mut checker = TypeChecker::new();
        checker
            .check_program_with_modules(&program, &loader)
            .unwrap();
        // Run with module support
        let mut interp = Interpreter::new();
        let result = interp.run_with_modules(&program, &loader).unwrap();
        // Should evaluate to 42
        assert_eq!(format!("{}", result), "42");
    }

    #[test]
    fn test_selective_import() {
        use crate::interpreter::Interpreter;
        use crate::typechecker::TypeChecker;
        let dir = TempDir::new().unwrap();
        // Create a module with multiple exports
        create_test_module(
            dir.path(),
            "math",
            r#"
pub fn add(a: Int, b: Int): Int = a + b
pub fn mul(a: Int, b: Int): Int = a * b
"#,
        );
        // Import only the add function
        let main_source = r#"
import math.{add}
let result = add(10, 5)
"#;
        let mut loader = ModuleLoader::with_paths(vec![dir.path().to_path_buf()]);
        let main_path = dir.path().join("main.lux");
        let program = loader.load_source(main_source, Some(&main_path)).unwrap();
        let mut checker = TypeChecker::new();
        checker
            .check_program_with_modules(&program, &loader)
            .unwrap();
        let mut interp = Interpreter::new();
        let result = interp.run_with_modules(&program, &loader).unwrap();
        assert_eq!(format!("{}", result), "15");
    }

    #[test]
    fn test_module_with_alias() {
        use crate::interpreter::Interpreter;
        use crate::typechecker::TypeChecker;
        let dir = TempDir::new().unwrap();
        // Create a nested module
        create_test_module(
            dir.path(),
            "lib/helpers",
            r#"
pub fn greet(): String = "hello"
"#,
        );
        // Import with alias
        let main_source = r#"
import lib/helpers as h
let result = h.greet()
"#;
        let mut loader = ModuleLoader::with_paths(vec![dir.path().to_path_buf()]);
        let main_path = dir.path().join("main.lux");
        let program = loader.load_source(main_source, Some(&main_path)).unwrap();
        let mut checker = TypeChecker::new();
        checker
            .check_program_with_modules(&program, &loader)
            .unwrap();
        let mut interp = Interpreter::new();
        let result = interp.run_with_modules(&program, &loader).unwrap();
        assert_eq!(format!("{}", result), "\"hello\"");
    }

    #[test]
    fn test_transitive_imports() {
        // main -> mid -> base: imports of imports must load automatically.
        use crate::interpreter::Interpreter;
        use crate::typechecker::TypeChecker;
        let dir = TempDir::new().unwrap();
        // Create base module
        create_test_module(
            dir.path(),
            "base",
            r#"
pub fn value(): Int = 100
"#,
        );
        // Create mid module that imports base
        create_test_module(
            dir.path(),
            "mid",
            r#"
import base
pub fn doubled(): Int = base.value() * 2
"#,
        );
        // Create main that imports mid
        let main_source = r#"
import mid
let result = mid.doubled()
"#;
        let mut loader = ModuleLoader::with_paths(vec![dir.path().to_path_buf()]);
        let main_path = dir.path().join("main.lux");
        let program = loader.load_source(main_source, Some(&main_path)).unwrap();
        let mut checker = TypeChecker::new();
        checker
            .check_program_with_modules(&program, &loader)
            .unwrap();
        let mut interp = Interpreter::new();
        let result = interp.run_with_modules(&program, &loader).unwrap();
        assert_eq!(format!("{}", result), "200");
    }
}

1935
src/parser.rs Normal file

File diff suppressed because it is too large Load Diff

330
src/schema.rs Normal file
View File

@@ -0,0 +1,330 @@
//! Schema Evolution for the Lux language
//!
//! Handles versioned types, compatibility checking, and migrations.
#![allow(dead_code)]
use crate::ast::{Migration, RecordField, TypeDecl, TypeDef};
use std::collections::HashMap;
/// Describes the compatibility between two versions of a type
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Compatibility {
    /// Fully compatible - no changes needed
    Compatible,
    /// Compatible with auto-migration (e.g., adding optional field with default);
    /// the payload lists the individual migration steps
    AutoMigrate(Vec<AutoMigration>),
    /// Breaking change - requires explicit migration; the payload lists the
    /// individual breaking changes (may be empty for wholesale kind changes)
    Breaking(Vec<BreakingChange>),
}
/// An automatic migration step
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum AutoMigration {
    /// Add a field with a default value (rendered as source text, e.g. "None")
    AddFieldWithDefault { field_name: String, default: String },
    /// Widen a numeric type (e.g., Int32 -> Int64)
    WidenType {
        field_name: String,
        from: String,
        to: String,
    },
}
/// A breaking change that requires explicit migration
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum BreakingChange {
    /// Field was removed (also used for removed enum variants)
    FieldRemoved { field_name: String },
    /// Field was renamed
    FieldRenamed { old_name: String, new_name: String },
    /// Field type changed incompatibly (types rendered via Debug formatting)
    FieldTypeChanged {
        field_name: String,
        old_type: String,
        new_type: String,
    },
    /// Required field added without default
    RequiredFieldAdded { field_name: String },
}
/// Registry of versioned types
///
/// Stores every registered version of every type so that compatibility and
/// migration chains can be computed between versions.
#[derive(Debug, Default)]
pub struct SchemaRegistry {
    /// Map from type name to versions: TypeName -> (Version -> TypeDef)
    versions: HashMap<String, HashMap<u32, VersionedTypeDef>>,
}
/// A versioned type definition with its migrations
#[derive(Debug, Clone)]
pub struct VersionedTypeDef {
    /// The schema version number (1 when no version annotation was given)
    pub version: u32,
    /// The type's definition at this version
    pub definition: TypeDef,
    /// Migrations declared on this version (each records the version it
    /// migrates from)
    pub migrations: Vec<Migration>,
}
impl SchemaRegistry {
    /// Create an empty registry.
    pub fn new() -> Self {
        Self::default()
    }

    /// Register a versioned type.
    ///
    /// A declaration without a version annotation is stored as version 1.
    /// Re-registering the same (name, version) pair replaces the previous
    /// definition.
    pub fn register(&mut self, name: &str, type_decl: &TypeDecl) {
        let version = type_decl.version.map(|v| v.number).unwrap_or(1);
        let versioned_def = VersionedTypeDef {
            version,
            definition: type_decl.definition.clone(),
            migrations: type_decl.migrations.clone(),
        };
        self.versions
            .entry(name.to_string())
            .or_default()
            .insert(version, versioned_def);
    }

    /// Get all versions of a type
    pub fn get_versions(&self, name: &str) -> Option<&HashMap<u32, VersionedTypeDef>> {
        self.versions.get(name)
    }

    /// Get a specific version of a type
    pub fn get_version(&self, name: &str, version: u32) -> Option<&VersionedTypeDef> {
        self.versions.get(name)?.get(&version)
    }

    /// Get the latest (highest) version number of a type
    pub fn latest_version(&self, name: &str) -> Option<u32> {
        self.versions.get(name)?.keys().max().copied()
    }

    /// Check compatibility between two registered versions of `name`.
    ///
    /// # Errors
    /// Returns an error if either version is not registered.
    pub fn check_compatibility(
        &self,
        name: &str,
        from_version: u32,
        to_version: u32,
    ) -> Result<Compatibility, String> {
        let from_def = self
            .get_version(name, from_version)
            .ok_or_else(|| format!("Version {} of type '{}' not found", from_version, name))?;
        let to_def = self
            .get_version(name, to_version)
            .ok_or_else(|| format!("Version {} of type '{}' not found", to_version, name))?;
        compare_type_defs(&from_def.definition, &to_def.definition)
    }

    /// Check whether `to_version` declares an explicit migration from
    /// `from_version`. Returns false if `to_version` is not registered.
    pub fn has_migration(&self, name: &str, from_version: u32, to_version: u32) -> bool {
        if let Some(to_def) = self.get_version(name, to_version) {
            to_def
                .migrations
                .iter()
                .any(|m| m.from_version.number == from_version)
        } else {
            false
        }
    }

    /// Build the step-by-step migration chain from one version to another:
    /// v1 -> v3 yields `[(1, 2), (2, 3)]`. Returns an empty chain when
    /// `from_version >= to_version` (no forward migration needed).
    ///
    /// # Errors
    /// Returns an error if any version along the chain is not registered for
    /// `name`. (Previously the name parameter was ignored, so chains through
    /// versions that were never registered were silently accepted.)
    pub fn get_migration_chain(
        &self,
        name: &str,
        from_version: u32,
        to_version: u32,
    ) -> Result<Vec<(u32, u32)>, String> {
        if from_version >= to_version {
            return Ok(vec![]);
        }
        // Validate every version on the chain before emitting any steps.
        for v in from_version..=to_version {
            if self.get_version(name, v).is_none() {
                return Err(format!(
                    "Version {} of type '{}' not found; cannot build migration chain @v{} -> @v{}",
                    v, name, from_version, to_version
                ));
            }
        }
        // Simple contiguous chain: v1 -> v2 -> v3 -> ... -> vN
        Ok((from_version..to_version).map(|v| (v, v + 1)).collect())
    }
}
/// Compare two type definitions for compatibility
fn compare_type_defs(from: &TypeDef, to: &TypeDef) -> Result<Compatibility, String> {
match (from, to) {
(TypeDef::Record(from_fields), TypeDef::Record(to_fields)) => {
compare_record_fields(from_fields, to_fields)
}
(TypeDef::Enum(from_variants), TypeDef::Enum(to_variants)) => {
// For enums, adding variants is compatible, removing is breaking
let from_names: Vec<_> = from_variants.iter().map(|v| &v.name.name).collect();
let to_names: Vec<_> = to_variants.iter().map(|v| &v.name.name).collect();
let removed: Vec<_> = from_names
.iter()
.filter(|n| !to_names.contains(n))
.collect();
if removed.is_empty() {
Ok(Compatibility::Compatible)
} else {
Ok(Compatibility::Breaking(
removed
.iter()
.map(|n| BreakingChange::FieldRemoved {
field_name: n.to_string(),
})
.collect(),
))
}
}
(TypeDef::Alias(from_type), TypeDef::Alias(to_type)) => {
// Type aliases: check if the underlying types are compatible
if from_type == to_type {
Ok(Compatibility::Compatible)
} else {
Ok(Compatibility::Breaking(vec![
BreakingChange::FieldTypeChanged {
field_name: "<alias>".to_string(),
old_type: format!("{:?}", from_type),
new_type: format!("{:?}", to_type),
},
]))
}
}
_ => {
// Different type kinds are breaking
Ok(Compatibility::Breaking(vec![]))
}
}
}
/// Compare record fields for compatibility.
///
/// Changes are reported deterministically: removals and type changes follow
/// the declaration order of `from`, additions follow the declaration order of
/// `to`. (The previous implementation iterated `HashMap` keys, so diagnostic
/// order varied between runs.)
fn compare_record_fields(
    from: &[RecordField],
    to: &[RecordField],
) -> Result<Compatibility, String> {
    // Name -> field maps used purely for membership/lookup.
    let from_map: HashMap<&str, &RecordField> =
        from.iter().map(|f| (f.name.name.as_str(), f)).collect();
    let to_map: HashMap<&str, &RecordField> =
        to.iter().map(|f| (f.name.name.as_str(), f)).collect();
    let mut auto_migrations = Vec::new();
    let mut breaking_changes = Vec::new();
    // Removed fields: present in `from` but missing from `to`.
    for field in from {
        if !to_map.contains_key(field.name.name.as_str()) {
            breaking_changes.push(BreakingChange::FieldRemoved {
                field_name: field.name.name.clone(),
            });
        }
    }
    // Added fields: an Option-typed field can be auto-filled with `None`; a
    // required field is breaking. (A full implementation would also honor
    // explicit default annotations.)
    for field in to {
        if !from_map.contains_key(field.name.name.as_str()) {
            if is_optional_type(&field.typ) {
                auto_migrations.push(AutoMigration::AddFieldWithDefault {
                    field_name: field.name.name.clone(),
                    default: "None".to_string(),
                });
            } else {
                breaking_changes.push(BreakingChange::RequiredFieldAdded {
                    field_name: field.name.name.clone(),
                });
            }
        }
    }
    // Type changes in fields present on both sides. All type changes are
    // treated as breaking for now (no numeric-widening detection yet).
    for field in from {
        if let Some(to_field) = to_map.get(field.name.name.as_str()) {
            if field.typ != to_field.typ {
                breaking_changes.push(BreakingChange::FieldTypeChanged {
                    field_name: field.name.name.clone(),
                    old_type: format!("{:?}", field.typ),
                    new_type: format!("{:?}", to_field.typ),
                });
            }
        }
    }
    // Any breaking change trumps auto-migrations.
    if !breaking_changes.is_empty() {
        Ok(Compatibility::Breaking(breaking_changes))
    } else if !auto_migrations.is_empty() {
        Ok(Compatibility::AutoMigrate(auto_migrations))
    } else {
        Ok(Compatibility::Compatible)
    }
}
/// Check if a type expression represents an optional type
fn is_optional_type(typ: &crate::ast::TypeExpr) -> bool {
match typ {
crate::ast::TypeExpr::Named(ident) => ident.name == "Option",
crate::ast::TypeExpr::App(base, _) => {
if let crate::ast::TypeExpr::Named(ident) = base.as_ref() {
ident.name == "Option"
} else {
false
}
}
_ => false,
}
}
#[cfg(test)]
mod tests {
    //! Unit tests for record-field compatibility comparison.
    use super::*;
    use crate::ast::{Ident, Span, TypeExpr};

    /// Build a record field `name: typ` with default (zero) spans.
    fn make_field(name: &str, typ: &str) -> RecordField {
        RecordField {
            name: Ident::new(name, Span::default()),
            typ: TypeExpr::Named(Ident::new(typ, Span::default())),
            span: Span::default(),
        }
    }

    #[test]
    fn test_compatible_same_fields() {
        let from = vec![make_field("name", "String"), make_field("age", "Int")];
        let to = vec![make_field("name", "String"), make_field("age", "Int")];
        let result = compare_record_fields(&from, &to).unwrap();
        assert_eq!(result, Compatibility::Compatible);
    }

    #[test]
    fn test_breaking_field_removed() {
        let from = vec![make_field("name", "String"), make_field("age", "Int")];
        let to = vec![make_field("name", "String")];
        let result = compare_record_fields(&from, &to).unwrap();
        assert!(matches!(result, Compatibility::Breaking(_)));
    }

    #[test]
    fn test_breaking_field_added_required() {
        // A new non-Option field has no auto-default, so it is breaking.
        let from = vec![make_field("name", "String")];
        let to = vec![make_field("name", "String"), make_field("age", "Int")];
        let result = compare_record_fields(&from, &to).unwrap();
        assert!(matches!(result, Compatibility::Breaking(_)));
    }

    #[test]
    fn test_breaking_field_type_changed() {
        let from = vec![make_field("name", "String")];
        let to = vec![make_field("name", "Int")];
        let result = compare_record_fields(&from, &to).unwrap();
        assert!(matches!(result, Compatibility::Breaking(_)));
    }
}

1228
src/typechecker.rs Normal file

File diff suppressed because it is too large Load Diff

1083
src/types.rs Normal file

File diff suppressed because it is too large Load Diff