refactor: interpreter and type system improvements

- Parser: single-line enums without a leading pipe, optional else branch (defaults to Unit)
- Schema evolution: execute registered migrations in Schema.migrate; warn on breaking changes without a migration
- Builtins and types: List.forEach, String.parseInt/parseFloat; span-insensitive type equality in field comparison

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
2026-02-15 03:54:23 -05:00
parent 634f665b1b
commit c6d7f5cffb
5 changed files with 400 additions and 64 deletions

View File

@@ -25,6 +25,7 @@ pub enum BuiltinFn {
ListLength,
ListGet,
ListRange,
ListForEach,
// String operations
StringSplit,
@@ -35,6 +36,8 @@ pub enum BuiltinFn {
StringLength,
StringChars,
StringLines,
StringParseInt,
StringParseFloat,
// Option operations
OptionMap,
@@ -842,6 +845,10 @@ impl Interpreter {
("all".to_string(), Value::Builtin(BuiltinFn::ListAll)),
("take".to_string(), Value::Builtin(BuiltinFn::ListTake)),
("drop".to_string(), Value::Builtin(BuiltinFn::ListDrop)),
(
"forEach".to_string(),
Value::Builtin(BuiltinFn::ListForEach),
),
]));
env.define("List", list_module);
@@ -888,6 +895,14 @@ impl Interpreter {
"fromChar".to_string(),
Value::Builtin(BuiltinFn::StringFromChar),
),
(
"parseInt".to_string(),
Value::Builtin(BuiltinFn::StringParseInt),
),
(
"parseFloat".to_string(),
Value::Builtin(BuiltinFn::StringParseFloat),
),
]));
env.define("String", string_module);
@@ -1138,6 +1153,20 @@ impl Interpreter {
self.global_env.define(&variant.name.name, constructor);
}
}
// Register migrations for versioned types
for migration in &type_decl.migrations {
let stored = StoredMigration {
body: migration.body.clone(),
env: self.global_env.clone(),
};
self.register_migration(
&type_decl.name.name,
migration.from_version.number,
stored,
);
}
Ok(Value::Unit)
}
@@ -1615,11 +1644,10 @@ impl Interpreter {
loop {
match result {
EvalResult::Value(v) => return Ok(v),
EvalResult::Effect(_) => {
return Err(RuntimeError {
message: "Effect in callback not supported".to_string(),
span: Some(span),
});
EvalResult::Effect(req) => {
// Handle the effect and continue
let handled = self.handle_effect(req)?;
return Ok(handled);
}
EvalResult::TailCall { func, args, span } => {
result = self.eval_call(func, args, span)?;
@@ -1853,6 +1881,36 @@ impl Interpreter {
Ok(EvalResult::Value(Value::List(lines)))
}
BuiltinFn::StringParseInt => {
let s = Self::expect_arg_1::<String>(&args, "String.parseInt", span)?;
let trimmed = s.trim();
match trimmed.parse::<i64>() {
Ok(n) => Ok(EvalResult::Value(Value::Constructor {
name: "Some".to_string(),
fields: vec![Value::Int(n)],
})),
Err(_) => Ok(EvalResult::Value(Value::Constructor {
name: "None".to_string(),
fields: vec![],
})),
}
}
BuiltinFn::StringParseFloat => {
let s = Self::expect_arg_1::<String>(&args, "String.parseFloat", span)?;
let trimmed = s.trim();
match trimmed.parse::<f64>() {
Ok(f) => Ok(EvalResult::Value(Value::Constructor {
name: "Some".to_string(),
fields: vec![Value::Float(f)],
})),
Err(_) => Ok(EvalResult::Value(Value::Constructor {
name: "None".to_string(),
fields: vec![],
})),
}
}
// Option operations
BuiltinFn::OptionMap => {
let (opt, func) = Self::expect_args_2::<Value, Value>(&args, "Option.map", span)?;
@@ -2105,27 +2163,9 @@ impl Interpreter {
Value::Int(n) => *n as u32,
_ => return Err(err("Schema.migrate: second argument must be an Int")),
};
match &args[0] {
Value::Versioned { type_name, version, value } => {
if *version == target {
// Same version, return as-is
Ok(EvalResult::Value(args[0].clone()))
} else if *version < target {
// Upgrade - for now just update version (no migration logic)
Ok(EvalResult::Value(Value::Versioned {
type_name: type_name.clone(),
version: target,
value: value.clone(),
}))
} else {
Err(err(&format!(
"Cannot downgrade from version {} to {}",
version, target
)))
}
}
_ => Err(err("Schema.migrate: first argument must be a Versioned value")),
}
// Use migrate_value which executes registered migrations
let migrated = self.migrate_value(args[0].clone(), target)?;
Ok(EvalResult::Value(migrated))
}
BuiltinFn::GetVersion => {
@@ -2327,6 +2367,18 @@ impl Interpreter {
Ok(EvalResult::Value(Value::List(result)))
}
BuiltinFn::ListForEach => {
// List.forEach(list, fn(item) => { effectful code })
// Unlike map, forEach doesn't collect results - it just runs effects
let (list, func) =
Self::expect_args_2::<Vec<Value>, Value>(&args, "List.forEach", span)?;
for item in list {
// Call the function for each item, ignoring the result
self.eval_call_to_value(func.clone(), vec![item], span)?;
}
Ok(EvalResult::Value(Value::Unit))
}
// Additional String operations
BuiltinFn::StringStartsWith => {
let (s, prefix) = Self::expect_args_2::<String, String>(&args, "String.startsWith", span)?;
@@ -3967,4 +4019,69 @@ mod tests {
_ => panic!("Expected Versioned value"),
}
}
#[test]
fn test_migration_from_type_declaration() {
    use crate::parser::Parser;
    // A migration declared inside a versioned type must be registered at
    // declaration time and executed when Schema.migrate upgrades a value.
    let source = r#"
type User @v2 {
name: String,
email: String,
from @v1 = { name: old.name, email: "default@example.com" }
}
// Create a v1 user using Schema.versioned
let v1_user = Schema.versioned("User", 1, { name: "Alice" })
// Migrate to v2 - should use the declared migration
let v2_user = Schema.migrate(v1_user, 2)
// Get the migrated value
let version = Schema.getVersion(v2_user)
"#;
    let ast = Parser::parse_source(source).expect("parse failed");
    let mut interpreter = Interpreter::new();
    let outcome = interpreter.run(&ast);
    assert!(outcome.is_ok(), "Interpreter failed: {:?}", outcome);
    // The program's final binding is the version number after migration.
    assert_eq!(format!("{}", outcome.unwrap()), "2");
}
#[test]
fn test_migration_chain() {
    use crate::parser::Parser;
    // Migrating across multiple versions must compose the per-step
    // migrations: v1 -> v2 -> v3, even when requested in one call.
    let source = r#"
type Config @v3 {
host: String,
port: Int,
secure: Bool,
from @v2 = { host: old.host, port: old.port, secure: true },
from @v1 = { host: old.host, port: 8080 }
}
// Create v1 config
let v1_config = Schema.versioned("Config", 1, { host: "localhost" })
// Migrate directly to v3 - should go v1 -> v2 -> v3
let v3_config = Schema.migrate(v1_config, 3)
let version = Schema.getVersion(v3_config)
"#;
    let ast = Parser::parse_source(source).expect("parse failed");
    let mut interpreter = Interpreter::new();
    let outcome = interpreter.run(&ast);
    assert!(outcome.is_ok(), "Interpreter failed: {:?}", outcome);
    assert_eq!(format!("{}", outcome.unwrap()), "3");
}
}

View File

@@ -479,8 +479,11 @@ impl Parser {
self.skip_newlines();
if self.check(TokenKind::Pipe) {
// Enum type - requires leading | for variants
// Enum type - with leading | for variants
(TypeDef::Enum(self.parse_variants()?), Vec::new())
} else if self.is_single_line_enum() {
// Single-line enum without leading pipe: type Status = Ok | Error
(TypeDef::Enum(self.parse_single_line_variants()?), Vec::new())
} else {
// Type alias - any type expression
(TypeDef::Alias(self.parse_type()?), Vec::new())
@@ -1253,6 +1256,85 @@ impl Parser {
Ok(migrations)
}
/// Check if we're looking at a single-line enum (no leading pipe),
/// e.g. `type Status = Ok | Error`.
///
/// Heuristic: starting at the current token, scan forward on the same
/// logical line for a `|` that is not nested inside parentheses or
/// braces. Purely a lookahead — consumes no tokens.
///
/// Fix: the original kept a `found_ident` flag that was written once and
/// never read; that dead state is removed here with no behavior change.
fn is_single_line_enum(&self) -> bool {
    // A variant list must begin with an identifier.
    if !matches!(&self.peek().kind, TokenKind::Ident(_)) {
        return false;
    }
    // NOTE(review): bracket tokens (`[` / `]`) are not tracked in the
    // nesting depth — if the grammar allows `|` inside brackets on a
    // type line, this heuristic would misfire; confirm against the lexer.
    let mut depth: usize = 0;
    for token in &self.tokens[self.pos..] {
        match &token.kind {
            TokenKind::LParen | TokenKind::LBrace => depth += 1,
            // saturating_sub mirrors the original `if depth > 0` guard:
            // an unmatched closer never underflows the counter.
            TokenKind::RParen | TokenKind::RBrace => depth = depth.saturating_sub(1),
            // Top-level pipe on this line: it's a single-line enum.
            TokenKind::Pipe if depth == 0 => return true,
            // End of the logical line without seeing a top-level pipe.
            TokenKind::Newline | TokenKind::Eof => return false,
            _ => {}
        }
    }
    false
}
/// Parse the variants of a single-line enum written without a leading
/// pipe, e.g. `type Status = Ok | Error(String) | Info { msg: String }`.
///
/// Each variant is a bare identifier (unit), `Name(T, ...)` (tuple), or
/// `Name { field: T, ... }` (record); variants are separated by `|`.
fn parse_single_line_variants(&mut self) -> Result<Vec<Variant>, ParseError> {
    let mut variants = Vec::new();
    loop {
        let start = self.current_span();
        let name = self.parse_ident()?;
        let fields;
        if self.check(TokenKind::LParen) {
            // Tuple variant: Name(T1, T2, ...), trailing comma not required.
            self.advance();
            let mut payload = Vec::new();
            loop {
                if self.check(TokenKind::RParen) {
                    break;
                }
                payload.push(self.parse_type()?);
                if !self.check(TokenKind::RParen) {
                    self.expect(TokenKind::Comma)?;
                }
            }
            self.expect(TokenKind::RParen)?;
            fields = VariantFields::Tuple(payload);
        } else if self.check(TokenKind::LBrace) {
            // Record variant: Name { field: T, ... }.
            self.advance();
            self.skip_newlines();
            let record = self.parse_record_fields()?;
            self.expect(TokenKind::RBrace)?;
            fields = VariantFields::Record(record);
        } else {
            // Bare variant carrying no payload.
            fields = VariantFields::Unit;
        }
        let span = start.merge(self.previous_span());
        variants.push(Variant { name, fields, span });
        // A `|` separator means another variant follows; otherwise done.
        if !self.check(TokenKind::Pipe) {
            return Ok(variants);
        }
        self.advance();
        self.skip_newlines();
    }
}
fn parse_variants(&mut self) -> Result<Vec<Variant>, ParseError> {
let mut variants = Vec::new();
@@ -1660,9 +1742,19 @@ impl Parser {
let then_branch = Box::new(self.parse_expr()?);
self.skip_newlines();
self.expect(TokenKind::Else)?;
self.skip_newlines();
let else_branch = Box::new(self.parse_expr()?);
// Else is optional - if missing, synthesize a Unit value
let else_branch = if self.check(TokenKind::Else) {
self.expect(TokenKind::Else)?;
self.skip_newlines();
Box::new(self.parse_expr()?)
} else {
// No else clause - use Unit as the else branch
Box::new(Expr::Literal(Literal {
kind: LiteralKind::Unit,
span: then_branch.span(),
}))
};
let span = start.merge(else_branch.span());
Ok(Expr::If {

View File

@@ -199,6 +199,67 @@ fn compare_type_defs(from: &TypeDef, to: &TypeDef) -> Result<Compatibility, Stri
}
}
/// Compare two `TypeExpr`s for structural equality, ignoring source spans.
///
/// Named types compare by name; composite types compare component-wise;
/// record fields compare by name regardless of declaration order.
fn types_equal(a: &crate::ast::TypeExpr, b: &crate::ast::TypeExpr) -> bool {
    use crate::ast::TypeExpr;

    // Element-wise structural equality of two type lists.
    fn lists_equal(xs: &[crate::ast::TypeExpr], ys: &[crate::ast::TypeExpr]) -> bool {
        xs.len() == ys.len() && xs.iter().zip(ys).all(|(x, y)| types_equal(x, y))
    }

    match (a, b) {
        (TypeExpr::Named(x), TypeExpr::Named(y)) => x.name == y.name,
        (TypeExpr::App(x_base, x_args), TypeExpr::App(y_base, y_args)) => {
            types_equal(x_base, y_base) && lists_equal(x_args, y_args)
        }
        (
            TypeExpr::Function {
                params: x_params,
                return_type: x_ret,
                effects: x_effects,
            },
            TypeExpr::Function {
                params: y_params,
                return_type: y_ret,
                effects: y_effects,
            },
        ) => {
            lists_equal(x_params, y_params)
                && types_equal(x_ret, y_ret)
                // Effects compare by name only, mirroring Named types.
                && x_effects.len() == y_effects.len()
                && x_effects.iter().zip(y_effects).all(|(x, y)| x.name == y.name)
        }
        (TypeExpr::Tuple(xs), TypeExpr::Tuple(ys)) => lists_equal(xs, ys),
        (TypeExpr::Record(x_fields), TypeExpr::Record(y_fields)) => {
            if x_fields.len() != y_fields.len() {
                return false;
            }
            // Field order is irrelevant: index one side by name, then
            // require every field on the other side to resolve to an
            // equal type.
            let by_name: HashMap<&str, &crate::ast::RecordField> = x_fields
                .iter()
                .map(|f| (f.name.name.as_str(), f))
                .collect();
            y_fields.iter().all(|yf| {
                by_name
                    .get(yf.name.name.as_str())
                    .map_or(false, |xf| types_equal(&xf.typ, &yf.typ))
            })
        }
        (TypeExpr::Unit, TypeExpr::Unit) => true,
        (TypeExpr::Versioned { base: x_base, .. }, TypeExpr::Versioned { base: y_base, .. }) => {
            // Versioned types compare by base type only; version numbers
            // are handled by the migration machinery, not equality.
            types_equal(x_base, y_base)
        }
        _ => false,
    }
}
/// Compare record fields for compatibility
fn compare_record_fields(
from: &[RecordField],
@@ -243,7 +304,7 @@ fn compare_record_fields(
// Check for type changes in existing fields
for (name, from_field) in &from_map {
if let Some(to_field) = to_map.get(name) {
if from_field.typ != to_field.typ {
if !types_equal(&from_field.typ, &to_field.typ) {
// Types differ - check if it's a compatible widening
// For now, treat all type changes as breaking
breaking_changes.push(BreakingChange::FieldTypeChanged {

View File

@@ -12,6 +12,7 @@ use crate::ast::{
use crate::diagnostics::{find_similar_names, format_did_you_mean, Diagnostic, Severity};
use crate::exhaustiveness::{check_exhaustiveness, missing_patterns_hint};
use crate::modules::ModuleLoader;
use crate::schema::{SchemaRegistry, Compatibility, BreakingChange};
use crate::types::{
self, unify, EffectDef, EffectOpDef, EffectSet, HandlerDef, Property, PropertySet,
TraitBoundDef, TraitDef, TraitImpl, TraitMethodDef, Type, TypeEnv, TypeScheme, VariantDef,
@@ -480,6 +481,7 @@ fn check_termination(func: &FunctionDecl) -> Result<(), String> {
#[derive(Debug, Clone)]
pub struct ParamPropertyConstraint {
pub param_name: String,
pub param_index: usize,
pub required_properties: PropertySet,
}
@@ -502,6 +504,8 @@ pub struct TypeChecker {
latest_versions: HashMap<String, u32>,
/// Migrations: type_name -> source_version -> migration_body
migrations: HashMap<String, HashMap<u32, Expr>>,
/// Schema registry for compatibility checking
schema_registry: SchemaRegistry,
}
impl TypeChecker {
@@ -517,6 +521,7 @@ impl TypeChecker {
versioned_types: HashMap::new(),
latest_versions: HashMap::new(),
migrations: HashMap::new(),
schema_registry: SchemaRegistry::new(),
}
}
@@ -826,6 +831,57 @@ impl TypeChecker {
.or_default()
.insert(migration.from_version.number, migration.body.clone());
}
// Register in schema registry and check compatibility
self.schema_registry.register(&type_name, type_decl);
// Check compatibility with previous version if this isn't v1
if version_num > 1 {
let prev_version = version_num - 1;
// Check if both versions are registered
if self.schema_registry.get_version(&type_name, prev_version).is_some() {
match self.schema_registry.check_compatibility(&type_name, prev_version, version_num) {
Ok(Compatibility::Breaking(changes)) => {
// Check if a migration exists for the breaking changes
let has_migration = self.schema_registry.has_migration(&type_name, prev_version, version_num);
if !has_migration {
// No migration for breaking changes - this is a warning
let change_descriptions: Vec<String> = changes.iter().map(|c| {
match c {
BreakingChange::FieldRemoved { field_name } =>
format!("field '{}' removed", field_name),
BreakingChange::FieldRenamed { old_name, new_name } =>
format!("field '{}' renamed to '{}'", old_name, new_name),
BreakingChange::FieldTypeChanged { field_name, old_type, new_type } =>
format!("field '{}' type changed from {} to {}", field_name, old_type, new_type),
BreakingChange::RequiredFieldAdded { field_name } =>
format!("required field '{}' added", field_name),
}
}).collect();
self.errors.push(TypeError {
message: format!(
"Breaking changes in {} @v{} without migration from @v{}: {}. \
Add 'from @v{} = {{ ... }}' to provide a migration.",
type_name, version_num, prev_version,
change_descriptions.join(", "),
prev_version
),
span: type_decl.name.span,
});
}
}
Ok(Compatibility::AutoMigrate(_)) | Ok(Compatibility::Compatible) => {
// No issues - compatible or auto-migratable
}
Err(_) => {
// Previous version not registered yet - that's fine
}
}
}
}
}
// Register ADT constructors as values with polymorphic types
@@ -1128,7 +1184,13 @@ impl TypeChecker {
}
});
if let Some(param) = param_with_type {
if let Some((param_index, param)) = func.params.iter().enumerate().find(|(_, p)| {
if let TypeExpr::Named(name) = &p.typ {
name.name == type_param.name
} else {
false
}
}) {
// Record the constraint for checking at call sites
let constraints = self
.property_constraints
@@ -1143,6 +1205,7 @@ impl TypeChecker {
props.insert(Property::from(*property));
constraints.push(ParamPropertyConstraint {
param_name: param.name.name.clone(),
param_index,
required_properties: props,
});
}
@@ -1557,39 +1620,24 @@ impl TypeChecker {
// Check property constraints from where clauses
if let Expr::Var(func_id) = func {
if let Some(constraints) = self.property_constraints.get(&func_id.name).cloned() {
// Get parameter names from the function declaration (if available in env)
// We'll match by position since we have the constraints by param name
if let Some(scheme) = self.env.lookup(&func_id.name) {
let func_typ = scheme.instantiate();
if let Type::Function { params: param_types, .. } = &func_typ {
for constraint in &constraints {
// Find which argument position corresponds to this param
// For now, match by position based on stored param names
for (i, arg) in args.iter().enumerate() {
// Get the properties of the argument
let arg_props = self.get_expr_properties(arg);
for constraint in &constraints {
// Check if the argument at the constrained position satisfies the constraint
if constraint.param_index < args.len() {
let arg = &args[constraint.param_index];
let arg_props = self.get_expr_properties(arg);
// Check if this argument corresponds to a constrained param
// We check all constraints and verify the arg satisfies them
if !arg_props.satisfies(&constraint.required_properties) {
// Only report if this argument could be the constrained one
// (simple heuristic: function type argument)
if i < param_types.len() {
if let Type::Function { .. } = &param_types[i] {
self.errors.push(TypeError {
message: format!(
"Argument to '{}' does not satisfy property constraint: \
expected {:?}, but argument has {:?}",
func_id.name,
constraint.required_properties,
arg_props
),
span: arg.span(),
});
}
}
}
}
if !arg_props.satisfies(&constraint.required_properties) {
self.errors.push(TypeError {
message: format!(
"Argument '{}' to '{}' does not satisfy property constraint: \
expected {:?}, but argument has {:?}",
constraint.param_name,
func_id.name,
constraint.required_properties,
arg_props
),
span: arg.span(),
});
}
}
}

View File

@@ -1343,6 +1343,16 @@ impl TypeEnv {
Type::List(Box::new(Type::var())),
),
),
(
"forEach".to_string(),
Type::function(
vec![
Type::List(Box::new(Type::var())),
Type::function(vec![Type::var()], Type::Unit),
],
Type::Unit,
),
),
]);
env.bind("List", TypeScheme::mono(list_module_type));
@@ -1410,6 +1420,14 @@ impl TypeEnv {
"fromChar".to_string(),
Type::function(vec![Type::Char], Type::String),
),
(
"parseInt".to_string(),
Type::function(vec![Type::String], Type::Option(Box::new(Type::Int))),
),
(
"parseFloat".to_string(),
Type::function(vec![Type::String], Type::Option(Box::new(Type::Float))),
),
]);
env.bind("String", TypeScheme::mono(string_module_type));