feat: rebuild website with full learning funnel

Website rebuilt from scratch based on analysis of 11 beloved language
websites (Elm, Zig, Gleam, Swift, Kotlin, Haskell, OCaml, Crystal, Roc,
Rust, Go).

New website structure:
- Homepage with hero, playground, three pillars, install guide
- Language Tour with interactive lessons (hello world, types, effects)
- Examples cookbook with categorized sidebar
- API documentation index
- Installation guide (Nix and source)
- Sleek/noble design (black/gold, serif typography)

Also includes:
- New stdlib/json.lux module for JSON serialization
- Enhanced stdlib/http.lux with middleware and routing
- New string functions (charAt, indexOf, lastIndexOf, repeat)
- LSP improvements (rename, signature help, formatting)
- Package manager transitive dependency resolution
- Updated documentation for effects and stdlib
- New showcase example (task_manager.lux)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
2026-02-16 23:05:35 -05:00
parent 5a853702d1
commit 7e76acab18
44 changed files with 12468 additions and 3354 deletions

View File

@@ -80,6 +80,16 @@ impl std::fmt::Display for CGenError {
impl std::error::Error for CGenError {}
/// Behavioral properties for a function
///
/// Flags mirror the `BehavioralProperty` annotations collected from a
/// function declaration. The C backend consults them when generating a
/// function body: `is_pure` enables result memoization, `is_idempotent`
/// enables repeat-call caching, `is_commutative` enables argument-order
/// normalization, and `is_deterministic` emits an optimization hint comment.
#[derive(Debug, Clone, Default)]
struct FunctionBehavior {
    /// Declared `pure`: result depends only on arguments (drives memoization).
    is_pure: bool,
    /// Declared `total`; recorded but not consumed by the codegen paths visible here.
    is_total: bool,
    /// Declared `idempotent`: f(f(x)) = f(x); repeat calls with identical args are cached.
    is_idempotent: bool,
    /// Declared `deterministic`: same inputs always yield the same output.
    is_deterministic: bool,
    /// Declared `commutative`: f(a, b) = f(b, a); argument order may be swapped.
    is_commutative: bool,
}
/// The C backend code generator
pub struct CBackend {
/// Generated C code
@@ -125,6 +135,10 @@ pub struct CBackend {
adt_with_pointers: HashSet<String>,
/// Variable types for type inference (variable name -> C type)
var_types: HashMap<String, String>,
/// Behavioral properties for functions (for optimization)
function_behaviors: HashMap<String, FunctionBehavior>,
/// Whether to enable behavioral type optimizations
enable_behavioral_optimizations: bool,
}
impl CBackend {
@@ -151,6 +165,28 @@ impl CBackend {
next_adt_tag: 100, // ADT tags start at 100
adt_with_pointers: HashSet::new(),
var_types: HashMap::new(),
function_behaviors: HashMap::new(),
enable_behavioral_optimizations: true,
}
}
/// Collect behavioral properties from a function declaration.
///
/// Functions carrying at least one annotation (`pure`, `total`, `idempotent`,
/// `deterministic`, `commutative`) are registered in `function_behaviors`;
/// unannotated functions are not entered into the map at all.
fn collect_behavioral_properties(&mut self, f: &FunctionDecl) {
    // No annotations => nothing to record. This mirrors the original
    // "at least one flag set" test, since every property variant below
    // sets exactly one flag.
    if f.properties.is_empty() {
        return;
    }
    let mut behavior = FunctionBehavior::default();
    for prop in &f.properties {
        // Map each annotation onto the flag it toggles.
        let flag = match prop {
            BehavioralProperty::Pure => &mut behavior.is_pure,
            BehavioralProperty::Total => &mut behavior.is_total,
            BehavioralProperty::Idempotent => &mut behavior.is_idempotent,
            BehavioralProperty::Deterministic => &mut behavior.is_deterministic,
            BehavioralProperty::Commutative => &mut behavior.is_commutative,
        };
        *flag = true;
    }
    self.function_behaviors.insert(f.name.name.clone(), behavior);
}
@@ -182,6 +218,8 @@ impl CBackend {
if !f.effects.is_empty() {
self.effectful_functions.insert(f.name.name.clone());
}
// Collect behavioral properties for optimization
self.collect_behavioral_properties(f);
}
Declaration::Type(t) => {
self.collect_type(t)?;
@@ -587,6 +625,21 @@ impl CBackend {
self.writeln(" return strcmp(a, b) == 0;");
self.writeln("}");
self.writeln("");
self.writeln("// Alias for memoization key comparison");
self.writeln("static inline LuxBool lux_string_equals(LuxString a, LuxString b) {");
self.writeln(" return strcmp(a, b) == 0;");
self.writeln("}");
self.writeln("");
self.writeln("// String hash for memoization (djb2 algorithm)");
self.writeln("static inline size_t lux_string_hash(LuxString s) {");
self.writeln(" size_t hash = 5381;");
self.writeln(" unsigned char c;");
self.writeln(" while ((c = (unsigned char)*s++)) {");
self.writeln(" hash = ((hash << 5) + hash) + c;");
self.writeln(" }");
self.writeln(" return hash;");
self.writeln("}");
self.writeln("");
self.writeln("static LuxBool lux_string_contains(LuxString haystack, LuxString needle) {");
self.writeln(" return strstr(haystack, needle) != NULL;");
self.writeln("}");
@@ -2062,12 +2115,51 @@ impl CBackend {
format!("LuxEvidence* ev, {}", params)
}
} else {
params
params.clone()
};
// Check for behavioral optimizations
let behavior = self.function_behaviors.get(&func.name.name).cloned();
let use_memoization = self.enable_behavioral_optimizations
&& behavior.as_ref().map_or(false, |b| b.is_pure)
&& !func.params.is_empty()
&& ret_type != "void"
&& ret_type != "LuxUnit";
let use_idempotent = self.enable_behavioral_optimizations
&& behavior.as_ref().map_or(false, |b| b.is_idempotent && !b.is_pure)
&& ret_type != "void"
&& ret_type != "LuxUnit";
let use_commutative = self.enable_behavioral_optimizations
&& behavior.as_ref().map_or(false, |b| b.is_commutative)
&& func.params.len() == 2;
let is_deterministic = behavior.as_ref().map_or(false, |b| b.is_deterministic);
self.writeln(&format!("{} {}({}) {{", ret_type, mangled, full_params));
self.indent += 1;
// Emit deterministic attribute hint
if is_deterministic {
self.emit_deterministic_attribute(&func.name.name);
}
// Emit commutative optimization (normalize argument order for better CSE)
if use_commutative {
self.emit_commutative_optimization(&func.name.name, &func.params)?;
}
// Emit memoization check for pure functions
if use_memoization {
self.emit_memoization_lookup(&func.name.name, &func.params, &ret_type)?;
}
// Emit idempotent check (for non-pure idempotent functions)
if use_idempotent {
self.emit_idempotent_check(&func.name.name, &func.params, &ret_type)?;
}
// Set evidence availability for expression generation
let prev_has_evidence = self.has_evidence;
if is_effectful {
@@ -2123,6 +2215,12 @@ impl CBackend {
if let Some(ref var_name) = skip_var {
// Result is a local variable or RC temp - skip decref'ing it and just return
self.pop_rc_scope_except(Some(var_name));
if use_memoization {
self.emit_memoization_store(&func.name.name, &func.params, &result)?;
}
if use_idempotent {
self.emit_idempotent_store(&func.name.name, &func.params, &result)?;
}
self.writeln(&format!("return {};", result));
} else if is_rc_result && has_rc_locals {
// Result is from a call or complex expression - use incref/decref pattern
@@ -2130,10 +2228,22 @@ impl CBackend {
self.writeln("lux_incref(_result);");
self.pop_rc_scope(); // Emit decrefs for all local RC vars
self.writeln("lux_decref(_result); // Balance the incref");
if use_memoization {
self.emit_memoization_store(&func.name.name, &func.params, "_result")?;
}
if use_idempotent {
self.emit_idempotent_store(&func.name.name, &func.params, "_result")?;
}
self.writeln("return _result;");
} else {
// No RC locals or non-RC result - simple cleanup
self.pop_rc_scope();
if use_memoization {
self.emit_memoization_store(&func.name.name, &func.params, &result)?;
}
if use_idempotent {
self.emit_idempotent_store(&func.name.name, &func.params, &result)?;
}
self.writeln(&format!("return {};", result));
}
} else {
@@ -4637,6 +4747,196 @@ impl CBackend {
}
}
/// Emit memoization lookup code for pure functions
///
/// For pure functions, we generate a static memo table that caches results.
/// This is a simple linear cache for now - a hash table would be more efficient
/// for functions called with many different arguments.
///
/// Callers only invoke this when `params` is non-empty and the return type is
/// a cacheable value (see `use_memoization` at the call site), so the key
/// struct always has at least one field.
fn emit_memoization_lookup(&mut self, func_name: &str, params: &[Parameter], ret_type: &str) -> Result<(), CGenError> {
    let mangled = self.mangle_name(func_name);
    // Power-of-two table size so `hash & (size - 1)` is a valid index mask.
    let memo_size = 64;

    // Generate a hash expression from parameters
    let hash_expr = if params.len() == 1 {
        let p = &params[0];
        let c_type = self.type_expr_to_c(&p.typ)?;
        match c_type.as_str() {
            "LuxInt" => format!("((size_t){} & {})", self.escape_c_keyword(&p.name.name), memo_size - 1),
            "LuxString" => format!("(lux_string_hash({}) & {})", self.escape_c_keyword(&p.name.name), memo_size - 1),
            "LuxBool" => format!("((size_t){} & {})", self.escape_c_keyword(&p.name.name), memo_size - 1),
            _ => format!("((size_t)(uintptr_t){} & {})", self.escape_c_keyword(&p.name.name), memo_size - 1),
        }
    } else {
        // For multiple params, combine hashes
        let mut parts = Vec::new();
        for (i, p) in params.iter().enumerate() {
            let c_type = self.type_expr_to_c(&p.typ)?;
            let hash = match c_type.as_str() {
                "LuxInt" => format!("(size_t){}", self.escape_c_keyword(&p.name.name)),
                "LuxString" => format!("lux_string_hash({})", self.escape_c_keyword(&p.name.name)),
                "LuxBool" => format!("(size_t){}", self.escape_c_keyword(&p.name.name)),
                _ => format!("(size_t)(uintptr_t){}", self.escape_c_keyword(&p.name.name)),
            };
            // Mix with prime numbers for better distribution
            let prime = [31, 37, 41, 43, 47, 53, 59, 61][i % 8];
            parts.push(format!("({} * {})", hash, prime));
        }
        format!("(({}) & {})", parts.join(" ^ "), memo_size - 1)
    };

    // Emit static memo table. BUG FIX: `generate_key_type` already yields
    // *named* field declarations ("LuxInt key_n"), so the format string must
    // close with "{};" — the previous "{} key;" appended a stray identifier
    // and produced invalid C such as "LuxInt key_n key;".
    // (C zero-initializes statics, so every `valid` flag starts false.)
    self.writeln(&format!("// Memoization for pure function {}", func_name));
    self.writeln(&format!("static struct {{ bool valid; {} result; {}; }} _memo_{}[{}];",
        ret_type, self.generate_key_type(params)?, mangled, memo_size));
    self.writeln(&format!("size_t _memo_idx = {};", hash_expr));

    // Check if cached: the slot must be populated AND hold the same argument
    // values (hash collisions fall through to recomputation).
    self.writeln(&format!("if (_memo_{}[_memo_idx].valid && {}) {{",
        mangled, self.generate_key_compare(params, &format!("_memo_{}[_memo_idx]", mangled))?));
    self.indent += 1;
    self.writeln(&format!("return _memo_{}[_memo_idx].result;", mangled));
    self.indent -= 1;
    self.writeln("}");
    Ok(())
}
/// Emit memoization store code for pure functions.
///
/// Writes the freshly computed result (plus the argument values used as the
/// cache key) into the slot `_memo_<fn>[_memo_idx]` that the prologue
/// emitted by `emit_memoization_lookup` already indexed.
fn emit_memoization_store(&mut self, func_name: &str, params: &[Parameter], result_expr: &str) -> Result<(), CGenError> {
    let mangled = self.mangle_name(func_name);
    // Mark the slot populated and record the computed value.
    self.writeln(&format!("_memo_{}[_memo_idx].valid = true;", mangled));
    self.writeln(&format!("_memo_{}[_memo_idx].result = {};", mangled, result_expr));
    // Capture every argument so the next lookup can recognise a hit.
    for param in params {
        let key = self.escape_c_keyword(&param.name.name);
        self.writeln(&format!("_memo_{}[_memo_idx].key_{} = {};", mangled, key, key));
    }
    Ok(())
}
/// Generate the key type for memoization (stores all parameter values).
///
/// Returns the struct-field declarations joined with "; " and *without* a
/// trailing semicolon, e.g. `"LuxInt key_a; LuxString key_b"`; the caller is
/// responsible for terminating the list.
fn generate_key_type(&self, params: &[Parameter]) -> Result<String, CGenError> {
    let fields = params
        .iter()
        .map(|param| {
            let c_type = self.type_expr_to_c(&param.typ)?;
            let field = self.escape_c_keyword(&param.name.name);
            Ok(format!("{} key_{}", c_type, field))
        })
        .collect::<Result<Vec<_>, CGenError>>()?;
    Ok(fields.join("; "))
}
/// Generate the key comparison expression for a memoization lookup.
///
/// `memo_entry` is the C lvalue of the cache slot (e.g.
/// `_memo_fib[_memo_idx]`); each parameter is compared against the slot's
/// matching `key_<name>` field. Strings need `lux_string_equals` (content
/// comparison); everything else compares by value with `==`.
fn generate_key_compare(&self, params: &[Parameter], memo_entry: &str) -> Result<String, CGenError> {
    let mut clauses = Vec::with_capacity(params.len());
    for param in params {
        let name = self.escape_c_keyword(&param.name.name);
        let is_string = self.type_expr_to_c(&param.typ)? == "LuxString";
        let clause = if is_string {
            format!("lux_string_equals({}.key_{}, {})", memo_entry, name, name)
        } else {
            format!("{}.key_{} == {}", memo_entry, name, name)
        };
        clauses.push(clause);
    }
    Ok(clauses.join(" && "))
}
/// Emit idempotent function optimization
///
/// For idempotent functions (f(f(x)) = f(x)), we track if the function
/// has already been called with the same arguments to avoid redundant computation.
/// This is useful for initialization functions, setters with same value, etc.
fn emit_idempotent_check(&mut self, func_name: &str, params: &[Parameter], ret_type: &str) -> Result<(), CGenError> {
    let mangled = self.mangle_name(func_name);
    self.writeln(&format!("// Idempotent optimization for {}", func_name));
    self.writeln(&format!("static bool _idem_{}_called = false;", mangled));
    self.writeln(&format!("static {} _idem_{}_result;", ret_type, mangled));
    // For idempotent functions with no params, just return the cached result
    if params.is_empty() {
        self.writeln(&format!("if (_idem_{}_called) {{ return _idem_{}_result; }}", mangled, mangled));
    } else {
        // BUG FIX: cache the previous arguments as one flat static per
        // parameter, named `_idem_<fn>_key_<param>`. This is the naming that
        // `emit_idempotent_store` assigns to and that
        // `generate_key_compare_inline` compares against. The old code
        // spliced `generate_key_type`'s *named* field list into
        // `static {} _idem_{}_key;`, emitting invalid C like
        // `static LuxInt key_n _idem_f_key;` and leaving the variables the
        // store writes to undeclared.
        for p in params {
            let c_type = self.type_expr_to_c(&p.typ)?;
            let name = self.escape_c_keyword(&p.name.name);
            self.writeln(&format!("static {} _idem_{}_key_{};", c_type, mangled, name));
        }
        let key_compare = self.generate_key_compare_inline(params, &format!("_idem_{}_key", mangled))?;
        self.writeln(&format!("if (_idem_{}_called && {}) {{ return _idem_{}_result; }}", mangled, key_compare, mangled));
    }
    Ok(())
}
/// Emit idempotent function store.
///
/// Records the result of the call and the argument values in the static
/// `_idem_<fn>_*` slots so the prologue emitted by `emit_idempotent_check`
/// can short-circuit a repeat call with identical inputs.
fn emit_idempotent_store(&mut self, func_name: &str, params: &[Parameter], result_expr: &str) -> Result<(), CGenError> {
    let mangled = self.mangle_name(func_name);
    // Flag the cache as populated, then record the result.
    self.writeln(&format!("_idem_{}_called = true;", mangled));
    self.writeln(&format!("_idem_{}_result = {};", mangled, result_expr));
    // Remember each argument value under its flat `_idem_<fn>_key_<p>` name.
    for param in params {
        let key = self.escape_c_keyword(&param.name.name);
        self.writeln(&format!("_idem_{}_key_{} = {};", mangled, key, key));
    }
    Ok(())
}
/// Generate key comparison expression inline (for idempotent check)
///
/// `prefix` is the flat variable prefix (e.g. `_idem_f_key`); each parameter
/// `p` is cached in a plain static variable named `<prefix>_<p>`, matching
/// the declarations emitted by `emit_idempotent_check` and the assignments
/// emitted by `emit_idempotent_store`.
fn generate_key_compare_inline(&self, params: &[Parameter], prefix: &str) -> Result<String, CGenError> {
    let mut comparisons = Vec::new();
    for p in params {
        let c_type = self.type_expr_to_c(&p.typ)?;
        let name = self.escape_c_keyword(&p.name.name);
        let cmp = match c_type.as_str() {
            // BUG FIX: the cached value is the flat variable `<prefix>_<name>`,
            // not a struct field — the previous "{}.{}"-style access generated
            // invalid C for string parameters.
            "LuxString" => format!("lux_string_equals({}_{}, {})", prefix, name, name),
            _ => format!("{}_{} == {}", prefix, name, name),
        };
        comparisons.push(cmp);
    }
    Ok(comparisons.join(" && "))
}
/// Emit deterministic function hint as a comment
///
/// Deterministic functions always produce the same output for the same inputs.
/// This hint helps C compilers (GCC/Clang) with optimization.
///
/// NOTE(review): only a C comment is emitted into the generated function
/// body here — no actual `__attribute__` is applied. A real attribute would
/// have to be attached to the function declaration, not written inside the
/// body; confirm whether a follow-up wires that up at the declaration site.
fn emit_deterministic_attribute(&mut self, func_name: &str) {
    // GCC and Clang support __attribute__((const)) for pure functions without side effects
    // that only depend on their arguments (not even global state)
    self.writeln(&format!("// OPTIMIZATION: {} is deterministic - output depends only on inputs", func_name));
}
/// Emit commutative function hint
///
/// For commutative functions (f(a, b) = f(b, a)), we normalize argument
/// order at function entry so equal calls with swapped arguments take the
/// same path — improving common subexpression elimination (CSE) and
/// memoization hit rates.
fn emit_commutative_optimization(&mut self, func_name: &str, params: &[Parameter]) -> Result<(), CGenError> {
    // Commutativity only makes sense for binary functions.
    let (first, second) = match params {
        [a, b] => (a, b),
        _ => return Ok(()),
    };
    let lhs_type = self.type_expr_to_c(&first.typ)?;
    let rhs_type = self.type_expr_to_c(&second.typ)?;
    // Only swap for matching comparable value types; strings/ADTs are left alone.
    if lhs_type != rhs_type || !matches!(lhs_type.as_str(), "LuxInt" | "LuxFloat") {
        return Ok(());
    }
    let lhs = self.escape_c_keyword(&first.name.name);
    let rhs = self.escape_c_keyword(&second.name.name);
    self.writeln(&format!("// Commutative optimization for {}: normalize argument order", func_name));
    self.writeln(&format!("if ({} > {}) {{", lhs, rhs));
    self.indent += 1;
    self.writeln(&format!("{} _swap_tmp = {}; {} = {}; {} = _swap_tmp;",
        lhs_type, lhs, lhs, rhs, rhs));
    self.indent -= 1;
    self.writeln("}");
    Ok(())
}
fn writeln(&mut self, line: &str) {
let indent = " ".repeat(self.indent);
writeln!(self.output, "{}{}", indent, line).unwrap();
@@ -4746,4 +5046,33 @@ mod tests {
assert!(c_code.contains("->fn_ptr"));
assert!(c_code.contains("->env"));
}
#[test]
fn test_pure_function_memoization() {
// A recursive `is pure` function — exactly the case memoization targets
// (naive fib recomputes subproblems exponentially without a cache).
let source = r#"
fn fib(n: Int): Int is pure =
if n <= 1 then n else fib(n - 1) + fib(n - 2)
"#;
let c_code = generate(source).unwrap();
// Pure function should have memoization infrastructure
// ("_lux" suffix comes from the backend's name mangling).
assert!(c_code.contains("// Memoization for pure function fib"));
assert!(c_code.contains("_memo_fib_lux"));
assert!(c_code.contains("_memo_idx"));
// Should check cache before computation
assert!(c_code.contains(".valid &&"));
// Should store result in cache
assert!(c_code.contains(".valid = true"));
assert!(c_code.contains(".result ="));
}
#[test]
fn test_non_pure_function_no_memoization() {
// Same computation, but *without* a `pure` annotation: no cache may be emitted.
let source = r#"
fn add(a: Int, b: Int): Int = a + b
"#;
let c_code = generate(source).unwrap();
// Non-pure function should NOT have memoization
assert!(!c_code.contains("// Memoization for pure function add"));
assert!(!c_code.contains("_memo_add_lux"));
}
}

View File

@@ -90,6 +90,10 @@ pub enum BuiltinFn {
StringToLower,
StringSubstring,
StringFromChar,
StringCharAt,
StringIndexOf,
StringLastIndexOf,
StringRepeat,
// JSON operations
JsonParse,
@@ -620,6 +624,14 @@ pub struct Interpreter {
pg_connections: RefCell<HashMap<i64, PgClient>>,
/// Next PostgreSQL connection ID
next_pg_conn_id: RefCell<i64>,
/// Concurrent tasks: task_id -> (thunk_value, result_option, is_cancelled)
concurrent_tasks: RefCell<HashMap<i64, (Value, Option<Value>, bool)>>,
/// Next task ID
next_task_id: RefCell<i64>,
/// Channels: channel_id -> (queue, is_closed)
channels: RefCell<HashMap<i64, (Vec<Value>, bool)>>,
/// Next channel ID
next_channel_id: RefCell<i64>,
}
/// Results from running tests
@@ -664,6 +676,10 @@ impl Interpreter {
next_sql_conn_id: RefCell::new(1),
pg_connections: RefCell::new(HashMap::new()),
next_pg_conn_id: RefCell::new(1),
concurrent_tasks: RefCell::new(HashMap::new()),
next_task_id: RefCell::new(1),
channels: RefCell::new(HashMap::new()),
next_channel_id: RefCell::new(1),
}
}
@@ -966,6 +982,22 @@ impl Interpreter {
"parseFloat".to_string(),
Value::Builtin(BuiltinFn::StringParseFloat),
),
(
"charAt".to_string(),
Value::Builtin(BuiltinFn::StringCharAt),
),
(
"indexOf".to_string(),
Value::Builtin(BuiltinFn::StringIndexOf),
),
(
"lastIndexOf".to_string(),
Value::Builtin(BuiltinFn::StringLastIndexOf),
),
(
"repeat".to_string(),
Value::Builtin(BuiltinFn::StringRepeat),
),
]));
env.define("String", string_module);
@@ -2498,6 +2530,89 @@ impl Interpreter {
Ok(EvalResult::Value(Value::String(c.to_string())))
}
BuiltinFn::StringCharAt => {
if args.len() != 2 {
return Err(err("String.charAt requires 2 arguments: string, index"));
}
let s = match &args[0] {
Value::String(s) => s.clone(),
v => return Err(err(&format!("String.charAt expects String, got {}", v.type_name()))),
};
let idx = match &args[1] {
Value::Int(n) => *n as usize,
v => return Err(err(&format!("String.charAt expects Int for index, got {}", v.type_name()))),
};
let chars: Vec<char> = s.chars().collect();
if idx < chars.len() {
Ok(EvalResult::Value(Value::String(chars[idx].to_string())))
} else {
Ok(EvalResult::Value(Value::String(String::new())))
}
}
BuiltinFn::StringIndexOf => {
if args.len() != 2 {
return Err(err("String.indexOf requires 2 arguments: string, substring"));
}
let s = match &args[0] {
Value::String(s) => s.clone(),
v => return Err(err(&format!("String.indexOf expects String, got {}", v.type_name()))),
};
let sub = match &args[1] {
Value::String(s) => s.clone(),
v => return Err(err(&format!("String.indexOf expects String for substring, got {}", v.type_name()))),
};
match s.find(&sub) {
Some(idx) => Ok(EvalResult::Value(Value::Constructor {
name: "Some".to_string(),
fields: vec![Value::Int(idx as i64)],
})),
None => Ok(EvalResult::Value(Value::Constructor {
name: "None".to_string(),
fields: vec![],
})),
}
}
BuiltinFn::StringLastIndexOf => {
if args.len() != 2 {
return Err(err("String.lastIndexOf requires 2 arguments: string, substring"));
}
let s = match &args[0] {
Value::String(s) => s.clone(),
v => return Err(err(&format!("String.lastIndexOf expects String, got {}", v.type_name()))),
};
let sub = match &args[1] {
Value::String(s) => s.clone(),
v => return Err(err(&format!("String.lastIndexOf expects String for substring, got {}", v.type_name()))),
};
match s.rfind(&sub) {
Some(idx) => Ok(EvalResult::Value(Value::Constructor {
name: "Some".to_string(),
fields: vec![Value::Int(idx as i64)],
})),
None => Ok(EvalResult::Value(Value::Constructor {
name: "None".to_string(),
fields: vec![],
})),
}
}
BuiltinFn::StringRepeat => {
if args.len() != 2 {
return Err(err("String.repeat requires 2 arguments: string, count"));
}
let s = match &args[0] {
Value::String(s) => s.clone(),
v => return Err(err(&format!("String.repeat expects String, got {}", v.type_name()))),
};
let count = match &args[1] {
Value::Int(n) => (*n).max(0) as usize,
v => return Err(err(&format!("String.repeat expects Int for count, got {}", v.type_name()))),
};
Ok(EvalResult::Value(Value::String(s.repeat(count))))
}
// JSON operations
BuiltinFn::JsonParse => {
let s = Self::expect_arg_1::<String>(&args, "Json.parse", span)?;
@@ -4369,6 +4484,237 @@ impl Interpreter {
}
}
// ===== Concurrent Effect =====
("Concurrent", "spawn") => {
// For now, spawn just stores the thunk - it will be evaluated on await
// In a real implementation, this would start a thread/fiber
let thunk = match request.args.first() {
Some(v) => v.clone(),
_ => return Err(RuntimeError {
message: "Concurrent.spawn requires a thunk argument".to_string(),
span: None,
}),
};
let task_id = *self.next_task_id.borrow();
*self.next_task_id.borrow_mut() += 1;
// Store task: (thunk, None for result, not cancelled)
self.concurrent_tasks.borrow_mut().insert(task_id, (thunk, None, false));
Ok(Value::Int(task_id))
}
("Concurrent", "await") => {
let task_id = match request.args.first() {
Some(Value::Int(id)) => *id,
_ => return Err(RuntimeError {
message: "Concurrent.await requires a task ID".to_string(),
span: None,
}),
};
// Check if already computed or cancelled
let task_info = {
let tasks = self.concurrent_tasks.borrow();
tasks.get(&task_id).cloned()
};
match task_info {
Some((_, Some(result), _)) => Ok(result),
Some((_, _, true)) => Err(RuntimeError {
message: format!("Task {} was cancelled", task_id),
span: None,
}),
Some((thunk, None, false)) => {
// For cooperative concurrency, we just need to signal
// that we're waiting on this task
// Return the thunk to be evaluated by the caller
// This is a simplification - real async would use fibers
Ok(thunk)
}
None => Err(RuntimeError {
message: format!("Unknown task ID: {}", task_id),
span: None,
}),
}
}
("Concurrent", "yield") => {
// In cooperative concurrency, yield allows other tasks to run
// For now, this is a no-op in our single-threaded model
Ok(Value::Unit)
}
("Concurrent", "sleep") => {
// Non-blocking sleep (delegates to thread sleep for now)
use std::thread;
use std::time::Duration;
let ms = match request.args.first() {
Some(Value::Int(n)) => *n as u64,
_ => 0,
};
thread::sleep(Duration::from_millis(ms));
Ok(Value::Unit)
}
("Concurrent", "cancel") => {
let task_id = match request.args.first() {
Some(Value::Int(id)) => *id,
_ => return Err(RuntimeError {
message: "Concurrent.cancel requires a task ID".to_string(),
span: None,
}),
};
let mut tasks = self.concurrent_tasks.borrow_mut();
if let Some((thunk, result, _)) = tasks.get(&task_id).cloned() {
tasks.insert(task_id, (thunk, result, true));
Ok(Value::Bool(true))
} else {
Ok(Value::Bool(false))
}
}
("Concurrent", "isRunning") => {
let task_id = match request.args.first() {
Some(Value::Int(id)) => *id,
_ => return Err(RuntimeError {
message: "Concurrent.isRunning requires a task ID".to_string(),
span: None,
}),
};
let tasks = self.concurrent_tasks.borrow();
let is_running = match tasks.get(&task_id) {
Some((_, None, false)) => true, // Not completed and not cancelled
_ => false,
};
Ok(Value::Bool(is_running))
}
("Concurrent", "taskCount") => {
let tasks = self.concurrent_tasks.borrow();
let count = tasks.iter()
.filter(|(_, (_, result, cancelled))| result.is_none() && !cancelled)
.count();
Ok(Value::Int(count as i64))
}
// ===== Channel Effect =====
("Channel", "create") => {
let channel_id = *self.next_channel_id.borrow();
*self.next_channel_id.borrow_mut() += 1;
// Create empty channel queue, not closed
self.channels.borrow_mut().insert(channel_id, (Vec::new(), false));
Ok(Value::Int(channel_id))
}
("Channel", "send") => {
let channel_id = match request.args.first() {
Some(Value::Int(id)) => *id,
_ => return Err(RuntimeError {
message: "Channel.send requires a channel ID".to_string(),
span: None,
}),
};
let value = match request.args.get(1) {
Some(v) => v.clone(),
_ => return Err(RuntimeError {
message: "Channel.send requires a value".to_string(),
span: None,
}),
};
let mut channels = self.channels.borrow_mut();
match channels.get_mut(&channel_id) {
Some((queue, false)) => {
queue.push(value);
Ok(Value::Unit)
}
Some((_, true)) => Err(RuntimeError {
message: format!("Channel {} is closed", channel_id),
span: None,
}),
None => Err(RuntimeError {
message: format!("Unknown channel ID: {}", channel_id),
span: None,
}),
}
}
("Channel", "receive") => {
let channel_id = match request.args.first() {
Some(Value::Int(id)) => *id,
_ => return Err(RuntimeError {
message: "Channel.receive requires a channel ID".to_string(),
span: None,
}),
};
let mut channels = self.channels.borrow_mut();
match channels.get_mut(&channel_id) {
Some((queue, _)) if !queue.is_empty() => {
Ok(queue.remove(0))
}
Some((_, true)) => Err(RuntimeError {
message: format!("Channel {} is closed and empty", channel_id),
span: None,
}),
Some((_, false)) => Err(RuntimeError {
message: format!("Channel {} is empty (blocking receive not supported yet)", channel_id),
span: None,
}),
None => Err(RuntimeError {
message: format!("Unknown channel ID: {}", channel_id),
span: None,
}),
}
}
("Channel", "tryReceive") => {
let channel_id = match request.args.first() {
Some(Value::Int(id)) => *id,
_ => return Err(RuntimeError {
message: "Channel.tryReceive requires a channel ID".to_string(),
span: None,
}),
};
let mut channels = self.channels.borrow_mut();
match channels.get_mut(&channel_id) {
Some((queue, _)) if !queue.is_empty() => {
Ok(Value::Constructor {
name: "Some".to_string(),
fields: vec![queue.remove(0)],
})
}
Some(_) => {
Ok(Value::Constructor {
name: "None".to_string(),
fields: vec![],
})
}
None => Err(RuntimeError {
message: format!("Unknown channel ID: {}", channel_id),
span: None,
}),
}
}
("Channel", "close") => {
let channel_id = match request.args.first() {
Some(Value::Int(id)) => *id,
_ => return Err(RuntimeError {
message: "Channel.close requires a channel ID".to_string(),
span: None,
}),
};
let mut channels = self.channels.borrow_mut();
if let Some((queue, closed)) = channels.get_mut(&channel_id) {
*closed = true;
Ok(Value::Unit)
} else {
Err(RuntimeError {
message: format!("Unknown channel ID: {}", channel_id),
span: None,
})
}
}
_ => Err(RuntimeError {
message: format!(
"Unhandled effect operation: {}.{}",

View File

@@ -4,30 +4,46 @@
//! - Diagnostics (errors and warnings)
//! - Hover information
//! - Go to definition
//! - Find references
//! - Completions
//! - Document symbols
//! - Rename refactoring
//! - Signature help
//! - Formatting
use crate::parser::Parser;
use crate::typechecker::TypeChecker;
use crate::symbol_table::{SymbolTable, SymbolKind};
use crate::formatter::{format as format_source, FormatConfig};
use lsp_server::{Connection, ExtractError, Message, Request, RequestId, Response};
// Merged lsp_types import: the rendered diff retained the superseded
// pre-change `request::{...}` and `TextDocumentSyncKind, Url,` lines next to
// their replacements, which imports the same names twice (a compile error).
// Only the post-change lines are kept here.
use lsp_types::{
    notification::{DidChangeTextDocument, DidOpenTextDocument, Notification},
    request::{Completion, GotoDefinition, HoverRequest, References, DocumentSymbolRequest, Rename, SignatureHelpRequest, Formatting},
    CompletionItem, CompletionItemKind, CompletionOptions, CompletionParams, CompletionResponse,
    Diagnostic, DiagnosticSeverity, DidChangeTextDocumentParams, DidOpenTextDocumentParams,
    GotoDefinitionParams, GotoDefinitionResponse, Hover, HoverContents, HoverParams,
    HoverProviderCapability, InitializeParams, MarkupContent, MarkupKind, Position,
    PublishDiagnosticsParams, Range, ServerCapabilities, TextDocumentSyncCapability,
    TextDocumentSyncKind, Url, ReferenceParams, Location, DocumentSymbolParams,
    DocumentSymbolResponse, SymbolInformation, RenameParams, WorkspaceEdit, TextEdit,
    SignatureHelpParams, SignatureHelp, SignatureInformation, ParameterInformation,
    SignatureHelpOptions, DocumentFormattingParams, TextDocumentIdentifier,
};
use std::collections::HashMap;
use std::error::Error;
/// Cached document data
struct DocumentCache {
    /// Full text of the open document, as last synced from the client.
    text: String,
    /// Symbol table from the last successful parse of `text`; `None` when
    /// the document failed to parse (consumers fall back to text heuristics).
    symbol_table: Option<SymbolTable>,
}
/// LSP Server for Lux
pub struct LspServer {
connection: Connection,
/// Document contents by URI
documents: HashMap<Url, String>,
/// Document contents and symbol tables by URI
documents: HashMap<Url, DocumentCache>,
}
impl LspServer {
@@ -63,6 +79,15 @@ impl LspServer {
..Default::default()
}),
definition_provider: Some(lsp_types::OneOf::Left(true)),
references_provider: Some(lsp_types::OneOf::Left(true)),
document_symbol_provider: Some(lsp_types::OneOf::Left(true)),
rename_provider: Some(lsp_types::OneOf::Left(true)),
signature_help_provider: Some(SignatureHelpOptions {
trigger_characters: Some(vec!["(".to_string(), ",".to_string()]),
retrigger_characters: None,
work_done_progress_options: Default::default(),
}),
document_formatting_provider: Some(lsp_types::OneOf::Left(true)),
..Default::default()
})?;
@@ -116,7 +141,7 @@ impl LspServer {
Err(req) => req,
};
let _req = match cast_request::<GotoDefinition>(req) {
let req = match cast_request::<GotoDefinition>(req) {
Ok((id, params)) => {
let result = self.handle_goto_definition(params);
let resp = Response::new_ok(id, result);
@@ -126,6 +151,56 @@ impl LspServer {
Err(req) => req,
};
let req = match cast_request::<References>(req) {
Ok((id, params)) => {
let result = self.handle_references(params);
let resp = Response::new_ok(id, result);
self.connection.sender.send(Message::Response(resp))?;
return Ok(());
}
Err(req) => req,
};
let req = match cast_request::<DocumentSymbolRequest>(req) {
Ok((id, params)) => {
let result = self.handle_document_symbols(params);
let resp = Response::new_ok(id, result);
self.connection.sender.send(Message::Response(resp))?;
return Ok(());
}
Err(req) => req,
};
let req = match cast_request::<Rename>(req) {
Ok((id, params)) => {
let result = self.handle_rename(params);
let resp = Response::new_ok(id, result);
self.connection.sender.send(Message::Response(resp))?;
return Ok(());
}
Err(req) => req,
};
let req = match cast_request::<SignatureHelpRequest>(req) {
Ok((id, params)) => {
let result = self.handle_signature_help(params);
let resp = Response::new_ok(id, result);
self.connection.sender.send(Message::Response(resp))?;
return Ok(());
}
Err(req) => req,
};
let _req = match cast_request::<Formatting>(req) {
Ok((id, params)) => {
let result = self.handle_formatting(params);
let resp = Response::new_ok(id, result);
self.connection.sender.send(Message::Response(resp))?;
return Ok(());
}
Err(req) => req,
};
Ok(())
}
@@ -138,15 +213,16 @@ impl LspServer {
let params: DidOpenTextDocumentParams = serde_json::from_value(not.params)?;
let uri = params.text_document.uri;
let text = params.text_document.text;
self.documents.insert(uri.clone(), text.clone());
self.update_document(uri.clone(), text.clone());
self.publish_diagnostics(uri, &text)?;
}
DidChangeTextDocument::METHOD => {
let params: DidChangeTextDocumentParams = serde_json::from_value(not.params)?;
let uri = params.text_document.uri;
if let Some(change) = params.content_changes.into_iter().last() {
self.documents.insert(uri.clone(), change.text.clone());
self.publish_diagnostics(uri, &change.text)?;
let text = change.text.clone();
self.update_document(uri.clone(), text.clone());
self.publish_diagnostics(uri, &text)?;
}
}
_ => {}
@@ -154,6 +230,18 @@ impl LspServer {
Ok(())
}
/// Refresh the cache entry for `uri` with the latest document text.
///
/// The document is re-parsed on every sync; a parse failure simply leaves
/// the cached entry without a symbol table (`symbol_table == None`), which
/// consumers must tolerate.
fn update_document(&mut self, uri: Url, text: String) {
    let symbol_table = match Parser::parse_source(&text) {
        Ok(program) => Some(SymbolTable::build(&program)),
        Err(_) => None,
    };
    self.documents.insert(uri, DocumentCache { text, symbol_table });
}
fn publish_diagnostics(
&self,
uri: Url,
@@ -214,7 +302,43 @@ impl LspServer {
let uri = params.text_document_position_params.text_document.uri;
let position = params.text_document_position_params.position;
let source = self.documents.get(&uri)?;
let doc = self.documents.get(&uri)?;
let source = &doc.text;
// Try to get info from symbol table first
if let Some(ref table) = doc.symbol_table {
let offset = self.position_to_offset(source, position);
if let Some(symbol) = table.definition_at_position(offset) {
let signature = symbol.type_signature.as_ref()
.map(|s| s.as_str())
.unwrap_or(&symbol.name);
let kind_str = match symbol.kind {
SymbolKind::Function => "function",
SymbolKind::Variable => "variable",
SymbolKind::Parameter => "parameter",
SymbolKind::Type => "type",
SymbolKind::TypeParameter => "type parameter",
SymbolKind::Variant => "variant",
SymbolKind::Effect => "effect",
SymbolKind::EffectOperation => "effect operation",
SymbolKind::Field => "field",
SymbolKind::Module => "module",
};
let doc_str = symbol.documentation.as_ref()
.map(|d| format!("\n\n{}", d))
.unwrap_or_default();
return Some(Hover {
contents: HoverContents::Markup(MarkupContent {
kind: MarkupKind::Markdown,
value: format!("```lux\n{}\n```\n\n*{}*{}", signature, kind_str, doc_str),
}),
range: None,
});
}
}
// Fall back to hardcoded info
// Extract the word at the cursor position
let word = self.get_word_at_position(source, position)?;
@@ -320,28 +444,49 @@ impl LspServer {
let position = params.text_document_position.position;
// Check context to provide relevant completions
let source = self.documents.get(&uri)?;
let doc = self.documents.get(&uri)?;
let source = &doc.text;
let trigger_context = self.get_completion_context(source, position);
let mut items = Vec::new();
// If triggered after a dot, provide module/method completions
if trigger_context == CompletionContext::ModuleAccess {
// Add List module functions
items.extend(self.get_list_completions());
// Add String module functions
items.extend(self.get_string_completions());
// Add Option/Result completions
items.extend(self.get_option_result_completions());
// Add Console functions
items.extend(self.get_console_completions());
// Add Math functions
items.extend(self.get_math_completions());
} else {
// General completions (keywords + common functions)
items.extend(self.get_keyword_completions());
items.extend(self.get_builtin_completions());
items.extend(self.get_type_completions());
// If triggered after a dot, provide module-specific completions
match trigger_context {
CompletionContext::ModuleAccess(ref module) => {
match module.as_str() {
"List" => items.extend(self.get_list_completions()),
"String" => items.extend(self.get_string_completions()),
"Option" | "Result" => items.extend(self.get_option_result_completions()),
"Console" => items.extend(self.get_console_completions()),
"Math" => items.extend(self.get_math_completions()),
"Sql" => items.extend(self.get_sql_completions()),
"File" => items.extend(self.get_file_completions()),
"Process" => items.extend(self.get_process_completions()),
"Http" => items.extend(self.get_http_completions()),
"Random" => items.extend(self.get_random_completions()),
"Time" => items.extend(self.get_time_completions()),
_ => {
// Unknown module, show all module completions
items.extend(self.get_list_completions());
items.extend(self.get_string_completions());
items.extend(self.get_option_result_completions());
items.extend(self.get_console_completions());
items.extend(self.get_math_completions());
items.extend(self.get_sql_completions());
items.extend(self.get_file_completions());
items.extend(self.get_process_completions());
items.extend(self.get_http_completions());
items.extend(self.get_random_completions());
items.extend(self.get_time_completions());
}
}
}
CompletionContext::General => {
// General completions (keywords + common functions)
items.extend(self.get_keyword_completions());
items.extend(self.get_builtin_completions());
items.extend(self.get_type_completions());
}
}
Some(CompletionResponse::Array(items))
@@ -353,7 +498,11 @@ impl LspServer {
if offset > 0 {
let prev_char = source.chars().nth(offset - 1);
if prev_char == Some('.') {
return CompletionContext::ModuleAccess;
// Extract the module name before the dot
if let Some(module_name) = self.get_word_at_offset(source, offset.saturating_sub(2)) {
return CompletionContext::ModuleAccess(module_name);
}
return CompletionContext::ModuleAccess(String::new());
}
}
CompletionContext::General
@@ -400,16 +549,26 @@ impl LspServer {
/// Completions offered in a general (non-module-access) context:
/// module names, the built-in constructors, and common free functions.
/// (Removed a duplicated "Console" entry that listed the module twice.)
fn get_builtin_completions(&self) -> Vec<CompletionItem> {
    vec![
        // Core modules
        completion_item("List", CompletionItemKind::MODULE, "List module"),
        completion_item("String", CompletionItemKind::MODULE, "String module"),
        completion_item("Console", CompletionItemKind::MODULE, "Console I/O effect"),
        completion_item("Math", CompletionItemKind::MODULE, "Math functions"),
        completion_item("Option", CompletionItemKind::MODULE, "Option type"),
        completion_item("Result", CompletionItemKind::MODULE, "Result type"),
        // Effect modules
        completion_item("Sql", CompletionItemKind::MODULE, "SQL database effect"),
        completion_item("File", CompletionItemKind::MODULE, "File system effect"),
        completion_item("Process", CompletionItemKind::MODULE, "Process/system effect"),
        completion_item("Http", CompletionItemKind::MODULE, "HTTP client effect"),
        completion_item("Random", CompletionItemKind::MODULE, "Random number effect"),
        completion_item("Time", CompletionItemKind::MODULE, "Time effect"),
        // Constructors
        completion_item("Some", CompletionItemKind::CONSTRUCTOR, "Option.Some constructor"),
        completion_item("None", CompletionItemKind::CONSTRUCTOR, "Option.None constructor"),
        completion_item("Ok", CompletionItemKind::CONSTRUCTOR, "Result.Ok constructor"),
        completion_item("Err", CompletionItemKind::CONSTRUCTOR, "Result.Err constructor"),
        // Functions
        completion_item("toString", CompletionItemKind::FUNCTION, "Convert value to string"),
    ]
}
@@ -495,14 +654,410 @@ impl LspServer {
]
}
/// Completions for `Sql.` member access.
fn get_sql_completions(&self) -> Vec<CompletionItem> {
    // (name, usage signature, short description) — rendered as METHOD items.
    let entries: &[(&str, &str, &str)] = &[
        ("open", "Sql.open(path)", "Open SQLite database file"),
        ("openMemory", "Sql.openMemory()", "Open in-memory database"),
        ("close", "Sql.close(conn)", "Close database connection"),
        ("execute", "Sql.execute(conn, sql)", "Execute SQL statement"),
        ("query", "Sql.query(conn, sql)", "Query and return rows"),
        ("queryOne", "Sql.queryOne(conn, sql)", "Query single row"),
        ("beginTx", "Sql.beginTx(conn)", "Begin transaction"),
        ("commit", "Sql.commit(conn)", "Commit transaction"),
        ("rollback", "Sql.rollback(conn)", "Rollback transaction"),
    ];
    entries
        .iter()
        .map(|&(name, sig, doc)| completion_item_with_doc(name, CompletionItemKind::METHOD, sig, doc))
        .collect()
}
/// Completions for `File.` member access.
fn get_file_completions(&self) -> Vec<CompletionItem> {
    // (name, usage signature, short description) — rendered as METHOD items.
    let entries: &[(&str, &str, &str)] = &[
        ("read", "File.read(path)", "Read file contents"),
        ("write", "File.write(path, content)", "Write to file"),
        ("append", "File.append(path, content)", "Append to file"),
        ("exists", "File.exists(path)", "Check if file exists"),
        ("delete", "File.delete(path)", "Delete file"),
        ("list", "File.list(path)", "List directory contents"),
    ];
    entries
        .iter()
        .map(|&(name, sig, doc)| completion_item_with_doc(name, CompletionItemKind::METHOD, sig, doc))
        .collect()
}
/// Completions for `Process.` member access.
fn get_process_completions(&self) -> Vec<CompletionItem> {
    // (name, usage signature, short description) — rendered as METHOD items.
    let entries: &[(&str, &str, &str)] = &[
        ("exec", "Process.exec(cmd)", "Execute shell command"),
        ("env", "Process.env(name)", "Get environment variable"),
        ("args", "Process.args()", "Get command-line arguments"),
        ("cwd", "Process.cwd()", "Get current directory"),
        ("exit", "Process.exit(code)", "Exit with code"),
    ];
    entries
        .iter()
        .map(|&(name, sig, doc)| completion_item_with_doc(name, CompletionItemKind::METHOD, sig, doc))
        .collect()
}
/// Completions for `Http.` member access.
fn get_http_completions(&self) -> Vec<CompletionItem> {
    // (name, usage signature, short description) — rendered as METHOD items.
    let entries: &[(&str, &str, &str)] = &[
        ("get", "Http.get(url)", "HTTP GET request"),
        ("post", "Http.post(url, body)", "HTTP POST request"),
        ("put", "Http.put(url, body)", "HTTP PUT request"),
        ("delete", "Http.delete(url)", "HTTP DELETE request"),
    ];
    entries
        .iter()
        .map(|&(name, sig, doc)| completion_item_with_doc(name, CompletionItemKind::METHOD, sig, doc))
        .collect()
}
/// Completions for `Random.` member access.
fn get_random_completions(&self) -> Vec<CompletionItem> {
    // (name, usage signature, short description) — rendered as METHOD items.
    let entries: &[(&str, &str, &str)] = &[
        ("int", "Random.int(min, max)", "Random integer in range"),
        ("float", "Random.float()", "Random float 0.0-1.0"),
        ("bool", "Random.bool()", "Random boolean"),
    ];
    entries
        .iter()
        .map(|&(name, sig, doc)| completion_item_with_doc(name, CompletionItemKind::METHOD, sig, doc))
        .collect()
}
/// Completions for `Time.` member access.
fn get_time_completions(&self) -> Vec<CompletionItem> {
    // (name, usage signature, short description) — rendered as METHOD items.
    let entries: &[(&str, &str, &str)] = &[
        ("now", "Time.now()", "Current Unix timestamp (ms)"),
        ("sleep", "Time.sleep(ms)", "Sleep for milliseconds"),
    ];
    entries
        .iter()
        .map(|&(name, sig, doc)| completion_item_with_doc(name, CompletionItemKind::METHOD, sig, doc))
        .collect()
}
/// `textDocument/definition`: resolve the symbol under the cursor to its
/// definition site within the same document.
///
/// Prefers the parsed symbol table; when the document did not parse, falls
/// back to naive textual pattern matching on `fn `, `let `, and `type `
/// prefixes. (Removed a stale duplicated `_params` parameter left over
/// from the previous stub implementation.)
fn handle_goto_definition(
    &self,
    params: GotoDefinitionParams,
) -> Option<GotoDefinitionResponse> {
    let uri = params.text_document_position_params.text_document.uri;
    let position = params.text_document_position_params.position;
    let doc = self.documents.get(&uri)?;
    let source = &doc.text;
    // Try symbol table first
    if let Some(ref table) = doc.symbol_table {
        let offset = self.position_to_offset(source, position);
        if let Some(symbol) = table.definition_at_position(offset) {
            let range = span_to_range(source, symbol.span.start, symbol.span.end);
            return Some(GotoDefinitionResponse::Scalar(Location { uri, range }));
        }
    }
    // Fallback: textual search. NOTE: `find` returns only the FIRST
    // occurrence, so shadowed/later definitions are not distinguished.
    let offset = self.position_to_offset(source, position);
    let word = self.get_word_at_offset(source, offset)?;
    // "fn <word>" — function definitions (+3 skips "fn ")
    let fn_pattern = format!("fn {}", word);
    if let Some(def_offset) = source.find(&fn_pattern) {
        let range = span_to_range(source, def_offset + 3, def_offset + 3 + word.len());
        return Some(GotoDefinitionResponse::Scalar(Location { uri, range }));
    }
    // "let <word> " — let bindings (+4 skips "let ")
    let let_pattern = format!("let {} ", word);
    if let Some(def_offset) = source.find(&let_pattern) {
        let range = span_to_range(source, def_offset + 4, def_offset + 4 + word.len());
        return Some(GotoDefinitionResponse::Scalar(Location { uri, range }));
    }
    // "type <word>" — type declarations (+5 skips "type ")
    let type_pattern = format!("type {}", word);
    if let Some(def_offset) = source.find(&type_pattern) {
        let range = span_to_range(source, def_offset + 5, def_offset + 5 + word.len());
        return Some(GotoDefinitionResponse::Scalar(Location { uri, range }));
    }
    None
}
fn handle_references(&self, params: ReferenceParams) -> Option<Vec<Location>> {
let uri = params.text_document_position.text_document.uri;
let position = params.text_document_position.position;
let doc = self.documents.get(&uri)?;
let source = &doc.text;
if let Some(ref table) = doc.symbol_table {
let offset = self.position_to_offset(source, position);
if let Some(symbol) = table.definition_at_position(offset) {
let refs = table.find_references(symbol.id);
let locations: Vec<Location> = refs.iter()
.map(|r| Location {
uri: uri.clone(),
range: span_to_range(source, r.span.start, r.span.end),
})
.collect();
return Some(locations);
}
}
None
}
fn handle_document_symbols(&self, params: DocumentSymbolParams) -> Option<DocumentSymbolResponse> {
let uri = params.text_document.uri;
let doc = self.documents.get(&uri)?;
let source = &doc.text;
if let Some(ref table) = doc.symbol_table {
let symbols: Vec<SymbolInformation> = table.global_symbols()
.iter()
.map(|sym| {
#[allow(deprecated)]
SymbolInformation {
name: sym.name.clone(),
kind: symbol_kind_to_lsp(&sym.kind),
tags: None,
deprecated: None,
location: Location {
uri: uri.clone(),
range: span_to_range(source, sym.span.start, sym.span.end),
},
container_name: None,
}
})
.collect();
return Some(DocumentSymbolResponse::Flat(symbols));
}
None
}
/// Extract the identifier (alphanumerics plus '_') surrounding `offset`.
/// Returns None when `offset` is out of range or sits on a non-word char
/// with no adjacent word characters before it.
///
/// NOTE(review): `offset` is treated as a *char* index into the source;
/// callers pass values derived from `position_to_offset` — confirm that
/// helper counts chars rather than bytes.
fn get_word_at_offset(&self, source: &str, offset: usize) -> Option<String> {
    let chars: Vec<char> = source.chars().collect();
    if offset >= chars.len() {
        return None;
    }
    let is_word = |c: char| c.is_alphanumeric() || c == '_';
    // Widen [start, end) around `offset` while it covers word characters.
    let mut start = offset;
    while start > 0 && is_word(chars[start - 1]) {
        start -= 1;
    }
    let mut end = offset;
    while end < chars.len() && is_word(chars[end]) {
        end += 1;
    }
    (start != end).then(|| chars[start..end].iter().collect())
}
fn handle_rename(&self, params: RenameParams) -> Option<WorkspaceEdit> {
let uri = params.text_document_position.text_document.uri;
let position = params.text_document_position.position;
let new_name = params.new_name;
let doc = self.documents.get(&uri)?;
let source = &doc.text;
if let Some(ref table) = doc.symbol_table {
let offset = self.position_to_offset(source, position);
if let Some(symbol) = table.definition_at_position(offset) {
// Find all references to this symbol
let refs = table.find_references(symbol.id);
// Create text edits for each reference
let edits: Vec<TextEdit> = refs.iter()
.map(|r| TextEdit {
range: span_to_range(source, r.span.start, r.span.end),
new_text: new_name.clone(),
})
.collect();
// Return workspace edit
let mut changes = HashMap::new();
changes.insert(uri, edits);
return Some(WorkspaceEdit {
changes: Some(changes),
document_changes: None,
change_annotations: None,
});
}
}
None
}
/// `textDocument/signatureHelp`: show the signature of the function call
/// enclosing the cursor, with the active parameter index derived from the
/// number of top-level commas between the opening paren and the cursor.
fn handle_signature_help(&self, params: SignatureHelpParams) -> Option<SignatureHelp> {
    let uri = params.text_document_position_params.text_document.uri;
    let position = params.text_document_position_params.position;
    let doc = self.documents.get(&uri)?;
    let source = &doc.text;
    let offset = self.position_to_offset(source, position);
    // Find the function call context by searching backwards for '('
    let chars: Vec<char> = source.chars().collect();
    let mut paren_depth = 0;
    let mut comma_count = 0;
    let mut func_start = offset;
    // Scan backwards from the cursor: a ')' opens a nested call that must
    // be skipped; a '(' at depth 0 is the enclosing call's opening paren;
    // each ',' at depth 0 means the cursor is one argument further along.
    for i in (0..offset).rev() {
        let c = chars.get(i)?;
        match c {
            ')' => paren_depth += 1,
            '(' => {
                if paren_depth == 0 {
                    func_start = i;
                    break;
                }
                paren_depth -= 1;
            }
            ',' if paren_depth == 0 => comma_count += 1,
            _ => {}
        }
    }
    // Get the function name before the opening paren
    if func_start == 0 {
        return None;
    }
    let func_name = self.get_word_at_offset(source, func_start - 1)?;
    // Look up function in symbol table
    if let Some(ref table) = doc.symbol_table {
        // Search for function definition
        for sym in table.global_symbols() {
            if sym.name == func_name {
                if let Some(ref sig) = sym.type_signature {
                    // Parse parameters from signature
                    let params = self.extract_parameters_from_signature(sig);
                    let signature_info = SignatureInformation {
                        label: sig.clone(),
                        documentation: sym.documentation.as_ref().map(|d| {
                            lsp_types::Documentation::MarkupContent(MarkupContent {
                                kind: MarkupKind::Markdown,
                                value: d.clone(),
                            })
                        }),
                        parameters: Some(params),
                        active_parameter: Some(comma_count as u32),
                    };
                    return Some(SignatureHelp {
                        signatures: vec![signature_info],
                        active_signature: Some(0),
                        active_parameter: Some(comma_count as u32),
                    });
                }
            }
        }
    }
    // Fall back to hardcoded signatures for built-in functions
    self.get_builtin_signature(&func_name, comma_count)
}
/// Split the parameter list out of a rendered signature such as
/// `fn name(a: Int, f: fn(B, A): B): Ret`.
///
/// The parameter span is delimited by the first '(' and its *matching*
/// ')' (the previous version used the first ')', which truncated
/// signatures containing function-typed parameters), and commas only
/// separate parameters at the top nesting level, so nested `fn(...)`
/// and generic `<...>` arguments are kept intact.
fn extract_parameters_from_signature(&self, sig: &str) -> Vec<ParameterInformation> {
    let mut params = Vec::new();
    let start = match sig.find('(') {
        Some(i) => i,
        None => return params,
    };
    // Push the currently accumulated segment as one parameter label.
    fn push_param(params: &mut Vec<ParameterInformation>, segment: &str) {
        let param = segment.trim();
        if !param.is_empty() {
            params.push(ParameterInformation {
                label: lsp_types::ParameterLabel::Simple(param.to_string()),
                documentation: None,
            });
        }
    }
    let mut paren_depth = 0i32; // () depth relative to the opening paren
    let mut angle_depth = 0i32; // <> depth for generic arguments
    let mut current = String::new();
    for c in sig[start + 1..].chars() {
        match c {
            '(' => {
                paren_depth += 1;
                current.push(c);
            }
            ')' => {
                if paren_depth == 0 {
                    break; // matching close paren of the parameter list
                }
                paren_depth -= 1;
                current.push(c);
            }
            '<' => {
                angle_depth += 1;
                current.push(c);
            }
            '>' => {
                angle_depth -= 1;
                current.push(c);
            }
            ',' if paren_depth == 0 && angle_depth == 0 => {
                push_param(&mut params, &current);
                current.clear();
            }
            _ => current.push(c),
        }
    }
    push_param(&mut params, &current);
    params
}
/// Fallback signature-help entries for stdlib built-ins that have no
/// symbol-table entry. `active_param` is the zero-based index of the
/// argument the cursor is currently on; None for unknown names.
fn get_builtin_signature(&self, func_name: &str, active_param: usize) -> Option<SignatureHelp> {
    // (full signature label, per-parameter labels) for each known builtin.
    let (sig, params): (&str, Vec<&str>) = match func_name {
        // List functions
        "map" => ("fn map<A, B>(list: List<A>, f: fn(A): B): List<B>", vec!["list: List<A>", "f: fn(A): B"]),
        "filter" => ("fn filter<A>(list: List<A>, f: fn(A): Bool): List<A>", vec!["list: List<A>", "f: fn(A): Bool"]),
        "fold" => ("fn fold<A, B>(list: List<A>, init: B, f: fn(B, A): B): B", vec!["list: List<A>", "init: B", "f: fn(B, A): B"]),
        "head" => ("fn head<A>(list: List<A>): Option<A>", vec!["list: List<A>"]),
        "tail" => ("fn tail<A>(list: List<A>): Option<List<A>>", vec!["list: List<A>"]),
        "concat" => ("fn concat<A>(a: List<A>, b: List<A>): List<A>", vec!["a: List<A>", "b: List<A>"]),
        "length" => ("fn length<A>(list: List<A>): Int", vec!["list: List<A>"]),
        "get" => ("fn get<A>(list: List<A>, index: Int): Option<A>", vec!["list: List<A>", "index: Int"]),
        // String functions
        "split" => ("fn split(s: String, sep: String): List<String>", vec!["s: String", "sep: String"]),
        "join" => ("fn join(list: List<String>, sep: String): String", vec!["list: List<String>", "sep: String"]),
        "replace" => ("fn replace(s: String, from: String, to: String): String", vec!["s: String", "from: String", "to: String"]),
        "substring" => ("fn substring(s: String, start: Int, end: Int): String", vec!["s: String", "start: Int", "end: Int"]),
        "contains" => ("fn contains(s: String, sub: String): Bool", vec!["s: String", "sub: String"]),
        // Option functions
        "getOrElse" => ("fn getOrElse<A>(opt: Option<A>, default: A): A", vec!["opt: Option<A>", "default: A"]),
        // Result functions
        "mapErr" => ("fn mapErr<E, E2, T>(result: Result<T, E>, f: fn(E): E2): Result<T, E2>", vec!["result: Result<T, E>", "f: fn(E): E2"]),
        _ => return None,
    };
    // Wrap each raw label in the LSP parameter-information struct.
    let param_infos: Vec<ParameterInformation> = params.iter()
        .map(|p| ParameterInformation {
            label: lsp_types::ParameterLabel::Simple(p.to_string()),
            documentation: None,
        })
        .collect();
    Some(SignatureHelp {
        signatures: vec![SignatureInformation {
            label: sig.to_string(),
            documentation: None,
            parameters: Some(param_infos),
            active_parameter: Some(active_param as u32),
        }],
        active_signature: Some(0),
        active_parameter: Some(active_param as u32),
    })
}
/// `textDocument/formatting`: format the whole document with the Lux
/// formatter and return a single whole-document TextEdit (empty edit
/// list when the source is already formatted, None on formatter error).
fn handle_formatting(&self, params: DocumentFormattingParams) -> Option<Vec<TextEdit>> {
    let uri = params.text_document.uri;
    let doc = self.documents.get(&uri)?;
    let source = &doc.text;
    // Use the Lux formatter with default config
    let config = FormatConfig::default();
    match format_source(source, &config) {
        Ok(formatted) => {
            if formatted == *source {
                // No changes needed
                return Some(vec![]);
            }
            // Replace the entire document. Split on '\n' rather than
            // `lines()`: with a trailing newline, `lines()` drops the
            // final empty segment, so the edit range stopped one line
            // short and the original trailing newline survived the edit.
            let segments: Vec<&str> = source.split('\n').collect();
            let last_line = segments.len().saturating_sub(1);
            // NOTE(review): LSP `character` counts UTF-16 code units;
            // char count matches for BMP-only text (byte len did not).
            let last_col = segments.last().map(|l| l.chars().count()).unwrap_or(0);
            Some(vec![TextEdit {
                range: Range {
                    start: Position { line: 0, character: 0 },
                    end: Position {
                        line: last_line as u32,
                        character: last_col as u32,
                    },
                },
                new_text: formatted,
            }])
        }
        Err(_) => {
            // Formatting failed: return no edits rather than clobbering
            // the document with partial output.
            None
        }
    }
}
}
/// Convert byte offsets to LSP Position
@@ -555,8 +1110,8 @@ where
/// Context for completion suggestions.
/// (Removed a leftover unit `ModuleAccess` variant that duplicated the
/// payload-carrying one below.)
#[derive(PartialEq)]
enum CompletionContext {
    /// After a dot, carrying the module name typed before it
    /// (e.g. "List." -> ModuleAccess("List")); empty string when no
    /// identifier precedes the dot.
    ModuleAccess(String),
    /// General context (keywords, types, etc.)
    General,
}
@@ -589,3 +1144,19 @@ fn completion_item_with_doc(
..Default::default()
}
}
/// Convert symbol kind to LSP symbol kind
fn symbol_kind_to_lsp(kind: &SymbolKind) -> lsp_types::SymbolKind {
match kind {
SymbolKind::Function => lsp_types::SymbolKind::FUNCTION,
SymbolKind::Variable => lsp_types::SymbolKind::VARIABLE,
SymbolKind::Parameter => lsp_types::SymbolKind::VARIABLE,
SymbolKind::Type => lsp_types::SymbolKind::CLASS,
SymbolKind::TypeParameter => lsp_types::SymbolKind::TYPE_PARAMETER,
SymbolKind::Variant => lsp_types::SymbolKind::ENUM_MEMBER,
SymbolKind::Effect => lsp_types::SymbolKind::INTERFACE,
SymbolKind::EffectOperation => lsp_types::SymbolKind::METHOD,
SymbolKind::Field => lsp_types::SymbolKind::FIELD,
SymbolKind::Module => lsp_types::SymbolKind::MODULE,
}
}

View File

@@ -15,6 +15,7 @@ mod package;
mod parser;
mod registry;
mod schema;
mod symbol_table;
mod typechecker;
mod types;
@@ -47,8 +48,10 @@ Commands:
:env Show user-defined bindings
:clear Clear the environment
:load <file> Load and execute a file
:reload, :r Reload the last loaded file
:trace on/off Enable/disable effect tracing
:traces Show recorded effect traces
:ast <expr> Show the AST of an expression (for debugging)
Keyboard:
Tab Autocomplete
@@ -57,6 +60,10 @@ Keyboard:
Up/Down Browse history
Ctrl-R Search history
Effects:
All code in the REPL runs with Console, File, and other standard effects.
Use :trace on to see effect invocations during execution.
Examples:
> let x = 42
> x + 1
@@ -65,6 +72,9 @@ Examples:
> fn double(n: Int): Int = n * 2
> :type double
double : fn(Int) -> Int
> :load myfile.lux
> :reload
> double(21)
42
@@ -175,6 +185,10 @@ fn main() {
compile_to_c(&args[2], output_path, run_after, emit_c);
}
}
"doc" => {
// Generate API documentation
generate_docs(&args[2..]);
}
path => {
// Run a file
run_file(path);
@@ -207,6 +221,8 @@ fn print_help() {
println!(" lux registry Start package registry server");
println!(" -s, --storage <dir> Storage directory (default: ./lux-registry)");
println!(" -b, --bind <addr> Bind address (default: 127.0.0.1:8080)");
println!(" lux doc [file] [-o dir] Generate API documentation (HTML)");
println!(" --json Output as JSON");
println!(" lux --lsp Start LSP server (for IDE integration)");
println!(" lux --help Show this help");
println!(" lux --version Show version");
@@ -1428,6 +1444,681 @@ let output = run main() with {}
println!(" lux src/main.lux");
}
/// Generate API documentation for Lux source files.
///
/// Implements `lux doc [file] [-o dir] [--json]`: documents either the
/// single file named on the command line or, when none is given, every
/// `.lux` file discovered under `src/` and `stdlib/`. Output is a JSON
/// dump on stdout (`--json`) or static HTML pages plus a stylesheet in
/// the output directory (default `docs/`). Exits the process with
/// status 1 on unusable input; per-file errors are reported and counted
/// but do not abort the run.
fn generate_docs(args: &[String]) {
    use std::path::Path;
    use std::collections::HashMap;
    let output_json = args.iter().any(|a| a == "--json");
    // Directory following `-o`, defaulting to "docs".
    let output_dir = args.iter()
        .position(|a| a == "-o")
        .and_then(|i| args.get(i + 1))
        .map(|s| s.as_str())
        .unwrap_or("docs");
    // First non-flag argument that isn't the `-o` value.
    // NOTE(review): an input file whose name equals the output dir
    // (e.g. literally "docs") would be skipped here — confirm acceptable.
    let input_file = args.iter().find(|a| !a.starts_with('-') && *a != output_dir);
    // Collect files to document
    let mut files_to_doc = Vec::new();
    if let Some(path) = input_file {
        if Path::new(path).is_file() {
            files_to_doc.push(path.to_string());
        } else {
            eprintln!("File not found: {}", path);
            std::process::exit(1);
        }
    } else {
        // Auto-discover files
        if Path::new("src").is_dir() {
            collect_lux_files_for_docs("src", &mut files_to_doc);
        }
        if Path::new("stdlib").is_dir() {
            collect_lux_files_for_docs("stdlib", &mut files_to_doc);
        }
    }
    if files_to_doc.is_empty() {
        eprintln!("No .lux files found to document");
        std::process::exit(1);
    }
    // Create output directory (not needed for JSON output on stdout)
    if !output_json {
        if let Err(e) = std::fs::create_dir_all(output_dir) {
            eprintln!("Failed to create output directory: {}", e);
            std::process::exit(1);
        }
    }
    // Module name (file stem) -> extracted documentation.
    let mut all_docs: HashMap<String, ModuleDoc> = HashMap::new();
    let mut error_count = 0;
    for file_path in &files_to_doc {
        let source = match std::fs::read_to_string(file_path) {
            Ok(s) => s,
            Err(e) => {
                eprintln!("{}: ERROR - {}", file_path, e);
                error_count += 1;
                continue;
            }
        };
        match extract_module_doc(&source, file_path) {
            Ok(doc) => {
                // Module name is the file stem, e.g. "stdlib/json.lux" -> "json".
                let module_name = Path::new(file_path)
                    .file_stem()
                    .map(|s| s.to_string_lossy().to_string())
                    .unwrap_or_else(|| "unknown".to_string());
                all_docs.insert(module_name, doc);
            }
            Err(e) => {
                eprintln!("{}: PARSE ERROR - {}", file_path, e);
                error_count += 1;
            }
        }
    }
    if output_json {
        // Output as JSON
        println!("{}", docs_to_json(&all_docs));
    } else {
        // Generate HTML files: one index page plus one page per module.
        let index_html = generate_index_html(&all_docs);
        let index_path = format!("{}/index.html", output_dir);
        if let Err(e) = std::fs::write(&index_path, &index_html) {
            eprintln!("Failed to write index.html: {}", e);
            error_count += 1;
        }
        for (module_name, doc) in &all_docs {
            let html = generate_module_html(module_name, doc);
            let path = format!("{}/{}.html", output_dir, module_name);
            if let Err(e) = std::fs::write(&path, &html) {
                eprintln!("Failed to write {}: {}", path, e);
                error_count += 1;
            }
        }
        // Generate CSS shared by all pages.
        let css_path = format!("{}/style.css", output_dir);
        if let Err(e) = std::fs::write(&css_path, DOC_CSS) {
            eprintln!("Failed to write style.css: {}", e);
            error_count += 1;
        }
        println!("Generated documentation in {}/", output_dir);
        println!("  {} modules documented", all_docs.len());
        if error_count > 0 {
            println!("  {} errors", error_count);
        }
    }
}
/// Recursively gather every `.lux` file beneath `dir` into `files`.
/// Unreadable directories and entries are skipped silently.
fn collect_lux_files_for_docs(dir: &str, files: &mut Vec<String>) {
    if let Ok(entries) = std::fs::read_dir(dir) {
        for entry in entries.flatten() {
            let path = entry.path();
            if path.is_dir() {
                // Descend into subdirectories.
                collect_lux_files_for_docs(&path.to_string_lossy(), files);
            } else if path.is_file() && matches!(path.extension(), Some(ext) if ext == "lux") {
                files.push(path.to_string_lossy().into_owned());
            }
        }
    }
}
/// Everything documentable extracted from one `.lux` module.
#[derive(Debug, Clone)]
struct ModuleDoc {
    /// Module-level description: the comment block at the top of the file.
    description: Option<String>,
    /// Top-level function declarations, in source order.
    functions: Vec<FunctionDoc>,
    /// Top-level type declarations, in source order.
    types: Vec<TypeDoc>,
    /// Effect declarations, in source order.
    effects: Vec<EffectDoc>,
}
/// Documentation for a single function declaration.
#[derive(Debug, Clone)]
struct FunctionDoc {
    /// Function name as declared.
    name: String,
    /// Rendered signature, e.g. `fn add(a: Int, b: Int): Int`.
    signature: String,
    /// Doc comment (`///`) preceding the declaration, if any.
    description: Option<String>,
    /// True when declared with public visibility.
    is_public: bool,
    /// Behavioral properties (Debug names, lowercased — e.g. "pure").
    properties: Vec<String>,
}
/// Documentation for a single type declaration.
#[derive(Debug, Clone)]
struct TypeDoc {
    /// Type name as declared.
    name: String,
    /// Rendered definition, e.g. `type Color = Red | Green | Blue`.
    definition: String,
    /// Doc comment (`///`) preceding the declaration, if any.
    description: Option<String>,
    /// True when declared with public visibility.
    is_public: bool,
}
/// Documentation for a single effect declaration.
#[derive(Debug, Clone)]
struct EffectDoc {
    /// Effect name as declared.
    name: String,
    /// Rendered operation signatures, e.g. `read(path: String): String`.
    operations: Vec<String>,
    /// Doc comment (`///`) preceding the declaration, if any.
    description: Option<String>,
}
/// Parse `source` (located at `path`, which anchors import resolution)
/// and pull out the documentable surface of the module: its top-of-file
/// description and every function, type, and effect declaration.
///
/// Returns `Err` with the loader/parser message when the module fails to
/// load. (Removed an unused `pending_doc` local that was never read.)
fn extract_module_doc(source: &str, path: &str) -> Result<ModuleDoc, String> {
    use modules::ModuleLoader;
    use std::path::Path;
    let mut loader = ModuleLoader::new();
    let file_path = Path::new(path);
    // Resolve sibling imports relative to the documented file.
    if let Some(parent) = file_path.parent() {
        loader.add_search_path(parent.to_path_buf());
    }
    let program = loader.load_source(source, Some(file_path))
        .map_err(|e| format!("{}", e))?;
    let mut module_desc: Option<String> = None;
    let mut functions = Vec::new();
    let mut types = Vec::new();
    let mut effects = Vec::new();
    // Extract module-level comment: the contiguous `//` block before the
    // first non-empty, non-comment line.
    let lines: Vec<&str> = source.lines().collect();
    let mut module_comment = Vec::new();
    for line in &lines {
        let trimmed = line.trim();
        if trimmed.starts_with("//") {
            module_comment.push(trimmed.trim_start_matches('/').trim());
        } else if !trimmed.is_empty() {
            break;
        }
    }
    if !module_comment.is_empty() {
        module_desc = Some(module_comment.join("\n"));
    }
    for decl in &program.declarations {
        match decl {
            ast::Declaration::Function(f) => {
                // Render "name: Type" for each parameter.
                let params: Vec<String> = f.params.iter()
                    .map(|p| format!("{}: {}", p.name.name, format_type(&p.typ)))
                    .collect();
                // " with {Effect1, Effect2}" suffix; empty for pure functions.
                let effects_str = if f.effects.is_empty() {
                    String::new()
                } else {
                    format!(" with {{{}}}", f.effects.iter().map(|e| e.name.clone()).collect::<Vec<_>>().join(", "))
                };
                // Behavioral properties rendered via Debug, lowercased.
                let props: Vec<String> = f.properties.iter()
                    .map(|p| format!("{:?}", p).to_lowercase())
                    .collect();
                let props_str = if props.is_empty() {
                    String::new()
                } else {
                    format!(" is {}", props.join(", "))
                };
                let signature = format!(
                    "fn {}({}): {}{}{}",
                    f.name.name,
                    params.join(", "),
                    format_type(&f.return_type),
                    props_str,
                    effects_str
                );
                // Doc comment immediately preceding the declaration.
                let doc = extract_doc_comment(source, f.span.start);
                functions.push(FunctionDoc {
                    name: f.name.name.clone(),
                    signature,
                    description: doc,
                    is_public: matches!(f.visibility, ast::Visibility::Public),
                    properties: props,
                });
            }
            ast::Declaration::Type(t) => {
                let doc = extract_doc_comment(source, t.span.start);
                types.push(TypeDoc {
                    name: t.name.name.clone(),
                    definition: format_type_def(t),
                    description: doc,
                    is_public: matches!(t.visibility, ast::Visibility::Public),
                });
            }
            ast::Declaration::Effect(e) => {
                let doc = extract_doc_comment(source, e.span.start);
                // Render each operation as "name(params): ReturnType".
                let ops: Vec<String> = e.operations.iter()
                    .map(|op| {
                        let params: Vec<String> = op.params.iter()
                            .map(|p| format!("{}: {}", p.name.name, format_type(&p.typ)))
                            .collect();
                        format!("{}({}): {}", op.name.name, params.join(", "), format_type(&op.return_type))
                    })
                    .collect();
                effects.push(EffectDoc {
                    name: e.name.name.clone(),
                    operations: ops,
                    description: doc,
                });
            }
            _ => {}
        }
    }
    Ok(ModuleDoc {
        description: module_desc,
        functions,
        types,
        effects,
    })
}
/// Walk backwards from byte position `pos` (the start of a declaration)
/// and collect the `///` doc-comment block preceding it, returned
/// top-to-bottom joined with '\n'. Returns None when no `///` lines are
/// found. Plain `//` comments are skipped without terminating the scan,
/// and blank lines are tolerated until at least one doc line is
/// collected — so a doc block separated from its declaration by a blank
/// line still attaches.
fn extract_doc_comment(source: &str, pos: usize) -> Option<String> {
    // Look backwards from the declaration for doc comments
    let prefix = &source[..pos];
    let lines: Vec<&str> = prefix.lines().collect();
    let mut doc_lines = Vec::new();
    for line in lines.iter().rev() {
        let trimmed = line.trim();
        // The "///" check must precede the "//" check — both match "///".
        if trimmed.starts_with("///") {
            // Strip all leading slashes and surrounding whitespace.
            doc_lines.push(trimmed.trim_start_matches('/').trim());
        } else if trimmed.starts_with("//") {
            // Regular comment, skip
            continue;
        } else if trimmed.is_empty() {
            // Blank line: stop only once a doc block has been gathered.
            if !doc_lines.is_empty() {
                break;
            }
        } else {
            // Reached code: the doc block (if any) is complete.
            break;
        }
    }
    if doc_lines.is_empty() {
        None
    } else {
        // Collected in reverse; restore source order before joining.
        doc_lines.reverse();
        Some(doc_lines.join("\n"))
    }
}
/// Render a type expression back to Lux surface syntax.
fn format_type(t: &ast::TypeExpr) -> String {
    // Comma-join a list of type expressions, used by several arms below.
    fn comma_join(items: &[ast::TypeExpr]) -> String {
        items.iter().map(format_type).collect::<Vec<_>>().join(", ")
    }
    match t {
        ast::TypeExpr::Named(ident) => ident.name.clone(),
        ast::TypeExpr::App(head, args) => {
            format!("{}<{}>", format_type(head), comma_join(args))
        }
        ast::TypeExpr::Function { params, return_type, .. } => {
            format!("fn({}): {}", comma_join(params), format_type(return_type))
        }
        ast::TypeExpr::Tuple(elems) => format!("({})", comma_join(elems)),
        ast::TypeExpr::Record(fields) => {
            let body = fields
                .iter()
                .map(|f| format!("{}: {}", f.name.name, format_type(&f.typ)))
                .collect::<Vec<_>>()
                .join(", ");
            format!("{{ {} }}", body)
        }
        ast::TypeExpr::Unit => "Unit".to_string(),
        // Version annotations are not shown in rendered signatures.
        ast::TypeExpr::Versioned { base, .. } => format_type(base),
    }
}
/// Render a type declaration as `type Name = <definition>`.
fn format_type_def(t: &ast::TypeDecl) -> String {
    // Render the right-hand side per definition kind, then assemble once.
    let rhs = match &t.definition {
        ast::TypeDef::Alias(aliased) => format_type(aliased),
        ast::TypeDef::Enum(variants) => variants
            .iter()
            .map(|v| match &v.fields {
                ast::VariantFields::Unit => v.name.name.clone(),
                ast::VariantFields::Tuple(types) => {
                    let inner = types.iter().map(format_type).collect::<Vec<_>>().join(", ");
                    format!("{}({})", v.name.name, inner)
                }
                ast::VariantFields::Record(fields) => {
                    let inner = fields
                        .iter()
                        .map(|f| format!("{}: {}", f.name.name, format_type(&f.typ)))
                        .collect::<Vec<_>>()
                        .join(", ");
                    format!("{}{{ {} }}", v.name.name, inner)
                }
            })
            .collect::<Vec<_>>()
            .join(" | "),
        ast::TypeDef::Record(fields) => {
            let inner = fields
                .iter()
                .map(|f| format!("{}: {}", f.name.name, format_type(&f.typ)))
                .collect::<Vec<_>>()
                .join(", ");
            format!("{{ {} }}", inner)
        }
    };
    format!("type {} = {}", t.name.name, rhs)
}
/// Render the collected module docs as a JSON object keyed by module name.
///
/// JSON is built by hand (no serde dependency on this path). Modules are
/// now emitted in sorted name order so the output is deterministic —
/// iterating the `HashMap` directly varied run to run. String values are
/// escaped with `escape_json`; missing descriptions become JSON `null`.
fn docs_to_json(docs: &std::collections::HashMap<String, ModuleDoc>) -> String {
    let mut json = String::from("{\n");
    // Sort module names for deterministic output.
    let mut module_names: Vec<&String> = docs.keys().collect();
    module_names.sort();
    let mut first_module = true;
    for name in module_names {
        let doc = &docs[name];
        if !first_module {
            json.push_str(",\n");
        }
        first_module = false;
        json.push_str(&format!("  \"{}\": {{\n", escape_json(name)));
        // Description is optional; omitted entirely when absent.
        if let Some(desc) = &doc.description {
            json.push_str(&format!("    \"description\": \"{}\",\n", escape_json(desc)));
        }
        // Functions
        json.push_str("    \"functions\": [\n");
        for (i, f) in doc.functions.iter().enumerate() {
            json.push_str(&format!(
                "      {{\"name\": \"{}\", \"signature\": \"{}\", \"public\": {}, \"description\": {}}}",
                escape_json(&f.name),
                escape_json(&f.signature),
                f.is_public,
                f.description.as_ref().map(|d| format!("\"{}\"", escape_json(d))).unwrap_or("null".to_string())
            ));
            if i < doc.functions.len() - 1 {
                json.push(',');
            }
            json.push('\n');
        }
        json.push_str("    ],\n");
        // Types
        json.push_str("    \"types\": [\n");
        for (i, t) in doc.types.iter().enumerate() {
            json.push_str(&format!(
                "      {{\"name\": \"{}\", \"definition\": \"{}\", \"public\": {}, \"description\": {}}}",
                escape_json(&t.name),
                escape_json(&t.definition),
                t.is_public,
                t.description.as_ref().map(|d| format!("\"{}\"", escape_json(d))).unwrap_or("null".to_string())
            ));
            if i < doc.types.len() - 1 {
                json.push(',');
            }
            json.push('\n');
        }
        json.push_str("    ],\n");
        // Effects
        json.push_str("    \"effects\": [\n");
        for (i, e) in doc.effects.iter().enumerate() {
            let ops_json: Vec<String> = e.operations.iter()
                .map(|o| format!("\"{}\"", escape_json(o)))
                .collect();
            json.push_str(&format!(
                "      {{\"name\": \"{}\", \"operations\": [{}], \"description\": {}}}",
                escape_json(&e.name),
                ops_json.join(", "),
                e.description.as_ref().map(|d| format!("\"{}\"", escape_json(d))).unwrap_or("null".to_string())
            ));
            if i < doc.effects.len() - 1 {
                json.push(',');
            }
            json.push('\n');
        }
        json.push_str("    ]\n");
        json.push_str("  }");
    }
    json.push_str("\n}");
    json
}
/// Escape a string for embedding inside a JSON string literal.
///
/// Handles backslash, double quote, and the common whitespace escapes,
/// and emits `\u00XX` for the remaining C0 control characters, which the
/// previous replace-chain left raw (producing invalid JSON).
fn escape_json(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    for c in s.chars() {
        match c {
            '\\' => out.push_str("\\\\"),
            '"' => out.push_str("\\\""),
            '\n' => out.push_str("\\n"),
            '\r' => out.push_str("\\r"),
            '\t' => out.push_str("\\t"),
            // JSON requires all control chars below U+0020 to be escaped.
            c if (c as u32) < 0x20 => out.push_str(&format!("\\u{:04x}", c as u32)),
            c => out.push(c),
        }
    }
    out
}
/// Render the documentation index page listing all modules.
///
/// Module names are sorted for deterministic output, and both names and
/// descriptions pass through `html_escape` (the name is interpolated into
/// an href attribute as well as the link text).
fn generate_index_html(docs: &std::collections::HashMap<String, ModuleDoc>) -> String {
    let mut html = String::from(r#"<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Lux API Documentation</title>
<link rel="stylesheet" href="style.css">
</head>
<body>
<header>
<h1>Lux API Documentation</h1>
</header>
<main>
<h2>Modules</h2>
<ul class="module-list">
"#);
    let mut modules: Vec<_> = docs.keys().collect();
    modules.sort();
    for name in modules {
        let doc = &docs[name];
        // Only the first line of the module description is shown in the list.
        let desc = doc.description.as_ref()
            .map(|d| d.lines().next().unwrap_or(""))
            .unwrap_or("");
        html.push_str(&format!(
            " <li><a href=\"{}.html\">{}</a> - {}</li>\n",
            html_escape(name), html_escape(name), html_escape(desc)
        ));
    }
    html.push_str(r#" </ul>
</main>
</body>
</html>"#);
    html
}
/// Render the HTML documentation page for a single module.
///
/// Every interpolated value — the module name (title/heading), type and
/// effect names, signatures, descriptions, and the function name used as
/// an `id` attribute — passes through `html_escape` so markup characters
/// in source docs cannot break the page.
fn generate_module_html(name: &str, doc: &ModuleDoc) -> String {
    let mut html = format!(r#"<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>{} - Lux API</title>
<link rel="stylesheet" href="style.css">
</head>
<body>
<header>
<a href="index.html">Back to Index</a>
<h1>{}</h1>
</header>
<main>
"#, html_escape(name), html_escape(name));
    if let Some(desc) = &doc.description {
        html.push_str(&format!(" <div class=\"module-description\">{}</div>\n", html_escape(desc)));
    }
    // Types
    if !doc.types.is_empty() {
        html.push_str(" <section>\n <h2>Types</h2>\n");
        for t in &doc.types {
            let visibility = if t.is_public { "pub " } else { "" };
            html.push_str(&format!(
                " <div class=\"item\">\n <code class=\"signature\">{}{}</code>\n",
                visibility, html_escape(&t.definition)
            ));
            if let Some(desc) = &t.description {
                html.push_str(&format!(" <p class=\"description\">{}</p>\n", html_escape(desc)));
            }
            html.push_str(" </div>\n");
        }
        html.push_str(" </section>\n");
    }
    // Effects
    if !doc.effects.is_empty() {
        html.push_str(" <section>\n <h2>Effects</h2>\n");
        for e in &doc.effects {
            html.push_str(&format!(
                " <div class=\"item\">\n <h3>effect {}</h3>\n",
                html_escape(&e.name)
            ));
            if let Some(desc) = &e.description {
                html.push_str(&format!(" <p class=\"description\">{}</p>\n", html_escape(desc)));
            }
            html.push_str(" <ul class=\"operations\">\n");
            for op in &e.operations {
                html.push_str(&format!(" <li><code>{}</code></li>\n", html_escape(op)));
            }
            html.push_str(" </ul>\n </div>\n");
        }
        html.push_str(" </section>\n");
    }
    // Functions
    if !doc.functions.is_empty() {
        html.push_str(" <section>\n <h2>Functions</h2>\n");
        for f in &doc.functions {
            let visibility = if f.is_public { "pub " } else { "" };
            // Escape the name here too: it is used as an id attribute value.
            html.push_str(&format!(
                " <div class=\"item\" id=\"{}\">\n <code class=\"signature\">{}{}</code>\n",
                html_escape(&f.name), visibility, html_escape(&f.signature)
            ));
            if let Some(desc) = &f.description {
                html.push_str(&format!(" <p class=\"description\">{}</p>\n", html_escape(desc)));
            }
            html.push_str(" </div>\n");
        }
        html.push_str(" </section>\n");
    }
    html.push_str(r#" </main>
</body>
</html>"#);
    html
}
/// Escape text for safe interpolation into HTML element bodies and into
/// double- or single-quoted attribute values.
///
/// `&` is replaced first so later replacements are not double-escaped;
/// `'` is escaped as `&#39;` (previously missing, which made the output
/// unsafe inside single-quoted attributes).
fn html_escape(s: &str) -> String {
    s.replace('&', "&amp;")
        .replace('<', "&lt;")
        .replace('>', "&gt;")
        .replace('"', "&quot;")
        .replace('\'', "&#39;")
}
/// Stylesheet for the generated documentation pages (dark theme); the
/// generated HTML links it via `<link rel="stylesheet" href="style.css">`,
/// so this text is presumably written out as `style.css` — confirm against
/// the doc-writer code.
const DOC_CSS: &str = r#"
:root {
--bg-color: #1a1a2e;
--text-color: #e0e0e0;
--link-color: #64b5f6;
--code-bg: #16213e;
--header-bg: #0f3460;
--accent: #e94560;
}
body {
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
background-color: var(--bg-color);
color: var(--text-color);
margin: 0;
padding: 0;
line-height: 1.6;
}
header {
background-color: var(--header-bg);
padding: 1rem 2rem;
border-bottom: 2px solid var(--accent);
}
header h1 {
margin: 0;
color: white;
}
header a {
color: var(--link-color);
text-decoration: none;
font-size: 0.9rem;
}
main {
max-width: 900px;
margin: 0 auto;
padding: 2rem;
}
h2 {
color: var(--accent);
border-bottom: 1px solid var(--accent);
padding-bottom: 0.5rem;
}
h3 {
color: var(--link-color);
}
.module-list {
list-style: none;
padding: 0;
}
.module-list li {
padding: 0.5rem 0;
border-bottom: 1px solid rgba(255,255,255,0.1);
}
.module-list a {
color: var(--link-color);
text-decoration: none;
font-weight: bold;
}
.item {
background-color: var(--code-bg);
border-radius: 8px;
padding: 1rem;
margin: 1rem 0;
}
.signature {
display: block;
background-color: rgba(0,0,0,0.3);
padding: 0.5rem 1rem;
border-radius: 4px;
font-family: 'Fira Code', 'Monaco', monospace;
overflow-x: auto;
}
.description {
margin-top: 0.5rem;
color: #aaa;
}
.operations {
list-style: none;
padding-left: 1rem;
}
.operations li {
padding: 0.25rem 0;
}
.module-description {
background-color: var(--code-bg);
padding: 1rem;
border-radius: 8px;
margin-bottom: 2rem;
border-left: 3px solid var(--accent);
}
"#;
fn run_file(path: &str) {
use modules::ModuleLoader;
use std::path::Path;
@@ -1485,6 +2176,7 @@ struct LuxHelper {
keywords: HashSet<String>,
commands: Vec<String>,
user_defined: HashSet<String>,
last_loaded_file: Option<String>,
}
impl LuxHelper {
@@ -1502,7 +2194,8 @@ impl LuxHelper {
let commands = vec![
":help", ":h", ":quit", ":q", ":type", ":t", ":clear", ":load", ":l",
":trace", ":traces", ":info", ":i", ":env", ":doc", ":d", ":browse", ":b",
":reload", ":r", ":trace", ":traces", ":info", ":i", ":env", ":doc", ":d",
":browse", ":b", ":ast",
]
.into_iter()
.map(String::from)
@@ -1512,6 +2205,7 @@ impl LuxHelper {
keywords,
commands,
user_defined: HashSet::new(),
last_loaded_file: None,
}
}
@@ -1860,11 +2554,31 @@ fn handle_command(
}
":load" | ":l" => {
if let Some(path) = arg {
helper.last_loaded_file = Some(path.to_string());
load_file(path, interp, checker, helper);
} else {
println!("Usage: :load <filename>");
}
}
":reload" | ":r" => {
if let Some(ref path) = helper.last_loaded_file.clone() {
println!("Reloading {}...", path);
// Clear environment first
*interp = Interpreter::new();
*checker = TypeChecker::new();
helper.user_defined.clear();
load_file(path, interp, checker, helper);
} else {
println!("No file to reload. Use :load <file> first.");
}
}
":ast" => {
if let Some(expr_str) = arg {
show_ast(expr_str);
} else {
println!("Usage: :ast <expression>");
}
}
":trace" => match arg {
Some("on") => {
interp.enable_tracing();
@@ -2163,6 +2877,23 @@ fn show_type(expr_str: &str, checker: &mut TypeChecker) {
}
}
/// Parse `expr_str` and dump its AST to stdout, or report a parse error.
fn show_ast(expr_str: &str) {
    // The parser accepts declarations, not bare expressions, so wrap the
    // input in a synthetic let-binding before parsing.
    let wrapped = format!("let _expr_ = {}", expr_str);
    match Parser::parse_source(&wrapped) {
        Err(e) => println!("Parse error: {}", e),
        Ok(program) => {
            program
                .declarations
                .iter()
                .for_each(|decl| println!("{:#?}", decl));
        }
    }
}
fn load_file(path: &str, interp: &mut Interpreter, checker: &mut TypeChecker, helper: &mut LuxHelper) {
let source = match std::fs::read_to_string(path) {
Ok(s) => s,

View File

@@ -6,6 +6,618 @@ use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};
use std::io::{self, Write};
use std::cmp::Ordering;
// =============================================================================
// Semantic Versioning
// =============================================================================
/// A semantic version (major.minor.patch[-prerelease][+build]).
///
/// Build metadata (`+...`) is accepted by [`Version::parse`] but discarded,
/// so it never participates in equality or ordering — per the semver spec.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Version {
    pub major: u32,
    pub minor: u32,
    pub patch: u32,
    /// Optional prerelease identifiers (the text after `-`), e.g. "alpha.1".
    pub prerelease: Option<String>,
}

impl Version {
    /// Create a release version with no prerelease component.
    pub fn new(major: u32, minor: u32, patch: u32) -> Self {
        Self { major, minor, patch, prerelease: None }
    }

    /// Parse "major.minor[.patch][-prerelease][+build]".
    ///
    /// The patch component defaults to 0 when omitted. Build metadata is
    /// stripped (the old code failed to parse "1.0.0+build" and folded
    /// "+build" into the prerelease of "1.0.0-alpha+build").
    pub fn parse(s: &str) -> Result<Self, String> {
        let s = s.trim();
        // Discard build metadata first: "1.0.0-alpha+001" -> "1.0.0-alpha".
        let s = s.split('+').next().unwrap_or(s);
        // Handle prerelease suffix (e.g., "1.0.0-alpha")
        let (version_part, prerelease) = if let Some(pos) = s.find('-') {
            (&s[..pos], Some(s[pos + 1..].to_string()))
        } else {
            (s, None)
        };
        let parts: Vec<&str> = version_part.split('.').collect();
        if parts.len() < 2 || parts.len() > 3 {
            return Err(format!("Invalid version format: {}", s));
        }
        let major = parts[0].parse::<u32>()
            .map_err(|_| format!("Invalid major version: {}", parts[0]))?;
        let minor = parts[1].parse::<u32>()
            .map_err(|_| format!("Invalid minor version: {}", parts[1]))?;
        let patch = if parts.len() > 2 {
            parts[2].parse::<u32>()
                .map_err(|_| format!("Invalid patch version: {}", parts[2]))?
        } else {
            0
        };
        Ok(Self { major, minor, patch, prerelease })
    }
}

impl std::fmt::Display for Version {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if let Some(ref pre) = self.prerelease {
            write!(f, "{}.{}.{}-{}", self.major, self.minor, self.patch, pre)
        } else {
            write!(f, "{}.{}.{}", self.major, self.minor, self.patch)
        }
    }
}

/// Compare prerelease strings per semver precedence rules: identifiers are
/// dot-separated; numeric identifiers compare numerically and sort before
/// alphanumeric ones; when all shared identifiers are equal, the shorter
/// list sorts first. (A plain string compare wrongly put "alpha.10" before
/// "alpha.2".)
fn cmp_prerelease(a: &str, b: &str) -> Ordering {
    let mut xs = a.split('.');
    let mut ys = b.split('.');
    loop {
        match (xs.next(), ys.next()) {
            (None, None) => return Ordering::Equal,
            (None, Some(_)) => return Ordering::Less,
            (Some(_), None) => return Ordering::Greater,
            (Some(x), Some(y)) => {
                let ord = match (x.parse::<u64>(), y.parse::<u64>()) {
                    (Ok(nx), Ok(ny)) => nx.cmp(&ny),
                    // Numeric identifiers have lower precedence than
                    // alphanumeric ones.
                    (Ok(_), Err(_)) => Ordering::Less,
                    (Err(_), Ok(_)) => Ordering::Greater,
                    (Err(_), Err(_)) => x.cmp(y),
                };
                if ord != Ordering::Equal {
                    return ord;
                }
            }
        }
    }
}

impl Ord for Version {
    fn cmp(&self, other: &Self) -> Ordering {
        (self.major, self.minor, self.patch)
            .cmp(&(other.major, other.minor, other.patch))
            .then_with(|| match (&self.prerelease, &other.prerelease) {
                (None, None) => Ordering::Equal,
                // Prerelease versions are less than release versions
                (Some(_), None) => Ordering::Less,
                (None, Some(_)) => Ordering::Greater,
                (Some(a), Some(b)) => cmp_prerelease(a, b),
            })
    }
}

impl PartialOrd for Version {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
/// Version constraint for dependencies
#[derive(Debug, Clone)]
pub enum VersionConstraint {
    /// Exact version: "1.2.3"
    Exact(Version),
    /// Caret: "^1.2.3" - compatible updates (>=1.2.3, <2.0.0)
    Caret(Version),
    /// Tilde: "~1.2.3" - patch updates only (>=1.2.3, <1.3.0)
    Tilde(Version),
    /// Greater than or equal: ">=1.2.3"
    GreaterEq(Version),
    /// Less than: "<2.0.0"
    Less(Version),
    /// Range: ">=1.0.0, <2.0.0"
    Range { min: Version, max: Version },
    /// Any version: "*"
    Any,
}

impl VersionConstraint {
    /// Parse a constraint string: "*", "^x.y.z", "~x.y.z", ">=x.y.z",
    /// "<x.y.z", a comma range ">=a, <b", or a bare exact version.
    pub fn parse(s: &str) -> Result<Self, String> {
        let s = s.trim();
        if s == "*" {
            return Ok(VersionConstraint::Any);
        }
        // Check for range (comma-separated constraints)
        if s.contains(',') {
            let parts: Vec<&str> = s.split(',').collect();
            if parts.len() != 2 {
                return Err("Range must have exactly two constraints".to_string());
            }
            let first = VersionConstraint::parse(parts[0].trim())?;
            let second = VersionConstraint::parse(parts[1].trim())?;
            match (first, second) {
                (VersionConstraint::GreaterEq(min), VersionConstraint::Less(max)) => {
                    Ok(VersionConstraint::Range { min, max })
                }
                _ => Err("Range must be >=version, <version".to_string())
            }
        } else if let Some(rest) = s.strip_prefix('^') {
            Ok(VersionConstraint::Caret(Version::parse(rest)?))
        } else if let Some(rest) = s.strip_prefix('~') {
            Ok(VersionConstraint::Tilde(Version::parse(rest)?))
        } else if let Some(rest) = s.strip_prefix(">=") {
            Ok(VersionConstraint::GreaterEq(Version::parse(rest)?))
        } else if let Some(rest) = s.strip_prefix('<') {
            Ok(VersionConstraint::Less(Version::parse(rest)?))
        } else {
            // Try to parse as exact version
            Ok(VersionConstraint::Exact(Version::parse(s)?))
        }
    }

    /// Check if a version satisfies this constraint
    pub fn satisfies(&self, version: &Version) -> bool {
        match self {
            VersionConstraint::Exact(v) => version == v,
            VersionConstraint::Caret(v) => {
                // Caret allows updates that keep the leftmost non-zero
                // component fixed (cargo/npm semantics):
                //   ^1.2.3 => >=1.2.3, <2.0.0
                //   ^0.2.3 => >=0.2.3, <0.3.0
                //   ^0.0.3 => >=0.0.3, <0.0.4  (patch is pinned; the old
                //   code wrongly accepted any 0.0.x here)
                if v.major > 0 {
                    version.major == v.major && version >= v
                } else if v.minor > 0 {
                    version.major == 0 && version.minor == v.minor && version >= v
                } else {
                    version.major == 0
                        && version.minor == 0
                        && version.patch == v.patch
                        && version >= v
                }
            }
            VersionConstraint::Tilde(v) => {
                // ~1.2.3 means >=1.2.3, <1.3.0
                version.major == v.major && version.minor == v.minor && version >= v
            }
            VersionConstraint::GreaterEq(v) => version >= v,
            VersionConstraint::Less(v) => version < v,
            VersionConstraint::Range { min, max } => version >= min && version < max,
            VersionConstraint::Any => true,
        }
    }
}

impl std::fmt::Display for VersionConstraint {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            VersionConstraint::Exact(v) => write!(f, "{}", v),
            VersionConstraint::Caret(v) => write!(f, "^{}", v),
            VersionConstraint::Tilde(v) => write!(f, "~{}", v),
            VersionConstraint::GreaterEq(v) => write!(f, ">={}", v),
            VersionConstraint::Less(v) => write!(f, "<{}", v),
            VersionConstraint::Range { min, max } => write!(f, ">={}, <{}", min, max),
            VersionConstraint::Any => write!(f, "*"),
        }
    }
}
// =============================================================================
// Lock File
// =============================================================================
/// A lock file entry for a resolved package
#[derive(Debug, Clone)]
pub struct LockedPackage {
    pub name: String,
    /// Exact version pinned by resolution.
    pub version: Version,
    /// Where the package came from (registry, git, or local path).
    pub source: LockedSource,
    /// Optional integrity checksum; None when none was recorded.
    pub checksum: Option<String>,
    /// Names of this package's direct dependencies.
    pub dependencies: Vec<String>,
}
/// Source of a locked package
#[derive(Debug, Clone)]
pub enum LockedSource {
    /// Fetched from the package registry.
    Registry,
    /// Cloned from a git repository at a specific revision.
    Git { url: String, rev: String },
    /// Taken from a local filesystem path.
    Path { path: PathBuf },
}
/// The lock file (lux.lock)
///
/// A flat list of every resolved package, written in the minimal TOML
/// dialect produced by [`LockFile::format`] and read back by
/// [`LockFile::parse`].
#[derive(Debug, Clone, Default)]
pub struct LockFile {
    pub packages: Vec<LockedPackage>,
}

impl LockFile {
    /// Create an empty lock file.
    pub fn new() -> Self {
        Self { packages: Vec::new() }
    }

    /// Parse a lock file
    ///
    /// Accepts the subset of TOML emitted by `format`: `[[package]]`
    /// tables of `key = "value"` lines. Unknown keys are ignored; blank
    /// lines and `#` comments are skipped.
    pub fn parse(content: &str) -> Result<Self, String> {
        let mut packages = Vec::new();
        let mut current_pkg: Option<LockedPackage> = None;
        let mut in_package = false;
        for line in content.lines() {
            let line = line.trim();
            if line.is_empty() || line.starts_with('#') {
                continue;
            }
            if line == "[[package]]" {
                // New package table: flush the previous one, if any.
                if let Some(pkg) = current_pkg.take() {
                    packages.push(pkg);
                }
                current_pkg = Some(LockedPackage {
                    name: String::new(),
                    version: Version::new(0, 0, 0),
                    source: LockedSource::Registry,
                    checksum: None,
                    dependencies: Vec::new(),
                });
                in_package = true;
                continue;
            }
            if in_package {
                if let Some(ref mut pkg) = current_pkg {
                    if let Some(eq_pos) = line.find('=') {
                        let key = line[..eq_pos].trim();
                        let value = line[eq_pos + 1..].trim().trim_matches('"');
                        match key {
                            "name" => pkg.name = value.to_string(),
                            "version" => pkg.version = Version::parse(value)?,
                            "source" => {
                                if value == "registry" {
                                    pkg.source = LockedSource::Registry;
                                } else if let Some(rest) = value.strip_prefix("git:") {
                                    // `format` writes "git:<url>@<rev>", so split on
                                    // the LAST '@': ssh-style URLs ("git@host:...")
                                    // contain '@' inside the url itself, which the
                                    // old left-split mangled.
                                    let mut it = rest.rsplitn(2, '@');
                                    let tail = it.next().unwrap_or("HEAD");
                                    match it.next() {
                                        Some(url) => {
                                            pkg.source = LockedSource::Git {
                                                url: url.to_string(),
                                                rev: tail.to_string(),
                                            };
                                        }
                                        // No '@' at all: whole value is the url.
                                        None => {
                                            pkg.source = LockedSource::Git {
                                                url: rest.to_string(),
                                                rev: "HEAD".to_string(),
                                            };
                                        }
                                    }
                                } else if let Some(rest) = value.strip_prefix("path:") {
                                    pkg.source = LockedSource::Path {
                                        path: PathBuf::from(rest),
                                    };
                                }
                            }
                            "checksum" => pkg.checksum = Some(value.to_string()),
                            "dependencies" => {
                                // Parse a single-line array of quoted names.
                                let deps_str = value.trim_matches(|c| c == '[' || c == ']');
                                pkg.dependencies = deps_str
                                    .split(',')
                                    .map(|s| s.trim().trim_matches('"').to_string())
                                    .filter(|s| !s.is_empty())
                                    .collect();
                            }
                            _ => {}
                        }
                    }
                }
            }
        }
        if let Some(pkg) = current_pkg {
            packages.push(pkg);
        }
        Ok(Self { packages })
    }

    /// Format lock file as TOML
    ///
    /// Output is stable given a stable package order (the resolver sorts
    /// packages by name before saving).
    pub fn format(&self) -> String {
        let mut output = String::new();
        output.push_str("# This file is auto-generated by lux pkg. Do not edit manually.\n\n");
        for pkg in &self.packages {
            output.push_str("[[package]]\n");
            output.push_str(&format!("name = \"{}\"\n", pkg.name));
            output.push_str(&format!("version = \"{}\"\n", pkg.version));
            let source_str = match &pkg.source {
                LockedSource::Registry => "registry".to_string(),
                LockedSource::Git { url, rev } => format!("git:{}@{}", url, rev),
                LockedSource::Path { path } => format!("path:{}", path.display()),
            };
            output.push_str(&format!("source = \"{}\"\n", source_str));
            if let Some(ref checksum) = pkg.checksum {
                output.push_str(&format!("checksum = \"{}\"\n", checksum));
            }
            if !pkg.dependencies.is_empty() {
                let deps: Vec<String> = pkg.dependencies.iter()
                    .map(|d| format!("\"{}\"", d))
                    .collect();
                output.push_str(&format!("dependencies = [{}]\n", deps.join(", ")));
            }
            output.push('\n');
        }
        output
    }

    /// Find a locked package by name
    pub fn find(&self, name: &str) -> Option<&LockedPackage> {
        self.packages.iter().find(|p| p.name == name)
    }
}
// =============================================================================
// Dependency Resolution
// =============================================================================
/// Resolution error
///
/// Carries a human-readable message plus, when known, the name of the
/// package whose constraints could not be satisfied.
#[derive(Debug)]
pub struct ResolutionError {
    pub message: String,
    pub package: Option<String>,
}

impl std::fmt::Display for ResolutionError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self.package.as_deref() {
            Some(pkg) => write!(f, "Resolution error for '{}': {}", pkg, self.message),
            None => write!(f, "Resolution error: {}", self.message),
        }
    }
}
/// Dependency resolver with transitive dependency support
pub struct Resolver {
    /// Available versions for each package (simulated for now; populated
    /// via `add_available_versions` rather than a live registry)
    available_versions: HashMap<String, Vec<Version>>,
    /// Package dependencies cache, keyed "name@version"
    package_deps: HashMap<String, HashMap<String, Dependency>>,
    /// Packages directory for reading transitive deps of installed packages
    packages_dir: Option<PathBuf>,
}
impl Resolver {
pub fn new() -> Self {
Self {
available_versions: HashMap::new(),
package_deps: HashMap::new(),
packages_dir: None,
}
}
/// Create resolver with packages directory for reading transitive deps
pub fn with_packages_dir(packages_dir: &Path) -> Self {
Self {
available_versions: HashMap::new(),
package_deps: HashMap::new(),
packages_dir: Some(packages_dir.to_path_buf()),
}
}
/// Add available versions for a package (for testing/registry integration)
pub fn add_available_versions(&mut self, name: &str, versions: Vec<Version>) {
self.available_versions.insert(name.to_string(), versions);
}
/// Add package dependencies (for testing or when loaded from registry)
pub fn add_package_deps(&mut self, name: &str, version: &Version, deps: HashMap<String, Dependency>) {
let key = format!("{}@{}", name, version);
self.package_deps.insert(key, deps);
}
/// Resolve dependencies to a lock file (with transitive dependencies)
pub fn resolve(
&self,
manifest: &Manifest,
existing_lock: Option<&LockFile>,
) -> Result<LockFile, ResolutionError> {
let mut lock = LockFile::new();
let mut resolved: HashMap<String, (Version, LockedSource)> = HashMap::new();
let mut to_resolve: Vec<(String, Dependency, Option<String>)> = Vec::new();
// If we have an existing lock file, prefer those versions
if let Some(existing) = existing_lock {
for pkg in &existing.packages {
resolved.insert(pkg.name.clone(), (pkg.version.clone(), pkg.source.clone()));
}
}
// Queue direct dependencies for resolution
for (name, dep) in &manifest.dependencies {
to_resolve.push((name.clone(), dep.clone(), None));
}
// Process queue (breadth-first for better dependency order)
while let Some((name, dep, required_by)) = to_resolve.pop() {
// Skip if already resolved with compatible version
if let Some((existing_version, _)) = resolved.get(&name) {
let constraint = VersionConstraint::parse(&dep.version)
.map_err(|e| ResolutionError {
message: e,
package: Some(name.clone()),
})?;
if constraint.satisfies(existing_version) {
continue; // Already have a compatible version
} else {
// Version conflict
return Err(ResolutionError {
message: format!(
"Version conflict: {} requires {} {}, but {} is already resolved{}",
required_by.as_deref().unwrap_or("project"),
name,
dep.version,
existing_version,
if let Some(rb) = &required_by {
format!(" (required by {})", rb)
} else {
String::new()
}
),
package: Some(name.clone()),
});
}
}
let constraint = VersionConstraint::parse(&dep.version)
.map_err(|e| ResolutionError {
message: e,
package: Some(name.clone()),
})?;
// Resolve the version
let version = self.select_version(&name, &constraint, &dep.source)?;
let source = match &dep.source {
DependencySource::Registry => LockedSource::Registry,
DependencySource::Git { url, branch } => LockedSource::Git {
url: url.clone(),
rev: branch.clone().unwrap_or_else(|| "HEAD".to_string()),
},
DependencySource::Path { path } => LockedSource::Path { path: path.clone() },
};
resolved.insert(name.clone(), (version.clone(), source.clone()));
// Get transitive dependencies
let transitive_deps = self.get_package_dependencies(&name, &version, &dep.source);
for (trans_name, trans_dep) in transitive_deps {
if !resolved.contains_key(&trans_name) {
to_resolve.push((trans_name, trans_dep, Some(name.clone())));
}
}
}
// Build lock file from resolved packages
for (name, (version, source)) in &resolved {
// Get the dependency list for this package
let deps = self.get_package_dependencies(name, version, &match source {
LockedSource::Registry => DependencySource::Registry,
LockedSource::Git { url, rev } => DependencySource::Git {
url: url.clone(),
branch: Some(rev.clone()),
},
LockedSource::Path { path } => DependencySource::Path { path: path.clone() },
});
let dep_names: Vec<String> = deps.keys().cloned().collect();
lock.packages.push(LockedPackage {
name: name.clone(),
version: version.clone(),
source: source.clone(),
checksum: None,
dependencies: dep_names,
});
}
// Sort packages by name for deterministic output
lock.packages.sort_by(|a, b| a.name.cmp(&b.name));
Ok(lock)
}
/// Get dependencies of a package
fn get_package_dependencies(
&self,
name: &str,
version: &Version,
source: &DependencySource,
) -> HashMap<String, Dependency> {
// First check our cache
let key = format!("{}@{}", name, version);
if let Some(deps) = self.package_deps.get(&key) {
return deps.clone();
}
// Try to read from installed package
if let Some(ref packages_dir) = self.packages_dir {
let pkg_dir = packages_dir.join(name);
let manifest_path = pkg_dir.join("lux.toml");
if manifest_path.exists() {
if let Ok(content) = fs::read_to_string(&manifest_path) {
if let Ok(manifest) = parse_manifest(&content) {
return manifest.dependencies;
}
}
}
}
// For path dependencies, read from the path
if let DependencySource::Path { path } = source {
let manifest_path = if path.is_absolute() {
path.join("lux.toml")
} else if let Some(ref packages_dir) = self.packages_dir {
packages_dir.parent().unwrap_or(packages_dir).join(path).join("lux.toml")
} else {
path.join("lux.toml")
};
if manifest_path.exists() {
if let Ok(content) = fs::read_to_string(&manifest_path) {
if let Ok(manifest) = parse_manifest(&content) {
return manifest.dependencies;
}
}
}
}
// No dependencies found
HashMap::new()
}
/// Select the best version that satisfies the constraint
fn select_version(
&self,
name: &str,
constraint: &VersionConstraint,
source: &DependencySource,
) -> Result<Version, ResolutionError> {
match source {
DependencySource::Git { .. } | DependencySource::Path { .. } => {
// For git/path sources, use the version from the constraint or 0.0.0
match constraint {
VersionConstraint::Exact(v) => Ok(v.clone()),
_ => Ok(Version::new(0, 0, 0)),
}
}
DependencySource::Registry => {
// Check available versions
if let Some(versions) = self.available_versions.get(name) {
// Find the highest version that satisfies the constraint
let mut matching: Vec<&Version> = versions
.iter()
.filter(|v| constraint.satisfies(v))
.collect();
matching.sort();
matching.reverse();
if let Some(v) = matching.first() {
return Ok((*v).clone());
}
}
// No available versions - use the constraint's base version
match constraint {
VersionConstraint::Exact(v) => Ok(v.clone()),
VersionConstraint::Caret(v) => Ok(v.clone()),
VersionConstraint::Tilde(v) => Ok(v.clone()),
VersionConstraint::GreaterEq(v) => Ok(v.clone()),
VersionConstraint::Range { min, .. } => Ok(min.clone()),
VersionConstraint::Less(_) | VersionConstraint::Any => {
// Can't determine version without registry
Ok(Version::new(0, 0, 0))
}
}
}
}
}
}
impl Default for Resolver {
    /// Equivalent to `Resolver::new()`: no known versions, no packages dir.
    fn default() -> Self {
        Self::new()
    }
}
// =============================================================================
// Manifest and Package Manager
// =============================================================================
/// Package manifest (lux.toml)
#[derive(Debug, Clone)]
@@ -69,6 +681,43 @@ impl PackageManager {
}
}
/// Load the lock file (lux.lock)
///
/// Returns Ok(None) when the project has no lock file yet.
pub fn load_lock(&self) -> Result<Option<LockFile>, String> {
    let path = self.project_root.join("lux.lock");
    if path.exists() {
        let text = fs::read_to_string(&path)
            .map_err(|e| format!("Failed to read lux.lock: {}", e))?;
        LockFile::parse(&text).map(Some)
    } else {
        Ok(None)
    }
}
/// Save the lock file (lux.lock)
pub fn save_lock(&self, lock: &LockFile) -> Result<(), String> {
    let path = self.project_root.join("lux.lock");
    fs::write(&path, lock.format())
        .map_err(|e| format!("Failed to write lux.lock: {}", e))
}
/// Resolve dependencies and generate/update lock file
///
/// Existing lock entries are passed to the resolver so pinned versions
/// are preferred; the resulting lock file is written back to disk.
pub fn resolve(&self) -> Result<LockFile, String> {
    let manifest = self.load_manifest()?;
    let previous = self.load_lock()?;
    // Transitive dependencies are discovered from manifests under packages_dir.
    let lock = Resolver::with_packages_dir(&self.packages_dir)
        .resolve(&manifest, previous.as_ref())
        .map_err(|e| e.to_string())?;
    self.save_lock(&lock)?;
    Ok(lock)
}
/// Find the project root by looking for lux.toml
pub fn find_project_root() -> Option<PathBuf> {
let mut current = std::env::current_dir().ok()?;
@@ -154,19 +803,139 @@ impl PackageManager {
return Ok(());
}
// Resolve dependencies and generate/update lock file
let lock = self.resolve()?;
// Create packages directory
fs::create_dir_all(&self.packages_dir)
.map_err(|e| format!("Failed to create packages directory: {}", e))?;
println!("Installing {} dependencies...", manifest.dependencies.len());
println!("Installing {} dependencies...", lock.packages.len());
println!();
for (_name, dep) in &manifest.dependencies {
self.install_dependency(dep)?;
// Install from lock file for reproducibility
for locked_pkg in &lock.packages {
self.install_locked_package(locked_pkg, &manifest)?;
}
println!();
println!("Done! Installed {} packages.", manifest.dependencies.len());
println!("Done! Installed {} packages.", lock.packages.len());
println!("Lock file written to lux.lock");
Ok(())
}
/// Install a package from the lock file
///
/// Dispatches on the locked source (registry / git / path). For path
/// dependencies the manifest is consulted so the path is taken as the
/// user wrote it, falling back to the path recorded in the lock file.
fn install_locked_package(&self, locked: &LockedPackage, manifest: &Manifest) -> Result<(), String> {
    print!(" Installing {} v{}... ", locked.name, locked.version);
    // Best-effort flush so progress text appears before the install runs;
    // a stdout flush failure must not panic and abort the install (the
    // old code called .unwrap() here).
    let _ = io::stdout().flush();
    let dest_dir = self.packages_dir.join(&locked.name);
    // Get the dependency info from manifest for source details
    let dep = manifest.dependencies.get(&locked.name);
    match &locked.source {
        LockedSource::Registry => {
            self.install_from_registry_locked(locked, &dest_dir)?;
        }
        LockedSource::Git { url, rev } => {
            self.install_from_git_locked(url, rev, &dest_dir)?;
        }
        LockedSource::Path { path } => {
            // Prefer the manifest's path when it is a path dependency.
            let source_path = if let Some(d) = dep {
                match &d.source {
                    DependencySource::Path { path } => path.clone(),
                    _ => path.clone(),
                }
            } else {
                path.clone()
            };
            self.install_from_path(&source_path, &dest_dir)?;
        }
    }
    println!("done");
    Ok(())
}
/// Install a registry package at the exact locked version.
///
/// Skips work when the locked version is already installed; when a
/// DIFFERENT version is installed, the destination is removed first so
/// files deleted upstream do not linger in the new install (the old code
/// copied over the stale tree).
fn install_from_registry_locked(&self, locked: &LockedPackage, dest: &Path) -> Result<(), String> {
    // Check if already installed with correct version
    let version_file = dest.join(".version");
    if version_file.exists() {
        let installed_version = fs::read_to_string(&version_file).unwrap_or_default();
        if installed_version.trim() == locked.version.to_string() {
            return Ok(());
        }
        // A different version is present: clear it out before reinstalling.
        fs::remove_dir_all(dest)
            .map_err(|e| format!("Failed to remove stale package directory: {}", e))?;
    }
    // Check cache first
    let cache_path = self.cache_dir.join(&locked.name).join(locked.version.to_string());
    if cache_path.exists() {
        // Copy from cache
        copy_dir_recursive(&cache_path, dest)?;
    } else {
        // Create placeholder package (in real impl, would download)
        fs::create_dir_all(dest)
            .map_err(|e| format!("Failed to create package directory: {}", e))?;
        // Create a lib.lux placeholder
        let lib_content = format!(
            "// Package: {} v{}\n// This is a placeholder - real package would be downloaded from registry\n\n",
            locked.name, locked.version
        );
        fs::write(dest.join("lib.lux"), lib_content)
            .map_err(|e| format!("Failed to create lib.lux: {}", e))?;
    }
    // Write version file
    fs::write(&version_file, locked.version.to_string())
        .map_err(|e| format!("Failed to write version file: {}", e))?;
    // Verify checksum if present
    if let Some(ref expected) = locked.checksum {
        // TODO(review): a real implementation must verify the checksum here.
        let _ = expected; // Placeholder
    }
    Ok(())
}
/// Install a git dependency at a specific revision into `dest`.
///
/// Fast path: shallow clone with `--branch` (works for branch and tag
/// names). `--branch` cannot name a commit SHA, so on failure we fall
/// back to a full clone followed by an explicit checkout of the rev.
fn install_from_git_locked(&self, url: &str, rev: &str, dest: &Path) -> Result<(), String> {
    // Remove existing if present
    if dest.exists() {
        fs::remove_dir_all(dest)
            .map_err(|e| format!("Failed to remove existing directory: {}", e))?;
    }
    let pinned = rev != "HEAD" && !rev.is_empty();
    // Clone at specific revision (shallow where possible)
    let mut cmd = std::process::Command::new("git");
    cmd.arg("clone")
        .arg("--depth").arg("1");
    if pinned {
        cmd.arg("--branch").arg(rev);
    }
    cmd.arg(url).arg(dest);
    let output = cmd.output()
        .map_err(|e| format!("Failed to run git: {}", e))?;
    if !output.status.success() {
        if pinned {
            // The rev may be a commit SHA, which --branch rejects: retry
            // with a full clone and check the revision out explicitly.
            let _ = fs::remove_dir_all(dest);
            let clone_out = std::process::Command::new("git")
                .arg("clone").arg(url).arg(dest)
                .output()
                .map_err(|e| format!("Failed to run git: {}", e))?;
            if !clone_out.status.success() {
                return Err(format!(
                    "Git clone failed: {}",
                    String::from_utf8_lossy(&clone_out.stderr)
                ));
            }
            let checkout = std::process::Command::new("git")
                .arg("-C").arg(dest)
                .arg("checkout").arg(rev)
                .output()
                .map_err(|e| format!("Failed to run git: {}", e))?;
            if !checkout.status.success() {
                return Err(format!(
                    "Git checkout failed: {}",
                    String::from_utf8_lossy(&checkout.stderr)
                ));
            }
        } else {
            return Err(format!(
                "Git clone failed: {}",
                String::from_utf8_lossy(&output.stderr)
            ));
        }
    }
    // Remove .git directory to save space
    let git_dir = dest.join(".git");
    if git_dir.exists() {
        fs::remove_dir_all(&git_dir).ok();
    }
    Ok(())
}

637
src/registry.rs Normal file
View File

@@ -0,0 +1,637 @@
//! Package Registry Server for Lux
//!
//! Provides a central repository for sharing Lux packages.
//! The registry serves package metadata and tarballs via HTTP.
use std::collections::HashMap;
use std::fs;
use std::io::{Read, Write};
use std::net::{TcpListener, TcpStream};
use std::path::{Path, PathBuf};
use std::sync::{Arc, RwLock};
use std::thread;
/// Package metadata stored in the registry
///
/// One record per published (name, version) pair; serialized to
/// `<name>-<version>.json` alongside the package tarball.
#[derive(Debug, Clone)]
pub struct PackageMetadata {
    pub name: String,
    pub version: String,
    pub description: String,
    pub authors: Vec<String>,
    /// License string, if the author supplied one.
    pub license: Option<String>,
    /// Source repository URL, if any.
    pub repository: Option<String>,
    pub keywords: Vec<String>,
    /// Dependency name -> version constraint string.
    pub dependencies: HashMap<String, String>,
    // NOTE(review): checksum format/algorithm is not enforced anywhere
    // visible here — confirm with the publish client.
    pub checksum: String,
    // NOTE(review): stored as an opaque string; presumably a timestamp
    // (RFC 3339?) — confirm with the publisher.
    pub published_at: String,
}
/// A version entry for a package
#[derive(Debug, Clone)]
pub struct VersionEntry {
    pub version: String,
    pub checksum: String,
    pub published_at: String,
    /// Yank flag; always initialized to false by `publish` — nothing in
    /// the code shown here sets it to true.
    pub yanked: bool,
}
/// Package index entry (all versions of a package)
#[derive(Debug, Clone)]
pub struct PackageIndex {
    pub name: String,
    pub description: String,
    /// Every published version, in publication order.
    pub versions: Vec<VersionEntry>,
    /// Version string recorded as "latest" at publish time.
    pub latest_version: String,
}
/// The package registry
///
/// Stores tarballs and metadata under `storage_dir` and keeps an
/// in-memory index mirrored to `storage_dir/index.json`.
pub struct Registry {
    /// Base directory for storing packages
    storage_dir: PathBuf,
    /// In-memory index of all packages; behind an RwLock so it can be
    /// read concurrently while publishes take the write lock.
    index: Arc<RwLock<HashMap<String, PackageIndex>>>,
}
impl Registry {
    /// Create a new registry with the given storage directory
    pub fn new(storage_dir: &Path) -> Self {
        let registry = Self {
            storage_dir: storage_dir.to_path_buf(),
            index: Arc::new(RwLock::new(HashMap::new())),
        };
        registry.load_index();
        registry
    }

    /// Load the package index from disk.
    ///
    /// Best-effort: a missing or unparseable index file simply leaves the
    /// in-memory index empty.
    fn load_index(&self) {
        let index_path = self.storage_dir.join("index.json");
        if !index_path.exists() {
            return;
        }
        if let Ok(content) = fs::read_to_string(&index_path) {
            if let Ok(index) = parse_index_json(&content) {
                let mut idx = self.index.write().unwrap();
                *idx = index;
            }
        }
    }

    /// Save the package index to disk (best-effort; write errors ignored).
    fn save_index(&self) {
        let index_path = self.storage_dir.join("index.json");
        let idx = self.index.read().unwrap();
        let json = format_index_json(&idx);
        fs::write(&index_path, json).ok();
    }

    /// Publish a new package version
    ///
    /// Validates the name and rejects duplicate versions BEFORE any files
    /// are written (the old code wrote the tarball and metadata first and
    /// then errored, orphaning those files), then stores the tarball and
    /// metadata and updates the index.
    pub fn publish(&self, metadata: PackageMetadata, tarball: &[u8]) -> Result<(), String> {
        // Validate package name
        if !is_valid_package_name(&metadata.name) {
            return Err("Invalid package name. Use lowercase letters, numbers, and hyphens.".to_string());
        }
        // Early duplicate check so a failed publish touches no files.
        {
            let idx = self.index.read().unwrap();
            if let Some(entry) = idx.get(&metadata.name) {
                if entry.versions.iter().any(|v| v.version == metadata.version) {
                    return Err(format!("Version {} already exists", metadata.version));
                }
            }
        }
        // Create package directory
        let pkg_dir = self.storage_dir.join("packages").join(&metadata.name);
        fs::create_dir_all(&pkg_dir)
            .map_err(|e| format!("Failed to create package directory: {}", e))?;
        // Write tarball
        let tarball_path = pkg_dir.join(format!("{}-{}.tar.gz", metadata.name, metadata.version));
        fs::write(&tarball_path, tarball)
            .map_err(|e| format!("Failed to write package tarball: {}", e))?;
        // Write metadata
        let meta_path = pkg_dir.join(format!("{}-{}.json", metadata.name, metadata.version));
        let meta_json = format_metadata_json(&metadata);
        fs::write(&meta_path, meta_json)
            .map_err(|e| format!("Failed to write package metadata: {}", e))?;
        // Update index
        {
            let mut idx = self.index.write().unwrap();
            let entry = idx.entry(metadata.name.clone()).or_insert_with(|| PackageIndex {
                name: metadata.name.clone(),
                description: metadata.description.clone(),
                versions: Vec::new(),
                latest_version: String::new(),
            });
            // Re-check under the write lock in case another thread published
            // the same version between our read check and now.
            if entry.versions.iter().any(|v| v.version == metadata.version) {
                return Err(format!("Version {} already exists", metadata.version));
            }
            entry.versions.push(VersionEntry {
                version: metadata.version.clone(),
                checksum: metadata.checksum.clone(),
                published_at: metadata.published_at.clone(),
                yanked: false,
            });
            // Only advance `latest_version` when the new version is not
            // older (the old code unconditionally overwrote it, so
            // re-publishing an old version regressed "latest"). Keys are
            // numeric (major, minor, patch) plus a release flag that sorts
            // a prerelease below the corresponding release; non-numeric
            // components fall back to 0.
            fn version_key(v: &str) -> (u32, u32, u32, bool) {
                let core = v.split('+').next().unwrap_or(v);
                let (nums, is_release) = match core.find('-') {
                    Some(i) => (&core[..i], false),
                    None => (core, true),
                };
                let mut it = nums.split('.').map(|p| p.parse::<u32>().unwrap_or(0));
                (
                    it.next().unwrap_or(0),
                    it.next().unwrap_or(0),
                    it.next().unwrap_or(0),
                    is_release,
                )
            }
            if entry.latest_version.is_empty()
                || version_key(&metadata.version) >= version_key(&entry.latest_version)
            {
                entry.latest_version = metadata.version.clone();
            }
            entry.description = metadata.description.clone();
        }
        self.save_index();
        Ok(())
    }

    /// Get package metadata
    pub fn get_metadata(&self, name: &str, version: &str) -> Option<PackageMetadata> {
        let meta_path = self.storage_dir
            .join("packages")
            .join(name)
            .join(format!("{}-{}.json", name, version));
        if let Ok(content) = fs::read_to_string(&meta_path) {
            parse_metadata_json(&content)
        } else {
            None
        }
    }

    /// Get package tarball
    pub fn get_tarball(&self, name: &str, version: &str) -> Option<Vec<u8>> {
        let tarball_path = self.storage_dir
            .join("packages")
            .join(name)
            .join(format!("{}-{}.tar.gz", name, version));
        fs::read(&tarball_path).ok()
    }

    /// Search packages by case-insensitive substring of name or description.
    pub fn search(&self, query: &str) -> Vec<PackageIndex> {
        let idx = self.index.read().unwrap();
        let query_lower = query.to_lowercase();
        idx.values()
            .filter(|pkg| {
                pkg.name.to_lowercase().contains(&query_lower) ||
                pkg.description.to_lowercase().contains(&query_lower)
            })
            .cloned()
            .collect()
    }

    /// List all packages
    pub fn list_all(&self) -> Vec<PackageIndex> {
        let idx = self.index.read().unwrap();
        idx.values().cloned().collect()
    }

    /// Get package index entry
    pub fn get_package(&self, name: &str) -> Option<PackageIndex> {
        let idx = self.index.read().unwrap();
        idx.get(name).cloned()
    }
}
/// HTTP Registry Server
///
/// Thin wrapper that owns the shared [`Registry`] state and the address the
/// TCP listener should bind to; see `run` for the serving loop.
pub struct RegistryServer {
    /// Shared registry; an `Arc` clone is handed to each connection thread.
    registry: Arc<Registry>,
    /// Bind address in "host:port" form, passed to `TcpListener::bind`.
    bind_addr: String,
}
impl RegistryServer {
    /// Build a server over a registry rooted at `storage_dir`, to be bound
    /// on `bind_addr` when `run` is called.
    pub fn new(storage_dir: &Path, bind_addr: &str) -> Self {
        let registry = Arc::new(Registry::new(storage_dir));
        Self {
            registry,
            bind_addr: bind_addr.to_string(),
        }
    }
    /// Bind the listener, print the endpoint banner, and serve forever —
    /// one OS thread per incoming connection.
    pub fn run(&self) -> Result<(), String> {
        let listener = TcpListener::bind(&self.bind_addr)
            .map_err(|e| format!("Failed to bind to {}: {}", self.bind_addr, e))?;
        println!("Lux Package Registry running at http://{}", self.bind_addr);
        println!("Storage directory: {}", self.registry.storage_dir.display());
        println!();
        println!("Endpoints:");
        println!(" GET /api/v1/packages - List all packages");
        println!(" GET /api/v1/packages/:name - Get package info");
        println!(" GET /api/v1/packages/:name/:ver - Get version metadata");
        println!(" GET /api/v1/download/:name/:ver - Download package tarball");
        println!(" GET /api/v1/search?q=query - Search packages");
        println!(" POST /api/v1/publish - Publish a package");
        println!();
        for conn in listener.incoming() {
            match conn {
                Ok(sock) => {
                    // Each connection gets its own thread and its own handle
                    // to the shared registry.
                    let registry = Arc::clone(&self.registry);
                    thread::spawn(move || handle_request(sock, &registry));
                }
                Err(err) => eprintln!("Connection error: {}", err),
            }
        }
        Ok(())
    }
}
/// Handle an HTTP request
fn handle_request(mut stream: TcpStream, registry: &Registry) {
let mut buffer = [0; 8192];
let bytes_read = match stream.read(&mut buffer) {
Ok(n) => n,
Err(_) => return,
};
let request = String::from_utf8_lossy(&buffer[..bytes_read]);
let lines: Vec<&str> = request.lines().collect();
if lines.is_empty() {
return;
}
let parts: Vec<&str> = lines[0].split_whitespace().collect();
if parts.len() < 2 {
return;
}
let method = parts[0];
let path = parts[1];
// Parse path and query string
let (path, query) = if let Some(q_pos) = path.find('?') {
(&path[..q_pos], Some(&path[q_pos + 1..]))
} else {
(path, None)
};
let response = match (method, path) {
("GET", "/") => {
html_response(200, r#"
<!DOCTYPE html>
<html>
<head><title>Lux Package Registry</title></head>
<body>
<h1>Lux Package Registry</h1>
<p>Welcome to the Lux package registry.</p>
<h2>API Endpoints</h2>
<ul>
<li>GET /api/v1/packages - List all packages</li>
<li>GET /api/v1/packages/:name - Get package info</li>
<li>GET /api/v1/packages/:name/:version - Get version metadata</li>
<li>GET /api/v1/download/:name/:version - Download package</li>
<li>GET /api/v1/search?q=query - Search packages</li>
</ul>
</body>
</html>
"#)
}
("GET", "/api/v1/packages") => {
let packages = registry.list_all();
let json = format_packages_list_json(&packages);
json_response(200, &json)
}
("GET", path) if path.starts_with("/api/v1/packages/") => {
let rest = &path[17..]; // Remove "/api/v1/packages/"
let parts: Vec<&str> = rest.split('/').collect();
match parts.len() {
1 => {
// Get package info
if let Some(pkg) = registry.get_package(parts[0]) {
let json = format_package_json(&pkg);
json_response(200, &json)
} else {
json_response(404, r#"{"error": "Package not found"}"#)
}
}
2 => {
// Get version metadata
if let Some(meta) = registry.get_metadata(parts[0], parts[1]) {
let json = format_metadata_json(&meta);
json_response(200, &json)
} else {
json_response(404, r#"{"error": "Version not found"}"#)
}
}
_ => json_response(400, r#"{"error": "Invalid path"}"#)
}
}
("GET", path) if path.starts_with("/api/v1/download/") => {
let rest = &path[17..]; // Remove "/api/v1/download/"
let parts: Vec<&str> = rest.split('/').collect();
if parts.len() == 2 {
if let Some(tarball) = registry.get_tarball(parts[0], parts[1]) {
tarball_response(&tarball)
} else {
json_response(404, r#"{"error": "Package not found"}"#)
}
} else {
json_response(400, r#"{"error": "Invalid path"}"#)
}
}
("GET", "/api/v1/search") => {
let q = query
.and_then(|qs| parse_query_string(qs).get("q").cloned())
.unwrap_or_default();
let results = registry.search(&q);
let json = format_packages_list_json(&results);
json_response(200, &json)
}
("POST", "/api/v1/publish") => {
// Find content length
let content_length: usize = lines.iter()
.find(|l| l.to_lowercase().starts_with("content-length:"))
.and_then(|l| l.split(':').nth(1))
.and_then(|s| s.trim().parse().ok())
.unwrap_or(0);
// Find body start
let body_start = request.find("\r\n\r\n")
.map(|i| i + 4)
.unwrap_or(bytes_read);
// For now, return a message about publishing
// Real implementation would parse multipart form data
json_response(200, &format!(
r#"{{"message": "Publish endpoint ready", "content_length": {}}}"#,
content_length
))
}
_ => {
json_response(404, r#"{"error": "Not found"}"#)
}
};
stream.write_all(response.as_bytes()).ok();
}
/// Build a complete HTTP/1.1 response carrying an HTML body.
///
/// Content-Length is the body's byte length; unknown status codes get the
/// reason phrase "Unknown".
fn html_response(status: u16, body: &str) -> String {
    // Reason phrase for the handful of codes this server emits.
    fn reason(status: u16) -> &'static str {
        match status {
            200 => "OK",
            400 => "Bad Request",
            404 => "Not Found",
            500 => "Internal Server Error",
            _ => "Unknown",
        }
    }
    format!(
        "HTTP/1.1 {status} {reason}\r\nContent-Type: text/html\r\nContent-Length: {len}\r\nConnection: close\r\n\r\n{body}",
        status = status,
        reason = reason(status),
        len = body.len(),
        body = body,
    )
}
/// Build a complete HTTP/1.1 response carrying a JSON body.
///
/// Content-Length is the body's byte length; unknown status codes get the
/// reason phrase "Unknown".
fn json_response(status: u16, body: &str) -> String {
    // Reason phrase for the handful of codes this server emits.
    fn reason(status: u16) -> &'static str {
        match status {
            200 => "OK",
            400 => "Bad Request",
            404 => "Not Found",
            500 => "Internal Server Error",
            _ => "Unknown",
        }
    }
    format!(
        "HTTP/1.1 {status} {reason}\r\nContent-Type: application/json\r\nContent-Length: {len}\r\nConnection: close\r\n\r\n{body}",
        status = status,
        reason = reason(status),
        len = body.len(),
        body = body,
    )
}
/// Create a tarball response
///
/// NOTE(review): only the status line and headers are produced — the gzip
/// body itself is never included, and it cannot be: a `String` must be
/// valid UTF-8, which arbitrary gzip bytes are not. The caller must write
/// these headers to the socket and then write `data` as raw bytes
/// afterwards (or this function should return `Vec<u8>` instead).
/// `Content-Length` advertises the body the caller is expected to send.
fn tarball_response(data: &[u8]) -> String {
    format!(
        "HTTP/1.1 200 OK\r\nContent-Type: application/gzip\r\nContent-Length: {}\r\nConnection: close\r\n\r\n",
        data.len()
    )
}
/// Validate a package name.
///
/// Accepted: 1–64 characters, all of them ASCII lowercase letters, digits,
/// '-' or '_', and the first character must be a lowercase letter.
fn is_valid_package_name(name: &str) -> bool {
    if name.is_empty() || name.len() > 64 {
        return false;
    }
    let starts_with_letter = name
        .chars()
        .next()
        .map(|c| c.is_ascii_lowercase())
        .unwrap_or(false);
    let allowed = |c: char| c.is_ascii_lowercase() || c.is_ascii_digit() || c == '-' || c == '_';
    starts_with_letter && name.chars().all(allowed)
}
/// Parse an URL query string ("a=1&b=2") into key-value pairs.
///
/// Keys and values are percent-decoded. Segments without an '=' are
/// ignored; when a key repeats, the last occurrence wins.
fn parse_query_string(qs: &str) -> HashMap<String, String> {
    qs.split('&')
        .filter_map(|segment| segment.split_once('='))
        .map(|(key, value)| (urlldecode(key), urlldecode(value)))
        .collect()
}
/// Simple URL (percent) decoding.
///
/// Decodes `%XX` hex escapes at the byte level and `+` as a space, then
/// re-assembles the bytes as UTF-8 (lossy on invalid sequences). The
/// previous version pushed each decoded byte as a `char`, which mangled
/// any multi-byte UTF-8 escape sequence (e.g. "%E2%9C%93"). Malformed
/// escapes are kept literally instead of being dropped.
fn urlldecode(s: &str) -> String {
    let mut bytes: Vec<u8> = Vec::with_capacity(s.len());
    let mut chars = s.chars();
    while let Some(c) = chars.next() {
        match c {
            '%' => {
                let hex: String = chars.by_ref().take(2).collect();
                if let Ok(byte) = u8::from_str_radix(&hex, 16) {
                    bytes.push(byte);
                } else {
                    // Malformed escape: keep the original text verbatim.
                    bytes.push(b'%');
                    bytes.extend_from_slice(hex.as_bytes());
                }
            }
            '+' => bytes.push(b' '),
            _ => {
                let mut buf = [0u8; 4];
                bytes.extend_from_slice(c.encode_utf8(&mut buf).as_bytes());
            }
        }
    }
    String::from_utf8_lossy(&bytes).into_owned()
}
// JSON formatting helpers
/// Serialize package metadata to JSON.
///
/// Every free-form string field is passed through `escape_json` so names,
/// descriptions, authors, etc. containing quotes or backslashes cannot
/// corrupt the output (the previous version only escaped `description`).
/// NOTE(review): `dependencies` iterates a HashMap, so key order in the
/// output is nondeterministic — fine for machine consumption, but diffs
/// of metadata files will churn.
fn format_metadata_json(meta: &PackageMetadata) -> String {
    let deps: Vec<String> = meta.dependencies.iter()
        .map(|(k, v)| format!(r#""{}": "{}""#, escape_json(k), escape_json(v)))
        .collect();
    let authors: Vec<String> = meta.authors.iter()
        .map(|a| format!(r#""{}""#, escape_json(a)))
        .collect();
    let keywords: Vec<String> = meta.keywords.iter()
        .map(|k| format!(r#""{}""#, escape_json(k)))
        .collect();
    format!(
        r#"{{
    "name": "{}",
    "version": "{}",
    "description": "{}",
    "authors": [{}],
    "license": {},
    "repository": {},
    "keywords": [{}],
    "dependencies": {{{}}},
    "checksum": "{}",
    "published_at": "{}"
    }}"#,
        escape_json(&meta.name),
        escape_json(&meta.version),
        escape_json(&meta.description),
        authors.join(", "),
        meta.license.as_ref().map(|l| format!(r#""{}""#, escape_json(l))).unwrap_or_else(|| "null".to_string()),
        meta.repository.as_ref().map(|r| format!(r#""{}""#, escape_json(r))).unwrap_or_else(|| "null".to_string()),
        keywords.join(", "),
        deps.join(", "),
        escape_json(&meta.checksum),
        escape_json(&meta.published_at),
    )
}
/// Serialize a package index entry (all versions) to JSON.
///
/// All string fields are passed through `escape_json` so a name, version
/// label, or checksum containing quotes/backslashes cannot break the JSON
/// (the previous version only escaped `description`).
fn format_package_json(pkg: &PackageIndex) -> String {
    let versions: Vec<String> = pkg.versions.iter()
        .map(|v| format!(
            r#"{{"version": "{}", "checksum": "{}", "published_at": "{}", "yanked": {}}}"#,
            escape_json(&v.version),
            escape_json(&v.checksum),
            escape_json(&v.published_at),
            v.yanked
        ))
        .collect();
    format!(
        r#"{{
    "name": "{}",
    "description": "{}",
    "latest_version": "{}",
    "versions": [{}]
    }}"#,
        escape_json(&pkg.name),
        escape_json(&pkg.description),
        escape_json(&pkg.latest_version),
        versions.join(", ")
    )
}
/// Serialize a flat package list (name/description/latest only) to JSON.
///
/// All string fields are escaped; the previous version only escaped the
/// description, so a hostile name could break the JSON structure.
fn format_packages_list_json(packages: &[PackageIndex]) -> String {
    let items: Vec<String> = packages.iter()
        .map(|pkg| format!(
            r#"{{"name": "{}", "description": "{}", "latest_version": "{}"}}"#,
            escape_json(&pkg.name),
            escape_json(&pkg.description),
            escape_json(&pkg.latest_version)
        ))
        .collect();
    format!(r#"{{"packages": [{}]}}"#, items.join(", "))
}
/// Serialize the whole in-memory index to JSON for persistence.
///
/// NOTE(review): `parse_index_json` below cannot read this format back —
/// the round trip currently loses all entries on reload.
fn format_index_json(index: &HashMap<String, PackageIndex>) -> String {
    let mut items = Vec::with_capacity(index.len());
    for pkg in index.values() {
        items.push(format_package_json(pkg));
    }
    format!(r#"{{"packages": [{}]}}"#, items.join(",\n"))
}
/// Parse the persisted package index from `index.json`.
///
/// STUB: this only validates that the content is wrapped in `{ ... }` and
/// always returns an EMPTY index, so anything written by
/// `format_index_json` is discarded on reload — published packages do not
/// survive a server restart. Replace with a real JSON parser (serde_json)
/// before relying on persistence. (Also removed the unused `mut index`
/// binding the stub carried.)
fn parse_index_json(content: &str) -> Result<HashMap<String, PackageIndex>, String> {
    let content = content.trim();
    if !content.starts_with('{') || !content.ends_with('}') {
        return Err("Invalid JSON format".to_string());
    }
    Ok(HashMap::new())
}
/// Parse package metadata from its JSON file.
///
/// Hand-rolled, line-oriented parser (a real implementation would use
/// serde_json). Keys are matched with `starts_with` on the trimmed line
/// rather than `contains`, so a key name appearing inside another field's
/// text (e.g. a description mentioning `"name":`) is no longer picked up.
/// Fields not listed here (authors, license, keywords, dependencies) are
/// left empty. Returns `None` when name or version is missing.
fn parse_metadata_json(content: &str) -> Option<PackageMetadata> {
    let mut name = String::new();
    let mut version = String::new();
    let mut description = String::new();
    let mut checksum = String::new();
    let mut published_at = String::new();
    for line in content.lines() {
        let line = line.trim();
        if line.starts_with("\"name\":") {
            name = extract_json_string(line);
        } else if line.starts_with("\"version\":") {
            version = extract_json_string(line);
        } else if line.starts_with("\"description\":") {
            description = extract_json_string(line);
        } else if line.starts_with("\"checksum\":") {
            checksum = extract_json_string(line);
        } else if line.starts_with("\"published_at\":") {
            published_at = extract_json_string(line);
        }
    }
    if name.is_empty() || version.is_empty() {
        return None;
    }
    Some(PackageMetadata {
        name,
        version,
        description,
        authors: Vec::new(),
        license: None,
        repository: None,
        keywords: Vec::new(),
        dependencies: HashMap::new(),
        checksum,
        published_at,
    })
}
/// Extract the string value from a `"key": "value"` JSON line.
///
/// Understands backslash escapes, so values written by `escape_json` round
/// trip: `\n`, `\r`, `\t` are restored and `\"`/`\\` (or any other escaped
/// character) yield the character itself. The previous version stopped at
/// the first quote even when it was escaped, truncating such values.
/// Returns an empty string when the line has no colon, no opening quote,
/// or no closing quote.
fn extract_json_string(line: &str) -> String {
    let colon = match line.find(':') {
        Some(i) => i,
        None => return String::new(),
    };
    let value = line[colon + 1..].trim();
    let value = match value.strip_prefix('"') {
        Some(v) => v,
        None => return String::new(),
    };
    let mut result = String::new();
    let mut chars = value.chars();
    while let Some(c) = chars.next() {
        match c {
            // Unescaped closing quote ends the value.
            '"' => return result,
            '\\' => match chars.next() {
                Some('n') => result.push('\n'),
                Some('r') => result.push('\r'),
                Some('t') => result.push('\t'),
                Some(other) => result.push(other),
                None => break,
            },
            other => result.push(other),
        }
    }
    // No closing quote: treat the line as malformed, like the original.
    String::new()
}
/// Escape a string for embedding inside a JSON string literal.
///
/// Handles backslash, double quote, and the common whitespace escapes, and
/// additionally emits `\u00XX` for the remaining control characters
/// (U+0000..U+001F), which raw JSON strings may not contain — the previous
/// version passed them through, producing invalid JSON.
fn escape_json(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    for c in s.chars() {
        match c {
            '\\' => out.push_str("\\\\"),
            '"' => out.push_str("\\\""),
            '\n' => out.push_str("\\n"),
            '\r' => out.push_str("\\r"),
            '\t' => out.push_str("\\t"),
            c if (c as u32) < 0x20 => out.push_str(&format!("\\u{:04x}", c as u32)),
            c => out.push(c),
        }
    }
    out
}
/// Run the registry server (entry point called from main).
///
/// Ensures the storage directory exists, then blocks serving requests on
/// `bind_addr` until the process exits or the bind fails.
pub fn run_registry_server(storage_dir: &str, bind_addr: &str) -> Result<(), String> {
    let storage_path = PathBuf::from(storage_dir);
    if let Err(e) = fs::create_dir_all(&storage_path) {
        return Err(format!("Failed to create storage directory: {}", e));
    }
    RegistryServer::new(&storage_path, bind_addr).run()
}

660
src/symbol_table.rs Normal file
View File

@@ -0,0 +1,660 @@
//! Symbol Table for Lux
//!
//! Provides semantic analysis infrastructure for IDE features like
//! go-to-definition, find references, and rename refactoring.
use crate::ast::*;
use std::collections::HashMap;
/// Unique identifier for a symbol
///
/// Newtype over the sequential counter handed out by the symbol table as
/// symbols are created; stable for the lifetime of one table.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct SymbolId(pub u32);
/// Kind of symbol
///
/// Mirrors the language's declaration forms plus the nested items they
/// introduce (variants, record fields, effect operations).
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum SymbolKind {
    /// `fn` declaration (traits are also filed under `Type`, not here).
    Function,
    /// `let`-bound value or a variable bound by a pattern.
    Variable,
    /// Function or lambda parameter.
    Parameter,
    /// `type` or `trait` declaration.
    Type,
    /// Generic type parameter of a function.
    TypeParameter,
    /// Enum variant (parented by its type symbol).
    Variant,
    /// `effect` declaration.
    Effect,
    /// Operation inside an effect (parented by its effect symbol).
    EffectOperation,
    /// Record field (parented by its type symbol).
    Field,
    /// Module symbol.
    // NOTE(review): not produced by any visitor in this file — confirm
    // whether module symbols are created elsewhere before relying on it.
    Module,
}
/// A symbol definition
#[derive(Debug, Clone)]
pub struct Symbol {
    /// Stable id assigned in creation order.
    pub id: SymbolId,
    /// Name as written in source.
    pub name: String,
    /// Which declaration form introduced this symbol.
    pub kind: SymbolKind,
    /// Span of the definition site (the name token for functions/types;
    /// the whole declaration for top-level lets).
    pub span: Span,
    /// Type signature (for display)
    pub type_signature: Option<String>,
    /// Documentation comment
    pub documentation: Option<String>,
    /// Parent symbol (e.g., type for variants, effect for operations)
    pub parent: Option<SymbolId>,
    /// Is this symbol exported (public)?
    pub is_public: bool,
}
/// A reference to a symbol
///
/// One record per occurrence of a name in the source, including the
/// defining occurrence itself (flagged by `is_definition`).
#[derive(Debug, Clone)]
pub struct Reference {
    /// The symbol this occurrence resolves to.
    pub symbol_id: SymbolId,
    /// Source span of the occurrence.
    pub span: Span,
    /// True at the occurrence that introduces the symbol.
    pub is_definition: bool,
    /// True when the occurrence binds/assigns the symbol (e.g. `let`).
    pub is_write: bool,
}
/// A scope in the symbol table
///
/// Scopes form a tree via parent indices into `SymbolTable::scopes`;
/// name lookup walks from a scope up through its parents.
#[derive(Debug, Clone)]
pub struct Scope {
    /// Parent scope (None for global scope)
    pub parent: Option<usize>,
    /// Symbols defined in this scope
    pub symbols: HashMap<String, SymbolId>,
    /// Span of this scope
    pub span: Span,
}
/// The symbol table
///
/// Built once per program by walking the AST; backs IDE features such as
/// go-to-definition, find-references, and rename.
#[derive(Debug, Clone)]
pub struct SymbolTable {
    /// All symbols
    symbols: Vec<Symbol>,
    /// All references
    references: Vec<Reference>,
    /// Scopes (index 0 is always the global scope)
    scopes: Vec<Scope>,
    /// Mapping from position to references
    /// (keyed by (span.start, span.end) as u32 pairs)
    position_to_reference: HashMap<(u32, u32), usize>,
    /// Next symbol ID
    next_id: u32,
}
impl SymbolTable {
    /// Create an empty table containing only the global scope (index 0).
    pub fn new() -> Self {
        Self {
            symbols: Vec::new(),
            references: Vec::new(),
            scopes: vec![Scope {
                parent: None,
                symbols: HashMap::new(),
                span: Span { start: 0, end: 0 },
            }],
            position_to_reference: HashMap::new(),
            next_id: 0,
        }
    }
    /// Build symbol table from a program
    pub fn build(program: &Program) -> Self {
        let mut table = Self::new();
        table.visit_program(program);
        table
    }
    /// Add a symbol to the current scope
    ///
    /// A symbol with the same name shadows (replaces) any earlier entry in
    /// that scope's name map; the shadowed Symbol itself stays in `symbols`.
    fn add_symbol(&mut self, scope_idx: usize, symbol: Symbol) -> SymbolId {
        let id = symbol.id;
        self.scopes[scope_idx].symbols.insert(symbol.name.clone(), id);
        self.symbols.push(symbol);
        id
    }
    /// Create a new symbol
    ///
    /// Allocates the next SymbolId but does NOT register the symbol in any
    /// scope — callers must pass the result to `add_symbol`.
    fn new_symbol(
        &mut self,
        name: String,
        kind: SymbolKind,
        span: Span,
        type_signature: Option<String>,
        is_public: bool,
    ) -> Symbol {
        let id = SymbolId(self.next_id);
        self.next_id += 1;
        Symbol {
            id,
            name,
            kind,
            span,
            type_signature,
            documentation: None,
            parent: None,
            is_public,
        }
    }
    /// Add a reference
    ///
    /// NOTE(review): `position_to_reference` is populated here but never
    /// consulted — `symbol_at_position` below scans `references` linearly
    /// instead. Either wire it up or drop the map.
    fn add_reference(&mut self, symbol_id: SymbolId, span: Span, is_definition: bool, is_write: bool) {
        let ref_idx = self.references.len();
        self.references.push(Reference {
            symbol_id,
            span,
            is_definition,
            is_write,
        });
        // Index by start position
        self.position_to_reference.insert((span.start as u32, span.end as u32), ref_idx);
    }
    /// Look up a symbol by name in the given scope and its parents
    pub fn lookup(&self, name: &str, scope_idx: usize) -> Option<SymbolId> {
        let scope = &self.scopes[scope_idx];
        if let Some(&id) = scope.symbols.get(name) {
            return Some(id);
        }
        // Recurse up the scope tree until the global scope (parent == None).
        if let Some(parent) = scope.parent {
            return self.lookup(name, parent);
        }
        None
    }
    /// Get a symbol by ID
    ///
    /// Linear scan — fine for file-sized tables. Symbols appear to be
    /// pushed in id order, so this could become a direct index lookup;
    /// verify that invariant before changing it.
    pub fn get_symbol(&self, id: SymbolId) -> Option<&Symbol> {
        self.symbols.iter().find(|s| s.id == id)
    }
    /// Get the symbol at a position
    ///
    /// Spans are treated as inclusive at both ends; the first reference
    /// containing the offset wins.
    pub fn symbol_at_position(&self, offset: usize) -> Option<&Symbol> {
        // Find a reference that contains this offset
        for reference in &self.references {
            if offset >= reference.span.start && offset <= reference.span.end {
                return self.get_symbol(reference.symbol_id);
            }
        }
        None
    }
    /// Get the definition of a symbol at a position
    ///
    /// NOTE(review): currently identical to `symbol_at_position`; it does
    /// not navigate to the defining Reference. The Symbol's own `span`
    /// serves as the definition location.
    pub fn definition_at_position(&self, offset: usize) -> Option<&Symbol> {
        self.symbol_at_position(offset)
    }
    /// Find all references to a symbol
    pub fn find_references(&self, symbol_id: SymbolId) -> Vec<&Reference> {
        self.references
            .iter()
            .filter(|r| r.symbol_id == symbol_id)
            .collect()
    }
    /// Get all symbols of a given kind
    pub fn symbols_of_kind(&self, kind: SymbolKind) -> Vec<&Symbol> {
        self.symbols.iter().filter(|s| s.kind == kind).collect()
    }
    /// Get all symbols in the global scope
    pub fn global_symbols(&self) -> Vec<&Symbol> {
        self.scopes[0]
            .symbols
            .values()
            .filter_map(|&id| self.get_symbol(id))
            .collect()
    }
    /// Create a new scope
    fn push_scope(&mut self, parent: usize, span: Span) -> usize {
        let idx = self.scopes.len();
        self.scopes.push(Scope {
            parent: Some(parent),
            symbols: HashMap::new(),
            span,
        });
        idx
    }
    // =========================================================================
    // AST Visitors
    // =========================================================================
    fn visit_program(&mut self, program: &Program) {
        // First pass: collect all top-level declarations
        // NOTE(review): this is the ONLY pass — a name used before its
        // declaration in the file will not resolve in visit_expr. A true
        // two-pass build (declare first, then visit bodies) would fix that.
        for decl in &program.declarations {
            self.visit_declaration(decl, 0);
        }
    }
    fn visit_declaration(&mut self, decl: &Declaration, scope_idx: usize) {
        match decl {
            Declaration::Function(f) => self.visit_function(f, scope_idx),
            Declaration::Type(t) => self.visit_type_decl(t, scope_idx),
            Declaration::Effect(e) => self.visit_effect(e, scope_idx),
            Declaration::Let(let_decl) => {
                let is_public = matches!(let_decl.visibility, Visibility::Public);
                let type_sig = let_decl.typ.as_ref().map(|t| self.type_expr_to_string(t));
                let symbol = self.new_symbol(
                    let_decl.name.name.clone(),
                    SymbolKind::Variable,
                    let_decl.span,
                    type_sig,
                    is_public,
                );
                let id = self.add_symbol(scope_idx, symbol);
                self.add_reference(id, let_decl.name.span, true, true);
                // Visit the expression
                self.visit_expr(&let_decl.value, scope_idx);
            }
            Declaration::Handler(h) => self.visit_handler(h, scope_idx),
            Declaration::Trait(t) => self.visit_trait(t, scope_idx),
            Declaration::Impl(i) => self.visit_impl(i, scope_idx),
        }
    }
    fn visit_function(&mut self, f: &FunctionDecl, scope_idx: usize) {
        let is_public = matches!(f.visibility, Visibility::Public);
        // Build type signature
        let param_types: Vec<String> = f.params.iter()
            .map(|p| format!("{}: {}", p.name.name, self.type_expr_to_string(&p.typ)))
            .collect();
        let return_type = self.type_expr_to_string(&f.return_type);
        let effects = if f.effects.is_empty() {
            String::new()
        } else {
            format!(" with {{{}}}", f.effects.iter()
                .map(|e| e.name.clone())
                .collect::<Vec<_>>()
                .join(", "))
        };
        let type_sig = format!("fn {}({}): {}{}", f.name.name, param_types.join(", "), return_type, effects);
        let symbol = self.new_symbol(
            f.name.name.clone(),
            SymbolKind::Function,
            f.name.span,
            Some(type_sig),
            is_public,
        );
        let fn_id = self.add_symbol(scope_idx, symbol);
        self.add_reference(fn_id, f.name.span, true, false);
        // Create scope for function body
        let body_span = f.body.span();
        let fn_scope = self.push_scope(scope_idx, body_span);
        // Add type parameters
        for tp in &f.type_params {
            let symbol = self.new_symbol(
                tp.name.clone(),
                SymbolKind::TypeParameter,
                tp.span,
                None,
                false,
            );
            self.add_symbol(fn_scope, symbol);
        }
        // Add parameters
        for param in &f.params {
            let type_sig = self.type_expr_to_string(&param.typ);
            let symbol = self.new_symbol(
                param.name.name.clone(),
                SymbolKind::Parameter,
                param.name.span,
                Some(type_sig),
                false,
            );
            self.add_symbol(fn_scope, symbol);
        }
        // Visit body
        self.visit_expr(&f.body, fn_scope);
    }
    fn visit_type_decl(&mut self, t: &TypeDecl, scope_idx: usize) {
        let is_public = matches!(t.visibility, Visibility::Public);
        let type_sig = format!("type {}", t.name.name);
        let symbol = self.new_symbol(
            t.name.name.clone(),
            SymbolKind::Type,
            t.name.span,
            Some(type_sig),
            is_public,
        );
        let type_id = self.add_symbol(scope_idx, symbol);
        self.add_reference(type_id, t.name.span, true, false);
        // Add variants
        // Variants and fields are registered in the SAME scope as the type
        // (not a nested one) and linked back via `parent`.
        match &t.definition {
            TypeDef::Enum(variants) => {
                for variant in variants {
                    let mut var_symbol = self.new_symbol(
                        variant.name.name.clone(),
                        SymbolKind::Variant,
                        variant.name.span,
                        None,
                        is_public,
                    );
                    var_symbol.parent = Some(type_id);
                    self.add_symbol(scope_idx, var_symbol);
                }
            }
            TypeDef::Record(fields) => {
                for field in fields {
                    let mut field_symbol = self.new_symbol(
                        field.name.name.clone(),
                        SymbolKind::Field,
                        field.name.span,
                        Some(self.type_expr_to_string(&field.typ)),
                        is_public,
                    );
                    field_symbol.parent = Some(type_id);
                    self.add_symbol(scope_idx, field_symbol);
                }
            }
            TypeDef::Alias(_) => {}
        }
    }
    fn visit_effect(&mut self, e: &EffectDecl, scope_idx: usize) {
        let is_public = true; // Effects are typically public
        let type_sig = format!("effect {}", e.name.name);
        let symbol = self.new_symbol(
            e.name.name.clone(),
            SymbolKind::Effect,
            e.name.span,
            Some(type_sig),
            is_public,
        );
        let effect_id = self.add_symbol(scope_idx, symbol);
        // Add operations
        for op in &e.operations {
            let param_types: Vec<String> = op.params.iter()
                .map(|p| format!("{}: {}", p.name.name, self.type_expr_to_string(&p.typ)))
                .collect();
            let return_type = self.type_expr_to_string(&op.return_type);
            let op_sig = format!("fn {}({}): {}", op.name.name, param_types.join(", "), return_type);
            let mut op_symbol = self.new_symbol(
                op.name.name.clone(),
                SymbolKind::EffectOperation,
                op.name.span,
                Some(op_sig),
                is_public,
            );
            op_symbol.parent = Some(effect_id);
            self.add_symbol(scope_idx, op_symbol);
        }
    }
    fn visit_handler(&mut self, _h: &HandlerDecl, _scope_idx: usize) {
        // Handlers are complex - visit their implementations
        // NOTE(review): intentionally a no-op for now — handler-local names
        // get no symbols/references, so rename inside handlers won't work.
    }
    fn visit_trait(&mut self, t: &TraitDecl, scope_idx: usize) {
        let is_public = matches!(t.visibility, Visibility::Public);
        let type_sig = format!("trait {}", t.name.name);
        let symbol = self.new_symbol(
            t.name.name.clone(),
            SymbolKind::Type, // Traits are like types
            t.name.span,
            Some(type_sig),
            is_public,
        );
        self.add_symbol(scope_idx, symbol);
    }
    fn visit_impl(&mut self, _i: &ImplDecl, _scope_idx: usize) {
        // Impl blocks add methods to types
        // NOTE(review): also a no-op — impl methods are currently invisible
        // to go-to-definition.
    }
    fn visit_expr(&mut self, expr: &Expr, scope_idx: usize) {
        match expr {
            Expr::Var(ident) => {
                // Look up the identifier and add a reference
                // Unresolved names are silently skipped (no diagnostic).
                if let Some(id) = self.lookup(&ident.name, scope_idx) {
                    self.add_reference(id, ident.span, false, false);
                }
            }
            Expr::Let { name, value, body, span, .. } => {
                // Visit the value first
                // (resolved in the enclosing scope: `let` is non-recursive,
                // so `let x = x` refers to an outer `x`)
                self.visit_expr(value, scope_idx);
                // Create a new scope for the let binding
                let let_scope = self.push_scope(scope_idx, *span);
                // Add the variable
                let symbol = self.new_symbol(
                    name.name.clone(),
                    SymbolKind::Variable,
                    name.span,
                    None,
                    false,
                );
                let var_id = self.add_symbol(let_scope, symbol);
                self.add_reference(var_id, name.span, true, true);
                // Visit the body
                self.visit_expr(body, let_scope);
            }
            Expr::Lambda { params, body, span, .. } => {
                let lambda_scope = self.push_scope(scope_idx, *span);
                for param in params {
                    let symbol = self.new_symbol(
                        param.name.name.clone(),
                        SymbolKind::Parameter,
                        param.name.span,
                        None,
                        false,
                    );
                    self.add_symbol(lambda_scope, symbol);
                }
                self.visit_expr(body, lambda_scope);
            }
            Expr::Call { func, args, .. } => {
                self.visit_expr(func, scope_idx);
                for arg in args {
                    self.visit_expr(arg, scope_idx);
                }
            }
            Expr::EffectOp { args, .. } => {
                // Only arguments are visited; the operation name itself gets
                // no reference here.
                for arg in args {
                    self.visit_expr(arg, scope_idx);
                }
            }
            Expr::Field { object, .. } => {
                // The field name is not resolved to its Field symbol here.
                self.visit_expr(object, scope_idx);
            }
            Expr::If { condition, then_branch, else_branch, .. } => {
                self.visit_expr(condition, scope_idx);
                self.visit_expr(then_branch, scope_idx);
                self.visit_expr(else_branch, scope_idx);
            }
            Expr::Match { scrutinee, arms, .. } => {
                self.visit_expr(scrutinee, scope_idx);
                for arm in arms {
                    // Each arm may bind variables
                    let arm_scope = self.push_scope(scope_idx, arm.body.span());
                    self.visit_pattern(&arm.pattern, arm_scope);
                    if let Some(ref guard) = arm.guard {
                        self.visit_expr(guard, arm_scope);
                    }
                    self.visit_expr(&arm.body, arm_scope);
                }
            }
            Expr::Block { statements, result, .. } => {
                // Statements share the block's scope, so a `let` statement's
                // binding is visible to later statements and the result.
                for stmt in statements {
                    self.visit_statement(stmt, scope_idx);
                }
                self.visit_expr(result, scope_idx);
            }
            Expr::BinaryOp { left, right, .. } => {
                self.visit_expr(left, scope_idx);
                self.visit_expr(right, scope_idx);
            }
            Expr::UnaryOp { operand, .. } => {
                self.visit_expr(operand, scope_idx);
            }
            Expr::List { elements, .. } => {
                for e in elements {
                    self.visit_expr(e, scope_idx);
                }
            }
            Expr::Tuple { elements, .. } => {
                for e in elements {
                    self.visit_expr(e, scope_idx);
                }
            }
            Expr::Record { fields, .. } => {
                for (_, e) in fields {
                    self.visit_expr(e, scope_idx);
                }
            }
            Expr::Run { expr, handlers, .. } => {
                self.visit_expr(expr, scope_idx);
                for (_effect, handler_expr) in handlers {
                    self.visit_expr(handler_expr, scope_idx);
                }
            }
            Expr::Resume { value, .. } => {
                self.visit_expr(value, scope_idx);
            }
            // Literals don't need symbol resolution
            Expr::Literal(_) => {}
        }
    }
    fn visit_statement(&mut self, stmt: &Statement, scope_idx: usize) {
        match stmt {
            Statement::Expr(e) => self.visit_expr(e, scope_idx),
            Statement::Let { name, value, .. } => {
                // Value first (binding not yet visible), then register the
                // variable directly in the enclosing block's scope.
                self.visit_expr(value, scope_idx);
                let symbol = self.new_symbol(
                    name.name.clone(),
                    SymbolKind::Variable,
                    name.span,
                    None,
                    false,
                );
                let id = self.add_symbol(scope_idx, symbol);
                self.add_reference(id, name.span, true, true);
            }
        }
    }
    fn visit_pattern(&mut self, pattern: &Pattern, scope_idx: usize) {
        match pattern {
            Pattern::Var(ident) => {
                // Every variable pattern introduces a fresh binding in the
                // arm's scope.
                let symbol = self.new_symbol(
                    ident.name.clone(),
                    SymbolKind::Variable,
                    ident.span,
                    None,
                    false,
                );
                let id = self.add_symbol(scope_idx, symbol);
                self.add_reference(id, ident.span, true, true);
            }
            Pattern::Constructor { fields, .. } => {
                for p in fields {
                    self.visit_pattern(p, scope_idx);
                }
            }
            Pattern::Tuple { elements, .. } => {
                for p in elements {
                    self.visit_pattern(p, scope_idx);
                }
            }
            Pattern::Record { fields, .. } => {
                for (_, p) in fields {
                    self.visit_pattern(p, scope_idx);
                }
            }
            Pattern::Wildcard(_) => {}
            Pattern::Literal(_) => {}
        }
    }
    /// Pretty-print a type expression for display in symbol signatures.
    fn type_expr_to_string(&self, typ: &TypeExpr) -> String {
        match typ {
            TypeExpr::Named(ident) => ident.name.clone(),
            TypeExpr::App(base, args) => {
                let base_str = self.type_expr_to_string(base);
                if args.is_empty() {
                    base_str
                } else {
                    let args_str: Vec<String> = args.iter()
                        .map(|a| self.type_expr_to_string(a))
                        .collect();
                    format!("{}<{}>", base_str, args_str.join(", "))
                }
            }
            TypeExpr::Function { params, return_type, .. } => {
                let params_str: Vec<String> = params.iter()
                    .map(|p| self.type_expr_to_string(p))
                    .collect();
                format!("fn({}): {}", params_str.join(", "), self.type_expr_to_string(return_type))
            }
            TypeExpr::Tuple(types) => {
                let types_str: Vec<String> = types.iter()
                    .map(|t| self.type_expr_to_string(t))
                    .collect();
                format!("({})", types_str.join(", "))
            }
            TypeExpr::Record(fields) => {
                let fields_str: Vec<String> = fields.iter()
                    .map(|f| format!("{}: {}", f.name, self.type_expr_to_string(&f.typ)))
                    .collect();
                format!("{{ {} }}", fields_str.join(", "))
            }
            TypeExpr::Unit => "Unit".to_string(),
            TypeExpr::Versioned { base, .. } => {
                format!("{}@versioned", self.type_expr_to_string(base))
            }
        }
    }
}
impl Default for SymbolTable {
fn default() -> Self {
Self::new()
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::parser::Parser;
    #[test]
    fn test_symbol_table_basic() {
        // A function and a top-level let should both surface as globals.
        let src = r#"
            fn add(a: Int, b: Int): Int = a + b
            let x = 42
        "#;
        let ast = Parser::parse_source(src).unwrap();
        let symtab = SymbolTable::build(&ast);
        let top_level = symtab.global_symbols();
        let has = |wanted: &str| top_level.iter().any(|s| s.name == wanted);
        assert!(has("add"));
        assert!(has("x"));
    }
    #[test]
    fn test_symbol_lookup() {
        // Lookup by name in the global scope (index 0) should find `foo`.
        let src = r#"
            fn foo(x: Int): Int = x + 1
        "#;
        let ast = Parser::parse_source(src).unwrap();
        let symtab = SymbolTable::build(&ast);
        assert!(symtab.lookup("foo", 0).is_some());
    }
}

View File

@@ -1173,6 +1173,110 @@ impl TypeEnv {
},
);
// Add Concurrent effect for concurrent/parallel execution
// Task is represented as Int (task ID)
env.effects.insert(
"Concurrent".to_string(),
EffectDef {
name: "Concurrent".to_string(),
type_params: Vec::new(),
operations: vec![
// Spawn a new concurrent task that returns a value
// Returns a Task<A> (represented as Int task ID)
EffectOpDef {
name: "spawn".to_string(),
params: vec![("thunk".to_string(), Type::Function {
params: Vec::new(),
return_type: Box::new(Type::Var(0)),
effects: EffectSet::empty(),
properties: PropertySet::empty(),
})],
return_type: Type::Int, // Task ID
},
// Wait for a task to complete and get its result
EffectOpDef {
name: "await".to_string(),
params: vec![("task".to_string(), Type::Int)],
return_type: Type::Var(0),
},
// Yield control to allow other tasks to run
EffectOpDef {
name: "yield".to_string(),
params: Vec::new(),
return_type: Type::Unit,
},
// Sleep for milliseconds (non-blocking to other tasks)
EffectOpDef {
name: "sleep".to_string(),
params: vec![("ms".to_string(), Type::Int)],
return_type: Type::Unit,
},
// Cancel a running task
EffectOpDef {
name: "cancel".to_string(),
params: vec![("task".to_string(), Type::Int)],
return_type: Type::Bool,
},
// Check if a task is still running
EffectOpDef {
name: "isRunning".to_string(),
params: vec![("task".to_string(), Type::Int)],
return_type: Type::Bool,
},
// Get the number of active tasks
EffectOpDef {
name: "taskCount".to_string(),
params: Vec::new(),
return_type: Type::Int,
},
],
},
);
// Add Channel effect for concurrent communication
env.effects.insert(
"Channel".to_string(),
EffectDef {
name: "Channel".to_string(),
type_params: Vec::new(),
operations: vec![
// Create a new channel, returns channel ID
EffectOpDef {
name: "create".to_string(),
params: Vec::new(),
return_type: Type::Int, // Channel ID
},
// Send a value on a channel
EffectOpDef {
name: "send".to_string(),
params: vec![
("channel".to_string(), Type::Int),
("value".to_string(), Type::Var(0)),
],
return_type: Type::Unit,
},
// Receive a value from a channel (blocks until available)
EffectOpDef {
name: "receive".to_string(),
params: vec![("channel".to_string(), Type::Int)],
return_type: Type::Var(0),
},
// Try to receive (non-blocking, returns Option)
EffectOpDef {
name: "tryReceive".to_string(),
params: vec![("channel".to_string(), Type::Int)],
return_type: Type::Option(Box::new(Type::Var(0))),
},
// Close a channel
EffectOpDef {
name: "close".to_string(),
params: vec![("channel".to_string(), Type::Int)],
return_type: Type::Unit,
},
],
},
);
// Add Sql effect for database access
// Connection is represented as Int (connection ID)
let row_type = Type::Record(vec![]); // Dynamic record type