1 Commits

Author SHA1 Message Date
7c55b76dae feat: static binary builds and automated release script
Switch reqwest from native-tls (openssl) to rustls-tls for a pure-Rust
TLS stack, enabling fully static musl builds. Add `nix build .#static`
for portable Linux binaries and `scripts/release.sh` for automated
Gitea releases with changelog generation.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-18 19:06:33 -05:00
24 changed files with 228 additions and 4138 deletions

View File

@@ -42,45 +42,17 @@ When making changes:
7. **Fix language limitations**: If you encounter parser/type system limitations, fix them (without regressions on guarantees or speed)
8. **Git commits**: Always use `--no-gpg-sign` flag
### Post-work checklist (run after each committable change)
**MANDATORY: Run the full validation script after every committable change:**
```bash
./scripts/validate.sh
```
This script runs ALL of the following checks and will fail if any regress:
1. `cargo check` — no Rust compilation errors
2. `cargo test` — all Rust tests pass (currently 387)
3. `cargo build --release` — release binary builds
4. `lux test` on every package (path, frontmatter, xml, rss, markdown) — all 286 package tests pass
5. `lux check` on every package — type checking + lint passes
If `validate.sh` is not available or you need to run manually:
### Post-work checklist (run after each major piece of work)
```bash
nix develop --command cargo check # No Rust errors
nix develop --command cargo test # All Rust tests pass
nix develop --command cargo build --release # Build release binary
cd ../packages/path && ../../lang/target/release/lux test # Package tests
cd ../packages/frontmatter && ../../lang/target/release/lux test
cd ../packages/xml && ../../lang/target/release/lux test
cd ../packages/rss && ../../lang/target/release/lux test
cd ../packages/markdown && ../../lang/target/release/lux test
nix develop --command cargo test # All tests pass (currently 381)
./target/release/lux check # Type check + lint all .lux files
./target/release/lux fmt # Format all .lux files
./target/release/lux lint # Standalone lint pass
```
**Do NOT commit if any check fails.** Fix the issue first.
### Commit after every piece of work
**After completing each logical unit of work, commit immediately.** This is NOT optional — every fix, feature, or change MUST be committed right away. Do not let changes accumulate uncommitted across multiple features. Each commit should be a single logical change (one feature, one bugfix, etc.). Use `--no-gpg-sign` flag for all commits.
**Commit workflow:**
1. Make the change
2. Run `./scripts/validate.sh` (all 13 checks must pass)
3. `git add` the relevant files
4. `git commit --no-gpg-sign -m "type: description"` (use conventional commits: fix/feat/chore/docs)
5. Move on to the next task
**Never skip committing.** If you fixed a bug, commit it. If you added a feature, commit it. If you updated docs, commit it. Do not batch unrelated changes into one commit.
**After completing each logical unit of work, commit immediately.** Do not let changes accumulate uncommitted across multiple features. Each commit should be a single logical change (one feature, one bugfix, etc.). Use `--no-gpg-sign` flag for all commits.
**IMPORTANT: Always verify Lux code you write:**
- Run with interpreter: `./target/release/lux file.lux`
@@ -137,7 +109,7 @@ When working on any major task that involves writing Lux code, **document every
## Code Quality
- Fix all compiler warnings before committing
- Ensure all tests pass (currently 387 tests)
- Ensure all tests pass (currently 381 tests)
- Add new tests when adding features
- Keep examples and documentation in sync

15
Cargo.lock generated
View File

@@ -225,7 +225,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb"
dependencies = [
"libc",
"windows-sys 0.61.2",
"windows-sys 0.59.0",
]
[[package]]
@@ -392,12 +392,6 @@ dependencies = [
"wasip3",
]
[[package]]
name = "glob"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280"
[[package]]
name = "h2"
version = "0.3.27"
@@ -776,9 +770,8 @@ dependencies = [
[[package]]
name = "lux"
version = "0.1.8"
version = "0.1.0"
dependencies = [
"glob",
"lsp-server",
"lsp-types",
"postgres",
@@ -1189,7 +1182,7 @@ dependencies = [
"errno",
"libc",
"linux-raw-sys",
"windows-sys 0.61.2",
"windows-sys 0.59.0",
]
[[package]]
@@ -1482,7 +1475,7 @@ dependencies = [
"getrandom 0.4.1",
"once_cell",
"rustix",
"windows-sys 0.61.2",
"windows-sys 0.59.0",
]
[[package]]

View File

@@ -1,6 +1,6 @@
[package]
name = "lux"
version = "0.1.9"
version = "0.1.0"
edition = "2021"
description = "A functional programming language with first-class effects, schema evolution, and behavioral types"
license = "MIT"
@@ -17,7 +17,6 @@ reqwest = { version = "0.11", default-features = false, features = ["blocking",
tiny_http = "0.12"
rusqlite = { version = "0.31", features = ["bundled"] }
postgres = "0.19"
glob = "0.3"
[dev-dependencies]

View File

@@ -44,7 +44,7 @@
printf "\n"
printf " \033[1;35m \033[0m\n"
printf " \033[1;35m \033[0m\n"
printf " \033[1;35m \033[0m v0.1.9\n"
printf " \033[1;35m \033[0m v0.1.0\n"
printf "\n"
printf " Functional language with first-class effects\n"
printf "\n"
@@ -62,7 +62,7 @@
packages.default = pkgs.rustPlatform.buildRustPackage {
pname = "lux";
version = "0.1.9";
version = "0.1.0";
src = ./.;
cargoLock.lockFile = ./Cargo.lock;
@@ -79,7 +79,7 @@
};
in muslPkgs.rustPlatform.buildRustPackage {
pname = "lux";
version = "0.1.9";
version = "0.1.0";
src = ./.;
cargoLock.lockFile = ./Cargo.lock;

View File

@@ -1,225 +0,0 @@
// Lux AST — Self-hosted Abstract Syntax Tree definitions
//
// Direct translation of src/ast.rs into Lux ADTs.
// These types represent the parsed structure of a Lux program.
//
// Naming conventions to avoid collisions:
// Ex = Expr variant, Pat = Pattern, Te = TypeExpr
// Td = TypeDef, Vf = VariantFields, Op = Operator
// Decl = Declaration, St = Statement
// === Source Location ===
type Span = | Span(Int, Int)
// === Identifiers ===
type Ident = | Ident(String, Span)
// === Visibility ===
type Visibility = | Public | Private
// === Schema Evolution ===
type Version = | Version(Int, Span)
type VersionConstraint =
| VcExact(Version)
| VcAtLeast(Version)
| VcLatest(Span)
// === Behavioral Types ===
type BehavioralProperty =
| BpPure
| BpTotal
| BpIdempotent
| BpDeterministic
| BpCommutative
// === Trait Bound (needed before WhereClause) ===
type TraitBound = | TraitBound(Ident, List<TypeExpr>, Span)
// === Trait Constraint (needed before WhereClause) ===
type TraitConstraint = | TraitConstraint(Ident, List<TraitBound>, Span)
// === Where Clauses ===
type WhereClause =
| WcProperty(Ident, BehavioralProperty, Span)
| WcResult(Expr, Span)
| WcTrait(TraitConstraint)
// === Module Path ===
type ModulePath = | ModulePath(List<Ident>, Span)
// === Import ===
// path, alias, items, wildcard, span
type ImportDecl = | ImportDecl(ModulePath, Option<Ident>, Option<List<Ident>>, Bool, Span)
// === Program ===
type Program = | Program(List<ImportDecl>, List<Declaration>)
// === Declarations ===
type Declaration =
| DeclFunction(FunctionDecl)
| DeclEffect(EffectDecl)
| DeclType(TypeDecl)
| DeclHandler(HandlerDecl)
| DeclLet(LetDecl)
| DeclTrait(TraitDecl)
| DeclImpl(ImplDecl)
// === Parameter ===
type Parameter = | Parameter(Ident, TypeExpr, Span)
// === Effect Operation ===
type EffectOp = | EffectOp(Ident, List<Parameter>, TypeExpr, Span)
// === Record Field ===
type RecordField = | RecordField(Ident, TypeExpr, Span)
// === Variant Fields ===
type VariantFields =
| VfUnit
| VfTuple(List<TypeExpr>)
| VfRecord(List<RecordField>)
// === Variant ===
type Variant = | Variant(Ident, VariantFields, Span)
// === Migration ===
type Migration = | Migration(Version, Expr, Span)
// === Handler Impl ===
// op_name, params, resume, body, span
type HandlerImpl = | HandlerImpl(Ident, List<Ident>, Option<Ident>, Expr, Span)
// === Impl Method ===
// name, params, return_type, body, span
type ImplMethod = | ImplMethod(Ident, List<Parameter>, Option<TypeExpr>, Expr, Span)
// === Trait Method ===
// name, type_params, params, return_type, default_impl, span
type TraitMethod = | TraitMethod(Ident, List<Ident>, List<Parameter>, TypeExpr, Option<Expr>, Span)
// === Type Expressions ===
type TypeExpr =
| TeNamed(Ident)
| TeApp(TypeExpr, List<TypeExpr>)
| TeFunction(List<TypeExpr>, TypeExpr, List<Ident>)
| TeTuple(List<TypeExpr>)
| TeRecord(List<RecordField>)
| TeUnit
| TeVersioned(TypeExpr, VersionConstraint)
// === Literal ===
type LiteralKind =
| LitInt(Int)
| LitFloat(String)
| LitString(String)
| LitChar(Char)
| LitBool(Bool)
| LitUnit
type Literal = | Literal(LiteralKind, Span)
// === Binary Operators ===
type BinaryOp =
| OpAdd | OpSub | OpMul | OpDiv | OpMod
| OpEq | OpNe | OpLt | OpLe | OpGt | OpGe
| OpAnd | OpOr
| OpPipe | OpConcat
// === Unary Operators ===
type UnaryOp = | OpNeg | OpNot
// === Statements ===
type Statement =
| StExpr(Expr)
| StLet(Ident, Option<TypeExpr>, Expr, Span)
// === Match Arms ===
type MatchArm = | MatchArm(Pattern, Option<Expr>, Expr, Span)
// === Patterns ===
type Pattern =
| PatWildcard(Span)
| PatVar(Ident)
| PatLiteral(Literal)
| PatConstructor(Ident, List<Pattern>, Span)
| PatRecord(List<(Ident, Pattern)>, Span)
| PatTuple(List<Pattern>, Span)
// === Function Declaration ===
// visibility, doc, name, type_params, params, return_type, effects, properties, where_clauses, body, span
type FunctionDecl = | FunctionDecl(Visibility, Option<String>, Ident, List<Ident>, List<Parameter>, TypeExpr, List<Ident>, List<BehavioralProperty>, List<WhereClause>, Expr, Span)
// === Effect Declaration ===
// doc, name, type_params, operations, span
type EffectDecl = | EffectDecl(Option<String>, Ident, List<Ident>, List<EffectOp>, Span)
// === Type Declaration ===
// visibility, doc, name, type_params, version, definition, migrations, span
type TypeDecl = | TypeDecl(Visibility, Option<String>, Ident, List<Ident>, Option<Version>, TypeDef, List<Migration>, Span)
// === Handler Declaration ===
// name, params, effect, implementations, span
type HandlerDecl = | HandlerDecl(Ident, List<Parameter>, Ident, List<HandlerImpl>, Span)
// === Let Declaration ===
// visibility, doc, name, typ, value, span
type LetDecl = | LetDecl(Visibility, Option<String>, Ident, Option<TypeExpr>, Expr, Span)
// === Trait Declaration ===
// visibility, doc, name, type_params, super_traits, methods, span
type TraitDecl = | TraitDecl(Visibility, Option<String>, Ident, List<Ident>, List<TraitBound>, List<TraitMethod>, Span)
// === Impl Declaration ===
// type_params, constraints, trait_name, trait_args, target_type, methods, span
type ImplDecl = | ImplDecl(List<Ident>, List<TraitConstraint>, Ident, List<TypeExpr>, TypeExpr, List<ImplMethod>, Span)
// === Expressions ===
type Expr =
| ExLiteral(Literal)
| ExVar(Ident)
| ExBinaryOp(BinaryOp, Expr, Expr, Span)
| ExUnaryOp(UnaryOp, Expr, Span)
| ExCall(Expr, List<Expr>, Span)
| ExEffectOp(Ident, Ident, List<Expr>, Span)
| ExField(Expr, Ident, Span)
| ExTupleIndex(Expr, Int, Span)
| ExLambda(List<Parameter>, Option<TypeExpr>, List<Ident>, Expr, Span)
| ExLet(Ident, Option<TypeExpr>, Expr, Expr, Span)
| ExIf(Expr, Expr, Expr, Span)
| ExMatch(Expr, List<MatchArm>, Span)
| ExBlock(List<Statement>, Expr, Span)
| ExRecord(Option<Expr>, List<(Ident, Expr)>, Span)
| ExTuple(List<Expr>, Span)
| ExList(List<Expr>, Span)
| ExRun(Expr, List<(Ident, Expr)>, Span)
| ExResume(Expr, Span)

View File

@@ -5,20 +5,12 @@ set -euo pipefail
# Builds a static binary, generates changelog, and creates a Gitea release.
#
# Usage:
# ./scripts/release.sh # auto-bump patch (0.2.0 → 0.2.1)
# ./scripts/release.sh patch # same as above
# ./scripts/release.sh minor # bump minor (0.2.0 → 0.3.0)
# ./scripts/release.sh major # bump major (0.2.0 → 1.0.0)
# ./scripts/release.sh v1.2.3 # explicit version
# ./scripts/release.sh [version]
#
# Environment:
# GITEA_TOKEN - API token for git.qrty.ink (prompted if not set)
# GITEA_URL - Gitea instance URL (default: https://git.qrty.ink)
# cd to repo root (directory containing this script's parent)
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
cd "$SCRIPT_DIR/.."
GITEA_URL="${GITEA_URL:-https://git.qrty.ink}"
REPO_OWNER="blu"
REPO_NAME="lux"
@@ -38,33 +30,14 @@ warn() { printf "${YELLOW}!!${NC} %s\n" "$1"; }
err() { printf "${RED}error:${NC} %s\n" "$1" >&2; exit 1; }
# --- Determine version ---
CURRENT=$(grep '^version' Cargo.toml | head -1 | sed 's/.*"\(.*\)".*/\1/')
BUMP="${1:-patch}"
bump_version() {
local ver="$1" part="$2"
IFS='.' read -r major minor patch <<< "$ver"
case "$part" in
major) echo "$((major + 1)).0.0" ;;
minor) echo "$major.$((minor + 1)).0" ;;
patch) echo "$major.$minor.$((patch + 1))" ;;
*) echo "$part" ;; # treat as explicit version
esac
}
case "$BUMP" in
major|minor|patch)
VERSION=$(bump_version "$CURRENT" "$BUMP")
info "Bumping $BUMP: $CURRENT → $VERSION"
;;
*)
# Explicit version — strip v prefix if present
VERSION="${BUMP#v}"
info "Explicit version: $VERSION"
;;
esac
TAG="v$VERSION"
VERSION="${1:-}"
if [ -z "$VERSION" ]; then
VERSION=$(grep '^version' Cargo.toml | head -1 | sed 's/.*"\(.*\)".*/\1/')
info "Version from Cargo.toml: v$VERSION"
fi
# Ensure v prefix
[[ "$VERSION" == v* ]] || VERSION="v$VERSION"
TAG="$VERSION"
# --- Check for clean working tree ---
if [ -n "$(git status --porcelain)" ]; then
@@ -77,18 +50,7 @@ fi
# --- Check if tag already exists ---
if git rev-parse "$TAG" >/dev/null 2>&1; then
err "Tag $TAG already exists. Choose a different version."
fi
# --- Update version in source files ---
if [ "$VERSION" != "$CURRENT" ]; then
info "Updating version in Cargo.toml and flake.nix..."
sed -i "0,/^version = \"$CURRENT\"/s//version = \"$VERSION\"/" Cargo.toml
sed -i "s/version = \"$CURRENT\";/version = \"$VERSION\";/g" flake.nix
sed -i "s/v$CURRENT/v$VERSION/g" flake.nix
git add Cargo.toml flake.nix
git commit --no-gpg-sign -m "chore: bump version to $VERSION"
ok "Version updated and committed"
err "Tag $TAG already exists. Bump version in Cargo.toml or choose a different version."
fi
# --- Generate changelog ---
@@ -102,7 +64,7 @@ else
info "First release — summarizing recent commits:"
fi
CHANGELOG=$(git log "$RANGE" --pretty=format:"- %s" --no-merges 2>/dev/null | head -50 || true)
CHANGELOG=$(git log "$RANGE" --pretty=format:"- %s" --no-merges 2>/dev/null | head -50)
if [ -z "$CHANGELOG" ]; then
CHANGELOG="- Initial release"
fi

View File

@@ -1,211 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
# Lux Full Validation Script
# Runs all checks: Rust tests, package tests, type checking, example compilation.
# Run after every committable change to ensure no regressions.
# cd to repo root (directory containing this script's parent)
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
cd "$SCRIPT_DIR/.."
LUX="$(pwd)/target/release/lux"
PACKAGES_DIR="$(pwd)/../packages"
PROJECTS_DIR="$(pwd)/projects"
EXAMPLES_DIR="$(pwd)/examples"
RED='\033[0;31m'
GREEN='\033[0;32m'
CYAN='\033[0;36m'
BOLD='\033[1m'
NC='\033[0m'
FAILED=0
TOTAL=0
step() {
TOTAL=$((TOTAL + 1))
printf "${CYAN}[%d]${NC} %s... " "$TOTAL" "$1"
}
ok() { printf "${GREEN}ok${NC} %s\n" "${1:-}"; }
fail() { printf "${RED}FAIL${NC} %s\n" "${1:-}"; FAILED=$((FAILED + 1)); }
# --- Rust checks ---
step "cargo check"
if nix develop --command cargo check 2>/dev/null; then ok; else fail; fi
step "cargo test"
OUTPUT=$(nix develop --command cargo test 2>&1 || true)
RESULT=$(echo "$OUTPUT" | grep "test result:" || echo "no result")
if echo "$RESULT" | grep -q "0 failed"; then ok "$RESULT"; else fail "$RESULT"; fi
# --- Build release binary ---
step "cargo build --release"
if nix develop --command cargo build --release 2>/dev/null; then ok; else fail; fi
# --- Package tests ---
for pkg in path frontmatter xml rss markdown; do
PKG_DIR="$PACKAGES_DIR/$pkg"
if [ -d "$PKG_DIR" ]; then
step "lux test ($pkg)"
OUTPUT=$(cd "$PKG_DIR" && "$LUX" test 2>&1 || true)
RESULT=$(echo "$OUTPUT" | grep "passed" | tail -1 || echo "no result")
if echo "$RESULT" | grep -q "passed"; then ok "$RESULT"; else fail "$RESULT"; fi
fi
done
# --- Lux check on packages ---
for pkg in path frontmatter xml rss markdown; do
PKG_DIR="$PACKAGES_DIR/$pkg"
if [ -d "$PKG_DIR" ]; then
step "lux check ($pkg)"
OUTPUT=$(cd "$PKG_DIR" && "$LUX" check 2>&1 || true)
RESULT=$(echo "$OUTPUT" | grep "passed" | tail -1 || echo "no result")
if echo "$RESULT" | grep -q "passed"; then ok; else fail "$RESULT"; fi
fi
done
# --- Project checks ---
for proj_dir in "$PROJECTS_DIR"/*/; do
proj=$(basename "$proj_dir")
if [ -f "$proj_dir/main.lux" ]; then
step "lux check (project: $proj)"
OUTPUT=$("$LUX" check "$proj_dir/main.lux" 2>&1 || true)
if echo "$OUTPUT" | grep -qi "error"; then fail; else ok; fi
fi
# Check any standalone .lux files in the project
for lux_file in "$proj_dir"/*.lux; do
[ -f "$lux_file" ] || continue
fname=$(basename "$lux_file")
[ "$fname" = "main.lux" ] && continue
step "lux check (project: $proj/$fname)"
OUTPUT=$("$LUX" check "$lux_file" 2>&1 || true)
if echo "$OUTPUT" | grep -qi "error"; then fail; else ok; fi
done
done
# === Compilation & Interpreter Checks ===
# --- Interpreter: examples ---
# Skip: http_api, http, http_router, http_server (network), postgres_demo (db),
# random, property_testing (Random effect), shell (Process), json (File I/O),
# file_io (File I/O), test_math, test_lists (Test effect), stress_shared_rc,
# test_rc_comparison (internal tests), modules/* (need cwd)
INTERP_SKIP="http_api http http_router http_server postgres_demo random property_testing shell json file_io test_math test_lists stress_shared_rc test_rc_comparison"
for f in "$EXAMPLES_DIR"/*.lux; do
name=$(basename "$f" .lux)
skip=false
for s in $INTERP_SKIP; do [ "$name" = "$s" ] && skip=true; done
$skip && continue
step "interpreter (examples/$name)"
if timeout 10 "$LUX" "$f" >/dev/null 2>&1; then ok; else fail; fi
done
# --- Interpreter: examples/standard ---
# Skip: guessing_game (reads stdin)
for f in "$EXAMPLES_DIR"/standard/*.lux; do
[ -f "$f" ] || continue
name=$(basename "$f" .lux)
[ "$name" = "guessing_game" ] && continue
step "interpreter (standard/$name)"
if timeout 10 "$LUX" "$f" >/dev/null 2>&1; then ok; else fail; fi
done
# --- Interpreter: examples/showcase ---
# Skip: task_manager (parse error in current version)
for f in "$EXAMPLES_DIR"/showcase/*.lux; do
[ -f "$f" ] || continue
name=$(basename "$f" .lux)
[ "$name" = "task_manager" ] && continue
step "interpreter (showcase/$name)"
if timeout 10 "$LUX" "$f" >/dev/null 2>&1; then ok; else fail; fi
done
# --- Interpreter: projects ---
# Skip: guessing-game (Random), rest-api (HttpServer)
PROJ_INTERP_SKIP="guessing-game rest-api"
for proj_dir in "$PROJECTS_DIR"/*/; do
proj=$(basename "$proj_dir")
[ -f "$proj_dir/main.lux" ] || continue
skip=false
for s in $PROJ_INTERP_SKIP; do [ "$proj" = "$s" ] && skip=true; done
$skip && continue
step "interpreter (project: $proj)"
if timeout 10 "$LUX" "$proj_dir/main.lux" >/dev/null 2>&1; then ok; else fail; fi
done
# --- JS compilation: examples ---
# Skip files that fail JS compilation (unsupported features)
JS_SKIP="http_api http http_router postgres_demo property_testing json test_lists test_rc_comparison"
for f in "$EXAMPLES_DIR"/*.lux; do
name=$(basename "$f" .lux)
skip=false
for s in $JS_SKIP; do [ "$name" = "$s" ] && skip=true; done
$skip && continue
step "compile JS (examples/$name)"
if "$LUX" compile "$f" --target js -o /tmp/lux_validate.js >/dev/null 2>&1; then ok; else fail; fi
done
# --- JS compilation: examples/standard ---
# Skip: stdlib_demo (uses String.toUpper not in JS backend)
for f in "$EXAMPLES_DIR"/standard/*.lux; do
[ -f "$f" ] || continue
name=$(basename "$f" .lux)
[ "$name" = "stdlib_demo" ] && continue
step "compile JS (standard/$name)"
if "$LUX" compile "$f" --target js -o /tmp/lux_validate.js >/dev/null 2>&1; then ok; else fail; fi
done
# --- JS compilation: examples/showcase ---
# Skip: task_manager (unsupported features)
for f in "$EXAMPLES_DIR"/showcase/*.lux; do
[ -f "$f" ] || continue
name=$(basename "$f" .lux)
[ "$name" = "task_manager" ] && continue
step "compile JS (showcase/$name)"
if "$LUX" compile "$f" --target js -o /tmp/lux_validate.js >/dev/null 2>&1; then ok; else fail; fi
done
# --- JS compilation: projects ---
# Skip: json-parser, rest-api (unsupported features)
JS_PROJ_SKIP="json-parser rest-api"
for proj_dir in "$PROJECTS_DIR"/*/; do
proj=$(basename "$proj_dir")
[ -f "$proj_dir/main.lux" ] || continue
skip=false
for s in $JS_PROJ_SKIP; do [ "$proj" = "$s" ] && skip=true; done
$skip && continue
step "compile JS (project: $proj)"
if "$LUX" compile "$proj_dir/main.lux" --target js -o /tmp/lux_validate.js >/dev/null 2>&1; then ok; else fail; fi
done
# --- C compilation: examples ---
# Only compile examples known to work with C backend
C_EXAMPLES="hello factorial pipelines tailcall jit_test"
for name in $C_EXAMPLES; do
f="$EXAMPLES_DIR/$name.lux"
[ -f "$f" ] || continue
step "compile C (examples/$name)"
if "$LUX" compile "$f" -o /tmp/lux_validate_bin >/dev/null 2>&1; then ok; else fail; fi
done
# --- C compilation: examples/standard ---
C_STD_EXAMPLES="hello_world factorial fizzbuzz primes guessing_game"
for name in $C_STD_EXAMPLES; do
f="$EXAMPLES_DIR/standard/$name.lux"
[ -f "$f" ] || continue
step "compile C (standard/$name)"
if "$LUX" compile "$f" -o /tmp/lux_validate_bin >/dev/null 2>&1; then ok; else fail; fi
done
# --- Cleanup ---
rm -f /tmp/lux_validate.js /tmp/lux_validate_bin
# --- Summary ---
printf "\n${BOLD}═══ Validation Summary ═══${NC}\n"
if [ $FAILED -eq 0 ]; then
printf "${GREEN}All %d checks passed.${NC}\n" "$TOTAL"
else
printf "${RED}%d/%d checks failed.${NC}\n" "$FAILED" "$TOTAL"
exit 1
fi

View File

@@ -221,8 +221,6 @@ pub enum Declaration {
Trait(TraitDecl),
/// Trait implementation: impl Trait for Type { ... }
Impl(ImplDecl),
/// Extern function declaration (FFI): extern fn name(params): ReturnType
ExternFn(ExternFnDecl),
}
/// Function declaration
@@ -430,21 +428,6 @@ pub struct ImplMethod {
pub span: Span,
}
/// Extern function declaration (FFI)
#[derive(Debug, Clone)]
pub struct ExternFnDecl {
pub visibility: Visibility,
/// Documentation comment
pub doc: Option<String>,
pub name: Ident,
pub type_params: Vec<Ident>,
pub params: Vec<Parameter>,
pub return_type: TypeExpr,
/// Optional JS name override: extern fn foo(...): T = "jsFoo"
pub js_name: Option<String>,
pub span: Span,
}
/// Type expressions
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum TypeExpr {
@@ -558,9 +541,7 @@ pub enum Expr {
span: Span,
},
/// Record literal: { name: "Alice", age: 30 }
/// With optional spread: { ...base, name: "Bob" }
Record {
spread: Option<Box<Expr>>,
fields: Vec<(Ident, Expr)>,
span: Span,
},
@@ -640,8 +621,7 @@ pub enum BinaryOp {
And,
Or,
// Other
Pipe, // |>
Concat, // ++
Pipe, // |>
}
impl fmt::Display for BinaryOp {
@@ -661,7 +641,6 @@ impl fmt::Display for BinaryOp {
BinaryOp::And => write!(f, "&&"),
BinaryOp::Or => write!(f, "||"),
BinaryOp::Pipe => write!(f, "|>"),
BinaryOp::Concat => write!(f, "++"),
}
}
}
@@ -714,9 +693,8 @@ pub enum Pattern {
Var(Ident),
/// Literal: 42, "hello", true
Literal(Literal),
/// Constructor: Some(x), None, Ok(v), module.Constructor(x)
/// Constructor: Some(x), None, Ok(v)
Constructor {
module: Option<Ident>,
name: Ident,
fields: Vec<Pattern>,
span: Span,

File diff suppressed because it is too large Load Diff

View File

@@ -69,10 +69,6 @@ pub struct JsBackend {
has_handlers: bool,
/// Variable substitutions for let binding
var_substitutions: HashMap<String, String>,
/// Effects actually used in the program (for tree-shaking runtime)
used_effects: HashSet<String>,
/// Extern function names mapped to their JS names
extern_fns: HashMap<String, String>,
}
impl JsBackend {
@@ -94,8 +90,6 @@ impl JsBackend {
effectful_functions: HashSet::new(),
has_handlers: false,
var_substitutions: HashMap::new(),
used_effects: HashSet::new(),
extern_fns: HashMap::new(),
}
}
@@ -103,6 +97,9 @@ impl JsBackend {
pub fn generate(&mut self, program: &Program) -> Result<String, JsGenError> {
self.output.clear();
// Emit runtime helpers
self.emit_runtime();
// First pass: collect all function names, types, and effects
for decl in &program.declarations {
match decl {
@@ -115,24 +112,10 @@ impl JsBackend {
Declaration::Type(t) => {
self.collect_type(t)?;
}
Declaration::ExternFn(ext) => {
let js_name = ext
.js_name
.clone()
.unwrap_or_else(|| ext.name.name.clone());
self.extern_fns.insert(ext.name.name.clone(), js_name);
self.functions.insert(ext.name.name.clone());
}
_ => {}
}
}
// Collect used effects for tree-shaking
self.collect_used_effects(program);
// Emit runtime helpers (tree-shaken based on used effects)
self.emit_runtime();
// Emit type constructors
for decl in &program.declarations {
if let Declaration::Type(t) = decl {
@@ -180,181 +163,32 @@ impl JsBackend {
Ok(self.output.clone())
}
/// Collect all effects used in the program for runtime tree-shaking
fn collect_used_effects(&mut self, program: &Program) {
for decl in &program.declarations {
match decl {
Declaration::Function(f) => {
for effect in &f.effects {
self.used_effects.insert(effect.name.clone());
}
self.collect_effects_from_expr(&f.body);
}
Declaration::Let(l) => {
self.collect_effects_from_expr(&l.value);
}
Declaration::Handler(h) => {
self.used_effects.insert(h.effect.name.clone());
for imp in &h.implementations {
self.collect_effects_from_expr(&imp.body);
}
}
_ => {}
}
}
}
/// Recursively collect effect names from an expression
fn collect_effects_from_expr(&mut self, expr: &Expr) {
match expr {
Expr::EffectOp { effect, args, .. } => {
self.used_effects.insert(effect.name.clone());
for arg in args {
self.collect_effects_from_expr(arg);
}
}
Expr::Run { expr, handlers, .. } => {
self.collect_effects_from_expr(expr);
for (effect, handler) in handlers {
self.used_effects.insert(effect.name.clone());
self.collect_effects_from_expr(handler);
}
}
Expr::Call { func, args, .. } => {
self.collect_effects_from_expr(func);
for arg in args {
self.collect_effects_from_expr(arg);
}
}
Expr::Lambda { body, effects, .. } => {
for effect in effects {
self.used_effects.insert(effect.name.clone());
}
self.collect_effects_from_expr(body);
}
Expr::Let { value, body, .. } => {
self.collect_effects_from_expr(value);
self.collect_effects_from_expr(body);
}
Expr::If { condition, then_branch, else_branch, .. } => {
self.collect_effects_from_expr(condition);
self.collect_effects_from_expr(then_branch);
self.collect_effects_from_expr(else_branch);
}
Expr::Match { scrutinee, arms, .. } => {
self.collect_effects_from_expr(scrutinee);
for arm in arms {
self.collect_effects_from_expr(&arm.body);
if let Some(guard) = &arm.guard {
self.collect_effects_from_expr(guard);
}
}
}
Expr::Block { statements, result, .. } => {
for stmt in statements {
match stmt {
Statement::Expr(e) => self.collect_effects_from_expr(e),
Statement::Let { value, .. } => self.collect_effects_from_expr(value),
}
}
self.collect_effects_from_expr(result);
}
Expr::BinaryOp { left, right, .. } => {
self.collect_effects_from_expr(left);
self.collect_effects_from_expr(right);
}
Expr::UnaryOp { operand, .. } => {
self.collect_effects_from_expr(operand);
}
Expr::Field { object, .. } => {
self.collect_effects_from_expr(object);
}
Expr::TupleIndex { object, .. } => {
self.collect_effects_from_expr(object);
}
Expr::Record { spread, fields, .. } => {
if let Some(s) = spread {
self.collect_effects_from_expr(s);
}
for (_, expr) in fields {
self.collect_effects_from_expr(expr);
}
}
Expr::Tuple { elements, .. } | Expr::List { elements, .. } => {
for el in elements {
self.collect_effects_from_expr(el);
}
}
Expr::Resume { value, .. } => {
self.collect_effects_from_expr(value);
}
Expr::Literal(_) | Expr::Var(_) => {}
}
}
/// Emit the Lux runtime, tree-shaken based on used effects
/// Emit the minimal Lux runtime
fn emit_runtime(&mut self) {
let uses_console = self.used_effects.contains("Console");
let uses_random = self.used_effects.contains("Random");
let uses_time = self.used_effects.contains("Time");
let uses_http = self.used_effects.contains("Http");
let uses_dom = self.used_effects.contains("Dom");
let uses_html = self.used_effects.contains("Html") || uses_dom;
self.writeln("// Lux Runtime");
self.writeln("const Lux = {");
self.indent += 1;
// Core helpers — always emitted
// Option helpers
self.writeln("Some: (value) => ({ tag: \"Some\", value }),");
self.writeln("None: () => ({ tag: \"None\" }),");
self.writeln("");
// Result helpers
self.writeln("Ok: (value) => ({ tag: \"Ok\", value }),");
self.writeln("Err: (error) => ({ tag: \"Err\", error }),");
self.writeln("");
// List helpers
self.writeln("Cons: (head, tail) => [head, ...tail],");
self.writeln("Nil: () => [],");
self.writeln("");
// Default handlers — only include effects that are used
// Default handlers for effects
self.writeln("defaultHandlers: {");
self.indent += 1;
if uses_console {
self.emit_console_handler();
}
if uses_random {
self.emit_random_handler();
}
if uses_time {
self.emit_time_handler();
}
if uses_http {
self.emit_http_handler();
}
if uses_dom {
self.emit_dom_handler();
}
self.indent -= 1;
self.writeln("},");
// HTML rendering — only if Html or Dom effects are used
if uses_html {
self.emit_html_helpers();
}
// TEA runtime — only if Dom is used
if uses_dom {
self.emit_tea_runtime();
}
self.indent -= 1;
self.writeln("};");
self.writeln("");
}
fn emit_console_handler(&mut self) {
// Console effect
self.writeln("Console: {");
self.indent += 1;
self.writeln("print: (msg) => console.log(msg),");
@@ -373,9 +207,8 @@ impl JsBackend {
self.writeln("readInt: () => parseInt(Lux.defaultHandlers.Console.readLine(), 10)");
self.indent -= 1;
self.writeln("},");
}
fn emit_random_handler(&mut self) {
// Random effect
self.writeln("Random: {");
self.indent += 1;
self.writeln("int: (min, max) => Math.floor(Math.random() * (max - min + 1)) + min,");
@@ -383,18 +216,16 @@ impl JsBackend {
self.writeln("float: () => Math.random()");
self.indent -= 1;
self.writeln("},");
}
fn emit_time_handler(&mut self) {
// Time effect
self.writeln("Time: {");
self.indent += 1;
self.writeln("now: () => Date.now(),");
self.writeln("sleep: (ms) => new Promise(resolve => setTimeout(resolve, ms))");
self.indent -= 1;
self.writeln("},");
}
fn emit_http_handler(&mut self) {
// Http effect (browser/Node compatible)
self.writeln("Http: {");
self.indent += 1;
self.writeln("get: async (url) => {");
@@ -456,9 +287,8 @@ impl JsBackend {
self.writeln("}");
self.indent -= 1;
self.writeln("},");
}
fn emit_dom_handler(&mut self) {
// Dom effect (browser only - stubs for Node.js)
self.writeln("Dom: {");
self.indent += 1;
@@ -486,6 +316,7 @@ impl JsBackend {
self.indent -= 1;
self.writeln("},");
// Element creation
self.writeln("createElement: (tag) => {");
self.indent += 1;
self.writeln("if (typeof document === 'undefined') return null;");
@@ -500,6 +331,7 @@ impl JsBackend {
self.indent -= 1;
self.writeln("},");
// DOM manipulation
self.writeln("appendChild: (parent, child) => {");
self.indent += 1;
self.writeln("if (parent && child) parent.appendChild(child);");
@@ -524,6 +356,7 @@ impl JsBackend {
self.indent -= 1;
self.writeln("},");
// Content
self.writeln("setTextContent: (el, text) => {");
self.indent += 1;
self.writeln("if (el) el.textContent = text;");
@@ -548,6 +381,7 @@ impl JsBackend {
self.indent -= 1;
self.writeln("},");
// Attributes
self.writeln("setAttribute: (el, name, value) => {");
self.indent += 1;
self.writeln("if (el) el.setAttribute(name, value);");
@@ -574,6 +408,7 @@ impl JsBackend {
self.indent -= 1;
self.writeln("},");
// Classes
self.writeln("addClass: (el, className) => {");
self.indent += 1;
self.writeln("if (el) el.classList.add(className);");
@@ -598,6 +433,7 @@ impl JsBackend {
self.indent -= 1;
self.writeln("},");
// Styles
self.writeln("setStyle: (el, property, value) => {");
self.indent += 1;
self.writeln("if (el) el.style[property] = value;");
@@ -610,6 +446,7 @@ impl JsBackend {
self.indent -= 1;
self.writeln("},");
// Form elements
self.writeln("getValue: (el) => {");
self.indent += 1;
self.writeln("return el ? el.value : '';");
@@ -634,6 +471,7 @@ impl JsBackend {
self.indent -= 1;
self.writeln("},");
// Events
self.writeln("addEventListener: (el, event, handler) => {");
self.indent += 1;
self.writeln("if (el) el.addEventListener(event, handler);");
@@ -646,6 +484,7 @@ impl JsBackend {
self.indent -= 1;
self.writeln("},");
// Focus
self.writeln("focus: (el) => {");
self.indent += 1;
self.writeln("if (el && el.focus) el.focus();");
@@ -658,6 +497,7 @@ impl JsBackend {
self.indent -= 1;
self.writeln("},");
// Document
self.writeln("getBody: () => {");
self.indent += 1;
self.writeln("if (typeof document === 'undefined') return null;");
@@ -672,6 +512,7 @@ impl JsBackend {
self.indent -= 1;
self.writeln("},");
// Window
self.writeln("getWindow: () => {");
self.indent += 1;
self.writeln("if (typeof window === 'undefined') return null;");
@@ -704,6 +545,7 @@ impl JsBackend {
self.indent -= 1;
self.writeln("},");
// Scroll
self.writeln("scrollTo: (x, y) => {");
self.indent += 1;
self.writeln("if (typeof window !== 'undefined') window.scrollTo(x, y);");
@@ -716,6 +558,7 @@ impl JsBackend {
self.indent -= 1;
self.writeln("},");
// Dimensions
self.writeln("getBoundingClientRect: (el) => {");
self.indent += 1;
self.writeln("if (!el) return { top: 0, left: 0, width: 0, height: 0, right: 0, bottom: 0 };");
@@ -732,10 +575,12 @@ impl JsBackend {
self.writeln("}");
self.indent -= 1;
self.writeln("},");
}
self.writeln("}");
fn emit_html_helpers(&mut self) {
self.indent -= 1;
self.writeln("},");
// HTML rendering helpers
self.writeln("");
self.writeln("// HTML rendering");
self.writeln("renderHtml: (node) => {");
@@ -837,9 +682,8 @@ impl JsBackend {
self.writeln("return el;");
self.indent -= 1;
self.writeln("},");
}
fn emit_tea_runtime(&mut self) {
// TEA (The Elm Architecture) runtime
self.writeln("");
self.writeln("// The Elm Architecture (TEA) runtime");
self.writeln("app: (config) => {");
@@ -883,6 +727,7 @@ impl JsBackend {
self.indent -= 1;
self.writeln("},");
// Simple app (for string-based views like the counter example)
self.writeln("");
self.writeln("// Simple TEA app (string-based view)");
self.writeln("simpleApp: (config) => {");
@@ -912,6 +757,7 @@ impl JsBackend {
self.indent -= 1;
self.writeln("},");
// Diff and patch (basic implementation for view_deps optimization)
self.writeln("");
self.writeln("// Basic diff - checks if model fields changed");
self.writeln("hasChanged: (oldModel, newModel, ...paths) => {");
@@ -931,7 +777,11 @@ impl JsBackend {
self.writeln("}");
self.writeln("return false;");
self.indent -= 1;
self.writeln("},");
self.writeln("}");
self.indent -= 1;
self.writeln("};");
self.writeln("");
}
/// Collect type information from a type declaration
@@ -1038,8 +888,7 @@ impl JsBackend {
let prev_has_handlers = self.has_handlers;
self.has_handlers = is_effectful;
// Save and clear var substitutions for this function scope
let saved_substitutions = self.var_substitutions.clone();
// Clear var substitutions for this function
self.var_substitutions.clear();
// Emit function body
@@ -1047,7 +896,6 @@ impl JsBackend {
self.writeln(&format!("return {};", body_code));
self.has_handlers = prev_has_handlers;
self.var_substitutions = saved_substitutions;
self.indent -= 1;
self.writeln("}");
@@ -1061,16 +909,13 @@ impl JsBackend {
let val = self.emit_expr(&let_decl.value)?;
let var_name = &let_decl.name.name;
if var_name == "_" {
// Wildcard binding: just execute for side effects
self.writeln(&format!("{};", val));
} else {
self.writeln(&format!("const {} = {};", var_name, val));
// Check if this is a run expression (often results in undefined)
// We still want to execute it for its side effects
self.writeln(&format!("const {} = {};", var_name, val));
// Register the variable for future use
self.var_substitutions
.insert(var_name.clone(), var_name.clone());
}
// Register the variable for future use
self.var_substitutions
.insert(var_name.clone(), var_name.clone());
Ok(())
}
@@ -1109,17 +954,12 @@ impl JsBackend {
let r = self.emit_expr(right)?;
// Check for string concatenation
if matches!(op, BinaryOp::Add | BinaryOp::Concat) {
if matches!(op, BinaryOp::Add) {
if self.is_string_expr(left) || self.is_string_expr(right) {
return Ok(format!("({} + {})", l, r));
}
}
// ++ on lists: use .concat()
if matches!(op, BinaryOp::Concat) {
return Ok(format!("{}.concat({})", l, r));
}
let op_str = match op {
BinaryOp::Add => "+",
BinaryOp::Sub => "-",
@@ -1134,7 +974,6 @@ impl JsBackend {
BinaryOp::Ge => ">=",
BinaryOp::And => "&&",
BinaryOp::Or => "||",
BinaryOp::Concat => unreachable!("handled above"),
BinaryOp::Pipe => {
// Pipe operator: x |> f becomes f(x)
return Ok(format!("{}({})", r, l));
@@ -1195,26 +1034,18 @@ impl JsBackend {
name, value, body, ..
} => {
let val = self.emit_expr(value)?;
let var_name = format!("{}_{}", name.name, self.fresh_name());
if name.name == "_" {
// Wildcard binding: just execute for side effects
self.writeln(&format!("{};", val));
} else {
let var_name = format!("{}_{}", name.name, self.fresh_name());
self.writeln(&format!("const {} = {};", var_name, val));
self.writeln(&format!("const {} = {};", var_name, val));
// Add substitution
self.var_substitutions
.insert(name.name.clone(), var_name.clone());
}
// Add substitution
self.var_substitutions
.insert(name.name.clone(), var_name.clone());
let body_result = self.emit_expr(body)?;
// Remove substitution
if name.name != "_" {
self.var_substitutions.remove(&name.name);
}
self.var_substitutions.remove(&name.name);
Ok(body_result)
}
@@ -1226,31 +1057,6 @@ impl JsBackend {
if module_name.name == "List" {
return self.emit_list_operation(&field.name, args);
}
if module_name.name == "Map" {
return self.emit_map_operation(&field.name, args);
}
}
}
// Int/Float module operations
if let Expr::Field { object, field, .. } = func.as_ref() {
if let Expr::Var(module_name) = object.as_ref() {
if module_name.name == "Int" {
let arg = self.emit_expr(&args[0])?;
match field.name.as_str() {
"toFloat" => return Ok(arg),
"toString" => return Ok(format!("String({})", arg)),
_ => {}
}
}
if module_name.name == "Float" {
let arg = self.emit_expr(&args[0])?;
match field.name.as_str() {
"toInt" => return Ok(format!("Math.trunc({})", arg)),
"toString" => return Ok(format!("String({})", arg)),
_ => {}
}
}
}
}
@@ -1260,10 +1066,6 @@ impl JsBackend {
let arg = self.emit_expr(&args[0])?;
return Ok(format!("String({})", arg));
}
if ident.name == "print" {
let arg = self.emit_expr(&args[0])?;
return Ok(format!("console.log({})", arg));
}
}
let arg_strs: Result<Vec<_>, _> = args.iter().map(|a| self.emit_expr(a)).collect();
@@ -1340,26 +1142,6 @@ impl JsBackend {
return self.emit_math_operation(&operation.name, args);
}
// Special case: Int module operations
if effect.name == "Int" {
let arg = self.emit_expr(&args[0])?;
match operation.name.as_str() {
"toFloat" => return Ok(arg), // JS numbers are already floats
"toString" => return Ok(format!("String({})", arg)),
_ => {}
}
}
// Special case: Float module operations
if effect.name == "Float" {
let arg = self.emit_expr(&args[0])?;
match operation.name.as_str() {
"toInt" => return Ok(format!("Math.trunc({})", arg)),
"toString" => return Ok(format!("String({})", arg)),
_ => {}
}
}
// Special case: Result module operations (not an effect)
if effect.name == "Result" {
return self.emit_result_operation(&operation.name, args);
@@ -1370,11 +1152,6 @@ impl JsBackend {
return self.emit_json_operation(&operation.name, args);
}
// Special case: Map module operations (not an effect)
if effect.name == "Map" {
return self.emit_map_operation(&operation.name, args);
}
// Special case: Html module operations (not an effect)
if effect.name == "Html" {
return self.emit_html_operation(&operation.name, args);
@@ -1420,39 +1197,18 @@ impl JsBackend {
param_names
};
// Save state
// Save handler state
let prev_has_handlers = self.has_handlers;
let saved_substitutions = self.var_substitutions.clone();
self.has_handlers = !effects.is_empty();
// Register lambda params as themselves (override any outer substitutions)
for p in &all_params {
self.var_substitutions.insert(p.clone(), p.clone());
}
// Capture any statements emitted during body evaluation
let output_start = self.output.len();
let prev_indent = self.indent;
self.indent += 1;
let body_code = self.emit_expr(body)?;
self.writeln(&format!("return {};", body_code));
// Extract body statements and restore output
let body_statements = self.output[output_start..].to_string();
self.output.truncate(output_start);
self.indent = prev_indent;
// Restore state
self.has_handlers = prev_has_handlers;
self.var_substitutions = saved_substitutions;
let indent_str = " ".repeat(self.indent);
Ok(format!(
"(function({}) {{\n{}{}}})",
"(function({}) {{ return {}; }})",
all_params.join(", "),
body_statements,
indent_str,
body_code
))
}
@@ -1472,15 +1228,10 @@ impl JsBackend {
}
Statement::Let { name, value, .. } => {
let val = self.emit_expr(value)?;
if name.name == "_" {
self.writeln(&format!("{};", val));
} else {
let var_name =
format!("{}_{}", name.name, self.fresh_name());
self.writeln(&format!("const {} = {};", var_name, val));
self.var_substitutions
.insert(name.name.clone(), var_name.clone());
}
let var_name = format!("{}_{}", name.name, self.fresh_name());
self.writeln(&format!("const {} = {};", var_name, val));
self.var_substitutions
.insert(name.name.clone(), var_name.clone());
}
}
}
@@ -1489,19 +1240,15 @@ impl JsBackend {
self.emit_expr(result)
}
Expr::Record {
spread, fields, ..
} => {
let mut parts = Vec::new();
if let Some(spread_expr) = spread {
let spread_code = self.emit_expr(spread_expr)?;
parts.push(format!("...{}", spread_code));
}
for (name, expr) in fields {
let val = self.emit_expr(expr)?;
parts.push(format!("{}: {}", name.name, val));
}
Ok(format!("{{ {} }}", parts.join(", ")))
Expr::Record { fields, .. } => {
let field_strs: Result<Vec<_>, _> = fields
.iter()
.map(|(name, expr)| {
let val = self.emit_expr(expr)?;
Ok(format!("{}: {}", name.name, val))
})
.collect();
Ok(format!("{{ {} }}", field_strs?.join(", ")))
}
Expr::Tuple { elements, .. } => {
@@ -1823,18 +1570,6 @@ impl JsBackend {
end, start, start
))
}
"sort" => {
let list = self.emit_expr(&args[0])?;
Ok(format!(
"[...{}].sort((a, b) => a < b ? -1 : a > b ? 1 : 0)",
list
))
}
"sortBy" => {
let list = self.emit_expr(&args[0])?;
let func = self.emit_expr(&args[1])?;
Ok(format!("[...{}].sort({})", list, func))
}
_ => Err(JsGenError {
message: format!("Unknown List operation: {}", operation),
span: None,
@@ -2332,86 +2067,6 @@ impl JsBackend {
}
}
/// Emit Map module operations using JS Map
fn emit_map_operation(
&mut self,
operation: &str,
args: &[Expr],
) -> Result<String, JsGenError> {
match operation {
"new" => Ok("new Map()".to_string()),
"set" => {
let map = self.emit_expr(&args[0])?;
let key = self.emit_expr(&args[1])?;
let val = self.emit_expr(&args[2])?;
Ok(format!(
"(function() {{ var m = new Map({}); m.set({}, {}); return m; }})()",
map, key, val
))
}
"get" => {
let map = self.emit_expr(&args[0])?;
let key = self.emit_expr(&args[1])?;
Ok(format!(
"({0}.has({1}) ? Lux.Some({0}.get({1})) : Lux.None())",
map, key
))
}
"contains" => {
let map = self.emit_expr(&args[0])?;
let key = self.emit_expr(&args[1])?;
Ok(format!("{}.has({})", map, key))
}
"remove" => {
let map = self.emit_expr(&args[0])?;
let key = self.emit_expr(&args[1])?;
Ok(format!(
"(function() {{ var m = new Map({}); m.delete({}); return m; }})()",
map, key
))
}
"keys" => {
let map = self.emit_expr(&args[0])?;
Ok(format!("Array.from({}.keys()).sort()", map))
}
"values" => {
let map = self.emit_expr(&args[0])?;
Ok(format!(
"Array.from({0}.entries()).sort(function(a,b) {{ return a[0] < b[0] ? -1 : a[0] > b[0] ? 1 : 0; }}).map(function(e) {{ return e[1]; }})",
map
))
}
"size" => {
let map = self.emit_expr(&args[0])?;
Ok(format!("{}.size", map))
}
"isEmpty" => {
let map = self.emit_expr(&args[0])?;
Ok(format!("({}.size === 0)", map))
}
"fromList" => {
let list = self.emit_expr(&args[0])?;
Ok(format!("new Map({}.map(function(t) {{ return [t[0], t[1]]; }}))", list))
}
"toList" => {
let map = self.emit_expr(&args[0])?;
Ok(format!(
"Array.from({}.entries()).sort(function(a,b) {{ return a[0] < b[0] ? -1 : a[0] > b[0] ? 1 : 0; }})",
map
))
}
"merge" => {
let m1 = self.emit_expr(&args[0])?;
let m2 = self.emit_expr(&args[1])?;
Ok(format!("new Map([...{}, ...{}])", m1, m2))
}
_ => Err(JsGenError {
message: format!("Unknown Map operation: {}", operation),
span: None,
}),
}
}
/// Emit Html module operations for type-safe HTML construction
fn emit_html_operation(
&mut self,
@@ -2683,7 +2338,7 @@ impl JsBackend {
}
}
Expr::BinaryOp { op, left, right, .. } => {
matches!(op, BinaryOp::Add | BinaryOp::Concat)
matches!(op, BinaryOp::Add)
&& (self.is_string_expr(left) || self.is_string_expr(right))
}
_ => false,
@@ -2734,10 +2389,6 @@ impl JsBackend {
/// Mangle a Lux name to a valid JavaScript name
fn mangle_name(&self, name: &str) -> String {
// Extern functions use their JS name directly (no mangling)
if let Some(js_name) = self.extern_fns.get(name) {
return js_name.clone();
}
format!("{}_lux", name)
}
@@ -4086,7 +3737,7 @@ line3"
#[test]
fn test_js_runtime_generated() {
// Test that the Lux runtime core is always generated
// Test that the Lux runtime is properly generated
use crate::parser::Parser;
let source = r#"
@@ -4097,51 +3748,21 @@ line3"
let mut backend = JsBackend::new();
let js_code = backend.generate(&program).expect("Should generate");
// Core runtime is always present
// Check that Lux runtime includes key functions
assert!(js_code.contains("const Lux = {"), "Lux object should be defined");
assert!(js_code.contains("Some:"), "Option Some should be defined");
assert!(js_code.contains("None:"), "Option None should be defined");
// Console-only program should NOT include Dom, Html, or TEA sections
assert!(!js_code.contains("Dom:"), "Dom handler should not be in Console-only program");
assert!(!js_code.contains("renderHtml:"), "renderHtml should not be in Console-only program");
assert!(!js_code.contains("app:"), "TEA app should not be in Console-only program");
assert!(!js_code.contains("Http:"), "Http should not be in Console-only program");
// Console should be present
assert!(js_code.contains("Console:"), "Console handler should exist");
}
#[test]
fn test_js_runtime_tree_shaking_all_effects() {
// Test that all effects are included when all are used
use crate::parser::Parser;
let source = r#"
fn main(): Unit with {Console, Dom} = {
Console.print("Hello")
let _ = Dom.getElementById("app")
()
}
"#;
let program = Parser::parse_source(source).expect("Should parse");
let mut backend = JsBackend::new();
let js_code = backend.generate(&program).expect("Should generate");
assert!(js_code.contains("Console:"), "Console handler should exist");
assert!(js_code.contains("Dom:"), "Dom handler should exist");
assert!(js_code.contains("renderHtml:"), "renderHtml should be defined when Dom is used");
assert!(js_code.contains("renderToDom:"), "renderToDom should be defined when Dom is used");
assert!(js_code.contains("escapeHtml:"), "escapeHtml should be defined when Dom is used");
assert!(js_code.contains("app:"), "TEA app should be defined when Dom is used");
assert!(js_code.contains("simpleApp:"), "simpleApp should be defined when Dom is used");
assert!(js_code.contains("hasChanged:"), "hasChanged should be defined when Dom is used");
assert!(js_code.contains("renderHtml:"), "renderHtml should be defined");
assert!(js_code.contains("renderToDom:"), "renderToDom should be defined");
assert!(js_code.contains("escapeHtml:"), "escapeHtml should be defined");
assert!(js_code.contains("app:"), "TEA app should be defined");
assert!(js_code.contains("simpleApp:"), "simpleApp should be defined");
assert!(js_code.contains("hasChanged:"), "hasChanged should be defined");
}
#[test]
fn test_js_runtime_default_handlers() {
// Test that only used effect handlers are generated
// Test that default handlers are properly generated
use crate::parser::Parser;
let source = r#"
@@ -4152,12 +3773,12 @@ line3"
let mut backend = JsBackend::new();
let js_code = backend.generate(&program).expect("Should generate");
// Only Console should be present
// Check that default handlers include all effects
assert!(js_code.contains("Console:"), "Console handler should exist");
assert!(!js_code.contains("Random:"), "Random handler should not exist in Console-only program");
assert!(!js_code.contains("Time:"), "Time handler should not exist in Console-only program");
assert!(!js_code.contains("Http:"), "Http handler should not exist in Console-only program");
assert!(!js_code.contains("Dom:"), "Dom handler should not exist in Console-only program");
assert!(js_code.contains("Random:"), "Random handler should exist");
assert!(js_code.contains("Time:"), "Time handler should exist");
assert!(js_code.contains("Http:"), "Http handler should exist");
assert!(js_code.contains("Dom:"), "Dom handler should exist");
}
#[test]

View File

@@ -333,13 +333,11 @@ mod tests {
fn test_option_exhaustive() {
let patterns = vec![
Pattern::Constructor {
module: None,
name: make_ident("None"),
fields: vec![],
span: span(),
},
Pattern::Constructor {
module: None,
name: make_ident("Some"),
fields: vec![Pattern::Wildcard(span())],
span: span(),
@@ -354,7 +352,6 @@ mod tests {
#[test]
fn test_option_missing_none() {
let patterns = vec![Pattern::Constructor {
module: None,
name: make_ident("Some"),
fields: vec![Pattern::Wildcard(span())],
span: span(),
@@ -394,13 +391,11 @@ mod tests {
fn test_result_exhaustive() {
let patterns = vec![
Pattern::Constructor {
module: None,
name: make_ident("Ok"),
fields: vec![Pattern::Wildcard(span())],
span: span(),
},
Pattern::Constructor {
module: None,
name: make_ident("Err"),
fields: vec![Pattern::Wildcard(span())],
span: span(),

View File

@@ -3,9 +3,9 @@
//! Formats Lux source code according to standard style guidelines.
use crate::ast::{
BehavioralProperty, BinaryOp, Declaration, EffectDecl, ExternFnDecl, Expr, FunctionDecl,
HandlerDecl, ImplDecl, ImplMethod, LetDecl, Literal, LiteralKind, Pattern, Program, Statement,
TraitDecl, TypeDecl, TypeDef, TypeExpr, UnaryOp, VariantFields, Visibility,
BehavioralProperty, BinaryOp, Declaration, EffectDecl, Expr, FunctionDecl, HandlerDecl,
ImplDecl, ImplMethod, LetDecl, Literal, LiteralKind, Pattern, Program, Statement, TraitDecl,
TypeDecl, TypeDef, TypeExpr, UnaryOp, VariantFields,
};
use crate::lexer::Lexer;
use crate::parser::Parser;
@@ -103,55 +103,9 @@ impl Formatter {
Declaration::Handler(h) => self.format_handler(h),
Declaration::Trait(t) => self.format_trait(t),
Declaration::Impl(i) => self.format_impl(i),
Declaration::ExternFn(e) => self.format_extern_fn(e),
}
}
fn format_extern_fn(&mut self, ext: &ExternFnDecl) {
let indent = self.indent();
self.write(&indent);
if ext.visibility == Visibility::Public {
self.write("pub ");
}
self.write("extern fn ");
self.write(&ext.name.name);
// Type parameters
if !ext.type_params.is_empty() {
self.write("<");
self.write(
&ext.type_params
.iter()
.map(|p| p.name.clone())
.collect::<Vec<_>>()
.join(", "),
);
self.write(">");
}
// Parameters
self.write("(");
let params: Vec<String> = ext
.params
.iter()
.map(|p| format!("{}: {}", p.name.name, self.format_type_expr(&p.typ)))
.collect();
self.write(&params.join(", "));
self.write("): ");
// Return type
self.write(&self.format_type_expr(&ext.return_type));
// Optional JS name
if let Some(js_name) = &ext.js_name {
self.write(&format!(" = \"{}\"", js_name));
}
self.newline();
}
fn format_function(&mut self, func: &FunctionDecl) {
let indent = self.indent();
self.write(&indent);
@@ -734,17 +688,15 @@ impl Formatter {
.join(", ")
)
}
Expr::Record {
spread, fields, ..
} => {
let mut parts = Vec::new();
if let Some(spread_expr) = spread {
parts.push(format!("...{}", self.format_expr(spread_expr)));
}
for (name, val) in fields {
parts.push(format!("{}: {}", name.name, self.format_expr(val)));
}
format!("{{ {} }}", parts.join(", "))
Expr::Record { fields, .. } => {
format!(
"{{ {} }}",
fields
.iter()
.map(|(name, val)| format!("{}: {}", name.name, self.format_expr(val)))
.collect::<Vec<_>>()
.join(", ")
)
}
Expr::EffectOp { effect, operation, args, .. } => {
format!(
@@ -779,30 +731,7 @@ impl Formatter {
match &lit.kind {
LiteralKind::Int(n) => n.to_string(),
LiteralKind::Float(f) => format!("{}", f),
LiteralKind::String(s) => {
if s.contains('\n') {
// Use triple-quoted multiline string
let tab = " ".repeat(self.config.indent_size);
let base_indent = tab.repeat(self.indent_level);
let content_indent = tab.repeat(self.indent_level + 1);
let lines: Vec<&str> = s.split('\n').collect();
let mut result = String::from("\"\"\"\n");
for line in &lines {
if line.is_empty() {
result.push('\n');
} else {
result.push_str(&content_indent);
result.push_str(&line.replace('{', "\\{").replace('}', "\\}"));
result.push('\n');
}
}
result.push_str(&base_indent);
result.push_str("\"\"\"");
result
} else {
format!("\"{}\"", s.replace('\\', "\\\\").replace('"', "\\\"").replace('{', "\\{").replace('}', "\\}"))
}
},
LiteralKind::String(s) => format!("\"{}\"", s.replace('\\', "\\\\").replace('"', "\\\"").replace('{', "\\{").replace('}', "\\}")),
LiteralKind::Char(c) => format!("'{}'", c),
LiteralKind::Bool(b) => b.to_string(),
LiteralKind::Unit => "()".to_string(),
@@ -824,7 +753,6 @@ impl Formatter {
BinaryOp::Ge => ">=",
BinaryOp::And => "&&",
BinaryOp::Or => "||",
BinaryOp::Concat => "++",
BinaryOp::Pipe => "|>",
}
}
@@ -841,22 +769,12 @@ impl Formatter {
Pattern::Wildcard(_) => "_".to_string(),
Pattern::Var(ident) => ident.name.clone(),
Pattern::Literal(lit) => self.format_literal(lit),
Pattern::Constructor {
module,
name,
fields,
..
} => {
let prefix = match module {
Some(m) => format!("{}.", m.name),
None => String::new(),
};
Pattern::Constructor { name, fields, .. } => {
if fields.is_empty() {
format!("{}{}", prefix, name.name)
name.name.clone()
} else {
format!(
"{}{}({})",
prefix,
"{}({})",
name.name,
fields
.iter()

View File

@@ -28,8 +28,6 @@ pub enum BuiltinFn {
ListGet,
ListRange,
ListForEach,
ListSort,
ListSortBy,
// String operations
StringSplit,
@@ -76,21 +74,14 @@ pub enum BuiltinFn {
MathFloor,
MathCeil,
MathRound,
MathSin,
MathCos,
MathAtan2,
// Additional List operations
ListIsEmpty,
ListFind,
ListFindIndex,
ListAny,
ListAll,
ListTake,
ListDrop,
ListZip,
ListFlatten,
ListContains,
// Additional String operations
StringStartsWith,
@@ -106,9 +97,7 @@ pub enum BuiltinFn {
// Int/Float operations
IntToString,
IntToFloat,
FloatToString,
FloatToInt,
// JSON operations
JsonParse,
@@ -130,20 +119,6 @@ pub enum BuiltinFn {
JsonString,
JsonArray,
JsonObject,
// Map operations
MapNew,
MapSet,
MapGet,
MapContains,
MapRemove,
MapKeys,
MapValues,
MapSize,
MapIsEmpty,
MapFromList,
MapToList,
MapMerge,
}
/// Runtime value
@@ -158,7 +133,6 @@ pub enum Value {
List(Vec<Value>),
Tuple(Vec<Value>),
Record(HashMap<String, Value>),
Map(HashMap<String, Value>),
Function(Rc<Closure>),
Handler(Rc<HandlerValue>),
/// Built-in function
@@ -176,11 +150,6 @@ pub enum Value {
},
/// JSON value (for JSON parsing/manipulation)
Json(serde_json::Value),
/// Extern function (FFI — only callable from JS backend)
ExternFn {
name: String,
arity: usize,
},
}
impl Value {
@@ -195,14 +164,12 @@ impl Value {
Value::List(_) => "List",
Value::Tuple(_) => "Tuple",
Value::Record(_) => "Record",
Value::Map(_) => "Map",
Value::Function(_) => "Function",
Value::Handler(_) => "Handler",
Value::Builtin(_) => "Function",
Value::Constructor { .. } => "Constructor",
Value::Versioned { .. } => "Versioned",
Value::Json(_) => "Json",
Value::ExternFn { .. } => "ExternFn",
}
}
@@ -245,11 +212,6 @@ impl Value {
ys.get(k).map(|yv| Value::values_equal(v, yv)).unwrap_or(false)
})
}
(Value::Map(xs), Value::Map(ys)) => {
xs.len() == ys.len() && xs.iter().all(|(k, v)| {
ys.get(k).map(|yv| Value::values_equal(v, yv)).unwrap_or(false)
})
}
(Value::Constructor { name: n1, fields: f1 }, Value::Constructor { name: n2, fields: f2 }) => {
n1 == n2 && f1.len() == f2.len() && f1.iter().zip(f2.iter()).all(|(x, y)| Value::values_equal(x, y))
}
@@ -320,16 +282,6 @@ impl TryFromValue for Vec<Value> {
}
}
impl TryFromValue for HashMap<String, Value> {
const TYPE_NAME: &'static str = "Map";
fn try_from_value(value: &Value) -> Option<Self> {
match value {
Value::Map(m) => Some(m.clone()),
_ => None,
}
}
}
impl TryFromValue for Value {
const TYPE_NAME: &'static str = "any";
fn try_from_value(value: &Value) -> Option<Self> {
@@ -376,18 +328,6 @@ impl fmt::Display for Value {
}
write!(f, " }}")
}
Value::Map(entries) => {
write!(f, "Map {{")?;
let mut sorted: Vec<_> = entries.iter().collect();
sorted.sort_by_key(|(k, _)| (*k).clone());
for (i, (key, value)) in sorted.iter().enumerate() {
if i > 0 {
write!(f, ", ")?;
}
write!(f, "\"{}\": {}", key, value)?;
}
write!(f, "}}")
}
Value::Function(_) => write!(f, "<function>"),
Value::Builtin(b) => write!(f, "<builtin:{:?}>", b),
Value::Handler(_) => write!(f, "<handler>"),
@@ -413,7 +353,6 @@ impl fmt::Display for Value {
write!(f, "{} @v{}", value, version)
}
Value::Json(json) => write!(f, "{}", json),
Value::ExternFn { name, .. } => write!(f, "<extern fn {}>", name),
}
}
}
@@ -985,23 +924,14 @@ impl Interpreter {
Value::Builtin(BuiltinFn::ListIsEmpty),
),
("find".to_string(), Value::Builtin(BuiltinFn::ListFind)),
("findIndex".to_string(), Value::Builtin(BuiltinFn::ListFindIndex)),
("any".to_string(), Value::Builtin(BuiltinFn::ListAny)),
("all".to_string(), Value::Builtin(BuiltinFn::ListAll)),
("take".to_string(), Value::Builtin(BuiltinFn::ListTake)),
("drop".to_string(), Value::Builtin(BuiltinFn::ListDrop)),
("zip".to_string(), Value::Builtin(BuiltinFn::ListZip)),
("flatten".to_string(), Value::Builtin(BuiltinFn::ListFlatten)),
("contains".to_string(), Value::Builtin(BuiltinFn::ListContains)),
(
"forEach".to_string(),
Value::Builtin(BuiltinFn::ListForEach),
),
("sort".to_string(), Value::Builtin(BuiltinFn::ListSort)),
(
"sortBy".to_string(),
Value::Builtin(BuiltinFn::ListSortBy),
),
]));
env.define("List", list_module);
@@ -1142,23 +1072,18 @@ impl Interpreter {
("floor".to_string(), Value::Builtin(BuiltinFn::MathFloor)),
("ceil".to_string(), Value::Builtin(BuiltinFn::MathCeil)),
("round".to_string(), Value::Builtin(BuiltinFn::MathRound)),
("sin".to_string(), Value::Builtin(BuiltinFn::MathSin)),
("cos".to_string(), Value::Builtin(BuiltinFn::MathCos)),
("atan2".to_string(), Value::Builtin(BuiltinFn::MathAtan2)),
]));
env.define("Math", math_module);
// Int module
let int_module = Value::Record(HashMap::from([
("toString".to_string(), Value::Builtin(BuiltinFn::IntToString)),
("toFloat".to_string(), Value::Builtin(BuiltinFn::IntToFloat)),
]));
env.define("Int", int_module);
// Float module
let float_module = Value::Record(HashMap::from([
("toString".to_string(), Value::Builtin(BuiltinFn::FloatToString)),
("toInt".to_string(), Value::Builtin(BuiltinFn::FloatToInt)),
]));
env.define("Float", float_module);
@@ -1185,72 +1110,16 @@ impl Interpreter {
("object".to_string(), Value::Builtin(BuiltinFn::JsonObject)),
]));
env.define("Json", json_module);
// Map module
let map_module = Value::Record(HashMap::from([
("new".to_string(), Value::Builtin(BuiltinFn::MapNew)),
("set".to_string(), Value::Builtin(BuiltinFn::MapSet)),
("get".to_string(), Value::Builtin(BuiltinFn::MapGet)),
("contains".to_string(), Value::Builtin(BuiltinFn::MapContains)),
("remove".to_string(), Value::Builtin(BuiltinFn::MapRemove)),
("keys".to_string(), Value::Builtin(BuiltinFn::MapKeys)),
("values".to_string(), Value::Builtin(BuiltinFn::MapValues)),
("size".to_string(), Value::Builtin(BuiltinFn::MapSize)),
("isEmpty".to_string(), Value::Builtin(BuiltinFn::MapIsEmpty)),
("fromList".to_string(), Value::Builtin(BuiltinFn::MapFromList)),
("toList".to_string(), Value::Builtin(BuiltinFn::MapToList)),
("merge".to_string(), Value::Builtin(BuiltinFn::MapMerge)),
]));
env.define("Map", map_module);
}
/// Execute a program
pub fn run(&mut self, program: &Program) -> Result<Value, RuntimeError> {
let mut last_value = Value::Unit;
let mut has_main_let = false;
for decl in &program.declarations {
// Track if there's a top-level `let main = ...`
if let Declaration::Let(let_decl) = decl {
if let_decl.name.name == "main" {
has_main_let = true;
}
}
last_value = self.eval_declaration(decl)?;
}
// Auto-invoke main if it was defined as a let binding with a function value
if has_main_let {
if let Some(main_val) = self.global_env.get("main") {
if let Value::Function(ref closure) = main_val {
if closure.params.is_empty() {
let span = Span { start: 0, end: 0 };
let mut result = self.eval_call(main_val.clone(), vec![], span)?;
// Trampoline loop
loop {
match result {
EvalResult::Value(v) => {
last_value = v;
break;
}
EvalResult::Effect(req) => {
last_value = self.handle_effect(req)?;
break;
}
EvalResult::TailCall { func, args, span } => {
result = self.eval_call(func, args, span)?;
}
EvalResult::Resume(v) => {
last_value = v;
break;
}
}
}
}
}
}
}
Ok(last_value)
}
@@ -1412,25 +1281,6 @@ impl Interpreter {
Ok(Value::Unit)
}
Declaration::ExternFn(ext) => {
// Register a placeholder that errors at runtime
let name = ext.name.name.clone();
let arity = ext.params.len();
// Create a closure that produces a clear error
let closure = Closure {
params: ext.params.iter().map(|p| p.name.name.clone()).collect(),
body: Expr::Literal(crate::ast::Literal {
kind: crate::ast::LiteralKind::Unit,
span: ext.span,
}),
env: self.global_env.clone(),
};
// We store an ExternFn marker value
self.global_env
.define(&name, Value::ExternFn { name: name.clone(), arity });
Ok(Value::Unit)
}
Declaration::Effect(_) | Declaration::Trait(_) | Declaration::Impl(_) => {
// These are compile-time only
Ok(Value::Unit)
@@ -1675,28 +1525,8 @@ impl Interpreter {
self.eval_expr_tail(result, &block_env, tail)
}
Expr::Record {
spread, fields, ..
} => {
Expr::Record { fields, .. } => {
let mut record = HashMap::new();
// If there's a spread, evaluate it and start with its fields
if let Some(spread_expr) = spread {
let spread_val = self.eval_expr(spread_expr, env)?;
if let Value::Record(spread_fields) = spread_val {
record = spread_fields;
} else {
return Err(RuntimeError {
message: format!(
"Spread expression must evaluate to a record, got {}",
spread_val.type_name()
),
span: Some(expr.span()),
});
}
}
// Override with explicit fields
for (name, expr) in fields {
let val = self.eval_expr(expr, env)?;
record.insert(name.name.clone(), val);
@@ -1769,18 +1599,6 @@ impl Interpreter {
span: Some(span),
}),
},
BinaryOp::Concat => match (left, right) {
(Value::String(a), Value::String(b)) => Ok(Value::String(a + &b)),
(Value::List(a), Value::List(b)) => {
let mut result = a;
result.extend(b);
Ok(Value::List(result))
}
(l, r) => Err(RuntimeError {
message: format!("Cannot concatenate {} and {}", l.type_name(), r.type_name()),
span: Some(span),
}),
},
BinaryOp::Sub => match (left, right) {
(Value::Int(a), Value::Int(b)) => Ok(Value::Int(a - b)),
(Value::Float(a), Value::Float(b)) => Ok(Value::Float(a - b)),
@@ -1950,13 +1768,6 @@ impl Interpreter {
}))
}
Value::Builtin(builtin) => self.eval_builtin(builtin, args, span),
Value::ExternFn { name, .. } => Err(RuntimeError {
message: format!(
"Extern function '{}' can only be called when compiled to JavaScript (use `lux build --target js`)",
name
),
span: Some(span),
}),
v => Err(RuntimeError {
message: format!("Cannot call {}", v.type_name()),
span: Some(span),
@@ -2476,26 +2287,6 @@ impl Interpreter {
}
}
BuiltinFn::IntToFloat => {
if args.len() != 1 {
return Err(err("Int.toFloat requires 1 argument"));
}
match &args[0] {
Value::Int(n) => Ok(EvalResult::Value(Value::Float(*n as f64))),
v => Err(err(&format!("Int.toFloat expects Int, got {}", v.type_name()))),
}
}
BuiltinFn::FloatToInt => {
if args.len() != 1 {
return Err(err("Float.toInt requires 1 argument"));
}
match &args[0] {
Value::Float(f) => Ok(EvalResult::Value(Value::Int(*f as i64))),
v => Err(err(&format!("Float.toInt expects Float, got {}", v.type_name()))),
}
}
BuiltinFn::TypeOf => {
if args.len() != 1 {
return Err(err("typeOf requires 1 argument"));
@@ -2672,45 +2463,6 @@ impl Interpreter {
}
}
BuiltinFn::MathSin => {
if args.len() != 1 {
return Err(err("Math.sin requires 1 argument"));
}
match &args[0] {
Value::Float(n) => Ok(EvalResult::Value(Value::Float(n.sin()))),
Value::Int(n) => Ok(EvalResult::Value(Value::Float((*n as f64).sin()))),
v => Err(err(&format!("Math.sin expects number, got {}", v.type_name()))),
}
}
BuiltinFn::MathCos => {
if args.len() != 1 {
return Err(err("Math.cos requires 1 argument"));
}
match &args[0] {
Value::Float(n) => Ok(EvalResult::Value(Value::Float(n.cos()))),
Value::Int(n) => Ok(EvalResult::Value(Value::Float((*n as f64).cos()))),
v => Err(err(&format!("Math.cos expects number, got {}", v.type_name()))),
}
}
BuiltinFn::MathAtan2 => {
if args.len() != 2 {
return Err(err("Math.atan2 requires 2 arguments: y, x"));
}
let y = match &args[0] {
Value::Float(n) => *n,
Value::Int(n) => *n as f64,
v => return Err(err(&format!("Math.atan2 expects number, got {}", v.type_name()))),
};
let x = match &args[1] {
Value::Float(n) => *n,
Value::Int(n) => *n as f64,
v => return Err(err(&format!("Math.atan2 expects number, got {}", v.type_name()))),
};
Ok(EvalResult::Value(Value::Float(y.atan2(x))))
}
// Additional List operations
BuiltinFn::ListIsEmpty => {
let list = Self::expect_arg_1::<Vec<Value>>(&args, "List.isEmpty", span)?;
@@ -2764,55 +2516,6 @@ impl Interpreter {
Ok(EvalResult::Value(Value::Bool(true)))
}
BuiltinFn::ListFindIndex => {
let (list, func) = Self::expect_args_2::<Vec<Value>, Value>(&args, "List.findIndex", span)?;
for (i, item) in list.iter().enumerate() {
let v = self.eval_call_to_value(func.clone(), vec![item.clone()], span)?;
match v {
Value::Bool(true) => {
return Ok(EvalResult::Value(Value::Constructor {
name: "Some".to_string(),
fields: vec![Value::Int(i as i64)],
}));
}
Value::Bool(false) => {}
_ => return Err(err("List.findIndex predicate must return Bool")),
}
}
Ok(EvalResult::Value(Value::Constructor {
name: "None".to_string(),
fields: vec![],
}))
}
BuiltinFn::ListZip => {
let (list1, list2) = Self::expect_args_2::<Vec<Value>, Vec<Value>>(&args, "List.zip", span)?;
let result: Vec<Value> = list1
.into_iter()
.zip(list2.into_iter())
.map(|(a, b)| Value::Tuple(vec![a, b]))
.collect();
Ok(EvalResult::Value(Value::List(result)))
}
BuiltinFn::ListFlatten => {
let list = Self::expect_arg_1::<Vec<Value>>(&args, "List.flatten", span)?;
let mut result = Vec::new();
for item in list {
match item {
Value::List(inner) => result.extend(inner),
other => result.push(other),
}
}
Ok(EvalResult::Value(Value::List(result)))
}
BuiltinFn::ListContains => {
let (list, target) = Self::expect_args_2::<Vec<Value>, Value>(&args, "List.contains", span)?;
let found = list.iter().any(|item| Value::values_equal(item, &target));
Ok(EvalResult::Value(Value::Bool(found)))
}
BuiltinFn::ListTake => {
let (list, n) = Self::expect_args_2::<Vec<Value>, i64>(&args, "List.take", span)?;
let n = n.max(0) as usize;
@@ -2839,67 +2542,6 @@ impl Interpreter {
Ok(EvalResult::Value(Value::Unit))
}
BuiltinFn::ListSort => {
// List.sort(list) - sort using natural ordering (Int, Float, String, Bool)
let mut list =
Self::expect_arg_1::<Vec<Value>>(&args, "List.sort", span)?;
list.sort_by(|a, b| Self::compare_values(a, b));
Ok(EvalResult::Value(Value::List(list)))
}
BuiltinFn::ListSortBy => {
// List.sortBy(list, fn(a, b) => Int) - sort with custom comparator
// Comparator returns negative (a < b), 0 (a == b), or positive (a > b)
let (list, func) =
Self::expect_args_2::<Vec<Value>, Value>(&args, "List.sortBy", span)?;
let mut indexed: Vec<(usize, Value)> =
list.into_iter().enumerate().collect();
let mut err: Option<RuntimeError> = None;
let func_ref = &func;
let self_ptr = self as *mut Self;
indexed.sort_by(|a, b| {
if err.is_some() {
return std::cmp::Ordering::Equal;
}
// Safety: we're in a single-threaded context and the closure
// needs mutable access to call eval_call_to_value
let interp = unsafe { &mut *self_ptr };
match interp.eval_call_to_value(
func_ref.clone(),
vec![a.1.clone(), b.1.clone()],
span,
) {
Ok(Value::Int(n)) => {
if n < 0 {
std::cmp::Ordering::Less
} else if n > 0 {
std::cmp::Ordering::Greater
} else {
std::cmp::Ordering::Equal
}
}
Ok(_) => {
err = Some(RuntimeError {
message: "List.sortBy comparator must return Int"
.to_string(),
span: Some(span),
});
std::cmp::Ordering::Equal
}
Err(e) => {
err = Some(e);
std::cmp::Ordering::Equal
}
}
});
if let Some(e) = err {
return Err(e);
}
let result: Vec<Value> =
indexed.into_iter().map(|(_, v)| v).collect();
Ok(EvalResult::Value(Value::List(result)))
}
// Additional String operations
BuiltinFn::StringStartsWith => {
let (s, prefix) = Self::expect_args_2::<String, String>(&args, "String.startsWith", span)?;
@@ -3310,128 +2952,6 @@ impl Interpreter {
}
Ok(EvalResult::Value(Value::Json(serde_json::Value::Object(map))))
}
// Map operations
BuiltinFn::MapNew => {
Ok(EvalResult::Value(Value::Map(HashMap::new())))
}
BuiltinFn::MapSet => {
if args.len() != 3 {
return Err(err("Map.set requires 3 arguments: map, key, value"));
}
let mut map = match &args[0] {
Value::Map(m) => m.clone(),
v => return Err(err(&format!("Map.set expects Map as first argument, got {}", v.type_name()))),
};
let key = match &args[1] {
Value::String(s) => s.clone(),
v => return Err(err(&format!("Map.set expects String key, got {}", v.type_name()))),
};
map.insert(key, args[2].clone());
Ok(EvalResult::Value(Value::Map(map)))
}
BuiltinFn::MapGet => {
let (map, key) = Self::expect_args_2::<HashMap<String, Value>, String>(&args, "Map.get", span)?;
match map.get(&key) {
Some(v) => Ok(EvalResult::Value(Value::Constructor {
name: "Some".to_string(),
fields: vec![v.clone()],
})),
None => Ok(EvalResult::Value(Value::Constructor {
name: "None".to_string(),
fields: vec![],
})),
}
}
BuiltinFn::MapContains => {
let (map, key) = Self::expect_args_2::<HashMap<String, Value>, String>(&args, "Map.contains", span)?;
Ok(EvalResult::Value(Value::Bool(map.contains_key(&key))))
}
BuiltinFn::MapRemove => {
let (mut map, key) = Self::expect_args_2::<HashMap<String, Value>, String>(&args, "Map.remove", span)?;
map.remove(&key);
Ok(EvalResult::Value(Value::Map(map)))
}
BuiltinFn::MapKeys => {
let map = Self::expect_arg_1::<HashMap<String, Value>>(&args, "Map.keys", span)?;
let mut keys: Vec<String> = map.keys().cloned().collect();
keys.sort();
Ok(EvalResult::Value(Value::List(
keys.into_iter().map(Value::String).collect(),
)))
}
BuiltinFn::MapValues => {
let map = Self::expect_arg_1::<HashMap<String, Value>>(&args, "Map.values", span)?;
let mut entries: Vec<(String, Value)> = map.into_iter().collect();
entries.sort_by(|(a, _), (b, _)| a.cmp(b));
Ok(EvalResult::Value(Value::List(
entries.into_iter().map(|(_, v)| v).collect(),
)))
}
BuiltinFn::MapSize => {
let map = Self::expect_arg_1::<HashMap<String, Value>>(&args, "Map.size", span)?;
Ok(EvalResult::Value(Value::Int(map.len() as i64)))
}
BuiltinFn::MapIsEmpty => {
let map = Self::expect_arg_1::<HashMap<String, Value>>(&args, "Map.isEmpty", span)?;
Ok(EvalResult::Value(Value::Bool(map.is_empty())))
}
BuiltinFn::MapFromList => {
let list = Self::expect_arg_1::<Vec<Value>>(&args, "Map.fromList", span)?;
let mut map = HashMap::new();
for item in list {
match item {
Value::Tuple(fields) if fields.len() == 2 => {
let key = match &fields[0] {
Value::String(s) => s.clone(),
v => return Err(err(&format!("Map.fromList expects (String, V) tuples, got {} key", v.type_name()))),
};
map.insert(key, fields[1].clone());
}
_ => return Err(err("Map.fromList expects List<(String, V)>")),
}
}
Ok(EvalResult::Value(Value::Map(map)))
}
BuiltinFn::MapToList => {
let map = Self::expect_arg_1::<HashMap<String, Value>>(&args, "Map.toList", span)?;
let mut entries: Vec<(String, Value)> = map.into_iter().collect();
entries.sort_by(|(a, _), (b, _)| a.cmp(b));
Ok(EvalResult::Value(Value::List(
entries
.into_iter()
.map(|(k, v)| Value::Tuple(vec![Value::String(k), v]))
.collect(),
)))
}
BuiltinFn::MapMerge => {
if args.len() != 2 {
return Err(err("Map.merge requires 2 arguments: map1, map2"));
}
let mut map1 = match &args[0] {
Value::Map(m) => m.clone(),
v => return Err(err(&format!("Map.merge expects Map as first argument, got {}", v.type_name()))),
};
let map2 = match &args[1] {
Value::Map(m) => m.clone(),
v => return Err(err(&format!("Map.merge expects Map as second argument, got {}", v.type_name()))),
};
for (k, v) in map2 {
map1.insert(k, v);
}
Ok(EvalResult::Value(Value::Map(map1)))
}
}
}
@@ -3515,18 +3035,6 @@ impl Interpreter {
})
}
/// Compare two values for natural ordering (used by List.sort)
fn compare_values(a: &Value, b: &Value) -> std::cmp::Ordering {
match (a, b) {
(Value::Int(x), Value::Int(y)) => x.cmp(y),
(Value::Float(x), Value::Float(y)) => x.partial_cmp(y).unwrap_or(std::cmp::Ordering::Equal),
(Value::String(x), Value::String(y)) => x.cmp(y),
(Value::Bool(x), Value::Bool(y)) => x.cmp(y),
(Value::Char(x), Value::Char(y)) => x.cmp(y),
_ => std::cmp::Ordering::Equal,
}
}
fn match_pattern(&self, pattern: &Pattern, value: &Value) -> Option<Vec<(String, Value)>> {
match pattern {
Pattern::Wildcard(_) => Some(Vec::new()),
@@ -3609,11 +3117,6 @@ impl Interpreter {
b.get(k).map(|bv| self.values_equal(v, bv)).unwrap_or(false)
})
}
(Value::Map(a), Value::Map(b)) => {
a.len() == b.len() && a.iter().all(|(k, v)| {
b.get(k).map(|bv| self.values_equal(v, bv)).unwrap_or(false)
})
}
(
Value::Constructor {
name: n1,
@@ -4034,119 +3537,6 @@ impl Interpreter {
}
}
("File", "copy") => {
let source = match request.args.first() {
Some(Value::String(s)) => s.clone(),
_ => return Err(RuntimeError {
message: "File.copy requires a string source path".to_string(),
span: None,
}),
};
let dest = match request.args.get(1) {
Some(Value::String(s)) => s.clone(),
_ => return Err(RuntimeError {
message: "File.copy requires a string destination path".to_string(),
span: None,
}),
};
match std::fs::copy(&source, &dest) {
Ok(_) => Ok(Value::Unit),
Err(e) => Err(RuntimeError {
message: format!("Failed to copy '{}' to '{}': {}", source, dest, e),
span: None,
}),
}
}
("File", "glob") => {
let pattern = match request.args.first() {
Some(Value::String(s)) => s.clone(),
_ => return Err(RuntimeError {
message: "File.glob requires a string pattern".to_string(),
span: None,
}),
};
match glob::glob(&pattern) {
Ok(paths) => {
let entries: Vec<Value> = paths
.filter_map(|entry| entry.ok())
.map(|path| Value::String(path.to_string_lossy().to_string()))
.collect();
Ok(Value::List(entries))
}
Err(e) => Err(RuntimeError {
message: format!("Invalid glob pattern '{}': {}", pattern, e),
span: None,
}),
}
}
// ===== File Effect (safe Result-returning variants) =====
("File", "tryRead") => {
let path = match request.args.first() {
Some(Value::String(s)) => s.clone(),
_ => return Err(RuntimeError {
message: "File.tryRead requires a string path".to_string(),
span: None,
}),
};
match std::fs::read_to_string(&path) {
Ok(content) => Ok(Value::Constructor {
name: "Ok".to_string(),
fields: vec![Value::String(content)],
}),
Err(e) => Ok(Value::Constructor {
name: "Err".to_string(),
fields: vec![Value::String(format!("Failed to read file '{}': {}", path, e))],
}),
}
}
("File", "tryWrite") => {
let path = match request.args.first() {
Some(Value::String(s)) => s.clone(),
_ => return Err(RuntimeError {
message: "File.tryWrite requires a string path".to_string(),
span: None,
}),
};
let content = match request.args.get(1) {
Some(Value::String(s)) => s.clone(),
_ => return Err(RuntimeError {
message: "File.tryWrite requires string content".to_string(),
span: None,
}),
};
match std::fs::write(&path, &content) {
Ok(()) => Ok(Value::Constructor {
name: "Ok".to_string(),
fields: vec![Value::Unit],
}),
Err(e) => Ok(Value::Constructor {
name: "Err".to_string(),
fields: vec![Value::String(format!("Failed to write file '{}': {}", path, e))],
}),
}
}
("File", "tryDelete") => {
let path = match request.args.first() {
Some(Value::String(s)) => s.clone(),
_ => return Err(RuntimeError {
message: "File.tryDelete requires a string path".to_string(),
span: None,
}),
};
match std::fs::remove_file(&path) {
Ok(()) => Ok(Value::Constructor {
name: "Ok".to_string(),
fields: vec![Value::Unit],
}),
Err(e) => Ok(Value::Constructor {
name: "Err".to_string(),
fields: vec![Value::String(format!("Failed to delete file '{}': {}", path, e))],
}),
}
}
// ===== Process Effect =====
("Process", "exec") => {
use std::process::Command;
@@ -5654,7 +5044,6 @@ mod tests {
// Create a simple migration that adds a field
// Migration: old.name -> { name: old.name, email: "unknown" }
let migration_body = Expr::Record {
spread: None,
fields: vec![
(
Ident::new("name", Span::default()),

View File

@@ -42,7 +42,6 @@ pub enum TokenKind {
Effect,
Handler,
Run,
Handle,
Resume,
Type,
True,
@@ -55,7 +54,6 @@ pub enum TokenKind {
Trait, // trait (for type classes)
Impl, // impl (for trait implementations)
For, // for (in impl Trait for Type)
Extern, // extern (for FFI declarations)
// Documentation
DocComment(String), // /// doc comment
@@ -72,7 +70,6 @@ pub enum TokenKind {
// Operators
Plus, // +
PlusPlus, // ++
Minus, // -
Star, // *
Slash, // /
@@ -92,7 +89,6 @@ pub enum TokenKind {
Arrow, // =>
ThinArrow, // ->
Dot, // .
DotDotDot, // ...
Colon, // :
ColonColon, // ::
Comma, // ,
@@ -142,7 +138,6 @@ impl fmt::Display for TokenKind {
TokenKind::Effect => write!(f, "effect"),
TokenKind::Handler => write!(f, "handler"),
TokenKind::Run => write!(f, "run"),
TokenKind::Handle => write!(f, "handle"),
TokenKind::Resume => write!(f, "resume"),
TokenKind::Type => write!(f, "type"),
TokenKind::Import => write!(f, "import"),
@@ -153,7 +148,6 @@ impl fmt::Display for TokenKind {
TokenKind::Trait => write!(f, "trait"),
TokenKind::Impl => write!(f, "impl"),
TokenKind::For => write!(f, "for"),
TokenKind::Extern => write!(f, "extern"),
TokenKind::DocComment(s) => write!(f, "/// {}", s),
TokenKind::Is => write!(f, "is"),
TokenKind::Pure => write!(f, "pure"),
@@ -166,7 +160,6 @@ impl fmt::Display for TokenKind {
TokenKind::True => write!(f, "true"),
TokenKind::False => write!(f, "false"),
TokenKind::Plus => write!(f, "+"),
TokenKind::PlusPlus => write!(f, "++"),
TokenKind::Minus => write!(f, "-"),
TokenKind::Star => write!(f, "*"),
TokenKind::Slash => write!(f, "/"),
@@ -186,7 +179,6 @@ impl fmt::Display for TokenKind {
TokenKind::Arrow => write!(f, "=>"),
TokenKind::ThinArrow => write!(f, "->"),
TokenKind::Dot => write!(f, "."),
TokenKind::DotDotDot => write!(f, "..."),
TokenKind::Colon => write!(f, ":"),
TokenKind::ColonColon => write!(f, "::"),
TokenKind::Comma => write!(f, ","),
@@ -276,14 +268,7 @@ impl<'a> Lexer<'a> {
let kind = match c {
// Single-character tokens
'+' => {
if self.peek() == Some('+') {
self.advance();
TokenKind::PlusPlus
} else {
TokenKind::Plus
}
}
'+' => TokenKind::Plus,
'*' => TokenKind::Star,
'%' => TokenKind::Percent,
'(' => TokenKind::LParen,
@@ -379,22 +364,7 @@ impl<'a> Lexer<'a> {
TokenKind::Pipe
}
}
'.' => {
if self.peek() == Some('.') {
// Check for ... (need to peek past second dot)
// We look at source directly since we can only peek one ahead
let next_next = self.source[self.pos..].chars().nth(1);
if next_next == Some('.') {
self.advance(); // consume second '.'
self.advance(); // consume third '.'
TokenKind::DotDotDot
} else {
TokenKind::Dot
}
} else {
TokenKind::Dot
}
}
'.' => TokenKind::Dot,
':' => {
if self.peek() == Some(':') {
self.advance();
@@ -413,26 +383,7 @@ impl<'a> Lexer<'a> {
}
// String literals
'"' => {
// Check for triple-quote multiline string """
if self.peek() == Some('"') {
// Clone to peek at the second char
let mut lookahead = self.chars.clone();
lookahead.next(); // consume first peeked "
if lookahead.peek() == Some(&'"') {
// It's a triple-quote: consume both remaining quotes
self.advance(); // second "
self.advance(); // third "
self.scan_multiline_string(start)?
} else {
// It's an empty string ""
self.advance(); // consume closing "
TokenKind::String(String::new())
}
} else {
self.scan_string(start)?
}
}
'"' => self.scan_string(start)?,
// Char literals
'\'' => self.scan_char(start)?,
@@ -690,211 +641,6 @@ impl<'a> Lexer<'a> {
Ok(TokenKind::InterpolatedString(parts))
}
fn scan_multiline_string(&mut self, _start: usize) -> Result<TokenKind, LexError> {
let mut parts: Vec<StringPart> = Vec::new();
let mut current_literal = String::new();
// Skip the first newline after opening """ if present
if self.peek() == Some('\n') {
self.advance();
} else if self.peek() == Some('\r') {
self.advance();
if self.peek() == Some('\n') {
self.advance();
}
}
loop {
match self.advance() {
Some('"') => {
// Check for closing """
if self.peek() == Some('"') {
let mut lookahead = self.chars.clone();
lookahead.next(); // consume first peeked "
if lookahead.peek() == Some(&'"') {
// Closing """ found
self.advance(); // second "
self.advance(); // third "
break;
}
}
// Not closing triple-quote, just a regular " in the string
current_literal.push('"');
}
Some('\\') => {
// Handle escape sequences (same as regular strings)
match self.peek() {
Some('{') => {
self.advance();
current_literal.push('{');
}
Some('}') => {
self.advance();
current_literal.push('}');
}
_ => {
let escape_start = self.pos;
let escaped = match self.advance() {
Some('n') => '\n',
Some('r') => '\r',
Some('t') => '\t',
Some('\\') => '\\',
Some('"') => '"',
Some('0') => '\0',
Some('\'') => '\'',
Some(c) => {
return Err(LexError {
message: format!("Invalid escape sequence: \\{}", c),
span: Span::new(escape_start - 1, self.pos),
});
}
None => {
return Err(LexError {
message: "Unterminated multiline string".into(),
span: Span::new(_start, self.pos),
});
}
};
current_literal.push(escaped);
}
}
}
Some('{') => {
// Interpolation (same as regular strings)
if !current_literal.is_empty() {
parts.push(StringPart::Literal(std::mem::take(&mut current_literal)));
}
let mut expr_text = String::new();
let mut brace_depth = 1;
loop {
match self.advance() {
Some('{') => {
brace_depth += 1;
expr_text.push('{');
}
Some('}') => {
brace_depth -= 1;
if brace_depth == 0 {
break;
}
expr_text.push('}');
}
Some(c) => expr_text.push(c),
None => {
return Err(LexError {
message: "Unterminated interpolation in multiline string"
.into(),
span: Span::new(_start, self.pos),
});
}
}
}
parts.push(StringPart::Expr(expr_text));
}
Some(c) => current_literal.push(c),
None => {
return Err(LexError {
message: "Unterminated multiline string".into(),
span: Span::new(_start, self.pos),
});
}
}
}
// Strip common leading whitespace from all lines
let strip_indent = |s: &str| -> String {
if s.is_empty() {
return String::new();
}
let lines: Vec<&str> = s.split('\n').collect();
// Find minimum indentation of non-empty lines
let min_indent = lines
.iter()
.filter(|line| !line.trim().is_empty())
.map(|line| line.len() - line.trim_start().len())
.min()
.unwrap_or(0);
// Strip that indentation from each line
lines
.iter()
.map(|line| {
if line.len() >= min_indent {
&line[min_indent..]
} else {
line.trim_start()
}
})
.collect::<Vec<_>>()
.join("\n")
};
// Strip trailing whitespace-only line before closing """
let trim_trailing = |s: &mut String| {
// Remove trailing spaces/tabs (indent before closing """)
while s.ends_with(' ') || s.ends_with('\t') {
s.pop();
}
// Remove the trailing newline
if s.ends_with('\n') {
s.pop();
if s.ends_with('\r') {
s.pop();
}
}
};
if parts.is_empty() {
trim_trailing(&mut current_literal);
let result = strip_indent(&current_literal);
return Ok(TokenKind::String(result));
}
// Add remaining literal
if !current_literal.is_empty() {
trim_trailing(&mut current_literal);
parts.push(StringPart::Literal(current_literal));
}
// For interpolated multiline strings, strip indent from literal parts
// First, collect all literal content to find min indent
let mut all_text = String::new();
for part in &parts {
if let StringPart::Literal(lit) = part {
all_text.push_str(lit);
}
}
let lines: Vec<&str> = all_text.split('\n').collect();
let min_indent = lines
.iter()
.filter(|line| !line.trim().is_empty())
.map(|line| line.len() - line.trim_start().len())
.min()
.unwrap_or(0);
if min_indent > 0 {
for part in &mut parts {
if let StringPart::Literal(lit) = part {
let stripped_lines: Vec<&str> = lit
.split('\n')
.map(|line| {
if line.len() >= min_indent {
&line[min_indent..]
} else {
line.trim_start()
}
})
.collect();
*lit = stripped_lines.join("\n");
}
}
}
Ok(TokenKind::InterpolatedString(parts))
}
fn scan_char(&mut self, start: usize) -> Result<TokenKind, LexError> {
let c = match self.advance() {
Some('\\') => match self.advance() {
@@ -999,7 +745,6 @@ impl<'a> Lexer<'a> {
"effect" => TokenKind::Effect,
"handler" => TokenKind::Handler,
"run" => TokenKind::Run,
"handle" => TokenKind::Handle,
"resume" => TokenKind::Resume,
"type" => TokenKind::Type,
"import" => TokenKind::Import,
@@ -1010,7 +755,6 @@ impl<'a> Lexer<'a> {
"trait" => TokenKind::Trait,
"impl" => TokenKind::Impl,
"for" => TokenKind::For,
"extern" => TokenKind::Extern,
"is" => TokenKind::Is,
"pure" => TokenKind::Pure,
"total" => TokenKind::Total,
@@ -1019,8 +763,6 @@ impl<'a> Lexer<'a> {
"commutative" => TokenKind::Commutative,
"where" => TokenKind::Where,
"assume" => TokenKind::Assume,
"and" => TokenKind::And,
"or" => TokenKind::Or,
"true" => TokenKind::Bool(true),
"false" => TokenKind::Bool(false),
_ => TokenKind::Ident(ident.to_string()),

View File

@@ -403,9 +403,6 @@ impl Linter {
Declaration::Function(f) => {
self.defined_functions.insert(f.name.name.clone());
}
Declaration::ExternFn(e) => {
self.defined_functions.insert(e.name.name.clone());
}
Declaration::Let(l) => {
self.define_var(&l.name.name);
}
@@ -516,10 +513,7 @@ impl Linter {
Expr::Field { object, .. } | Expr::TupleIndex { object, .. } => {
self.collect_refs_expr(object);
}
Expr::Record { spread, fields, .. } => {
if let Some(spread_expr) = spread {
self.collect_refs_expr(spread_expr);
}
Expr::Record { fields, .. } => {
for (_, val) in fields {
self.collect_refs_expr(val);
}

View File

@@ -1571,10 +1571,7 @@ fn collect_call_site_hints(
collect_call_site_hints(source, e, param_names, hints);
}
}
Expr::Record { spread, fields, .. } => {
if let Some(spread_expr) = spread {
collect_call_site_hints(source, spread_expr, param_names, hints);
}
Expr::Record { fields, .. } => {
for (_, e) in fields {
collect_call_site_hints(source, e, param_names, hints);
}

View File

@@ -37,7 +37,7 @@ use std::borrow::Cow;
use std::collections::HashSet;
use typechecker::TypeChecker;
const VERSION: &str = env!("CARGO_PKG_VERSION");
const VERSION: &str = "0.1.0";
const HELP: &str = r#"
Lux - A functional language with first-class effects
@@ -193,12 +193,10 @@ fn main() {
eprintln!(" lux compile <file.lux> --run");
eprintln!(" lux compile <file.lux> --emit-c [-o file.c]");
eprintln!(" lux compile <file.lux> --target js [-o file.js]");
eprintln!(" lux compile <file.lux> --watch");
std::process::exit(1);
}
let run_after = args.iter().any(|a| a == "--run");
let emit_c = args.iter().any(|a| a == "--emit-c");
let watch = args.iter().any(|a| a == "--watch");
let target_js = args.iter()
.position(|a| a == "--target")
.and_then(|i| args.get(i + 1))
@@ -214,16 +212,6 @@ fn main() {
} else {
compile_to_c(&args[2], output_path, run_after, emit_c);
}
if watch {
// Build the args to replay for each recompilation (without --watch)
let compile_args: Vec<String> = args.iter()
.skip(1)
.filter(|a| a.as_str() != "--watch")
.cloned()
.collect();
watch_and_rerun(&args[2], &compile_args);
}
}
"repl" => {
// Start REPL
@@ -914,7 +902,6 @@ fn compile_to_c(path: &str, output_path: Option<&str>, run_after: bool, emit_c:
.args(["-O2", "-o"])
.arg(&output_bin)
.arg(&temp_c)
.arg("-lm")
.output();
match compile_result {
@@ -1363,64 +1350,6 @@ fn watch_file(path: &str) {
}
}
fn watch_and_rerun(path: &str, compile_args: &[String]) {
use std::time::{Duration, SystemTime};
use std::path::Path;
let file_path = Path::new(path);
if !file_path.exists() {
eprintln!("File not found: {}", path);
std::process::exit(1);
}
println!();
println!("Watching {} for changes (Ctrl+C to stop)...", path);
let mut last_modified = std::fs::metadata(file_path)
.and_then(|m| m.modified())
.unwrap_or(SystemTime::UNIX_EPOCH);
loop {
std::thread::sleep(Duration::from_millis(500));
let modified = match std::fs::metadata(file_path).and_then(|m| m.modified()) {
Ok(m) => m,
Err(_) => continue,
};
if modified > last_modified {
last_modified = modified;
// Clear screen
print!("\x1B[2J\x1B[H");
println!("=== Compiling {} ===", path);
println!();
let result = std::process::Command::new(std::env::current_exe().unwrap())
.args(compile_args)
.status();
match result {
Ok(status) if status.success() => {
println!();
println!("=== Success ===");
}
Ok(_) => {
println!();
println!("=== Failed ===");
}
Err(e) => {
eprintln!("Error running compiler: {}", e);
}
}
println!();
println!("Watching for changes...");
}
}
}
fn serve_static_files(dir: &str, port: u16) {
use std::io::{Write, BufRead, BufReader};
use std::net::TcpListener;
@@ -2288,29 +2217,6 @@ fn extract_module_doc(source: &str, path: &str) -> Result<ModuleDoc, String> {
is_public: matches!(t.visibility, ast::Visibility::Public),
});
}
ast::Declaration::ExternFn(ext) => {
let params: Vec<String> = ext.params.iter()
.map(|p| format!("{}: {}", p.name.name, format_type(&p.typ)))
.collect();
let js_note = ext.js_name.as_ref()
.map(|n| format!(" = \"{}\"", n))
.unwrap_or_default();
let signature = format!(
"extern fn {}({}): {}{}",
ext.name.name,
params.join(", "),
format_type(&ext.return_type),
js_note
);
let doc = extract_doc_comment(source, ext.span.start);
functions.push(FunctionDoc {
name: ext.name.name.clone(),
signature,
description: doc,
is_public: matches!(ext.visibility, ast::Visibility::Public),
properties: vec![],
});
}
ast::Declaration::Effect(e) => {
let doc = extract_doc_comment(source, e.span.start);
let ops: Vec<String> = e.operations.iter()
@@ -3948,49 +3854,6 @@ c")"#;
assert_eq!(eval(source).unwrap(), r#""literal {braces}""#);
}
#[test]
fn test_multiline_string() {
let source = r#"
let s = """
hello
world
"""
let result = String.length(s)
"#;
// "hello\nworld" = 11 chars
assert_eq!(eval(source).unwrap(), "11");
}
#[test]
fn test_multiline_string_with_quotes() {
// Quotes are fine in the middle of triple-quoted strings
let source = "let s = \"\"\"\n She said \"hello\" to him.\n\"\"\"";
assert_eq!(eval(source).unwrap(), r#""She said "hello" to him.""#);
}
#[test]
fn test_multiline_string_interpolation() {
let source = r#"
let name = "Lux"
let s = """
Hello, {name}!
"""
"#;
assert_eq!(eval(source).unwrap(), r#""Hello, Lux!""#);
}
#[test]
fn test_multiline_string_empty() {
let source = r#"let s = """""""#;
assert_eq!(eval(source).unwrap(), r#""""#);
}
#[test]
fn test_multiline_string_inline() {
let source = r#"let s = """hello world""""#;
assert_eq!(eval(source).unwrap(), r#""hello world""#);
}
// Option tests
#[test]
fn test_option_constructors() {
@@ -4104,146 +3967,6 @@ c")"#;
assert_eq!(eval("let x = { a: 1, b: 2 } == { a: 1, b: 3 }").unwrap(), "false");
}
#[test]
fn test_record_spread() {
let source = r#"
let base = { x: 1, y: 2, z: 3 }
let updated = { ...base, y: 20 }
let result = updated.y
"#;
assert_eq!(eval(source).unwrap(), "20");
}
#[test]
fn test_deep_path_record_update() {
// Basic deep path: { ...base, pos.x: val } desugars to { ...base, pos: { ...base.pos, x: val } }
let source = r#"
let npc = { name: "Goblin", pos: { x: 10, y: 20 } }
let moved = { ...npc, pos.x: 50, pos.y: 60 }
let result = moved.pos.x
"#;
assert_eq!(eval(source).unwrap(), "50");
// Verify other fields are preserved through spread
let source2 = r#"
let npc = { name: "Goblin", pos: { x: 10, y: 20 } }
let moved = { ...npc, pos.x: 50 }
let result = moved.pos.y
"#;
assert_eq!(eval(source2).unwrap(), "20");
// Verify top-level spread fields preserved
let source3 = r#"
let npc = { name: "Goblin", pos: { x: 10, y: 20 } }
let moved = { ...npc, pos.x: 50 }
let result = moved.name
"#;
assert_eq!(eval(source3).unwrap(), "\"Goblin\"");
// Mix of flat and deep path fields
let source4 = r#"
let npc = { name: "Goblin", pos: { x: 10, y: 20 }, hp: 100 }
let updated = { ...npc, pos.x: 50, hp: 80 }
let result = (updated.pos.x, updated.hp, updated.name)
"#;
assert_eq!(eval(source4).unwrap(), "(50, 80, \"Goblin\")");
}
#[test]
fn test_deep_path_record_multilevel() {
// Multi-level deep path: world.physics.gravity
let source = r#"
let world = { name: "Earth", physics: { gravity: { x: 0, y: -10 }, drag: 1 } }
let updated = { ...world, physics.gravity.y: -20 }
let result = (updated.physics.gravity.y, updated.physics.drag, updated.name)
"#;
assert_eq!(eval(source).unwrap(), "(-20, 1, \"Earth\")");
}
#[test]
fn test_deep_path_conflict_error() {
// Field appears as both flat and deep path — should error
let result = eval(r#"
let base = { pos: { x: 1, y: 2 } }
let bad = { ...base, pos: { x: 10, y: 20 }, pos.x: 30 }
"#);
assert!(result.is_err());
}
#[test]
fn test_extern_fn_parse() {
// Extern fn should parse successfully
let source = r#"
extern fn getElementById(id: String): String
let x = 42
"#;
assert_eq!(eval(source).unwrap(), "42");
}
#[test]
fn test_extern_fn_with_js_name() {
// Extern fn with JS name override
let source = r#"
extern fn getCtx(el: String, kind: String): String = "getContext"
let x = 42
"#;
assert_eq!(eval(source).unwrap(), "42");
}
#[test]
fn test_extern_fn_call_errors_in_interpreter() {
// Calling an extern fn in the interpreter should produce a clear error
let source = r#"
extern fn alert(msg: String): Unit
let x = alert("hello")
"#;
let result = eval(source);
assert!(result.is_err());
let err = result.unwrap_err();
assert!(err.contains("extern") || err.contains("Extern") || err.contains("JavaScript"),
"Error should mention extern/JavaScript: {}", err);
}
#[test]
fn test_pub_extern_fn() {
// pub extern fn should parse
let source = r#"
pub extern fn requestAnimationFrame(callback: fn(): Unit): Int
let x = 42
"#;
assert_eq!(eval(source).unwrap(), "42");
}
#[test]
fn test_extern_fn_js_codegen() {
// Verify JS backend emits extern fn calls without _lux suffix
use crate::codegen::js_backend::JsBackend;
use crate::parser::Parser;
use crate::lexer::Lexer;
let source = r#"
extern fn getElementById(id: String): String
extern fn getContext(el: String, kind: String): String = "getContext"
fn main(): Unit = {
let el = getElementById("canvas")
let ctx = getContext(el, "2d")
()
}
"#;
let tokens = Lexer::new(source).tokenize().unwrap();
let program = Parser::new(tokens).parse_program().unwrap();
let mut backend = JsBackend::new();
let js = backend.generate(&program).unwrap();
// getElementById should appear as-is (no _lux suffix)
assert!(js.contains("getElementById("), "JS should call getElementById directly: {}", js);
// getContext should use the JS name override
assert!(js.contains("getContext("), "JS should call getContext directly: {}", js);
// main should still be mangled
assert!(js.contains("main_lux"), "main should be mangled: {}", js);
}
#[test]
fn test_invalid_escape_sequence() {
let result = eval(r#"let x = "\z""#);
@@ -5717,173 +5440,4 @@ c")"#;
check_file("projects/rest-api/main.lux").unwrap();
}
}
// === Map type tests ===
#[test]
fn test_map_new_and_size() {
let source = r#"
let m = Map.new()
let result = Map.size(m)
"#;
assert_eq!(eval(source).unwrap(), "0");
}
#[test]
fn test_map_set_and_get() {
let source = r#"
let m = Map.new()
let m2 = Map.set(m, "name", "Alice")
let result = Map.get(m2, "name")
"#;
assert_eq!(eval(source).unwrap(), "Some(\"Alice\")");
}
#[test]
fn test_map_get_missing() {
let source = r#"
let m = Map.new()
let result = Map.get(m, "missing")
"#;
assert_eq!(eval(source).unwrap(), "None");
}
#[test]
fn test_map_contains() {
let source = r#"
let m = Map.set(Map.new(), "x", 1)
let result = (Map.contains(m, "x"), Map.contains(m, "y"))
"#;
assert_eq!(eval(source).unwrap(), "(true, false)");
}
#[test]
fn test_map_remove() {
let source = r#"
let m = Map.set(Map.set(Map.new(), "a", 1), "b", 2)
let m2 = Map.remove(m, "a")
let result = (Map.size(m2), Map.contains(m2, "a"), Map.contains(m2, "b"))
"#;
assert_eq!(eval(source).unwrap(), "(1, false, true)");
}
#[test]
fn test_map_keys_and_values() {
    // Keys come back in sorted order regardless of insertion order.
    // NOTE(review): despite the name, only Map.keys is exercised here;
    // consider a companion assertion on Map.values if that builtin exists.
    let program = r#"
let m = Map.set(Map.set(Map.new(), "b", 2), "a", 1)
let result = Map.keys(m)
"#;
    let output = eval(program).unwrap();
    assert_eq!(output, "[\"a\", \"b\"]");
}
#[test]
fn test_map_from_list() {
    // Map.fromList builds a map from (key, value) tuples.
    let program = r#"
let m = Map.fromList([("x", 10), ("y", 20)])
let result = (Map.get(m, "x"), Map.size(m))
"#;
    let output = eval(program).unwrap();
    assert_eq!(output, "(Some(10), 2)");
}
#[test]
fn test_map_to_list() {
    // Map.toList yields (key, value) pairs ordered by key, not by insertion.
    let program = r#"
let m = Map.set(Map.set(Map.new(), "b", 2), "a", 1)
let result = Map.toList(m)
"#;
    let output = eval(program).unwrap();
    assert_eq!(output, "[(\"a\", 1), (\"b\", 2)]");
}
#[test]
fn test_map_merge() {
    // Merge is right-biased: on a key collision ("b") the second map wins.
    let program = r#"
let m1 = Map.fromList([("a", 1), ("b", 2)])
let m2 = Map.fromList([("b", 3), ("c", 4)])
let merged = Map.merge(m1, m2)
let result = (Map.get(merged, "a"), Map.get(merged, "b"), Map.get(merged, "c"))
"#;
    let output = eval(program).unwrap();
    assert_eq!(output, "(Some(1), Some(3), Some(4))");
}
#[test]
fn test_map_immutability() {
    // Map.set returns a new map; the original is left untouched.
    let program = r#"
let m1 = Map.fromList([("a", 1)])
let m2 = Map.set(m1, "b", 2)
let result = (Map.size(m1), Map.size(m2))
"#;
    let output = eval(program).unwrap();
    assert_eq!(output, "(1, 2)");
}
#[test]
fn test_map_is_empty() {
    // isEmpty is true only for a map with no entries.
    let program = r#"
let m1 = Map.new()
let m2 = Map.set(m1, "x", 1)
let result = (Map.isEmpty(m1), Map.isEmpty(m2))
"#;
    let output = eval(program).unwrap();
    assert_eq!(output, "(true, false)");
}
#[test]
fn test_map_type_annotation() {
    // A Map<String, Int> parameter annotation type-checks against Map.get.
    let program = r#"
fn lookup(m: Map<String, Int>, key: String): Option<Int> =
Map.get(m, key)
let m = Map.fromList([("age", 30)])
let result = lookup(m, "age")
"#;
    let output = eval(program).unwrap();
    assert_eq!(output, "Some(30)");
}
#[test]
fn test_file_copy() {
    use std::io::Write;
    // Copy a freshly written file via the Lux `File.copy` builtin and verify
    // the destination bytes match the source.
    //
    // The temp dir name includes the process id so concurrent `cargo test`
    // invocations (e.g. separate workspaces sharing the same /tmp) cannot
    // clobber each other's fixtures.
    let dir = std::env::temp_dir().join(format!("lux_test_file_copy_{}", std::process::id()));
    let _ = std::fs::create_dir_all(&dir);
    let src = dir.join("src.txt");
    let dst = dir.join("dst.txt");
    std::fs::File::create(&src).unwrap().write_all(b"hello copy").unwrap();
    // Remove any stale destination so a no-op copy cannot pass by accident.
    let _ = std::fs::remove_file(&dst);
    let source = format!(r#"
fn main(): Unit with {{File}} =
File.copy("{}", "{}")
let _ = run main() with {{}}
let result = "done"
"#, src.display(), dst.display());
    let result = eval(&source);
    assert!(result.is_ok(), "File.copy failed: {:?}", result);
    let contents = std::fs::read_to_string(&dst);
    // Clean up BEFORE the final assertion so a content mismatch doesn't leak
    // the temp directory (the original unwrapped first and leaked on failure).
    let _ = std::fs::remove_dir_all(&dir);
    assert_eq!(contents.unwrap(), "hello copy");
}
#[test]
fn test_effectful_callback_propagation() {
    // WISH-7: a Console-effectful lambda passed to List.forEach must have its
    // effect inferred and propagated to the enclosing function, so this
    // program type-checks without an explicit `with {Console}` clause.
    let program = r#"
fn printAll(items: List<String>): Unit =
List.forEach(items, fn(x: String): Unit => Console.print(x))
let result = "ok"
"#;
    let outcome = eval(program);
    assert!(outcome.is_ok(), "Effectful callback should type-check: {:?}", outcome);
}
#[test]
fn test_effectful_callback_in_map() {
    // A File-effectful lambda inside List.map must likewise propagate its
    // effect to the caller rather than being rejected.
    let program = r#"
fn readAll(paths: List<String>): List<String> =
List.map(paths, fn(p: String): String => File.read(p))
let result = "ok"
"#;
    let outcome = eval(program);
    assert!(outcome.is_ok(), "Effectful callback in map should type-check: {:?}", outcome);
}
}

View File

@@ -52,7 +52,6 @@ impl Module {
Declaration::Let(l) => l.visibility == Visibility::Public,
Declaration::Type(t) => t.visibility == Visibility::Public,
Declaration::Trait(t) => t.visibility == Visibility::Public,
Declaration::ExternFn(e) => e.visibility == Visibility::Public,
// Effects, handlers, and impls are always public for now
Declaration::Effect(_) | Declaration::Handler(_) | Declaration::Impl(_) => true,
}
@@ -280,12 +279,6 @@ impl ModuleLoader {
}
Declaration::Type(t) if t.visibility == Visibility::Public => {
exports.insert(t.name.name.clone());
// Also export constructors for ADT types
if let crate::ast::TypeDef::Enum(variants) = &t.definition {
for variant in variants {
exports.insert(variant.name.name.clone());
}
}
}
Declaration::Effect(e) => {
// Effects are always exported
@@ -295,9 +288,6 @@ impl ModuleLoader {
// Handlers are always exported
exports.insert(h.name.name.clone());
}
Declaration::ExternFn(e) if e.visibility == Visibility::Public => {
exports.insert(e.name.name.clone());
}
_ => {}
}
}

View File

@@ -238,7 +238,6 @@ impl Parser {
match self.peek_kind() {
TokenKind::Fn => Ok(Declaration::Function(self.parse_function_decl(visibility, doc)?)),
TokenKind::Extern => Ok(Declaration::ExternFn(self.parse_extern_fn_decl(visibility, doc)?)),
TokenKind::Effect => Ok(Declaration::Effect(self.parse_effect_decl(doc)?)),
TokenKind::Handler => Ok(Declaration::Handler(self.parse_handler_decl()?)),
TokenKind::Type => Ok(Declaration::Type(self.parse_type_decl(visibility, doc)?)),
@@ -246,8 +245,7 @@ impl Parser {
TokenKind::Trait => Ok(Declaration::Trait(self.parse_trait_decl(visibility, doc)?)),
TokenKind::Impl => Ok(Declaration::Impl(self.parse_impl_decl()?)),
TokenKind::Run => Err(self.error("Bare 'run' expressions are not allowed at top level. Use 'let _ = run ...' or 'let result = run ...'")),
TokenKind::Handle => Err(self.error("Bare 'handle' expressions are not allowed at top level. Use 'let _ = handle ...' or 'let result = handle ...'")),
_ => Err(self.error("Expected declaration (fn, extern, effect, handler, type, trait, impl, or let)")),
_ => Err(self.error("Expected declaration (fn, effect, handler, type, trait, impl, or let)")),
}
}
@@ -324,57 +322,6 @@ impl Parser {
})
}
/// Parse an extern function declaration:
/// `extern fn name<T>(params): ReturnType [= "jsName"]`
///
/// Extern fns declare a signature with no body; the JS backend emits calls to
/// them unmangled (see the backend test asserting `getElementById(` appears
/// without the `_lux` suffix). Returns the parsed `ExternFnDecl` or a
/// `ParseError` at the offending token.
fn parse_extern_fn_decl(&mut self, visibility: Visibility, doc: Option<String>) -> Result<ExternFnDecl, ParseError> {
    let start = self.current_span();
    // The caller only peeked at `extern`; consume both leading keywords here.
    self.expect(TokenKind::Extern)?;
    self.expect(TokenKind::Fn)?;
    let name = self.parse_ident()?;
    // Optional type parameters: `extern fn name<T, U>(...)`.
    let type_params = if self.check(TokenKind::Lt) {
        self.parse_type_params()?
    } else {
        Vec::new()
    };
    self.expect(TokenKind::LParen)?;
    let params = self.parse_params()?;
    self.expect(TokenKind::RParen)?;
    // The return type annotation is mandatory — there is no body to infer from.
    self.expect(TokenKind::Colon)?;
    let return_type = self.parse_type()?;
    // Optional JS name override: `= "jsName"`. When present, the backend calls
    // the overridden name instead of the Lux identifier.
    let js_name = if self.check(TokenKind::Eq) {
        self.advance();
        match self.peek_kind() {
            TokenKind::String(s) => {
                // Clone before advancing: the token is still owned by the stream.
                let name = s.clone();
                self.advance();
                Some(name)
            }
            _ => return Err(self.error("Expected string literal for JS name in extern fn")),
        }
    } else {
        None
    };
    // Span covers from `extern` through the last consumed token.
    let span = start.merge(self.previous_span());
    Ok(ExternFnDecl {
        visibility,
        doc,
        name,
        type_params,
        params,
        return_type,
        js_name,
        span,
    })
}
/// Parse effect declaration
fn parse_effect_decl(&mut self, doc: Option<String>) -> Result<EffectDecl, ParseError> {
let start = self.current_span();
@@ -898,7 +845,6 @@ impl Parser {
/// Parse function parameters
fn parse_params(&mut self) -> Result<Vec<Parameter>, ParseError> {
let mut params = Vec::new();
self.skip_newlines();
while !self.check(TokenKind::RParen) {
let start = self.current_span();
@@ -908,11 +854,9 @@ impl Parser {
let span = start.merge(self.previous_span());
params.push(Parameter { name, typ, span });
self.skip_newlines();
if !self.check(TokenKind::RParen) {
self.expect(TokenKind::Comma)?;
self.skip_newlines();
}
}
@@ -1614,7 +1558,6 @@ impl Parser {
loop {
let op = match self.peek_kind() {
TokenKind::Plus => BinaryOp::Add,
TokenKind::PlusPlus => BinaryOp::Concat,
TokenKind::Minus => BinaryOp::Sub,
_ => break,
};
@@ -1831,7 +1774,6 @@ impl Parser {
TokenKind::Let => self.parse_let_expr(),
TokenKind::Fn => self.parse_lambda_expr(),
TokenKind::Run => self.parse_run_expr(),
TokenKind::Handle => self.parse_handle_expr(),
TokenKind::Resume => self.parse_resume_expr(),
// Delimiters
@@ -1849,7 +1791,6 @@ impl Parser {
let condition = Box::new(self.parse_expr()?);
self.skip_newlines();
self.expect(TokenKind::Then)?;
self.skip_newlines();
let then_branch = Box::new(self.parse_expr()?);
@@ -1974,27 +1915,9 @@ impl Parser {
TokenKind::Ident(name) => {
// Check if it starts with uppercase (constructor) or lowercase (variable)
if name.chars().next().map_or(false, |c| c.is_uppercase()) {
self.parse_constructor_pattern_with_module(None)
self.parse_constructor_pattern()
} else {
let ident = self.parse_ident()?;
// Check for module-qualified constructor: module.Constructor
if self.check(TokenKind::Dot) {
// Peek ahead to see if next is an uppercase identifier
let dot_pos = self.pos;
self.advance(); // skip dot
if let TokenKind::Ident(next_name) = self.peek_kind() {
if next_name
.chars()
.next()
.map_or(false, |c| c.is_uppercase())
{
return self
.parse_constructor_pattern_with_module(Some(ident));
}
}
// Not a module-qualified constructor, backtrack
self.pos = dot_pos;
}
Ok(Pattern::Var(ident))
}
}
@@ -2004,40 +1927,25 @@ impl Parser {
}
}
fn parse_constructor_pattern_with_module(
&mut self,
module: Option<Ident>,
) -> Result<Pattern, ParseError> {
let start = module
.as_ref()
.map(|m| m.span)
.unwrap_or_else(|| self.current_span());
fn parse_constructor_pattern(&mut self) -> Result<Pattern, ParseError> {
let start = self.current_span();
let name = self.parse_ident()?;
if self.check(TokenKind::LParen) {
self.advance();
self.skip_newlines();
let mut fields = Vec::new();
while !self.check(TokenKind::RParen) {
fields.push(self.parse_pattern()?);
self.skip_newlines();
if !self.check(TokenKind::RParen) {
self.expect(TokenKind::Comma)?;
self.skip_newlines();
}
}
self.expect(TokenKind::RParen)?;
let span = start.merge(self.previous_span());
Ok(Pattern::Constructor {
module,
name,
fields,
span,
})
Ok(Pattern::Constructor { name, fields, span })
} else {
let span = start.merge(name.span);
let span = name.span;
Ok(Pattern::Constructor {
module,
name,
fields: Vec::new(),
span,
@@ -2048,15 +1956,12 @@ impl Parser {
fn parse_tuple_pattern(&mut self) -> Result<Pattern, ParseError> {
let start = self.current_span();
self.expect(TokenKind::LParen)?;
self.skip_newlines();
let mut elements = Vec::new();
while !self.check(TokenKind::RParen) {
elements.push(self.parse_pattern()?);
self.skip_newlines();
if !self.check(TokenKind::RParen) {
self.expect(TokenKind::Comma)?;
self.skip_newlines();
}
}
@@ -2186,7 +2091,6 @@ impl Parser {
fn parse_lambda_params(&mut self) -> Result<Vec<Parameter>, ParseError> {
let mut params = Vec::new();
self.skip_newlines();
while !self.check(TokenKind::RParen) {
let start = self.current_span();
@@ -2202,11 +2106,9 @@ impl Parser {
let span = start.merge(self.previous_span());
params.push(Parameter { name, typ, span });
self.skip_newlines();
if !self.check(TokenKind::RParen) {
self.expect(TokenKind::Comma)?;
self.skip_newlines();
}
}
@@ -2247,40 +2149,6 @@ impl Parser {
})
}
/// Parse a `handle` expression:
/// `handle expr with { Effect1 = handler1, Effect2 = handler2 }`
///
/// `handle` is surface sugar for `run`: the result is returned as an
/// `Expr::Run`, so later phases only ever see the one construct.
fn parse_handle_expr(&mut self) -> Result<Expr, ParseError> {
    let start = self.current_span();
    self.expect(TokenKind::Handle)?;
    // The handled expression is parsed at call precedence, so
    // `handle f(x) with { ... }` binds just the call, not a wider expression.
    let expr = Box::new(self.parse_call_expr()?);
    self.expect(TokenKind::With)?;
    self.expect(TokenKind::LBrace)?;
    self.skip_newlines();
    let mut handlers = Vec::new();
    while !self.check(TokenKind::RBrace) {
        // Each entry is `EffectName = handlerExpr`.
        let effect = self.parse_ident()?;
        self.expect(TokenKind::Eq)?;
        let handler = self.parse_expr()?;
        handlers.push((effect, handler));
        self.skip_newlines();
        // Commas between entries are optional (newlines also separate them),
        // and a trailing comma before `}` is accepted.
        if self.check(TokenKind::Comma) {
            self.advance();
        }
        self.skip_newlines();
    }
    // Capture the closing brace's span before consuming it so the merged
    // span covers the entire `handle ... }` form.
    let end = self.current_span();
    self.expect(TokenKind::RBrace)?;
    Ok(Expr::Run {
        expr,
        handlers,
        span: start.merge(end),
    })
}
fn parse_resume_expr(&mut self) -> Result<Expr, ParseError> {
let start = self.current_span();
self.expect(TokenKind::Resume)?;
@@ -2294,7 +2162,6 @@ impl Parser {
fn parse_tuple_or_paren_expr(&mut self) -> Result<Expr, ParseError> {
let start = self.current_span();
self.expect(TokenKind::LParen)?;
self.skip_newlines();
if self.check(TokenKind::RParen) {
self.advance();
@@ -2305,19 +2172,16 @@ impl Parser {
}
let first = self.parse_expr()?;
self.skip_newlines();
if self.check(TokenKind::Comma) {
// Tuple
let mut elements = vec![first];
while self.check(TokenKind::Comma) {
self.advance();
self.skip_newlines();
if self.check(TokenKind::RParen) {
break;
}
elements.push(self.parse_expr()?);
self.skip_newlines();
}
self.expect(TokenKind::RParen)?;
let span = start.merge(self.previous_span());
@@ -2343,39 +2207,12 @@ impl Parser {
}));
}
// Check for record spread: { ...expr, field: val }
if matches!(self.peek_kind(), TokenKind::DotDotDot) {
return self.parse_record_expr_rest(start);
}
// Check if it's a record (ident: expr or ident.path: expr) or block
// Check if it's a record (ident: expr) or block
if matches!(self.peek_kind(), TokenKind::Ident(_)) {
let lookahead = self.tokens.get(self.pos + 1).map(|t| &t.kind);
if matches!(lookahead, Some(TokenKind::Colon)) {
return self.parse_record_expr_rest(start);
}
// Check for deep path record: { ident.ident...: expr }
if matches!(lookahead, Some(TokenKind::Dot)) {
let mut look = self.pos + 2;
loop {
match self.tokens.get(look).map(|t| &t.kind) {
Some(TokenKind::Ident(_)) => {
look += 1;
match self.tokens.get(look).map(|t| &t.kind) {
Some(TokenKind::Colon) => {
return self.parse_record_expr_rest(start);
}
Some(TokenKind::Dot) => {
look += 1;
continue;
}
_ => break,
}
}
_ => break,
}
}
}
}
// It's a block
@@ -2383,40 +2220,13 @@ impl Parser {
}
fn parse_record_expr_rest(&mut self, start: Span) -> Result<Expr, ParseError> {
let mut raw_fields: Vec<(Vec<Ident>, Expr)> = Vec::new();
let mut spread = None;
let mut has_deep_paths = false;
// Check for spread: { ...expr, ... }
if self.check(TokenKind::DotDotDot) {
self.advance(); // consume ...
let spread_expr = self.parse_expr()?;
spread = Some(Box::new(spread_expr));
self.skip_newlines();
if self.check(TokenKind::Comma) {
self.advance();
}
self.skip_newlines();
}
let mut fields = Vec::new();
while !self.check(TokenKind::RBrace) {
let name = self.parse_ident()?;
// Check for dotted path: pos.x, pos.x.y, etc.
let mut path = vec![name];
while self.check(TokenKind::Dot) {
self.advance(); // consume .
let segment = self.parse_ident()?;
path.push(segment);
}
if path.len() > 1 {
has_deep_paths = true;
}
self.expect(TokenKind::Colon)?;
let value = self.parse_expr()?;
raw_fields.push((path, value));
fields.push((name, value));
self.skip_newlines();
if self.check(TokenKind::Comma) {
@@ -2427,120 +2237,7 @@ impl Parser {
self.expect(TokenKind::RBrace)?;
let span = start.merge(self.previous_span());
if has_deep_paths {
Self::desugar_deep_fields(spread, raw_fields, span)
} else {
// No deep paths — use flat fields directly (common case, no allocation overhead)
let fields = raw_fields
.into_iter()
.map(|(mut path, value)| (path.remove(0), value))
.collect();
Ok(Expr::Record {
spread,
fields,
span,
})
}
}
/// Desugar deep path record fields into nested record spread expressions.
/// `{ ...base, pos.x: vx, pos.y: vy }` becomes `{ ...base, pos: { ...base.pos, x: vx, y: vy } }`
fn desugar_deep_fields(
spread: Option<Box<Expr>>,
raw_fields: Vec<(Vec<Ident>, Expr)>,
outer_span: Span,
) -> Result<Expr, ParseError> {
use std::collections::HashMap;
// Group fields by first path segment, preserving order
let mut groups: Vec<(String, Vec<(Vec<Ident>, Expr)>)> = Vec::new();
let mut group_map: HashMap<String, usize> = HashMap::new();
for (path, value) in raw_fields {
let key = path[0].name.clone();
if let Some(&idx) = group_map.get(&key) {
groups[idx].1.push((path, value));
} else {
group_map.insert(key.clone(), groups.len());
groups.push((key, vec![(path, value)]));
}
}
let mut fields = Vec::new();
for (_, group) in groups {
let first_ident = group[0].0[0].clone();
let has_flat = group.iter().any(|(p, _)| p.len() == 1);
let has_deep = group.iter().any(|(p, _)| p.len() > 1);
if has_flat && has_deep {
return Err(ParseError {
message: format!(
"Field '{}' appears as both a direct field and a deep path prefix",
first_ident.name
),
span: first_ident.span,
});
}
if has_flat {
if group.len() > 1 {
return Err(ParseError {
message: format!("Duplicate field '{}'", first_ident.name),
span: group[1].0[0].span,
});
}
let (_, value) = group.into_iter().next().unwrap();
fields.push((first_ident, value));
} else {
// Deep paths — create nested record with spread from parent
let sub_spread = spread.as_ref().map(|s| {
Box::new(Expr::Field {
object: s.clone(),
field: first_ident.clone(),
span: first_ident.span,
})
});
// Strip first segment from all paths
let sub_fields: Vec<(Vec<Ident>, Expr)> = group
.into_iter()
.map(|(mut path, value)| {
path.remove(0);
(path, value)
})
.collect();
let has_nested_deep = sub_fields.iter().any(|(p, _)| p.len() > 1);
if has_nested_deep {
// Recursively desugar deeper paths
let nested =
Self::desugar_deep_fields(sub_spread, sub_fields, first_ident.span)?;
fields.push((first_ident, nested));
} else {
// All sub-paths are single-segment — build Record directly
let flat_fields: Vec<(Ident, Expr)> = sub_fields
.into_iter()
.map(|(mut path, value)| (path.remove(0), value))
.collect();
fields.push((
first_ident.clone(),
Expr::Record {
spread: sub_spread,
fields: flat_fields,
span: first_ident.span,
},
));
}
}
}
Ok(Expr::Record {
spread,
fields,
span: outer_span,
})
Ok(Expr::Record { fields, span })
}
fn parse_block_rest(&mut self, start: Span) -> Result<Expr, ParseError> {

View File

@@ -245,30 +245,6 @@ impl SymbolTable {
Declaration::Handler(h) => self.visit_handler(h, scope_idx),
Declaration::Trait(t) => self.visit_trait(t, scope_idx),
Declaration::Impl(i) => self.visit_impl(i, scope_idx),
Declaration::ExternFn(ext) => {
let is_public = matches!(ext.visibility, Visibility::Public);
let params: Vec<String> = ext
.params
.iter()
.map(|p| format!("{}: {}", p.name.name, self.type_expr_to_string(&p.typ)))
.collect();
let sig = format!(
"extern fn {}({}): {}",
ext.name.name,
params.join(", "),
self.type_expr_to_string(&ext.return_type)
);
let mut symbol = self.new_symbol(
ext.name.name.clone(),
SymbolKind::Function,
ext.span,
Some(sig),
is_public,
);
symbol.documentation = ext.doc.clone();
let id = self.add_symbol(scope_idx, symbol);
self.add_reference(id, ext.name.span, true, true);
}
}
}
@@ -551,10 +527,7 @@ impl SymbolTable {
self.visit_expr(e, scope_idx);
}
}
Expr::Record { spread, fields, .. } => {
if let Some(spread_expr) = spread {
self.visit_expr(spread_expr, scope_idx);
}
Expr::Record { fields, .. } => {
for (_, e) in fields {
self.visit_expr(e, scope_idx);
}

View File

@@ -5,9 +5,9 @@
use std::collections::HashMap;
use crate::ast::{
self, BinaryOp, Declaration, EffectDecl, ExternFnDecl, Expr, FunctionDecl, HandlerDecl, Ident,
ImplDecl, ImportDecl, LetDecl, Literal, LiteralKind, MatchArm, Parameter, Pattern, Program,
Span, Statement, TraitDecl, TypeDecl, TypeExpr, UnaryOp, VariantFields,
self, BinaryOp, Declaration, EffectDecl, Expr, FunctionDecl, HandlerDecl, Ident, ImplDecl,
ImportDecl, LetDecl, Literal, LiteralKind, MatchArm, Parameter, Pattern, Program, Span,
Statement, TraitDecl, TypeDecl, TypeExpr, UnaryOp, VariantFields,
};
use crate::diagnostics::{find_similar_names, format_did_you_mean, Diagnostic, ErrorCode, Severity};
use crate::exhaustiveness::{check_exhaustiveness, missing_patterns_hint};
@@ -339,10 +339,7 @@ fn references_params(expr: &Expr, params: &[&str]) -> bool {
Expr::Lambda { body, .. } => references_params(body, params),
Expr::Tuple { elements, .. } => elements.iter().any(|e| references_params(e, params)),
Expr::List { elements, .. } => elements.iter().any(|e| references_params(e, params)),
Expr::Record { spread, fields, .. } => {
spread.as_ref().is_some_and(|s| references_params(s, params))
|| fields.iter().any(|(_, e)| references_params(e, params))
}
Expr::Record { fields, .. } => fields.iter().any(|(_, e)| references_params(e, params)),
Expr::Match { scrutinee, arms, .. } => {
references_params(scrutinee, params)
|| arms.iter().any(|a| references_params(&a.body, params))
@@ -519,9 +516,8 @@ fn has_recursive_calls(func_name: &str, body: &Expr) -> bool {
Expr::Tuple { elements, .. } | Expr::List { elements, .. } => {
elements.iter().any(|e| has_recursive_calls(func_name, e))
}
Expr::Record { spread, fields, .. } => {
spread.as_ref().is_some_and(|s| has_recursive_calls(func_name, s))
|| fields.iter().any(|(_, e)| has_recursive_calls(func_name, e))
Expr::Record { fields, .. } => {
fields.iter().any(|(_, e)| has_recursive_calls(func_name, e))
}
Expr::Field { object, .. } | Expr::TupleIndex { object, .. } => has_recursive_calls(func_name, object),
Expr::Let { value, body, .. } => {
@@ -676,7 +672,6 @@ fn generate_auto_migration_expr(
// Build the record expression
Some(Expr::Record {
spread: None,
fields: field_exprs,
span,
})
@@ -981,13 +976,6 @@ impl TypeChecker {
if !fields.is_empty() {
self.env.bind(&name, TypeScheme::mono(Type::Record(fields)));
}
// Also copy type definitions so imported types are usable
for (type_name, type_def) in &module_checker.env.types {
if !self.env.types.contains_key(type_name) {
self.env.types.insert(type_name.clone(), type_def.clone());
}
}
}
ImportKind::Direct => {
// Import a specific name directly
@@ -1227,17 +1215,6 @@ impl TypeChecker {
let trait_impl = self.collect_impl(impl_decl);
self.env.trait_impls.push(trait_impl);
}
Declaration::ExternFn(ext) => {
// Register extern fn type signature (like a regular function but no body)
let param_types: Vec<Type> = ext
.params
.iter()
.map(|p| self.resolve_type(&p.typ))
.collect();
let return_type = self.resolve_type(&ext.return_type);
let fn_type = Type::function(param_types, return_type);
self.env.bind(&ext.name.name, TypeScheme::mono(fn_type));
}
}
}
@@ -1559,7 +1536,7 @@ impl TypeChecker {
// Use the declared type if present, otherwise use inferred
let final_type = if let Some(ref type_expr) = let_decl.typ {
let declared = self.resolve_type(type_expr);
if let Err(e) = unify_with_env(&inferred, &declared, &self.env) {
if let Err(e) = unify(&inferred, &declared) {
self.errors.push(TypeError {
message: format!(
"Variable '{}' has type {}, but declared type is {}: {}",
@@ -1767,11 +1744,7 @@ impl TypeChecker {
span,
} => self.infer_block(statements, result, *span),
Expr::Record {
spread,
fields,
span,
} => self.infer_record(spread.as_deref(), fields, *span),
Expr::Record { fields, span } => self.infer_record(fields, *span),
Expr::Tuple { elements, span } => self.infer_tuple(elements, *span),
@@ -1810,7 +1783,7 @@ impl TypeChecker {
match op {
BinaryOp::Add => {
// Add supports both numeric types and string concatenation
if let Err(e) = unify_with_env(&left_type, &right_type, &self.env) {
if let Err(e) = unify(&left_type, &right_type) {
self.errors.push(TypeError {
message: format!("Operands of '{}' must have same type: {}", op, e),
span,
@@ -1831,32 +1804,9 @@ impl TypeChecker {
}
}
BinaryOp::Concat => {
// Concat (++) supports strings and lists
if let Err(e) = unify_with_env(&left_type, &right_type, &self.env) {
self.errors.push(TypeError {
message: format!("Operands of '++' must have same type: {}", e),
span,
});
}
match &left_type {
Type::String | Type::List(_) | Type::Var(_) => left_type,
_ => {
self.errors.push(TypeError {
message: format!(
"Operator '++' requires String or List operands, got {}",
left_type
),
span,
});
Type::Error
}
}
}
BinaryOp::Sub | BinaryOp::Mul | BinaryOp::Div | BinaryOp::Mod => {
// Arithmetic: both operands must be same numeric type
if let Err(e) = unify_with_env(&left_type, &right_type, &self.env) {
if let Err(e) = unify(&left_type, &right_type) {
self.errors.push(TypeError {
message: format!("Operands of '{}' must have same type: {}", op, e),
span,
@@ -1880,7 +1830,7 @@ impl TypeChecker {
BinaryOp::Eq | BinaryOp::Ne => {
// Equality: operands must have same type
if let Err(e) = unify_with_env(&left_type, &right_type, &self.env) {
if let Err(e) = unify(&left_type, &right_type) {
self.errors.push(TypeError {
message: format!("Operands of '{}' must have same type: {}", op, e),
span,
@@ -1891,7 +1841,7 @@ impl TypeChecker {
BinaryOp::Lt | BinaryOp::Le | BinaryOp::Gt | BinaryOp::Ge => {
// Comparison: operands must be same orderable type
if let Err(e) = unify_with_env(&left_type, &right_type, &self.env) {
if let Err(e) = unify(&left_type, &right_type) {
self.errors.push(TypeError {
message: format!("Operands of '{}' must have same type: {}", op, e),
span,
@@ -1902,13 +1852,13 @@ impl TypeChecker {
BinaryOp::And | BinaryOp::Or => {
// Logical: both must be Bool
if let Err(e) = unify_with_env(&left_type, &Type::Bool, &self.env) {
if let Err(e) = unify(&left_type, &Type::Bool) {
self.errors.push(TypeError {
message: format!("Left operand of '{}' must be Bool: {}", op, e),
span: left.span(),
});
}
if let Err(e) = unify_with_env(&right_type, &Type::Bool, &self.env) {
if let Err(e) = unify(&right_type, &Type::Bool) {
self.errors.push(TypeError {
message: format!("Right operand of '{}' must be Bool: {}", op, e),
span: right.span(),
@@ -1922,7 +1872,7 @@ impl TypeChecker {
// right must be a function that accepts left's type
let result_type = Type::var();
let expected_fn = Type::function(vec![left_type.clone()], result_type.clone());
if let Err(e) = unify_with_env(&right_type, &expected_fn, &self.env) {
if let Err(e) = unify(&right_type, &expected_fn) {
self.errors.push(TypeError {
message: format!(
"Pipe target must be a function accepting {}: {}",
@@ -1954,7 +1904,7 @@ impl TypeChecker {
}
},
UnaryOp::Not => {
if let Err(e) = unify_with_env(&operand_type, &Type::Bool, &self.env) {
if let Err(e) = unify(&operand_type, &Type::Bool) {
self.errors.push(TypeError {
message: format!("Operator '!' requires Bool operand: {}", e),
span,
@@ -1969,17 +1919,6 @@ impl TypeChecker {
let func_type = self.infer_expr(func);
let arg_types: Vec<Type> = args.iter().map(|a| self.infer_expr(a)).collect();
// Propagate effects from callback arguments to enclosing scope
for arg_type in &arg_types {
if let Type::Function { effects, .. } = arg_type {
for effect in &effects.effects {
if self.inferring_effects {
self.inferred_effects.insert(effect.clone());
}
}
}
}
// Check property constraints from where clauses
if let Expr::Var(func_id) = func {
if let Some(constraints) = self.property_constraints.get(&func_id.name).cloned() {
@@ -2016,7 +1955,7 @@ impl TypeChecker {
self.current_effects.clone(),
);
match unify_with_env(&func_type, &expected_fn, &self.env) {
match unify(&func_type, &expected_fn) {
Ok(subst) => result_type.apply(&subst),
Err(e) => {
// Provide more detailed error message based on the type of mismatch
@@ -2090,22 +2029,10 @@ impl TypeChecker {
if let Some((_, field_type)) = fields.iter().find(|(n, _)| n == &operation.name) {
// It's a function call on a module field
let arg_types: Vec<Type> = args.iter().map(|a| self.infer_expr(a)).collect();
// Propagate effects from callback arguments to enclosing scope
for arg_type in &arg_types {
if let Type::Function { effects, .. } = arg_type {
for effect in &effects.effects {
if self.inferring_effects {
self.inferred_effects.insert(effect.clone());
}
}
}
}
let result_type = Type::var();
let expected_fn = Type::function(arg_types, result_type.clone());
if let Err(e) = unify_with_env(field_type, &expected_fn, &self.env) {
if let Err(e) = unify(field_type, &expected_fn) {
self.errors.push(TypeError {
message: format!(
"Type mismatch in {}.{} call: {}",
@@ -2161,17 +2088,6 @@ impl TypeChecker {
// Check argument types
let arg_types: Vec<Type> = args.iter().map(|a| self.infer_expr(a)).collect();
// Propagate effects from callback arguments to enclosing scope
for arg_type in &arg_types {
if let Type::Function { effects, .. } = arg_type {
for effect in &effects.effects {
if self.inferring_effects {
self.inferred_effects.insert(effect.clone());
}
}
}
}
if arg_types.len() != op.params.len() {
self.errors.push(TypeError {
message: format!(
@@ -2188,7 +2104,7 @@ impl TypeChecker {
for (i, (arg_type, (_, param_type))) in
arg_types.iter().zip(op.params.iter()).enumerate()
{
if let Err(e) = unify_with_env(arg_type, param_type, &self.env) {
if let Err(e) = unify(arg_type, param_type) {
self.errors.push(TypeError {
message: format!(
"Argument {} of '{}.{}' has type {}, expected {}: {}",
@@ -2221,7 +2137,6 @@ impl TypeChecker {
fn infer_field(&mut self, object: &Expr, field: &Ident, span: Span) -> Type {
let object_type = self.infer_expr(object);
let object_type = self.env.expand_type_alias(&object_type);
match &object_type {
Type::Record(fields) => match fields.iter().find(|(n, _)| n == &field.name) {
@@ -2302,7 +2217,7 @@ impl TypeChecker {
// Check return type if specified
let ret_type = if let Some(rt) = return_type {
let declared = self.resolve_type(rt);
if let Err(e) = unify_with_env(&body_type, &declared, &self.env) {
if let Err(e) = unify(&body_type, &declared) {
self.errors.push(TypeError {
message: format!(
"Lambda body type {} doesn't match declared {}: {}",
@@ -2368,7 +2283,7 @@ impl TypeChecker {
span: Span,
) -> Type {
let cond_type = self.infer_expr(condition);
if let Err(e) = unify_with_env(&cond_type, &Type::Bool, &self.env) {
if let Err(e) = unify(&cond_type, &Type::Bool) {
self.errors.push(TypeError {
message: format!("If condition must be Bool, got {}: {}", cond_type, e),
span: condition.span(),
@@ -2378,7 +2293,7 @@ impl TypeChecker {
let then_type = self.infer_expr(then_branch);
let else_type = self.infer_expr(else_branch);
match unify_with_env(&then_type, &else_type, &self.env) {
match unify(&then_type, &else_type) {
Ok(subst) => then_type.apply(&subst),
Err(e) => {
self.errors.push(TypeError {
@@ -2419,7 +2334,7 @@ impl TypeChecker {
// Check guard if present
if let Some(ref guard) = arm.guard {
let guard_type = self.infer_expr(guard);
if let Err(e) = unify_with_env(&guard_type, &Type::Bool, &self.env) {
if let Err(e) = unify(&guard_type, &Type::Bool) {
self.errors.push(TypeError {
message: format!("Match guard must be Bool: {}", e),
span: guard.span(),
@@ -2435,7 +2350,7 @@ impl TypeChecker {
match &result_type {
None => result_type = Some(body_type),
Some(prev) => {
if let Err(e) = unify_with_env(prev, &body_type, &self.env) {
if let Err(e) = unify(prev, &body_type) {
self.errors.push(TypeError {
message: format!(
"Match arm has incompatible type: expected {}, got {}: {}",
@@ -2485,7 +2400,7 @@ impl TypeChecker {
Pattern::Literal(lit) => {
let lit_type = self.infer_literal(lit);
if let Err(e) = unify_with_env(&lit_type, expected, &self.env) {
if let Err(e) = unify(&lit_type, expected) {
self.errors.push(TypeError {
message: format!("Pattern literal type mismatch: {}", e),
span: lit.span,
@@ -2494,12 +2409,12 @@ impl TypeChecker {
Vec::new()
}
Pattern::Constructor { name, fields, span, .. } => {
Pattern::Constructor { name, fields, span } => {
// Look up constructor
// For now, handle Option specially
match name.name.as_str() {
"None" => {
if let Err(e) = unify_with_env(expected, &Type::Option(Box::new(Type::var())), &self.env) {
if let Err(e) = unify(expected, &Type::Option(Box::new(Type::var()))) {
self.errors.push(TypeError {
message: format!(
"None pattern doesn't match type {}: {}",
@@ -2512,7 +2427,7 @@ impl TypeChecker {
}
"Some" => {
let inner_type = Type::var();
if let Err(e) = unify_with_env(expected, &Type::Option(Box::new(inner_type.clone())), &self.env)
if let Err(e) = unify(expected, &Type::Option(Box::new(inner_type.clone())))
{
self.errors.push(TypeError {
message: format!(
@@ -2541,7 +2456,7 @@ impl TypeChecker {
Pattern::Tuple { elements, span } => {
let element_types: Vec<Type> = elements.iter().map(|_| Type::var()).collect();
if let Err(e) = unify_with_env(expected, &Type::Tuple(element_types.clone()), &self.env) {
if let Err(e) = unify(expected, &Type::Tuple(element_types.clone())) {
self.errors.push(TypeError {
message: format!("Tuple pattern doesn't match type {}: {}", expected, e),
span: *span,
@@ -2591,7 +2506,7 @@ impl TypeChecker {
if let Some(type_expr) = typ {
let declared = self.resolve_type(type_expr);
if let Err(e) = unify_with_env(&value_type, &declared, &self.env) {
if let Err(e) = unify(&value_type, &declared) {
self.errors.push(TypeError {
message: format!(
"Variable '{}' has type {}, but declared type is {}: {}",
@@ -2612,47 +2527,12 @@ impl TypeChecker {
self.infer_expr(result)
}
fn infer_record(
&mut self,
spread: Option<&Expr>,
fields: &[(Ident, Expr)],
span: Span,
) -> Type {
// Start with spread fields if present
let mut field_types: Vec<(String, Type)> = if let Some(spread_expr) = spread {
let spread_type = self.infer_expr(spread_expr);
let spread_type = self.env.expand_type_alias(&spread_type);
match spread_type {
Type::Record(spread_fields) => spread_fields,
_ => {
self.errors.push(TypeError {
message: format!(
"Spread expression must be a record type, got {}",
spread_type
),
span,
});
Vec::new()
}
}
} else {
Vec::new()
};
// Apply explicit field overrides
let explicit_types: Vec<(String, Type)> = fields
fn infer_record(&mut self, fields: &[(Ident, Expr)], _span: Span) -> Type {
let field_types: Vec<(String, Type)> = fields
.iter()
.map(|(name, expr)| (name.name.clone(), self.infer_expr(expr)))
.collect();
for (name, typ) in explicit_types {
if let Some(existing) = field_types.iter_mut().find(|(n, _)| n == &name) {
existing.1 = typ;
} else {
field_types.push((name, typ));
}
}
Type::Record(field_types)
}
@@ -2669,7 +2549,7 @@ impl TypeChecker {
let first_type = self.infer_expr(&elements[0]);
for elem in &elements[1..] {
let elem_type = self.infer_expr(elem);
if let Err(e) = unify_with_env(&first_type, &elem_type, &self.env) {
if let Err(e) = unify(&first_type, &elem_type) {
self.errors.push(TypeError {
message: format!("List elements must have same type: {}", e),
span,
@@ -2975,7 +2855,7 @@ impl TypeChecker {
// Check return type matches if specified
if let Some(ref return_type_expr) = impl_method.return_type {
let return_type = self.resolve_type(return_type_expr);
if let Err(e) = unify_with_env(&body_type, &return_type, &self.env) {
if let Err(e) = unify(&body_type, &return_type) {
self.errors.push(TypeError {
message: format!(
"Method '{}' body has type {}, but declared return type is {}: {}",
@@ -3018,9 +2898,6 @@ impl TypeChecker {
"Option" if resolved_args.len() == 1 => {
return Type::Option(Box::new(resolved_args[0].clone()));
}
"Map" if resolved_args.len() == 2 => {
return Type::Map(Box::new(resolved_args[0].clone()), Box::new(resolved_args[1].clone()));
}
_ => {}
}
}

View File

@@ -47,8 +47,6 @@ pub enum Type {
List(Box<Type>),
/// Option type (sugar for App(Option, [T]))
Option(Box<Type>),
/// Map type (sugar for App(Map, [K, V]))
Map(Box<Type>, Box<Type>),
/// Versioned type (e.g., User @v2)
Versioned {
base: Box<Type>,
@@ -121,7 +119,6 @@ impl Type {
Type::Tuple(elements) => elements.iter().any(|e| e.contains_var(var)),
Type::Record(fields) => fields.iter().any(|(_, t)| t.contains_var(var)),
Type::List(inner) | Type::Option(inner) => inner.contains_var(var),
Type::Map(k, v) => k.contains_var(var) || v.contains_var(var),
Type::Versioned { base, .. } => base.contains_var(var),
_ => false,
}
@@ -161,7 +158,6 @@ impl Type {
),
Type::List(inner) => Type::List(Box::new(inner.apply(subst))),
Type::Option(inner) => Type::Option(Box::new(inner.apply(subst))),
Type::Map(k, v) => Type::Map(Box::new(k.apply(subst)), Box::new(v.apply(subst))),
Type::Versioned { base, version } => Type::Versioned {
base: Box::new(base.apply(subst)),
version: version.clone(),
@@ -212,11 +208,6 @@ impl Type {
vars
}
Type::List(inner) | Type::Option(inner) => inner.free_vars(),
Type::Map(k, v) => {
let mut vars = k.free_vars();
vars.extend(v.free_vars());
vars
}
Type::Versioned { base, .. } => base.free_vars(),
_ => HashSet::new(),
}
@@ -288,7 +279,6 @@ impl fmt::Display for Type {
}
Type::List(inner) => write!(f, "List<{}>", inner),
Type::Option(inner) => write!(f, "Option<{}>", inner),
Type::Map(k, v) => write!(f, "Map<{}, {}>", k, v),
Type::Versioned { base, version } => {
write!(f, "{} {}", base, version)
}
@@ -956,46 +946,6 @@ impl TypeEnv {
params: vec![("path".to_string(), Type::String)],
return_type: Type::Unit,
},
EffectOpDef {
name: "copy".to_string(),
params: vec![
("source".to_string(), Type::String),
("dest".to_string(), Type::String),
],
return_type: Type::Unit,
},
EffectOpDef {
name: "glob".to_string(),
params: vec![("pattern".to_string(), Type::String)],
return_type: Type::List(Box::new(Type::String)),
},
EffectOpDef {
name: "tryRead".to_string(),
params: vec![("path".to_string(), Type::String)],
return_type: Type::App {
constructor: Box::new(Type::Named("Result".to_string())),
args: vec![Type::String, Type::String],
},
},
EffectOpDef {
name: "tryWrite".to_string(),
params: vec![
("path".to_string(), Type::String),
("content".to_string(), Type::String),
],
return_type: Type::App {
constructor: Box::new(Type::Named("Result".to_string())),
args: vec![Type::Unit, Type::String],
},
},
EffectOpDef {
name: "tryDelete".to_string(),
params: vec![("path".to_string(), Type::String)],
return_type: Type::App {
constructor: Box::new(Type::Named("Result".to_string())),
args: vec![Type::Unit, Type::String],
},
},
],
},
);
@@ -1539,16 +1489,6 @@ impl TypeEnv {
Type::Option(Box::new(Type::var())),
),
),
(
"findIndex".to_string(),
Type::function(
vec![
Type::List(Box::new(Type::var())),
Type::function(vec![Type::var()], Type::Bool),
],
Type::Option(Box::new(Type::Int)),
),
),
(
"any".to_string(),
Type::function(
@@ -1593,50 +1533,6 @@ impl TypeEnv {
Type::Unit,
),
),
(
"sort".to_string(),
Type::function(
vec![Type::List(Box::new(Type::var()))],
Type::List(Box::new(Type::var())),
),
),
(
"sortBy".to_string(),
{
let elem = Type::var();
Type::function(
vec![
Type::List(Box::new(elem.clone())),
Type::function(vec![elem.clone(), elem], Type::Int),
],
Type::List(Box::new(Type::var())),
)
},
),
(
"zip".to_string(),
Type::function(
vec![
Type::List(Box::new(Type::var())),
Type::List(Box::new(Type::var())),
],
Type::List(Box::new(Type::Tuple(vec![Type::var(), Type::var()]))),
),
),
(
"flatten".to_string(),
Type::function(
vec![Type::List(Box::new(Type::List(Box::new(Type::var()))))],
Type::List(Box::new(Type::var())),
),
),
(
"contains".to_string(),
Type::function(
vec![Type::List(Box::new(Type::var())), Type::var()],
Type::Bool,
),
),
]);
env.bind("List", TypeScheme::mono(list_module_type));
@@ -1879,73 +1775,6 @@ impl TypeEnv {
]);
env.bind("Option", TypeScheme::mono(option_module_type));
// Map module
let map_v = || Type::var();
let map_type = || Type::Map(Box::new(Type::String), Box::new(Type::var()));
let map_module_type = Type::Record(vec![
(
"new".to_string(),
Type::function(vec![], map_type()),
),
(
"set".to_string(),
Type::function(
vec![map_type(), Type::String, map_v()],
map_type(),
),
),
(
"get".to_string(),
Type::function(
vec![map_type(), Type::String],
Type::Option(Box::new(map_v())),
),
),
(
"contains".to_string(),
Type::function(vec![map_type(), Type::String], Type::Bool),
),
(
"remove".to_string(),
Type::function(vec![map_type(), Type::String], map_type()),
),
(
"keys".to_string(),
Type::function(vec![map_type()], Type::List(Box::new(Type::String))),
),
(
"values".to_string(),
Type::function(vec![map_type()], Type::List(Box::new(map_v()))),
),
(
"size".to_string(),
Type::function(vec![map_type()], Type::Int),
),
(
"isEmpty".to_string(),
Type::function(vec![map_type()], Type::Bool),
),
(
"fromList".to_string(),
Type::function(
vec![Type::List(Box::new(Type::Tuple(vec![Type::String, map_v()])))],
map_type(),
),
),
(
"toList".to_string(),
Type::function(
vec![map_type()],
Type::List(Box::new(Type::Tuple(vec![Type::String, map_v()]))),
),
),
(
"merge".to_string(),
Type::function(vec![map_type(), map_type()], map_type()),
),
]);
env.bind("Map", TypeScheme::mono(map_module_type));
// Result module
let result_type = Type::App {
constructor: Box::new(Type::Named("Result".to_string())),
@@ -2058,18 +1887,6 @@ impl TypeEnv {
"round".to_string(),
Type::function(vec![Type::var()], Type::Int),
),
(
"sin".to_string(),
Type::function(vec![Type::Float], Type::Float),
),
(
"cos".to_string(),
Type::function(vec![Type::Float], Type::Float),
),
(
"atan2".to_string(),
Type::function(vec![Type::Float, Type::Float], Type::Float),
),
]);
env.bind("Math", TypeScheme::mono(math_module_type));
@@ -2079,10 +1896,6 @@ impl TypeEnv {
"toString".to_string(),
Type::function(vec![Type::Int], Type::String),
),
(
"toFloat".to_string(),
Type::function(vec![Type::Int], Type::Float),
),
]);
env.bind("Int", TypeScheme::mono(int_module_type));
@@ -2092,10 +1905,6 @@ impl TypeEnv {
"toString".to_string(),
Type::function(vec![Type::Float], Type::String),
),
(
"toInt".to_string(),
Type::function(vec![Type::Float], Type::Int),
),
]);
env.bind("Float", TypeScheme::mono(float_module_type));
@@ -2182,9 +1991,6 @@ impl TypeEnv {
Type::Option(inner) => {
Type::Option(Box::new(self.expand_type_alias(inner)))
}
Type::Map(k, v) => {
Type::Map(Box::new(self.expand_type_alias(k)), Box::new(self.expand_type_alias(v)))
}
Type::Versioned { base, version } => {
Type::Versioned {
base: Box::new(self.expand_type_alias(base)),
@@ -2345,13 +2151,6 @@ pub fn unify(t1: &Type, t2: &Type) -> Result<Substitution, String> {
// Option
(Type::Option(a), Type::Option(b)) => unify(a, b),
// Map
(Type::Map(k1, v1), Type::Map(k2, v2)) => {
let s1 = unify(k1, k2)?;
let s2 = unify(&v1.apply(&s1), &v2.apply(&s1))?;
Ok(s1.compose(&s2))
}
// Versioned types
(
Type::Versioned {

View File

@@ -14,7 +14,6 @@
pub type Html<M> =
| Element(String, List<Attr<M>>, List<Html<M>>)
| Text(String)
| RawHtml(String)
| Empty
// Attributes that can be applied to elements
@@ -42,7 +41,6 @@ pub type Attr<M> =
| OnKeyDown(fn(String): M)
| OnKeyUp(fn(String): M)
| DataAttr(String, String)
| Attribute(String, String)
// ============================================================================
// Element builders - Container elements
@@ -182,28 +180,6 @@ pub fn video<M>(attrs: List<Attr<M>>, children: List<Html<M>>): Html<M> =
pub fn audio<M>(attrs: List<Attr<M>>, children: List<Html<M>>): Html<M> =
Element("audio", attrs, children)
// ============================================================================
// Element builders - Document / Head elements
// ============================================================================
pub fn meta<M>(attrs: List<Attr<M>>): Html<M> =
Element("meta", attrs, [])
pub fn link<M>(attrs: List<Attr<M>>): Html<M> =
Element("link", attrs, [])
pub fn script<M>(attrs: List<Attr<M>>, children: List<Html<M>>): Html<M> =
Element("script", attrs, children)
pub fn iframe<M>(attrs: List<Attr<M>>, children: List<Html<M>>): Html<M> =
Element("iframe", attrs, children)
pub fn figure<M>(attrs: List<Attr<M>>, children: List<Html<M>>): Html<M> =
Element("figure", attrs, children)
pub fn figcaption<M>(attrs: List<Attr<M>>, children: List<Html<M>>): Html<M> =
Element("figcaption", attrs, children)
// ============================================================================
// Element builders - Tables
// ============================================================================
@@ -309,12 +285,6 @@ pub fn onKeyUp<M>(h: fn(String): M): Attr<M> =
pub fn data<M>(name: String, value: String): Attr<M> =
DataAttr(name, value)
pub fn attr<M>(name: String, value: String): Attr<M> =
Attribute(name, value)
pub fn rawHtml<M>(content: String): Html<M> =
RawHtml(content)
// ============================================================================
// Utility functions
// ============================================================================
@@ -349,7 +319,6 @@ pub fn renderAttr<M>(attr: Attr<M>): String =
Checked(false) => "",
Name(n) => " name=\"" + n + "\"",
DataAttr(name, value) => " data-" + name + "=\"" + value + "\"",
Attribute(name, value) => " " + name + "=\"" + value + "\"",
// Event handlers are ignored in static rendering
OnClick(_) => "",
OnInput(_) => "",
@@ -386,7 +355,6 @@ pub fn render<M>(html: Html<M>): String =
}
},
Text(content) => escapeHtml(content),
RawHtml(content) => content,
Empty => ""
}
@@ -400,47 +368,15 @@ pub fn escapeHtml(s: String): String = {
s4
}
// Render a full HTML document (basic)
// Render a full HTML document
pub fn document(title: String, headExtra: List<Html<M>>, bodyContent: List<Html<M>>): String = {
let headElements = List.concat([
[Element("meta", [Attribute("charset", "UTF-8")], [])],
[Element("meta", [Name("viewport"), Attribute("content", "width=device-width, initial-scale=1.0")], [])],
[Element("meta", [DataAttr("charset", "UTF-8")], [])],
[Element("meta", [Name("viewport"), Value("width=device-width, initial-scale=1.0")], [])],
[Element("title", [], [Text(title)])],
headExtra
])
let doc = Element("html", [Attribute("lang", "en")], [
Element("head", [], headElements),
Element("body", [], bodyContent)
])
"<!DOCTYPE html>\n" + render(doc)
}
// Render a full HTML document with SEO meta tags
pub fn seoDocument(
title: String,
description: String,
url: String,
ogImage: String,
headExtra: List<Html<M>>,
bodyContent: List<Html<M>>
): String = {
let headElements = List.concat([
[Element("meta", [Attribute("charset", "UTF-8")], [])],
[Element("meta", [Name("viewport"), Attribute("content", "width=device-width, initial-scale=1.0")], [])],
[Element("title", [], [Text(title)])],
[Element("meta", [Name("description"), Attribute("content", description)], [])],
[Element("meta", [Attribute("property", "og:title"), Attribute("content", title)], [])],
[Element("meta", [Attribute("property", "og:description"), Attribute("content", description)], [])],
[Element("meta", [Attribute("property", "og:type"), Attribute("content", "website")], [])],
[Element("meta", [Attribute("property", "og:url"), Attribute("content", url)], [])],
[Element("meta", [Attribute("property", "og:image"), Attribute("content", ogImage)], [])],
[Element("meta", [Name("twitter:card"), Attribute("content", "summary_large_image")], [])],
[Element("meta", [Name("twitter:title"), Attribute("content", title)], [])],
[Element("meta", [Name("twitter:description"), Attribute("content", description)], [])],
[Element("link", [Attribute("rel", "canonical"), Href(url)], [])],
headExtra
])
let doc = Element("html", [Attribute("lang", "en")], [
let doc = Element("html", [DataAttr("lang", "en")], [
Element("head", [], headElements),
Element("body", [], bodyContent)
])

View File

@@ -625,41 +625,6 @@ pub fn router(routes: List<Route>, notFound: fn(Request): Response): Handler =
}
}
// ============================================================
// Static File Serving
// ============================================================
// Serve a static file from disk
pub fn serveStaticFile(basePath: String, requestPath: String): Response with {File} = {
let filePath = basePath + requestPath
if File.exists(filePath) then {
let content = File.read(filePath)
let mime = getMimeType(filePath)
{ status: 200, headers: [("Content-Type", mime)], body: content }
} else
{ status: 404, headers: textHeaders(), body: "Not Found" }
}
// ============================================================
// Form Body Parsing
// ============================================================
// Parse URL-encoded form body (same format as query strings)
pub fn parseFormBody(body: String): List<(String, String)> =
parseQueryParams(body)
// Get a form field value by name
pub fn getFormField(fields: List<(String, String)>, name: String): Option<String> =
getParam(fields, name)
// ============================================================
// Response Helpers
// ============================================================
// Send a Response using HttpServer effect (convenience wrapper)
pub fn sendResponse(resp: Response): Unit with {HttpServer} =
HttpServer.respondWithHeaders(resp.status, resp.body, resp.headers)
// ============================================================
// Example Usage
// ============================================================