This commit is contained in:
2026-02-13 02:57:01 -05:00
commit 15e5ccb064
23 changed files with 11899 additions and 0 deletions

2
.gitignore vendored Normal file
View File

@@ -0,0 +1,2 @@
/target
/result

670
Cargo.lock generated Normal file
View File

@@ -0,0 +1,670 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4
[[package]]
name = "anyhow"
version = "1.0.101"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f0e0fee31ef5ed1ba1316088939cea399010ed7731dba877ed44aeb407a75ea"
[[package]]
name = "bitflags"
version = "2.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3"
[[package]]
name = "cfg-if"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801"
[[package]]
name = "cfg_aliases"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e"
[[package]]
name = "clipboard-win"
version = "5.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bde03770d3df201d4fb868f2c9c59e66a3e4e2bd06692a0fe701e7103c7e84d4"
dependencies = [
"error-code",
]
[[package]]
name = "endian-type"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c34f04666d835ff5d62e058c3995147c06f42fe86ff053337632bca83e42702d"
[[package]]
name = "equivalent"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
[[package]]
name = "errno"
version = "0.3.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb"
dependencies = [
"libc",
"windows-sys 0.61.2",
]
[[package]]
name = "error-code"
version = "3.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dea2df4cf52843e0452895c455a1a2cfbb842a1e7329671acf418fdc53ed4c59"
[[package]]
name = "fastrand"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "fd-lock"
version = "4.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ce92ff622d6dadf7349484f42c93271a0d49b7cc4d466a936405bacbe10aa78"
dependencies = [
"cfg-if",
"rustix",
"windows-sys 0.59.0",
]
[[package]]
name = "foldhash"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
[[package]]
name = "getrandom"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "139ef39800118c7683f2fd3c98c1b23c09ae076556b435f8e9064ae108aaeeec"
dependencies = [
"cfg-if",
"libc",
"r-efi",
"wasip2",
"wasip3",
]
[[package]]
name = "hashbrown"
version = "0.15.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1"
dependencies = [
"foldhash",
]
[[package]]
name = "hashbrown"
version = "0.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
[[package]]
name = "heck"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
[[package]]
name = "home"
version = "0.5.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc627f471c528ff0c4a49e1d5e60450c8f6461dd6d10ba9dcd3a61d3dff7728d"
dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "id-arena"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954"
[[package]]
name = "indexmap"
version = "2.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017"
dependencies = [
"equivalent",
"hashbrown 0.16.1",
"serde",
"serde_core",
]
[[package]]
name = "itoa"
version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2"
[[package]]
name = "leb128fmt"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2"
[[package]]
name = "libc"
version = "0.2.181"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "459427e2af2b9c839b132acb702a1c654d95e10f8c326bfc2ad11310e458b1c5"
[[package]]
name = "linux-raw-sys"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039"
[[package]]
name = "log"
version = "0.4.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897"
[[package]]
name = "lux"
version = "0.1.0"
dependencies = [
"rustyline",
"tempfile",
"thiserror",
]
[[package]]
name = "memchr"
version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79"
[[package]]
name = "nibble_vec"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77a5d83df9f36fe23f0c3648c6bbb8b0298bb5f1939c8f2704431371f4b84d43"
dependencies = [
"smallvec",
]
[[package]]
name = "nix"
version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4"
dependencies = [
"bitflags",
"cfg-if",
"cfg_aliases",
"libc",
]
[[package]]
name = "once_cell"
version = "1.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
[[package]]
name = "prettyplease"
version = "0.2.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b"
dependencies = [
"proc-macro2",
"syn",
]
[[package]]
name = "proc-macro2"
version = "1.0.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21b2ebcf727b7760c461f091f9f0f539b77b8e87f2fd88131e7f1b433b3cece4"
dependencies = [
"proc-macro2",
]
[[package]]
name = "r-efi"
version = "5.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
[[package]]
name = "radix_trie"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c069c179fcdc6a2fe24d8d18305cf085fdbd4f922c041943e203685d6a1c58fd"
dependencies = [
"endian-type",
"nibble_vec",
]
[[package]]
name = "rustix"
version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34"
dependencies = [
"bitflags",
"errno",
"libc",
"linux-raw-sys",
"windows-sys 0.61.2",
]
[[package]]
name = "rustyline"
version = "14.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7803e8936da37efd9b6d4478277f4b2b9bb5cdb37a113e8d63222e58da647e63"
dependencies = [
"bitflags",
"cfg-if",
"clipboard-win",
"fd-lock",
"home",
"libc",
"log",
"memchr",
"nix",
"radix_trie",
"unicode-segmentation",
"unicode-width",
"utf8parse",
"windows-sys 0.52.0",
]
[[package]]
name = "semver"
version = "1.0.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2"
[[package]]
name = "serde"
version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
dependencies = [
"serde_core",
]
[[package]]
name = "serde_core"
version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "serde_json"
version = "1.0.149"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86"
dependencies = [
"itoa",
"memchr",
"serde",
"serde_core",
"zmij",
]
[[package]]
name = "smallvec"
version = "1.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
[[package]]
name = "syn"
version = "2.0.115"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e614ed320ac28113fa64972c4262d5dbc89deacdfd00c34a3e4cea073243c12"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "tempfile"
version = "3.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0136791f7c95b1f6dd99f9cc786b91bb81c3800b639b3478e561ddb7be95e5f1"
dependencies = [
"fastrand",
"getrandom",
"once_cell",
"rustix",
"windows-sys 0.61.2",
]
[[package]]
name = "thiserror"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "unicode-ident"
version = "1.0.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "537dd038a89878be9b64dd4bd1b260315c1bb94f4d784956b81e27a088d9a09e"
[[package]]
name = "unicode-segmentation"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
[[package]]
name = "unicode-width"
version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af"
[[package]]
name = "unicode-xid"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853"
[[package]]
name = "utf8parse"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
[[package]]
name = "wasip2"
version = "1.0.2+wasi-0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5"
dependencies = [
"wit-bindgen",
]
[[package]]
name = "wasip3"
version = "0.4.0+wasi-0.3.0-rc-2026-01-06"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5"
dependencies = [
"wit-bindgen",
]
[[package]]
name = "wasm-encoder"
version = "0.244.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319"
dependencies = [
"leb128fmt",
"wasmparser",
]
[[package]]
name = "wasm-metadata"
version = "0.244.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909"
dependencies = [
"anyhow",
"indexmap",
"wasm-encoder",
"wasmparser",
]
[[package]]
name = "wasmparser"
version = "0.244.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe"
dependencies = [
"bitflags",
"hashbrown 0.15.5",
"indexmap",
"semver",
]
[[package]]
name = "windows-link"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
[[package]]
name = "windows-sys"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
dependencies = [
"windows-targets",
]
[[package]]
name = "windows-sys"
version = "0.59.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
dependencies = [
"windows-targets",
]
[[package]]
name = "windows-sys"
version = "0.61.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc"
dependencies = [
"windows-link",
]
[[package]]
name = "windows-targets"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_gnullvm",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
[[package]]
name = "windows_i686_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
[[package]]
name = "windows_i686_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
[[package]]
name = "windows_i686_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
name = "wit-bindgen"
version = "0.51.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5"
dependencies = [
"wit-bindgen-rust-macro",
]
[[package]]
name = "wit-bindgen-core"
version = "0.51.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc"
dependencies = [
"anyhow",
"heck",
"wit-parser",
]
[[package]]
name = "wit-bindgen-rust"
version = "0.51.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21"
dependencies = [
"anyhow",
"heck",
"indexmap",
"prettyplease",
"syn",
"wasm-metadata",
"wit-bindgen-core",
"wit-component",
]
[[package]]
name = "wit-bindgen-rust-macro"
version = "0.51.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a"
dependencies = [
"anyhow",
"prettyplease",
"proc-macro2",
"quote",
"syn",
"wit-bindgen-core",
"wit-bindgen-rust",
]
[[package]]
name = "wit-component"
version = "0.244.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2"
dependencies = [
"anyhow",
"bitflags",
"indexmap",
"log",
"serde",
"serde_derive",
"serde_json",
"wasm-encoder",
"wasm-metadata",
"wasmparser",
"wit-parser",
]
[[package]]
name = "wit-parser"
version = "0.244.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736"
dependencies = [
"anyhow",
"id-arena",
"indexmap",
"log",
"semver",
"serde",
"serde_derive",
"serde_json",
"unicode-xid",
"wasmparser",
]
[[package]]
name = "zmij"
version = "1.0.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa"

16
Cargo.toml Normal file
View File

@@ -0,0 +1,16 @@
[package]
name = "lux"
version = "0.1.0"
edition = "2021"
description = "A functional programming language with first-class effects, schema evolution, and behavioral types"
license = "MIT"
[dependencies]
rustyline = "14"
thiserror = "1"
[dev-dependencies]
tempfile = "3"
[profile.release]
lto = true

213
README.md Normal file
View File

@@ -0,0 +1,213 @@
# Lux
A functional programming language with first-class effects, schema evolution, and behavioral types.
## Vision
Most programming languages treat three critical concerns as afterthoughts:
1. **Effects** — What can this code do? (Hidden, untraceable, untestable)
2. **Data Evolution** — Types change, data persists. (Manual migrations, runtime failures)
3. **Behavioral Properties** — Is this idempotent? Does it terminate? (Comments and hope)
Lux makes these first-class language features. The compiler knows what your code does, how your data evolves, and what properties your functions guarantee.
## Core Principles
### 1. Effects Are Explicit and Composable
```lux
fn fetchUser(id: UserId): User with {Database, Http} =
let profile = Http.get("/users/{id}")
let prefs = Database.query(userPrefsQuery(id))
User.merge(profile, prefs)
-- Testing: swap real effects for mocks
test "fetchUser returns merged data" =
run fetchUser(testId) with {
Database = mockDb({ testId: testPrefs }),
Http = mockHttp({ "/users/{testId}": testProfile })
}
|> Assert.eq(expectedUser)
```
No hidden side effects. No dependency injection boilerplate. Effects are declared, handlers are swappable, composition just works.
### 2. Schema Evolution Is Built-In
```lux
type User @v1 {
name: String,
email: String
}
type User @v2 {
name: String,
email: String,
age: Option<Int> -- optional field: auto-compatible
}
type User @v3 {
fullName: String, -- renamed: requires migration
email: String,
age: Option<Int>,
from @v2 = { fullName: v2.name, ..v2 }
}
```
The compiler tracks compatibility. Breaking changes are compile errors. Migrations are code, not config.
### 3. Behavioral Types Are First-Class
```lux
fn retry<F, T>(action: F): Result<T, Error>
where F: fn(): T with {Fail},
where F is idempotent -- enforced!
=
match action() {
Ok(v) => Ok(v),
Err(_) => action() -- safe: we know it's idempotent
}
fn sort<T: Ord>(list: List<T>): List<T>
is pure,
is total,
where result.len == list.len,
where result.isSorted
```
Properties like `pure`, `total`, `idempotent`, `commutative` are part of the type system. The compiler proves what it can, tests what it can't.
## Example
```lux
-- Define an effect
effect Logger {
fn log(level: Level, msg: String): Unit
}
-- Define a versioned type
type Config @v1 {
host: String,
port: Int
}
type Config @v2 {
host: String,
port: Int,
timeout: Duration,
from @v1 = { timeout: Duration.seconds(30), ..v1 }
}
-- A function with explicit effects and properties
fn loadConfig(path: Path): Config @v2 with {FileSystem, Logger}
is total
=
Logger.log(Info, "Loading config from {path}")
let raw = FileSystem.read(path)
Config.parse(raw)
-- Run with handlers
fn main(): Unit with {Console} =
let config = run loadConfig("./config.json") with {
FileSystem = realFs,
Logger = consoleLogger
}
Console.print("Loaded: {config}")
```
## Status
**Current Phase: Prototype Implementation**
The interpreter is functional with:
- Core language (functions, closures, pattern matching)
- Effect system (declare effects, use operations, handle with handlers)
- Type checking with effect tracking
- REPL for interactive development
See:
- [SKILLS.md](./SKILLS.md) — Language specification and implementation roadmap
- [docs/VISION.md](./docs/VISION.md) — Problems Lux solves and development roadmap
- [docs/OVERVIEW.md](./docs/OVERVIEW.md) — Use cases, pros/cons, complexity analysis
## Design Goals
| Goal | Approach |
|------|----------|
| **Correctness by default** | Effects, schemas, and behaviors are compiler-checked |
| **Incremental adoption** | Start simple, add properties/versions as needed |
| **Zero-cost abstractions** | Effect handlers inline, versions compile away |
| **Practical, not academic** | Familiar syntax, clear errors, gradual verification |
## Non-Goals
- Not a systems language (no manual memory management)
- Not a scripting language (static types required)
- Not a proof assistant (verification is practical, not total)
## Building
Requires Rust 1.70+:
```bash
# Build the interpreter
cargo build --release
# Run the REPL
cargo run
# Run a file
cargo run -- examples/hello.lux
# Run tests
cargo test
```
## Examples
See the `examples/` directory:
- `hello.lux` — Hello World with effects
- `factorial.lux` — Recursive functions
- `effects.lux` — Custom effects and handlers
- `datatypes.lux` — ADTs and pattern matching
- `functional.lux` — Higher-order functions and pipes
### Quick REPL Session
```
$ cargo run
Lux v0.1.0
Type :help for help, :quit to exit
lux> let x = 42
lux> x * 2
84
lux> fn double(n: Int): Int = n * 2
lux> double(21)
42
lux> [1, 2, 3] |> List.reverse
[3, 2, 1]
lux> List.map([1, 2, 3], double)
[2, 4, 6]
lux> String.split("a,b,c", ",")
["a", "b", "c"]
lux> Some(42) |> Option.map(double)
Some(84)
lux> :quit
```
## Contributing
This project is in early design. Contributions welcome in:
- Language design discussions (open an issue)
- Syntax bikeshedding
- Semantic formalization
- Compiler implementation (once design stabilizes)
## License
MIT

734
SKILLS.md Normal file
View File

@@ -0,0 +1,734 @@
# Lux Language Skills & Implementation Plan
## Table of Contents
1. [Effect System](#1-effect-system)
2. [Schema Evolution](#2-schema-evolution)
3. [Behavioral Types](#3-behavioral-types)
4. [Type System Foundation](#4-type-system-foundation)
5. [Implementation Roadmap](#5-implementation-roadmap)
---
## 1. Effect System
### Overview
Effects make side effects explicit, trackable, and testable. Every function declares what it can do. Handlers interpret effects at runtime.
### Core Concepts
#### Effect Declarations
```lux
effect Http {
fn get(url: String): Response
fn post(url: String, body: Bytes): Response
}
effect Database {
fn query<T>(q: Query<T>): List<T>
fn execute(q: Command): Int
}
effect Logger {
fn log(level: Level, message: String): Unit
}
effect Async {
fn await<T>(future: Future<T>): T
fn spawn<T>(action: fn(): T): Future<T>
}
```
#### Effect Signatures
Functions declare their effects after `with`:
```lux
fn fetchUsers(): List<User> with {Database, Logger} =
Logger.log(Info, "Fetching users")
Database.query(selectAllUsers)
-- Multiple effects compose naturally
fn syncUsers(source: Url): Int with {Http, Database, Logger} =
let users = Http.get(source) |> parseUsers
Logger.log(Info, "Syncing {users.len} users")
users |> List.map(upsertUser) |> List.sum
```
#### Effect Handlers
Handlers provide implementations:
```lux
handler realHttp: Http {
fn get(url) = httpClientGet(url)
fn post(url, body) = httpClientPost(url, body)
}
handler mockHttp(responses: Map<String, Response>): Http {
fn get(url) = responses.get(url).unwrapOr(Response.notFound)
fn post(url, _) = responses.get(url).unwrapOr(Response.notFound)
}
handler postgresDb(conn: Connection): Database {
fn query(q) = conn.execute(q.toSql) |> parseRows
fn execute(q) = conn.execute(q.toSql)
}
```
#### Running Effects
```lux
fn main(): Unit with {Console} =
let users = run fetchUsers() with {
Database = postgresDb(openConnection()),
Logger = consoleLogger
}
Console.print("Found {users.len} users")
```
### Effect Features
#### Effect Polymorphism
Write code generic over effects:
```lux
fn withRetry<E, T>(action: fn(): T with E, attempts: Int): T with E =
match attempts {
0 => panic("Retry exhausted"),
n => try action() catch _ => withRetry(action, n - 1)
}
```
#### Effect Constraints
Require specific effects:
```lux
fn transactional<T>(action: fn(): T with {Database}): T with {Database} =
Database.execute(begin)
let result = try action() catch e => {
Database.execute(rollback)
throw e
}
Database.execute(commit)
result
```
#### Effect Inference
Effects can be inferred within function bodies, but signatures must be explicit:
```lux
fn helper() with {Logger} = -- explicit signature
let x = compute() -- effect-free, inferred
Logger.log(Debug, "x = {x}") -- Logger effect used
x
```
#### Built-in Effects
```lux
effect Fail {
fn fail<T>(error: Error): T -- early return / exceptions
}
effect State<S> {
fn get(): S
fn put(s: S): Unit
fn modify(f: fn(S): S): Unit
}
effect Reader<R> {
fn ask(): R
}
effect Random {
fn int(range: Range<Int>): Int
fn float(): Float
fn shuffle<T>(list: List<T>): List<T>
}
effect Time {
fn now(): Instant
fn sleep(duration: Duration): Unit
}
```
### Effect Semantics
- **Lexical handling**: Effects are handled at `run` boundaries
- **Order independence**: Multiple effects can be handled in any order
- **Resumable**: Handlers can resume computations (algebraic effect style)
- **Zero-cost goal**: Handlers inline when statically known
---
## 2. Schema Evolution
### Overview
Types change over time. Data persists. Lux tracks type versions and ensures compatibility at compile time.
### Core Concepts
#### Versioned Types
```lux
type User @v1 {
name: String,
email: String
}
type User @v2 {
name: String,
email: String,
createdAt: Timestamp
}
```
#### Compatibility Rules
**Auto-compatible changes** (no migration needed):
- Adding optional fields
- Adding fields with defaults
- Widening numeric types (Int32 -> Int64)
- Adding enum variants (for extensible enums)
**Breaking changes** (require explicit migration):
- Removing fields
- Renaming fields
- Changing field types
- Removing enum variants
```lux
type User @v3 {
fullName: String, -- renamed from 'name'
email: String,
createdAt: Timestamp,
-- Explicit migration required
from @v2 = {
fullName: v2.name,
email: v2.email,
createdAt: v2.createdAt
}
}
```
#### Migration Chains
Migrations compose automatically:
```lux
-- Reading @v1 data as @v3:
-- 1. @v1 -> @v2 (auto: createdAt gets default)
-- 2. @v2 -> @v3 (explicit: name -> fullName)
fn loadLegacyUser(data: Bytes): User @v3 =
Codec.decode<User>(data) -- handles any version
```
#### Version Constraints
```lux
-- Accept any version >= @v2
fn processUser(user: User @v2+): Unit = ...
-- Accept exactly @v3
fn processUserV3(user: User @v3): Unit = ...
-- Return latest version
fn createUser(name: String): User @latest = ...
```
### Schema Features
#### Serialization
```lux
-- Encode with version tag
let bytes = Codec.encode(user) -- includes version marker
-- Decode to specific version (migrates if needed)
let user: User @v3 = Codec.decode(bytes)
-- Decode to any compatible version
let user: User @v2+ = Codec.decode(bytes)
```
#### Database Integration
```lux
table users: User @v3 {
primaryKey: id,
index: [email]
}
-- Compiler generates migration SQL when version changes
-- Or errors if migration is ambiguous
```
#### API Versioning
```lux
endpoint getUser: GET "/users/{id}" -> User @v2
-- Later: update endpoint
endpoint getUser: GET "/users/{id}" -> User @v3
-- Compiler: "Breaking change for clients expecting @v2"
-- Must either:
-- 1. Keep old endpoint as getUser_v2
-- 2. Prove @v3 is wire-compatible with @v2
```
#### Compatibility Checking
```lux
-- Compile-time compatibility proof
assert User @v2 compatibleWith User @v1 -- passes
assert User @v3 compatibleWith User @v1 -- fails: breaking change
-- Generate compatibility report
lux schema diff User @v1 User @v3
-- Output:
-- - 'name' renamed to 'fullName' (breaking)
-- - 'createdAt' added with default (compatible)
```
### Schema Semantics
- **Versions are types**: `User @v1` and `User @v2` are distinct types
- **Migrations are functions**: `from @v1` is a function `@v1 -> @v2`
- **Compatibility is decidable**: Compiler checks all rules statically
- **Wire format is stable**: Version tag + canonical encoding
---
## 3. Behavioral Types
### Overview
Properties beyond input/output types. Express and verify behavioral guarantees like purity, totality, idempotency.
### Core Concepts
#### Built-in Properties
```lux
-- Purity: no effects
fn add(a: Int, b: Int): Int
is pure
= a + b
-- Totality: always terminates, no exceptions
fn safeDiv(a: Int, b: Int): Option<Int>
is total
= if b == 0 then None else Some(a / b)
-- Idempotency: f(f(x)) == f(x)
fn normalize(s: String): String
is idempotent
= s.trim.lowercase
-- Determinism: same inputs -> same outputs
fn hash(data: Bytes): Hash
is deterministic
```
#### Refinement Types
```lux
type PositiveInt = Int where self > 0
type NonEmptyList<T> = List<T> where self.len > 0
type Email = String where self.matches(emailRegex)
fn head<T>(list: NonEmptyList<T>): T
is total -- can't fail: list is non-empty
= list.unsafeHead
fn sqrt(n: PositiveInt): Float
is total -- can't fail: n is positive
```
#### Output Refinements
```lux
fn sort<T: Ord>(list: List<T>): List<T>
is pure,
is total,
where result.len == list.len,
where result.isSorted,
where result.isPermutationOf(list)
fn filter<T>(list: List<T>, pred: fn(T): Bool): List<T>
is pure,
is total,
where result.len <= list.len,
where result.all(pred)
```
#### Property Requirements
```lux
-- Require properties from function arguments
fn retry<F, T>(action: F, times: Int): Result<T, Error>
where F: fn(): T with {Fail},
where F is idempotent -- enforced at call site!
= ...
fn memoize<F, A, B>(f: F): fn(A): B with {Cache}
where F: fn(A): B,
where F is pure,
where F is deterministic
= ...
fn parallelize<F, T>(actions: List<F>): List<T> with {Async}
where F: fn(): T,
where F is commutative -- order-independent
= ...
```
### Verification Levels
#### Level 1: Compiler-Proven
Simple properties proven automatically:
```lux
fn double(x: Int): Int
is pure -- proven: no effects
= x * 2
fn always42(): Int
is total, -- proven: no recursion, no failure
is deterministic -- proven: no effects
= 42
```
#### Level 2: SMT-Backed
Refinements checked by SMT solver:
```lux
fn clamp(x: Int, lo: Int, hi: Int): Int
where lo <= hi,
where result >= lo,
where result <= hi
= if x < lo then lo else if x > hi then hi else x
-- SMT proves postconditions hold
```
#### Level 3: Property-Tested
Complex properties generate tests:
```lux
fn sort<T: Ord>(list: List<T>): List<T>
where result.isPermutationOf(list) -- too complex for SMT
-- Compiler generates: forall lists, sort(list).isPermutationOf(list)
-- Runs as property-based test
```
#### Level 4: Assumed
Escape hatch for unverifiable properties:
```lux
fn externalSort<T: Ord>(list: List<T>): List<T>
assume is idempotent -- trust me (FFI, etc.)
= ffiSort(list)
```
### Property Propagation
Properties flow through composition:
```lux
fn f(x: Int): Int is pure = x + 1
fn g(x: Int): Int is pure = x * 2
fn h(x: Int): Int is pure = f(g(x)) -- inferred pure
```
```lux
fn f(x: Int): Int is idempotent = x.abs
fn g(x: Int): Int is idempotent = x.abs -- same function
-- Composition of idempotent functions is idempotent IF they're the same
-- or if one is a fixpoint of the other. Otherwise, not guaranteed.
fn h(x: Int): Int = f(g(x)) -- NOT automatically idempotent
```
---
## 4. Type System Foundation
### Core Types
```lux
-- Primitives
Int, Int8, Int16, Int32, Int64
UInt, UInt8, UInt16, UInt32, UInt64
Float, Float32, Float64
Bool
Char
String
-- Collections
List<T>
Set<T>
Map<K, V>
Array<T> -- fixed size
-- Optionality
Option<T> = None | Some(T)
Result<T, E> = Ok(T) | Err(E)
-- Tuples
(A, B), (A, B, C), ...
-- Records
{ name: String, age: Int }
-- Functions
fn(A): B
fn(A, B): C
fn(A): B with {Effects}
```
### Algebraic Data Types
```lux
type Color = Red | Green | Blue
type Tree<T> =
| Leaf(T)
| Node(Tree<T>, Tree<T>)
type Result<T, E> =
| Ok(T)
| Err(E)
```
### Pattern Matching
```lux
fn describe(color: Color): String =
match color {
Red => "red",
Green => "green",
Blue => "blue"
}
fn sum(tree: Tree<Int>): Int =
match tree {
Leaf(n) => n,
Node(left, right) => sum(left) + sum(right)
}
```
### Type Classes / Traits
```lux
trait Eq {
fn eq(self, other: Self): Bool
}
trait Ord: Eq {
fn cmp(self, other: Self): Ordering
}
trait Show {
fn show(self): String
}
impl Eq for Int {
fn eq(self, other) = intEq(self, other)
}
```
### Row Polymorphism
```lux
-- Extensible records
fn getName(r: { name: String, ..rest }): String = r.name
-- Works with any record containing 'name'
getName({ name: "Alice", age: 30 })
getName({ name: "Bob", email: "bob@example.com" })
-- Extensible variants
type HttpError = { NotFound | Timeout | ..rest }
```
---
## 5. Implementation Roadmap
### Phase 0: Foundation
**Goal**: Minimal viable compiler
- [ ] Lexer and parser for core syntax
- [ ] AST representation
- [ ] Basic type checker (no effects, no versions, no properties)
- [ ] Interpreter for testing semantics
- [ ] REPL
**Deliverable**: Can type-check and interpret pure functional programs
```lux
fn fib(n: Int): Int =
if n <= 1 then n else fib(n-1) + fib(n-2)
```
### Phase 1: Effect System
**Goal**: First-class algebraic effects
- [ ] Effect declarations
- [ ] Effect signatures on functions
- [ ] Handler definitions
- [ ] `run ... with` syntax
- [ ] Effect inference within function bodies
- [ ] Effect polymorphism
- [ ] Built-in effects (Fail, State, etc.)
**Deliverable**: Can define, handle, and compose effects
```lux
effect Console { fn print(s: String): Unit }
fn greet(name: String): Unit with {Console} =
Console.print("Hello, {name}!")
fn main() =
run greet("World") with { Console = stdoutConsole }
```
### Phase 2: Code Generation
**Goal**: Compile to a real target
- [ ] IR design (effect-aware)
- [ ] Backend selection (LLVM, WASM, or JS)
- [ ] Effect handler compilation (CPS or evidence-passing)
- [ ] Optimization passes
- [ ] Runtime library
**Deliverable**: Compiled programs that run natively or in browser
### Phase 3: Schema Evolution
**Goal**: Versioned types with migrations
- [ ] Version annotations on types (`@v1`, `@v2`)
- [ ] Compatibility checker
- [ ] Migration syntax (`from @v1 = ...`)
- [ ] Migration chaining
- [ ] Codec generation
- [ ] Version constraints (`@v2+`, `@latest`)
**Deliverable**: Types with automatic serialization and migration
```lux
type Config @v1 { host: String }
type Config @v2 { host: String, port: Int, from @v1 = { port: 8080, ..v1 } }
let cfg: Config @v2 = Codec.decode(legacyBytes)
```
### Phase 4: Behavioral Types
**Goal**: Property specifications and verification
- [ ] Property syntax (`is pure`, `where result > 0`)
- [ ] Built-in properties (pure, total, idempotent, etc.)
- [ ] Refinement type checking
- [ ] SMT solver integration (Z3)
- [ ] Property-based test generation
- [ ] Property inference for simple cases
- [ ] `assume` escape hatch
**Deliverable**: Compile-time verification of behavioral properties
```lux
fn abs(x: Int): Int
is pure,
is total,
where result >= 0
= if x < 0 then -x else x
```
### Phase 5: Ecosystem
**Goal**: Usable for real projects
- [ ] Package manager
- [ ] Standard library
- [ ] LSP server (IDE support)
- [ ] Documentation generator
- [ ] REPL improvements
- [ ] Debugger
- [ ] Profiler
### Phase 6: Advanced Features
**Goal**: Full language vision
- [ ] Database effect with schema-aware queries
- [ ] HTTP effect with API versioning
- [ ] Incremental computation (bonus feature)
- [ ] Distributed effects (location-aware)
- [ ] Proof assistant mode (optional full verification)
---
## Open Design Questions
### Syntax
- [ ] Significant whitespace vs braces?
- [ ] Effect syntax: `with {E1, E2}` vs `!E1 + E2` vs `<E1, E2>`?
- [ ] Version syntax: `@v1` vs `v1` vs `#1`?
### Semantics
- [ ] Effect handler semantics: deep vs shallow handlers?
- [ ] Version compatibility: structural or nominal?
- [ ] Property verification: sound or best-effort?
### Pragmatics
- [ ] Primary compile target: native, WASM, JS?
- [ ] Interop story: FFI design?
- [ ] Gradual adoption: can you use Lux from other languages?
---
## References
### Effect Systems
- Koka language (Daan Leijen)
- Eff language (Matija Pretnar)
- "Algebraic Effects for Functional Programming" (Daan Leijen)
- Frank language (Sam Lindley)
### Schema Evolution
- Protocol Buffers / Protobuf
- Apache Avro
- "Schema Evolution in Heterogeneous Data Environments"
### Behavioral Types
- Liquid Haskell (refinement types)
- F* (dependent types + effects)
- Dafny (verification)
- "Refinement Types for Haskell" (Vazou et al.)
### General
- "Types and Programming Languages" (Pierce)
- "Practical Foundations for Programming Languages" (Harper)

301
docs/OVERVIEW.md Normal file
View File

@@ -0,0 +1,301 @@
# Lux Language Overview
## What is Lux?
Lux is a statically-typed functional programming language with **algebraic effects** as a first-class feature. It makes side effects explicit, trackable, and testable.
## What Can You Do With It?
### Currently Working
```lux
// Functions with type inference
fn factorial(n: Int): Int =
if n <= 1 then 1 else n * factorial(n - 1)
// Higher-order functions
fn apply(f: fn(Int): Int, x: Int): Int = f(x)
fn double(x: Int): Int = x * 2
let result = apply(double, 21) // 42
// Lambdas and closures
let add = fn(a: Int, b: Int): Int => a + b
let addFive = fn(x: Int): Int => add(5, x)
// Pattern matching
fn describe(n: Int): String =
match n {
0 => "zero",
1 => "one",
_ => "many"
}
// Records
let person = { name: "Alice", age: 30 }
let age = person.age
// Tuples
let point = (10, 20)
// Lists
let numbers = [1, 2, 3, 4, 5]
// Pipe operator
let result = 5 |> double |> addOne // (5 * 2) + 1 = 11
// Built-in effects (Console, Fail)
Console.print("Hello, world!")
// Custom effects
effect Logger {
fn log(level: String, msg: String): Unit
}
// Effect handlers
handler consoleLogger: Logger {
fn log(level, msg) = Console.print("[" + level + "] " + msg)
}
// Running with handlers
fn greet(name: String): Unit with {Logger} =
Logger.log("info", "Hello, " + name)
run greet("Alice") with { Logger = consoleLogger }
```
### Standard Library (Built-in)
```lux
// List operations
List.map([1, 2, 3], fn(x: Int): Int => x * 2) // [2, 4, 6]
List.filter([1, 2, 3, 4], fn(x: Int): Bool => x > 2) // [3, 4]
List.fold([1, 2, 3], 0, fn(acc: Int, x: Int): Int => acc + x) // 6
List.head([1, 2, 3]) // Some(1)
List.tail([1, 2, 3]) // Some([2, 3])
List.concat([1, 2], [3]) // [1, 2, 3]
List.reverse([1, 2, 3]) // [3, 2, 1]
List.length([1, 2, 3]) // 3
List.get([1, 2, 3], 0) // Some(1)
List.range(0, 5) // [0, 1, 2, 3, 4]
// String operations
String.split("a,b,c", ",") // ["a", "b", "c"]
String.join(["a", "b"], "-") // "a-b"
String.trim(" hello ") // "hello"
String.contains("hello", "ell") // true
String.replace("hi", "i", "ey") // "hey"
String.length("hello") // 5
String.chars("hi") // ['h', 'i']
String.lines("a\nb") // ["a", "b"]
// Option operations
let x = Some(42)
let y = None
Option.map(x, fn(n: Int): Int => n * 2) // Some(84)
Option.flatMap(x, fn(n: Int): Option<Int> => Some(n + 1)) // Some(43)
Option.getOrElse(y, 0) // 0
Option.isSome(x) // true
Option.isNone(y) // true
// Result operations
let ok = Ok(42)
let err = Err("failed")
Result.map(ok, fn(n: Int): Int => n * 2) // Ok(84)
Result.getOrElse(err, 0) // 0
Result.isOk(ok) // true
Result.isErr(err) // true
// Utility functions
print("Hello") // prints to stdout
toString(42) // "42"
typeOf([1, 2, 3]) // "List"
```
### Planned (Not Yet Implemented)
- **Schema Evolution**: Versioned types with automatic migrations
- **Behavioral Types**: Properties like `is pure`, `is idempotent`
- **Modules/Imports**: Code organization
- **Compilation**: Currently interpreter-only
---
## Primary Use Cases
### 1. Learning Effect Systems
Lux is an excellent educational tool for understanding algebraic effects without the complexity of Haskell's monad transformers or the academic syntax of languages like Koka.
### 2. Testable Application Code
Effects make dependencies explicit. Swap handlers for testing:
```lux
// Production
run app() with { Database = postgres, Http = realHttp }
// Testing
run app() with { Database = mockDb, Http = mockHttp }
```
### 3. Domain Modeling
Explicit effects document what code can do:
```lux
fn processOrder(order: Order): Receipt with {Database, Email, Logger}
// ^ The signature tells you exactly what side effects this function performs
```
### 4. Prototyping
Quick iteration with type inference and a REPL.
---
## Pros and Cons
### Pros
| Advantage | Description |
|-----------|-------------|
| **Explicit Effects** | Function signatures show what side effects are possible |
| **Testability** | Swap effect handlers for mocking—no dependency injection frameworks |
| **Type Safety** | Static types catch errors at compile time |
| **Type Inference** | Write fewer type annotations; the compiler figures them out |
| **Clean Syntax** | ML-family inspired, minimal boilerplate |
| **Pattern Matching** | Destructure data elegantly |
| **Immutable by Default** | Easier to reason about |
| **REPL** | Interactive development |
### Cons
| Limitation | Description |
|------------|-------------|
| **Interpreter Only** | No compilation to native/JS/WASM yet |
| **No Modules** | Can't split code across files |
| **Limited IO** | Only Console built-in, no file/network |
| **No Generics** | Polymorphic functions not fully implemented |
| **New Paradigm** | Effects require learning new concepts |
| **Small Ecosystem** | No packages, libraries, or community |
| **Early Stage** | Bugs likely, features incomplete |
---
## Complexity Assessment
### Conceptual Complexity
| Concept | Difficulty | Notes |
|---------|------------|-------|
| Basic syntax | Easy | Similar to other ML-family languages |
| Functions | Easy | Standard functional style |
| Pattern matching | Easy | If you know any FP language |
| Type system | Medium | Hindley-Milner inference helps |
| Effects | Medium | New concept, but simpler than monads |
| Handlers | Medium | Requires understanding of continuations |
### Comparison to Other Languages
| Language | Complexity | Comparison to Lux |
|----------|------------|-------------------|
| Python | Simpler | No types, no effect tracking |
| TypeScript | Similar | Lux has effects, TS has larger ecosystem |
| Elm | Similar | Both pure FP, Lux has general effects |
| Haskell | More Complex | Monads harder than algebraic effects |
| Koka | Similar | Koka more academic, Lux more practical syntax |
| Rust | More Complex | Ownership adds significant complexity |
### Learning Curve
**Beginner** (1-2 hours):
- Basic expressions, functions, let bindings
- If/else, pattern matching
- REPL usage
**Intermediate** (1-2 days):
- Custom types and records
- Higher-order functions
- Built-in effects (Console)
**Advanced** (1 week):
- Custom effect definitions
- Effect handlers
- Understanding when to use effects vs. regular functions
---
## When to Use Lux
### Good Fit
- Learning algebraic effects
- Prototyping with explicit effect tracking
- Small tools where testability matters
- Teaching functional programming concepts
### Not a Good Fit (Yet)
- Production applications (too early)
- Performance-critical code (interpreter)
- Large codebases (no modules)
- Web development (no JS compilation)
- Systems programming (no low-level control)
---
## Example Session
```
$ cargo run
Lux v0.1.0
Type :help for help, :quit to exit
lux> let x = 42
lux> x * 2
84
lux> fn greet(name: String): Unit with {Console} = Console.print("Hello, " + name)
lux> greet("World")
Hello, World
()
lux> let nums = [1, 2, 3]
lux> nums
[1, 2, 3]
lux> :quit
```
---
## Architecture
```
Source Code
┌─────────┐
│ Lexer │ → Tokens
└─────────┘
┌─────────┐
│ Parser │ → AST
└─────────┘
┌─────────────┐
│ Type Checker│ → Typed AST + Effect Tracking
└─────────────┘
┌─────────────┐
│ Interpreter │ → Values + Effect Handling
└─────────────┘
```
---
## Future Roadmap
1. **Standard Library** - List, String, Option utilities
2. **Module System** - Import/export, namespaces
3. **JavaScript Backend** - Run in browsers
4. **Schema Evolution** - Versioned types
5. **Behavioral Types** - is pure, is idempotent
6. **LSP Server** - IDE support
7. **Package Manager** - Share code

253
docs/VISION.md Normal file
View File

@@ -0,0 +1,253 @@
# Lux: Vision and Roadmap
## The Problems Lux Solves
### 1. The "What Can This Code Do?" Problem
In most languages, you can't tell from a function signature what it might do:
```typescript
// TypeScript - what does this do? No idea without reading the code.
function processOrder(order: Order): Receipt { ... }
```
Could it hit a database? Send emails? Log? Throw? You don't know until you read every line (and every function it calls).
**Lux solution:**
```lux
fn processOrder(order: Order): Receipt with {Database, Email, Logger, Fail}
```
The signature *is* the documentation. Code review becomes "should this function really send emails?" Effects are compile-time checked.
### 2. The Testing Problem
Testing side-effecting code requires mocking frameworks, dependency injection containers, and boilerplate:
```typescript
// TypeScript - need DI framework, mock libraries, setup/teardown
const mockDb = jest.mock('./database');
const mockEmail = jest.mock('./email');
// ... 50 lines of setup
```
**Lux solution:**
```lux
// Production
run processOrder(order) with {
Database = postgres(connString),
Email = sendgrid(apiKey),
Logger = cloudWatch
}
// Test - same code, different handlers
run processOrder(order) with {
Database = inMemoryDb(testData),
Email = collectEmails(sentList), // captures instead of sends
Logger = nullLogger
}
```
No mocking library. No DI framework. Just swap handlers.
### 3. The Schema Evolution Problem (Planned)
Types change. Data persists. Every production system eventually faces:
- "I renamed this field, now deserialization breaks"
- "I added a required field, old data can't load"
- "I need to migrate 10M rows and pray"
**Lux solution:**
```lux
type User @v1 { name: String, email: String }
type User @v2 {
name: String,
email: String,
createdAt: Timestamp,
from @v1 = { createdAt: Timestamp.epoch(), ..v1 } // migration
}
type User @v3 {
fullName: String, // renamed
email: String,
createdAt: Timestamp,
from @v2 = { fullName: v2.name, ..v2 }
}
// Compiler knows: v1 → v2 is auto-compatible, v2 → v3 needs migration
// Serialization handles any version automatically
```
### 4. The "Is This Safe?" Problem (Planned)
Critical properties are documented in comments and hoped for:
```typescript
// IMPORTANT: This function must be idempotent for retry logic!
function chargeCard(payment: Payment): Result { ... }
```
**Lux solution:**
```lux
fn chargeCard(payment: Payment): Result
is idempotent // Compiler enforces or generates property tests
```
```lux
fn retry<F>(action: F, times: Int): Result
where F is idempotent // Won't compile if you pass non-idempotent function
```
---
## What's Built vs. What's Needed
### Currently Working (Phase 1: Core Language)
| Feature | Status | Notes |
|---------|--------|-------|
| Lexer/Parser | Done | Full syntax support |
| Type Inference | Done | Hindley-Milner |
| Functions/Closures | Done | First-class functions |
| Pattern Matching | Done | Destructuring, guards |
| Records/Tuples/Lists | Done | Basic data structures |
| Effect Declarations | Done | `effect Name { ... }` |
| Effect Operations | Done | `Effect.operation()` |
| Effect Handlers | Done | `handler name: Effect { ... }` |
| Run with Handlers | Done | `run expr with { ... }` |
| Built-in Console/Fail | Done | Basic IO |
| REPL | Done | Interactive development |
| Type Checking | Done | With effect tracking |
### Needed for Real Use (Phase 2: Practical)
| Feature | Effort | Why It Matters |
|---------|--------|----------------|
| **Module System** | 2-3 weeks | Can't build real apps without imports |
| **Standard Library** | Done | List.map, String.split, Option.map, etc. |
| **File/Network Effects** | 1-2 weeks | Real IO beyond Console |
| **Better Error Messages** | 2-3 weeks | Elm-quality diagnostics |
| **JS/WASM Compilation** | 4-6 weeks | Deploy to browsers/servers |
### Needed for Full Vision (Phase 3: Differentiation)
| Feature | Effort | Why It Matters |
|---------|--------|----------------|
| **Schema Evolution** | 4-6 weeks | The versioned types system |
| **Behavioral Types** | 4-6 weeks | is pure, is idempotent, etc. |
| **Effect Tracing/Debugging** | 2-3 weeks | Elm-like debugging |
| **LSP Server** | 3-4 weeks | IDE support |
| **Package Manager** | 2-3 weeks | Share code |
---
## Elm-Style Debugging for Effects
Elm's debugging is famous because:
1. **Time-travel**: See app state at any point
2. **No runtime crashes**: Everything is Result/Maybe
3. **Amazing error messages**: Context, suggestions, examples
Lux can go further because effects are explicit:
### Effect Tracing
Every effect operation can be automatically logged:
```lux
// With tracing enabled:
run processOrder(order) with {
Database = traced(postgres), // Logs all queries
Email = traced(sendgrid), // Logs all sends
Logger = traced(cloudWatch) // Meta-logging!
}
// Output:
// [00:00:01] Database.query("SELECT * FROM users WHERE id = 42")
// [00:00:02] Database.query("SELECT * FROM inventory WHERE sku = 'ABC'")
// [00:00:03] Email.send(to: "customer@example.com", subject: "Order Confirmed")
// [00:00:03] Logger.log(level: "info", msg: "Order 123 processed")
```
### Effect Replay
Since all effects are captured, we can replay:
```lux
// Record effects during production
let recording = record(processOrder(order)) with { Database = postgres, ... }
// Replay in development with exact same effect responses
replay(recording) with { Database = mockFromRecording(recording) }
```
### State Snapshots
Since state changes only happen through effects:
```lux
// Snapshot state before/after each effect
run debugSession(app) with {
State = snapshotted(initialState), // Captures every state change
Console = traced(stdout)
}
// Later: inspect state at any point, step forward/backward
```
### Error Messages (To Build)
Current:
```
Type error at 15-45: Cannot unify Int with String
```
Goal (Elm-style):
```
── TYPE MISMATCH ─────────────────────────────────────── src/order.lux
The `calculateTotal` function expects an `Int` but got a `String`:
15│ let total = calculateTotal(order.quantity)
^^^^^^^^^^^^^^
`order.quantity` is a `String` but `calculateTotal` needs an `Int`.
Hint: Maybe you need to parse the string?
let qty = Int.parse(order.quantity)?
let total = calculateTotal(qty)
```
---
## Development Effort Summary
**To be minimally useful for real projects:**
- Module system + standard library + better errors
- **Estimate: 6-8 weeks of focused work**
**To deliver the full vision (effects + schemas + behavioral types):**
- All of the above + schema evolution + behavioral types + compilation
- **Estimate: 4-6 months of focused work**
**To have Elm-quality experience:**
- All of the above + debugging tools + LSP + package manager
- **Estimate: 8-12 months of focused work**
---
## Immediate Next Steps
1. ~~**Standard Library**~~ - Done! List, String, Option, Result operations
2. **Module System** - `import`, `export`, namespaces
3. **File Effect** - `FileSystem.read`, `FileSystem.write`
4. **Error Message Overhaul** - Source snippets, suggestions, colors
5. **JavaScript Backend** - Compile to runnable JS
These would make Lux usable for small real projects.

46
examples/datatypes.lux Normal file
View File

@@ -0,0 +1,46 @@
// Demonstrating algebraic data types and pattern matching
// Define a binary tree
type Tree =
| Leaf(Int)
| Node(Tree, Tree)
// Sum all values in a tree
fn sumTree(tree: Tree): Int =
match tree {
Leaf(n) => n,
Node(left, right) => sumTree(left) + sumTree(right)
}
// Find the depth of a tree
fn depth(tree: Tree): Int =
match tree {
Leaf(_) => 1,
Node(left, right) => {
let leftDepth = depth(left)
let rightDepth = depth(right)
1 + (if leftDepth > rightDepth then leftDepth else rightDepth)
}
}
// Example tree:
// Node
// / \
// Node Leaf(5)
// / \
// Leaf(1) Leaf(2)
let myTree = Node(Node(Leaf(1), Leaf(2)), Leaf(5))
let total = sumTree(myTree)
let treeDepth = depth(myTree)
// Option type example
fn safeDivide(a: Int, b: Int): Option<Int> =
if b == 0 then None
else Some(a / b)
fn showResult(result: Option<Int>): String =
match result {
None => "Division by zero!",
Some(n) => "Result: " + n
}

35
examples/effects.lux Normal file
View File

@@ -0,0 +1,35 @@
// Demonstrating algebraic effects in Lux
// Define a custom logging effect
effect Logger {
fn log(level: String, msg: String): Unit
fn getLevel(): String
}
// A function that uses the Logger effect
fn processData(data: Int): Int with {Logger} = {
Logger.log("info", "Processing data...")
let result = data * 2
Logger.log("debug", "Result computed")
result
}
// A handler that prints logs to console
handler consoleLogger: Logger {
fn log(level, msg) = Console.print("[" + level + "] " + msg)
fn getLevel() = "debug"
}
// A handler that ignores logs (for testing)
handler nullLogger: Logger {
fn log(level, msg) = ()
fn getLevel() = "none"
}
// Main function showing handler usage
fn main(): Unit with {Console} = {
let result = run processData(21) with {
Logger = consoleLogger
}
Console.print("Final result: " + result)
}

12
examples/factorial.lux Normal file
View File

@@ -0,0 +1,12 @@
// Factorial function demonstrating recursion
fn factorial(n: Int): Int =
if n <= 1 then 1
else n * factorial(n - 1)
// Calculate factorial of 10
let result = factorial(10)
// Print result
fn main(): Unit with {Console} =
Console.print("10! = " + result)

42
examples/functional.lux Normal file
View File

@@ -0,0 +1,42 @@
// Demonstrating functional programming features
// Higher-order functions
fn apply(f: fn(Int): Int, x: Int): Int = f(x)
fn compose(f: fn(Int): Int, g: fn(Int): Int): fn(Int): Int =
fn(x: Int): Int => f(g(x))
// Basic functions
fn double(x: Int): Int = x * 2
fn addOne(x: Int): Int = x + 1
fn square(x: Int): Int = x * x
// Using apply
let result1 = apply(double, 21) // 42
// Using compose
let doubleAndAddOne = compose(addOne, double)
let result2 = doubleAndAddOne(5) // 11
// Using the pipe operator
let result3 = 5 |> double |> addOne |> square // ((5 * 2) + 1)^2 = 121
// Currying example
fn add(a: Int): fn(Int): Int =
fn(b: Int): Int => a + b
let add5 = add(5)
let result4 = add5(10) // 15
// Partial application simulation
fn multiply(a: Int, b: Int): Int = a * b
let times3 = fn(x: Int): Int => multiply(3, x)
let result5 = times3(7) // 21
// Working with records
let transform = fn(record: { x: Int, y: Int }): Int =>
record.x + record.y
let point = { x: 10, y: 20 }
let sum = transform(point) // 30

5
examples/hello.lux Normal file
View File

@@ -0,0 +1,5 @@
// Hello World in Lux
// Demonstrates basic effect usage
fn main(): Unit with {Console} =
Console.print("Hello, World!")

96
flake.lock generated Normal file
View File

@@ -0,0 +1,96 @@
{
"nodes": {
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1770841267,
"narHash": "sha256-9xejG0KoqsoKEGp2kVbXRlEYtFFcDTHjidiuX8hGO44=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "ec7c70d12ce2fc37cb92aff673dcdca89d187bae",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_2": {
"locked": {
"lastModified": 1744536153,
"narHash": "sha256-awS2zRgF4uTwrOKwwiJcByDzDOdo3Q1rPZbiHQg/N38=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "18dd725c29603f582cf1900e0d25f9f1063dbf11",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs",
"rust-overlay": "rust-overlay"
}
},
"rust-overlay": {
"inputs": {
"nixpkgs": "nixpkgs_2"
},
"locked": {
"lastModified": 1770952264,
"narHash": "sha256-CjymNrJZWBtpavyuTkfPVPaZkwzIzGaf0E/3WgcwM14=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "ec6a3d5cdf14bb5a1dd03652bd3f6351004d2188",
"type": "github"
},
"original": {
"owner": "oxalica",
"repo": "rust-overlay",
"type": "github"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

55
flake.nix Normal file
View File

@@ -0,0 +1,55 @@
{
description = "Lux - A functional programming language with first-class effects";
inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
rust-overlay.url = "github:oxalica/rust-overlay";
flake-utils.url = "github:numtide/flake-utils";
};
outputs = { self, nixpkgs, rust-overlay, flake-utils }:
flake-utils.lib.eachDefaultSystem (system:
let
overlays = [ (import rust-overlay) ];
pkgs = import nixpkgs { inherit system overlays; };
rustToolchain = pkgs.rust-bin.stable.latest.default.override {
extensions = [ "rust-src" "rust-analyzer" ];
};
in
{
devShells.default = pkgs.mkShell {
buildInputs = with pkgs; [
rustToolchain
cargo-watch
cargo-edit
];
RUST_BACKTRACE = "1";
RUST_SRC_PATH = "${rustToolchain}/lib/rustlib/src/rust/library";
shellHook = ''
printf "\n"
printf " \033[1;35m \033[0m\n"
printf " \033[1;35m \033[0m\n"
printf " \033[1;35m \033[0m v0.1.0\n"
printf "\n"
printf " Functional language with first-class effects\n"
printf "\n"
printf " \033[1mCommands:\033[0m\n"
printf " cargo build Build the compiler\n"
printf " cargo run Start the REPL\n"
printf " cargo test Run tests\n"
printf " cargo run -- \033[3m<file.lux>\033[0m Run a file\n"
printf "\n"
'';
};
packages.default = pkgs.rustPlatform.buildRustPackage {
pname = "lux";
version = "0.1.0";
src = ./.;
cargoLock.lockFile = ./Cargo.lock;
};
}
);
}

583
src/ast.rs Normal file
View File

@@ -0,0 +1,583 @@
//! Abstract Syntax Tree for the Lux language
#![allow(dead_code)]
use std::fmt;
/// Source location for error reporting.
///
/// `start` and `end` are offsets into the source text (presumably byte
/// offsets — TODO confirm against the lexer).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Hash)]
pub struct Span {
    pub start: usize,
    pub end: usize,
}
impl Span {
    /// Creates a span covering `start..end`.
    pub fn new(start: usize, end: usize) -> Self {
        Self { start, end }
    }
    /// Returns the smallest span that covers both `self` and `other`.
    pub fn merge(self, other: Span) -> Span {
        let start = if self.start < other.start { self.start } else { other.start };
        let end = if self.end > other.end { self.end } else { other.end };
        Span { start, end }
    }
}
/// An identifier (variable or type name).
///
/// NOTE(review): the derived `PartialEq`/`Hash` include `span`, so two
/// idents with the same name at different locations compare unequal —
/// confirm this is intended wherever idents are used as map keys.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Ident {
    /// The identifier text as written in the source.
    pub name: String,
    /// Where the identifier appears, for diagnostics.
    pub span: Span,
}
impl Ident {
    /// Builds an identifier from anything convertible into a `String`.
    pub fn new(name: impl Into<String>, span: Span) -> Self {
        let name = name.into();
        Self { name, span }
    }
}
impl fmt::Display for Ident {
    /// Prints just the name; the span is not rendered.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.name)
    }
}
/// Visibility modifier for top-level declarations.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum Visibility {
    /// Public - exported from module
    Public,
    /// Private - only visible within module (default)
    ///
    /// This is the `Default` variant, so declarations without an explicit
    /// visibility marker are private.
    #[default]
    Private,
}
// ============ Schema Evolution ============
/// A version number for schema evolution (e.g., @v1, @v2)
///
/// Equality, hashing, and ordering are all based on `number` alone; the
/// `span` is carried only for diagnostics. `PartialEq`/`Eq`/`Hash` are
/// implemented manually (rather than derived) so they stay consistent
/// with the manual `Ord` impl: deriving them would make two versions
/// with the same number but different spans compare unequal while
/// `cmp` reports `Equal`, violating the `Ord`/`PartialEq` consistency
/// contract documented on `std::cmp::Ord`.
#[derive(Debug, Clone, Copy)]
pub struct Version {
    pub number: u32,
    pub span: Span,
}
impl PartialEq for Version {
    fn eq(&self, other: &Self) -> bool {
        // Compare only the version number; spans are diagnostic-only.
        self.number == other.number
    }
}
impl Eq for Version {}
impl std::hash::Hash for Version {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        // Must match `eq`: hash only the number.
        self.number.hash(state);
    }
}
impl PartialOrd for Version {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for Version {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.number.cmp(&other.number)
    }
}
impl Version {
    pub fn new(number: u32, span: Span) -> Self {
        Self { number, span }
    }
}
impl fmt::Display for Version {
    /// Renders in source syntax, e.g. `@v2`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "@v{}", self.number)
    }
}
/// Version constraint for type annotations
///
/// Appears where a versioned type is referenced, e.g. `let c: Config @v2`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum VersionConstraint {
    /// Exactly this version: @v2
    Exact(Version),
    /// This version or later: @v2+
    AtLeast(Version),
    /// Latest version: @latest (the span covers the `@latest` token)
    Latest(Span),
}
impl fmt::Display for VersionConstraint {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
VersionConstraint::Exact(v) => write!(f, "{}", v),
VersionConstraint::AtLeast(v) => write!(f, "{}+", v),
VersionConstraint::Latest(_) => write!(f, "@latest"),
}
}
}
/// Migration from one version to another
///
/// Declared inside a versioned type as `from @vN = <expr>`; `body` is the
/// expression that transforms a value of `from_version` into the current
/// version of the type.
#[derive(Debug, Clone)]
pub struct Migration {
    /// Source version: from @v1
    pub from_version: Version,
    /// Migration body (expression that transforms old to new)
    pub body: Expr,
    pub span: Span,
}
/// Module path: foo/bar/baz
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ModulePath {
    /// Path segments in source order, e.g. `[foo, bar, baz]`
    pub segments: Vec<Ident>,
    pub span: Span,
}
impl ModulePath {
pub fn to_string(&self) -> String {
self.segments
.iter()
.map(|s| s.name.as_str())
.collect::<Vec<_>>()
.join("/")
}
}
/// Import declaration
///
/// NOTE(review): `items` and `wildcard` look mutually exclusive — confirm
/// the parser rejects `import foo.{a}.*`-style combinations.
#[derive(Debug, Clone)]
pub struct ImportDecl {
    /// The module path being imported
    pub path: ModulePath,
    /// Optional alias: import foo/bar as baz
    pub alias: Option<Ident>,
    /// Specific items to import: import foo.{a, b, c}
    pub items: Option<Vec<Ident>>,
    /// Import all items: import foo.*
    pub wildcard: bool,
    pub span: Span,
}
/// A complete program (or module)
///
/// The top-level parse result: imports first, then declarations in
/// source order.
#[derive(Debug, Clone)]
pub struct Program {
    /// Module imports
    pub imports: Vec<ImportDecl>,
    /// Top-level declarations, in the order they appear in the file
    pub declarations: Vec<Declaration>,
}
/// Top-level declarations
///
/// Each variant wraps the full declaration node; see the individual
/// structs for field-level details.
#[derive(Debug, Clone)]
pub enum Declaration {
    /// Function definition: fn name(params): ReturnType with {Effects} = body
    Function(FunctionDecl),
    /// Effect declaration: effect Name { fn op1(...): T, ... }
    Effect(EffectDecl),
    /// Type alias or ADT: type Name = ...
    Type(TypeDecl),
    /// Handler definition: handler name: Effect { ... }
    Handler(HandlerDecl),
    /// Let binding at top level
    Let(LetDecl),
}
/// Function declaration
///
/// `fn name<T...>(params): ReturnType with {Effects} = body`
#[derive(Debug, Clone)]
pub struct FunctionDecl {
    pub visibility: Visibility,
    pub name: Ident,
    /// Generic type parameters (the `<T, U>` list); empty if none
    pub type_params: Vec<Ident>,
    pub params: Vec<Parameter>,
    pub return_type: TypeExpr,
    /// Effect names from the `with {E1, E2}` clause; empty if none
    pub effects: Vec<Ident>,
    /// The function body expression (right of `=`)
    pub body: Expr,
    pub span: Span,
}
/// Function parameter: `name: Type`
#[derive(Debug, Clone)]
pub struct Parameter {
    pub name: Ident,
    /// Declared type annotation (required — the field is not optional)
    pub typ: TypeExpr,
    pub span: Span,
}
/// Effect declaration
///
/// `effect Name<T...> { fn op(...): T, ... }`
#[derive(Debug, Clone)]
pub struct EffectDecl {
    pub name: Ident,
    pub type_params: Vec<Ident>,
    /// Operations declared inside the effect body
    pub operations: Vec<EffectOp>,
    pub span: Span,
}
/// An operation within an effect
///
/// The operation's signature: `fn name(params): ReturnType`.
#[derive(Debug, Clone)]
pub struct EffectOp {
    pub name: Ident,
    pub params: Vec<Parameter>,
    pub return_type: TypeExpr,
    pub span: Span,
}
/// Type declaration (alias or ADT)
///
/// Carries the schema-evolution metadata: an optional version annotation
/// plus any migrations from earlier versions.
#[derive(Debug, Clone)]
pub struct TypeDecl {
    pub visibility: Visibility,
    pub name: Ident,
    pub type_params: Vec<Ident>,
    /// Optional version annotation: type User @v2 { ... }
    pub version: Option<Version>,
    pub definition: TypeDef,
    /// Migrations from previous versions: from @v1 = { ... }
    pub migrations: Vec<Migration>,
    pub span: Span,
}
/// Type definition
///
/// The body of a [`TypeDecl`].
#[derive(Debug, Clone)]
pub enum TypeDef {
    /// Type alias: type Foo = Bar
    Alias(TypeExpr),
    /// Record type: type Foo { field: Type, ... }
    Record(Vec<RecordField>),
    /// Enum/ADT: type Foo = A | B(Int) | C { x: Int }
    Enum(Vec<Variant>),
}
/// Record field
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct RecordField {
    /// Field name
    pub name: Ident,
    /// Declared field type
    pub typ: TypeExpr,
    pub span: Span,
}
/// Enum variant
#[derive(Debug, Clone)]
pub struct Variant {
    /// Variant (constructor) name
    pub name: Ident,
    /// Payload shape: unit, tuple, or record
    pub fields: VariantFields,
    pub span: Span,
}
/// Variant field types
///
/// The payload shape of a [`Variant`].
#[derive(Debug, Clone)]
pub enum VariantFields {
    /// Unit variant: A
    Unit,
    /// Tuple variant: A(Int, String)
    Tuple(Vec<TypeExpr>),
    /// Record variant: A { x: Int, y: String }
    Record(Vec<RecordField>),
}
/// Handler declaration
#[derive(Debug, Clone)]
pub struct HandlerDecl {
    /// Handler name
    pub name: Ident,
    /// Parameters to the handler (if any)
    pub params: Vec<Parameter>,
    /// The effect this handler implements
    pub effect: Ident,
    /// One implementation per handled operation
    pub implementations: Vec<HandlerImpl>,
    pub span: Span,
}
/// Implementation of an effect operation in a handler
#[derive(Debug, Clone)]
pub struct HandlerImpl {
    /// Name of the effect operation being implemented
    pub op_name: Ident,
    /// Names bound to the operation's arguments
    pub params: Vec<Ident>,
    pub resume: Option<Ident>, // The continuation parameter
    pub body: Expr,
    pub span: Span,
}
/// Let declaration
#[derive(Debug, Clone)]
pub struct LetDecl {
    /// Visibility modifier (e.g. marked `pub`)
    pub visibility: Visibility,
    /// Bound name
    pub name: Ident,
    /// Optional type annotation
    pub typ: Option<TypeExpr>,
    /// The bound expression
    pub value: Expr,
    pub span: Span,
}
/// Type expressions
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum TypeExpr {
    /// Named type: Int, String, List
    Named(Ident),
    /// Generic type application: List<Int>, Map<String, Int>
    App(Box<TypeExpr>, Vec<TypeExpr>),
    /// Function type: fn(A, B): C
    Function {
        params: Vec<TypeExpr>,
        return_type: Box<TypeExpr>,
        /// Effect names the function type carries
        effects: Vec<Ident>,
    },
    /// Tuple type: (A, B, C)
    Tuple(Vec<TypeExpr>),
    /// Record type: { name: String, age: Int }
    Record(Vec<RecordField>),
    /// Unit type
    Unit,
    /// Versioned type: User @v2, User @v2+, User @latest
    Versioned {
        base: Box<TypeExpr>,
        constraint: VersionConstraint,
    },
}
impl TypeExpr {
    /// Convenience constructor: a bare named type (e.g. `Int`) with a
    /// placeholder (default) span.
    pub fn named(name: &str) -> Self {
        Self::Named(Ident::new(name, Span::default()))
    }
}
/// Expressions
#[derive(Debug, Clone)]
pub enum Expr {
    /// Literal values
    Literal(Literal),
    /// Variable reference
    Var(Ident),
    /// Binary operation: a + b
    BinaryOp {
        op: BinaryOp,
        left: Box<Expr>,
        right: Box<Expr>,
        span: Span,
    },
    /// Unary operation: -a, !a
    UnaryOp {
        op: UnaryOp,
        operand: Box<Expr>,
        span: Span,
    },
    /// Function call: foo(a, b)
    Call {
        func: Box<Expr>,
        args: Vec<Expr>,
        span: Span,
    },
    /// Effect operation call: Effect.operation(args)
    EffectOp {
        effect: Ident,
        operation: Ident,
        args: Vec<Expr>,
        span: Span,
    },
    /// Field access: foo.bar
    Field {
        object: Box<Expr>,
        field: Ident,
        span: Span,
    },
    /// Lambda: fn(x, y) => x + y or fn(x: Int): Int => x + 1
    Lambda {
        params: Vec<Parameter>,
        return_type: Option<Box<TypeExpr>>,
        effects: Vec<Ident>,
        body: Box<Expr>,
        span: Span,
    },
    /// Let binding: let x = e1; e2
    Let {
        name: Ident,
        typ: Option<TypeExpr>,
        value: Box<Expr>,
        body: Box<Expr>,
        span: Span,
    },
    /// If expression: if cond then e1 else e2
    If {
        condition: Box<Expr>,
        then_branch: Box<Expr>,
        else_branch: Box<Expr>,
        span: Span,
    },
    /// Match expression
    Match {
        scrutinee: Box<Expr>,
        arms: Vec<MatchArm>,
        span: Span,
    },
    /// Block: { e1; e2; e3 }
    Block {
        statements: Vec<Statement>,
        /// The trailing expression whose value the block yields
        result: Box<Expr>,
        span: Span,
    },
    /// Record literal: { name: "Alice", age: 30 }
    Record {
        fields: Vec<(Ident, Expr)>,
        span: Span,
    },
    /// Tuple literal: (1, "hello", true)
    Tuple { elements: Vec<Expr>, span: Span },
    /// List literal: [1, 2, 3]
    List { elements: Vec<Expr>, span: Span },
    /// Run with handlers: run expr with { Effect = handler, ... }
    Run {
        expr: Box<Expr>,
        /// `(effect name, handler expression)` pairs
        handlers: Vec<(Ident, Expr)>,
        span: Span,
    },
    /// Resume continuation in handler (like calling the continuation)
    Resume { value: Box<Expr>, span: Span },
}
impl Expr {
    /// Returns the source span of this expression.
    ///
    /// Literals and variables carry their span inside the nested value;
    /// every other variant stores it directly in a `span` field.
    pub fn span(&self) -> Span {
        match self {
            Expr::Literal(lit) => lit.span,
            Expr::Var(ident) => ident.span,
            Expr::BinaryOp { span, .. } => *span,
            Expr::UnaryOp { span, .. } => *span,
            Expr::Call { span, .. } => *span,
            Expr::EffectOp { span, .. } => *span,
            Expr::Field { span, .. } => *span,
            Expr::Lambda { span, .. } => *span,
            Expr::Let { span, .. } => *span,
            Expr::If { span, .. } => *span,
            Expr::Match { span, .. } => *span,
            Expr::Block { span, .. } => *span,
            Expr::Record { span, .. } => *span,
            Expr::Tuple { span, .. } => *span,
            Expr::List { span, .. } => *span,
            Expr::Run { span, .. } => *span,
            Expr::Resume { span, .. } => *span,
        }
    }
}
/// Literal values
#[derive(Debug, Clone)]
pub struct Literal {
    /// Which kind of literal, with its parsed value
    pub kind: LiteralKind,
    pub span: Span,
}
/// The payload of a [`Literal`].
#[derive(Debug, Clone)]
pub enum LiteralKind {
    Int(i64),
    Float(f64),
    String(String),
    Char(char),
    Bool(bool),
    Unit,
}
/// Binary operators
///
/// `Pipe` is the `|>` pipeline operator.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum BinaryOp {
    // Arithmetic
    Add,
    Sub,
    Mul,
    Div,
    Mod,
    // Comparison
    Eq,
    Ne,
    Lt,
    Le,
    Gt,
    Ge,
    // Logical
    And,
    Or,
    // Other
    Pipe, // |>
}
impl fmt::Display for BinaryOp {
    /// Writes the operator's surface syntax (e.g. `+`, `==`, `|>`).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Map each operator to a static symbol, then emit it in one write.
        let symbol = match self {
            BinaryOp::Add => "+",
            BinaryOp::Sub => "-",
            BinaryOp::Mul => "*",
            BinaryOp::Div => "/",
            BinaryOp::Mod => "%",
            BinaryOp::Eq => "==",
            BinaryOp::Ne => "!=",
            BinaryOp::Lt => "<",
            BinaryOp::Le => "<=",
            BinaryOp::Gt => ">",
            BinaryOp::Ge => ">=",
            BinaryOp::And => "&&",
            BinaryOp::Or => "||",
            BinaryOp::Pipe => "|>",
        };
        f.write_str(symbol)
    }
}
/// Unary operators
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum UnaryOp {
    Neg, // - (arithmetic negation)
    Not, // ! (logical negation)
}
impl fmt::Display for UnaryOp {
    /// Writes the operator's surface syntax (`-` or `!`).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(match self {
            UnaryOp::Neg => "-",
            UnaryOp::Not => "!",
        })
    }
}
/// Statement in a block
#[derive(Debug, Clone)]
pub enum Statement {
    /// Expression statement
    Expr(Expr),
    /// Let binding without body (in blocks)
    Let {
        /// Bound name
        name: Ident,
        /// Optional type annotation
        typ: Option<TypeExpr>,
        /// The bound expression
        value: Expr,
        span: Span,
    },
}
/// Match arm
#[derive(Debug, Clone)]
pub struct MatchArm {
    /// Pattern the scrutinee is matched against
    pub pattern: Pattern,
    /// Optional guard expression; the arm fires only if it holds
    pub guard: Option<Expr>,
    /// Expression evaluated when the arm matches
    pub body: Expr,
    pub span: Span,
}
/// Patterns for matching
#[derive(Debug, Clone)]
pub enum Pattern {
    /// Wildcard: _
    Wildcard(Span),
    /// Variable binding: x
    Var(Ident),
    /// Literal: 42, "hello", true
    Literal(Literal),
    /// Constructor: Some(x), None, Ok(v)
    Constructor {
        name: Ident,
        fields: Vec<Pattern>,
        span: Span,
    },
    /// Record pattern: { name, age: a }
    Record {
        fields: Vec<(Ident, Pattern)>,
        span: Span,
    },
    /// Tuple pattern: (a, b, c)
    Tuple { elements: Vec<Pattern>, span: Span },
}
impl Pattern {
    /// Returns the source span of this pattern.
    ///
    /// Wildcards, variables, and literals carry their span in the nested
    /// value; the remaining variants store it in a `span` field.
    pub fn span(&self) -> Span {
        match self {
            Pattern::Wildcard(span) => *span,
            Pattern::Var(ident) => ident.span,
            Pattern::Literal(lit) => lit.span,
            Pattern::Constructor { span, .. } => *span,
            Pattern::Record { span, .. } => *span,
            Pattern::Tuple { span, .. } => *span,
        }
    }
}

2202
src/interpreter.rs Normal file

File diff suppressed because it is too large Load Diff

633
src/lexer.rs Normal file
View File

@@ -0,0 +1,633 @@
//! Lexer for the Lux language
#![allow(dead_code)]
use crate::ast::Span;
use std::fmt;
use std::iter::Peekable;
use std::str::Chars;
/// Token types
///
/// Newlines are emitted as `Newline` tokens rather than silently skipped,
/// so consumers can treat them as significant (or filter them out).
#[derive(Debug, Clone, PartialEq)]
pub enum TokenKind {
    // Literals
    Int(i64),
    Float(f64),
    String(String),
    Char(char),
    Bool(bool),
    // Identifiers and keywords
    Ident(String),
    // Keywords
    Fn,
    Let,
    If,
    Then,
    Else,
    Match,
    With,
    Effect,
    Handler,
    Run,
    Resume,
    Type,
    True,
    False,
    Import,
    Pub,
    As,
    From, // from (for migrations)
    Latest, // latest (for @latest version constraint)
    // Operators
    Plus, // +
    Minus, // -
    Star, // *
    Slash, // /
    Percent, // %
    Eq, // =
    EqEq, // ==
    Ne, // !=
    Lt, // <
    Le, // <=
    Gt, // >
    Ge, // >=
    And, // &&
    Or, // ||
    Not, // !
    Pipe, // |
    PipeGt, // |>
    Arrow, // =>
    ThinArrow, // ->
    Dot, // .
    Colon, // :
    ColonColon, // ::
    Comma, // ,
    Semi, // ;
    At, // @
    // Delimiters
    LParen, // (
    RParen, // )
    LBrace, // {
    RBrace, // }
    LBracket, // [
    RBracket, // ]
    // Special
    Underscore, // _
    Newline,
    Eof,
}
impl fmt::Display for TokenKind {
    // Renders each token as its surface syntax (used in diagnostics).
    // Note: "{{" and "}}" are format-string escapes producing single braces.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            TokenKind::Int(n) => write!(f, "{}", n),
            TokenKind::Float(n) => write!(f, "{}", n),
            TokenKind::String(s) => write!(f, "\"{}\"", s),
            TokenKind::Char(c) => write!(f, "'{}'", c),
            TokenKind::Bool(b) => write!(f, "{}", b),
            TokenKind::Ident(s) => write!(f, "{}", s),
            TokenKind::Fn => write!(f, "fn"),
            TokenKind::Let => write!(f, "let"),
            TokenKind::If => write!(f, "if"),
            TokenKind::Then => write!(f, "then"),
            TokenKind::Else => write!(f, "else"),
            TokenKind::Match => write!(f, "match"),
            TokenKind::With => write!(f, "with"),
            TokenKind::Effect => write!(f, "effect"),
            TokenKind::Handler => write!(f, "handler"),
            TokenKind::Run => write!(f, "run"),
            TokenKind::Resume => write!(f, "resume"),
            TokenKind::Type => write!(f, "type"),
            TokenKind::Import => write!(f, "import"),
            TokenKind::Pub => write!(f, "pub"),
            TokenKind::As => write!(f, "as"),
            TokenKind::From => write!(f, "from"),
            TokenKind::Latest => write!(f, "latest"),
            TokenKind::True => write!(f, "true"),
            TokenKind::False => write!(f, "false"),
            TokenKind::Plus => write!(f, "+"),
            TokenKind::Minus => write!(f, "-"),
            TokenKind::Star => write!(f, "*"),
            TokenKind::Slash => write!(f, "/"),
            TokenKind::Percent => write!(f, "%"),
            TokenKind::Eq => write!(f, "="),
            TokenKind::EqEq => write!(f, "=="),
            TokenKind::Ne => write!(f, "!="),
            TokenKind::Lt => write!(f, "<"),
            TokenKind::Le => write!(f, "<="),
            TokenKind::Gt => write!(f, ">"),
            TokenKind::Ge => write!(f, ">="),
            TokenKind::And => write!(f, "&&"),
            TokenKind::Or => write!(f, "||"),
            TokenKind::Not => write!(f, "!"),
            TokenKind::Pipe => write!(f, "|"),
            TokenKind::PipeGt => write!(f, "|>"),
            TokenKind::Arrow => write!(f, "=>"),
            TokenKind::ThinArrow => write!(f, "->"),
            TokenKind::Dot => write!(f, "."),
            TokenKind::Colon => write!(f, ":"),
            TokenKind::ColonColon => write!(f, "::"),
            TokenKind::Comma => write!(f, ","),
            TokenKind::Semi => write!(f, ";"),
            TokenKind::At => write!(f, "@"),
            TokenKind::LParen => write!(f, "("),
            TokenKind::RParen => write!(f, ")"),
            TokenKind::LBrace => write!(f, "{{"),
            TokenKind::RBrace => write!(f, "}}"),
            TokenKind::LBracket => write!(f, "["),
            TokenKind::RBracket => write!(f, "]"),
            TokenKind::Underscore => write!(f, "_"),
            TokenKind::Newline => write!(f, "\\n"),
            TokenKind::Eof => write!(f, "EOF"),
        }
    }
}
/// A token with its source location
#[derive(Debug, Clone)]
pub struct Token {
    /// The token's kind (and payload, for literals/identifiers)
    pub kind: TokenKind,
    /// Byte range in the source this token was scanned from
    pub span: Span,
}
impl Token {
pub fn new(kind: TokenKind, span: Span) -> Self {
Self { kind, span }
}
}
/// Lexer error
#[derive(Debug, Clone)]
pub struct LexError {
    /// Human-readable description of the problem
    pub message: String,
    /// Byte range in the source where the error occurred
    pub span: Span,
}
impl fmt::Display for LexError {
    /// Formats as `Lexer error at <start>-<end>: <message>`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let Self { message, span } = self;
        write!(f, "Lexer error at {}-{}: {}", span.start, span.end, message)
    }
}
/// The lexer
pub struct Lexer<'a> {
    /// Full source text (used to slice identifier lexemes)
    source: &'a str,
    /// Character iterator with single-character lookahead
    chars: Peekable<Chars<'a>>,
    /// Current byte offset into `source`
    pos: usize,
}
impl<'a> Lexer<'a> {
pub fn new(source: &'a str) -> Self {
Self {
source,
chars: source.chars().peekable(),
pos: 0,
}
}
/// Tokenize the entire source
pub fn tokenize(mut self) -> Result<Vec<Token>, LexError> {
let mut tokens = Vec::new();
loop {
let token = self.next_token()?;
let is_eof = token.kind == TokenKind::Eof;
tokens.push(token);
if is_eof {
break;
}
}
Ok(tokens)
}
fn next_token(&mut self) -> Result<Token, LexError> {
self.skip_whitespace_and_comments();
let start = self.pos;
let Some(c) = self.advance() else {
return Ok(Token::new(TokenKind::Eof, Span::new(start, start)));
};
let kind = match c {
// Single-character tokens
'+' => TokenKind::Plus,
'*' => TokenKind::Star,
'%' => TokenKind::Percent,
'(' => TokenKind::LParen,
')' => TokenKind::RParen,
'{' => TokenKind::LBrace,
'}' => TokenKind::RBrace,
'[' => TokenKind::LBracket,
']' => TokenKind::RBracket,
',' => TokenKind::Comma,
';' => TokenKind::Semi,
'@' => TokenKind::At,
'\n' => TokenKind::Newline,
// Multi-character tokens
'-' => {
if self.peek() == Some('>') {
self.advance();
TokenKind::ThinArrow
} else {
TokenKind::Minus
}
}
'/' => {
if self.peek() == Some('/') {
// Line comment
self.skip_line_comment();
return self.next_token();
} else {
TokenKind::Slash
}
}
'=' => {
if self.peek() == Some('=') {
self.advance();
TokenKind::EqEq
} else if self.peek() == Some('>') {
self.advance();
TokenKind::Arrow
} else {
TokenKind::Eq
}
}
'!' => {
if self.peek() == Some('=') {
self.advance();
TokenKind::Ne
} else {
TokenKind::Not
}
}
'<' => {
if self.peek() == Some('=') {
self.advance();
TokenKind::Le
} else {
TokenKind::Lt
}
}
'>' => {
if self.peek() == Some('=') {
self.advance();
TokenKind::Ge
} else {
TokenKind::Gt
}
}
'&' => {
if self.peek() == Some('&') {
self.advance();
TokenKind::And
} else {
return Err(LexError {
message: "Expected '&&'".into(),
span: Span::new(start, self.pos),
});
}
}
'|' => {
if self.peek() == Some('|') {
self.advance();
TokenKind::Or
} else if self.peek() == Some('>') {
self.advance();
TokenKind::PipeGt
} else {
TokenKind::Pipe
}
}
'.' => TokenKind::Dot,
':' => {
if self.peek() == Some(':') {
self.advance();
TokenKind::ColonColon
} else {
TokenKind::Colon
}
}
'_' => {
if self.peek().map_or(false, |c| c.is_alphanumeric()) {
// It's an identifier starting with _
self.scan_ident_rest(start)
} else {
TokenKind::Underscore
}
}
// String literals
'"' => self.scan_string(start)?,
// Char literals
'\'' => self.scan_char(start)?,
// Numbers
c if c.is_ascii_digit() => self.scan_number(c, start)?,
// Identifiers and keywords
c if c.is_alphabetic() || c == '_' => self.scan_ident_rest(start),
_ => {
return Err(LexError {
message: format!("Unexpected character: '{}'", c),
span: Span::new(start, self.pos),
});
}
};
Ok(Token::new(kind, Span::new(start, self.pos)))
}
fn advance(&mut self) -> Option<char> {
let c = self.chars.next()?;
self.pos += c.len_utf8();
Some(c)
}
fn peek(&mut self) -> Option<char> {
self.chars.peek().copied()
}
fn skip_whitespace_and_comments(&mut self) {
while let Some(c) = self.peek() {
if c == ' ' || c == '\t' || c == '\r' {
self.advance();
} else if c == '/' {
// Check for comment
let mut chars = self.chars.clone();
chars.next(); // consume '/'
if chars.peek() == Some(&'/') {
self.skip_line_comment();
} else {
break;
}
} else {
break;
}
}
}
fn skip_line_comment(&mut self) {
while let Some(c) = self.peek() {
if c == '\n' {
break;
}
self.advance();
}
}
fn scan_string(&mut self, _start: usize) -> Result<TokenKind, LexError> {
let mut value = String::new();
loop {
match self.advance() {
Some('"') => break,
Some('\\') => {
let escaped = match self.advance() {
Some('n') => '\n',
Some('r') => '\r',
Some('t') => '\t',
Some('\\') => '\\',
Some('"') => '"',
Some(c) => c,
None => {
return Err(LexError {
message: "Unterminated string".into(),
span: Span::new(_start, self.pos),
});
}
};
value.push(escaped);
}
Some(c) => value.push(c),
None => {
return Err(LexError {
message: "Unterminated string".into(),
span: Span::new(_start, self.pos),
});
}
}
}
Ok(TokenKind::String(value))
}
fn scan_char(&mut self, start: usize) -> Result<TokenKind, LexError> {
let c = match self.advance() {
Some('\\') => match self.advance() {
Some('n') => '\n',
Some('r') => '\r',
Some('t') => '\t',
Some('\\') => '\\',
Some('\'') => '\'',
Some(c) => c,
None => {
return Err(LexError {
message: "Unterminated character literal".into(),
span: Span::new(start, self.pos),
});
}
},
Some(c) => c,
None => {
return Err(LexError {
message: "Unterminated character literal".into(),
span: Span::new(start, self.pos),
});
}
};
if self.advance() != Some('\'') {
return Err(LexError {
message: "Expected closing quote for character literal".into(),
span: Span::new(start, self.pos),
});
}
Ok(TokenKind::Char(c))
}
fn scan_number(&mut self, first: char, start: usize) -> Result<TokenKind, LexError> {
let mut num_str = String::new();
num_str.push(first);
while let Some(c) = self.peek() {
if c.is_ascii_digit() || c == '_' {
if c != '_' {
num_str.push(c);
}
self.advance();
} else {
break;
}
}
// Check for float
if self.peek() == Some('.') {
// Look ahead to make sure it's not a method call
let mut chars = self.chars.clone();
chars.next(); // consume '.'
if chars.peek().map_or(false, |c| c.is_ascii_digit()) {
self.advance(); // consume '.'
num_str.push('.');
while let Some(c) = self.peek() {
if c.is_ascii_digit() || c == '_' {
if c != '_' {
num_str.push(c);
}
self.advance();
} else {
break;
}
}
let f: f64 = num_str.parse().map_err(|_| LexError {
message: "Invalid float literal".into(),
span: Span::new(start, self.pos),
})?;
return Ok(TokenKind::Float(f));
}
}
let n: i64 = num_str.parse().map_err(|_| LexError {
message: "Invalid integer literal".into(),
span: Span::new(start, self.pos),
})?;
Ok(TokenKind::Int(n))
}
fn scan_ident_rest(&mut self, start: usize) -> TokenKind {
while let Some(c) = self.peek() {
if c.is_alphanumeric() || c == '_' {
self.advance();
} else {
break;
}
}
let ident = &self.source[start..self.pos];
match ident {
"fn" => TokenKind::Fn,
"let" => TokenKind::Let,
"if" => TokenKind::If,
"then" => TokenKind::Then,
"else" => TokenKind::Else,
"match" => TokenKind::Match,
"with" => TokenKind::With,
"effect" => TokenKind::Effect,
"handler" => TokenKind::Handler,
"run" => TokenKind::Run,
"resume" => TokenKind::Resume,
"type" => TokenKind::Type,
"import" => TokenKind::Import,
"pub" => TokenKind::Pub,
"as" => TokenKind::As,
"from" => TokenKind::From,
"latest" => TokenKind::Latest,
"true" => TokenKind::Bool(true),
"false" => TokenKind::Bool(false),
_ => TokenKind::Ident(ident.to_string()),
}
}
}
#[cfg(test)]
mod tests {
    use super::*;
    // Helper: lex a source string to a list of token kinds, dropping
    // Newline tokens so tests only assert on substantive tokens.
    fn lex(source: &str) -> Vec<TokenKind> {
        Lexer::new(source)
            .tokenize()
            .unwrap()
            .into_iter()
            .map(|t| t.kind)
            .filter(|k| !matches!(k, TokenKind::Newline))
            .collect()
    }
    #[test]
    fn test_basic_tokens() {
        assert_eq!(
            lex("fn let if else"),
            vec![
                TokenKind::Fn,
                TokenKind::Let,
                TokenKind::If,
                TokenKind::Else,
                TokenKind::Eof
            ]
        );
    }
    #[test]
    fn test_operators() {
        assert_eq!(
            lex("+ - * / == != |>"),
            vec![
                TokenKind::Plus,
                TokenKind::Minus,
                TokenKind::Star,
                TokenKind::Slash,
                TokenKind::EqEq,
                TokenKind::Ne,
                TokenKind::PipeGt,
                TokenKind::Eof
            ]
        );
    }
    #[test]
    fn test_numbers() {
        assert_eq!(
            lex("42 3.14"),
            vec![TokenKind::Int(42), TokenKind::Float(3.14), TokenKind::Eof]
        );
    }
    #[test]
    fn test_strings() {
        assert_eq!(
            lex("\"hello\" \"world\""),
            vec![
                TokenKind::String("hello".into()),
                TokenKind::String("world".into()),
                TokenKind::Eof
            ]
        );
    }
    // End-to-end check of a full function header and body.
    #[test]
    fn test_function() {
        assert_eq!(
            lex("fn add(a: Int, b: Int): Int = a + b"),
            vec![
                TokenKind::Fn,
                TokenKind::Ident("add".into()),
                TokenKind::LParen,
                TokenKind::Ident("a".into()),
                TokenKind::Colon,
                TokenKind::Ident("Int".into()),
                TokenKind::Comma,
                TokenKind::Ident("b".into()),
                TokenKind::Colon,
                TokenKind::Ident("Int".into()),
                TokenKind::RParen,
                TokenKind::Colon,
                TokenKind::Ident("Int".into()),
                TokenKind::Eq,
                TokenKind::Ident("a".into()),
                TokenKind::Plus,
                TokenKind::Ident("b".into()),
                TokenKind::Eof
            ]
        );
    }
}

791
src/main.rs Normal file
View File

@@ -0,0 +1,791 @@
//! Lux - A functional programming language with first-class effects
mod ast;
mod interpreter;
mod lexer;
mod modules;
mod parser;
mod schema;
mod typechecker;
mod types;
use interpreter::Interpreter;
use parser::Parser;
use std::io::{self, Write};
use typechecker::TypeChecker;
/// Interpreter version shown in the REPL banner.
const VERSION: &str = "0.1.0";
/// Help text printed by the REPL's `:help` command.
const HELP: &str = r#"
Lux - A functional language with first-class effects
Commands:
:help, :h Show this help
:quit, :q Exit the REPL
:type <expr> Show the type of an expression
:clear Clear the environment
:load <file> Load and execute a file
:trace on/off Enable/disable effect tracing
:traces Show recorded effect traces
Examples:
> let x = 42
> x + 1
43
> fn double(n: Int): Int = n * 2
> double(21)
42
> Console.print("Hello, world!")
Hello, world!
Debugging:
> :trace on
> Console.print("test")
> :traces
[ 0.123ms] Console.print("test") → ()
"#;
/// Entry point: with a file argument, execute that file; otherwise start
/// the interactive REPL.
fn main() {
    match std::env::args().nth(1) {
        Some(path) => run_file(&path),
        None => run_repl(),
    }
}
/// Loads, type-checks, and executes the program at `path`, resolving module
/// imports relative to the file's directory. Exits with status 1 on any
/// read, module, type, or runtime error.
fn run_file(path: &str) {
    use modules::ModuleLoader;
    use std::path::Path;
    let file_path = Path::new(path);
    let source = std::fs::read_to_string(file_path).unwrap_or_else(|e| {
        eprintln!("Error reading file '{}': {}", path, e);
        std::process::exit(1);
    });
    // Resolve imports relative to the directory containing the file.
    let mut loader = ModuleLoader::new();
    if let Some(parent) = file_path.parent() {
        loader.add_search_path(parent.to_path_buf());
    }
    // Load and parse the program (including any imports).
    let program = loader
        .load_source(&source, Some(file_path))
        .unwrap_or_else(|e| {
            eprintln!("Module error: {}", e);
            std::process::exit(1);
        });
    let mut checker = TypeChecker::new();
    if let Err(errors) = checker.check_program_with_modules(&program, &loader) {
        for error in errors {
            eprintln!("Type error: {}", error);
        }
        std::process::exit(1);
    }
    let mut interp = Interpreter::new();
    match interp.run_with_modules(&program, &loader) {
        Ok(value) => {
            // Don't echo the unit value for programs with no result.
            if !matches!(value, interpreter::Value::Unit) {
                println!("{}", value);
            }
        }
        Err(e) => {
            eprintln!("Runtime error: {}", e);
            std::process::exit(1);
        }
    }
}
/// Runs the interactive read-eval-print loop until EOF or `:quit`.
///
/// Lines starting with `:` are dispatched to `handle_command`; everything
/// else is buffered and evaluated once bracket/brace counts balance.
fn run_repl() {
    println!("Lux v{}", VERSION);
    println!("Type :help for help, :quit to exit\n");
    let mut interp = Interpreter::new();
    let mut checker = TypeChecker::new();
    let mut buffer = String::new();
    let mut continuation = false;
    loop {
        // Print prompt
        let prompt = if continuation { "... " } else { "lux> " };
        print!("{}", prompt);
        io::stdout().flush().unwrap();
        // Read input
        let mut line = String::new();
        match io::stdin().read_line(&mut line) {
            Ok(0) => break, // EOF
            Ok(_) => {}
            Err(e) => {
                eprintln!("Error reading input: {}", e);
                continue;
            }
        }
        let line = line.trim_end();
        // Handle commands
        if !continuation && line.starts_with(':') {
            handle_command(line, &mut interp, &mut checker);
            continue;
        }
        // Accumulate input
        buffer.push_str(line);
        buffer.push('\n');
        // Check for continuation (simple heuristic: unbalanced braces)
        // NOTE(review): the counts ignore braces/parens inside string
        // literals, so a line like `let s = "("` triggers continuation mode.
        let open_braces = buffer.chars().filter(|c| *c == '{').count();
        let close_braces = buffer.chars().filter(|c| *c == '}').count();
        let open_parens = buffer.chars().filter(|c| *c == '(').count();
        let close_parens = buffer.chars().filter(|c| *c == ')').count();
        if open_braces > close_braces || open_parens > close_parens {
            continuation = true;
            continue;
        }
        continuation = false;
        // Take the accumulated input, leaving an empty buffer for next time.
        let input = std::mem::take(&mut buffer);
        if input.trim().is_empty() {
            continue;
        }
        eval_input(&input, &mut interp, &mut checker);
    }
    println!("\nGoodbye!");
}
/// Dispatches a REPL `:command` line (already known to start with `:`).
fn handle_command(line: &str, interp: &mut Interpreter, checker: &mut TypeChecker) {
    // Split into the command word and the (trimmed) remainder, if any.
    let (cmd, arg) = match line.split_once(' ') {
        Some((head, rest)) => (head, Some(rest.trim())),
        None => (line, None),
    };
    match cmd {
        ":help" | ":h" => println!("{}", HELP),
        ":quit" | ":q" => {
            println!("Goodbye!");
            std::process::exit(0);
        }
        ":type" | ":t" => match arg {
            Some(expr_str) => show_type(expr_str, checker),
            None => println!("Usage: :type <expression>"),
        },
        ":clear" => {
            // Reset both the evaluator and the type environment.
            *interp = Interpreter::new();
            *checker = TypeChecker::new();
            println!("Environment cleared.");
        }
        ":load" | ":l" => match arg {
            Some(path) => load_file(path, interp, checker),
            None => println!("Usage: :load <filename>"),
        },
        ":trace" => match arg {
            Some("on") => {
                interp.enable_tracing();
                println!("Effect tracing enabled.");
            }
            Some("off") => {
                interp.trace_effects = false;
                println!("Effect tracing disabled.");
            }
            _ => println!("Usage: :trace on|off"),
        },
        ":traces" => {
            if interp.get_traces().is_empty() {
                println!("No effect traces recorded. Use :trace on to enable tracing.");
            } else {
                interp.print_traces();
            }
        }
        _ => {
            println!("Unknown command: {}", cmd);
            println!("Type :help for help");
        }
    }
}
/// REPL `:type` — parses `expr_str` as a synthetic `let` binding and runs
/// the type checker over it, printing any parse or type errors.
fn show_type(expr_str: &str, checker: &mut TypeChecker) {
    // Wrap the bare expression in a let so the declaration parser accepts it.
    let wrapped = format!("let _expr_ = {}", expr_str);
    match Parser::parse_source(&wrapped) {
        Err(e) => println!("Parse error: {}", e),
        Ok(program) => match checker.check_program(&program) {
            Err(errors) => {
                for error in errors {
                    println!("Type error: {}", error);
                }
            }
            Ok(_) => println!("(type checking passed)"),
        },
    }
}
/// REPL `:load` — reads, parses, type-checks, and executes a file inside the
/// current session, printing (not exiting on) any error.
fn load_file(path: &str, interp: &mut Interpreter, checker: &mut TypeChecker) {
    let source = match std::fs::read_to_string(path) {
        Ok(s) => s,
        Err(e) => return println!("Error reading file '{}': {}", path, e),
    };
    let program = match Parser::parse_source(&source) {
        Ok(p) => p,
        Err(e) => return println!("Parse error: {}", e),
    };
    if let Err(errors) = checker.check_program(&program) {
        for error in &errors {
            println!("Type error: {}", error);
        }
        return;
    }
    match interp.run(&program) {
        Ok(_) => println!("Loaded '{}'", path),
        Err(e) => println!("Runtime error: {}", e),
    }
}
/// Parses and evaluates one piece of REPL input.
///
/// First tries to parse the input as a sequence of declarations; if that
/// fails, retries it wrapped as `let _result_ = <input>` so bare
/// expressions work. If both parses fail, the original parse error is
/// reported. Successful runs print the value (declarations suppress unit).
fn eval_input(input: &str, interp: &mut Interpreter, checker: &mut TypeChecker) {
    // Try to parse as a program (declarations)
    match Parser::parse_source(input) {
        Ok(program) => {
            // Type check
            if let Err(errors) = checker.check_program(&program) {
                for error in errors {
                    println!("Type error: {}", error);
                }
                return;
            }
            // Execute
            match interp.run(&program) {
                Ok(value) => {
                    // Don't echo the unit value for pure declarations.
                    if !matches!(value, interpreter::Value::Unit) {
                        println!("{}", value);
                    }
                }
                Err(e) => {
                    println!("Runtime error: {}", e);
                }
            }
        }
        Err(parse_err) => {
            // Try wrapping as an expression
            let wrapped = format!("let _result_ = {}", input.trim());
            match Parser::parse_source(&wrapped) {
                Ok(program) => {
                    if let Err(errors) = checker.check_program(&program) {
                        for error in errors {
                            println!("Type error: {}", error);
                        }
                        return;
                    }
                    match interp.run(&program) {
                        Ok(value) => {
                            println!("{}", value);
                        }
                        Err(e) => {
                            println!("Runtime error: {}", e);
                        }
                    }
                }
                Err(_) => {
                    // Use original error
                    println!("Parse error: {}", parse_err);
                }
            }
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
fn eval(source: &str) -> Result<String, String> {
let program = Parser::parse_source(source).map_err(|e| e.to_string())?;
let mut checker = TypeChecker::new();
checker.check_program(&program).map_err(|errors| {
errors
.iter()
.map(|e| e.to_string())
.collect::<Vec<_>>()
.join("\n")
})?;
let mut interp = Interpreter::new();
let value = interp.run(&program).map_err(|e| e.to_string())?;
Ok(format!("{}", value))
}
#[test]
fn test_arithmetic() {
assert_eq!(eval("let x = 1 + 2").unwrap(), "3");
assert_eq!(eval("let x = 10 - 3").unwrap(), "7");
assert_eq!(eval("let x = 4 * 5").unwrap(), "20");
assert_eq!(eval("let x = 15 / 3").unwrap(), "5");
}
#[test]
fn test_function() {
let source = r#"
fn add(a: Int, b: Int): Int = a + b
let result = add(3, 4)
"#;
assert_eq!(eval(source).unwrap(), "7");
}
#[test]
fn test_if_expr() {
let source = r#"
fn max(a: Int, b: Int): Int = if a > b then a else b
let result = max(5, 3)
"#;
assert_eq!(eval(source).unwrap(), "5");
}
#[test]
fn test_recursion() {
let source = r#"
fn factorial(n: Int): Int = if n <= 1 then 1 else n * factorial(n - 1)
let result = factorial(5)
"#;
assert_eq!(eval(source).unwrap(), "120");
}
#[test]
fn test_lambda() {
let source = r#"
let double = fn(x: Int): Int => x * 2
let result = double(21)
"#;
assert_eq!(eval(source).unwrap(), "42");
}
#[test]
fn test_records() {
let source = r#"
let person = { name: "Alice", age: 30 }
let result = person.age
"#;
assert_eq!(eval(source).unwrap(), "30");
}
#[test]
fn test_lists() {
let source = "let nums = [1, 2, 3]";
assert_eq!(eval(source).unwrap(), "[1, 2, 3]");
}
#[test]
fn test_tuples() {
let source = "let pair = (42, \"hello\")";
assert_eq!(eval(source).unwrap(), "(42, \"hello\")");
}
#[test]
fn test_block() {
let source = r#"
let result = {
let x = 10
let y = 20
x + y
}
"#;
assert_eq!(eval(source).unwrap(), "30");
}
#[test]
fn test_pipe() {
let source = r#"
fn double(x: Int): Int = x * 2
fn add_one(x: Int): Int = x + 1
let result = 5 |> double |> add_one
"#;
assert_eq!(eval(source).unwrap(), "11");
}
// ============ Standard Library Tests ============
// List tests
#[test]
fn test_list_length() {
assert_eq!(eval("let x = List.length([1, 2, 3])").unwrap(), "3");
assert_eq!(eval("let x = List.length([])").unwrap(), "0");
}
#[test]
fn test_list_reverse() {
assert_eq!(
eval("let x = List.reverse([1, 2, 3])").unwrap(),
"[3, 2, 1]"
);
assert_eq!(eval("let x = List.reverse([])").unwrap(), "[]");
}
#[test]
fn test_list_range() {
assert_eq!(eval("let x = List.range(0, 5)").unwrap(), "[0, 1, 2, 3, 4]");
assert_eq!(eval("let x = List.range(3, 3)").unwrap(), "[]");
assert_eq!(eval("let x = List.range(-2, 2)").unwrap(), "[-2, -1, 0, 1]");
}
#[test]
fn test_list_head() {
assert_eq!(eval("let x = List.head([1, 2, 3])").unwrap(), "Some(1)");
assert_eq!(eval("let x = List.head([])").unwrap(), "None");
}
#[test]
fn test_list_tail() {
assert_eq!(
eval("let x = List.tail([1, 2, 3])").unwrap(),
"Some([2, 3])"
);
assert_eq!(eval("let x = List.tail([1])").unwrap(), "Some([])");
assert_eq!(eval("let x = List.tail([])").unwrap(), "None");
}
#[test]
fn test_list_concat() {
assert_eq!(
eval("let x = List.concat([1, 2], [3, 4])").unwrap(),
"[1, 2, 3, 4]"
);
assert_eq!(eval("let x = List.concat([], [1])").unwrap(), "[1]");
assert_eq!(eval("let x = List.concat([1], [])").unwrap(), "[1]");
}
#[test]
fn test_list_get() {
assert_eq!(
eval("let x = List.get([10, 20, 30], 0)").unwrap(),
"Some(10)"
);
assert_eq!(
eval("let x = List.get([10, 20, 30], 2)").unwrap(),
"Some(30)"
);
assert_eq!(eval("let x = List.get([10, 20, 30], 5)").unwrap(), "None");
assert_eq!(eval("let x = List.get([10, 20, 30], -1)").unwrap(), "None");
}
#[test]
fn test_list_map() {
let source = r#"
fn double(x: Int): Int = x * 2
let result = List.map([1, 2, 3], double)
"#;
assert_eq!(eval(source).unwrap(), "[2, 4, 6]");
}
#[test]
fn test_list_map_lambda() {
let source = "let x = List.map([1, 2, 3], fn(x: Int): Int => x * x)";
assert_eq!(eval(source).unwrap(), "[1, 4, 9]");
}
#[test]
fn test_list_filter() {
let source = "let x = List.filter([1, 2, 3, 4, 5], fn(x: Int): Bool => x > 2)";
assert_eq!(eval(source).unwrap(), "[3, 4, 5]");
}
#[test]
fn test_list_filter_all() {
let source = "let x = List.filter([1, 2, 3], fn(x: Int): Bool => x > 10)";
assert_eq!(eval(source).unwrap(), "[]");
}
#[test]
fn test_list_fold() {
let source = "let x = List.fold([1, 2, 3, 4], 0, fn(acc: Int, x: Int): Int => acc + x)";
assert_eq!(eval(source).unwrap(), "10");
}
#[test]
fn test_list_fold_product() {
let source = "let x = List.fold([1, 2, 3, 4], 1, fn(acc: Int, x: Int): Int => acc * x)";
assert_eq!(eval(source).unwrap(), "24");
}
// String tests
#[test]
fn test_string_length() {
assert_eq!(eval(r#"let x = String.length("hello")"#).unwrap(), "5");
assert_eq!(eval(r#"let x = String.length("")"#).unwrap(), "0");
}
#[test]
fn test_string_split() {
assert_eq!(
eval(r#"let x = String.split("a,b,c", ",")"#).unwrap(),
r#"["a", "b", "c"]"#
);
assert_eq!(
eval(r#"let x = String.split("hello", ",")"#).unwrap(),
r#"["hello"]"#
);
}
#[test]
fn test_string_join() {
assert_eq!(
eval(r#"let x = String.join(["a", "b", "c"], "-")"#).unwrap(),
r#""a-b-c""#
);
assert_eq!(
eval(r#"let x = String.join(["hello"], ",")"#).unwrap(),
r#""hello""#
);
assert_eq!(eval(r#"let x = String.join([], ",")"#).unwrap(), r#""""#);
}
#[test]
fn test_string_trim() {
assert_eq!(
eval(r#"let x = String.trim(" hello ")"#).unwrap(),
r#""hello""#
);
assert_eq!(
eval(r#"let x = String.trim("hello")"#).unwrap(),
r#""hello""#
);
assert_eq!(eval(r#"let x = String.trim(" ")"#).unwrap(), r#""""#);
}
#[test]
fn test_string_contains() {
assert_eq!(
eval(r#"let x = String.contains("hello world", "world")"#).unwrap(),
"true"
);
assert_eq!(
eval(r#"let x = String.contains("hello", "xyz")"#).unwrap(),
"false"
);
assert_eq!(
eval(r#"let x = String.contains("hello", "")"#).unwrap(),
"true"
);
}
#[test]
fn test_string_replace() {
assert_eq!(
eval(r#"let x = String.replace("hello", "l", "L")"#).unwrap(),
r#""heLLo""#
);
assert_eq!(
eval(r#"let x = String.replace("aaa", "a", "b")"#).unwrap(),
r#""bbb""#
);
}
#[test]
fn test_string_chars() {
assert_eq!(eval(r#"let x = String.chars("hi")"#).unwrap(), "['h', 'i']");
assert_eq!(eval(r#"let x = String.chars("")"#).unwrap(), "[]");
}
#[test]
fn test_string_lines() {
    // Note: Using actual newline in the string
    // (the raw string literal intentionally spans three source lines so the
    // embedded Lux string contains real '\n' characters).
    let source = r#"let x = String.lines("a
b
c")"#;
    assert_eq!(eval(source).unwrap(), r#"["a", "b", "c"]"#);
}
// Option tests
#[test]
fn test_option_constructors() {
    let some = eval("let x = Some(42)").unwrap();
    assert_eq!(some, "Some(42)");
    let none = eval("let x = None").unwrap();
    assert_eq!(none, "None");
}
#[test]
fn test_option_is_some() {
    assert_eq!(eval("let x = Option.isSome(Some(42))").unwrap(), "true");
    assert_eq!(eval("let x = Option.isSome(None)").unwrap(), "false");
}
#[test]
fn test_option_is_none() {
    assert_eq!(eval("let x = Option.isNone(None)").unwrap(), "true");
    assert_eq!(eval("let x = Option.isNone(Some(42))").unwrap(), "false");
}
#[test]
fn test_option_get_or_else() {
    // Some yields its payload; None yields the fallback.
    assert_eq!(eval("let x = Option.getOrElse(Some(42), 0)").unwrap(), "42");
    assert_eq!(eval("let x = Option.getOrElse(None, 0)").unwrap(), "0");
}
#[test]
fn test_option_map() {
    // Mapping over Some applies the function to the wrapped value.
    assert_eq!(
        eval("let x = Option.map(Some(5), fn(x: Int): Int => x * 2)").unwrap(),
        "Some(10)"
    );
}
#[test]
fn test_option_map_none() {
    // Mapping over None is a no-op.
    assert_eq!(
        eval("let x = Option.map(None, fn(x: Int): Int => x * 2)").unwrap(),
        "None"
    );
}
#[test]
fn test_option_flat_map() {
    assert_eq!(
        eval("let x = Option.flatMap(Some(5), fn(x: Int): Option<Int> => Some(x * 2))").unwrap(),
        "Some(10)"
    );
}
#[test]
fn test_option_flat_map_to_none() {
    // flatMap lets the callback collapse the chain to None.
    assert_eq!(
        eval("let x = Option.flatMap(Some(5), fn(x: Int): Option<Int> => None)").unwrap(),
        "None"
    );
}
// Result tests
#[test]
fn test_result_constructors() {
    let ok = eval("let x = Ok(42)").unwrap();
    assert_eq!(ok, "Ok(42)");
    let err = eval(r#"let x = Err("error")"#).unwrap();
    assert_eq!(err, r#"Err("error")"#);
}
#[test]
fn test_result_is_ok() {
    assert_eq!(eval("let x = Result.isOk(Ok(42))").unwrap(), "true");
    assert_eq!(eval(r#"let x = Result.isOk(Err("e"))"#).unwrap(), "false");
}
#[test]
fn test_result_is_err() {
    assert_eq!(eval(r#"let x = Result.isErr(Err("e"))"#).unwrap(), "true");
    assert_eq!(eval("let x = Result.isErr(Ok(42))").unwrap(), "false");
}
#[test]
fn test_result_get_or_else() {
    // Ok yields its payload; Err yields the fallback.
    assert_eq!(eval("let x = Result.getOrElse(Ok(42), 0)").unwrap(), "42");
    assert_eq!(eval(r#"let x = Result.getOrElse(Err("e"), 0)"#).unwrap(), "0");
}
#[test]
fn test_result_map() {
    // map transforms the Ok payload.
    assert_eq!(
        eval("let x = Result.map(Ok(5), fn(x: Int): Int => x * 2)").unwrap(),
        "Ok(10)"
    );
}
#[test]
fn test_result_map_err() {
    // map leaves an Err untouched.
    assert_eq!(
        eval(r#"let x = Result.map(Err("e"), fn(x: Int): Int => x * 2)"#).unwrap(),
        r#"Err("e")"#
    );
}
// Utility function tests
#[test]
fn test_to_string() {
    // toString renders any value as its display string.
    for (program, rendered) in [
        ("let x = toString(42)", r#""42""#),
        ("let x = toString(true)", r#""true""#),
        ("let x = toString([1, 2])", r#""[1, 2]""#),
    ] {
        assert_eq!(eval(program).unwrap(), rendered);
    }
}
#[test]
fn test_type_of() {
    // typeOf reports the runtime type name of a value.
    for (program, type_name) in [
        ("let x = typeOf(42)", r#""Int""#),
        ("let x = typeOf(true)", r#""Bool""#),
        ("let x = typeOf([1, 2])", r#""List""#),
        (r#"let x = typeOf("hello")"#, r#""String""#),
    ] {
        assert_eq!(eval(program).unwrap(), type_name);
    }
}
// Pipe with stdlib tests
#[test]
fn test_pipe_with_list() {
    let reversed = eval("let x = [1, 2, 3] |> List.reverse").unwrap();
    assert_eq!(reversed, "[3, 2, 1]");
    let length = eval("let x = [1, 2, 3] |> List.length").unwrap();
    assert_eq!(length, "3");
}
#[test]
fn test_pipe_with_string() {
    let trimmed = eval(r#"let x = " hello " |> String.trim"#).unwrap();
    assert_eq!(trimmed, r#""hello""#);
}
// Combined stdlib usage tests
#[test]
fn test_list_filter_even() {
    // range(1, 6) then filter with a user-defined predicate.
    let program = r#"
fn isEven(x: Int): Bool = x % 2 == 0
let result = List.filter(List.range(1, 6), isEven)
"#;
    assert_eq!(eval(program).unwrap(), "[2, 4]");
}
#[test]
fn test_option_chain() {
    // head -> map -> getOrElse over a non-empty list.
    let program = r#"
fn times10(x: Int): Int = x * 10
let head = List.head([1, 2, 3])
let mapped = Option.map(head, times10)
let result = Option.getOrElse(mapped, 0)
"#;
    assert_eq!(eval(program).unwrap(), "10");
}
#[test]
fn test_option_chain_empty() {
    // head of an empty list is None, so the fallback value wins.
    let program = r#"
fn times10(x: Int): Int = x * 10
let head = List.head([])
let mapped = Option.map(head, times10)
let result = Option.getOrElse(mapped, 0)
"#;
    assert_eq!(eval(program).unwrap(), "0");
}
}

634
src/modules.rs Normal file
View File

@@ -0,0 +1,634 @@
//! Module system for the Lux language
//!
//! Handles loading, parsing, and resolving module imports.
use crate::ast::{Declaration, ImportDecl, Program, Visibility};
use crate::parser::Parser;
use std::collections::{HashMap, HashSet};
use std::fs;
use std::path::{Path, PathBuf};
/// An error produced while locating, reading, parsing, or resolving a module.
#[derive(Debug, Clone)]
pub struct ModuleError {
    /// Human-readable description of what went wrong.
    pub message: String,
    /// The module path the error applies to (e.g., "std/list" or "<main>").
    pub module_path: String,
}

impl std::fmt::Display for ModuleError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Destructure once so the fields read naturally in the format call.
        let ModuleError {
            message,
            module_path,
        } = self;
        write!(f, "Module error in '{}': {}", module_path, message)
    }
}

impl std::error::Error for ModuleError {}
/// A loaded and parsed module
#[derive(Debug, Clone)]
pub struct Module {
    /// The module's canonical path (e.g., "std/list")
    pub path: String,
    /// The parsed program
    pub program: Program,
    /// Names exported by this module: public declarations, plus effects and
    /// handlers (which are always exported).
    pub exports: HashSet<String>,
}
impl Module {
    /// All declarations visible to importers: items declared `pub`, plus
    /// effects and handlers, which are treated as always public for now.
    pub fn public_declarations(&self) -> Vec<&Declaration> {
        let mut visible = Vec::new();
        for decl in &self.program.declarations {
            let is_public = match decl {
                Declaration::Function(f) => f.visibility == Visibility::Public,
                Declaration::Let(l) => l.visibility == Visibility::Public,
                Declaration::Type(t) => t.visibility == Visibility::Public,
                // Effects and handlers are always public for now.
                Declaration::Effect(_) | Declaration::Handler(_) => true,
            };
            if is_public {
                visible.push(decl);
            }
        }
        visible
    }
}
/// Module loader and resolver
///
/// Resolves import paths against a list of search directories, caches
/// parsed modules, and tracks in-progress loads to detect circular imports.
pub struct ModuleLoader {
    /// Base directories to search for modules
    search_paths: Vec<PathBuf>,
    /// Cache of loaded modules (path -> module)
    cache: HashMap<String, Module>,
    /// Modules currently being loaded (for circular dependency detection)
    loading: HashSet<String>,
}
impl ModuleLoader {
    /// Create a loader that searches only the current directory.
    pub fn new() -> Self {
        Self {
            search_paths: vec![PathBuf::from(".")],
            cache: HashMap::new(),
            loading: HashSet::new(),
        }
    }

    /// Create a loader with custom search paths
    pub fn with_paths(paths: Vec<PathBuf>) -> Self {
        Self {
            search_paths: paths,
            cache: HashMap::new(),
            loading: HashSet::new(),
        }
    }

    /// Add a directory to the end of the search path list.
    pub fn add_search_path(&mut self, path: PathBuf) {
        self.search_paths.push(path);
    }

    /// Resolve a module path (e.g., "std/list") to the first existing file
    /// "<search_dir>/std/list.lux" among the search paths, in order.
    fn resolve_path(&self, module_path: &str) -> Option<PathBuf> {
        let relative_path = format!("{}.lux", module_path);
        self.search_paths
            .iter()
            .map(|search_path| search_path.join(&relative_path))
            .find(|full_path| full_path.exists())
    }

    /// Load a module by its import path, reusing the cache when possible.
    ///
    /// # Errors
    ///
    /// Returns a `ModuleError` if the module cannot be found, read, or
    /// parsed, or if a circular import is detected.
    pub fn load_module(&mut self, module_path: &str) -> Result<&Module, ModuleError> {
        // Fast path: already loaded. The contains_key + get pair (instead of
        // returning the borrow from a single get) keeps the cache borrow from
        // conflicting with the mutations below.
        if self.cache.contains_key(module_path) {
            return Ok(self.cache.get(module_path).unwrap());
        }
        // A path still present in `loading` belongs to an ancestor of this
        // call, so importing it again would recurse forever.
        if self.loading.contains(module_path) {
            return Err(ModuleError {
                message: "Circular dependency detected".to_string(),
                module_path: module_path.to_string(),
            });
        }
        // Mark as loading.
        self.loading.insert(module_path.to_string());
        // Resolve and load, capturing the result instead of using `?` so the
        // `loading` marker is removed on failure too. Previously a failed
        // load left the path in `loading`, making every later retry report a
        // bogus "Circular dependency detected".
        let loaded = match self.resolve_path(module_path) {
            Some(file_path) => self.load_file(&file_path, module_path),
            None => Err(ModuleError {
                message: format!("Module not found. Searched in: {:?}", self.search_paths),
                module_path: module_path.to_string(),
            }),
        };
        self.loading.remove(module_path);
        let module = loaded?;
        // Cache the module and hand back the cached copy.
        self.cache.insert(module_path.to_string(), module);
        Ok(self.cache.get(module_path).unwrap())
    }

    /// Read, parse, and recursively load the imports of one module file.
    fn load_file(&mut self, file_path: &Path, module_path: &str) -> Result<Module, ModuleError> {
        // Read the file
        let source = fs::read_to_string(file_path).map_err(|e| ModuleError {
            message: format!("Failed to read file: {}", e),
            module_path: module_path.to_string(),
        })?;
        // Parse the source
        let program = Parser::parse_source(&source).map_err(|e| ModuleError {
            message: format!("Parse error: {}", e),
            module_path: module_path.to_string(),
        })?;
        // Load transitive imports before this module counts as loaded.
        for import in &program.imports {
            let import_path = import.path.to_string();
            self.load_module(&import_path)?;
        }
        let exports = self.collect_exports(&program);
        Ok(Module {
            path: module_path.to_string(),
            program,
            exports,
        })
    }

    /// Load a program from source (for REPL or direct execution).
    ///
    /// If `base_path` is given, its parent directory is appended to the
    /// search paths so modules next to the main file can be imported.
    pub fn load_source(
        &mut self,
        source: &str,
        base_path: Option<&Path>,
    ) -> Result<Program, ModuleError> {
        if let Some(base) = base_path {
            if let Some(parent) = base.parent() {
                if !self.search_paths.contains(&parent.to_path_buf()) {
                    self.search_paths.push(parent.to_path_buf());
                }
            }
        }
        // Parse the source
        let program = Parser::parse_source(source).map_err(|e| ModuleError {
            message: format!("Parse error: {}", e),
            module_path: "<main>".to_string(),
        })?;
        // Load any imports
        for import in &program.imports {
            let import_path = import.path.to_string();
            self.load_module(&import_path)?;
        }
        Ok(program)
    }

    /// Collect the names a program exports: `pub` functions, lets, and
    /// types; effects and handlers are always exported.
    fn collect_exports(&self, program: &Program) -> HashSet<String> {
        let mut exports = HashSet::new();
        for decl in &program.declarations {
            match decl {
                Declaration::Function(f) if f.visibility == Visibility::Public => {
                    exports.insert(f.name.name.clone());
                }
                Declaration::Let(l) if l.visibility == Visibility::Public => {
                    exports.insert(l.name.name.clone());
                }
                Declaration::Type(t) if t.visibility == Visibility::Public => {
                    exports.insert(t.name.name.clone());
                }
                Declaration::Effect(e) => {
                    // Effects are always exported
                    exports.insert(e.name.name.clone());
                }
                Declaration::Handler(h) => {
                    // Handlers are always exported
                    exports.insert(h.name.name.clone());
                }
                // Non-public declarations fall through here and stay private.
                _ => {}
            }
        }
        exports
    }

    /// Get a previously loaded module from the cache.
    pub fn get_module(&self, module_path: &str) -> Option<&Module> {
        self.cache.get(module_path)
    }

    /// Iterate over all loaded modules as (path, module) pairs.
    pub fn loaded_modules(&self) -> impl Iterator<Item = (&String, &Module)> {
        self.cache.iter()
    }

    /// Clear the module cache
    pub fn clear_cache(&mut self) {
        self.cache.clear();
    }

    /// Resolve imports for a program and return a map from in-scope name to
    /// what it refers to. All imported modules must already be loaded.
    ///
    /// # Errors
    ///
    /// Fails if an imported module is not in the cache, or if a selective
    /// import names something the module does not export.
    pub fn resolve_imports(
        &self,
        imports: &[ImportDecl],
    ) -> Result<HashMap<String, ResolvedImport>, ModuleError> {
        let mut resolved = HashMap::new();
        for import in imports {
            let module_path = import.path.to_string();
            let module = self.get_module(&module_path).ok_or_else(|| ModuleError {
                message: "Module not loaded".to_string(),
                module_path: module_path.clone(),
            })?;
            // Name a whole-module import binds to: the alias if present,
            // otherwise the last path segment.
            let import_name = if let Some(ref alias) = import.alias {
                // import foo/bar as Baz -> use "Baz" as the name
                alias.name.clone()
            } else {
                // import foo/bar -> use "bar" as the name (last segment)
                import
                    .path
                    .segments
                    .last()
                    .map(|s| s.name.clone())
                    .unwrap_or_else(|| module_path.clone())
            };
            if import.wildcard {
                // import foo.* -> import all exports directly
                for export in &module.exports {
                    resolved.insert(
                        export.clone(),
                        ResolvedImport {
                            module_path: module_path.clone(),
                            name: export.clone(),
                            kind: ImportKind::Direct,
                        },
                    );
                }
            } else if let Some(ref items) = import.items {
                // import foo.{a, b, c} -> import specific items
                for item in items {
                    if !module.exports.contains(&item.name) {
                        return Err(ModuleError {
                            message: format!("'{}' is not exported from module", item.name),
                            module_path: module_path.clone(),
                        });
                    }
                    resolved.insert(
                        item.name.clone(),
                        ResolvedImport {
                            module_path: module_path.clone(),
                            name: item.name.clone(),
                            kind: ImportKind::Direct,
                        },
                    );
                }
            } else {
                // import foo/bar -> import as module object
                resolved.insert(
                    import_name,
                    ResolvedImport {
                        module_path: module_path.clone(),
                        name: module_path.clone(),
                        kind: ImportKind::Module,
                    },
                );
            }
        }
        Ok(resolved)
    }
}
impl Default for ModuleLoader {
fn default() -> Self {
Self::new()
}
}
/// A resolved import
#[derive(Debug, Clone)]
pub struct ResolvedImport {
    /// The module path this import comes from
    pub module_path: String,
    /// The name being imported; for `ImportKind::Module` imports this is
    /// the module path itself rather than an individual export.
    pub name: String,
    /// What kind of import this is
    pub kind: ImportKind,
}
/// Kind of import
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ImportKind {
    /// Import as a module object (import foo/bar)
    Module,
    /// Direct import of a name (import foo.{bar} or import foo.*)
    Direct,
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::io::Write;
    use tempfile::TempDir;

    /// Write `content` to `<dir>/<name>.lux`, creating parent directories
    /// as needed, and return the file's path.
    fn create_test_module(dir: &Path, name: &str, content: &str) -> PathBuf {
        let path = dir.join(format!("{}.lux", name));
        if let Some(parent) = path.parent() {
            fs::create_dir_all(parent).unwrap();
        }
        let mut file = fs::File::create(&path).unwrap();
        file.write_all(content.as_bytes()).unwrap();
        path
    }

    #[test]
    fn test_load_simple_module() {
        let dir = TempDir::new().unwrap();
        create_test_module(
            dir.path(),
            "math",
            r#"
pub fn add(a: Int, b: Int): Int = a + b
pub fn sub(a: Int, b: Int): Int = a - b
fn private_fn(): Int = 42
"#,
        );
        let mut loader = ModuleLoader::with_paths(vec![dir.path().to_path_buf()]);
        let module = loader.load_module("math").unwrap();
        assert_eq!(module.path, "math");
        assert!(module.exports.contains("add"));
        assert!(module.exports.contains("sub"));
        // Private declarations must not appear in the export set.
        assert!(!module.exports.contains("private_fn"));
    }

    #[test]
    fn test_load_nested_module() {
        // Module paths with slashes map to subdirectories.
        let dir = TempDir::new().unwrap();
        create_test_module(
            dir.path(),
            "std/list",
            r#"
pub fn length(list: List<Int>): Int = 0
"#,
        );
        let mut loader = ModuleLoader::with_paths(vec![dir.path().to_path_buf()]);
        let module = loader.load_module("std/list").unwrap();
        assert_eq!(module.path, "std/list");
        assert!(module.exports.contains("length"));
    }

    #[test]
    fn test_module_not_found() {
        let dir = TempDir::new().unwrap();
        let mut loader = ModuleLoader::with_paths(vec![dir.path().to_path_buf()]);
        let result = loader.load_module("nonexistent");
        assert!(result.is_err());
        assert!(result.unwrap_err().message.contains("not found"));
    }

    #[test]
    fn test_circular_dependency_detection() {
        // a imports b, b imports a -> the loader must report a cycle
        // instead of recursing forever.
        let dir = TempDir::new().unwrap();
        create_test_module(
            dir.path(),
            "a",
            r#"
import b
pub fn foo(): Int = 1
"#,
        );
        create_test_module(
            dir.path(),
            "b",
            r#"
import a
pub fn bar(): Int = 2
"#,
        );
        let mut loader = ModuleLoader::with_paths(vec![dir.path().to_path_buf()]);
        let result = loader.load_module("a");
        assert!(result.is_err());
        assert!(result.unwrap_err().message.contains("Circular"));
    }

    #[test]
    fn test_module_caching() {
        let dir = TempDir::new().unwrap();
        create_test_module(
            dir.path(),
            "cached",
            r#"
pub fn foo(): Int = 42
"#,
        );
        let mut loader = ModuleLoader::with_paths(vec![dir.path().to_path_buf()]);
        // Load twice
        loader.load_module("cached").unwrap();
        loader.load_module("cached").unwrap();
        // Should only be in cache once
        assert_eq!(loader.cache.len(), 1);
    }

    #[test]
    fn test_end_to_end_module_import() {
        use crate::interpreter::Interpreter;
        use crate::typechecker::TypeChecker;
        let dir = TempDir::new().unwrap();
        // Create a utility module with public functions
        create_test_module(
            dir.path(),
            "utils",
            r#"
pub fn double(x: Int): Int = x * 2
pub fn square(x: Int): Int = x * x
fn private_helper(): Int = 0
"#,
        );
        // Create a main program that imports and uses the module
        let main_source = r#"
import utils
let result = utils.double(21)
"#;
        // Set up module loader
        let mut loader = ModuleLoader::with_paths(vec![dir.path().to_path_buf()]);
        // Load and parse the main program
        let main_path = dir.path().join("main.lux");
        let program = loader.load_source(main_source, Some(&main_path)).unwrap();
        // Type check with module support
        let mut checker = TypeChecker::new();
        checker
            .check_program_with_modules(&program, &loader)
            .unwrap();
        // Run with module support
        let mut interp = Interpreter::new();
        let result = interp.run_with_modules(&program, &loader).unwrap();
        // Should evaluate to 42
        assert_eq!(format!("{}", result), "42");
    }

    #[test]
    fn test_selective_import() {
        use crate::interpreter::Interpreter;
        use crate::typechecker::TypeChecker;
        let dir = TempDir::new().unwrap();
        // Create a module with multiple exports
        create_test_module(
            dir.path(),
            "math",
            r#"
pub fn add(a: Int, b: Int): Int = a + b
pub fn mul(a: Int, b: Int): Int = a * b
"#,
        );
        // Import only the add function
        let main_source = r#"
import math.{add}
let result = add(10, 5)
"#;
        let mut loader = ModuleLoader::with_paths(vec![dir.path().to_path_buf()]);
        let main_path = dir.path().join("main.lux");
        let program = loader.load_source(main_source, Some(&main_path)).unwrap();
        let mut checker = TypeChecker::new();
        checker
            .check_program_with_modules(&program, &loader)
            .unwrap();
        let mut interp = Interpreter::new();
        let result = interp.run_with_modules(&program, &loader).unwrap();
        assert_eq!(format!("{}", result), "15");
    }

    #[test]
    fn test_module_with_alias() {
        use crate::interpreter::Interpreter;
        use crate::typechecker::TypeChecker;
        let dir = TempDir::new().unwrap();
        // Create a nested module
        create_test_module(
            dir.path(),
            "lib/helpers",
            r#"
pub fn greet(): String = "hello"
"#,
        );
        // Import with alias
        let main_source = r#"
import lib/helpers as h
let result = h.greet()
"#;
        let mut loader = ModuleLoader::with_paths(vec![dir.path().to_path_buf()]);
        let main_path = dir.path().join("main.lux");
        let program = loader.load_source(main_source, Some(&main_path)).unwrap();
        let mut checker = TypeChecker::new();
        checker
            .check_program_with_modules(&program, &loader)
            .unwrap();
        let mut interp = Interpreter::new();
        let result = interp.run_with_modules(&program, &loader).unwrap();
        assert_eq!(format!("{}", result), "\"hello\"");
    }

    #[test]
    fn test_transitive_imports() {
        // main -> mid -> base: imports of imports must be loaded too.
        use crate::interpreter::Interpreter;
        use crate::typechecker::TypeChecker;
        let dir = TempDir::new().unwrap();
        // Create base module
        create_test_module(
            dir.path(),
            "base",
            r#"
pub fn value(): Int = 100
"#,
        );
        // Create mid module that imports base
        create_test_module(
            dir.path(),
            "mid",
            r#"
import base
pub fn doubled(): Int = base.value() * 2
"#,
        );
        // Create main that imports mid
        let main_source = r#"
import mid
let result = mid.doubled()
"#;
        let mut loader = ModuleLoader::with_paths(vec![dir.path().to_path_buf()]);
        let main_path = dir.path().join("main.lux");
        let program = loader.load_source(main_source, Some(&main_path)).unwrap();
        let mut checker = TypeChecker::new();
        checker
            .check_program_with_modules(&program, &loader)
            .unwrap();
        let mut interp = Interpreter::new();
        let result = interp.run_with_modules(&program, &loader).unwrap();
        assert_eq!(format!("{}", result), "200");
    }
}

1935
src/parser.rs Normal file

File diff suppressed because it is too large Load Diff

330
src/schema.rs Normal file
View File

@@ -0,0 +1,330 @@
//! Schema Evolution for the Lux language
//!
//! Handles versioned types, compatibility checking, and migrations.
#![allow(dead_code)]
use crate::ast::{Migration, RecordField, TypeDecl, TypeDef};
use std::collections::HashMap;
/// Describes the compatibility between two versions of a type
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Compatibility {
    /// Fully compatible - no changes needed
    Compatible,
    /// Compatible with auto-migration (e.g., adding optional field with default)
    AutoMigrate(Vec<AutoMigration>),
    /// Breaking change - requires explicit migration
    Breaking(Vec<BreakingChange>),
}
/// An automatic migration step
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum AutoMigration {
    /// Add a field with a default value
    AddFieldWithDefault { field_name: String, default: String },
    /// Widen a numeric type (e.g., Int32 -> Int64)
    /// NOTE(review): not produced by any comparison in this file yet — type
    /// changes are currently all reported as breaking; confirm before relying on it.
    WidenType {
        field_name: String,
        from: String,
        to: String,
    },
}
/// A breaking change that requires explicit migration
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum BreakingChange {
    /// Field was removed
    FieldRemoved { field_name: String },
    /// Field was renamed
    /// NOTE(review): rename detection is not implemented in this file —
    /// a rename currently surfaces as a removal plus an addition.
    FieldRenamed { old_name: String, new_name: String },
    /// Field type changed incompatibly
    FieldTypeChanged {
        field_name: String,
        old_type: String,
        new_type: String,
    },
    /// Required field added without default
    RequiredFieldAdded { field_name: String },
}
/// Registry of versioned types
#[derive(Debug, Default)]
pub struct SchemaRegistry {
    /// Map from type name to versions: TypeName -> (Version -> TypeDef)
    versions: HashMap<String, HashMap<u32, VersionedTypeDef>>,
}
/// A versioned type definition with its migrations
#[derive(Debug, Clone)]
pub struct VersionedTypeDef {
    /// Schema version number (1 when the declaration carries no version annotation).
    pub version: u32,
    /// The type's structural definition at this version.
    pub definition: TypeDef,
    /// Explicit migrations declared on this version; each records the
    /// version it migrates from.
    pub migrations: Vec<Migration>,
}
impl SchemaRegistry {
    /// Create an empty registry.
    pub fn new() -> Self {
        Self::default()
    }

    /// Register a versioned type under `name`.
    ///
    /// A declaration without a version annotation is stored as version 1.
    pub fn register(&mut self, name: &str, type_decl: &TypeDecl) {
        let version = match type_decl.version {
            Some(v) => v.number,
            None => 1,
        };
        self.versions.entry(name.to_string()).or_default().insert(
            version,
            VersionedTypeDef {
                version,
                definition: type_decl.definition.clone(),
                migrations: type_decl.migrations.clone(),
            },
        );
    }

    /// All registered versions of a type, if any.
    pub fn get_versions(&self, name: &str) -> Option<&HashMap<u32, VersionedTypeDef>> {
        self.versions.get(name)
    }

    /// One specific version of a type, if registered.
    pub fn get_version(&self, name: &str, version: u32) -> Option<&VersionedTypeDef> {
        self.get_versions(name)?.get(&version)
    }

    /// Highest registered version number of a type.
    pub fn latest_version(&self, name: &str) -> Option<u32> {
        self.get_versions(name)?.keys().max().copied()
    }

    /// Compare two registered versions of `name` for compatibility.
    ///
    /// # Errors
    ///
    /// Returns an error string when either version is not registered.
    pub fn check_compatibility(
        &self,
        name: &str,
        from_version: u32,
        to_version: u32,
    ) -> Result<Compatibility, String> {
        let lookup = |version: u32| {
            self.get_version(name, version)
                .ok_or_else(|| format!("Version {} of type '{}' not found", version, name))
        };
        let from_def = lookup(from_version)?;
        let to_def = lookup(to_version)?;
        compare_type_defs(&from_def.definition, &to_def.definition)
    }

    /// Whether `to_version` declares an explicit migration from `from_version`.
    pub fn has_migration(&self, name: &str, from_version: u32, to_version: u32) -> bool {
        self.get_version(name, to_version).map_or(false, |to_def| {
            to_def
                .migrations
                .iter()
                .any(|m| m.from_version.number == from_version)
        })
    }

    /// Step-by-step version chain from `from_version` up to `to_version`,
    /// assuming versions advance one at a time (v1 -> v2 -> ... -> vN).
    /// A non-upgrade (`from_version >= to_version`) yields an empty chain.
    pub fn get_migration_chain(
        &self,
        _name: &str,
        from_version: u32,
        to_version: u32,
    ) -> Result<Vec<(u32, u32)>, String> {
        if from_version >= to_version {
            return Ok(Vec::new());
        }
        Ok((from_version..to_version).map(|v| (v, v + 1)).collect())
    }
}
/// Compare two type definitions for compatibility
fn compare_type_defs(from: &TypeDef, to: &TypeDef) -> Result<Compatibility, String> {
match (from, to) {
(TypeDef::Record(from_fields), TypeDef::Record(to_fields)) => {
compare_record_fields(from_fields, to_fields)
}
(TypeDef::Enum(from_variants), TypeDef::Enum(to_variants)) => {
// For enums, adding variants is compatible, removing is breaking
let from_names: Vec<_> = from_variants.iter().map(|v| &v.name.name).collect();
let to_names: Vec<_> = to_variants.iter().map(|v| &v.name.name).collect();
let removed: Vec<_> = from_names
.iter()
.filter(|n| !to_names.contains(n))
.collect();
if removed.is_empty() {
Ok(Compatibility::Compatible)
} else {
Ok(Compatibility::Breaking(
removed
.iter()
.map(|n| BreakingChange::FieldRemoved {
field_name: n.to_string(),
})
.collect(),
))
}
}
(TypeDef::Alias(from_type), TypeDef::Alias(to_type)) => {
// Type aliases: check if the underlying types are compatible
if from_type == to_type {
Ok(Compatibility::Compatible)
} else {
Ok(Compatibility::Breaking(vec![
BreakingChange::FieldTypeChanged {
field_name: "<alias>".to_string(),
old_type: format!("{:?}", from_type),
new_type: format!("{:?}", to_type),
},
]))
}
}
_ => {
// Different type kinds are breaking
Ok(Compatibility::Breaking(vec![]))
}
}
}
/// Compare record fields for compatibility.
///
/// Removed fields, non-optional additions, and type changes are breaking;
/// an added `Option` field can be auto-migrated with a `None` default.
/// Any breaking change dominates the overall verdict.
fn compare_record_fields(
    from: &[RecordField],
    to: &[RecordField],
) -> Result<Compatibility, String> {
    let old_fields: HashMap<&str, &RecordField> =
        from.iter().map(|f| (f.name.name.as_str(), f)).collect();
    let new_fields: HashMap<&str, &RecordField> =
        to.iter().map(|f| (f.name.name.as_str(), f)).collect();

    let mut auto = Vec::new();
    let mut breaking = Vec::new();

    // Fields present before but missing now.
    for name in old_fields.keys() {
        if !new_fields.contains_key(name) {
            breaking.push(BreakingChange::FieldRemoved {
                field_name: name.to_string(),
            });
        }
    }

    // Newly added fields: Option-typed ones get a None default; anything
    // else requires an explicit migration. (A full implementation would also
    // honor default-value annotations.)
    for (name, field) in &new_fields {
        if old_fields.contains_key(name) {
            continue;
        }
        if is_optional_type(&field.typ) {
            auto.push(AutoMigration::AddFieldWithDefault {
                field_name: name.to_string(),
                default: "None".to_string(),
            });
        } else {
            breaking.push(BreakingChange::RequiredFieldAdded {
                field_name: name.to_string(),
            });
        }
    }

    // Fields kept in both versions but whose type changed. All type changes
    // are treated as breaking for now (no widening detection yet).
    for (name, old_field) in &old_fields {
        if let Some(new_field) = new_fields.get(name) {
            if old_field.typ != new_field.typ {
                breaking.push(BreakingChange::FieldTypeChanged {
                    field_name: name.to_string(),
                    old_type: format!("{:?}", old_field.typ),
                    new_type: format!("{:?}", new_field.typ),
                });
            }
        }
    }

    if !breaking.is_empty() {
        Ok(Compatibility::Breaking(breaking))
    } else if !auto.is_empty() {
        Ok(Compatibility::AutoMigrate(auto))
    } else {
        Ok(Compatibility::Compatible)
    }
}
/// Check if a type expression represents an optional type
fn is_optional_type(typ: &crate::ast::TypeExpr) -> bool {
match typ {
crate::ast::TypeExpr::Named(ident) => ident.name == "Option",
crate::ast::TypeExpr::App(base, _) => {
if let crate::ast::TypeExpr::Named(ident) = base.as_ref() {
ident.name == "Option"
} else {
false
}
}
_ => false,
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::ast::{Ident, Span, TypeExpr};

    /// Build a record field `name: typ` with default spans.
    fn make_field(name: &str, typ: &str) -> RecordField {
        RecordField {
            name: Ident::new(name, Span::default()),
            typ: TypeExpr::Named(Ident::new(typ, Span::default())),
            span: Span::default(),
        }
    }

    #[test]
    fn test_compatible_same_fields() {
        // Identical field sets -> fully compatible.
        let from = vec![make_field("name", "String"), make_field("age", "Int")];
        let to = vec![make_field("name", "String"), make_field("age", "Int")];
        let result = compare_record_fields(&from, &to).unwrap();
        assert_eq!(result, Compatibility::Compatible);
    }

    #[test]
    fn test_breaking_field_removed() {
        let from = vec![make_field("name", "String"), make_field("age", "Int")];
        let to = vec![make_field("name", "String")];
        let result = compare_record_fields(&from, &to).unwrap();
        assert!(matches!(result, Compatibility::Breaking(_)));
    }

    #[test]
    fn test_breaking_field_added_required() {
        // A new non-Option field has no default, so it is breaking.
        let from = vec![make_field("name", "String")];
        let to = vec![make_field("name", "String"), make_field("age", "Int")];
        let result = compare_record_fields(&from, &to).unwrap();
        assert!(matches!(result, Compatibility::Breaking(_)));
    }

    #[test]
    fn test_breaking_field_type_changed() {
        let from = vec![make_field("name", "String")];
        let to = vec![make_field("name", "Int")];
        let result = compare_record_fields(&from, &to).unwrap();
        assert!(matches!(result, Compatibility::Breaking(_)));
    }
}

1228
src/typechecker.rs Normal file

File diff suppressed because it is too large Load Diff

1083
src/types.rs Normal file

File diff suppressed because it is too large Load Diff