From 04d78a4aa3cda3aaa69a03c174a92fa2ba6c391f Mon Sep 17 00:00:00 2001 From: Aaron Councilman <aaronjc4@illinois.edu> Date: Tue, 12 Nov 2024 14:19:35 -0600 Subject: [PATCH] Juno build system, labels, and skeleton scheduler --- Cargo.lock | 398 +- Cargo.toml | 7 +- hercules_cg/src/manifest.rs | 19 + hercules_cg/src/sched_ir.rs | 2 +- hercules_opt/src/editor.rs | 14 + hercules_opt/src/gvn.rs | 21 +- hercules_opt/src/pass.rs | 6 +- hercules_rt_proc/src/lib.rs | 11 +- juno_build/Cargo.toml | 10 + juno_build/src/lib.rs | 172 + juno_frontend/Cargo.toml | 9 +- juno_frontend/examples/matmul.jn | 8 +- juno_frontend/examples/simple3.jn | 2 + juno_frontend/src/codegen.rs | 386 +- juno_frontend/src/dynconst.rs | 242 +- juno_frontend/src/env.rs | 37 +- juno_frontend/src/intrinsics.rs | 30 +- juno_frontend/src/labeled_builder.rs | 141 + juno_frontend/src/lang.l | 1 + juno_frontend/src/lang.y | 18 +- juno_frontend/src/lib.rs | 144 + juno_frontend/src/locs.rs | 36 +- juno_frontend/src/main.rs | 122 +- juno_frontend/src/parser.rs | 4 +- juno_frontend/src/semant.rs | 5663 +++++++++++++++----------- juno_frontend/src/ssa.rs | 192 +- juno_frontend/src/types.rs | 1067 +++-- juno_samples/matmul/Cargo.toml | 18 + juno_samples/matmul/build.rs | 12 + juno_samples/matmul/src/main.rs | 19 + juno_samples/matmul/src/matmul.jn | 15 + juno_samples/matmul/src/matmul.sch | 7 + juno_samples/simple3/Cargo.toml | 18 + juno_samples/simple3/build.rs | 12 + juno_samples/simple3/src/main.rs | 18 + juno_samples/simple3/src/simple3.jn | 12 + juno_samples/simple3/src/simple3.sch | 6 + juno_scheduler/Cargo.toml | 16 + juno_scheduler/build.rs | 15 + juno_scheduler/examples/matmul.sch | 7 + juno_scheduler/examples/simple3.sch | 6 + juno_scheduler/src/lang.l | 27 + juno_scheduler/src/lang.y | 94 + juno_scheduler/src/lib.rs | 339 ++ juno_scheduler/src/parser.rs | 10 + 45 files changed, 6055 insertions(+), 3358 deletions(-) create mode 100644 juno_build/Cargo.toml create mode 100644 juno_build/src/lib.rs create 
mode 100644 juno_frontend/src/labeled_builder.rs create mode 100644 juno_frontend/src/lib.rs create mode 100644 juno_samples/matmul/Cargo.toml create mode 100644 juno_samples/matmul/build.rs create mode 100644 juno_samples/matmul/src/main.rs create mode 100644 juno_samples/matmul/src/matmul.jn create mode 100644 juno_samples/matmul/src/matmul.sch create mode 100644 juno_samples/simple3/Cargo.toml create mode 100644 juno_samples/simple3/build.rs create mode 100644 juno_samples/simple3/src/main.rs create mode 100644 juno_samples/simple3/src/simple3.jn create mode 100644 juno_samples/simple3/src/simple3.sch create mode 100644 juno_scheduler/Cargo.toml create mode 100644 juno_scheduler/build.rs create mode 100644 juno_scheduler/examples/matmul.sch create mode 100644 juno_scheduler/examples/simple3.sch create mode 100644 juno_scheduler/src/lang.l create mode 100644 juno_scheduler/src/lang.y create mode 100644 juno_scheduler/src/lib.rs create mode 100644 juno_scheduler/src/parser.rs diff --git a/Cargo.lock b/Cargo.lock index 8b9e4c9e..47f8fce9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -13,9 +13,9 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.14" +version = "0.6.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" +checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526" dependencies = [ "anstyle", "anstyle-parse", @@ -28,43 +28,43 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.7" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" +checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" [[package]] name = "anstyle-parse" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c03a11a9034d92058ceb6ee011ce58af4a9bf61491aa7e1e59ecd24bd40d22d4" +checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.0.3" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a64c907d4e79225ac72e2a354c9ce84d50ebb4586dee56c82b3ee73004f537f5" +checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a" dependencies = [ - "windows-sys", + "windows-sys 0.52.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.3" +version = "3.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" +checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8" dependencies = [ "anstyle", - "windows-sys", + "windows-sys 0.52.0", ] [[package]] name = "anyhow" -version = "1.0.86" +version = "1.0.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" +checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6" [[package]] name = "async-channel" @@ -119,9 +119,9 @@ dependencies = [ [[package]] name = "async-io" -version = "2.3.3" +version = "2.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d6baa8f0178795da0e71bc42c9e5d13261aac7ee549853162e66a241ba17964" +checksum = "444b0228950ee6501b3568d3c93bf1176a1fdbc3b758dcd9475046d30f4dc7e8" dependencies = [ "async-lock", "cfg-if", @@ -133,7 +133,7 @@ dependencies = [ "rustix", "slab", "tracing", - "windows-sys", + "windows-sys 0.59.0", ] [[package]] @@ -196,9 +196,9 @@ checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" [[package]] name = "autocfg" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "base64" @@ -217,15 +217,9 @@ dependencies = [ [[package]] name = "bitflags" -version = "1.3.2" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - -[[package]] -name = "bitflags" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" dependencies = [ "serde", ] @@ -281,9 +275,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "cfgrammar" -version = "0.13.6" +version = "0.13.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec07af28018dd8b4b52e49eb6e57268b19dda0996d4824889eb07ee0ef67378c" +checksum = "6026d8cd82ada8bbcfe337805dd1eb6afdc9e80fa4d57e977b3a36315e0c5525" dependencies = [ "indexmap", "lazy_static", @@ -295,9 +289,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.4" +version = "4.5.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0" +checksum = "7be5744db7978a28d9df86a214130d106a89ce49644cbc4e3f0c22c3fba30615" dependencies = [ "clap_builder", "clap_derive", @@ -305,9 +299,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.2" +version = "4.5.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4" +checksum = "a5fbc17d3ef8278f55b282b2a2e75ae6f6c7d4bb70ed3d0382375104bfafdb4b" dependencies = [ "anstream", "anstyle", @@ -317,21 +311,21 @@ dependencies = [ [[package]] name = "clap_derive" -version = 
"4.5.4" +version = "4.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64" +checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" dependencies = [ "heck", "proc-macro2", "quote", - "syn", + "syn 2.0.79", ] [[package]] name = "clap_lex" -version = "0.7.0" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" +checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" [[package]] name = "cobs" @@ -341,9 +335,9 @@ checksum = "67ba02a97a2bd10f4b59b25c7973101c79642302776489e030cd13cdab09ed15" [[package]] name = "colorchoice" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" +checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" [[package]] name = "concurrent-queue" @@ -356,9 +350,9 @@ dependencies = [ [[package]] name = "critical-section" -version = "1.1.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7059fff8937831a9ae6f0fe4d658ffabf58f2ca96aa9dec1c889f936f705f216" +checksum = "f64009896348fc5af4222e9cf7d7d82a95a256c634ebcf61c53e4ea461422242" [[package]] name = "crossbeam-utils" @@ -392,7 +386,7 @@ checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.79", ] [[package]] @@ -417,6 +411,12 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef1a6892d9eef45c8fa6b9e0086428a2cca8491aca8f787c534a3d6d0bcb3ced" +[[package]] +name = "embedded-io" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"edd0f118536f44f5ccd48bcb8b111bdc3de888b58c74639dfb034a357d0f206d" + [[package]] name = "equivalent" version = "1.0.1" @@ -430,7 +430,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" dependencies = [ "libc", - "windows-sys", + "windows-sys 0.52.0", ] [[package]] @@ -478,14 +478,14 @@ checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6" [[package]] name = "filetime" -version = "0.2.23" +version = "0.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" +checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" dependencies = [ "cfg-if", "libc", - "redox_syscall", - "windows-sys", + "libredox", + "windows-sys 0.59.0", ] [[package]] @@ -502,24 +502,24 @@ checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" [[package]] name = "futures-channel" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" dependencies = [ "futures-core", ] [[package]] name = "futures-core" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" [[package]] name = "futures-io" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" [[package]] name = "futures-lite" @@ -577,9 +577,9 @@ dependencies = [ [[package]] name = 
"hashbrown" -version = "0.14.5" +version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" +checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb" [[package]] name = "heapless" @@ -708,9 +708,9 @@ checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" [[package]] name = "indexmap" -version = "2.2.6" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" dependencies = [ "equivalent", "hashbrown", @@ -718,9 +718,9 @@ dependencies = [ [[package]] name = "is_terminal_polyfill" -version = "1.70.0" +version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" [[package]] name = "itertools" @@ -746,6 +746,15 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "juno_build" +version = "0.1.0" +dependencies = [ + "hercules_rt", + "juno_frontend", + "with_builtin_macros", +] + [[package]] name = "juno_frontend" version = "0.1.0" @@ -754,6 +763,7 @@ dependencies = [ "clap", "hercules_ir", "hercules_opt", + "juno_scheduler", "lrlex", "lrpar", "num-rational", @@ -762,6 +772,36 @@ dependencies = [ "phf", ] +[[package]] +name = "juno_matmul" +version = "0.1.0" +dependencies = [ + "async-std", + "hercules_rt", + "juno_build", + "with_builtin_macros", +] + +[[package]] +name = "juno_scheduler" +version = "0.0.1" +dependencies = [ + "cfgrammar", + "hercules_ir", + "lrlex", + "lrpar", +] + +[[package]] +name = "juno_simple3" +version = "0.1.0" +dependencies = [ + "async-std", + "hercules_rt", + "juno_build", + "with_builtin_macros", +] + [[package]] name = 
"kv-log-macro" version = "1.0.7" @@ -773,15 +813,26 @@ dependencies = [ [[package]] name = "lazy_static" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.158" +version = "0.2.159" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "561d97a539a36e26a9a5fad1ea11a3039a67714694aaa379433e580854bc3dc5" + +[[package]] +name = "libredox" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439" +checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +dependencies = [ + "bitflags", + "libc", + "redox_syscall", +] [[package]] name = "linux-raw-sys" @@ -810,9 +861,9 @@ dependencies = [ [[package]] name = "lrlex" -version = "0.13.6" +version = "0.13.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c65e01ebaccc77218ed6fa4f0053daa2124bce4e25a5e83aae0f7ccfc9cbfccb" +checksum = "05863fdac293d1bc74f0cd91512933a5ab67e0cb607dc78ac4984be089456b49" dependencies = [ "cfgrammar", "getopts", @@ -828,9 +879,9 @@ dependencies = [ [[package]] name = "lrpar" -version = "0.13.6" +version = "0.13.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a4b858180a332aec09d10479a070802b13081077eb94010744bc4e3a11d9768" +checksum = "3b1ecae55cf667db308d3555e22b20bcc28eaeca0c95a09b37171673be157c71" dependencies = [ "bincode", "cactus", @@ -850,9 +901,9 @@ dependencies = [ [[package]] name = "lrtable" -version = "0.13.6" +version = "0.13.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fcefc5628209d1b1f4b2cd0bcefd0e50be80bdf178e886cb07317f5ce4f2856" +checksum = 
"d42d2752cb50a171efadda0cb6fa97432e8bf05accfff3eed320b87e80a2f69e" dependencies = [ "cfgrammar", "fnv", @@ -874,9 +925,9 @@ dependencies = [ [[package]] name = "memchr" -version = "2.7.2" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "minimal-lexical" @@ -896,9 +947,9 @@ dependencies = [ [[package]] name = "num-bigint" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c165a9ab64cf766f73521c0dd2cfdff64f488b8f0b3e621face3462d3db536d7" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ "num-integer", "num-traits", @@ -950,15 +1001,15 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.19.0" +version = "1.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" [[package]] name = "ordered-float" -version = "4.2.0" +version = "4.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a76df7075c7d4d01fdcb46c912dd17fba5b60c78ea480b475f2b6ab6f666584e" +checksum = "44d501f1a72f71d3c063a6bbc8f7271fa73aa09fe5d6283b6571e2ed176a2537" dependencies = [ "num-traits", "rand", @@ -1011,7 +1062,7 @@ dependencies = [ "phf_shared", "proc-macro2", "quote", - "syn", + "syn 2.0.79", ] [[package]] @@ -1048,9 +1099,9 @@ dependencies = [ [[package]] name = "polling" -version = "3.7.2" +version = "3.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3ed00ed3fbf728b5816498ecd316d1716eecaced9c0c8d2c5a6740ca214985b" +checksum = "cc2790cd301dec6cd3b7a025e4815cf825724a51c98dccfe6a3e55f05ffb6511" dependencies = [ "cfg-if", 
"concurrent-queue", @@ -1058,17 +1109,18 @@ dependencies = [ "pin-project-lite", "rustix", "tracing", - "windows-sys", + "windows-sys 0.59.0", ] [[package]] name = "postcard" -version = "1.0.8" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a55c51ee6c0db07e68448e336cf8ea4131a620edefebf9893e759b2d793420f8" +checksum = "5f7f0a8d620d71c457dd1d47df76bb18960378da56af4527aaa10f515eee732e" dependencies = [ "cobs", - "embedded-io", + "embedded-io 0.4.0", + "embedded-io 0.6.1", "heapless", "serde", ] @@ -1081,24 +1133,27 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" -version = "0.2.17" +version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +dependencies = [ + "zerocopy", +] [[package]] name = "proc-macro2" -version = "1.0.85" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22244ce15aa966053a896d1accb3a6e68469b97c7f33f284b99f0d576879fc23" +checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" dependencies = [ "unicode-ident", ] [[package]] name = "quote" -version = "1.0.36" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" dependencies = [ "proc-macro2", ] @@ -1143,18 +1198,18 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.4.1" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f" dependencies = [ - 
"bitflags 1.3.2", + "bitflags", ] [[package]] name = "regex" -version = "1.10.4" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c" +checksum = "38200e5ee88914975b69f657f0801b6f6dccafd44fd9326302a4aaeecfacb1d8" dependencies = [ "aho-corasick", "memchr", @@ -1164,9 +1219,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.6" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" +checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3" dependencies = [ "aho-corasick", "memchr", @@ -1175,9 +1230,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "ron" @@ -1186,16 +1241,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b91f7eff05f748767f183df4320a63d6936e9c6107d97c9e6bdd9784f4289c94" dependencies = [ "base64", - "bitflags 2.5.0", + "bitflags", "serde", "serde_derive", ] [[package]] name = "rustc_version" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" dependencies = [ "semver", ] @@ -1206,11 +1261,11 @@ version = "0.38.37" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811" dependencies = [ - "bitflags 2.5.0", + "bitflags", "errno", "libc", "linux-raw-sys", - "windows-sys", + "windows-sys 0.52.0", ] 
[[package]] @@ -1233,22 +1288,22 @@ checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" [[package]] name = "serde" -version = "1.0.203" +version = "1.0.210" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7253ab4de971e72fb7be983802300c30b5a7f0c2e56fab8abfc6a214307c0094" +checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.203" +version = "1.0.210" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba" +checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.79", ] [[package]] @@ -1307,9 +1362,20 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "syn" -version = "2.0.66" +version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89132cd0bf050864e1d38dc3bbc07a0eb8e7530af26344d3d2bbbef83499f590" dependencies = [ "proc-macro2", "quote", @@ -1379,21 +1445,21 @@ checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" [[package]] name = "unicode-ident" -version = "1.0.12" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" [[package]] name = "unicode-width" -version = "0.1.12" +version = "0.1.14" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "68f5e5f3158ecfd4b8ff6fe086db7c8467a2dfdac97fe420f2b7c4aa97af66d6" +checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" [[package]] name = "utf8parse" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" @@ -1414,7 +1480,7 @@ checksum = "6b91f57fe13a38d0ce9e28a03463d8d3c2468ed03d75375110ec71d93b449a08" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.79", ] [[package]] @@ -1425,9 +1491,9 @@ checksum = "5a84c137d37ab0142f0f2ddfe332651fdbf252e7b7dbb4e67b6c1f1b2e925101" [[package]] name = "vergen" -version = "8.3.1" +version = "8.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e27d6bdd219887a9eadd19e1c34f32e47fa332301184935c6d9bca26f3cca525" +checksum = "2990d9ea5967266ea0ccf413a4aa5c42a93dbcfda9cb49a97de6931726b12566" dependencies = [ "anyhow", "rustversion", @@ -1473,7 +1539,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn", + "syn 2.0.79", "wasm-bindgen-shared", ] @@ -1507,7 +1573,7 @@ checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.79", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -1537,11 +1603,20 @@ dependencies = [ "windows-targets", ] +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets", +] + [[package]] name = "windows-targets" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ "windows_aarch64_gnullvm", "windows_aarch64_msvc", @@ -1555,51 +1630,71 @@ dependencies = [ [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnullvm" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" 
+checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" -version = "0.52.5" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "with_builtin_macros" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24deb3cd6e530e7617b12b1f0f1ce160a3a71d92feb351c4db5156d1d10e398a" +dependencies = [ + "with_builtin_macros-proc_macros", +] + +[[package]] +name = "with_builtin_macros-proc_macros" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" +checksum = "2259ae9b1285596f1ee52ce8f627013c65853d4d7f271cb10bfe2d048769804a" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] [[package]] name = "wyz" @@ -1609,3 +1704,24 @@ checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" dependencies = [ "tap", ] + +[[package]] +name = "zerocopy" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +dependencies = [ + "byteorder", + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.79", +] diff --git a/Cargo.toml b/Cargo.toml index 322aa33d..027e9bf7 100644 --- 
a/Cargo.toml +++ b/Cargo.toml @@ -12,9 +12,14 @@ members = [ "hercules_tools/hercules_driver", - "juno_frontend", + "juno_frontend", + "juno_scheduler", + "juno_build", "hercules_samples/dot", "hercules_samples/matmul", "hercules_samples/fac", + + "juno_samples/matmul", + "juno_samples/simple3", ] diff --git a/hercules_cg/src/manifest.rs b/hercules_cg/src/manifest.rs index f7161bed..ac54c4bc 100644 --- a/hercules_cg/src/manifest.rs +++ b/hercules_cg/src/manifest.rs @@ -2,6 +2,7 @@ extern crate serde; extern crate hercules_ir; +use std::collections::BTreeSet; use std::iter::once; use self::serde::Deserialize; @@ -113,6 +114,24 @@ impl Manifest { SType::Product(partition.returns.iter().map(|(ty, _)| ty.clone()).collect()) })) } + + pub fn transitive_closure_type_set(type_set: BTreeSet<SType>) -> BTreeSet<SType> { + let mut closure = BTreeSet::new(); + let mut workset: BTreeSet<&SType> = type_set.iter().collect(); + + while let Some(ty) = workset.pop_last() { + match ty { + SType::Product(fields) => workset.extend(fields), + SType::ArrayRef(elem) => { + workset.insert(elem); + } + _ => {} + } + closure.insert(ty.clone()); + } + + closure + } } impl PartitionManifest { diff --git a/hercules_cg/src/sched_ir.rs b/hercules_cg/src/sched_ir.rs index 02563834..3ceee621 100644 --- a/hercules_cg/src/sched_ir.rs +++ b/hercules_cg/src/sched_ir.rs @@ -175,7 +175,7 @@ pub fn sched_make_schedule(schedule: &Schedule) -> SSchedule { * value product types, since the layout of these types may be platform * dependent. 
*/ -#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq, Hash, Serialize, Deserialize)] pub enum SType { Boolean, Integer8, diff --git a/hercules_opt/src/editor.rs b/hercules_opt/src/editor.rs index b9044542..96e6edf0 100644 --- a/hercules_opt/src/editor.rs +++ b/hercules_opt/src/editor.rs @@ -222,6 +222,20 @@ impl<'a: 'b, 'b> FunctionEditor<'a> { self.mut_def_use[id.idx()].iter().map(|x| *x) } + pub fn get_type(&self, id: TypeID) -> Ref<'_, Type> { + Ref::map(self.types.borrow(), |types| &types[id.idx()]) + } + + pub fn get_constant(&self, id: ConstantID) -> Ref<'_, Constant> { + Ref::map(self.constants.borrow(), |constants| &constants[id.idx()]) + } + + pub fn get_dynamic_constant(&self, id: DynamicConstantID) -> Ref<'_, DynamicConstant> { + Ref::map(self.dynamic_constants.borrow(), |dynamic_constants| { + &dynamic_constants[id.idx()] + }) + } + pub fn is_mutable(&self, id: NodeID) -> bool { self.mutable_nodes[id.idx()] } diff --git a/hercules_opt/src/gvn.rs b/hercules_opt/src/gvn.rs index e3b2fa60..032081a9 100644 --- a/hercules_opt/src/gvn.rs +++ b/hercules_opt/src/gvn.rs @@ -11,14 +11,14 @@ use crate::*; * fairly simple compared to in a normal CFG. Needs access to constants for * identity function simplification. */ -pub fn gvn(editor: &mut FunctionEditor, constants: &Vec<Constant>) { +pub fn gvn(editor: &mut FunctionEditor) { // Create worklist (starts as all nodes) and value number hashmap. let mut worklist: Vec<NodeID> = (0..editor.func().nodes.len()).map(NodeID::new).collect(); let mut value_numbers: HashMap<Node, NodeID> = HashMap::new(); while let Some(work) = worklist.pop() { // First, simplify the work node by unwrapping identity functions. - let value = crawl_identities(work, editor.func(), constants); + let value = crawl_identities(work, editor); // Next, check if there is a value number for this simplified value yet. 
if let Some(number) = value_numbers.get(&editor.func().nodes[value.idx()]) { @@ -59,18 +59,19 @@ pub fn gvn(editor: &mut FunctionEditor, constants: &Vec<Constant>) { /* * Helper function for unwrapping identity functions. */ -fn crawl_identities(mut work: NodeID, function: &Function, constants: &Vec<Constant>) -> NodeID { +fn crawl_identities(mut work: NodeID, editor: &FunctionEditor) -> NodeID { loop { + let func = editor.func(); // TODO: replace with API for saner pattern matching on IR. Also, - // actually add the rest of the identity functions. + // actually add the rest of the identity funcs. if let Node::Binary { left, right, op: BinaryOperator::Add, - } = function.nodes[work.idx()] + } = func.nodes[work.idx()] { - if let Node::Constant { id } = function.nodes[left.idx()] { - if constants[id.idx()].is_zero() { + if let Node::Constant { id } = func.nodes[left.idx()] { + if editor.get_constant(id).is_zero() { work = right; continue; } @@ -81,10 +82,10 @@ fn crawl_identities(mut work: NodeID, function: &Function, constants: &Vec<Const left, right, op: BinaryOperator::Add, - } = function.nodes[work.idx()] + } = func.nodes[work.idx()] { - if let Node::Constant { id } = function.nodes[right.idx()] { - if constants[id.idx()].is_zero() { + if let Node::Constant { id } = func.nodes[right.idx()] { + if editor.get_constant(id).is_zero() { work = left; continue; } diff --git a/hercules_opt/src/pass.rs b/hercules_opt/src/pass.rs index bf4b7898..5db24e30 100644 --- a/hercules_opt/src/pass.rs +++ b/hercules_opt/src/pass.rs @@ -280,6 +280,10 @@ impl PassManager { } } + pub fn set_plans(&mut self, plans: Vec<Plan>) { + self.plans = Some(plans); + } + pub fn make_plans(&mut self) { if self.plans.is_none() { self.make_def_uses(); @@ -392,7 +396,7 @@ impl PassManager { &types_ref, &def_uses[idx], ); - gvn(&mut editor, &self.module.constants); + gvn(&mut editor); self.module.constants = constants_ref.take(); self.module.dynamic_constants = dynamic_constants_ref.take(); diff --git 
a/hercules_rt_proc/src/lib.rs b/hercules_rt_proc/src/lib.rs index f3df33aa..1aaa640f 100644 --- a/hercules_rt_proc/src/lib.rs +++ b/hercules_rt_proc/src/lib.rs @@ -7,7 +7,7 @@ extern crate hercules_opt; extern crate postcard; extern crate proc_macro; -use std::collections::{HashMap, HashSet}; +use std::collections::{BTreeSet, HashMap}; use std::ffi::OsStr; use std::fmt::Write; use std::fs::File; @@ -136,11 +136,12 @@ fn codegen( // Emit the product types used in this module. We can't just emit product // types, since we need #[repr(C)] to interact with LLVM. - let all_stypes = manifests + let visible_stypes = manifests .into_iter() .map(|(_, manifest)| manifest.all_visible_types()) .flatten() - .collect::<HashSet<SType>>(); + .collect::<BTreeSet<SType>>(); + let all_stypes = Manifest::transitive_closure_type_set(visible_stypes); for stype in all_stypes.iter() { if let Some(fields) = stype.try_product() { write!( @@ -230,8 +231,8 @@ fn codegen( // are declared as MaybeUninit, since they get assigned after running a // partition. MaybeUninits should always be defined before assume_init() // is called on them, assuming a valid partitioning. 
- let mut data_inputs = HashSet::new(); - let mut data_outputs = HashSet::new(); + let mut data_inputs = BTreeSet::new(); + let mut data_outputs = BTreeSet::new(); for partition in manifest.partitions.iter() { data_inputs.extend(partition.data_inputs()); data_outputs.extend(partition.data_outputs()); diff --git a/juno_build/Cargo.toml b/juno_build/Cargo.toml new file mode 100644 index 00000000..2e35059e --- /dev/null +++ b/juno_build/Cargo.toml @@ -0,0 +1,10 @@ +[package] +name = "juno_build" +version = "0.1.0" +authors = ["Aaron Councilman <aaronjc4@illinois.edu>"] +edition = "2021" + +[dependencies] +juno_frontend = { path = "../juno_frontend" } +hercules_rt = { path = "../hercules_rt" } +with_builtin_macros = "0.1.0" diff --git a/juno_build/src/lib.rs b/juno_build/src/lib.rs new file mode 100644 index 00000000..c40d8de1 --- /dev/null +++ b/juno_build/src/lib.rs @@ -0,0 +1,172 @@ +extern crate hercules_rt; +use juno_compiler::*; + +use std::env::{current_dir, var}; +use std::fmt::Write; +use std::fs::create_dir_all; +use std::path::{Path, PathBuf}; + +use with_builtin_macros::with_builtin; + +// JunoCompiler is used to compile juno files into a library and manifest file appropriately to +// import the definitions into a rust project via the juno! 
macro defined below +pub struct JunoCompiler { + src_path: Option<PathBuf>, + out_path: Option<PathBuf>, + verify: JunoVerify, + x_dot: bool, + schedule: JunoSchedule, +} + +impl JunoCompiler { + pub fn new() -> Self { + JunoCompiler { + src_path: None, + out_path: None, + verify: JunoVerify::None, + x_dot: false, + schedule: JunoSchedule::None, + } + } + + // Sets the name of the Juno file, this file should be contained in the src/ + // file of the package that the JunoCompiler is used in the build script of + pub fn file_in_src<P>(mut self, file: P) -> Result<Self, String> + where + P: AsRef<Path>, + { + // Since the file will be in the src directory, the path must be relative + if !file.as_ref().is_relative() { + return Err(format!( + "Source path '{}' must be a relative path.", + file.as_ref().to_str().unwrap_or("<invalid UTF-8>") + )); + } + + let Ok(mut path) = current_dir() else { + return Err("Failed to retrieve current directory.".to_string()); + }; + path.push("src"); + path.push(file.as_ref()); + self.src_path = Some(path); + + // We also set the output file in this process, under the OUT_DIR cargo provides + let mut out = PathBuf::new(); + out.push(var("OUT_DIR").unwrap()); + out.push(file.as_ref().parent().unwrap().to_str().unwrap()); + let Ok(()) = create_dir_all(&out) else { + return Err("Failed to create output directory.".to_string()); + }; + self.out_path = Some(out); + + // Tell cargo to rerun if the Juno file changes + println!( + "cargo::rerun-if-changed=src/{}", + file.as_ref().to_str().unwrap() + ); + // Tell cargo to include the output directory in its linker search + // (and to link the resulting library) + println!( + "cargo::rustc-link-search=native={}", + var("OUT_DIR").unwrap() + ); + println!( + "cargo::rustc-link-lib=static={}", + file.as_ref().file_stem().unwrap().to_str().unwrap() + ); + + Ok(self) + } + + pub fn verify(mut self, enabled: bool) -> Self { + if enabled && !self.verify.verify() { + self.verify = JunoVerify::JunoOpts; + 
} else if !enabled && self.verify.verify() { + self.verify = JunoVerify::None; + } + self + } + + pub fn verify_all(mut self, enabled: bool) -> Self { + if enabled { + self.verify = JunoVerify::AllPasses; + } else if !enabled && self.verify.verify_all() { + self.verify = JunoVerify::JunoOpts; + } + self + } + + pub fn x_dot(mut self, enabled: bool) -> Self { + self.x_dot = enabled; + self + } + + // Sets the schedule to be the default schedule + pub fn default_schedule(mut self) -> Self { + self.schedule = JunoSchedule::DefaultSchedule; + self + } + + // Set the schedule as a schedule file in the src directory + pub fn schedule_in_src<P>(mut self, file: P) -> Result<Self, String> + where + P: AsRef<Path>, + { + // Since the file will be in the src directory, the path must be relative + if !file.as_ref().is_relative() { + return Err(format!( + "Schedule path '{}' must be a relative path.", + file.as_ref().to_str().unwrap_or("<invalid UTF-8>") + )); + } + + let Ok(mut path) = current_dir() else { + return Err("Failed to retrieve current directory.".to_string()); + }; + path.push("src"); + path.push(file.as_ref()); + self.schedule = JunoSchedule::Schedule(path.to_str().unwrap().to_string()); + + // Tell cargo to rerun if the schedule changes + println!( + "cargo::rerun-if-changed=src/{}", + file.as_ref().to_str().unwrap() + ); + + Ok(self) + } + + // Builds the juno file into a library and a manifest file.
+ pub fn build(self) -> Result<(), String> { + let JunoCompiler { + src_path: Some(src_path), + out_path: Some(out_path), + verify, + x_dot, + schedule, + } = self + else { + return Err("No source file specified.".to_string()); + }; + + let src_file = src_path.to_str().unwrap().to_string(); + let out_dir = out_path.to_str().unwrap().to_string(); + + match compile(src_file, verify, x_dot, schedule, out_dir) { + Ok(()) => Ok(()), + Err(errs) => Err(format!("{}", errs)), + } + } +} + +// The juno!(filename) macro imports the definitions from the manifest which is +// compiled using the JunoCompiler object (in a build.rs file) +#[macro_export] +macro_rules! juno { + ($path:expr) => { + with_builtin_macros::with_builtin!( + let $hman = concat!(env!("OUT_DIR"), "/", $path, ".hman") in { + hercules_rt::use_hman!($hman); + }); + }; +} diff --git a/juno_frontend/Cargo.toml b/juno_frontend/Cargo.toml index 31d938ed..39e18baa 100644 --- a/juno_frontend/Cargo.toml +++ b/juno_frontend/Cargo.toml @@ -8,19 +8,24 @@ edition = "2021" name = "juno" path = "src/main.rs" +[lib] +name = "juno_compiler" +path = "src/lib.rs" + [build-dependencies] cfgrammar = "0.13" lrlex = "0.13" lrpar = "0.13" [dependencies] -clap = { version = "*", features = ["derive"] } cfgrammar = "0.13" +clap = { version = "*", features = ["derive"] } lrlex = "0.13" lrpar = "0.13" -ordered-float = "*" num-rational = "*" num-traits = "*" +ordered-float = "*" phf = { version = "0.11", features = ["macros"] } hercules_ir = { path = "../hercules_ir" } hercules_opt = { path = "../hercules_opt" } +juno_scheduler = { path = "../juno_scheduler" } diff --git a/juno_frontend/examples/matmul.jn b/juno_frontend/examples/matmul.jn index 5c7b28a2..ca9778b1 100644 --- a/juno_frontend/examples/matmul.jn +++ b/juno_frontend/examples/matmul.jn @@ -1,13 +1,13 @@ fn matmul<n : usize, m : usize, l : usize>(a : f32[n, m], b : f32[m, l]) -> f32[n, l] { let res : f32[n, l]; - for i = 0 to n { - for j = 0 to l { - for k = 0 to m { + @outer 
for i = 0 to n { + @middle for j = 0 to l { + @inner for k = 0 to m { res[i, j] += a[i, k] * b[k, j]; } } } - return res; + @exit return res; } diff --git a/juno_frontend/examples/simple3.jn b/juno_frontend/examples/simple3.jn index 028f0d55..c6525197 100644 --- a/juno_frontend/examples/simple3.jn +++ b/juno_frontend/examples/simple3.jn @@ -1,9 +1,11 @@ fn simple3<n : usize>(a : i32[n], b : i32[n]) -> i32 { let res : i32 = 0; + @loop for i = 0 to n { res += a[i] * b[i]; } + @exit return res; } diff --git a/juno_frontend/src/codegen.rs b/juno_frontend/src/codegen.rs index a76d4773..c99978c8 100644 --- a/juno_frontend/src/codegen.rs +++ b/juno_frontend/src/codegen.rs @@ -1,11 +1,10 @@ -extern crate hercules_ir; +use std::collections::{HashMap, HashSet, VecDeque}; -use std::collections::{HashMap, VecDeque}; - -use self::hercules_ir::build::*; -use self::hercules_ir::ir; -use self::hercules_ir::ir::*; +use hercules_ir::ir; +use hercules_ir::ir::*; +use juno_scheduler::{FunctionMap, LabeledStructure}; +use crate::labeled_builder::LabeledBuilder; use crate::semant; use crate::semant::{BinaryOp, Expr, Function, Literal, Prg, Stmt, UnaryOp}; use crate::ssa::SSA; @@ -14,12 +13,34 @@ use crate::types::{Either, Primitive, TypeSolver, TypeSolverInst}; // Loop info is a stack of the loop levels, recording the latch and exit block of each type LoopInfo = Vec<(NodeID, NodeID)>; -pub fn codegen_program(prg: Prg) -> Module { +fn merge_function_maps( + mut functions: HashMap<(usize, Vec<TypeID>), FunctionID>, + funcs: &Vec<Function>, + mut tree: HashMap<FunctionID, Vec<(LabeledStructure, HashSet<usize>)>>, + mut labels: HashMap<FunctionID, HashMap<NodeID, usize>>, +) -> FunctionMap { + let mut res = HashMap::new(); + for ((func_num, type_vars), func_id) in functions.drain() { + let func_labels = tree.remove(&func_id).unwrap(); + let node_labels = labels.remove(&func_id).unwrap(); + let func_info = res.entry(func_num).or_insert(( + funcs[func_num].label_map.clone(), + 
funcs[func_num].name.clone(), + vec![], + )); + func_info + .2 + .push((type_vars, func_id, func_labels, node_labels)); + } + res +} + +pub fn codegen_program(prg: Prg) -> (Module, FunctionMap) { CodeGenerator::build(prg) } struct CodeGenerator<'a> { - builder: Builder<'a>, + builder: LabeledBuilder<'a>, types: &'a TypeSolver, funcs: &'a Vec<Function>, uid: usize, @@ -33,7 +54,7 @@ struct CodeGenerator<'a> { } impl CodeGenerator<'_> { - fn build((types, funcs): Prg) -> Module { + fn build((types, funcs): Prg) -> (Module, FunctionMap) { // Identify the functions (by index) which have no type arguments, these are the ones we // ask for code to be generated for let func_idx = @@ -43,7 +64,7 @@ impl CodeGenerator<'_> { .filter_map(|(i, f)| if f.num_type_args == 0 { Some(i) } else { None }); let mut codegen = CodeGenerator { - builder: Builder::create(), + builder: LabeledBuilder::create(), types: &types, funcs: &funcs, uid: 0, @@ -59,13 +80,26 @@ impl CodeGenerator<'_> { codegen.finish() } - fn finish(mut self) -> Module { + fn finish(mut self) -> (Module, FunctionMap) { while !self.worklist.is_empty() { let (idx, mut type_inst, func, entry) = self.worklist.pop_front().unwrap(); - self.codegen_function(&self.funcs[idx], &mut type_inst, func, entry); + self.builder.set_function(func); + self.codegen_function(&self.funcs[idx], &mut type_inst, entry); } - self.builder.finish() + let CodeGenerator { + builder, + types: _, + funcs, + uid: _, + functions, + worklist: _, + } = self; + let (module, label_tree, label_map) = builder.finish(); + ( + module, + merge_function_maps(functions, funcs, label_tree, label_map), + ) } fn get_function(&mut self, func_idx: usize, ty_args: Vec<TypeID>) -> FunctionID { @@ -80,19 +114,32 @@ impl CodeGenerator<'_> { // TODO: Ideally we would write out the type arguments, but now that they're // lowered to TypeID we can't do that as far as I can tell - let name = format!("{}_{}", func.name, self.uid); + let name = + // For entry functions, we 
preserve the name, which is safe + // since they have no type variables and this is necessary + // to ensure easy ingestion into Rust + if func.entry { func.name.clone() } + else { format!("_{}_{}", self.uid, func.name) }; self.uid += 1; let mut param_types = vec![]; for (_, ty) in func.arguments.iter() { - param_types.push(solver_inst.lower_type(&mut self.builder, *ty)); + // Because we're building types, we can extract the builder + param_types.push(solver_inst.lower_type(&mut self.builder.builder, *ty)); } - let return_type = solver_inst.lower_type(&mut self.builder, func.return_type); + let return_type = + solver_inst.lower_type(&mut self.builder.builder, func.return_type); let (func_id, entry) = self .builder - .create_function(&name, param_types, return_type, func.num_dyn_consts as u32) + .create_function( + &name, + param_types, + return_type, + func.num_dyn_consts as u32, + func.num_labels, + ) .unwrap(); self.functions.insert((func_idx, ty_args), func_id); @@ -103,27 +150,20 @@ impl CodeGenerator<'_> { } } - fn codegen_function( - &mut self, - func: &Function, - types: &mut TypeSolverInst, - func_id: FunctionID, - entry: NodeID, - ) { + fn codegen_function(&mut self, func: &Function, types: &mut TypeSolverInst, entry: NodeID) { // Setup the SSA construction data structure - let mut ssa = SSA::new(func_id, entry); + let mut ssa = SSA::new(entry); // Create nodes for the arguments for (idx, (var, _)) in func.arguments.iter().enumerate() { - let mut node_builder = self.builder.allocate_node(func_id); + let mut node_builder = self.builder.allocate_node(); ssa.write_variable(*var, entry, node_builder.id()); node_builder.build_parameter(idx); - let _ = self.builder.add_node(node_builder); + self.builder.add_node(node_builder); } // Generate code for the body - let None = self.codegen_stmt(&func.body, types, &mut ssa, func_id, entry, &mut vec![]) - else { + let (_, None) = self.codegen_stmt(&func.body, types, &mut ssa, entry, &mut vec![]) else { panic!("Generated 
code for a function missing a return") }; } @@ -133,54 +173,54 @@ impl CodeGenerator<'_> { stmt: &Stmt, types: &mut TypeSolverInst, ssa: &mut SSA, - func_id: FunctionID, cur_block: NodeID, loops: &mut LoopInfo, - ) -> Option<NodeID> { + ) -> (LabeledStructure, Option<NodeID>) { match stmt { Stmt::AssignStmt { var, val } => { - let (val, block) = self.codegen_expr(val, types, ssa, func_id, cur_block); + let (val, block) = self.codegen_expr(val, types, ssa, cur_block); ssa.write_variable(*var, block, val); - Some(block) + (LabeledStructure::Expression(val), Some(block)) } Stmt::IfStmt { cond, thn, els } => { - let (val_cond, block_cond) = - self.codegen_expr(cond, types, ssa, func_id, cur_block); + let (val_cond, block_cond) = self.codegen_expr(cond, types, ssa, cur_block); let (mut if_node, block_then, block_else) = ssa.create_cond(&mut self.builder, block_cond); - let then_end = self.codegen_stmt(thn, types, ssa, func_id, block_then, loops); + let (_, then_end) = self.codegen_stmt(thn, types, ssa, block_then, loops); let else_end = match els { None => Some(block_else), - Some(els_stmt) => { - self.codegen_stmt(els_stmt, types, ssa, func_id, block_else, loops) - } + Some(els_stmt) => self.codegen_stmt(els_stmt, types, ssa, block_else, loops).1, }; + let if_id = if_node.id(); if_node.build_if(block_cond, val_cond); - let _ = self.builder.add_node(if_node); - - match (then_end, else_end) { - (None, els) => els, - (thn, None) => thn, - (Some(then_term), Some(else_term)) => { - let block_join = ssa.create_block(&mut self.builder); - ssa.add_pred(block_join, then_term); - ssa.add_pred(block_join, else_term); - ssa.seal_block(block_join, &mut self.builder); - Some(block_join) - } - } + self.builder.add_node(if_node); + + ( + LabeledStructure::Branch(if_id), + match (then_end, else_end) { + (None, els) => els, + (thn, None) => thn, + (Some(then_term), Some(else_term)) => { + let block_join = ssa.create_block(&mut self.builder); + ssa.add_pred(block_join, then_term); + 
ssa.add_pred(block_join, else_term); + ssa.seal_block(block_join, &mut self.builder); + Some(block_join) + } + }, + ) } Stmt::LoopStmt { cond, update, body } => { // We generate guarded loops, so the first step is to create // a conditional branch, branching on the condition - let (val_guard, block_guard) = - self.codegen_expr(cond, types, ssa, func_id, cur_block); + let (val_guard, block_guard) = self.codegen_expr(cond, types, ssa, cur_block); let (mut if_node, true_guard, false_proj) = ssa.create_cond(&mut self.builder, block_guard); + if_node.build_if(block_guard, val_guard); - let _ = self.builder.add_node(if_node); + self.builder.add_node(if_node); // We then create a region for the exit (since there may be breaks) let block_exit = ssa.create_block(&mut self.builder); @@ -194,16 +234,16 @@ impl CodeGenerator<'_> { let block_updated = match update { None => block_latch, Some(stmt) => self - .codegen_stmt(stmt, types, ssa, func_id, block_latch, loops) + .codegen_stmt(stmt, types, ssa, block_latch, loops) + .1 .expect("Loop update should return control"), }; - let (val_cond, block_cond) = - self.codegen_expr(cond, types, ssa, func_id, block_updated); + let (val_cond, block_cond) = self.codegen_expr(cond, types, ssa, block_updated); let (mut if_node, true_proj, false_proj) = ssa.create_cond(&mut self.builder, block_cond); if_node.build_if(block_cond, val_cond); - let _ = self.builder.add_node(if_node); + self.builder.add_node(if_node); // Add the false projection from the latch as a predecessor of the exit ssa.add_pred(block_exit, false_proj); @@ -217,7 +257,7 @@ impl CodeGenerator<'_> { // Generate code for the body loops.push((block_latch, block_exit)); - let body_res = self.codegen_stmt(body, types, ssa, func_id, body_block, loops); + let (_, body_res) = self.codegen_stmt(body, types, ssa, body_block, loops); loops.pop(); // If the body of the loop can reach some block, we add that block as a predecessor @@ -235,39 +275,51 @@ impl CodeGenerator<'_> { // It is 
always assumed a loop may be skipped and so control can reach after the // loop - Some(block_exit) + (LabeledStructure::Loop(body_block), Some(block_exit)) } Stmt::ReturnStmt { expr } => { - let (val_ret, block_ret) = self.codegen_expr(expr, types, ssa, func_id, cur_block); - let mut return_node = self.builder.allocate_node(func_id); + let (val_ret, block_ret) = self.codegen_expr(expr, types, ssa, cur_block); + let mut return_node = self.builder.allocate_node(); return_node.build_return(block_ret, val_ret); - let _ = self.builder.add_node(return_node); - None + self.builder.add_node(return_node); + (LabeledStructure::Expression(val_ret), None) } Stmt::BreakStmt {} => { let last_loop = loops.len() - 1; let (_latch, exit) = loops[last_loop]; ssa.add_pred(exit, cur_block); // The block that contains this break now leads to // the exit - None + (LabeledStructure::Nothing(), None) } Stmt::ContinueStmt {} => { let last_loop = loops.len() - 1; let (latch, _exit) = loops[last_loop]; ssa.add_pred(latch, cur_block); // The block that contains this continue now leads // to the latch - None + (LabeledStructure::Nothing(), None) } - Stmt::BlockStmt { body } => { + Stmt::BlockStmt { body, label_last } => { + let mut label = None; let mut block = Some(cur_block); for stmt in body.iter() { - block = self.codegen_stmt(stmt, types, ssa, func_id, block.unwrap(), loops); + let (new_label, new_block) = + self.codegen_stmt(stmt, types, ssa, block.unwrap(), loops); + block = new_block; + if label.is_none() || *label_last { + label = Some(new_label); + } } - block + (label.unwrap_or(LabeledStructure::Nothing()), block) } Stmt::ExprStmt { expr } => { - let (_val, block) = self.codegen_expr(expr, types, ssa, func_id, cur_block); - Some(block) + let (val, block) = self.codegen_expr(expr, types, ssa, cur_block); + (LabeledStructure::Expression(val), Some(block)) + } + Stmt::LabeledStmt { label, stmt } => { + self.builder.push_label(*label); + let (labeled, res) = self.codegen_stmt(&*stmt, 
types, ssa, cur_block, loops); + self.builder.pop_label(labeled); + (labeled, res) } } } @@ -280,7 +332,6 @@ impl CodeGenerator<'_> { expr: &Expr, types: &mut TypeSolverInst, ssa: &mut SSA, - func_id: FunctionID, cur_block: NodeID, ) -> (NodeID, NodeID) { match expr { @@ -289,79 +340,78 @@ impl CodeGenerator<'_> { cur_block, ), Expr::DynConst { val, .. } => { - let mut node = self.builder.allocate_node(func_id); + let mut node = self.builder.allocate_node(); let node_id = node.id(); - let dyn_const = val.build(&mut self.builder); + let dyn_const = val.build(&mut self.builder.builder); node.build_dynamicconstant(dyn_const); - let _ = self.builder.add_node(node); + self.builder.add_node(node); (node_id, cur_block) } Expr::Read { index, val, .. } => { - let (collection, block) = self.codegen_expr(val, types, ssa, func_id, cur_block); - let (indices, end_block) = self.codegen_indices(index, types, ssa, func_id, block); + let (collection, block) = self.codegen_expr(val, types, ssa, cur_block); + let (indices, end_block) = self.codegen_indices(index, types, ssa, block); - let mut node = self.builder.allocate_node(func_id); + let mut node = self.builder.allocate_node(); let node_id = node.id(); node.build_read(collection, indices.into()); - let _ = self.builder.add_node(node); + self.builder.add_node(node); (node_id, end_block) } Expr::Write { index, val, rep, .. 
} => { - let (collection, block) = self.codegen_expr(val, types, ssa, func_id, cur_block); - let (indices, idx_block) = self.codegen_indices(index, types, ssa, func_id, block); - let (replace, end_block) = self.codegen_expr(rep, types, ssa, func_id, idx_block); + let (collection, block) = self.codegen_expr(val, types, ssa, cur_block); + let (indices, idx_block) = self.codegen_indices(index, types, ssa, block); + let (replace, end_block) = self.codegen_expr(rep, types, ssa, idx_block); - let mut node = self.builder.allocate_node(func_id); + let mut node = self.builder.allocate_node(); let node_id = node.id(); node.build_write(collection, replace, indices.into()); - let _ = self.builder.add_node(node); + self.builder.add_node(node); (node_id, end_block) } Expr::Tuple { vals, typ } => { let mut block = cur_block; let mut values = vec![]; for expr in vals { - let (val_expr, block_expr) = - self.codegen_expr(expr, types, ssa, func_id, block); + let (val_expr, block_expr) = self.codegen_expr(expr, types, ssa, block); block = block_expr; values.push(val_expr); } - let tuple_type = types.lower_type(&mut self.builder, *typ); - (self.build_tuple(values, tuple_type, func_id), block) + let tuple_type = types.lower_type(&mut self.builder.builder, *typ); + (self.build_tuple(values, tuple_type), block) } Expr::Union { tag, val, typ } => { - let (value, block) = self.codegen_expr(val, types, ssa, func_id, cur_block); + let (value, block) = self.codegen_expr(val, types, ssa, cur_block); - let union_type = types.lower_type(&mut self.builder, *typ); - (self.build_union(*tag, value, union_type, func_id), block) + let union_type = types.lower_type(&mut self.builder.builder, *typ); + (self.build_union(*tag, value, union_type), block) } Expr::Constant { val, .. 
} => { let const_id = self.build_constant(val, types); - let mut val = self.builder.allocate_node(func_id); + let mut val = self.builder.allocate_node(); let val_node = val.id(); val.build_constant(const_id); - let _ = self.builder.add_node(val); + self.builder.add_node(val); (val_node, cur_block) } Expr::Zero { typ } => { - let type_id = types.lower_type(&mut self.builder, *typ); - let zero_const = self.builder.create_constant_zero(type_id); - let mut zero = self.builder.allocate_node(func_id); + let type_id = types.lower_type(&mut self.builder.builder, *typ); + let zero_const = self.builder.builder.create_constant_zero(type_id); + let mut zero = self.builder.allocate_node(); let zero_val = zero.id(); zero.build_constant(zero_const); - let _ = self.builder.add_node(zero); + self.builder.add_node(zero); (zero_val, cur_block) } Expr::UnaryExp { op, expr, .. } => { - let (val, block) = self.codegen_expr(expr, types, ssa, func_id, cur_block); + let (val, block) = self.codegen_expr(expr, types, ssa, cur_block); - let mut expr = self.builder.allocate_node(func_id); + let mut expr = self.builder.allocate_node(); let expr_id = expr.id(); expr.build_unary( val, @@ -370,15 +420,15 @@ impl CodeGenerator<'_> { UnaryOp::BitwiseNot => UnaryOperator::Not, }, ); - let _ = self.builder.add_node(expr); + self.builder.add_node(expr); (expr_id, block) } Expr::BinaryExp { op, lhs, rhs, .. 
} => { - let (val_lhs, block_lhs) = self.codegen_expr(lhs, types, ssa, func_id, cur_block); - let (val_rhs, block_rhs) = self.codegen_expr(rhs, types, ssa, func_id, block_lhs); + let (val_lhs, block_lhs) = self.codegen_expr(lhs, types, ssa, cur_block); + let (val_rhs, block_rhs) = self.codegen_expr(rhs, types, ssa, block_lhs); - let mut expr = self.builder.allocate_node(func_id); + let mut expr = self.builder.allocate_node(); let expr_id = expr.id(); expr.build_binary( val_lhs, @@ -407,32 +457,29 @@ impl CodeGenerator<'_> { (expr_id, block_rhs) } Expr::CastExpr { expr, typ } => { - let type_id = types.lower_type(&mut self.builder, *typ); - let (val, block) = self.codegen_expr(expr, types, ssa, func_id, cur_block); + let type_id = types.lower_type(&mut self.builder.builder, *typ); + let (val, block) = self.codegen_expr(expr, types, ssa, cur_block); - let mut expr = self.builder.allocate_node(func_id); + let mut expr = self.builder.allocate_node(); let expr_id = expr.id(); expr.build_unary(val, UnaryOperator::Cast(type_id)); - let _ = self.builder.add_node(expr); + self.builder.add_node(expr); (expr_id, block) } Expr::CondExpr { cond, thn, els, .. 
} => { // Code-gen the condition - let (val_cond, block_cond) = - self.codegen_expr(cond, types, ssa, func_id, cur_block); + let (val_cond, block_cond) = self.codegen_expr(cond, types, ssa, cur_block); // Create the if let (mut if_builder, then_block, else_block) = ssa.create_cond(&mut self.builder, block_cond); if_builder.build_if(block_cond, val_cond); - let _ = self.builder.add_node(if_builder); + self.builder.add_node(if_builder); // Code-gen the branches - let (then_val, block_then) = - self.codegen_expr(thn, types, ssa, func_id, then_block); - let (else_val, block_else) = - self.codegen_expr(els, types, ssa, func_id, else_block); + let (then_val, block_then) = self.codegen_expr(thn, types, ssa, then_block); + let (else_val, block_else) = self.codegen_expr(els, types, ssa, else_block); // Create the join in the control-flow let join = ssa.create_block(&mut self.builder); @@ -441,10 +488,10 @@ impl CodeGenerator<'_> { ssa.seal_block(join, &mut self.builder); // Create a phi that joins the two branches - let mut phi = self.builder.allocate_node(func_id); + let mut phi = self.builder.allocate_node(); let phi_id = phi.id(); phi.build_phi(join, vec![then_val, else_val].into()); - let _ = self.builder.add_node(phi); + self.builder.add_node(phi); (phi_id, join) } @@ -458,7 +505,7 @@ impl CodeGenerator<'_> { // We start by lowering the type arguments to TypeIDs let mut type_params = vec![]; for typ in ty_args { - type_params.push(types.lower_type(&mut self.builder, *typ)); + type_params.push(types.lower_type(&mut self.builder.builder, *typ)); } // With the type arguments, we can now lookup the function @@ -466,7 +513,7 @@ impl CodeGenerator<'_> { // We then build the dynamic constants let dynamic_constants = - TypeSolverInst::build_dyn_consts(&mut self.builder, dyn_consts); + TypeSolverInst::build_dyn_consts(&mut self.builder.builder, dyn_consts); // Code gen for each argument in order // For inouts, this becomes an ssa.read_variable @@ -477,8 +524,7 @@ impl 
CodeGenerator<'_> { for arg in args { match arg { Either::Left(exp) => { - let (val, new_block) = - self.codegen_expr(exp, types, ssa, func_id, block); + let (val, new_block) = self.codegen_expr(exp, types, ssa, block); block = new_block; arg_vals.push(val); } @@ -490,15 +536,15 @@ impl CodeGenerator<'_> { } // Create the call expression, a region specifically for it, and a region after that. - let mut call_region = ssa.create_block(&mut self.builder); + let call_region = ssa.create_block(&mut self.builder); ssa.add_pred(call_region, block); ssa.seal_block(call_region, &mut self.builder); - let mut after_call_region = ssa.create_block(&mut self.builder); + let after_call_region = ssa.create_block(&mut self.builder); ssa.add_pred(after_call_region, call_region); ssa.seal_block(after_call_region, &mut self.builder); - let mut call = self.builder.allocate_node(func_id); + let mut call = self.builder.allocate_node(); let call_id = call.id(); call.build_call( @@ -512,23 +558,23 @@ impl CodeGenerator<'_> { block = after_call_region; // Read each of the "inout values" and perform the SSA update - let inouts_index = self.builder.create_field_index(1); + let inouts_index = self.builder.builder.create_field_index(1); for (idx, var) in inouts.into_iter().enumerate() { - let index = self.builder.create_field_index(idx); - let mut read = self.builder.allocate_node(func_id); + let index = self.builder.builder.create_field_index(idx); + let mut read = self.builder.allocate_node(); let read_id = read.id(); read.build_read(call_id, vec![inouts_index.clone(), index].into()); - let _ = self.builder.add_node(read); + self.builder.add_node(read); ssa.write_variable(var, block, read_id); } // Read the "actual return" value and return it - let value_index = self.builder.create_field_index(0); - let mut read = self.builder.allocate_node(func_id); + let value_index = self.builder.builder.create_field_index(0); + let mut read = self.builder.allocate_node(); let read_id = read.id(); 
read.build_read(call_id, vec![value_index].into()); - let _ = self.builder.add_node(read); + self.builder.add_node(read); (read_id, block) } @@ -542,13 +588,13 @@ impl CodeGenerator<'_> { let mut block = cur_block; let mut arg_vals = vec![]; for arg in args { - let (val, new_block) = self.codegen_expr(arg, types, ssa, func_id, block); + let (val, new_block) = self.codegen_expr(arg, types, ssa, block); block = new_block; arg_vals.push(val); } // Create the intrinsic call expression - let mut call = self.builder.allocate_node(func_id); + let mut call = self.builder.allocate_node(); let call_id = call.id(); call.build_intrinsic(*id, arg_vals.into()); let _ = self.builder.add_node(call); @@ -565,7 +611,6 @@ impl CodeGenerator<'_> { index: &Vec<semant::Index>, types: &mut TypeSolverInst, ssa: &mut SSA, - func_id: FunctionID, cur_block: NodeID, ) -> (Vec<ir::Index>, NodeID) { let mut block = cur_block; @@ -573,19 +618,19 @@ impl CodeGenerator<'_> { for idx in index { match idx { semant::Index::Field(idx) => { - built_index.push(self.builder.create_field_index(*idx)); + built_index.push(self.builder.builder.create_field_index(*idx)); } semant::Index::Variant(idx) => { - built_index.push(self.builder.create_variant_index(*idx)); + built_index.push(self.builder.builder.create_variant_index(*idx)); } semant::Index::Array(exps) => { let mut expr_vals = vec![]; for exp in exps { - let (val, new_block) = self.codegen_expr(exp, types, ssa, func_id, block); + let (val, new_block) = self.codegen_expr(exp, types, ssa, block); block = new_block; expr_vals.push(val); } - built_index.push(self.builder.create_position_index(expr_vals.into())); + built_index.push(self.builder.builder.create_position_index(expr_vals.into())); } } } @@ -593,42 +638,42 @@ impl CodeGenerator<'_> { (built_index, block) } - fn build_tuple(&mut self, exprs: Vec<NodeID>, typ: TypeID, func_id: FunctionID) -> NodeID { - let zero_const = self.builder.create_constant_zero(typ); + fn build_tuple(&mut self, exprs: 
Vec<NodeID>, typ: TypeID) -> NodeID { + let zero_const = self.builder.builder.create_constant_zero(typ); - let mut zero = self.builder.allocate_node(func_id); + let mut zero = self.builder.allocate_node(); let zero_val = zero.id(); zero.build_constant(zero_const); - let _ = self.builder.add_node(zero); + self.builder.add_node(zero); let mut val = zero_val; for (idx, exp) in exprs.into_iter().enumerate() { - let mut write = self.builder.allocate_node(func_id); + let mut write = self.builder.allocate_node(); let write_id = write.id(); - let index = self.builder.create_field_index(idx); + let index = self.builder.builder.create_field_index(idx); write.build_write(val, exp, vec![index].into()); - let _ = self.builder.add_node(write); + self.builder.add_node(write); val = write_id; } val } - fn build_union(&mut self, tag: usize, val: NodeID, typ: TypeID, func_id: FunctionID) -> NodeID { - let zero_const = self.builder.create_constant_zero(typ); + fn build_union(&mut self, tag: usize, val: NodeID, typ: TypeID) -> NodeID { + let zero_const = self.builder.builder.create_constant_zero(typ); - let mut zero = self.builder.allocate_node(func_id); + let mut zero = self.builder.allocate_node(); let zero_val = zero.id(); zero.build_constant(zero_const); - let _ = self.builder.add_node(zero); + self.builder.add_node(zero); - let mut write = self.builder.allocate_node(func_id); + let mut write = self.builder.allocate_node(); let write_id = write.id(); - let index = self.builder.create_variant_index(tag); + let index = self.builder.builder.create_variant_index(tag); write.build_write(zero_val, val, vec![index].into()); - let _ = self.builder.add_node(write); + self.builder.add_node(write); write_id } @@ -639,29 +684,29 @@ impl CodeGenerator<'_> { types: &mut TypeSolverInst<'a>, ) -> ConstantID { match lit { - Literal::Unit => self.builder.create_constant_prod(vec![].into()), - Literal::Bool(val) => self.builder.create_constant_bool(*val), + Literal::Unit => 
self.builder.builder.create_constant_prod(vec![].into()), + Literal::Bool(val) => self.builder.builder.create_constant_bool(*val), Literal::Integer(val) => { - let p = types.as_numeric_type(&mut self.builder, *typ); + let p = types.as_numeric_type(&mut self.builder.builder, *typ); match p { - Primitive::I8 => self.builder.create_constant_i8(*val as i8), - Primitive::I16 => self.builder.create_constant_i16(*val as i16), - Primitive::I32 => self.builder.create_constant_i32(*val as i32), - Primitive::I64 => self.builder.create_constant_i64(*val as i64), - Primitive::U8 => self.builder.create_constant_u8(*val as u8), - Primitive::U16 => self.builder.create_constant_u16(*val as u16), - Primitive::U32 => self.builder.create_constant_u32(*val as u32), - Primitive::U64 => self.builder.create_constant_u64(*val as u64), - Primitive::F32 => self.builder.create_constant_f32(*val as f32), - Primitive::F64 => self.builder.create_constant_f64(*val as f64), + Primitive::I8 => self.builder.builder.create_constant_i8(*val as i8), + Primitive::I16 => self.builder.builder.create_constant_i16(*val as i16), + Primitive::I32 => self.builder.builder.create_constant_i32(*val as i32), + Primitive::I64 => self.builder.builder.create_constant_i64(*val as i64), + Primitive::U8 => self.builder.builder.create_constant_u8(*val as u8), + Primitive::U16 => self.builder.builder.create_constant_u16(*val as u16), + Primitive::U32 => self.builder.builder.create_constant_u32(*val as u32), + Primitive::U64 => self.builder.builder.create_constant_u64(*val as u64), + Primitive::F32 => self.builder.builder.create_constant_f32(*val as f32), + Primitive::F64 => self.builder.builder.create_constant_f64(*val as f64), _ => panic!("Internal error in build_constant for integer"), } } Literal::Float(val) => { - let p = types.as_numeric_type(&mut self.builder, *typ); + let p = types.as_numeric_type(&mut self.builder.builder, *typ); match p { - Primitive::F32 => self.builder.create_constant_f32(*val as f32), - 
Primitive::F64 => self.builder.create_constant_f64(*val as f64), + Primitive::F32 => self.builder.builder.create_constant_f32(*val as f32), + Primitive::F64 => self.builder.builder.create_constant_f64(*val as f64), _ => panic!("Internal error in build_constant for float"), } } @@ -670,12 +715,13 @@ impl CodeGenerator<'_> { for val in vals { constants.push(self.build_constant(val, types)); } - self.builder.create_constant_prod(constants.into()) + self.builder.builder.create_constant_prod(constants.into()) } Literal::Sum(tag, val) => { let constant = self.build_constant(val, types); - let type_id = types.lower_type(&mut self.builder, *typ); + let type_id = types.lower_type(&mut self.builder.builder, *typ); self.builder + .builder .create_constant_sum(type_id, *tag as u32, constant) .unwrap() } diff --git a/juno_frontend/src/dynconst.rs b/juno_frontend/src/dynconst.rs index ba726299..786633be 100644 --- a/juno_frontend/src/dynconst.rs +++ b/juno_frontend/src/dynconst.rs @@ -1,29 +1,34 @@ /* A data structure for normalizing and performing computation over dynamic constant expressions */ use std::collections::HashMap; use std::{fmt, iter}; + use hercules_ir::{Builder, DynamicConstantID}; use num_rational::Ratio; -use num_traits::identities::{Zero, One}; +use num_traits::identities::{One, Zero}; // A dynamic constant is represented as a map from a vector of the powers of the variables to the // coefficient for that term #[derive(Eq, Clone)] pub struct DynConst { - terms : HashMap<Vec<i64>, Ratio<i64>>, - vars : usize, + terms: HashMap<Vec<i64>, Ratio<i64>>, + vars: usize, } // Two dynamic constants are equal if all terms in each polynomial with non-zero // coefficients have an equivalent term in the other polynomial impl PartialEq for DynConst { - fn eq(&self, other : &Self) -> bool { - if self.vars != other.vars { return false; } + fn eq(&self, other: &Self) -> bool { + if self.vars != other.vars { + return false; + } for (t, c) in self.terms.iter() { if !c.is_zero() { 
if let Some(q) = other.terms.get(t) { - if c != q { return false; } + if c != q { + return false; + } } else { return false; } @@ -33,7 +38,9 @@ impl PartialEq for DynConst { for (t, c) in other.terms.iter() { if !c.is_zero() { if let Some(q) = other.terms.get(t) { - if c != q { return false; } + if c != q { + return false; + } } else { return false; } @@ -45,48 +52,48 @@ impl PartialEq for DynConst { } impl fmt::Debug for DynConst { - fn fmt(&self, f : &mut fmt::Formatter<'_>) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.to_string(&|i| format!("v{}", i))) } } impl DynConst { // Construct a dynamic constant whose value is a constant integer - pub fn constant(val : i64, num_dyn : usize) -> DynConst { + pub fn constant(val: i64, num_dyn: usize) -> DynConst { Self::constant_ratio(Ratio::from_integer(val), num_dyn) } // Construct a dynamic constant whose value is a constant rational value - pub fn constant_ratio(val : Ratio<i64>, num_dyn : usize) -> DynConst { + pub fn constant_ratio(val: Ratio<i64>, num_dyn: usize) -> DynConst { // Create the vector of powers for each variable, all these powers are // just 0 let powers = vec![0; num_dyn]; DynConst { - terms : HashMap::from([(powers, val)]), - vars : num_dyn, + terms: HashMap::from([(powers, val)]), + vars: num_dyn, } } // Construct the zero dynamic constant - pub fn zero(dc : &DynConst) -> DynConst { + pub fn zero(dc: &DynConst) -> DynConst { Self::constant(0, dc.vars) } // Construct the dynamic constant value for a single particular dynamic // constant - pub fn dynamic_constant(idx : usize, num_dyn : usize) -> DynConst { + pub fn dynamic_constant(idx: usize, num_dyn: usize) -> DynConst { let mut powers = vec![0; num_dyn]; powers[idx] = 1; DynConst { - terms : HashMap::from([(powers, Ratio::one())]), - vars : num_dyn, + terms: HashMap::from([(powers, Ratio::one())]), + vars: num_dyn, } } // Adds a term (defined by the powers in t and coefficient c) to the // 
dynamic constant dc // This is used in implementing other operations - fn add_term(dc : &mut DynConst, t : &Vec<i64>, c : Ratio<i64>) { + fn add_term(dc: &mut DynConst, t: &Vec<i64>, c: Ratio<i64>) { if !c.is_zero() { if let Some(cur_coeff) = dc.terms.get_mut(t) { *cur_coeff += c; @@ -98,7 +105,7 @@ impl DynConst { // Same as add_term except the vector of powers is owned (which allows us // to avoid a clone that might otherwise be necessary) - fn add_term_owned(dc : &mut DynConst, t : Vec<i64>, c : Ratio<i64>) { + fn add_term_owned(dc: &mut DynConst, t: Vec<i64>, c: Ratio<i64>) { if !c.is_zero() { if let Some(cur_coeff) = dc.terms.get_mut(&t) { *cur_coeff += c; @@ -109,7 +116,7 @@ impl DynConst { } // Adds two dynamic constants - pub fn add(lhs : &DynConst, rhs : &DynConst) -> DynConst { + pub fn add(lhs: &DynConst, rhs: &DynConst) -> DynConst { let mut res = lhs.clone(); assert!(lhs.vars == rhs.vars); @@ -121,20 +128,20 @@ impl DynConst { } // Subtracts a dynamic constant from another - pub fn sub(lhs : &DynConst, rhs : &DynConst) -> DynConst { + pub fn sub(lhs: &DynConst, rhs: &DynConst) -> DynConst { let mut res = lhs.clone(); assert!(lhs.vars == rhs.vars); // Works like add except the coefficients we add are negated for (term, coeff) in rhs.terms.iter() { - Self::add_term(&mut res, term, - *coeff); + Self::add_term(&mut res, term, -*coeff); } res } // Multiplies two dynamic constants - pub fn mul(lhs : &DynConst, rhs : &DynConst) -> DynConst { + pub fn mul(lhs: &DynConst, rhs: &DynConst) -> DynConst { let mut res = DynConst::zero(lhs); assert!(lhs.vars == rhs.vars); @@ -143,9 +150,11 @@ impl DynConst { for (ltrm, lcoeff) in lhs.terms.iter() { for (rtrm, rcoeff) in rhs.terms.iter() { // Add the powers from the two terms together - let term = ltrm.iter() - .zip(rtrm.iter()).map(|(lp, rp)| lp + rp) - .collect::<Vec<_>>(); + let term = ltrm + .iter() + .zip(rtrm.iter()) + .map(|(lp, rp)| lp + rp) + .collect::<Vec<_>>(); // Multiply their coefficients let coeff = 
*lcoeff * *rcoeff; @@ -158,18 +167,25 @@ impl DynConst { // Our division at least currently only supports division by a single term // This also returns None if we try to divide by zero - pub fn div(lhs : &DynConst, rhs : &DynConst) -> Option<DynConst> { + pub fn div(lhs: &DynConst, rhs: &DynConst) -> Option<DynConst> { assert!(lhs.vars == rhs.vars); - if rhs.terms.len() != 1 { return None; } + if rhs.terms.len() != 1 { + return None; + } let (dterm, dcoeff) = rhs.terms.iter().nth(0).expect("From above"); - if dcoeff.is_zero() { return None; } + if dcoeff.is_zero() { + return None; + } let mut res = DynConst::zero(lhs); for (t, c) in lhs.terms.iter() { - let term = t.iter().zip(dterm.iter()).map(|(lp, rp)| lp - rp) - .collect::<_>(); + let term = t + .iter() + .zip(dterm.iter()) + .map(|(lp, rp)| lp - rp) + .collect::<_>(); let coeff = c / dcoeff; Self::add_term_owned(&mut res, term, coeff); } @@ -178,15 +194,19 @@ impl DynConst { } // Computes the negation of a dynamic constant - pub fn negate(val : &DynConst) -> DynConst { + pub fn negate(val: &DynConst) -> DynConst { DynConst { - terms : val.terms.iter().map(|(t, c)| (t.clone(), - c)).collect::<_>(), - vars : val.vars + terms: val + .terms + .iter() + .map(|(t, c)| (t.clone(), -c)) + .collect::<_>(), + vars: val.vars, } } // Raises a dynamic constant to a particular power - fn power(val : &DynConst, power : i64) -> Option<DynConst> { + fn power(val: &DynConst, power: i64) -> Option<DynConst> { let mut res = DynConst::constant(1, val.vars); if power > 0 { @@ -205,7 +225,7 @@ impl DynConst { // Substitutes the variables in this dynamic constant for the dynamic // constant expressions vars - pub fn subst(&self, vars : &Vec<DynConst>) -> Option<DynConst> { + pub fn subst(&self, vars: &Vec<DynConst>) -> Option<DynConst> { assert!(self.vars == vars.len()); // Only a constant, so we can just clone @@ -221,8 +241,7 @@ impl DynConst { for (term, coeff) in self.terms.iter() { let mut product = 
DynConst::constant_ratio(*coeff, num_dyns); for (var, power) in term.iter().enumerate() { - product = DynConst::mul(&product, - &DynConst::power(&vars[var], *power)?); + product = DynConst::mul(&product, &DynConst::power(&vars[var], *power)?); } res = DynConst::add(&res, &product); } @@ -230,111 +249,138 @@ impl DynConst { Some(res) } - fn term_to_string(term : &Vec<i64>, stringtab : &dyn Fn(usize) -> String) -> String { - term.iter().enumerate() - .map(|(i, p)| if *p == 0 { "".to_string() } - else { format!("{}^{}", stringtab(i), p) }) - .filter(|s| !s.is_empty()).collect::<Vec<_>>().join(" ") + fn term_to_string(term: &Vec<i64>, stringtab: &dyn Fn(usize) -> String) -> String { + term.iter() + .enumerate() + .map(|(i, p)| { + if *p == 0 { + "".to_string() + } else { + format!("{}^{}", stringtab(i), p) + } + }) + .filter(|s| !s.is_empty()) + .collect::<Vec<_>>() + .join(" ") } // Converts the dynamic constant into a string using a function which maps // variable numbers into names // Useful for debugging and error messages - pub fn to_string(&self, stringtab : &dyn Fn(usize) -> String) -> String { + pub fn to_string(&self, stringtab: &dyn Fn(usize) -> String) -> String { let mut vec = self.terms.iter().collect::<Vec<_>>(); vec.sort(); vec.iter() - .map(|(t, c)| format!("{} {}", c.to_string(), - Self::term_to_string(*t, stringtab))) - .collect::<Vec<_>>().join(" + ") + .map(|(t, c)| format!("{} {}", c.to_string(), Self::term_to_string(*t, stringtab))) + .collect::<Vec<_>>() + .join(" + ") } // Builds a dynamic constant in the IR - pub fn build(&self, builder : &mut Builder) -> DynamicConstantID { + pub fn build(&self, builder: &mut Builder) -> DynamicConstantID { // Identify the terms with non-zero coefficients, based on the powers - let mut non_zero_coeff = self.terms.iter().filter(|(_, c)| !c.is_zero()) - .collect::<Vec<_>>(); + let mut non_zero_coeff = self + .terms + .iter() + .filter(|(_, c)| !c.is_zero()) + .collect::<Vec<_>>(); non_zero_coeff.sort_by(|(d1, 
_), (d2, _)| d1.cmp(d2)); - let (pos, neg) : (Vec<_>, Vec<_>) = - non_zero_coeff.iter() - .map(|(d, c)| self.build_mono(builder, d, c)) - .partition(|(_, neg)| ! *neg); + let (pos, neg): (Vec<_>, Vec<_>) = non_zero_coeff + .iter() + .map(|(d, c)| self.build_mono(builder, d, c)) + .partition(|(_, neg)| !*neg); - let pos_sum = pos.into_iter().map(|(t, _)| t) - .reduce(|x, y| builder.create_dynamic_constant_add(x, y)) - .unwrap_or_else(|| builder.create_dynamic_constant_constant(0)); + let pos_sum = pos + .into_iter() + .map(|(t, _)| t) + .reduce(|x, y| builder.create_dynamic_constant_add(x, y)) + .unwrap_or_else(|| builder.create_dynamic_constant_constant(0)); - let neg_sum = neg.into_iter().map(|(t, _)| t) - .reduce(|x, y| builder.create_dynamic_constant_add(x, y)); + let neg_sum = neg + .into_iter() + .map(|(t, _)| t) + .reduce(|x, y| builder.create_dynamic_constant_add(x, y)); match neg_sum { None => pos_sum, - Some(neg) => builder.create_dynamic_constant_sub(pos_sum, neg) + Some(neg) => builder.create_dynamic_constant_sub(pos_sum, neg), } } // Build's a monomial, with a given list of powers (term) and coefficients // Returns the dynamic constant id of the positive value and a boolean // indicating whether the value should actually be negative - fn build_mono(&self, builder : &mut Builder, term : &Vec<i64>, - coeff : &Ratio<i64>) -> (DynamicConstantID, bool) { - let term_id = term.iter().enumerate() - .filter(|(_, p)| **p != 0) - .map(|(v, p)| self.build_power(builder, v, *p)) - .collect::<Vec<_>>().into_iter() - .reduce(|x, y| builder.create_dynamic_constant_add(x, y)); + fn build_mono( + &self, + builder: &mut Builder, + term: &Vec<i64>, + coeff: &Ratio<i64>, + ) -> (DynamicConstantID, bool) { + let term_id = term + .iter() + .enumerate() + .filter(|(_, p)| **p != 0) + .map(|(v, p)| self.build_power(builder, v, *p)) + .collect::<Vec<_>>() + .into_iter() + .reduce(|x, y| builder.create_dynamic_constant_add(x, y)); match term_id { - None => { // This means all 
powers of the term are 0, so we just - // output the coefficient + None => { + // This means all powers of the term are 0, so we just + // output the coefficient if !coeff.is_integer() { panic!("Dynamic constant is a non-integer constant") } else { - let val : i64 = coeff.to_integer(); - (builder.create_dynamic_constant_constant(val.abs() as usize), - val < 0) + let val: i64 = coeff.to_integer(); + ( + builder.create_dynamic_constant_constant(val.abs() as usize), + val < 0, + ) } - }, + } Some(term) => { - if coeff.is_one() { (term, false) } - else { - let numer : i64 = coeff.numer().abs(); - let denom : i64 = *coeff.denom(); // > 0 - - let with_numer = - if numer == 1 { term } - else { - let numer_id = builder.create_dynamic_constant_constant(numer as usize); - builder.create_dynamic_constant_mul(numer_id, term) - }; - let with_denom = - if denom == 1 { with_numer } - else { - let denom_id = builder.create_dynamic_constant_constant(denom as usize); - builder.create_dynamic_constant_div(with_numer, denom_id) - }; + if coeff.is_one() { + (term, false) + } else { + let numer: i64 = coeff.numer().abs(); + let denom: i64 = *coeff.denom(); // > 0 + + let with_numer = if numer == 1 { + term + } else { + let numer_id = builder.create_dynamic_constant_constant(numer as usize); + builder.create_dynamic_constant_mul(numer_id, term) + }; + let with_denom = if denom == 1 { + with_numer + } else { + let denom_id = builder.create_dynamic_constant_constant(denom as usize); + builder.create_dynamic_constant_div(with_numer, denom_id) + }; (with_denom, numer < 0) } - }, + } } } // Build's a dynamic constant that is a certain power of a specific variable - fn build_power(&self, builder : &mut Builder, v : usize, power : i64) -> DynamicConstantID { + fn build_power(&self, builder: &mut Builder, v: usize, power: i64) -> DynamicConstantID { assert!(power != 0); let power_pos = power.abs() as usize; - let var_id = builder.create_dynamic_constant_parameter(v); - let power_id = 
iter::repeat(var_id).take(power_pos) - .map(|_| var_id) - .reduce(|x, y| builder.create_dynamic_constant_mul(x, y)) - .expect("Power is non-zero"); + let power_id = iter::repeat(var_id) + .take(power_pos) + .map(|_| var_id) + .reduce(|x, y| builder.create_dynamic_constant_mul(x, y)) + .expect("Power is non-zero"); - if power > 0 { power_id } - else { + if power > 0 { + power_id + } else { let one_id = builder.create_dynamic_constant_constant(1); builder.create_dynamic_constant_div(one_id, power_id) } diff --git a/juno_frontend/src/env.rs b/juno_frontend/src/env.rs index f8e5fda6..fb746045 100644 --- a/juno_frontend/src/env.rs +++ b/juno_frontend/src/env.rs @@ -3,38 +3,39 @@ use std::collections::HashSet; use std::hash::Hash; pub struct Env<K, V> { - table : HashMap<K, Vec<V>>, - scope : Vec<HashSet<K>>, - count : usize, + table: HashMap<K, Vec<V>>, + scope: Vec<HashSet<K>>, + count: usize, } -impl<K : Eq + Hash + Copy, V> Env<K, V> { +impl<K: Eq + Hash + Copy, V> Env<K, V> { pub fn new() -> Env<K, V> { - Env { table : HashMap::new(), scope : vec![], count : 0 } + Env { + table: HashMap::new(), + scope: vec![], + count: 0, + } } - pub fn lookup(&self, k : &K) -> Option<&V> { + pub fn lookup(&self, k: &K) -> Option<&V> { match self.table.get(k) { None => None, Some(l) => l.last(), } } - pub fn insert(&mut self, k : K, v : V) { - if self.scope[self.scope.len()-1].contains(&k) { + pub fn insert(&mut self, k: K, v: V) { + if self.scope[self.scope.len() - 1].contains(&k) { match self.table.get_mut(&k) { None => panic!("Internal Failure: Environment Insert"), Some(r) => { let last = r.len() - 1; r[last] = v; - }, + } } } else { let last = self.scope.len() - 1; - match self.table.get_mut(&k) { - None => { self.table.insert(k, vec![v]); }, - Some(r) => { r.push(v); }, - } + self.table.entry(k).or_insert(vec![]).push(v); self.scope[last].insert(k); } } @@ -49,11 +50,15 @@ impl<K : Eq + Hash + Copy, V> Env<K, V> { Some(to_remove) => { for k in to_remove { match 
self.table.get_mut(&k) { - None => { assert!(false, "Internal Failure: Environment Close Scope"); }, - Some(r) => { r.pop(); }, + None => { + assert!(false, "Internal Failure: Environment Close Scope"); + } + Some(r) => { + r.pop(); + } } } - }, + } } } diff --git a/juno_frontend/src/intrinsics.rs b/juno_frontend/src/intrinsics.rs index 100d1f25..e0c2d2c3 100644 --- a/juno_frontend/src/intrinsics.rs +++ b/juno_frontend/src/intrinsics.rs @@ -1,8 +1,8 @@ /* Definitions of the set of intrinsic functions in Juno */ use phf::phf_map; -use crate::types::{Type, TypeSolver, Primitive}; use crate::parser; +use crate::types::{Primitive, Type, TypeSolver}; // How intrinsics are identified in the Hercules IR pub type IntrinsicIdentity = hercules_ir::ir::Intrinsic; @@ -11,32 +11,38 @@ pub type IntrinsicIdentity = hercules_ir::ir::Intrinsic; // identified in the Hercules IR #[derive(Clone)] pub struct IntrinsicInfo { - pub id : IntrinsicIdentity, - pub kinds : &'static [parser::Kind], - pub typ : fn(&Vec<Type>, &mut TypeSolver) -> (Vec<Type>, Type), + pub id: IntrinsicIdentity, + pub kinds: &'static [parser::Kind], + pub typ: fn(&Vec<Type>, &mut TypeSolver) -> (Vec<Type>, Type), } // The type for a function which takes one argument of a variable type and // returns a value of that same type -fn var_type(ty_args : &Vec<Type>, _ : &mut TypeSolver) -> (Vec<Type>, Type) { +fn var_type(ty_args: &Vec<Type>, _: &mut TypeSolver) -> (Vec<Type>, Type) { (vec![ty_args[0]], ty_args[0]) } // Type type for a function which takes two arguments of the same variable type // and returns a value of that same type -fn var2_type(ty_args : &Vec<Type>, _ : &mut TypeSolver) -> (Vec<Type>, Type) { +fn var2_type(ty_args: &Vec<Type>, _: &mut TypeSolver) -> (Vec<Type>, Type) { (vec![ty_args[0], ty_args[0]], ty_args[0]) } -fn pow_type(ty_args : &Vec<Type>, types : &mut TypeSolver) -> (Vec<Type>, Type) { - (vec![ty_args[0], types.new_primitive(Primitive::U32)], ty_args[0]) +fn pow_type(ty_args: &Vec<Type>, 
types: &mut TypeSolver) -> (Vec<Type>, Type) { + ( + vec![ty_args[0], types.new_primitive(Primitive::U32)], + ty_args[0], + ) } -fn powi_type(ty_args : &Vec<Type>, types : &mut TypeSolver) -> (Vec<Type>, Type) { - (vec![ty_args[0], types.new_primitive(Primitive::I32)], ty_args[0]) +fn powi_type(ty_args: &Vec<Type>, types: &mut TypeSolver) -> (Vec<Type>, Type) { + ( + vec![ty_args[0], types.new_primitive(Primitive::I32)], + ty_args[0], + ) } -static INTRINSICS : phf::Map<&'static str, IntrinsicInfo> = phf_map! { +static INTRINSICS: phf::Map<&'static str, IntrinsicInfo> = phf_map! { "abs" => IntrinsicInfo { id : hercules_ir::ir::Intrinsic::Abs, kinds : &[parser::Kind::Number], @@ -189,6 +195,6 @@ static INTRINSICS : phf::Map<&'static str, IntrinsicInfo> = phf_map! { }, }; -pub fn lookup(nm : &str) -> Option<&IntrinsicInfo> { +pub fn lookup(nm: &str) -> Option<&IntrinsicInfo> { INTRINSICS.get(nm) } diff --git a/juno_frontend/src/labeled_builder.rs b/juno_frontend/src/labeled_builder.rs new file mode 100644 index 00000000..2386d81c --- /dev/null +++ b/juno_frontend/src/labeled_builder.rs @@ -0,0 +1,141 @@ +use hercules_ir::build::*; +use hercules_ir::ir::*; +use juno_scheduler::LabeledStructure; +use std::collections::{HashMap, HashSet}; + +// A label-tracking code generator which tracks the current function that we're +// generating code for and the what labels apply to the nodes being created +// FIXME: The builder public is currently public to avoid code duplication and +// we're just allowing types and dynamic constants to be constructed on the +// builder directly, rather than on this labeled builder (since types and +// dynamic constants do not require labeling) +pub struct LabeledBuilder<'a> { + pub builder: Builder<'a>, + function: Option<FunctionID>, + label: usize, + label_stack: Vec<usize>, + label_tree: HashMap<FunctionID, Vec<(LabeledStructure, HashSet<usize>)>>, + label_map: HashMap<FunctionID, HashMap<NodeID, usize>>, +} +impl<'a> LabeledBuilder<'a> { + 
pub fn create() -> LabeledBuilder<'a> { + LabeledBuilder { + builder: Builder::create(), + function: None, + label: 0, // 0 is always the root label + label_stack: vec![], + label_tree: HashMap::new(), + label_map: HashMap::new(), + } + } + + pub fn finish( + self, + ) -> ( + Module, + HashMap<FunctionID, Vec<(LabeledStructure, HashSet<usize>)>>, + HashMap<FunctionID, HashMap<NodeID, usize>>, + ) { + let LabeledBuilder { + builder, + function: _, + label: _, + label_stack: _, + label_tree, + label_map, + } = self; + (builder.finish(), label_tree, label_map) + } + + pub fn create_function( + &mut self, + name: &str, + param_types: Vec<TypeID>, + return_type: TypeID, + num_dynamic_constants: u32, + num_labels: usize, + ) -> Result<(FunctionID, NodeID), String> { + let (func, entry) = + self.builder + .create_function(name, param_types, return_type, num_dynamic_constants)?; + + self.label_tree.insert( + func, + vec![(LabeledStructure::Nothing(), HashSet::new()); num_labels], + ); + self.label_map.insert(func, HashMap::new()); + + Ok((func, entry)) + } + + pub fn set_function(&mut self, func: FunctionID) { + self.function = Some(func); + self.label = 0; + } + + pub fn push_label(&mut self, label: usize) { + let Some(cur_func) = self.function else { + panic!("Setting label without function") + }; + + let cur_label = self.label; + self.label_stack.push(cur_label); + + for ancestor in self.label_stack.iter() { + self.label_tree.get_mut(&cur_func).unwrap()[*ancestor] + .1 + .insert(label); + } + + self.label = label; + } + + pub fn pop_label(&mut self, structure: LabeledStructure) { + let Some(cur_func) = self.function else { + panic!("Setting label without function") + }; + self.label_tree.get_mut(&cur_func).unwrap()[self.label].0 = structure; + let Some(label) = self.label_stack.pop() else { + panic!("Cannot pop label not pushed first") + }; + self.label = label; + } + + fn allocate_node_labeled(&mut self, label: usize) -> NodeBuilder { + let Some(func) = self.function 
else { + panic!("Cannot allocate node without function") + }; + let builder = self.builder.allocate_node(func); + + self.label_map + .get_mut(&func) + .unwrap() + .insert(builder.id(), label); + + builder + } + + pub fn allocate_node(&mut self) -> NodeBuilder { + self.allocate_node_labeled(self.label) + } + + pub fn allocate_node_labeled_with(&mut self, other: NodeID) -> NodeBuilder { + let Some(func) = self.function else { + panic!("Cannot allocate node without function") + }; + let label = self + .label_map + .get(&func) + .unwrap() + .get(&other) + .expect("Other node not labeled"); + + self.allocate_node_labeled(*label) + } + + pub fn add_node(&mut self, builder: NodeBuilder) { + let Ok(()) = self.builder.add_node(builder) else { + panic!("Node not built") + }; + } +} diff --git a/juno_frontend/src/lang.l b/juno_frontend/src/lang.l index 6d449c14..d54a54d7 100644 --- a/juno_frontend/src/lang.l +++ b/juno_frontend/src/lang.l @@ -126,6 +126,7 @@ _ "_" 0b[0-1]+ "BIN_INT" 0o[0-7]+ "OCT_INT" [0-9]+\.[0-9]*(|e[0-9]+) "FLOAT_LIT" +@[a-zA-Z0-9_]+ "LABEL" . "UNMATCHED" . "UNARY" diff --git a/juno_frontend/src/lang.y b/juno_frontend/src/lang.y index 98322da4..d294d9a1 100644 --- a/juno_frontend/src/lang.y +++ b/juno_frontend/src/lang.y @@ -1,7 +1,7 @@ %start Program %token UNARY -%avoid_insert "FUNC_ATTR" "DOT_NUM" "ID" "INT" "HEX_INT" "BIN_INT" "OCT_INT" "FLOAT_LIT" +%avoid_insert "FUNC_ATTR" "DOT_NUM" "ID" "INT" "HEX_INT" "BIN_INT" "OCT_INT" "FLOAT_LIT" "LABEL" %expect-unused Unmatched 'UNMATCHED' 'UNARY' %nonassoc ')' @@ -295,7 +295,10 @@ Stmt -> Result<Stmt, ()> { Ok(Stmt::CallStmt{ span : $span, name : $1?, ty_args : None, args : $3? }) } | PackageName '::' '<' TypeExprs '>' '(' Params ')' ';' { Ok(Stmt::CallStmt{ span : $span, name : $1?, ty_args : Some($4?), args : $7? }) } + | Label Stmt + { Ok(Stmt::LabeledStmt { span : $span, label : $1?, stmt : Box::new($2?) 
}) } ; +Label -> Result<Span, ()> : 'LABEL' { Ok($span) }; Stmts -> Result<Stmt, ()> : '{' StmtList '}' { Ok(Stmt::BlockStmt{ span : $span, body : $2? }) }; StmtList -> Result<Vec<Stmt>, ()> @@ -575,7 +578,8 @@ Unmatched -> (): 'UNMATCHED' { }; %% use cfgrammar::Span; -use lrlex::DefaultLexeme; +use lrlex::{DefaultLexeme, DefaultLexerTypes}; +use lrpar::NonStreamingLexer; use std::collections::VecDeque; fn flatten<T>(lhs: Result<Vec<T>, ()>, rhs: Result<T, ()>) -> Result<Vec<T>, ()> { @@ -671,6 +675,7 @@ pub enum Stmt { BlockStmt { span : Span, body : Vec<Stmt> }, CallStmt { span : Span, name : PackageName, ty_args : Option<Vec<TypeExpr>>, args : Vec<(bool, Expr)> }, // bool indicates & (for inouts) + LabeledStmt { span : Span, label : Span, stmt : Box<Stmt> }, } #[derive(Debug)] @@ -770,6 +775,7 @@ impl Spans for Stmt { | Stmt::ContinueStmt { span, .. } | Stmt::BlockStmt { span, .. } | Stmt::CallStmt { span, .. } + | Stmt::LabeledStmt { span, .. } => *span } } @@ -803,4 +809,12 @@ impl IntBase { IntBase::Hexadecimal => 16, } } + + pub fn string(&self, lexer : &dyn NonStreamingLexer<DefaultLexerTypes<u32>>, span : Span) -> String { + let full_string = lexer.span_str(span); + match self { + IntBase::Decimal => full_string.to_string(), + _ => full_string[2..].to_string(), + } + } } diff --git a/juno_frontend/src/lib.rs b/juno_frontend/src/lib.rs new file mode 100644 index 00000000..1ac61fe5 --- /dev/null +++ b/juno_frontend/src/lib.rs @@ -0,0 +1,144 @@ +mod codegen; +mod dynconst; +mod env; +mod intrinsics; +mod labeled_builder; +mod locs; +mod parser; +mod semant; +mod ssa; +mod types; + +extern crate hercules_ir; + +use std::fmt; +use std::path::Path; + +pub enum JunoVerify { + None, + JunoOpts, + AllPasses, +} +impl JunoVerify { + pub fn verify(&self) -> bool { + match self { + JunoVerify::None => false, + _ => true, + } + } + pub fn verify_all(&self) -> bool { + match self { + JunoVerify::AllPasses => true, + _ => false, + } + } +} + +pub enum JunoSchedule { + 
None, + DefaultSchedule, + Schedule(String), +} + +macro_rules! add_verified_pass { + ($pm:ident, $verify:ident, $pass:ident) => { + $pm.add_pass(hercules_opt::pass::Pass::$pass); + if $verify.verify() || $verify.verify_all() { + $pm.add_pass(hercules_opt::pass::Pass::Verify); + } + }; +} +macro_rules! add_pass { + ($pm:ident, $verify:ident, $pass:ident) => { + $pm.add_pass(hercules_opt::pass::Pass::$pass); + if $verify.verify_all() { + $pm.add_pass(hercules_opt::pass::Pass::Verify); + } + }; +} + +pub enum ErrorMessage { + SemanticError(semant::ErrorMessages), + SchedulingError(String), +} + +impl fmt::Display for ErrorMessage { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + ErrorMessage::SemanticError(errs) => { + for err in errs { + write!(f, "{}", err)?; + } + } + ErrorMessage::SchedulingError(msg) => { + write!(f, "{}", msg)?; + } + } + Ok(()) + } +} + +pub fn compile( + src_file: String, + verify: JunoVerify, + x_dot: bool, + schedule: JunoSchedule, + output_dir: String, +) -> Result<(), ErrorMessage> { + let src_file_path = Path::new(&src_file); + let module_name = String::from(src_file_path.file_stem().unwrap().to_str().unwrap()); + + let prg = match semant::parse_and_analyze(src_file) { + Ok(prg) => prg, + Err(msg) => { + return Err(ErrorMessage::SemanticError(msg)); + } + }; + let (module, func_info) = codegen::codegen_program(prg); + + let mut pm = match schedule { + JunoSchedule::None => hercules_opt::pass::PassManager::new(module), + JunoSchedule::DefaultSchedule => { + let mut pm = hercules_opt::pass::PassManager::new(module); + pm.make_plans(); + pm + } + JunoSchedule::Schedule(file) => match juno_scheduler::schedule(&module, func_info, file) { + Ok(plans) => { + let mut pm = hercules_opt::pass::PassManager::new(module); + pm.set_plans(plans); + pm + } + Err(msg) => { + return Err(ErrorMessage::SchedulingError(msg)); + } + }, + }; + if verify.verify() || verify.verify_all() { + pm.add_pass(hercules_opt::pass::Pass::Verify); 
+ } + add_verified_pass!(pm, verify, PhiElim); + if x_dot { + pm.add_pass(hercules_opt::pass::Pass::Xdot(true)); + } + add_pass!(pm, verify, CCP); + add_pass!(pm, verify, DCE); + add_pass!(pm, verify, GVN); + add_pass!(pm, verify, DCE); + //add_pass!(pm, verify, SROA); + if x_dot { + pm.add_pass(hercules_opt::pass::Pass::Xdot(true)); + } + add_pass!(pm, verify, Forkify); + add_pass!(pm, verify, ForkGuardElim); + add_verified_pass!(pm, verify, DCE); + if x_dot { + pm.add_pass(hercules_opt::pass::Pass::Xdot(true)); + pm.add_pass(hercules_opt::pass::Pass::SchedXdot); + } + + pm.add_pass(hercules_opt::pass::Pass::Codegen(output_dir, module_name)); + pm.run_passes(); + + Ok(()) +} diff --git a/juno_frontend/src/locs.rs b/juno_frontend/src/locs.rs index 24c06711..283f391e 100644 --- a/juno_frontend/src/locs.rs +++ b/juno_frontend/src/locs.rs @@ -1,35 +1,47 @@ +use cfgrammar::Span; use lrlex::DefaultLexerTypes; use lrpar::NonStreamingLexer; -use cfgrammar::Span; use std::fmt; // A location in the program, used in error messages #[derive(Copy, Clone, Debug)] pub struct Location { - start_line : usize, start_column : usize, - end_line : usize, end_column : usize, + start_line: usize, + start_column: usize, + end_line: usize, + end_column: usize, } impl Location { pub fn fake() -> Location { - Location { start_line : 0, start_column : 0, - end_line : 0, end_column : 0 } + Location { + start_line: 0, + start_column: 0, + end_line: 0, + end_column: 0, + } } } // Conversion from span to internal locations -pub fn span_to_loc(span : Span, lexer : &dyn NonStreamingLexer<DefaultLexerTypes<u32>>) - -> Location { +pub fn span_to_loc(span: Span, lexer: &dyn NonStreamingLexer<DefaultLexerTypes<u32>>) -> Location { let ((start_line, start_column), (end_line, end_column)) = lexer.line_col(span); - Location { start_line, start_column, end_line, end_column } + Location { + start_line, + start_column, + end_line, + end_column, + } } // Printing locations impl fmt::Display for Location { - 
fn fmt(&self, f : &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}, {} -- {}, {}", - self.start_line, self.start_column, - self.end_line, self.end_column) + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!( + f, + "{}, {} -- {}, {}", + self.start_line, self.start_column, self.end_line, self.end_column + ) } } diff --git a/juno_frontend/src/main.rs b/juno_frontend/src/main.rs index f291b870..4624e716 100644 --- a/juno_frontend/src/main.rs +++ b/juno_frontend/src/main.rs @@ -1,25 +1,17 @@ extern crate clap; -use clap::Parser; +use juno_compiler::*; -mod codegen; -mod dynconst; -mod env; -mod intrinsics; -mod locs; -mod parser; -mod semant; -mod ssa; -mod types; +use clap::{ArgGroup, Parser}; -use codegen::*; - -use std::path::Path; - -extern crate hercules_ir; +use std::path::PathBuf; #[derive(Parser)] -#[command(author, version, about, long_about = None)] +#[clap(author, version, about, long_about = None)] +#[clap(group( + ArgGroup::new("scheduling") + .required(false) + .args(&["schedule", "default_schedule", "no_schedule"])))] struct Cli { src_file: String, #[clap(short, long)] @@ -28,76 +20,48 @@ struct Cli { verify_all: bool, #[arg(short, long = "x-dot")] x_dot: bool, - #[arg(short, long, value_name = "OUTPUT")] - output: Option<String>, -} - -macro_rules! add_verified_pass { - ($pm:ident, $args:ident, $pass:ident) => { - $pm.add_pass(hercules_opt::pass::Pass::$pass); - if $args.verify || $args.verify_all { - $pm.add_pass(hercules_opt::pass::Pass::Verify); - } - }; -} -macro_rules! 
add_pass { - ($pm:ident, $args:ident, $pass:ident) => { - $pm.add_pass(hercules_opt::pass::Pass::$pass); - if $args.verify_all { - $pm.add_pass(hercules_opt::pass::Pass::Verify); - } - }; + #[clap(short, long, value_name = "SCHEDULE")] + schedule: Option<String>, + #[clap(short, long = "default-schedule")] + default_schedule: bool, + #[clap(short, long)] + no_schedule: bool, + #[arg(short, long = "output-dir", value_name = "OUTPUT DIR")] + output_dir: Option<String>, } fn main() { let args = Cli::parse(); - let src_file = args.src_file.clone(); - let prg = semant::parse_and_analyze(args.src_file); - match prg { - Ok(prg) => { - let module = codegen_program(prg); - - let mut pm = hercules_opt::pass::PassManager::new(module); - if args.verify || args.verify_all { - pm.add_pass(hercules_opt::pass::Pass::Verify); - } - add_verified_pass!(pm, args, PhiElim); - if args.x_dot { - pm.add_pass(hercules_opt::pass::Pass::Xdot(true)); - } - add_pass!(pm, args, CCP); - add_pass!(pm, args, DCE); - add_pass!(pm, args, GVN); - add_pass!(pm, args, DCE); - //add_pass!(pm, args, SROA); - if args.x_dot { - pm.add_pass(hercules_opt::pass::Pass::Xdot(true)); - } - add_pass!(pm, args, Forkify); - add_pass!(pm, args, ForkGuardElim); - add_verified_pass!(pm, args, DCE); - if args.x_dot { - pm.add_pass(hercules_opt::pass::Pass::Xdot(true)); - pm.add_pass(hercules_opt::pass::Pass::SchedXdot); + let verify = if args.verify_all { + JunoVerify::AllPasses + } else if args.verify { + JunoVerify::JunoOpts + } else { + JunoVerify::None + }; + let schedule = match args.schedule { + Some(file) => JunoSchedule::Schedule(file), + None => { + if args.default_schedule { + JunoSchedule::DefaultSchedule + } else { + JunoSchedule::None } - - let src_file_path = Path::new(&src_file); - let module_name = String::from(src_file_path.file_stem().unwrap().to_str().unwrap()); - let output_folder = match args.output { - Some(output_folder) => output_folder, - None => 
String::from(src_file_path.parent().unwrap().to_str().unwrap()), - }; - pm.add_pass(hercules_opt::pass::Pass::Codegen( - output_folder, - module_name, - )); - - let _ = pm.run_passes(); } + }; + let output_dir = match args.output_dir { + Some(dir) => dir, + None => PathBuf::from(args.src_file.clone()) + .parent() + .unwrap() + .to_str() + .unwrap() + .to_string(), + }; + match compile(args.src_file, verify, args.x_dot, schedule, output_dir) { + Ok(()) => {} Err(errs) => { - for err in errs { - eprintln!("{}", err); - } + eprintln!("{}", errs); } } } diff --git a/juno_frontend/src/parser.rs b/juno_frontend/src/parser.rs index d5885d3e..89bdf2ec 100644 --- a/juno_frontend/src/parser.rs +++ b/juno_frontend/src/parser.rs @@ -5,4 +5,6 @@ lrlex_mod!("lang.l"); lrpar_mod!("lang.y"); pub use lang_y::*; -pub mod lexer { pub use super::lang_l::*; } +pub mod lexer { + pub use super::lang_l::*; +} diff --git a/juno_frontend/src/semant.rs b/juno_frontend/src/semant.rs index d573154e..1ccdf3ed 100644 --- a/juno_frontend/src/semant.rs +++ b/juno_frontend/src/semant.rs @@ -1,57 +1,74 @@ extern crate hercules_ir; use std::collections::{HashMap, LinkedList}; +use std::fmt; use std::fs::File; use std::io::Read; -use std::fmt; +use cfgrammar::Span; use lrlex::DefaultLexerTypes; use lrpar::NonStreamingLexer; -use cfgrammar::Span; use ordered_float::OrderedFloat; use crate::dynconst::DynConst; use crate::env::Env; use crate::intrinsics; -use crate::locs::{Location, span_to_loc}; -use crate::parser::*; +use crate::locs::{span_to_loc, Location}; use crate::parser; -use crate::types::{Either, Type, TypeSolver}; +use crate::parser::*; use crate::types; +use crate::types::{Either, Type, TypeSolver}; // Definitions and data structures for semantic analysis // Entities in the environment enum Entity { // A variable has a variable number to distinguish shadowing - Variable { variable : usize, typ : Type, is_const : bool }, - Type { type_args : Vec<parser::Kind>, value : Type }, - DynConst { 
value : DynConst }, - Constant { value : Constant }, + Variable { + variable: usize, + typ: Type, + is_const: bool, + }, + Type { + type_args: Vec<parser::Kind>, + value: Type, + }, + DynConst { + value: DynConst, + }, + Constant { + value: Constant, + }, // For functions we track an index, its type parameters, its argument types and if they are // inout, and its return type - Function { index : usize, type_args : Vec<parser::Kind>, - args : Vec<(types::Type, bool)>, return_type : types::Type }, + Function { + index: usize, + type_args: Vec<parser::Kind>, + args: Vec<(types::Type, bool)>, + return_type: types::Type, + }, } // Constant values #[derive(Clone, Debug)] pub enum Literal { - Unit, Bool(bool), Integer(u64), Float(f64), + Unit, + Bool(bool), + Integer(u64), + Float(f64), Tuple(Vec<Constant>), Sum(usize, Box<Constant>), // The tag and value } pub type Constant = (Literal, Type); impl PartialEq for Literal { - fn eq(&self, other : &Self) -> bool { + fn eq(&self, other: &Self) -> bool { match (self, other) { (Literal::Unit, Literal::Unit) => true, (Literal::Bool(b), Literal::Bool(c)) => b == c, (Literal::Integer(i), Literal::Integer(j)) => i == j, - (Literal::Float(i), Literal::Float(j)) => - OrderedFloat(*i) == OrderedFloat(*j), + (Literal::Float(i), Literal::Float(j)) => OrderedFloat(*i) == OrderedFloat(*j), (Literal::Tuple(fs), Literal::Tuple(gs)) => fs == gs, (Literal::Sum(i, v), Literal::Sum(j, u)) => i == j && *v == *u, _ => false, @@ -63,19 +80,21 @@ impl Eq for Literal {} // Map strings to unique identifiers and counts uids struct StringTable { - count : usize, - string_to_index : HashMap<String, usize>, - index_to_string : HashMap<usize, String>, + count: usize, + string_to_index: HashMap<String, usize>, + index_to_string: HashMap<usize, String>, } impl StringTable { fn new() -> StringTable { - StringTable { count : 0, - string_to_index : HashMap::new(), - index_to_string : HashMap::new(), } + StringTable { + count: 0, + string_to_index: 
HashMap::new(), + index_to_string: HashMap::new(), + } } // Produce the UID for a string - fn lookup_string(&mut self, s : String) -> usize { + fn lookup_string(&mut self, s: String) -> usize { match self.string_to_index.get(&s) { Some(n) => *n, None => { @@ -84,26 +103,61 @@ impl StringTable { self.string_to_index.insert(s.clone(), n); self.index_to_string.insert(n, s); n - }, + } } } // Identify the string corresponding to a UID - fn lookup_id(&self, n : usize) -> Option<String> { + fn lookup_id(&self, n: usize) -> Option<String> { self.index_to_string.get(&n).cloned() } } +// Maps label names to unique identifiers (numbered 0..n for each function) +// Also tracks the map from function names to their numbers +struct LabelSet { + count: usize, + string_to_index: HashMap<String, usize>, +} +impl LabelSet { + fn new() -> LabelSet { + // Label number 0 is reserved to be the "root" label in code generation + LabelSet { + count: 1, + string_to_index: HashMap::from([("<root>".to_string(), 0)]), + } + } + + // Inserts a string if it is not already contained in this set, if it is + // contained does nothing and returns the label back wrapped in an error, + // otherwise inserts and returns the new label's id wrapped in Ok + fn insert_new(&mut self, label: String) -> Result<usize, String> { + match self.string_to_index.get(&label) { + Some(_) => Err(label), + None => { + let uid = self.count; + self.count += 1; + self.string_to_index.insert(label, uid); + Ok(uid) + } + } + } +} + // Convert spans into uids in the String Table -fn intern_id(n : &Span, lex : &dyn NonStreamingLexer<DefaultLexerTypes<u32>>, - stringtab : &mut StringTable) -> usize { +fn intern_id( + n: &Span, + lex: &dyn NonStreamingLexer<DefaultLexerTypes<u32>>, + stringtab: &mut StringTable, +) -> usize { stringtab.lookup_string(lex.span_str(*n).to_string()) } fn intern_package_name( - n : &PackageName, lex : &dyn NonStreamingLexer<DefaultLexerTypes<u32>>, - stringtab : &mut StringTable) -> Vec<usize> { - + 
n: &PackageName, + lex: &dyn NonStreamingLexer<DefaultLexerTypes<u32>>, + stringtab: &mut StringTable, +) -> Vec<usize> { let mut res = vec![]; for s in n { res.push(intern_id(s, lex, stringtab)); @@ -127,30 +181,37 @@ pub enum ErrorMessage { // Printing for error messages impl fmt::Display for ErrorMessage { - fn fmt(&self, f : &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { ErrorMessage::NotImplemented(loc, msg) => { write!(f, "Error ({}). Feature not implemented : {}", loc, msg) - }, + } ErrorMessage::IOError(msg) => { write!(f, "Error: {}", msg) - }, + } ErrorMessage::SyntaxError(msg) => { write!(f, "Syntax Error : {}", msg) - }, + } ErrorMessage::SemanticError(loc, msg) => { write!(f, "Error ({}). {}", loc, msg) - }, + } ErrorMessage::UndefinedVariable(loc, name) => { write!(f, "Error ({}). Undefined variable '{}'", loc, name) - }, + } ErrorMessage::KindError(loc, expected, actual) => { - write!(f, "Error ({}). Expected {} but found {}", loc, expected, actual) - }, + write!( + f, + "Error ({}). Expected {} but found {}", + loc, expected, actual + ) + } ErrorMessage::TypeError(loc, expected, actual) => { - write!(f, "Error ({}). Type error, expected {} but found {}", - loc, expected, actual) - }, + write!( + f, + "Error ({}). 
Type error, expected {} but found {}", + loc, expected, actual + ) + } } } } @@ -158,25 +219,30 @@ impl fmt::Display for ErrorMessage { pub type ErrorMessages = LinkedList<ErrorMessage>; // Constructors and combiners for error messages -fn singleton_error(err : ErrorMessage) -> ErrorMessages { +fn singleton_error(err: ErrorMessage) -> ErrorMessages { LinkedList::from([err]) } -fn append_errors2<A, B>(x : Result<A, ErrorMessages>, y : Result<B, ErrorMessages>) - -> Result<(A, B), ErrorMessages> { +fn append_errors2<A, B>( + x: Result<A, ErrorMessages>, + y: Result<B, ErrorMessages>, +) -> Result<(A, B), ErrorMessages> { match (x, y) { - (Err(mut err_x), Err(mut err_y)) => { + (Err(mut err_x), Err(mut err_y)) => { err_x.append(&mut err_y); Err(err_x) - }, + } (Err(err_x), _) => Err(err_x), (_, Err(err_y)) => Err(err_y), (Ok(x), Ok(y)) => Ok((x, y)), } } -fn append_errors3<A, B, C>(x : Result<A, ErrorMessages>, y : Result<B, ErrorMessages>, - z : Result<C, ErrorMessages>) -> Result<(A, B, C), ErrorMessages> { +fn append_errors3<A, B, C>( + x: Result<A, ErrorMessages>, + y: Result<B, ErrorMessages>, + z: Result<C, ErrorMessages>, +) -> Result<(A, B, C), ErrorMessages> { let xy = append_errors2(x, y); let xyz = append_errors2(xy, z); @@ -194,12 +260,15 @@ pub type Prg = (TypeSolver, Vec<Function>); // type information that is needed for type checking code that uses this function. 
// In particular, the return type accounts for the type of inout arguments pub struct Function { - pub name : String, - pub num_dyn_consts : usize, - pub num_type_args : usize, - pub arguments : Vec<(usize, Type)>, - pub return_type : Type, - pub body : Stmt, + pub name: String, + pub num_dyn_consts: usize, + pub num_type_args: usize, + pub arguments: Vec<(usize, Type)>, + pub return_type: Type, + pub num_labels: usize, + pub label_map: HashMap<String, usize>, + pub body: Stmt, + pub entry: bool, } // Normalized statements differ in a number of ways from the form from the parser: @@ -214,16 +283,38 @@ pub struct Function { // Additional notes // - Returns in this AST include the inout values pub enum Stmt { - AssignStmt { var : usize, val : Expr }, - IfStmt { cond : Expr, thn : Box<Stmt>, els : Option<Box<Stmt>> }, + AssignStmt { + var: usize, + val: Expr, + }, + IfStmt { + cond: Expr, + thn: Box<Stmt>, + els: Option<Box<Stmt>>, + }, // TODO: Not implemented //MatchStmt { expr : Expr, cases : Vec<usize>, body : Vec<Stmt> }, - LoopStmt { cond : Expr, update : Option<Box<Stmt>>, body : Box<Stmt> }, - ReturnStmt { expr : Expr }, - BreakStmt {}, + LoopStmt { + cond: Expr, + update: Option<Box<Stmt>>, + body: Box<Stmt>, + }, + ReturnStmt { + expr: Expr, + }, + BreakStmt {}, ContinueStmt {}, - BlockStmt { body : Vec<Stmt> }, - ExprStmt { expr : Expr }, + BlockStmt { + body: Vec<Stmt>, + label_last: bool, + }, + ExprStmt { + expr: Expr, + }, + LabeledStmt { + label: usize, + stmt: Box<Stmt>, + }, } // Normalized expressions differ in a number of ways: @@ -241,73 +332,147 @@ pub enum Stmt { // 7. 
There's an additional Zero which is used to construct the default of a type #[derive(Clone, Debug)] pub enum Expr { - Variable { var : usize, typ : Type }, - DynConst { val : DynConst, typ : Type }, - Read { index : Vec<Index>, val : Box<Expr>, typ : Type }, - Write { index : Vec<Index>, val : Box<Expr>, rep : Box<Expr>, typ : Type }, - Tuple { vals : Vec<Expr>, typ : Type }, - Union { tag : usize, val : Box<Expr>, typ : Type }, - Constant { val : Constant, typ : Type }, - Zero { typ : Type }, - UnaryExp { op : UnaryOp, expr : Box<Expr>, typ : Type }, - BinaryExp { op : BinaryOp, lhs : Box<Expr>, rhs : Box<Expr>, typ : Type }, - CastExpr { expr : Box<Expr>, typ : Type }, - CondExpr { cond : Box<Expr>, thn : Box<Expr>, els : Box<Expr>, typ : Type }, - CallExpr { func : usize, ty_args : Vec<Type>, dyn_consts : Vec<DynConst>, - args : Vec<Either<Expr, usize>>, typ : Type }, - Intrinsic { id : intrinsics::IntrinsicIdentity, - ty_args : Vec<Type>, args : Vec<Expr>, typ : Type }, + Variable { + var: usize, + typ: Type, + }, + DynConst { + val: DynConst, + typ: Type, + }, + Read { + index: Vec<Index>, + val: Box<Expr>, + typ: Type, + }, + Write { + index: Vec<Index>, + val: Box<Expr>, + rep: Box<Expr>, + typ: Type, + }, + Tuple { + vals: Vec<Expr>, + typ: Type, + }, + Union { + tag: usize, + val: Box<Expr>, + typ: Type, + }, + Constant { + val: Constant, + typ: Type, + }, + Zero { + typ: Type, + }, + UnaryExp { + op: UnaryOp, + expr: Box<Expr>, + typ: Type, + }, + BinaryExp { + op: BinaryOp, + lhs: Box<Expr>, + rhs: Box<Expr>, + typ: Type, + }, + CastExpr { + expr: Box<Expr>, + typ: Type, + }, + CondExpr { + cond: Box<Expr>, + thn: Box<Expr>, + els: Box<Expr>, + typ: Type, + }, + CallExpr { + func: usize, + ty_args: Vec<Type>, + dyn_consts: Vec<DynConst>, + args: Vec<Either<Expr, usize>>, + typ: Type, + }, + Intrinsic { + id: intrinsics::IntrinsicIdentity, + ty_args: Vec<Type>, + args: Vec<Expr>, + typ: Type, + }, } #[derive(Clone, Debug)] -pub enum Index { Field(usize), 
Variant(usize), Array(Vec<Expr>) } +pub enum Index { + Field(usize), + Variant(usize), + Array(Vec<Expr>), +} #[derive(Clone, Debug)] -pub enum UnaryOp { Negation, BitwiseNot } +pub enum UnaryOp { + Negation, + BitwiseNot, +} #[derive(Clone, Debug)] -pub enum BinaryOp { Add, Sub, Mul, Div, Mod, - BitAnd, BitOr, Xor, - Lt, Le, Gt, Ge, Eq, Neq, - LShift, RShift } +pub enum BinaryOp { + Add, + Sub, + Mul, + Div, + Mod, + BitAnd, + BitOr, + Xor, + Lt, + Le, + Gt, + Ge, + Eq, + Neq, + LShift, + RShift, +} -fn convert_assign_op(op : parser::AssignOp) -> BinaryOp { +fn convert_assign_op(op: parser::AssignOp) -> BinaryOp { match op { - AssignOp::None => panic!("Do not call convert_assign_op on AssignOp::None"), - AssignOp::Add => BinaryOp::Add, - AssignOp::Sub => BinaryOp::Sub, - AssignOp::Mul => BinaryOp::Mul, - AssignOp::Div => BinaryOp::Div, - AssignOp::Mod => BinaryOp::Mod, + AssignOp::None => panic!("Do not call convert_assign_op on AssignOp::None"), + AssignOp::Add => BinaryOp::Add, + AssignOp::Sub => BinaryOp::Sub, + AssignOp::Mul => BinaryOp::Mul, + AssignOp::Div => BinaryOp::Div, + AssignOp::Mod => BinaryOp::Mod, AssignOp::BitAnd => BinaryOp::BitAnd, - AssignOp::BitOr => BinaryOp::BitOr, - AssignOp::Xor => BinaryOp::Xor, + AssignOp::BitOr => BinaryOp::BitOr, + AssignOp::Xor => BinaryOp::Xor, AssignOp::LShift => BinaryOp::LShift, AssignOp::RShift => BinaryOp::RShift, AssignOp::LogAnd => panic!("Do not call convert_assign_op on AssignOp::LogAnd"), - AssignOp::LogOr => panic!("Do not call convert_assign_op on AssignOp::LogOr"), + AssignOp::LogOr => panic!("Do not call convert_assign_op on AssignOp::LogOr"), } } -fn convert_binary_op(op : parser::BinaryOp) -> BinaryOp { +fn convert_binary_op(op: parser::BinaryOp) -> BinaryOp { match op { - parser::BinaryOp::Add => BinaryOp::Add, - parser::BinaryOp::Sub => BinaryOp::Sub, - parser::BinaryOp::Mul => BinaryOp::Mul, - parser::BinaryOp::Div => BinaryOp::Div, - parser::BinaryOp::Mod => BinaryOp::Mod, + parser::BinaryOp::Add 
=> BinaryOp::Add, + parser::BinaryOp::Sub => BinaryOp::Sub, + parser::BinaryOp::Mul => BinaryOp::Mul, + parser::BinaryOp::Div => BinaryOp::Div, + parser::BinaryOp::Mod => BinaryOp::Mod, parser::BinaryOp::BitAnd => BinaryOp::BitAnd, - parser::BinaryOp::BitOr => BinaryOp::BitOr, - parser::BinaryOp::Xor => BinaryOp::Xor, - parser::BinaryOp::Lt => BinaryOp::Lt, - parser::BinaryOp::Le => BinaryOp::Le, - parser::BinaryOp::Gt => BinaryOp::Gt, - parser::BinaryOp::Ge => BinaryOp::Ge, - parser::BinaryOp::Eq => BinaryOp::Eq, - parser::BinaryOp::Neq => BinaryOp::Neq, + parser::BinaryOp::BitOr => BinaryOp::BitOr, + parser::BinaryOp::Xor => BinaryOp::Xor, + parser::BinaryOp::Lt => BinaryOp::Lt, + parser::BinaryOp::Le => BinaryOp::Le, + parser::BinaryOp::Gt => BinaryOp::Gt, + parser::BinaryOp::Ge => BinaryOp::Ge, + parser::BinaryOp::Eq => BinaryOp::Eq, + parser::BinaryOp::Neq => BinaryOp::Neq, parser::BinaryOp::LShift => BinaryOp::LShift, parser::BinaryOp::RShift => BinaryOp::RShift, parser::BinaryOp::LogAnd => panic!("Do not call convert_binary_op on BinaryOp::LogAnd"), - parser::BinaryOp::LogOr => panic!("Do not call convert_binary_op on BinaryOp::LogOr"), + parser::BinaryOp::LogOr => panic!("Do not call convert_binary_op on BinaryOp::LogOr"), } } @@ -315,31 +480,71 @@ fn convert_binary_op(op : parser::BinaryOp) -> BinaryOp { impl Expr { pub fn get_type(&self) -> Type { match self { - Expr::Variable { var : _, typ } | Expr::DynConst { val : _, typ } - | Expr::Read { index : _, val : _, typ } - | Expr::Write { index : _, val : _, rep : _, typ } - | Expr::Tuple { vals : _, typ } | Expr::Union { tag : _, val : _, typ } - | Expr::Constant { val : _, typ } | Expr::UnaryExp { op : _, expr : _, typ } - | Expr::BinaryExp { op : _, lhs : _, rhs : _, typ } - | Expr::CastExpr { expr : _, typ } - | Expr::CondExpr { cond : _, thn : _, els : _, typ } - | Expr::CallExpr { func : _, ty_args : _, dyn_consts : _, args : _, typ } - | Expr::Intrinsic { id : _, ty_args : _, args : _, typ } - | 
Expr::Zero { typ } - => *typ + Expr::Variable { var: _, typ } + | Expr::DynConst { val: _, typ } + | Expr::Read { + index: _, + val: _, + typ, + } + | Expr::Write { + index: _, + val: _, + rep: _, + typ, + } + | Expr::Tuple { vals: _, typ } + | Expr::Union { + tag: _, + val: _, + typ, + } + | Expr::Constant { val: _, typ } + | Expr::UnaryExp { + op: _, + expr: _, + typ, + } + | Expr::BinaryExp { + op: _, + lhs: _, + rhs: _, + typ, + } + | Expr::CastExpr { expr: _, typ } + | Expr::CondExpr { + cond: _, + thn: _, + els: _, + typ, + } + | Expr::CallExpr { + func: _, + ty_args: _, + dyn_consts: _, + args: _, + typ, + } + | Expr::Intrinsic { + id: _, + ty_args: _, + args: _, + typ, + } + | Expr::Zero { typ } => *typ, } } } // Helper function to unparse types -fn unparse_type(types : &TypeSolver, typ : Type, stringtab : &StringTable) -> String { +fn unparse_type(types: &TypeSolver, typ: Type, stringtab: &StringTable) -> String { types.to_string(typ, &|n| stringtab.lookup_id(n).unwrap()) } // Start of parsing and semantic analysis // Loads the contents of the given file name, parses, and performs semantic analysis -pub fn parse_and_analyze(src_file : String) -> Result<Prg, ErrorMessages> { +pub fn parse_and_analyze(src_file: String) -> Result<Prg, ErrorMessages> { if let Ok(mut file) = File::open(src_file) { let mut contents = String::new(); if let Ok(_) = file.read_to_string(&mut contents) { @@ -349,32 +554,38 @@ pub fn parse_and_analyze(src_file : String) -> Result<Prg, ErrorMessages> { if errs.is_empty() { match res { - None => Err(singleton_error( - ErrorMessage::SyntaxError("Parser did not return".to_string()))), - Some(Err(())) => Err(singleton_error( - ErrorMessage::SyntaxError("Unspecified parse error".to_string()))), + None => Err(singleton_error(ErrorMessage::SyntaxError( + "Parser did not return".to_string(), + ))), + Some(Err(())) => Err(singleton_error(ErrorMessage::SyntaxError( + "Unspecified parse error".to_string(), + ))), Some(Ok(r)) => analyze_program(r, 
&lexer), } } else { - Err(errs.iter() - .map(|e| ErrorMessage::SyntaxError( - e.pp(&lexer, &parser::token_epp))) - .collect()) + Err(errs + .iter() + .map(|e| ErrorMessage::SyntaxError(e.pp(&lexer, &parser::token_epp))) + .collect()) } } else { - Err(singleton_error(ErrorMessage::IOError("Unable to read input file".to_string()))) + Err(singleton_error(ErrorMessage::IOError( + "Unable to read input file".to_string(), + ))) } } else { - Err(singleton_error(ErrorMessage::IOError("Unable to open input file".to_string()))) + Err(singleton_error(ErrorMessage::IOError( + "Unable to open input file".to_string(), + ))) } } fn analyze_program( - prg : parser::Prg, lexer : &dyn NonStreamingLexer<DefaultLexerTypes<u32>>) - -> Result<Prg, ErrorMessages> { - + prg: parser::Prg, + lexer: &dyn NonStreamingLexer<DefaultLexerTypes<u32>>, +) -> Result<Prg, ErrorMessages> { let mut stringtab = StringTable::new(); - let mut env : Env<usize, Entity> = Env::new(); + let mut env: Env<usize, Entity> = Env::new(); let mut types = TypeSolver::new(); let mut res = vec![]; @@ -383,13 +594,16 @@ fn analyze_program( for top in prg { match top { - parser::Top::Import { span, name: _ } => { - Err(singleton_error( - ErrorMessage::NotImplemented( - span_to_loc(span, lexer), - "imports".to_string())))? 
- }, - parser::Top::TypeDecl { span : _, public: _, name, ty_vars, body } => { + parser::Top::Import { span, name: _ } => Err(singleton_error( + ErrorMessage::NotImplemented(span_to_loc(span, lexer), "imports".to_string()), + ))?, + parser::Top::TypeDecl { + span: _, + public: _, + name, + ty_vars, + body, + } => { // TODO: Handle public env.open_scope(); // Create a new scope for the body (for type variables) @@ -401,7 +615,12 @@ fn analyze_program( let mut dyn_const_names = vec![]; - for TypeVar { span : _, name, kind } in ty_vars { + for TypeVar { + span: _, + name, + kind, + } in ty_vars + { let nm = intern_id(&name, lexer, &mut stringtab); kinds.push(kind); @@ -409,87 +628,166 @@ fn analyze_program( Kind::USize => { dyn_const_names.push(nm); num_dyn_const += 1; - }, + } _ => { let typ = types.new_type_var(nm, num_type, kind); - env.insert(nm, Entity::Type { type_args : vec![], value : typ }); + env.insert( + nm, + Entity::Type { + type_args: vec![], + value: typ, + }, + ); num_type += 1; - }, + } } } for (idx, nm) in dyn_const_names.into_iter().enumerate() { - env.insert(nm, Entity::DynConst { - value : DynConst::dynamic_constant(idx, num_dyn_const) - }); + env.insert( + nm, + Entity::DynConst { + value: DynConst::dynamic_constant(idx, num_dyn_const), + }, + ); } let nm = intern_id(&name, lexer, &mut stringtab); - let typ = process_type_def(body, nm, num_dyn_const, lexer, - &mut stringtab, &mut env, &mut types)?; - + let typ = process_type_def( + body, + nm, + num_dyn_const, + lexer, + &mut stringtab, + &mut env, + &mut types, + )?; + // Insert information into the global scope env.close_scope(); - env.insert(nm, Entity::Type { type_args : kinds, value : typ }); - }, - parser::Top::ConstDecl { span, public: _, name, ty, body } => { + env.insert( + nm, + Entity::Type { + type_args: kinds, + value: typ, + }, + ); + } + parser::Top::ConstDecl { + span, + public: _, + name, + ty, + body, + } => { // TODO: Handle public let nm = intern_id(&name, lexer, &mut 
stringtab); - let val = process_expr_as_constant(body, 0, lexer, &mut stringtab, - &mut env, &mut types)?; + let val = + process_expr_as_constant(body, 0, lexer, &mut stringtab, &mut env, &mut types)?; // Check type (if specified) if ty.is_some() { - let ty = process_type(ty.unwrap(), 0, lexer, &mut stringtab, - &env, &mut types, true)?; + let ty = process_type( + ty.unwrap(), + 0, + lexer, + &mut stringtab, + &env, + &mut types, + true, + )?; if !types.unify(ty, val.1) { - return Err(singleton_error( - ErrorMessage::TypeError( - span_to_loc(span, lexer), - unparse_type(&types, ty, &stringtab), - unparse_type(&types, val.1, &stringtab)))); + return Err(singleton_error(ErrorMessage::TypeError( + span_to_loc(span, lexer), + unparse_type(&types, ty, &stringtab), + unparse_type(&types, val.1, &stringtab), + ))); } } - env.insert(nm, Entity::Constant { value : val }); - }, - parser::Top::FuncDecl { span, public: _, attr: _, name, ty_vars, args, ty, body } => { - // TODO: Handle public, attributes + env.insert(nm, Entity::Constant { value: val }); + } + parser::Top::FuncDecl { + span, + public: _, + attr, + name, + ty_vars, + args, + ty, + body, + } => { + // TODO: Handle public env.open_scope(); // Open a new scope immediately to put type variables in + let attributes = match attr { + None => vec![], + Some(attr) => { + let attr_str = lexer.span_str(attr); + attr_str[2..attr_str.len() - 1] + .split(',') + .map(|s| s.trim()) + .collect::<Vec<_>>() + } + }; + + // Determine whether this function is marked as an entry point + let entry = attributes.contains(&"entry"); + // Process the type variables and add them into the environment let mut num_dyn_const = 0; - let mut num_type_var = 0; + let mut num_type_var = 0; let mut type_kinds = vec![]; let mut dyn_const_names = vec![]; - for TypeVar { span : _, name, kind } in ty_vars { + for TypeVar { + span: _, + name, + kind, + } in ty_vars + { type_kinds.push(kind); let nm = intern_id(&name, lexer, &mut stringtab); match kind { 
Kind::USize => { dyn_const_names.push(nm); num_dyn_const += 1; - }, + } _ => { let typ = types.new_type_var(nm, num_type_var, kind); - env.insert(nm, Entity::Type { type_args : vec![], value : typ }); + env.insert( + nm, + Entity::Type { + type_args: vec![], + value: typ, + }, + ); num_type_var += 1; - }, + } } } for (idx, nm) in dyn_const_names.into_iter().enumerate() { - env.insert(nm, Entity::DynConst { - value : DynConst::dynamic_constant(idx, num_dyn_const) - }); + env.insert( + nm, + Entity::DynConst { + value: DynConst::dynamic_constant(idx, num_dyn_const), + }, + ); + } + + if num_type_var > 0 && entry { + return Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + "Function with 'entry' attribute cannot have type variables".to_string(), + ))); } // Process arguments - let mut arg_types : Vec<(usize, Type, bool)> = vec![]; // list of name, type, and - // whether is inout + let mut arg_types: Vec<(usize, Type, bool)> = vec![]; // list of name, type, and + // whether is inout let mut inout_args = vec![]; // list of indices into args - + // A collection of errors we encounter processing the arguments let mut errors = LinkedList::new(); @@ -507,40 +805,55 @@ fn analyze_program( } let nm = intern_package_name(&name, lexer, &mut stringtab)[0]; - match process_type(typ, num_dyn_const, - lexer, &mut stringtab, &env, - &mut types, true) { + match process_type( + typ, + num_dyn_const, + lexer, + &mut stringtab, + &env, + &mut types, + true, + ) { Ok(ty) => { if inout.is_some() { inout_args.push(arg_types.len()); } arg_types.push((nm, ty, inout.is_some())); - }, - Err(mut errs) => { errors.append(&mut errs); }, + } + Err(mut errs) => { + errors.append(&mut errs); + } } - }, + } _ => { - errors.push_back( - ErrorMessage::NotImplemented( - span_to_loc(span, lexer), - "patterns in arguments".to_string())); - }, + errors.push_back(ErrorMessage::NotImplemented( + span_to_loc(span, lexer), + "patterns in arguments".to_string(), + )); + } } } let 
return_type = { // A missing return type is implicitly void let ty = ty.unwrap_or(parser::Type::PrimType { - span : span, - typ : parser::Primitive::Void }); - match process_type(ty, num_dyn_const, lexer, - &mut stringtab, &env, &mut types, - true) { + span: span, + typ: parser::Primitive::Void, + }); + match process_type( + ty, + num_dyn_const, + lexer, + &mut stringtab, + &env, + &mut types, + true, + ) { Ok(ty) => ty, Err(mut errs) => { errors.append(&mut errs); types.new_primitive(types::Primitive::Unit) - }, + } } }; @@ -553,10 +866,9 @@ fn analyze_program( for arg_idx in &inout_args { inout_types.push(arg_types[*arg_idx].1.clone()); } - + let inout_tuple = types.new_tuple(inout_types.clone()); - let pure_return_type - = types.new_tuple(vec![return_type, inout_tuple]); + let pure_return_type = types.new_tuple(vec![return_type, inout_tuple]); // Add the arguments to the environment and assign each a unique variable number // Also track the variable numbers of the inout arguments for generating returns @@ -564,21 +876,39 @@ fn analyze_program( let mut inout_variables = vec![]; for (nm, ty, is_inout) in arg_types.iter() { let variable = env.uniq(); - env.insert(*nm, - Entity::Variable { - variable : variable, - typ : *ty, - is_const : false }); + env.insert( + *nm, + Entity::Variable { + variable: variable, + typ: *ty, + is_const: false, + }, + ); arg_variables.push(variable); - if *is_inout { inout_variables.push(variable); } + if *is_inout { + inout_variables.push(variable); + } } + // Create a set of the labels in this function + let mut labels = LabelSet::new(); + // Finally, we have a properly built environment and we can // start processing the body - let (mut body, end_reachable) - = process_stmt(body, num_dyn_const, lexer, &mut stringtab, &mut env, &mut types, - false, return_type, &inout_variables, &inout_types)?; + let (mut body, end_reachable) = process_stmt( + body, + num_dyn_const, + lexer, + &mut stringtab, + &mut env, + &mut types, + false, + 
return_type, + &inout_variables, + &inout_types, + &mut labels, + )?; if end_reachable { // The end of a function being reachable (i.e. there is some possible path @@ -587,19 +917,25 @@ fn analyze_program( if types.unify_void(return_type) { // Insert return at the end body = Stmt::BlockStmt { - body : vec![ - body, - generate_return(Expr::Tuple { - vals : vec![], - typ : types.new_primitive(types::Primitive::Unit) - }, - &inout_variables, &inout_types, - &mut types)] }; + body: vec![ + body, + generate_return( + Expr::Tuple { + vals: vec![], + typ: types.new_primitive(types::Primitive::Unit), + }, + &inout_variables, + &inout_types, + &mut types, + ), + ], + label_last: false, + }; } else { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - "May reach end of control without return".to_string())))? + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + "May reach end of control without return".to_string(), + )))? } } @@ -607,30 +943,45 @@ fn analyze_program( // Add the function to the global environment let nm = intern_id(&name, lexer, &mut stringtab); - env.insert(nm, Entity::Function { - index : res.len(), - type_args : type_kinds, - args : arg_types.iter() - .map(|(_, ty, is)| (*ty, *is)) - .collect::<Vec<_>>(), - return_type : return_type }); - + env.insert( + nm, + Entity::Function { + index: res.len(), + type_args: type_kinds, + args: arg_types + .iter() + .map(|(_, ty, is)| (*ty, *is)) + .collect::<Vec<_>>(), + return_type: return_type, + }, + ); + // Add the function definition to the list of functions res.push(Function { - name : lexer.span_str(name).to_string(), - num_dyn_consts : num_dyn_const, - num_type_args : num_type_var, - arguments : arg_types.iter().zip(arg_variables.iter()) - .map(|(v, n)| (*n, v.1)).collect::<Vec<_>>(), - return_type : pure_return_type, - body : body }); - }, - parser::Top::ModDecl { span, public: _, name: _, body: _ } => { - Err(singleton_error( - 
ErrorMessage::NotImplemented( - span_to_loc(span, lexer), - "modules".to_string())))? - }, + name: lexer.span_str(name).to_string(), + num_dyn_consts: num_dyn_const, + num_type_args: num_type_var, + arguments: arg_types + .iter() + .zip(arg_variables.iter()) + .map(|(v, n)| (*n, v.1)) + .collect::<Vec<_>>(), + return_type: pure_return_type, + num_labels: labels.count, + label_map: labels.string_to_index, + body: body, + entry: entry, + }); + } + parser::Top::ModDecl { + span, + public: _, + name: _, + body: _, + } => Err(singleton_error(ErrorMessage::NotImplemented( + span_to_loc(span, lexer), + "modules".to_string(), + )))?, } // After each top level construct, verify that our types are solved @@ -638,54 +989,69 @@ fn analyze_program( match types.solve() { Ok(()) => (), Err((kind, loc)) => { - return Err(singleton_error( - ErrorMessage::SemanticError( - loc, - format!("unconstrained type, not constrained beyond {}", - kind.to_string())))); - }, + return Err(singleton_error(ErrorMessage::SemanticError( + loc, + format!( + "unconstrained type, not constrained beyond {}", + kind.to_string() + ), + ))); + } } } Ok((types, res)) } -fn process_type_def(def : parser::TyDef, name : usize, num_dyn_const : usize, - lexer : &dyn NonStreamingLexer<DefaultLexerTypes<u32>>, - stringtab : &mut StringTable, env : &mut Env<usize, Entity>, - types : &mut TypeSolver) -> Result<Type, ErrorMessages> { - +fn process_type_def( + def: parser::TyDef, + name: usize, + num_dyn_const: usize, + lexer: &dyn NonStreamingLexer<DefaultLexerTypes<u32>>, + stringtab: &mut StringTable, + env: &mut Env<usize, Entity>, + types: &mut TypeSolver, +) -> Result<Type, ErrorMessages> { match def { parser::TyDef::TypeAlias { span: _, body } => { process_type(body, num_dyn_const, lexer, stringtab, env, types, false) - }, - parser::TyDef::Struct { span : _, public: _, fields } => { + } + parser::TyDef::Struct { + span: _, + public: _, + fields, + } => { // TODO: handle public correctly (and field public) - + 
let mut field_list = vec![]; let mut field_map = HashMap::new(); let mut errors = LinkedList::new(); - for ObjField { span, public: _, name, typ } in fields { + for ObjField { + span, + public: _, + name, + typ, + } in fields + { let nm = intern_id(&name, lexer, stringtab); match typ { None => { - errors.push_back( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - "struct fields must have a type".to_string())); - }, + errors.push_back(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + "struct fields must have a type".to_string(), + )); + } Some(ty) => { - match process_type(ty, num_dyn_const, lexer, stringtab, - env, types, false) { + match process_type(ty, num_dyn_const, lexer, stringtab, env, types, false) { Ok(typ) => { let idx = field_list.len(); field_list.push(typ); field_map.insert(nm, idx); - }, + } Err(mut errs) => errors.append(&mut errs), } - }, + } } } @@ -694,14 +1060,24 @@ fn process_type_def(def : parser::TyDef, name : usize, num_dyn_const : usize, } else { Ok(types.new_struct(name, env.uniq(), field_list, field_map)) } - }, - parser::TyDef::Union { span : _, public: _, fields } => { + } + parser::TyDef::Union { + span: _, + public: _, + fields, + } => { // TODO: handle public correctly let mut constr_list = vec![]; let mut constr_map = HashMap::new(); let mut errors = LinkedList::new(); - for ObjField { span, public, name, typ } in fields { + for ObjField { + span, + public, + name, + typ, + } in fields + { if public { errors.push_back( ErrorMessage::SemanticError( @@ -714,56 +1090,65 @@ fn process_type_def(def : parser::TyDef, name : usize, num_dyn_const : usize, let idx = constr_list.len(); constr_list.push(types.new_primitive(types::Primitive::Unit)); constr_map.insert(nm, idx); - }, + } Some(ty) => { - match process_type(ty, num_dyn_const, lexer, - stringtab, env, types, false) { + match process_type( + ty, + num_dyn_const, + lexer, + stringtab, + env, + types, + false, + ) { Ok(typ) => { let idx = constr_list.len(); 
constr_list.push(typ); constr_map.insert(nm, idx); - }, + } Err(mut errs) => errors.append(&mut errs), } - }, + } } } } - if !errors.is_empty() { Err(errors) } - else { + if !errors.is_empty() { + Err(errors) + } else { Ok(types.new_union(name, env.uniq(), constr_list, constr_map)) } - }, + } } } -fn process_type(typ : parser::Type, num_dyn_const : usize, - lexer : &dyn NonStreamingLexer<DefaultLexerTypes<u32>>, - stringtab : &mut StringTable, env : &Env<usize, Entity>, - types : &mut TypeSolver, can_infer : bool) -> Result<Type, ErrorMessages> { - +fn process_type( + typ: parser::Type, + num_dyn_const: usize, + lexer: &dyn NonStreamingLexer<DefaultLexerTypes<u32>>, + stringtab: &mut StringTable, + env: &Env<usize, Entity>, + types: &mut TypeSolver, + can_infer: bool, +) -> Result<Type, ErrorMessages> { match typ { - parser::Type::PrimType { span : _, typ } => { - Ok(types.new_primitive(convert_primitive(typ))) - }, + parser::Type::PrimType { span: _, typ } => Ok(types.new_primitive(convert_primitive(typ))), parser::Type::WildType { span } => { if can_infer { Ok(types.new_of_kind(parser::Kind::Type, span_to_loc(span, lexer))) } else { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - "cannot infer type in this context".to_string()))) + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + "cannot infer type in this context".to_string(), + ))) } - }, - parser::Type::TupleType { span : _, tys } => { + } + parser::Type::TupleType { span: _, tys } => { let mut fields = vec![]; let mut errors = LinkedList::new(); for ty in tys { - match process_type(ty, num_dyn_const, lexer, stringtab, env, - types, can_infer) { + match process_type(ty, num_dyn_const, lexer, stringtab, env, types, can_infer) { Ok(t) => fields.push(t), Err(mut errs) => errors.append(&mut errs), } @@ -778,40 +1163,44 @@ fn process_type(typ : parser::Type, num_dyn_const : usize, Ok(types.new_tuple(fields)) } } - }, + } parser::Type::NamedType { 
span, name, args } => { if name.len() != 1 { - Err(singleton_error( - ErrorMessage::NotImplemented( - span_to_loc(span, lexer), - "packages".to_string()))) + Err(singleton_error(ErrorMessage::NotImplemented( + span_to_loc(span, lexer), + "packages".to_string(), + ))) } else { let id = intern_package_name(&name, lexer, stringtab); let nm = id[0]; match env.lookup(&nm) { Some(Entity::Type { type_args, value }) => { if args.is_none() && type_args.len() != 0 && !can_infer { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Expected {} type arguments, provided none", - type_args.len()))))? + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Expected {} type arguments, provided none", + type_args.len() + ), + )))? } // If we did not provide type arguments (but we can // infer types) then we make all of the type arguments // wild cards - let args = - args.unwrap_or_else( - || vec![ parser::TypeExpr::WildcardType { span : span } - ; type_args.len() ]); + let args = args.unwrap_or_else(|| { + vec![parser::TypeExpr::WildcardType { span: span }; type_args.len()] + }); if args.len() != type_args.len() { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Expected {} type arguments, provided {}", - type_args.len(), args.len()))))? + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Expected {} type arguments, provided {}", + type_args.len(), + args.len() + ), + )))? 
} // Process the type arguments, ensuring they match the given kinds @@ -824,72 +1213,95 @@ fn process_type(typ : parser::Type, num_dyn_const : usize, match kind { parser::Kind::USize => { match process_type_expr_as_expr( - arg, num_dyn_const, lexer, stringtab, env, types) { + arg, + num_dyn_const, + lexer, + stringtab, + env, + types, + ) { Err(mut errs) => errors.append(&mut errs), Ok(val) => dynamic_constants.push(val), } - }, + } _ => { match process_type_expr_as_type( - arg, num_dyn_const, lexer, stringtab, - env, types, can_infer) { + arg, + num_dyn_const, + lexer, + stringtab, + env, + types, + can_infer, + ) { Err(mut errs) => errors.append(&mut errs), Ok(typ) => { if types.unify_kind(typ, *kind) { type_vars.push(typ); } else { - errors.push_back( - ErrorMessage::KindError( - span_to_loc(arg_span, lexer), - kind.to_string(), - unparse_type(types, typ, stringtab))); + errors.push_back(ErrorMessage::KindError( + span_to_loc(arg_span, lexer), + kind.to_string(), + unparse_type(types, typ, stringtab), + )); } - }, + } } - }, + } } } - if !errors.is_empty() { Err(errors)? } + if !errors.is_empty() { + Err(errors)? 
+ } if type_vars.len() == 0 && dynamic_constants.len() == 0 { Ok(*value) } else { - if let Some(res) - = types.instantiate(*value, &type_vars, &dynamic_constants) { + if let Some(res) = + types.instantiate(*value, &type_vars, &dynamic_constants) + { Ok(res) } else { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - "Failure in variable substitution".to_string()))) + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + "Failure in variable substitution".to_string(), + ))) } } - }, - Some(_) => - Err(singleton_error( - ErrorMessage::KindError( - span_to_loc(span, lexer), - "type".to_string(), - "value".to_string()))), - None => - Err(singleton_error( - ErrorMessage::UndefinedVariable( - span_to_loc(span, lexer), - stringtab.lookup_id(nm).unwrap()))), + } + Some(_) => Err(singleton_error(ErrorMessage::KindError( + span_to_loc(span, lexer), + "type".to_string(), + "value".to_string(), + ))), + None => Err(singleton_error(ErrorMessage::UndefinedVariable( + span_to_loc(span, lexer), + stringtab.lookup_id(nm).unwrap(), + ))), } } - }, - parser::Type::ArrayType { span : _, elem, dims } => { + } + parser::Type::ArrayType { + span: _, + elem, + dims, + } => { let mut dimensions = vec![]; let mut errors = LinkedList::new(); - let element = process_type(*elem, num_dyn_const, lexer, stringtab, - env, types, can_infer); + let element = process_type( + *elem, + num_dyn_const, + lexer, + stringtab, + env, + types, + can_infer, + ); for dim in dims { - match process_type_expr_as_expr(dim, num_dyn_const, lexer, - stringtab, env, types) { + match process_type_expr_as_expr(dim, num_dyn_const, lexer, stringtab, env, types) { Err(mut errs) => errors.append(&mut errs), Ok(ex) => dimensions.push(ex), } @@ -899,7 +1311,7 @@ fn process_type(typ : parser::Type, num_dyn_const : usize, Err(mut errs) => { errs.append(&mut errors); Err(errs) - }, + } Ok(element_type) => { if !errors.is_empty() { Err(errors) @@ -916,171 +1328,173 @@ fn 
process_type(typ : parser::Type, num_dyn_const : usize, } } } - }, + } } } -fn process_type_expr_as_expr(exp : parser::TypeExpr, num_dyn_const : usize, - lexer : &dyn NonStreamingLexer<DefaultLexerTypes<u32>>, - stringtab : &mut StringTable, - env : &Env<usize, Entity>, types : &mut TypeSolver) - -> Result<DynConst, ErrorMessages> { - +fn process_type_expr_as_expr( + exp: parser::TypeExpr, + num_dyn_const: usize, + lexer: &dyn NonStreamingLexer<DefaultLexerTypes<u32>>, + stringtab: &mut StringTable, + env: &Env<usize, Entity>, + types: &mut TypeSolver, +) -> Result<DynConst, ErrorMessages> { match exp { parser::TypeExpr::PrimType { span, .. } | parser::TypeExpr::WildcardType { span } | parser::TypeExpr::TupleType { span, .. } - | parser::TypeExpr::ArrayTypeExpr { span, .. } => - Err(singleton_error( - ErrorMessage::KindError( - span_to_loc(span, lexer), - "dynamic constant expression".to_string(), - "type".to_string()))), + | parser::TypeExpr::ArrayTypeExpr { span, .. } => { + Err(singleton_error(ErrorMessage::KindError( + span_to_loc(span, lexer), + "dynamic constant expression".to_string(), + "type".to_string(), + ))) + } parser::TypeExpr::NamedTypeExpr { span, name, args } => { if name.len() != 1 { - Err(singleton_error( - ErrorMessage::NotImplemented( - span_to_loc(span, lexer), - "packages".to_string()))) + Err(singleton_error(ErrorMessage::NotImplemented( + span_to_loc(span, lexer), + "packages".to_string(), + ))) } else { let id = intern_package_name(&name, lexer, stringtab); let nm = id[0]; match env.lookup(&nm) { Some(Entity::DynConst { value }) => { if args.is_some() { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("No type arguments exists on dynamic constants")))) + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!("No type arguments exists on dynamic constants"), + ))) } else { Ok(value.clone()) } + } + Some(Entity::Constant { value: (val, typ) }) => match val { + 
Literal::Integer(val) => Ok(DynConst::constant(*val as i64, num_dyn_const)), + _ => Err(singleton_error(ErrorMessage::TypeError( + span_to_loc(span, lexer), + "usize".to_string(), + unparse_type(types, *typ, stringtab), + ))), }, - Some(Entity::Constant { value : (val, typ) }) => { - match val { - Literal::Integer(val) => - Ok(DynConst::constant(*val as i64, num_dyn_const)), - _ => - Err(singleton_error( - ErrorMessage::TypeError( - span_to_loc(span, lexer), - "usize".to_string(), - unparse_type(types, *typ, stringtab)))), - } - }, - Some(Entity::Variable { .. }) => - Err(singleton_error( - ErrorMessage::KindError( - span_to_loc(span, lexer), - "dynamic constant expression".to_string(), - "runtime variable".to_string()))), - Some(Entity::Type { .. }) => - Err(singleton_error( - ErrorMessage::KindError( - span_to_loc(span, lexer), - "dynamic constant expression".to_string(), - "type".to_string()))), - Some(Entity::Function { .. }) => - Err(singleton_error( - ErrorMessage::KindError( - span_to_loc(span, lexer), - "dynamic constant expression".to_string(), - "function".to_string()))), - None => - Err(singleton_error( - ErrorMessage::UndefinedVariable( - span_to_loc(span, lexer), - stringtab.lookup_id(nm).unwrap()))), + Some(Entity::Variable { .. }) => Err(singleton_error(ErrorMessage::KindError( + span_to_loc(span, lexer), + "dynamic constant expression".to_string(), + "runtime variable".to_string(), + ))), + Some(Entity::Type { .. }) => Err(singleton_error(ErrorMessage::KindError( + span_to_loc(span, lexer), + "dynamic constant expression".to_string(), + "type".to_string(), + ))), + Some(Entity::Function { .. 
}) => Err(singleton_error(ErrorMessage::KindError( + span_to_loc(span, lexer), + "dynamic constant expression".to_string(), + "function".to_string(), + ))), + None => Err(singleton_error(ErrorMessage::UndefinedVariable( + span_to_loc(span, lexer), + stringtab.lookup_id(nm).unwrap(), + ))), } } - }, + } parser::TypeExpr::IntLiteral { span, base } => { - let res = i64::from_str_radix(lexer.span_str(span), base.base()); + let res = i64::from_str_radix(&base.string(lexer, span), base.base()); assert!(res.is_ok(), "Internal Error: Int literal is not an integer"); Ok(DynConst::constant(res.unwrap(), num_dyn_const)) - }, - parser::TypeExpr::Negative { span : _, expr } => - Ok(DynConst::negate( - &process_type_expr_as_expr(*expr, num_dyn_const, lexer, - stringtab, env, types)?)), - parser::TypeExpr::Add { span : _, lhs, rhs } => { - let lhs_res = process_type_expr_as_expr(*lhs, num_dyn_const, lexer, - stringtab, env, types)?; - let rhs_res = process_type_expr_as_expr(*rhs, num_dyn_const, lexer, - stringtab, env, types)?; + } + parser::TypeExpr::Negative { span: _, expr } => Ok(DynConst::negate( + &process_type_expr_as_expr(*expr, num_dyn_const, lexer, stringtab, env, types)?, + )), + parser::TypeExpr::Add { span: _, lhs, rhs } => { + let lhs_res = + process_type_expr_as_expr(*lhs, num_dyn_const, lexer, stringtab, env, types)?; + let rhs_res = + process_type_expr_as_expr(*rhs, num_dyn_const, lexer, stringtab, env, types)?; Ok(DynConst::add(&lhs_res, &rhs_res)) - }, - parser::TypeExpr::Sub { span : _, lhs, rhs } => { - let lhs_res = process_type_expr_as_expr(*lhs, num_dyn_const, lexer, - stringtab, env, types)?; - let rhs_res = process_type_expr_as_expr(*rhs, num_dyn_const, lexer, - stringtab, env, types)?; + } + parser::TypeExpr::Sub { span: _, lhs, rhs } => { + let lhs_res = + process_type_expr_as_expr(*lhs, num_dyn_const, lexer, stringtab, env, types)?; + let rhs_res = + process_type_expr_as_expr(*rhs, num_dyn_const, lexer, stringtab, env, types)?; 
Ok(DynConst::sub(&lhs_res, &rhs_res)) - }, - parser::TypeExpr::Mul { span : _, lhs, rhs } => { - let lhs_res = process_type_expr_as_expr(*lhs, num_dyn_const, lexer, - stringtab, env, types)?; - let rhs_res = process_type_expr_as_expr(*rhs, num_dyn_const, lexer, - stringtab, env, types)?; + } + parser::TypeExpr::Mul { span: _, lhs, rhs } => { + let lhs_res = + process_type_expr_as_expr(*lhs, num_dyn_const, lexer, stringtab, env, types)?; + let rhs_res = + process_type_expr_as_expr(*rhs, num_dyn_const, lexer, stringtab, env, types)?; Ok(DynConst::mul(&lhs_res, &rhs_res)) - }, + } parser::TypeExpr::Div { span, lhs, rhs } => { - let lhs_res = process_type_expr_as_expr(*lhs, num_dyn_const, lexer, - stringtab, env, types)?; - let rhs_res = process_type_expr_as_expr(*rhs, num_dyn_const, lexer, - stringtab, env, types)?; + let lhs_res = + process_type_expr_as_expr(*lhs, num_dyn_const, lexer, stringtab, env, types)?; + let rhs_res = + process_type_expr_as_expr(*rhs, num_dyn_const, lexer, stringtab, env, types)?; if let Some(res) = DynConst::div(&lhs_res, &rhs_res) { Ok(res) } else { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - "Division by dynamic constant expression failed".to_string()))) + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + "Division by dynamic constant expression failed".to_string(), + ))) } - }, + } } } -fn process_type_expr_as_type(exp : parser::TypeExpr, num_dyn_const : usize, - lexer : &dyn NonStreamingLexer<DefaultLexerTypes<u32>>, - stringtab : &mut StringTable, - env : &Env<usize, Entity>, types : &mut TypeSolver, - can_infer : bool) - -> Result<Type, ErrorMessages> { - +fn process_type_expr_as_type( + exp: parser::TypeExpr, + num_dyn_const: usize, + lexer: &dyn NonStreamingLexer<DefaultLexerTypes<u32>>, + stringtab: &mut StringTable, + env: &Env<usize, Entity>, + types: &mut TypeSolver, + can_infer: bool, +) -> Result<Type, ErrorMessages> { match exp { parser::TypeExpr::IntLiteral 
{ span, .. } | parser::TypeExpr::Negative { span, .. } | parser::TypeExpr::Add { span, .. } | parser::TypeExpr::Sub { span, .. } | parser::TypeExpr::Mul { span, .. } - | parser::TypeExpr::Div { span, .. } => { - Err(singleton_error( - ErrorMessage::KindError( - span_to_loc(span, lexer), - "type".to_string(), "expression".to_string()))) - }, - parser::TypeExpr::PrimType { span : _, typ } => { + | parser::TypeExpr::Div { span, .. } => Err(singleton_error(ErrorMessage::KindError( + span_to_loc(span, lexer), + "type".to_string(), + "expression".to_string(), + ))), + parser::TypeExpr::PrimType { span: _, typ } => { Ok(types.new_primitive(convert_primitive(typ))) - }, + } parser::TypeExpr::WildcardType { span } => { if can_infer { Ok(types.new_of_kind(parser::Kind::Type, span_to_loc(span, lexer))) } else { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - "cannot infer type in this context".to_string()))) + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + "cannot infer type in this context".to_string(), + ))) } - }, - parser::TypeExpr::TupleType { span : _, tys } => { + } + parser::TypeExpr::TupleType { span: _, tys } => { let mut fields = vec![]; let mut errors = LinkedList::new(); for ty in tys { - match process_type_expr_as_type(ty, num_dyn_const, lexer, stringtab, - env, types, can_infer) { + match process_type_expr_as_type( + ty, + num_dyn_const, + lexer, + stringtab, + env, + types, + can_infer, + ) { Ok(t) => fields.push(t), Err(mut errs) => errors.append(&mut errs), } @@ -1095,13 +1509,24 @@ fn process_type_expr_as_type(exp : parser::TypeExpr, num_dyn_const : usize, Ok(types.new_tuple(fields)) } } - }, - parser::TypeExpr::ArrayTypeExpr { span : _, elem, dims } => { + } + parser::TypeExpr::ArrayTypeExpr { + span: _, + elem, + dims, + } => { let mut dimensions = vec![]; let mut errors = LinkedList::new(); - let element = process_type_expr_as_type(*elem, num_dyn_const, lexer, - stringtab, env, types, 
can_infer); + let element = process_type_expr_as_type( + *elem, + num_dyn_const, + lexer, + stringtab, + env, + types, + can_infer, + ); for dim in dims { match process_type_expr_as_expr(dim, num_dyn_const, lexer, stringtab, env, types) { @@ -1114,7 +1539,7 @@ fn process_type_expr_as_type(exp : parser::TypeExpr, num_dyn_const : usize, Err(mut errs) => { errs.append(&mut errors); Err(errs) - }, + } Ok(element_type) => { if !errors.is_empty() { Err(errors) @@ -1131,37 +1556,41 @@ fn process_type_expr_as_type(exp : parser::TypeExpr, num_dyn_const : usize, } } } - }, + } parser::TypeExpr::NamedTypeExpr { span, name, args } => { if name.len() != 1 { - Err(singleton_error( - ErrorMessage::NotImplemented( - span_to_loc(span, lexer), - "packages".to_string()))) + Err(singleton_error(ErrorMessage::NotImplemented( + span_to_loc(span, lexer), + "packages".to_string(), + ))) } else { let id = intern_package_name(&name, lexer, stringtab); let nm = id[0]; match env.lookup(&nm) { Some(Entity::Type { type_args, value }) => { if args.is_none() && type_args.len() != 0 && !can_infer { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Expected {} type arguments, provided none", - type_args.len()))))? + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Expected {} type arguments, provided none", + type_args.len() + ), + )))? } - let args = - args.unwrap_or_else( - || vec![ parser::TypeExpr::WildcardType { span : span } - ; type_args.len() ]); + let args = args.unwrap_or_else(|| { + vec![parser::TypeExpr::WildcardType { span: span }; type_args.len()] + }); if args.len() != type_args.len() { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Expected {} type arguments, provided {}", - type_args.len(), args.len()))))? 
+ Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Expected {} type arguments, provided {}", + type_args.len(), + args.len() + ), + )))? } // Process the type arguments, ensuring they match the given kinds @@ -1174,419 +1603,545 @@ fn process_type_expr_as_type(exp : parser::TypeExpr, num_dyn_const : usize, match kind { parser::Kind::USize => { match process_type_expr_as_expr( - arg, num_dyn_const, lexer, stringtab, env, types) { + arg, + num_dyn_const, + lexer, + stringtab, + env, + types, + ) { Err(mut errs) => errors.append(&mut errs), Ok(val) => dynamic_constants.push(val), } - }, + } _ => { match process_type_expr_as_type( - arg, num_dyn_const, lexer, stringtab, - env, types, can_infer) { + arg, + num_dyn_const, + lexer, + stringtab, + env, + types, + can_infer, + ) { Err(mut errs) => errors.append(&mut errs), Ok(typ) => { if types.unify_kind(typ, *kind) { type_vars.push(typ); } else { - errors.push_back( - ErrorMessage::KindError( - span_to_loc(arg_span, lexer), - kind.to_string(), - unparse_type(types, typ, stringtab))); + errors.push_back(ErrorMessage::KindError( + span_to_loc(arg_span, lexer), + kind.to_string(), + unparse_type(types, typ, stringtab), + )); } - }, + } } - }, + } } } - if !errors.is_empty() { Err(errors)? } + if !errors.is_empty() { + Err(errors)? 
+ } if type_vars.len() == 0 && dynamic_constants.len() == 0 { Ok(*value) } else { - if let Some(res) - = types.instantiate(*value, &type_vars, &dynamic_constants) { + if let Some(res) = + types.instantiate(*value, &type_vars, &dynamic_constants) + { Ok(res) } else { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - "Failure in variable substitution".to_string()))) + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + "Failure in variable substitution".to_string(), + ))) } } - }, - Some(_) => - Err(singleton_error( - ErrorMessage::KindError( - span_to_loc(span, lexer), - "type".to_string(), - "value".to_string()))), - None => - Err(singleton_error( - ErrorMessage::UndefinedVariable( - span_to_loc(span, lexer), - stringtab.lookup_id(nm).unwrap()))), + } + Some(_) => Err(singleton_error(ErrorMessage::KindError( + span_to_loc(span, lexer), + "type".to_string(), + "value".to_string(), + ))), + None => Err(singleton_error(ErrorMessage::UndefinedVariable( + span_to_loc(span, lexer), + stringtab.lookup_id(nm).unwrap(), + ))), } } - }, + } } } // Normalizes the given statement, and returns the normalized statement plus whether a statement // after the analyzed one is reachable or not -fn process_stmt(stmt : parser::Stmt, num_dyn_const : usize, - lexer : &dyn NonStreamingLexer<DefaultLexerTypes<u32>>, - stringtab : &mut StringTable, env : &mut Env<usize, Entity>, - types : &mut TypeSolver, in_loop : bool, return_type : Type, - inout_vars : &Vec<usize>, inout_types : &Vec<Type>) - -> Result<(Stmt, bool), ErrorMessages> { - +fn process_stmt( + stmt: parser::Stmt, + num_dyn_const: usize, + lexer: &dyn NonStreamingLexer<DefaultLexerTypes<u32>>, + stringtab: &mut StringTable, + env: &mut Env<usize, Entity>, + types: &mut TypeSolver, + in_loop: bool, + return_type: Type, + inout_vars: &Vec<usize>, + inout_types: &Vec<Type>, + labels: &mut LabelSet, +) -> Result<(Stmt, bool), ErrorMessages> { match stmt { - 
parser::Stmt::LetStmt { span: _, var : VarBind { span : v_span, pattern, typ }, init } => { - match pattern { - Pattern::Variable { span, name } => { - if typ.is_none() && init.is_none() { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - "Must specify either type or initial value".to_string())))? - } - if name.len() != 1 { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - "Bound variables must be local names, without a package separator".to_string())))? - } + parser::Stmt::LetStmt { + span: _, + var: + VarBind { + span: v_span, + pattern, + typ, + }, + init, + } => match pattern { + Pattern::Variable { span, name } => { + if typ.is_none() && init.is_none() { + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + "Must specify either type or initial value".to_string(), + )))? + } + if name.len() != 1 { + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + "Bound variables must be local names, without a package separator" + .to_string(), + )))? 
+ } - let nm = intern_package_name(&name, lexer, stringtab)[0]; - let ty = - match typ { - None => None, - Some(t) => - Some(process_type(t, num_dyn_const, lexer, - stringtab, env, types, true)?), - }; + let nm = intern_package_name(&name, lexer, stringtab)[0]; + let ty = match typ { + None => None, + Some(t) => Some(process_type( + t, + num_dyn_const, + lexer, + stringtab, + env, + types, + true, + )?), + }; - let var = env.uniq(); + let var = env.uniq(); - let (val, exp_loc) = - match init { - Some(exp) => { - let loc = span_to_loc(exp.span(), lexer); - (process_expr(exp, num_dyn_const, lexer, stringtab, env, types)?, loc) - }, - None => { - (Expr::Zero { typ : ty.expect("From Above") }, - Location::fake()) - }, - }; - let typ = val.get_type(); - - env.insert(nm, - Entity::Variable { variable : var, typ : typ, - is_const : false }); - - match ty { - Some(ty) if !types.unify(ty, typ) => { - Err(singleton_error( - ErrorMessage::TypeError( - exp_loc, - unparse_type(types, ty, stringtab), - unparse_type(types, typ, stringtab))))? + let (val, exp_loc) = match init { + Some(exp) => { + let loc = span_to_loc(exp.span(), lexer); + ( + process_expr(exp, num_dyn_const, lexer, stringtab, env, types)?, + loc, + ) + } + None => ( + Expr::Zero { + typ: ty.expect("From Above"), }, - _ => Ok((Stmt::AssignStmt { var : var, val : val }, true)), + Location::fake(), + ), + }; + let typ = val.get_type(); + + env.insert( + nm, + Entity::Variable { + variable: var, + typ: typ, + is_const: false, + }, + ); + + match ty { + Some(ty) if !types.unify(ty, typ) => { + Err(singleton_error(ErrorMessage::TypeError( + exp_loc, + unparse_type(types, ty, stringtab), + unparse_type(types, typ, stringtab), + )))? 
} - }, - _ => { - Err(singleton_error( - ErrorMessage::NotImplemented( - span_to_loc(v_span, lexer), - "non-variable bindings".to_string()))) - }, + _ => Ok((Stmt::AssignStmt { var: var, val: val }, true)), + } } + _ => Err(singleton_error(ErrorMessage::NotImplemented( + span_to_loc(v_span, lexer), + "non-variable bindings".to_string(), + ))), }, - parser::Stmt::ConstStmt { span: _, var : VarBind { span : v_span, pattern, typ }, init } => { - match pattern { - Pattern::Variable { span, name } => { - if typ.is_none() && init.is_none() { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - "Must specify either type or initial value".to_string())))? - } - if name.len() != 1 { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - "Bound variables must be local names, without a package separator".to_string())))? - } + parser::Stmt::ConstStmt { + span: _, + var: + VarBind { + span: v_span, + pattern, + typ, + }, + init, + } => match pattern { + Pattern::Variable { span, name } => { + if typ.is_none() && init.is_none() { + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + "Must specify either type or initial value".to_string(), + )))? + } + if name.len() != 1 { + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + "Bound variables must be local names, without a package separator" + .to_string(), + )))? 
+ } - let nm = intern_package_name(&name, lexer, stringtab)[0]; - let ty = - match typ { - None => None, - Some(t) => - Some(process_type(t, num_dyn_const, lexer, - stringtab, env, types, true)?), - }; + let nm = intern_package_name(&name, lexer, stringtab)[0]; + let ty = match typ { + None => None, + Some(t) => Some(process_type( + t, + num_dyn_const, + lexer, + stringtab, + env, + types, + true, + )?), + }; - let var = env.uniq(); + let var = env.uniq(); - let (val, exp_loc) = - match init { - Some(exp) => { - let loc = span_to_loc(exp.span(), lexer); - (process_expr(exp, num_dyn_const, lexer, stringtab, env, types)?, loc) - }, - None => { - (Expr::Zero { typ : ty.expect("From Above") }, - Location::fake()) - }, - }; - let typ = val.get_type(); - - env.insert(nm, - Entity::Variable { variable : var, typ : typ, - is_const : true }); - - match ty { - Some(ty) if !types.unify(ty, typ) => { - Err(singleton_error( - ErrorMessage::TypeError( - exp_loc, - unparse_type(types, ty, stringtab), - unparse_type(types, typ, stringtab)))) + let (val, exp_loc) = match init { + Some(exp) => { + let loc = span_to_loc(exp.span(), lexer); + ( + process_expr(exp, num_dyn_const, lexer, stringtab, env, types)?, + loc, + ) + } + None => ( + Expr::Zero { + typ: ty.expect("From Above"), }, - _ => Ok((Stmt::AssignStmt { var : var, val : val }, true)), + Location::fake(), + ), + }; + let typ = val.get_type(); + + env.insert( + nm, + Entity::Variable { + variable: var, + typ: typ, + is_const: true, + }, + ); + + match ty { + Some(ty) if !types.unify(ty, typ) => { + Err(singleton_error(ErrorMessage::TypeError( + exp_loc, + unparse_type(types, ty, stringtab), + unparse_type(types, typ, stringtab), + ))) } - }, - _ => { - Err(singleton_error( - ErrorMessage::NotImplemented( - span_to_loc(v_span, lexer), - "non-variable bindings".to_string()))) - }, + _ => Ok((Stmt::AssignStmt { var: var, val: val }, true)), + } } + _ => Err(singleton_error(ErrorMessage::NotImplemented( + span_to_loc(v_span, 
lexer), + "non-variable bindings".to_string(), + ))), }, - parser::Stmt::AssignStmt { span: _, lhs, assign, assign_span, rhs } => { + parser::Stmt::AssignStmt { + span: _, + lhs, + assign, + assign_span, + rhs, + } => { let lhs_res = process_lexpr(lhs, num_dyn_const, lexer, stringtab, env, types); let rhs_res = process_expr(rhs, num_dyn_const, lexer, stringtab, env, types); - let (((var, var_typ), (exp_typ, index)), val) - = append_errors2(lhs_res, rhs_res)?; + let (((var, var_typ), (exp_typ, index)), val) = append_errors2(lhs_res, rhs_res)?; let typ = val.get_type(); // Perform the appropriate type checking match assign { - AssignOp::None => { + AssignOp::None => { if !types.unify(exp_typ, typ) { - Err(singleton_error( - ErrorMessage::TypeError( - span_to_loc(assign_span, lexer), - unparse_type(types, exp_typ, stringtab), - unparse_type(types, typ, stringtab))))? + Err(singleton_error(ErrorMessage::TypeError( + span_to_loc(assign_span, lexer), + unparse_type(types, exp_typ, stringtab), + unparse_type(types, typ, stringtab), + )))? } - }, + } AssignOp::Add | AssignOp::Sub | AssignOp::Mul | AssignOp::Div => { if !types.unify(exp_typ, typ) { - Err(singleton_error( - ErrorMessage::TypeError( - span_to_loc(assign_span, lexer), - unparse_type(types, exp_typ, stringtab), - unparse_type(types, typ, stringtab))))? + Err(singleton_error(ErrorMessage::TypeError( + span_to_loc(assign_span, lexer), + unparse_type(types, exp_typ, stringtab), + unparse_type(types, typ, stringtab), + )))? } if !types.unify_kind(exp_typ, parser::Kind::Number) { - Err(singleton_error( - ErrorMessage::TypeError( - span_to_loc(assign_span, lexer), - "number".to_string(), - unparse_type(types, exp_typ, stringtab))))? + Err(singleton_error(ErrorMessage::TypeError( + span_to_loc(assign_span, lexer), + "number".to_string(), + unparse_type(types, exp_typ, stringtab), + )))? 
} - }, - AssignOp::Mod | AssignOp::BitAnd | AssignOp::BitOr | AssignOp::Xor - | AssignOp::LShift | AssignOp::RShift => { + } + AssignOp::Mod + | AssignOp::BitAnd + | AssignOp::BitOr + | AssignOp::Xor + | AssignOp::LShift + | AssignOp::RShift => { if !types.unify(exp_typ, typ) { - Err(singleton_error( - ErrorMessage::TypeError( - span_to_loc(assign_span, lexer), - unparse_type(types, exp_typ, stringtab), - unparse_type(types, typ, stringtab))))? + Err(singleton_error(ErrorMessage::TypeError( + span_to_loc(assign_span, lexer), + unparse_type(types, exp_typ, stringtab), + unparse_type(types, typ, stringtab), + )))? } if !types.unify_kind(exp_typ, parser::Kind::Integer) { - Err(singleton_error( - ErrorMessage::TypeError( - span_to_loc(assign_span, lexer), - "integer".to_string(), - unparse_type(types, exp_typ, stringtab))))? + Err(singleton_error(ErrorMessage::TypeError( + span_to_loc(assign_span, lexer), + "integer".to_string(), + unparse_type(types, exp_typ, stringtab), + )))? } - }, + } AssignOp::LogAnd | AssignOp::LogOr => { if !types.unify(exp_typ, typ) { - Err(singleton_error( - ErrorMessage::TypeError( - span_to_loc(assign_span, lexer), - unparse_type(types, exp_typ, stringtab), - unparse_type(types, typ, stringtab))))? + Err(singleton_error(ErrorMessage::TypeError( + span_to_loc(assign_span, lexer), + unparse_type(types, exp_typ, stringtab), + unparse_type(types, typ, stringtab), + )))? } if !types.unify_bool(exp_typ) { - Err(singleton_error( - ErrorMessage::TypeError( - span_to_loc(assign_span, lexer), - "bool".to_string(), - unparse_type(types, exp_typ, stringtab))))? + Err(singleton_error(ErrorMessage::TypeError( + span_to_loc(assign_span, lexer), + "bool".to_string(), + unparse_type(types, exp_typ, stringtab), + )))? 
} - }, + } } let empty_index = index.is_empty(); - let rhs_var = Expr::Variable { var : var, typ : var_typ }; + let rhs_var = Expr::Variable { + var: var, + typ: var_typ, + }; - let rhs_val = - if empty_index { - rhs_var - } else { - Expr::Read { - index : index.clone(), - val : Box::new(rhs_var), - typ : exp_typ } - }; + let rhs_val = if empty_index { + rhs_var + } else { + Expr::Read { + index: index.clone(), + val: Box::new(rhs_var), + typ: exp_typ, + } + }; // Construct the right-hand side for the normalized expression; for x= operations this // will construct the read and the operation; the write is left for after this since it // is common to all cases - let result_rhs = - match assign { - AssignOp::None => { - val - }, - AssignOp::Add | AssignOp::Sub | AssignOp::Mul | AssignOp::Div - | AssignOp::Mod | AssignOp::BitAnd | AssignOp::BitOr - | AssignOp::Xor | AssignOp::LShift | AssignOp::RShift => { - Expr::BinaryExp { - op : convert_assign_op(assign), - lhs : Box::new(rhs_val), - rhs : Box::new(val), - typ : typ } - }, - // For x &&= y we convert to if x then y else false - AssignOp::LogAnd => { - Expr::CondExpr { - cond : Box::new(rhs_val), - thn : Box::new(val), - // We know that the expected type is bool, so just use it to avoid - // creating additional new types - els : Box::new(Expr::Constant { - val : (Literal::Bool(false), exp_typ), - typ : exp_typ }), - typ : typ } - }, - // For x ||= y we convert to if x then true else y - AssignOp::LogOr => { - Expr::CondExpr { - cond : Box::new(rhs_val), - thn : Box::new(Expr::Constant { - val : (Literal::Bool(true), exp_typ), - typ : exp_typ }), - els : Box::new(val), - typ : typ } - }, - }; + let result_rhs = match assign { + AssignOp::None => val, + AssignOp::Add + | AssignOp::Sub + | AssignOp::Mul + | AssignOp::Div + | AssignOp::Mod + | AssignOp::BitAnd + | AssignOp::BitOr + | AssignOp::Xor + | AssignOp::LShift + | AssignOp::RShift => Expr::BinaryExp { + op: convert_assign_op(assign), + lhs: Box::new(rhs_val), + 
rhs: Box::new(val), + typ: typ, + }, + // For x &&= y we convert to if x then y else false + AssignOp::LogAnd => { + Expr::CondExpr { + cond: Box::new(rhs_val), + thn: Box::new(val), + // We know that the expected type is bool, so just use it to avoid + // creating additional new types + els: Box::new(Expr::Constant { + val: (Literal::Bool(false), exp_typ), + typ: exp_typ, + }), + typ: typ, + } + } + // For x ||= y we convert to if x then true else y + AssignOp::LogOr => Expr::CondExpr { + cond: Box::new(rhs_val), + thn: Box::new(Expr::Constant { + val: (Literal::Bool(true), exp_typ), + typ: exp_typ, + }), + els: Box::new(val), + typ: typ, + }, + }; - let write_exp = - if empty_index { - result_rhs - } else { - Expr::Write { - index : index, - val : Box::new(Expr::Variable { var : var, typ : var_typ }), - rep : Box::new(result_rhs), - typ : var_typ } - }; + let write_exp = if empty_index { + result_rhs + } else { + Expr::Write { + index: index, + val: Box::new(Expr::Variable { + var: var, + typ: var_typ, + }), + rep: Box::new(result_rhs), + typ: var_typ, + } + }; - Ok((Stmt::AssignStmt { - var : var, - val : write_exp }, true)) - }, - parser::Stmt::IfStmt { span: _, cond, thn, els } => { + Ok(( + Stmt::AssignStmt { + var: var, + val: write_exp, + }, + true, + )) + } + parser::Stmt::IfStmt { + span: _, + cond, + thn, + els, + } => { let cond_span = cond.span(); let cond_res = process_expr(cond, num_dyn_const, lexer, stringtab, env, types); env.open_scope(); - let thn_res = process_stmt(*thn, num_dyn_const, lexer, stringtab, env, types, - in_loop, return_type, inout_vars, inout_types); + let thn_res = process_stmt( + *thn, + num_dyn_const, + lexer, + stringtab, + env, + types, + in_loop, + return_type, + inout_vars, + inout_types, + labels, + ); env.close_scope(); env.open_scope(); - let els_res = - match els { None => Ok((None, true)), - Some(stmt) => - process_stmt(*stmt, num_dyn_const, lexer, stringtab, env, types, - in_loop, return_type, inout_vars, inout_types) 
- .map(|(s, b)| (Some(s), b)), }; + let els_res = match els { + None => Ok((None, true)), + Some(stmt) => process_stmt( + *stmt, + num_dyn_const, + lexer, + stringtab, + env, + types, + in_loop, + return_type, + inout_vars, + inout_types, + labels, + ) + .map(|(s, b)| (Some(s), b)), + }; env.close_scope(); - let (cond_exp, (thn_body, thn_fall), (els_body, els_fall)) - = append_errors3(cond_res, thn_res, els_res)?; + let (cond_exp, (thn_body, thn_fall), (els_body, els_fall)) = + append_errors3(cond_res, thn_res, els_res)?; let cond_typ = cond_exp.get_type(); if !types.unify_bool(cond_typ) { - Err(singleton_error( - ErrorMessage::TypeError( - span_to_loc(cond_span, lexer), - "bool".to_string(), - unparse_type(types, cond_typ, stringtab))))? + Err(singleton_error(ErrorMessage::TypeError( + span_to_loc(cond_span, lexer), + "bool".to_string(), + unparse_type(types, cond_typ, stringtab), + )))? } - Ok((Stmt::IfStmt { - cond : cond_exp, - thn : Box::new(thn_body), - els : els_body.map(|s| Box::new(s)) }, - thn_fall || els_fall)) - }, - parser::Stmt::MatchStmt { span, expr: _, body: _ } => { - Err(singleton_error( - ErrorMessage::NotImplemented( - span_to_loc(span, lexer), - "match statements".to_string()))) - }, - parser::Stmt::ForStmt { span : _, var : VarBind { span : v_span, pattern, typ }, - init, bound, step, body } => { - let (var, var_name, var_type) = - match pattern { - Pattern::Variable { span, name } => { - if name.len() != 1 { - return Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - "Bound variables must be local names, without a package separator".to_string()))); - } - - let nm = intern_package_name(&name, lexer, stringtab)[0]; - let var_type = - match typ { - None => types.new_primitive(types::Primitive::U64), - Some(t) => { - let ty = process_type(t, num_dyn_const, lexer, stringtab, env, types, true)?; - if !types.unify_kind(ty, parser::Kind::Integer) { - return Err(singleton_error( - ErrorMessage::SemanticError( - 
span_to_loc(v_span, lexer), - "For loop variables must be integers".to_string()))); - } - ty - }, - }; + Ok(( + Stmt::IfStmt { + cond: cond_exp, + thn: Box::new(thn_body), + els: els_body.map(|s| Box::new(s)), + }, + thn_fall || els_fall, + )) + } + parser::Stmt::MatchStmt { + span, + expr: _, + body: _, + } => Err(singleton_error(ErrorMessage::NotImplemented( + span_to_loc(span, lexer), + "match statements".to_string(), + ))), + parser::Stmt::ForStmt { + span: _, + var: + VarBind { + span: v_span, + pattern, + typ, + }, + init, + bound, + step, + body, + } => { + let (var, var_name, var_type) = match pattern { + Pattern::Variable { span, name } => { + if name.len() != 1 { + return Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + "Bound variables must be local names, without a package separator" + .to_string(), + ))); + } - let var = env.uniq(); - (var, nm, var_type) - }, - _ => { - return Err(singleton_error( - ErrorMessage::NotImplemented( + let nm = intern_package_name(&name, lexer, stringtab)[0]; + let var_type = match typ { + None => types.new_primitive(types::Primitive::U64), + Some(t) => { + let ty = + process_type(t, num_dyn_const, lexer, stringtab, env, types, true)?; + if !types.unify_kind(ty, parser::Kind::Integer) { + return Err(singleton_error(ErrorMessage::SemanticError( span_to_loc(v_span, lexer), - "patterns in for loop arguments".to_string()))); - }, - }; + "For loop variables must be integers".to_string(), + ))); + } + ty + } + }; + + let var = env.uniq(); + (var, nm, var_type) + } + _ => { + return Err(singleton_error(ErrorMessage::NotImplemented( + span_to_loc(v_span, lexer), + "patterns in for loop arguments".to_string(), + ))); + } + }; // Evaluate the initial value, bound, and step let init_span = init.span(); @@ -1597,46 +2152,42 @@ fn process_stmt(stmt : parser::Stmt, num_dyn_const : usize, // The step is tracked as a pair of the step's amount (always positive) and whether the // step should be positive or 
negative - let (step_val, step_pos) = - match step { - None => { - (1, true) - }, - Some((negative, span, base)) => { - let val = u64::from_str_radix(lexer.span_str(span), base.base()); - assert!(val.is_ok(), "Internal Error: Int literal is not an integer"); - let num = val.unwrap(); - if num == 0 { - return Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - "For loop step cannot be 0".to_string()))); - } + let (step_val, step_pos) = match step { + None => (1, true), + Some((negative, span, base)) => { + let val = u64::from_str_radix(&base.string(lexer, span), base.base()); + assert!(val.is_ok(), "Internal Error: Int literal is not an integer"); + let num = val.unwrap(); + if num == 0 { + return Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + "For loop step cannot be 0".to_string(), + ))); + } - (num, !negative) - }, - }; + (num, !negative) + } + }; - let (init_val, bound_val) - = append_errors2(init_res, bound_res)?; + let (init_val, bound_val) = append_errors2(init_res, bound_res)?; let init_typ = init_val.get_type(); let bound_typ = bound_val.get_type(); // Verify that the types of the initial value and bound are correct let mut type_errors = LinkedList::new(); if !types.unify(var_type, init_typ) { - type_errors.push_back( - ErrorMessage::TypeError( - span_to_loc(init_span, lexer), - unparse_type(types, var_type, stringtab), - unparse_type(types, init_typ, stringtab))); + type_errors.push_back(ErrorMessage::TypeError( + span_to_loc(init_span, lexer), + unparse_type(types, var_type, stringtab), + unparse_type(types, init_typ, stringtab), + )); } if !types.unify(var_type, bound_typ) { - type_errors.push_back( - ErrorMessage::TypeError( - span_to_loc(bound_span, lexer), - unparse_type(types, var_type, stringtab), - unparse_type(types, bound_typ, stringtab))); + type_errors.push_back(ErrorMessage::TypeError( + span_to_loc(bound_span, lexer), + unparse_type(types, var_type, stringtab), + 
unparse_type(types, bound_typ, stringtab), + )); } if !type_errors.is_empty() { Err(type_errors)? @@ -1644,143 +2195,207 @@ fn process_stmt(stmt : parser::Stmt, num_dyn_const : usize, // Create the scope for the body env.open_scope(); - env.insert(var_name, Entity::Variable { - variable : var, typ : var_type, is_const : true }); + env.insert( + var_name, + Entity::Variable { + variable: var, + typ: var_type, + is_const: true, + }, + ); // Process the body - let (body, _) - = process_stmt(*body, num_dyn_const, lexer, stringtab, env, types, true, - return_type, inout_vars, inout_types)?; + let (body, _) = process_stmt( + *body, + num_dyn_const, + lexer, + stringtab, + env, + types, + true, + return_type, + inout_vars, + inout_types, + labels, + )?; env.close_scope(); // We bind the initial value of the loop counter - let init_eval = Stmt::AssignStmt { var : var, val : init_val }; + let init_eval = Stmt::AssignStmt { + var: var, + val: init_val, + }; // We create a new variable for the loop bound and we're going to bind the bound to // that value before the loop so that it is only evaluated once let bound_var = env.uniq(); - let bound_eval = Stmt::AssignStmt { var : bound_var, val : bound_val }; + let bound_eval = Stmt::AssignStmt { + var: bound_var, + val: bound_val, + }; // The condition of the loop is var < bound, unless the step is negative in which case // it is var > bound - let condition = - Expr::BinaryExp { - op : if step_pos { BinaryOp::Lt } else { BinaryOp::Gt }, - lhs : Box::new(Expr::Variable { var : var, typ : var_type }), - rhs : Box::new(Expr::Variable { var : bound_var, typ : bound_typ }), - typ : types.new_primitive(types::Primitive::Bool) }; + let condition = Expr::BinaryExp { + op: if step_pos { BinaryOp::Lt } else { BinaryOp::Gt }, + lhs: Box::new(Expr::Variable { + var: var, + typ: var_type, + }), + rhs: Box::new(Expr::Variable { + var: bound_var, + typ: bound_typ, + }), + typ: types.new_primitive(types::Primitive::Bool), + }; // The update of 
the loop is var = var + step, unless the step is negative in which // case it is var = var - step - let update = - Stmt::AssignStmt { - var : var, - val : Expr::BinaryExp { - op : if step_pos { BinaryOp::Add } else { BinaryOp::Sub }, - lhs : Box::new(Expr::Variable { var : var, typ : var_type }), - rhs : Box::new(Expr::Constant { - val : (Literal::Integer(step_val), var_type), - typ : var_type }), - typ : var_type }}; + let update = Stmt::AssignStmt { + var: var, + val: Expr::BinaryExp { + op: if step_pos { + BinaryOp::Add + } else { + BinaryOp::Sub + }, + lhs: Box::new(Expr::Variable { + var: var, + typ: var_type, + }), + rhs: Box::new(Expr::Constant { + val: (Literal::Integer(step_val), var_type), + typ: var_type, + }), + typ: var_type, + }, + }; // Finally, the entire loop is constructed as: // Evaluate initial value // Evaluate bound value // Loop // Note that the statement after a loop is always assumed to be reachable - Ok((Stmt::BlockStmt { - body : vec![ + Ok(( + Stmt::BlockStmt { + body: vec![ init_eval, bound_eval, Stmt::LoopStmt { - cond : condition, - update : Some(Box::new(update)), - body : Box::new(body) - } - ] - }, true)) - }, - parser::Stmt::WhileStmt { span: _, cond, body } => { + cond: condition, + update: Some(Box::new(update)), + body: Box::new(body), + }, + ], + // A label applied to this loop should be applied to the + // loop, not the initialization + label_last: true, + }, + true, + )) + } + parser::Stmt::WhileStmt { + span: _, + cond, + body, + } => { let cond_span = cond.span(); let cond_res = process_expr(cond, num_dyn_const, lexer, stringtab, env, types); env.open_scope(); - let body_res = process_stmt(*body, num_dyn_const, lexer, stringtab, env, types, - true, return_type, inout_vars, inout_types); + let body_res = process_stmt( + *body, + num_dyn_const, + lexer, + stringtab, + env, + types, + true, + return_type, + inout_vars, + inout_types, + labels, + ); env.close_scope(); - let (cond_val, (body_stmt, _)) - = 
append_errors2(cond_res, body_res)?; + let (cond_val, (body_stmt, _)) = append_errors2(cond_res, body_res)?; let cond_typ = cond_val.get_type(); if !types.unify_bool(cond_typ) { - Err(singleton_error( - ErrorMessage::TypeError( - span_to_loc(cond_span, lexer), - "bool".to_string(), - unparse_type(types, cond_typ, stringtab))))? + Err(singleton_error(ErrorMessage::TypeError( + span_to_loc(cond_span, lexer), + "bool".to_string(), + unparse_type(types, cond_typ, stringtab), + )))? } // Again, the statement after a loop is always considered reachable - Ok((Stmt::LoopStmt { - cond : cond_val, - update : None, - body : Box::new(body_stmt) }, true)) - }, + Ok(( + Stmt::LoopStmt { + cond: cond_val, + update: None, + body: Box::new(body_stmt), + }, + true, + )) + } parser::Stmt::ReturnStmt { span, expr } => { - let return_val = - if expr.is_none() && types.unify_void(return_type) { - Expr::Constant { - val : (Literal::Unit, return_type), - typ : return_type } - } else if expr.is_none() { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Expected return of type {} found no return value", - unparse_type(types, return_type, stringtab)))))? - } else { - let val = process_expr(expr.unwrap(), num_dyn_const, lexer, stringtab, env, - types)?; - let typ = val.get_type(); - if !types.unify(return_type, typ) { - Err(singleton_error( - ErrorMessage::TypeError( - span_to_loc(span, lexer), - unparse_type(types, return_type, stringtab), - unparse_type(types, typ, stringtab))))? - } - val - }; + let return_val = if expr.is_none() && types.unify_void(return_type) { + Expr::Constant { + val: (Literal::Unit, return_type), + typ: return_type, + } + } else if expr.is_none() { + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Expected return of type {} found no return value", + unparse_type(types, return_type, stringtab) + ), + )))? 
+ } else { + let val = process_expr(expr.unwrap(), num_dyn_const, lexer, stringtab, env, types)?; + let typ = val.get_type(); + if !types.unify(return_type, typ) { + Err(singleton_error(ErrorMessage::TypeError( + span_to_loc(span, lexer), + unparse_type(types, return_type, stringtab), + unparse_type(types, typ, stringtab), + )))? + } + val + }; // We return a tuple of the return value and of the inout variables // Statements after a return are never reachable - Ok((generate_return(return_val, inout_vars, inout_types, types), - false)) - }, + Ok(( + generate_return(return_val, inout_vars, inout_types, types), + false, + )) + } parser::Stmt::BreakStmt { span } => { if !in_loop { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - "Break not contained within loop".to_string())))? + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + "Break not contained within loop".to_string(), + )))? } // Code after a break is unreachable Ok((Stmt::BreakStmt {}, false)) - }, + } parser::Stmt::ContinueStmt { span } => { if !in_loop { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - "Continue not contained within loop".to_string())))? + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + "Continue not contained within loop".to_string(), + )))? } // Code after a continue is unreachable Ok((Stmt::ContinueStmt {}, false)) - }, + } parser::Stmt::BlockStmt { span: _, body } => { // Blocks create a new scope for variables declared in them env.open_scope(); @@ -1791,19 +2406,32 @@ fn process_stmt(stmt : parser::Stmt, num_dyn_const : usize, for stmt in body { if !reachable { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(stmt.span(), lexer), - "Unreachable statement".to_string())))? + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(stmt.span(), lexer), + "Unreachable statement".to_string(), + )))? 
} - match process_stmt(stmt, num_dyn_const, lexer, stringtab, env, types, in_loop, - return_type, inout_vars, inout_types) { - Err(mut errs) => { errors.append(&mut errs); }, + match process_stmt( + stmt, + num_dyn_const, + lexer, + stringtab, + env, + types, + in_loop, + return_type, + inout_vars, + inout_types, + labels, + ) { + Err(mut errs) => { + errors.append(&mut errs); + } Ok((stmt, post_reachable)) => { res.push(stmt); reachable = post_reachable; - }, + } } } @@ -1812,19 +2440,79 @@ fn process_stmt(stmt : parser::Stmt, num_dyn_const : usize, if !errors.is_empty() { Err(errors) } else { - Ok((Stmt::BlockStmt { body : res }, reachable)) + Ok(( + Stmt::BlockStmt { + body: res, + label_last: false, + }, + reachable, + )) } - }, - parser::Stmt::CallStmt { span, name, ty_args, args } => { + } + parser::Stmt::CallStmt { + span, + name, + ty_args, + args, + } => { // Call statements are lowered to call expressions which is made a statment using the // ExprStmt constructor // Code after a call is always reachable - Ok((Stmt::ExprStmt { - expr : process_expr( - parser::Expr::CallExpr { span, name, ty_args, args }, - num_dyn_const, lexer, stringtab, env, types)? 
}, - true)) - }, + Ok(( + Stmt::ExprStmt { + expr: process_expr( + parser::Expr::CallExpr { + span, + name, + ty_args, + args, + }, + num_dyn_const, + lexer, + stringtab, + env, + types, + )?, + }, + true, + )) + } + parser::Stmt::LabeledStmt { + span: _, + label, + stmt, + } => { + let label_str = lexer.span_str(label).to_string(); + + let label_id = match labels.insert_new(label_str) { + Err(label_str) => Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(label, lexer), + format!("Label {} already exists", label_str), + )))?, + Ok(id) => id, + }; + + let (body, reach_end) = process_stmt( + *stmt, + num_dyn_const, + lexer, + stringtab, + env, + types, + in_loop, + return_type, + inout_vars, + inout_types, + labels, + )?; + Ok(( + Stmt::LabeledStmt { + label: label_id, + stmt: Box::new(body), + }, + reach_end, + )) + } } } @@ -1833,105 +2521,114 @@ fn process_stmt(stmt : parser::Stmt, num_dyn_const : usize, // piece // This should only be used for the left-hand side of an assignment since it will return an error // if the variable that is accessed is marked as constant -fn process_lexpr(expr : parser::LExpr, num_dyn_const : usize, - lexer : &dyn NonStreamingLexer<DefaultLexerTypes<u32>>, - stringtab : &mut StringTable, env : &mut Env<usize, Entity>, - types : &mut TypeSolver) - -> Result<((usize, Type), (Type, Vec<Index>)), ErrorMessages> { +fn process_lexpr( + expr: parser::LExpr, + num_dyn_const: usize, + lexer: &dyn NonStreamingLexer<DefaultLexerTypes<u32>>, + stringtab: &mut StringTable, + env: &mut Env<usize, Entity>, + types: &mut TypeSolver, +) -> Result<((usize, Type), (Type, Vec<Index>)), ErrorMessages> { match expr { parser::LExpr::VariableLExpr { span } => { let nm = intern_id(&span, lexer, stringtab); match env.lookup(&nm) { - Some(Entity::Variable { variable, typ, is_const }) => { + Some(Entity::Variable { + variable, + typ, + is_const, + }) => { if *is_const { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, 
lexer), - format!("Variable {} is const, cannot assign to it", - lexer.span_str(span))))) + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Variable {} is const, cannot assign to it", + lexer.span_str(span) + ), + ))) } else { Ok(((*variable, *typ), (*typ, vec![]))) } - }, - Some(Entity::DynConst { .. }) => { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("{} is a dynamic constant, cannot assign to it", - lexer.span_str(span))))) - }, - Some(Entity::Constant { .. }) => { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("{} is a constant, cannot assign to it", - lexer.span_str(span))))) - }, - Some(Entity::Function { .. }) => { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("{} is a function, cannot assign to it", - lexer.span_str(span))))) - }, - Some(Entity::Type { .. }) => { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("{} is a type, cannot assign to it", - lexer.span_str(span))))) - }, - None => { - Err(singleton_error( - ErrorMessage::UndefinedVariable( - span_to_loc(span, lexer), - lexer.span_str(span).to_string()))) - }, + } + Some(Entity::DynConst { .. }) => Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "{} is a dynamic constant, cannot assign to it", + lexer.span_str(span) + ), + ))), + Some(Entity::Constant { .. }) => Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "{} is a constant, cannot assign to it", + lexer.span_str(span) + ), + ))), + Some(Entity::Function { .. }) => Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "{} is a function, cannot assign to it", + lexer.span_str(span) + ), + ))), + Some(Entity::Type { .. 
}) => Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!("{} is a type, cannot assign to it", lexer.span_str(span)), + ))), + None => Err(singleton_error(ErrorMessage::UndefinedVariable( + span_to_loc(span, lexer), + lexer.span_str(span).to_string(), + ))), } - }, + } parser::LExpr::FieldLExpr { span, lhs, rhs } => { - let ((var, var_typ), (idx_typ, mut idx)) - = process_lexpr(*lhs, num_dyn_const, lexer, stringtab, env, types)?; + let ((var, var_typ), (idx_typ, mut idx)) = + process_lexpr(*lhs, num_dyn_const, lexer, stringtab, env, types)?; let field_nm = intern_id(&rhs, lexer, stringtab); match types.get_field(idx_typ, field_nm) { - None => Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Type {} does not possess field {}", - unparse_type(types, idx_typ, stringtab), - stringtab.lookup_id(field_nm).unwrap())))), + None => Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Type {} does not possess field {}", + unparse_type(types, idx_typ, stringtab), + stringtab.lookup_id(field_nm).unwrap() + ), + ))), Some((field_idx, field_type)) => { idx.push(Index::Field(field_idx)); Ok(((var, var_typ), (field_type, idx))) - }, + } } - }, + } parser::LExpr::NumFieldLExpr { span, lhs, rhs } => { - let ((var, var_typ), (idx_typ, mut idx)) - = process_lexpr(*lhs, num_dyn_const, lexer, stringtab, env, types)?; + let ((var, var_typ), (idx_typ, mut idx)) = + process_lexpr(*lhs, num_dyn_const, lexer, stringtab, env, types)?; // Identify the field number; to do this we remove the first character of the string of // the right-hand side since the ".###" is lexed as a single token - let num = lexer.span_str(rhs)[1..].parse::<usize>() - .expect("From lexical analysis"); + let num = lexer.span_str(rhs)[1..] 
+ .parse::<usize>() + .expect("From lexical analysis"); match types.get_index(idx_typ, num) { - None => Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Type {} does not possess index {}", - unparse_type(types, idx_typ, stringtab), - num)))), + None => Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Type {} does not possess index {}", + unparse_type(types, idx_typ, stringtab), + num + ), + ))), Some(field_type) => { idx.push(Index::Field(num)); Ok(((var, var_typ), (field_type, idx))) - }, + } } - }, + } parser::LExpr::IndexLExpr { span, lhs, index } => { - let ((var, var_typ), (idx_typ, mut idx)) - = process_lexpr(*lhs, num_dyn_const, lexer, stringtab, env, types)?; + let ((var, var_typ), (idx_typ, mut idx)) = + process_lexpr(*lhs, num_dyn_const, lexer, stringtab, env, types)?; let mut indices = vec![]; let mut errors = LinkedList::new(); @@ -1942,15 +2639,15 @@ fn process_lexpr(expr : parser::LExpr, num_dyn_const : usize, Ok(exp) => { let typ = exp.get_type(); if !types.unify_u64(typ) { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(idx_span, lexer), - "usize".to_string(), - unparse_type(types, typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(idx_span, lexer), + "usize".to_string(), + unparse_type(types, typ, stringtab), + )); } else { indices.push(exp); } - }, + } } } @@ -1959,11 +2656,13 @@ fn process_lexpr(expr : parser::LExpr, num_dyn_const : usize, } if !types.is_array(idx_typ) { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Array index does not apply to type {}", - unparse_type(types, idx_typ, stringtab)))))? + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Array index does not apply to type {}", + unparse_type(types, idx_typ, stringtab) + ), + )))? 
} let num_dims = types.get_num_dimensions(idx_typ).unwrap(); @@ -1974,110 +2673,134 @@ fn process_lexpr(expr : parser::LExpr, num_dyn_const : usize, format!("fewer array indices than dimensions, array has {} dimensions but using {} indices", num_dims, indices.len())))) } else if indices.len() > num_dims { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Too many array indices, array has {} dimensions but using {} indices", - num_dims, indices.len())))) + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Too many array indices, array has {} dimensions but using {} indices", + num_dims, + indices.len() + ), + ))) } else { idx.push(Index::Array(indices)); - Ok(((var, var_typ), - (types.get_element_type(idx_typ).unwrap(), idx))) + Ok(( + (var, var_typ), + (types.get_element_type(idx_typ).unwrap(), idx), + )) } - }, + } } } -fn process_expr_as_constant(expr : parser::Expr, num_dyn_const : usize, - lexer : &dyn NonStreamingLexer<DefaultLexerTypes<u32>>, - stringtab : &mut StringTable, env : &mut Env<usize, Entity>, - types : &mut TypeSolver) - -> Result<Constant, ErrorMessages> { - +fn process_expr_as_constant( + expr: parser::Expr, + num_dyn_const: usize, + lexer: &dyn NonStreamingLexer<DefaultLexerTypes<u32>>, + stringtab: &mut StringTable, + env: &mut Env<usize, Entity>, + types: &mut TypeSolver, +) -> Result<Constant, ErrorMessages> { match expr { parser::Expr::Variable { span, name } => { if name.len() != 1 { - Err(singleton_error( - ErrorMessage::NotImplemented( - span_to_loc(span, lexer), - "packages".to_string())))? + Err(singleton_error(ErrorMessage::NotImplemented( + span_to_loc(span, lexer), + "packages".to_string(), + )))? } let nm = intern_package_name(&name, lexer, stringtab)[0]; match env.lookup(&nm) { Some(Entity::Variable { .. }) => { panic!("Constant should not be evaluated in an environment with variables") - }, + } Some(Entity::DynConst { .. 
}) => { - panic!("Constant should not be evaluated in an environment with dynamic constants") - }, - Some(Entity::Constant { value }) => { - Ok(value.clone()) - }, - Some(Entity::Function { .. }) => - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("{} is a function, expected a value", - stringtab.lookup_id(nm).unwrap())))), - Some(Entity::Type { .. }) => - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("{} is a type, expected a value", - stringtab.lookup_id(nm).unwrap())))), - None => - Err(singleton_error( - ErrorMessage::UndefinedVariable( - span_to_loc(span, lexer), - stringtab.lookup_id(nm).unwrap()))) + panic!( + "Constant should not be evaluated in an environment with dynamic constants" + ) + } + Some(Entity::Constant { value }) => Ok(value.clone()), + Some(Entity::Function { .. }) => Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "{} is a function, expected a value", + stringtab.lookup_id(nm).unwrap() + ), + ))), + Some(Entity::Type { .. 
}) => Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "{} is a type, expected a value", + stringtab.lookup_id(nm).unwrap() + ), + ))), + None => Err(singleton_error(ErrorMessage::UndefinedVariable( + span_to_loc(span, lexer), + stringtab.lookup_id(nm).unwrap(), + ))), } - }, + } parser::Expr::Field { span, lhs, rhs } => { let field_name = intern_id(&rhs, lexer, stringtab); - let (lit, typ) = process_expr_as_constant(*lhs, num_dyn_const, lexer, stringtab, env, types)?; + let (lit, typ) = + process_expr_as_constant(*lhs, num_dyn_const, lexer, stringtab, env, types)?; match types.get_field(typ, field_name) { - None => Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Type {} does not possess field {}", - unparse_type(types, typ, stringtab), - stringtab.lookup_id(field_name).unwrap())))), + None => Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Type {} does not possess field {}", + unparse_type(types, typ, stringtab), + stringtab.lookup_id(field_name).unwrap() + ), + ))), Some((field_idx, _)) => { - let Literal::Tuple(fields) = lit else { panic!("Wrong constant constructor") }; + let Literal::Tuple(fields) = lit else { + panic!("Wrong constant constructor") + }; Ok(fields[field_idx].clone()) - }, + } } - }, + } parser::Expr::NumField { span, lhs, rhs } => { - let (lit, typ) = process_expr_as_constant(*lhs, num_dyn_const, lexer, stringtab, env, types)?; + let (lit, typ) = + process_expr_as_constant(*lhs, num_dyn_const, lexer, stringtab, env, types)?; - let num = lexer.span_str(rhs)[1..].parse::<usize>() - .expect("From lexical analysis"); + let num = lexer.span_str(rhs)[1..] 
+ .parse::<usize>() + .expect("From lexical analysis"); match types.get_index(typ, num) { - None => Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Type {} does not possess index {}", - unparse_type(types, typ, stringtab), - num)))), + None => Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Type {} does not possess index {}", + unparse_type(types, typ, stringtab), + num + ), + ))), Some(_) => { - let Literal::Tuple(fields) = lit else { panic!("Wrong constant constructor") }; + let Literal::Tuple(fields) = lit else { + panic!("Wrong constant constructor") + }; Ok(fields[num].clone()) - }, + } } - }, - parser::Expr::ArrIndex { span, .. } => { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Arrays are not allowed in constants")))) - }, - parser::Expr::Tuple { span : _, mut exprs } => { + } + parser::Expr::ArrIndex { span, .. } => Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!("Arrays are not allowed in constants"), + ))), + parser::Expr::Tuple { span: _, mut exprs } => { if exprs.len() == 1 { - return process_expr_as_constant(exprs.pop().unwrap(), num_dyn_const, lexer, stringtab, env, types); + return process_expr_as_constant( + exprs.pop().unwrap(), + num_dyn_const, + lexer, + stringtab, + env, + types, + ); } if exprs.len() == 0 { return Ok((Literal::Unit, types.new_primitive(types::Primitive::Unit))); @@ -2093,7 +2816,7 @@ fn process_expr_as_constant(expr : parser::Expr, num_dyn_const : usize, Ok((lit, typ)) => { typs.push(typ); vals.push((lit, typ)); - }, + } } } @@ -2102,71 +2825,71 @@ fn process_expr_as_constant(expr : parser::Expr, num_dyn_const : usize, } else { Ok((Literal::Tuple(vals), types.new_tuple(typs))) } - }, - parser::Expr::Struct { span, name, ty_args, exprs } => { + } + parser::Expr::Struct { + span, + name, + ty_args, + exprs, + } => { if name.len() != 1 { - Err(singleton_error( 
- ErrorMessage::NotImplemented( - span_to_loc(span, lexer), - "packages".to_string())))? + Err(singleton_error(ErrorMessage::NotImplemented( + span_to_loc(span, lexer), + "packages".to_string(), + )))? } let struct_nm = intern_package_name(&name, lexer, stringtab)[0]; match env.lookup(&struct_nm) { - Some(Entity::Variable { .. }) => { - Err(singleton_error( - ErrorMessage::KindError( - span_to_loc(span, lexer), - "struct name".to_string(), - "variable".to_string()))) - }, - Some(Entity::DynConst { .. }) => { - Err(singleton_error( - ErrorMessage::KindError( - span_to_loc(span, lexer), - "struct name".to_string(), - "dynamic constant".to_string()))) - }, - Some(Entity::Constant { .. }) => { - Err(singleton_error( - ErrorMessage::KindError( - span_to_loc(span, lexer), - "struct name".to_string(), - "constant".to_string()))) - }, - Some(Entity::Function { .. }) => { - Err(singleton_error( - ErrorMessage::KindError( - span_to_loc(span, lexer), - "struct name".to_string(), - "function".to_string()))) - }, - None => { - Err(singleton_error( - ErrorMessage::UndefinedVariable( - span_to_loc(span, lexer), - stringtab.lookup_id(struct_nm).unwrap()))) - }, - Some(Entity::Type { type_args : kinds, value : typ }) => { + Some(Entity::Variable { .. }) => Err(singleton_error(ErrorMessage::KindError( + span_to_loc(span, lexer), + "struct name".to_string(), + "variable".to_string(), + ))), + Some(Entity::DynConst { .. }) => Err(singleton_error(ErrorMessage::KindError( + span_to_loc(span, lexer), + "struct name".to_string(), + "dynamic constant".to_string(), + ))), + Some(Entity::Constant { .. }) => Err(singleton_error(ErrorMessage::KindError( + span_to_loc(span, lexer), + "struct name".to_string(), + "constant".to_string(), + ))), + Some(Entity::Function { .. 
}) => Err(singleton_error(ErrorMessage::KindError( + span_to_loc(span, lexer), + "struct name".to_string(), + "function".to_string(), + ))), + None => Err(singleton_error(ErrorMessage::UndefinedVariable( + span_to_loc(span, lexer), + stringtab.lookup_id(struct_nm).unwrap(), + ))), + Some(Entity::Type { + type_args: kinds, + value: typ, + }) => { if !types.is_struct(*typ) { - Err(singleton_error( - ErrorMessage::KindError( - span_to_loc(span, lexer), - "struct name".to_string(), - "non-struct type".to_string())))? + Err(singleton_error(ErrorMessage::KindError( + span_to_loc(span, lexer), + "struct name".to_string(), + "non-struct type".to_string(), + )))? } - let ty_args = - ty_args.unwrap_or_else( - || vec! [ parser::TypeExpr::WildcardType { span : span } - ; kinds.len() ]); + let ty_args = ty_args.unwrap_or_else(|| { + vec![parser::TypeExpr::WildcardType { span: span }; kinds.len()] + }); if kinds.len() != ty_args.len() { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Expected {} type arguments, provided {}", - kinds.len(), ty_args.len()))))? + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Expected {} type arguments, provided {}", + kinds.len(), + ty_args.len() + ), + )))? 
} // Verify that the type arguments we are provided are correct and collect the @@ -2180,48 +2903,60 @@ fn process_expr_as_constant(expr : parser::Expr, num_dyn_const : usize, match kind { parser::Kind::USize => { match process_type_expr_as_expr( - arg, num_dyn_const, lexer, stringtab, env, types) { + arg, + num_dyn_const, + lexer, + stringtab, + env, + types, + ) { Err(mut errs) => errors.append(&mut errs), Ok(val) => dyn_consts.push(val), } - }, + } _ => { match process_type_expr_as_type( - arg, num_dyn_const, lexer, stringtab, env, types, - true) { + arg, + num_dyn_const, + lexer, + stringtab, + env, + types, + true, + ) { Err(mut errs) => errors.append(&mut errs), Ok(typ) => { if types.unify_kind(typ, *kind) { type_vars.push(typ); } else { - errors.push_back( - ErrorMessage::KindError( - span_to_loc(arg_span, lexer), - kind.to_string(), - unparse_type(types, typ, stringtab))); + errors.push_back(ErrorMessage::KindError( + span_to_loc(arg_span, lexer), + kind.to_string(), + unparse_type(types, typ, stringtab), + )); } - }, + } } - }, + } } } - if !errors.is_empty() { return Err(errors); } + if !errors.is_empty() { + return Err(errors); + } - let struct_type = - if type_vars.len() == 0 && dyn_consts.len() == 0 { - *typ + let struct_type = if type_vars.len() == 0 && dyn_consts.len() == 0 { + *typ + } else { + if let Some(res) = types.instantiate(*typ, &type_vars, &dyn_consts) { + res } else { - if let Some(res) - = types.instantiate(*typ, &type_vars, &dyn_consts) { - res - } else { - return Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - "Failure in variable substitution".to_string()))); - } - }; + return Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + "Failure in variable substitution".to_string(), + ))); + } + }; // Check each field and construct the appropriate tuple // Note that fields that are omitted will be initialized with their type's @@ -2229,7 +2964,7 @@ fn process_expr_as_constant(expr : 
parser::Expr, num_dyn_const : usize, let num_fields = types.get_num_struct_fields(struct_type).unwrap(); // Values for the fields, in order - let mut values : Vec<Option<Constant>> = vec![None; num_fields]; + let mut values: Vec<Option<Constant>> = vec![None; num_fields]; for (field_name, expr) in exprs { let field_nm = intern_id(&field_name, lexer, stringtab); @@ -2237,232 +2972,268 @@ fn process_expr_as_constant(expr : parser::Expr, num_dyn_const : usize, match types.get_field(struct_type, field_nm) { None => { - errors.push_back( - ErrorMessage::SemanticError( - span_to_loc(field_name, lexer), - format!("Struct {} does not have field {}", - unparse_type(types, struct_type, stringtab), - stringtab.lookup_id(field_nm).unwrap()))); - }, + errors.push_back(ErrorMessage::SemanticError( + span_to_loc(field_name, lexer), + format!( + "Struct {} does not have field {}", + unparse_type(types, struct_type, stringtab), + stringtab.lookup_id(field_nm).unwrap() + ), + )); + } Some((idx, field_typ)) => { if values[idx].is_some() { - errors.push_back( - ErrorMessage::SemanticError( - span_to_loc(field_name, lexer), - format!("Field {} defined multiple times", - stringtab.lookup_id(field_nm).unwrap()))); + errors.push_back(ErrorMessage::SemanticError( + span_to_loc(field_name, lexer), + format!( + "Field {} defined multiple times", + stringtab.lookup_id(field_nm).unwrap() + ), + )); } else { - match process_expr_as_constant(expr, num_dyn_const, lexer, stringtab, env, types) { + match process_expr_as_constant( + expr, + num_dyn_const, + lexer, + stringtab, + env, + types, + ) { Err(mut errs) => errors.append(&mut errs), Ok((lit, typ)) => { if !types.unify(field_typ, typ) { // Set the value at this index even though there's // an error so that we also report if the field is // defined multiple times - values[idx] - = Some((Literal::Unit, - types.new_primitive(types::Primitive::Unit))); - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(expr_span, lexer), - 
unparse_type(types, field_typ, stringtab), - unparse_type(types, typ, stringtab))); + values[idx] = Some(( + Literal::Unit, + types.new_primitive(types::Primitive::Unit), + )); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(expr_span, lexer), + unparse_type(types, field_typ, stringtab), + unparse_type(types, typ, stringtab), + )); } else { values[idx] = Some((lit, typ)); } - }, + } } } - }, + } } } - if !errors.is_empty() { return Err(errors); } + if !errors.is_empty() { + return Err(errors); + } if values.iter().any(|n| n.is_none()) { - Err(singleton_error( - ErrorMessage::NotImplemented( - span_to_loc(span, lexer), - "constant struct with missing fields".to_string())))? + Err(singleton_error(ErrorMessage::NotImplemented( + span_to_loc(span, lexer), + "constant struct with missing fields".to_string(), + )))? } // Construct the list of field values, filling in zero values as needed - let filled_fields - = values.into_iter().map(|t| t.unwrap()).collect::<Vec<_>>(); + let filled_fields = values.into_iter().map(|t| t.unwrap()).collect::<Vec<_>>(); Ok((Literal::Tuple(filled_fields), struct_type)) - }, + } } - }, - parser::Expr::BoolLit { span : _, value } => { + } + parser::Expr::BoolLit { span: _, value } => { let bool_typ = types.new_primitive(types::Primitive::Bool); Ok((Literal::Bool(value), bool_typ)) - }, + } parser::Expr::IntLit { span, base } => { - let res = u64::from_str_radix(lexer.span_str(span), base.base()); + let res = u64::from_str_radix(&base.string(lexer, span), base.base()); assert!(res.is_ok(), "Internal Error: Int literal is not an integer"); let num_typ = types.new_of_kind(parser::Kind::Number, span_to_loc(span, lexer)); Ok((Literal::Integer(res.unwrap()), num_typ)) - }, + } parser::Expr::FloatLit { span } => { let res = lexer.span_str(span).parse::<f64>(); assert!(res.is_ok(), "Internal Error: Float literal is not a float"); let float_typ = types.new_of_kind(parser::Kind::Float, span_to_loc(span, lexer)); 
Ok((Literal::Float(res.unwrap()), float_typ)) - }, + } parser::Expr::UnaryExpr { span, op, expr } => { - let (expr_lit, expr_typ) - = process_expr_as_constant(*expr, num_dyn_const, lexer, stringtab, env, types)?; + let (expr_lit, expr_typ) = + process_expr_as_constant(*expr, num_dyn_const, lexer, stringtab, env, types)?; match op { parser::UnaryOp::Negation => { if !types.unify_kind(expr_typ, parser::Kind::Number) { - Err(singleton_error( - ErrorMessage::TypeError( - span_to_loc(span, lexer), - "number".to_string(), - unparse_type(types, expr_typ, stringtab)))) + Err(singleton_error(ErrorMessage::TypeError( + span_to_loc(span, lexer), + "number".to_string(), + unparse_type(types, expr_typ, stringtab), + ))) } else { - Ok((match expr_lit { - Literal::Integer(i) => Literal::Integer(- (i as i64) as u64), - Literal::Float(f) => Literal::Float(- f), + Ok(( + match expr_lit { + Literal::Integer(i) => Literal::Integer(-(i as i64) as u64), + Literal::Float(f) => Literal::Float(-f), _ => panic!("Incorrect literal constructor"), - }, expr_typ)) + }, + expr_typ, + )) } - }, + } parser::UnaryOp::BitwiseNot => { if !types.unify_kind(expr_typ, parser::Kind::Integer) { - Err(singleton_error( - ErrorMessage::TypeError( - span_to_loc(span, lexer), - "integer".to_string(), - unparse_type(types, expr_typ, stringtab)))) + Err(singleton_error(ErrorMessage::TypeError( + span_to_loc(span, lexer), + "integer".to_string(), + unparse_type(types, expr_typ, stringtab), + ))) } else { - let Literal::Integer(i) = expr_lit - else { panic!("Incorrect literal constructor"); }; - Ok((Literal::Integer(! 
i), expr_typ)) + let Literal::Integer(i) = expr_lit else { + panic!("Incorrect literal constructor"); + }; + Ok((Literal::Integer(!i), expr_typ)) } - }, + } parser::UnaryOp::LogicalNot => { if !types.unify_bool(expr_typ) { - Err(singleton_error( - ErrorMessage::TypeError( - span_to_loc(span, lexer), - "bool".to_string(), - unparse_type(types, expr_typ, stringtab)))) + Err(singleton_error(ErrorMessage::TypeError( + span_to_loc(span, lexer), + "bool".to_string(), + unparse_type(types, expr_typ, stringtab), + ))) } else { - let Literal::Bool(b) = expr_lit - else { panic!("Incorrect literal constructor"); }; - Ok((Literal::Bool(! b), expr_typ)) + let Literal::Bool(b) = expr_lit else { + panic!("Incorrect literal constructor"); + }; + Ok((Literal::Bool(!b), expr_typ)) } - }, + } } - }, - parser::Expr::BinaryExpr { span : _, op, lhs, rhs } => { + } + parser::Expr::BinaryExpr { + span: _, + op, + lhs, + rhs, + } => { let lhs_span = lhs.span(); let rhs_span = rhs.span(); - let lhs_res = process_expr_as_constant(*lhs, num_dyn_const, lexer, stringtab, env, types); - let rhs_res = process_expr_as_constant(*rhs, num_dyn_const, lexer, stringtab, env, types); + let lhs_res = + process_expr_as_constant(*lhs, num_dyn_const, lexer, stringtab, env, types); + let rhs_res = + process_expr_as_constant(*rhs, num_dyn_const, lexer, stringtab, env, types); - let ((lhs_lit, lhs_typ), (rhs_lit, rhs_typ)) - = append_errors2(lhs_res, rhs_res)?; + let ((lhs_lit, lhs_typ), (rhs_lit, rhs_typ)) = append_errors2(lhs_res, rhs_res)?; // First, type-check match op { // Equality and inequality work on any types parser::BinaryOp::Eq | parser::BinaryOp::Neq => { if !types.unify(lhs_typ, rhs_typ) { - return Err(singleton_error( - ErrorMessage::TypeError( - span_to_loc(rhs_span, lexer), - unparse_type(types, lhs_typ, stringtab), - unparse_type(types, rhs_typ, stringtab)))); + return Err(singleton_error(ErrorMessage::TypeError( + span_to_loc(rhs_span, lexer), + unparse_type(types, lhs_typ, stringtab), + 
unparse_type(types, rhs_typ, stringtab), + ))); } - }, + } // These work on any numbers - parser::BinaryOp::Add | parser::BinaryOp::Sub | parser::BinaryOp::Mul - | parser::BinaryOp::Div | parser::BinaryOp::Lt | parser::BinaryOp::Le - | parser::BinaryOp::Gt | parser::BinaryOp::Ge => { + parser::BinaryOp::Add + | parser::BinaryOp::Sub + | parser::BinaryOp::Mul + | parser::BinaryOp::Div + | parser::BinaryOp::Lt + | parser::BinaryOp::Le + | parser::BinaryOp::Gt + | parser::BinaryOp::Ge => { let mut errors = LinkedList::new(); let lhs_number = types.unify_kind(lhs_typ, parser::Kind::Number); let rhs_number = types.unify_kind(rhs_typ, parser::Kind::Number); let equal = types.unify(lhs_typ, rhs_typ); if lhs_number && !equal { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(rhs_span, lexer), - unparse_type(types, lhs_typ, stringtab), - unparse_type(types, rhs_typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(rhs_span, lexer), + unparse_type(types, lhs_typ, stringtab), + unparse_type(types, rhs_typ, stringtab), + )); } else if rhs_number && !equal { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(lhs_span, lexer), - unparse_type(types, rhs_typ, stringtab), - unparse_type(types, lhs_typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(lhs_span, lexer), + unparse_type(types, rhs_typ, stringtab), + unparse_type(types, lhs_typ, stringtab), + )); } else { // The types are equal or both are not numbers if !lhs_number { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(lhs_span, lexer), - "number".to_string(), - unparse_type(types, lhs_typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(lhs_span, lexer), + "number".to_string(), + unparse_type(types, lhs_typ, stringtab), + )); } if !rhs_number { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(rhs_span, lexer), - "number".to_string(), - unparse_type(types, rhs_typ, stringtab))); + 
errors.push_back(ErrorMessage::TypeError( + span_to_loc(rhs_span, lexer), + "number".to_string(), + unparse_type(types, rhs_typ, stringtab), + )); } } - if !errors.is_empty() { return Err(errors); } - }, - parser::BinaryOp::Mod | parser::BinaryOp::BitAnd | parser::BinaryOp::BitOr - | parser::BinaryOp::Xor | parser::BinaryOp::LShift | parser::BinaryOp::RShift - => { + if !errors.is_empty() { + return Err(errors); + } + } + parser::BinaryOp::Mod + | parser::BinaryOp::BitAnd + | parser::BinaryOp::BitOr + | parser::BinaryOp::Xor + | parser::BinaryOp::LShift + | parser::BinaryOp::RShift => { let mut errors = LinkedList::new(); let lhs_integer = types.unify_kind(lhs_typ, parser::Kind::Integer); let rhs_integer = types.unify_kind(rhs_typ, parser::Kind::Integer); let equal = types.unify(lhs_typ, rhs_typ); if lhs_integer && !equal { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(rhs_span, lexer), - unparse_type(types, lhs_typ, stringtab), - unparse_type(types, rhs_typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(rhs_span, lexer), + unparse_type(types, lhs_typ, stringtab), + unparse_type(types, rhs_typ, stringtab), + )); } else if rhs_integer && !equal { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(lhs_span, lexer), - unparse_type(types, rhs_typ, stringtab), - unparse_type(types, lhs_typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(lhs_span, lexer), + unparse_type(types, rhs_typ, stringtab), + unparse_type(types, lhs_typ, stringtab), + )); } else { // The types are equal or both are not integers if !lhs_integer { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(lhs_span, lexer), - "integer".to_string(), - unparse_type(types, lhs_typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(lhs_span, lexer), + "integer".to_string(), + unparse_type(types, lhs_typ, stringtab), + )); } if !rhs_integer { - errors.push_back( - ErrorMessage::TypeError( - 
span_to_loc(rhs_span, lexer), - "integer".to_string(), - unparse_type(types, rhs_typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(rhs_span, lexer), + "integer".to_string(), + unparse_type(types, rhs_typ, stringtab), + )); } } - if !errors.is_empty() { return Err(errors); } - }, + if !errors.is_empty() { + return Err(errors); + } + } parser::BinaryOp::LogAnd | parser::BinaryOp::LogOr => { let mut errors = LinkedList::new(); let lhs_bool = types.unify_bool(lhs_typ); @@ -2470,400 +3241,459 @@ fn process_expr_as_constant(expr : parser::Expr, num_dyn_const : usize, let equal = types.unify(lhs_typ, rhs_typ); if lhs_bool && !equal { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(rhs_span, lexer), - unparse_type(types, lhs_typ, stringtab), - unparse_type(types, rhs_typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(rhs_span, lexer), + unparse_type(types, lhs_typ, stringtab), + unparse_type(types, rhs_typ, stringtab), + )); } else if rhs_bool && !equal { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(lhs_span, lexer), - unparse_type(types, rhs_typ, stringtab), - unparse_type(types, lhs_typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(lhs_span, lexer), + unparse_type(types, rhs_typ, stringtab), + unparse_type(types, lhs_typ, stringtab), + )); } else { // The types are equal or both are not bools if !lhs_bool { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(lhs_span, lexer), - "bool".to_string(), - unparse_type(types, lhs_typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(lhs_span, lexer), + "bool".to_string(), + unparse_type(types, lhs_typ, stringtab), + )); } if !rhs_bool { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(rhs_span, lexer), - "bool".to_string(), - unparse_type(types, rhs_typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(rhs_span, lexer), + "bool".to_string(), + 
unparse_type(types, rhs_typ, stringtab), + )); } } - if !errors.is_empty() { return Err(errors); } - }, + if !errors.is_empty() { + return Err(errors); + } + } }; match op { - parser::BinaryOp::Add => { - match (lhs_lit, rhs_lit) { - (Literal::Integer(i), Literal::Integer(j)) - => Ok((Literal::Integer(i + j), lhs_typ)), - (Literal::Integer(i), Literal::Float(j)) - => Ok((Literal::Float((i as f64) + j), lhs_typ)), - (Literal::Float(i), Literal::Integer(j)) - => Ok((Literal::Float(i + (j as f64)), lhs_typ)), - (Literal::Float(i), Literal::Float(j)) - => Ok((Literal::Float(i + j), lhs_typ)), - _ => panic!("Incorrect literal constructor"), + parser::BinaryOp::Add => match (lhs_lit, rhs_lit) { + (Literal::Integer(i), Literal::Integer(j)) => { + Ok((Literal::Integer(i + j), lhs_typ)) + } + (Literal::Integer(i), Literal::Float(j)) => { + Ok((Literal::Float((i as f64) + j), lhs_typ)) + } + (Literal::Float(i), Literal::Integer(j)) => { + Ok((Literal::Float(i + (j as f64)), lhs_typ)) } + (Literal::Float(i), Literal::Float(j)) => Ok((Literal::Float(i + j), lhs_typ)), + _ => panic!("Incorrect literal constructor"), }, - parser::BinaryOp::Sub => { - match (lhs_lit, rhs_lit) { - (Literal::Integer(i), Literal::Integer(j)) - => Ok((Literal::Integer((i as i64 - j as i64) as u64), lhs_typ)), - (Literal::Integer(i), Literal::Float(j)) - => Ok((Literal::Float((i as f64) - j), lhs_typ)), - (Literal::Float(i), Literal::Integer(j)) - => Ok((Literal::Float(i - (j as f64)), lhs_typ)), - (Literal::Float(i), Literal::Float(j)) - => Ok((Literal::Float(i - j), lhs_typ)), - _ => panic!("Incorrect literal constructor"), + parser::BinaryOp::Sub => match (lhs_lit, rhs_lit) { + (Literal::Integer(i), Literal::Integer(j)) => { + Ok((Literal::Integer((i as i64 - j as i64) as u64), lhs_typ)) + } + (Literal::Integer(i), Literal::Float(j)) => { + Ok((Literal::Float((i as f64) - j), lhs_typ)) + } + (Literal::Float(i), Literal::Integer(j)) => { + Ok((Literal::Float(i - (j as f64)), lhs_typ)) } + 
(Literal::Float(i), Literal::Float(j)) => Ok((Literal::Float(i - j), lhs_typ)), + _ => panic!("Incorrect literal constructor"), }, - parser::BinaryOp::Mul => { - match (lhs_lit, rhs_lit) { - (Literal::Integer(i), Literal::Integer(j)) - => Ok((Literal::Integer(i * j), lhs_typ)), - (Literal::Integer(i), Literal::Float(j)) - => Ok((Literal::Float((i as f64) * j), lhs_typ)), - (Literal::Float(i), Literal::Integer(j)) - => Ok((Literal::Float(i * (j as f64)), lhs_typ)), - (Literal::Float(i), Literal::Float(j)) - => Ok((Literal::Float(i * j), lhs_typ)), - _ => panic!("Incorrect literal constructor"), + parser::BinaryOp::Mul => match (lhs_lit, rhs_lit) { + (Literal::Integer(i), Literal::Integer(j)) => { + Ok((Literal::Integer(i * j), lhs_typ)) + } + (Literal::Integer(i), Literal::Float(j)) => { + Ok((Literal::Float((i as f64) * j), lhs_typ)) + } + (Literal::Float(i), Literal::Integer(j)) => { + Ok((Literal::Float(i * (j as f64)), lhs_typ)) } + (Literal::Float(i), Literal::Float(j)) => Ok((Literal::Float(i * j), lhs_typ)), + _ => panic!("Incorrect literal constructor"), }, - parser::BinaryOp::Div => { - match (lhs_lit, rhs_lit) { - (Literal::Integer(i), Literal::Integer(j)) - => Ok((Literal::Integer(i / j), lhs_typ)), - (Literal::Integer(i), Literal::Float(j)) - => Ok((Literal::Float((i as f64) / j), lhs_typ)), - (Literal::Float(i), Literal::Integer(j)) - => Ok((Literal::Float(i / (j as f64)), lhs_typ)), - (Literal::Float(i), Literal::Float(j)) - => Ok((Literal::Float(i / j), lhs_typ)), - _ => panic!("Incorrect literal constructor"), + parser::BinaryOp::Div => match (lhs_lit, rhs_lit) { + (Literal::Integer(i), Literal::Integer(j)) => { + Ok((Literal::Integer(i / j), lhs_typ)) + } + (Literal::Integer(i), Literal::Float(j)) => { + Ok((Literal::Float((i as f64) / j), lhs_typ)) + } + (Literal::Float(i), Literal::Integer(j)) => { + Ok((Literal::Float(i / (j as f64)), lhs_typ)) } + (Literal::Float(i), Literal::Float(j)) => Ok((Literal::Float(i / j), lhs_typ)), + _ => 
panic!("Incorrect literal constructor"), }, - parser::BinaryOp::Mod => { - match (lhs_lit, rhs_lit) { - (Literal::Integer(i), Literal::Integer(j)) - => Ok((Literal::Integer(i % j), lhs_typ)), - _ => panic!("Incorrect literal constructor"), + parser::BinaryOp::Mod => match (lhs_lit, rhs_lit) { + (Literal::Integer(i), Literal::Integer(j)) => { + Ok((Literal::Integer(i % j), lhs_typ)) } + _ => panic!("Incorrect literal constructor"), }, - parser::BinaryOp::BitAnd => { - match (lhs_lit, rhs_lit) { - (Literal::Integer(i), Literal::Integer(j)) - => Ok((Literal::Integer(i & j), lhs_typ)), - _ => panic!("Incorrect literal constructor"), + parser::BinaryOp::BitAnd => match (lhs_lit, rhs_lit) { + (Literal::Integer(i), Literal::Integer(j)) => { + Ok((Literal::Integer(i & j), lhs_typ)) } + _ => panic!("Incorrect literal constructor"), }, - parser::BinaryOp::BitOr => { - match (lhs_lit, rhs_lit) { - (Literal::Integer(i), Literal::Integer(j)) - => Ok((Literal::Integer(i | j), lhs_typ)), - _ => panic!("Incorrect literal constructor"), + parser::BinaryOp::BitOr => match (lhs_lit, rhs_lit) { + (Literal::Integer(i), Literal::Integer(j)) => { + Ok((Literal::Integer(i | j), lhs_typ)) } + _ => panic!("Incorrect literal constructor"), }, - parser::BinaryOp::Xor => { - match (lhs_lit, rhs_lit) { - (Literal::Integer(i), Literal::Integer(j)) - => Ok((Literal::Integer(i ^ j), lhs_typ)), - _ => panic!("Incorrect literal constructor"), + parser::BinaryOp::Xor => match (lhs_lit, rhs_lit) { + (Literal::Integer(i), Literal::Integer(j)) => { + Ok((Literal::Integer(i ^ j), lhs_typ)) } + _ => panic!("Incorrect literal constructor"), }, - parser::BinaryOp::Lt => { + parser::BinaryOp::Lt => { let bool_typ = types.new_primitive(types::Primitive::Bool); match (lhs_lit, rhs_lit) { - (Literal::Integer(i), Literal::Integer(j)) - => Ok((Literal::Bool(i < j), bool_typ)), - (Literal::Integer(i), Literal::Float(j)) - => Ok((Literal::Bool(((i as f64)) < j), bool_typ)), - (Literal::Float(i), 
Literal::Integer(j)) - => Ok((Literal::Bool(i < (j as f64)), bool_typ)), - (Literal::Float(i), Literal::Float(j)) - => Ok((Literal::Bool(i < j), bool_typ)), + (Literal::Integer(i), Literal::Integer(j)) => { + Ok((Literal::Bool(i < j), bool_typ)) + } + (Literal::Integer(i), Literal::Float(j)) => { + Ok((Literal::Bool((i as f64) < j), bool_typ)) + } + (Literal::Float(i), Literal::Integer(j)) => { + Ok((Literal::Bool(i < (j as f64)), bool_typ)) + } + (Literal::Float(i), Literal::Float(j)) => { + Ok((Literal::Bool(i < j), bool_typ)) + } _ => panic!("Incorrect literal constructor"), } - }, - parser::BinaryOp::Le => { + } + parser::BinaryOp::Le => { let bool_typ = types.new_primitive(types::Primitive::Bool); match (lhs_lit, rhs_lit) { - (Literal::Integer(i), Literal::Integer(j)) - => Ok((Literal::Bool(i <= j), bool_typ)), - (Literal::Integer(i), Literal::Float(j)) - => Ok((Literal::Bool((i as f64) <= j), bool_typ)), - (Literal::Float(i), Literal::Integer(j)) - => Ok((Literal::Bool(i <= (j as f64)), bool_typ)), - (Literal::Float(i), Literal::Float(j)) - => Ok((Literal::Bool(i <= j), bool_typ)), + (Literal::Integer(i), Literal::Integer(j)) => { + Ok((Literal::Bool(i <= j), bool_typ)) + } + (Literal::Integer(i), Literal::Float(j)) => { + Ok((Literal::Bool((i as f64) <= j), bool_typ)) + } + (Literal::Float(i), Literal::Integer(j)) => { + Ok((Literal::Bool(i <= (j as f64)), bool_typ)) + } + (Literal::Float(i), Literal::Float(j)) => { + Ok((Literal::Bool(i <= j), bool_typ)) + } _ => panic!("Incorrect literal constructor"), } - }, - parser::BinaryOp::Gt => { + } + parser::BinaryOp::Gt => { let bool_typ = types.new_primitive(types::Primitive::Bool); match (lhs_lit, rhs_lit) { - (Literal::Integer(i), Literal::Integer(j)) - => Ok((Literal::Bool(i > j), bool_typ)), - (Literal::Integer(i), Literal::Float(j)) - => Ok((Literal::Bool((i as f64) > j), bool_typ)), - (Literal::Float(i), Literal::Integer(j)) - => Ok((Literal::Bool(i > (j as f64)), bool_typ)), - (Literal::Float(i), 
Literal::Float(j)) - => Ok((Literal::Bool(i > j), bool_typ)), + (Literal::Integer(i), Literal::Integer(j)) => { + Ok((Literal::Bool(i > j), bool_typ)) + } + (Literal::Integer(i), Literal::Float(j)) => { + Ok((Literal::Bool((i as f64) > j), bool_typ)) + } + (Literal::Float(i), Literal::Integer(j)) => { + Ok((Literal::Bool(i > (j as f64)), bool_typ)) + } + (Literal::Float(i), Literal::Float(j)) => { + Ok((Literal::Bool(i > j), bool_typ)) + } _ => panic!("Incorrect literal constructor"), } - }, - parser::BinaryOp::Ge => { + } + parser::BinaryOp::Ge => { let bool_typ = types.new_primitive(types::Primitive::Bool); match (lhs_lit, rhs_lit) { - (Literal::Integer(i), Literal::Integer(j)) - => Ok((Literal::Bool(i >= j), bool_typ)), - (Literal::Integer(i), Literal::Float(j)) - => Ok((Literal::Bool((i as f64) >= j), bool_typ)), - (Literal::Float(i), Literal::Integer(j)) - => Ok((Literal::Bool(i >= (j as f64)), bool_typ)), - (Literal::Float(i), Literal::Float(j)) - => Ok((Literal::Bool(i >= j), bool_typ)), + (Literal::Integer(i), Literal::Integer(j)) => { + Ok((Literal::Bool(i >= j), bool_typ)) + } + (Literal::Integer(i), Literal::Float(j)) => { + Ok((Literal::Bool((i as f64) >= j), bool_typ)) + } + (Literal::Float(i), Literal::Integer(j)) => { + Ok((Literal::Bool(i >= (j as f64)), bool_typ)) + } + (Literal::Float(i), Literal::Float(j)) => { + Ok((Literal::Bool(i >= j), bool_typ)) + } _ => panic!("Incorrect literal constructor"), } - }, - parser::BinaryOp::Eq => { + } + parser::BinaryOp::Eq => { let bool_typ = types.new_primitive(types::Primitive::Bool); match (lhs_lit, rhs_lit) { - (Literal::Integer(i), Literal::Integer(j)) - => Ok((Literal::Bool(i == j), bool_typ)), - (Literal::Integer(i), Literal::Float(j)) - => Ok((Literal::Bool((i as f64) == j), bool_typ)), - (Literal::Float(i), Literal::Integer(j)) - => Ok((Literal::Bool(i == (j as f64)), bool_typ)), - (Literal::Float(i), Literal::Float(j)) - => Ok((Literal::Bool(i == j), bool_typ)), - (lhs_lit, rhs_lit) - => 
Ok((Literal::Bool(lhs_lit == rhs_lit), bool_typ)), + (Literal::Integer(i), Literal::Integer(j)) => { + Ok((Literal::Bool(i == j), bool_typ)) + } + (Literal::Integer(i), Literal::Float(j)) => { + Ok((Literal::Bool((i as f64) == j), bool_typ)) + } + (Literal::Float(i), Literal::Integer(j)) => { + Ok((Literal::Bool(i == (j as f64)), bool_typ)) + } + (Literal::Float(i), Literal::Float(j)) => { + Ok((Literal::Bool(i == j), bool_typ)) + } + (lhs_lit, rhs_lit) => Ok((Literal::Bool(lhs_lit == rhs_lit), bool_typ)), } - }, - parser::BinaryOp::Neq => { + } + parser::BinaryOp::Neq => { let bool_typ = types.new_primitive(types::Primitive::Bool); match (lhs_lit, rhs_lit) { - (Literal::Integer(i), Literal::Integer(j)) - => Ok((Literal::Bool(i != j), bool_typ)), - (Literal::Integer(i), Literal::Float(j)) - => Ok((Literal::Bool((i as f64) != j), bool_typ)), - (Literal::Float(i), Literal::Integer(j)) - => Ok((Literal::Bool(i != (j as f64)), bool_typ)), - (Literal::Float(i), Literal::Float(j)) - => Ok((Literal::Bool(i != j), bool_typ)), - (lhs_lit, rhs_lit) - => Ok((Literal::Bool(lhs_lit != rhs_lit), bool_typ)), + (Literal::Integer(i), Literal::Integer(j)) => { + Ok((Literal::Bool(i != j), bool_typ)) + } + (Literal::Integer(i), Literal::Float(j)) => { + Ok((Literal::Bool((i as f64) != j), bool_typ)) + } + (Literal::Float(i), Literal::Integer(j)) => { + Ok((Literal::Bool(i != (j as f64)), bool_typ)) + } + (Literal::Float(i), Literal::Float(j)) => { + Ok((Literal::Bool(i != j), bool_typ)) + } + (lhs_lit, rhs_lit) => Ok((Literal::Bool(lhs_lit != rhs_lit), bool_typ)), } - }, - parser::BinaryOp::LShift => { - match (lhs_lit, rhs_lit) { - (Literal::Integer(i), Literal::Integer(j)) - => Ok((Literal::Integer(i << j), lhs_typ)), - _ => panic!("Incorrect literal constructor"), + } + parser::BinaryOp::LShift => match (lhs_lit, rhs_lit) { + (Literal::Integer(i), Literal::Integer(j)) => { + Ok((Literal::Integer(i << j), lhs_typ)) } + _ => panic!("Incorrect literal constructor"), }, - 
parser::BinaryOp::RShift => { - match (lhs_lit, rhs_lit) { - (Literal::Integer(i), Literal::Integer(j)) - => Ok((Literal::Integer(i >> j), lhs_typ)), - _ => panic!("Incorrect literal constructor"), + parser::BinaryOp::RShift => match (lhs_lit, rhs_lit) { + (Literal::Integer(i), Literal::Integer(j)) => { + Ok((Literal::Integer(i >> j), lhs_typ)) } + _ => panic!("Incorrect literal constructor"), }, - parser::BinaryOp::LogAnd => { - match (lhs_lit, rhs_lit) { - (Literal::Bool(i), Literal::Bool(j)) - => Ok((Literal::Bool(i && j), lhs_typ)), - _ => panic!("Incorrect literal constructor"), - } + parser::BinaryOp::LogAnd => match (lhs_lit, rhs_lit) { + (Literal::Bool(i), Literal::Bool(j)) => Ok((Literal::Bool(i && j), lhs_typ)), + _ => panic!("Incorrect literal constructor"), }, - parser::BinaryOp::LogOr => { - match (lhs_lit, rhs_lit) { - (Literal::Bool(i), Literal::Bool(j)) - => Ok((Literal::Bool(i || j), lhs_typ)), - _ => panic!("Incorrect literal constructor"), - } + parser::BinaryOp::LogOr => match (lhs_lit, rhs_lit) { + (Literal::Bool(i), Literal::Bool(j)) => Ok((Literal::Bool(i || j), lhs_typ)), + _ => panic!("Incorrect literal constructor"), }, } - }, + } parser::Expr::CastExpr { span, expr, typ } => { // Cast between numeric types - let expr_res = process_expr_as_constant(*expr, num_dyn_const, lexer, stringtab, env, types); + let expr_res = + process_expr_as_constant(*expr, num_dyn_const, lexer, stringtab, env, types); // Inferring the type of a cast seems weird, so not allowing let type_res = process_type(typ, num_dyn_const, lexer, stringtab, env, types, false); let ((expr_lit, expr_typ), to_typ) = append_errors2(expr_res, type_res)?; if !types.unify_kind(expr_typ, parser::Kind::Number) - || !types.unify_kind(to_typ, parser::Kind::Number) { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Can only cast between numeric types, cannot cast {} to {}", - unparse_type(types, expr_typ, stringtab), - unparse_type(types, 
to_typ, stringtab))))) + || !types.unify_kind(to_typ, parser::Kind::Number) + { + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Can only cast between numeric types, cannot cast {} to {}", + unparse_type(types, expr_typ, stringtab), + unparse_type(types, to_typ, stringtab) + ), + ))) } else { if types.unify_kind(to_typ, parser::Kind::Integer) { - Ok((match expr_lit { - Literal::Integer(i) => Literal::Integer(i), - Literal::Float(f) => Literal::Integer(f as u64), - _ => panic!("Incorrect literal constructor"), - }, to_typ)) + Ok(( + match expr_lit { + Literal::Integer(i) => Literal::Integer(i), + Literal::Float(f) => Literal::Integer(f as u64), + _ => panic!("Incorrect literal constructor"), + }, + to_typ, + )) } else { - assert!(types.unify_kind(to_typ, parser::Kind::Float), - "Casting to type which is neither integer or float"); - Ok((match expr_lit { - Literal::Integer(i) => Literal::Float(i as f64), - Literal::Float(f) => Literal::Float(f), - _ => panic!("Incorrect literal constructor"), - }, to_typ)) + assert!( + types.unify_kind(to_typ, parser::Kind::Float), + "Casting to type which is neither integer or float" + ); + Ok(( + match expr_lit { + Literal::Integer(i) => Literal::Float(i as f64), + Literal::Float(f) => Literal::Float(f), + _ => panic!("Incorrect literal constructor"), + }, + to_typ, + )) } } - }, - parser::Expr::CondExpr { span, cond, thn, els } => { + } + parser::Expr::CondExpr { + span, + cond, + thn, + els, + } => { let cond_span = cond.span(); - let cond_res = process_expr_as_constant(*cond, num_dyn_const, lexer, stringtab, env, types); - let thn_res = process_expr_as_constant(*thn, num_dyn_const, lexer, stringtab, env, types); - let els_res = process_expr_as_constant(*els, num_dyn_const, lexer, stringtab, env, types); + let cond_res = + process_expr_as_constant(*cond, num_dyn_const, lexer, stringtab, env, types); + let thn_res = + process_expr_as_constant(*thn, num_dyn_const, lexer, stringtab, env, 
types); + let els_res = + process_expr_as_constant(*els, num_dyn_const, lexer, stringtab, env, types); - let ((cond_lit, cond_typ), (thn_lit, thn_typ), (els_lit, els_typ)) - = append_errors3(cond_res, thn_res, els_res)?; + let ((cond_lit, cond_typ), (thn_lit, thn_typ), (els_lit, els_typ)) = + append_errors3(cond_res, thn_res, els_res)?; let mut errors = LinkedList::new(); if !types.unify_bool(cond_typ) { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(cond_span, lexer), - "bool".to_string(), - unparse_type(types, cond_typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(cond_span, lexer), + "bool".to_string(), + unparse_type(types, cond_typ, stringtab), + )); } if !types.unify(thn_typ, els_typ) { - errors.push_back( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Types of conditional branches do not match, have {} and {}", - unparse_type(types, thn_typ, stringtab), - unparse_type(types, els_typ, stringtab)))); + errors.push_back(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Types of conditional branches do not match, have {} and {}", + unparse_type(types, thn_typ, stringtab), + unparse_type(types, els_typ, stringtab) + ), + )); } if !errors.is_empty() { Err(errors) } else { - let Literal::Bool(condition) = cond_lit else { panic!("Incorrect literal constructor"); }; - if condition { Ok((thn_lit, thn_typ)) } else { Ok((els_lit, els_typ)) } + let Literal::Bool(condition) = cond_lit else { + panic!("Incorrect literal constructor"); + }; + if condition { + Ok((thn_lit, thn_typ)) + } else { + Ok((els_lit, els_typ)) + } } - }, - parser::Expr::CallExpr { span, name, ty_args, args } => { + } + parser::Expr::CallExpr { + span, + name, + ty_args, + args, + } => { // While calls cannot be evaluated as constants, enum values can be, so we need to // distinguish whether this is actually a call or the construction of some enum value if name.len() > 2 { - Err(singleton_error( - 
ErrorMessage::NotImplemented( - span_to_loc(span, lexer), - "packages".to_string())))? + Err(singleton_error(ErrorMessage::NotImplemented( + span_to_loc(span, lexer), + "packages".to_string(), + )))? } - + let nm = intern_package_name(&name, lexer, stringtab); match env.lookup(&nm[0]) { - Some(Entity::Variable { .. }) | Some(Entity::DynConst { .. }) - | Some(Entity::Constant { .. }) | Some(Entity::Function { .. }) - | None if name.len() != 1 => { - Err(singleton_error( - ErrorMessage::NotImplemented( - span_to_loc(span, lexer), - "packages".to_string()))) - }, - None => - Err(singleton_error( - ErrorMessage::UndefinedVariable( - span_to_loc(name[0], lexer), - stringtab.lookup_id(nm[0]).unwrap()))), - Some(Entity::Variable { .. }) => - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(name[0], lexer), - format!("{} is a variable, expected a function or union constructor", - stringtab.lookup_id(nm[0]).unwrap())))), - Some(Entity::DynConst { .. }) => - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(name[0], lexer), - format!("{} is a dynamic constant, expected a function or union constructor", - stringtab.lookup_id(nm[0]).unwrap())))), - Some(Entity::Constant { .. }) => - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(name[0], lexer), - format!("{} is a constant, expected a function or union constructor", - stringtab.lookup_id(nm[0]).unwrap())))), - Some(Entity::Function { .. }) => - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(name[0], lexer), - format!("Function calls cannot be evaluated as a constant")))), - Some(Entity::Type { type_args : kinds, value : typ }) => { + Some(Entity::Variable { .. }) + | Some(Entity::DynConst { .. }) + | Some(Entity::Constant { .. }) + | Some(Entity::Function { .. 
}) + | None + if name.len() != 1 => + { + Err(singleton_error(ErrorMessage::NotImplemented( + span_to_loc(span, lexer), + "packages".to_string(), + ))) + } + None => Err(singleton_error(ErrorMessage::UndefinedVariable( + span_to_loc(name[0], lexer), + stringtab.lookup_id(nm[0]).unwrap(), + ))), + Some(Entity::Variable { .. }) => Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(name[0], lexer), + format!( + "{} is a variable, expected a function or union constructor", + stringtab.lookup_id(nm[0]).unwrap() + ), + ))), + Some(Entity::DynConst { .. }) => Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(name[0], lexer), + format!( + "{} is a dynamic constant, expected a function or union constructor", + stringtab.lookup_id(nm[0]).unwrap() + ), + ))), + Some(Entity::Constant { .. }) => Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(name[0], lexer), + format!( + "{} is a constant, expected a function or union constructor", + stringtab.lookup_id(nm[0]).unwrap() + ), + ))), + Some(Entity::Function { .. }) => Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(name[0], lexer), + format!("Function calls cannot be evaluated as a constant"), + ))), + Some(Entity::Type { + type_args: kinds, + value: typ, + }) => { if !types.is_union(*typ) { if name.len() != 1 { - Err(singleton_error( - ErrorMessage::NotImplemented( - span_to_loc(span, lexer), - "packages".to_string())))? + Err(singleton_error(ErrorMessage::NotImplemented( + span_to_loc(span, lexer), + "packages".to_string(), + )))? } else { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(name[0], lexer), - format!("{} is a type, expected a function or union constructor", - stringtab.lookup_id(nm[0]).unwrap()))))? + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(name[0], lexer), + format!( + "{} is a type, expected a function or union constructor", + stringtab.lookup_id(nm[0]).unwrap() + ), + )))? 
} } if name.len() != 2 { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(name[0], lexer), - format!("Expected constructor name"))))? + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(name[0], lexer), + format!("Expected constructor name"), + )))? } if types.get_constructor_info(*typ, nm[1]).is_none() { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(name[1], lexer), - format!("{} is not a constructor of type {}", - stringtab.lookup_id(nm[1]).unwrap(), - unparse_type(types, *typ, stringtab)))))? + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(name[1], lexer), + format!( + "{} is not a constructor of type {}", + stringtab.lookup_id(nm[1]).unwrap(), + unparse_type(types, *typ, stringtab) + ), + )))? } - + // Now, we know that we are constructing some union, we need to verify that // the type arguments are appropriate - let ty_args = - ty_args.unwrap_or_else( - || vec! [ parser::TypeExpr::WildcardType { span : span } - ; kinds.len() ]); + let ty_args = ty_args.unwrap_or_else(|| { + vec![parser::TypeExpr::WildcardType { span: span }; kinds.len()] + }); if kinds.len() != ty_args.len() { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Expected {} type arguments, provided {}", - kinds.len(), ty_args.len()))))? + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Expected {} type arguments, provided {}", + kinds.len(), + ty_args.len() + ), + )))? 
} - + let mut type_vars = vec![]; let mut dyn_consts = vec![]; let mut errors = LinkedList::new(); @@ -2873,51 +3703,65 @@ fn process_expr_as_constant(expr : parser::Expr, num_dyn_const : usize, match kind { parser::Kind::USize => { match process_type_expr_as_expr( - arg, num_dyn_const, lexer, stringtab, env, types) { + arg, + num_dyn_const, + lexer, + stringtab, + env, + types, + ) { Err(mut errs) => errors.append(&mut errs), Ok(val) => dyn_consts.push(val), } - }, + } _ => { match process_type_expr_as_type( - arg, num_dyn_const, lexer, stringtab, env, types, - true) { + arg, + num_dyn_const, + lexer, + stringtab, + env, + types, + true, + ) { Err(mut errs) => errors.append(&mut errs), Ok(typ) => { if types.unify_kind(typ, *kind) { type_vars.push(typ); } else { - errors.push_back( - ErrorMessage::KindError( - span_to_loc(arg_span, lexer), - kind.to_string(), - unparse_type(types, typ, stringtab))); + errors.push_back(ErrorMessage::KindError( + span_to_loc(arg_span, lexer), + kind.to_string(), + unparse_type(types, typ, stringtab), + )); } - }, + } } - }, + } } } - if !errors.is_empty() { return Err(errors); } - - let union_type = - if type_vars.len() == 0 && dyn_consts.len() == 0 { - *typ + if !errors.is_empty() { + return Err(errors); + } + + let union_type = if type_vars.len() == 0 && dyn_consts.len() == 0 { + *typ + } else { + if let Some(res) = types.instantiate(*typ, &type_vars, &dyn_consts) { + res } else { - if let Some(res) - = types.instantiate(*typ, &type_vars, &dyn_consts) { - res - } else { - return Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - "Failure in variable substitution".to_string()))); - } - }; - let Some((constr_idx, constr_typ)) - = types.get_constructor_info(union_type, nm[1]) - else { panic!("From above"); }; + return Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + "Failure in variable substitution".to_string(), + ))); + } + }; + let Some((constr_idx, constr_typ)) = + 
types.get_constructor_info(union_type, nm[1]) + else { + panic!("From above"); + }; // Now, process the arguments to ensure they has the type needed by this // constructor @@ -2925,128 +3769,148 @@ fn process_expr_as_constant(expr : parser::Expr, num_dyn_const : usize, // a single tuple, reporting an error if inout is used anywhere for (is_inout, arg) in args.iter() { if *is_inout { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(arg.span(), lexer), - format!("Union constructors cannot be marked inout"))))? + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(arg.span(), lexer), + format!("Union constructors cannot be marked inout"), + )))? } } - let (body_lit, body_typ) - = process_expr_as_constant( - parser::Expr::Tuple { - span : span, - exprs : args.into_iter().map(|(_, a)| a).collect::<Vec<_>>() }, - num_dyn_const, lexer, stringtab, env, types)?; + let (body_lit, body_typ) = process_expr_as_constant( + parser::Expr::Tuple { + span: span, + exprs: args.into_iter().map(|(_, a)| a).collect::<Vec<_>>(), + }, + num_dyn_const, + lexer, + stringtab, + env, + types, + )?; if !types.unify(constr_typ, body_typ) { - Err(singleton_error( - ErrorMessage::TypeError( - span_to_loc(span, lexer), - unparse_type(types, constr_typ, stringtab), - unparse_type(types, body_typ, stringtab)))) + Err(singleton_error(ErrorMessage::TypeError( + span_to_loc(span, lexer), + unparse_type(types, constr_typ, stringtab), + unparse_type(types, body_typ, stringtab), + ))) } else { - Ok((Literal::Sum(constr_idx, Box::new((body_lit, body_typ))), - body_typ)) + Ok(( + Literal::Sum(constr_idx, Box::new((body_lit, body_typ))), + body_typ, + )) } - }, + } } - }, + } parser::Expr::IntrinsicExpr { span, .. 
} => { - Err(singleton_error( - ErrorMessage::NotImplemented( - span_to_loc(span, lexer), - "Intrinsics evaluated as constants".to_string()))) - }, + Err(singleton_error(ErrorMessage::NotImplemented( + span_to_loc(span, lexer), + "Intrinsics evaluated as constants".to_string(), + ))) + } } } -fn process_expr(expr : parser::Expr, num_dyn_const : usize, - lexer : &dyn NonStreamingLexer<DefaultLexerTypes<u32>>, - stringtab : &mut StringTable, env : &mut Env<usize, Entity>, - types : &mut TypeSolver) - -> Result<Expr, ErrorMessages> { - +fn process_expr( + expr: parser::Expr, + num_dyn_const: usize, + lexer: &dyn NonStreamingLexer<DefaultLexerTypes<u32>>, + stringtab: &mut StringTable, + env: &mut Env<usize, Entity>, + types: &mut TypeSolver, +) -> Result<Expr, ErrorMessages> { match expr { parser::Expr::Variable { span, name } => { if name.len() != 1 { - Err(singleton_error( - ErrorMessage::NotImplemented( - span_to_loc(span, lexer), - "packages".to_string())))? + Err(singleton_error(ErrorMessage::NotImplemented( + span_to_loc(span, lexer), + "packages".to_string(), + )))? } let nm = intern_package_name(&name, lexer, stringtab)[0]; match env.lookup(&nm) { - Some(Entity::Variable { variable, typ, .. }) => { - Ok(Expr::Variable { var : *variable, typ : *typ }) - }, + Some(Entity::Variable { variable, typ, .. }) => Ok(Expr::Variable { + var: *variable, + typ: *typ, + }), Some(Entity::DynConst { value }) => { let typ = types.new_primitive(types::Primitive::U64); - Ok(Expr::DynConst { val : value.clone(), typ : typ }) - }, - Some(Entity::Constant { value : (lit, typ) }) => { - Ok(Expr::Constant { val : (lit.clone(), *typ), typ : *typ }) - }, - Some(Entity::Function { .. }) => - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("{} is a function, expected a value", - stringtab.lookup_id(nm).unwrap())))), - Some(Entity::Type { .. 
}) => - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("{} is a type, expected a value", - stringtab.lookup_id(nm).unwrap())))), - None => - Err(singleton_error( - ErrorMessage::UndefinedVariable( - span_to_loc(span, lexer), - stringtab.lookup_id(nm).unwrap()))) + Ok(Expr::DynConst { + val: value.clone(), + typ: typ, + }) + } + Some(Entity::Constant { value: (lit, typ) }) => Ok(Expr::Constant { + val: (lit.clone(), *typ), + typ: *typ, + }), + Some(Entity::Function { .. }) => Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "{} is a function, expected a value", + stringtab.lookup_id(nm).unwrap() + ), + ))), + Some(Entity::Type { .. }) => Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "{} is a type, expected a value", + stringtab.lookup_id(nm).unwrap() + ), + ))), + None => Err(singleton_error(ErrorMessage::UndefinedVariable( + span_to_loc(span, lexer), + stringtab.lookup_id(nm).unwrap(), + ))), } - }, + } parser::Expr::Field { span, lhs, rhs } => { let field_name = intern_id(&rhs, lexer, stringtab); let exp = process_expr(*lhs, num_dyn_const, lexer, stringtab, env, types)?; let exp_typ = exp.get_type(); match types.get_field(exp_typ, field_name) { - None => Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Type {} does not possess field {}", - unparse_type(types, exp_typ, stringtab), - stringtab.lookup_id(field_name).unwrap())))), - Some((field_idx, field_type)) => - Ok(Expr::Read { - index : vec![Index::Field(field_idx)], - val : Box::new(exp), - typ : field_type }), + None => Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Type {} does not possess field {}", + unparse_type(types, exp_typ, stringtab), + stringtab.lookup_id(field_name).unwrap() + ), + ))), + Some((field_idx, field_type)) => Ok(Expr::Read { + index: vec![Index::Field(field_idx)], + val: 
Box::new(exp), + typ: field_type, + }), } - }, + } parser::Expr::NumField { span, lhs, rhs } => { let exp = process_expr(*lhs, num_dyn_const, lexer, stringtab, env, types)?; let exp_typ = exp.get_type(); - let num = lexer.span_str(rhs)[1..].parse::<usize>() - .expect("From lexical analysis"); + let num = lexer.span_str(rhs)[1..] + .parse::<usize>() + .expect("From lexical analysis"); match types.get_index(exp_typ, num) { - None => Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Type {} does not possess index {}", - unparse_type(types, exp_typ, stringtab), - num)))), - Some(field_type) => - Ok(Expr::Read { - index : vec![Index::Field(num)], - val : Box::new(exp), - typ : field_type }), + None => Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Type {} does not possess index {}", + unparse_type(types, exp_typ, stringtab), + num + ), + ))), + Some(field_type) => Ok(Expr::Read { + index: vec![Index::Field(num)], + val: Box::new(exp), + typ: field_type, + }), } - }, + } parser::Expr::ArrIndex { span, lhs, index } => { let exp = process_expr(*lhs, num_dyn_const, lexer, stringtab, env, types)?; let exp_typ = exp.get_type(); @@ -3060,15 +3924,15 @@ fn process_expr(expr : parser::Expr, num_dyn_const : usize, Ok(exp) => { let typ = exp.get_type(); if !types.unify_u64(typ) { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(idx_span, lexer), - "usize".to_string(), - unparse_type(types, typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(idx_span, lexer), + "usize".to_string(), + unparse_type(types, typ, stringtab), + )); } else { indices.push(exp); } - }, + } } } @@ -3077,11 +3941,13 @@ fn process_expr(expr : parser::Expr, num_dyn_const : usize, } if !types.is_array(exp_typ) { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Array index does not apply to type {}", - unparse_type(types, exp_typ, stringtab)))))? 
+ Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Array index does not apply to type {}", + unparse_type(types, exp_typ, stringtab) + ), + )))? } let num_dims = types.get_num_dimensions(exp_typ).unwrap(); @@ -3089,30 +3955,42 @@ fn process_expr(expr : parser::Expr, num_dyn_const : usize, Err(singleton_error( ErrorMessage::NotImplemented( span_to_loc(span, lexer), - format!("fewer array indices than dimensions, array has {} dimensions but using {} indices", - num_dims, indices.len())))) - } else if indices.len() > num_dims { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Too many array indices, array has {} dimensions but using {} indices", + format!("fewer array indices than dimensions, array has {} dimensions but using {} indices", num_dims, indices.len())))) + } else if indices.len() > num_dims { + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Too many array indices, array has {} dimensions but using {} indices", + num_dims, + indices.len() + ), + ))) } else { Ok(Expr::Read { - index : vec![Index::Array(indices)], - val : Box::new(exp), - typ : types.get_element_type(exp_typ).unwrap() }) + index: vec![Index::Array(indices)], + val: Box::new(exp), + typ: types.get_element_type(exp_typ).unwrap(), + }) } - }, - parser::Expr::Tuple { span : _, mut exprs } => { + } + parser::Expr::Tuple { span: _, mut exprs } => { if exprs.len() == 1 { - return process_expr(exprs.pop().unwrap(), num_dyn_const, lexer, stringtab, env, types); + return process_expr( + exprs.pop().unwrap(), + num_dyn_const, + lexer, + stringtab, + env, + types, + ); } if exprs.len() == 0 { let unit_type = types.new_primitive(types::Primitive::Unit); return Ok(Expr::Constant { - val : (Literal::Unit, unit_type), - typ : unit_type }); + val: (Literal::Unit, unit_type), + typ: unit_type, + }); } let mut vals = vec![]; @@ -3125,7 +4003,7 @@ fn process_expr(expr : parser::Expr, 
num_dyn_const : usize, Ok(val) => { typs.push(val.get_type()); vals.push(val); - }, + } } } @@ -3133,74 +4011,75 @@ fn process_expr(expr : parser::Expr, num_dyn_const : usize, Err(errors) } else { Ok(Expr::Tuple { - vals : vals, - typ : types.new_tuple(typs) }) + vals: vals, + typ: types.new_tuple(typs), + }) } - }, - parser::Expr::Struct { span, name, ty_args, exprs } => { + } + parser::Expr::Struct { + span, + name, + ty_args, + exprs, + } => { if name.len() != 1 { - Err(singleton_error( - ErrorMessage::NotImplemented( - span_to_loc(span, lexer), - "packages".to_string())))? + Err(singleton_error(ErrorMessage::NotImplemented( + span_to_loc(span, lexer), + "packages".to_string(), + )))? } let struct_nm = intern_package_name(&name, lexer, stringtab)[0]; match env.lookup(&struct_nm) { - Some(Entity::Variable { .. }) => { - Err(singleton_error( - ErrorMessage::KindError( - span_to_loc(span, lexer), - "struct name".to_string(), - "variable".to_string()))) - }, - Some(Entity::DynConst { .. }) => { - Err(singleton_error( - ErrorMessage::KindError( - span_to_loc(span, lexer), - "struct name".to_string(), - "dynamic constant".to_string()))) - }, - Some(Entity::Constant { .. }) => { - Err(singleton_error( - ErrorMessage::KindError( - span_to_loc(span, lexer), - "struct name".to_string(), - "constant".to_string()))) - }, - Some(Entity::Function { .. }) => { - Err(singleton_error( - ErrorMessage::KindError( - span_to_loc(span, lexer), - "struct name".to_string(), - "function".to_string()))) - }, - None => { - Err(singleton_error( - ErrorMessage::UndefinedVariable( - span_to_loc(span, lexer), - stringtab.lookup_id(struct_nm).unwrap()))) - }, - Some(Entity::Type { type_args : kinds, value : typ }) => { + Some(Entity::Variable { .. }) => Err(singleton_error(ErrorMessage::KindError( + span_to_loc(span, lexer), + "struct name".to_string(), + "variable".to_string(), + ))), + Some(Entity::DynConst { .. 
}) => Err(singleton_error(ErrorMessage::KindError( + span_to_loc(span, lexer), + "struct name".to_string(), + "dynamic constant".to_string(), + ))), + Some(Entity::Constant { .. }) => Err(singleton_error(ErrorMessage::KindError( + span_to_loc(span, lexer), + "struct name".to_string(), + "constant".to_string(), + ))), + Some(Entity::Function { .. }) => Err(singleton_error(ErrorMessage::KindError( + span_to_loc(span, lexer), + "struct name".to_string(), + "function".to_string(), + ))), + None => Err(singleton_error(ErrorMessage::UndefinedVariable( + span_to_loc(span, lexer), + stringtab.lookup_id(struct_nm).unwrap(), + ))), + Some(Entity::Type { + type_args: kinds, + value: typ, + }) => { if !types.is_struct(*typ) { - Err(singleton_error( - ErrorMessage::KindError( - span_to_loc(span, lexer), - "struct name".to_string(), - "non-struct type".to_string())))? + Err(singleton_error(ErrorMessage::KindError( + span_to_loc(span, lexer), + "struct name".to_string(), + "non-struct type".to_string(), + )))? } - let ty_args = - ty_args.unwrap_or_else( - || vec! [ parser::TypeExpr::WildcardType { span : span } - ; kinds.len() ]); + let ty_args = ty_args.unwrap_or_else(|| { + vec![parser::TypeExpr::WildcardType { span: span }; kinds.len()] + }); if kinds.len() != ty_args.len() { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Expected {} type arguments, provided {}", - kinds.len(), ty_args.len()))))? + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Expected {} type arguments, provided {}", + kinds.len(), + ty_args.len() + ), + )))? 
} // Verify that the type arguments we are provided are correct and collect the @@ -3214,48 +4093,60 @@ fn process_expr(expr : parser::Expr, num_dyn_const : usize, match kind { parser::Kind::USize => { match process_type_expr_as_expr( - arg, num_dyn_const, lexer, stringtab, env, types) { + arg, + num_dyn_const, + lexer, + stringtab, + env, + types, + ) { Err(mut errs) => errors.append(&mut errs), Ok(val) => dyn_consts.push(val), } - }, + } _ => { match process_type_expr_as_type( - arg, num_dyn_const, lexer, stringtab, env, types, - true) { + arg, + num_dyn_const, + lexer, + stringtab, + env, + types, + true, + ) { Err(mut errs) => errors.append(&mut errs), Ok(typ) => { if types.unify_kind(typ, *kind) { type_vars.push(typ); } else { - errors.push_back( - ErrorMessage::KindError( - span_to_loc(arg_span, lexer), - kind.to_string(), - unparse_type(types, typ, stringtab))); + errors.push_back(ErrorMessage::KindError( + span_to_loc(arg_span, lexer), + kind.to_string(), + unparse_type(types, typ, stringtab), + )); } - }, + } } - }, + } } } - if !errors.is_empty() { return Err(errors); } + if !errors.is_empty() { + return Err(errors); + } - let struct_type = - if type_vars.len() == 0 && dyn_consts.len() == 0 { - *typ + let struct_type = if type_vars.len() == 0 && dyn_consts.len() == 0 { + *typ + } else { + if let Some(res) = types.instantiate(*typ, &type_vars, &dyn_consts) { + res } else { - if let Some(res) - = types.instantiate(*typ, &type_vars, &dyn_consts) { - res - } else { - return Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - "Failure in variable substitution".to_string()))); - } - }; + return Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + "Failure in variable substitution".to_string(), + ))); + } + }; // Check each field and construct the appropriate tuple // Note that fields that are omitted will be initialized with their type's @@ -3263,7 +4154,7 @@ fn process_expr(expr : parser::Expr, 
num_dyn_const : usize, let num_fields = types.get_num_struct_fields(struct_type).unwrap(); // Values for the fields, in order - let mut values : Vec<Option<Expr>> = vec![None; num_fields]; + let mut values: Vec<Option<Expr>> = vec![None; num_fields]; for (field_name, expr) in exprs { let field_nm = intern_id(&field_name, lexer, stringtab); @@ -3271,22 +4162,33 @@ fn process_expr(expr : parser::Expr, num_dyn_const : usize, match types.get_field(struct_type, field_nm) { None => { - errors.push_back( - ErrorMessage::SemanticError( - span_to_loc(field_name, lexer), - format!("Struct {} does not have field {}", - unparse_type(types, struct_type, stringtab), - stringtab.lookup_id(field_nm).unwrap()))); - }, + errors.push_back(ErrorMessage::SemanticError( + span_to_loc(field_name, lexer), + format!( + "Struct {} does not have field {}", + unparse_type(types, struct_type, stringtab), + stringtab.lookup_id(field_nm).unwrap() + ), + )); + } Some((idx, field_typ)) => { if values[idx].is_some() { - errors.push_back( - ErrorMessage::SemanticError( - span_to_loc(field_name, lexer), - format!("Field {} defined multiple times", - stringtab.lookup_id(field_nm).unwrap()))); + errors.push_back(ErrorMessage::SemanticError( + span_to_loc(field_name, lexer), + format!( + "Field {} defined multiple times", + stringtab.lookup_id(field_nm).unwrap() + ), + )); } else { - match process_expr(expr, num_dyn_const, lexer, stringtab, env, types) { + match process_expr( + expr, + num_dyn_const, + lexer, + stringtab, + env, + types, + ) { Err(mut errs) => errors.append(&mut errs), Ok(val) => { let val_typ = val.get_type(); @@ -3294,61 +4196,71 @@ fn process_expr(expr : parser::Expr, num_dyn_const : usize, // Set the value at this index even though there's // an error so that we also report if the field is // defined multiple times - values[idx] = Some(Expr::Zero { typ : field_typ }); - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(expr_span, lexer), - unparse_type(types, field_typ, 
stringtab), - unparse_type(types, val_typ, stringtab))); + values[idx] = Some(Expr::Zero { typ: field_typ }); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(expr_span, lexer), + unparse_type(types, field_typ, stringtab), + unparse_type(types, val_typ, stringtab), + )); } else { values[idx] = Some(val); } - }, + } } } - }, + } } } - if !errors.is_empty() { return Err(errors); } + if !errors.is_empty() { + return Err(errors); + } // Construct the list of field values, filling in zero values as needed - let filled_fields - = values.into_iter().enumerate() - .map(|(i, t)| t.unwrap_or( - Expr::Zero { - typ : types.get_struct_field_type(struct_type, i).unwrap() })) - .collect::<Vec<_>>(); - - Ok(Expr::Tuple { vals : filled_fields, - typ : struct_type }) - }, + let filled_fields = values + .into_iter() + .enumerate() + .map(|(i, t)| { + t.unwrap_or(Expr::Zero { + typ: types.get_struct_field_type(struct_type, i).unwrap(), + }) + }) + .collect::<Vec<_>>(); + + Ok(Expr::Tuple { + vals: filled_fields, + typ: struct_type, + }) + } } - }, - parser::Expr::BoolLit { span : _, value } => { + } + parser::Expr::BoolLit { span: _, value } => { let bool_typ = types.new_primitive(types::Primitive::Bool); Ok(Expr::Constant { - val : (Literal::Bool(value), bool_typ), - typ : bool_typ }) - }, + val: (Literal::Bool(value), bool_typ), + typ: bool_typ, + }) + } parser::Expr::IntLit { span, base } => { - let res = u64::from_str_radix(lexer.span_str(span), base.base()); + let res = u64::from_str_radix(&base.string(lexer, span), base.base()); assert!(res.is_ok(), "Internal Error: Int literal is not an integer"); let num_typ = types.new_of_kind(parser::Kind::Number, span_to_loc(span, lexer)); Ok(Expr::Constant { - val : (Literal::Integer(res.unwrap()), num_typ), - typ : num_typ }) - }, + val: (Literal::Integer(res.unwrap()), num_typ), + typ: num_typ, + }) + } parser::Expr::FloatLit { span } => { let res = lexer.span_str(span).parse::<f64>(); assert!(res.is_ok(), "Internal Error: Float 
literal is not a float"); let float_typ = types.new_of_kind(parser::Kind::Float, span_to_loc(span, lexer)); Ok(Expr::Constant { - val : (Literal::Float(res.unwrap()), float_typ), - typ : float_typ }) - }, + val: (Literal::Float(res.unwrap()), float_typ), + typ: float_typ, + }) + } parser::Expr::UnaryExpr { span, op, expr } => { let expr_val = process_expr(*expr, num_dyn_const, lexer, stringtab, env, types)?; let expr_typ = expr_val.get_type(); @@ -3356,59 +4268,67 @@ fn process_expr(expr : parser::Expr, num_dyn_const : usize, match op { parser::UnaryOp::Negation => { if !types.unify_kind(expr_typ, parser::Kind::Number) { - Err(singleton_error( - ErrorMessage::TypeError( - span_to_loc(span, lexer), - "number".to_string(), - unparse_type(types, expr_typ, stringtab)))) + Err(singleton_error(ErrorMessage::TypeError( + span_to_loc(span, lexer), + "number".to_string(), + unparse_type(types, expr_typ, stringtab), + ))) } else { Ok(Expr::UnaryExp { - op : UnaryOp::Negation, - expr : Box::new(expr_val), - typ : expr_typ }) + op: UnaryOp::Negation, + expr: Box::new(expr_val), + typ: expr_typ, + }) } - }, + } parser::UnaryOp::BitwiseNot => { if !types.unify_kind(expr_typ, parser::Kind::Integer) { - Err(singleton_error( - ErrorMessage::TypeError( - span_to_loc(span, lexer), - "integer".to_string(), - unparse_type(types, expr_typ, stringtab)))) + Err(singleton_error(ErrorMessage::TypeError( + span_to_loc(span, lexer), + "integer".to_string(), + unparse_type(types, expr_typ, stringtab), + ))) } else { Ok(Expr::UnaryExp { - op : UnaryOp::BitwiseNot, - expr : Box::new(expr_val), - typ : expr_typ }) + op: UnaryOp::BitwiseNot, + expr: Box::new(expr_val), + typ: expr_typ, + }) } - }, + } parser::UnaryOp::LogicalNot => { if !types.unify_bool(expr_typ) { - Err(singleton_error( - ErrorMessage::TypeError( - span_to_loc(span, lexer), - "bool".to_string(), - unparse_type(types, expr_typ, stringtab)))) + Err(singleton_error(ErrorMessage::TypeError( + span_to_loc(span, lexer), + 
"bool".to_string(), + unparse_type(types, expr_typ, stringtab), + ))) } else { // ! x is translated into if x then false else true - let val_true = - Expr::Constant { - val : (Literal::Bool(true), expr_typ), - typ : expr_typ }; - let val_false = - Expr::Constant { - val : (Literal::Bool(false), expr_typ), - typ : expr_typ }; + let val_true = Expr::Constant { + val: (Literal::Bool(true), expr_typ), + typ: expr_typ, + }; + let val_false = Expr::Constant { + val: (Literal::Bool(false), expr_typ), + typ: expr_typ, + }; Ok(Expr::CondExpr { - cond : Box::new(expr_val), - thn : Box::new(val_false), - els : Box::new(val_true), - typ : expr_typ }) + cond: Box::new(expr_val), + thn: Box::new(val_false), + els: Box::new(val_true), + typ: expr_typ, + }) } - }, + } } - }, - parser::Expr::BinaryExpr { span : _, op, lhs, rhs } => { + } + parser::Expr::BinaryExpr { + span: _, + op, + lhs, + rhs, + } => { let lhs_span = lhs.span(); let rhs_span = rhs.span(); @@ -3424,95 +4344,107 @@ fn process_expr(expr : parser::Expr, num_dyn_const : usize, // Equality and inequality work on any types parser::BinaryOp::Eq | parser::BinaryOp::Neq => { if !types.unify(lhs_typ, rhs_typ) { - return Err(singleton_error( - ErrorMessage::TypeError( - span_to_loc(rhs_span, lexer), - unparse_type(types, lhs_typ, stringtab), - unparse_type(types, rhs_typ, stringtab)))); + return Err(singleton_error(ErrorMessage::TypeError( + span_to_loc(rhs_span, lexer), + unparse_type(types, lhs_typ, stringtab), + unparse_type(types, rhs_typ, stringtab), + ))); } - }, + } // These work on any numbers - parser::BinaryOp::Add | parser::BinaryOp::Sub | parser::BinaryOp::Mul - | parser::BinaryOp::Div | parser::BinaryOp::Lt | parser::BinaryOp::Le - | parser::BinaryOp::Gt | parser::BinaryOp::Ge => { + parser::BinaryOp::Add + | parser::BinaryOp::Sub + | parser::BinaryOp::Mul + | parser::BinaryOp::Div + | parser::BinaryOp::Lt + | parser::BinaryOp::Le + | parser::BinaryOp::Gt + | parser::BinaryOp::Ge => { let mut errors = 
LinkedList::new(); let lhs_number = types.unify_kind(lhs_typ, parser::Kind::Number); let rhs_number = types.unify_kind(rhs_typ, parser::Kind::Number); let equal = types.unify(lhs_typ, rhs_typ); if lhs_number && !equal { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(rhs_span, lexer), - unparse_type(types, lhs_typ, stringtab), - unparse_type(types, rhs_typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(rhs_span, lexer), + unparse_type(types, lhs_typ, stringtab), + unparse_type(types, rhs_typ, stringtab), + )); } else if rhs_number && !equal { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(lhs_span, lexer), - unparse_type(types, rhs_typ, stringtab), - unparse_type(types, lhs_typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(lhs_span, lexer), + unparse_type(types, rhs_typ, stringtab), + unparse_type(types, lhs_typ, stringtab), + )); } else { // The types are equal or both are not numbers if !lhs_number { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(lhs_span, lexer), - "number".to_string(), - unparse_type(types, lhs_typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(lhs_span, lexer), + "number".to_string(), + unparse_type(types, lhs_typ, stringtab), + )); } if !rhs_number { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(rhs_span, lexer), - "number".to_string(), - unparse_type(types, rhs_typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(rhs_span, lexer), + "number".to_string(), + unparse_type(types, rhs_typ, stringtab), + )); } } - if !errors.is_empty() { return Err(errors); } - }, + if !errors.is_empty() { + return Err(errors); + } + } // These work on integer inputs - parser::BinaryOp::Mod | parser::BinaryOp::BitAnd | parser::BinaryOp::BitOr - | parser::BinaryOp::Xor | parser::BinaryOp::LShift | parser::BinaryOp::RShift - => { + parser::BinaryOp::Mod + | parser::BinaryOp::BitAnd + | parser::BinaryOp::BitOr 
+ | parser::BinaryOp::Xor + | parser::BinaryOp::LShift + | parser::BinaryOp::RShift => { let mut errors = LinkedList::new(); let lhs_integer = types.unify_kind(lhs_typ, parser::Kind::Integer); let rhs_integer = types.unify_kind(rhs_typ, parser::Kind::Integer); let equal = types.unify(lhs_typ, rhs_typ); if lhs_integer && !equal { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(rhs_span, lexer), - unparse_type(types, lhs_typ, stringtab), - unparse_type(types, rhs_typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(rhs_span, lexer), + unparse_type(types, lhs_typ, stringtab), + unparse_type(types, rhs_typ, stringtab), + )); } else if rhs_integer && !equal { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(lhs_span, lexer), - unparse_type(types, rhs_typ, stringtab), - unparse_type(types, lhs_typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(lhs_span, lexer), + unparse_type(types, rhs_typ, stringtab), + unparse_type(types, lhs_typ, stringtab), + )); } else { // The types are equal or both are not integers if !lhs_integer { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(lhs_span, lexer), - "integer".to_string(), - unparse_type(types, lhs_typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(lhs_span, lexer), + "integer".to_string(), + unparse_type(types, lhs_typ, stringtab), + )); } if !rhs_integer { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(rhs_span, lexer), - "integer".to_string(), - unparse_type(types, rhs_typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(rhs_span, lexer), + "integer".to_string(), + unparse_type(types, rhs_typ, stringtab), + )); } } - if !errors.is_empty() { return Err(errors); } - }, + if !errors.is_empty() { + return Err(errors); + } + } // These work on boolean inputs parser::BinaryOp::LogAnd | parser::BinaryOp::LogOr => { let mut errors = LinkedList::new(); @@ -3521,37 +4453,39 @@ fn 
process_expr(expr : parser::Expr, num_dyn_const : usize, let equal = types.unify(lhs_typ, rhs_typ); if lhs_bool && !equal { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(rhs_span, lexer), - unparse_type(types, lhs_typ, stringtab), - unparse_type(types, rhs_typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(rhs_span, lexer), + unparse_type(types, lhs_typ, stringtab), + unparse_type(types, rhs_typ, stringtab), + )); } else if rhs_bool && !equal { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(lhs_span, lexer), - unparse_type(types, rhs_typ, stringtab), - unparse_type(types, lhs_typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(lhs_span, lexer), + unparse_type(types, rhs_typ, stringtab), + unparse_type(types, lhs_typ, stringtab), + )); } else { // The types are equal or both are not bools if !lhs_bool { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(lhs_span, lexer), - "bool".to_string(), - unparse_type(types, lhs_typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(lhs_span, lexer), + "bool".to_string(), + unparse_type(types, lhs_typ, stringtab), + )); } if !rhs_bool { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(rhs_span, lexer), - "bool".to_string(), - unparse_type(types, rhs_typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(rhs_span, lexer), + "bool".to_string(), + unparse_type(types, rhs_typ, stringtab), + )); } } - if !errors.is_empty() { return Err(errors); } - }, + if !errors.is_empty() { + return Err(errors); + } + } }; match op { @@ -3559,47 +4493,50 @@ fn process_expr(expr : parser::Expr, num_dyn_const : usize, // x && y = if x then y else false // x || y = if x then true else y parser::BinaryOp::LogAnd => { - let false_val = - Expr::Constant { - val : (Literal::Bool(false), lhs_typ), - typ : lhs_typ }; + let false_val = Expr::Constant { + val: (Literal::Bool(false), lhs_typ), + typ: lhs_typ, + }; 
Ok(Expr::CondExpr { - cond : Box::new(lhs_val), - thn : Box::new(rhs_val), - els : Box::new(false_val), - typ : lhs_typ }) - }, + cond: Box::new(lhs_val), + thn: Box::new(rhs_val), + els: Box::new(false_val), + typ: lhs_typ, + }) + } parser::BinaryOp::LogOr => { - let true_val = - Expr::Constant { - val : (Literal::Bool(true), lhs_typ), - typ : lhs_typ }; + let true_val = Expr::Constant { + val: (Literal::Bool(true), lhs_typ), + typ: lhs_typ, + }; Ok(Expr::CondExpr { - cond : Box::new(lhs_val), - thn : Box::new(true_val), - els : Box::new(rhs_val), - typ : lhs_typ }) - }, + cond: Box::new(lhs_val), + thn: Box::new(true_val), + els: Box::new(rhs_val), + typ: lhs_typ, + }) + } // For comparison operators, the resulting type is a boolean, while for all other // operations the result is the same as the two operands - parser::BinaryOp::Lt | parser::BinaryOp::Le - | parser::BinaryOp::Gt | parser::BinaryOp::Ge - | parser::BinaryOp::Eq | parser::BinaryOp::Neq => { - Ok(Expr::BinaryExp { - op : convert_binary_op(op), - lhs : Box::new(lhs_val), - rhs : Box::new(rhs_val), - typ : types.new_primitive(types::Primitive::Bool) }) - }, - _ => { - Ok(Expr::BinaryExp { - op : convert_binary_op(op), - lhs : Box::new(lhs_val), - rhs : Box::new(rhs_val), - typ : lhs_typ }) - }, + parser::BinaryOp::Lt + | parser::BinaryOp::Le + | parser::BinaryOp::Gt + | parser::BinaryOp::Ge + | parser::BinaryOp::Eq + | parser::BinaryOp::Neq => Ok(Expr::BinaryExp { + op: convert_binary_op(op), + lhs: Box::new(lhs_val), + rhs: Box::new(rhs_val), + typ: types.new_primitive(types::Primitive::Bool), + }), + _ => Ok(Expr::BinaryExp { + op: convert_binary_op(op), + lhs: Box::new(lhs_val), + rhs: Box::new(rhs_val), + typ: lhs_typ, + }), } - }, + } parser::Expr::CastExpr { span, expr, typ } => { // For the moment at least, casting is only supported between numeric types, and all // numeric types can be cast to each other @@ -3611,153 +4548,185 @@ fn process_expr(expr : parser::Expr, num_dyn_const : usize, let 
expr_typ = expr_val.get_type(); if !types.unify_kind(expr_typ, parser::Kind::Number) - || !types.unify_kind(to_typ, parser::Kind::Number) { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Can only cast between numeric types, cannot cast {} to {}", - unparse_type(types, expr_typ, stringtab), - unparse_type(types, to_typ, stringtab))))) + || !types.unify_kind(to_typ, parser::Kind::Number) + { + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Can only cast between numeric types, cannot cast {} to {}", + unparse_type(types, expr_typ, stringtab), + unparse_type(types, to_typ, stringtab) + ), + ))) } else { - Ok(Expr::CastExpr { expr : Box::new(expr_val), - typ : to_typ }) + Ok(Expr::CastExpr { + expr: Box::new(expr_val), + typ: to_typ, + }) } - }, - parser::Expr::CondExpr { span, cond, thn, els } => { + } + parser::Expr::CondExpr { + span, + cond, + thn, + els, + } => { let cond_span = cond.span(); let cond_res = process_expr(*cond, num_dyn_const, lexer, stringtab, env, types); - let thn_res = process_expr(*thn, num_dyn_const, lexer, stringtab, env, types); - let els_res = process_expr(*els, num_dyn_const, lexer, stringtab, env, types); + let thn_res = process_expr(*thn, num_dyn_const, lexer, stringtab, env, types); + let els_res = process_expr(*els, num_dyn_const, lexer, stringtab, env, types); - let (cond_val, thn_val, els_val) - = append_errors3(cond_res, thn_res, els_res)?; + let (cond_val, thn_val, els_val) = append_errors3(cond_res, thn_res, els_res)?; let cond_typ = cond_val.get_type(); - let thn_typ = thn_val.get_type(); - let els_typ = els_val.get_type(); + let thn_typ = thn_val.get_type(); + let els_typ = els_val.get_type(); let mut errors = LinkedList::new(); if !types.unify_bool(cond_typ) { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(cond_span, lexer), - "bool".to_string(), - unparse_type(types, cond_typ, stringtab))); + 
errors.push_back(ErrorMessage::TypeError( + span_to_loc(cond_span, lexer), + "bool".to_string(), + unparse_type(types, cond_typ, stringtab), + )); } if !types.unify(thn_typ, els_typ) { - errors.push_back( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Types of conditional branches do not match, have {} and {}", - unparse_type(types, thn_typ, stringtab), - unparse_type(types, els_typ, stringtab)))); + errors.push_back(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Types of conditional branches do not match, have {} and {}", + unparse_type(types, thn_typ, stringtab), + unparse_type(types, els_typ, stringtab) + ), + )); } if !errors.is_empty() { Err(errors) } else { Ok(Expr::CondExpr { - cond : Box::new(cond_val), - thn : Box::new(thn_val), - els : Box::new(els_val), - typ : thn_typ }) + cond: Box::new(cond_val), + thn: Box::new(thn_val), + els: Box::new(els_val), + typ: thn_typ, + }) } - }, - parser::Expr::CallExpr { span, name, ty_args, args } => { + } + parser::Expr::CallExpr { + span, + name, + ty_args, + args, + } => { // In the AST from the parser we have no way to distinguish between function calls and // union construction. We have to identify which case we're in here. We do this by // identifying whether the name (looking for the moment at just the first part of the // name) and determining whether it's a type or a function. Obviously we then report // errors if there are additional parts of the name if name.len() > 2 { - Err(singleton_error( - ErrorMessage::NotImplemented( - span_to_loc(span, lexer), - "packages".to_string())))? + Err(singleton_error(ErrorMessage::NotImplemented( + span_to_loc(span, lexer), + "packages".to_string(), + )))? } let nm = intern_package_name(&name, lexer, stringtab); match env.lookup(&nm[0]) { - Some(Entity::Variable { .. }) | Some(Entity::DynConst { .. }) - | Some(Entity::Constant { .. }) | Some(Entity::Function { .. 
}) - | None if name.len() != 1 => { - Err(singleton_error( - ErrorMessage::NotImplemented( - span_to_loc(span, lexer), - "packages".to_string()))) - }, - None => - Err(singleton_error( - ErrorMessage::UndefinedVariable( - span_to_loc(name[0], lexer), - stringtab.lookup_id(nm[0]).unwrap()))), - Some(Entity::Variable { .. }) => - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(name[0], lexer), - format!("{} is a variable, expected a function or union constructor", - stringtab.lookup_id(nm[0]).unwrap())))), - Some(Entity::DynConst { .. }) => - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(name[0], lexer), - format!("{} is a dynamic constant, expected a function or union constructor", - stringtab.lookup_id(nm[0]).unwrap())))), - Some(Entity::Constant { .. }) => - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(name[0], lexer), - format!("{} is a constant, expected a function or union constructor", - stringtab.lookup_id(nm[0]).unwrap())))), - Some(Entity::Type { type_args : kinds, value : typ }) => { + Some(Entity::Variable { .. }) + | Some(Entity::DynConst { .. }) + | Some(Entity::Constant { .. }) + | Some(Entity::Function { .. }) + | None + if name.len() != 1 => + { + Err(singleton_error(ErrorMessage::NotImplemented( + span_to_loc(span, lexer), + "packages".to_string(), + ))) + } + None => Err(singleton_error(ErrorMessage::UndefinedVariable( + span_to_loc(name[0], lexer), + stringtab.lookup_id(nm[0]).unwrap(), + ))), + Some(Entity::Variable { .. }) => Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(name[0], lexer), + format!( + "{} is a variable, expected a function or union constructor", + stringtab.lookup_id(nm[0]).unwrap() + ), + ))), + Some(Entity::DynConst { .. 
}) => Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(name[0], lexer), + format!( + "{} is a dynamic constant, expected a function or union constructor", + stringtab.lookup_id(nm[0]).unwrap() + ), + ))), + Some(Entity::Constant { .. }) => Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(name[0], lexer), + format!( + "{} is a constant, expected a function or union constructor", + stringtab.lookup_id(nm[0]).unwrap() + ), + ))), + Some(Entity::Type { + type_args: kinds, + value: typ, + }) => { if !types.is_union(*typ) { if name.len() != 1 { - Err(singleton_error( - ErrorMessage::NotImplemented( - span_to_loc(span, lexer), - "packages".to_string())))? + Err(singleton_error(ErrorMessage::NotImplemented( + span_to_loc(span, lexer), + "packages".to_string(), + )))? } else { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(name[0], lexer), - format!("{} is a type, expected a function or union constructor", - stringtab.lookup_id(nm[0]).unwrap()))))? + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(name[0], lexer), + format!( + "{} is a type, expected a function or union constructor", + stringtab.lookup_id(nm[0]).unwrap() + ), + )))? } } if name.len() != 2 { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(name[0], lexer), - format!("Expected constructor name"))))? + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(name[0], lexer), + format!("Expected constructor name"), + )))? } if types.get_constructor_info(*typ, nm[1]).is_none() { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(name[1], lexer), - format!("{} is not a constructor of type {}", - stringtab.lookup_id(nm[1]).unwrap(), - unparse_type(types, *typ, stringtab)))))? + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(name[1], lexer), + format!( + "{} is not a constructor of type {}", + stringtab.lookup_id(nm[1]).unwrap(), + unparse_type(types, *typ, stringtab) + ), + )))? 
} // Now, we know that we are constructing some union, we need to verify that // the type arguments are appropriate - let ty_args = - ty_args.unwrap_or_else( - || vec! [ parser::TypeExpr::WildcardType { span : span } - ; kinds.len() ]); + let ty_args = ty_args.unwrap_or_else(|| { + vec![parser::TypeExpr::WildcardType { span: span }; kinds.len()] + }); if kinds.len() != ty_args.len() { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Expected {} type arguments, provided {}", - kinds.len(), ty_args.len()))))? + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Expected {} type arguments, provided {}", + kinds.len(), + ty_args.len() + ), + )))? } - + let mut type_vars = vec![]; let mut dyn_consts = vec![]; let mut errors = LinkedList::new(); @@ -3767,51 +4736,65 @@ fn process_expr(expr : parser::Expr, num_dyn_const : usize, match kind { parser::Kind::USize => { match process_type_expr_as_expr( - arg, num_dyn_const, lexer, stringtab, env, types) { + arg, + num_dyn_const, + lexer, + stringtab, + env, + types, + ) { Err(mut errs) => errors.append(&mut errs), Ok(val) => dyn_consts.push(val), } - }, + } _ => { match process_type_expr_as_type( - arg, num_dyn_const, lexer, stringtab, env, types, - true) { + arg, + num_dyn_const, + lexer, + stringtab, + env, + types, + true, + ) { Err(mut errs) => errors.append(&mut errs), Ok(typ) => { if types.unify_kind(typ, *kind) { type_vars.push(typ); } else { - errors.push_back( - ErrorMessage::KindError( - span_to_loc(arg_span, lexer), - kind.to_string(), - unparse_type(types, typ, stringtab))); + errors.push_back(ErrorMessage::KindError( + span_to_loc(arg_span, lexer), + kind.to_string(), + unparse_type(types, typ, stringtab), + )); } - }, + } } - }, + } } } - if !errors.is_empty() { return Err(errors); } + if !errors.is_empty() { + return Err(errors); + } - let union_type = - if type_vars.len() == 0 && dyn_consts.len() == 0 { - *typ + let union_type 
= if type_vars.len() == 0 && dyn_consts.len() == 0 { + *typ + } else { + if let Some(res) = types.instantiate(*typ, &type_vars, &dyn_consts) { + res } else { - if let Some(res) - = types.instantiate(*typ, &type_vars, &dyn_consts) { - res - } else { - return Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - "Failure in variable substitution".to_string()))); - } - }; - let Some((constr_idx, constr_typ)) - = types.get_constructor_info(union_type, nm[1]) - else { panic!("From above"); }; + return Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + "Failure in variable substitution".to_string(), + ))); + } + }; + let Some((constr_idx, constr_typ)) = + types.get_constructor_info(union_type, nm[1]) + else { + panic!("From above"); + }; // Now, process the arguments to ensure they has the type needed by this // constructor @@ -3819,51 +4802,64 @@ fn process_expr(expr : parser::Expr, num_dyn_const : usize, // a single tuple, reporting an error if inout is used anywhere for (is_inout, arg) in args.iter() { if *is_inout { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(arg.span(), lexer), - format!("Union constructors cannot be marked inout"))))? + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(arg.span(), lexer), + format!("Union constructors cannot be marked inout"), + )))? 
} } let body = process_expr( - parser::Expr::Tuple { - span : span, - exprs : args.into_iter().map(|(_, a)| a).collect::<Vec<_>>() }, - num_dyn_const, lexer, stringtab, env, types)?; + parser::Expr::Tuple { + span: span, + exprs: args.into_iter().map(|(_, a)| a).collect::<Vec<_>>(), + }, + num_dyn_const, + lexer, + stringtab, + env, + types, + )?; let body_typ = body.get_type(); if !types.unify(constr_typ, body_typ) { - Err(singleton_error( - ErrorMessage::TypeError( - span_to_loc(span, lexer), - unparse_type(types, constr_typ, stringtab), - unparse_type(types, body_typ, stringtab)))) + Err(singleton_error(ErrorMessage::TypeError( + span_to_loc(span, lexer), + unparse_type(types, constr_typ, stringtab), + unparse_type(types, body_typ, stringtab), + ))) } else { Ok(Expr::Union { - tag : constr_idx, - val : Box::new(body), - typ : union_type }) + tag: constr_idx, + val: Box::new(body), + typ: union_type, + }) } - }, - Some(Entity::Function { index : function, type_args : kinds, - args : func_args, return_type }) => { + } + Some(Entity::Function { + index: function, + type_args: kinds, + args: func_args, + return_type, + }) => { let func = *function; // Verify that the type arguments are appropriate - let ty_args = - ty_args.unwrap_or_else( - || vec! [ parser::TypeExpr::WildcardType { span : span } - ; kinds.len() ]); + let ty_args = ty_args.unwrap_or_else(|| { + vec![parser::TypeExpr::WildcardType { span: span }; kinds.len()] + }); if kinds.len() != ty_args.len() { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Expected {} type arguments, provided {}", - kinds.len(), ty_args.len()))))? + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Expected {} type arguments, provided {}", + kinds.len(), + ty_args.len() + ), + )))? 
} - + let mut type_vars = vec![]; let mut dyn_consts = vec![]; let mut errors = LinkedList::new(); @@ -3873,121 +4869,134 @@ fn process_expr(expr : parser::Expr, num_dyn_const : usize, match kind { parser::Kind::USize => { match process_type_expr_as_expr( - arg, num_dyn_const, lexer, stringtab, env, types) { + arg, + num_dyn_const, + lexer, + stringtab, + env, + types, + ) { Err(mut errs) => errors.append(&mut errs), Ok(val) => dyn_consts.push(val), } - }, + } _ => { match process_type_expr_as_type( - arg, num_dyn_const, lexer, stringtab, env, types, - true) { + arg, + num_dyn_const, + lexer, + stringtab, + env, + types, + true, + ) { Err(mut errs) => errors.append(&mut errs), Ok(typ) => { if types.unify_kind(typ, *kind) { type_vars.push(typ); } else { - errors.push_back( - ErrorMessage::KindError( - span_to_loc(arg_span, lexer), - kind.to_string(), - unparse_type(types, typ, stringtab))); + errors.push_back(ErrorMessage::KindError( + span_to_loc(arg_span, lexer), + kind.to_string(), + unparse_type(types, typ, stringtab), + )); } - }, + } } - }, + } } } - if !errors.is_empty() { return Err(errors); } + if !errors.is_empty() { + return Err(errors); + } - let arg_types = - if type_vars.len() == 0 && dyn_consts.len() == 0 { - func_args.clone() - } else { - let mut tys = vec![]; - for (t, inout) in func_args { - tys.push(( - if let Some(res) - = types.instantiate(*t, &type_vars, &dyn_consts) { - res - } else { - return Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - "Failure in variable substitution".to_string()))); - }, - *inout)); - } - tys - }; - let return_typ = - if let Some(res) - = types.instantiate(*return_type, &type_vars, &dyn_consts) { - res - } else { - return Err(singleton_error( - ErrorMessage::SemanticError( + let arg_types = if type_vars.len() == 0 && dyn_consts.len() == 0 { + func_args.clone() + } else { + let mut tys = vec![]; + for (t, inout) in func_args { + tys.push(( + if let Some(res) = types.instantiate(*t, 
&type_vars, &dyn_consts) { + res + } else { + return Err(singleton_error(ErrorMessage::SemanticError( span_to_loc(span, lexer), - "Failure in variable substitution".to_string()))); - }; + "Failure in variable substitution".to_string(), + ))); + }, + *inout, + )); + } + tys + }; + let return_typ = if let Some(res) = + types.instantiate(*return_type, &type_vars, &dyn_consts) + { + res + } else { + return Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + "Failure in variable substitution".to_string(), + ))); + }; // Now, process the arguments to ensure they has the type needed by this // constructor - let mut arg_vals : Vec<Either<Expr, usize>> = vec![]; + let mut arg_vals: Vec<Either<Expr, usize>> = vec![]; let mut errors = LinkedList::new(); - for ((is_inout, arg), (arg_typ, expect_inout)) - in args.into_iter().zip(arg_types.into_iter()) { - + for ((is_inout, arg), (arg_typ, expect_inout)) in + args.into_iter().zip(arg_types.into_iter()) + { let arg_span = arg.span(); if is_inout && !expect_inout { - errors.push_back( - ErrorMessage::SemanticError( - span_to_loc(arg_span, lexer), - format!("Argument should be inout"))); + errors.push_back(ErrorMessage::SemanticError( + span_to_loc(arg_span, lexer), + format!("Argument should be inout"), + )); } else if !is_inout && expect_inout { - errors.push_back( - ErrorMessage::SemanticError( - span_to_loc(arg_span, lexer), - format!("Argument should not be inout"))); + errors.push_back(ErrorMessage::SemanticError( + span_to_loc(arg_span, lexer), + format!("Argument should not be inout"), + )); } else if is_inout { // If the argument is an inout then it needs to just be a variable match process_expr(arg, num_dyn_const, lexer, stringtab, env, types) { Err(mut errs) => errors.append(&mut errs), Ok(Expr::Variable { var, typ }) => { if !types.unify(arg_typ, typ) { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(arg_span, lexer), - unparse_type(types, arg_typ, stringtab), - 
unparse_type(types, typ, stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(arg_span, lexer), + unparse_type(types, arg_typ, stringtab), + unparse_type(types, typ, stringtab), + )); } else { arg_vals.push(Either::Right(var)); } - }, + } Ok(_) => { - errors.push_back( - ErrorMessage::SemanticError( - span_to_loc(arg_span, lexer), - format!("An inout argument must just be a variable"))); - }, + errors.push_back(ErrorMessage::SemanticError( + span_to_loc(arg_span, lexer), + format!("An inout argument must just be a variable"), + )); + } } } else { match process_expr(arg, num_dyn_const, lexer, stringtab, env, types) { Err(mut errs) => errors.append(&mut errs), Ok(exp) => { if !types.unify(arg_typ, exp.get_type()) { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(arg_span, lexer), - unparse_type(types, arg_typ, stringtab), - unparse_type(types, exp.get_type(), stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(arg_span, lexer), + unparse_type(types, arg_typ, stringtab), + unparse_type(types, exp.get_type(), stringtab), + )); } else { arg_vals.push(Either::Left(exp)); } - }, + } } } } @@ -3996,44 +5005,51 @@ fn process_expr(expr : parser::Expr, num_dyn_const : usize, Err(errors) } else { Ok(Expr::CallExpr { - func : func, - ty_args : type_vars, - dyn_consts : dyn_consts, - args : arg_vals, - typ : return_typ }) + func: func, + ty_args: type_vars, + dyn_consts: dyn_consts, + args: arg_vals, + typ: return_typ, + }) } - }, + } } - }, - parser::Expr::IntrinsicExpr { span, name, ty_args, args } => { + } + parser::Expr::IntrinsicExpr { + span, + name, + ty_args, + args, + } => { if name.len() != 1 { - Err(singleton_error( - ErrorMessage::NotImplemented( - span_to_loc(span, lexer), - "packages".to_string())))? + Err(singleton_error(ErrorMessage::NotImplemented( + span_to_loc(span, lexer), + "packages".to_string(), + )))? 
} let nm = lexer.span_str(name[0]); match intrinsics::lookup(nm) { - None => - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Undefined intrinsic {}", nm)))), + None => Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!("Undefined intrinsic {}", nm), + ))), Some(intrinsic) => { let kinds = intrinsic.kinds; - let ty_args = - ty_args.unwrap_or_else( - || vec! [ parser::TypeExpr::WildcardType { span : span } - ; kinds.len() ]); + let ty_args = ty_args.unwrap_or_else(|| { + vec![parser::TypeExpr::WildcardType { span: span }; kinds.len()] + }); if ty_args.len() != kinds.len() { - Err(singleton_error( - ErrorMessage::SemanticError( - span_to_loc(span, lexer), - format!("Expected {} type arguments, provided {}", - kinds.len(), ty_args.len()))))? + Err(singleton_error(ErrorMessage::SemanticError( + span_to_loc(span, lexer), + format!( + "Expected {} type arguments, provided {}", + kinds.len(), + ty_args.len() + ), + )))? 
} let mut type_vars = vec![]; @@ -4044,61 +5060,67 @@ fn process_expr(expr : parser::Expr, num_dyn_const : usize, match kind { parser::Kind::USize => { panic!("Intrinsics do not support dynamic constants"); - }, + } _ => { match process_type_expr_as_type( - arg, num_dyn_const, lexer, stringtab, env, types, - true) { + arg, + num_dyn_const, + lexer, + stringtab, + env, + types, + true, + ) { Err(mut errs) => errors.append(&mut errs), Ok(typ) => { if types.unify_kind(typ, *kind) { type_vars.push(typ); } else { - errors.push_back( - ErrorMessage::KindError( - span_to_loc(arg_span, lexer), - kind.to_string(), - unparse_type(types, typ, stringtab))); + errors.push_back(ErrorMessage::KindError( + span_to_loc(arg_span, lexer), + kind.to_string(), + unparse_type(types, typ, stringtab), + )); } - }, + } } - }, + } } } - if !errors.is_empty() { return Err(errors); } + if !errors.is_empty() { + return Err(errors); + } let (arg_types, return_typ) = (intrinsic.typ)(&type_vars, types); // Now, process the arguments to ensure they has the type needed by this // intrinsic - let mut arg_vals : Vec<Expr> = vec![]; + let mut arg_vals: Vec<Expr> = vec![]; let mut errors = LinkedList::new(); - for ((is_inout, arg), arg_typ) - in args.into_iter().zip(arg_types.into_iter()) { - + for ((is_inout, arg), arg_typ) in args.into_iter().zip(arg_types.into_iter()) { let arg_span = arg.span(); if is_inout { - errors.push_back( - ErrorMessage::SemanticError( - span_to_loc(arg_span, lexer), - format!("Arguments to intrinsics cannot be inout"))); + errors.push_back(ErrorMessage::SemanticError( + span_to_loc(arg_span, lexer), + format!("Arguments to intrinsics cannot be inout"), + )); } else { match process_expr(arg, num_dyn_const, lexer, stringtab, env, types) { Err(mut errs) => errors.append(&mut errs), Ok(exp) => { if !types.unify(arg_typ, exp.get_type()) { - errors.push_back( - ErrorMessage::TypeError( - span_to_loc(arg_span, lexer), - unparse_type(types, arg_typ, stringtab), - 
unparse_type(types, exp.get_type(), stringtab))); + errors.push_back(ErrorMessage::TypeError( + span_to_loc(arg_span, lexer), + unparse_type(types, arg_typ, stringtab), + unparse_type(types, exp.get_type(), stringtab), + )); } else { arg_vals.push(exp); } - }, + } } } } @@ -4107,48 +5129,63 @@ fn process_expr(expr : parser::Expr, num_dyn_const : usize, Err(errors) } else { Ok(Expr::Intrinsic { - id : intrinsic.id, - ty_args : type_vars, - args : arg_vals, - typ : return_typ }) + id: intrinsic.id, + ty_args: type_vars, + args: arg_vals, + typ: return_typ, + }) } - }, + } } - }, + } } } -fn generate_return(expr : Expr, vars : &Vec<usize>, var_types : &Vec<Type>, - types : &mut TypeSolver) -> Stmt { - let var_exprs = vars.iter().zip(var_types.iter()) - .map(|(var, typ)| Expr::Variable { var : *var, typ : *typ }) - .collect::<Vec<_>>(); +fn generate_return( + expr: Expr, + vars: &Vec<usize>, + var_types: &Vec<Type>, + types: &mut TypeSolver, +) -> Stmt { + let var_exprs = vars + .iter() + .zip(var_types.iter()) + .map(|(var, typ)| Expr::Variable { + var: *var, + typ: *typ, + }) + .collect::<Vec<_>>(); let inout_type = types.new_tuple(var_types.clone()); - let inout_vals = Expr::Tuple { vals : var_exprs, typ : inout_type }; + let inout_vals = Expr::Tuple { + vals: var_exprs, + typ: inout_type, + }; let expr_type = expr.get_type(); - let val = Expr::Tuple { vals : vec![expr, inout_vals], - typ : types.new_tuple(vec![expr_type, inout_type]) }; + let val = Expr::Tuple { + vals: vec![expr, inout_vals], + typ: types.new_tuple(vec![expr_type, inout_type]), + }; - Stmt::ReturnStmt { expr : val } + Stmt::ReturnStmt { expr: val } } -fn convert_primitive(prim : parser::Primitive) -> types::Primitive { +fn convert_primitive(prim: parser::Primitive) -> types::Primitive { match prim { parser::Primitive::Bool => types::Primitive::Bool, - parser::Primitive::I8 => types::Primitive::I8, - parser::Primitive::U8 => types::Primitive::U8, - parser::Primitive::I16 => types::Primitive::I16, - 
parser::Primitive::U16 => types::Primitive::U16, - parser::Primitive::I32 => types::Primitive::I32, - parser::Primitive::U32 => types::Primitive::U32, - parser::Primitive::I64 => types::Primitive::I64, - parser::Primitive::U64 => types::Primitive::U64, - parser::Primitive::USize=> types::Primitive::U64, - parser::Primitive::F32 => types::Primitive::F32, - parser::Primitive::F64 => types::Primitive::F64, + parser::Primitive::I8 => types::Primitive::I8, + parser::Primitive::U8 => types::Primitive::U8, + parser::Primitive::I16 => types::Primitive::I16, + parser::Primitive::U16 => types::Primitive::U16, + parser::Primitive::I32 => types::Primitive::I32, + parser::Primitive::U32 => types::Primitive::U32, + parser::Primitive::I64 => types::Primitive::I64, + parser::Primitive::U64 => types::Primitive::U64, + parser::Primitive::USize => types::Primitive::U64, + parser::Primitive::F32 => types::Primitive::F32, + parser::Primitive::F64 => types::Primitive::F64, parser::Primitive::Void => types::Primitive::Unit, } } diff --git a/juno_frontend/src/ssa.rs b/juno_frontend/src/ssa.rs index 924061ec..f5e1d830 100644 --- a/juno_frontend/src/ssa.rs +++ b/juno_frontend/src/ssa.rs @@ -8,35 +8,39 @@ extern crate hercules_ir; use std::collections::{HashMap, HashSet}; -use self::hercules_ir::ir::*; use self::hercules_ir::build::*; +use self::hercules_ir::ir::*; +use crate::labeled_builder::LabeledBuilder; pub struct SSA { // Map from variable (usize) to build (NodeID) to definition (NodeID) - current_def : HashMap<usize, HashMap<NodeID, NodeID>>, - sealed_blocks : HashSet<NodeID>, - incomplete_phis : HashMap<NodeID, HashMap<usize, NodeBuilder>>, + current_def: HashMap<usize, HashMap<NodeID, NodeID>>, + sealed_blocks: HashSet<NodeID>, + incomplete_phis: HashMap<NodeID, HashMap<usize, NodeBuilder>>, - function : FunctionID, - block_preds : HashMap<NodeID, Vec<NodeID>>, - unsealed_blocks : HashMap<NodeID, NodeBuilder>, + block_preds: HashMap<NodeID, Vec<NodeID>>, + unsealed_blocks: 
HashMap<NodeID, NodeBuilder>, } impl SSA { - pub fn new(func : FunctionID, entry : NodeID) -> SSA { - SSA { current_def : HashMap::new(), - sealed_blocks : HashSet::from([entry]), - incomplete_phis : HashMap::new(), - function : func, - block_preds : HashMap::from([(entry, vec![])]), - unsealed_blocks : HashMap::new() } + pub fn new(entry: NodeID) -> SSA { + SSA { + current_def: HashMap::new(), + sealed_blocks: HashSet::from([entry]), + incomplete_phis: HashMap::new(), + block_preds: HashMap::from([(entry, vec![])]), + unsealed_blocks: HashMap::new(), + } } - pub fn create_cond<'a>(&mut self, builder : &mut Builder<'a>, - pred : NodeID) -> (NodeBuilder, NodeID, NodeID) { - let if_builder = builder.allocate_node(self.function); - let mut left_builder = builder.allocate_node(self.function); - let mut right_builder = builder.allocate_node(self.function); + pub fn create_cond<'a>( + &mut self, + builder: &mut LabeledBuilder<'a>, + pred: NodeID, + ) -> (NodeBuilder, NodeID, NodeID) { + let if_builder = builder.allocate_node(); + let mut left_builder = builder.allocate_node(); + let mut right_builder = builder.allocate_node(); let left_proj = left_builder.id(); let right_proj = right_builder.id(); @@ -47,8 +51,8 @@ impl SSA { // False branch right_builder.build_projection(if_builder.id(), 0); - let _ = builder.add_node(left_builder); - let _ = builder.add_node(right_builder); + builder.add_node(left_builder); + builder.add_node(right_builder); self.sealed_blocks.insert(if_builder.id()); self.block_preds.insert(if_builder.id(), vec![pred]); @@ -62,8 +66,8 @@ impl SSA { (if_builder, left_proj, right_proj) } - pub fn create_block<'a>(&mut self, builder : &mut Builder<'a>) -> NodeID { - let node_builder = builder.allocate_node(self.function); + pub fn create_block<'a>(&mut self, builder: &mut LabeledBuilder<'a>) -> NodeID { + let node_builder = builder.allocate_node(); let block = node_builder.id(); self.unsealed_blocks.insert(block, node_builder); 
self.block_preds.insert(block, vec![]); @@ -72,26 +76,32 @@ impl SSA { } // Add "pred" as a predecessor of "block" - pub fn add_pred(&mut self, block : NodeID, pred : NodeID) { - assert!(self.unsealed_blocks.contains_key(&block), - "Block must be unsealed to add predecessors"); - self.block_preds.get_mut(&block) - .expect("Block was created") - .push(pred); + pub fn add_pred(&mut self, block: NodeID, pred: NodeID) { + assert!( + self.unsealed_blocks.contains_key(&block), + "Block must be unsealed to add predecessors" + ); + self.block_preds + .get_mut(&block) + .expect("Block was created") + .push(pred); } - pub fn seal_block<'a>(&mut self, block : NodeID, builder : &mut Builder<'a>) { - let mut block_builder = self.unsealed_blocks.remove(&block) - .expect("A block must be unsealed to seal it"); - - let preds = self.block_preds.get(&block) - .expect("A block must be created to seal it") - .clone(); - let mut phis = - match self.incomplete_phis.remove(&block) { - None => HashMap::new(), - Some(phis) => phis, - }; + pub fn seal_block<'a>(&mut self, block: NodeID, builder: &mut LabeledBuilder<'a>) { + let mut block_builder = self + .unsealed_blocks + .remove(&block) + .expect("A block must be unsealed to seal it"); + + let preds = self + .block_preds + .get(&block) + .expect("A block must be created to seal it") + .clone(); + let mut phis = match self.incomplete_phis.remove(&block) { + None => HashMap::new(), + Some(phis) => phis, + }; for (variable, phi) in phis.drain() { self.add_phi_operands(variable, block, phi, builder); @@ -102,62 +112,78 @@ impl SSA { let _ = builder.add_node(block_builder); } - pub fn write_variable(&mut self, variable : usize, block : NodeID, value : NodeID) { - match self.current_def.get_mut(&variable) { - Some(m) => { - m.insert(block, value); - }, - None => { - self.current_def.insert(variable, HashMap::from([(block, value)])); - }, - } + pub fn write_variable(&mut self, variable: usize, block: NodeID, value: NodeID) { + self.current_def + 
.entry(variable) + .or_insert(HashMap::new()) + .insert(block, value); } - pub fn read_variable<'a>(&mut self, variable : usize, block : NodeID, - builder : &mut Builder<'a>) -> NodeID { + pub fn read_variable<'a>( + &mut self, + variable: usize, + block: NodeID, + builder: &mut LabeledBuilder<'a>, + ) -> NodeID { match self.current_def.get(&variable) { - Some(var) => { - match var.get(&block) { - Some(val) => *val, - None => self.read_variable_recursive(variable, block, builder), - } + Some(var) => match var.get(&block) { + Some(val) => *val, + None => self.read_variable_recursive(variable, block, builder), }, None => { panic!("ERROR: Variable in read_variable never written") - }, + } } } - fn read_variable_recursive<'a>(&mut self, variable : usize, block : NodeID, - builder : &mut Builder<'a>) -> NodeID { - let val = - if !self.sealed_blocks.contains(&block) { - let node = builder.allocate_node(self.function); - let node_id = node.id(); - self.incomplete_phis.get_mut(&block) - .expect("Unsealed block has been added") - .insert(variable, node); - node_id - } else if self.block_preds.get(&block) - .expect("Sealed block has preds").len() == 1 { - self.read_variable(variable, - self.block_preds.get(&block) - .expect("Sealed block has preds")[0], - builder) - } else { - let node = builder.allocate_node(self.function); - let node_id = node.id(); - self.write_variable(variable, block, node_id); - self.add_phi_operands(variable, block, node, builder); - node_id - }; + fn read_variable_recursive<'a>( + &mut self, + variable: usize, + block: NodeID, + builder: &mut LabeledBuilder<'a>, + ) -> NodeID { + let val = if !self.sealed_blocks.contains(&block) { + // Label phi nodes the same as the region node they are associated with + let node = builder.allocate_node_labeled_with(block); + let node_id = node.id(); + self.incomplete_phis + .get_mut(&block) + .expect("Unsealed block has been added") + .insert(variable, node); + node_id + } else if self + .block_preds + .get(&block) 
+ .expect("Sealed block has preds") + .len() + == 1 + { + self.read_variable( + variable, + self.block_preds + .get(&block) + .expect("Sealed block has preds")[0], + builder, + ) + } else { + let node = builder.allocate_node_labeled_with(block); + let node_id = node.id(); + self.write_variable(variable, block, node_id); + self.add_phi_operands(variable, block, node, builder); + node_id + }; self.write_variable(variable, block, val); val } - fn add_phi_operands<'a>(&mut self, variable : usize, block : NodeID, - mut phi : NodeBuilder, builder : &mut Builder<'a>) { + fn add_phi_operands<'a>( + &mut self, + variable: usize, + block: NodeID, + mut phi: NodeBuilder, + builder: &mut LabeledBuilder<'a>, + ) { let mut vals = vec![]; let preds = self.block_preds.get(&block).expect("Block exists").clone(); for pred in preds { diff --git a/juno_frontend/src/types.rs b/juno_frontend/src/types.rs index 53adbb57..582e7cfd 100644 --- a/juno_frontend/src/types.rs +++ b/juno_frontend/src/types.rs @@ -1,122 +1,150 @@ use std::collections::{HashMap, HashSet, VecDeque}; -use crate::hercules_ir::ir::*; -use crate::hercules_ir::build::*; use crate::dynconst::DynConst; -use crate::parser; +use crate::hercules_ir::build::*; +use crate::hercules_ir::ir::*; use crate::locs::Location; +use crate::parser; #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum Either<A, B> { Left(A), - Right(B) + Right(B), } impl parser::Kind { - fn is_a(&self, other : parser::Kind) -> bool { + fn is_a(&self, other: parser::Kind) -> bool { match other { parser::Kind::USize => false, parser::Kind::Type => true, - parser::Kind::Number => - match self { parser::Kind::Number | parser::Kind::Integer - | parser::Kind::Float => true, - _ => false }, - parser::Kind::Integer => - match self { parser::Kind::Integer => true, - _ => false }, - parser::Kind::Float => - match self { parser::Kind::Float => true, - _ => false }, + parser::Kind::Number => match self { + parser::Kind::Number | parser::Kind::Integer | 
parser::Kind::Float => true, + _ => false, + }, + parser::Kind::Integer => match self { + parser::Kind::Integer => true, + _ => false, + }, + parser::Kind::Float => match self { + parser::Kind::Float => true, + _ => false, + }, } } - fn unify(&self, other : parser::Kind) -> Option<parser::Kind> { + fn unify(&self, other: parser::Kind) -> Option<parser::Kind> { match self { parser::Kind::USize => None, parser::Kind::Type => Some(other), - parser::Kind::Number => - match other { - parser::Kind::USize => None, - parser::Kind::Type | parser::Kind::Number => Some(parser::Kind::Number), - parser::Kind::Integer => Some(parser::Kind::Integer), - parser::Kind::Float => Some(parser::Kind::Float), - }, - parser::Kind::Integer => - match other { - parser::Kind::USize => None, - parser::Kind::Type | parser::Kind::Number | parser::Kind::Integer - => Some(parser::Kind::Integer), - parser::Kind::Float => None, - }, - parser::Kind::Float => - match other { - parser::Kind::USize => None, - parser::Kind::Type | parser::Kind::Number | parser::Kind::Float - => Some(parser::Kind::Float), - parser::Kind::Integer => None, - }, + parser::Kind::Number => match other { + parser::Kind::USize => None, + parser::Kind::Type | parser::Kind::Number => Some(parser::Kind::Number), + parser::Kind::Integer => Some(parser::Kind::Integer), + parser::Kind::Float => Some(parser::Kind::Float), + }, + parser::Kind::Integer => match other { + parser::Kind::USize => None, + parser::Kind::Type | parser::Kind::Number | parser::Kind::Integer => { + Some(parser::Kind::Integer) + } + parser::Kind::Float => None, + }, + parser::Kind::Float => match other { + parser::Kind::USize => None, + parser::Kind::Type | parser::Kind::Number | parser::Kind::Float => { + Some(parser::Kind::Float) + } + parser::Kind::Integer => None, + }, } } pub fn to_string(&self) -> String { match self { - parser::Kind::USize => "usize".to_string(), - parser::Kind::Type => "type".to_string(), - parser::Kind::Number => "number".to_string(), - 
parser::Kind::Integer => "integer".to_string(), - parser::Kind::Float => "float".to_string(), + parser::Kind::USize => "usize".to_string(), + parser::Kind::Type => "type".to_string(), + parser::Kind::Number => "number".to_string(), + parser::Kind::Integer => "integer".to_string(), + parser::Kind::Float => "float".to_string(), } } } #[derive(Copy, Clone, PartialEq, Eq, Debug)] -pub enum Primitive { Bool, U8, I8, U16, I16, U32, I32, U64, I64, F32, F64, Unit } +pub enum Primitive { + Bool, + U8, + I8, + U16, + I16, + U32, + I32, + U64, + I64, + F32, + F64, + Unit, +} impl Primitive { - fn is_kind(&self, kind : parser::Kind) -> bool { + fn is_kind(&self, kind: parser::Kind) -> bool { match kind { parser::Kind::Type => true, parser::Kind::USize => false, - parser::Kind::Number => - match self { Primitive::U8 | Primitive::I8 - | Primitive::U16 | Primitive::I16 - | Primitive::U32 | Primitive::I32 - | Primitive::U64 | Primitive::I64 - | Primitive::F32 | Primitive::F64 => true, - | _ => false }, - parser::Kind::Integer => - match self { Primitive::U8 | Primitive::I8 - | Primitive::U16 | Primitive::I16 - | Primitive::U32 | Primitive::I32 - | Primitive::U64 | Primitive::I64 => true, - | _ => false }, - parser::Kind::Float => - match self { Primitive::F32 | Primitive::F64 => true, - | _ => false }, + parser::Kind::Number => match self { + Primitive::U8 + | Primitive::I8 + | Primitive::U16 + | Primitive::I16 + | Primitive::U32 + | Primitive::I32 + | Primitive::U64 + | Primitive::I64 + | Primitive::F32 + | Primitive::F64 => true, + _ => false, + }, + parser::Kind::Integer => match self { + Primitive::U8 + | Primitive::I8 + | Primitive::U16 + | Primitive::I16 + | Primitive::U32 + | Primitive::I32 + | Primitive::U64 + | Primitive::I64 => true, + _ => false, + }, + parser::Kind::Float => match self { + Primitive::F32 | Primitive::F64 => true, + _ => false, + }, } } fn to_string(&self) -> String { match self { - Primitive::Bool => "bool".to_string(), - Primitive::I8 => 
"i8".to_string(), - Primitive::U8 => "u8".to_string(), - Primitive::I16 => "i16".to_string(), - Primitive::U16 => "u16".to_string(), - Primitive::I32 => "i32".to_string(), - Primitive::U32 => "u32".to_string(), - Primitive::I64 => "i64".to_string(), - Primitive::U64 => "u64".to_string(), - Primitive::F32 => "f32".to_string(), - Primitive::F64 => "f64".to_string(), - Primitive::Unit => "()".to_string(), + Primitive::Bool => "bool".to_string(), + Primitive::I8 => "i8".to_string(), + Primitive::U8 => "u8".to_string(), + Primitive::I16 => "i16".to_string(), + Primitive::U16 => "u16".to_string(), + Primitive::I32 => "i32".to_string(), + Primitive::U32 => "u32".to_string(), + Primitive::I64 => "i64".to_string(), + Primitive::U64 => "u64".to_string(), + Primitive::F32 => "f32".to_string(), + Primitive::F64 => "f64".to_string(), + Primitive::Unit => "()".to_string(), } } } #[derive(Copy, Clone, PartialEq, Eq, Debug)] -pub struct Type { val : usize } +pub struct Type { + val: usize, +} // Type forms, which include both concrete types, as well as unsolved types that may have some // constraints. 
Note that constrained types are just primitives (particularly the numeric types) @@ -124,96 +152,153 @@ pub struct Type { val : usize } // and tuples #[derive(Clone, Debug)] enum TypeForm { - Primitive { prim : Primitive }, - Tuple { fields : Vec<Type> }, - Array { elem : Type, dims : Vec<DynConst> }, + Primitive { + prim: Primitive, + }, + Tuple { + fields: Vec<Type>, + }, + Array { + elem: Type, + dims: Vec<DynConst>, + }, // This type is the same type as another type - OtherType { other : Type }, + OtherType { + other: Type, + }, // For type variables, we record its name, its index (in the list of type variables in this // context), and anything we know about it (is it a number, is it an integer) - TypeVar { name : usize, index : usize, kind : parser::Kind }, + TypeVar { + name: usize, + index: usize, + kind: parser::Kind, + }, // For structs and unions we record the name (via its interned representation), a UID, and the // types of its fields/constructors in a set order and a map from field/constructor names to // its index in the list - Struct { name : usize, id : usize, fields : Vec<Type>, names : HashMap<usize, usize> }, - Union { name : usize, id : usize, constr : Vec<Type>, names : HashMap<usize, usize> }, + Struct { + name: usize, + id: usize, + fields: Vec<Type>, + names: HashMap<usize, usize>, + }, + Union { + name: usize, + id: usize, + constr: Vec<Type>, + names: HashMap<usize, usize>, + }, // Constrained types - AnyOfKind { kind : parser::Kind, loc : Location }, + AnyOfKind { + kind: parser::Kind, + loc: Location, + }, } #[derive(Debug)] pub struct TypeSolver { - types : Vec<TypeForm>, - solved : usize, // which types have been "solved" (i.e. are known not to be AnyOfKinds) + types: Vec<TypeForm>, + solved: usize, // which types have been "solved" (i.e. 
are known not to be AnyOfKinds) } #[derive(Debug)] pub struct TypeSolverInst<'a> { - solver : &'a TypeSolver, + solver: &'a TypeSolver, // A collection of current values for type variables, and variables that we've solved for in // that context - type_vars : Vec<TypeID>, - solved : Vec<Option<TypeID>>, + type_vars: Vec<TypeID>, + solved: Vec<Option<TypeID>>, } impl TypeSolver { pub fn new() -> TypeSolver { - TypeSolver { types : vec![], solved : 0 } + TypeSolver { + types: vec![], + solved: 0, + } } - pub fn new_of_kind(&mut self, kind : parser::Kind, loc : Location) -> Type { + pub fn new_of_kind(&mut self, kind: parser::Kind, loc: Location) -> Type { self.create_type(TypeForm::AnyOfKind { kind, loc }) } - pub fn new_primitive(&mut self, prim : Primitive) -> Type { + pub fn new_primitive(&mut self, prim: Primitive) -> Type { self.create_type(TypeForm::Primitive { prim }) } - pub fn new_tuple(&mut self, fields : Vec<Type>) -> Type { + pub fn new_tuple(&mut self, fields: Vec<Type>) -> Type { self.create_type(TypeForm::Tuple { fields }) } - pub fn new_array(&mut self, elem : Type, dims : Vec<DynConst>) -> Type { + pub fn new_array(&mut self, elem: Type, dims: Vec<DynConst>) -> Type { self.create_type(TypeForm::Array { elem, dims }) } - pub fn new_type_var(&mut self, name : usize, index : usize, kind : parser::Kind) -> Type { + pub fn new_type_var(&mut self, name: usize, index: usize, kind: parser::Kind) -> Type { self.create_type(TypeForm::TypeVar { name, index, kind }) } - pub fn new_struct(&mut self, name : usize, id : usize, fields : Vec<Type>, - names : HashMap<usize, usize>) -> Type { - self.create_type(TypeForm::Struct { name, id, fields, names }) + pub fn new_struct( + &mut self, + name: usize, + id: usize, + fields: Vec<Type>, + names: HashMap<usize, usize>, + ) -> Type { + self.create_type(TypeForm::Struct { + name, + id, + fields, + names, + }) } - pub fn new_union(&mut self, name : usize, id : usize, constr : Vec<Type>, - names : HashMap<usize, usize>) -> 
Type { - self.create_type(TypeForm::Union { name, id, constr, names }) + pub fn new_union( + &mut self, + name: usize, + id: usize, + constr: Vec<Type>, + names: HashMap<usize, usize>, + ) -> Type { + self.create_type(TypeForm::Union { + name, + id, + constr, + names, + }) } - fn create_type(&mut self, typ : TypeForm) -> Type { + fn create_type(&mut self, typ: TypeForm) -> Type { let idx = self.types.len(); self.types.push(typ); - Type { val : idx } + Type { val: idx } } - pub fn unify_void(&mut self, Type { val } : Type) -> bool { + pub fn unify_void(&mut self, Type { val }: Type) -> bool { match &self.types[val] { - TypeForm::Primitive { prim : Primitive::Unit, .. } => true, + TypeForm::Primitive { + prim: Primitive::Unit, + .. + } => true, TypeForm::OtherType { other, .. } => self.unify_void(*other), - TypeForm::AnyOfKind { kind : parser::Kind::Type, .. } => { - self.types[val] = TypeForm::Primitive { prim : Primitive::Unit }; + TypeForm::AnyOfKind { + kind: parser::Kind::Type, + .. + } => { + self.types[val] = TypeForm::Primitive { + prim: Primitive::Unit, + }; true - }, + } _ => false, } } - pub fn is_array(&self, Type { val } : Type) -> bool { + pub fn is_array(&self, Type { val }: Type) -> bool { match &self.types[val] { TypeForm::Array { .. } => true, TypeForm::OtherType { other, .. } => self.is_array(*other), @@ -221,7 +306,7 @@ impl TypeSolver { } } - pub fn get_element_type(&self, Type { val } : Type) -> Option<Type> { + pub fn get_element_type(&self, Type { val }: Type) -> Option<Type> { match &self.types[val] { TypeForm::Array { elem, .. } => Some(*elem), TypeForm::OtherType { other, .. } => self.get_element_type(*other), @@ -229,61 +314,75 @@ impl TypeSolver { } } - pub fn get_dimensions(&self, Type { val } : Type) -> Option<Vec<DynConst>> { + pub fn get_dimensions(&self, Type { val }: Type) -> Option<Vec<DynConst>> { match &self.types[val] { - TypeForm::Array { elem : _, dims, .. } => Some(dims.to_vec()), + TypeForm::Array { elem: _, dims, .. 
} => Some(dims.to_vec()), TypeForm::OtherType { other, .. } => self.get_dimensions(*other), _ => None, } } - pub fn unify_bool(&mut self, Type { val } : Type) -> bool { + pub fn unify_bool(&mut self, Type { val }: Type) -> bool { match &self.types[val] { - TypeForm::Primitive { prim : Primitive::Bool, .. } => true, + TypeForm::Primitive { + prim: Primitive::Bool, + .. + } => true, TypeForm::OtherType { other, .. } => self.unify_bool(*other), - TypeForm::AnyOfKind { kind : parser::Kind::Type, .. } => { - self.types[val] = TypeForm::Primitive { prim : Primitive::Bool }; + TypeForm::AnyOfKind { + kind: parser::Kind::Type, + .. + } => { + self.types[val] = TypeForm::Primitive { + prim: Primitive::Bool, + }; true - }, + } _ => false, } } - pub fn unify_u64(&mut self, Type { val } : Type) -> bool { + pub fn unify_u64(&mut self, Type { val }: Type) -> bool { match &self.types[val] { - TypeForm::Primitive { prim : Primitive::U64, .. } => true, + TypeForm::Primitive { + prim: Primitive::U64, + .. + } => true, TypeForm::OtherType { other, .. } => self.unify_u64(*other), - TypeForm::AnyOfKind { kind, .. } => { - match kind { - parser::Kind::Type | parser::Kind::Number - | parser::Kind::Integer => { - self.types[val] = TypeForm::Primitive { prim : Primitive::U64 }; - true - }, - _ => false, + TypeForm::AnyOfKind { kind, .. } => match kind { + parser::Kind::Type | parser::Kind::Number | parser::Kind::Integer => { + self.types[val] = TypeForm::Primitive { + prim: Primitive::U64, + }; + true } + _ => false, }, _ => false, } } - pub fn unify_kind(&mut self, Type { val } : Type, kind : parser::Kind) -> bool { + pub fn unify_kind(&mut self, Type { val }: Type, kind: parser::Kind) -> bool { match &self.types[val] { - TypeForm::Primitive{ prim, .. } => prim.is_kind(kind), + TypeForm::Primitive { prim, .. } => prim.is_kind(kind), TypeForm::OtherType { other, .. } => self.unify_kind(*other, kind), - TypeForm::TypeVar { name : _, index : _, kind : var_kind, .. 
} => - var_kind.is_a(kind), - - TypeForm::AnyOfKind { kind : ty_kind, loc } => { - match ty_kind.unify(kind) { - None => false, - Some(unified) => { - self.types[val] - = TypeForm::AnyOfKind { kind : unified, loc : *loc }; - true - } + TypeForm::TypeVar { + name: _, + index: _, + kind: var_kind, + .. + } => var_kind.is_a(kind), + + TypeForm::AnyOfKind { kind: ty_kind, loc } => match ty_kind.unify(kind) { + None => false, + Some(unified) => { + self.types[val] = TypeForm::AnyOfKind { + kind: unified, + loc: *loc, + }; + true } }, @@ -291,111 +390,183 @@ impl TypeSolver { } } - pub fn unify(&mut self, Type { val : ty1 } : Type, Type { val : ty2 } : Type) -> bool { + pub fn unify(&mut self, Type { val: ty1 }: Type, Type { val: ty2 }: Type) -> bool { if let TypeForm::OtherType { other, .. } = self.types[ty1] { - return self.unify(other, Type { val : ty2 }); + return self.unify(other, Type { val: ty2 }); } if let TypeForm::OtherType { other, .. } = self.types[ty2] { - return self.unify(Type { val : ty1 }, other); + return self.unify(Type { val: ty1 }, other); } match (&self.types[ty1], &self.types[ty2]) { - (TypeForm::Primitive { prim : p1, .. }, - TypeForm::Primitive { prim : p2, .. }) => p1 == p2, + (TypeForm::Primitive { prim: p1, .. }, TypeForm::Primitive { prim: p2, .. }) => { + p1 == p2 + } - (TypeForm::Primitive { prim, .. }, - TypeForm::AnyOfKind { kind, .. }) if prim.is_kind(*kind) => { - self.types[ty2] = TypeForm::OtherType { other : Type { val : ty1 } }; + (TypeForm::Primitive { prim, .. }, TypeForm::AnyOfKind { kind, .. }) + if prim.is_kind(*kind) => + { + self.types[ty2] = TypeForm::OtherType { + other: Type { val: ty1 }, + }; true - }, - (TypeForm::TypeVar { name : _, index : _, kind : var_kind, .. }, - TypeForm::AnyOfKind { kind, .. }) if var_kind.is_a(*kind) => { - self.types[ty2] = TypeForm::OtherType { other : Type { val : ty1 } }; + } + ( + TypeForm::TypeVar { + name: _, + index: _, + kind: var_kind, + .. + }, + TypeForm::AnyOfKind { kind, .. 
}, + ) if var_kind.is_a(*kind) => { + self.types[ty2] = TypeForm::OtherType { + other: Type { val: ty1 }, + }; true - }, + } - (TypeForm::AnyOfKind { kind, .. }, - TypeForm::Primitive { prim, .. }) if prim.is_kind(*kind) => { - self.types[ty1] = TypeForm::OtherType { other : Type { val : ty2 } }; + (TypeForm::AnyOfKind { kind, .. }, TypeForm::Primitive { prim, .. }) + if prim.is_kind(*kind) => + { + self.types[ty1] = TypeForm::OtherType { + other: Type { val: ty2 }, + }; true - }, - (TypeForm::AnyOfKind { kind, .. }, - TypeForm::TypeVar { name : _, index : _, kind : var_kind, .. }) - if var_kind.is_a(*kind) => { - self.types[ty1] = TypeForm::OtherType { other : Type { val : ty2 } }; + } + ( + TypeForm::AnyOfKind { kind, .. }, + TypeForm::TypeVar { + name: _, + index: _, + kind: var_kind, + .. + }, + ) if var_kind.is_a(*kind) => { + self.types[ty1] = TypeForm::OtherType { + other: Type { val: ty2 }, + }; true - }, + } - (TypeForm::Tuple { fields : f1, .. }, TypeForm::Tuple { fields : f2, .. }) - if f1.len() == f2.len() => { + (TypeForm::Tuple { fields: f1, .. }, TypeForm::Tuple { fields: f2, .. }) + if f1.len() == f2.len() => + { for (t1, t2) in f1.clone().iter().zip(f2.clone().iter()) { - if !self.unify(*t1, *t2) { return false; } + if !self.unify(*t1, *t2) { + return false; + } } true - }, - - (TypeForm::Array { elem : t1, dims : dm1, .. }, - TypeForm::Array { elem : t2, dims : dm2, .. }) => - dm1 == dm2 && self.unify(*t1, *t2), + } - (TypeForm::TypeVar { name : _, index : idx1, .. }, - TypeForm::TypeVar { name : _, index : idx2, .. }) => idx1 == idx2, + ( + TypeForm::Array { + elem: t1, + dims: dm1, + .. + }, + TypeForm::Array { + elem: t2, + dims: dm2, + .. + }, + ) => dm1 == dm2 && self.unify(*t1, *t2), - (TypeForm::Struct { name : _, id : id1, fields : fs1, .. }, - TypeForm::Struct { name : _, id : id2, fields : fs2, .. }) - | (TypeForm::Union {name : _, id : id1, constr : fs1, .. }, - TypeForm::Union {name : _, id : id2, constr : fs2, .. 
}) - if id1 == id2 && fs1.len() == fs2.len() => { + ( + TypeForm::TypeVar { + name: _, + index: idx1, + .. + }, + TypeForm::TypeVar { + name: _, + index: idx2, + .. + }, + ) => idx1 == idx2, + + ( + TypeForm::Struct { + name: _, + id: id1, + fields: fs1, + .. + }, + TypeForm::Struct { + name: _, + id: id2, + fields: fs2, + .. + }, + ) + | ( + TypeForm::Union { + name: _, + id: id1, + constr: fs1, + .. + }, + TypeForm::Union { + name: _, + id: id2, + constr: fs2, + .. + }, + ) if id1 == id2 && fs1.len() == fs2.len() => { for (t1, t2) in fs1.clone().iter().zip(fs2.clone().iter()) { - if !self.unify(*t1, *t2) { return false; } + if !self.unify(*t1, *t2) { + return false; + } } true - }, + } - (TypeForm::AnyOfKind { kind : k1, loc : l1 }, - TypeForm::AnyOfKind { kind : k2, .. }) => { + (TypeForm::AnyOfKind { kind: k1, loc: l1 }, TypeForm::AnyOfKind { kind: k2, .. }) => { match k1.unify(*k2) { None => false, Some(kind) => { let loc = *l1; self.types[ty1] = TypeForm::AnyOfKind { kind, loc }; - self.types[ty2] = TypeForm::OtherType { other : Type { val : ty1 } }; + self.types[ty2] = TypeForm::OtherType { + other: Type { val: ty1 }, + }; true - }, + } } - }, + } _ => false, } } -/* - pub fn is_tuple(&self, Type { val } : Type) -> bool { - match &self.types[val] { - TypeForm::Tuple(_) => true, - TypeForm::OtherType(t) => self.is_tuple(*t), - _ => false, + /* + pub fn is_tuple(&self, Type { val } : Type) -> bool { + match &self.types[val] { + TypeForm::Tuple(_) => true, + TypeForm::OtherType(t) => self.is_tuple(*t), + _ => false, + } } - } - pub fn get_num_fields(&self, Type { val } : Type) -> Option<usize> { - match &self.types[val] { - TypeForm::Tuple(fields) => { Some(fields.len()) }, - TypeForm::OtherType(t) => self.get_num_fields(*t), - _ => None, + pub fn get_num_fields(&self, Type { val } : Type) -> Option<usize> { + match &self.types[val] { + TypeForm::Tuple(fields) => { Some(fields.len()) }, + TypeForm::OtherType(t) => self.get_num_fields(*t), + _ => None, + } } - } 
- fn get_fields(&self, Type { val } : Type) -> Vec<Type> { - match &self.types[val] { - TypeForm::Tuple(fields) => { fields.clone() }, - TypeForm::OtherType(t) => self.get_fields(*t), - _ => panic!("Internal function get_fields used on non-tuple"), + fn get_fields(&self, Type { val } : Type) -> Vec<Type> { + match &self.types[val] { + TypeForm::Tuple(fields) => { fields.clone() }, + TypeForm::OtherType(t) => self.get_fields(*t), + _ => panic!("Internal function get_fields used on non-tuple"), + } } - } -*/ + */ // Return the type of the field (in a tuple) at a particular index - pub fn get_index(&self, Type { val } : Type, idx : usize) -> Option<Type> { + pub fn get_index(&self, Type { val }: Type, idx: usize) -> Option<Type> { match &self.types[val] { TypeForm::Tuple { fields, .. } => fields.get(idx).copied(), TypeForm::OtherType { other, .. } => self.get_index(*other, idx), @@ -403,7 +574,7 @@ impl TypeSolver { } } - pub fn is_struct(&self, Type { val } : Type) -> bool { + pub fn is_struct(&self, Type { val }: Type) -> bool { match &self.types[val] { TypeForm::Struct { .. } => true, TypeForm::OtherType { other, .. } => self.is_struct(*other), @@ -412,56 +583,69 @@ impl TypeSolver { } // Return the number of fields a struct has - pub fn get_num_struct_fields(&self, Type { val } : Type) -> Option<usize> { + pub fn get_num_struct_fields(&self, Type { val }: Type) -> Option<usize> { match &self.types[val] { - TypeForm::Struct { name : _, id : _, fields, .. } => Some(fields.len()), + TypeForm::Struct { + name: _, + id: _, + fields, + .. + } => Some(fields.len()), TypeForm::OtherType { other, .. } => self.get_num_struct_fields(*other), _ => None, } } // Returns the position and type of a field in a type (if it exists) - pub fn get_field(&self, Type { val } : Type, name : usize) -> Option<(usize, Type)> { + pub fn get_field(&self, Type { val }: Type, name: usize) -> Option<(usize, Type)> { match &self.types[val] { - TypeForm::Struct { name : _, id : _, fields, names, .. 
} => { - names.get(&name).map(|idx| (*idx, fields[*idx])) - }, + TypeForm::Struct { + name: _, + id: _, + fields, + names, + .. + } => names.get(&name).map(|idx| (*idx, fields[*idx])), TypeForm::OtherType { other, .. } => self.get_field(*other, name), _ => None, } } // Returns the type of the field at a certain index in a struct - pub fn get_struct_field_type(&self, Type { val } : Type, idx : usize) -> Option<Type> { + pub fn get_struct_field_type(&self, Type { val }: Type, idx: usize) -> Option<Type> { match &self.types[val] { - TypeForm::Struct { name : _, id : _, fields, .. } => - fields.get(idx).copied(), + TypeForm::Struct { + name: _, + id: _, + fields, + .. + } => fields.get(idx).copied(), TypeForm::OtherType { other, .. } => self.get_struct_field_type(*other, idx), _ => None, } } -/* - pub fn get_field_names(&self, Type { val } : Type) -> Option<Vec<usize>> { - match &self.types[val] { - TypeForm::Struct { name : _, id : _, fields : _, names } => { - Some(names.keys().map(|i| *i).collect::<Vec<_>>()) - }, - TypeForm::OtherType(t) => self.get_field_names(*t), - _ => None, + /* + pub fn get_field_names(&self, Type { val } : Type) -> Option<Vec<usize>> { + match &self.types[val] { + TypeForm::Struct { name : _, id : _, fields : _, names } => { + Some(names.keys().map(|i| *i).collect::<Vec<_>>()) + }, + TypeForm::OtherType(t) => self.get_field_names(*t), + _ => None, + } } - } -*/ + */ - pub fn get_num_dimensions(&self, Type { val } : Type) -> Option<usize> { + pub fn get_num_dimensions(&self, Type { val }: Type) -> Option<usize> { match &self.types[val] { - TypeForm::Array { elem : _, dims, .. } => Some(dims.len()), + TypeForm::Array { elem: _, dims, .. } => Some(dims.len()), TypeForm::OtherType { other, .. } => self.get_num_dimensions(*other), _ => None, } } - pub fn is_union(&self, Type { val } : Type) -> bool { + pub fn is_union(&self, Type { val }: Type) -> bool { match &self.types[val] { TypeForm::Union { .. } => true, TypeForm::OtherType { other, .. 
} => self.is_union(*other), @@ -469,75 +653,85 @@ impl TypeSolver { } } - pub fn get_constructor_info(&self, Type { val } : Type, name : usize) - -> Option<(usize, Type)> { + pub fn get_constructor_info(&self, Type { val }: Type, name: usize) -> Option<(usize, Type)> { match &self.types[val] { - TypeForm::Union { name : _, id : _, constr, names, .. } => { - names.get(&name).map(|idx| (*idx, constr[*idx])) - }, + TypeForm::Union { + name: _, + id: _, + constr, + names, + .. + } => names.get(&name).map(|idx| (*idx, constr[*idx])), TypeForm::OtherType { other, .. } => self.get_constructor_info(*other, name), _ => None, } } -/* - pub fn get_constructor_list(&self, Type { val } : Type) -> Option<Vec<usize>> { - match &self.types[val] { - TypeForm::Union { name : _, id : _, constr : _, names } => { - Some(names.keys().map(|i| *i).collect::<Vec<_>>()) - }, - TypeForm::OtherType(t) => self.get_constructor_list(*t), - _ => None, + /* + pub fn get_constructor_list(&self, Type { val } : Type) -> Option<Vec<usize>> { + match &self.types[val] { + TypeForm::Union { name : _, id : _, constr : _, names } => { + Some(names.keys().map(|i| *i).collect::<Vec<_>>()) + }, + TypeForm::OtherType(t) => self.get_constructor_list(*t), + _ => None, + } } - } - fn is_type_var_num(&self, num : usize, Type { val } : Type) -> bool { - match &self.types[val] { - TypeForm::TypeVar { name : _, index, .. } => *index == num, - TypeForm::OtherType(t) => self.is_type_var_num(num, *t), - _ => false, + fn is_type_var_num(&self, num : usize, Type { val } : Type) -> bool { + match &self.types[val] { + TypeForm::TypeVar { name : _, index, .. } => *index == num, + TypeForm::OtherType(t) => self.is_type_var_num(num, *t), + _ => false, + } } - } -*/ + */ - pub fn to_string(&self, Type { val } : Type, stringtab : &dyn Fn(usize) -> String) - -> String { + pub fn to_string(&self, Type { val }: Type, stringtab: &dyn Fn(usize) -> String) -> String { match &self.types[val] { TypeForm::Primitive { prim, .. 
} => prim.to_string(), TypeForm::Tuple { fields, .. } => { - "(" .to_string() - + &fields.iter().map(|t| self.to_string(*t, stringtab)).collect::<Vec<_>>().join(", ") - + ")" - }, + "(".to_string() + + &fields + .iter() + .map(|t| self.to_string(*t, stringtab)) + .collect::<Vec<_>>() + .join(", ") + + ")" + } TypeForm::Array { elem, dims, .. } => { self.to_string(*elem, stringtab) - + "[" - + &dims.iter().map(|d| d.to_string(stringtab)).collect::<Vec<_>>().join(", ") - + "]" - }, - TypeForm::OtherType { other, .. } => { - self.to_string(*other, stringtab) - }, - TypeForm::TypeVar { name, .. } | TypeForm::Struct { name, .. } - | TypeForm::Union { name, .. } => { - stringtab(*name) - }, + + "[" + + &dims + .iter() + .map(|d| d.to_string(stringtab)) + .collect::<Vec<_>>() + .join(", ") + + "]" + } + TypeForm::OtherType { other, .. } => self.to_string(*other, stringtab), + TypeForm::TypeVar { name, .. } + | TypeForm::Struct { name, .. } + | TypeForm::Union { name, .. } => stringtab(*name), TypeForm::AnyOfKind { kind, .. } => kind.to_string(), } } // Instantiate a type using the provided list of type variables and dynamic constants // This is useful for instantiating the return type of a function and parametric types - pub fn instantiate(&mut self, Type { val } : Type, type_vars : &Vec<Type>, - dynamic_constants : &Vec<DynConst>) -> Option<Type> { + pub fn instantiate( + &mut self, + Type { val }: Type, + type_vars: &Vec<Type>, + dynamic_constants: &Vec<DynConst>, + ) -> Option<Type> { match self.types[val].clone() { TypeForm::Primitive { .. } => Some(Type { val }), - TypeForm::AnyOfKind { kind, loc } => { - Some(self.new_of_kind(kind, loc)) - }, - TypeForm::OtherType { other, .. } => - self.instantiate(other, type_vars, dynamic_constants), + TypeForm::AnyOfKind { kind, loc } => Some(self.new_of_kind(kind, loc)), + TypeForm::OtherType { other, .. 
} => { + self.instantiate(other, type_vars, dynamic_constants) + } TypeForm::Tuple { fields } => { let mut types = vec![]; let mut changed = false; @@ -546,9 +740,12 @@ impl TypeSolver { changed = changed || typ.val != inst.val; types.push(inst); } - if changed { Some(self.new_tuple(types)) } - else { Some(Type { val }) } - }, + if changed { + Some(self.new_tuple(types)) + } else { + Some(Type { val }) + } + } TypeForm::Array { elem, dims } => { let elem_typ = self.instantiate(elem, type_vars, dynamic_constants)?; let mut subst_dims = vec![]; @@ -558,13 +755,23 @@ impl TypeSolver { } Some(self.new_array(elem_typ, subst_dims)) - }, - TypeForm::TypeVar { name : _, index, kind, .. } => { + } + TypeForm::TypeVar { + name: _, + index, + kind, + .. + } => { let typ = type_vars[index]; assert!(self.unify_kind(typ, kind)); Some(typ) - }, - TypeForm::Struct { name, id, fields, names } => { + } + TypeForm::Struct { + name, + id, + fields, + names, + } => { let mut new_fields = vec![]; let mut changed = false; for typ in fields { @@ -573,13 +780,23 @@ impl TypeSolver { new_fields.push(inst); } - if changed { Some(self.create_type(TypeForm::Struct { - name : name, id : id, - fields : new_fields, - names : names.clone() })) } - else { Some(Type { val }) } - }, - TypeForm::Union { name, id, constr, names } => { + if changed { + Some(self.create_type(TypeForm::Struct { + name: name, + id: id, + fields: new_fields, + names: names.clone(), + })) + } else { + Some(Type { val }) + } + } + TypeForm::Union { + name, + id, + constr, + names, + } => { let mut new_constr = vec![]; let mut changed = false; for typ in constr { @@ -588,12 +805,17 @@ impl TypeSolver { new_constr.push(inst); } - if changed { Some(self.create_type(TypeForm::Union { - name : name, id : id, - constr : new_constr, - names : names.clone() })) } - else { Some(Type { val }) } - }, + if changed { + Some(self.create_type(TypeForm::Union { + name: name, + id: id, + constr: new_constr, + names: names.clone(), + })) + } 
else { + Some(Type { val }) + } + } } } @@ -611,24 +833,29 @@ impl TypeSolver { Ok(()) } - pub fn create_instance(&self, type_vars : Vec<TypeID>) -> TypeSolverInst { + pub fn create_instance(&self, type_vars: Vec<TypeID>) -> TypeSolverInst { let num_vars = self.types.len(); - assert!(self.solved == num_vars, "Cannot instantiate with unsolved variables"); - - TypeSolverInst { solver : self, - type_vars : type_vars, - solved : vec![None; num_vars] } + assert!( + self.solved == num_vars, + "Cannot instantiate with unsolved variables" + ); + + TypeSolverInst { + solver: self, + type_vars: type_vars, + solved: vec![None; num_vars], + } } } impl TypeSolverInst<'_> { - pub fn lower_type(&mut self, builder : &mut Builder, Type { val } : Type) -> TypeID { + pub fn lower_type(&mut self, builder: &mut Builder, Type { val }: Type) -> TypeID { if self.solved[val].is_some() { return self.solved[val].unwrap(); } let mut worklist = VecDeque::from([val]); - let mut depends : HashMap<usize, HashSet<usize>> = HashMap::new(); + let mut depends: HashMap<usize, HashSet<usize>> = HashMap::new(); while !worklist.is_empty() { let typ = worklist.pop_front().unwrap(); @@ -636,159 +863,185 @@ impl TypeSolverInst<'_> { // If this type is already solved, just continue. // Since we don't depend on something unless its unsolved we only need to drain the set // of dependences once - if self.solved[typ].is_some() { continue; } - - let solution : Either<TypeID, usize> = - match &self.solver.types[typ] { - TypeForm::Primitive { prim, .. } => - Either::Left(Self::build_primitive(builder, *prim)), - TypeForm::Tuple { fields, .. 
} => { - let mut needs = None; - let mut i_fields = vec![]; - - for Type { val } in fields { - match &self.solved[*val] { - Some(ty) => i_fields.push(*ty), - None => { needs = Some(*val); break; }, - } - } + if self.solved[typ].is_some() { + continue; + } - if let Some(t) = needs { - Either::Right(t) - } else { - Either::Left(Self::build_product(builder, i_fields)) - } - }, - TypeForm::Array { elem : Type { val }, dims, .. } => { + let solution: Either<TypeID, usize> = match &self.solver.types[typ] { + TypeForm::Primitive { prim, .. } => { + Either::Left(Self::build_primitive(builder, *prim)) + } + TypeForm::Tuple { fields, .. } => { + let mut needs = None; + let mut i_fields = vec![]; + + for Type { val } in fields { match &self.solved[*val] { - Some(ty) => - Either::Left(Self::build_array(builder, *ty, dims)), - None => Either::Right(*val), + Some(ty) => i_fields.push(*ty), + None => { + needs = Some(*val); + break; + } } - }, - TypeForm::OtherType { other : Type { val }, .. } => { + } + + if let Some(t) = needs { + Either::Right(t) + } else { + Either::Left(Self::build_product(builder, i_fields)) + } + } + TypeForm::Array { + elem: Type { val }, + dims, + .. + } => match &self.solved[*val] { + Some(ty) => Either::Left(Self::build_array(builder, *ty, dims)), + None => Either::Right(*val), + }, + TypeForm::OtherType { + other: Type { val }, + .. + } => match &self.solved[*val] { + Some(ty) => Either::Left(*ty), + None => Either::Right(*val), + }, + TypeForm::TypeVar { name: _, index, .. } => Either::Left(self.type_vars[*index]), + TypeForm::Struct { + name: _, + id: _, + fields, + .. + } => { + let mut needs = None; + let mut i_fields = vec![]; + + for Type { val } in fields { match &self.solved[*val] { - Some(ty) => Either::Left(*ty), - None => Either::Right(*val), - } - }, - TypeForm::TypeVar { name : _, index, .. } => { - Either::Left(self.type_vars[*index]) - }, - TypeForm::Struct { name : _, id : _, fields, .. 
} => { - let mut needs = None; - let mut i_fields = vec![]; - - for Type { val } in fields { - match &self.solved[*val] { - Some(ty) => i_fields.push(*ty), - None => { needs = Some(*val); break; }, + Some(ty) => i_fields.push(*ty), + None => { + needs = Some(*val); + break; } } + } - if let Some(t) = needs { - Either::Right(t) - } else { - Either::Left(Self::build_product(builder, i_fields)) - } - }, - TypeForm::Union { name : _, id : _, constr, .. } => { - let mut needs = None; - let mut i_constr = vec![]; - - for Type { val } in constr { - match &self.solved[*val] { - Some(ty) => i_constr.push(*ty), - None => { needs = Some(*val); break; }, + if let Some(t) = needs { + Either::Right(t) + } else { + Either::Left(Self::build_product(builder, i_fields)) + } + } + TypeForm::Union { + name: _, + id: _, + constr, + .. + } => { + let mut needs = None; + let mut i_constr = vec![]; + + for Type { val } in constr { + match &self.solved[*val] { + Some(ty) => i_constr.push(*ty), + None => { + needs = Some(*val); + break; } } + } - if let Some(t) = needs { - Either::Right(t) - } else { - Either::Left(Self::build_union(builder, i_constr)) - } - }, - TypeForm::AnyOfKind { .. } => { - panic!("TypeSolverInst only works on solved types which do not have AnyOfKinds") - }, - }; + if let Some(t) = needs { + Either::Right(t) + } else { + Either::Left(Self::build_union(builder, i_constr)) + } + } + TypeForm::AnyOfKind { .. 
} => { + panic!("TypeSolverInst only works on solved types which do not have AnyOfKinds") + } + }; match solution { Either::Left(solution) => { self.solved[typ] = Some(solution); match depends.get_mut(&typ) { - None => {}, + None => {} Some(set) => { for idx in set.drain() { worklist.push_back(idx); } - }, + } } - }, + } Either::Right(needs) => { - match depends.get_mut(&needs) { - None => { - depends.insert(needs, HashSet::from([typ])); - }, - Some(set) => { - set.insert(typ); - }, - } + depends.entry(needs).or_insert(HashSet::new()).insert(typ); worklist.push_back(needs); - }, + } } } self.solved[val].expect("Failure to solve type constraints") } - - pub fn as_numeric_type(&mut self, builder : &mut Builder, ty : Type) -> Primitive { + + pub fn as_numeric_type(&mut self, builder: &mut Builder, ty: Type) -> Primitive { let type_id = self.lower_type(builder, ty); - if type_id == builder.create_type_i8() { Primitive::I8 } - else if type_id == builder.create_type_i16() { Primitive::I16 } - else if type_id == builder.create_type_i32() { Primitive::I32 } - else if type_id == builder.create_type_i64() { Primitive::I64 } - else if type_id == builder.create_type_u8() { Primitive::U8 } - else if type_id == builder.create_type_u16() { Primitive::U16 } - else if type_id == builder.create_type_u32() { Primitive::U32 } - else if type_id == builder.create_type_u64() { Primitive::U64 } - else if type_id == builder.create_type_f32() { Primitive::F32 } - else if type_id == builder.create_type_f64() { Primitive::F64 } - else { panic!("as_numeric_type() called on non-numeric type") } - } - - fn build_primitive(builder : &mut Builder, p : Primitive) -> TypeID { + if type_id == builder.create_type_i8() { + Primitive::I8 + } else if type_id == builder.create_type_i16() { + Primitive::I16 + } else if type_id == builder.create_type_i32() { + Primitive::I32 + } else if type_id == builder.create_type_i64() { + Primitive::I64 + } else if type_id == builder.create_type_u8() { + Primitive::U8 
+ } else if type_id == builder.create_type_u16() { + Primitive::U16 + } else if type_id == builder.create_type_u32() { + Primitive::U32 + } else if type_id == builder.create_type_u64() { + Primitive::U64 + } else if type_id == builder.create_type_f32() { + Primitive::F32 + } else if type_id == builder.create_type_f64() { + Primitive::F64 + } else { + panic!("as_numeric_type() called on non-numeric type") + } + } + + fn build_primitive(builder: &mut Builder, p: Primitive) -> TypeID { match p { Primitive::Bool => builder.create_type_bool(), - Primitive::I8 => builder.create_type_i8(), - Primitive::I16 => builder.create_type_i16(), - Primitive::I32 => builder.create_type_i32(), - Primitive::I64 => builder.create_type_i64(), - Primitive::U8 => builder.create_type_u8(), - Primitive::U16 => builder.create_type_u16(), - Primitive::U32 => builder.create_type_u32(), - Primitive::U64 => builder.create_type_u64(), - Primitive::F32 => builder.create_type_f32(), - Primitive::F64 => builder.create_type_f64(), + Primitive::I8 => builder.create_type_i8(), + Primitive::I16 => builder.create_type_i16(), + Primitive::I32 => builder.create_type_i32(), + Primitive::I64 => builder.create_type_i64(), + Primitive::U8 => builder.create_type_u8(), + Primitive::U16 => builder.create_type_u16(), + Primitive::U32 => builder.create_type_u32(), + Primitive::U64 => builder.create_type_u64(), + Primitive::F32 => builder.create_type_f32(), + Primitive::F64 => builder.create_type_f64(), Primitive::Unit => builder.create_type_prod(vec![].into()), } } - fn build_product(builder : &mut Builder, tys : Vec<TypeID>) -> TypeID { + fn build_product(builder: &mut Builder, tys: Vec<TypeID>) -> TypeID { builder.create_type_prod(tys.into()) } - fn build_union(builder : &mut Builder, tys : Vec<TypeID>) -> TypeID { + fn build_union(builder: &mut Builder, tys: Vec<TypeID>) -> TypeID { builder.create_type_sum(tys.into()) } - fn build_array(builder : &mut Builder, elem : TypeID, dims : &Vec<DynConst>) -> TypeID { + 
fn build_array(builder: &mut Builder, elem: TypeID, dims: &Vec<DynConst>) -> TypeID { let extents = Self::build_dyn_consts(builder, dims); builder.create_type_array(elem, extents.into()) } - pub fn build_dyn_consts(builder : &mut Builder, vals : &Vec<DynConst>) -> Vec<DynamicConstantID> { + pub fn build_dyn_consts(builder: &mut Builder, vals: &Vec<DynConst>) -> Vec<DynamicConstantID> { let mut res = vec![]; for val in vals { res.push(val.build(builder)); diff --git a/juno_samples/matmul/Cargo.toml b/juno_samples/matmul/Cargo.toml new file mode 100644 index 00000000..dd40d209 --- /dev/null +++ b/juno_samples/matmul/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "juno_matmul" +version = "0.1.0" +authors = ["Aaron Councilman <aaronjc4@illinois.edu>"] +edition = "2021" + +[[bin]] +name = "juno_matmul" +path = "src/main.rs" + +[build-dependencies] +juno_build = { path = "../../juno_build" } + +[dependencies] +juno_build = { path = "../../juno_build" } +hercules_rt = { path = "../../hercules_rt" } +with_builtin_macros = "0.1.0" +async-std = "*" diff --git a/juno_samples/matmul/build.rs b/juno_samples/matmul/build.rs new file mode 100644 index 00000000..e68df998 --- /dev/null +++ b/juno_samples/matmul/build.rs @@ -0,0 +1,12 @@ +extern crate juno_build; +use juno_build::JunoCompiler; + +fn main() { + JunoCompiler::new() + .file_in_src("matmul.jn") + .unwrap() + .schedule_in_src("matmul.sch") + .unwrap() + .build() + .unwrap(); +} diff --git a/juno_samples/matmul/src/main.rs b/juno_samples/matmul/src/main.rs new file mode 100644 index 00000000..e6a73a3f --- /dev/null +++ b/juno_samples/matmul/src/main.rs @@ -0,0 +1,19 @@ +#![feature(future_join)] + +extern crate async_std; +extern crate juno_build; +extern crate hercules_rt; + +juno_build::juno!("matmul.jn"); + +fn main() { + async_std::task::block_on(async { + let mut a = vec![1.0, 2.0, 3.0, 4.0]; + let mut b = vec![5.0, 6.0, 7.0, 8.0]; + let mut c = vec![0.0, 0.0, 0.0, 0.0]; + unsafe { + matmul(a.as_mut_prt(), 
b.as_mut_ptr(), c.as_mut_ptr(), 2, 2, 2).await; + } + println!("[[{}, {}], [{}, {}]]", c[0], c[1], c[2], c[3]); + }); +} diff --git a/juno_samples/matmul/src/matmul.jn b/juno_samples/matmul/src/matmul.jn new file mode 100644 index 00000000..2dc5ec3d --- /dev/null +++ b/juno_samples/matmul/src/matmul.jn @@ -0,0 +1,15 @@ +#[entry] +fn matmul<n : usize, m : usize, l : usize>(a : f32[n, m], b : f32[m, l]) -> f32[n, l] { + let res : f32[n, l]; + + @outer for i = 0 to n { + @middle for j = 0 to l { + @inner for k = 0 to m { + res[i, j] += a[i, k] * b[k, j]; + } + } + } + + @exit + return res; +} diff --git a/juno_samples/matmul/src/matmul.sch b/juno_samples/matmul/src/matmul.sch new file mode 100644 index 00000000..bbc7ed0e --- /dev/null +++ b/juno_samples/matmul/src/matmul.sch @@ -0,0 +1,7 @@ +function matmul { + partition { @outer, @middle, @inner } on gpu + partition @exit on cpu + + parallelize @outer + vectorize @inner +} diff --git a/juno_samples/simple3/Cargo.toml b/juno_samples/simple3/Cargo.toml new file mode 100644 index 00000000..8060c5b3 --- /dev/null +++ b/juno_samples/simple3/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "juno_simple3" +version = "0.1.0" +authors = ["Aaron Councilman <aaronjc4@illinois.edu>"] +edition = "2021" + +[[bin]] +name = "juno_simple3" +path = "src/main.rs" + +[build-dependencies] +juno_build = { path = "../../juno_build" } + +[dependencies] +juno_build = { path = "../../juno_build" } +hercules_rt = { path = "../../hercules_rt" } +with_builtin_macros = "0.1.0" +async-std = "*" diff --git a/juno_samples/simple3/build.rs b/juno_samples/simple3/build.rs new file mode 100644 index 00000000..38b198b0 --- /dev/null +++ b/juno_samples/simple3/build.rs @@ -0,0 +1,12 @@ +extern crate juno_build; +use juno_build::JunoCompiler; + +fn main() { + JunoCompiler::new() + .file_in_src("simple3.jn") + .unwrap() + .schedule_in_src("simple3.sch") + .unwrap() + .build() + .unwrap(); +} diff --git a/juno_samples/simple3/src/main.rs 
b/juno_samples/simple3/src/main.rs new file mode 100644 index 00000000..441408ee --- /dev/null +++ b/juno_samples/simple3/src/main.rs @@ -0,0 +1,18 @@ +#![feature(future_join)] + +extern crate async_std; +extern crate juno_build; +extern crate hercules_rt; + +juno_build::juno!("simple3"); + +fn main() { + async_std::task::block_on(async { + let mut a = vec![1, 2, 3, 4, 5, 6, 7, 8]; + let mut b = vec![8, 7, 6, 5, 4, 3, 2, 1]; + unsafe { + let c = simple3(a.as_mut_ptr(), b.as_mut_ptr(), 8).await; + println!("{:?}", c); + } + }); +} diff --git a/juno_samples/simple3/src/simple3.jn b/juno_samples/simple3/src/simple3.jn new file mode 100644 index 00000000..31cf78bd --- /dev/null +++ b/juno_samples/simple3/src/simple3.jn @@ -0,0 +1,12 @@ +#[entry] +fn simple3<n : usize>(a : i32[n], b : i32[n]) -> i32 { + let res : i32 = 0; + + @loop + for i = 0 to n { + res += a[i] * b[i]; + } + + @exit + return res; +} diff --git a/juno_samples/simple3/src/simple3.sch b/juno_samples/simple3/src/simple3.sch new file mode 100644 index 00000000..b3842bee --- /dev/null +++ b/juno_samples/simple3/src/simple3.sch @@ -0,0 +1,6 @@ +function simple3 { + partition @loop on cpu + partition @exit on cpu + + vectorize @loop +} diff --git a/juno_scheduler/Cargo.toml b/juno_scheduler/Cargo.toml new file mode 100644 index 00000000..49e5f4a3 --- /dev/null +++ b/juno_scheduler/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "juno_scheduler" +version = "0.0.1" +authors = ["Aaron Councilman <aaronjc4@illinois.edu>"] +edition = "2021" + +[build-dependencies] +cfgrammar = "0.13" +lrlex = "0.13" +lrpar = "0.13" + +[dependencies] +cfgrammar = "0.13" +lrlex = "0.13" +lrpar = "0.13" +hercules_ir = { path = "../hercules_ir" } diff --git a/juno_scheduler/build.rs b/juno_scheduler/build.rs new file mode 100644 index 00000000..eac7103d --- /dev/null +++ b/juno_scheduler/build.rs @@ -0,0 +1,15 @@ +use cfgrammar::yacc::YaccKind; +use lrlex::CTLexerBuilder; + +fn main() { + CTLexerBuilder::new() + .lrpar_config(|ctp| { 
+ ctp.yacckind(YaccKind::Grmtools) + .grammar_in_src_dir("lang.y") + .unwrap() + }) + .lexer_in_src_dir("lang.l") + .unwrap() + .build() + .unwrap(); +} diff --git a/juno_scheduler/examples/matmul.sch b/juno_scheduler/examples/matmul.sch new file mode 100644 index 00000000..bbc7ed0e --- /dev/null +++ b/juno_scheduler/examples/matmul.sch @@ -0,0 +1,7 @@ +function matmul { + partition { @outer, @middle, @inner } on gpu + partition @exit on cpu + + parallelize @outer + vectorize @inner +} diff --git a/juno_scheduler/examples/simple3.sch b/juno_scheduler/examples/simple3.sch new file mode 100644 index 00000000..b3842bee --- /dev/null +++ b/juno_scheduler/examples/simple3.sch @@ -0,0 +1,6 @@ +function simple3 { + partition @loop on cpu + partition @exit on cpu + + vectorize @loop +} diff --git a/juno_scheduler/src/lang.l b/juno_scheduler/src/lang.l new file mode 100644 index 00000000..e6526c74 --- /dev/null +++ b/juno_scheduler/src/lang.l @@ -0,0 +1,27 @@ +%x comment +%% +/\* <+comment>; + +<comment>/\* <+comment>; +<comment>\*+/ <-comment>; +<comment>\*+ ; +<comment>[\n\r] ; +<comment>. ; + +//[^\n\r]* ; +[\t ]+ ; +[\n\r] ; + +, "," + +\{ "{" +\} "}" + +function "function" +on "on" +partition "partition" + +[a-zA-Z][a-zA-Z0-9_]* "ID" +@[a-zA-Z0-9_]+ "LABEL" + +. 
"UNMATCHED" diff --git a/juno_scheduler/src/lang.y b/juno_scheduler/src/lang.y new file mode 100644 index 00000000..e7d98dba --- /dev/null +++ b/juno_scheduler/src/lang.y @@ -0,0 +1,94 @@ +%start Schedule + +%avoid_insert "ID" "LABEL" +%expect-unused Unmatched 'UNMATCHED' + +%% + +Schedule -> Vec<FuncDirectives> : FunctionList { $1 }; + +FunctionList -> Vec<FuncDirectives> + : { vec![] } + | FunctionList FunctionDef { snoc($1, $2) } + ; + +FunctionDef -> FuncDirectives + : 'function' Func '{' DirectiveList '}' + { FuncDirectives { span : $span, func : $2, directives : $4 }}; + +DirectiveList -> Vec<Directive> + : { vec![] } + | DirectiveList Directive { snoc($1, $2) } + ; + +Directive -> Directive + : 'partition' Labels 'on' Devices + { Directive::Partition { span : $span, labels : $2, devices : $4 } } + | 'ID' Labels + { Directive::Schedule { span : $span, command : span_of_tok($1), args : $2 } } + ; + +Func -> Func + : 'ID' { Func { span : $span, name : $span, }} + ; + +Labels -> Vec<Span> + : 'LABEL' { vec![span_of_tok($1)] } + | '{' LabelsRev '}' { rev($2) } + ; +LabelsRev -> Vec<Span> + : { vec![] } + | 'LABEL' { vec![span_of_tok($1)] } + | 'LABEL' ',' LabelsRev { cons(span_of_tok($1), $3) } + ; + +Devices -> Vec<Device> + : Device { vec![$1] } + | '{' SomeDevices '}' { $2 } + ; +SomeDevices -> Vec<Device> + : Device { vec![$1] } + | SomeDevices ',' Device { snoc($1, $3) } + ; + +Device -> Device + : 'ID' + { Device { span : $span, name : span_of_tok($1), } } + ; + +Unmatched -> () : 'UNMATCHED' {}; + +%% + +use cfgrammar::Span; +use lrlex::DefaultLexeme; + +fn span_of_tok(t : Result<DefaultLexeme, DefaultLexeme>) -> Span { + t.map_err(|_| ()).map(|l| l.span()).unwrap() +} + +fn cons<A>(hd : A, mut tl : Vec<A>) -> Vec<A> { + tl.push(hd); + tl +} + +fn snoc<A>(mut hd : Vec<A>, tl : A) -> Vec<A> { + hd.push(tl); + hd +} + +fn rev<A>(mut lst : Vec<A>) -> Vec<A> { + lst.reverse(); + lst +} + +pub struct Func { pub span : Span, pub name : Span, } +pub struct Device 
{ pub span : Span, pub name : Span, } + +pub struct FuncDirectives { pub span : Span, pub func : Func, + pub directives : Vec<Directive> } + +pub enum Directive { + Schedule { span : Span, command : Span, args : Vec<Span> }, + Partition { span : Span, labels : Vec<Span>, devices : Vec<Device> }, +} diff --git a/juno_scheduler/src/lib.rs b/juno_scheduler/src/lib.rs new file mode 100644 index 00000000..36ea79e9 --- /dev/null +++ b/juno_scheduler/src/lib.rs @@ -0,0 +1,339 @@ +extern crate hercules_ir; + +use std::collections::{HashMap, HashSet}; +use std::fs::File; +use std::io::Read; + +use lrlex::DefaultLexerTypes; +use lrpar::NonStreamingLexer; + +use self::hercules_ir::ir::*; +use self::hercules_ir::schedule::*; + +mod parser; +use crate::parser::lexer; + +// FunctionMap tracks a map from function numbers (produced by semantic analysis) to a tuple of +// - The map from label names to their numbers +// - The name of the function +// - A list of the instances of the function tracking +// + The instantiated type variables +// + The resulting FunctionID +// + A list of each label, tracking the structure at the label and a set of +// the labels which are its descendants +// + A map from NodeID to the innermost label containing it +// This is the core data structure provided from code generation, along with the +// module +pub type FunctionMap = HashMap< + usize, + ( + HashMap<String, usize>, + String, + Vec<( + Vec<TypeID>, + FunctionID, + Vec<(LabeledStructure, HashSet<usize>)>, + HashMap<NodeID, usize>, + )>, + ), +>; +// LabeledStructure represents structures from the source code and where they +// exist in the IR +#[derive(Copy, Clone)] +pub enum LabeledStructure { + Nothing(), + Expression(NodeID), + Loop(NodeID), // Header + Branch(NodeID), // If node +} + +pub fn schedule(module: &Module, info: FunctionMap, schedule: String) -> Result<Vec<Plan>, String> { + if let Ok(mut file) = File::open(schedule) { + let mut contents = String::new(); + if let Ok(_) = 
file.read_to_string(&mut contents) { + let lexerdef = lexer::lexerdef(); + let lexer = lexerdef.lexer(&contents); + let (res, errs) = parser::parse(&lexer); + + if errs.is_empty() { + match res { + None => Err(format!("No parse errors, but no parsing failed")), + Some(schd) => { + let mut sched = generate_schedule(module, info, schd, &lexer)?; + let mut schedules = vec![]; + for i in 0..sched.len() { + schedules.push(sched.remove(&FunctionID::new(i)).unwrap()); + } + Ok(schedules) + } + } + } else { + Err(errs + .iter() + .map(|e| format!("Syntax Error: {}", e.pp(&lexer, &parser::token_epp))) + .collect::<Vec<_>>() + .join("\n")) + } + } else { + Err(format!("Unable to read input file")) + } + } else { + Err(format!("Unable to open input file")) + } +} + +// a plan that tracks additional information useful while we construct the +// schedule +struct TempPlan { + schedules: Vec<Vec<Schedule>>, + // we track both the partition each node is in and what labeled caused us + // to assign that partition + partitions: Vec<(usize, PartitionNumber)>, + partition_devices: Vec<Vec<Device>>, +} +type PartitionNumber = usize; + +impl Into<Plan> for TempPlan { + fn into(self) -> Plan { + let num_partitions = self.partition_devices.len(); + Plan { + schedules: self.schedules, + partitions: self + .partitions + .into_iter() + .map(|(_, n)| PartitionID::new(n)) + .collect::<Vec<_>>(), + partition_devices: self + .partition_devices + .into_iter() + .map(|mut d| { + if d.len() != 1 { + panic!("Partition with multiple devices") + } else { + d.pop().unwrap() + } + }) + .collect::<Vec<_>>(), + num_partitions: num_partitions, + } + } +} + +fn generate_schedule( + module: &Module, + info: FunctionMap, + schedule: Vec<parser::FuncDirectives>, + lexer: &dyn NonStreamingLexer<DefaultLexerTypes<u32>>, +) -> Result<HashMap<FunctionID, Plan>, String> { + let mut res: HashMap<FunctionID, TempPlan> = HashMap::new(); + + // We initialize every node in every function as not having any schedule + // 
and being in the default partition which is a CPU-only partition + // (a result of label 0) + for (_, (_, _, func_insts)) in info.iter() { + for (_, func_id, _, _) in func_insts.iter() { + let num_nodes = module.functions[func_id.idx()].nodes.len(); + res.insert( + *func_id, + TempPlan { + schedules: vec![vec![]; num_nodes], + partitions: vec![(0, 0); num_nodes], + partition_devices: vec![vec![Device::CPU]], + }, + ); + } + } + + // Construct a map from function names to function numbers + let mut function_names: HashMap<String, usize> = HashMap::new(); + for (num, (_, nm, _)) in info.iter() { + function_names.insert(nm.clone(), *num); + } + // Make the map immutable + let function_names = function_names; + + for parser::FuncDirectives { + span: _, + func, + directives, + } in schedule + { + // Identify the function + let parser::Func { + span: _, + name: func_name, + } = func; + let name = lexer.span_str(func_name).to_string(); + let func_num = match function_names.get(&name) { + Some(num) => num, + None => { + return Err(format!("Function {} is undefined", name)); + } + }; + + // Identify label information + let (label_map, _, func_inst) = info.get(func_num).unwrap(); + let get_label_num = |label_span| { + let label_name = lexer.span_str(label_span).to_string(); + match label_map.get(&label_name) { + Some(num) => Ok(*num), + None => Err(format!("Label {} undefined in {}", label_name, name)), + } + }; + + // Process the partitioning and scheduling directives for each instance + // of the function + for (_, func_id, label_info, node_labels) in func_inst { + let func_info = res.get_mut(func_id).unwrap(); + + for directive in &directives { + match directive { + parser::Directive::Partition { + span: _, + labels, + devices, + } => { + // Setup the new partition + let partition_num = func_info.partition_devices.len(); + let mut partition_devices = vec![]; + + for parser::Device { span: _, name } in devices { + let device_name = lexer.span_str(*name).to_string(); + if 
device_name == "cpu" { + partition_devices.push(Device::CPU); + } else if device_name == "gpu" { + partition_devices.push(Device::GPU); + } else { + return Err(format!("Invalid device {}", device_name)); + } + } + + func_info.partition_devices.push(partition_devices); + + for label in labels { + let label_num = get_label_num(*label)?; + let descendants = &label_info[label_num].1; + + node_labels + .iter() + .filter_map(|(node, label)| { + if *label == label_num || descendants.contains(label) { + Some(node.idx()) + } else { + None + } + }) + .for_each(|node| { + let node_part: &mut (usize, PartitionNumber) = + &mut func_info.partitions[node]; + if !descendants.contains(&node_part.0) { + *node_part = (label_num, partition_num); + } + }); + } + } + parser::Directive::Schedule { + span: _, + command, + args, + } => { + let command = lexer.span_str(*command).to_string(); + if command == "parallelize" { + for label in args { + let label_num = get_label_num(*label)?; + match label_info[label_num].0 { + LabeledStructure::Loop(header) => { + func_info.schedules[header.idx()] + .push(Schedule::ParallelReduce); + } + _ => { + return Err(format!( + "Cannot parallelize {}, not a loop", + lexer.span_str(*label) + )); + } + } + } + } else if command == "vectorize" { + for label in args { + let label_num = get_label_num(*label)?; + match label_info[label_num].0 { + LabeledStructure::Loop(header) => { + // FIXME: Take the factor as part of schedule + func_info.schedules[header.idx()] + .push(Schedule::Vectorizable(8)); + } + _ => { + return Err(format!( + "Cannot vectorize {}, not a loop", + lexer.span_str(*label) + )); + } + } + } + } else { + return Err(format!("Command {} undefined", command)); + } + } + } + } + /* + + /* + for parser::Command { span : _, name : command_name, + args : command_args } in commands.iter() { + if command_args.len() != 0 { todo!("Command arguments not supported") } + + let command = lexer.span_str(*command_name).to_string(); + if command == "cpu" || 
command == "gpu" { + let partition = res.get(func_id).unwrap() + .partition_devices.len(); + res.get_mut(func_id).unwrap().partition_devices.push( + if command == "cpu" { Device::CPU } + else { Device::GPU }); + + node_labels.iter() + .filter_map(|(node, label)| + if label_num == *label + || label_info[label_num].1.contains(&label) { + Some(node.idx()) + } else { + None + }) + .for_each(|node| { + let node_part : &mut (usize, PartitionNumber) = + &mut res.get_mut(func_id).unwrap().partitions[node]; + if !label_info[label_num].1.contains(&node_part.0) { + *node_part = (label_num, partition); + }}); + } else if command == "parallel" || command == "vectorize" { + match label_info[label_num].0 { + LabeledStructure::Loop(header) => { + res.get_mut(func_id).unwrap() + .schedules[header.idx()] + .push(if command == "parallel" { + Schedule::ParallelReduce + } else { + Schedule::Vectorize + }); + }, + _ => { + return Err(format!("Cannot parallelize, not a loop")); + }, + } + } else { + return Err(format!("Command {} undefined", command)); + } + } + */ + + func_info.partition_devices.push(partition_devices); + */ + } + } + + Ok(res + .into_iter() + .map(|(f, p)| (f, p.into())) + .collect::<HashMap<_, _>>()) +} diff --git a/juno_scheduler/src/parser.rs b/juno_scheduler/src/parser.rs new file mode 100644 index 00000000..89bdf2ec --- /dev/null +++ b/juno_scheduler/src/parser.rs @@ -0,0 +1,10 @@ +use lrlex::lrlex_mod; +use lrpar::lrpar_mod; + +lrlex_mod!("lang.l"); +lrpar_mod!("lang.y"); + +pub use lang_y::*; +pub mod lexer { + pub use super::lang_l::*; +} -- GitLab