Compare commits

..

No commits in common. "antiquated-master" and "peg-parser" have entirely different histories.

53 changed files with 3382 additions and 3059 deletions

356
Cargo.lock generated
View File

@ -13,9 +13,9 @@ dependencies = [
[[package]] [[package]]
name = "aho-corasick" name = "aho-corasick"
version = "0.7.20" version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac" checksum = "58fb5e95d83b38284460a5fda7d6470aa0b8844d283a0b614b8535e880800d2d"
dependencies = [ dependencies = [
"memchr", "memchr",
] ]
@ -29,6 +29,12 @@ dependencies = [
"winapi 0.3.8", "winapi 0.3.8",
] ]
[[package]]
name = "approx"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08abcc3b4e9339e33a3d0a5ed15d84a687350c05689d825e0f6655eef9e76a94"
[[package]] [[package]]
name = "arrayref" name = "arrayref"
version = "0.3.5" version = "0.3.5"
@ -104,12 +110,6 @@ dependencies = [
"constant_time_eq", "constant_time_eq",
] ]
[[package]]
name = "bytecount"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72feb31ffc86498dacdbd0fcebb56138e7177a8cc5cea4516031d15ae85a742e"
[[package]] [[package]]
name = "byteorder" name = "byteorder"
version = "1.3.2" version = "1.3.2"
@ -134,6 +134,17 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "cgmath"
version = "0.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64a4b57c8f4e3a2e9ac07e0f6abc9c24b6fc9e1b54c3478cfb598f3d0023e51c"
dependencies = [
"approx",
"num-traits 0.1.43",
"rand 0.4.6",
]
[[package]] [[package]]
name = "cloudabi" name = "cloudabi"
version = "0.0.3" version = "0.0.3"
@ -145,13 +156,12 @@ dependencies = [
[[package]] [[package]]
name = "colored" name = "colored"
version = "1.9.4" version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a5f741c91823341bebf717d4c71bda820630ce065443b58bd1b7451af008355" checksum = "6cdb90b60f2927f8d76139c72dbde7e10c3a2bc47c8594c9c7a66529f2687c03"
dependencies = [ dependencies = [
"is-terminal",
"lazy_static 1.4.0", "lazy_static 1.4.0",
"winapi 0.3.8", "winconsole",
] ]
[[package]] [[package]]
@ -176,19 +186,19 @@ version = "0.1.21"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccc0a48a9b826acdf4028595adc9db92caea352f7af011a3034acd172a52a0aa" checksum = "ccc0a48a9b826acdf4028595adc9db92caea352f7af011a3034acd172a52a0aa"
dependencies = [ dependencies = [
"quote", "quote 1.0.10",
"syn", "syn 1.0.80",
] ]
[[package]] [[package]]
name = "derivative" name = "derivative"
version = "2.2.0" version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" checksum = "942ca430eef7a3806595a6737bc388bf51adb888d3fc0dd1b50f1c170167ee3a"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2 0.4.30",
"quote", "quote 0.6.13",
"syn", "syn 0.15.44",
] ]
[[package]] [[package]]
@ -231,9 +241,9 @@ checksum = "c34f04666d835ff5d62e058c3995147c06f42fe86ff053337632bca83e42702d"
[[package]] [[package]]
name = "failure" name = "failure"
version = "0.1.8" version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d32e9bd16cc02eae7db7ef620b392808b89f6a5e16bb3497d159c6b92a0f4f86" checksum = "795bd83d3abeb9220f257e597aa0080a508b27533824adf336529648f6abf7e2"
dependencies = [ dependencies = [
"backtrace", "backtrace",
"failure_derive", "failure_derive",
@ -241,13 +251,13 @@ dependencies = [
[[package]] [[package]]
name = "failure_derive" name = "failure_derive"
version = "0.1.8" version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4" checksum = "ea1063915fd7ef4309e222a5a07cf9c319fb9c7836b1f89b85458672dbb127e1"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2 0.4.30",
"quote", "quote 0.6.13",
"syn", "syn 0.15.44",
"synstructure", "synstructure",
] ]
@ -282,12 +292,6 @@ dependencies = [
"unicode-width", "unicode-width",
] ]
[[package]]
name = "hermit-abi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024"
[[package]] [[package]]
name = "includedir" name = "includedir"
version = "0.2.2" version = "0.2.2"
@ -309,31 +313,20 @@ dependencies = [
"walkdir", "walkdir",
] ]
[[package]]
name = "is-terminal"
version = "0.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f23ff5ef2b80d608d61efee834934d862cd92461afc0560dedf493e4c033738b"
dependencies = [
"hermit-abi",
"libc",
"windows-sys",
]
[[package]] [[package]]
name = "itertools" name = "itertools"
version = "0.10.5" version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" checksum = "69ddb889f9d0d08a67338271fa9b62996bc788c7796a5c18cf057420aaed5eaf"
dependencies = [ dependencies = [
"either", "either",
] ]
[[package]] [[package]]
name = "itoa" name = "itoa"
version = "1.0.11" version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" checksum = "501266b7edd0174f8530248f87f99c88fbe60ca4ef3dd486835b8d8d53136f7f"
[[package]] [[package]]
name = "kernel32-sys" name = "kernel32-sys"
@ -383,7 +376,7 @@ dependencies = [
"cc", "cc",
"lazy_static 1.4.0", "lazy_static 1.4.0",
"libc", "libc",
"regex 1.7.3", "regex 1.3.1",
"semver", "semver",
] ]
@ -404,15 +397,9 @@ checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d"
[[package]] [[package]]
name = "memchr" name = "memchr"
version = "2.4.1" version = "2.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" checksum = "88579771288728879b57485cc7d6b07d648c9f0141eb955f8ab7f9d45394468e"
[[package]]
name = "minimal-lexical"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]] [[package]]
name = "miniz-sys" name = "miniz-sys"
@ -475,27 +462,6 @@ dependencies = [
"version_check 0.1.5", "version_check 0.1.5",
] ]
[[package]]
name = "nom"
version = "7.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
dependencies = [
"memchr",
"minimal-lexical",
]
[[package]]
name = "nom_locate"
version = "4.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e3c83c053b0713da60c5b8de47fe8e494fe3ece5267b2f23090a07a053ba8f3"
dependencies = [
"bytecount",
"memchr",
"nom 7.1.3",
]
[[package]] [[package]]
name = "num" name = "num"
version = "0.1.42" version = "0.1.42"
@ -507,7 +473,7 @@ dependencies = [
"num-integer", "num-integer",
"num-iter", "num-iter",
"num-rational", "num-rational",
"num-traits", "num-traits 0.2.8",
] ]
[[package]] [[package]]
@ -517,7 +483,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e63899ad0da84ce718c14936262a41cee2c79c981fc0a0e7c7beb47d5a07e8c1" checksum = "e63899ad0da84ce718c14936262a41cee2c79c981fc0a0e7c7beb47d5a07e8c1"
dependencies = [ dependencies = [
"num-integer", "num-integer",
"num-traits", "num-traits 0.2.8",
"rand 0.4.6", "rand 0.4.6",
"rustc-serialize", "rustc-serialize",
] ]
@ -528,7 +494,7 @@ version = "0.1.43"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b288631d7878aaf59442cffd36910ea604ecd7745c36054328595114001c9656" checksum = "b288631d7878aaf59442cffd36910ea604ecd7745c36054328595114001c9656"
dependencies = [ dependencies = [
"num-traits", "num-traits 0.2.8",
"rustc-serialize", "rustc-serialize",
] ]
@ -539,7 +505,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b85e541ef8255f6cf42bbfe4ef361305c6c135d10919ecc26126c4e5ae94bc09" checksum = "b85e541ef8255f6cf42bbfe4ef361305c6c135d10919ecc26126c4e5ae94bc09"
dependencies = [ dependencies = [
"autocfg", "autocfg",
"num-traits", "num-traits 0.2.8",
] ]
[[package]] [[package]]
@ -550,7 +516,7 @@ checksum = "76bd5272412d173d6bf9afdf98db8612bbabc9a7a830b7bfc9c188911716132e"
dependencies = [ dependencies = [
"autocfg", "autocfg",
"num-integer", "num-integer",
"num-traits", "num-traits 0.2.8",
] ]
[[package]] [[package]]
@ -561,10 +527,19 @@ checksum = "ee314c74bd753fc86b4780aa9475da469155f3848473a261d2d18e35245a784e"
dependencies = [ dependencies = [
"num-bigint", "num-bigint",
"num-integer", "num-integer",
"num-traits", "num-traits 0.2.8",
"rustc-serialize", "rustc-serialize",
] ]
[[package]]
name = "num-traits"
version = "0.1.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92e5113e9fd4cc14ded8e499429f396a20f98c772a47cc8622a736e1ec843c31"
dependencies = [
"num-traits 0.2.8",
]
[[package]] [[package]]
name = "num-traits" name = "num-traits"
version = "0.2.8" version = "0.2.8"
@ -585,9 +560,8 @@ dependencies = [
[[package]] [[package]]
name = "peg" name = "peg"
version = "0.8.4" version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "git+https://github.com/kevinmehall/rust-peg?rev=960222580c8da25b17d32c2aae6f52f902728b62#960222580c8da25b17d32c2aae6f52f902728b62"
checksum = "295283b02df346d1ef66052a757869b2876ac29a6bb0ac3f5f7cd44aebe40e8f"
dependencies = [ dependencies = [
"peg-macros", "peg-macros",
"peg-runtime", "peg-runtime",
@ -595,20 +569,18 @@ dependencies = [
[[package]] [[package]]
name = "peg-macros" name = "peg-macros"
version = "0.8.4" version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "git+https://github.com/kevinmehall/rust-peg?rev=960222580c8da25b17d32c2aae6f52f902728b62#960222580c8da25b17d32c2aae6f52f902728b62"
checksum = "bdad6a1d9cf116a059582ce415d5f5566aabcd4008646779dab7fdc2a9a9d426"
dependencies = [ dependencies = [
"peg-runtime", "peg-runtime",
"proc-macro2", "proc-macro2 1.0.30",
"quote", "quote 1.0.10",
] ]
[[package]] [[package]]
name = "peg-runtime" name = "peg-runtime"
version = "0.8.3" version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "git+https://github.com/kevinmehall/rust-peg?rev=960222580c8da25b17d32c2aae6f52f902728b62#960222580c8da25b17d32c2aae6f52f902728b62"
checksum = "e3aeb8f54c078314c2065ee649a7241f46b9d8e418e1a9581ba0546657d7aa3a"
[[package]] [[package]]
name = "phf" name = "phf"
@ -660,13 +632,31 @@ dependencies = [
"output_vt100", "output_vt100",
] ]
[[package]]
name = "proc-macro2"
version = "0.4.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759"
dependencies = [
"unicode-xid 0.1.0",
]
[[package]] [[package]]
name = "proc-macro2" name = "proc-macro2"
version = "1.0.30" version = "1.0.30"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "edc3358ebc67bc8b7fa0c007f945b0b18226f78437d61bec735a9eb96b61ee70" checksum = "edc3358ebc67bc8b7fa0c007f945b0b18226f78437d61bec735a9eb96b61ee70"
dependencies = [ dependencies = [
"unicode-xid", "unicode-xid 0.2.0",
]
[[package]]
name = "quote"
version = "0.6.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1"
dependencies = [
"proc-macro2 0.4.30",
] ]
[[package]] [[package]]
@ -675,14 +665,14 @@ version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38bc8cc6a5f2e3655e0899c1b848643b2562f853f114bfec7be120678e3ace05" checksum = "38bc8cc6a5f2e3655e0899c1b848643b2562f853f114bfec7be120678e3ace05"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2 1.0.30",
] ]
[[package]] [[package]]
name = "radix_trie" name = "radix_trie"
version = "0.1.6" version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d3681b28cd95acfb0560ea9441f82d6a4504fa3b15b97bd7b6e952131820e95" checksum = "deafbff7bda0a58975ce92723aa18be10eae2e9271f3c3f48de52d131d9f248c"
dependencies = [ dependencies = [
"endian-type", "endian-type",
"nibble_vec", "nibble_vec",
@ -849,13 +839,14 @@ dependencies = [
[[package]] [[package]]
name = "regex" name = "regex"
version = "1.7.3" version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d" checksum = "dc220bd33bdce8f093101afe22a037b8eb0e5af33592e6a9caafff0d4cb81cbd"
dependencies = [ dependencies = [
"aho-corasick 0.7.20", "aho-corasick 0.7.6",
"memchr", "memchr",
"regex-syntax 0.6.29", "regex-syntax 0.6.12",
"thread_local",
] ]
[[package]] [[package]]
@ -869,9 +860,15 @@ dependencies = [
[[package]] [[package]]
name = "regex-syntax" name = "regex-syntax"
version = "0.6.29" version = "0.6.12"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" checksum = "11a7e20d1cce64ef2fed88b66d347f88bd9babb82845b2b858f3edbf59a4f716"
[[package]]
name = "rgb"
version = "0.8.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2089e4031214d129e201f8c3c8c2fe97cd7322478a0d1cdf78e7029b0042efdb"
[[package]] [[package]]
name = "rust-argon2" name = "rust-argon2"
@ -892,9 +889,9 @@ checksum = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783"
[[package]] [[package]]
name = "rustc-serialize" name = "rustc-serialize"
version = "0.3.25" version = "0.3.24"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe834bc780604f4674073badbad26d7219cadfb4a2275802db12cbae17498401" checksum = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda"
[[package]] [[package]]
name = "ryu" name = "ryu"
@ -922,17 +919,24 @@ dependencies = [
"ena", "ena",
"failure", "failure",
"itertools", "itertools",
"nom 7.1.3",
"nom_locate",
"peg", "peg",
"pretty_assertions", "pretty_assertions",
"radix_trie", "radix_trie",
"schala-lang-codegen",
"schala-repl", "schala-repl",
"stopwatch", "stopwatch",
"take_mut", "take_mut",
"test-case", "test-case",
] ]
[[package]]
name = "schala-lang-codegen"
version = "0.1.0"
dependencies = [
"quote 0.6.13",
"syn 0.15.44",
]
[[package]] [[package]]
name = "schala-repl" name = "schala-repl"
version = "0.1.0" version = "0.1.0"
@ -970,26 +974,26 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
[[package]] [[package]]
name = "serde" name = "serde"
version = "1.0.185" version = "1.0.101"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be9b6f69f1dfd54c3b568ffa45c310d6973a5e5148fd40cf515acaf38cf5bc31" checksum = "9796c9b7ba2ffe7a9ce53c2287dfc48080f4b2b362fcc245a259b3a7201119dd"
[[package]] [[package]]
name = "serde_derive" name = "serde_derive"
version = "1.0.136" version = "1.0.101"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9" checksum = "4b133a43a1ecd55d4086bd5b4dc6c1751c68b1bfbeba7a5040442022c7e7c02e"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2 1.0.30",
"quote", "quote 1.0.10",
"syn", "syn 1.0.80",
] ]
[[package]] [[package]]
name = "serde_json" name = "serde_json"
version = "1.0.109" version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb0652c533506ad7a2e353cce269330d6afd8bdfb6d75e0ace5b35aacbd7b9e9" checksum = "051c49229f282f7c6f3813f8286cc1e3323e8051823fce42c7ea80fe13521704"
dependencies = [ dependencies = [
"itoa", "itoa",
"ryu", "ryu",
@ -1026,27 +1030,38 @@ dependencies = [
"num", "num",
] ]
[[package]]
name = "syn"
version = "0.15.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5"
dependencies = [
"proc-macro2 0.4.30",
"quote 0.6.13",
"unicode-xid 0.1.0",
]
[[package]] [[package]]
name = "syn" name = "syn"
version = "1.0.80" version = "1.0.80"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d010a1623fbd906d51d650a9916aaefc05ffa0e4053ff7fe601167f3e715d194" checksum = "d010a1623fbd906d51d650a9916aaefc05ffa0e4053ff7fe601167f3e715d194"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2 1.0.30",
"quote", "quote 1.0.10",
"unicode-xid", "unicode-xid 0.2.0",
] ]
[[package]] [[package]]
name = "synstructure" name = "synstructure"
version = "0.12.6" version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" checksum = "02353edf96d6e4dc81aea2d8490a7e9db177bf8acb0e951c24940bf866cb313f"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2 0.4.30",
"quote", "quote 0.6.13",
"syn", "syn 0.15.44",
"unicode-xid", "unicode-xid 0.1.0",
] ]
[[package]] [[package]]
@ -1062,7 +1077,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e51065bafd2abe106b6036483b69d1741f4a1ec56ce8a2378de341637de689e" checksum = "8e51065bafd2abe106b6036483b69d1741f4a1ec56ce8a2378de341637de689e"
dependencies = [ dependencies = [
"fnv", "fnv",
"nom 4.2.3", "nom",
"phf", "phf",
"phf_codegen", "phf_codegen",
] ]
@ -1074,9 +1089,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b114ece25254e97bf48dd4bfc2a12bad0647adacfe4cae1247a9ca6ad302cec" checksum = "3b114ece25254e97bf48dd4bfc2a12bad0647adacfe4cae1247a9ca6ad302cec"
dependencies = [ dependencies = [
"cfg-if 1.0.0", "cfg-if 1.0.0",
"proc-macro2", "proc-macro2 1.0.30",
"quote", "quote 1.0.10",
"syn", "syn 1.0.80",
"version_check 0.9.3", "version_check 0.9.3",
] ]
@ -1110,6 +1125,12 @@ version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7007dbd421b92cc6e28410fe7362e2e0a2503394908f417b68ec8d1c364c4e20" checksum = "7007dbd421b92cc6e28410fe7362e2e0a2503394908f417b68ec8d1c364c4e20"
[[package]]
name = "unicode-xid"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
[[package]] [[package]]
name = "unicode-xid" name = "unicode-xid"
version = "0.2.0" version = "0.2.0"
@ -1185,74 +1206,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]] [[package]]
name = "windows-sys" name = "winconsole"
version = "0.52.0" version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" checksum = "3ef84b96d10db72dd980056666d7f1e7663ce93d82fa33b63e71c966f4cf5032"
dependencies = [ dependencies = [
"windows-targets", "cgmath",
"lazy_static 1.4.0",
"rgb",
"winapi 0.3.8",
] ]
[[package]]
name = "windows-targets"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_gnullvm",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
[[package]]
name = "windows_i686_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
[[package]]
name = "windows_i686_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
[[package]]
name = "windows_i686_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"

View File

@ -9,7 +9,7 @@ resolver = "2"
getopts = "0.2.21" getopts = "0.2.21"
schala-repl = { path = "schala-repl" } schala-repl = { path = "schala-repl" }
schala-lang = { path = "schala-lang" } schala-lang = { path = "schala-lang/language" }
# maaru-lang = { path = "maaru" } # maaru-lang = { path = "maaru" }
# rukka-lang = { path = "rukka" } # rukka-lang = { path = "rukka" }
# robo-lang = { path = "robo" } # robo-lang = { path = "robo" }

View File

@ -20,26 +20,6 @@ environment. Type `:help` for more information, or type in text in any
supported programming language (currently only `schala-lang`) to evaluate it in supported programming language (currently only `schala-lang`) to evaluate it in
the REPL. the REPL.
### Examples
Try running the following `schala-lang` code example in the REPL:
```
>> 1 + 1
(Total time)=> 736.368µs
=> 2
>> fn foo(x) { x + 10 }
(Total time)=> 772.496µs
=>
>> foo(0)
(Total time)=> 593.591µs
=> 10
>> 5 + foo(1)
(Total time)=> 1.119916ms
=> 16
>>
```
## History ## History
Schala started out life as an experiment in writing a Javascript-like Schala started out life as an experiment in writing a Javascript-like

17
TODO.md
View File

@ -1,16 +1,5 @@
# Immediate TODOs / General Code Cleanup # Immediate TODOs / General Code Cleanup
## Parsing
* cf. https://siraben.dev/2022/03/22/tree-sitter-linter.html write a tree-sitter parser for Schala
* Create a macro system, perhaps c.f. Crystal's?
* Macro system should be able to implement:
* printf-style variadic arguments
* something like the Rust/Haskell `Derive` construct
* doing useful things with all variants of an enum
* (e.g. what https://matklad.github.io//2022/03/26/self-modifying-code.html tries to solve)
## Testing ## Testing
@ -34,6 +23,9 @@
2. Once FQSNs are aware of function parameters, most of the Rc<String> things in eval.rs can go away 2. Once FQSNs are aware of function parameters, most of the Rc<String> things in eval.rs can go away
## Parser
* I think I can restructure the parser to get rid of most instances of expect!, at least at the beginning of a rule
## Typechecking ## Typechecking
* make a type to represent types rather than relying on string comparisons * make a type to represent types rather than relying on string comparisons
@ -52,8 +44,9 @@
## Language Syntax ## Language Syntax
* a type like `type Klewos = Klewos { <fields> }` (i.e. a type with exactly one record-like variant) should be writeable as
`type Klewos = { <fields> }` as a shorthand, and should not require explicit matching.
* the `type` declaration should have some kind of GADT-like syntax * the `type` declaration should have some kind of GADT-like syntax
* syntactic sugar for typestates? (cf. https://rustype.github.io/notes/notes/rust-typestate-series/rust-typestate-part-1.html )
* use `let` sigil to indicate a variable in a pattern explicitly: * use `let` sigil to indicate a variable in a pattern explicitly:
``` ```

View File

@ -1,2 +0,0 @@
[toolchain]
channel = "nightly"

View File

@ -0,0 +1,13 @@
[package]
name = "schala-lang-codegen"
version = "0.1.0"
authors = ["greg <greg.shuflin@protonmail.com>"]
edition = "2018"
resolver = "2"
[lib]
proc-macro = true
[dependencies]
syn = { version = "0.15.12", features = ["full", "extra-traits", "fold"] }
quote = "0.6.8"

View File

@ -0,0 +1,54 @@
#![feature(box_patterns)]
#![recursion_limit="128"]
extern crate proc_macro;
#[macro_use]
extern crate quote;
#[macro_use]
extern crate syn;
use self::proc_macro::TokenStream;
use self::syn::fold::Fold;
struct RecursiveDescentFn {
}
impl Fold for RecursiveDescentFn {
fn fold_item_fn(&mut self, mut i: syn::ItemFn) -> syn::ItemFn {
let box block = i.block;
let ident = &i.ident;
let new_block: syn::Block = parse_quote! {
{
let next_token_before_parse = self.token_handler.peek();
let record = ParseRecord {
production_name: stringify!(#ident).to_string(),
next_token: format!("{}", next_token_before_parse.to_string_with_metadata()),
level: self.parse_level,
};
self.parse_level += 1;
self.parse_record.push(record);
let result = { #block };
if self.parse_level != 0 {
self.parse_level -= 1;
}
result.map_err(|mut parse_error: ParseError| {
parse_error.production_name = Some(stringify!(#ident).to_string());
parse_error
})
}
};
i.block = Box::new(new_block);
i
}
}
#[proc_macro_attribute]
pub fn recursive_descent_method(_attr: TokenStream, item: TokenStream) -> TokenStream {
let input: syn::ItemFn = parse_macro_input!(item as syn::ItemFn);
let mut folder = RecursiveDescentFn {};
let output = folder.fold_item_fn(input);
TokenStream::from(quote!(#output))
}

View File

@ -10,17 +10,16 @@ take_mut = "0.2.2"
failure = "0.1.5" failure = "0.1.5"
ena = "0.11.0" ena = "0.11.0"
stopwatch = "0.0.7" stopwatch = "0.0.7"
derivative = "2.2.0" derivative = "1.0.3"
colored = "1.8" colored = "1.8"
radix_trie = "0.1.5" radix_trie = "0.1.5"
assert_matches = "1.5" assert_matches = "1.5"
#peg = "0.7.0" #peg = "0.7.0"
peg = "0.8.1" peg = { git = "https://github.com/kevinmehall/rust-peg", rev = "960222580c8da25b17d32c2aae6f52f902728b62" }
nom = "7.1.0"
nom_locate = "4.0.0"
schala-repl = { path = "../schala-repl" } schala-lang-codegen = { path = "../codegen" }
schala-repl = { path = "../../schala-repl" }
[dev-dependencies] [dev-dependencies]
test-case = "1.2.0" test-case = "1.2.0"

View File

@ -14,8 +14,8 @@ fn getline(arg) { }
fn map(input: Option<T>, func: Func): Option<T> { fn map(input: Option<T>, func: Func): Option<T> {
if input { if input {
is Option::Some(x) then Option::Some(func(x)) is Option::Some(x) then Option::Some(func(x)),
is Option::None then Option::None is Option::None then Option::None,
} }
} }

View File

@ -17,8 +17,7 @@ pub use visitor::*;
use crate::{ use crate::{
derivative::Derivative, derivative::Derivative,
identifier::{define_id_kind, Id}, identifier::{define_id_kind, Id},
parsing::Location, tokenizing::Location,
util::delim_wrapped,
}; };
define_id_kind!(ASTItem); define_id_kind!(ASTItem);
@ -41,12 +40,12 @@ impl fmt::Display for AST {
#[derive(Derivative, Debug, Clone)] #[derive(Derivative, Debug, Clone)]
#[derivative(PartialEq)] #[derivative(PartialEq)]
pub struct Statement<K> { pub struct Statement {
#[derivative(PartialEq = "ignore")] #[derivative(PartialEq = "ignore")]
pub id: ItemId, pub id: ItemId,
#[derivative(PartialEq = "ignore")] #[derivative(PartialEq = "ignore")]
pub location: Location, pub location: Location,
pub kind: K, pub kind: StatementKind,
} }
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
@ -66,23 +65,23 @@ pub enum FlowControl {
#[derive(Debug, Clone, PartialEq, Default)] #[derive(Debug, Clone, PartialEq, Default)]
pub struct Block { pub struct Block {
pub statements: Vec<Statement<StatementKind>>, pub statements: Vec<Statement>,
} }
impl From<Vec<Statement<StatementKind>>> for Block { impl From<Vec<Statement>> for Block {
fn from(statements: Vec<Statement<StatementKind>>) -> Self { fn from(statements: Vec<Statement>) -> Self {
Self { statements } Self { statements }
} }
} }
impl From<Statement<StatementKind>> for Block { impl From<Statement> for Block {
fn from(statement: Statement<StatementKind>) -> Self { fn from(statement: Statement) -> Self {
Self { statements: vec![statement] } Self { statements: vec![statement] }
} }
} }
impl AsRef<[Statement<StatementKind>]> for Block { impl AsRef<[Statement]> for Block {
fn as_ref(&self) -> &[Statement<StatementKind>] { fn as_ref(&self) -> &[Statement] {
self.statements.as_ref() self.statements.as_ref()
} }
} }
@ -124,41 +123,15 @@ pub struct FormalParam {
pub enum Declaration { pub enum Declaration {
FuncSig(Signature), FuncSig(Signature),
FuncDecl(Signature, Block), FuncDecl(Signature, Block),
TypeDecl { TypeDecl { name: TypeSingletonName, body: TypeBody, mutable: bool },
name: TypeSingletonName,
body: TypeBody,
mutable: bool,
},
//TODO TypeAlias `original` needs to be a more complex type definition //TODO TypeAlias `original` needs to be a more complex type definition
TypeAlias { TypeAlias { alias: Rc<String>, original: Rc<String> },
alias: Rc<String>, Binding { name: Rc<String>, constant: bool, type_anno: Option<TypeIdentifier>, expr: Expression },
original: Rc<String>, Impl { type_name: TypeIdentifier, interface_name: Option<TypeSingletonName>, block: Vec<Declaration> },
}, Interface { name: Rc<String>, signatures: Vec<Signature> },
Binding {
name: Rc<String>,
constant: bool,
type_anno: Option<TypeIdentifier>,
expr: Expression,
},
Impl {
type_name: TypeIdentifier,
interface_name: Option<TypeSingletonName>,
block: Vec<Statement<Declaration>>,
},
Interface {
name: Rc<String>,
signatures: Vec<Signature>,
},
//TODO need to limit the types of statements that can be annotated //TODO need to limit the types of statements that can be annotated
Annotation { Annotation { name: Rc<String>, arguments: Vec<Expression>, inner: Box<Statement> },
name: Rc<String>, Module { name: Rc<String>, items: Block },
arguments: Vec<Expression>,
inner: Box<Statement<StatementKind>>,
},
Module {
name: Rc<String>,
items: Block,
},
} }
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
@ -170,15 +143,10 @@ pub struct Signature {
} }
//TODO I can probably get rid of TypeBody //TODO I can probably get rid of TypeBody
#[derive(Debug, Derivative, Clone)] #[derive(Debug, PartialEq, Clone)]
#[derivative(PartialEq)]
pub enum TypeBody { pub enum TypeBody {
Variants(Vec<Variant>), Variants(Vec<Variant>),
ImmediateRecord { ImmediateRecord(ItemId, Vec<(Rc<String>, TypeIdentifier)>),
#[derivative(PartialEq = "ignore")]
id: ItemId,
fields: Vec<(Rc<String>, TypeIdentifier)>,
},
} }
#[derive(Debug, Derivative, Clone)] #[derive(Debug, Derivative, Clone)]
@ -219,22 +187,6 @@ pub enum TypeIdentifier {
Singleton(TypeSingletonName), Singleton(TypeSingletonName),
} }
impl fmt::Display for TypeIdentifier {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
TypeIdentifier::Tuple(items) =>
write!(f, "{}", delim_wrapped('(', ')', items.iter().map(|item| item.to_string()))),
TypeIdentifier::Singleton(tsn) => {
write!(f, "{}", tsn.name)?;
if !tsn.params.is_empty() {
write!(f, "{}", delim_wrapped('<', '>', tsn.params.iter().map(|item| item.to_string())))?;
}
Ok(())
}
}
}
}
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
pub struct TypeSingletonName { pub struct TypeSingletonName {
pub name: Rc<String>, pub name: Rc<String>,
@ -245,13 +197,12 @@ pub struct TypeSingletonName {
pub enum ExpressionKind { pub enum ExpressionKind {
NatLiteral(u64), NatLiteral(u64),
FloatLiteral(f64), FloatLiteral(f64),
StringLiteral { prefix: Option<Rc<String>>, s: Rc<String> }, StringLiteral(Rc<String>),
BoolLiteral(bool), BoolLiteral(bool),
BinExp(BinOp, Box<Expression>, Box<Expression>), BinExp(BinOp, Box<Expression>, Box<Expression>),
PrefixExp(PrefixOp, Box<Expression>), PrefixExp(PrefixOp, Box<Expression>),
TupleLiteral(Vec<Expression>), TupleLiteral(Vec<Expression>),
Value(QualifiedName), Value(QualifiedName),
SelfValue,
NamedStruct { name: QualifiedName, fields: Vec<(Rc<String>, Expression)> }, NamedStruct { name: QualifiedName, fields: Vec<(Rc<String>, Expression)> },
Call { f: Box<Expression>, arguments: Vec<InvocationArgument> }, Call { f: Box<Expression>, arguments: Vec<InvocationArgument> },
Index { indexee: Box<Expression>, indexers: Vec<Expression> }, Index { indexee: Box<Expression>, indexers: Vec<Expression> },
@ -311,9 +262,8 @@ pub enum PatternLiteral {
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
pub struct Enumerator { pub struct Enumerator {
pub identifier: Rc<String>, pub id: Rc<String>, //TODO rename this field
pub generator: Expression, pub generator: Expression,
pub assignment: bool, //true if `=`, false if `<-`
} }
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]

View File

@ -1,5 +1,7 @@
use std::rc::Rc; use std::rc::Rc;
use crate::tokenizing::TokenKind;
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
pub struct PrefixOp { pub struct PrefixOp {
sigil: Rc<String>, sigil: Rc<String>,
@ -13,6 +15,10 @@ impl PrefixOp {
pub fn sigil(&self) -> &str { pub fn sigil(&self) -> &str {
&self.sigil &self.sigil
} }
pub fn is_prefix(op: &str) -> bool {
matches!(op, "+" | "-" | "!")
}
} }
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
@ -29,14 +35,38 @@ impl BinOp {
&self.sigil &self.sigil
} }
pub fn from_sigil_token(tok: &TokenKind) -> Option<BinOp> {
let s = token_kind_to_sigil(tok)?;
Some(BinOp::from_sigil(s))
}
pub fn min_precedence() -> i32 { pub fn min_precedence() -> i32 {
i32::min_value() i32::min_value()
} }
pub fn get_precedence_from_token(op_tok: &TokenKind) -> Option<i32> {
let s = token_kind_to_sigil(op_tok)?;
Some(binop_precedences(s))
}
pub fn get_precedence(&self) -> i32 { pub fn get_precedence(&self) -> i32 {
binop_precedences(self.sigil.as_ref()) binop_precedences(self.sigil.as_ref())
} }
} }
fn token_kind_to_sigil(tok: &TokenKind) -> Option<&str> {
use self::TokenKind::*;
Some(match tok {
Operator(op) => op.as_str(),
Period => ".",
Pipe => "|",
Slash => "/",
LAngleBracket => "<",
RAngleBracket => ">",
Equals => "=",
_ => return None,
})
}
fn binop_precedences(s: &str) -> i32 { fn binop_precedences(s: &str) -> i32 {
let default = 10_000_000; let default = 10_000_000;
match s { match s {

View File

@ -39,10 +39,12 @@ pub fn walk_block<V: ASTVisitor>(v: &mut V, block: &Block) {
Import(ref import_spec) => { Import(ref import_spec) => {
v.import(import_spec); v.import(import_spec);
} }
Flow(ref flow_control) => Flow(ref flow_control) => match flow_control {
if let FlowControl::Return(Some(ref retval)) = flow_control { FlowControl::Return(Some(ref retval)) => {
walk_expression(v, retval); walk_expression(v, retval);
}, }
_ => (),
},
} }
} }
} }
@ -71,12 +73,7 @@ pub fn walk_expression<V: ASTVisitor>(v: &mut V, expr: &Expression) {
if let Recursion::Continue = v.expression(expr) { if let Recursion::Continue = v.expression(expr) {
match &expr.kind { match &expr.kind {
NatLiteral(_) NatLiteral(_) | FloatLiteral(_) | StringLiteral(_) | BoolLiteral(_) | Value(_) => (),
| FloatLiteral(_)
| StringLiteral { .. }
| BoolLiteral(_)
| Value(_)
| SelfValue => (),
BinExp(_, lhs, rhs) => { BinExp(_, lhs, rhs) => {
walk_expression(v, lhs); walk_expression(v, lhs);
walk_expression(v, rhs); walk_expression(v, rhs);

View File

@ -29,7 +29,7 @@ pub(super) fn render_ast(ast: &AST) -> String {
buf buf
} }
fn render_statement(stmt: &Statement<StatementKind>, indent: usize, buf: &mut String) { fn render_statement(stmt: &Statement, indent: usize, buf: &mut String) {
use StatementKind::*; use StatementKind::*;
do_indent(indent, buf); do_indent(indent, buf);
match stmt.kind { match stmt.kind {
@ -45,10 +45,9 @@ fn render_expression(expr: &Expression, indent: usize, buf: &mut String) {
buf.push_str("(Expr "); buf.push_str("(Expr ");
match &expr.kind { match &expr.kind {
SelfValue => write!(buf, "(SelfValue)").unwrap(),
NatLiteral(n) => buf.push_str(&format!("(NatLiteral {})", n)), NatLiteral(n) => buf.push_str(&format!("(NatLiteral {})", n)),
FloatLiteral(f) => buf.push_str(&format!("(FloatLiteral {})", f)), FloatLiteral(f) => buf.push_str(&format!("(FloatLiteral {})", f)),
StringLiteral { s, prefix } => buf.push_str(&format!("(StringLiteral prefix: {:?} {})", prefix, s)), StringLiteral(s) => buf.push_str(&format!("(StringLiteral {})", s)),
BoolLiteral(b) => buf.push_str(&format!("(BoolLiteral {})", b)), BoolLiteral(b) => buf.push_str(&format!("(BoolLiteral {})", b)),
BinExp(binop, lhs, rhs) => { BinExp(binop, lhs, rhs) => {
let new_indent = indent + LEVEL; let new_indent = indent + LEVEL;

View File

@ -0,0 +1,112 @@
use crate::{
parsing::ParseError,
schala::{SourceReference, Stage},
symbol_table::SymbolError,
tokenizing::{Location, Token, TokenKind},
type_inference::TypeError,
};
/// Top-level error type for the whole Schala pipeline: a list of per-stage
/// errors plus an optional pre-formatted parse error report.
pub struct SchalaError {
    errors: Vec<Error>,

    //TODO unify these sometime
    // Already-rendered parse-error text; when present it takes priority in `display`.
    formatted_parse_error: Option<String>,
}
impl SchalaError {
    /// Render this error for presentation to the user.
    ///
    /// A pre-formatted parse error takes priority; otherwise the text of the
    /// first recorded error is used, or the empty string if there is none.
    pub(crate) fn display(&self) -> String {
        if let Some(ref err) = self.formatted_parse_error {
            err.clone()
        } else {
            // `.first()` instead of `[0]`: avoids a panic if this is ever
            // called on a SchalaError with an empty error list.
            self.errors.first().and_then(|e| e.text.clone()).unwrap_or_default()
        }
    }

    /// Wrap a single type-checking error.
    #[allow(dead_code)]
    pub(crate) fn from_type_error(err: TypeError) -> Self {
        Self {
            formatted_parse_error: None,
            errors: vec![Error { location: None, text: Some(err.msg), stage: Stage::Typechecking }],
        }
    }

    /// Convert a batch of symbol-table errors.
    pub(crate) fn from_symbol_table(symbol_errs: Vec<SymbolError>) -> Self {
        //TODO this could be better - the individual SymbolError details are discarded
        let errors = symbol_errs
            .into_iter()
            .map(|_symbol_err| Error {
                location: None,
                text: Some("symbol table error".to_string()),
                stage: Stage::Symbols,
            })
            .collect();
        Self { errors, formatted_parse_error: None }
    }

    /// Build an error from a plain message attributed to a compiler stage.
    pub(crate) fn from_string(text: String, stage: Stage) -> Self {
        Self { formatted_parse_error: None, errors: vec![Error { location: None, text: Some(text), stage }] }
    }

    /// Build an error from a parse failure, rendering it against the source text.
    pub(crate) fn from_parse_error(parse_error: ParseError, source_reference: &SourceReference) -> Self {
        Self {
            formatted_parse_error: Some(format_parse_error(parse_error, source_reference)),
            errors: vec![],
        }
    }

    /// Collect tokenizer errors out of a token stream.
    ///
    /// Returns `None` when the stream contains no `TokenKind::Error` tokens.
    pub(crate) fn from_tokens(tokens: &[Token]) -> Option<SchalaError> {
        let token_errors: Vec<Error> = tokens
            .iter()
            .filter_map(|tok| match tok.kind {
                TokenKind::Error(ref err) => Some(Error {
                    location: Some(tok.location),
                    text: Some(err.clone()),
                    stage: Stage::Tokenizing,
                }),
                _ => None,
            })
            .collect();

        if token_errors.is_empty() {
            None
        } else {
            Some(SchalaError { errors: token_errors, formatted_parse_error: None })
        }
    }
}
// Internal record of a single error: where it occurred (if known), its
// message text (if any), and which compiler stage produced it.
#[allow(dead_code)]
struct Error {
    location: Option<Location>,
    text: Option<String>,
    stage: Stage,
}
/// Render a parse error as a multi-line report: the message, the offending
/// source line, and a caret pointing at the failing column.
fn format_parse_error(error: ParseError, source_reference: &SourceReference) -> String {
    let location = error.token.location;
    let offending_line = source_reference.get_line(location.line_num as usize);

    // Caret positioned under the offending character.
    let pointer = format!("{}^", " ".repeat(location.char_num.into()));

    // Gutter padding sized to the width of the line number.
    let gutter = " ".repeat(format!("{}", location.line_num).chars().count());

    // Optional "(from production ...)" suffix naming the grammar rule.
    let production = error
        .production_name
        .map(|n| format!("\n(from production \"{}\")", n))
        .unwrap_or_else(String::new);

    format!(
        r#"
{error_msg}{production}
{space_padding} |
{line_num} | {}
{space_padding} | {}
"#,
        offending_line,
        pointer,
        error_msg = error.msg,
        space_padding = gutter,
        line_num = location.line_num,
        production = production
    )
}

View File

@ -38,7 +38,6 @@ where T: IdKind
} }
} }
#[derive(Debug)]
pub struct IdStore<T> pub struct IdStore<T>
where T: IdKind where T: IdKind
{ {

View File

@ -1,13 +1,15 @@
#![feature(trace_macros)] #![feature(trace_macros)]
//#![feature(unrestricted_attribute_tokens)] //#![feature(unrestricted_attribute_tokens)]
#![feature(box_patterns, iter_intersperse)] #![feature(box_patterns, box_syntax, iter_intersperse)]
//! `schala-lang` is where the Schala programming language is actually implemented. //! `schala-lang` is where the Schala programming language is actually implemented.
//! It defines the `Schala` type, which contains the state for a Schala REPL, and implements //! It defines the `Schala` type, which contains the state for a Schala REPL, and implements
//! `ProgrammingLanguageInterface` and the chain of compiler passes for it. //! `ProgrammingLanguageInterface` and the chain of compiler passes for it.
extern crate derivative;
extern crate schala_repl; extern crate schala_repl;
#[macro_use]
extern crate schala_lang_codegen;
extern crate derivative;
#[macro_use] #[macro_use]
mod util; mod util;
@ -17,6 +19,7 @@ mod type_inference;
mod ast; mod ast;
mod parsing; mod parsing;
mod tokenizing;
#[macro_use] #[macro_use]
mod symbol_table; mod symbol_table;
mod builtin; mod builtin;

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,494 @@
use std::rc::Rc;
use crate::ast::*;
// Convenience: copy a borrowed string slice into a reference-counted String.
fn rc_string(s: &str) -> Rc<String> {
    Rc::new(String::from(s))
}
// PEG grammar for the Schala language, built with the `peg` crate.
// Produces `crate::ast` values directly from source text.
peg::parser! {
    pub grammar schala_parser() for str {
        // Any run of spaces, tabs, or newlines.
        rule whitespace() = [' ' | '\t' | '\n']*

        // `_` = optional whitespace (including newlines); `__` = same-line whitespace only.
        rule _ = quiet!{ whitespace() }
        rule __ = quiet!{ [' ' | '\t' ]* }

        pub rule program() -> AST =
            n:(statement() ** delimiter() ) { AST { id: Default::default(), statements: n.into() } }

        // Statements are separated by semicolons and/or newlines.
        rule delimiter() = (";" / "\n")+

        //Note - this is a hack, ideally the rule `rule block() -> Block = "{" _ items:(statement() **
        //delimiter()) _ "}" { items.into() }` would've worked, but it doesn't.
        pub rule block() -> Block = "{" _ items:block_item()* _ "}" { items.into() } /
            "{" _ stmt:statement() _ "}" { vec![stmt].into() }

        rule block_item() -> Statement =
            stmt:statement() delimiter()+ { stmt }

        // Ids and locations are filled in with defaults here; presumably a later
        // pass assigns real ones — TODO confirm.
        rule statement() -> Statement =
            kind:statement_kind() { Statement { id: Default::default(), location: Default::default(), kind } }

        rule statement_kind() -> StatementKind =
            _ decl:declaration() { StatementKind::Declaration(decl) } /
            _ expr:expression() { StatementKind::Expression(expr) }

        rule declaration() -> Declaration =
            binding() / type_decl() / annotation() / func()

        // A signature followed by a block is a full definition; a bare signature otherwise.
        rule func() -> Declaration =
            sig:func_signature() __ body:block() { Declaration::FuncDecl(sig, body) } /
            sig:func_signature() { Declaration::FuncSig(sig) }

        //TODO handle operators
        rule func_signature() -> Signature =
            "fn" _ name:identifier() "(" _ params:formal_params() _ ")" _ type_anno:type_anno()? { Signature {
                name: rc_string(name), operator: false, params, type_anno
            } }

        // Arbitrary cap of 255 parameters per function.
        rule formal_params() -> Vec<FormalParam> = params:(formal_param() ** (_ "," _)) {? if params.len() < 256 { Ok(params) } else {
            Err("function-too-long") }
        }

        // A parameter with an optional type annotation and optional default value.
        rule formal_param() -> FormalParam =
            name:identifier() _ anno:type_anno()? _ "=" expr:expression() { FormalParam { name: rc_string(name),
                default: Some(expr), anno } } /
            name:identifier() _ anno:type_anno()? { FormalParam { name: rc_string(name), default: None, anno } }

        // `@name(args...)` attached to the following statement.
        rule annotation() -> Declaration =
            "@" name:identifier() args:annotation_args()? delimiter() _ inner:statement() { Declaration::Annotation {
                name: rc_string(name), arguments: if let Some(args) = args { args } else { vec![] }, inner: Box::new(inner) }
            }

        rule annotation_args() -> Vec<Expression> =
            "(" _ args:(expression() ** (_ "," _)) _ ")" { args }

        // `let [mut] name [: Type] = expr` — note `constant` is the inverse of `mut`.
        rule binding() -> Declaration =
            "let" _ mutable:"mut"? _ ident:identifier() _ type_anno:type_anno()? _ "=" _ expr:expression() {
                Declaration::Binding { name: Rc::new(ident.to_string()), constant: mutable.is_none(),
                    type_anno, expr }
            }

        rule type_decl() -> Declaration =
            "type" _ "alias" _ alias:type_alias() { alias } /
            "type" _ mutable:"mut"? _ name:type_singleton_name() _ "=" _ body:type_body() {
                Declaration::TypeDecl { name, body, mutable: mutable.is_some() }
            }

        rule type_singleton_name() -> TypeSingletonName =
            name:identifier() params:type_params()? { TypeSingletonName { name: rc_string(name), params: if let Some(params) = params { params } else { vec![] } } }

        rule type_params() -> Vec<TypeIdentifier> =
            "<" _ idents:(type_identifier() ** (_ "," _)) _ ">" { idents }

        rule type_identifier() -> TypeIdentifier =
            "(" _ items:(type_identifier() ** (_ "," _)) _ ")" { TypeIdentifier::Tuple(items) } /
            singleton:type_singleton_name() { TypeIdentifier::Singleton(singleton) }

        // Either an inline record (`{ field: Ty, ... }`) or `|`-separated variants.
        rule type_body() -> TypeBody =
            "{" _ items:(record_variant_item() ++ (_ "," _)) _ "}" { TypeBody::ImmediateRecord(Default::default(), items) } /
            variants:(variant_spec() ** (_ "|" _)) { TypeBody::Variants(variants) }

        // Record variant, tuple-struct variant, or unit variant — in that order.
        rule variant_spec() -> Variant =
            name:identifier() _ "{" _ typed_identifier_list:(record_variant_item() ++ (_ "," _)) _ "}" { Variant {
                id: Default::default(), name: rc_string(name), kind: VariantKind::Record(typed_identifier_list)
            } } /
            name:identifier() "(" tuple_members:(type_identifier() ++ (_ "," _)) ")" { Variant {
                id: Default::default(), name: rc_string(name), kind: VariantKind::TupleStruct(tuple_members) } } /
            name:identifier() { Variant { id: Default::default(), name: rc_string(name), kind: VariantKind::UnitStruct } }

        rule record_variant_item() -> (Rc<String>, TypeIdentifier) =
            name:identifier() _ ":" _ ty:type_identifier() { (rc_string(name), ty) }

        rule type_alias() -> Declaration =
            alias:identifier() _ "=" _ name:identifier() { Declaration::TypeAlias { alias: rc_string(alias), original: rc_string(name), } }

        // NOTE(review): annotations currently accept only a bare (non-generic,
        // non-tuple) type name — narrower than `type_identifier()`.
        rule type_anno() -> TypeIdentifier =
            ":" _ ident:identifier() { TypeIdentifier::Singleton(TypeSingletonName { name: Rc::new(ident.to_string()), params: vec![] }) }

        pub rule expression() -> Expression =
            _ kind:expression_kind() { Expression { id: Default::default(), type_anno: None, kind: kind } }

        // Variant used where a trailing `{` must not be parsed as a struct
        // literal (e.g. the condition of `if x { ... }`).
        rule expression_no_struct() -> Expression =
            _ kind:expression_kind_no_struct() { Expression { id: Default::default(), type_anno: None, kind: kind } }

        rule expression_kind() -> ExpressionKind =
            precedence_expr(true)

        rule expression_kind_no_struct() -> ExpressionKind =
            precedence_expr(false)

        // Parse a flat operand (operator operand)* sequence, then let
        // BinopSequence::do_precedence build the properly-nested tree.
        rule precedence_expr(struct_ok: bool) -> ExpressionKind =
            first:prefix_expr(struct_ok) _ next:(precedence_continuation(struct_ok))* {
                let next = next.into_iter().map(|(sigil, expr)| (BinOp::from_sigil(sigil), expr)).collect();
                BinopSequence { first, next }.do_precedence()
            }

        rule precedence_continuation(struct_ok: bool) -> (&'input str, ExpressionKind) =
            op:operator() _ expr:prefix_expr(struct_ok) _ { (op, expr) }

        // Optional unary prefix operator applied to a postfix-extended expression.
        rule prefix_expr(struct_ok: bool) -> ExpressionKind =
            prefix:prefix()? expr:extended_expr(struct_ok) {
                if let Some(p) = prefix {
                    let expr = Expression::new(Default::default(), expr);
                    let prefix = PrefixOp::from_sigil(p);
                    ExpressionKind::PrefixExp(prefix, Box::new(expr))
                } else {
                    expr
                }
            }

        rule prefix() -> &'input str =
            $(['+' | '-' | '!' ])

        //TODO make the definition of operators more complex
        rule operator() -> &'input str =
            quiet!{$( ['+' | '-' | '*' | '/' | '%' | '<' | '>' | '=' | '!' | '$' | '&' | '|' | '?' | '^' | '`']+ )} /
            expected!("operator")

        // Dispatch to the struct-allowing or struct-forbidding variant based on
        // the `struct_ok` flag (peg rule args can't be threaded into #[cache_left_rec]
        // rules, hence the duplication below).
        rule extended_expr(struct_ok: bool) -> ExpressionKind =
            item:extended_expr_ok_struct() {? if struct_ok { Ok(item) } else { Err("no-struct-allowed") } } /
            item:extended_expr_no_struct() {? if !struct_ok { Ok(item) } else { Err("!no-struct-allowed") } }

        // Left-recursive postfix forms: indexing `a[i]`, calls `f(x)`, access `a.b`.
        #[cache_left_rec]
        rule extended_expr_ok_struct() -> ExpressionKind =
            indexee:extended_expr_ok_struct() indexers:index_part() {
                ExpressionKind::Index {
                    indexee: Box::new(Expression::new(Default::default(), indexee)),
                    indexers,
                }
            } /
            f:extended_expr_ok_struct() arguments:call_part() {
                ExpressionKind::Call {
                    f: Box::new(Expression::new(Default::default(), f)),
                    arguments,
                }
            } /
            expr:extended_expr_ok_struct() "." name:identifier() { ExpressionKind::Access {
                name: Rc::new(name.to_string()),
                expr: Box::new(Expression::new(Default::default(),expr)),
            } } /
            primary(true)

        // Identical to the rule above except struct literals are disallowed in
        // the leaf position.
        #[cache_left_rec]
        rule extended_expr_no_struct() -> ExpressionKind =
            indexee:extended_expr_no_struct() indexers:index_part() {
                ExpressionKind::Index {
                    indexee: Box::new(Expression::new(Default::default(), indexee)),
                    indexers,
                }
            } /
            f:extended_expr_no_struct() arguments:call_part() {
                ExpressionKind::Call {
                    f: Box::new(Expression::new(Default::default(), f)),
                    arguments,
                }
            } /
            expr:extended_expr_no_struct() "." name:identifier() { ExpressionKind::Access {
                name: Rc::new(name.to_string()),
                expr: Box::new(Expression::new(Default::default(),expr)),
            } } /
            primary(false)

        rule index_part() -> Vec<Expression> =
            "[" indexers:(expression() ++ ",") "]" { indexers }

        rule call_part() -> Vec<InvocationArgument> =
            "(" arguments:(invocation_argument() ** ",") ")" { arguments }

        //TODO this shouldn't be an expression b/c type annotations disallowed here
        // `_` (ignored), `name = expr` (keyword), or a plain positional expression.
        rule invocation_argument() -> InvocationArgument =
            _ "_" _ { InvocationArgument::Ignored } /
            _ ident:identifier() _ "=" _ expr:expression() { InvocationArgument::Keyword {
                name: Rc::new(ident.to_string()),
                expr
            } } /
            _ expr:expression() _ { InvocationArgument::Positional(expr) }

        rule primary(struct_ok: bool) -> ExpressionKind =
            while_expr() / for_expr() / float_literal() / nat_literal() / bool_literal() / string_literal() / paren_expr() /
            list_expr() / if_expr() /
            item:named_struct() {? if struct_ok { Ok(item) } else { Err("no-struct-allowed") } } /
            identifier_expr()

        rule for_expr() -> ExpressionKind =
            "for" _ enumerators:for_enumerators() _ body:for_body() {
                ExpressionKind::ForExpression { enumerators, body }
            }

        // Either a braced list of enumerators or a single bare one.
        rule for_enumerators() -> Vec<Enumerator> =
            "{" _ enumerators:(enumerator() ++ ",") _ "}" { enumerators } /
            enumerator:enumerator() { vec![enumerator] }

        //TODO add guards, etc.
        rule enumerator() -> Enumerator =
            ident:identifier() _ "<-" _ generator:expression_no_struct() {
                Enumerator { id: Rc::new(ident.to_string()), generator }
            } /
            //TODO need to distinguish these two cases in AST
            ident:identifier() _ "=" _ generator:expression_no_struct() {
                Enumerator { id: Rc::new(ident.to_string()), generator }
            }

        rule for_body() -> Box<ForBody> =
            "return" _ expr:expression() { Box::new(ForBody::MonadicReturn(expr)) } /
            body:block() { Box::new(ForBody::StatementBlock(body)) }

        // `while [cond] { ... }` — the condition is optional (infinite loop form).
        rule while_expr() -> ExpressionKind =
            "while" _ cond:expression_kind_no_struct()? _ body:block() {
                ExpressionKind::WhileExpression {
                    condition: cond.map(|kind| Box::new(Expression::new(Default::default(), kind))),
                    body,
                }
            }

        rule identifier_expr() -> ExpressionKind =
            qn:qualified_identifier() { ExpressionKind::Value(qn) }

        rule named_struct() -> ExpressionKind =
            name:qualified_identifier() _ fields:record_block() {
                ExpressionKind::NamedStruct {
                    name,
                    fields: fields.into_iter().map(|(n, exp)| (Rc::new(n.to_string()), exp)).collect(),
                }
            }

        //TODO anonymous structs, update syntax for structs
        rule record_block() -> Vec<(&'input str, Expression)> =
            "{" _ entries:(record_entry() ** ",") _ "}" { entries }

        rule record_entry() -> (&'input str, Expression) =
            _ name:identifier() _ ":" _ expr:expression() _ { (name, expr) }

        // `a::b::c` — one or more identifiers joined by `::`.
        rule qualified_identifier() -> QualifiedName =
            names:(identifier() ++ "::") { QualifiedName { id: Default::default(), components: names.into_iter().map(|name| Rc::new(name.to_string())).collect() } }

        //TODO improve the definition of identifiers
        rule identifier() -> &'input str =
            $(['a'..='z' | 'A'..='Z' | '_'] ['a'..='z' | 'A'..='Z' | '0'..='9' | '_']*)

        rule if_expr() -> ExpressionKind =
            "if" _ discriminator:(expression()?) _ body:if_expr_body() {
                ExpressionKind::IfExpression {
                    discriminator: discriminator.map(Box::new),
                    body: Box::new(body),
                }
            }

        rule if_expr_body() -> IfExpressionBody =
            cond_block() / simple_pattern_match() / simple_conditional()

        // `if x then a [else b]`
        rule simple_conditional() -> IfExpressionBody =
            "then" _ then_case:expr_or_block() _ else_case:else_case() {
                IfExpressionBody::SimpleConditional { then_case, else_case }
            }

        // `if x is <pattern> then a [else b]`
        rule simple_pattern_match() -> IfExpressionBody =
            "is" _ pattern:pattern() _ "then" _ then_case:expr_or_block() _ else_case:else_case() {
                IfExpressionBody::SimplePatternMatch { pattern, then_case, else_case }
            }

        // Multi-arm form: `if x { is P then ..., > 2 then ..., else ... }`
        rule cond_block() -> IfExpressionBody =
            "{" _ cond_arms:(cond_arm() ++ ",") _ "}" { IfExpressionBody::CondList(cond_arms) }

        rule cond_arm() -> ConditionArm =
            _ "else" _ body:expr_or_block() { ConditionArm { condition: Condition::Else, guard: None, body } } /
            _ condition:condition() _ guard:condition_guard() _ "then" _ body:expr_or_block()
                { ConditionArm { condition, guard, body } }

        rule condition() -> Condition =
            "is" _ pat:pattern() { Condition::Pattern(pat) } /
            op:operator() _ expr:expression() { Condition::TruncatedOp(BinOp::from_sigil(op), expr) }

        rule condition_guard() -> Option<Expression> =
            ("if" _ expr:expression() { expr } )?

        // A bare expression used where a block is expected becomes a
        // one-statement block.
        rule expr_or_block() -> Block = block() / ex:expression() {
            Statement {
                id: Default::default(), location: Default::default(),
                kind: StatementKind::Expression(ex)
            }.into()
        }

        rule else_case() -> Option<Block> =
            ("else" _ eorb:expr_or_block() { eorb })?

        rule pattern() -> Pattern =
            "(" _ variants:(pattern() ++ ",") _ ")" { Pattern::TuplePattern(variants) } /
            _ pat:simple_pattern() { pat }

        // Literal, tuple-struct, record, or bare variable/name — in that order.
        rule simple_pattern() -> Pattern =
            pattern_literal() /
            qn:qualified_identifier() "(" members:(pattern() ** ",") ")" {
                Pattern::TupleStruct(qn, members)
            } /
            qn:qualified_identifier() _ "{" _ items:(record_pattern_entry() ** ",") "}" _ {
                let items = items.into_iter().map(|(name, pat)| (Rc::new(name.to_string()), pat)).collect();
                Pattern::Record(qn, items)
            } /
            qn:qualified_identifier() { Pattern::VarOrName(qn) }

        rule record_pattern_entry() -> (&'input str, Pattern) =
            _ name:identifier() _ ":" _ pat:pattern() _ { (name, pat) } /
            _ name:identifier() _ {
                // Shorthand `{ name }` binds the field to a variable of the same name.
                let qn = QualifiedName {
                    id: Default::default(),
                    components: vec![Rc::new(name.to_string())],
                };
                (name, Pattern::VarOrName(qn))
            }

        rule pattern_literal() -> Pattern =
            "true" { Pattern::Literal(PatternLiteral::BoolPattern(true)) } /
            "false" { Pattern::Literal(PatternLiteral::BoolPattern(false)) } /
            s:bare_string_literal() { Pattern::Literal(PatternLiteral::StringPattern(Rc::new(s.to_string()))) } /
            sign:("-"?) num:nat_literal() {
                let neg = sign.is_some();
                Pattern::Literal(PatternLiteral::NumPattern { neg, num })
            } /
            "_" { Pattern::Ignored }

        rule list_expr() -> ExpressionKind =
            "[" exprs:(expression() ** ",") "]" {
                let mut exprs = exprs;
                ExpressionKind::ListLiteral(exprs)
            }

        // One parenthesized expression is just grouping; zero or several make a tuple.
        rule paren_expr() -> ExpressionKind =
            "(" exprs:(expression() ** ",") ")" {
                let mut exprs = exprs;
                match exprs.len() {
                    1 => exprs.pop().unwrap().kind,
                    _ => ExpressionKind::TupleLiteral(exprs),
                }
            }

        rule string_literal() -> ExpressionKind =
            s:bare_string_literal(){ ExpressionKind::StringLiteral(Rc::new(s.to_string())) }

        //TODO string escapes, prefixes
        rule bare_string_literal() -> &'input str =
            "\"" items:$([^ '"' ]*) "\"" { items }

        rule bool_literal() -> ExpressionKind =
            "true" { ExpressionKind::BoolLiteral(true) } / "false" { ExpressionKind::BoolLiteral(false) }

        rule nat_literal() -> ExpressionKind =
            bin_literal() / hex_literal() / unmarked_literal()

        // NOTE(review): `digits()` can contain '_' separators, which
        // `u64::from_str` rejects — this unwrap would panic on e.g. `1_000`.
        // Confirm and route through an underscore-stripping parse.
        rule unmarked_literal() -> ExpressionKind =
            digits:digits() { ExpressionKind::NatLiteral(digits.parse().unwrap()) }

        rule bin_literal() -> ExpressionKind =
            "0b" digits:bin_digits() { ExpressionKind::NatLiteral(parse_binary(digits)) }

        rule hex_literal() -> ExpressionKind =
            "0x" digits:hex_digits() { ExpressionKind::NatLiteral(parse_hex(digits)) }

        rule float_literal() -> ExpressionKind =
            ds:$( digits() "." digits()? / "." digits() ) { ExpressionKind::FloatLiteral(ds.parse().unwrap()) }

        // Digit groups may be separated/suffixed with underscores (e.g. 1_000).
        rule digits() -> &'input str = $((digit_group() "_"*)+)
        rule bin_digits() -> &'input str = $((bin_digit_group() "_"*)+)
        rule hex_digits() -> &'input str = $((hex_digit_group() "_"*)+)
        rule digit_group() -> &'input str = $(['0'..='9']+)
        rule bin_digit_group() -> &'input str = $(['0' | '1']+)
        rule hex_digit_group() -> &'input str = $(['0'..='9' | 'a'..='f' | 'A'..='F']+)
    }
}
/// Parse the digit portion of a binary literal (underscores allowed as
/// separators) into a `u64`.
///
/// Panics if the value does not fit in 64 bits.
/// TODO: surface this as a `ParseError` instead of panicking.
fn parse_binary(digits: &str /*, tok: Token*/) -> u64 {
    let mut result: u64 = 0;
    // Accumulate most-significant-first. The old reverse scan kept a per-digit
    // multiplier and doubled it *after* the last digit, so a valid 64-digit
    // literal (e.g. 64 ones == u64::MAX) spuriously panicked on the
    // multiplier overflow. Folding into the accumulator only overflows when
    // the value itself exceeds u64.
    for d in digits.chars() {
        let bit = match d {
            '0' => 0,
            '1' => 1,
            '_' => continue, // digit-group separator
            _ => unreachable!(), // grammar only yields 0, 1, or _
        };
        result = result
            .checked_mul(2)
            .and_then(|r| r.checked_add(bit))
            /*return ParseError::new_with_token("This binary expression will overflow", tok),*/
            .expect("this binary expression will overflow");
    }
    result
}
//TODO fix these two functions
/// Parse the digit portion of a hex literal (underscores allowed as
/// separators) into a `u64`.
///
/// Panics if the value does not fit in 64 bits, or on a non-hex character
/// (which the grammar should already prevent).
/// TODO: surface these as `ParseError`s instead of panicking.
fn parse_hex(digits: &str) -> u64 {
    let mut result: u64 = 0;
    // Accumulate most-significant-first. The old reverse scan multiplied a
    // per-digit multiplier *after* the last digit, so a valid 16-digit literal
    // (e.g. "ffffffffffffffff" == u64::MAX) spuriously panicked on the
    // multiplier overflow. Folding into the accumulator only overflows when
    // the value itself exceeds u64.
    for d in digits.chars() {
        if d == '_' {
            continue; // digit-group separator
        }
        let val = u64::from(d.to_digit(16).expect("non-hex digit in hex literal"));
        result = result
            .checked_mul(16)
            .and_then(|r| r.checked_add(val))
            .expect("this hex expression will overflow");
    }
    result
}
// Flat "spine" of a binary-operator expression as parsed: the first operand
// followed by (operator, operand) pairs, before precedence has been applied.
#[derive(Debug)]
struct BinopSequence {
    first: ExpressionKind,
    next: Vec<(BinOp, ExpressionKind)>,
}
impl BinopSequence {
    /// Fold the flat operator sequence into a properly nested `BinExp` tree
    /// using precedence climbing.
    fn do_precedence(self) -> ExpressionKind {
        // Recursive worker: consumes (op, operand) pairs from `rest` (stored
        // reversed, used as a stack) as long as the operator binds tighter
        // than `precedence`.
        fn helper(
            precedence: i32,
            lhs: ExpressionKind,
            rest: &mut Vec<(BinOp, ExpressionKind)>,
        ) -> Expression {
            let mut lhs = Expression::new(Default::default(), lhs);
            loop {
                let (next_op, next_rhs) = match rest.pop() {
                    Some((a, b)) => (a, b),
                    None => break,
                };
                let new_precedence = next_op.get_precedence();
                if precedence >= new_precedence {
                    // The next operator binds no tighter than our context:
                    // push it back and let an outer call consume it.
                    rest.push((next_op, next_rhs));
                    break;
                }
                // Tighter-binding operator: recurse to build its right operand,
                // then fold it into the left-hand side.
                let rhs = helper(new_precedence, next_rhs, rest);
                lhs = Expression::new(
                    Default::default(),
                    ExpressionKind::BinExp(next_op, Box::new(lhs), Box::new(rhs)),
                );
            }
            lhs
        }
        // Reverse so `pop` yields the pairs in source order.
        let mut as_stack = self.next.into_iter().rev().collect();
        helper(BinOp::min_precedence(), self.first, &mut as_stack).kind
    }
}

View File

@ -6,8 +6,8 @@ use std::{fmt::Write, rc::Rc};
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use super::Parser; use super::{new::schala_parser, tokenize, ParseResult, Parser};
use crate::{ast::*, parsing::Location}; use crate::{ast::*, tokenizing::Location};
fn rc(s: &str) -> Rc<String> { fn rc(s: &str) -> Rc<String> {
Rc::new(s.to_owned()) Rc::new(s.to_owned())
@ -17,15 +17,23 @@ fn bx<T>(item: T) -> Box<T> {
Box::new(item) Box::new(item)
} }
fn strlit(s: &str) -> ExpressionKind { fn make_parser(input: &str) -> Parser {
ExpressionKind::StringLiteral { s: Rc::new(s.to_string()), prefix: None } let tokens: Vec<crate::tokenizing::Token> = tokenize(input);
let mut parser = super::Parser::new();
parser.add_new_tokens(tokens);
parser
} }
fn stmt<K>(kind: K) -> Statement<K> { fn parse(input: &str) -> ParseResult<AST> {
let mut parser = make_parser(input);
parser.parse()
}
fn stmt(kind: StatementKind) -> Statement {
Statement { location: Location::default(), id: ItemId::default(), kind } Statement { location: Location::default(), id: ItemId::default(), kind }
} }
fn exst(kind: ExpressionKind) -> Statement<StatementKind> { fn exst(kind: ExpressionKind) -> Statement {
Statement { Statement {
location: Location::default(), location: Location::default(),
id: ItemId::default(), id: ItemId::default(),
@ -33,7 +41,7 @@ fn exst(kind: ExpressionKind) -> Statement<StatementKind> {
} }
} }
fn decl(declaration: Declaration) -> Statement<StatementKind> { fn decl(declaration: Declaration) -> Statement {
Statement { Statement {
location: Location::default(), location: Location::default(),
id: ItemId::default(), id: ItemId::default(),
@ -41,7 +49,7 @@ fn decl(declaration: Declaration) -> Statement<StatementKind> {
} }
} }
fn fn_decl(sig: Signature, stmts: Block) -> Statement<StatementKind> { fn fn_decl(sig: Signature, stmts: Block) -> Statement {
Statement { Statement {
kind: StatementKind::Declaration(Declaration::FuncDecl(sig, stmts)), kind: StatementKind::Declaration(Declaration::FuncDecl(sig, stmts)),
location: Default::default(), location: Default::default(),
@ -91,116 +99,88 @@ fn ty_simple(name: &str) -> TypeIdentifier {
macro_rules! assert_ast { macro_rules! assert_ast {
($input:expr, $statements:expr) => { ($input:expr, $statements:expr) => {
let mut parser = Parser::new(); let ast = parse($input).unwrap();
let ast = parser.parse_comb($input);
let ast2 = parser.parse_peg($input);
let expected = AST { id: Default::default(), statements: $statements.into() }; let expected = AST { id: Default::default(), statements: $statements.into() };
let ast = match ast { println!("Expected: {}", expected);
Err(err) => { println!("Actual: {}", ast);
println!("Parse error: {}", err.msg);
panic!();
}
Ok(ast) => ast,
};
assert_eq!(ast, ast2.unwrap());
assert_eq!(ast, expected); assert_eq!(ast, expected);
}; };
} }
macro_rules! assert_fail { macro_rules! assert_ast2 {
($input:expr) => { ($input:expr, $statements:expr) => {
let mut parser = Parser::new(); let ast = schala_parser::program($input);
let _err = parser.parse_comb($input).unwrap_err(); let expected = AST { id: Default::default(), statements: $statements.into() };
if ast.is_err() {
println!("Parse error: {}", ast.unwrap_err());
panic!();
}
assert_eq!(ast.unwrap(), expected);
}; };
}
macro_rules! assert_fail {
($input:expr, $failure:expr) => { ($input:expr, $failure:expr) => {
let mut parser = Parser::new(); let err = parse($input).unwrap_err();
let err = parser.parse_comb($input).unwrap_err();
println!("assert_fail: {}", err.msg);
assert_eq!(err.msg, $failure); assert_eq!(err.msg, $failure);
}; };
} }
macro_rules! assert_fail2 {
($input:expr, $failure:expr) => {
let err = schala_parser::program($input).unwrap_err();
assert_eq!(err.to_string(), $failure);
};
}
macro_rules! assert_expr { macro_rules! assert_expr {
($input:expr, $correct:expr) => { ($input:expr, $correct:expr) => {
let mut parser = Parser::new(); let mut parser = make_parser($input);
let expr = parser.expression_comb($input.trim_start()); assert_eq!(parser.expression().unwrap(), $correct);
let expr2 = parser.expression($input.trim_start());
let expr = match expr {
Err(err) => {
println!("Expression parse error: {}", err.msg);
panic!();
}
Ok(expr) => expr,
};
assert_eq!(expr, expr2.unwrap());
assert_eq!(expr, $correct);
}; };
} }
macro_rules! assert_fail_expr { macro_rules! assert_expr2 {
($input:expr, $correct:expr) => {
let expr = schala_parser::expression($input);
if expr.is_err() {
println!("Expression parse error: {}", expr.unwrap_err());
panic!();
}
assert_eq!(expr.unwrap(), $correct);
};
}
macro_rules! assert_fail_expr2 {
($input:expr, $failure:expr) => { ($input:expr, $failure:expr) => {
let mut parser = Parser::new(); let _err = schala_parser::expression($input).unwrap_err();
let _err = parser.expression_comb($input).unwrap_err();
//TODO make real tests for failures //TODO make real tests for failures
//assert_eq!(err.to_string(), $failure); //assert_eq!(err.to_string(), $failure);
}; };
} }
macro_rules! assert_block {
($input:expr, $correct:expr) => {
let mut parser = Parser::new();
let block = parser.block_comb($input);
let block2 = parser.block($input);
let block = match block {
Err(err) => {
println!("Expression parse error: {}", err.msg);
panic!();
}
Ok(item) => item,
};
assert_eq!(block, block2.unwrap());
assert_eq!(block, $correct);
};
}
#[test] #[test]
fn basic_literals() { fn basic_literals() {
use ExpressionKind::*; use ExpressionKind::*;
assert_expr!(".2", expr(FloatLiteral(0.2))); assert_expr2!(".2", expr(FloatLiteral(0.2)));
assert_expr!("8.1", expr(FloatLiteral(8.1))); assert_expr2!("8.1", expr(FloatLiteral(8.1)));
assert_expr!("0b010", expr(NatLiteral(2))); assert_expr2!("0b010", expr(NatLiteral(2)));
assert_expr!("0b0_1_0", expr(NatLiteral(2))); assert_expr2!("0b0_1_0", expr(NatLiteral(2)));
assert_expr!("0xff", expr(NatLiteral(255))); assert_expr2!("0xff", expr(NatLiteral(255)));
assert_expr!("0x032f", expr(NatLiteral(815))); assert_expr2!("0x032f", expr(NatLiteral(815)));
assert_expr!("0xf_f", expr(NatLiteral(255))); assert_expr2!("0xf_f_", expr(NatLiteral(255)));
assert_expr!("false", expr(BoolLiteral(false))); assert_expr2!("false", expr(BoolLiteral(false)));
assert_expr!("true", expr(BoolLiteral(true))); assert_expr2!("true", expr(BoolLiteral(true)));
} assert_expr2!(r#""hello""#, expr(StringLiteral(rc("hello"))));
#[test]
fn string_literals() {
use ExpressionKind::*;
assert_expr!(r#""""#, expr(strlit("")));
assert_expr!(r#""hello""#, expr(strlit("hello")));
assert_expr!(
r#"b"some bytestring""#,
expr(StringLiteral { s: rc("some bytestring"), prefix: Some(rc("b")) })
);
assert_expr!(r#""Do \n \" escapes work\t""#, expr(strlit("Do \n \" escapes work\t")));
assert_expr!(r#""Georgian letter jani \u{10ef}""#, expr(strlit("Georgian letter jani ჯ")));
} }
#[test] #[test]
fn list_literals() { fn list_literals() {
use ExpressionKind::*; use ExpressionKind::*;
assert_expr!("[]", expr(ListLiteral(vec![]))); assert_expr2!("[]", expr(ListLiteral(vec![])));
assert_expr!("[1,2]", expr(ListLiteral(vec![expr(NatLiteral(1)), expr(NatLiteral(2)),]))); assert_expr2!("[1,2]", expr(ListLiteral(vec![expr(NatLiteral(1)), expr(NatLiteral(2)),])));
assert_expr!("[1, /*no*/2]", expr(ListLiteral(vec![expr(NatLiteral(1)), expr(NatLiteral(2)),]))); assert_fail_expr2!("[1,,2]", "some failure");
assert_fail_expr!("[1,,2]", "some failure");
} }
#[test] #[test]
@ -208,8 +188,8 @@ fn binexps() {
use ExpressionKind::*; use ExpressionKind::*;
use StatementKind::Expression; use StatementKind::Expression;
assert_expr!("0xf_f+1", binop("+", expr(NatLiteral(255)), expr(NatLiteral(1)))); assert_expr2!("0xf_f_+1", binop("+", expr(NatLiteral(255)), expr(NatLiteral(1))));
assert_ast!( assert_ast2!(
"3; 4; 4.3", "3; 4; 4.3",
vec![ vec![
stmt(Expression(expr(NatLiteral(3)))), stmt(Expression(expr(NatLiteral(3)))),
@ -218,16 +198,16 @@ fn binexps() {
] ]
); );
assert_expr!( assert_expr2!(
"1 + 2 * 3", "1 + 2 * 3",
binop("+", expr(NatLiteral(1)), binop("*", expr(NatLiteral(2)), expr(NatLiteral(3)))) binop("+", expr(NatLiteral(1)), binop("*", expr(NatLiteral(2)), expr(NatLiteral(3))))
); );
assert_expr!( assert_expr2!(
"1 * 2 + 3", "1 * 2 + 3",
binop("+", binop("*", expr(NatLiteral(1)), expr(NatLiteral(2))), expr(NatLiteral(3))) binop("+", binop("*", expr(NatLiteral(1)), expr(NatLiteral(2))), expr(NatLiteral(3)))
); );
assert_expr!("1 && 2", binop("&&", expr(NatLiteral(1)), expr(NatLiteral(2)))); assert_expr2!("1 && 2", binop("&&", expr(NatLiteral(1)), expr(NatLiteral(2))));
assert_expr!( assert_expr2!(
"1 + 2 * 3 + 4", "1 + 2 * 3 + 4",
binop( binop(
"+", "+",
@ -235,48 +215,48 @@ fn binexps() {
expr(NatLiteral(4)) expr(NatLiteral(4))
) )
); );
assert_expr!( assert_expr2!(
"(1 + 2) * 3", "(1 + 2) * 3",
binop("*", binop("+", expr(NatLiteral(1)), expr(NatLiteral(2))), expr(NatLiteral(3))) binop("*", binop("+", expr(NatLiteral(1)), expr(NatLiteral(2))), expr(NatLiteral(3)))
); );
assert_expr!(".1 + .2", binop("+", expr(FloatLiteral(0.1)), expr(FloatLiteral(0.2)))); assert_expr2!(".1 + .2", binop("+", expr(FloatLiteral(0.1)), expr(FloatLiteral(0.2))));
assert_expr!("1 / 2.", binop("/", expr(NatLiteral(1)), expr(FloatLiteral(2.)))); assert_expr2!("1 / 2.", binop("/", expr(NatLiteral(1)), expr(FloatLiteral(2.))));
} }
#[test] #[test]
fn prefix_exps() { fn prefix_exps() {
use ExpressionKind::*; use ExpressionKind::*;
assert_expr!("-3", prefixop("-", expr(NatLiteral(3)))); assert_expr2!("-3", prefixop("-", expr(NatLiteral(3))));
assert_expr!("-0.2", prefixop("-", expr(FloatLiteral(0.2)))); assert_expr2!("-0.2", prefixop("-", expr(FloatLiteral(0.2))));
assert_expr!("!3", prefixop("!", expr(NatLiteral(3)))); assert_expr2!("!3", prefixop("!", expr(NatLiteral(3))));
assert_expr!("!t", prefixop("!", expr(Value(qn!(t))))); assert_expr2!("!t", prefixop("!", expr(Value(qn!(t)))));
assert_expr!("a <- -b", binop("<-", expr(Value(qn!(a))), prefixop("-", expr(Value(qn!(b)))))); assert_expr2!("a <- -b", binop("<-", expr(Value(qn!(a))), prefixop("-", expr(Value(qn!(b))))));
assert_expr!("a <--b", binop("<--", expr(Value(qn!(a))), expr(Value(qn!(b))))); assert_expr2!("a <--b", binop("<--", expr(Value(qn!(a))), expr(Value(qn!(b)))));
} }
#[test] #[test]
fn operators() { fn operators() {
use ExpressionKind::*; use ExpressionKind::*;
assert_expr!("a <- 1", binop("<-", expr(Value(qn!(a))), expr(NatLiteral(1)))); assert_expr2!("a <- 1", binop("<-", expr(Value(qn!(a))), expr(NatLiteral(1))));
assert_expr!("a || 1", binop("||", expr(Value(qn!(a))), expr(NatLiteral(1)))); assert_expr2!("a || 1", binop("||", expr(Value(qn!(a))), expr(NatLiteral(1))));
assert_expr!("a <> 1", binop("<>", expr(Value(qn!(a))), expr(NatLiteral(1)))); assert_expr2!("a <> 1", binop("<>", expr(Value(qn!(a))), expr(NatLiteral(1))));
} }
#[test] #[test]
fn accessors() { fn accessors() {
use ExpressionKind::*; use ExpressionKind::*;
assert_expr!("a.b", expr(Access { name: rc("b"), expr: bx(expr(Value(qn!(a)))) })); assert_expr2!("a.b", expr(Access { name: rc("b"), expr: bx(expr(Value(qn!(a)))) }));
assert_expr!( assert_expr2!(
"a.b.c", "a.b.c",
expr(Access { expr(Access {
name: rc("c"), name: rc("c"),
expr: bx(expr(Access { name: rc("b"), expr: bx(expr(Value(qn!(a)))) })) expr: bx(expr(Access { name: rc("b"), expr: bx(expr(Value(qn!(a)))) }))
}) })
); );
assert_expr!( assert_expr2!(
"a.b.c(3)", "a.b.c(3)",
expr(Call { expr(Call {
f: bx(expr(Access { f: bx(expr(Access {
@ -286,7 +266,7 @@ fn accessors() {
arguments: vec![InvocationArgument::Positional(expr(NatLiteral(3)))], arguments: vec![InvocationArgument::Positional(expr(NatLiteral(3)))],
}) })
); );
assert_expr!( assert_expr2!(
"a.b().c", "a.b().c",
expr(Access { expr(Access {
name: rc("c"), name: rc("c"),
@ -302,13 +282,16 @@ fn accessors() {
fn tuples() { fn tuples() {
use ExpressionKind::*; use ExpressionKind::*;
assert_expr!("()", expr(TupleLiteral(vec![]))); assert_expr2!("()", expr(TupleLiteral(vec![])));
assert_expr!(r#"("hella", 34)"#, expr(TupleLiteral(vec![expr(strlit("hella")), expr(NatLiteral(34))]))); assert_expr2!(
assert_expr!( r#"("hella", 34)"#,
expr(TupleLiteral(vec![expr(StringLiteral(rc("hella"))), expr(NatLiteral(34))]))
);
assert_expr2!(
r#"(1+2, "slough")"#, r#"(1+2, "slough")"#,
expr(TupleLiteral(vec![ expr(TupleLiteral(vec![
binop("+", expr(NatLiteral(1)), expr(NatLiteral(2))), binop("+", expr(NatLiteral(1)), expr(NatLiteral(2))),
expr(strlit("slough")), expr(StringLiteral(rc("slough"))),
])) ]))
); );
} }
@ -317,12 +300,12 @@ fn tuples() {
fn identifiers() { fn identifiers() {
use ExpressionKind::*; use ExpressionKind::*;
assert_expr!("a", expr(Value(qn!(a)))); assert_expr2!("a", expr(Value(qn!(a))));
assert_expr!("some_value", expr(Value(qn!(some_value)))); assert_expr2!("some_value", expr(Value(qn!(some_value))));
assert_expr!("alpha::beta::gamma", expr(Value(qn!(alpha, beta, gamma)))); assert_expr2!("alpha::beta::gamma", expr(Value(qn!(alpha, beta, gamma))));
assert_expr!("a + b", binop("+", expr(Value(qn!(a))), expr(Value(qn!(b))))); assert_expr2!("a + b", binop("+", expr(Value(qn!(a))), expr(Value(qn!(b)))));
assert_expr!("None", expr(Value(qn!(None)))); assert_expr2!("None", expr(Value(qn!(None))));
assert_expr!( assert_expr2!(
"thing::item::call()", "thing::item::call()",
expr(Call { f: bx(expr(Value(qn!(thing, item, call)))), arguments: vec![] }) expr(Call { f: bx(expr(Value(qn!(thing, item, call)))), arguments: vec![] })
); );
@ -331,14 +314,14 @@ fn identifiers() {
#[test] #[test]
fn named_struct() { fn named_struct() {
use ExpressionKind::*; use ExpressionKind::*;
assert_expr!( assert_expr2!(
"Pandas { a: x + y }", "Pandas { a: x + y }",
expr(NamedStruct { expr(NamedStruct {
name: qn!(Pandas), name: qn!(Pandas),
fields: vec![(rc("a"), binop("+", expr(Value(qn!(x))), expr(Value(qn!(y)))))] fields: vec![(rc("a"), binop("+", expr(Value(qn!(x))), expr(Value(qn!(y)))))]
}) })
); );
assert_expr!( assert_expr2!(
"Trousers { a:1, b:800 }", "Trousers { a:1, b:800 }",
expr(NamedStruct { expr(NamedStruct {
name: qn!(Trousers), name: qn!(Trousers),
@ -350,14 +333,14 @@ fn named_struct() {
#[test] #[test]
fn index() { fn index() {
use ExpressionKind::*; use ExpressionKind::*;
assert_expr!( assert_expr2!(
"armok[b,c]", "armok[b,c]",
expr(Index { expr(Index {
indexee: bx(expr(Value(qn!(armok)))), indexee: bx(expr(Value(qn!(armok)))),
indexers: vec![expr(Value(qn!(b))), expr(Value(qn!(c)))] indexers: vec![expr(Value(qn!(b))), expr(Value(qn!(c)))]
}) })
); );
assert_expr!( assert_expr2!(
"a[b,c][1]", "a[b,c][1]",
expr(Index { expr(Index {
indexee: bx(expr(Index { indexee: bx(expr(Index {
@ -367,7 +350,7 @@ fn index() {
indexers: vec![expr(NatLiteral(1))] indexers: vec![expr(NatLiteral(1))]
}) })
); );
assert_expr!( assert_expr2!(
"perspicacity()[a]", "perspicacity()[a]",
expr(Index { expr(Index {
indexee: bx(expr(Call { f: bx(expr(Value(qn!(perspicacity)))), arguments: vec![] })), indexee: bx(expr(Call { f: bx(expr(Value(qn!(perspicacity)))), arguments: vec![] })),
@ -379,17 +362,17 @@ fn index() {
let b = expr(Index { indexee: bx(a), indexers: vec![expr(Value(qn!(b)))] }); let b = expr(Index { indexee: bx(a), indexers: vec![expr(Value(qn!(b)))] });
let c = expr(Call { f: bx(b), arguments: vec![] }); let c = expr(Call { f: bx(b), arguments: vec![] });
let d = expr(Index { indexee: bx(c), indexers: vec![expr(Value(qn!(d)))] }); let d = expr(Index { indexee: bx(c), indexers: vec![expr(Value(qn!(d)))] });
assert_expr!("a()[b]()[d]", d); assert_expr2!("a()[b]()[d]", d);
assert_fail_expr!("a[]", "Empty index expressions are not allowed"); assert_fail_expr2!("a[]", "Empty index expressions are not allowed");
} }
#[test] #[test]
fn while_expression() { fn while_expression() {
use ExpressionKind::*; use ExpressionKind::*;
// assert_expr_comb!("while { }", expr(WhileExpression { condition: None, body: Block::default() })); assert_expr2!("while { }", expr(WhileExpression { condition: None, body: Block::default() }));
assert_expr!( assert_expr2!(
"while a == b { }", "while a == b { }",
expr(WhileExpression { expr(WhileExpression {
condition: Some(bx(binop("==", expr(Value(qn!(a))), expr(Value(qn!(b)))))), condition: Some(bx(binop("==", expr(Value(qn!(a))), expr(Value(qn!(b)))))),
@ -402,26 +385,18 @@ fn while_expression() {
fn for_expression() { fn for_expression() {
use ExpressionKind::*; use ExpressionKind::*;
assert_expr!( assert_expr2!(
"for { a <- garodzny::maybeValue } return 1", "for { a <- garodzny::maybeValue } return 1",
expr(ForExpression { expr(ForExpression {
enumerators: vec![Enumerator { enumerators: vec![Enumerator { id: rc("a"), generator: expr(Value(qn!(garodzny, maybeValue))) }],
identifier: rc("a"),
assignment: false,
generator: expr(Value(qn!(garodzny, maybeValue)))
}],
body: bx(ForBody::MonadicReturn(expr(NatLiteral(1)))) body: bx(ForBody::MonadicReturn(expr(NatLiteral(1))))
}) })
); );
assert_expr!( assert_expr2!(
"for n <- someRange { f(n) ; }", "for n <- someRange { f(n) ; }",
expr(ForExpression { expr(ForExpression {
enumerators: vec![Enumerator { enumerators: vec![Enumerator { id: rc("n"), generator: expr(Value(qn!(someRange))) }],
identifier: rc("n"),
assignment: false,
generator: expr(Value(qn!(someRange)))
}],
body: bx(ForBody::StatementBlock( body: bx(ForBody::StatementBlock(
vec![stmt(StatementKind::Expression(expr(Call { vec![stmt(StatementKind::Expression(expr(Call {
f: bx(expr(Value(qn!(f)))), f: bx(expr(Value(qn!(f)))),
@ -485,7 +460,7 @@ fn lambda_expressions() {
name: rc("String"), name: rc("String"),
params: vec![] params: vec![]
})), })),
body: vec![stmt(StatementKind::Expression(expr(strlit("q")))),].into() body: vec![stmt(StatementKind::Expression(expr(StringLiteral(rc("q"))))),].into()
}) })
); );
} }
@ -528,7 +503,7 @@ fn complex_lambdas() {
use ExpressionKind::*; use ExpressionKind::*;
assert_ast! { assert_ast! {
r#"fn wahoo() { let a = 10; \(x) { x + a } } r#"fn wahoo() { let a = 10; \(x) { x + a } };
wahoo()(3) "#, wahoo()(3) "#,
vec![ vec![
fn_decl(Signature { name: rc("wahoo"), operator: false, type_anno: None, params: vec![] }, fn_decl(Signature { name: rc("wahoo"), operator: false, type_anno: None, params: vec![] },
@ -563,9 +538,7 @@ fn complex_lambdas() {
#[test] #[test]
fn reserved_words() { fn reserved_words() {
//TODO assert a good error message for this assert_fail!("module::item::call()", "Expected an identifier, got Colon");
assert_fail!("module::item::call()");
assert_expr!("modulek::item", expr(ExpressionKind::Value(qn!(modulek, item))));
} }
#[test] #[test]
@ -631,7 +604,7 @@ fn type_annotations() {
#[test] #[test]
fn type_declarations() { fn type_declarations() {
use Declaration::TypeDecl; use Declaration::TypeDecl;
assert_ast! { assert_ast2! {
"type Alpha = Alpha", vec![ "type Alpha = Alpha", vec![
decl(TypeDecl { decl(TypeDecl {
name: TypeSingletonName { name: rc("Alpha"), params: vec![] }, name: TypeSingletonName { name: rc("Alpha"), params: vec![] },
@ -647,7 +620,7 @@ fn type_declarations() {
] ]
}; };
assert_ast!( assert_ast2!(
"type mut Kuah = Kuah", "type mut Kuah = Kuah",
decl(TypeDecl { decl(TypeDecl {
name: TypeSingletonName { name: rc("Kuah"), params: vec![] }, name: TypeSingletonName { name: rc("Kuah"), params: vec![] },
@ -660,7 +633,7 @@ fn type_declarations() {
}) })
); );
assert_ast! { assert_ast2! {
"type Alpha = Alpha { a: Int, b: Int }", "type Alpha = Alpha { a: Int, b: Int }",
vec![decl(TypeDecl { vec![decl(TypeDecl {
name: TypeSingletonName { name: rc("Alpha"), params: vec![] }, name: TypeSingletonName { name: rc("Alpha"), params: vec![] },
@ -678,20 +651,20 @@ fn type_declarations() {
})] })]
}; };
assert_ast! { assert_ast2! {
"type Alpha = { a: Int, b: Int }", "type Alpha = { a: Int, b: Int }",
vec![decl(TypeDecl { vec![decl(TypeDecl {
name: TypeSingletonName { name: rc("Alpha"), params: vec![] }, name: TypeSingletonName { name: rc("Alpha"), params: vec![] },
mutable: false, mutable: false,
body: TypeBody::ImmediateRecord { id: Default::default(), fields: vec![ body: TypeBody::ImmediateRecord(Default::default(), vec![
(rc("a"), ty_simple("Int")), (rc("a"), ty_simple("Int")),
(rc("b"), ty_simple("Int")) (rc("b"), ty_simple("Int"))
]} ])
})] })]
}; };
assert_ast!( assert_ast2!(
"type Option<T> = None | Some(T)", "type Option<T> = None | Some(T)",
vec![decl(TypeDecl { vec![decl(TypeDecl {
name: TypeSingletonName { name: TypeSingletonName {
@ -713,12 +686,12 @@ fn type_declarations() {
})] })]
); );
assert_ast!( assert_ast2!(
"type alias Alpha = Beta", "type alias Alpha = Beta",
decl(Declaration::TypeAlias { alias: rc("Alpha"), original: rc("Beta") }) decl(Declaration::TypeAlias { alias: rc("Alpha"), original: rc("Beta") })
); );
assert_ast!("type Complex<T, U> = Unit | Record { field: AnotherType<Bool>, field2: (Nat, Int), field3: T } | Tuple(Int, (String, T))", assert_ast2!("type Complex<T, U> = Unit | Record { field: AnotherType<Bool>, field2: (Nat, Int), field3: T } | Tuple(Int, (String, T))",
decl(TypeDecl { decl(TypeDecl {
name: TypeSingletonName { name: rc("Complex"), params: vec![ name: TypeSingletonName { name: rc("Complex"), params: vec![
TypeIdentifier::Singleton(TypeSingletonName { name: rc("T"), params: vec![] }), TypeIdentifier::Singleton(TypeSingletonName { name: rc("T"), params: vec![] }),
@ -755,7 +728,7 @@ fn type_declarations() {
fn declarations() { fn declarations() {
use ExpressionKind::*; use ExpressionKind::*;
assert_ast!( assert_ast2!(
"let q_q = Yolo::Swaggins", "let q_q = Yolo::Swaggins",
vec![decl(Declaration::Binding { vec![decl(Declaration::Binding {
name: rc("q_q"), name: rc("q_q"),
@ -770,7 +743,7 @@ fn declarations() {
fn bindings() { fn bindings() {
use ExpressionKind::*; use ExpressionKind::*;
assert_ast!( assert_ast2!(
"let mut a = 10", "let mut a = 10",
vec![decl(Declaration::Binding { vec![decl(Declaration::Binding {
name: rc("a"), name: rc("a"),
@ -780,7 +753,7 @@ fn bindings() {
})] })]
); );
assert_ast!( assert_ast2!(
"let a = 2 + a", "let a = 2 + a",
vec![stmt(StatementKind::Declaration(Declaration::Binding { vec![stmt(StatementKind::Declaration(Declaration::Binding {
name: rc("a"), name: rc("a"),
@ -790,7 +763,7 @@ fn bindings() {
}))] }))]
); );
assert_ast!( assert_ast2!(
"let a: Nat = 2", "let a: Nat = 2",
vec![stmt(StatementKind::Declaration(Declaration::Binding { vec![stmt(StatementKind::Declaration(Declaration::Binding {
name: rc("a"), name: rc("a"),
@ -804,7 +777,7 @@ fn bindings() {
#[test] #[test]
fn functions() { fn functions() {
use ExpressionKind::*; use ExpressionKind::*;
assert_ast!( assert_ast2!(
"fn oi()", "fn oi()",
vec![stmt(StatementKind::Declaration(Declaration::FuncSig(Signature { vec![stmt(StatementKind::Declaration(Declaration::FuncSig(Signature {
name: rc("oi"), name: rc("oi"),
@ -814,12 +787,12 @@ fn functions() {
})))] })))]
); );
assert_ast!( assert_ast2!(
"oi()", "oi()",
vec![stmt(StatementKind::Expression(expr(Call { f: bx(expr(Value(qn!(oi)))), arguments: vec![] })))] vec![stmt(StatementKind::Expression(expr(Call { f: bx(expr(Value(qn!(oi)))), arguments: vec![] })))]
); );
assert_expr!( assert_expr2!(
"oi(a, 2+2)", "oi(a, 2+2)",
expr(Call { expr(Call {
f: bx(expr(Value(qn!(oi)))), f: bx(expr(Value(qn!(oi)))),
@ -829,10 +802,9 @@ fn functions() {
] ]
}) })
); );
let err_msg = "0: at line 1, in Alpha:\na(b,,c)\n ^\n\n1: at line 1, in Alt:\na(b,,c)\n ^\n\n2: at line 1, in token:\na(b,,c)\n ^\n\n3: at line 1, in identifier-expr:\na(b,,c)\n ^\n\n4: at line 1, in Alt:\na(b,,c)\n ^\n\n5: at line 1, in primary-expr:\na(b,,c)\n ^\n\n6: at line 1, in extended-expr:\na(b,,c)\n ^\n\n7: at line 1, in prefix-expr:\na(b,,c)\n ^\n\n8: at line 1, in expression-kind:\na(b,,c)\n ^\n\n9: at line 1, in Alt:\na(b,,c)\n ^\n\n10: at line 1, in invocation-argument:\na(b,,c)\n ^\n\n11: at line 1, in call-part:\na(b,,c)\n ^\n\n12: at line 1, in extended-expr:\na(b,,c)\n^\n\n13: at line 1, in prefix-expr:\na(b,,c)\n^\n\n14: at line 1, in expression-kind:\na(b,,c)\n^\n\n15: at line 1, in Parsing-statement:\na(b,,c)\n^\n\n16: at line 1, in AST:\na(b,,c)\n^\n\n"; assert_fail!("a(b,,c)", "Expected a literal expression, got Comma");
assert_fail!("a(b,,c)", err_msg);
assert_ast!( assert_ast2!(
"fn a(b, c: Int): Int", "fn a(b, c: Int): Int",
vec![stmt(StatementKind::Declaration(Declaration::FuncSig(Signature { vec![stmt(StatementKind::Declaration(Declaration::FuncSig(Signature {
name: rc("a"), name: rc("a"),
@ -851,57 +823,18 @@ fn functions() {
type_anno: Some(TypeIdentifier::Singleton(TypeSingletonName { name: rc("Int"), params: vec![] })), type_anno: Some(TypeIdentifier::Singleton(TypeSingletonName { name: rc("Int"), params: vec![] })),
})))] })))]
); );
let source = r#"
fn some_function() {
}"#;
assert_ast!(
source,
vec![fn_decl(
Signature { name: rc("some_function"), operator: false, type_anno: None, params: vec![] },
vec![].into()
)]
);
}
#[test]
fn custom_operator() {
let source = "fn (!!)(lhs,rhs)";
assert_ast!(
source,
vec![stmt(StatementKind::Declaration(Declaration::FuncSig(Signature {
name: rc("!!"),
operator: true,
params: vec![
FormalParam { name: rc("lhs"), default: None, anno: None },
FormalParam { name: rc("rhs"), default: None, anno: None },
],
type_anno: None
})))]
);
} }
#[test] #[test]
fn max_function_params() { fn max_function_params() {
let mut buf = "fn longfunc(".to_string(); let mut buf = "fn longfunc(".to_string();
for n in 0..255 { for n in 0..256 {
write!(buf, "a{}, ", n).unwrap(); write!(buf, "a{}, ", n).unwrap();
} }
write!(buf, " a256").unwrap();
write!(buf, ") {{ return 20 }}").unwrap(); write!(buf, ") {{ return 20 }}").unwrap();
//TODO need to create a good, custom error message for this case //assert_fail2!(&buf, "A function cannot have more than 255 arguments");
//assert_fail!(&buf, "A function cannot have more than 255 arguments"); //TODO better errors again
assert_fail!(&buf); assert_fail2!(&buf, "error at 1:1439: expected ['a' ..= 'z' | 'A' ..= 'Z' | '_']");
let mut buf = r#"\("#.to_string();
for n in 0..255 {
write!(buf, "a{}, ", n).unwrap();
}
write!(buf, " a256").unwrap();
write!(buf, ") {{ return 10 }}").unwrap();
assert_fail!(&buf);
} }
#[test] #[test]
@ -989,42 +922,44 @@ fn interface() {
#[test] #[test]
fn impls() { fn impls() {
use Declaration::{FuncDecl, Impl}; use Declaration::{FuncSig, Impl};
let block = vec![
stmt(FuncDecl(
Signature { name: rc("yolo"), operator: false, params: vec![], type_anno: None },
vec![].into(),
)),
stmt(FuncDecl(
Signature { name: rc("swagg"), operator: false, params: vec![], type_anno: None },
vec![].into(),
)),
];
assert_ast!( assert_ast!(
"impl Heh { fn yolo() { }; fn swagg() { } }", "impl Heh { fn yolo(); fn swagg(); }",
vec![decl(Impl { type_name: ty_simple("Heh"), interface_name: None, block: block.clone() })] vec![decl(Impl {
type_name: ty_simple("Heh"),
interface_name: None,
block: vec![
FuncSig(Signature { name: rc("yolo"), operator: false, params: vec![], type_anno: None }),
FuncSig(Signature { name: rc("swagg"), operator: false, params: vec![], type_anno: None })
]
})]
); );
assert_ast!( assert_ast!(
"impl Heh<X> { fn yolo() { }; fn swagg() { }; }", "impl Heh<X> { fn yolo(); fn swagg(); }",
vec![decl(Impl { vec![decl(Impl {
type_name: TypeIdentifier::Singleton(TypeSingletonName { type_name: TypeIdentifier::Singleton(TypeSingletonName {
name: rc("Heh"), name: rc("Heh"),
params: vec![ty_simple("X")] params: vec![ty_simple("X")]
}), }),
interface_name: None, interface_name: None,
block: block.clone(), block: vec![
FuncSig(Signature { name: rc("yolo"), operator: false, params: vec![], type_anno: None }),
FuncSig(Signature { name: rc("swagg"), operator: false, params: vec![], type_anno: None })
]
})] })]
); );
assert_ast!( assert_ast!(
"impl Heh for Saraz { fn yolo() {}; fn swagg() {} }", "impl Heh for Saraz { fn yolo(); fn swagg(); }",
vec![decl(Impl { vec![decl(Impl {
type_name: ty_simple("Saraz"), type_name: ty_simple("Saraz"),
interface_name: Some(TypeSingletonName { name: rc("Heh"), params: vec![] }), interface_name: Some(TypeSingletonName { name: rc("Heh"), params: vec![] }),
block: block.clone(), block: vec![
FuncSig(Signature { name: rc("yolo"), operator: false, params: vec![], type_anno: None }),
FuncSig(Signature { name: rc("swagg"), operator: false, params: vec![], type_anno: None })
]
})] })]
); );
@ -1123,7 +1058,14 @@ fn imports() {
}))] }))]
}; };
assert_fail!("import bespouri::{}"); assert_ast! {
"import bespouri::{}",
vec![stmt(StatementKind::Import(ImportSpecifier {
id: Default::default(),
path_components: vec![rc("bespouri")],
imported_names: ImportedNames::List(vec![]),
}))]
};
assert_ast! { assert_ast! {
"import bespouri::*", "import bespouri::*",
@ -1149,8 +1091,7 @@ fn if_exprs() {
}) })
); );
//TODO add tests for named expressions assert_expr2!(
assert_expr!(
"if a then b else c", "if a then b else c",
expr(IfExpression { expr(IfExpression {
discriminator: Some(bx(expr(Value(qn!(a))))), discriminator: Some(bx(expr(Value(qn!(a))))),
@ -1162,7 +1103,8 @@ fn if_exprs() {
); );
assert_expr!( assert_expr!(
r#"if true then { r#"
if true then {
let a = 10 let a = 10
b b
} else { } else {
@ -1192,7 +1134,7 @@ fn pattern_matching() {
use ExpressionKind::*; use ExpressionKind::*;
for item in ["if x is Some(a) then { 4 } else { 9 }", "if x is Some(a) then 4 else 9"] { for item in ["if x is Some(a) then { 4 } else { 9 }", "if x is Some(a) then 4 else 9"] {
assert_expr!( assert_expr2!(
item, item,
expr(IfExpression { expr(IfExpression {
discriminator: Some(bx(expr(Value(qn!(x))))), discriminator: Some(bx(expr(Value(qn!(x))))),
@ -1205,7 +1147,7 @@ fn pattern_matching() {
); );
} }
assert_expr!( assert_expr2!(
"if x is Something { a, b: x } then { 4 } else { 9 }", "if x is Something { a, b: x } then { 4 } else { 9 }",
expr(IfExpression { expr(IfExpression {
discriminator: Some(bx(expr(Value(qn!(x))))), discriminator: Some(bx(expr(Value(qn!(x))))),
@ -1220,7 +1162,7 @@ fn pattern_matching() {
}) })
); );
assert_expr!( assert_expr2!(
"if x is -1 then 1 else 2", "if x is -1 then 1 else 2",
expr(IfExpression { expr(IfExpression {
discriminator: Some(bx(expr(Value(qn!(x))))), discriminator: Some(bx(expr(Value(qn!(x))))),
@ -1232,7 +1174,7 @@ fn pattern_matching() {
}) })
); );
assert_expr!( assert_expr2!(
"if x is true then 1 else 2", "if x is true then 1 else 2",
expr(IfExpression { expr(IfExpression {
discriminator: Some(bx(expr(Value(qn!(x))))), discriminator: Some(bx(expr(Value(qn!(x))))),
@ -1244,8 +1186,8 @@ fn pattern_matching() {
}) })
); );
assert_expr!( assert_expr2!(
"if x { is 1 then 5; else 20 }", "if x { is 1 then 5, else 20 }",
expr(IfExpression { expr(IfExpression {
discriminator: Some(bx(expr(Value(qn!(x))))), discriminator: Some(bx(expr(Value(qn!(x))))),
body: bx(IfExpressionBody::CondList(vec![ body: bx(IfExpressionBody::CondList(vec![
@ -1266,7 +1208,7 @@ fn pattern_matching() {
}) })
); );
assert_expr!( assert_expr2!(
r#"if x is "gnosticism" then 1 else 2"#, r#"if x is "gnosticism" then 1 else 2"#,
expr(IfExpression { expr(IfExpression {
discriminator: Some(bx(expr(Value(qn!(x))))), discriminator: Some(bx(expr(Value(qn!(x))))),
@ -1281,14 +1223,14 @@ fn pattern_matching() {
assert_expr! { assert_expr! {
r#" r#"
if (45, "panda", false, 2.2) { if (45, "panda", false, 2.2) {
is (49, "pablo", _, 28.4) then "no" is (49, "pablo", _, 28.4) then "no"
is (_, "panda", _, -2.2) then "yes" is (_, "panda", _, -2.2) then "yes"
is _ then "maybe" is _ then "maybe"
}"#, }"#,
expr( expr(
IfExpression { IfExpression {
discriminator: Some(bx(expr(TupleLiteral(vec![ discriminator: Some(bx(expr(TupleLiteral(vec![
expr(NatLiteral(45)), expr(strlit("panda")), expr(BoolLiteral(false)), expr(FloatLiteral(2.2)) expr(NatLiteral(45)), expr(StringLiteral(rc("panda"))), expr(BoolLiteral(false)), expr(FloatLiteral(2.2))
])))), ])))),
body: bx(IfExpressionBody::CondList(vec![ body: bx(IfExpressionBody::CondList(vec![
ConditionArm { ConditionArm {
@ -1301,7 +1243,7 @@ if (45, "panda", false, 2.2) {
] ]
)), )),
guard: None, guard: None,
body: vec![stmt(StatementKind::Expression(expr(strlit("no"))))].into(), body: vec![stmt(StatementKind::Expression(expr(StringLiteral(rc("no")))))].into(),
}, },
ConditionArm { ConditionArm {
condition: Condition::Pattern(Pattern::TuplePattern( condition: Condition::Pattern(Pattern::TuplePattern(
@ -1313,12 +1255,12 @@ if (45, "panda", false, 2.2) {
] ]
)), )),
guard: None, guard: None,
body: vec![stmt(StatementKind::Expression(expr(strlit("yes"))))].into(), body: vec![stmt(StatementKind::Expression(expr(StringLiteral(rc("yes")))))].into(),
}, },
ConditionArm { ConditionArm {
condition: Condition::Pattern(Pattern::Ignored), condition: Condition::Pattern(Pattern::Ignored),
guard: None, guard: None,
body: vec![exst(strlit("maybe"))].into(), body: vec![exst(StringLiteral(rc("maybe")))].into(),
}, },
])) ]))
} }
@ -1330,7 +1272,7 @@ if (45, "panda", false, 2.2) {
fn flow_control() { fn flow_control() {
use ExpressionKind::*; use ExpressionKind::*;
// This is an incorrect program, but should parse correctly. // This is an incorrect program, but shoudl parse correctly.
let source = r#" let source = r#"
fn test() { fn test() {
let a = 10; let a = 10;
@ -1368,94 +1310,7 @@ fn blocks() {
let cases = ["{ a }", "{ a; }", "{a}", "{ a\n }", "{ a\n\n }", "{ a;\n\n; }"]; let cases = ["{ a }", "{ a; }", "{a}", "{ a\n }", "{ a\n\n }", "{ a;\n\n; }"];
for case in cases.iter() { for case in cases.iter() {
assert_block!(case, vec![exst(Value(qn!(a)))].into()); let block = schala_parser::block(case);
assert_eq!(block.unwrap(), vec![exst(Value(qn!(a)))].into());
} }
let source = r#"{
fn quah() {
fn foo() { }
}
}"#;
assert_block!(
source,
vec![decl(Declaration::FuncDecl(
Signature { name: rc("quah"), operator: false, params: vec![], type_anno: None },
vec![decl(Declaration::FuncDecl(
Signature { name: rc("foo"), operator: false, params: vec![], type_anno: None },
vec![].into(),
))]
.into()
))]
.into()
);
assert_block!("{}", vec![].into());
let source = r#"{
//hella
4_5 //bog
11; /*chutney*/0xf
}"#;
assert_block!(
source,
vec![
Statement {
id: Default::default(),
location: Default::default(),
kind: StatementKind::Expression(Expression::new(
Default::default(),
ExpressionKind::NatLiteral(45)
))
},
Statement {
id: Default::default(),
location: Default::default(),
kind: StatementKind::Expression(Expression::new(
Default::default(),
ExpressionKind::NatLiteral(11)
))
},
Statement {
id: Default::default(),
location: Default::default(),
kind: StatementKind::Expression(Expression::new(
Default::default(),
ExpressionKind::NatLiteral(15)
))
},
]
.into()
);
} }
#[test]
fn comments() {
use ExpressionKind::*;
let source = "1 + /* hella /* bro */ */ 2";
assert_expr!(source, binop("+", expr(NatLiteral(1)), expr(NatLiteral(2))));
//TODO make sure this error message makes sense
let source = "1 + /* hella /* bro */ 2";
assert_fail_expr!(source, "foo");
let source = "1 + /* hella */ bro */ 2";
assert_fail_expr!(source, binop("+", expr(NatLiteral(1)), expr(NatLiteral(2))));
let source = "5//no man\n";
assert_ast!(source, vec![exst(NatLiteral(5))]);
let source = " /*yolo*/ barnaby";
assert_ast!(source, exst(ExpressionKind::Value(qn!(barnaby))));
}
//TODO support backtick operators like this
/*
#[test]
fn backtick_operators() {
let output = token_kinds("1 `plus` 2");
assert_eq!(output, vec![digit!("1"), op!("plus"), digit!("2")]);
}
*/

View File

@ -32,7 +32,7 @@ impl<'a, 'b> Reducer<'a, 'b> {
// First reduce all functions // First reduce all functions
// TODO once this works, maybe rewrite it using the Visitor // TODO once this works, maybe rewrite it using the Visitor
for statement in ast.statements.statements.iter() { for statement in ast.statements.statements.iter() {
self.top_level_definition(statement); self.top_level_statement(statement);
} }
// Then compute the entrypoint statements (which may reference previously-computed // Then compute the entrypoint statements (which may reference previously-computed
@ -51,8 +51,9 @@ impl<'a, 'b> Reducer<'a, 'b> {
.. ..
}) => { }) => {
let symbol = self.symbol_table.lookup_symbol(item_id).unwrap(); let symbol = self.symbol_table.lookup_symbol(item_id).unwrap();
let def_id = symbol.def_id().unwrap();
entrypoint.push(Statement::Binding { entrypoint.push(Statement::Binding {
id: symbol.def_id(), id: def_id,
constant: *constant, constant: *constant,
expr: self.expression(expr), expr: self.expression(expr),
}); });
@ -64,30 +65,17 @@ impl<'a, 'b> Reducer<'a, 'b> {
ReducedIR { functions: self.functions, entrypoint } ReducedIR { functions: self.functions, entrypoint }
} }
fn top_level_definition(&mut self, statement: &ast::Statement<ast::StatementKind>) { fn top_level_statement(&mut self, statement: &ast::Statement) {
let ast::Statement { id: item_id, kind, .. } = statement; let ast::Statement { id: item_id, kind, .. } = statement;
match kind { match kind {
ast::StatementKind::Expression(_expr) => { ast::StatementKind::Expression(_expr) => {
//TODO expressions can in principle contain definitions, but I won't worry //TODO expressions can in principle contain definitions, but I won't worry
//about it now //about it now
} }
ast::StatementKind::Declaration(decl) => match decl { ast::StatementKind::Declaration(decl) =>
ast::Declaration::FuncDecl(_, statements) => { if let ast::Declaration::FuncDecl(_, statements) = decl {
self.insert_function_definition(item_id, statements); self.insert_function_definition(item_id, statements);
} },
ast::Declaration::Impl { type_name: _, interface_name: _, block } =>
for item in block {
if let ast::Statement {
id: item_id,
kind: ast::Declaration::FuncDecl(_, statements),
..
} = item
{
self.insert_function_definition(item_id, statements);
}
},
_ => (),
},
// Imports should have already been processed by the symbol table and are irrelevant // Imports should have already been processed by the symbol table and are irrelevant
// for this representation. // for this representation.
ast::StatementKind::Import(..) => (), ast::StatementKind::Import(..) => (),
@ -97,10 +85,7 @@ impl<'a, 'b> Reducer<'a, 'b> {
} }
} }
fn function_internal_statement( fn function_internal_statement(&mut self, statement: &ast::Statement) -> Option<Statement> {
&mut self,
statement: &ast::Statement<ast::StatementKind>,
) -> Option<Statement> {
let ast::Statement { id: item_id, kind, .. } = statement; let ast::Statement { id: item_id, kind, .. } = statement;
match kind { match kind {
ast::StatementKind::Expression(expr) => Some(Statement::Expression(self.expression(expr))), ast::StatementKind::Expression(expr) => Some(Statement::Expression(self.expression(expr))),
@ -111,11 +96,8 @@ impl<'a, 'b> Reducer<'a, 'b> {
} }
ast::Declaration::Binding { constant, expr, .. } => { ast::Declaration::Binding { constant, expr, .. } => {
let symbol = self.symbol_table.lookup_symbol(item_id).unwrap(); let symbol = self.symbol_table.lookup_symbol(item_id).unwrap();
Some(Statement::Binding { let def_id = symbol.def_id().unwrap();
id: symbol.def_id(), Some(Statement::Binding { id: def_id, constant: *constant, expr: self.expression(expr) })
constant: *constant,
expr: self.expression(expr),
})
} }
_ => None, _ => None,
}, },
@ -133,48 +115,26 @@ impl<'a, 'b> Reducer<'a, 'b> {
fn insert_function_definition(&mut self, item_id: &ast::ItemId, statements: &ast::Block) { fn insert_function_definition(&mut self, item_id: &ast::ItemId, statements: &ast::Block) {
let symbol = self.symbol_table.lookup_symbol(item_id).unwrap(); let symbol = self.symbol_table.lookup_symbol(item_id).unwrap();
let def_id = symbol.def_id().unwrap();
let function_def = FunctionDefinition { body: self.function_internal_block(statements) }; let function_def = FunctionDefinition { body: self.function_internal_block(statements) };
self.functions.insert(symbol.def_id(), function_def); self.functions.insert(def_id, function_def);
}
//TODO this needs to be type-aware to work correctly
fn lookup_method(&mut self, name: &str) -> Option<DefId> {
for (def_id, function) in self.functions.iter() {
let symbol = self.symbol_table.lookup_symbol_by_def(def_id)?;
println!("Def Id: {} symbol: {:?}", def_id, symbol);
if symbol.local_name() == name {
return Some(*def_id);
}
}
None
} }
fn expression(&mut self, expr: &ast::Expression) -> Expression { fn expression(&mut self, expr: &ast::Expression) -> Expression {
use crate::ast::ExpressionKind::*; use crate::ast::ExpressionKind::*;
match &expr.kind { match &expr.kind {
SelfValue => Expression::Lookup(Lookup::SelfParam),
NatLiteral(n) => Expression::Literal(Literal::Nat(*n)), NatLiteral(n) => Expression::Literal(Literal::Nat(*n)),
FloatLiteral(f) => Expression::Literal(Literal::Float(*f)), FloatLiteral(f) => Expression::Literal(Literal::Float(*f)),
//TODO implement handling string literal prefixes StringLiteral(s) => Expression::Literal(Literal::StringLit(s.clone())),
StringLiteral { s, prefix: _ } => Expression::Literal(Literal::StringLit(s.clone())),
BoolLiteral(b) => Expression::Literal(Literal::Bool(*b)), BoolLiteral(b) => Expression::Literal(Literal::Bool(*b)),
BinExp(binop, lhs, rhs) => self.binop(binop, lhs, rhs), BinExp(binop, lhs, rhs) => self.binop(binop, lhs, rhs),
PrefixExp(op, arg) => self.prefix(op, arg), PrefixExp(op, arg) => self.prefix(op, arg),
Value(qualified_name) => self.value(qualified_name), Value(qualified_name) => self.value(qualified_name),
Call { f, arguments } => { Call { f, arguments } => Expression::Call {
let f = self.expression(f); f: Box::new(self.expression(f)),
let args = arguments.iter().map(|arg| self.invocation_argument(arg)).collect(); args: arguments.iter().map(|arg| self.invocation_argument(arg)).collect(),
//TODO need to have full type availability at this point to do this method lookup },
//correctly
if let Expression::Access { name, expr } = f {
let def_id = self.lookup_method(&name).unwrap();
let method = Expression::Lookup(Lookup::Function(def_id));
Expression::CallMethod { f: Box::new(method), args, self_expr: expr }
} else {
Expression::Call { f: Box::new(f), args }
}
}
TupleLiteral(exprs) => Expression::Tuple(exprs.iter().map(|e| self.expression(e)).collect()), TupleLiteral(exprs) => Expression::Tuple(exprs.iter().map(|e| self.expression(e)).collect()),
IfExpression { discriminator, body } => IfExpression { discriminator, body } =>
self.reduce_if_expression(discriminator.as_ref().map(|x| x.as_ref()), body), self.reduce_if_expression(discriminator.as_ref().map(|x| x.as_ref()), body),
@ -185,7 +145,7 @@ impl<'a, 'b> Reducer<'a, 'b> {
NamedStruct { name, fields } => { NamedStruct { name, fields } => {
let symbol = match self.symbol_table.lookup_symbol(&name.id) { let symbol = match self.symbol_table.lookup_symbol(&name.id) {
Some(symbol) => symbol, Some(symbol) => symbol,
None => return Expression::ReductionError(format!("No symbol found for {}", name)), None => return Expression::ReductionError(format!("No symbol found for {:?}", name)),
}; };
let (tag, type_id) = match symbol.spec() { let (tag, type_id) = match symbol.spec() {
SymbolSpec::RecordConstructor { tag, type_id } => (tag, type_id), SymbolSpec::RecordConstructor { tag, type_id } => (tag, type_id),
@ -345,7 +305,7 @@ impl<'a, 'b> Reducer<'a, 'b> {
let lval = match &lhs.kind { let lval = match &lhs.kind {
ast::ExpressionKind::Value(qualified_name) => { ast::ExpressionKind::Value(qualified_name) => {
if let Some(symbol) = self.symbol_table.lookup_symbol(&qualified_name.id) { if let Some(symbol) = self.symbol_table.lookup_symbol(&qualified_name.id) {
symbol.def_id() symbol.def_id().unwrap()
} else { } else {
return ReductionError(format!("Couldn't look up name: {:?}", qualified_name)); return ReductionError(format!("Couldn't look up name: {:?}", qualified_name));
} }
@ -370,16 +330,16 @@ impl<'a, 'b> Reducer<'a, 'b> {
let symbol = match self.symbol_table.lookup_symbol(&qualified_name.id) { let symbol = match self.symbol_table.lookup_symbol(&qualified_name.id) {
Some(s) => s, Some(s) => s,
None => None =>
return Expression::ReductionError(format!("No symbol found for name: `{}`", qualified_name)), return Expression::ReductionError(format!("No symbol found for name: {:?}", qualified_name)),
}; };
let def_id = symbol.def_id(); let def_id = symbol.def_id();
match symbol.spec() { match symbol.spec() {
Builtin(b) => Expression::Callable(Callable::Builtin(b)), Builtin(b) => Expression::Callable(Callable::Builtin(b)),
Func { .. } => Expression::Lookup(Lookup::Function(def_id)), Func => Expression::Lookup(Lookup::Function(def_id.unwrap())),
GlobalBinding => Expression::Lookup(Lookup::GlobalVar(def_id)), GlobalBinding => Expression::Lookup(Lookup::GlobalVar(def_id.unwrap())),
LocalVariable => Expression::Lookup(Lookup::LocalVar(def_id)), LocalVariable => Expression::Lookup(Lookup::LocalVar(def_id.unwrap())),
FunctionParam(n) => Expression::Lookup(Lookup::Param(n)), FunctionParam(n) => Expression::Lookup(Lookup::Param(n)),
DataConstructor { tag, type_id } => DataConstructor { tag, type_id } =>
Expression::Callable(Callable::DataConstructor { type_id, tag }), Expression::Callable(Callable::DataConstructor { type_id, tag }),
@ -432,7 +392,7 @@ impl ast::Pattern {
SymbolSpec::DataConstructor { tag, type_id: _ } => SymbolSpec::DataConstructor { tag, type_id: _ } =>
Pattern::Tuple { tag: Some(tag), subpatterns: vec![] }, Pattern::Tuple { tag: Some(tag), subpatterns: vec![] },
SymbolSpec::LocalVariable => { SymbolSpec::LocalVariable => {
let def_id = symbol.def_id(); let def_id = symbol.def_id().unwrap();
Pattern::Binding(def_id) Pattern::Binding(def_id)
} }
spec => return Err(format!("Unexpected VarOrName symbol: {:?}", spec).into()), spec => return Err(format!("Unexpected VarOrName symbol: {:?}", spec).into()),

View File

@ -40,22 +40,5 @@ fn test_ir() {
let reduced = build_ir(src); let reduced = build_ir(src);
assert_eq!(reduced.functions.len(), 3); assert_eq!(reduced.functions.len(), 3);
} //assert!(1 == 2);
#[test]
fn test_methods() {
let src = r#"
type Thing = Thing
impl Thing {
fn a_method() {
20
}
}
let a = Thing
4 + a.a_method()
"#;
let reduced = build_ir(src);
assert_eq!(reduced.functions.len(), 1);
} }

View File

@ -57,7 +57,6 @@ pub enum Expression {
Access { name: String, expr: Box<Expression> }, Access { name: String, expr: Box<Expression> },
Callable(Callable), Callable(Callable),
Call { f: Box<Expression>, args: Vec<Expression> }, Call { f: Box<Expression>, args: Vec<Expression> },
CallMethod { f: Box<Expression>, args: Vec<Expression>, self_expr: Box<Expression> },
Conditional { cond: Box<Expression>, then_clause: Vec<Statement>, else_clause: Vec<Statement> }, Conditional { cond: Box<Expression>, then_clause: Vec<Statement>, else_clause: Vec<Statement> },
CaseMatch { cond: Box<Expression>, alternatives: Vec<Alternative> }, CaseMatch { cond: Box<Expression>, alternatives: Vec<Alternative> },
Loop { cond: Box<Expression>, statements: Vec<Statement> }, Loop { cond: Box<Expression>, statements: Vec<Statement> },
@ -91,7 +90,6 @@ pub enum Lookup {
GlobalVar(DefId), GlobalVar(DefId),
Function(DefId), Function(DefId),
Param(u8), Param(u8),
SelfParam,
} }
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]

View File

@ -4,7 +4,9 @@ use schala_repl::{
}; };
use stopwatch::Stopwatch; use stopwatch::Stopwatch;
use crate::{error::SchalaError, parsing, reduced_ir, symbol_table, tree_walk_eval, type_inference}; use crate::{
error::SchalaError, parsing, reduced_ir, symbol_table, tokenizing, tree_walk_eval, type_inference,
};
/// All the state necessary to parse and execute a Schala program are stored in this struct. /// All the state necessary to parse and execute a Schala program are stored in this struct.
pub struct Schala<'a> { pub struct Schala<'a> {
@ -72,10 +74,18 @@ impl<'a> Schala<'a> {
self.timings = vec![]; self.timings = vec![];
let sw = Stopwatch::start_new(); let sw = Stopwatch::start_new();
self.source_reference.load_new_source(source); // 1st stage - tokenization
// TODO tokenize should return its own error type
let tokens = tokenizing::tokenize(source);
if let Some(err) = SchalaError::from_tokens(&tokens) {
return Err(err);
}
//2nd stage - parsing
self.active_parser.add_new_tokens(tokens);
let ast = self let ast = self
.active_parser .active_parser
.parse(source) .parse()
.map_err(|err| SchalaError::from_parse_error(err, &self.source_reference))?; .map_err(|err| SchalaError::from_parse_error(err, &self.source_reference))?;
self.timings.push(("parsing", sw.elapsed())); self.timings.push(("parsing", sw.elapsed()));
@ -88,7 +98,8 @@ impl<'a> Schala<'a> {
self.timings.push(("symbol_table", sw.elapsed())); self.timings.push(("symbol_table", sw.elapsed()));
// Typechecking // Typechecking
let _overall_type = self.type_context.typecheck(&ast).map_err(SchalaError::from_type_error); // TODO typechecking not working
//let _overall_type = self.type_context.typecheck(&ast).map_err(SchalaError::from_type_error);
let sw = Stopwatch::start_new(); let sw = Stopwatch::start_new();
let reduced_ir = reduced_ir::reduce(&ast, &self.symbol_table, &self.type_context); let reduced_ir = reduced_ir::reduce(&ast, &self.symbol_table, &self.type_context);
@ -111,50 +122,31 @@ impl<'a> Schala<'a> {
/// Represents lines of source code /// Represents lines of source code
pub(crate) struct SourceReference { pub(crate) struct SourceReference {
last_source: Option<String>, lines: Option<Vec<String>>,
/// Offsets in *bytes* (not chars) representing a newline character
newline_offsets: Vec<usize>,
} }
impl SourceReference { impl SourceReference {
pub(crate) fn new() -> SourceReference { fn new() -> SourceReference {
SourceReference { last_source: None, newline_offsets: vec![] } SourceReference { lines: None }
} }
pub(crate) fn load_new_source(&mut self, source: &str) { fn load_new_source(&mut self, source: &str) {
self.newline_offsets = vec![]; //TODO this is a lot of heap allocations - maybe there's a way to make it more efficient?
for (offset, ch) in source.as_bytes().iter().enumerate() { self.lines = Some(source.lines().map(|s| s.to_string()).collect());
if *ch == b'\n' {
self.newline_offsets.push(offset);
}
}
self.last_source = Some(source.to_string());
} }
// (line_start, line_num, the string itself) pub fn get_line(&self, line: usize) -> String {
pub fn get_line(&self, line: usize) -> (usize, usize, String) { self.lines
if self.newline_offsets.is_empty() { .as_ref()
return (0, 0, self.last_source.as_ref().cloned().unwrap()); .and_then(|x| x.get(line).map(|s| s.to_string()))
} .unwrap_or_else(|| "NO LINE FOUND".to_string())
//TODO make sure this is utf8-safe
let start_idx = match self.newline_offsets.binary_search(&line) {
Ok(index) | Err(index) => index,
};
let last_source = self.last_source.as_ref().unwrap();
let start = self.newline_offsets[start_idx];
let end = self.newline_offsets.get(start_idx + 1).cloned().unwrap_or_else(|| last_source.len());
let slice = &last_source.as_bytes()[start..end];
(start, start_idx, std::str::from_utf8(slice).unwrap().to_string())
} }
} }
#[allow(dead_code)] #[allow(dead_code)]
#[derive(Clone, Copy, Debug)] #[derive(Clone, Copy, Debug)]
pub(crate) enum Stage { pub(crate) enum Stage {
Tokenizing,
Parsing, Parsing,
Symbols, Symbols,
ScopeResolution, ScopeResolution,
@ -164,7 +156,7 @@ pub(crate) enum Stage {
} }
fn stage_names() -> Vec<&'static str> { fn stage_names() -> Vec<&'static str> {
vec!["parsing", "symbol-table", "typechecking", "ast-reduction", "ast-walking-evaluation"] vec!["tokenizing", "parsing", "symbol-table", "typechecking", "ast-reduction", "ast-walking-evaluation"]
} }
#[derive(Default, Clone)] #[derive(Default, Clone)]
@ -185,13 +177,14 @@ impl<'a> ProgrammingLanguageInterface for Schala<'a> {
fn run_computation(&mut self, request: ComputationRequest<Self::Config>) -> ComputationResponse { fn run_computation(&mut self, request: ComputationRequest<Self::Config>) -> ComputationResponse {
let ComputationRequest { source, debug_requests: _, config: _ } = request; let ComputationRequest { source, debug_requests: _, config: _ } = request;
self.source_reference.load_new_source(source);
let sw = Stopwatch::start_new(); let sw = Stopwatch::start_new();
let main_output = let main_output =
self.run_pipeline(source, request.config).map_err(|schala_err| schala_err.display()); self.run_pipeline(source, request.config).map_err(|schala_err| schala_err.display());
let total_duration = sw.elapsed(); let total_duration = sw.elapsed();
let stage_durations: Vec<_> = std::mem::take(&mut self.timings) let stage_durations: Vec<_> = std::mem::replace(&mut self.timings, vec![])
.into_iter() .into_iter()
.map(|(label, duration)| (label.to_string(), duration)) .map(|(label, duration)| (label.to_string(), duration))
.collect(); .collect();

View File

@ -17,12 +17,6 @@ impl Fqsn {
Fqsn { scopes: v } Fqsn { scopes: v }
} }
pub fn extend(&self, new_item: &str) -> Self {
let mut new = self.clone();
new.scopes.push(ScopeSegment::Name(Rc::new(new_item.to_string())));
new
}
#[allow(dead_code)] #[allow(dead_code)]
pub fn from_strs(strs: &[&str]) -> Fqsn { pub fn from_strs(strs: &[&str]) -> Fqsn {
let mut scopes = vec![]; let mut scopes = vec![];

View File

@ -10,7 +10,7 @@ use crate::{
ast, ast,
ast::ItemId, ast::ItemId,
builtin::Builtin, builtin::Builtin,
parsing::Location, tokenizing::Location,
type_inference::{TypeContext, TypeId}, type_inference::{TypeContext, TypeId},
}; };
@ -32,10 +32,9 @@ pub type DefId = Id<DefItem>;
pub enum SymbolError { pub enum SymbolError {
DuplicateName { prev_name: Fqsn, location: Location }, DuplicateName { prev_name: Fqsn, location: Location },
DuplicateVariant { type_fqsn: Fqsn, name: String }, DuplicateVariant { type_fqsn: Fqsn, name: String },
DuplicateRecord { type_fqsn: Fqsn, location: Location, record: String, member: String }, DuplicateRecord { type_name: Fqsn, location: Location, member: String },
UnknownAnnotation { name: String }, UnknownAnnotation { name: String },
BadAnnotation { name: String, msg: String }, BadAnnotation { name: String, msg: String },
BadImplBlockEntry,
} }
#[allow(dead_code)] #[allow(dead_code)]
@ -53,10 +52,7 @@ enum NameKind {
} }
#[derive(Debug)] #[derive(Debug)]
enum TypeKind { struct TypeKind;
Function,
Constructor,
}
/// Keeps track of what names were used in a given namespace. /// Keeps track of what names were used in a given namespace.
struct NameTable<K> { struct NameTable<K> {
@ -118,7 +114,7 @@ impl SymbolTable {
) -> Result<(), Vec<SymbolError>> { ) -> Result<(), Vec<SymbolError>> {
let mut populator = SymbolTablePopulator { type_context, table: self }; let mut populator = SymbolTablePopulator { type_context, table: self };
let errs = populator.populate_definition_tables(ast); let errs = populator.populate_name_tables(ast);
if !errs.is_empty() { if !errs.is_empty() {
return Err(errs); return Err(errs);
} }
@ -157,8 +153,7 @@ impl SymbolTable {
/// to a Symbol, a descriptor of what that name refers to. /// to a Symbol, a descriptor of what that name refers to.
fn add_symbol(&mut self, id: &ItemId, fqsn: Fqsn, spec: SymbolSpec) { fn add_symbol(&mut self, id: &ItemId, fqsn: Fqsn, spec: SymbolSpec) {
let def_id = self.def_id_store.fresh(); let def_id = self.def_id_store.fresh();
let local_name = fqsn.last_elem(); let symbol = Rc::new(Symbol { fully_qualified_name: fqsn.clone(), spec, def_id });
let symbol = Rc::new(Symbol { fully_qualified_name: fqsn.clone(), local_name, spec, def_id });
self.symbol_trie.insert(&fqsn, def_id); self.symbol_trie.insert(&fqsn, def_id);
self.id_to_def.insert(*id, def_id); self.id_to_def.insert(*id, def_id);
self.def_to_symbol.insert(def_id, symbol); self.def_to_symbol.insert(def_id, symbol);
@ -167,8 +162,7 @@ impl SymbolTable {
fn populate_single_builtin(&mut self, fqsn: Fqsn, builtin: Builtin) { fn populate_single_builtin(&mut self, fqsn: Fqsn, builtin: Builtin) {
let def_id = self.def_id_store.fresh(); let def_id = self.def_id_store.fresh();
let spec = SymbolSpec::Builtin(builtin); let spec = SymbolSpec::Builtin(builtin);
let local_name = fqsn.last_elem(); let symbol = Rc::new(Symbol { fully_qualified_name: fqsn.clone(), spec, def_id });
let symbol = Rc::new(Symbol { fully_qualified_name: fqsn.clone(), local_name, spec, def_id });
self.symbol_trie.insert(&fqsn, def_id); self.symbol_trie.insert(&fqsn, def_id);
self.def_to_symbol.insert(def_id, symbol); self.def_to_symbol.insert(def_id, symbol);
@ -179,18 +173,17 @@ impl SymbolTable {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct Symbol { pub struct Symbol {
fully_qualified_name: Fqsn, fully_qualified_name: Fqsn,
local_name: Rc<String>,
spec: SymbolSpec, spec: SymbolSpec,
def_id: DefId, def_id: DefId,
} }
impl Symbol { impl Symbol {
pub fn local_name(&self) -> &str { pub fn local_name(&self) -> Rc<String> {
self.local_name.as_ref() self.fully_qualified_name.last_elem()
} }
pub fn def_id(&self) -> DefId { pub fn def_id(&self) -> Option<DefId> {
self.def_id Some(self.def_id)
} }
pub fn spec(&self) -> SymbolSpec { pub fn spec(&self) -> SymbolSpec {
@ -219,7 +212,7 @@ impl fmt::Display for Symbol {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum SymbolSpec { pub enum SymbolSpec {
Builtin(Builtin), Builtin(Builtin),
Func { method: Option<crate::ast::TypeSingletonName> }, Func,
DataConstructor { tag: u32, type_id: TypeId }, DataConstructor { tag: u32, type_id: TypeId },
RecordConstructor { tag: u32, type_id: TypeId }, RecordConstructor { tag: u32, type_id: TypeId },
GlobalBinding, //Only for global variables, not for function-local ones or ones within a `let` scope context GlobalBinding, //Only for global variables, not for function-local ones or ones within a `let` scope context
@ -232,7 +225,7 @@ impl fmt::Display for SymbolSpec {
use self::SymbolSpec::*; use self::SymbolSpec::*;
match self { match self {
Builtin(b) => write!(f, "Builtin: {:?}", b), Builtin(b) => write!(f, "Builtin: {:?}", b),
Func { .. } => write!(f, "Func"), Func => write!(f, "Func"),
DataConstructor { tag, type_id } => write!(f, "DataConstructor(tag: {}, type: {})", tag, type_id), DataConstructor { tag, type_id } => write!(f, "DataConstructor(tag: {}, type: {})", tag, type_id),
RecordConstructor { type_id, tag, .. } => RecordConstructor { type_id, tag, .. } =>
write!(f, "RecordConstructor(tag: {})(<members> -> {})", tag, type_id), write!(f, "RecordConstructor(tag: {})(<members> -> {})", tag, type_id),

View File

@ -11,7 +11,7 @@ use crate::{
TypeSingletonName, Variant, VariantKind, AST, TypeSingletonName, Variant, VariantKind, AST,
}, },
builtin::Builtin, builtin::Builtin,
parsing::Location, tokenizing::Location,
type_inference::{self, PendingType, TypeBuilder, TypeContext, VariantBuilder}, type_inference::{self, PendingType, TypeBuilder, TypeContext, VariantBuilder},
}; };
@ -32,14 +32,14 @@ impl<'a> SymbolTablePopulator<'a> {
/// constants, functions, types, and modules defined within. This simultaneously /// constants, functions, types, and modules defined within. This simultaneously
/// checks for dupicate definitions (and returns errors if discovered), and sets /// checks for dupicate definitions (and returns errors if discovered), and sets
/// up name tables that will be used by further parts of the compiler /// up name tables that will be used by further parts of the compiler
pub fn populate_definition_tables(&mut self, ast: &AST) -> Vec<SymbolError> { pub fn populate_name_tables(&mut self, ast: &AST) -> Vec<SymbolError> {
let mut scope_stack = vec![]; let mut scope_stack = vec![];
self.add_from_scope(ast.statements.as_ref(), &mut scope_stack, false) self.add_from_scope(ast.statements.as_ref(), &mut scope_stack, false)
} }
fn add_from_scope( fn add_from_scope(
&mut self, &mut self,
statements: &[Statement<StatementKind>], statements: &[Statement],
scope_stack: &mut Vec<ScopeSegment>, scope_stack: &mut Vec<ScopeSegment>,
function_scope: bool, function_scope: bool,
) -> Vec<SymbolError> { ) -> Vec<SymbolError> {
@ -51,58 +51,24 @@ impl<'a> SymbolTablePopulator<'a> {
if let Err(err) = self.add_single_statement(id, kind, location, scope_stack, function_scope) { if let Err(err) = self.add_single_statement(id, kind, location, scope_stack, function_scope) {
errors.push(err); errors.push(err);
} else { } else {
let decl = match kind {
StatementKind::Declaration(decl) => decl,
_ => continue,
};
// If there's an error with a name, don't recurse into subscopes of that name // If there's an error with a name, don't recurse into subscopes of that name
let recursive_errs = match decl { let recursive_errs = match kind {
Declaration::FuncDecl(signature, body) => { StatementKind::Declaration(Declaration::FuncDecl(signature, body)) => {
let new_scope = ScopeSegment::Name(signature.name.clone()); let new_scope = ScopeSegment::Name(signature.name.clone());
scope_stack.push(new_scope); scope_stack.push(new_scope);
let output = self.add_from_scope(body.as_ref(), scope_stack, true); let output = self.add_from_scope(body.as_ref(), scope_stack, true);
scope_stack.pop(); scope_stack.pop();
output output
} }
Declaration::Module { name, items } => { StatementKind::Declaration(Declaration::Module { name, items }) => {
let new_scope = ScopeSegment::Name(name.clone()); let new_scope = ScopeSegment::Name(name.clone());
scope_stack.push(new_scope); scope_stack.push(new_scope);
let output = self.add_from_scope(items.as_ref(), scope_stack, false); let output = self.add_from_scope(items.as_ref(), scope_stack, false);
scope_stack.pop(); scope_stack.pop();
output output
} }
Declaration::TypeDecl { name, body, mutable } => { StatementKind::Declaration(Declaration::TypeDecl { name, body, mutable }) =>
let type_fqsn = Fqsn::from_scope_stack(scope_stack, name.name.clone()); self.add_type_members(name, body, mutable, location, scope_stack),
self.add_type_members(name, body, mutable, location, type_fqsn)
}
Declaration::Impl { type_name, interface_name: _, block } => {
let mut errors = vec![];
let new_scope = ScopeSegment::Name(Rc::new(format!("<impl-block>{}", type_name)));
scope_stack.push(new_scope);
for decl_stmt in block.iter() {
let Statement { id, kind, location } = decl_stmt;
let location = *location;
match kind {
decl @ Declaration::FuncDecl(signature, body) => {
let output =
self.add_single_declaration(id, decl, location, scope_stack, true);
if let Err(e) = output {
errors.push(e);
};
let new_scope = ScopeSegment::Name(signature.name.clone());
scope_stack.push(new_scope);
let output = self.add_from_scope(body.as_ref(), scope_stack, true);
scope_stack.pop();
errors.extend(output.into_iter());
}
_other => errors.push(SymbolError::BadImplBlockEntry),
};
}
scope_stack.pop();
errors
}
_ => vec![], _ => vec![],
}; };
errors.extend(recursive_errs.into_iter()); errors.extend(recursive_errs.into_iter());
@ -121,51 +87,30 @@ impl<'a> SymbolTablePopulator<'a> {
function_scope: bool, function_scope: bool,
) -> Result<(), SymbolError> { ) -> Result<(), SymbolError> {
match kind { match kind {
StatementKind::Declaration(decl) => StatementKind::Declaration(Declaration::FuncSig(signature)) => {
self.add_single_declaration(id, decl, location, scope_stack, function_scope),
_ => return Ok(()),
}
}
fn add_single_declaration(
&mut self,
id: &ItemId,
decl: &Declaration,
location: Location,
scope_stack: &[ScopeSegment],
function_scope: bool,
) -> Result<(), SymbolError> {
match decl {
Declaration::FuncSig(signature) => {
let fq_function = Fqsn::from_scope_stack(scope_stack, signature.name.clone()); let fq_function = Fqsn::from_scope_stack(scope_stack, signature.name.clone());
self.table self.table
.fq_names .fq_names
.register(fq_function.clone(), NameSpec { location, kind: NameKind::Function })?; .register(fq_function.clone(), NameSpec { location, kind: NameKind::Function })?;
self.table self.table.types.register(fq_function.clone(), NameSpec { location, kind: TypeKind })?;
.types
.register(fq_function.clone(), NameSpec { location, kind: TypeKind::Function })?;
self.add_symbol(id, fq_function, SymbolSpec::Func { method: None }); self.add_symbol(id, fq_function, SymbolSpec::Func);
} }
Declaration::FuncDecl(signature, ..) => { StatementKind::Declaration(Declaration::FuncDecl(signature, ..)) => {
let fn_name = &signature.name; let fn_name = &signature.name;
let fq_function = Fqsn::from_scope_stack(scope_stack, fn_name.clone()); let fq_function = Fqsn::from_scope_stack(scope_stack, fn_name.clone());
self.table self.table
.fq_names .fq_names
.register(fq_function.clone(), NameSpec { location, kind: NameKind::Function })?; .register(fq_function.clone(), NameSpec { location, kind: NameKind::Function })?;
self.table self.table.types.register(fq_function.clone(), NameSpec { location, kind: TypeKind })?;
.types
.register(fq_function.clone(), NameSpec { location, kind: TypeKind::Function })?;
self.add_symbol(id, fq_function, SymbolSpec::Func { method: None }); self.add_symbol(id, fq_function, SymbolSpec::Func);
} }
Declaration::TypeDecl { name, .. } => { StatementKind::Declaration(Declaration::TypeDecl { name, .. }) => {
let fq_type = Fqsn::from_scope_stack(scope_stack, name.name.clone()); let fq_type = Fqsn::from_scope_stack(scope_stack, name.name.clone());
self.table.types.register(fq_type, NameSpec { location, kind: TypeKind::Constructor })?; self.table.types.register(fq_type, NameSpec { location, kind: TypeKind })?;
} }
//TODO handle type aliases StatementKind::Declaration(Declaration::Binding { name, .. }) => {
Declaration::TypeAlias { .. } => (),
Declaration::Binding { name, .. } => {
let fq_binding = Fqsn::from_scope_stack(scope_stack, name.clone()); let fq_binding = Fqsn::from_scope_stack(scope_stack, name.clone());
self.table self.table
.fq_names .fq_names
@ -174,14 +119,11 @@ impl<'a> SymbolTablePopulator<'a> {
self.add_symbol(id, fq_binding, SymbolSpec::GlobalBinding); self.add_symbol(id, fq_binding, SymbolSpec::GlobalBinding);
} }
} }
//TODO implement interfaces StatementKind::Declaration(Declaration::Module { name, .. }) => {
Declaration::Interface { .. } => (),
Declaration::Impl { .. } => (),
Declaration::Module { name, .. } => {
let fq_module = Fqsn::from_scope_stack(scope_stack, name.clone()); let fq_module = Fqsn::from_scope_stack(scope_stack, name.clone());
self.table.fq_names.register(fq_module, NameSpec { location, kind: NameKind::Module })?; self.table.fq_names.register(fq_module, NameSpec { location, kind: NameKind::Module })?;
} }
Declaration::Annotation { name, arguments, inner } => { StatementKind::Declaration(Declaration::Annotation { name, arguments, inner }) => {
let inner = inner.as_ref(); let inner = inner.as_ref();
self.add_single_statement( self.add_single_statement(
&inner.id, &inner.id,
@ -192,6 +134,7 @@ impl<'a> SymbolTablePopulator<'a> {
)?; )?;
self.process_annotation(name.as_ref(), arguments.as_slice(), scope_stack, inner)?; self.process_annotation(name.as_ref(), arguments.as_slice(), scope_stack, inner)?;
} }
_ => (),
} }
Ok(()) Ok(())
} }
@ -201,7 +144,7 @@ impl<'a> SymbolTablePopulator<'a> {
name: &str, name: &str,
arguments: &[Expression], arguments: &[Expression],
scope_stack: &[ScopeSegment], scope_stack: &[ScopeSegment],
inner: &Statement<StatementKind>, inner: &Statement,
) -> Result<(), SymbolError> { ) -> Result<(), SymbolError> {
if name == "register_builtin" { if name == "register_builtin" {
if let Statement { if let Statement {
@ -247,11 +190,11 @@ impl<'a> SymbolTablePopulator<'a> {
type_body: &TypeBody, type_body: &TypeBody,
_mutable: &bool, _mutable: &bool,
location: Location, location: Location,
type_fqsn: Fqsn, scope_stack: &mut Vec<ScopeSegment>,
) -> Vec<SymbolError> { ) -> Vec<SymbolError> {
let (variants, immediate_variant) = match type_body { let (variants, immediate_variant) = match type_body {
TypeBody::Variants(variants) => (variants.clone(), false), TypeBody::Variants(variants) => (variants.clone(), false),
TypeBody::ImmediateRecord { id, fields } => ( TypeBody::ImmediateRecord(id, fields) => (
vec![Variant { vec![Variant {
id: *id, id: *id,
name: type_name.name.clone(), name: type_name.name.clone(),
@ -260,6 +203,10 @@ impl<'a> SymbolTablePopulator<'a> {
true, true,
), ),
}; };
let type_fqsn = Fqsn::from_scope_stack(scope_stack, type_name.name.clone());
let new_scope = ScopeSegment::Name(type_name.name.clone());
scope_stack.push(new_scope);
// Check for duplicates before registering any types with the TypeContext // Check for duplicates before registering any types with the TypeContext
let mut seen_variants = HashSet::new(); let mut seen_variants = HashSet::new();
@ -275,15 +222,15 @@ impl<'a> SymbolTablePopulator<'a> {
seen_variants.insert(variant.name.clone()); seen_variants.insert(variant.name.clone());
if let VariantKind::Record(ref members) = variant.kind { if let VariantKind::Record(ref members) = variant.kind {
let variant_name = Fqsn::from_scope_stack(scope_stack.as_ref(), variant.name.clone());
let mut seen_members = HashMap::new(); let mut seen_members = HashMap::new();
for (member_name, _) in members.iter() { for (member_name, _) in members.iter() {
match seen_members.entry(member_name.as_ref()) { match seen_members.entry(member_name.as_ref()) {
Entry::Occupied(o) => { Entry::Occupied(o) => {
let location = *o.get(); let location = *o.get();
errors.push(SymbolError::DuplicateRecord { errors.push(SymbolError::DuplicateRecord {
type_fqsn: type_fqsn.clone(), type_name: variant_name.clone(),
location, location,
record: variant.name.as_ref().to_string(),
member: member_name.as_ref().to_string(), member: member_name.as_ref().to_string(),
}); });
} }
@ -302,11 +249,11 @@ impl<'a> SymbolTablePopulator<'a> {
let mut type_builder = TypeBuilder::new(type_name.name.as_ref()); let mut type_builder = TypeBuilder::new(type_name.name.as_ref());
let mut variant_name_map = HashMap::new(); let mut fqsn_id_map = HashMap::new();
for variant in variants.iter() { for variant in variants.iter() {
let Variant { name, kind, id } = variant; let Variant { name, kind, id } = variant;
variant_name_map.insert(name.clone(), id); fqsn_id_map.insert(Fqsn::from_scope_stack(scope_stack.as_ref(), name.clone()), id);
let mut variant_builder = VariantBuilder::new(name.as_ref()); let mut variant_builder = VariantBuilder::new(name.as_ref());
match kind { match kind {
@ -330,23 +277,30 @@ impl<'a> SymbolTablePopulator<'a> {
// This index is guaranteed to be the correct tag // This index is guaranteed to be the correct tag
for (index, variant) in type_definition.variants.iter().enumerate() { for (index, variant) in type_definition.variants.iter().enumerate() {
let id = variant_name_map.get(&variant.name).unwrap(); let fqsn = Fqsn::from_scope_stack(scope_stack.as_ref(), Rc::new(variant.name.to_string()));
let id = fqsn_id_map.get(&fqsn).unwrap();
let tag = index as u32; let tag = index as u32;
let spec = match &variant.members { let spec = match &variant.members {
type_inference::VariantMembers::Unit => SymbolSpec::DataConstructor { tag, type_id }, type_inference::VariantMembers::Unit => SymbolSpec::DataConstructor { tag, type_id },
type_inference::VariantMembers::Tuple(..) => SymbolSpec::DataConstructor { tag, type_id }, type_inference::VariantMembers::Tuple(..) => SymbolSpec::DataConstructor { tag, type_id },
type_inference::VariantMembers::Record(..) => SymbolSpec::RecordConstructor { tag, type_id }, type_inference::VariantMembers::Record(..) => SymbolSpec::RecordConstructor { tag, type_id },
}; };
self.table.add_symbol(id, type_fqsn.extend(&variant.name), spec); self.table.add_symbol(id, fqsn, spec);
} }
if immediate_variant { if immediate_variant {
let variant = &type_definition.variants[0]; let variant = &type_definition.variants[0];
let id = variant_name_map.get(&variant.name).unwrap(); let fqsn = Fqsn::from_scope_stack(scope_stack.as_ref(), Rc::new(variant.name.to_string()));
let id = fqsn_id_map.get(&fqsn).unwrap();
let abbrev_fqsn = Fqsn::from_scope_stack(
scope_stack[0..scope_stack.len() - 1].as_ref(),
Rc::new(variant.name.to_string()),
);
let spec = SymbolSpec::RecordConstructor { tag: 0, type_id }; let spec = SymbolSpec::RecordConstructor { tag: 0, type_id };
self.table.add_symbol(id, type_fqsn, spec); self.table.add_symbol(id, abbrev_fqsn, spec);
} }
scope_stack.pop();
vec![] vec![]
} }
} }

View File

@ -15,20 +15,19 @@ enum NameType {
Import(Fqsn), Import(Fqsn),
} }
type LexScope<'a> = ScopeStack<'a, Rc<String>, NameType, ScopeType>;
#[derive(Debug)] #[derive(Debug)]
enum ScopeType { enum ScopeType {
Function { name: Rc<String> }, Function { name: Rc<String> },
Lambda, Lambda,
PatternMatch, PatternMatch,
ImplBlock,
//TODO add some notion of a let-like scope? //TODO add some notion of a let-like scope?
} }
pub struct ScopeResolver<'a> { pub struct ScopeResolver<'a> {
symbol_table: &'a mut super::SymbolTable, symbol_table: &'a mut super::SymbolTable,
lexical_scopes: LexScope<'a>, //TODO maybe this shouldn't be a scope stack, b/c the recursion behavior comes from multiple
//instances of ScopeResolver
lexical_scopes: ScopeStack<'a, Rc<String>, NameType, ScopeType>,
} }
impl<'a> ScopeResolver<'a> { impl<'a> ScopeResolver<'a> {
@ -44,7 +43,6 @@ impl<'a> ScopeResolver<'a> {
/// This method correctly modifies the id_to_def table (ItemId) to have the appropriate /// This method correctly modifies the id_to_def table (ItemId) to have the appropriate
/// mappings. /// mappings.
fn lookup_name_in_scope(&mut self, name: &QualifiedName) { fn lookup_name_in_scope(&mut self, name: &QualifiedName) {
//TODO this method badly needs attention
let QualifiedName { id, components } = name; let QualifiedName { id, components } = name;
let local_name = components.first().unwrap().clone(); let local_name = components.first().unwrap().clone();
@ -57,7 +55,7 @@ impl<'a> ScopeResolver<'a> {
if components.len() == 1 { if components.len() == 1 {
match name_type { match name_type {
Some(NameType::Import(fqsn)) => { Some(NameType::Import(fqsn)) => {
let def_id = self.symbol_table.symbol_trie.lookup(fqsn); let def_id = self.symbol_table.symbol_trie.lookup(&fqsn);
if let Some(def_id) = def_id { if let Some(def_id) = def_id {
self.symbol_table.id_to_def.insert(*id, def_id); self.symbol_table.id_to_def.insert(*id, def_id);
@ -73,14 +71,14 @@ impl<'a> ScopeResolver<'a> {
Some(NameType::LocalFunction(item_id)) => { Some(NameType::LocalFunction(item_id)) => {
let def_id = self.symbol_table.id_to_def.get(item_id); let def_id = self.symbol_table.id_to_def.get(item_id);
if let Some(def_id) = def_id { if let Some(def_id) = def_id {
let def_id = *def_id; let def_id = def_id.clone();
self.symbol_table.id_to_def.insert(*id, def_id); self.symbol_table.id_to_def.insert(*id, def_id);
} }
} }
Some(NameType::LocalVariable(item_id)) => { Some(NameType::LocalVariable(item_id)) => {
let def_id = self.symbol_table.id_to_def.get(item_id); let def_id = self.symbol_table.id_to_def.get(item_id);
if let Some(def_id) = def_id { if let Some(def_id) = def_id {
let def_id = *def_id; let def_id = def_id.clone();
self.symbol_table.id_to_def.insert(*id, def_id); self.symbol_table.id_to_def.insert(*id, def_id);
} }
} }
@ -89,8 +87,10 @@ impl<'a> ScopeResolver<'a> {
self.symbol_table.id_to_def.insert(*id, def_id); self.symbol_table.id_to_def.insert(*id, def_id);
}, },
} }
} else if let Some(def_id) = def_id { } else {
self.symbol_table.id_to_def.insert(*id, def_id); if let Some(def_id) = def_id {
self.symbol_table.id_to_def.insert(*id, def_id);
}
} }
} }
} }
@ -140,8 +140,6 @@ impl<'a> ASTVisitor for ScopeResolver<'a> {
let param_names = signature.params.iter().map(|param| param.name.clone()); let param_names = signature.params.iter().map(|param| param.name.clone());
//TODO I'm 90% sure this is right, until I get to closures //TODO I'm 90% sure this is right, until I get to closures
//let mut new_scope = self.lexical_scopes.new_scope(Some(ScopeType::Function { name: signature.name.clone() })); //let mut new_scope = self.lexical_scopes.new_scope(Some(ScopeType::Function { name: signature.name.clone() }));
//TODO this will recurse unwantedly into scopes; need to pop an outer function
//scope off first before going into a non-closure scope
let mut new_scope = let mut new_scope =
ScopeStack::new(Some(ScopeType::Function { name: signature.name.clone() })); ScopeStack::new(Some(ScopeType::Function { name: signature.name.clone() }));
@ -166,15 +164,6 @@ impl<'a> ASTVisitor for ScopeResolver<'a> {
} }
Recursion::Continue Recursion::Continue
} }
Declaration::Impl { block, .. } => {
let new_scope = ScopeStack::new(Some(ScopeType::ImplBlock));
let mut new_resolver =
ScopeResolver { symbol_table: self.symbol_table, lexical_scopes: new_scope };
for stmt in block.iter() {
walk_declaration(&mut new_resolver, &stmt.kind, &stmt.id);
}
Recursion::Stop
}
_ => Recursion::Continue, _ => Recursion::Continue,
} }
} }

View File

@ -2,7 +2,7 @@
use assert_matches::assert_matches; use assert_matches::assert_matches;
use super::*; use super::*;
use crate::util::quick_ast; use crate::{tokenizing::Location, util::quick_ast};
fn add_symbols(src: &str) -> (SymbolTable, Result<(), Vec<SymbolError>>) { fn add_symbols(src: &str) -> (SymbolTable, Result<(), Vec<SymbolError>>) {
let ast = quick_ast(src); let ast = quick_ast(src);
@ -79,11 +79,9 @@ fn no_type_definition_duplicates() {
let err = &errs[0]; let err = &errs[0];
match err { match err {
SymbolError::DuplicateName { location: _, prev_name } => { SymbolError::DuplicateName { location, prev_name } => {
assert_eq!(prev_name, &Fqsn::from_strs(&["Food"])); assert_eq!(prev_name, &Fqsn::from_strs(&["Food"]));
assert_eq!(location, &Location { line_num: 2, char_num: 2 });
//TODO restore this Location test
//assert_eq!(location, &Location { line_num: 2, char_num: 2 });
} }
_ => panic!(), _ => panic!(),
} }
@ -137,7 +135,7 @@ fn dont_falsely_detect_duplicates() {
let a = 40; let a = 40;
77 77
} }
let q = 39 let q = 39;
"#; "#;
let (symbols, _) = add_symbols(source); let (symbols, _) = add_symbols(source);
@ -173,8 +171,7 @@ fn second_inner_func() {
} }
inner_func(x) inner_func(x)
} }"#;
"#;
let (symbols, _) = add_symbols(source); let (symbols, _) = add_symbols(source);
assert!(symbols.fq_names.table.get(&make_fqsn(&["outer_func"])).is_some()); assert!(symbols.fq_names.table.get(&make_fqsn(&["outer_func"])).is_some());
assert!(symbols.fq_names.table.get(&make_fqsn(&["outer_func", "inner_func"])).is_some()); assert!(symbols.fq_names.table.get(&make_fqsn(&["outer_func", "inner_func"])).is_some());
@ -190,8 +187,7 @@ inner_func(x)
fn enclosing_scopes_3() { fn enclosing_scopes_3() {
let source = r#" let source = r#"
fn outer_func(x) { fn outer_func(x) {
fn inner_func(arg) {
fn inner_func(arg) {
arg arg
} }
@ -254,61 +250,17 @@ fn duplicate_modules() {
fn duplicate_struct_members() { fn duplicate_struct_members() {
let source = r#" let source = r#"
type Tarak = Tarak { type Tarak = Tarak {
loujet: i32 loujet: i32,
, mets: i32,
mets: i32, mets: i32,
mets: i32
,
} }
"#; "#;
let (_, output) = add_symbols(source); let (_, output) = add_symbols(source);
let errs = dbg!(output.unwrap_err());
assert_matches!(&errs[..], [
SymbolError::DuplicateRecord {
type_fqsn, member, record, ..},
] if type_fqsn == &Fqsn::from_strs(&["Tarak"]) && member == "mets" && record == "Tarak"
);
}
#[test]
fn method_definition_added_to_symbol_table() {
let source = r#"
type Foo = { x: Int, y: Int }
impl Foo {
fn hella() {
let a = 50
self.x + a
}
}
"#;
let (symbols, _) = add_symbols(source);
symbols.debug();
assert!(symbols.fq_names.table.get(&make_fqsn(&["<impl-block>Foo", "hella"])).is_some());
assert!(symbols.fq_names.table.get(&make_fqsn(&["<impl-block>Foo", "hella", "a"])).is_some());
}
#[test]
fn duplicate_method_definitions_detected() {
let source = r#"
type Foo = { x: Int, y: Int }
impl Foo {
fn hella() {
self.x + 50
}
fn hella() {
self.x + 40
}
}
"#;
let (_symbols, output) = add_symbols(source);
let errs = output.unwrap_err(); let errs = output.unwrap_err();
assert_matches!(&errs[..], [ assert_matches!(&errs[..], [
SymbolError::DuplicateName { prev_name: pn1, ..}, SymbolError::DuplicateRecord {
] if pn1 == &Fqsn::from_strs(&["<impl-block>Foo", "hella"])); type_name, member, ..},
] if type_name == &Fqsn::from_strs(&["Tarak", "Tarak"]) && member == "mets"
);
} }

View File

@ -0,0 +1,460 @@
#![allow(clippy::upper_case_acronyms)]
use std::{
convert::{TryFrom, TryInto},
fmt,
iter::{Iterator, Peekable},
rc::Rc,
};
use itertools::Itertools;
/// A location in a particular source file. Note that the
/// sizes of the internal unsigned integer types limit
/// the size of a source file to 2^32 lines of
/// at most 2^16 characters, which should be plenty big.
#[derive(Debug, Clone, Copy, PartialEq, Default)]
pub struct Location {
    pub(crate) line_num: u32,
    pub(crate) char_num: u16,
}

impl fmt::Display for Location {
    /// Renders the location as `line:char`, in field order.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let Location { line_num, char_num } = self;
        write!(f, "{}:{}", line_num, char_num)
    }
}
/// Every kind of token the lexer can produce. Payload-carrying variants hold
/// their source text in an `Rc<String>`; lexical problems are carried in-band
/// via the `Error` variant.
#[derive(Debug, PartialEq, Clone)]
pub enum TokenKind {
    Newline,
    Semicolon,

    LParen,
    RParen,
    LSquareBracket,
    RSquareBracket,
    LAngleBracket,
    RAngleBracket,
    LCurlyBrace,
    RCurlyBrace,
    Pipe,
    Backslash,
    AtSign,

    Comma,
    Period,
    Colon,
    Underscore,
    Slash,
    Equals,

    Operator(Rc<String>),
    DigitGroup(Rc<String>),
    HexLiteral(Rc<String>),
    BinNumberSigil,
    StrLiteral { s: Rc<String>, prefix: Option<Rc<String>> },
    Identifier(Rc<String>),
    Keyword(Kw),

    EOF,

    Error(String),
}
use self::TokenKind::*;

impl fmt::Display for TokenKind {
    /// Payload-carrying variants print as `Name(payload)` using the payload's
    /// `Display`; every other variant falls back to its `Debug` form.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Operator(s) => write!(f, "Operator({})", s),
            DigitGroup(s) => write!(f, "DigitGroup({})", s),
            HexLiteral(s) => write!(f, "HexLiteral({})", s),
            StrLiteral { s, .. } => write!(f, "StrLiteral({})", s),
            Identifier(s) => write!(f, "Identifier({})", s),
            Error(s) => write!(f, "Error({})", s),
            other => write!(f, "{:?}", other),
        }
    }
}
/// The reserved keywords of the language.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Kw {
    If,
    Then,
    Else,
    Is,

    Func,
    For,
    While,
    Let,
    In,
    Mut,
    Return,
    Continue,
    Break,

    Alias,
    Type,
    SelfType,
    SelfIdent,
    Interface,
    Impl,
    True,
    False,

    Module,
    Import,
}

impl TryFrom<&str> for Kw {
    type Error = ();

    /// Maps a source-level word to its keyword, or `Err(())` if the word is
    /// not reserved (and is therefore an ordinary identifier).
    fn try_from(value: &str) -> Result<Self, Self::Error> {
        // Keyword spellings and their corresponding variants; note that the
        // spelling does not always match the variant name (`fn` -> `Func`,
        // `Self` -> `SelfType`, `self` -> `SelfIdent`).
        const KEYWORDS: &[(&str, Kw)] = &[
            ("if", Kw::If),
            ("then", Kw::Then),
            ("else", Kw::Else),
            ("is", Kw::Is),
            ("fn", Kw::Func),
            ("for", Kw::For),
            ("while", Kw::While),
            ("let", Kw::Let),
            ("in", Kw::In),
            ("mut", Kw::Mut),
            ("return", Kw::Return),
            ("break", Kw::Break),
            ("continue", Kw::Continue),
            ("alias", Kw::Alias),
            ("type", Kw::Type),
            ("Self", Kw::SelfType),
            ("self", Kw::SelfIdent),
            ("interface", Kw::Interface),
            ("impl", Kw::Impl),
            ("true", Kw::True),
            ("false", Kw::False),
            ("module", Kw::Module),
            ("import", Kw::Import),
        ];
        KEYWORDS.iter().find(|(text, _)| *text == value).map(|(_, kw)| *kw).ok_or(())
    }
}
/// A single lexed token: its kind plus where it appeared in the source.
#[derive(Debug, Clone, PartialEq)]
pub struct Token {
    pub kind: TokenKind,
    pub(crate) location: Location,
}

impl Token {
    /// Human-readable rendering that includes the source location,
    /// e.g. `Comma(3:7)`.
    pub fn to_string_with_metadata(&self) -> String {
        format!("{kind}({loc})", kind = self.kind, loc = self.location)
    }

    /// Returns a clone of this token's kind, leaving the token intact.
    pub fn get_kind(&self) -> TokenKind {
        self.kind.clone()
    }
}
/// Every character that may appear inside a user-defined operator token.
const OPERATOR_CHARS: [char; 17] =
    ['!', '$', '%', '&', '*', '+', '-', '.', ':', '<', '>', '=', '?', '^', '|', '~', '`'];

/// True iff `c` can form part of an operator token.
fn is_operator(c: &char) -> bool {
    OPERATOR_CHARS.contains(c)
}

/// (line index, character index within that line, the character itself) —
/// the shape of the items produced by the tokenizer's input iterator.
type CharData = (usize, usize, char);
/// Converts source text into a flat list of `Token`s.
///
/// Lexing never fails outright: malformed input (unclosed comments, unclosed
/// strings, unknown characters) is represented in-band as `TokenKind::Error`
/// tokens carrying a message string.
pub fn tokenize(input: &str) -> Vec<Token> {
    let mut tokens: Vec<Token> = Vec::new();

    // `lines()` strips the newline characters, so a synthetic one-character
    // "\n" line is interspersed between real lines to regenerate Newline
    // tokens. The synthetic entry is `(0, "\n")`, so every Newline token is
    // reported at line_num 0, char_num 0 rather than its true position.
    // NOTE(review): `Iterator::intersperse` is unstable in std and also exists
    // in itertools — confirm which one this resolves to on the project toolchain.
    let mut input = Iterator::intersperse(input.lines().enumerate(), (0, "\n"))
        .flat_map(|(line_idx, line)| line.chars().enumerate().map(move |(ch_idx, ch)| (line_idx, ch_idx, ch)))
        .peekable();

    while let Some((line_num, char_num, c)) = input.next() {
        let cur_tok_kind = match c {
            // '/' may begin a line comment, a nestable block comment, or stand
            // alone as a Slash token.
            '/' => match input.peek().map(|t| t.2) {
                Some('/') => {
                    // Line comment: discard everything up to and including the
                    // terminating newline, then emit nothing.
                    for (_, _, c) in input.by_ref() {
                        if c == '\n' {
                            break;
                        }
                    }
                    continue;
                }
                Some('*') => {
                    // Block comment: `/* ... */` pairs nest; `comment_level`
                    // tracks the current nesting depth.
                    input.next();
                    let mut comment_level = 1;
                    while let Some((_, _, c)) = input.next() {
                        if c == '*' && input.peek().map(|t| t.2) == Some('/') {
                            input.next();
                            comment_level -= 1;
                        } else if c == '/' && input.peek().map(|t| t.2) == Some('*') {
                            input.next();
                            comment_level += 1;
                        }
                        if comment_level == 0 {
                            break;
                        }
                    }
                    // Input ended while still inside a comment.
                    if comment_level != 0 {
                        Error("Unclosed comment".to_string())
                    } else {
                        continue;
                    }
                }
                _ => Slash,
            },
            // Whitespace other than newline is insignificant and emits nothing.
            c if c.is_whitespace() && c != '\n' => continue,
            '\n' => Newline,
            ';' => Semicolon,
            ':' => Colon,
            ',' => Comma,
            '(' => LParen,
            ')' => RParen,
            '{' => LCurlyBrace,
            '}' => RCurlyBrace,
            '[' => LSquareBracket,
            ']' => RSquareBracket,
            '"' => handle_quote(&mut input, None),
            '\\' => Backslash,
            '@' => AtSign,
            // Multi-character token classes are delegated to helpers, which
            // consume the remainder of the token from the shared iterator.
            c if c.is_digit(10) => handle_digit(c, &mut input),
            c if c.is_alphabetic() || c == '_' => handle_alphabetic(c, &mut input),
            c if is_operator(&c) => handle_operator(c, &mut input),
            unknown => Error(format!("Unexpected character: {}", unknown)),
        };
        // Location fields are u32/u16 (see `Location`), so these `unwrap`s
        // panic only on sources exceeding 2^32 lines or 2^16 chars per line.
        let location =
            Location { line_num: line_num.try_into().unwrap(), char_num: char_num.try_into().unwrap() };
        tokens.push(Token { kind: cur_tok_kind, location });
    }
    tokens
}
fn handle_digit(c: char, input: &mut Peekable<impl Iterator<Item = CharData>>) -> TokenKind {
let next_ch = input.peek().map(|&(_, _, c)| c);
if c == '0' && next_ch == Some('x') {
input.next();
let rest: String = input
.peeking_take_while(|&(_, _, ref c)| c.is_digit(16) || *c == '_')
.map(|(_, _, c)| c)
.collect();
HexLiteral(Rc::new(rest))
} else if c == '0' && next_ch == Some('b') {
input.next();
BinNumberSigil
} else {
let mut buf = c.to_string();
buf.extend(input.peeking_take_while(|&(_, _, ref c)| c.is_digit(10)).map(|(_, _, c)| c));
DigitGroup(Rc::new(buf))
}
}
/// Lexes a string literal body after the opening `"` has been consumed.
/// `quote_prefix` is an optional identifier glued to the opening quote
/// (e.g. the `b` in `b"bytes"`). Supports `\n`, `\t`, and `\"` escapes; an
/// unterminated literal yields an `Error` token.
fn handle_quote(
    input: &mut Peekable<impl Iterator<Item = CharData>>,
    quote_prefix: Option<&str>,
) -> TokenKind {
    let mut buf = String::new();
    loop {
        match input.next().map(|(_, _, c)| c) {
            Some('"') => break,
            Some('\\') => match input.peek().map(|&(_, _, c)| c) {
                // A recognized escape consumes the escaped character; any
                // other sequence silently drops the backslash and leaves the
                // following character to be read normally.
                Some('n') => {
                    input.next();
                    buf.push('\n');
                }
                Some('"') => {
                    input.next();
                    buf.push('"');
                }
                Some('t') => {
                    input.next();
                    buf.push('\t');
                }
                _ => (),
            },
            Some(other) => buf.push(other),
            None => return TokenKind::Error("Unclosed string".to_string()),
        }
    }
    TokenKind::StrLiteral { s: Rc::new(buf), prefix: quote_prefix.map(|s| Rc::new(s.to_string())) }
}
/// Lexes an identifier, keyword, lone underscore, or prefixed string literal
/// (e.g. `b"bytes"`), starting from the already-consumed character `c`.
fn handle_alphabetic(c: char, input: &mut Peekable<impl Iterator<Item = CharData>>) -> TokenKind {
    let mut buf = String::new();
    buf.push(c);

    // A lone underscore — one not immediately followed by an alphabetic
    // character — is its own token. (The original local was misleadingly named
    // `next_is_alphabetic`; it holds the *negation*.)
    let next_not_alphabetic = input.peek().map(|&(_, _, c)| !c.is_alphabetic()).unwrap_or(true);
    if c == '_' && next_not_alphabetic {
        return TokenKind::Underscore;
    }

    loop {
        match input.peek().map(|&(_, _, c)| c) {
            // An immediately-following quote turns what we've read so far into
            // a string-literal prefix.
            Some('"') => {
                input.next();
                return handle_quote(input, Some(&buf));
            }
            Some(ch) if ch.is_alphanumeric() || ch == '_' => {
                input.next();
                buf.push(ch);
            }
            _ => break,
        }
    }

    // Keyword spellings take precedence over plain identifiers.
    match Kw::try_from(buf.as_str()) {
        Ok(kw) => TokenKind::Keyword(kw),
        Err(()) => TokenKind::Identifier(Rc::new(buf)),
    }
}
/// Lexes an operator starting at the already-consumed character `c`.
///
/// The characters `<`, `>`, `|`, `.`, `=` become dedicated structural tokens
/// when NOT followed by another operator character; backtick-quoted
/// identifiers (`` `plus` ``) become named operators with the backticks
/// stripped; everything else is a greedy run of operator characters.
fn handle_operator(c: char, input: &mut Peekable<impl Iterator<Item = CharData>>) -> TokenKind {
    if matches!(c, '<' | '>' | '|' | '.' | '=') {
        let followed_by_op = input.peek().map(|&(_, _, next)| is_operator(&next)).unwrap_or(false);
        if !followed_by_op {
            return match c {
                '<' => LAngleBracket,
                '>' => RAngleBracket,
                '|' => Pipe,
                '.' => Period,
                '=' => Equals,
                _ => unreachable!(),
            };
        }
    }

    let mut buf = String::new();
    if c == '`' {
        // Backtick operator: collect the identifier between the backticks;
        // the delimiters themselves are not part of the operator's name, and
        // a missing closing backtick simply ends the name early.
        while let Some(&(_, _, ch)) = input.peek() {
            if ch.is_alphabetic() || ch == '_' {
                input.next();
                buf.push(ch);
            } else if ch == '`' {
                input.next();
                break;
            } else {
                break;
            }
        }
    } else {
        // Ordinary operator: greedily consume the run of operator characters.
        buf.push(c);
        while let Some(&(_, _, ch)) = input.peek() {
            if is_operator(&ch) {
                input.next();
                buf.push(ch);
            } else {
                break;
            }
        }
    }
    TokenKind::Operator(Rc::new(buf))
}
#[cfg(test)]
mod schala_tokenizer_tests {
    use super::{Kw::*, *};

    // Shorthand constructors for the token kinds that carry an Rc<String>
    // payload, so expected token sequences stay readable.
    macro_rules! digit {
        ($ident:expr) => {
            DigitGroup(Rc::new($ident.to_string()))
        };
    }
    macro_rules! ident {
        ($ident:expr) => {
            Identifier(Rc::new($ident.to_string()))
        };
    }
    macro_rules! op {
        ($ident:expr) => {
            Operator(Rc::new($ident.to_string()))
        };
    }

    // Tokenizes `input` and strips the location metadata, leaving just the
    // token kinds for comparison.
    fn token_kinds(input: &str) -> Vec<TokenKind> {
        tokenize(input).into_iter().map(move |tok| tok.kind).collect()
    }

    // A representative mix: keyword, identifiers, angle brackets as type
    // syntax, and a multi-char user operator.
    #[test]
    fn tokens() {
        let output = token_kinds("let a: A<B> = c ++ d");
        assert_eq!(
            output,
            vec![
                Keyword(Let),
                ident!("a"),
                Colon,
                ident!("A"),
                LAngleBracket,
                ident!("B"),
                RAngleBracket,
                Equals,
                ident!("c"),
                op!("++"),
                ident!("d")
            ]
        );
    }

    // An underscore between digits is its own token; inside an identifier it
    // is part of the name.
    #[test]
    fn underscores() {
        let output = token_kinds("4_8");
        assert_eq!(output, vec![digit!("4"), Underscore, digit!("8")]);

        let output = token_kinds("aba_yo");
        assert_eq!(output, vec![ident!("aba_yo")]);
    }

    // Block comments nest; an unterminated comment produces an Error token;
    // a stray closing `*/` is lexed as ordinary operator characters.
    #[test]
    fn comments() {
        let output = token_kinds("1 + /* hella /* bro */ */ 2");
        assert_eq!(output, vec![digit!("1"), op!("+"), digit!("2")]);

        let output = token_kinds("1 + /* hella /* bro */ 2");
        assert_eq!(output, vec![digit!("1"), op!("+"), Error("Unclosed comment".to_string())]);

        //TODO not sure if I want this behavior
        let output = token_kinds("1 + /* hella */ bro */ 2");
        assert_eq!(
            output,
            vec![
                digit!("1"),
                op!("+"),
                Identifier(Rc::new("bro".to_string())),
                Operator(Rc::new("*".to_string())),
                Slash,
                DigitGroup(Rc::new("2".to_string()))
            ]
        );
    }

    // Backtick-quoted identifiers become named operators, backticks stripped.
    #[test]
    fn backtick_operators() {
        let output = token_kinds("1 `plus` 2");
        assert_eq!(output, vec![digit!("1"), op!("plus"), digit!("2")]);
    }

    // Plain strings, prefixed strings (`b"…"`), and escape sequences.
    #[test]
    fn string_literals() {
        let output = token_kinds(r#""some string""#);
        assert_eq!(output, vec![StrLiteral { s: Rc::new("some string".to_string()), prefix: None }]);

        let output = token_kinds(r#"b"some bytestring""#);
        assert_eq!(
            output,
            vec![StrLiteral {
                s: Rc::new("some bytestring".to_string()),
                prefix: Some(Rc::new("b".to_string()))
            }]
        );

        let output = token_kinds(r#""Do \n \" escapes work\t""#);
        assert_eq!(
            output,
            vec![StrLiteral { s: Rc::new("Do \n \" escapes work\t".to_string()), prefix: None }]
        );
    }
}

View File

@ -38,7 +38,7 @@ impl<'a, 'b> Evaluator<'a, 'b> {
let mut acc = vec![]; let mut acc = vec![];
for (def_id, function) in reduced.functions.into_iter() { for (def_id, function) in reduced.functions.into_iter() {
let mem = (&def_id).into(); let mem = (&def_id).into();
self.state.memory.insert(mem, MemoryValue::Function(function)); self.state.environments.insert(mem, MemoryValue::Function(function));
} }
for statement in reduced.entrypoint.into_iter() { for statement in reduced.entrypoint.into_iter() {
@ -67,7 +67,8 @@ impl<'a, 'b> Evaluator<'a, 'b> {
if self.early_returning { if self.early_returning {
break; break;
} }
if self.loop_control.is_some() { if let Some(_) = self.loop_control {
println!("We here?");
break; break;
} }
} }
@ -78,7 +79,7 @@ impl<'a, 'b> Evaluator<'a, 'b> {
match stmt { match stmt {
Statement::Binding { ref id, expr, constant: _ } => { Statement::Binding { ref id, expr, constant: _ } => {
let evaluated = self.expression(expr)?; let evaluated = self.expression(expr)?;
self.state.memory.insert(id.into(), evaluated.into()); self.state.environments.insert(id.into(), evaluated.into());
Ok(StatementOutput::Nothing) Ok(StatementOutput::Nothing)
} }
Statement::Expression(expr) => { Statement::Expression(expr) => {
@ -119,7 +120,7 @@ impl<'a, 'b> Evaluator<'a, 'b> {
Expression::Lookup(kind) => match kind { Expression::Lookup(kind) => match kind {
Lookup::Function(ref id) => { Lookup::Function(ref id) => {
let mem = id.into(); let mem = id.into();
match self.state.memory.lookup(&mem) { match self.state.environments.lookup(&mem) {
// This just checks that the function exists in "memory" by ID, we don't // This just checks that the function exists in "memory" by ID, we don't
// actually retrieve it until `apply_function()` // actually retrieve it until `apply_function()`
Some(MemoryValue::Function(_)) => Primitive::Callable(Callable::UserDefined(*id)), Some(MemoryValue::Function(_)) => Primitive::Callable(Callable::UserDefined(*id)),
@ -128,21 +129,14 @@ impl<'a, 'b> Evaluator<'a, 'b> {
} }
Lookup::Param(n) => { Lookup::Param(n) => {
let mem = n.into(); let mem = n.into();
match self.state.memory.lookup(&mem) { match self.state.environments.lookup(&mem) {
Some(MemoryValue::Primitive(prim)) => prim.clone(), Some(MemoryValue::Primitive(prim)) => prim.clone(),
e => return Err(format!("Param lookup error, got {:?}", e).into()), e => return Err(format!("Param lookup error, got {:?}", e).into()),
} }
} }
Lookup::SelfParam => {
let mem = Memory::self_param();
match self.state.memory.lookup(&mem) {
Some(MemoryValue::Primitive(prim)) => prim.clone(),
e => return Err(format!("SelfParam lookup error, got {:?}", e).into()),
}
}
Lookup::LocalVar(ref id) | Lookup::GlobalVar(ref id) => { Lookup::LocalVar(ref id) | Lookup::GlobalVar(ref id) => {
let mem = id.into(); let mem = id.into();
match self.state.memory.lookup(&mem) { match self.state.environments.lookup(&mem) {
Some(MemoryValue::Primitive(expr)) => expr.clone(), Some(MemoryValue::Primitive(expr)) => expr.clone(),
_ => _ =>
return Err( return Err(
@ -155,12 +149,10 @@ impl<'a, 'b> Evaluator<'a, 'b> {
let mem = lval.into(); let mem = lval.into();
let evaluated = self.expression(rval)?; let evaluated = self.expression(rval)?;
println!("Inserting {:?} into {:?}", evaluated, mem); println!("Inserting {:?} into {:?}", evaluated, mem);
self.state.memory.insert(mem, MemoryValue::Primitive(evaluated)); self.state.environments.insert(mem, MemoryValue::Primitive(evaluated));
Primitive::unit() Primitive::unit()
} }
Expression::Call { box f, args } => self.call_expression(f, args, None)?, Expression::Call { box f, args } => self.call_expression(f, args)?,
Expression::CallMethod { box f, args, box self_expr } =>
self.call_expression(f, args, Some(self_expr))?,
Expression::Callable(Callable::DataConstructor { type_id, tag }) => { Expression::Callable(Callable::DataConstructor { type_id, tag }) => {
let arity = self.type_context.lookup_variant_arity(&type_id, tag).unwrap(); let arity = self.type_context.lookup_variant_arity(&type_id, tag).unwrap();
if arity == 0 { if arity == 0 {
@ -311,9 +303,9 @@ impl<'a, 'b> Evaluator<'a, 'b> {
let cond = self.expression(cond)?; let cond = self.expression(cond)?;
for alt in alternatives.into_iter() { for alt in alternatives.into_iter() {
let mut new_scope = self.state.memory.new_scope(None); let mut new_scope = self.state.environments.new_scope(None);
if matches(&cond, &alt.pattern, &mut new_scope) { if matches(&cond, &alt.pattern, &mut new_scope) {
let mut new_state = State { memory: new_scope }; let mut new_state = State { environments: new_scope };
let mut evaluator = Evaluator::new(&mut new_state, self.type_context); let mut evaluator = Evaluator::new(&mut new_state, self.type_context);
let output = evaluator.block(alt.item); let output = evaluator.block(alt.item);
self.early_returning = evaluator.early_returning; self.early_returning = evaluator.early_returning;
@ -323,13 +315,7 @@ impl<'a, 'b> Evaluator<'a, 'b> {
Err("No valid match in match expression".into()) Err("No valid match in match expression".into())
} }
//TODO need to do something with self_expr to make method invocations actually work fn call_expression(&mut self, f: Expression, args: Vec<Expression>) -> EvalResult<Primitive> {
fn call_expression(
&mut self,
f: Expression,
args: Vec<Expression>,
self_expr: Option<Expression>,
) -> EvalResult<Primitive> {
let func = match self.expression(f)? { let func = match self.expression(f)? {
Primitive::Callable(func) => func, Primitive::Callable(func) => func,
other => return Err(format!("Trying to call non-function value: {:?}", other).into()), other => return Err(format!("Trying to call non-function value: {:?}", other).into()),
@ -338,10 +324,10 @@ impl<'a, 'b> Evaluator<'a, 'b> {
Callable::Builtin(builtin) => self.apply_builtin(builtin, args), Callable::Builtin(builtin) => self.apply_builtin(builtin, args),
Callable::UserDefined(def_id) => { Callable::UserDefined(def_id) => {
let mem = (&def_id).into(); let mem = (&def_id).into();
match self.state.memory.lookup(&mem) { match self.state.environments.lookup(&mem) {
Some(MemoryValue::Function(FunctionDefinition { body })) => { Some(MemoryValue::Function(FunctionDefinition { body })) => {
let body = body.clone(); //TODO ideally this clone would not happen let body = body.clone(); //TODO ideally this clone would not happen
self.apply_function(body, args, self_expr) self.apply_function(body, args)
} }
e => Err(format!("Error looking up function with id {}: {:?}", def_id, e).into()), e => Err(format!("Error looking up function with id {}: {:?}", def_id, e).into()),
} }
@ -355,7 +341,7 @@ impl<'a, 'b> Evaluator<'a, 'b> {
) )
.into()); .into());
} }
self.apply_function(body, args, None) self.apply_function(body, args)
} }
Callable::DataConstructor { type_id, tag } => { Callable::DataConstructor { type_id, tag } => {
let arity = self.type_context.lookup_variant_arity(&type_id, tag).unwrap(); let arity = self.type_context.lookup_variant_arity(&type_id, tag).unwrap();
@ -484,30 +470,21 @@ impl<'a, 'b> Evaluator<'a, 'b> {
}) })
} }
fn apply_function( fn apply_function(&mut self, body: Vec<Statement>, args: Vec<Expression>) -> EvalResult<Primitive> {
&mut self,
body: Vec<Statement>,
args: Vec<Expression>,
self_expr: Option<Expression>,
) -> EvalResult<Primitive> {
let self_expr = if let Some(expr) = self_expr { Some(self.expression(expr)?) } else { None };
let mut evaluated_args: Vec<Primitive> = vec![]; let mut evaluated_args: Vec<Primitive> = vec![];
for arg in args.into_iter() { for arg in args.into_iter() {
evaluated_args.push(self.expression(arg)?); evaluated_args.push(self.expression(arg)?);
} }
let mut frame_state = State { memory: self.state.memory.new_scope(None) }; let mut frame_state = State { environments: self.state.environments.new_scope(None) };
let mut evaluator = Evaluator::new(&mut frame_state, self.type_context); let mut evaluator = Evaluator::new(&mut frame_state, self.type_context);
if let Some(evaled) = self_expr {
let mem = Memory::self_param();
evaluator.state.memory.insert(mem, MemoryValue::Primitive(evaled));
}
for (n, evaled) in evaluated_args.into_iter().enumerate() { for (n, evaled) in evaluated_args.into_iter().enumerate() {
let n = n as u8; let n = n as u8;
let mem = n.into(); let mem = n.into();
evaluator.state.memory.insert(mem, MemoryValue::Primitive(evaled)); evaluator.state.environments.insert(mem, MemoryValue::Primitive(evaled));
} }
evaluator.block(body) evaluator.block(body)
} }
} }

View File

@ -4,7 +4,7 @@ use crate::{
reduced_ir::{Callable, Expression, FunctionDefinition, Literal, ReducedIR}, reduced_ir::{Callable, Expression, FunctionDefinition, Literal, ReducedIR},
symbol_table::DefId, symbol_table::DefId,
type_inference::{TypeContext, TypeId}, type_inference::{TypeContext, TypeId},
util::{delim_wrapped, ScopeStack}, util::ScopeStack,
}; };
mod evaluator; mod evaluator;
@ -14,7 +14,7 @@ type EvalResult<T> = Result<T, RuntimeError>;
#[derive(Debug)] #[derive(Debug)]
pub struct State<'a> { pub struct State<'a> {
memory: ScopeStack<'a, Memory, MemoryValue>, environments: ScopeStack<'a, Memory, MemoryValue>,
} }
//TODO - eh, I dunno, maybe it doesn't matter exactly how memory works in the tree-walking //TODO - eh, I dunno, maybe it doesn't matter exactly how memory works in the tree-walking
@ -24,12 +24,6 @@ enum Memory {
Index(u32), Index(u32),
} }
impl Memory {
fn self_param() -> Self {
Memory::Index(3_999_999)
}
}
// This is for function param lookups, and is a hack // This is for function param lookups, and is a hack
impl From<u8> for Memory { impl From<u8> for Memory {
fn from(n: u8) -> Self { fn from(n: u8) -> Self {
@ -67,6 +61,19 @@ impl RuntimeError {
} }
} }
fn delim_wrapped(lhs: char, rhs: char, terms: impl Iterator<Item = String>) -> String {
let mut buf = String::new();
write!(buf, "{}", lhs).unwrap();
for term in terms.map(Some).intersperse(None) {
match term {
Some(e) => write!(buf, "{}", e).unwrap(),
None => write!(buf, ", ").unwrap(),
};
}
write!(buf, "{}", rhs).unwrap();
buf
}
/// Anything that can be stored in memory; that is, a function definition, or a fully-evaluated /// Anything that can be stored in memory; that is, a function definition, or a fully-evaluated
/// program value. /// program value.
#[derive(Debug)] #[derive(Debug)]
@ -158,7 +165,7 @@ impl From<Literal> for Primitive {
impl<'a> State<'a> { impl<'a> State<'a> {
pub fn new() -> Self { pub fn new() -> Self {
Self { memory: ScopeStack::new(Some("global".to_string())) } Self { environments: ScopeStack::new(Some("global".to_string())) }
} }
pub fn evaluate( pub fn evaluate(

View File

@ -43,7 +43,7 @@ fn test_basic_eval() {
#[test] #[test]
fn op_eval() { fn op_eval() {
eval_assert("-13", "-13"); eval_assert("- 13", "-13");
eval_assert("10 - 2", "8"); eval_assert("10 - 2", "8");
} }
@ -95,7 +95,8 @@ trad()"#,
"30", "30",
); );
let err = "No symbol found for name: `a`"; let err =
"No symbol found for name: QualifiedName { id: Id { idx: 4, t: PhantomData }, components: [\"a\"] }";
eval_assert_failure( eval_assert_failure(
r#" r#"
@ -270,26 +271,26 @@ fn full_if_matching() {
let source = r#" let source = r#"
type Option<T> = Some(T) | None type Option<T> = Some(T) | None
let a = Option::None let a = Option::None
if a { is Option::None then 4; is Option::Some(x) then x } if a { is Option::None then 4, is Option::Some(x) then x }
"#; "#;
eval_assert(source, "4"); eval_assert(source, "4");
let source = r#" let source = r#"
type Option<T> = Some(T) | None type Option<T> = Some(T) | None
let sara = Option::Some(99) let sara = Option::Some(99)
if sara { is Option::None then 1 + 3; is Option::Some(x) then x } if sara { is Option::None then 1 + 3, is Option::Some(x) then x }
"#; "#;
eval_assert(source, "99"); eval_assert(source, "99");
let source = r#" let source = r#"
let a = 10 let a = 10
if a { is 10 then "x"; is 4 then "y" } if a { is 10 then "x", is 4 then "y" }
"#; "#;
eval_assert(source, "\"x\""); eval_assert(source, "\"x\"");
let source = r#" let source = r#"
let a = 10 let a = 10
if a { is 15 then "x"; is 10 then "y" } if a { is 15 then "x", is 10 then "y" }
"#; "#;
eval_assert(source, "\"y\""); eval_assert(source, "\"y\"");
} }
@ -299,7 +300,7 @@ if a { is 15 then "x"; is 10 then "y" }
fn string_pattern() { fn string_pattern() {
let source = r#" let source = r#"
let a = "foo" let a = "foo"
if a { is "foo" then "x"; is _ then "y" } if a { is "foo" then "x", is _ then "y" }
"#; "#;
eval_assert(source, "\"x\""); eval_assert(source, "\"x\"");
} }
@ -309,7 +310,7 @@ fn boolean_pattern() {
let source = r#" let source = r#"
let a = true let a = true
if a { if a {
is true then "x" is true then "x",
is false then "y" is false then "y"
} }
"#; "#;
@ -320,7 +321,7 @@ if a {
fn boolean_pattern_2() { fn boolean_pattern_2() {
let source = r#" let source = r#"
let a = false let a = false
if a { is true then "x"; is false then "y" } if a { is true then "x", is false then "y" }
"#; "#;
eval_assert(source, "\"y\""); eval_assert(source, "\"y\"");
} }
@ -340,7 +341,7 @@ if Option::Some(10) {
fn tuple_pattern() { fn tuple_pattern() {
let source = r#" let source = r#"
if (1, 2) { if (1, 2) {
is (1, x) then x; is (1, x) then x,
is _ then 99 is _ then 99
} }
"#; "#;
@ -351,7 +352,7 @@ if (1, 2) {
fn tuple_pattern_2() { fn tuple_pattern_2() {
let source = r#" let source = r#"
if (1, 2) { if (1, 2) {
is (10, x) then x is (10, x) then x,
is (y, x) then x + y is (y, x) then x + y
} }
"#; "#;
@ -362,7 +363,7 @@ if (1, 2) {
fn tuple_pattern_3() { fn tuple_pattern_3() {
let source = r#" let source = r#"
if (1, 5) { if (1, 5) {
is (10, x) then x is (10, x) then x,
is (1, x) then x is (1, x) then x
} }
"#; "#;
@ -373,8 +374,8 @@ if (1, 5) {
fn tuple_pattern_4() { fn tuple_pattern_4() {
let source = r#" let source = r#"
if (1, 5) { if (1, 5) {
is (10, x) then x is (10, x) then x,
is (1, x) then x is (1, x) then x,
} }
"#; "#;
eval_assert(source, "5"); eval_assert(source, "5");
@ -389,21 +390,21 @@ let b = Stuff::Jugs(1, "haha")
let c = Stuff::Mardok let c = Stuff::Mardok
let x = if a { let x = if a {
is Stuff::Mulch(20) then "x" is Stuff::Mulch(20) then "x",
is _ then "ERR" is _ then "ERR"
} }
let y = if b { let y = if b {
is Stuff::Mulch(n) then "ERR" is Stuff::Mulch(n) then "ERR",
is Stuff::Jugs(2, _) then "ERR" is Stuff::Jugs(2, _) then "ERR",
is Stuff::Jugs(1, s) then s is Stuff::Jugs(1, s) then s,
is _ then "ERR" is _ then "ERR",
} }
let z = if c { let z = if c {
is Stuff::Jugs(_, _) then "ERR" is Stuff::Jugs(_, _) then "ERR",
is Stuff::Mardok then "NIGH" is Stuff::Mardok then "NIGH",
is _ then "ERR" is _ then "ERR",
} }
(x, y, z) (x, y, z)
@ -545,20 +546,3 @@ fn foo() { return 2 }
"(7, 9)", "(7, 9)",
); );
} }
#[test]
fn eval_method() {
    // A method defined in an `impl` block should be callable with dot syntax
    // (`a.a_method()`) and evaluate to its body's value (20), so 4 + 20 = 24.
    let src = r#"
type Thing = Thing
impl Thing {
fn a_method() {
20
}
}
let a = Thing::Thing
4 + a.a_method()
"#;
    eval_assert(src, "24");
}

View File

@ -1,7 +1,7 @@
use std::{collections::HashMap, convert::From}; use std::{collections::HashMap, convert::From};
use crate::{ use crate::{
ast::{TypeIdentifier, AST}, ast::TypeIdentifier,
identifier::{define_id_kind, Id, IdStore}, identifier::{define_id_kind, Id, IdStore},
}; };
@ -96,11 +96,6 @@ impl TypeContext {
pub fn lookup_type(&self, type_id: &TypeId) -> Option<&DefinedType> { pub fn lookup_type(&self, type_id: &TypeId) -> Option<&DefinedType> {
self.defined_types.get(type_id) self.defined_types.get(type_id)
} }
//TODO return some kind of overall type later?
pub fn typecheck(&mut self, ast: &AST) -> Result<(), TypeError> {
Ok(())
}
} }
/// A type defined in program source code, as opposed to a builtin. /// A type defined in program source code, as opposed to a builtin.
@ -216,12 +211,12 @@ macro_rules! ty {
Type::Const(crate::type_inference::TypeConst::$type_name) Type::Const(crate::type_inference::TypeConst::$type_name)
}; };
($t1:ident -> $t2:ident) => { ($t1:ident -> $t2:ident) => {
Type::Arrow { params: vec![ty!($t1)], ret: Box::new(ty!($t2)) } Type::Arrow { params: vec![ty!($t1)], ret: box ty!($t2) }
}; };
($t1:ident -> $t2:ident -> $t3:ident) => { ($t1:ident -> $t2:ident -> $t3:ident) => {
Type::Arrow { params: vec![ty!($t1), ty!($t2)], ret: Box::new(ty!($t3)) } Type::Arrow { params: vec![ty!($t1), ty!($t2)], ret: box ty!($t3) }
}; };
($type_list:ident, $ret_type:ident) => { ($type_list:ident, $ret_type:ident) => {
Type::Arrow { params: $type_list, ret: Box::new($ret_type) } Type::Arrow { params: $type_list, ret: box $ret_type }
}; };
} }

View File

@ -1,18 +1,4 @@
use std::{cmp::Eq, collections::HashMap, fmt::Write, hash::Hash}; use std::{cmp::Eq, collections::HashMap, hash::Hash};
/// Utility function for printing a comma-delimited list of things wrapped in the
/// `lhs`/`rhs` delimiter characters, e.g. `('(', ')', ["a","b"])` -> `"(a, b)"`.
///
/// The previous implementation relied on the unstable `Iterator::intersperse`
/// (a nightly-only feature) via a `map(Some)`/`None`-separator hack; collecting
/// and joining does the same thing on stable Rust.
pub(crate) fn delim_wrapped(lhs: char, rhs: char, terms: impl Iterator<Item = String>) -> String {
    format!("{}{}{}", lhs, terms.collect::<Vec<_>>().join(", "), rhs)
}
#[derive(Default, Debug)] #[derive(Default, Debug)]
pub struct ScopeStack<'a, T: 'a, V: 'a, N = String> pub struct ScopeStack<'a, T: 'a, V: 'a, N = String>
@ -66,15 +52,11 @@ where T: Hash + Eq
/// Quickly create an AST from a string, with no error checking. For test use only /// Quickly create an AST from a string, with no error checking. For test use only
#[cfg(test)] #[cfg(test)]
pub fn quick_ast(input: &str) -> crate::ast::AST { pub fn quick_ast(input: &str) -> crate::ast::AST {
let tokens = crate::tokenizing::tokenize(input);
let mut parser = crate::parsing::Parser::new(); let mut parser = crate::parsing::Parser::new();
let output = parser.parse(input); parser.add_new_tokens(tokens);
match output { let output = parser.parse();
Ok(output) => output, output.unwrap()
Err(err) => {
println!("Parse error: {}", err.msg);
panic!();
}
}
} }
#[allow(unused_macros)] #[allow(unused_macros)]

View File

@ -1,79 +0,0 @@
use crate::{
parsing::{Location, ParseError},
schala::{SourceReference, Stage},
symbol_table::SymbolError,
type_inference::TypeError,
};
/// Top-level error type for the Schala pipeline: aggregates one or more errors
/// (parse, typecheck, symbol-table, or free-form) produced while processing a program.
pub struct SchalaError {
    // Invariant assumed by `display()`: at least one error is present.
    errors: Vec<Error>,
}
impl SchalaError {
    /// Renders the first error as a user-facing string.
    /// NOTE(review): indexes `self.errors[0]` and so panics on an empty error
    /// list — the constructors below always supply at least one; confirm no
    /// other construction path exists.
    pub(crate) fn display(&self) -> String {
        match self.errors[0] {
            Error::Parse(ref parse_err) => parse_err.to_string(),
            Error::Standard { ref text, .. } => text.as_ref().cloned().unwrap_or_default(),
        }
    }

    /// Wraps a single type-checking error, tagged with the `Typechecking` stage.
    #[allow(dead_code)]
    pub(crate) fn from_type_error(err: TypeError) -> Self {
        Self {
            errors: vec![Error::Standard { location: None, text: Some(err.msg), stage: Stage::Typechecking }],
        }
    }

    /// Wraps symbol-table errors. Each error is currently flattened to the same
    /// generic "symbol table error" message, discarding the details.
    pub(crate) fn from_symbol_table(symbol_errs: Vec<SymbolError>) -> Self {
        //TODO this could be better
        let errors = symbol_errs
            .into_iter()
            .map(|_symbol_err| Error::Standard {
                location: None,
                text: Some("symbol table error".to_string()),
                stage: Stage::Symbols,
            })
            .collect();
        Self { errors }
    }

    /// Wraps a free-form error message attributed to the given pipeline `stage`.
    pub(crate) fn from_string(text: String, stage: Stage) -> Self {
        Self { errors: vec![Error::Standard { location: None, text: Some(text), stage }] }
    }

    /// Wraps a parse error, pre-rendering it against the original source text
    /// (offending line plus caret pointer) via `format_parse_error`.
    pub(crate) fn from_parse_error(parse_error: ParseError, source_reference: &SourceReference) -> Self {
        let formatted_parse_error = format_parse_error(parse_error, source_reference);
        Self { errors: vec![Error::Parse(formatted_parse_error)] }
    }
}
/// A single error in a `SchalaError`: either a generic stage-tagged message,
/// or a parse error already rendered to its display string.
#[allow(dead_code)]
enum Error {
    Standard { location: Option<Location>, text: Option<String>, stage: Stage },
    Parse(String),
}
/// Renders a parse error against the original source: the error message, then
/// the offending source line with a caret (`^`) pointing at the error column.
fn format_parse_error(error: ParseError, source_reference: &SourceReference) -> String {
    let offset = error.location.offset;
    let (line_start, line_num, line_from_program) = source_reference.get_line(offset);

    // Column within the line, and a caret positioned under it.
    let column = offset - line_start;
    let location_pointer = format!("{}^", " ".repeat(column));

    // Pad the gutter of caret/separator lines to the width of the line number.
    let space_padding = " ".repeat(line_num.to_string().len());

    format!(
        r#"
{error_msg}
{space_padding} |
{line_num} | {}
{space_padding} | {}
"#,
        line_from_program,
        location_pointer,
        error_msg = error.msg,
        space_padding = space_padding,
        line_num = line_num,
    )
}

File diff suppressed because it is too large Load Diff

View File

@ -1,126 +0,0 @@
#![allow(clippy::upper_case_acronyms)]
pub mod combinator;
mod peg_parser;
mod test;
use std::{cell::RefCell, fmt, rc::Rc};
use combinator::Span;
#[cfg(test)]
use crate::ast::{Block, Expression};
use crate::{
ast::{ASTItem, AST},
identifier::{Id, IdStore},
};
pub(crate) type StoreRef = Rc<RefCell<IdStore<ASTItem>>>;
/// Front end for Schala source. Owns the AST id store and dispatches to one of
/// two parser backends: the nom-based combinator parser (when `use_combinator`
/// is true, the default) or the `peg`-based parser.
pub struct Parser {
    id_store: StoreRef,
    use_combinator: bool,
}
impl Parser {
    /// Creates a parser with a fresh id store; the combinator backend is the default.
    pub(crate) fn new() -> Self {
        let id_store: IdStore<ASTItem> = IdStore::new();
        Self { id_store: Rc::new(RefCell::new(id_store)), use_combinator: true }
    }

    /// Parses a full program, dispatching to whichever backend is selected.
    pub(crate) fn parse(&mut self, input: &str) -> Result<AST, ParseError> {
        if self.use_combinator {
            self.parse_comb(input)
        } else {
            self.parse_peg(input)
        }
    }

    /// Parses a full program with the `peg`-generated grammar.
    pub(crate) fn parse_peg(&mut self, input: &str) -> Result<AST, ParseError> {
        peg_parser::schala_parser::program(input, self).map_err(ParseError::from_peg)
    }

    /// Parses a full program with the nom combinator backend; the id store is
    /// threaded through the input span as nom "extra" state.
    pub(crate) fn parse_comb(&mut self, input: &str) -> Result<AST, ParseError> {
        let span = Span::new_extra(input, self.id_store.clone());
        convert(input, combinator::program(span))
    }

    // Parses a single expression with the peg backend (test helper).
    #[cfg(test)]
    fn expression(&mut self, input: &str) -> Result<Expression, ParseError> {
        peg_parser::schala_parser::expression(input, self).map_err(ParseError::from_peg)
    }

    // Parses a single expression with the combinator backend (test helper).
    #[cfg(test)]
    fn expression_comb(&mut self, input: &str) -> Result<Expression, ParseError> {
        let span = Span::new_extra(input, self.id_store.clone());
        convert(input, combinator::expression(span))
    }

    // Parses a single block with the peg backend (test helper).
    #[cfg(test)]
    fn block(&mut self, input: &str) -> Result<Block, ParseError> {
        peg_parser::schala_parser::block(input, self).map_err(ParseError::from_peg)
    }

    // Parses a single block with the combinator backend (test helper).
    #[cfg(test)]
    fn block_comb(&mut self, input: &str) -> Result<Block, ParseError> {
        let span = Span::new_extra(input, self.id_store.clone());
        convert(input, combinator::block(span))
    }

    /// Mints a fresh AST node id.
    fn fresh(&mut self) -> Id<ASTItem> {
        self.id_store.borrow_mut().fresh()
    }
}
/// Converts a nom combinator result into this crate's `Result<O, ParseError>`.
///
/// On success, requires the parser to have consumed the entire input; leftover
/// text is reported as an error. On failure, rebuilds the nom `VerboseError`
/// over plain `&str` fragments so `convert_error` can render a readable trace.
fn convert<'a, O>(input: &'a str, result: combinator::ParseResult<'a, O>) -> Result<O, ParseError> {
    use nom::{error::VerboseError, Finish};
    match result.finish() {
        Ok((rest, output)) => {
            // A successful parse must consume everything; anything left over is a bug.
            if rest.fragment() != &"" {
                return Err(ParseError {
                    location: Default::default(),
                    msg: format!("Bad parse state, remaining text: `{}`", rest.fragment()),
                });
            }
            Ok(output)
        }
        Err(err) => {
            // Strip the Span wrapper from each error frame so nom's pretty-printer
            // (which works on &str) can be used.
            let err = VerboseError {
                errors: err.errors.into_iter().map(|(sp, kind)| (*sp.fragment(), kind)).collect(),
            };
            let msg = nom::error::convert_error(input, err);
            // NOTE(review): the rendered message carries the position info; the
            // structured location is left at offset 0 here.
            Err(ParseError { msg, location: (0).into() })
        }
    }
}
/// Represents a parsing error
#[derive(Debug)]
pub struct ParseError {
    // Human-readable description of what went wrong (may already include
    // source-line context when produced by `convert`).
    pub msg: String,
    // Position of the error in the source input.
    pub location: Location,
}
impl ParseError {
    /// Converts an error from the `peg` backend, preserving its message and
    /// byte-offset location.
    fn from_peg(err: peg::error::ParseError<peg::str::LineCol>) -> Self {
        let msg = err.to_string();
        Self { msg, location: err.location.offset.into() }
    }
}
/// A position in source code, stored as a byte offset from the start of the input.
#[derive(Debug, Clone, Copy, PartialEq, Default)]
pub struct Location {
    pub(crate) offset: usize,
}
impl From<usize> for Location {
fn from(offset: usize) -> Self {
Self { offset }
}
}
impl fmt::Display for Location {
    /// A location displays as its bare byte offset.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Delegate straight to the offset's own Display impl.
        fmt::Display::fmt(&self.offset, f)
    }
}

View File

@ -1,567 +0,0 @@
use std::rc::Rc;
use super::Parser;
use crate::ast::*;
/// Convenience helper: copies a borrowed `&str` into a reference-counted `String`.
fn rc_string(s: &str) -> Rc<String> {
    Rc::new(String::from(s))
}
/// One postfix "extension" applied to a primary expression during parsing:
/// an index (`a[x]`), a field access (`a.name`), or a call (`a(args)`).
/// A chain of these is folded left-to-right in the `extended_expr` rule.
enum ExtendedPart<'a> {
    Index(Vec<Expression>),
    Accessor(&'a str),
    Call(Vec<InvocationArgument>),
}
// Grammar for the Schala language, built with the rust-peg crate. Rules that
// construct AST nodes take `parser: &mut Parser` so they can mint fresh node
// ids via `parser.fresh()`. (Only line comments are used inside the macro:
// doc comments would become attributes in the token stream.)
peg::parser! {
    pub grammar schala_parser() for str {

        // --- Whitespace and comments ---
        // `_` skips horizontal whitespace/comments; `__` also skips newlines.
        rule whitespace() = [' ' | '\t' ]
        rule whitespace_or_newline() = [' ' | '\t' | '\n' ]

        rule _ = quiet!{ (block_comment() / line_comment() / whitespace())* }
        rule __ = quiet!{ (block_comment() / line_comment() / whitespace_or_newline())* }

        // Block comments nest; a line comment runs up to (without consuming) the newline.
        rule block_comment() = "/*" (block_comment() / !"*/" [_])* "*/"
        rule line_comment() = "//" (!['\n'] [_])* &"\n"

        // --- Top level ---
        pub rule program(parser: &mut Parser) -> AST =
            __ statements:(statement(parser) ** (delimiter()+) ) __ { AST { id: parser.fresh(), statements: statements.into() } }

        rule delimiter() = (";" / "\n")+

        //Note - this is a hack, ideally the rule `rule block() -> Block = "{" _ items:(statement() **
        //delimiter()) _ "}" { items.into() }` would've worked, but it doesn't.
        pub rule block(parser: &mut Parser) -> Block =
            "{" __ items:(statement(parser) ** delimiter()) delimiter()? __ "}" { items.into() } /
            "{" __ stmt:statement(parser) __ "}" { vec![stmt].into() }

        rule block_item(parser: &mut Parser) -> Statement<StatementKind> =
            _ stmt:statement(parser) _ delimiter()+ { stmt }

        rule statement(parser: &mut Parser) -> Statement<StatementKind> =
            _ pos:position!() kind:statement_kind(parser) _ { Statement { id: parser.fresh(), location: pos.into(), kind } }

        rule statement_kind(parser: &mut Parser) -> StatementKind =
            __ import:import(parser) { StatementKind::Import(import) } /
            __ decl:declaration(parser) { StatementKind::Declaration(decl) } /
            __ flow:flow(parser) { StatementKind::Flow(flow) } /
            __ expr:expression(parser) { StatementKind::Expression(expr) }

        // continue / break / return [expr]
        rule flow(parser: &mut Parser) -> FlowControl =
            "continue" { FlowControl::Continue } /
            "break" { FlowControl::Break } /
            "return" _ expr:expression(parser)? { FlowControl::Return(expr) }

        //TODO add the ability to rename and exclude imports
        rule import(parser: &mut Parser) -> ImportSpecifier =
            "import" _ path_components:path_components() suffix:import_suffix()? {
                ImportSpecifier {
                    id: parser.fresh(),
                    path_components,
                    imported_names: suffix.unwrap_or(ImportedNames::LastOfPath)
                }
            }

        rule path_components() -> Vec<Rc<String>> =
            "::"? name:identifier() rest:path_component()* {
                let mut items = vec![rc_string(name)];
                items.extend(rest.into_iter().map(rc_string));
                items
            }

        rule path_component() -> &'input str = "::" ident:identifier() { ident }

        // `::*` imports everything; `::{a, b}` imports a non-empty list.
        rule import_suffix() -> ImportedNames =
            "::*" { ImportedNames::All } /
            "::{" __ names:(identifier() ** (_ "," _)) __ "}" {?
                if names.is_empty() {
                    Err("import groups must have at least one item")
                } else {
                    Ok(ImportedNames::List(names.into_iter().map(rc_string).collect()))
                }
            }

        // --- Declarations ---
        rule declaration(parser: &mut Parser) -> Declaration =
            binding(parser) / type_decl(parser) / annotation(parser) / func(parser) / interface(parser) /
            implementation(parser) / module(parser)

        rule module(parser: &mut Parser) -> Declaration =
            "module" _ name:identifier() _ items:block(parser) { Declaration::Module { name: rc_string(name), items } }

        // `impl Interface for Type { ... }` or bare `impl Type { ... }`.
        rule implementation(parser: &mut Parser) -> Declaration =
            "impl" _ interface:type_singleton_name() _ "for" _ type_name:type_identifier() _ block:decl_block(parser) {
                Declaration::Impl { type_name, interface_name: Some(interface), block }
            } /
            "impl" _ type_name:type_identifier() _ block:decl_block(parser) {
                Declaration::Impl { type_name, interface_name: None, block }
            }

        rule decl_block(parser: &mut Parser) -> Vec<Statement<Declaration>> =
            "{" __ decls:(func_declaration_stmt(parser) ** (delimiter()+)) delimiter()? __ "}" { decls }

        rule func_declaration_stmt(parser: &mut Parser) -> Statement<Declaration> =
            pos:position!() decl:func_declaration(parser) { Statement { id: parser.fresh(), location: pos.into(), kind: decl } }

        rule interface(parser: &mut Parser) -> Declaration =
            "interface" _ name:identifier() _ signatures:signature_block(parser) { Declaration::Interface { name: rc_string(name), signatures } }

        rule signature_block(parser: &mut Parser) -> Vec<Signature> =
            "{" __ signatures:(func_signature(parser) ** (delimiter()+)) __ "}" { signatures }

        // A function item is either a full declaration (with body) or a bare signature.
        rule func(parser: &mut Parser) -> Declaration =
            decl:func_declaration(parser) { decl } /
            sig:func_signature(parser) { Declaration::FuncSig(sig) }

        rule func_declaration(parser: &mut Parser) -> Declaration =
            _ sig:func_signature(parser) __ body:block(parser) { Declaration::FuncDecl(sig, body) }

        // Named function, or operator definition `fn (+)(...)`.
        rule func_signature(parser: &mut Parser) -> Signature =
            _ "fn" _ name:identifier() "(" _ params:formal_params(parser) _ ")" _ type_anno:type_anno()? { Signature {
                name: rc_string(name), operator: false, params, type_anno
            } } /
            _ "fn" _ "(" op:operator() ")" _ "(" _ params:formal_params(parser) _ ")" _ type_anno:type_anno()? { Signature {
                name: rc_string(op), operator: true, params, type_anno
            } }

        // Parameter count is capped at 255.
        rule formal_params(parser: &mut Parser) -> Vec<FormalParam> =
            params:(formal_param(parser) ** (_ "," _)) {? if params.len() < 256 { Ok(params) } else {
                Err("function-too-long") }
            }

        rule formal_param(parser: &mut Parser) -> FormalParam =
            name:identifier() _ anno:type_anno()? _ "=" expr:expression(parser) { FormalParam { name: rc_string(name),
                default: Some(expr), anno } } /
            name:identifier() _ anno:type_anno()? { FormalParam { name: rc_string(name), default: None, anno } }

        // `@annotation(args...)` attached to the following statement.
        rule annotation(parser: &mut Parser) -> Declaration =
            "@" name:identifier() args:annotation_args(parser)? delimiter()+ _ inner:statement(parser) { Declaration::Annotation {
                name: rc_string(name), arguments: if let Some(args) = args { args } else { vec![] }, inner: Box::new(inner) }
            }

        rule annotation_args(parser: &mut Parser) -> Vec<Expression> =
            "(" _ args:(expression(parser) ** (_ "," _)) _ ")" { args }

        // `let [mut] name [: Type] = expr` — absence of `mut` means constant.
        rule binding(parser: &mut Parser) -> Declaration =
            "let" _ mutable:"mut"? _ ident:identifier() _ type_anno:type_anno()? _ "=" _ expr:expression(parser) {
                Declaration::Binding { name: Rc::new(ident.to_string()), constant: mutable.is_none(),
                    type_anno, expr }
            }

        rule type_decl(parser: &mut Parser) -> Declaration =
            "type" _ "alias" _ alias:type_alias() { alias } /
            "type" _ mutable:"mut"? _ name:type_singleton_name() _ "=" _ body:type_body(parser) {
                Declaration::TypeDecl { name, body, mutable: mutable.is_some() }
            }

        rule type_singleton_name() -> TypeSingletonName =
            name:identifier() params:type_params()? { TypeSingletonName {
                name: rc_string(name), params: if let Some(params) = params { params } else { vec![] }
            } }

        rule type_params() -> Vec<TypeIdentifier> =
            "<" _ idents:(type_identifier() ** (_ "," _)) _ ">" { idents }

        rule type_identifier() -> TypeIdentifier =
            "(" _ items:(type_identifier() ** (_ "," _)) _ ")" { TypeIdentifier::Tuple(items) } /
            singleton:type_singleton_name() { TypeIdentifier::Singleton(singleton) }

        // Either an immediate record body `{ a: T, ... }` or a `|`-separated variant list.
        rule type_body(parser: &mut Parser) -> TypeBody =
            "{" _ items:(record_variant_item() ** (__ "," __)) __ "}" { TypeBody::ImmediateRecord { id: parser.fresh(), fields: items } } /
            variants:(variant_spec(parser) ** (__ "|" __)) { TypeBody::Variants(variants) }

        // Record, tuple-struct, or unit-struct variant (tried in that order).
        rule variant_spec(parser: &mut Parser) -> Variant =
            name:identifier() __ "{" __ typed_identifier_list:(record_variant_item() ** (__ "," __)) __ ","? __ "}" { Variant {
                id: parser.fresh(), name: rc_string(name), kind: VariantKind::Record(typed_identifier_list)
            } } /
            name:identifier() "(" tuple_members:(type_identifier() ++ (__ "," __)) ")" { Variant {
                id: parser.fresh(), name: rc_string(name), kind: VariantKind::TupleStruct(tuple_members) } } /
            name:identifier() { Variant { id: parser.fresh(), name: rc_string(name), kind: VariantKind::UnitStruct } }

        rule record_variant_item() -> (Rc<String>, TypeIdentifier) =
            name:identifier() _ ":" _ ty:type_identifier() { (rc_string(name), ty) }

        rule type_alias() -> Declaration =
            alias:identifier() _ "=" _ name:identifier() { Declaration::TypeAlias { alias: rc_string(alias), original: rc_string(name), } }

        rule type_anno() -> TypeIdentifier =
            ":" _ identifier:type_identifier() { identifier }

        // --- Expressions ---
        pub rule expression(parser: &mut Parser) -> Expression =
            __ kind:expression_kind(true, parser) _ type_anno:type_anno()? { Expression { id: parser.fresh(), type_anno, kind } }

        // Variant that forbids named-struct literals (used where `{` would be
        // ambiguous with a following block, e.g. `if cond {` / `while cond {`).
        rule expression_no_struct(parser: &mut Parser) -> Expression =
            __ kind:expression_kind(false, parser) { Expression { id: parser.fresh(), type_anno: None, kind } }

        rule expression_kind(struct_ok: bool, parser: &mut Parser) -> ExpressionKind =
            precedence_expr(struct_ok, parser)

        // Collects a flat `expr (op expr)*` sequence, then resolves operator
        // precedence via BinopSequence::do_precedence.
        rule precedence_expr(struct_ok: bool, parser: &mut Parser) -> ExpressionKind =
            first:prefix_expr(struct_ok, parser) _ next:(precedence_continuation(struct_ok, parser))* {
                let next = next.into_iter().map(|(sigil, expr)| (BinOp::from_sigil(sigil), expr)).collect();
                BinopSequence { first, next }.do_precedence(parser)
            }

        rule precedence_continuation(struct_ok: bool, parser: &mut Parser) -> (&'input str, ExpressionKind) =
            op:operator() _ expr:prefix_expr(struct_ok, parser) _ { (op, expr) }

        rule prefix_expr(struct_ok: bool, parser: &mut Parser) -> ExpressionKind =
            prefix:prefix()? expr:extended_expr(struct_ok, parser) {
                if let Some(p) = prefix {
                    let expr = Expression::new(parser.fresh(), expr);
                    let prefix = PrefixOp::from_sigil(p);
                    ExpressionKind::PrefixExp(prefix, Box::new(expr))
                } else {
                    expr
                }
            }

        rule prefix() -> &'input str =
            $(['+' | '-' | '!' ])

        //TODO make the definition of operators more complex
        rule operator() -> &'input str =
            quiet!{!"*/" s:$( ['+' | '-' | '*' | '/' | '%' | '<' | '>' | '=' | '!' | '$' | '&' | '|' | '?' | '^' | '`']+ ) { s } } /
            expected!("operator")

        // A primary expression followed by any number of postfix parts
        // (indexing, accessors, calls), folded left-to-right into nested nodes.
        rule extended_expr(struct_ok: bool, parser: &mut Parser) -> ExpressionKind =
            primary:primary(struct_ok, parser) parts:(extended_expr_part(parser)*) {
                let mut expression = Expression::new(parser.fresh(), primary);
                for part in parts.into_iter() {
                    let kind = match part {
                        ExtendedPart::Index(indexers) => {
                            ExpressionKind::Index { indexee: Box::new(expression), indexers }
                        },
                        ExtendedPart::Accessor(name) => {
                            let name = rc_string(name);
                            ExpressionKind::Access { name, expr: Box::new(expression) }
                        },
                        ExtendedPart::Call(arguments) => {
                            ExpressionKind::Call { f: Box::new(expression), arguments }
                        }
                    };
                    expression = Expression::new(parser.fresh(), kind);
                }
                expression.kind
            }

        rule extended_expr_part(parser: &mut Parser) -> ExtendedPart<'input> =
            indexers:index_part(parser) { ExtendedPart::Index(indexers) } /
            arguments:call_part(parser) { ExtendedPart::Call(arguments) } /
            "." name:identifier() { ExtendedPart::Accessor(name) }

        rule index_part(parser: &mut Parser) -> Vec<Expression> =
            "[" indexers:(expression(parser) ++ ",") "]" { indexers }

        rule call_part(parser: &mut Parser) -> Vec<InvocationArgument> =
            "(" arguments:(invocation_argument(parser) ** ",") ")" { arguments }

        // `_` (ignored), `name = expr` (keyword), or a plain positional argument.
        rule invocation_argument(parser: &mut Parser) -> InvocationArgument =
            _ "_" _ { InvocationArgument::Ignored } /
            _ ident:identifier() _ "=" _ expr:expression(parser) { InvocationArgument::Keyword {
                name: Rc::new(ident.to_string()),
                expr
            } } /
            _ expr:expression(parser) _ { InvocationArgument::Positional(expr) }

        rule primary(struct_ok: bool, parser: &mut Parser) -> ExpressionKind =
            while_expr(parser) / for_expr(parser) / float_literal() / nat_literal() / bool_literal() /
            string_literal() / paren_expr(parser) /
            list_expr(parser) / if_expr(parser) / lambda_expr(parser) /
            item:named_struct(parser) {? if struct_ok { Ok(item) } else { Err("no-struct-allowed") } } /
            identifier_expr(parser)

        // `\(a, b) { ... }` or single-parameter `\a { ... }`.
        rule lambda_expr(parser: &mut Parser) -> ExpressionKind =
            r#"\"# __ "(" _ params:formal_params(parser) _ ")" _ type_anno:(type_anno()?) _ body:block(parser) {
                ExpressionKind::Lambda { params, type_anno, body }
            } /
            r#"\"# param:formal_param(parser) _ type_anno:(type_anno()?) _ body:block(parser) {
                ExpressionKind::Lambda { params: vec![param], type_anno, body }
            }

        rule for_expr(parser: &mut Parser) -> ExpressionKind =
            "for" _ enumerators:for_enumerators(parser) _ body:for_body(parser) {
                ExpressionKind::ForExpression { enumerators, body }
            }

        rule for_enumerators(parser: &mut Parser) -> Vec<Enumerator> =
            "{" _ enumerators:(enumerator(parser) ++ ",") _ "}" { enumerators } /
            enumerator:enumerator(parser) { vec![enumerator] }

        //TODO add guards, etc.
        rule enumerator(parser: &mut Parser) -> Enumerator =
            ident:identifier() _ "<-" _ generator:expression_no_struct(parser) {
                Enumerator { identifier: Rc::new(ident.to_string()), generator, assignment: false }
            } /
            //TODO need to distinguish these two cases in AST
            ident:identifier() _ "=" _ generator:expression_no_struct(parser) {
                Enumerator { identifier: Rc::new(ident.to_string()), generator, assignment: true }
            }

        rule for_body(parser: &mut Parser) -> Box<ForBody> =
            "return" _ expr:expression(parser) { Box::new(ForBody::MonadicReturn(expr)) } /
            body:block(parser) { Box::new(ForBody::StatementBlock(body)) }

        // Condition uses the no-struct expression form to avoid ambiguity with the body's `{`.
        rule while_expr(parser: &mut Parser) -> ExpressionKind =
            "while" _ cond:expression_kind(false, parser)? _ body:block(parser) {
                ExpressionKind::WhileExpression {
                    condition: cond.map(|kind| Box::new(Expression::new(parser.fresh(), kind))),
                    body,
                }
            }

        rule identifier_expr(parser: &mut Parser) -> ExpressionKind =
            qn:qualified_identifier(parser) { ExpressionKind::Value(qn) }

        rule named_struct(parser: &mut Parser) -> ExpressionKind =
            name:qualified_identifier(parser) _ fields:record_block(parser) {
                ExpressionKind::NamedStruct {
                    name,
                    fields: fields.into_iter().map(|(n, exp)| (Rc::new(n.to_string()), exp)).collect(),
                }
            }

        //TODO support anonymous structs and Elm-style update syntax for structs
        rule record_block(parser: &mut Parser) -> Vec<(&'input str, Expression)> =
            "{" _ entries:(record_entry(parser) ** ",") _ "}" { entries }

        rule record_entry(parser: &mut Parser) -> (&'input str, Expression) =
            _ name:identifier() _ ":" _ expr:expression(parser) _ { (name, expr) }

        rule qualified_identifier(parser: &mut Parser) -> QualifiedName =
            names:(identifier() ++ "::") { QualifiedName { id: parser.fresh(), components: names.into_iter().map(|name| Rc::new(name.to_string())).collect() } }

        //TODO improve the definition of identifiers
        // Rejects a reserved word only when it is not a prefix of a longer identifier.
        rule identifier() -> &'input str =
            !(reserved() !(ident_continuation())) text:$(['a'..='z' | 'A'..='Z' | '_'] ident_continuation()*) { text }

        rule ident_continuation() -> &'input str =
            text:$(['a'..='z' | 'A'..='Z' | '0'..='9' | '_'])

        rule reserved() = "if" / "then" / "else" / "is" / "fn" / "for" / "while" / "let" / "in" / "mut" / "return" /
            "break" / "alias" / "type" / "self" / "Self" / "interface" / "impl" / "true" / "false" / "module" / "import"

        rule if_expr(parser: &mut Parser) -> ExpressionKind =
            "if" _ discriminator:(expression(parser)?) _ body:if_expr_body(parser) {
                ExpressionKind::IfExpression {
                    discriminator: discriminator.map(Box::new),
                    body: Box::new(body),
                }
            }

        rule if_expr_body(parser: &mut Parser) -> IfExpressionBody =
            cond_block(parser) / simple_pattern_match(parser) / simple_conditional(parser)

        // `if cond then a [else b]`
        rule simple_conditional(parser: &mut Parser) -> IfExpressionBody =
            "then" _ then_case:expr_or_block(parser) _ else_case:else_case(parser) {
                IfExpressionBody::SimpleConditional { then_case, else_case }
            }

        // `if x is Pat then a [else b]`
        rule simple_pattern_match(parser: &mut Parser) -> IfExpressionBody =
            "is" _ pattern:pattern(parser) _ "then" _ then_case:expr_or_block(parser) _ else_case:else_case(parser) {
                IfExpressionBody::SimplePatternMatch { pattern, then_case, else_case }
            }

        // `if x { is Pat then a; ... ; else b }`
        rule cond_block(parser: &mut Parser) -> IfExpressionBody =
            "{" __ cond_arms:(cond_arm(parser) ++ (delimiter()+)) __ "}" { IfExpressionBody::CondList(cond_arms) }

        rule cond_arm(parser: &mut Parser) -> ConditionArm =
            _ "else" _ body:expr_or_block(parser) { ConditionArm { condition: Condition::Else, guard: None, body } } /
            _ condition:condition(parser) _ guard:condition_guard(parser) _ "then" _ body:expr_or_block(parser)
                { ConditionArm { condition, guard, body } }

        rule condition(parser: &mut Parser) -> Condition =
            "is" _ pat:pattern(parser) { Condition::Pattern(pat) } /
            op:operator() _ expr:expression(parser) { Condition::TruncatedOp(BinOp::from_sigil(op), expr) }

        rule condition_guard(parser: &mut Parser) -> Option<Expression> =
            ("if" _ expr:expression(parser) { expr } )?

        // A bare expression is wrapped in a one-statement block.
        rule expr_or_block(parser: &mut Parser) -> Block = block(parser) / pos:position!() ex:expression(parser) {
            Statement {
                id: parser.fresh() , location: pos.into(),
                kind: StatementKind::Expression(ex)
            }.into()
        }

        rule else_case(parser: &mut Parser) -> Option<Block> =
            ("else" _ eorb:expr_or_block(parser) { eorb })?

        // --- Patterns ---
        rule pattern(parser: &mut Parser) -> Pattern =
            "(" _ variants:(pattern(parser) ++ ",") _ ")" { Pattern::TuplePattern(variants) } /
            _ pat:simple_pattern(parser) { pat }

        // Literal, tuple-struct, record, or bare name (in decreasing specificity).
        rule simple_pattern(parser: &mut Parser) -> Pattern =
            pattern_literal() /
            qn:qualified_identifier(parser) "(" members:(pattern(parser) ** ",") ")" {
                Pattern::TupleStruct(qn, members)
            } /
            qn:qualified_identifier(parser) _ "{" _ items:(record_pattern_entry(parser) ** ",") "}" _ {
                let items = items.into_iter().map(|(name, pat)| (Rc::new(name.to_string()), pat)).collect();
                Pattern::Record(qn, items)
            } /
            qn:qualified_identifier(parser) { Pattern::VarOrName(qn) }

        // `name: pat`, or shorthand `name` (binds the field to a same-named variable).
        rule record_pattern_entry(parser: &mut Parser) -> (&'input str, Pattern) =
            _ name:identifier() _ ":" _ pat:pattern(parser) _ { (name, pat) } /
            _ name:identifier() _ {
                let qn = QualifiedName {
                    id: parser.fresh(),
                    components: vec![Rc::new(name.to_string())],
                };
                (name, Pattern::VarOrName(qn))
            }

        rule pattern_literal() -> Pattern =
            "true" { Pattern::Literal(PatternLiteral::BoolPattern(true)) } /
            "false" { Pattern::Literal(PatternLiteral::BoolPattern(false)) } /
            s:bare_string_literal() { Pattern::Literal(PatternLiteral::StringPattern(Rc::new(s))) } /
            sign:("-"?) num:(float_literal() / nat_literal()) {
                let neg = sign.is_some();
                Pattern::Literal(PatternLiteral::NumPattern { neg, num })
            } /
            "_" { Pattern::Ignored }

        // --- Literals ---
        rule list_expr(parser: &mut Parser) -> ExpressionKind =
            "[" exprs:(expression(parser) ** ",") "]" {
                let mut exprs = exprs;
                ExpressionKind::ListLiteral(exprs)
            }

        // A single parenthesized expression is grouping; more (or zero) is a tuple.
        rule paren_expr(parser: &mut Parser) -> ExpressionKind =
            "(" exprs:(expression(parser) ** ",") ")" {
                let mut exprs = exprs;
                match exprs.len() {
                    1 => exprs.pop().unwrap().kind,
                    _ => ExpressionKind::TupleLiteral(exprs),
                }
            }

        // Optional identifier prefix, e.g. `b"bytes"`-style custom prefixes.
        rule string_literal() -> ExpressionKind =
            prefix:identifier()? s:bare_string_literal(){ ExpressionKind::StringLiteral{ s: Rc::new(s),
                prefix: prefix.map(rc_string)
            } }

        rule bare_string_literal() -> String =
            "\"" chars:string_component()* "\"" { chars.into_iter().collect::<String>() }

        // Plain char, `\u{XXXX}` unicode escape, or one of the simple escapes.
        rule string_component() -> char =
            !(r#"""# / r#"\"#) ch:$([_]) { ch.chars().next().unwrap() } /
            r#"\u{"# value:$(['0'..='9' | 'a'..='f' | 'A'..='F']+) "}" { char::from_u32(u32::from_str_radix(value, 16).unwrap()).unwrap() } /
            r#"\n"# { '\n' } / r#"\t"# { '\t' } / r#"\""# { '"' } / r#"\\"# { '\\' } /
            expected!("Valid escape sequence")

        rule bool_literal() -> ExpressionKind =
            "true" { ExpressionKind::BoolLiteral(true) } / "false" { ExpressionKind::BoolLiteral(false) }

        rule nat_literal() -> ExpressionKind =
            bin_literal() / hex_literal() / unmarked_literal()

        // Underscores are digit-group separators and are stripped before parsing.
        rule unmarked_literal() -> ExpressionKind =
            digits:digits() { let n = digits.chars().filter(|ch| *ch != '_').collect::<String>().parse().unwrap(); ExpressionKind::NatLiteral(n) }

        rule bin_literal() -> ExpressionKind =
            "0b" digits:bin_digits() {? parse_binary(digits).map(ExpressionKind::NatLiteral) }

        rule hex_literal() -> ExpressionKind =
            "0x" digits:hex_digits() {? parse_hex(digits).map(ExpressionKind::NatLiteral) }

        rule float_literal() -> ExpressionKind =
            ds:$( digits() "." digits()? / "." digits() ) { ExpressionKind::FloatLiteral(ds.parse().unwrap()) }

        rule digits() -> &'input str = $((digit_group() "_"*)+)
        rule bin_digits() -> &'input str = $((bin_digit_group() "_"*)+)
        rule hex_digits() -> &'input str = $((hex_digit_group() "_"*)+)

        rule digit_group() -> &'input str = $(['0'..='9']+)
        rule bin_digit_group() -> &'input str = $(['0' | '1']+)
        rule hex_digit_group() -> &'input str = $(['0'..='9' | 'a'..='f' | 'A'..='F']+)
    }
}
/// Parses a string of binary digits (underscores allowed as group separators)
/// into a `u64`, reporting `Err` if the value exceeds 64 bits.
///
/// Accumulates most-significant-digit first with checked arithmetic. The
/// previous right-to-left implementation advanced its place-value multiplier
/// one step *past* the final digit, so any 64-digit literal (e.g.
/// `0b1000…0` == 2^63) was spuriously rejected as an overflow even though it
/// fits in a `u64`.
fn parse_binary(digits: &str) -> Result<u64, &'static str> {
    let mut result: u64 = 0;
    for d in digits.chars() {
        let bit: u64 = match d {
            '0' => 0,
            '1' => 1,
            // Digit-group separator; contributes nothing.
            '_' => continue,
            // The tokenizer/grammar only ever feeds [01_] here.
            _ => unreachable!(),
        };
        // result = result * 2 + bit, failing cleanly on 65+ significant bits.
        result = result
            .checked_mul(2)
            .and_then(|shifted| shifted.checked_add(bit))
            .ok_or("Binary expression will overflow")?;
    }
    Ok(result)
}
/// Parses a string of hexadecimal digits (underscores allowed as group
/// separators) into a `u64`, reporting `Err` if the value exceeds 64 bits.
///
/// Accumulates most-significant-digit first with checked arithmetic. The
/// previous right-to-left implementation advanced its place-value multiplier
/// one step *past* the final digit, so any 16-digit literal (e.g.
/// `0xFFFFFFFFFFFFFFFF` == `u64::MAX`) was spuriously rejected as an overflow
/// even though it fits in a `u64`.
fn parse_hex(digits: &str) -> Result<u64, &'static str> {
    let mut result: u64 = 0;
    for d in digits.chars() {
        // Digit-group separator; contributes nothing.
        if d == '_' {
            continue;
        }
        // The grammar should only pass valid hex digits; keep the defensive error.
        let value = match d.to_digit(16) {
            Some(n) => n as u64,
            None => return Err("Internal parser error: invalid hex digit"),
        };
        // result = result * 16 + value, failing cleanly past 64 bits.
        result = result
            .checked_mul(16)
            .and_then(|shifted| shifted.checked_add(value))
            .ok_or("Hexadecimal expression will overflow")?;
    }
    Ok(result)
}
/// A flat, as-parsed sequence of binary operations: the first operand followed
/// by `(operator, operand)` pairs in source order. Operator precedence is not
/// applied here — `do_precedence` restructures this into a nested tree.
#[derive(Debug)]
struct BinopSequence {
    /// Leftmost operand of the chain.
    first: ExpressionKind,
    /// Remaining `(operator, right-operand)` pairs, left to right.
    next: Vec<(BinOp, ExpressionKind)>,
}
impl BinopSequence {
    /// Folds the flat operator/operand sequence into a properly nested
    /// `ExpressionKind::BinExp` tree via precedence climbing.
    ///
    /// `parser` is used only to mint fresh node ids (`parser.fresh()`) for the
    /// intermediate `Expression` nodes created along the way.
    fn do_precedence(self, parser: &mut Parser) -> ExpressionKind {
        // Recursive worker: consumes from `rest` (a stack, top = next pair in
        // source order) every operator that binds strictly tighter than
        // `precedence`, folding results into `lhs`.
        fn helper(
            precedence: i32,
            lhs: ExpressionKind,
            rest: &mut Vec<(BinOp, ExpressionKind)>,
            parser: &mut Parser,
        ) -> Expression {
            let mut lhs = Expression::new(parser.fresh(), lhs);
            while let Some((next_op, next_rhs)) = rest.pop() {
                let new_precedence = next_op.get_precedence();
                if precedence >= new_precedence {
                    // Operator binds no tighter than the current context:
                    // push it back and let an enclosing call consume it.
                    // (>= makes equal-precedence operators left-associative.)
                    rest.push((next_op, next_rhs));
                    break;
                }
                // Operator binds tighter: recursively build its full right-hand
                // side, then fold it into lhs.
                let rhs = helper(new_precedence, next_rhs, rest, parser);
                lhs = Expression::new(
                    parser.fresh(),
                    ExpressionKind::BinExp(next_op, Box::new(lhs), Box::new(rhs)),
                );
            }
            lhs
        }
        // Reverse so pop() yields the pairs in original source order.
        let mut as_stack = self.next.into_iter().rev().collect();
        helper(BinOp::min_precedence(), self.first, &mut as_stack, parser).kind
    }
}

View File

@ -3,5 +3,8 @@ extern crate includedir_codegen;
use includedir_codegen::Compression; use includedir_codegen::Compression;
fn main() { fn main() {
includedir_codegen::start("WEBFILES").dir("../static", Compression::Gzip).build("static.rs").unwrap(); includedir_codegen::start("WEBFILES")
.dir("../static", Compression::Gzip)
.build("static.rs")
.unwrap();
} }

View File

@ -1,10 +1,8 @@
use crate::directive_actions::DirectiveAction;
use crate::language::ProgrammingLanguageInterface;
use crate::{InterpreterDirectiveOutput, Repl};
use colored::*; use colored::*;
use crate::{
directive_actions::DirectiveAction, language::ProgrammingLanguageInterface, InterpreterDirectiveOutput,
Repl,
};
/// A CommandTree is either a `Terminal` or a `NonTerminal`. When command parsing reaches the first /// A CommandTree is either a `Terminal` or a `NonTerminal`. When command parsing reaches the first
/// Terminal, it will use the `DirectiveAction` found there to find an appropriate function to execute, /// Terminal, it will use the `DirectiveAction` found there to find an appropriate function to execute,
/// and then execute it with any remaining arguments /// and then execute it with any remaining arguments
@ -41,7 +39,12 @@ impl CommandTree {
children: Vec<CommandTree>, children: Vec<CommandTree>,
action: DirectiveAction, action: DirectiveAction,
) -> CommandTree { ) -> CommandTree {
CommandTree::Terminal { name: s.to_string(), help_msg: help.map(|x| x.to_string()), children, action } CommandTree::Terminal {
name: s.to_string(),
help_msg: help.map(|x| x.to_string()),
children,
action,
}
} }
pub fn nonterm(s: &str, help: Option<&str>, children: Vec<CommandTree>) -> CommandTree { pub fn nonterm(s: &str, help: Option<&str>, children: Vec<CommandTree>) -> CommandTree {
@ -62,10 +65,14 @@ impl CommandTree {
} }
pub fn get_help(&self) -> &str { pub fn get_help(&self) -> &str {
match self { match self {
CommandTree::Terminal { help_msg, .. } => CommandTree::Terminal { help_msg, .. } => help_msg
help_msg.as_ref().map(|s| s.as_str()).unwrap_or("<no help text provided>"), .as_ref()
CommandTree::NonTerminal { help_msg, .. } => .map(|s| s.as_str())
help_msg.as_ref().map(|s| s.as_str()).unwrap_or("<no help text provided>"), .unwrap_or("<no help text provided>"),
CommandTree::NonTerminal { help_msg, .. } => help_msg
.as_ref()
.map(|s| s.as_str())
.unwrap_or("<no help text provided>"),
CommandTree::Top(_) => "", CommandTree::Top(_) => "",
} }
} }
@ -89,7 +96,11 @@ impl CommandTree {
let res: Result<(DirectiveAction, usize), String> = loop { let res: Result<(DirectiveAction, usize), String> = loop {
match dir_pointer { match dir_pointer {
CommandTree::Top(subcommands) | CommandTree::NonTerminal { children: subcommands, .. } => { CommandTree::Top(subcommands)
| CommandTree::NonTerminal {
children: subcommands,
..
} => {
let next_command = match arguments.get(idx) { let next_command = match arguments.get(idx) {
Some(cmd) => cmd, Some(cmd) => cmd,
None => break Err("Command requires arguments".to_owned()), None => break Err("Command requires arguments".to_owned()),

View File

@ -1,10 +1,9 @@
use std::fmt::Write as FmtWrite; use crate::help::help;
use crate::language::{
use crate::{ LangMetaRequest, LangMetaResponse, ProgrammingLanguageInterface,
help::help,
language::{LangMetaRequest, LangMetaResponse, ProgrammingLanguageInterface},
InterpreterDirectiveOutput, Repl,
}; };
use crate::{InterpreterDirectiveOutput, Repl};
use std::fmt::Write as FmtWrite;
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum DirectiveAction { pub enum DirectiveAction {
@ -32,7 +31,10 @@ impl DirectiveAction {
::std::process::exit(0) ::std::process::exit(0)
} }
ListPasses => { ListPasses => {
let pass_names = match repl.language_state.request_meta(LangMetaRequest::StageNames) { let pass_names = match repl
.language_state
.request_meta(LangMetaRequest::StageNames)
{
LangMetaResponse::StageNames(names) => names, LangMetaResponse::StageNames(names) => names,
_ => vec![], _ => vec![],
}; };

View File

@ -1,4 +1,5 @@
use crate::{command_tree::CommandTree, directive_actions::DirectiveAction}; use crate::command_tree::CommandTree;
use crate::directive_actions::DirectiveAction;
pub fn directives_from_pass_names(pass_names: &[String]) -> CommandTree { pub fn directives_from_pass_names(pass_names: &[String]) -> CommandTree {
let passes_directives: Vec<CommandTree> = pass_names let passes_directives: Vec<CommandTree> = pass_names
@ -32,7 +33,11 @@ fn get_list(passes_directives: &[CommandTree], include_help: bool) -> Vec<Comman
CommandTree::terminal( CommandTree::terminal(
"help", "help",
Some("Print this help message"), Some("Print this help message"),
if include_help { get_list(passes_directives, false) } else { vec![] }, if include_help {
get_list(passes_directives, false)
} else {
vec![]
},
Help, Help,
), ),
CommandTree::nonterm( CommandTree::nonterm(
@ -63,6 +68,11 @@ fn get_list(passes_directives: &[CommandTree], include_help: bool) -> Vec<Comman
), ),
], ],
), ),
CommandTree::terminal("doc", Some("Get language-specific help for an item"), vec![], Doc), CommandTree::terminal(
"doc",
Some("Get language-specific help for an item"),
vec![],
Doc,
),
] ]
} }

View File

@ -1,11 +1,10 @@
use std::fmt::Write as FmtWrite; use std::fmt::Write as FmtWrite;
use crate::command_tree::CommandTree;
use crate::language::ProgrammingLanguageInterface;
use crate::{InterpreterDirectiveOutput, Repl};
use colored::*; use colored::*;
use crate::{
command_tree::CommandTree, language::ProgrammingLanguageInterface, InterpreterDirectiveOutput, Repl,
};
pub fn help<L: ProgrammingLanguageInterface>( pub fn help<L: ProgrammingLanguageInterface>(
repl: &mut Repl<L>, repl: &mut Repl<L>,
arguments: &[&str], arguments: &[&str],
@ -22,7 +21,8 @@ pub fn help<L: ProgrammingLanguageInterface>(
let children = dir.get_children(); let children = dir.get_children();
writeln!(buf, "`{}` - {}", cmd, dir.get_help()).unwrap(); writeln!(buf, "`{}` - {}", cmd, dir.get_help()).unwrap();
for sub in children.iter() { for sub in children.iter() {
writeln!(buf, "\t`{} {}` - {}", cmd, sub.get_cmd(), sub.get_help()).unwrap(); writeln!(buf, "\t`{} {}` - {}", cmd, sub.get_cmd(), sub.get_help())
.unwrap();
} }
buf buf
} }
@ -31,11 +31,16 @@ pub fn help<L: ProgrammingLanguageInterface>(
} }
} }
fn get_directive_from_commands<'a>(commands: &[&str], dirs: &'a CommandTree) -> Option<&'a CommandTree> { fn get_directive_from_commands<'a>(
commands: &[&str],
dirs: &'a CommandTree,
) -> Option<&'a CommandTree> {
let mut directive_list = dirs.get_children(); let mut directive_list = dirs.get_children();
let mut matched_directive = None; let mut matched_directive = None;
for cmd in commands { for cmd in commands {
let found = directive_list.iter().find(|directive| directive.get_cmd() == *cmd); let found = directive_list
.iter()
.find(|directive| directive.get_cmd() == *cmd);
if let Some(dir) = found { if let Some(dir) = found {
directive_list = dir.get_children(); directive_list = dir.get_children();
} }
@ -48,16 +53,33 @@ fn get_directive_from_commands<'a>(commands: &[&str], dirs: &'a CommandTree) ->
fn global_help<L: ProgrammingLanguageInterface>(repl: &mut Repl<L>) -> InterpreterDirectiveOutput { fn global_help<L: ProgrammingLanguageInterface>(repl: &mut Repl<L>) -> InterpreterDirectiveOutput {
let mut buf = String::new(); let mut buf = String::new();
writeln!(buf, "{} version {}", "Schala REPL".bright_red().bold(), crate::VERSION_STRING).unwrap(); writeln!(
buf,
"{} version {}",
"Schala REPL".bright_red().bold(),
crate::VERSION_STRING
)
.unwrap();
writeln!(buf, "-----------------------").unwrap(); writeln!(buf, "-----------------------").unwrap();
for directive in repl.get_directives().get_children() { for directive in repl.get_directives().get_children() {
writeln!(buf, "{}{} - {}", repl.sigil, directive.get_cmd(), directive.get_help()).unwrap(); writeln!(
buf,
"{}{} - {}",
repl.sigil,
directive.get_cmd(),
directive.get_help()
)
.unwrap();
} }
writeln!(buf).unwrap(); writeln!(buf).unwrap();
writeln!(buf, "Language-specific help for {}", <L as ProgrammingLanguageInterface>::language_name()) writeln!(
.unwrap(); buf,
"Language-specific help for {}",
<L as ProgrammingLanguageInterface>::language_name()
)
.unwrap();
writeln!(buf, "-----------------------").unwrap(); writeln!(buf, "-----------------------").unwrap();
Some(buf) Some(buf)
} }

View File

@ -1,4 +1,5 @@
use std::{collections::HashSet, time}; use std::collections::HashSet;
use std::time;
pub trait ProgrammingLanguageInterface { pub trait ProgrammingLanguageInterface {
type Config: Default + Clone; type Config: Default + Clone;
@ -8,7 +9,10 @@ pub trait ProgrammingLanguageInterface {
fn run_computation(&mut self, _request: ComputationRequest<Self::Config>) -> ComputationResponse; fn run_computation(&mut self, _request: ComputationRequest<Self::Config>) -> ComputationResponse;
fn request_meta(&mut self, _request: LangMetaRequest) -> LangMetaResponse { fn request_meta(&mut self, _request: LangMetaRequest) -> LangMetaResponse {
LangMetaResponse::Custom { kind: "not-implemented".to_owned(), value: format!("") } LangMetaResponse::Custom {
kind: "not-implemented".to_owned(),
value: format!(""),
}
} }
} }
@ -33,7 +37,10 @@ pub struct GlobalOutputStats {
#[derive(Debug, Clone, Hash, Eq, PartialEq, Deserialize, Serialize)] #[derive(Debug, Clone, Hash, Eq, PartialEq, Deserialize, Serialize)]
pub enum DebugAsk { pub enum DebugAsk {
Timing, Timing,
ByStage { stage_name: String, token: Option<String> }, ByStage {
stage_name: String,
token: Option<String>,
},
} }
pub struct DebugResponse { pub struct DebugResponse {

View File

@ -1,4 +1,5 @@
#![feature(box_patterns, proc_macro_hygiene, decl_macro, iter_intersperse)] #![feature(box_patterns, box_syntax, proc_macro_hygiene, decl_macro, iter_intersperse)]
#![feature(plugin)]
#[macro_use] #[macro_use]
extern crate serde_derive; extern crate serde_derive;
@ -16,14 +17,16 @@ mod directives;
use directives::directives_from_pass_names; use directives::directives_from_pass_names;
mod help; mod help;
mod response; mod response;
use std::{collections::HashSet, sync::Arc}; use response::ReplResponse;
use colored::*; use colored::*;
use std::collections::HashSet;
use std::sync::Arc;
pub use language::{ pub use language::{
ComputationRequest, ComputationResponse, DebugAsk, DebugResponse, GlobalOutputStats, LangMetaRequest, ComputationRequest, ComputationResponse, DebugAsk, DebugResponse, GlobalOutputStats,
LangMetaResponse, ProgrammingLanguageInterface, LangMetaRequest, LangMetaResponse, ProgrammingLanguageInterface,
}; };
use response::ReplResponse;
include!(concat!(env!("OUT_DIR"), "/static.rs")); include!(concat!(env!("OUT_DIR"), "/static.rs"));
const VERSION_STRING: &str = "0.1.0"; const VERSION_STRING: &str = "0.1.0";
@ -55,12 +58,20 @@ impl<L: ProgrammingLanguageInterface> Repl<L> {
let line_reader = Interface::new("schala-repl").unwrap(); let line_reader = Interface::new("schala-repl").unwrap();
let sigil = ':'; let sigil = ':';
Repl { sigil, line_reader, language_state: initial_state, options: ReplOptions::new() } Repl {
sigil,
line_reader,
language_state: initial_state,
options: ReplOptions::new(),
}
} }
pub fn run_repl(&mut self, config: L::Config) { pub fn run_repl(&mut self, config: L::Config) {
println!("Schala meta-interpeter version {}", VERSION_STRING); println!("Schala meta-interpeter version {}", VERSION_STRING);
println!("Type {} for help with the REPL", format!("{}help", self.sigil).bright_green().bold()); println!(
"Type {} for help with the REPL",
format!("{}help", self.sigil).bright_green().bold()
);
self.load_options(); self.load_options();
self.handle_repl_loop(config); self.handle_repl_loop(config);
self.save_before_exit(); self.save_before_exit();
@ -68,7 +79,9 @@ impl<L: ProgrammingLanguageInterface> Repl<L> {
} }
fn load_options(&mut self) { fn load_options(&mut self) {
self.line_reader.load_history(HISTORY_SAVE_FILE).unwrap_or(()); self.line_reader
.load_history(HISTORY_SAVE_FILE)
.unwrap_or(());
match ReplOptions::load_from_file(OPTIONS_SAVE_FILE) { match ReplOptions::load_from_file(OPTIONS_SAVE_FILE) {
Ok(options) => { Ok(options) => {
self.options = options; self.options = options;
@ -100,7 +113,7 @@ impl<L: ProgrammingLanguageInterface> Repl<L> {
self.line_reader.add_history_unique(input.to_string()); self.line_reader.add_history_unique(input.to_string());
let mut chars = input.chars().peekable(); let mut chars = input.chars().peekable();
let repl_responses = match chars.next() { let repl_responses = match chars.next() {
Some(ch) if ch == self.sigil => Some(ch) if ch == self.sigil => {
if chars.peek() == Some(&'{') { if chars.peek() == Some(&'{') {
let mut buf = String::new(); let mut buf = String::new();
buf.push_str(input.get(2..).unwrap()); buf.push_str(input.get(2..).unwrap());
@ -117,11 +130,12 @@ impl<L: ProgrammingLanguageInterface> Repl<L> {
} }
self.handle_input(&buf, &config) self.handle_input(&buf, &config)
} else { } else {
if let Some(output) = self.handle_interpreter_directive(input.get(1..).unwrap()) { if let Some(output) = self.handle_interpreter_directive(input.get(1..).unwrap()) {
println!("{}", output); println!("{}", output);
} }
continue; continue;
}, }
}
_ => self.handle_input(input, &config), _ => self.handle_input(input, &config),
}; };
@ -133,7 +147,8 @@ impl<L: ProgrammingLanguageInterface> Repl<L> {
fn update_line_reader(&mut self) { fn update_line_reader(&mut self) {
let tab_complete_handler = TabCompleteHandler::new(self.sigil, self.get_directives()); let tab_complete_handler = TabCompleteHandler::new(self.sigil, self.get_directives());
self.line_reader.set_completer(Arc::new(tab_complete_handler)); //TODO fix this here self.line_reader
.set_completer(Arc::new(tab_complete_handler)); //TODO fix this here
self.set_prompt(PromptStyle::Normal); self.set_prompt(PromptStyle::Normal);
} }
@ -147,7 +162,9 @@ impl<L: ProgrammingLanguageInterface> Repl<L> {
} }
fn save_before_exit(&self) { fn save_before_exit(&self) {
self.line_reader.save_history(HISTORY_SAVE_FILE).unwrap_or(()); self.line_reader
.save_history(HISTORY_SAVE_FILE)
.unwrap_or(());
self.options.save_to_file(OPTIONS_SAVE_FILE); self.options.save_to_file(OPTIONS_SAVE_FILE);
} }
@ -168,13 +185,20 @@ impl<L: ProgrammingLanguageInterface> Repl<L> {
debug_requests.insert(ask.clone()); debug_requests.insert(ask.clone());
} }
let request = ComputationRequest { source: input, config: config.clone(), debug_requests }; let request = ComputationRequest {
source: input,
config: config.clone(),
debug_requests,
};
let response = self.language_state.run_computation(request); let response = self.language_state.run_computation(request);
response::handle_computation_response(response, &self.options) response::handle_computation_response(response, &self.options)
} }
fn get_directives(&mut self) -> CommandTree { fn get_directives(&mut self) -> CommandTree {
let pass_names = match self.language_state.request_meta(LangMetaRequest::StageNames) { let pass_names = match self
.language_state
.request_meta(LangMetaRequest::StageNames)
{
LangMetaResponse::StageNames(names) => names, LangMetaResponse::StageNames(names) => names,
_ => vec![], _ => vec![],
}; };
@ -188,14 +212,15 @@ struct TabCompleteHandler {
top_level_commands: CommandTree, top_level_commands: CommandTree,
} }
use linefeed::{ use linefeed::complete::{Completer, Completion};
complete::{Completer, Completion}, use linefeed::terminal::Terminal;
terminal::Terminal,
};
impl TabCompleteHandler { impl TabCompleteHandler {
fn new(sigil: char, top_level_commands: CommandTree) -> TabCompleteHandler { fn new(sigil: char, top_level_commands: CommandTree) -> TabCompleteHandler {
TabCompleteHandler { top_level_commands, sigil } TabCompleteHandler {
top_level_commands,
sigil,
}
} }
} }
@ -222,7 +247,11 @@ impl<T: Terminal> Completer<T> for TabCompleteHandler {
None => { None => {
let top = matches!(command_tree, Some(CommandTree::Top(_))); let top = matches!(command_tree, Some(CommandTree::Top(_)));
let word = if top { word.get(1..).unwrap() } else { word }; let word = if top { word.get(1..).unwrap() } else { word };
for cmd in command_tree.map(|x| x.get_subcommands()).unwrap_or_default().into_iter() { for cmd in command_tree
.map(|x| x.get_subcommands())
.unwrap_or_default()
.into_iter()
{
if cmd.starts_with(word) { if cmd.starts_with(word) {
completions.push(Completion { completions.push(Completion {
completion: format!("{}{}", if top { ":" } else { "" }, cmd), completion: format!("{}{}", if top { ":" } else { "" }, cmd),
@ -236,9 +265,12 @@ impl<T: Terminal> Completer<T> for TabCompleteHandler {
Some(s) => { Some(s) => {
let new_ptr: Option<&CommandTree> = command_tree.and_then(|cm| match cm { let new_ptr: Option<&CommandTree> = command_tree.and_then(|cm| match cm {
CommandTree::Top(children) => children.iter().find(|c| c.get_cmd() == s), CommandTree::Top(children) => children.iter().find(|c| c.get_cmd() == s),
CommandTree::NonTerminal { children, .. } => CommandTree::NonTerminal { children, .. } => {
children.iter().find(|c| c.get_cmd() == s), children.iter().find(|c| c.get_cmd() == s)
CommandTree::Terminal { children, .. } => children.iter().find(|c| c.get_cmd() == s), }
CommandTree::Terminal { children, .. } => {
children.iter().find(|c| c.get_cmd() == s)
}
}); });
command_tree = new_ptr; command_tree = new_ptr;
} }

View File

@ -1,11 +1,9 @@
use std::{
collections::HashSet,
fs::File,
io::{self, Read, Write},
};
use crate::language::DebugAsk; use crate::language::DebugAsk;
use std::collections::HashSet;
use std::fs::File;
use std::io::{self, Read, Write};
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct ReplOptions { pub struct ReplOptions {
pub debug_asks: HashSet<DebugAsk>, pub debug_asks: HashSet<DebugAsk>,
@ -15,7 +13,11 @@ pub struct ReplOptions {
impl ReplOptions { impl ReplOptions {
pub fn new() -> ReplOptions { pub fn new() -> ReplOptions {
ReplOptions { debug_asks: HashSet::new(), show_total_time: true, show_stage_times: false } ReplOptions {
debug_asks: HashSet::new(),
show_total_time: true,
show_stage_times: false,
}
} }
pub fn save_to_file(&self, filename: &str) { pub fn save_to_file(&self, filename: &str) {

View File

@ -1,11 +1,9 @@
use std::{fmt, fmt::Write};
use colored::*; use colored::*;
use std::fmt;
use std::fmt::Write;
use crate::{ use crate::language::{ComputationResponse, DebugAsk};
language::{ComputationResponse, DebugAsk}, use crate::ReplOptions;
ReplOptions,
};
pub struct ReplResponse { pub struct ReplResponse {
label: Option<String>, label: Option<String>,
@ -66,8 +64,16 @@ pub fn handle_computation_response(
} }
responses.push(match response.main_output { responses.push(match response.main_output {
Ok(s) => ReplResponse { label: None, text: s, color: None }, Ok(s) => ReplResponse {
Err(e) => ReplResponse { label: Some("Error".to_string()), text: e, color: Some(Color::Red) }, label: None,
text: s,
color: None,
},
Err(e) => ReplResponse {
label: Some("Error".to_string()),
text: e,
color: Some(Color::Red),
},
}); });
responses responses

View File

@ -77,7 +77,7 @@ x is Some(t) // type bool
if x { if x {
is Some(t) => { is Some(t) => {
} },
is None => { is None => {
} }

View File

@ -1,15 +1,17 @@
use std::{collections::HashSet, fs::File, io::Read, path::PathBuf, process::exit}; use schala_repl::{Repl, ProgrammingLanguageInterface, ComputationRequest};
use std::{fs::File, io::Read, path::PathBuf, process::exit, collections::HashSet};
use schala_lang::{Schala, SchalaConfig}; use schala_lang::{Schala, SchalaConfig};
use schala_repl::{ComputationRequest, ProgrammingLanguageInterface, Repl};
//TODO specify multiple langs, and have a way to switch between them //TODO specify multiple langs, and have a way to switch between them
fn main() { fn main() {
let args: Vec<String> = std::env::args().collect(); let args: Vec<String> = std::env::args().collect();
let matches = command_line_options().parse(&args[1..]).unwrap_or_else(|e| { let matches = command_line_options()
eprintln!("Error parsing options: {}", e); .parse(&args[1..])
exit(1); .unwrap_or_else(|e| {
}); eprintln!("Error parsing options: {}", e);
exit(1);
});
if matches.opt_present("help") { if matches.opt_present("help") {
println!("{}", command_line_options().usage("Schala metainterpreter")); println!("{}", command_line_options().usage("Schala metainterpreter"));
@ -25,28 +27,27 @@ fn main() {
let paths: Vec<PathBuf> = matches.free.iter().map(PathBuf::from).collect(); let paths: Vec<PathBuf> = matches.free.iter().map(PathBuf::from).collect();
//TODO handle more than one file //TODO handle more than one file
let filename = &paths[0]; let filename = &paths[0];
let extension = filename.extension().and_then(|e| e.to_str()).unwrap_or_else(|| { let extension = filename.extension().and_then(|e| e.to_str())
.unwrap_or_else(|| {
eprintln!("Source file `{}` has no extension.", filename.display()); eprintln!("Source file `{}` has no extension.", filename.display());
exit(1); exit(1);
}); });
//TODO this proably should be a macro for every supported language //TODO this proably should be a macro for every supported language
if extension == Schala::source_file_suffix() { if extension == Schala::source_file_suffix() {
let config = SchalaConfig { repl: false }; let config = SchalaConfig {
repl: false,
};
run_noninteractive(paths, Schala::new(), config); run_noninteractive(paths, Schala::new(), config);
} else { } else {
eprintln!("Extension .{} not recognized", extension); eprintln!("Extension .{} not recognized", extension);
exit(1); exit(1);
} }
} }
} }
pub fn run_noninteractive<L: ProgrammingLanguageInterface>( pub fn run_noninteractive<L: ProgrammingLanguageInterface>(filenames: Vec<PathBuf>, mut language: L, config: L::Config) {
filenames: Vec<PathBuf>,
mut language: L,
config: L::Config,
) {
// for now, ony do something with the first filename // for now, ony do something with the first filename
let filename = &filenames[0]; let filename = &filenames[0];
@ -54,7 +55,11 @@ pub fn run_noninteractive<L: ProgrammingLanguageInterface>(
let mut buffer = String::new(); let mut buffer = String::new();
source_file.read_to_string(&mut buffer).unwrap(); source_file.read_to_string(&mut buffer).unwrap();
let request = ComputationRequest { source: &buffer, config, debug_requests: HashSet::new() }; let request = ComputationRequest {
source: &buffer,
config,
debug_requests: HashSet::new(),
};
let response = language.run_computation(request); let response = language.run_computation(request);
match response.main_output { match response.main_output {