Compare commits: a69f9f633e...i-love-pro (858 commits)
| SHA1 | Author | Date | |
|---|---|---|---|
| 7a088d6739 | |||
| 626baefd76 | |||
| 4602d02720 | |||
| 7fbd4ea9f8 | |||
| 6fd1e1962b | |||
| 62c338e382 | |||
| 40ea9ec637 | |||
| 787e194d41 | |||
| 71162e2db9 | |||
| 43debc65e4 | |||
| b07ea85b70 | |||
| 06e8b8e022 | |||
| 647f47a5f3 | |||
| 36e4feb668 | |||
| 11be991946 | |||
| a8c2b1d05a | |||
| fb46142e9d | |||
| 0b33d03b73 | |||
| 804147caef | |||
| d847d20666 | |||
| 07408d01a9 | |||
| 816a473913 | |||
| ce8f8fb872 | |||
| 2f60004241 | |||
| 7130c6bd11 | |||
| c5aacc060a | |||
| 6048dc0b9c | |||
| 1f01c3caff | |||
| bca44343eb | |||
| 3b9c2edcdd | |||
| fa180ee24e | |||
| 5846dd5d04 | |||
| f6b347eb05 | |||
| c1b27a13ae | |||
| 147658ee89 | |||
| 4017c52fee | |||
| 65d290556f | |||
| 854dccd4d2 | |||
| 07859cd1af | |||
| 19be7eb1f5 | |||
| 5de2ae1203 | |||
| 595a1ad99b | |||
| 32ccfc76ae | |||
| ae0d54e567 | |||
| b774f0e088 | |||
| e294bcb2d0 | |||
| 1322e0249c | |||
| e40a05633c | |||
| e5fb0a2929 | |||
| 3bd1f0c8a0 | |||
| 948759b7d4 | |||
| f2e424944e | |||
| 8a471c6b45 | |||
| 9b93582d18 | |||
| 9fc2d16fb8 | |||
| f00c69f02c | |||
| 4fc1191d13 | |||
| 951aafc90a | |||
| ee13409b33 | |||
| 615aeb72da | |||
| 3be67ca4c8 | |||
| b216581f2b | |||
| 1060198b01 | |||
| 0485917a20 | |||
| 37dd9ad6d4 | |||
| 77dade1d1d | |||
| d140422225 | |||
| 2defe99c73 | |||
| 0766211d79 | |||
| b854eade3c | |||
| c6f34f8eaa | |||
| 0ba5799c75 | |||
| 324fa948e6 | |||
| 3e46ff8be6 | |||
| 18eb1da5ba | |||
| dde7df4604 | |||
| e4101f1396 | |||
| d94ceeab2e | |||
| 697f083237 | |||
| 3e97fdcfea | |||
| 40007c427e | |||
| 5ab0d0d40e | |||
| 7c65afbc93 | |||
| fb071e55aa | |||
| 406c934b7a | |||
| 292cf009e6 | |||
| 6bf7659b19 | |||
| 0a90e8da29 | |||
| 32fe8e5ee6 | |||
| 7ccbaa7829 | |||
| 7817c9a4ce | |||
| bf9b0aedf9 | |||
| 21b2ff208e | |||
| ecad4541f6 | |||
| 26a29865e7 | |||
| ba287130d3 | |||
| 864276ea72 | |||
| 23eb36b911 | |||
| 51bcd116b3 | |||
| 928adbd594 | |||
| e91b6f692f | |||
| 861dafef70 | |||
| 388c23c376 | |||
| 82d9196c90 | |||
| 826a16eb66 | |||
| 0b97eb85a1 | |||
| a2132001e8 | |||
| 4fd6dd5606 | |||
| 7b71ec4402 | |||
| 07771117ac | |||
| 7fbc884e94 | |||
| b40be72590 | |||
| b3db25c470 | |||
| 6a7b6ffc1f | |||
| c87bb90c48 | |||
| a6225191d0 | |||
| afb904a64c | |||
| cbad391dcd | |||
| 0bf3facf6c | |||
| a9da39b987 | |||
| 1071bdd35a | |||
| 8656985885 | |||
| 3be523b79e | |||
| 1fb7e5ff85 | |||
| df75d6e017 | |||
| 29c9af4902 | |||
| 7b03183e75 | |||
| 4c70e61a14 | |||
| b2b225f4ae | |||
| c17142e648 | |||
| 8e4759bd2b | |||
| d2807917d2 | |||
| 71c030b947 | |||
| 1f3ab5349a | |||
| daaccb9b2f | |||
| b66c58b68e | |||
| 13636a0d29 | |||
| 5232f0a6e2 | |||
| 6a168f2fe1 | |||
| 04f12b545d | |||
| 711b01175d | |||
| 272c2666c5 | |||
| 60ba43378a | |||
| 1da60b3b28 | |||
| ee118b07e5 | |||
| 06ee998d54 | |||
| c197a45540 | |||
| 4d23b45633 | |||
| c027efa931 | |||
| 96c4d6fecc | |||
| 4e1fd54c58 | |||
| d6db020e1c | |||
| c0f4fe017f | |||
| 11d726a6ad | |||
| 37399afc68 | |||
| 5c19fc4208 | |||
| 00f0f13b93 | |||
| 54844fb954 | |||
| 6ffd3afeaa | |||
| 9d0dcd98bd | |||
| f78f877e21 | |||
| 5d0b903c03 | |||
| 032411fe9c | |||
| 76b061d699 | |||
| 08d37c839c | |||
| 0a26230fe1 | |||
| 2e59beda0c | |||
| ee8b1f5dc0 | |||
| 4938cdaecd | |||
| 84f28ae5ce | |||
| 58e7a1f2dc | |||
| 78bba7a0e9 | |||
| 9d31764073 | |||
| d787548915 | |||
| a29bca499f | |||
| 60d3b3025a | |||
| c036041339 | |||
| 1df315612a | |||
| 15beddf96b | |||
| 20d8b18a9b | |||
| 53ff0c39e4 | |||
| 357a3bef09 | |||
| 81eef51e88 | |||
| 10dfb2fe49 | |||
| ef76149112 | |||
| a6a330a78e | |||
| 7f4d0df366 | |||
| 3eddac0a89 | |||
| 6e7ac1c1ca | |||
| 68d9cf1274 | |||
| 5eb0d1548c | |||
| e543904995 | |||
| ffda1d3235 | |||
| b705aa217c | |||
| 6f20b17948 | |||
| 2fde7e5cf8 | |||
| bee06b6731 | |||
| d3fa7336a2 | |||
| 96545a899f | |||
| 6ef5ae2394 | |||
| 05a31dd4d4 | |||
| d9d5c8bf14 | |||
| 291a1f0178 | |||
| 2547b53aa2 | |||
| 409f8b7186 | |||
| 189422bf1e | |||
| 74daeee140 | |||
| befcd3cf98 | |||
| e063ff6aa5 | |||
| 6179c86919 | |||
| a20fe07a56 | |||
| 2b5dcf12d7 | |||
| 5873c1ca96 | |||
| c6e2ecb996 | |||
| 2130b00752 | |||
| 474c3a8348 | |||
| 29a18b8b37 | |||
| 21ab4f0a8b | |||
| 7d2842fd64 | |||
| cd4c121e07 | |||
| d0570f876e | |||
| 8bf99a1ab0 | |||
| fdcd4ddd60 | |||
| c09fff455f | |||
| 0b3755c69a | |||
| e90fb64946 | |||
| d542a4790e | |||
| bc5cb4009c | |||
| 6724533d0e | |||
| 0f0668b77b | |||
| 266bf9b4cf | |||
| a6f3bccf64 | |||
| 2d640f2e6a | |||
| d7d99205a1 | |||
| 9f437d5b9f | |||
| 0d3100ba33 | |||
| 72fb69d87b | |||
| ed4fcf5e9d | |||
| 8742c6e7b9 | |||
| d7d7254a7b | |||
| 7e8870de6c | |||
| 8f2b2addc2 | |||
| e4743bbdef | |||
| 80cdea6932 | |||
| 645f2c5c9c | |||
| 85b81ffc98 | |||
| fa5536f504 | |||
| 16086e79b0 | |||
| b001bba3b8 | |||
| 0c895a2662 | |||
| 6f0641f315 | |||
| dc9dbe8a0f | |||
| 0b8096f973 | |||
| d58a2a9975 | |||
| a83268a6e3 | |||
| 5c83f234c6 | |||
| 24abec4045 | |||
| 56ff56281e | |||
| c25f9ad9ae | |||
| 5041c90ac0 | |||
| 2855675fa5 | |||
| 209689c5f4 | |||
| 3d64b0aa28 | |||
| 3bceab0606 | |||
| c189da3671 | |||
| dd232cedb5 | |||
| 88c5daa561 | |||
| 4f281ef108 | |||
| 12aca7ca58 | |||
| 77ec1aa969 | |||
| 8710a5554c | |||
| 6b24d67409 | |||
| 48c3105f42 | |||
| 032453c4d0 | |||
| f093868da1 | |||
| 1f5e38190d | |||
| 250884c7bc | |||
| 8a2e91e65e | |||
| 5910ce7980 | |||
| 00bec06012 | |||
| 54dccdbc7d | |||
| 2bd776ec55 | |||
| 23cf7c9e8b | |||
| 384f5de765 | |||
| 9ae4798d80 | |||
| 850ccbdcee | |||
| d8ab3f2226 | |||
| 9ddd2dd3bc | |||
| f579641866 | |||
| a71c0c4e74 | |||
| d3921f9e20 | |||
| e0d7332dea | |||
| d6b8eb8548 | |||
| 2964b6c6fa | |||
| a0cd1074e1 | |||
| cc2b5ef918 | |||
| d003fdf357 | |||
| 5384faf3ec | |||
| a833cd84f3 | |||
| 7f1b9d31ea | |||
| 5bd8c11a86 | |||
| 846d85bb7a | |||
| b492c2d0fb | |||
| 1a6f5e061b | |||
| 5c62107e3b | |||
| 0891c5da62 | |||
| 2d22f0b2f2 | |||
| 1961e545c0 | |||
| 7951fcc494 | |||
| 6bf0b37694 | |||
| 7a51132e69 | |||
| 98f071df6d | |||
| 9543296ffd | |||
| 16dd545c22 | |||
| e7cb818f05 | |||
| ed89ade5e3 | |||
| 80d2eec5ed | |||
| 8d980b0287 | |||
| baa199c50f | |||
| 29a0d2e357 | |||
| 7ab72668e1 | |||
| ca3345cb33 | |||
| 0ade4b2efb | |||
| 2439a02dbb | |||
| a785c71c5f | |||
| db852cab68 | |||
| 6a5718c45e | |||
| c68dbdb8fe | |||
| 49e0e2eb67 | |||
| 7d5b39f130 | |||
| dd8c846b3e | |||
| 392b103f38 | |||
| 347c818ab6 | |||
| 9192b870b6 | |||
| c21757694e | |||
| 770c7d0663 | |||
| bd8c9d8cdc | |||
| 10a4435760 | |||
| bc52c2c685 | |||
| 81c8b2a903 | |||
| 5ef10238e3 | |||
| d2e8f809d0 | |||
| 8e550dc982 | |||
| ed81ca957b | |||
| f8ab8257e7 | |||
| 2474804258 | |||
| e23dc1c063 | |||
| 9b28528acc | |||
| 86fd460a7a | |||
| 609b8c76b6 | |||
| 57aecc46be | |||
| ded50480a8 | |||
| 4043a5c1a3 | |||
| b96d405270 | |||
| 85751ba294 | |||
| 6145d9e804 | |||
| fe1c05fe46 | |||
| dc3f086b9d | |||
| e778b8710d | |||
| 20be96253c | |||
| e6129dd01d | |||
| 1ce0e8903f | |||
| b67c1ab615 | |||
| f044082fa5 | |||
| b2a02c5a9c | |||
| 3946fecd1d | |||
| 27f43d0ad0 | |||
| 684b3e0838 | |||
| adc66d8605 | |||
| 58b831cd24 | |||
| 8cf0502492 | |||
| a490514079 | |||
| a658286776 | |||
| 7fb3c26633 | |||
| 21ca8e5e90 | |||
| f719cedc37 | |||
| 22e70f7164 | |||
| a573a0b765 | |||
| ca1abf951f | |||
| 2efa3c4a42 | |||
| f3fd177235 | |||
| 092f98c17a | |||
| eec6174562 | |||
| 81efcea0e5 | |||
| 7ac85b5b1e | |||
| 1b35ca32ac | |||
| 97c989e465 | |||
| b43b81cc02 | |||
| c061e3e1b2 | |||
| cd61c47e35 | |||
| 4f9b3669a2 | |||
| 7164140c15 | |||
| d41973f1a8 | |||
| 8806f2862d | |||
| 36989c76ee | |||
| 13aef5b3c0 | |||
| b8f9f93537 | |||
| 1c93d28441 | |||
| 2ce351f7ef | |||
| 826dde759f | |||
| d1aa966737 | |||
| 4d24e7095b | |||
| 6c1940f5d2 | |||
| 30c395151d | |||
| d72e64c7f9 | |||
| abdc8e5056 | |||
| bc754c7a7d | |||
| 84ad8d43b5 | |||
| e440630497 | |||
| 71689fce79 | |||
| e7185ff460 | |||
| 18f493675a | |||
| 0c004b2e85 | |||
| c214d9ee37 | |||
| 72259c16a9 | |||
| 66b656ada5 | |||
| 46e4ca3948 | |||
| f2bf2fb025 | |||
| 50d48deec1 | |||
| 3c905aa1d7 | |||
| d5f478b3c6 | |||
| 0f96b93532 | |||
| 5449affbc8 | |||
| 2cf19900db | |||
| efe5d08430 | |||
| 994e9ed8d2 | |||
| 72af5cb7f0 | |||
| 308ee34025 | |||
| 9839befdf1 | |||
| d688df6c92 | |||
| 24eef25984 | |||
| 77ae0be899 | |||
| ca939da28e | |||
| 5d0920cb6d | |||
| d1ea7b5364 | |||
| ebdb986e2a | |||
| 4bb6695c2e | |||
| a6c5a42c1d | |||
| c44c718d06 | |||
| 5e4097453b | |||
| bfeae89ab5 | |||
| 755364c0df | |||
| dcb1e9a736 | |||
| c8543961af | |||
| cbad3b76eb | |||
| b3ff2fe135 | |||
| 6a6f25547e | |||
| 43dfee56cc | |||
| 6f9a2ce092 | |||
| 06014eade9 | |||
| 6f92a50c83 | |||
| 60eb50737d | |||
| 250746e686 | |||
| 3bac151b08 | |||
| c61d9ccb99 | |||
| 56ad03b833 | |||
| 2f9e6278ba | |||
| 17e0fbc6fb | |||
| 7ee7feadf3 | |||
| b36ea558a3 | |||
| 17d6a75465 | |||
| d5541bc985 | |||
| 98a46e9fd4 | |||
| 2e3074df00 | |||
| b3dc3e690b | |||
| b1943ede2f | |||
| 0467e4e12f | |||
| 8164624cee | |||
| e0451d026c | |||
| 1f1345477f | |||
| 44529e872f | |||
| a10996954e | |||
| 4d1dfb5f66 | |||
| f97b624688 | |||
| 8215c59122 | |||
| eb97bd9c3e | |||
| d2e100fe4b | |||
| de09a1f6bd | |||
| c40672e762 | |||
| 565d4a6955 | |||
| 8f0f2eb35e | |||
| 234b795157 | |||
| e317c56c99 | |||
| 29d12a9914 | |||
| b459e9cbfe | |||
| 52abe73ef7 | |||
| f0fe481bcf | |||
| 222446a937 | |||
| e7edd43034 | |||
| 2bc2c282e1 | |||
| 5cc92d3a9d | |||
| 4be8a25699 | |||
| d3421733e1 | |||
| 4c099a54e8 | |||
| 9f77f07ed2 | |||
| 04ab1a137c | |||
| 53744ac772 | |||
| 50a1c33adb | |||
| d153af5212 | |||
| a336b27b6c | |||
| 97eb4b6e3e | |||
| 430768eac5 | |||
| 5db864881a | |||
| d3b1047d37 | |||
| 98cac103c4 | |||
| 7226d66f67 | |||
| 8a352ed3ea | |||
| 02f8306c7b | |||
| cf6f353f20 | |||
| 7a631b3557 | |||
| 5e13047846 | |||
| c17d532802 | |||
| 55e4e61906 | |||
| f2f88ab9ca | |||
| ba418d357f | |||
| 0e3f16139d | |||
| 55486d511f | |||
| 6080094c41 | |||
| 6b8d3b0f8a | |||
| 725958137a | |||
| 1f6b4bef74 | |||
| fe1e0a6de0 | |||
| 1f3c42fc44 | |||
| 8bf67c7dc3 | |||
| 13214cee96 | |||
| 579c7bad92 | |||
| f00a6a7783 | |||
| 2a81fdd9fb | |||
| 17c59e595c | |||
| ad2576eae2 | |||
| 72d8179cc5 | |||
| dbabec0db6 | |||
| 76675fbc9b | |||
| ca395b5c09 | |||
| 1a05d5ff7a | |||
| 56f0dbd02f | |||
| 9fc0ff961d | |||
| 73441dc93b | |||
| df5f5eba1c | |||
| d950b8dc90 | |||
| 85394b185d | |||
| 86b49f9cc3 | |||
| 9769b3e396 | |||
| e337992410 | |||
| d5c3a44041 | |||
| eade42be49 | |||
| d0fac50cfd | |||
| dd4aa6fb9d | |||
| aa867b2e5f | |||
| 2fa2be4b9e | |||
| d5536467f6 | |||
| 67cb61c93f | |||
| 578d580683 | |||
| 789f277780 | |||
| 308ec615b9 | |||
| 0e40c9e216 | |||
| 5dbf75b5e4 | |||
| b921ddfc8d | |||
| bf3c81fe24 | |||
| 06cbd93f05 | |||
| 6c3780d9ea | |||
| 6f0667bb28 | |||
| 8368283a3e | |||
| 18ee3a1526 | |||
| b0e501f086 | |||
| 385ae59133 | |||
| 49469bdf12 | |||
| 020417e971 | |||
| eff0de5330 | |||
| b219f6855e | |||
| 068d0218b0 | |||
| 65215ccdd6 | |||
| 3e9f6a14f2 | |||
| 7623787b1c | |||
| e15daa8f6d | |||
| 298cf6599c | |||
| 841930a8ef | |||
| 9b37e496cb | |||
| 58e6ad9e79 | |||
| 3aa2a6783e | |||
| d64a0d1fcd | |||
| ba141031dd | |||
| ebdc63f5a0 | |||
| 5af0a09714 | |||
| 8a2bc2660c | |||
| e59b8cf403 | |||
| b078ef9a22 | |||
| fdaec6d5a9 | |||
| b631346379 | |||
| e9f2378b47 | |||
| 7d2f78d25c | |||
| 1f734a613c | |||
| a3c299b057 | |||
| 12aedfce92 | |||
| 65645346a2 | |||
| cb65e89e53 | |||
| 6a2fec8ef4 | |||
| aa59c90810 | |||
| 2b317930a0 | |||
| e7d56dd4bd | |||
| a4fedb276d | |||
| 277c0a2ce6 | |||
| ef3c61e9e6 | |||
| 1908126607 | |||
| 2d77f8489f | |||
| 0371651fdd | |||
| 01734d24f7 | |||
| 71fc0546e0 | |||
| 871a745702 | |||
| 3f0df8ae0d | |||
| 1746011c16 | |||
| 7c4cfbf3d4 | |||
| 8524e098a8 | |||
| 971f58da9b | |||
| c496be1031 | |||
| 21851e3a9c | |||
| 600d5b91ea | |||
| 09b90c3bbc | |||
| f6ca13d6dc | |||
| 9c4d7c514f | |||
| ad1946e9fb | |||
| 68910458e8 | |||
| 240e87eca4 | |||
| 6b5f7e25b7 | |||
| e7229e644f | |||
| 08c8aca144 | |||
| 7f8dae74ac | |||
| 08503116ff | |||
| a1d679a59d | |||
| 4586bd0188 | |||
| a97b50f497 | |||
| c84ff11d0d | |||
| e966e74487 | |||
| 3865abfb4d | |||
| 1905601aaa | |||
| aacb9e2874 | |||
| 78f3b18969 | |||
| 9f73ca2950 | |||
| 035b98a602 | |||
| 17f4ebc297 | |||
| 906e15674e | |||
| 85bd0b6c9c | |||
| b19e8713e0 | |||
| 68fb78e765 | |||
| be8a0a4a3a | |||
| e883e3c60e | |||
| 4ede62b39a | |||
| 7d9f487a78 | |||
| 9da584ded4 | |||
| 9452c90cf3 | |||
| a80064f40a | |||
| 49691803cc | |||
| ee4738b245 | |||
| b270fa78da | |||
| 18339d7e4d | |||
| 78563448fb | |||
| 144d5f3324 | |||
| 0fb315ec47 | |||
| 1ff67341a1 | |||
| a441280812 | |||
| eda9bbb191 | |||
| 2d9da2899f | |||
| a95490d9d4 | |||
| 44135b1824 | |||
| 4a0367b401 | |||
| c1f0104edb | |||
| c9a7fbf6dd | |||
| 1f00b6a3f8 | |||
| acb22c4119 | |||
| be2b855ffe | |||
| 88c9418350 | |||
| 2255543d94 | |||
| b4c91d2dd4 | |||
| 98c1b5a3b2 | |||
| 122a1d73d3 | |||
| 74e6dba914 | |||
| d7846e0b32 | |||
| 902ffc0bc0 | |||
| 8c1168d818 | |||
| fd77fe078a | |||
| 7f3883fb39 | |||
| 6a2698e911 | |||
| 569fea74a7 | |||
| b04d82f0b3 | |||
| 33cd4f5f68 | |||
| 49cf8e3e08 | |||
| c53a8ba68e | |||
| 5cccb97ede | |||
| 493419f324 | |||
| 577e0ad930 | |||
| 2a12f7f31e | |||
| ae3e661d7a | |||
| 0efa05142f | |||
| 6714e18e7c | |||
| 5d53678e83 | |||
| 3cb66a606d | |||
| 074db07275 | |||
| e3834ed6ea | |||
| 1bdb4a650e | |||
| 6966973497 | |||
| 8ee016e189 | |||
| fa0a96f057 | |||
| a2c84f5c40 | |||
| 768c43df2d | |||
| 579d988f4a | |||
| 45bc113e3f | |||
| 1abc13b20f | |||
| cdc9e28c90 | |||
| 8a48a110ff | |||
| 0eb1abd26d | |||
| a7a6d7ff13 | |||
| fb544e0545 | |||
| 84029fbc5b | |||
| 8039e459fa | |||
| f202c8ea44 | |||
| d8d1aa66e6 | |||
| 79ef221820 | |||
| 67ecc741d0 | |||
| 80d722568e | |||
| b9fcac974d | |||
| 31e9e58304 | |||
| 8f09b518ba | |||
| 2d6aab6b71 | |||
| 6712c0064a | |||
| 5e6d97ab36 | |||
| ea753fdfe7 | |||
| 1db8a24b4d | |||
| f8adac8b76 | |||
| 4bae586e36 | |||
| 3522c34adf | |||
| 96fc519b3c | |||
| 28f686eb80 | |||
| 6f0c95e49c | |||
| 130086db00 | |||
| 2a1ad171c0 | |||
| db8a050bdf | |||
| 3ff5ce4dec | |||
| 9f8855a4d3 | |||
| d3515d3fa5 | |||
| 277427af57 | |||
| f6c1079bda | |||
| 252d82469c | |||
| 1879ba2c2b | |||
| fc444c1986 | |||
| ae9805e4f2 | |||
| 33b1457e91 | |||
| 9e399ebe3c | |||
| eac1151616 | |||
| f7a7100fea | |||
| c207d1dfcf | |||
| df051fd643 | |||
| 419ab937b6 | |||
| 7ff919c31b | |||
| ee90351c17 | |||
| fbdbf67ce3 | |||
| a7e32d300a | |||
| 56387cb936 | |||
| df965816ac | |||
| c7341c9b15 | |||
| 00322d7e9f | |||
| ef93632130 | |||
| 0f744888ef | |||
| a5b84bab69 | |||
| 12725500a8 | |||
| 1917c08e51 | |||
| b304057560 | |||
| e5a39d8dfb | |||
| 54ccef9c72 | |||
| c103c6acbf | |||
| d6f53076c0 | |||
| b07ea8fe9c | |||
| 9a7441779f | |||
| a6f27e446d | |||
| e7f0ccfa16 | |||
| e5d01a4e19 | |||
| b7d72f2fbf | |||
| 281dbbd174 | |||
| 153349f3d5 | |||
| 8d22acfe78 | |||
| c1b030ee97 | |||
| 803f52b2d0 | |||
| 2f96abeef6 | |||
| 163fcd2b2e | |||
| 9ddcb1b3f2 | |||
| 133979218a | |||
| ef545be03c | |||
| c534dc7508 | |||
| 263ffe2b8c | |||
| 67181fb033 | |||
| a026e67a3b | |||
| d9544398b9 | |||
| 1c4bb29fdd | |||
| 765d497724 | |||
| 80410c9200 | |||
| 4e918db5cb | |||
| 382102f071 | |||
| 6e88780f8b | |||
| e3035b9d66 | |||
| 8765626898 | |||
| c38247df9e | |||
| baf44f8627 | |||
| 19aa126025 | |||
| a406fb0846 | |||
| 75664e90bb | |||
| f74209c970 | |||
| c7ce8a3107 | |||
| b3b906dd90 | |||
| b8e0e0b4ce | |||
| eb02e1e6b0 | |||
| b2fc6ea5a8 | |||
| f75a47e273 | |||
| 9eae560cae | |||
| b0529a9124 | |||
| 3df9c57482 | |||
| cb5163e1d9 | |||
| c309ac4c14 | |||
| 58c9d5f982 | |||
| dc9a68ad10 | |||
| db16dbda18 | |||
| 172630c2ee | |||
| 6dc7734c70 | |||
| 19a1ffbc98 | |||
| 2cce2859bb | |||
| 654239e29f | |||
| 50fbe3e196 | |||
| 1a8a1c3052 | |||
| 2994f8983d | |||
| 64227f2873 | |||
| 9aef499deb | |||
| c79b5a4120 | |||
| 81ee50d0d4 | |||
| 43b140285f | |||
| adb894869e | |||
| 1f6032a30e | |||
| 9531f4d8e3 | |||
| 37097d3a40 | |||
| 3aa468c2f6 | |||
| c704187012 | |||
| a834fd578e | |||
| 4b5e2f4454 | |||
| 7812b1064b | |||
| 65b9f385cf | |||
| ed88d54aa6 | |||
| d1b515ec5b | |||
| 1ffc43af98 | |||
| b27dc19e57 | |||
| df0b819b0e | |||
| 21f90d85c5 | |||
| 18e3f2af55 | |||
| 3901c9b115 | |||
| d9486d08ae | |||
| d90993a93c | |||
| 7e9bd95846 | |||
| d3d73e0e9c | |||
| d9c151d774 | |||
| 64f4abb8d6 | |||
| bcaa67cc7a | |||
| 8c0a6c834e |
.drone.yml (new file, 38 lines)

```yaml
kind: pipeline
type: docker
name: default

volumes:
  - name: live-output
    temp: {}

steps:
  - name: test-compiler
    image: archlinux
    commands:
      - pacman -Sy cmake gcc make llvm bison flex gettext libffi --noconfirm
      - cd code/compiler
      - ./test.sh
  - name: build-live
    image: klakegg/hugo:ext-alpine
    commands:
      - hugo -D --baseUrl "http://danilafe.com:8080"
    volumes:
      - name: live-output
        path: /live-output
    environment:
      HUGO_DESTINATION: /live-output
  # - name: upload-live
  #   image: eeacms/rsync
  #   commands:
  #     - eval `ssh-agent -s`
  #     - echo "$CUSTOM_KEY" | ssh-add -
  #     - mkdir -p ~/.ssh
  #     - echo -e "Host *\n\tStrictHostKeyChecking no\n\n" > ~/.ssh/config
  #     - rsync -rv -e "ssh -p 22" /live-output/ blog-live@danilafe.com:/var/www/blog-live/ --checksum
  #   environment:
  #     CUSTOM_KEY:
  #       from_secret: live_ssh_key
  #   volumes:
  #     - name: live-output
  #       path: /live-output
```
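In short: the test-compiler step installs the compiler's build dependencies on an Arch Linux image and runs its test suite, and the build-live step renders a draft copy of the site (with a staging base URL) into the shared live-output volume. The commented-out upload-live step would rsync that volume to the server over SSH, using a key supplied through the live_ssh_key secret.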
.gitignore (new file, vendored, 1 line)

```
**/build/*
```
.gitmodules (new file, vendored, 18 lines)

```ini
[submodule "code/aoc-2020"]
    path = code/aoc-2020
    url = https://dev.danilafe.com/Advent-of-Code/AdventOfCode-2020.git
[submodule "themes/vanilla"]
    path = themes/vanilla
    url = https://dev.danilafe.com/Web-Projects/vanilla-hugo.git
[submodule "code/server-config"]
    path = code/server-config
    url = https://dev.danilafe.com/Nix-Configs/server-config
[submodule "code/blog-static-flake"]
    path = code/blog-static-flake
    url = https://dev.danilafe.com/Nix-Configs/blog-static-flake.git
[submodule "code/compiler"]
    path = code/compiler
    url = https://dev.danilafe.com/DanilaFe/bloglang.git
[submodule "code/agda-spa"]
    path = code/agda-spa
    url = https://dev.danilafe.com/DanilaFe/agda-spa.git
```
Gemfile (new file, 9 lines)

```ruby
# frozen_string_literal: true

source "https://rubygems.org"

git_source(:github) {|repo_name| "https://github.com/#{repo_name}" }

gem 'nokogiri'
gem 'execjs'
gem 'duktape'
```
Gemfile.lock (new file, 21 lines)

```
GEM
  remote: https://rubygems.org/
  specs:
    duktape (2.7.0.0)
    execjs (2.9.1)
    mini_portile2 (2.8.8)
    nokogiri (1.18.3)
      mini_portile2 (~> 2.8.2)
      racc (~> 1.4)
    racc (1.8.1)

PLATFORMS
  ruby

DEPENDENCIES
  duktape
  execjs
  nokogiri

BUNDLED WITH
   2.1.4
```
agda.rb (new file, 351 lines)

```ruby
require "nokogiri"
require "pathname"

files = ARGV[0..-1]

class LineInfo
  attr_accessor :links

  def initialize
    @links = []
  end
end

class AgdaContext
  def initialize
    @file_infos = {}
  end

  # Traverse the preformatted Agda block in the given Agda HTML file
  # and find which textual ranges have IDs and links to other ranges.
  # Store this information in a hash, line => LineInfo.
  def process_agda_html_file(file)
    return @file_infos[file] if @file_infos.include? file

    @file_infos[file] = line_infos = {}
    unless File.exist?(file)
      return line_infos
    end

    document = Nokogiri::HTML.parse(File.open(file))
    pre_code = document.css("pre.Agda")[0]

    # The traversal is postorder; we always visit children before their
    # parents, and we visit leaves in sequence.
    offset = 0
    line = 1
    pre_code.traverse do |at|
      # Text nodes are always leaves; visiting a new leaf means we've advanced
      # in the text by the length of that text. However, if there are newlines
      # -- since this is a preformatted block -- we also advanced by a line.
      # At this time, do not support links that span multiple lines, but
      # Agda doesn't produce those either.
      if at.text?
        if at.content.include? "\n"
          # This textual leaf is at least part whitespace. The logic
          # assumes that links can't span multiple pages, and that links
          # aren't nested, so ensure that the parent of the textual node
          # is the preformatted block itself.
          raise "unsupported Agda HTML output" if at.parent.name != "pre"

          # Increase the line and track the final offset. Written as a loop
          # in case we eventually want to add some handling for the pieces
          # sandwiched between newlines.
          at.content.split("\n", -1).each_with_index do |bit, idx|
            line += 1 unless idx == 0
            offset = bit.length
          end
        else
          # It's not a newline node, so it could be anywhere. All we need to
          # do is adjust the offset within the full pre block's text.
          offset += at.content.length
        end
      elsif at.name == "a"
        # We found a link. Agda emits both links and "things to link to" as
        # 'a' nodes, so check for either, and record them. Even if
        # the link is nested, the .content.length accessor will only
        # retrieve the textual content, and thus -- assuming the link
        # isn't split across lines -- will find the proper from-to range.

        line_info = line_infos.fetch(line) { line_infos[line] = LineInfo.new }
        href = at.attribute("href")
        id = at.attribute("id")
        if href or id
          new_node = { :from => offset - at.content.length, :to => offset }
          new_node[:href] = href if href
          new_node[:id] = id if id

          line_info.links << new_node
        end
      end
    end
    return line_infos
  end
end

class FileGroup
  def initialize(agda_context)
    @agda_context = agda_context
    # Agda HTML href -> list of (file, Hugo HTML node that links to it)
    @nodes_referencing_href = {}
    # Agda HTML href -> (its new ID in Hugo-land, file in which it's defined)
    # This supports cross-post linking within a series.
    @global_seen_hrefs = {}
    # file name -> Agda HTML href -> its new ID in Hugo-land
    # This supports linking within a particular post.
    @local_seen_hrefs = Hash.new { {} }
    # Global counter to generate fresh IDs. There's no reason for it to
    # be global within a series (since IDs are namespaced by the file they're in),
    # but it's just more convenient this way.
    @id_counter = 0
  end

  def note_defined_href(file, href)
    file_hrefs = @local_seen_hrefs.fetch(file) do
      @local_seen_hrefs[file] = {}
    end

    uniq_id = file_hrefs.fetch(href) do
      new_id = "agda-unique-ident-#{@id_counter}"
      @id_counter += 1
      file_hrefs[href] = new_id
    end

    unless @global_seen_hrefs.include? href
      @global_seen_hrefs[href] = { :file => file, :id => uniq_id }
    end

    return uniq_id
  end

  def note_used_href(file, node, href)
    ref_list = @nodes_referencing_href.fetch(href) { @nodes_referencing_href[href] = [] }
    ref_list << { :file => file, :node => node }
    return href
  end

  # Given a Hugo HTML file which references potentially several Agda modules
  # in code blocks, insert links into the code blocks.
  #
  # There are several things we need to take care of:
  # 1. Finding the HTML files associated with each referenced Agda module.
  #    For this, we make use of the data-base-path etc. attributes that
  #    the vanilla theme inserts.
  # 2. "Zipping together" the Agda and Hugo HTML representations. Each of
  #    them encodes the code, but they use different HTML elements and structures.
  #    So, given a Hugo HTML code block, traverse its textual contents
  #    and find any that are covered by links in the related Agda HTML file.
  # 3. Fixing up links: the Agda HTML links assume each module has its own HTML
  #    file. This isn't true for us: multiple modules are stitched into
  #    one Hugo HTML file. Also, we don't include the entire Agda HTML
  #    file in the Hugo HTML, so some links may be broken. So, find IDs
  #    that are visible in the Hugo file, rename them to be globally unique,
  #    and re-write cross-file links that reference these IDs to point
  #    inside the Hugo file.
  def process_source_file(file, document)
    # Process each highlight group that's been marked as an Agda file.
    document.css('div[data-agda-block]').each do |t|
      first_line, last_line = nil, nil

      if first_line_attr = t.attribute("data-first-line")
        first_line = first_line_attr.to_s.to_i
      end
      if last_line_attr = t.attribute("data-last-line")
        last_line = last_line_attr.to_s.to_i
      end

      if first_line and last_line
        line_range = first_line..last_line
      else
        line_range = 1..
      end

      # Sometimes, code is deeply nested in the source file, but we don't
      # want to show the leading space. In that case, the generator sets
      # data-source-offset with how much leading space was stripped off.
      initial_offset = 0
      if source_offset_attr = t.attribute("data-source-offset")
        initial_offset = source_offset_attr.to_s.to_i
      end

      full_path = t.attribute("data-file-path").to_s
      full_path_dirs = Pathname(full_path).each_filename.to_a

      # The name of an Agda module is determined from its directory
      # structure: A/B/C.agda creates A.B.C.html. Depending on where
      # the code is included, there might be some additional folders
      # that precede A that we want to ignore.
      base_path = t.attribute("data-base-path").to_s
      base_dir_depth = 0
      if base_path.empty?
        # No submodules were used. Assume code/<X> is the root, since
        # that's the code layout of the blog right now.
        base_dir_depth = 1
        base_path = full_path_dirs[0]
      else
        # The code is in a submodule. Assume that the base path / submodule
        # root is the Agda module root, ignore all folders before that.
        base_path_dirs = Pathname(base_path).each_filename.to_a
        base_dir_depth = base_path_dirs.length
      end

      dirs_in_base = full_path_dirs[base_dir_depth..-1]
      html_file = dirs_in_base.join(".").gsub(/\.agda$/, ".html")
      html_path = File.join(["code", base_path, "html", html_file])

      agda_info = @agda_context.process_agda_html_file(html_path)

      # Hugo conveniently generates a bunch of spans, each encoding a line
      # of code output. We can iterate over these and match them up with
      # the line numbers we got from reading the Agda HTML output.
      lines = t.css("pre.chroma code[data-lang] .line")
      lines.zip(line_range).each do |line, line_no|
        line_info = agda_info[line_no]
        next unless line_info

        offset = initial_offset
        line.traverse do |lt|
          if lt.text?
            content = lt.content
            new_offset = offset + content.length

            # The span/a/etc. structure of the Agda and Hugo HTML files
            # need not line up; it's possible for there to be a single link
            # in the Agda file that's broken up across multiple HTML nodes
            # in the Hugo output. For now, just don't link those, since inserting
            # such overlapping links is relatively complicated. Instead,
            # require links to fit fully within a current text node (and thus,
            # not overlap the boundaries of any HTML).
            matching_links = line_info.links.filter do |link|
              link[:from] >= offset and link[:to] <= new_offset
            end

            # A given text node can be broken into any number of sub-nodes,
            # where some sub-nodes are still text, and others have been turned
            # into links. Store the new pieces in replace_with. E.g.,
            #
            # Original:
            #   abc
            #
            # New:
            #   a<a href="..">b</a>c
            #
            # replace_with:
            #   ["a", <Nokogiri::XML::Node...>, "c"]
            #
            # replace_offset represents how much of the original text we've
            # already converted. The below iteration assumes that matching
            # links are in order, and don't overlap.
            replace_with = []
            replace_offset = 0
            matching_links.each do |match|
              # The link's range is an offset from the beginning of the line,
              # but the text piece we're splitting up might be partway into
              # the line. Convert the link coordinates to piece-relative ones.
              relative_from = match[:from] - offset
              relative_to = match[:to] - offset

              # If the previous link ended some time before the new link
              # began (or if the current link is the first one, and is not
              # at the beginning), ensure that the plain text "in between"
              # is kept.
              replace_with << content[replace_offset...relative_from]

              tag = (match.include? :href) ? 'a' : 'span'
              new_node = Nokogiri::XML::Node.new(tag, document)
              if match.include? :href
                # For nodes with links, note what they're referring to, so
                # we can adjust their hrefs when we assign global IDs.
                href = match[:href].to_s
                new_node['href'] = note_used_href file, new_node, href
              end
              if match.include? :id
                # For nodes with IDs visible in the current Hugo file, we'll
                # want to redirect links that previously go to other Agda
                # module HTML files. So, note the ID that we want to redirect,
                # and pick a new unique ID to replace it with.
                id = match[:id].to_s
                new_node['id'] = note_defined_href file, "#{html_file}##{id}"
              end
              new_node.content = content[relative_from...relative_to]

              replace_with << new_node
              replace_offset = relative_to
            end
            replace_with << content[replace_offset..-1]

            # Finally, replace the node under consideration with the new
            # pieces.
            replace_with.each do |replacement|
              lt.add_previous_sibling replacement
            end
            lt.remove

            offset = new_offset
          end
        end
      end
    end
  end

  def cross_link_files
    # Now, we have a complete list of all the IDs visible in scope.
    # Redirect relevant links to these IDs. This achieves within-post
    # links.
    @nodes_referencing_href.each do |href, references|
      references.each do |reference|
        file = reference[:file]
        node = reference[:node]

        local_targets = @local_seen_hrefs[file]
        if local_targets.include? href
          # A code block in this file provides this href; create a local link.
          node['href'] = "##{local_targets[href]}"
        elsif @global_seen_hrefs.include? href
          # A code block in this series, but not in this file, defines
          # this href. Create a cross-file link.
          target = @global_seen_hrefs[href]
          other_file = target[:file]
          id = target[:id]

          relpath = Pathname.new(other_file).dirname.relative_path_from(Pathname.new(file).dirname)
          node['href'] = "#{relpath}##{id}"
        else
          # No definitions in any blog page. For now, just delete the anchor.
          node.replace node.content
        end
      end
    end
  end
end

agda_context = AgdaContext.new

file_documents = {}
series_groups = files.group_by do |file|
  file_documents[file] = document = Nokogiri::HTML.parse(File.open(file))
  document.css("meta[name=blog-series]")&.attribute("content")&.to_s
end

# For the 'nil' group, process individually.
if files_with_no_series = series_groups.delete(nil)
  files_with_no_series.each do |file|
    file_group = FileGroup.new agda_context
    file_group.process_source_file file, file_documents[file]
    file_group.cross_link_files
  end
end

# For groups, process them together to allow cross-linking
series_groups.each do |_, files_in_series|
  file_group = FileGroup.new agda_context
  files_in_series.each do |file|
    file_group.process_source_file file, file_documents[file]
  end
  file_group.cross_link_files
end

# Having modified all the HTML files, save them.
file_documents.each do |file, document|
  File.write(file, document.to_html(encoding: 'UTF-8'))
end
```
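agda.rb takes the already-rendered Hugo pages as command-line arguments, groups them by their blog-series meta tag, and rewrites the Agda code blocks in place. A minimal invocation sketch, not taken from the diff itself (the public/ output directory and the globstar shell option are assumptions):

```sh
# Sketch (assumptions: Hugo writes to public/, bash with globstar).
# All pages must be passed in one invocation so that posts in the
# same series can be cross-linked.
shopt -s globstar
bundle exec ruby agda.rb public/**/*.html
```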
analyze.rb (new file, 110 lines)

```ruby
require "pathname"
require "set"
require "json"

def resolve_path(bp, p)
  path = nil
  if bp.start_with? "."
    path = Pathname.new(File.join(bp, p)).cleanpath.to_s
  elsif p.start_with? "blog/"
    path = File.join("content", p)
  else
    path = File.join("content", "blog", p)
  end
  if File.directory? path
    path = File.join(path, "index.md")
  elsif !path.end_with? ".md"
    path += ".md"
  end
  path.gsub("blog/blog/", "blog/")
end

files = Set.new
refs = {}
Dir['content/blog/**/*.md'].each do |file|
  file = file.chomp
  files << file
  arr = refs[file] || (refs[file] = [])
  pattern = Regexp.union(/< relref "([^"]+)" >/, /< draftlink "[^"]+" "([^"]+)" >/)
  File.open(file).read.scan(pattern) do |ref|
    ref = resolve_path(File.dirname(file), ref[0] || ref[1])
    arr << ref
    files << ref
  end
  arr.uniq!
end

data = {}
id = 0
series = {}
files.each do |file|
  id += 1
  name = file
  tags = []
  group = 1
  draft = false
  next unless File.exist?(file)
  value = File.size(file)
  url = file.gsub(/^content/, "https://danilafe.com").delete_suffix("/index.md").delete_suffix(".md")
  File.readlines(file).each do |l|
    if l =~ /^title: (.+)$/
      name = $~[1].delete_prefix('"').delete_suffix('"')
    elsif l =~ /^draft: true$/
      draft = true
    elsif l =~ /^series: (.+)$/
      this_series = $~[1]
      series_list = series.fetch(this_series) do
        series[this_series] = []
      end
      series_list << file
    elsif l =~ /^tags: (.+)$/
      tags = $~[1].delete_prefix("[").delete_suffix("]").split(/,\s?/).map { |it| it.gsub('"', '') }
      if tags.include? "Compilers"
        group = 2
      elsif tags.include? "Coq"
        group = 3
      elsif tags.include? "Programming Languages"
        group = 4
      elsif tags.include? "Haskell"
        group = 5
      elsif tags.include? "Crystal"
        group = 6
      elsif tags.include? "Agda"
        group = 7
      elsif tags.include? "Hugo"
        group = 8
      end
    end
  end
  next if draft
  data[file] = { :id => id, :name => name, :group => group, :tags => tags, :url => url, :value => value }
end

edges = []
files.each do |file1|
  # files.each do |file2|
  #   next if file1 == file2
  #   next unless data[file1][:tags].any? { |t| data[file2][:tags].include? t }
  #   edges << { :from => data[file1][:id], :to => data[file2][:id] }
  # end
  next unless frefs = refs[file1]
  frefs.each do |ref|
    next unless data[file1]
    next unless data[ref]
    edges << { :from => data[file1][:id], :to => data[ref][:id] }
  end
end
series.each do |series, files|
  files.sort.each_cons(2) do |file1, file2|
    next unless data[file1]
    next unless data[file2]
    edges << { :from => data[file1][:id], :to => data[file2][:id] }
    edges << { :from => data[file2][:id], :to => data[file1][:id] }
  end
end
edges.uniq!
# edges.filter! { |e| e[:from] < e[:to] }
edges.map! { |e| { :from => [e[:from], e[:to]].min, :to => [e[:from], e[:to]].max } }.uniq!

puts ("export const nodes = " + JSON.pretty_unparse(data.values) + ";")
puts ("export const edges = " + JSON.pretty_unparse(edges) + ";")
```
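analyze.rb prints an ES module to stdout: a nodes array with one entry per published post (sized by file size, grouped by the first matching tag) and a deduplicated edges array built from relref/draftlink references plus series adjacency. A hedged usage sketch; the destination path is an assumption, not something the diff specifies:

```sh
# Sketch: regenerate the post-graph data (output path is assumed).
ruby analyze.rb > assets/js/graph-data.js
```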
assets/bergamot/rendering/imp.bergamot (new file, 65 lines)

```
LatexListNil @ latexlist(nil, nil) <-;
LatexListCons @ latexlist(cons(?x, ?xs), cons(?l_x, ?l_s)) <- latex(?x, ?l_x), latexlist(?xs, ?l_s);

IntercalateNil @ intercalate(?sep, nil, nil) <-;
IntercalateConsCons @ intercalate(?sep, cons(?x_1, cons(?x_2, ?xs)), cons(?x_1, cons(?sep, ?ys))) <- intercalate(?sep, cons(?x_2, ?xs), ?ys);
IntercalateConsNil @ intercalate(?sep, cons(?x, nil), cons(?x, nil)) <-;

NonEmpty @ nonempty(cons(?x, ?xs)) <-;

InListHere @ inlist(?e, cons(?e, ?es)) <-;
InListThere @ inlist(?e_1, cons(?e_2, ?es)) <- inlist(?e_1, ?es);

BasicParenLit @ paren(lit(?v), ?l) <- latex(lit(?v), ?l);
BasicParenVar @ paren(var(?x), ?l) <- latex(var(?x), ?l);
BasicParenVar @ paren(metavariable(?x), ?l) <- latex(metavariable(?x), ?l);
BasicParenOther @ paren(?t, ?l) <- latex(?t, ?l_t), join(["(", ?l_t, ")"], ?l);

LatexInt @ latex(?i, ?l) <- int(?i), tostring(?i, ?l);
LatexFloat @ latex(?f, ?l) <- float(?f), tostring(?f, ?l);
LatexStr @ latex(?s, ?l) <- str(?s), escapestring(?s, ?l_1), latexifystring(?s, ?l_2), join(["\\texttt{\"", ?l_2, "\"}"], ?l);
LatexMeta @ latex(metavariable(?l), ?l) <-;
LatexLit @ latex(lit(?i), ?l) <- latex(?i, ?l);
LatexVar @ latex(var(metavariable(?s)), ?l) <- latex(metavariable(?s), ?l);
LatexVar @ latex(var(?s), ?l) <- latex(?s, ?l_v), join(["\\texttt{", ?l_v, "}"], ?l);
LatexPlus @ latex(plus(?e_1, ?e_2), ?l) <-
    paren(?e_1, ?l_1), paren(?e_2, ?l_2),
    join([?l_1, " + ", ?l_2], ?l);
LatexMinus @ latex(minus(?e_1, ?e_2), ?l) <-
    paren(?e_1, ?l_1), paren(?e_2, ?l_2),
    join([?l_1, " - ", ?l_2], ?l);

EnvLiteralNil @ envlitrec(empty, "\\{", "", ?seen) <-;
EnvLiteralSingle @ envlitsingle(?pre, ?e, ?v, "", ?pre, ?seen) <- inlist(?e, ?seen);
EnvLiteralSingle @ envlitsingle(?pre, ?e, ?v, ?l, ", ", ?seen) <- latex(?e, ?l_e), latex(?v, ?l_v), join([?pre, "\\texttt{", ?l_e, "} \\mapsto", ?l_v], ?l);
EnvLiteralCons @ envlitrec(extend(empty, ?e, ?v), ?l, ?newnext, ?seen) <- envlitrec(?rho, ?l_rho, ?next, cons(?e, ?seen)), envlitsingle(?next, ?e, ?v, ?l_v, ?newnext, ?seen), join([?l_rho, ?l_v], ?l);
EnvLiteralCons @ envlitrec(extend(?rho, ?e, ?v), ?l, ?newnext, ?seen) <- envlitrec(?rho, ?l_rho, ?next, cons(?e, ?seen)), envlitsingle(?next, ?e, ?v, ?l_v, ?newnext, ?seen), join([?l_rho, ?l_v], ?l);
EnvLiteralOuter @ envlit(?rho, ?l) <- envlitrec(?rho, ?l_rho, ?rest, []), join([?l_rho, "\\}"], ?l);

LatexEnvLit @ latex(?rho, ?l) <- envlit(?rho, ?l);
LatexTypeEmpty @ latex(empty, "\\{\\}") <-;
LatexExtend @ latex(extend(?a, ?b, ?c), ?l) <- latex(?a, ?l_a), latex(?b, ?l_b), latex(?c, ?l_c), join([?l_a, "[", ?l_b, " \\mapsto ", ?l_c, "]"], ?l);
LatexInenv @ latex(inenv(?x, ?v, ?G), ?l) <- latex(?x, ?l_x), latex(?v, ?l_v), latex(?G, ?l_G), join([?l_G, "(", ?l_x, ") = ", ?l_v], ?l);
LatexEvalTer @ latex(eval(?G, ?e, ?t), ?l) <- latex(?G, ?l_G), latex(?e, ?l_e), latex(?t, ?l_t), join([?l_G, ",\\ ", ?l_e, " \\Downarrow ", ?l_t], ?l);

LatexAdd @ latex(add(?a, ?b, ?c), ?l) <- latex(?a, ?l_a), latex(?b, ?l_b), latex(?c, ?l_c), join([?l_a, "+", ?l_b, "=", ?l_c], ?l);
LatexSubtract @ latex(subtract(?a, ?b, ?c), ?l) <- latex(?a, ?l_a), latex(?b, ?l_b), latex(?c, ?l_c), join([?l_a, "-", ?l_b, "=", ?l_c], ?l);
LatexEvalTer @ latex(stepbasic(?G, ?e, ?H), ?l) <- latex(?G, ?l_G), latex(?e, ?l_e), latex(?H, ?l_H), join([?l_G, ",\\ ", ?l_e, " \\Rightarrow ", ?l_H], ?l);
LatexEvalTer @ latex(step(?G, ?e, ?H), ?l) <- latex(?G, ?l_G), latex(?e, ?l_e), latex(?H, ?l_H), join([?l_G, ",\\ ", ?l_e, " \\Rightarrow ", ?l_H], ?l);

LatexNoop @ latex(noop, "\\texttt{noop}") <-;
LatexAssign @ latex(assign(?x, ?e), ?l) <- latex(?x, ?l_x), latex(?e, ?l_e), join([?l_x, " = ", ?l_e], ?l);
LatexAssign @ latex(if(?e, ?s_1, ?s_2), ?l) <- latex(?e, ?l_e), latex(?s_1, ?l_1), latex(?s_2, ?l_2), join(["\\textbf{if}\\ ", ?l_e, "\\ \\{\\ ", ?l_1, "\\ \\}\\ \\textbf{else}\\ \\{\\ ", ?l_2, "\\ \\}"], ?l);
LatexAssign @ latex(while(?e, ?s), ?l) <- latex(?e, ?l_e), latex(?s, ?l_s), join(["\\textbf{while}\\ ", ?l_e, "\\ \\{\\ ", ?l_s, "\\ \\}"], ?l);
LatexAssign @ latex(seq(?s_1, ?s_2), ?l) <- latex(?s_1, ?l_1), latex(?s_2, ?l_2), join([?l_1, "; ", ?l_2], ?l);

LatexNumNeq @ latex(not(eq(?e_1, ?e_2)), ?l) <- latex(?e_1, ?l_1), latex(?e_2, ?l_2), join([?l_1, " \\neq ", ?l_2], ?l);
LatexNot @ latex(not(?e), ?l) <- latex(?e, ?l_e), join(["\\neg (", ?l_e, ")"], ?l);
LatexNumEq @ latex(eq(?e_1, ?e_2), ?l) <- latex(?e_1, ?l_1), latex(?e_2, ?l_2), join([?l_1, " = ", ?l_2], ?l);

LatexIsInt @ latex(int(?e), ?l) <- latex(?e, ?l_e), join([?l_e, " \\in \\texttt{Int}"], ?l);
LatexIsFloat @ latex(float(?e), ?l) <- latex(?e, ?l_e), join([?l_e, " \\in \\texttt{Float}"], ?l);
LatexIsNum @ latex(num(?e), ?l) <- latex(?e, ?l_e), join([?l_e, " \\in \\texttt{Num}"], ?l);
LatexIsStr @ latex(str(?e), ?l) <- latex(?e, ?l_e), join([?l_e, " \\in \\texttt{Str}"], ?l);
LatexSym @ latex(?s, ?l) <- sym(?s), tostring(?s, ?l_1), join(["\\text{", ?l_1,"}"], ?l);
LatexCall @ latex(?c, ?l) <- call(?c, ?n, ?ts), nonempty(?ts), latexlist(?ts, ?lts_1), intercalate(", ", ?lts_1, ?lts_2), join(?lts_2, ?lts_3), join(["\\text{", ?n, "}", "(", ?lts_3, ")"], ?l);
```
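Both rule files share the same shape: each line is an inference rule `Name @ conclusion <- premise, …;`, where `?x` terms are metavariables bound by unification during proof search. For example, `LatexLit @ latex(lit(?i), ?l) <- latex(?i, ?l);` reads: to render `lit(?i)`, render the wrapped value and reuse its output. The imp rules above parenthesize operands unconditionally via `paren`; the lc rules below replace that with a precedence- and associativity-aware `parenth` relation.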
assets/bergamot/rendering/lc.bergamot (new file, 74 lines)

```
PrecApp @ prec(app(?l, ?r), 100, left) <-;
PrecPlus @ prec(plus(?l, ?r), 80, either) <-;
PrecAbs @ prec(abs(?x, ?t, ?e), 0, right) <-;
PrecArr @ prec(tarr(?l, ?r), 0, right) <-;

SelectHead @ select(cons([?t, ?v], ?rest), ?default, ?v) <- ?t;
SelectTail @ select(cons([?t, ?v], ?rest), ?default, ?found) <- not(?t), select(?rest, ?default, ?found);
SelectEmpty @ select(nil, ?default, ?default) <-;

Eq @ eq(?x, ?x) <-;

ParenthAssocLeft @ parenthassoc(?a_i, left, right) <-;
ParenthAssocRight @ parenthassoc(?a_i, right, left) <-;
ParenthAssocNone @ parenthassoc(?a_i, none, ?pos) <-;
ParenthAssocNeq @ parenthassoc(?a_i, ?a_o, ?pos) <- not(eq(?a_i, ?a_o));

Parenth @ parenth(?inner, ?outer, ?pos, ?strin, ?strout) <-
    prec(?inner, ?p_i, ?a_i), prec(?outer, ?p_o, ?a_o),
    join(["(", ?strin, ")"], ?strinparen),
    select([ [less(?p_i, ?p_o), ?strinparen], [less(?p_o, ?p_i), ?strin], [ parenthassoc(?a_i, ?a_o, ?pos), ?strinparen ] ], ?strin, ?strout);
ParenthFallback @ parenth(?inner, ?outer, ?pos, ?strin, ?strin) <-;

LatexListNil @ latexlist(nil, nil) <-;
LatexListCons @ latexlist(cons(?x, ?xs), cons(?l_x, ?l_s)) <- latex(?x, ?l_x), latexlist(?xs, ?l_s);

IntercalateNil @ intercalate(?sep, nil, nil) <-;
IntercalateConsCons @ intercalate(?sep, cons(?x_1, cons(?x_2, ?xs)), cons(?x_1, cons(?sep, ?ys))) <- intercalate(?sep, cons(?x_2, ?xs), ?ys);
IntercalateConsNil @ intercalate(?sep, cons(?x, nil), cons(?x, nil)) <-;

NonEmpty @ nonempty(cons(?x, ?xs)) <-;

LatexInt @ latex(?i, ?l) <- int(?i), tostring(?i, ?l);
LatexFloat @ latex(?f, ?l) <- float(?f), tostring(?f, ?l);
LatexStr @ latex(?s, ?l) <- str(?s), escapestring(?s, ?l_1), latexifystring(?s, ?l_2), join(["\\texttt{\"", ?l_2, "\"}"], ?l);
LatexMeta @ latex(metavariable(?l), ?l) <-;
LatexLit @ latex(lit(?i), ?l) <- latex(?i, ?l);
LatexVar @ latex(var(?s), ?l) <- latex(?s, ?l);
LatexPlus @ latex(plus(?e_1, ?e_2), ?l) <-
    latex(?e_1, ?l_1), latex(?e_2, ?l_2),
    parenth(?e_1, plus(?e_1, ?e_2), left, ?l_1, ?lp_1),
    parenth(?e_2, plus(?e_1, ?e_2), right, ?l_2, ?lp_2),
    join([?lp_1, " + ", ?lp_2], ?l);
LatexPair @ latex(pair(?e_1, ?e_2), ?l) <- latex(?e_1, ?l_1), latex(?e_2, ?l_2), join(["(", ?l_1, ", ", ?l_2, ")"], ?l);
LatexAbs @ latex(abs(?x, ?t, ?e), ?l) <- latex(?e, ?l_e), latex(?t, ?l_t), latex(?x, ?l_x), join(["\\lambda ", ?l_x, " : ", ?l_t, " . ", ?l_e], ?l);
LatexApp @ latex(app(?e_1, ?e_2), ?l) <-
    latex(?e_1, ?l_1), latex(?e_2, ?l_2),
    parenth(?e_1, app(?e_1, ?e_2), left, ?l_1, ?lp_1),
    parenth(?e_2, app(?e_1, ?e_2), right, ?l_2, ?lp_2),
    join([?lp_1, " \\enspace ", ?lp_2], ?l);

LatexTInt @ latex(tint, "\\text{tint}") <-;
LatexTStr @ latex(tstr, "\\text{tstr}") <-;
LatexTArr @ latex(tarr(?t_1, ?t_2), ?l) <-
    latex(?t_1, ?l_1), latex(?t_2, ?l_2),
    parenth(?t_1, tarr(?t_1, ?t_2), left, ?l_1, ?lp_1),
    parenth(?t_2, tarr(?t_1, ?t_2), right, ?l_2, ?lp_2),
    join([?lp_1, " \\to ", ?lp_2], ?l);
LatexTPair @ latex(tpair(?t_1, ?t_2), ?l) <- latex(?t_1, ?l_1), latex(?t_2, ?l_2), join(["(", ?l_1, ", ", ?l_2, ")"], ?l);

LatexTypeEmpty @ latex(empty, "\\varnothing") <-;
LatexTypeExtend @ latex(extend(?a, ?b, ?c), ?l) <- latex(?a, ?l_a), latex(?b, ?l_b), latex(?c, ?l_c), join([?l_a, " , ", ?l_b, " : ", ?l_c], ?l);
LatexTypeInenv @ latex(inenv(?x, ?t, ?G), ?l) <- latex(?x, ?l_x), latex(?t, ?l_t), latex(?G, ?l_G), join([?l_x, " : ", ?l_t, " \\in ", ?l_G], ?l);

LatexTypeBin @ latex(type(?e, ?t), ?l) <- latex(?e, ?l_e), latex(?t, ?l_t), join([?l_e, " : ", ?l_t], ?l);
LatexTypeTer @ latex(type(?G, ?e, ?t), ?l) <- latex(?G, ?l_G), latex(?e, ?l_e), latex(?t, ?l_t), join([?l_G, " \\vdash ", ?l_e, " : ", ?l_t], ?l);

LatexConverts @ latex(converts(?f, ?t), ?l) <- latex(?f, ?l_f), latex(?t, ?l_t), join([?l_f, " \\preceq ", ?l_t], ?l);

LatexIsInt @ latex(int(?e), ?l) <- latex(?e, ?l_e), join([?l_e, " \\in \\texttt{Int}"], ?l);
LatexIsFloat @ latex(float(?e), ?l) <- latex(?e, ?l_e), join([?l_e, " \\in \\texttt{Float}"], ?l);
LatexIsNum @ latex(num(?e), ?l) <- latex(?e, ?l_e), join([?l_e, " \\in \\texttt{Num}"], ?l);
LatexIsStr @ latex(str(?e), ?l) <- latex(?e, ?l_e), join([?l_e, " \\in \\texttt{Str}"], ?l);
LatexSym @ latex(?s, ?l) <- sym(?s), tostring(?s, ?l_1), join(["\\text{", ?l_1,"}"], ?l);
LatexCall @ latex(?c, ?l) <- call(?c, ?n, ?ts), nonempty(?ts), latexlist(?ts, ?lts_1), intercalate(", ", ?lts_1, ?lts_2), join(?lts_2, ?lts_3), join(["\\text{", ?n, "}", "(", ?lts_3, ")"], ?l);
```

(Note: in the Parenth rule, the first select alternative is written `?strinparen`; the captured original read `strinparen` without the metavariable marker, which appears to be a typo.)
assets/scss/bergamot.scss (new file, 174 lines)

```scss
@import "variables.scss";
@import "mixins.scss";

.bergamot-exercise {
  counter-increment: bergamot-exercise;

  .bergamot-root {
    border: none;
    padding: 0;
    margin-top: 1em;
  }

  .bergamot-exercise-label {
    .bergamot-exercise-number::after {
      content: "Exercise " counter(bergamot-exercise);
      font-weight: bold;
      text-decoration: underline;
    }
  }

  .bergamot-button {
    @include bordered-block;
    padding: 0.25em;
    padding-left: 1em;
    padding-right: 1em;
    background-color: inherit;
    display: inline-flex;
    align-items: center;
    justify-content: center;
    transition: 0.25s;
    font-family: $font-body;
    @include var(color, text-color);

    &.bergamot-hidden {
      display: none;
    }

    .feather {
      margin-right: 0.5em;
    }
  }

  .bergamot-play {
    .feather { color: $primary-color; }
    &:hover, &:focus {
      .feather { color: lighten($primary-color, 20%); }
    }
  }

  .bergamot-reset {
    .feather { color: #0099CC; }
    &:hover, &:focus {
      .feather { color: lighten(#0099CC, 20%); }
    }

    svg {
      fill: none;
    }
  }

  .bergamot-close {
    .feather { color: tomato; }
    &:hover, &:focus {
      .feather { color: lighten(tomato, 20%); }
    }
  }

  .bergamot-button-group {
    margin-top: 1em;
  }
}

.bergamot-root {
  @include bordered-block;
  padding: 1em;

  .bergamot-section-heading {
    margin-bottom: 0.5em;
    font-family: $font-body;
    font-style: normal;
    font-weight: bold;
    font-size: 1.25em;
  }

  .bergamot-section {
    margin-bottom: 1em;
  }

  textarea {
    display: block;
    width: 100%;
    height: 10em;
    resize: none;
  }

  input[type="text"] {
    width: 100%;
    @include textual-input;
  }

  .bergamot-rule-list {
    display: flex;
    flex-direction: row;
    flex-wrap: wrap;
    justify-content: center;
  }

  .bergamot-rule-list katex-expression {
    margin-left: .5em;
    margin-right: .5em;
    flex-grow: 1;
    flex-basis: 0;
  }

  .bergamot-rule-section {
    .bergamot-rule-section-name {
      text-align: center;
      margin: 0.25em;
      font-weight: bold;
    }
  }

  .bergamot-proof-tree {
    overflow: auto;
  }

  .bergamot-error {
    @include bordered-block;
    padding: 0.5rem;
    border-color: tomato;
    background-color: rgba(tomato, 0.25);
    margin-top: 1rem;
  }

  .bergamot-selector {
    button {
      @include var(background-color, background-color);
      @include var(color, text-color);
      @include bordered-block;
      padding: 0.5rem;
      font-family: $font-body;
      border-style: dotted;

      &.active {
        border-color: $primary-color;
        border-style: solid;
        font-weight: bold;
      }

      &:not(:first-child) {
        border-bottom-left-radius: 0;
        border-top-left-radius: 0;
      }

      &:not(:last-child) {
        border-bottom-right-radius: 0;
        border-top-right-radius: 0;
        border-right-width: 0;
      }
    }

    button.active + button {
      border-left-color: $primary-color;
      border-left-style: solid;
    }

    margin-bottom: 1rem;
  }

  .bergamot-no-proofs {
    text-align: center;
  }
}
```
assets/scss/donate.scss (new file, 56 lines)

```scss
@import "../../themes/vanilla/assets/scss/mixins.scss";

.donation-methods {
  padding: 0;
  border: none;
  border-spacing: 0 0.5rem;

  td {
    padding: 0;
    overflow: hidden;

    &:first-child {
      @include bordered-block;
      text-align: right;
      border-right: none;
      border-top-right-radius: 0;
      border-bottom-right-radius: 0;
      padding-left: 0.5em;
      padding-right: 0.5rem;

      @include below-container-width {
        @include bordered-block;
        text-align: center;
        border-bottom: none;
        border-bottom-left-radius: 0;
        border-bottom-right-radius: 0;
      }
    }

    &:last-child {
      @include bordered-block;
      border-top-left-radius: 0;
      border-bottom-left-radius: 0;

      @include below-container-width {
        @include bordered-block;
        border-top-left-radius: 0;
        border-top-right-radius: 0;
      }
    }
  }

  tr {
    @include below-container-width {
      margin-bottom: 0.5rem;
    }
  }

  code {
    width: 100%;
    box-sizing: border-box;
    border: none;
    display: inline-block;
    padding: 0.25rem;
  }
}
```
assets/scss/gametheory.scss (new file, 11 lines)

```scss
@import "variables.scss";
@import "mixins.scss";

.assumption-number {
  font-weight: bold;
}

.assumption {
  @include bordered-block;
  padding: 0.8rem;
}
```

@@ -1,14 +1,14 @@
-$basic-border: 1px solid #bfbfbf;
+@import "variables.scss";
+@import "mixins.scss";

 .gmachine-instruction {
   display: flex;
-  border: $basic-border;
-  border-radius: 2px;
+  @include bordered-block;
 }

 .gmachine-instruction-name {
-  padding: 10px;
-  border-right: $basic-border;
+  padding: .8rem;
+  border-right: $standard-border;
   flex-grow: 1;
   flex-basis: 20%;
   text-align: center;
@@ -20,7 +20,7 @@ $basic-border: 1px solid #bfbfbf;
 }

 .gmachine-inner {
-  border-bottom: $basic-border;
+  border-bottom: $standard-border;
   width: 100%;

   &:last-child {
@@ -29,12 +29,12 @@ $basic-border: 1px solid #bfbfbf;
 }

 .gmachine-inner-label {
-  padding: 10px;
+  padding: .8rem;
   font-weight: bold;
 }

 .gmachine-inner-text {
-  padding: 10px;
+  padding: .8rem;
   text-align: right;
   flex-grow: 1;
 }

assets/scss/stack.scss (Normal file, 19 lines)
@@ -0,0 +1,19 @@
@import "variables.scss";
|
||||||
|
@import "mixins.scss";
|
||||||
|
|
||||||
|
.stack {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
max-width: 10rem;
|
||||||
|
margin: auto;
|
||||||
|
@include bordered-block;
|
||||||
|
}
|
||||||
|
|
||||||
|
.stack-element {
|
||||||
|
text-align: center;
|
||||||
|
min-height: 1.5rem;
|
||||||
|
|
||||||
|
&:not(:last-child) {
|
||||||
|
border-bottom: $standard-border;
|
||||||
|
}
|
||||||
|
}
|
||||||

assets/scss/thevoid.scss (Normal file, 430 lines)
@@ -0,0 +1,430 @@
@import "variables.scss";
|
||||||
|
|
||||||
|
body {
|
||||||
|
background-color: #1c1e26;
|
||||||
|
--text-color: white;
|
||||||
|
font-family: $font-code;
|
||||||
|
}
|
||||||
|
|
||||||
|
h1, h2, h3, h4, h5, h6 {
|
||||||
|
text-align: left;
|
||||||
|
font-family: $font-code;
|
||||||
|
}
|
||||||
|
|
||||||
|
h1::after {
|
||||||
|
content: "(writing)";
|
||||||
|
font-size: 1rem;
|
||||||
|
margin-left: 0.5em;
|
||||||
|
position: relative;
|
||||||
|
bottom: -0.5em;
|
||||||
|
color: $primary-color;
|
||||||
|
}
|
||||||
|
|
||||||
|
nav .container {
|
||||||
|
justify-content: flex-start;
|
||||||
|
|
||||||
|
a {
|
||||||
|
padding-left: 0;
|
||||||
|
margin-right: 1em;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-divider {
|
||||||
|
visibility: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
hr {
|
||||||
|
height: auto;
|
||||||
|
border: none;
|
||||||
|
|
||||||
|
&::after {
|
||||||
|
content: "* * *";
|
||||||
|
color: $primary-color;
|
||||||
|
font-size: 2rem;
|
||||||
|
display: block;
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Code for the CSS glitch effect. Originally from: https://codepen.io/mattgrosswork/pen/VwprebG */
|
||||||
|
|
||||||
|
.glitch {
|
||||||
|
position: relative;
|
||||||
|
|
||||||
|
span {
|
||||||
|
animation: paths 5s step-end infinite;
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
&::before, &::after {
|
||||||
|
content: attr(data-text);
|
||||||
|
position: absolute;
|
||||||
|
width: 110%;
|
||||||
|
z-index: -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
&::before {
|
||||||
|
top: 10px;
|
||||||
|
left: 15px;
|
||||||
|
color: #e0287d;
|
||||||
|
|
||||||
|
animation: paths 5s step-end infinite, opacity 5s step-end infinite,
|
||||||
|
font 8s step-end infinite, movement 10s step-end infinite;
|
||||||
|
}
|
||||||
|
|
||||||
|
&::after {
|
||||||
|
top: 5px;
|
||||||
|
left: -10px;
|
||||||
|
color: #1bc7fb;
|
||||||
|
|
||||||
|
animation: paths 5s step-end infinite, opacity 5s step-end infinite,
|
||||||
|
font 7s step-end infinite, movement 8s step-end infinite;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@keyframes paths {
|
||||||
|
0% {
|
||||||
|
clip-path: polygon(
|
||||||
|
0% 43%,
|
||||||
|
83% 43%,
|
||||||
|
83% 22%,
|
||||||
|
23% 22%,
|
||||||
|
23% 24%,
|
||||||
|
91% 24%,
|
||||||
|
91% 26%,
|
||||||
|
18% 26%,
|
||||||
|
18% 83%,
|
||||||
|
29% 83%,
|
||||||
|
29% 17%,
|
||||||
|
41% 17%,
|
||||||
|
41% 39%,
|
||||||
|
18% 39%,
|
||||||
|
18% 82%,
|
||||||
|
54% 82%,
|
||||||
|
54% 88%,
|
||||||
|
19% 88%,
|
||||||
|
19% 4%,
|
||||||
|
39% 4%,
|
||||||
|
39% 14%,
|
||||||
|
76% 14%,
|
||||||
|
76% 52%,
|
||||||
|
23% 52%,
|
||||||
|
23% 35%,
|
||||||
|
19% 35%,
|
||||||
|
19% 8%,
|
||||||
|
36% 8%,
|
||||||
|
36% 31%,
|
||||||
|
73% 31%,
|
||||||
|
73% 16%,
|
||||||
|
1% 16%,
|
||||||
|
1% 56%,
|
||||||
|
50% 56%,
|
||||||
|
50% 8%
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
5% {
|
||||||
|
clip-path: polygon(
|
||||||
|
0% 29%,
|
||||||
|
44% 29%,
|
||||||
|
44% 83%,
|
||||||
|
94% 83%,
|
||||||
|
94% 56%,
|
||||||
|
11% 56%,
|
||||||
|
11% 64%,
|
||||||
|
94% 64%,
|
||||||
|
94% 70%,
|
||||||
|
88% 70%,
|
||||||
|
88% 32%,
|
||||||
|
18% 32%,
|
||||||
|
18% 96%,
|
||||||
|
10% 96%,
|
||||||
|
10% 62%,
|
||||||
|
9% 62%,
|
||||||
|
9% 84%,
|
||||||
|
68% 84%,
|
||||||
|
68% 50%,
|
||||||
|
52% 50%,
|
||||||
|
52% 55%,
|
||||||
|
35% 55%,
|
||||||
|
35% 87%,
|
||||||
|
25% 87%,
|
||||||
|
25% 39%,
|
||||||
|
15% 39%,
|
||||||
|
15% 88%,
|
||||||
|
52% 88%
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
30% {
|
||||||
|
clip-path: polygon(
|
||||||
|
0% 53%,
|
||||||
|
93% 53%,
|
||||||
|
93% 62%,
|
||||||
|
68% 62%,
|
||||||
|
68% 37%,
|
||||||
|
97% 37%,
|
||||||
|
97% 89%,
|
||||||
|
13% 89%,
|
||||||
|
13% 45%,
|
||||||
|
51% 45%,
|
||||||
|
51% 88%,
|
||||||
|
17% 88%,
|
||||||
|
17% 54%,
|
||||||
|
81% 54%,
|
||||||
|
81% 75%,
|
||||||
|
79% 75%,
|
||||||
|
79% 76%,
|
||||||
|
38% 76%,
|
||||||
|
38% 28%,
|
||||||
|
61% 28%,
|
||||||
|
61% 12%,
|
||||||
|
55% 12%,
|
||||||
|
55% 62%,
|
||||||
|
68% 62%,
|
||||||
|
68% 51%,
|
||||||
|
0% 51%,
|
||||||
|
0% 92%,
|
||||||
|
63% 92%,
|
||||||
|
63% 4%,
|
||||||
|
65% 4%
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
45% {
|
||||||
|
clip-path: polygon(
|
||||||
|
0% 33%,
|
||||||
|
2% 33%,
|
||||||
|
2% 69%,
|
||||||
|
58% 69%,
|
||||||
|
58% 94%,
|
||||||
|
55% 94%,
|
||||||
|
55% 25%,
|
||||||
|
33% 25%,
|
||||||
|
33% 85%,
|
||||||
|
16% 85%,
|
||||||
|
16% 19%,
|
||||||
|
5% 19%,
|
||||||
|
5% 20%,
|
||||||
|
79% 20%,
|
||||||
|
79% 96%,
|
||||||
|
93% 96%,
|
||||||
|
93% 50%,
|
||||||
|
5% 50%,
|
||||||
|
5% 74%,
|
||||||
|
55% 74%,
|
||||||
|
55% 57%,
|
||||||
|
96% 57%,
|
||||||
|
96% 59%,
|
||||||
|
87% 59%,
|
||||||
|
87% 65%,
|
||||||
|
82% 65%,
|
||||||
|
82% 39%,
|
||||||
|
63% 39%,
|
||||||
|
63% 92%,
|
||||||
|
4% 92%,
|
||||||
|
4% 36%,
|
||||||
|
24% 36%,
|
||||||
|
24% 70%,
|
||||||
|
1% 70%,
|
||||||
|
1% 43%,
|
||||||
|
15% 43%,
|
||||||
|
15% 28%,
|
||||||
|
23% 28%,
|
||||||
|
23% 71%,
|
||||||
|
90% 71%,
|
||||||
|
90% 86%,
|
||||||
|
97% 86%,
|
||||||
|
97% 1%,
|
||||||
|
60% 1%,
|
||||||
|
60% 67%,
|
||||||
|
71% 67%,
|
||||||
|
71% 91%,
|
||||||
|
17% 91%,
|
||||||
|
17% 14%,
|
||||||
|
39% 14%,
|
||||||
|
39% 30%,
|
||||||
|
58% 30%,
|
||||||
|
58% 11%,
|
||||||
|
52% 11%,
|
||||||
|
52% 83%,
|
||||||
|
68% 83%
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
76% {
|
||||||
|
clip-path: polygon(
|
||||||
|
0% 26%,
|
||||||
|
15% 26%,
|
||||||
|
15% 73%,
|
||||||
|
72% 73%,
|
||||||
|
72% 70%,
|
||||||
|
77% 70%,
|
||||||
|
77% 75%,
|
||||||
|
8% 75%,
|
||||||
|
8% 42%,
|
||||||
|
4% 42%,
|
||||||
|
4% 61%,
|
||||||
|
17% 61%,
|
||||||
|
17% 12%,
|
||||||
|
26% 12%,
|
||||||
|
26% 63%,
|
||||||
|
73% 63%,
|
||||||
|
73% 43%,
|
||||||
|
90% 43%,
|
||||||
|
90% 67%,
|
||||||
|
50% 67%,
|
||||||
|
50% 41%,
|
||||||
|
42% 41%,
|
||||||
|
42% 46%,
|
||||||
|
50% 46%,
|
||||||
|
50% 84%,
|
||||||
|
96% 84%,
|
||||||
|
96% 78%,
|
||||||
|
49% 78%,
|
||||||
|
49% 25%,
|
||||||
|
63% 25%,
|
||||||
|
63% 14%
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
90% {
|
||||||
|
clip-path: polygon(
|
||||||
|
0% 41%,
|
||||||
|
13% 41%,
|
||||||
|
13% 6%,
|
||||||
|
87% 6%,
|
||||||
|
87% 93%,
|
||||||
|
10% 93%,
|
||||||
|
10% 13%,
|
||||||
|
89% 13%,
|
||||||
|
89% 6%,
|
||||||
|
3% 6%,
|
||||||
|
3% 8%,
|
||||||
|
16% 8%,
|
||||||
|
16% 79%,
|
||||||
|
0% 79%,
|
||||||
|
0% 99%,
|
||||||
|
92% 99%,
|
||||||
|
92% 90%,
|
||||||
|
5% 90%,
|
||||||
|
5% 60%,
|
||||||
|
0% 60%,
|
||||||
|
0% 48%,
|
||||||
|
89% 48%,
|
||||||
|
89% 13%,
|
||||||
|
80% 13%,
|
||||||
|
80% 43%,
|
||||||
|
95% 43%,
|
||||||
|
95% 19%,
|
||||||
|
80% 19%,
|
||||||
|
80% 85%,
|
||||||
|
38% 85%,
|
||||||
|
38% 62%
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
1%,
|
||||||
|
7%,
|
||||||
|
33%,
|
||||||
|
47%,
|
||||||
|
78%,
|
||||||
|
93% {
|
||||||
|
clip-path: none;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@keyframes movement {
|
||||||
|
0% {
|
||||||
|
top: 0px;
|
||||||
|
left: -20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
15% {
|
||||||
|
top: 10px;
|
||||||
|
left: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
60% {
|
||||||
|
top: 5px;
|
||||||
|
left: -10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
75% {
|
||||||
|
top: -5px;
|
||||||
|
left: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
100% {
|
||||||
|
top: 10px;
|
||||||
|
left: 5px;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@keyframes opacity {
|
||||||
|
0% {
|
||||||
|
opacity: 0.1;
|
||||||
|
}
|
||||||
|
|
||||||
|
5% {
|
||||||
|
opacity: 0.7;
|
||||||
|
}
|
||||||
|
|
||||||
|
30% {
|
||||||
|
opacity: 0.4;
|
||||||
|
}
|
||||||
|
|
||||||
|
45% {
|
||||||
|
opacity: 0.6;
|
||||||
|
}
|
||||||
|
|
||||||
|
76% {
|
||||||
|
opacity: 0.4;
|
||||||
|
}
|
||||||
|
|
||||||
|
90% {
|
||||||
|
opacity: 0.8;
|
||||||
|
}
|
||||||
|
|
||||||
|
1%,
|
||||||
|
7%,
|
||||||
|
33%,
|
||||||
|
47%,
|
||||||
|
78%,
|
||||||
|
93% {
|
||||||
|
opacity: 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@keyframes font {
|
||||||
|
0% {
|
||||||
|
font-weight: 100;
|
||||||
|
color: #e0287d;
|
||||||
|
filter: blur(3px);
|
||||||
|
}
|
||||||
|
|
||||||
|
20% {
|
||||||
|
font-weight: 500;
|
||||||
|
color: #fff;
|
||||||
|
filter: blur(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
50% {
|
||||||
|
font-weight: 300;
|
||||||
|
color: #1bc7fb;
|
||||||
|
filter: blur(2px);
|
||||||
|
}
|
||||||
|
|
||||||
|
60% {
|
||||||
|
font-weight: 700;
|
||||||
|
color: #fff;
|
||||||
|
filter: blur(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
90% {
|
||||||
|
font-weight: 500;
|
||||||
|
color: #e0287d;
|
||||||
|
filter: blur(6px);
|
||||||
|
}
|
||||||
|
}
|
||||||

build-agda-html.rb (Normal file, 70 lines)
@@ -0,0 +1,70 @@
require "json"
|
||||||
|
require "set"
|
||||||
|
require "optparse"
|
||||||
|
require "fileutils"
|
||||||
|
|
||||||
|
# For target_dir, use absolute paths because when invoking Agda, we'll be
|
||||||
|
# using chdir.
|
||||||
|
root_path = "code"
|
||||||
|
target_dir = File.expand_path "code"
|
||||||
|
data_file = "data/submodules.json"
|
||||||
|
OptionParser.new do |opts|
|
||||||
|
opts.banner = "Usage: build-agda-html.rb [options]"
|
||||||
|
|
||||||
|
opts.on("--root-path=PATH", "Search for Agda project folders in the given location") do |f|
|
||||||
|
root_path = f
|
||||||
|
end
|
||||||
|
opts.on("--target-dir=PATH", "Generate HTML files into the given directory") do |f|
|
||||||
|
target_dir = File.expand_path f
|
||||||
|
end
|
||||||
|
opts.on("--data-file=FILE", "Specify the submodules.json that encodes nested submodule structure") do |f|
|
||||||
|
data_file = f
|
||||||
|
end
|
||||||
|
end.parse!
|
||||||
|
files = ARGV
|
||||||
|
|
||||||
|
code_paths = Dir.entries(root_path).select do |f|
|
||||||
|
File.directory?(File.join(root_path, f)) and f != '.' and f != '..'
|
||||||
|
end.to_set
|
||||||
|
code_paths += JSON.parse(File.read(data_file)).keys if File.exist? data_file
|
||||||
|
# Extending code_paths from submodules.json means that nested Agda modules
|
||||||
|
# have their root dir correctly set.
|
||||||
|
|
||||||
|
max_path = ->(path) {
|
||||||
|
code_paths.max_by do |code_path|
|
||||||
|
count = 0
|
||||||
|
path.chars.zip(code_path.chars) do |c1, c2|
|
||||||
|
break unless c1 == c2
|
||||||
|
count += 1
|
||||||
|
end
|
||||||
|
|
||||||
|
next count
|
||||||
|
end
|
||||||
|
}
|
||||||
|
|
||||||
|
files_for_paths = {}
|
||||||
|
Dir.glob("**/*.agda", base: root_path) do |agda_file|
|
||||||
|
best_path = max_path.call(agda_file)
|
||||||
|
files_for_path = files_for_paths.fetch(best_path) do
|
||||||
|
files_for_paths[best_path] = []
|
||||||
|
end
|
||||||
|
|
||||||
|
files_for_path << agda_file[best_path.length + File::SEPARATOR.length..-1]
|
||||||
|
end
|
||||||
|
|
||||||
|
original_wd = Dir.getwd
|
||||||
|
files_for_paths.each do |path, files|
|
||||||
|
Dir.chdir(original_wd)
|
||||||
|
Dir.chdir(File.join(root_path, path))
|
||||||
|
html_dir = File.join [target_dir, path, "html"]
|
||||||
|
FileUtils.mkdir_p html_dir
|
||||||
|
|
||||||
|
files.each do |file|
|
||||||
|
command = "#{ARGV[0]} #{file} --html --html-dir=#{html_dir}"
|
||||||
|
puts command
|
||||||
|
puts `#{command}`
|
||||||
|
|
||||||
|
# Allow some programs to fail (e.g., IO.agda in SPA without --guardedness)
|
||||||
|
# fail unless $? == 0
|
||||||
|
end
|
||||||
|
end
|
||||||
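
A hedged usage sketch, not part of the committed file: after OptionParser consumes the flags, the remaining positional argument (ARGV[0]) is interpolated as the Agda command, so an invocation could look like the comment below. The paths and the `agda` binary name are illustrative, not taken from the repository.

  # Render every Agda project under code/ into public/code/<project>/html,
  # invoking the `agda` binary found on PATH for each source file:
  #   ruby build-agda-html.rb --root-path=code --target-dir=public/code agda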

chatgpt-subset-feather-icon.rb (Normal file, 49 lines)
@@ -0,0 +1,49 @@
#!/usr/bin/env ruby
# frozen_string_literal: true

require 'nokogiri'
require 'set'

# 1) Process all files passed in from the command line
svgpath = ARGV[0]
files = ARGV[1..]

# 2) Extract used Feather icons
used_icons = Set.new

files.each do |file|
  # Parse each HTML file
  doc = File.open(file, "r:UTF-8") { |f| Nokogiri::HTML(f) }

  # Look for <use xlink:href="/feather-sprite.svg#iconName">
  doc.css("use").each do |use_tag|
    href = use_tag["xlink:href"] || use_tag["href"]
    if href && href.start_with?("/feather-sprite.svg#")
      icon_name = href.split("#").last
      used_icons << icon_name
    end
  end
end

puts "Found #{used_icons.size} unique icons: #{used_icons.to_a.join(', ')}"

# 3) Load the full feather-sprite.svg as XML
sprite_doc = File.open(svgpath, "r:UTF-8") { |f| Nokogiri::XML(f) }

# 4) Create a new SVG with only the required symbols
new_svg = Nokogiri::XML::Document.new
svg_tag = Nokogiri::XML::Node.new("svg", new_svg)
svg_tag["xmlns"] = "http://www.w3.org/2000/svg"
new_svg.add_child(svg_tag)

sprite_doc.css("symbol").each do |symbol_node|
  if used_icons.include?(symbol_node["id"])
    # Duplicate the symbol node (so it can be inserted in the new document)
    svg_tag.add_child(symbol_node.dup)
  end
end

# 5) Save the subset sprite
File.open(svgpath, "w:UTF-8") do |f|
  f.write(new_svg.to_xml)
end
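
A hedged usage sketch, not part of the committed file: the first argument is the sprite rewritten in place, and every later argument is an HTML file scanned for <use> references. The paths below are illustrative.

  #   ruby chatgpt-subset-feather-icon.rb public/feather-sprite.svg public/index.html public/posts/index.html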

chatgpt-subset-one-go.py (Normal file, 69 lines)
@@ -0,0 +1,69 @@
import os
import sys
from bs4 import BeautifulSoup
from fontTools.subset import Subsetter, Options
from fontTools.ttLib import TTFont

FONT_EXTENSIONS = (".ttf", ".woff", ".woff2", ".otf")  # Font file types

def extract_text_from_html(file_path):
    """Extract text content from a single HTML file."""
    with open(file_path, "r", encoding="utf-8") as f:
        soup = BeautifulSoup(f.read(), "html.parser")
    return soup.get_text()

def get_used_characters(files):
    """Collect unique characters from the given HTML files."""
    char_set = set()
    for file in files:
        text = extract_text_from_html(file)
        char_set.update(text)
    return "".join(sorted(char_set))

def find_font_files(directory):
    """Find all font files in the given directory, recursively."""
    font_files = []
    for root, _, files in os.walk(directory):
        for file in files:
            if file.endswith(FONT_EXTENSIONS):
                font_files.append(os.path.join(root, file))
    return font_files

def subset_font_in_place(font_path, characters):
    """Subsets the given font file to include only the specified characters."""
    # Convert characters to their integer code points
    unicode_set = {ord(c) for c in characters}

    font = TTFont(font_path)
    options = Options()
    options.drop_tables += ["DSIG"]
    options.drop_tables += ["LTSH", "VDMX", "hdmx", "gasp"]
    options.unicodes = unicode_set
    options.variations = False
    options.drop_variations = True
    options.layout_features = ["*"]  # keep all OT features
    options.hinting = False

    # Preserve original format if it was WOFF/WOFF2
    if font_path.endswith(".woff2"):
        options.flavor = "woff2"
    elif font_path.endswith(".woff"):
        options.flavor = "woff"

    subsetter = Subsetter(options)
    subsetter.populate(unicodes=unicode_set)
    subsetter.subset(font)

    # Overwrite the original font file
    font.save(font_path)
    print(f"Subsetted font in place: {font_path}")

if __name__ == "__main__":
    used_chars = get_used_characters(sys.argv[2:])
    print(f"Extracted {len(used_chars)} unique characters from {len(sys.argv[2:])} HTML files.")

    font_files = find_font_files(sys.argv[1])
    print(f"Found {len(font_files)} font files to subset.")

    for font_file in font_files:
        subset_font_in_place(font_file, used_chars)
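
A hedged usage sketch, not part of the committed file: sys.argv[1] names the directory searched recursively for fonts, and every later argument is an HTML file whose text determines which code points survive. The paths below are illustrative.

    # Subset every font under public/fonts to the characters actually used:
    #   python chatgpt-subset-one-go.py public/fonts public/index.html public/about/index.html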

code/agda-issomething/example.agda (Normal file, 87 lines)
@@ -0,0 +1,87 @@
open import Agda.Primitive using (Level; lsuc)
open import Relation.Binary.PropositionalEquality using (_≡_)

variable
  a : Level
  A : Set a

module FirstAttempt where
  record Semigroup (A : Set a) : Set a where
    field
      _∙_ : A → A → A

      isAssociative : ∀ (a₁ a₂ a₃ : A) → a₁ ∙ (a₂ ∙ a₃) ≡ (a₁ ∙ a₂) ∙ a₃

  record Monoid (A : Set a) : Set a where
    field semigroup : Semigroup A

    open Semigroup semigroup public

    field
      zero : A

      isIdentityLeft : ∀ (a : A) → zero ∙ a ≡ a
      isIdentityRight : ∀ (a : A) → a ∙ zero ≡ a

  record ContrivedExample (A : Set a) : Set a where
    field
      -- first property
      monoid : Monoid A

      -- second property; Semigroup is a stand-in.
      semigroup : Semigroup A

      operationsEqual : Monoid._∙_ monoid ≡ Semigroup._∙_ semigroup

module SecondAttempt where
  record IsSemigroup {A : Set a} (_∙_ : A → A → A) : Set a where
    field isAssociative : ∀ (a₁ a₂ a₃ : A) → a₁ ∙ (a₂ ∙ a₃) ≡ (a₁ ∙ a₂) ∙ a₃

  record IsMonoid {A : Set a} (zero : A) (_∙_ : A → A → A) : Set a where
    field
      isSemigroup : IsSemigroup _∙_

      isIdentityLeft : ∀ (a : A) → zero ∙ a ≡ a
      isIdentityRight : ∀ (a : A) → a ∙ zero ≡ a

    open IsSemigroup isSemigroup public

  record IsContrivedExample {A : Set a} (zero : A) (_∙_ : A → A → A) : Set a where
    field
      -- first property
      monoid : IsMonoid zero _∙_

      -- second property; Semigroup is a stand-in.
      semigroup : IsSemigroup _∙_

  record Semigroup (A : Set a) : Set a where
    field
      _∙_ : A → A → A
      isSemigroup : IsSemigroup _∙_

  record Monoid (A : Set a) : Set a where
    field
      zero : A
      _∙_ : A → A → A
      isMonoid : IsMonoid zero _∙_

module ThirdAttempt {A : Set a} (_∙_ : A → A → A) where
  record IsSemigroup : Set a where
    field isAssociative : ∀ (a₁ a₂ a₃ : A) → a₁ ∙ (a₂ ∙ a₃) ≡ (a₁ ∙ a₂) ∙ a₃

  record IsMonoid (zero : A) : Set a where
    field
      isSemigroup : IsSemigroup

      isIdentityLeft : ∀ (a : A) → zero ∙ a ≡ a
      isIdentityRight : ∀ (a : A) → a ∙ zero ≡ a

    open IsSemigroup isSemigroup public

  record IsContrivedExample (zero : A) : Set a where
    field
      -- first property
      monoid : IsMonoid zero

      -- second property; Semigroup is a stand-in.
      semigroup : IsSemigroup
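
A hedged usage sketch, not part of the committed file: instantiating ThirdAttempt at natural-number addition. Here ℕ, _+_, +-assoc, and sym come from the Agda standard library (an assumption on my part), and sym flips +-assoc to match the associativity direction used above.

  open import Data.Nat using (ℕ; _+_)
  open import Data.Nat.Properties using (+-assoc)
  open import Relation.Binary.PropositionalEquality using (sym)

  module NatInstance where
    open ThirdAttempt _+_

    -- Addition is a semigroup: flip the library's (x + y) + z ≡ x + (y + z).
    +-isSemigroup : IsSemigroup
    +-isSemigroup = record
      { isAssociative = λ a₁ a₂ a₃ → sym (+-assoc a₁ a₂ a₃) }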

Submodule code/agda-spa added at 9131214880
Submodule code/aoc-2020 added at 7a8503c3fe
Submodule code/blog-static-flake added at 67b47d9c29

code/catamorphisms/Cata.hs (Normal file, 112 lines)
@@ -0,0 +1,112 @@
{-# LANGUAGE LambdaCase, DeriveFunctor, DeriveFoldable, MultiParamTypeClasses #-}
import Prelude hiding (length, sum, fix)

length :: [a] -> Int
length [] = 0
length (_:xs) = 1 + length xs

lengthF :: ([a] -> Int) -> [a] -> Int
lengthF rec [] = 0
lengthF rec (_:xs) = 1 + rec xs

lengthF' = \rec -> \case
  [] -> 0
  _:xs -> 1 + rec xs

fix f = let x = f x in x

length' = fix lengthF

data MyList = MyNil | MyCons Int MyList
data MyListF a = MyNilF | MyConsF Int a

newtype Fix f = Fix { unFix :: f (Fix f) }

testList :: Fix MyListF
testList = Fix (MyConsF 1 (Fix (MyConsF 2 (Fix (MyConsF 3 (Fix MyNilF))))))

myOut :: MyList -> MyListF MyList
myOut MyNil = MyNilF
myOut (MyCons i xs) = MyConsF i xs

myIn :: MyListF MyList -> MyList
myIn MyNilF = MyNil
myIn (MyConsF i xs) = MyCons i xs

instance Functor MyListF where
  fmap f MyNilF = MyNilF
  fmap f (MyConsF i a) = MyConsF i (f a)

mySumF :: MyListF Int -> Int
mySumF MyNilF = 0
mySumF (MyConsF i rest) = i + rest

mySum :: MyList -> Int
mySum = mySumF . fmap mySum . myOut

myCata :: (MyListF a -> a) -> MyList -> a
myCata f = f . fmap (myCata f) . myOut

myLength = myCata $ \case
  MyNilF -> 0
  MyConsF _ l -> 1 + l

myMax = myCata $ \case
  MyNilF -> 0
  MyConsF x y -> max x y

myMin = myCata $ \case
  MyNilF -> 0
  MyConsF x y -> min x y

myTestList = MyCons 2 (MyCons 1 (MyCons 3 MyNil))

pack :: a -> (Int -> a -> a) -> MyListF a -> a
pack b f MyNilF = b
pack b f (MyConsF x y) = f x y

unpack :: (MyListF a -> a) -> (a, Int -> a -> a)
unpack f = (f MyNilF, \i a -> f (MyConsF i a))

class Functor f => Cata a f where
  out :: a -> f a

cata :: Cata a f => (f b -> b) -> a -> b
cata f = f . fmap (cata f) . out

instance Cata MyList MyListF where
  out = myOut

data ListF a b = Nil | Cons a b deriving Functor

instance Cata [a] (ListF a) where
  out [] = Nil
  out (x:xs) = Cons x xs

sum :: Num a => [a] -> a
sum = cata $ \case
  Nil -> 0
  Cons x xs -> x + xs

data BinaryTree a = Node a (BinaryTree a) (BinaryTree a) | Leaf deriving (Show, Foldable)
data BinaryTreeF a b = NodeF a b b | LeafF deriving Functor

instance Cata (BinaryTree a) (BinaryTreeF a) where
  out (Node a l r) = NodeF a l r
  out Leaf = LeafF

invert :: BinaryTree a -> BinaryTree a
invert = cata $ \case
  LeafF -> Leaf
  NodeF a l r -> Node a r l

data MaybeF a b = NothingF | JustF a deriving Functor

instance Cata (Maybe a) (MaybeF a) where
  out Nothing = NothingF
  out (Just x) = JustF x

getOrDefault :: a -> Maybe a -> a
getOrDefault d = cata $ \case
  NothingF -> d
  JustF a -> a
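
A hedged usage sketch, not part of the committed file; the expected results in the comments are mine, derived from the definitions above.

  main :: IO ()
  main = do
    print (myLength myTestList)      -- 3
    print (mySum myTestList)         -- 6
    print (sum [1, 2, 3 :: Int])     -- 6, via the generic cata
    print (getOrDefault 0 (Just 5))  -- 5
    print (invert (Node 1 (Node 2 Leaf Leaf) Leaf))  -- Node 1 Leaf (Node 2 Leaf Leaf)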

Submodule code/compiler added at 137455b0f4

@@ -1,33 +0,0 @@
%option noyywrap

%{
#include <iostream>
%}

%%

[ \n]+ {}
\+ { std::cout << "PLUS" << std::endl; }
\* { std::cout << "TIMES" << std::endl; }
- { std::cout << "MINUS" << std::endl; }
\/ { std::cout << "DIVIDE" << std::endl; }
[0-9]+ { std::cout << "NUMBER: " << yytext << std::endl; }
defn { std::cout << "KEYWORD: defn" << std::endl; }
data { std::cout << "KEYWORD: data" << std::endl; }
case { std::cout << "KEYWORD: case" << std::endl; }
of { std::cout << "KEYWORD: of" << std::endl; }
\{ { std::cout << "OPEN CURLY" << std::endl; }
\} { std::cout << "CLOSED CURLY" << std::endl; }
\( { std::cout << "OPEN PARENTH" << std::endl; }
\) { std::cout << "CLOSE PARENTH" << std::endl; }
, { std::cout << "COMMA" << std::endl; }
-> { std::cout << "PATTERN ARROW" << std::endl; }
= { std::cout << "EQUAL" << std::endl; }
[a-z][a-zA-Z]* { std::cout << "LOWERCASE IDENTIFIER: " << yytext << std::endl; }
[A-Z][a-zA-Z]* { std::cout << "UPPERCASE IDENTIFIER: " << yytext << std::endl; }

%%

int main() {
    yylex();
}

@@ -1,128 +0,0 @@
#pragma once
#include <memory>
#include <vector>

struct ast {
    virtual ~ast() = default;
};

using ast_ptr = std::unique_ptr<ast>;

struct pattern {
    virtual ~pattern() = default;
};

using pattern_ptr = std::unique_ptr<pattern>;

struct branch {
    pattern_ptr pat;
    ast_ptr expr;

    branch(pattern_ptr p, ast_ptr a)
        : pat(std::move(p)), expr(std::move(a)) {}
};

using branch_ptr = std::unique_ptr<branch>;

struct constructor {
    std::string name;
    std::vector<std::string> types;

    constructor(std::string n, std::vector<std::string> ts)
        : name(std::move(n)), types(std::move(ts)) {}
};

using constructor_ptr = std::unique_ptr<constructor>;

struct definition {
    virtual ~definition() = default;
};

using definition_ptr = std::unique_ptr<definition>;

enum binop {
    PLUS,
    MINUS,
    TIMES,
    DIVIDE
};

struct ast_int : public ast {
    int value;

    explicit ast_int(int v)
        : value(v) {}
};

struct ast_lid : public ast {
    std::string id;

    explicit ast_lid(std::string i)
        : id(std::move(i)) {}
};

struct ast_uid : public ast {
    std::string id;

    explicit ast_uid(std::string i)
        : id(std::move(i)) {}
};

struct ast_binop : public ast {
    binop op;
    ast_ptr left;
    ast_ptr right;

    ast_binop(binop o, ast_ptr l, ast_ptr r)
        : op(o), left(std::move(l)), right(std::move(r)) {}
};

struct ast_app : public ast {
    ast_ptr left;
    ast_ptr right;

    ast_app(ast_ptr l, ast_ptr r)
        : left(std::move(l)), right(std::move(r)) {}
};

struct ast_case : public ast {
    ast_ptr of;
    std::vector<branch_ptr> branches;

    ast_case(ast_ptr o, std::vector<branch_ptr> b)
        : of(std::move(o)), branches(std::move(b)) {}
};

struct pattern_var : public pattern {
    std::string var;

    pattern_var(std::string v)
        : var(std::move(v)) {}
};

struct pattern_constr : public pattern {
    std::string constr;
    std::vector<std::string> params;

    pattern_constr(std::string c, std::vector<std::string> p)
        : constr(std::move(c)), params(std::move(p)) {}
};

struct definition_defn : public definition {
    std::string name;
    std::vector<std::string> params;
    ast_ptr body;

    definition_defn(std::string n, std::vector<std::string> p, ast_ptr b)
        : name(std::move(n)), params(std::move(p)), body(std::move(b)) {

    }
};

struct definition_data : public definition {
    std::string name;
    std::vector<constructor_ptr> constructors;

    definition_data(std::string n, std::vector<constructor_ptr> cs)
        : name(std::move(n)), constructors(std::move(cs)) {}
};

@@ -1 +0,0 @@
rm -f parser.o parser.cpp parser.hpp stack.hh scanner.cpp scanner.o a.out

@@ -1,5 +0,0 @@
bison -o parser.cpp -d parser.y
flex -o scanner.cpp scanner.l
g++ -c -o scanner.o scanner.cpp
g++ -c -o parser.o parser.cpp
g++ main.cpp parser.o scanner.o

@@ -1,14 +0,0 @@
#include "ast.hpp"
#include "parser.hpp"

void yy::parser::error(const std::string& msg) {
    std::cout << "An error occured: " << msg << std::endl;
}

extern std::vector<definition_ptr> program;

int main() {
    yy::parser parser;
    parser.parse();
    std::cout << program.size() << std::endl;
}

@@ -1,140 +0,0 @@
%{
#include <string>
#include <iostream>
#include "ast.hpp"
#include "parser.hpp"

std::vector<definition_ptr> program;
extern yy::parser::symbol_type yylex();

%}

%token PLUS
%token TIMES
%token MINUS
%token DIVIDE
%token <int> INT
%token DEFN
%token DATA
%token CASE
%token OF
%token OCURLY
%token CCURLY
%token OPAREN
%token CPAREN
%token COMMA
%token ARROW
%token EQUAL
%token <std::string> LID
%token <std::string> UID

%language "c++"
%define api.value.type variant
%define api.token.constructor

%type <std::vector<std::string>> lowercaseParams uppercaseParams
%type <std::vector<definition_ptr>> program definitions
%type <std::vector<branch_ptr>> branches
%type <std::vector<constructor_ptr>> constructors
%type <ast_ptr> aAdd aMul case app appBase
%type <definition_ptr> definition defn data
%type <branch_ptr> branch
%type <pattern_ptr> pattern
%type <constructor_ptr> constructor

%start program

%%

program
    : definitions { program = std::move($1); }
    ;

definitions
    : definitions definition { $$ = std::move($1); $$.push_back(std::move($2)); }
    | definition { $$ = std::vector<definition_ptr>(); $$.push_back(std::move($1)); }
    ;

definition
    : defn { $$ = std::move($1); }
    | data { $$ = std::move($1); }
    ;

defn
    : DEFN LID lowercaseParams EQUAL OCURLY aAdd CCURLY
        { $$ = definition_ptr(
            new definition_defn(std::move($2), std::move($3), std::move($6))); }
    ;

lowercaseParams
    : %empty { $$ = std::vector<std::string>(); }
    | lowercaseParams LID { $$ = std::move($1); $$.push_back(std::move($2)); }
    ;

uppercaseParams
    : %empty { $$ = std::vector<std::string>(); }
    | uppercaseParams UID { $$ = std::move($1); $$.push_back(std::move($2)); }
    ;

aAdd
    : aAdd PLUS aMul { $$ = ast_ptr(new ast_binop(PLUS, std::move($1), std::move($3))); }
    | aAdd MINUS aMul { $$ = ast_ptr(new ast_binop(MINUS, std::move($1), std::move($3))); }
    | aMul { $$ = std::move($1); }
    ;

aMul
    : aMul TIMES app { $$ = ast_ptr(new ast_binop(TIMES, std::move($1), std::move($3))); }
    | aMul DIVIDE app { $$ = ast_ptr(new ast_binop(DIVIDE, std::move($1), std::move($3))); }
    | app { $$ = std::move($1); }
    ;

app
    : app appBase { $$ = ast_ptr(new ast_app(std::move($1), std::move($2))); }
    | appBase { $$ = std::move($1); }
    ;

appBase
    : INT { $$ = ast_ptr(new ast_int($1)); }
    | LID { $$ = ast_ptr(new ast_lid(std::move($1))); }
    | UID { $$ = ast_ptr(new ast_uid(std::move($1))); }
    | OPAREN aAdd CPAREN { $$ = std::move($2); }
    | case { $$ = std::move($1); }
    ;

case
    : CASE aAdd OF OCURLY branches CCURLY
        { $$ = ast_ptr(new ast_case(std::move($2), std::move($5))); }
    ;

branches
    : branches branch { $$ = std::move($1); $1.push_back(std::move($2)); }
    | branch { $$ = std::vector<branch_ptr>(); $$.push_back(std::move($1));}
    ;

branch
    : pattern ARROW OCURLY aAdd CCURLY
        { $$ = branch_ptr(new branch(std::move($1), std::move($4))); }
    ;

pattern
    : LID { $$ = pattern_ptr(new pattern_var(std::move($1))); }
    | UID lowercaseParams
        { $$ = pattern_ptr(new pattern_constr(std::move($1), std::move($2))); }
    ;

data
    : DATA UID EQUAL OCURLY constructors CCURLY
        { $$ = definition_ptr(new definition_data(std::move($2), std::move($5))); }
    ;

constructors
    : constructors COMMA constructor { $$ = std::move($1); $$.push_back(std::move($3)); }
    | constructor
        { $$ = std::vector<constructor_ptr>(); $$.push_back(std::move($1)); }
    ;

constructor
    : UID uppercaseParams
        { $$ = constructor_ptr(new constructor(std::move($1), std::move($2))); }
    ;

@@ -1,34 +0,0 @@
%option noyywrap

%{
#include <iostream>
#include "ast.hpp"
#include "parser.hpp"

#define YY_DECL yy::parser::symbol_type yylex()

%}

%%

[ \n]+ {}
\+ { return yy::parser::make_PLUS(); }
\* { return yy::parser::make_TIMES(); }
- { return yy::parser::make_MINUS(); }
\/ { return yy::parser::make_DIVIDE(); }
[0-9]+ { return yy::parser::make_INT(atoi(yytext)); }
defn { return yy::parser::make_DEFN(); }
data { return yy::parser::make_DATA(); }
case { return yy::parser::make_CASE(); }
of { return yy::parser::make_OF(); }
\{ { return yy::parser::make_OCURLY(); }
\} { return yy::parser::make_CCURLY(); }
\( { return yy::parser::make_OPAREN(); }
\) { return yy::parser::make_CPAREN(); }
, { return yy::parser::make_COMMA(); }
-> { return yy::parser::make_ARROW(); }
= { return yy::parser::make_EQUAL(); }
[a-z][a-zA-Z]* { return yy::parser::make_LID(std::string(yytext)); }
[A-Z][a-zA-Z]* { return yy::parser::make_UID(std::string(yytext)); }

%%

@@ -1,82 +0,0 @@
#include "ast.hpp"

std::string op_name(binop op) {
    switch(op) {
        case PLUS: return "+";
        case MINUS: return "-";
        case TIMES: return "*";
        case DIVIDE: return "/";
    }
    throw 0;
}

type_ptr ast_int::typecheck(type_mgr& mgr, const type_env& env) const {
    return type_ptr(new type_base("Int"));
}

type_ptr ast_lid::typecheck(type_mgr& mgr, const type_env& env) const {
    return env.lookup(id);
}

type_ptr ast_uid::typecheck(type_mgr& mgr, const type_env& env) const {
    return env.lookup(id);
}

type_ptr ast_binop::typecheck(type_mgr& mgr, const type_env& env) const {
    type_ptr ltype = left->typecheck(mgr, env);
    type_ptr rtype = right->typecheck(mgr, env);
    type_ptr ftype = env.lookup(op_name(op));
    if(!ftype) throw 0;

    type_ptr return_type = mgr.new_type();
    type_ptr arrow_one = type_ptr(new type_arr(rtype, return_type));
    type_ptr arrow_two = type_ptr(new type_arr(ltype, arrow_one));

    mgr.unify(arrow_two, ftype);
    return return_type;
}

type_ptr ast_app::typecheck(type_mgr& mgr, const type_env& env) const {
    type_ptr ltype = left->typecheck(mgr, env);
    type_ptr rtype = right->typecheck(mgr, env);

    type_ptr return_type = mgr.new_type();
    type_ptr arrow = type_ptr(new type_arr(rtype, return_type));
    mgr.unify(arrow, ltype);
    return return_type;
}

type_ptr ast_case::typecheck(type_mgr& mgr, const type_env& env) const {
    type_ptr case_type = of->typecheck(mgr, env);
    type_ptr branch_type = mgr.new_type();

    for(auto& branch : branches) {
        type_env new_env = env.scope();
        branch->pat->match(case_type, mgr, new_env);
        type_ptr curr_branch_type = branch->expr->typecheck(mgr, new_env);
        mgr.unify(branch_type, curr_branch_type);
    }

    return branch_type;
}

void pattern_var::match(type_ptr t, type_mgr& mgr, type_env& env) const {
    env.bind(var, t);
}

void pattern_constr::match(type_ptr t, type_mgr& mgr, type_env& env) const {
    type_ptr constructor_type = env.lookup(constr);
    if(!constructor_type) throw 0;

    for(int i = 0; i < params.size(); i++) {
        type_arr* arr = dynamic_cast<type_arr*>(constructor_type.get());
        if(!arr) throw 0;

        env.bind(params[i], arr->left);
        constructor_type = arr->right;
    }

    mgr.unify(t, constructor_type);
    type_base* result_type = dynamic_cast<type_base*>(constructor_type.get());
    if(!result_type) throw 0;
}

@@ -1,162 +0,0 @@
#pragma once
#include <memory>
#include <vector>
#include "type.hpp"
#include "env.hpp"

struct ast {
    virtual ~ast() = default;

    virtual type_ptr typecheck(type_mgr& mgr, const type_env& env) const = 0;
};

using ast_ptr = std::unique_ptr<ast>;

struct pattern {
    virtual ~pattern() = default;

    virtual void match(type_ptr t, type_mgr& mgr, type_env& env) const = 0;
};

using pattern_ptr = std::unique_ptr<pattern>;

struct branch {
    pattern_ptr pat;
    ast_ptr expr;

    branch(pattern_ptr p, ast_ptr a)
        : pat(std::move(p)), expr(std::move(a)) {}
};

using branch_ptr = std::unique_ptr<branch>;

struct constructor {
    std::string name;
    std::vector<std::string> types;

    constructor(std::string n, std::vector<std::string> ts)
        : name(std::move(n)), types(std::move(ts)) {}
};

using constructor_ptr = std::unique_ptr<constructor>;

struct definition {
    virtual ~definition() = default;

    virtual void typecheck_first(type_mgr& mgr, type_env& env) = 0;
    virtual void typecheck_second(type_mgr& mgr, const type_env& env) const = 0;
};

using definition_ptr = std::unique_ptr<definition>;

enum binop {
    PLUS,
    MINUS,
    TIMES,
    DIVIDE
};

struct ast_int : public ast {
    int value;

    explicit ast_int(int v)
        : value(v) {}

    type_ptr typecheck(type_mgr& mgr, const type_env& env) const;
};

struct ast_lid : public ast {
    std::string id;

    explicit ast_lid(std::string i)
        : id(std::move(i)) {}

    type_ptr typecheck(type_mgr& mgr, const type_env& env) const;
};

struct ast_uid : public ast {
    std::string id;

    explicit ast_uid(std::string i)
        : id(std::move(i)) {}

    type_ptr typecheck(type_mgr& mgr, const type_env& env) const;
};

struct ast_binop : public ast {
    binop op;
    ast_ptr left;
    ast_ptr right;

    ast_binop(binop o, ast_ptr l, ast_ptr r)
        : op(o), left(std::move(l)), right(std::move(r)) {}

    type_ptr typecheck(type_mgr& mgr, const type_env& env) const;
};

struct ast_app : public ast {
    ast_ptr left;
    ast_ptr right;

    ast_app(ast_ptr l, ast_ptr r)
        : left(std::move(l)), right(std::move(r)) {}

    type_ptr typecheck(type_mgr& mgr, const type_env& env) const;
};

struct ast_case : public ast {
    ast_ptr of;
    std::vector<branch_ptr> branches;

    ast_case(ast_ptr o, std::vector<branch_ptr> b)
        : of(std::move(o)), branches(std::move(b)) {}

    type_ptr typecheck(type_mgr& mgr, const type_env& env) const;
};

struct pattern_var : public pattern {
    std::string var;

    pattern_var(std::string v)
        : var(std::move(v)) {}

    void match(type_ptr t, type_mgr& mgr, type_env& env) const;
};

struct pattern_constr : public pattern {
    std::string constr;
    std::vector<std::string> params;

    pattern_constr(std::string c, std::vector<std::string> p)
        : constr(std::move(c)), params(std::move(p)) {}

    void match(type_ptr t, type_mgr&, type_env& env) const;
};

struct definition_defn : public definition {
    std::string name;
    std::vector<std::string> params;
    ast_ptr body;

    type_ptr return_type;
    std::vector<type_ptr> param_types;

    definition_defn(std::string n, std::vector<std::string> p, ast_ptr b)
        : name(std::move(n)), params(std::move(p)), body(std::move(b)) {

    }

    void typecheck_first(type_mgr& mgr, type_env& env);
    void typecheck_second(type_mgr& mgr, const type_env& env) const;
};

struct definition_data : public definition {
    std::string name;
    std::vector<constructor_ptr> constructors;

    definition_data(std::string n, std::vector<constructor_ptr> cs)
        : name(std::move(n)), constructors(std::move(cs)) {}

    void typecheck_first(type_mgr& mgr, type_env& env);
    void typecheck_second(type_mgr& mgr, const type_env& env) const;
};

@@ -1,2 +0,0 @@
data Bool = { True, False }
defn main = { 3 + True }

@@ -1 +0,0 @@
defn main = { 1 2 3 4 5 }

@@ -1 +0,0 @@
rm -f parser.o parser.cpp parser.hpp stack.hh scanner.cpp scanner.o type.o env.o ast.o definition.o a.out

@@ -1,9 +0,0 @@
bison -o parser.cpp -d parser.y
flex -o scanner.cpp scanner.l
g++ -g -c -o scanner.o scanner.cpp
g++ -g -c -o parser.o parser.cpp
g++ -g -c -o type.o type.cpp
g++ -g -c -o env.o env.cpp
g++ -g -c -o ast.o ast.cpp
g++ -g -c -o definition.o definition.cpp
g++ -g main.cpp parser.o scanner.o type.o env.o ast.o definition.o

@@ -1,48 +0,0 @@
#include "ast.hpp"

void definition_defn::typecheck_first(type_mgr& mgr, type_env& env) {
    return_type = mgr.new_type();
    type_ptr full_type = return_type;

    for(auto it = params.rbegin(); it != params.rend(); it++) {
        type_ptr param_type = mgr.new_type();
        full_type = type_ptr(new type_arr(param_type, full_type));
        param_types.push_back(param_type);
    }

    env.bind(name, full_type);
}

void definition_defn::typecheck_second(type_mgr& mgr, const type_env& env) const {
    type_env new_env = env.scope();
    auto param_it = params.begin();
    auto type_it = param_types.rbegin();

    while(param_it != params.end() && type_it != param_types.rend()) {
        new_env.bind(*param_it, *type_it);
        param_it++;
        type_it++;
    }

    type_ptr body_type = body->typecheck(mgr, new_env);
    mgr.unify(return_type, body_type);
}

void definition_data::typecheck_first(type_mgr& mgr, type_env& env) {
    type_ptr return_type = type_ptr(new type_base(name));

    for(auto& constructor : constructors) {
        type_ptr full_type = return_type;

        for(auto& type_name : constructor->types) {
            type_ptr type = type_ptr(new type_base(type_name));
            full_type = type_ptr(new type_arr(type, full_type));
        }

        env.bind(constructor->name, full_type);
    }
}

void definition_data::typecheck_second(type_mgr& mgr, const type_env& env) const {
    // Nothing
}

@@ -1,16 +0,0 @@
#include "env.hpp"

type_ptr type_env::lookup(const std::string& name) const {
    auto it = names.find(name);
    if(it != names.end()) return it->second;
    if(parent) return parent->lookup(name);
    return nullptr;
}

void type_env::bind(const std::string& name, type_ptr t) {
    names[name] = t;
}

type_env type_env::scope() const {
    return type_env(this);
}

@@ -1,16 +0,0 @@
#pragma once
#include <map>
#include "type.hpp"

struct type_env {
    std::map<std::string, type_ptr> names;
    type_env const* parent = nullptr;

    type_env(type_env const* p)
        : parent(p) {}
    type_env() : type_env(nullptr) {}

    type_ptr lookup(const std::string& name) const;
    void bind(const std::string& name, type_ptr t);
    type_env scope() const;
};

@@ -1,39 +0,0 @@
#include "ast.hpp"
#include "parser.hpp"
#include "type.hpp"

void yy::parser::error(const std::string& msg) {
    std::cout << "An error occured: " << msg << std::endl;
}

extern std::vector<definition_ptr> program;

void typecheck_program(const std::vector<definition_ptr>& prog) {
    type_mgr mgr;
    type_env env;

    type_ptr int_type = type_ptr(new type_base("Int"));
    type_ptr binop_type = type_ptr(new type_arr(
        int_type,
        type_ptr(new type_arr(int_type, int_type))));

    env.bind("+", binop_type);
    env.bind("-", binop_type);
    env.bind("*", binop_type);
    env.bind("/", binop_type);

    for(auto& def : prog) {
        def->typecheck_first(mgr, env);
    }

    for(auto& def : prog) {
        def->typecheck_second(mgr, env);
    }
}

int main() {
    yy::parser parser;
    parser.parse();
    typecheck_program(program);
    std::cout << program.size() << std::endl;
}

@@ -1,140 +0,0 @@
%{
#include <string>
#include <iostream>
#include "ast.hpp"
#include "parser.hpp"

std::vector<definition_ptr> program;
extern yy::parser::symbol_type yylex();

%}

%token PLUS
%token TIMES
%token MINUS
%token DIVIDE
%token <int> INT
%token DEFN
%token DATA
%token CASE
%token OF
%token OCURLY
%token CCURLY
%token OPAREN
%token CPAREN
%token COMMA
%token ARROW
%token EQUAL
%token <std::string> LID
%token <std::string> UID

%language "c++"
%define api.value.type variant
%define api.token.constructor

%type <std::vector<std::string>> lowercaseParams uppercaseParams
%type <std::vector<definition_ptr>> program definitions
%type <std::vector<branch_ptr>> branches
%type <std::vector<constructor_ptr>> constructors
%type <ast_ptr> aAdd aMul case app appBase
%type <definition_ptr> definition defn data
%type <branch_ptr> branch
%type <pattern_ptr> pattern
%type <constructor_ptr> constructor

%start program

%%

program
    : definitions { program = std::move($1); }
    ;

definitions
    : definitions definition { $$ = std::move($1); $$.push_back(std::move($2)); }
    | definition { $$ = std::vector<definition_ptr>(); $$.push_back(std::move($1)); }
    ;

definition
    : defn { $$ = std::move($1); }
    | data { $$ = std::move($1); }
    ;

defn
    : DEFN LID lowercaseParams EQUAL OCURLY aAdd CCURLY
        { $$ = definition_ptr(
            new definition_defn(std::move($2), std::move($3), std::move($6))); }
    ;

lowercaseParams
    : %empty { $$ = std::vector<std::string>(); }
    | lowercaseParams LID { $$ = std::move($1); $$.push_back(std::move($2)); }
    ;

uppercaseParams
    : %empty { $$ = std::vector<std::string>(); }
    | uppercaseParams UID { $$ = std::move($1); $$.push_back(std::move($2)); }
    ;

aAdd
    : aAdd PLUS aMul { $$ = ast_ptr(new ast_binop(PLUS, std::move($1), std::move($3))); }
    | aAdd MINUS aMul { $$ = ast_ptr(new ast_binop(MINUS, std::move($1), std::move($3))); }
    | aMul { $$ = std::move($1); }
    ;

aMul
    : aMul TIMES app { $$ = ast_ptr(new ast_binop(TIMES, std::move($1), std::move($3))); }
    | aMul DIVIDE app { $$ = ast_ptr(new ast_binop(DIVIDE, std::move($1), std::move($3))); }
    | app { $$ = std::move($1); }
    ;

app
    : app appBase { $$ = ast_ptr(new ast_app(std::move($1), std::move($2))); }
    | appBase { $$ = std::move($1); }
    ;

appBase
    : INT { $$ = ast_ptr(new ast_int($1)); }
    | LID { $$ = ast_ptr(new ast_lid(std::move($1))); }
    | UID { $$ = ast_ptr(new ast_uid(std::move($1))); }
    | OPAREN aAdd CPAREN { $$ = std::move($2); }
    | case { $$ = std::move($1); }
    ;

case
    : CASE aAdd OF OCURLY branches CCURLY
        { $$ = ast_ptr(new ast_case(std::move($2), std::move($5))); }
    ;

branches
    : branches branch { $$ = std::move($1); $1.push_back(std::move($2)); }
    | branch { $$ = std::vector<branch_ptr>(); $$.push_back(std::move($1));}
    ;

branch
    : pattern ARROW OCURLY aAdd CCURLY
        { $$ = branch_ptr(new branch(std::move($1), std::move($4))); }
    ;

pattern
    : LID { $$ = pattern_ptr(new pattern_var(std::move($1))); }
    | UID lowercaseParams
        { $$ = pattern_ptr(new pattern_constr(std::move($1), std::move($2))); }
    ;

data
    : DATA UID EQUAL OCURLY constructors CCURLY
        { $$ = definition_ptr(new definition_data(std::move($2), std::move($5))); }
    ;

constructors
    : constructors COMMA constructor { $$ = std::move($1); $$.push_back(std::move($3)); }
    | constructor
        { $$ = std::vector<constructor_ptr>(); $$.push_back(std::move($1)); }
    ;

constructor
    : UID uppercaseParams
        { $$ = constructor_ptr(new constructor(std::move($1), std::move($2))); }
    ;

@@ -1,34 +0,0 @@
%option noyywrap

%{
#include <iostream>
#include "ast.hpp"
#include "parser.hpp"

#define YY_DECL yy::parser::symbol_type yylex()

%}

%%

[ \n]+ {}
\+ { return yy::parser::make_PLUS(); }
\* { return yy::parser::make_TIMES(); }
- { return yy::parser::make_MINUS(); }
\/ { return yy::parser::make_DIVIDE(); }
[0-9]+ { return yy::parser::make_INT(atoi(yytext)); }
defn { return yy::parser::make_DEFN(); }
data { return yy::parser::make_DATA(); }
case { return yy::parser::make_CASE(); }
of { return yy::parser::make_OF(); }
\{ { return yy::parser::make_OCURLY(); }
\} { return yy::parser::make_CCURLY(); }
\( { return yy::parser::make_OPAREN(); }
\) { return yy::parser::make_CPAREN(); }
, { return yy::parser::make_COMMA(); }
-> { return yy::parser::make_ARROW(); }
= { return yy::parser::make_EQUAL(); }
[a-z][a-zA-Z]* { return yy::parser::make_LID(std::string(yytext)); }
[A-Z][a-zA-Z]* { return yy::parser::make_UID(std::string(yytext)); }

%%
@@ -1,78 +0,0 @@
#include "type.hpp"
#include <sstream>
#include <algorithm>

std::string type_mgr::new_type_name() {
    int temp = last_id++;
    std::string str = "";

    while(temp != -1) {
        str += (char) ('a' + (temp % 26));
        temp = temp / 26 - 1;
    }

    std::reverse(str.begin(), str.end());
    return str;
}

type_ptr type_mgr::new_type() {
    return type_ptr(new type_var(new_type_name()));
}

type_ptr type_mgr::new_arrow_type() {
    return type_ptr(new type_arr(new_type(), new_type()));
}

type_ptr type_mgr::resolve(type_ptr t, type_var*& var) {
    type_var* cast;

    var = nullptr;
    while((cast = dynamic_cast<type_var*>(t.get()))) {
        auto it = types.find(cast->name);

        if(it == types.end()) {
            var = cast;
            break;
        }
        t = it->second;
    }

    return t;
}

void type_mgr::unify(type_ptr l, type_ptr r) {
    type_var* lvar;
    type_var* rvar;
    type_arr* larr;
    type_arr* rarr;
    type_base* lid;
    type_base* rid;

    l = resolve(l, lvar);
    r = resolve(r, rvar);

    if(lvar) {
        bind(lvar->name, r);
        return;
    } else if(rvar) {
        bind(rvar->name, l);
        return;
    } else if((larr = dynamic_cast<type_arr*>(l.get())) &&
            (rarr = dynamic_cast<type_arr*>(r.get()))) {
        unify(larr->left, rarr->left);
        unify(larr->right, rarr->right);
        return;
    } else if((lid = dynamic_cast<type_base*>(l.get())) &&
            (rid = dynamic_cast<type_base*>(r.get()))) {
        if(lid->name == rid->name) return;
    }

    throw 0;
}

void type_mgr::bind(const std::string& s, type_ptr t) {
    type_var* other = dynamic_cast<type_var*>(t.get());

    if(other && other->name == s) return;
    types[s] = t;
}
@@ -1,44 +0,0 @@
#pragma once
#include <memory>
#include <map>

struct type {
    virtual ~type() = default;
};

using type_ptr = std::shared_ptr<type>;

struct type_var : public type {
    std::string name;

    type_var(std::string n)
        : name(std::move(n)) {}
};

struct type_base : public type {
    std::string name;

    type_base(std::string n)
        : name(std::move(n)) {}
};

struct type_arr : public type {
    type_ptr left;
    type_ptr right;

    type_arr(type_ptr l, type_ptr r)
        : left(std::move(l)), right(std::move(r)) {}
};

struct type_mgr {
    int last_id = 0;
    std::map<std::string, type_ptr> types;

    std::string new_type_name();
    type_ptr new_type();
    type_ptr new_arrow_type();

    void unify(type_ptr l, type_ptr r);
    type_ptr resolve(type_ptr t, type_var*& var);
    void bind(const std::string& s, type_ptr t);
};
@@ -1,2 +0,0 @@
defn main = { plus 320 6 }
defn plus x y = { x + y }
@@ -1,3 +0,0 @@
defn add x y = { x + y }
defn double x = { add x x }
defn main = { double 163 }
@@ -1,7 +0,0 @@
data List = { Nil, Cons Int List }
defn length l = {
    case l of {
        Nil -> { 0 }
        Cons x xs -> { 1 + length xs }
    }
}
@@ -1,25 +0,0 @@
cmake_minimum_required(VERSION 3.1)
project(compiler)

find_package(BISON)
find_package(FLEX)
bison_target(parser
    ${CMAKE_CURRENT_SOURCE_DIR}/parser.y
    ${CMAKE_CURRENT_BINARY_DIR}/parser.cpp
    COMPILE_FLAGS "-d")
flex_target(scanner
    ${CMAKE_CURRENT_SOURCE_DIR}/scanner.l
    ${CMAKE_CURRENT_BINARY_DIR}/scanner.cpp)
add_flex_bison_dependency(scanner parser)

add_executable(compiler
    ast.cpp ast.hpp definition.cpp
    env.cpp env.hpp
    type.cpp type.hpp
    error.cpp error.hpp
    ${BISON_parser_OUTPUTS}
    ${FLEX_scanner_OUTPUTS}
    main.cpp
)
target_include_directories(compiler PUBLIC ${CMAKE_CURRENT_SOURCE_DIR})
target_include_directories(compiler PUBLIC ${CMAKE_CURRENT_BINARY_DIR})
@@ -1,144 +0,0 @@
#include "ast.hpp"
#include <ostream>
#include "error.hpp"

std::string op_name(binop op) {
    switch(op) {
        case PLUS: return "+";
        case MINUS: return "-";
        case TIMES: return "*";
        case DIVIDE: return "/";
    }
    return "??";
}

void print_indent(int n, std::ostream& to) {
    while(n--) to << " ";
}

void ast_int::print(int indent, std::ostream& to) const {
    print_indent(indent, to);
    to << "INT: " << value << std::endl;
}

type_ptr ast_int::typecheck(type_mgr& mgr, const type_env& env) const {
    return type_ptr(new type_base("Int"));
}

void ast_lid::print(int indent, std::ostream& to) const {
    print_indent(indent, to);
    to << "LID: " << id << std::endl;
}

type_ptr ast_lid::typecheck(type_mgr& mgr, const type_env& env) const {
    return env.lookup(id);
}

void ast_uid::print(int indent, std::ostream& to) const {
    print_indent(indent, to);
    to << "UID: " << id << std::endl;
}

type_ptr ast_uid::typecheck(type_mgr& mgr, const type_env& env) const {
    return env.lookup(id);
}

void ast_binop::print(int indent, std::ostream& to) const {
    print_indent(indent, to);
    to << "BINOP: " << op_name(op) << std::endl;
    left->print(indent + 1, to);
    right->print(indent + 1, to);
}

type_ptr ast_binop::typecheck(type_mgr& mgr, const type_env& env) const {
    type_ptr ltype = left->typecheck(mgr, env);
    type_ptr rtype = right->typecheck(mgr, env);
    type_ptr ftype = env.lookup(op_name(op));
    if(!ftype) throw type_error(std::string("unknown binary operator ") + op_name(op));

    type_ptr return_type = mgr.new_type();
    type_ptr arrow_one = type_ptr(new type_arr(rtype, return_type));
    type_ptr arrow_two = type_ptr(new type_arr(ltype, arrow_one));

    mgr.unify(arrow_two, ftype);
    return return_type;
}

void ast_app::print(int indent, std::ostream& to) const {
    print_indent(indent, to);
    to << "APP:" << std::endl;
    left->print(indent + 1, to);
    right->print(indent + 1, to);
}

type_ptr ast_app::typecheck(type_mgr& mgr, const type_env& env) const {
    type_ptr ltype = left->typecheck(mgr, env);
    type_ptr rtype = right->typecheck(mgr, env);

    type_ptr return_type = mgr.new_type();
    type_ptr arrow = type_ptr(new type_arr(rtype, return_type));
    mgr.unify(arrow, ltype);
    return return_type;
}

void ast_case::print(int indent, std::ostream& to) const {
    print_indent(indent, to);
    to << "CASE: " << std::endl;
    for(auto& branch : branches) {
        print_indent(indent + 1, to);
        branch->pat->print(to);
        to << std::endl;
        branch->expr->print(indent + 2, to);
    }
}

type_ptr ast_case::typecheck(type_mgr& mgr, const type_env& env) const {
    type_var* var;
    type_ptr case_type = mgr.resolve(of->typecheck(mgr, env), var);
    type_ptr branch_type = mgr.new_type();

    if(!dynamic_cast<type_base*>(case_type.get())) {
        throw type_error("attempting case analysis of non-data type");
    }

    for(auto& branch : branches) {
        type_env new_env = env.scope();
        branch->pat->match(case_type, mgr, new_env);
        type_ptr curr_branch_type = branch->expr->typecheck(mgr, new_env);
        mgr.unify(branch_type, curr_branch_type);
    }

    return branch_type;
}

void pattern_var::print(std::ostream& to) const {
    to << var;
}

void pattern_var::match(type_ptr t, type_mgr& mgr, type_env& env) const {
    env.bind(var, t);
}

void pattern_constr::print(std::ostream& to) const {
    to << constr;
    for(auto& param : params) {
        to << " " << param;
    }
}

void pattern_constr::match(type_ptr t, type_mgr& mgr, type_env& env) const {
    type_ptr constructor_type = env.lookup(constr);
    if(!constructor_type) {
        throw type_error(std::string("pattern using unknown constructor ") + constr);
    }

    for(int i = 0; i < params.size(); i++) {
        type_arr* arr = dynamic_cast<type_arr*>(constructor_type.get());
        if(!arr) throw type_error("too many parameters in constructor pattern");

        env.bind(params[i], arr->left);
        constructor_type = arr->right;
    }

    mgr.unify(t, constructor_type);
}
@@ -1,172 +0,0 @@
#pragma once
#include <memory>
#include <vector>
#include "type.hpp"
#include "env.hpp"

struct ast {
    virtual ~ast() = default;

    virtual void print(int indent, std::ostream& to) const = 0;
    virtual type_ptr typecheck(type_mgr& mgr, const type_env& env) const = 0;
};

using ast_ptr = std::unique_ptr<ast>;

struct pattern {
    virtual ~pattern() = default;

    virtual void print(std::ostream& to) const = 0;
    virtual void match(type_ptr t, type_mgr& mgr, type_env& env) const = 0;
};

using pattern_ptr = std::unique_ptr<pattern>;

struct branch {
    pattern_ptr pat;
    ast_ptr expr;

    branch(pattern_ptr p, ast_ptr a)
        : pat(std::move(p)), expr(std::move(a)) {}
};

using branch_ptr = std::unique_ptr<branch>;

struct constructor {
    std::string name;
    std::vector<std::string> types;

    constructor(std::string n, std::vector<std::string> ts)
        : name(std::move(n)), types(std::move(ts)) {}
};

using constructor_ptr = std::unique_ptr<constructor>;

struct definition {
    virtual ~definition() = default;

    virtual void typecheck_first(type_mgr& mgr, type_env& env) = 0;
    virtual void typecheck_second(type_mgr& mgr, const type_env& env) const = 0;
};

using definition_ptr = std::unique_ptr<definition>;

enum binop {
    PLUS,
    MINUS,
    TIMES,
    DIVIDE
};

struct ast_int : public ast {
    int value;

    explicit ast_int(int v)
        : value(v) {}

    void print(int indent, std::ostream& to) const;
    type_ptr typecheck(type_mgr& mgr, const type_env& env) const;
};

struct ast_lid : public ast {
    std::string id;

    explicit ast_lid(std::string i)
        : id(std::move(i)) {}

    void print(int indent, std::ostream& to) const;
    type_ptr typecheck(type_mgr& mgr, const type_env& env) const;
};

struct ast_uid : public ast {
    std::string id;

    explicit ast_uid(std::string i)
        : id(std::move(i)) {}

    void print(int indent, std::ostream& to) const;
    type_ptr typecheck(type_mgr& mgr, const type_env& env) const;
};

struct ast_binop : public ast {
    binop op;
    ast_ptr left;
    ast_ptr right;

    ast_binop(binop o, ast_ptr l, ast_ptr r)
        : op(o), left(std::move(l)), right(std::move(r)) {}

    void print(int indent, std::ostream& to) const;
    type_ptr typecheck(type_mgr& mgr, const type_env& env) const;
};

struct ast_app : public ast {
    ast_ptr left;
    ast_ptr right;

    ast_app(ast_ptr l, ast_ptr r)
        : left(std::move(l)), right(std::move(r)) {}

    void print(int indent, std::ostream& to) const;
    type_ptr typecheck(type_mgr& mgr, const type_env& env) const;
};

struct ast_case : public ast {
    ast_ptr of;
    std::vector<branch_ptr> branches;

    ast_case(ast_ptr o, std::vector<branch_ptr> b)
        : of(std::move(o)), branches(std::move(b)) {}

    void print(int indent, std::ostream& to) const;
    type_ptr typecheck(type_mgr& mgr, const type_env& env) const;
};

struct pattern_var : public pattern {
    std::string var;

    pattern_var(std::string v)
        : var(std::move(v)) {}

    void print(std::ostream& to) const;
    void match(type_ptr t, type_mgr& mgr, type_env& env) const;
};

struct pattern_constr : public pattern {
    std::string constr;
    std::vector<std::string> params;

    pattern_constr(std::string c, std::vector<std::string> p)
        : constr(std::move(c)), params(std::move(p)) {}

    void print(std::ostream& to) const;
    void match(type_ptr t, type_mgr& mgr, type_env& env) const;
};

struct definition_defn : public definition {
    std::string name;
    std::vector<std::string> params;
    ast_ptr body;

    type_ptr return_type;
    std::vector<type_ptr> param_types;

    definition_defn(std::string n, std::vector<std::string> p, ast_ptr b)
        : name(std::move(n)), params(std::move(p)), body(std::move(b)) {}

    void typecheck_first(type_mgr& mgr, type_env& env);
    void typecheck_second(type_mgr& mgr, const type_env& env) const;
};

struct definition_data : public definition {
    std::string name;
    std::vector<constructor_ptr> constructors;

    definition_data(std::string n, std::vector<constructor_ptr> cs)
        : name(std::move(n)), constructors(std::move(cs)) {}

    void typecheck_first(type_mgr& mgr, type_env& env);
    void typecheck_second(type_mgr& mgr, const type_env& env) const;
};
@@ -1,48 +0,0 @@
#include "ast.hpp"

void definition_defn::typecheck_first(type_mgr& mgr, type_env& env) {
    return_type = mgr.new_type();
    type_ptr full_type = return_type;

    for(auto it = params.rbegin(); it != params.rend(); it++) {
        type_ptr param_type = mgr.new_type();
        full_type = type_ptr(new type_arr(param_type, full_type));
        param_types.push_back(param_type);
    }

    env.bind(name, full_type);
}

void definition_defn::typecheck_second(type_mgr& mgr, const type_env& env) const {
    type_env new_env = env.scope();
    auto param_it = params.begin();
    auto type_it = param_types.rbegin();

    while(param_it != params.end() && type_it != param_types.rend()) {
        new_env.bind(*param_it, *type_it);
        param_it++;
        type_it++;
    }

    type_ptr body_type = body->typecheck(mgr, new_env);
    mgr.unify(return_type, body_type);
}

void definition_data::typecheck_first(type_mgr& mgr, type_env& env) {
    type_ptr return_type = type_ptr(new type_base(name));

    for(auto& constructor : constructors) {
        type_ptr full_type = return_type;

        for(auto it = constructor->types.rbegin(); it != constructor->types.rend(); it++) {
            type_ptr type = type_ptr(new type_base(*it));
            full_type = type_ptr(new type_arr(type, full_type));
        }

        env.bind(constructor->name, full_type);
    }
}

void definition_data::typecheck_second(type_mgr& mgr, const type_env& env) const {
    // Nothing
}
@@ -1,16 +0,0 @@
#include "env.hpp"

type_ptr type_env::lookup(const std::string& name) const {
    auto it = names.find(name);
    if(it != names.end()) return it->second;
    if(parent) return parent->lookup(name);
    return nullptr;
}

void type_env::bind(const std::string& name, type_ptr t) {
    names[name] = t;
}

type_env type_env::scope() const {
    return type_env(this);
}
@@ -1,16 +0,0 @@
#pragma once
#include <map>
#include "type.hpp"

struct type_env {
    std::map<std::string, type_ptr> names;
    type_env const* parent = nullptr;

    type_env(type_env const* p)
        : parent(p) {}
    type_env() : type_env(nullptr) {}

    type_ptr lookup(const std::string& name) const;
    void bind(const std::string& name, type_ptr t);
    type_env scope() const;
};
@@ -1,5 +0,0 @@
#include "error.hpp"

const char* type_error::what() const noexcept {
    return "an error occurred while checking the types of the program";
}
@@ -1,21 +0,0 @@
#pragma once
#include <exception>
#include "type.hpp"

struct type_error : std::exception {
    std::string description;

    type_error(std::string d)
        : description(std::move(d)) {}

    const char* what() const noexcept override;
};

struct unification_error : public type_error {
    type_ptr left;
    type_ptr right;

    unification_error(type_ptr l, type_ptr r)
        : type_error("failed to unify types"),
          left(std::move(l)), right(std::move(r)) {}
};
@@ -1,2 +0,0 @@
data Bool = { True, False }
defn main = { 3 + True }
@@ -1 +0,0 @@
defn main = { 1 2 3 4 5 }
@@ -1,2 +0,0 @@
defn main = { plus 320 6 }
defn plus x y = { x + y }
@@ -1,3 +0,0 @@
defn add x y = { x + y }
defn double x = { add x x }
defn main = { double 163 }
@@ -1,7 +0,0 @@
data List = { Nil, Cons Int List }
defn length l = {
    case l of {
        Nil -> { 0 }
        Cons x xs -> { 1 + length xs }
    }
}
@@ -1,70 +0,0 @@
#include "ast.hpp"
#include <iostream>
#include "parser.hpp"
#include "error.hpp"
#include "type.hpp"

void yy::parser::error(const std::string& msg) {
    std::cout << "An error occurred: " << msg << std::endl;
}

extern std::vector<definition_ptr> program;

void typecheck_program(
        const std::vector<definition_ptr>& prog,
        type_mgr& mgr, type_env& env) {
    type_ptr int_type = type_ptr(new type_base("Int"));
    type_ptr binop_type = type_ptr(new type_arr(
        int_type,
        type_ptr(new type_arr(int_type, int_type))));

    env.bind("+", binop_type);
    env.bind("-", binop_type);
    env.bind("*", binop_type);
    env.bind("/", binop_type);

    for(auto& def : prog) {
        def->typecheck_first(mgr, env);
    }

    for(auto& def : prog) {
        def->typecheck_second(mgr, env);
    }

    for(auto& pair : env.names) {
        std::cout << pair.first << ": ";
        pair.second->print(mgr, std::cout);
        std::cout << std::endl;
    }
}

int main() {
    yy::parser parser;
    type_mgr mgr;
    type_env env;

    parser.parse();
    for(auto& definition : program) {
        definition_defn* def = dynamic_cast<definition_defn*>(definition.get());
        if(!def) continue;

        std::cout << def->name;
        for(auto& param : def->params) std::cout << " " << param;
        std::cout << ":" << std::endl;

        def->body->print(1, std::cout);
    }
    try {
        typecheck_program(program, mgr, env);
    } catch(unification_error& err) {
        std::cout << "failed to unify types: " << std::endl;
        std::cout << "  (1) \033[34m";
        err.left->print(mgr, std::cout);
        std::cout << "\033[0m" << std::endl;
        std::cout << "  (2) \033[32m";
        err.right->print(mgr, std::cout);
        std::cout << "\033[0m" << std::endl;
    } catch(type_error& err) {
        std::cout << "failed to type check program: " << err.description << std::endl;
    }
}
@@ -1,140 +0,0 @@
%{
#include <string>
#include <iostream>
#include "ast.hpp"
#include "parser.hpp"

std::vector<definition_ptr> program;
extern yy::parser::symbol_type yylex();

%}

%token PLUS
%token TIMES
%token MINUS
%token DIVIDE
%token <int> INT
%token DEFN
%token DATA
%token CASE
%token OF
%token OCURLY
%token CCURLY
%token OPAREN
%token CPAREN
%token COMMA
%token ARROW
%token EQUAL
%token <std::string> LID
%token <std::string> UID

%language "c++"
%define api.value.type variant
%define api.token.constructor

%type <std::vector<std::string>> lowercaseParams uppercaseParams
%type <std::vector<definition_ptr>> program definitions
%type <std::vector<branch_ptr>> branches
%type <std::vector<constructor_ptr>> constructors
%type <ast_ptr> aAdd aMul case app appBase
%type <definition_ptr> definition defn data
%type <branch_ptr> branch
%type <pattern_ptr> pattern
%type <constructor_ptr> constructor

%start program

%%

program
    : definitions { program = std::move($1); }
    ;

definitions
    : definitions definition { $$ = std::move($1); $$.push_back(std::move($2)); }
    | definition { $$ = std::vector<definition_ptr>(); $$.push_back(std::move($1)); }
    ;

definition
    : defn { $$ = std::move($1); }
    | data { $$ = std::move($1); }
    ;

defn
    : DEFN LID lowercaseParams EQUAL OCURLY aAdd CCURLY
        { $$ = definition_ptr(
            new definition_defn(std::move($2), std::move($3), std::move($6))); }
    ;

lowercaseParams
    : %empty { $$ = std::vector<std::string>(); }
    | lowercaseParams LID { $$ = std::move($1); $$.push_back(std::move($2)); }
    ;

uppercaseParams
    : %empty { $$ = std::vector<std::string>(); }
    | uppercaseParams UID { $$ = std::move($1); $$.push_back(std::move($2)); }
    ;

aAdd
    : aAdd PLUS aMul { $$ = ast_ptr(new ast_binop(PLUS, std::move($1), std::move($3))); }
    | aAdd MINUS aMul { $$ = ast_ptr(new ast_binop(MINUS, std::move($1), std::move($3))); }
    | aMul { $$ = std::move($1); }
    ;

aMul
    : aMul TIMES app { $$ = ast_ptr(new ast_binop(TIMES, std::move($1), std::move($3))); }
    | aMul DIVIDE app { $$ = ast_ptr(new ast_binop(DIVIDE, std::move($1), std::move($3))); }
    | app { $$ = std::move($1); }
    ;

app
    : app appBase { $$ = ast_ptr(new ast_app(std::move($1), std::move($2))); }
    | appBase { $$ = std::move($1); }
    ;

appBase
    : INT { $$ = ast_ptr(new ast_int($1)); }
    | LID { $$ = ast_ptr(new ast_lid(std::move($1))); }
    | UID { $$ = ast_ptr(new ast_uid(std::move($1))); }
    | OPAREN aAdd CPAREN { $$ = std::move($2); }
    | case { $$ = std::move($1); }
    ;

case
    : CASE aAdd OF OCURLY branches CCURLY
        { $$ = ast_ptr(new ast_case(std::move($2), std::move($5))); }
    ;

branches
    : branches branch { $$ = std::move($1); $$.push_back(std::move($2)); }
    | branch { $$ = std::vector<branch_ptr>(); $$.push_back(std::move($1)); }
    ;

branch
    : pattern ARROW OCURLY aAdd CCURLY
        { $$ = branch_ptr(new branch(std::move($1), std::move($4))); }
    ;

pattern
    : LID { $$ = pattern_ptr(new pattern_var(std::move($1))); }
    | UID lowercaseParams
        { $$ = pattern_ptr(new pattern_constr(std::move($1), std::move($2))); }
    ;

data
    : DATA UID EQUAL OCURLY constructors CCURLY
        { $$ = definition_ptr(new definition_data(std::move($2), std::move($5))); }
    ;

constructors
    : constructors COMMA constructor { $$ = std::move($1); $$.push_back(std::move($3)); }
    | constructor
        { $$ = std::vector<constructor_ptr>(); $$.push_back(std::move($1)); }
    ;

constructor
    : UID uppercaseParams
        { $$ = constructor_ptr(new constructor(std::move($1), std::move($2))); }
    ;
@@ -1,34 +0,0 @@
%option noyywrap

%{
#include <iostream>
#include "ast.hpp"
#include "parser.hpp"

#define YY_DECL yy::parser::symbol_type yylex()

%}

%%

[ \n]+ {}
\+ { return yy::parser::make_PLUS(); }
\* { return yy::parser::make_TIMES(); }
- { return yy::parser::make_MINUS(); }
\/ { return yy::parser::make_DIVIDE(); }
[0-9]+ { return yy::parser::make_INT(atoi(yytext)); }
defn { return yy::parser::make_DEFN(); }
data { return yy::parser::make_DATA(); }
case { return yy::parser::make_CASE(); }
of { return yy::parser::make_OF(); }
\{ { return yy::parser::make_OCURLY(); }
\} { return yy::parser::make_CCURLY(); }
\( { return yy::parser::make_OPAREN(); }
\) { return yy::parser::make_CPAREN(); }
, { return yy::parser::make_COMMA(); }
-> { return yy::parser::make_ARROW(); }
= { return yy::parser::make_EQUAL(); }
[a-z][a-zA-Z]* { return yy::parser::make_LID(std::string(yytext)); }
[A-Z][a-zA-Z]* { return yy::parser::make_UID(std::string(yytext)); }

%%
@@ -1,99 +0,0 @@
#include "type.hpp"
#include <sstream>
#include <algorithm>
#include "error.hpp"

void type_var::print(const type_mgr& mgr, std::ostream& to) const {
    auto it = mgr.types.find(name);
    if(it != mgr.types.end()) {
        it->second->print(mgr, to);
    } else {
        to << name;
    }
}

void type_base::print(const type_mgr& mgr, std::ostream& to) const {
    to << name;
}

void type_arr::print(const type_mgr& mgr, std::ostream& to) const {
    left->print(mgr, to);
    to << " -> (";
    right->print(mgr, to);
    to << ")";
}

std::string type_mgr::new_type_name() {
    int temp = last_id++;
    std::string str = "";

    while(temp != -1) {
        str += (char) ('a' + (temp % 26));
        temp = temp / 26 - 1;
    }

    std::reverse(str.begin(), str.end());
    return str;
}

type_ptr type_mgr::new_type() {
    return type_ptr(new type_var(new_type_name()));
}

type_ptr type_mgr::new_arrow_type() {
    return type_ptr(new type_arr(new_type(), new_type()));
}

type_ptr type_mgr::resolve(type_ptr t, type_var*& var) {
    type_var* cast;

    var = nullptr;
    while((cast = dynamic_cast<type_var*>(t.get()))) {
        auto it = types.find(cast->name);

        if(it == types.end()) {
            var = cast;
            break;
        }
        t = it->second;
    }

    return t;
}

void type_mgr::unify(type_ptr l, type_ptr r) {
    type_var* lvar;
    type_var* rvar;
    type_arr* larr;
    type_arr* rarr;
    type_base* lid;
    type_base* rid;

    l = resolve(l, lvar);
    r = resolve(r, rvar);

    if(lvar) {
        bind(lvar->name, r);
        return;
    } else if(rvar) {
        bind(rvar->name, l);
        return;
    } else if((larr = dynamic_cast<type_arr*>(l.get())) &&
            (rarr = dynamic_cast<type_arr*>(r.get()))) {
        unify(larr->left, rarr->left);
        unify(larr->right, rarr->right);
        return;
    } else if((lid = dynamic_cast<type_base*>(l.get())) &&
            (rid = dynamic_cast<type_base*>(r.get()))) {
        if(lid->name == rid->name) return;
    }

    throw unification_error(l, r);
}

void type_mgr::bind(const std::string& s, type_ptr t) {
    type_var* other = dynamic_cast<type_var*>(t.get());

    if(other && other->name == s) return;
    types[s] = t;
}
@@ -1,54 +0,0 @@
#pragma once
#include <memory>
#include <map>

struct type_mgr;

struct type {
    virtual ~type() = default;

    virtual void print(const type_mgr& mgr, std::ostream& to) const = 0;
};

using type_ptr = std::shared_ptr<type>;

struct type_var : public type {
    std::string name;

    type_var(std::string n)
        : name(std::move(n)) {}

    void print(const type_mgr& mgr, std::ostream& to) const;
};

struct type_base : public type {
    std::string name;

    type_base(std::string n)
        : name(std::move(n)) {}

    void print(const type_mgr& mgr, std::ostream& to) const;
};

struct type_arr : public type {
    type_ptr left;
    type_ptr right;

    type_arr(type_ptr l, type_ptr r)
        : left(std::move(l)), right(std::move(r)) {}

    void print(const type_mgr& mgr, std::ostream& to) const;
};

struct type_mgr {
    int last_id = 0;
    std::map<std::string, type_ptr> types;

    std::string new_type_name();
    type_ptr new_type();
    type_ptr new_arrow_type();

    void unify(type_ptr l, type_ptr r);
    type_ptr resolve(type_ptr t, type_var*& var);
    void bind(const std::string& s, type_ptr t);
};
119 code/cs325-langs/hws/hw1.txt Normal file
@@ -0,0 +1,119 @@
CS 325-001, Analysis of Algorithms, Fall 2019
HW1 - Python 3, qsort, BST, and qselect
Due electronically on flip on Monday 9/30 at 11:59pm.
No late submission will be accepted.

Need to submit on flip: report.txt, qsort.py, and qselect.py.
qselect.py will be automatically graded for correctness (1%).

flip $ /nfs/farm/classes/eecs/fall2019/cs325-001/submit hw1 qselect.py qsort.py report.txt

Note:

1. You can ssh to flip machines from your own machine by:
   $ ssh access.engr.oregonstate.edu

2. You can add /nfs/farm/classes/eecs/fall2019/cs325-001/ to your $PATH:
   $ export PATH=$PATH:/nfs/farm/classes/eecs/fall2019/cs325-001/
   and add the above command to your ~/.bash_profile,
   so that you don't need to type it every time.

   (alternatively, you can use symbolic links or aliases to avoid typing the long path)

3. You can choose to submit each file separately, or submit them together.

Textbooks for References:
[1] CLRS Ch. 9.2 and Ch. 12

0. Q: What are the best-case, worst-case, and average-case time complexities of quicksort?
   Briefly explain each case.

1. [WILL BE GRADED]
   Quickselect with Randomized Pivot (CLRS Ch. 9.2).

   >>> from qselect import *
   >>> qselect(2, [3, 10, 4, 7, 19])
   4
   >>> qselect(4, [11, 2, 8, 3])
   11

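   One possible shape for qselect.py -- a minimal sketch of randomized
   quickselect matching the 1-indexed k in the examples above (everything
   beyond the call signature is an assumption, not the graded solution):

   import random

   def qselect(k, arr):
       # Pick a random pivot, split into strictly-smaller and strictly-larger
       # parts, and recurse only into the side holding the k-th smallest.
       # Expected O(n) time; worst case O(n^2), like quicksort.
       pivot = arr[random.randrange(len(arr))]
       left = [x for x in arr if x < pivot]
       right = [x for x in arr if x > pivot]
       if k <= len(left):
           return qselect(k, left)
       dupes = len(arr) - len(left) - len(right)  # copies of the pivot
       if k <= len(left) + dupes:
           return pivot
       return qselect(k - len(left) - dupes, right)
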
   Q: What are the best-case, worst-case, and average-case time complexities? Briefly explain.

   Filename: qselect.py


2. Buggy Qsort Revisited

   In the slides we showed a buggy version of qsort which is weird in an interesting way:
   it actually returns a binary search tree for the given array, rooted at the pivot:

   >>> from qsort import *
   >>> tree = sort([4,2,6,3,5,7,1,9])
   >>> tree
   [[[[], 1, []], 2, [[], 3, []]], 4, [[[], 5, []], 6, [[], 7, [[], 9, []]]]]

   which encodes a binary search tree:

         4
        / \
       2   6
      / \ / \
     1  3 5  7
              \
               9

   Now on top of that piece of code, add three functions:
   * sorted(t): returns the sorted order (infix traversal)
   * search(t, x): returns whether x is in t
   * insert(t, x): inserts x into t (in-place) if it is missing, otherwise does nothing.

   >>> sorted(tree)
   [1, 2, 3, 4, 5, 6, 7, 9]
   >>> search(tree, 6)
   True
   >>> search(tree, 6.5)
   False
   >>> insert(tree, 6.5)
   >>> tree
   [[[[], 1, []], 2, [[], 3, []]], 4, [[[], 5, []], 6, [[[], 6.5, []], 7, [[], 9, []]]]]
   >>> insert(tree, 3)
   >>> tree
   [[[[], 1, []], 2, [[], 3, []]], 4, [[[], 5, []], 6, [[[], 6.5, []], 7, [[], 9, []]]]]

   Hint: both search and insert should depend on a helper function _search(tree, x) which
   returns the subtree (a list) rooted at x when x is found, or the [] where x should
   be inserted.

   e.g.,
   >>> tree = sort([4,2,6,3,5,7,1,9])   # starting from the initial tree
   >>> _search(tree, 3)
   [[], 3, []]
   >>> _search(tree, 0)
   []
   >>> _search(tree, 6.5)
   []
   >>> _search(tree, 0) is _search(tree, 6.5)
   False
   >>> _search(tree, 0) == _search(tree, 6.5)
   True

   Note the last two []'s are different nodes (with different memory addresses):
   the first one is the left child of 1, while the second one is the left child of 7
   (so that insert is very easy).

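   A minimal sketch of the three functions built around the suggested _search
   helper (here the infix traversal is named sorted_order to avoid shadowing
   Python's builtin sorted; treat the details as assumptions):

   def _search(tree, x):
       # Return the subtree rooted at x, or the exact [] node where x would
       # go; returning the node itself is what makes in-place insert easy.
       if tree == []:
           return tree
       left, root, right = tree
       if x < root:
           return _search(left, x)
       if x > root:
           return _search(right, x)
       return tree

   def search(tree, x):
       return _search(tree, x) != []

   def insert(tree, x):
       node = _search(tree, x)
       if node == []:
           node.extend([[], x, []])  # fill the empty slot in place

   def sorted_order(tree):
       # Infix traversal yields the keys in sorted order.
       if tree == []:
           return []
       left, root, right = tree
       return sorted_order(left) + [root] + sorted_order(right)
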
   Filename: qsort.py

   Q: What are the time complexities for the operations implemented?

Debriefing (required!): --------------------------

1. Approximately how many hours did you spend on this assignment?
2. Would you rate it as easy, moderate, or difficult?
3. Did you work on it mostly alone, or mostly with other people?
4. How deeply do you feel you understand the material it covers (0%–100%)?
5. Any other comments?

This section is intended to help us calibrate the homework assignments.
Your answers to this section will *not* affect your grade; however, skipping it
certainly will.
170 code/cs325-langs/hws/hw10.txt Normal file
@@ -0,0 +1,170 @@
CS 325, Algorithms (MS/MEng-level), Fall 2019

HW10 - Challenge Problem - RNA Structure Prediction (6%)
This problem combines dynamic programming and priority queues.

Due Wednesday 12/4, 11:59pm.
No late submission will be accepted.

Include in your submission: report.txt, rna.py.
Grading:
* report.txt -- 1%
* 1-best structure -- 2%
* number of structures -- 1%
* k-best structures -- 2%

Textbooks for References:
[1] KT Ch. 6.5 (DP over intervals -- RNA structure)
[2] KT slides: DP I (RNA section)
    http://www.cs.princeton.edu/~wayne/kleinberg-tardos/

***Please analyze time/space complexities for each problem in report.txt.

1. Given an RNA sequence, such as ACAGU, we can predict its secondary structure
   by tagging each nucleotide as (, ., or ). Each matching pair of () must be
   AU, GC, or GU (or their mirror symmetries: UA, CG, UG).
   We also assume pairs can _not_ cross each other.
   The following are valid structures for ACAGU:

   ACAGU
   .....
   ...()
   ..(.)
   .(.).
   (...)
   ((.))

   We want to find the structure with the maximum number of matching pairs.
   In the above example, the last structure is optimal (2 pairs).

   >>> best("ACAGU")
   (2, '((.))')

   Tie-breaking: arbitrary. Don't worry as long as your structure
   is one of the correct best structures.

   some other cases (more cases at the bottom):

   GCACG
   (2, '().()')
   UUCAGGA
   (3, '(((.)))')
   GUUAGAGUCU
   (4, '(.()((.)))')
   AUAACCUUAUAGGGCUCUG
   (8, '.(((..)()()((()))))')
   AACCGCUGUGUCAAGCCCAUCCUGCCUUGUU
   (11, '(((.(..(.((.)((...().))()))))))')
   GAUGCCGUGUAGUCCAAAGACUUCACCGUUGG
   (14, '.()()(()(()())(((.((.)(.))()))))')
   CAUCGGGGUCUGAGAUGGCCAUGAAGGGCACGUACUGUUU
   (18, '(()())(((((.)))()(((())(.(.().()()))))))')
   ACGGCCAGUAAAGGUCAUAUACGCGGAAUGACAGGUCUAUCUAC
   (19, '.()(((.)(..))(((.()()(())))(((.)((())))))())')
   AGGCAUCAAACCCUGCAUGGGAGCACCGCCACUGGCGAUUUUGGUA
   (20, '.(()())...((((()()))((()(.()(((.)))()())))))()')

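   A sketch of the interval DP (Nussinov-style, in the spirit of KT 6.5 --
   not necessarily the intended solution): best(i, j) either leaves the last
   base unpaired or pairs it with a compatible base k, splitting the interval
   in two. Roughly O(n^3) time over O(n^2) subproblems:

   from functools import lru_cache

   PAIRS = {"AU", "UA", "GC", "CG", "GU", "UG"}

   def best(s):
       @lru_cache(maxsize=None)
       def solve(i, j):
           # Best (pairs, structure) for the substring s[i:j].
           if j - i < 2:
               return 0, "." * (j - i)
           p, struct = solve(i, j - 1)          # s[j-1] unpaired
           result = (p, struct + ".")
           for k in range(i, j - 1):            # s[j-1] paired with s[k]
               if s[k] + s[j - 1] in PAIRS:
                   lp, ls = solve(i, k)
                   mp, ms = solve(k + 1, j - 1)
                   result = max(result, (lp + mp + 1, ls + "(" + ms + ")"))
           return result
       return solve(0, len(s))
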
2. Total number of all possible structures

   >>> total("ACAGU")
   6

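   total() can reuse the same decomposition: every structure either leaves the
   last base unpaired or pairs it with a unique k, so the counts add (and
   multiply across the split) with no double counting. A sketch, reusing
   PAIRS from the sketch above:

   from functools import lru_cache

   def total(s):
       @lru_cache(maxsize=None)
       def count(i, j):
           if j - i < 2:
               return 1
           n = count(i, j - 1)                  # s[j-1] unpaired
           for k in range(i, j - 1):
               if s[k] + s[j - 1] in PAIRS:     # s[j-1] paired with s[k]
                   n += count(i, k) * count(k + 1, j - 1)
           return n
       return count(0, len(s))
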
3. k-best structures: output the 1-best, 2nd-best, ... kth-best structures.

   >>> kbest("ACAGU", 3)
   [(2, '((.))'), (1, '(...)'), (1, '.(.).')]

   The list must be sorted.
   Tie-breaking: arbitrary.

   In case the input k is bigger than the number of possible structures, output all.

   Sanity check: kbest(s, 1)[0][0] == best(s)[0] for each RNA sequence s.

All three functions should be in one file: rna.py.

See more testcases at the end.

Debriefing (required!): --------------------------

0. What's your name?
1. Approximately how many hours did you spend on this assignment?
2. Would you rate it as easy, moderate, or difficult?
3. Did you work on it mostly alone, or mostly with other people?
4. How deeply do you feel you understand the material it covers (0%-100%)?
5. Any other comments?

This section is intended to help us calibrate the homework assignments.
Your answers to this section will *not* affect your grade; however, skipping it
certainly will.


TESTCASES:

for each sequence s, we list three lines:
    best(s)
    total(s)
    kbest(s, 10)


ACAGU
(2, '((.))')
6
[(2, '((.))'), (1, '.(.).'), (1, '..(.)'), (1, '...()'), (1, '(...)'), (0, '.....')]
------
AC
(0, '..')
1
[(0, '..')]
------
GUAC
(2, '(())')
5
[(2, '(())'), (1, '()..'), (1, '.().'), (1, '(..)'), (0, '....')]
------
GCACG
(2, '().()')
6
[(2, '().()'), (1, '(..).'), (1, '()...'), (1, '.(..)'), (1, '...()'), (0, '.....')]
------
CCGG
(2, '(())')
6
[(2, '(())'), (1, '(.).'), (1, '.().'), (1, '.(.)'), (1, '(..)'), (0, '....')]
------
CCCGGG
(3, '((()))')
20
[(3, '((()))'), (2, '((.)).'), (2, '(.()).'), (2, '.(()).'), (2, '.(().)'), (2, '.((.))'), (2, '((.).)'), (2, '(.(.))'), (2, '(.().)'), (2, '((..))')]
------
UUCAGGA
(3, '(((.)))')
24
[(3, '(((.)))'), (2, '((.).).'), (2, '((..)).'), (2, '(.(.)).'), (2, '((.))..'), (2, '.((.)).'), (2, '.((.).)'), (2, '.((..))'), (2, '((..).)'), (2, '((.)..)')]
------
AUAACCUA
(2, '.((...))')
19
[(2, '((.)..).'), (2, '(()...).'), (2, '()(...).'), (2, '().(..).'), (2, '()....()'), (2, '.()(..).'), (2, '.()...()'), (2, '.(.)..()'), (2, '.((...))'), (2, '.(.(..))')]
------
UUGGACUUG
(4, '(()((.)))')
129
[(4, '(())(.)()'), (4, '(()((.)))'), (3, '(().)..()'), (3, '(().).(.)'), (3, '(().)(..)'), (3, '((.))..()'), (3, '((.)).(.)'), (3, '((.))(..)'), (3, '(())(..).'), (3, '(())(.)..')]
------
UUUGGCACUA
(4, '(.()()(.))')
179
[(4, '((()).).()'), (4, '((.)()).()'), (4, '(.()()).()'), (4, '.(()()).()'), (4, '.(()()(.))'), (4, '((()).(.))'), (4, '((.)()(.))'), (4, '((()())..)'), (4, '(.()()(.))'), (3, '((()).)...')]
------
GAUGCCGUGUAGUCCAAAGACUUC
(11, '(((()()((()(.))))((.))))')
2977987
[(11, '(()())(((()().))(((.))))'), (11, '(()())(((()()).)(((.))))'), (11, '(()())(((()(.)))(((.))))'), (11, '(()()()((()(.)))(((.))))'), (11, '(((()()((()().)))((.))))'), (11, '(((()()((()(.))))((.))))'), (11, '(()()()((()()).)(((.))))'), (11, '(()()()((()().))(((.))))'), (11, '(((()()((()()).))((.))))'), (10, '(()()()((()().).)((.))).')]
------
AGGCAUCAAACCCUGCAUGGGAGCG
(10, '.(()())...((((()()))).())')
560580
[(10, '.(()())...((((())())).)()'), (10, '.(()())...((((()()))).)()'), (10, '.(()())...(((()(()))).)()'), (10, '.(()())...(((()(()))).())'), (10, '.(()())...((((())())).())'), (10, '.(()())...((((()()))).())'), (9, '((.).)(...(.((()()))).)()'), (9, '((.).)(...(((.)(()))).)()'), (9, '((.).)(...(.(()(()))).)()'), (9, '((.).)(...((.(()()))).)()')]
------
42 code/cs325-langs/hws/hw11.txt Normal file
@@ -0,0 +1,42 @@
HW11 -- OPTIONAL (for your practice only -- solutions will be released on Tuesday)

Edit Distance (see updated final review solutions)

flip $ /nfs/farm/classes/eecs/fall2019/cs325-001/submit hw11 edit.py

Implement two functions:
* distance1(s, t): Viterbi-style (either top-down or bottom-up)
* distance2(s, t): Dijkstra-style (best-first)

For Dijkstra, you can use either heapdict or heapq (see review problem 7).
Given that this graph is extremely sparse (why?), heapq (ElogE) might be faster than heapdict (ElogV)
because the latter has hashing overhead.

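For the Viterbi-style version, a minimal bottom-up sketch (one rolling row of
the classic edit-distance table; names and layout are assumptions):

    def distance1(s, t):
        # prev[j] = edit distance between the current prefix of s and t[:j].
        prev = list(range(len(t) + 1))
        for i, cs in enumerate(s, 1):
            curr = [i]                                   # delete all of s[:i]
            for j, ct in enumerate(t, 1):
                curr.append(min(prev[j] + 1,             # delete cs
                                curr[j - 1] + 1,         # insert ct
                                prev[j - 1] + (cs != ct)))  # match/substitute
            prev = curr
        return prev[-1]
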
They should return the same result (just return the edit distance).

We have 10 testcases (listed below); the first 5 test distance1(),
and the second 5 test distance2() on the same 5 string pairs.

My solutions (on flip2):
Testing Case 1 (open)... 0.001 s, Correct
Testing Case 2 (open)... 0.000 s, Correct
Testing Case 3 (open)... 0.012 s, Correct
Testing Case 4 (open)... 0.155 s, Correct
Testing Case 5 (open)... 0.112 s, Correct
Testing Case 6 (hidden)... 0.000 s, Correct
Testing Case 7 (hidden)... 0.000 s, Correct
Testing Case 8 (hidden)... 0.004 s, Correct
Testing Case 9 (hidden)... 0.009 s, Correct
Testing Case 10 (hidden)... 0.021 s, Correct
Total Time: 0.316 s

distance1("abcdefh", "abbcdfg") == 3
distance1("pretty", "prettier") == 3
distance1("aaaaaaadaaaaaaaaaaaaaaaaacaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "aaaaaaaaaaaabaaaaaaaaaaaaaaaaaaaaaaaaaaaaaxaaaaaaaaaaaaaaaaaaaaaa") == 5
distance1('cpuyedzrwcbritzclzhwwabmlyresvewkdxwkamyzbxtwiqzvokqpkecyywrbvhlqgxzutdjfmvlhsezfbhfjbllmfhzlqlcwibubyyjupbwhztskyksfthkptxqlmhivfjbgclwsombvytdztapwpzmdqfwwrhqsgztobeuiatcwmrzfbwhfnpzzasomrhotoqiwvexlgxsnafiagfewmopdzwanxswfsmbxsmsczbwsgnwy', 'cpuyedzrwcbritzclzhwwabmlyresvewkdxwkamyzbtwiqzvokqpkecyywrbvhlqgxzutdjfmvlhsezfbhfjbllmfhzlqlcwibubyyjupbwhztskyksfthkptxqlmhivfbgclwsombvytdztapwpzmdqfwwrhqsgztobeuiatcwmrzfbwhfnpzzasonrhotoqiwvexlgxsnafiagfewmopdzwanxswfsmbxsmsczbwsgnwy') == 3
distance1('cpuyedzrwcbritzclzhwwabmlyresvewkdxwkamyzbtwiqzvokqpasdfkecyywrbvhlqgxzutdjfmvlhsezfbhbllmfhzlqlcwibubyyjupbwhztsxyksfthkptxqlmhivfjbgclhombvytdztapwpzmdqfwwrhqsgztobeuiatcwmrzfbwhfnpzzasomrttoqiwvexlgxsnafiagfewmopdzwanxswfsmbxsmsczbwsgnwydmbihjkvziitusmkjljrsbafytsinql', 'cpuyedzrwcbritzclzhwwabmlyresvewkdxwkamyzbtwiqzvokqpkecyywrbvhlqgxzutdjfmvlhsezfbhfjbllmfhzlqlcwibubyyjupbwhztskyksfthkptxqlmhivfjbgclwsombvytdztapwpzmdqfwwrhqsgztobeuiatcwmrzfbwhfnpzzasomrhotoqiwvexlgxsnafiagfewmopdzwanxswfsmbxsmsczbwsgnwydmbihjkvziitusmkjljrsbafytsinql') == 11
distance2("abcdefh", "abbcdfg") == 3
distance2("pretty", "prettier") == 3
distance2("aaaaaaadaaaaaaaaaaaaaaaaacaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "aaaaaaaaaaaabaaaaaaaaaaaaaaaaaaaaaaaaaaaaaxaaaaaaaaaaaaaaaaaaaaaa") == 5
distance2('cpuyedzrwcbritzclzhwwabmlyresvewkdxwkamyzbxtwiqzvokqpkecyywrbvhlqgxzutdjfmvlhsezfbhfjbllmfhzlqlcwibubyyjupbwhztskyksfthkptxqlmhivfjbgclwsombvytdztapwpzmdqfwwrhqsgztobeuiatcwmrzfbwhfnpzzasomrhotoqiwvexlgxsnafiagfewmopdzwanxswfsmbxsmsczbwsgnwy', 'cpuyedzrwcbritzclzhwwabmlyresvewkdxwkamyzbtwiqzvokqpkecyywrbvhlqgxzutdjfmvlhsezfbhfjbllmfhzlqlcwibubyyjupbwhztskyksfthkptxqlmhivfbgclwsombvytdztapwpzmdqfwwrhqsgztobeuiatcwmrzfbwhfnpzzasonrhotoqiwvexlgxsnafiagfewmopdzwanxswfsmbxsmsczbwsgnwy') == 3
distance2('cpuyedzrwcbritzclzhwwabmlyresvewkdxwkamyzbtwiqzvokqpasdfkecyywrbvhlqgxzutdjfmvlhsezfbhbllmfhzlqlcwibubyyjupbwhztsxyksfthkptxqlmhivfjbgclhombvytdztapwpzmdqfwwrhqsgztobeuiatcwmrzfbwhfnpzzasomrttoqiwvexlgxsnafiagfewmopdzwanxswfsmbxsmsczbwsgnwydmbihjkvziitusmkjljrsbafytsinql', 'cpuyedzrwcbritzclzhwwabmlyresvewkdxwkamyzbtwiqzvokqpkecyywrbvhlqgxzutdjfmvlhsezfbhfjbllmfhzlqlcwibubyyjupbwhztskyksfthkptxqlmhivfjbgclwsombvytdztapwpzmdqfwwrhqsgztobeuiatcwmrzfbwhfnpzzasomrhotoqiwvexlgxsnafiagfewmopdzwanxswfsmbxsmsczbwsgnwydmbihjkvziitusmkjljrsbafytsinql') == 11
80 code/cs325-langs/hws/hw2.txt Normal file
@@ -0,0 +1,80 @@
CS 325-001, Analysis of Algorithms, Fall 2019
HW2 - Divide-n-conquer: mergesort, number of inversions, longest path

Due Monday Oct 7, 11:59pm (same submission instructions as HW1).
No late submission will be accepted.

Need to submit: report.txt, msort.py, inversions.py, and longest.py.
longest.py will be graded for correctness (1%).

To submit:
flip $ /nfs/farm/classes/eecs/fall2019/cs325-001/submit hw2 report.txt {msort,inversions,longest}.py
(You can submit each file separately, or submit them together.)

To see your best results so far:
flip $ /nfs/farm/classes/eecs/fall2019/cs325-001/query hw2

Textbooks for References:
[1] CLRS Ch. 2

0. Which of the following sorting algorithms are (or can be made) stable?
   (a) mergesort
   (b) quicksort with the first element as pivot
   (c) quicksort with randomized pivot
   (d) selection sort
   (e) insertion sort
   (f) heap sort --- not covered yet (see CLRS Ch. 6)

1. Implement mergesort.

   >>> mergesort([4, 2, 5, 1, 6, 3])
   [1, 2, 3, 4, 5, 6]

   Filename: msort.py

2. Calculate the number of inversions in a list.

   >>> num_inversions([4, 1, 3, 2])
   4
   >>> num_inversions([2, 4, 1, 3])
   3

   Filename: inversions.py
   Must run in O(nlogn) time.

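   A sketch of the standard divide-and-conquer approach, piggybacking on
   mergesort's merge step (whenever an element is taken from the right half,
   it is smaller than everything remaining in the left half); details beyond
   the function name are assumptions:

   def num_inversions(a):
       def sort_count(xs):
           # Return (sorted xs, number of inversions in xs); O(n log n).
           if len(xs) <= 1:
               return xs, 0
           mid = len(xs) // 2
           left, linv = sort_count(xs[:mid])
           right, rinv = sort_count(xs[mid:])
           merged, inv, i, j = [], linv + rinv, 0, 0
           while i < len(left) and j < len(right):
               if left[i] <= right[j]:
                   merged.append(left[i]); i += 1
               else:
                   merged.append(right[j]); j += 1
                   inv += len(left) - i   # jumps past the rest of left
           merged += left[i:] + right[j:]
           return merged, inv
       return sort_count(a)[1]
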
3. [WILL BE GRADED]

   Length of the longest path in a binary tree (number of edges).

   We will use the "buggy qsort" representation of binary trees from HW1:
   [left_subtree, root, right_subtree]

   >>> longest([[], 1, []])
   0

   >>> longest([[[], 1, []], 2, [[], 3, []]])
   2

   >>> longest([[[[], 1, []], 2, [[], 3, []]], 4, [[[], 5, []], 6, [[], 7, [[], 9, []]]]])
   5

   Note the answer is 5 because the longest path is 1-2-4-6-7-9.

   Filename: longest.py
   Must run in O(n) time.

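   One O(n) shape for this: a single post-order pass returning both the depth
   of each subtree and the best path seen so far (the longest path through a
   node joins its two deepest chains). A sketch under those assumptions:

   def longest(tree):
       def helper(t):
           # Return (depth, best): depth in edges (-1 for an empty tree)
           # and the longest path found anywhere inside t.
           if t == []:
               return -1, 0
           ldepth, lbest = helper(t[0])
           rdepth, rbest = helper(t[2])
           depth = 1 + max(ldepth, rdepth)
           best = max(lbest, rbest, ldepth + rdepth + 2)
           return depth, best
       return helper(tree)[1]
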
Debriefing (required!): --------------------------

1. Approximately how many hours did you spend on this assignment?
2. Would you rate it as easy, moderate, or difficult?
3. Did you work on it mostly alone, or mostly with other people?
   Note you are encouraged to discuss with your classmates,
   but each student should submit his/her own code.
4. How deeply do you feel you understand the material it covers (0%–100%)?
5. Any other comments?

This section is intended to help us calibrate the homework assignments.
Your answers to this section will *not* affect your grade; however, skipping it
certainly will.
83 code/cs325-langs/hws/hw3.txt Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
CS 325, Algorithms, Fall 2019
|
||||||
|
HW3 - K closest numbers; Two Pointers
|
||||||
|
|
||||||
|
Due Monday Oct 14, 11:59pm. (same submission instructions as HW1-2).
|
||||||
|
No late submission will be accepted.
|
||||||
|
|
||||||
|
Need to submit: report.txt, closest_unsorted.py, closest_sorted.py, xyz.py.
|
||||||
|
closest_sorted.py will be graded for correctness (1%).
|
||||||
|
|
||||||
|
To submit:
|
||||||
|
flip $ /nfs/farm/classes/eecs/fall2019/cs325-001/submit hw3 report.txt {closest*,xyz}.py
|
||||||
|
(You can submit each file separately, or submit them together.)
|
||||||
|
|
||||||
|
To see your best results so far:
|
||||||
|
flip $ /nfs/farm/classes/eecs/fall2019/cs325-001/query hw3
|
||||||
|
|
||||||
|
|
||||||
|
1. Given an array A of n numbers, a query x, and a number k,
|
||||||
|
find the k numbers in A that are closest (in value) to x.
|
||||||
|
For example:
|
||||||
|
|
||||||
|
find([4,1,3,2,7,4], 5.2, 2) returns [4,4]
|
||||||
|
find([4,1,3,2,7,4], 6.5, 3) returns [4,7,4]
|
||||||
|
find([5,3,4,1,6,3], 3.5, 2) returns [3,4]
|
||||||
|
|
||||||
|
|
||||||
|
Filename: closest_unsorted.py
|
||||||
|
Must run in O(n) time.
|
||||||
|
The elements in the returned list must be in the original order.
|
||||||
|
In case two numbers are equally close to x, choose the earlier one.
|
||||||
|
|
||||||
|
|
||||||
|
2. [WILL BE GRADED]
Now what if the input array is sorted? Can you do it faster?

find([1,2,3,4,4,7], 5.2, 2) returns [4,4]
find([1,2,3,4,4,7], 6.5, 3) returns [4,4,7]

Filename: closest_sorted.py
Must run in O(logn + k) time.
The elements in the returned list must be in the original order.

Note: in case two numbers are equally close to x, choose the smaller one:
find([1,2,3,4,4,6,6], 5, 3) returns [4,4,6]
find([1,2,3,4,4,5,6], 4, 5) returns [2,3,4,4,5]

Hint: you can use Python's bisect.bisect for binary search.

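A sketch of the two-pointer idea (bisect to locate x, then grow a window of
size k outward; everything but the name find is ours):

    from bisect import bisect

    def find(a, x, k):
        lo = bisect(a, x) - 1              # a[lo] <= x < a[lo+1]
        hi = lo + 1
        for _ in range(k):                 # k expansion steps: O(logn + k)
            if lo < 0:
                hi += 1
            elif hi >= len(a):
                lo -= 1
            elif x - a[lo] <= a[hi] - x:   # "<=" prefers the smaller number
                lo -= 1
            else:
                hi += 1
        return a[lo+1:hi]
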
3. For a given array A of n *distinct* numbers, find all triples (x,y,z)
s.t. x + y = z. (x, y, z are distinct numbers)

e.g.,

find([1, 4, 2, 3, 5]) returns [(1,3,4), (1,2,3), (1,4,5), (2,3,5)]

Note that:
1) no duplicates in the input array
2) you can choose any arbitrary order for triples in the returned list.

Filename: xyz.py
Must run in O(n^2) time.

Hint: you can use any built-in sort in Python.

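One O(n^2) sketch (sort once, then sweep two pointers below each candidate
z; variable names are ours):

    def find(a):
        s, out = sorted(a), []
        for k in range(len(s) - 1, 1, -1):     # s[k] plays the role of z
            i, j = 0, k - 1
            while i < j:
                t = s[i] + s[j]
                if t == s[k]:
                    out.append((s[i], s[j], s[k]))
                    i += 1
                elif t < s[k]:
                    i += 1
                else:
                    j -= 1
        return out
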
Debriefing (required!): --------------------------

0. What's your name?
1. Approximately how many hours did you spend on this assignment?
2. Would you rate it as easy, moderate, or difficult?
3. Did you work on it mostly alone, or mostly with other people?
   Note you are encouraged to discuss with your classmates,
   but each student should submit his/her own code.
4. How deeply do you feel you understand the material it covers (0%-100%)?

5. Which part(s) of the course do you like the most so far?
6. Which part(s) of the course do you dislike the most so far?

This section is intended to help us calibrate the homework assignments.
Your answers to this section will *not* affect your grade; however, skipping
it certainly will.

114
code/cs325-langs/hws/hw4.txt
Normal file
@@ -0,0 +1,114 @@
CS 325-001, Algorithms, Fall 2019
HW4 - Priority Queue and Heaps

Due via the submit program on Monday Oct 21, 11:59pm.
No late submission will be accepted.

Need to submit: report.txt, nbest.py, kmergesort.py, datastream.py.
datastream.py will be graded for correctness (1%).

To submit:
flip $ /nfs/farm/classes/eecs/fall2019/cs325-001/submit hw4 report.txt {nbest,kmergesort,datastream}.py
(You can submit each file separately, or submit them together.)

To see your best results so far:
flip $ /nfs/farm/classes/eecs/fall2019/cs325-001/query hw4


Textbooks for References:
[1] CLRS Ch. 6
[2] KT slides for binary heaps (only read the first 20 pages!):
https://www.cs.princeton.edu/~wayne/kleinberg-tardos/pdf/BinomialHeaps.pdf
[3] Python heapq module

0. There are two methods for building a heap from an unsorted array:
(1) insert each element into the heap --- O(nlogn) -- heapq.heappush()
(2) heapify (bottom-up) --- O(n) -- heapq.heapify()

(a) Derive these time complexities.
(b) Use a long list of random numbers to show the difference in time. (Hint: random.shuffle or random.sample)
(c) What about sorted or reversely-sorted numbers? (See the timing sketch below.)

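A quick timing sketch for (b) and (c) (the size 10**6 is an arbitrary
choice of ours):

    import heapq, random, time

    n = 10**6
    for name, data in [("random", random.sample(range(n), n)),
                       ("sorted", list(range(n))),
                       ("reversed", list(range(n, 0, -1)))]:
        t0 = time.time()
        h = []
        for x in data:
            heapq.heappush(h, x)           # n pushes: O(nlogn)
        t1 = time.time()
        ys = list(data)
        heapq.heapify(ys)                  # Floyd's heapify: O(n)
        t2 = time.time()
        print(name, "pushes:", t1 - t0, "heapify:", t2 - t1)
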
1. Given two lists A and B, each with n integers, return
a sorted list C that contains the smallest n elements from AxB:

AxB = { (x, y) | x in A, y in B }

i.e., AxB is the Cartesian Product of A and B.

ordering: (x,y) < (x',y') iff. x+y < x'+y' or (x+y==x'+y' and y<y')

You need to implement three algorithms and compare:

(a) enumerate all n^2 pairs, sort, and take top n.
(b) enumerate all n^2 pairs, but use qselect from hw1.
(c) Dijkstra-style best-first, only enumerate O(n) (at most 2n) pairs.
Hint: you can use Python's heapq module for priority queue.

Q: What are the time complexities of these algorithms?

>>> a, b = [4, 1, 5, 3], [2, 6, 3, 4]
>>> nbesta(a, b)   # algorithm (a), slowest
[(1, 2), (1, 3), (3, 2), (1, 4)]
>>> nbestb(a, b)   # algorithm (b), slow
[(1, 2), (1, 3), (3, 2), (1, 4)]
>>> nbestc(a, b)   # algorithm (c), fast
[(1, 2), (1, 3), (3, 2), (1, 4)]

Filename: nbest.py

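A sketch of the best-first variant (c) (sort both lists, then lazily expand
a frontier of index pairs; names other than nbestc are ours):

    from heapq import heappush, heappop

    def nbestc(a, b):
        a, b = sorted(a), sorted(b)
        h = [(a[0] + b[0], b[0], 0, 0)]    # key includes y for tie-breaking
        seen, out = {(0, 0)}, []
        while len(out) < len(a):
            _, _, i, j = heappop(h)
            out.append((a[i], b[j]))
            for i2, j2 in ((i + 1, j), (i, j + 1)):   # at most 2 successors
                if i2 < len(a) and j2 < len(b) and (i2, j2) not in seen:
                    seen.add((i2, j2))
                    heappush(h, (a[i2] + b[j2], b[j2], i2, j2))
        return out
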
2. k-way mergesort (the classical mergesort is a special case where k=2).

>>> kmergesort([4,1,5,2,6,3,7,0], 3)   # k=3
[0,1,2,3,4,5,6,7]

Q: What is the complexity? Write down the detailed analysis in report.txt.

Filename: kmergesort.py

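A sketch of the recursion (split into k slices, recurse, then k-way merge;
we lean on heapq.merge here, though the assignment may want the heap-based
merge written out by hand):

    from heapq import merge                # heap-based n-way merge

    def kmergesort(a, k):
        if len(a) <= 1:
            return a
        step = -(-len(a) // k)             # ceil(len(a) / k)
        parts = [kmergesort(a[i:i+step], k) for i in range(0, len(a), step)]
        return list(merge(*parts))
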
3. [WILL BE GRADED]

Find the k smallest numbers in a data stream of length n (k<<n),
using only O(k) space (the stream itself might be too big to fit in memory).

>>> ksmallest(4, [10, 2, 9, 3, 7, 8, 11, 5, 7])
[2, 3, 5, 7]
>>> ksmallest(3, range(1000000, 0, -1))
[1, 2, 3]

Note:
a) it should work with both lists and lazy lists
b) the output list should be sorted

Q: What is your complexity? Write down the detailed analysis in report.txt.

Filename: datastream.py

[UPDATE] The built-in function heapq.nsmallest() is _not_ allowed for this problem.
The whole point is to implement it yourself. :)

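A sketch of the bounded max-heap idea (keep the k smallest seen so far,
simulating a max-heap with negated keys; only ksmallest is a fixed name):

    from heapq import heappush, heappushpop

    def ksmallest(k, stream):
        h = []                             # at most k entries: O(k) space
        for x in stream:
            if len(h) < k:
                heappush(h, -x)
            elif x < -h[0]:                # beats the current k-th smallest
                heappushpop(h, -x)
        return sorted(-x for x in h)
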
4. (optional) Summarize the time complexities of the basic operations (push, pop-min, peek, heapify) for these implementations of priority queue:

(a) unsorted array
(b) sorted array (highest priority first)
(c) reversely sorted array (lowest priority first)
(d) linked list
(e) binary heap

Debriefing (required!): --------------------------

0. What's your name?
1. Approximately how many hours did you spend on this assignment?
2. Would you rate it as easy, moderate, or difficult?
3. Did you work on it mostly alone, or mostly with other people?
   Note you are encouraged to discuss with your classmates,
   but each student should submit his/her own code.
4. How deeply do you feel you understand the material it covers (0%-100%)?
5. Which part(s) of the course do you like the most so far?
6. Which part(s) of the course do you dislike the most so far?

This section is intended to help us calibrate the homework assignments.
Your answers to this section will *not* affect your grade; however, skipping
it certainly will.

130
code/cs325-langs/hws/hw5.txt
Normal file
@@ -0,0 +1,130 @@
CS 325-001, Algorithms, Fall 2019
HW5 - DP (part 1: simple)

HWs 5-7 are all on DPs.

Due Monday Oct 28, 11:59pm.
No late submission will be accepted.

Need to submit report.txt, mis.py, bsts.py, bitstrings.py.
mis.py will be graded for correctness (1%).

To submit:
flip $ /nfs/farm/classes/eecs/fall2019/cs325-001/submit hw5 report.txt {mis,bsts,bitstrings}.py
(You can submit each file separately, or submit them together.)

To see your best results so far:
flip $ /nfs/farm/classes/eecs/fall2019/cs325-001/query hw5


Textbooks for References:
[1] CLRS Ch. 15
[2] KT Ch. 6
or Ch. 5 in a previous version:
http://cs.furman.edu/~chealy/cs361/kleinbergbook.pdf

Hint: Among the three coding questions, p3 is the easiest, and p1 is similar to p3.
You'll realize that both are very similar to p0 (Fibonacci).
p2 is slightly different from these, but still very easy.

0. (Optional) Is Fibonacci REALLY O(n)?
Hint: the value of f(n) itself grows exponentially.

1. [WILL BE GRADED]
Maximum Weighted Independent Set

[HINT] an independent set is a set where no two numbers are neighbors in the original list.
see also https://en.wikipedia.org/wiki/Independent_set_(graph_theory)

input: a list of numbers (could be negative)
output: a pair of the max sum and the list of numbers chosen

>>> max_wis([7,8,5])
(12, [7,5])

>>> max_wis([-1,8,10])
(10, [10])

>>> max_wis([])
(0, [])

[HINT] if all numbers are negative, the optimal solution is 0,
since [] is an independent set according to the definition above.

>>> max_wis([-5, -1, -4])
(0, [])

Q: What's the complexity?

Include both top-down (max_wis()) and bottom-up (max_wis2()) solutions,
and make sure they produce the exact same results.
We'll only grade the top-down version.

Tie-breaking: any best solution is considered correct.

Filename: mis.py

[HINT] you can also use the naive O(2^n) exhaustive search method to verify your answer.

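A bottom-up sketch of max_wis2 (the graded top-down max_wis is the same
recurrence plus memoization; the backtrace convention is ours):

    def max_wis2(a):
        n = len(a)
        f = [0] * (n + 1)                  # f[i]: best sum within a[:i]
        for i in range(1, n + 1):
            take = (f[i-2] if i >= 2 else 0) + a[i-1]
            f[i] = max(f[i-1], take)       # skip a[i-1] vs. take it
        out, i = [], n
        while i > 0:                       # backtrace the choices
            if f[i] == f[i-1]:
                i -= 1                     # a[i-1] was skipped
            else:
                out.append(a[i-1])
                i -= 2
        return f[n], out[::-1]
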
2. Number of n-node BSTs

input: n
output: number of n-node BSTs

>>> bsts(2)
2
>>> bsts(3)
5
>>> bsts(5)
42

[HINT] There are two 2-node BSTs:

      2       1
     /         \
    1           2

Note that all other 2-node BSTs are *isomorphic* to either one.

Qa: What's the complexity of this DP?

Qb: What's the name of this famous number series?

Feel free to use any implementation style.

Filename: bsts.py

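A minimal bottom-up sketch (sum over where the root ranks among the m
nodes; only the name bsts is fixed):

    def bsts(n):
        c = [1] + [0] * n                  # c[0] = 1: the empty tree
        for m in range(1, n + 1):
            c[m] = sum(c[i] * c[m-1-i] for i in range(m))
        return c[n]
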
3. Number of bit strings of length n that have

1) no two consecutive 0s.
2) two consecutive 0s.

>>> num_no(3)
5
>>> num_yes(3)
3

[HINT] There are three 3-bit 0/1-strings that have two consecutive 0s:
001 100 000
The other five 3-bit 0/1-strings have no two consecutive 0s:
010 011 101 110 111

Feel free to choose any implementation style.

Filename: bitstrings.py

[HINT] Like problem 1, you can also use the O(2^n) exhaustive search method to verify your answer.

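A Fibonacci-style sketch (count strings with no "00" by their last bit;
num_yes is the complement):

    def num_no(n):
        end0, end1 = 1, 1                  # length-1 strings ending in 0 / 1
        for _ in range(n - 1):
            end0, end1 = end1, end0 + end1 # a 0 may only follow a 1
        return end0 + end1

    def num_yes(n):
        return 2 ** n - num_no(n)          # complement of "no two 0s"
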
Debriefing (required!): --------------------------

0. What's your name?
1. Approximately how many hours did you spend on this assignment?
2. Would you rate it as easy, moderate, or difficult?
3. Did you work on it mostly alone, or mostly with other people?
4. How deeply do you feel you understand the material it covers (0%-100%)?
5. Which part(s) of the course do you like the most so far?
6. Which part(s) of the course do you dislike the most so far?

This section is intended to help us calibrate the homework assignments.
Your answers to this section will *not* affect your grade; however, skipping
it certainly will.
114
code/cs325-langs/hws/hw6.txt
Normal file
@@ -0,0 +1,114 @@
CS 325-001, Algorithms, Fall 2019
HW6 - DP (part 2)

Due on Monday Nov 4, 11:59pm.
No late submission will be accepted.

Need to submit: report.txt, knapsack_unbounded.py, knapsack_bounded.py.
knapsack_bounded.py will be graded for correctness (1%).

To submit:
flip $ /nfs/farm/classes/eecs/fall2019/cs325-001/submit hw6 report.txt knapsack*.py
(You can submit each file separately, or submit them together.)

To see your best results so far:
flip $ /nfs/farm/classes/eecs/fall2019/cs325-001/query hw6

Textbooks for References:
[1] KT Ch. 6.4
or Ch. 5.3 in a previous version:
http://cs.furman.edu/~chealy/cs361/kleinbergbook.pdf
[2] KT slides for DP (pages 1-37):
https://www.cs.princeton.edu/~wayne/kleinberg-tardos/pdf/06DynamicProgrammingI.pdf
[3] Wikipedia: Knapsack (unbounded and 0/1)
[4] CLRS Ch. 15

Please answer time/space complexities for each problem in report.txt.

0. For each of the coding problems below:
(a) Describe a greedy solution.
(b) Show a counterexample to the greedy solution.
(c) Define the DP subproblem
(d) Write the recurrence relations
(e) Do not forget base cases
(f) Analyze the space and time complexities

1. Unbounded Knapsack

You have n items, each with weight w_i and value v_i, and each has infinite copies.
**All numbers are positive integers.**
What's the best value for a bag of W?

>>> best(3, [(2, 4), (3, 5)])
(5, [0, 1])

The input to the best() function is W and a list of pairs (w_i, v_i).
This output means to take 0 copies of item 1 and 1 copy of item 2.

Tie-breaking: *reverse* lexicographical, i.e., [1, 0] is better than [0, 1]
(i.e., take as many copies of the first item as possible, etc.):

>>> best(3, [(1, 5), (1, 5)])
(15, [3, 0])

>>> best(3, [(1, 2), (1, 5)])
(15, [0, 3])

>>> best(3, [(1, 2), (2, 5)])
(7, [1, 1])

>>> best(58, [(5, 9), (9, 18), (6, 12)])
(114, [2, 4, 2])

>>> best(92, [(8, 9), (9, 10), (10, 12), (5, 6)])
(109, [1, 1, 7, 1])

Q: What are the time and space complexities?

filename: knapsack_unbounded.py

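A minimal bottom-up sketch (f[x] is the best value at capacity x; the
back-pointer bookkeeping is ours, and we don't attempt the reverse-
lexicographic tie-breaking here):

    def best(W, items):
        f = [0] * (W + 1)
        back = [None] * (W + 1)            # last item added at capacity x
        for x in range(1, W + 1):
            for i, (w, v) in enumerate(items):
                if w <= x and f[x-w] + v > f[x]:
                    f[x], back[x] = f[x-w] + v, i
        counts, x = [0] * len(items), W
        while x > 0:                       # walk back through the choices
            if back[x] is None:
                x -= 1                     # capacity x was not fully used
            else:
                counts[back[x]] += 1
                x -= items[back[x]][0]
        return f[W], counts
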
2. [WILL BE GRADED]
Bounded Knapsack

You have n items, each with weight w_i, value v_i, and c_i copies.
**All numbers are positive integers.**
What's the best value for a bag of W?

>>> best(3, [(2, 4, 2), (3, 5, 3)])
(5, [0, 1])

The input to the best() function is W and a list of triples (w_i, v_i, c_i).

Tie-breaking: same as in p1:

>>> best(3, [(1, 5, 2), (1, 5, 3)])
(15, [2, 1])

>>> best(3, [(1, 5, 1), (1, 5, 3)])
(15, [1, 2])

>>> best(20, [(1, 10, 6), (3, 15, 4), (2, 10, 3)])
(130, [6, 4, 1])

>>> best(92, [(1, 6, 6), (6, 15, 7), (8, 9, 8), (2, 4, 7), (2, 20, 2)])
(236, [6, 7, 3, 7, 2])

Q: What are the time and space complexities?

filename: knapsack_bounded.py

You are encouraged to come up with a few other testcases yourself to test your code!

Debriefing (required!): --------------------------

0. What's your name?
1. Approximately how many hours did you spend on this assignment?
2. Would you rate it as easy, moderate, or difficult?
3. Did you work on it mostly alone, or mostly with other people?
4. How deeply do you feel you understand the material it covers (0%-100%)?
5. Which part(s) of the course do you like the most so far?
6. Which part(s) of the course do you dislike the most so far?

This section is intended to help us calibrate the homework assignments.
Your answers to this section will *not* affect your grade; however, skipping
it certainly will.
147
code/cs325-langs/hws/hw8.txt
Normal file
@@ -0,0 +1,147 @@
CS 325-001, Algorithms, Fall 2019
HW8 - Graphs (part I); DP (part III)

Due on Monday November 18, 11:59pm.
No late submission will be accepted.

Include in your submission: report.txt, topol.py, viterbi.py.
viterbi.py will be graded for correctness (1%).

To submit:
flip $ /nfs/farm/classes/eecs/fall2019/cs325-001/submit hw8 report.txt {topol,viterbi}.py
(You can submit each file separately, or submit them together.)

To see your best results so far:
flip $ /nfs/farm/classes/eecs/fall2019/cs325-001/query hw8

Textbooks for References:
[1] CLRS Ch. 22 (Elementary Graph Algorithms)
[2] KT Ch. 3 (graphs), or Ch. 2 in this earlier version:
http://cs.furman.edu/~chealy/cs361/kleinbergbook.pdf
[3] KT slides (highly recommended!):
https://www.cs.princeton.edu/~wayne/kleinberg-tardos/pdf/03Graphs.pdf
[4] Jeff Erickson: Ch. 5 (Basic Graph Algorithms):
http://jeffe.cs.illinois.edu/teaching/algorithms/book/05-graphs.pdf
[5] DPV Ch. 3, 4.2, 4.4, 4.7 (Dasgupta, Papadimitriou, Vazirani)
https://www.cs.berkeley.edu/~vazirani/algorithms/chap3.pdf (decomposition of graphs)
https://www.cs.berkeley.edu/~vazirani/algorithms/chap4.pdf (paths, shortest paths)
[6] my advanced DP tutorial (up to page 16):
http://web.engr.oregonstate.edu/~huanlian/slides/COLING-tutorial-anim.pdf

Please answer non-coding questions in report.txt.

0. For the following graphs, decide whether they are
(1) directed or undirected, (2) dense or sparse, and (3) cyclic or acyclic:

(a) Facebook
(b) Twitter
(c) a family
(d) V=airports, E=direct_flights
(e) a mesh
(f) V=courses, E=prerequisites
(g) a tree
(h) V=linux_software_packages, E=dependencies
(i) DP subproblems for 0-1 knapsack

Can you name a very big dense graph?

1. Topological Sort

For a given directed graph, output a topological order if it exists.

Tie-breaking: arbitrary. This will make the code
and time complexity analysis a lot easier.

e.g., for the following example:

   0 --> 2 --> 3 --> 5 --> 6
        / \    |    / \
       /   \   v   /   \
      1     > 4 >       7

>>> order(8, [(0,2), (1,2), (2,3), (2,4), (3,4), (3,5), (4,5), (5,6), (5,7)])
[0, 1, 2, 3, 4, 5, 6, 7]

Note that order() takes two arguments, n and list_of_edges,
where n specifies that the nodes are named 0..(n-1).

If we flip the (3,4) edge:

>>> order(8, [(0,2), (1,2), (2,3), (2,4), (4,3), (3,5), (4,5), (5,6), (5,7)])
[0, 1, 2, 4, 3, 5, 6, 7]

If there is a cycle, return None:

>>> order(4, [(0,1), (1,2), (2,1), (2,3)])
None

Other cases:

>>> order(5, [(0,1), (1,2), (2,3), (3,4)])
[0, 1, 2, 3, 4]

>>> order(5, [])
[0, 1, 2, 3, 4]   # could be any order

>>> order(3, [(1,2), (2,1)])
None

>>> order(1, [(0,0)])   # self-loop
None

Tie-breaking: arbitrary (any valid topological order is fine).

filename: topol.py

questions:
(a) did you realize that bottom-up implementations of DP use (implicit) topological orderings?
e.g., what is the topological ordering in your (or my) bottom-up bounded knapsack code?
(b) what about top-down implementations? what order do they use to traverse the graph?
(c) does that suggest there is a top-down solution for topological sort as well?

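A worklist sketch of one standard approach (Kahn's in-degree algorithm,
O(V+E), with arbitrary tie-breaking):

    def order(n, edges):
        adj = [[] for _ in range(n)]
        indeg = [0] * n
        for u, v in edges:
            adj[u].append(v)
            indeg[v] += 1
        stack = [u for u in range(n) if indeg[u] == 0]
        out = []
        while stack:
            u = stack.pop()
            out.append(u)
            for v in adj[u]:
                indeg[v] -= 1
                if indeg[v] == 0:
                    stack.append(v)
        return out if len(out) == n else None   # leftovers mean a cycle
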
2. [WILL BE GRADED]
Viterbi Algorithm For Longest Path in DAG (see DPV 4.7, [2], CLRS problem 15-1)

Recall that the Viterbi algorithm has just two steps:
a) get a topological order (use problem 1 above)
b) follow that order, and do either forward or backward updates

This algorithm captures all DP problems on DAGs, for example,
longest path, shortest path, number of paths, etc.

In this problem, given a DAG (guaranteed acyclic!), output a pair (l, p)
where l is the length of the longest path (number of edges), and p is the
path. (You can think of each edge as having unit cost.)

e.g., for the above example:

>>> longest(8, [(0,2), (1,2), (2,3), (2,4), (3,4), (3,5), (4,5), (5,6), (5,7)])
(5, [0, 2, 3, 4, 5, 6])

>>> longest(8, [(0,2), (1,2), (2,3), (2,4), (4,3), (3,5), (4,5), (5,6), (5,7)])
(5, [0, 2, 4, 3, 5, 6])

>>> longest(8, [(0,1), (0,2), (1,2), (2,3), (2,4), (4,3), (3,5), (4,5), (5,6), (5,7), (6,7)])
(7, [0, 1, 2, 4, 3, 5, 6, 7])   # unique answer

Note that longest() takes two arguments, n and list_of_edges,
where n specifies that the nodes are named 0..(n-1).

Tie-breaking: arbitrary. Any longest path is fine.

Filename: viterbi.py

Note: you can use this program to solve MIS, knapsacks, coins, etc.

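A forward-update sketch on top of order() from problem 1 (best[v] is the
longest distance into v; the back-pointers, our addition, recover the path):

    def longest(n, edges):
        topo = order(n, edges)             # from problem 1 (topol.py)
        adj = [[] for _ in range(n)]
        for u, v in edges:
            adj[u].append(v)
        best, back = [0] * n, [None] * n
        for u in topo:                     # forward updates in topological order
            for v in adj[u]:
                if best[u] + 1 > best[v]:
                    best[v], back[v] = best[u] + 1, u
        end = max(range(n), key=lambda v: best[v])
        path = [end]
        while back[path[-1]] is not None:
            path.append(back[path[-1]])
        return best[end], path[::-1]
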
Debriefing (required!): --------------------------

0. What's your name?
1. Approximately how many hours did you spend on this assignment?
2. Would you rate it as easy, moderate, or difficult?
3. Did you work on it mostly alone, or mostly with other people?
4. How deeply do you feel you understand the material it covers (0%-100%)?
5. Any other comments?

This section is intended to help us calibrate the homework assignments.
Your answers to this section will *not* affect your grade; however, skipping
it certainly will.
166
code/cs325-langs/hws/hw9.txt
Normal file
@@ -0,0 +1,166 @@
CS 325, Algorithms, Fall 2019
HW9 - Graphs (part 2), DP (part 4)

Due Monday Nov 25, 11:59pm.
No late submission will be accepted.

Include in your submission: report.txt, dijkstra.py, nbest.py.
dijkstra.py will be graded for correctness (1%).

Textbooks for References:
[1] CLRS Ch. 22 (graph)
[2] my DP tutorial (up to page 16):
http://web.engr.oregonstate.edu/~huanlian/slides/COLING-tutorial-anim.pdf
[3] DPV Ch. 3, 4.2, 4.4, 4.7, 6 (Dasgupta, Papadimitriou, Vazirani)
https://www.cs.berkeley.edu/~vazirani/algorithms/chap3.pdf
https://www.cs.berkeley.edu/~vazirani/algorithms/chap4.pdf
https://www.cs.berkeley.edu/~vazirani/algorithms/chap6.pdf
[4] KT Ch. 6 (DP)
http://www.aw-bc.com/info/kleinberg/assets/downloads/ch6.pdf
[5] KT slides: Greedy II (Dijkstra)
http://www.cs.princeton.edu/~wayne/kleinberg-tardos/

***Please answer time/space complexities for each problem in report.txt.

1. [WILL BE GRADED]
Dijkstra (see CLRS 24.3 and DPV 4.4)

Given an undirected graph, find the shortest path from source (node 0)
to target (node n-1).

Edge weights are guaranteed to be non-negative, since Dijkstra doesn't work
with negative weights, e.g.

          3
      0 ------ 1
       \      /
      2 \    / -2
         \  /
          2

in this example, Dijkstra would return length 2 (path 0-2),
but path 0-1-2 is better (length 1).

For example (return a pair of shortest-distance and shortest-path):

          1
      0 ------ 1
       \      / \
      5 \  1 /   \ 6
         \  /  2  \
          2 ------ 3

>>> shortest(4, [(0,1,1), (0,2,5), (1,2,1), (2,3,2), (1,3,6)])
(4, [0,1,2,3])

If the target node (n-1) is unreachable from the source (0),
return None:

>>> shortest(5, [(0,1,1), (0,2,5), (1,2,1), (2,3,2), (1,3,6)])
None

Another example:

   1         1
0-----1   2-----3

>>> shortest(4, [(0,1,1), (2,3,1)])
None

Tie-breaking: arbitrary. Any shortest path would do.

Filename: dijkstra.py

Hint: please use heapdict from here:
https://raw.githubusercontent.com/DanielStutzbach/heapdict/master/heapdict.py

>>> from heapdict import heapdict
>>> h = heapdict()
>>> h['a'] = 3
>>> h['b'] = 1
>>> h.peekitem()
('b', 1)
>>> h['a'] = 0
>>> h.peekitem()
('a', 0)
>>> h.popitem()
('a', 0)
>>> len(h)
1
>>> 'a' in h
False
>>> 'b' in h
True

You don't need to submit heapdict.py; we have it in our grader.

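A decrease-key sketch with heapdict (the name shortest is fixed by the
assignment; the parent bookkeeping is ours):

    from heapdict import heapdict

    def shortest(n, edges):
        adj = [[] for _ in range(n)]
        for u, v, w in edges:
            adj[u].append((v, w))          # undirected: add both directions
            adj[v].append((u, w))
        h = heapdict()
        h[0] = 0
        done, parent = set(), {0: None}
        while len(h):
            u, d = h.popitem()             # closest unsettled node
            done.add(u)
            if u == n - 1:
                path = [u]
                while parent[path[-1]] is not None:
                    path.append(parent[path[-1]])
                return d, path[::-1]
            for v, w in adj[u]:
                if v not in done and (v not in h or d + w < h[v]):
                    h[v] = d + w           # insert or decrease-key
                    parent[v] = u
        return None                        # target unreachable
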
2. [Redo the nbest question from Midterm, preparing for HW10 part 3]

Given k pairs of lists A_i and B_i (0 <= i < k), each with n sorted numbers,
find the n smallest pairs in all the (k n^2) pairs.
We say (x,y) < (x', y') if and only if x+y < x'+y'.
Tie-breaking: lexicographical (i.e., prefer smaller x).

You can base your code on the skeleton from the Midterm:

from heapq import heappush, heappop

def nbest(ABs):                        # no need to pass in k or n
    k = len(ABs)
    n = len(ABs[0][0])

    def trypush(i, p, q):              # push pair (A_i,p, B_i,q) if possible
        A, B = ABs[i]                  # A_i, B_i
        if p < n and q < n and ______________________________:
            heappush(h, (________________, i, p, q, (A[p],B[q])))
            used.add((i, p, q))

    h, used = ___________________      # initialize
    for i in range(k):                 # NEED TO OPTIMIZE
        trypush(______________)
    for _ in range(n):
        _, i, p, q, pair = ________________
        yield pair                     # return the next pair (in a lazy list)
        _______________________
        _______________________

But recall we had two optimizations to speed up the first for-loop (queue initialization):

(1) using heapify instead of k initial pushes. You need to implement this (very easy).

(2) using qselect to choose top n out of the k bests. This one is OPTIONAL.

Analyze the time complexity for the version you implemented.

>>> list(nbest([([1,2,4], [2,3,5]), ([0,2,4], [3,4,5])]))
[(0, 3), (1, 2), (0, 4)]

>>> list(nbest([([-1,2],[1,4]), ([0,2],[3,4]), ([0,1],[4,6]), ([-1,2],[1,5])]))
[(-1, 1), (-1, 1)]

>>> list(nbest([([5,6,10,14],[3,5,10,14]),([2,7,9,11],[3,8,12,16]),([1,3,8,10],[5,9,10,11]),([1,2,3,5],[3,4,9,10]),([4,5,9,10],[2,4,6,11]),([4,6,10,13],[2,3,5,9]),([3,7,10,12],[1,2,5,10]),([5,9,14,15],[4,8,13,14])]))
[(1, 3), (3, 1), (1, 4), (2, 3)]

>>> list(nbest([([1,6,8,13],[5,8,11,12]),([1,2,3,5],[5,9,11,13]),([3,5,7,10],[4,6,7,11]),([1,4,7,8],[4,9,11,15]),([4,8,10,13],[4,6,10,11]),([4,8,12,15],[5,10,11,13]),([2,3,4,8],[4,7,11,15]),([4,5,10,15],[5,6,7,8])]))
[(1, 4), (1, 5), (1, 5), (2, 4)]

This problem prepares you for the hardest question in HW10 (part 3).

Filename: nbest.py

Debriefing (required!): --------------------------

0. What's your name?
1. Approximately how many hours did you spend on this assignment?
2. Would you rate it as easy, moderate, or difficult?
3. Did you work on it mostly alone, or mostly with other people?
4. How deeply do you feel you understand the material it covers (0%-100%)?
5. Any other comments?

This section is intended to help us calibrate the homework assignments.
Your answers to this section will *not* affect your grade; however, skipping
it certainly will.
19
code/cs325-langs/sols/hw1.lang
Normal file
@@ -0,0 +1,19 @@
qselect(xs,k) =
    ~xs -> {
        pivot <- xs[0]!
        left <- xs[#0 <= pivot]
        right <- xs[#0 > pivot]
    } ->
    if k > |left| + 1 then qselect(right, k - |left| - 1)
    else if k == |left| + 1 then [pivot]
    else qselect(left, k);

_search(xs, k) =
    if xs[1] == k then xs
    else if xs[1] > k then _search(xs[0], k)
    else _search(xs[2], k);

sorted(xs) = sorted(xs[0]) ++ [xs[1]] ++ sorted(xs[2]);
search(xs, k) = |_search(xs, k)| != 0;
insert(xs, k) = _insert(k, _search(xs, k));
_insert(k, xs) = if |xs| == 0 then xs << [] << k << [] else xs
11
code/cs325-langs/sols/hw2.lang
Normal file
@@ -0,0 +1,11 @@
state 0;

effect {
    if(SOURCE == R) {
        STATE = STATE + |LEFT|;
    }
}

combine {
    STATE = STATE + LSTATE + RSTATE;
}
95
code/cs325-langs/sols/hw3.lang
Normal file
@@ -0,0 +1,95 @@
function qselect(xs, k, c) {
    if xs == [] {
        return 0;
    }

    traverser bisector(list: xs, span: (0,len(xs)));
    traverser pivot(list: xs, random: true);

    let pivotE = pop!(pivot);
    let (leftList, rightList) = bisect!(bisector, (x) -> c(x) < c(pivotE));

    if k > len(leftList) + 1 {
        return qselect(rightList, k - len(leftList) - 1, c);
    } elsif k == len(leftList) + 1 {
        return pivotE;
    } else {
        return qselect(leftList, k, c);
    }
}

function closestUnsorted(xs, k, n) {
    let min = qselect(list(xs), k, (x) -> abs(x - n));
    let out = [];
    let countEqual = k;

    traverser iter(list: xs, span: (0, len(xs)));
    while valid!(iter) {
        if abs(at!(iter)-n) < abs(min-n) {
            let countEqual = countEqual - 1;
        }
        step!(iter);
    }

    traverser iter(list: xs, span: (0, len(xs)));
    while valid!(iter) {
        if abs(at!(iter)-n) == abs(min-n) and countEqual > 0 {
            let countEqual = countEqual - 1;
            let out = out + [at!(iter)];
        } elsif abs(at!(iter)-n) < abs(min-n) {
            let out = out + [at!(iter)];
        }
        step!(iter);
    }

    return out;
}

function closestSorted(xs, k, n) {
    let start = bisect(xs, n);
    let counter = 0;
    traverser left(list: xs, span: (0, start), reverse: true);
    traverser right(list: xs, span: (start, len(xs)));

    while counter != k and canstep!(left) and valid!(right) {
        if abs(at!(left, 1) - n) < abs(at!(right) - n) {
            step!(left);
        } else {
            step!(right);
        }
        let counter = counter + 1;
    }

    while counter != k and (canstep!(left) or valid!(right)) {
        if canstep!(left) { step!(left); }
        else { step!(right); }
        let counter = counter + 1;
    }

    return subset!(left, right);
}

sorted function xyz(xs, k) {
    traverser x(list: xs, span: (0,len(xs)));
    let dest = [];

    while valid!(x) {
        traverser z(list: xs, span: (pos!(x)+2,len(xs)));
        traverser y(list: xs, span: (pos!(x)+1,pos!(z)));

        while valid!(y) and valid!(z) {
            if at!(x) + at!(y) == at!(z) {
                let dest = dest + [(at!(x), at!(y), at!(z))];
                step!(z);
            } elsif at!(x) + at!(y) > at!(z) {
                step!(z);
            } else {
                step!(y);
            }
        }

        step!(x);
    }

    return dest;
}
15
code/cs325-langs/src/Common.hs
Normal file
@@ -0,0 +1,15 @@
module Common where
import PythonAst
import PythonGen
import Text.Parsec

compile :: (String -> String -> Either ParseError p) -> (p -> [PyStmt]) -> String -> IO ()
compile p t f = do
    let inputName = f ++ ".lang"
    let outputName = f ++ ".py"
    file <- readFile inputName
    let either = p inputName file
    case either of
        Right prog -> writeFile outputName (translate $ t prog)
        Left e -> print e
90
code/cs325-langs/src/CommonParsing.hs
Normal file
@@ -0,0 +1,90 @@
module CommonParsing where
import Data.Char
import Data.Functor
import Text.Parsec
import Text.Parsec.Char
import Text.Parsec.Combinator

type Parser a b = Parsec String a b

kw :: String -> Parser a ()
kw s = try $ string s <* spaces $> ()

kwIf :: Parser a ()
kwIf = kw "if"

kwThen :: Parser a ()
kwThen = kw "then"

kwElse :: Parser a ()
kwElse = kw "else"

kwElsif :: Parser a ()
kwElsif = kw "elsif"

kwWhile :: Parser a ()
kwWhile = kw "while"

kwState :: Parser a ()
kwState = kw "state"

kwEffect :: Parser a ()
kwEffect = kw "effect"

kwCombine :: Parser a ()
kwCombine = kw "combine"

kwRand :: Parser a ()
kwRand = kw "rand"

kwFunction :: Parser a ()
kwFunction = kw "function"

kwSorted :: Parser a ()
kwSorted = kw "sorted"

kwLet :: Parser a ()
kwLet = kw "let"

kwTraverser :: Parser a ()
kwTraverser = kw "traverser"

kwReturn :: Parser a ()
kwReturn = kw "return"

op :: String -> op -> Parser a op
op s o = string s $> o

int :: Parser a Int
int = read <$> (many1 digit <* spaces)

var :: [String] -> Parser a String
var reserved =
    do
        c <- satisfy $ \c -> isLetter c || c == '_'
        cs <- many (satisfy isLetter <|> digit) <* spaces
        let name = c:cs
        if name `elem` reserved
            then fail "Can't use reserved keyword as identifier"
            else return name

list :: Char -> Char -> Char -> Parser a b -> Parser a [b]
list co cc cd pe = surround co cc $ sepBy pe (char cd >> spaces)

surround :: Char -> Char -> Parser a b -> Parser a b
surround c1 c2 pe =
    do
        char c1 >> spaces
        e <- pe
        spaces >> char c2 >> spaces
        return e

level :: (o -> e -> e -> e) -> Parser a o -> Parser a e -> Parser a e
level c po pe =
    do
        e <- pe <* spaces
        ops <- many $ try $ (flip . c <$> (po <* spaces) <*> pe) <* spaces
        return $ foldl (flip ($)) e ops

precedence :: (o -> e -> e -> e) -> Parser a e -> [ Parser a o ] -> Parser a e
precedence = foldl . flip . level
393
code/cs325-langs/src/LanguageOne.hs
Normal file
@@ -0,0 +1,393 @@
module LanguageOne where
import qualified PythonAst as Py
import qualified CommonParsing as P
import Data.Bifunctor
import Data.Char
import Data.Functor
import qualified Data.Map as Map
import Data.Maybe
import qualified Data.Set as Set
import Text.Parsec
import Text.Parsec.Char
import Text.Parsec.Combinator
import Control.Monad.State

{- Data Types -}
data PossibleType = List | Any deriving Eq

data SelectorMarker = None | Remove

data Op
    = Add
    | Subtract
    | Multiply
    | Divide
    | Insert
    | Concat
    | LessThan
    | LessThanEq
    | GreaterThan
    | GreaterThanEq
    | Equal
    | NotEqual
    | And
    | Or

data Selector = Selector String Expr

data Expr
    = Var String
    | IntLiteral Int
    | ListLiteral [Expr]
    | Split Expr [Selector] Expr
    | IfElse Expr Expr Expr
    | BinOp Op Expr Expr
    | FunctionCall Expr [Expr]
    | LengthOf Expr
    | Random
    | Access Expr Expr SelectorMarker
    | Parameter Int

data Function = Function String [String] Expr

data Prog = Prog [Function]

{- Parser -}
type Parser = Parsec String (Maybe Int)

parseVar :: Parser String
parseVar = P.var ["if", "then", "else", "var"]

parseThis :: Parser Expr
parseThis =
    do
        char '&'
        contextNum <- getState
        spaces
        return (Var $ "context_" ++ show contextNum)

parseList :: Parser Expr
parseList = ListLiteral <$>
    do
        char '[' >> spaces
        es <- sepBy parseExpr (char ',' >> spaces)
        spaces >> char ']' >> spaces
        return es

parseSplit :: Parser Expr
parseSplit =
    do
        char '~' >> spaces
        e <- parseExpr
        spaces >> string "->"
        spaces >> char '{'
        contextNum <- getState
        putState $ return $ 1 + fromMaybe (-1) contextNum
        es <- many1 (spaces >> parseSelector)
        putState contextNum
        spaces >> char '}' >> spaces >> string "->" >> spaces
        e' <- parseExpr
        spaces
        return $ Split e es e'

parseSelectorMarker :: Parser SelectorMarker
parseSelectorMarker = (char '!' >> return Remove) <|> return None

parseSelector :: Parser Selector
parseSelector =
    do
        name <- parseVar
        spaces >> string "<-" >> spaces
        expr <- parseExpr
        spaces
        return $ Selector name expr

parseIfElse :: Parser Expr
parseIfElse =
    do
        P.kwIf >> spaces
        ec <- parseExpr
        spaces >> P.kwThen >> spaces
        et <- parseExpr
        spaces >> P.kwElse >> spaces
        ee <- parseExpr
        spaces
        return $ IfElse ec et ee

parseLength :: Parser Expr
parseLength =
    do
        char '|' >> spaces
        e <- parseExpr
        spaces >> char '|' >> spaces
        return $ LengthOf e

parseParameter :: Parser Expr
parseParameter =
    do
        char '#'
        d <- digit
        spaces
        return $ Parameter $ read [d]

parseParenthesized :: Parser Expr
parseParenthesized =
    do
        char '(' >> spaces
        e <- parseExpr
        spaces >> char ')' >> spaces
        return e

parseBasicExpr :: Parser Expr
parseBasicExpr = choice
    [ IntLiteral <$> P.int
    , parseThis
    , parseList
    , parseSplit
    , parseLength
    , parseParameter
    , parseParenthesized
    , Var <$> try parseVar
    , P.kwRand $> Random
    , parseIfElse
    ]

parsePostfix :: Parser (Expr -> Expr)
parsePostfix = parsePostfixAccess <|> parsePostfixCall

parsePostfixAccess :: Parser (Expr -> Expr)
parsePostfixAccess =
    do
        char '[' >> spaces
        e <- parseExpr
        spaces >> char ']' >> spaces
        marker <- parseSelectorMarker
        spaces
        return $ \e' -> Access e' e marker

parsePostfixCall :: Parser (Expr -> Expr)
parsePostfixCall =
    do
        char '(' >> spaces
        es <- sepBy parseExpr (char ',' >> spaces)
        char ')' >> spaces
        return $ flip FunctionCall es

parsePostfixedExpr :: Parser Expr
parsePostfixedExpr =
    do
        eb <- parseBasicExpr
        spaces
        ps <- many parsePostfix
        return $ foldl (flip ($)) eb ps

parseExpr :: Parser Expr
parseExpr = P.precedence BinOp parsePostfixedExpr
    [ P.op "*" Multiply, P.op "/" Divide
    , P.op "+" Add, P.op "-" Subtract
    , P.op "<<" Insert
    , P.op "++" Concat
    , try (P.op "<=" LessThanEq) <|> try (P.op ">=" GreaterThanEq) <|>
      P.op "<" LessThan <|> P.op ">" GreaterThan <|>
      P.op "==" Equal <|> P.op "!=" NotEqual
    , P.op "&&" And <|> P.op "||" Or
    ]

parseFunction :: Parser Function
parseFunction =
    do
        name <- parseVar
        spaces >> char '(' >> spaces
        vs <- sepBy parseVar (char ',' >> spaces)
        spaces >> char ')' >> spaces >> char '=' >> spaces
        body <- parseExpr
        spaces
        return $ Function name vs body

parseProg :: Parser Prog
parseProg = Prog <$> sepBy1 parseFunction (char ';' >> spaces)

parse :: SourceName -> String -> Either ParseError Prog
parse = runParser parseProg Nothing

{- "Type" checker -}
mergePossibleType :: PossibleType -> PossibleType -> PossibleType
mergePossibleType List _ = List
mergePossibleType _ List = List
mergePossibleType _ _ = Any

getPossibleType :: String -> Expr -> PossibleType
getPossibleType s (Var s') = if s == s' then List else Any
getPossibleType _ (ListLiteral _) = List
getPossibleType s (Split _ _ e) = getPossibleType s e
getPossibleType s (IfElse i t e) =
    foldl1 mergePossibleType $ map (getPossibleType s) [i, t, e]
getPossibleType _ (BinOp Insert _ _) = List
getPossibleType _ (BinOp Concat _ _) = List
getPossibleType _ _ = Any

{- Translator -}
type Translator = Control.Monad.State.State (Map.Map String [String], Int)

currentTemp :: Translator String
currentTemp = do
    t <- gets snd
    return $ "temp" ++ show t

incrementTemp :: Translator String
incrementTemp = do
    modify (second (+1))
    currentTemp

hasLambda :: Expr -> Bool
hasLambda (ListLiteral es) = any hasLambda es
hasLambda (Split e ss r) =
    hasLambda e || any (\(Selector _ e') -> hasLambda e') ss || hasLambda r
hasLambda (IfElse i t e) = hasLambda i || hasLambda t || hasLambda e
hasLambda (BinOp o l r) = hasLambda l || hasLambda r
hasLambda (FunctionCall e es) = any hasLambda $ e : es
hasLambda (LengthOf e) = hasLambda e
hasLambda (Access e _ _) = hasLambda e
hasLambda Parameter{} = True
hasLambda _ = False

translate :: Prog -> [Py.PyStmt]
translate p = fst $ runState (translateProg p) (Map.empty, 0)

translateProg :: Prog -> Translator [Py.PyStmt]
translateProg (Prog fs) = concat <$> traverse translateFunction fs

translateFunction :: Function -> Translator [Py.PyStmt]
translateFunction (Function n ps ex) = do
    let createIf p = Py.BinOp Py.Equal (Py.Var p) (Py.ListLiteral [])
    let createReturn p = Py.IfElse (createIf p) [Py.Return (Py.Var p)] [] Nothing
    let fastReturn = [createReturn p | p <- take 1 ps, getPossibleType p ex == List]
    (ss, e) <- translateExpr ex
    return $ return $ Py.FunctionDef n ps $ fastReturn ++ ss ++ [Py.Return e]

translateSelector :: Selector -> Translator Py.PyStmt
translateSelector (Selector n e) =
    let
        cacheCheck = Py.NotIn (Py.StrLiteral n) (Py.Var "cache")
        cacheAccess = Py.Access (Py.Var "cache") [Py.StrLiteral n]
        cacheSet = Py.Assign (Py.AccessPat (Py.Var "cache") [Py.StrLiteral n])
        body e' = [ Py.IfElse cacheCheck [cacheSet e'] [] Nothing, Py.Return cacheAccess]
    in
    do
        (ss, e') <- translateExpr e
        vs <- gets fst
        let callPrereq p = Py.Standalone $ Py.FunctionCall (Py.Var p) []
        let prereqs = maybe [] (map callPrereq) $ Map.lookup n vs
        return $ Py.FunctionDef n [] $ ss ++ prereqs ++ body e'

translateExpr :: Expr -> Translator ([Py.PyStmt], Py.PyExpr)
translateExpr (Var s) = do
    vs <- gets fst
    let sVar = Py.Var s
    let expr = if Map.member s vs then Py.FunctionCall sVar [] else sVar
    return ([], expr)
translateExpr (IntLiteral i) = return ([], Py.IntLiteral i)
translateExpr (ListLiteral l) = do
    tl <- mapM translateExpr l
    return (concatMap fst tl, Py.ListLiteral $ map snd tl)
translateExpr (Split e ss e') = do
    vs <- gets fst
    let cacheAssign = Py.Assign (Py.VarPat "cache") (Py.DictLiteral [])
    let cacheStmt = [ cacheAssign | Map.size vs == 0 ]
    let vnames = map (\(Selector n es) -> n) ss
    let prereqs = snd $ foldl (\(ds, m) (Selector n es) -> (n:ds, Map.insert n ds m)) ([], Map.empty) ss
    modify $ first $ Map.union prereqs
    fs <- mapM translateSelector ss
    (sts, te) <- translateExpr e'
    modify $ first $ const vs
    return (cacheStmt ++ fs ++ sts, te)
translateExpr (IfElse i t e) = do
    temp <- incrementTemp
    let tempPat = Py.VarPat temp
    (ists, ie) <- translateExpr i
    (tsts, te) <- translateExpr t
    (ests, ee) <- translateExpr e
    let thenSts = tsts ++ [Py.Assign tempPat te]
    let elseSts = ests ++ [Py.Assign tempPat ee]
    let newIf = Py.IfElse ie thenSts [] $ Just elseSts
    return (ists ++ [newIf], Py.Var temp)
translateExpr (BinOp o l r) = do
    (lsts, le) <- translateExpr l
    (rsts, re) <- translateExpr r
    (opsts, oe) <- translateOp o le re
    return (lsts ++ rsts ++ opsts, oe)
translateExpr (FunctionCall f ps) = do
    (fsts, fe) <- translateExpr f
    tps <- mapM translateExpr ps
    return (fsts ++ concatMap fst tps, Py.FunctionCall fe $ map snd tps)
translateExpr (LengthOf e) =
    second (Py.FunctionCall (Py.Var "len") . return) <$> translateExpr e
translateExpr (Access e Random m) = do
    temp <- incrementTemp
    (sts, ce) <- translateExpr e
    let lenExpr = Py.FunctionCall (Py.Var "len") [Py.Var temp]
    let randExpr = Py.FunctionCall (Py.Var "randint") [ Py.IntLiteral 0, lenExpr ]
    return (sts, singleAccess ce randExpr m)
translateExpr (Access c i m) = do
    (csts, ce) <- translateExpr c
    (ists, ie) <- translateExpr i
    temp <- incrementTemp
    if hasLambda i
        then return (csts ++ ists ++ [createFilterLambda temp ie m], Py.FunctionCall (Py.Var temp) [ce])
        else return (csts ++ ists, singleAccess ce ie m)
translateExpr (Parameter i) = return $ ([], Py.Var $ "arg" ++ show i)
translateExpr _ = fail "Invalid expression"

singleAccess :: Py.PyExpr -> Py.PyExpr -> SelectorMarker -> Py.PyExpr
singleAccess c i None = Py.Access c [i]
singleAccess c i Remove = Py.FunctionCall (Py.Member c "pop") [i]

createFilterLambda :: String -> Py.PyExpr -> SelectorMarker -> Py.PyStmt
createFilterLambda s e None = Py.FunctionDef s ["arg"]
    [ Py.Assign (Py.VarPat "out") (Py.ListLiteral [])
    , Py.For (Py.VarPat "arg0") (Py.Var "arg")
        [ Py.IfElse e
            [ Py.Standalone $ Py.FunctionCall (Py.Member (Py.Var "out") "append")
                [ Py.Var "arg0" ]
            ]
            []
            Nothing
        ]
    , Py.Return $ Py.Var "out"
    ]
createFilterLambda s e Remove = Py.FunctionDef s ["arg"]
    [ Py.Assign (Py.VarPat "i") $ Py.IntLiteral 0
    , Py.Assign (Py.VarPat "out") (Py.ListLiteral [])
    , Py.While (Py.BinOp Py.LessThan (Py.Var "i") $ Py.FunctionCall (Py.Var "len") [Py.Var "arg"])
        [ Py.IfElse e
            [ Py.Standalone $ Py.FunctionCall (Py.Member (Py.Var "out") "append")
                [ singleAccess (Py.Var "arg") (Py.Var "i") Remove
                ]
            ]
            []
            Nothing
        , Py.Assign (Py.VarPat "i") (Py.BinOp Py.Add (Py.Var "i") (Py.IntLiteral 1))
        ]
    , Py.Return $ Py.Var "out"
    ]

translateOp :: Op -> Py.PyExpr -> Py.PyExpr -> Translator ([Py.PyStmt], Py.PyExpr)
translateOp Add l r = return ([], Py.BinOp Py.Add l r)
translateOp Subtract l r = return ([], Py.BinOp Py.Subtract l r)
translateOp Multiply l r = return ([], Py.BinOp Py.Multiply l r)
translateOp Divide l r = return ([], Py.BinOp Py.Divide l r)
translateOp LessThan l r = return ([], Py.BinOp Py.LessThan l r)
translateOp LessThanEq l r = return ([], Py.BinOp Py.LessThanEq l r)
translateOp GreaterThan l r = return ([], Py.BinOp Py.GreaterThan l r)
translateOp GreaterThanEq l r = return ([], Py.BinOp Py.GreaterThanEq l r)
translateOp Equal l r = return ([], Py.BinOp Py.Equal l r)
translateOp NotEqual l r = return ([], Py.BinOp Py.NotEqual l r)
translateOp And l r = return ([], Py.BinOp Py.And l r)
translateOp Or l r = return ([], Py.BinOp Py.Or l r)
translateOp Concat l r = return ([], Py.BinOp Py.Add l r)
translateOp Insert l r = do
    temp <- incrementTemp
    let assignStmt = Py.Assign (Py.VarPat temp) l
    let appendFunc = Py.Member (Py.Var temp) "append"
    let insertStmt = Py.Standalone $ Py.FunctionCall appendFunc [r]
    return ([assignStmt, insertStmt], Py.Var temp)
461
code/cs325-langs/src/LanguageThree.hs
Normal file
@@ -0,0 +1,461 @@
module LanguageThree where
import qualified CommonParsing as P
import qualified PythonAst as Py
import Control.Monad.State
import Data.Bifunctor
import Data.Foldable
import Data.Functor
import qualified Data.Map as Map
import Data.Maybe
import Text.Parsec hiding (State)
import Text.Parsec.Char
import Text.Parsec.Combinator

{- Data Types -}
data Op
    = Add
    | Subtract
    | Multiply
    | Divide
    | LessThan
    | LessThanEqual
    | GreaterThan
    | GreaterThanEqual
    | Equal
    | NotEqual
    | And
    | Or

data Expr
    = TraverserCall String [Expr]
    | FunctionCall String [Expr]
    | BinOp Op Expr Expr
    | Lambda [String] Expr
    | Var String
    | IntLiteral Int
    | BoolLiteral Bool
    | ListLiteral [Expr]
    | TupleLiteral [Expr]

type Branch = (Expr, [Stmt])

data Stmt
    = IfElse Branch [Branch] [Stmt]
    | While Branch
    | Traverser String [(String, Expr)]
    | Let Pat Expr
    | Return Expr
    | Standalone Expr

data Pat
    = VarPat String
    | TuplePat [Pat]

data SortedMarker = Sorted | Unsorted deriving Eq

data Function = Function SortedMarker String [String] [Stmt]

data Prog = Prog [Function]

{- Parser -}
type Parser = Parsec String ()

parseVar :: Parser String
parseVar = P.var
    [ "if", "elif", "else"
    , "while", "let", "traverser"
    , "function", "sort"
    , "true", "false"
    ]

parseBool :: Parser Bool
parseBool = (string "true" $> True) <|> (string "false" $> False)

parseList :: Parser Expr
parseList = ListLiteral <$> P.list '[' ']' ',' parseExpr

parseTupleElems :: Parser [Expr]
parseTupleElems = P.list '(' ')' ',' parseExpr

parseTuple :: Parser Expr
parseTuple = do
    es <- parseTupleElems
    return $ case es of
        e:[] -> e
        _ -> TupleLiteral es

parseLambda :: Parser Expr
parseLambda = try $ do
    vs <- P.list '(' ')' ',' parseVar
    string "->" >> spaces
    Lambda vs <$> parseExpr

parseCall :: Parser Expr
parseCall = try $ do
    v <- parseVar
    choice
        [ TraverserCall v <$> (char '!' *> parseTupleElems)
        , FunctionCall v <$> parseTupleElems
        ]

parseBasic :: Parser Expr
parseBasic = choice
    [ IntLiteral <$> P.int
    , BoolLiteral <$> parseBool
    , try parseCall
    , Var <$> parseVar
    , parseList
    , parseLambda
    , parseTuple
    ]

parseExpr :: Parser Expr
parseExpr = P.precedence BinOp parseBasic
    [ P.op "*" Multiply <|> P.op "/" Divide
    , P.op "+" Add <|> P.op "-" Subtract
    , P.op "==" Equal <|> P.op "!=" NotEqual <|>
      try (P.op "<=" LessThanEqual) <|> P.op "<" LessThan <|>
      try (P.op ">=" GreaterThanEqual) <|> P.op ">" GreaterThan
    , P.op "and" And
    , P.op "or" Or
    ]

parseBlock :: Parser [Stmt]
parseBlock = char '{' >> spaces >> many parseStmt <* char '}' <* spaces

parseBranch :: Parser Branch
parseBranch = (,) <$> (parseExpr <* spaces) <*> parseBlock

parseIf :: Parser Stmt
parseIf = do
    i <- P.kwIf >> parseBranch
    els <- many (P.kwElsif >> parseBranch)
    e <- try (P.kwElse >> parseBlock) <|> return []
    return $ IfElse i els e

parseWhile :: Parser Stmt
parseWhile = While <$> (P.kwWhile >> parseBranch)

parseTraverser :: Parser Stmt
parseTraverser = Traverser
    <$> (P.kwTraverser *> parseVar)
    <*> (P.list '(' ')' ',' parseKey) <* char ';' <* spaces

parseKey :: Parser (String, Expr)
parseKey = (,)
    <$> (parseVar <* spaces <* char ':' <* spaces)
    <*> parseExpr

parseLet :: Parser Stmt
parseLet = Let
    <$> (P.kwLet >> parsePat <* char '=' <* spaces)
    <*> parseExpr <* char ';' <* spaces

parseReturn :: Parser Stmt
parseReturn = Return <$> (P.kwReturn >> parseExpr <* char ';' <* spaces)

parsePat :: Parser Pat
parsePat = (VarPat <$> parseVar) <|> (TuplePat <$> P.list '(' ')' ',' parsePat)

parseStmt :: Parser Stmt
parseStmt = choice
    [ parseTraverser
    , parseLet
    , parseIf
    , parseWhile
    , parseReturn
    , Standalone <$> (parseExpr <* char ';' <* spaces)
    ]

parseFunction :: Parser Function
parseFunction = Function
    <$> (P.kwSorted $> Sorted <|> return Unsorted)
    <*> (P.kwFunction >> parseVar)
    <*> (P.list '(' ')' ',' parseVar)
    <*> parseBlock

parseProg :: Parser Prog
parseProg = Prog <$> many parseFunction

parse :: String -> String -> Either ParseError Prog
parse = runParser parseProg ()

{- Translation -}
data TraverserBounds = Range Py.PyExpr Py.PyExpr | Random

data TraverserData = TraverserData
    { list :: Maybe String
    , bounds :: Maybe TraverserBounds
    , rev :: Bool
    }

data ValidTraverserData = ValidTraverserData
    { validList :: String
    , validBounds :: TraverserBounds
    , validRev :: Bool
    }

type Translator = State (Map.Map String ValidTraverserData, [Py.PyStmt], Int)

getScoped :: Translator (Map.Map String ValidTraverserData)
getScoped = gets (\(m, _, _) -> m)

setScoped :: Map.Map String ValidTraverserData -> Translator ()
setScoped m = modify (\(_, ss, i) -> (m, ss, i))

scope :: Translator a -> Translator a
scope m = do
    s <- getScoped
    a <- m
    setScoped s
    return a

clearTraverser :: String -> Translator ()
clearTraverser s = modify (\(m, ss, i) -> (Map.delete s m, ss, i))
|
||||||
|
|
||||||
|
putTraverser :: String -> ValidTraverserData -> Translator ()
|
||||||
|
putTraverser s vtd = modify (\(m, ss, i) -> (Map.insert s vtd m, ss, i))
|
||||||
|
|
||||||
|
getTemp :: Translator String
|
||||||
|
getTemp = gets $ \(_, _, i) -> "temp" ++ show i
|
||||||
|
|
||||||
|
freshTemp :: Translator String
|
||||||
|
freshTemp = modify (second (+1)) >> getTemp
|
||||||
|
|
||||||
|
emitStatement :: Py.PyStmt -> Translator ()
|
||||||
|
emitStatement = modify . first . (:)
|
||||||
|
|
||||||
|
collectStatements :: Translator a -> Translator ([Py.PyStmt], a)
|
||||||
|
collectStatements t = do
|
||||||
|
modify (first $ const [])
|
||||||
|
a <- t
|
||||||
|
ss <- gets $ \(_, ss, _) -> ss
|
||||||
|
modify (first $ const [])
|
||||||
|
return (ss, a)
|
||||||
|
|
||||||
|
withdrawStatements :: Translator (Py.PyStmt) -> Translator [Py.PyStmt]
|
||||||
|
withdrawStatements ts =
|
||||||
|
(\(ss, s) -> ss ++ [s]) <$> (collectStatements ts)
|
||||||
|
|
||||||
|
requireTraverser :: String -> Translator ValidTraverserData
|
||||||
|
requireTraverser s = gets (\(m, _, _) -> Map.lookup s m) >>= handleMaybe
|
||||||
|
where
|
||||||
|
handleMaybe Nothing = fail "Invalid traverser"
|
||||||
|
handleMaybe (Just vtd) = return vtd
|
||||||
|
|
||||||
|
traverserIncrement :: Bool -> Py.PyExpr -> Py.PyExpr -> Py.PyExpr
|
||||||
|
traverserIncrement rev by e =
|
||||||
|
Py.BinOp op e (Py.BinOp Py.Multiply by (Py.IntLiteral 1))
|
||||||
|
where op = if rev then Py.Subtract else Py.Add
|
||||||
|
|
||||||
|
traverserValid :: Py.PyExpr -> ValidTraverserData -> Py.PyExpr
|
||||||
|
traverserValid e vtd =
|
||||||
|
case validBounds vtd of
|
||||||
|
Range f t ->
|
||||||
|
if validRev vtd
|
||||||
|
then Py.BinOp Py.GreaterThanEq e f
|
||||||
|
else Py.BinOp Py.LessThan e t
|
||||||
|
Random -> Py.BoolLiteral True
|
||||||
|
|
||||||
|
traverserStep :: String -> ValidTraverserData -> Py.PyStmt
|
||||||
|
traverserStep s vtd =
|
||||||
|
case validBounds vtd of
|
||||||
|
Range _ _ -> Py.Assign (Py.VarPat s) $ Py.BinOp op (Py.Var s) (Py.IntLiteral 1)
|
||||||
|
where op = if validRev vtd then Py.Subtract else Py.Add
|
||||||
|
Random -> traverserRandom s $ validList vtd
|
||||||
|
|
||||||
|
traverserRandom :: String -> String -> Py.PyStmt
|
||||||
|
traverserRandom s l =
|
||||||
|
Py.Assign (Py.VarPat s) $ Py.FunctionCall (Py.Var "random.randrange")
|
||||||
|
[Py.FunctionCall (Py.Var "len") [Py.Var l]]
|
||||||
|
|
||||||
|
hasVar :: String -> Py.PyPat -> Bool
|
||||||
|
hasVar s (Py.VarPat s') = s == s'
|
||||||
|
hasVar s (Py.TuplePat ps) = any (hasVar s) ps
|
||||||
|
hasVar s _ = False
|
||||||
|
|
||||||
|
substituteVariable :: String -> Py.PyExpr -> Py.PyExpr -> Py.PyExpr
|
||||||
|
substituteVariable s e (Py.BinOp o l r) =
|
||||||
|
Py.BinOp o (substituteVariable s e l) (substituteVariable s e r)
|
||||||
|
substituteVariable s e (Py.ListLiteral es) =
|
||||||
|
Py.ListLiteral $ map (substituteVariable s e) es
|
||||||
|
substituteVariable s e (Py.DictLiteral es) =
|
||||||
|
Py.DictLiteral $
|
||||||
|
map (first (substituteVariable s e) . second (substituteVariable s e)) es
|
||||||
|
substituteVariable s e (Py.Lambda ps e') =
|
||||||
|
Py.Lambda ps $ if any (hasVar s) ps then substituteVariable s e e' else e'
|
||||||
|
substituteVariable s e (Py.Var s')
|
||||||
|
| s == s' = e
|
||||||
|
| otherwise = Py.Var s'
|
||||||
|
substituteVariable s e (Py.TupleLiteral es) =
|
||||||
|
Py.TupleLiteral $ map (substituteVariable s e) es
|
||||||
|
substituteVariable s e (Py.FunctionCall e' es) =
|
||||||
|
Py.FunctionCall (substituteVariable s e e') $
|
||||||
|
map (substituteVariable s e) es
|
||||||
|
substituteVariable s e (Py.Access e' es) =
|
||||||
|
Py.Access (substituteVariable s e e') $
|
||||||
|
map (substituteVariable s e) es
|
||||||
|
substituteVariable s e (Py.Ternary i t e') =
|
||||||
|
Py.Ternary (substituteVariable s e i) (substituteVariable s e t)
|
||||||
|
(substituteVariable s e e')
|
||||||
|
substituteVariable s e (Py.Member e' m) =
|
||||||
|
Py.Member (substituteVariable s e e') m
|
||||||
|
substituteVariable s e (Py.In e1 e2) =
|
||||||
|
Py.In (substituteVariable s e e1) (substituteVariable s e e2)
|
||||||
|
substituteVariable s e (Py.NotIn e1 e2) =
|
||||||
|
Py.NotIn (substituteVariable s e e1) (substituteVariable s e e2)
|
||||||
|
substituteVariable s e (Py.Slice f t) =
|
||||||
|
Py.Slice (substituteVariable s e <$> f) (substituteVariable s e <$> t)
|
||||||
|
|
||||||
|
translateExpr :: Expr -> Translator Py.PyExpr
|
||||||
|
translateExpr (TraverserCall "pop" [Var s]) = do
|
||||||
|
l <- validList <$> requireTraverser s
|
||||||
|
return $ Py.FunctionCall (Py.Member (Py.Var l) "pop") [Py.Var s]
|
||||||
|
translateExpr (TraverserCall "pos" [Var s]) = do
|
||||||
|
requireTraverser s
|
||||||
|
return $ Py.Var s
|
||||||
|
translateExpr (TraverserCall "at" [Var s]) = do
|
||||||
|
l <- validList <$> requireTraverser s
|
||||||
|
return $ Py.Access (Py.Var l) [Py.Var s]
|
||||||
|
translateExpr (TraverserCall "at" [Var s, IntLiteral i]) = do
|
||||||
|
vtd <- requireTraverser s
|
||||||
|
return $ Py.Access (Py.Var $ validList vtd)
|
||||||
|
[traverserIncrement (validRev vtd) (Py.IntLiteral i) (Py.Var s)]
|
||||||
|
translateExpr (TraverserCall "step" [Var s]) = do
|
||||||
|
vtd <- requireTraverser s
|
||||||
|
emitStatement $ traverserStep s vtd
|
||||||
|
return $ Py.IntLiteral 0
|
||||||
|
translateExpr (TraverserCall "canstep" [Var s]) = do
|
||||||
|
vtd <- requireTraverser s
|
||||||
|
return $
|
||||||
|
traverserValid
|
||||||
|
(traverserIncrement (validRev vtd) (Py.IntLiteral 1) (Py.Var s)) vtd
|
||||||
|
translateExpr (TraverserCall "valid" [Var s]) = do
|
||||||
|
vtd <- requireTraverser s
|
||||||
|
return $ traverserValid (Py.Var s) vtd
|
||||||
|
translateExpr (TraverserCall "subset" [Var s1, Var s2]) = do
|
||||||
|
l1 <- validList <$> requireTraverser s1
|
||||||
|
l2 <- validList <$> requireTraverser s2
|
||||||
|
if l1 == l2
|
||||||
|
then return $ Py.Access (Py.Var l1) [Py.Slice (Just $ Py.Var s1) (Just $ Py.Var s2)]
|
||||||
|
else fail "Incompatible traversers!"
|
||||||
|
translateExpr (TraverserCall "bisect" [Var s, Lambda [x] e]) = do
|
||||||
|
vtd <- requireTraverser s
|
||||||
|
newTemp <- freshTemp
|
||||||
|
lambdaExpr <- translateExpr e
|
||||||
|
let access = Py.Access (Py.Var $ validList vtd) [Py.Var s]
|
||||||
|
let translated = substituteVariable x access lambdaExpr
|
||||||
|
let append s = Py.FunctionCall (Py.Member (Py.Var s) "append") [ access ]
|
||||||
|
let bisectStmt = Py.FunctionDef newTemp []
|
||||||
|
[ Py.Nonlocal [s]
|
||||||
|
, Py.Assign (Py.VarPat "l") (Py.ListLiteral [])
|
||||||
|
, Py.Assign (Py.VarPat "r") (Py.ListLiteral [])
|
||||||
|
, Py.While (traverserValid (Py.Var s) vtd)
|
||||||
|
[ Py.IfElse translated
|
||||||
|
[ Py.Standalone $ append "l" ]
|
||||||
|
[]
|
||||||
|
(Just [ Py.Standalone $ append "r" ])
|
||||||
|
, traverserStep s vtd
|
||||||
|
]
|
||||||
|
, Py.Return $ Py.TupleLiteral [Py.Var "l", Py.Var "r"]
|
||||||
|
]
|
||||||
|
emitStatement bisectStmt
|
||||||
|
return $ Py.FunctionCall (Py.Var newTemp) []
|
||||||
|
translateExpr (TraverserCall _ _) = fail "Invalid traverser operation"
|
||||||
|
translateExpr (FunctionCall f ps) = do
|
||||||
|
pes <- mapM translateExpr ps
|
||||||
|
return $ Py.FunctionCall (Py.Var f) pes
|
||||||
|
translateExpr (BinOp o l r) =
|
||||||
|
Py.BinOp (translateOp o) <$> translateExpr l <*> translateExpr r
|
||||||
|
translateExpr (Lambda ps e) =
|
||||||
|
Py.Lambda (map Py.VarPat ps) <$> translateExpr e
|
||||||
|
translateExpr (Var s) = return $ Py.Var s
|
||||||
|
translateExpr (IntLiteral i) = return $ Py.IntLiteral i
|
||||||
|
translateExpr (BoolLiteral b) = return $ Py.BoolLiteral b
|
||||||
|
translateExpr (ListLiteral es) = Py.ListLiteral <$> mapM translateExpr es
|
||||||
|
translateExpr (TupleLiteral es) = Py.TupleLiteral <$> mapM translateExpr es
|
||||||
|
|
||||||
|
applyOption :: TraverserData -> (String, Py.PyExpr) -> Maybe TraverserData
|
||||||
|
applyOption td ("list", Py.Var s) =
|
||||||
|
return $ td { list = Just s }
|
||||||
|
applyOption td ("span", Py.TupleLiteral [f, t]) =
|
||||||
|
return $ td { bounds = Just $ Range f t }
|
||||||
|
applyOption td ("random", Py.BoolLiteral True) =
|
||||||
|
return $ td { bounds = Just Random }
|
||||||
|
applyOption td ("reverse", Py.BoolLiteral b) =
|
||||||
|
return $ td { rev = b }
|
||||||
|
applyOption td _ = Nothing
|
||||||
|
|
||||||
|
translateOption :: (String, Expr) -> Translator (String, Py.PyExpr)
|
||||||
|
translateOption (s, e) = (,) s <$> translateExpr e
|
||||||
|
|
||||||
|
defaultTraverser :: TraverserData
|
||||||
|
defaultTraverser =
|
||||||
|
TraverserData { list = Nothing, bounds = Nothing, rev = False }
|
||||||
|
|
||||||
|
translateBranch :: Branch -> Translator (Py.PyExpr, [Py.PyStmt])
|
||||||
|
translateBranch (e, s) = (,) <$> translateExpr e <*>
|
||||||
|
(concat <$> mapM (withdrawStatements . translateStmt) s)
|
||||||
|
|
||||||
|
translateStmt :: Stmt -> Translator Py.PyStmt
|
||||||
|
translateStmt (IfElse i els e) = uncurry Py.IfElse
|
||||||
|
<$> (translateBranch i) <*> (mapM translateBranch els) <*> convertElse e
|
||||||
|
where
|
||||||
|
convertElse [] = return Nothing
|
||||||
|
convertElse es = Just . concat <$>
|
||||||
|
mapM (withdrawStatements . translateStmt) es
|
||||||
|
translateStmt (While b) = uncurry Py.While <$> translateBranch b
|
||||||
|
translateStmt (Traverser s os) =
|
||||||
|
foldlM applyOption defaultTraverser <$> mapM translateOption os >>= saveTraverser
|
||||||
|
where
|
||||||
|
saveTraverser :: Maybe TraverserData -> Translator Py.PyStmt
|
||||||
|
saveTraverser (Just (td@TraverserData { list = Just l, bounds = Just bs})) =
|
||||||
|
putTraverser s vtd $> translateInitialBounds s vtd
|
||||||
|
where
|
||||||
|
vtd = ValidTraverserData
|
||||||
|
{ validList = l
|
||||||
|
, validBounds = bs
|
||||||
|
, validRev = rev td
|
||||||
|
}
|
||||||
|
saveTraverser Nothing = fail "Invalid traverser (!)"
|
||||||
|
translateStmt (Let p e) = Py.Assign <$> translatePat p <*> translateExpr e
|
||||||
|
translateStmt (Return e) = Py.Return <$> translateExpr e
|
||||||
|
translateStmt (Standalone e) = Py.Standalone <$> translateExpr e
|
||||||
|
|
||||||
|
translateInitialBounds :: String -> ValidTraverserData -> Py.PyStmt
|
||||||
|
translateInitialBounds s vtd =
|
||||||
|
case (validBounds vtd, validRev vtd) of
|
||||||
|
(Random, _) -> traverserRandom s $ validList vtd
|
||||||
|
(Range l _, False) -> Py.Assign (Py.VarPat s) l
|
||||||
|
(Range _ r, True) -> Py.Assign (Py.VarPat s) r
|
||||||
|
|
||||||
|
translatePat :: Pat -> Translator Py.PyPat
|
||||||
|
translatePat (VarPat s) = clearTraverser s $> Py.VarPat s
|
||||||
|
translatePat (TuplePat ts) = Py.TuplePat <$> mapM translatePat ts
|
||||||
|
|
||||||
|
translateOp :: Op -> Py.PyBinOp
|
||||||
|
translateOp Add = Py.Add
|
||||||
|
translateOp Subtract = Py.Subtract
|
||||||
|
translateOp Multiply = Py.Multiply
|
||||||
|
translateOp Divide = Py.Divide
|
||||||
|
translateOp LessThan = Py.LessThan
|
||||||
|
translateOp LessThanEqual = Py.LessThanEq
|
||||||
|
translateOp GreaterThan = Py.GreaterThan
|
||||||
|
translateOp GreaterThanEqual = Py.GreaterThanEq
|
||||||
|
translateOp Equal = Py.Equal
|
||||||
|
translateOp NotEqual = Py.NotEqual
|
||||||
|
translateOp And = Py.And
|
||||||
|
translateOp Or = Py.Or
|
||||||
|
|
||||||
|
translateFunction :: Function -> [Py.PyStmt]
|
||||||
|
translateFunction (Function m s ps ss) = return $ Py.FunctionDef s ps $
|
||||||
|
[ Py.Standalone $ Py.FunctionCall (Py.Member (Py.Var p) "sort") []
|
||||||
|
| p <- take 1 ps, m == Sorted ] ++ stmts
|
||||||
|
where
|
||||||
|
stmts = concat $ evalState
|
||||||
|
(mapM (withdrawStatements . translateStmt) ss) (Map.empty, [], 0)
|
||||||
|
|
||||||
|
translate :: Prog -> [Py.PyStmt]
|
||||||
|
translate (Prog fs) =
|
||||||
|
(Py.FromImport "bisect" ["bisect"]) :
|
||||||
|
(Py.Import "random") : concatMap translateFunction fs
|
||||||
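For orientation, here is a minimal driver sketch (not part of this commit) showing how the pieces fit together: LanguageThree.parse produces a Prog, LanguageThree.translate lowers it to the Python AST, and PythonGen.translate (later in this diff) renders it as text. The Main module and the sample source string are hypothetical; the sample just follows the grammar defined by parseFunction and parseStmt above.

-- Hypothetical driver; module layout assumed, not taken from the commit.
module Main where

import qualified LanguageThree as L3
import qualified PythonGen as Gen

main :: IO ()
main =
    case L3.parse "<example>" "function identity(x) { return x; }" of
        Left err   -> print err
        Right prog -> putStrLn (Gen.translate (L3.translate prog))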
code/cs325-langs/src/LanguageTwo.hs
Normal file
@@ -0,0 +1,198 @@
module LanguageTwo where

import qualified PythonAst as Py
import qualified CommonParsing as P
import Data.Char
import Data.Functor
import Text.Parsec
import Text.Parsec.Char
import Text.Parsec.Combinator

{- Data Types -}
data Op
    = Add
    | Subtract
    | Multiply
    | Divide
    | Equal
    | NotEqual
    | And
    | Or

data Expr
    = IntLiteral Int
    | BinOp Op Expr Expr
    | Var String
    | Length Expr

data Stmt
    = IfElse Expr Stmt (Maybe Stmt)
    | Assign String Expr
    | Block [Stmt]

data Prog = Prog Expr [Stmt] [Stmt]

{- Parser -}
type Parser = Parsec String ()

parseVar :: Parser String
parseVar = P.var [ "if", "else", "state", "effect", "combine" ]

parseLength :: Parser Expr
parseLength = Length <$> P.surround '|' '|' parseExpr

parseParenthesized :: Parser Expr
parseParenthesized = P.surround '(' ')' parseExpr

parseBasic :: Parser Expr
parseBasic = choice
    [ IntLiteral <$> P.int
    , Var <$> parseVar
    , parseLength
    , parseParenthesized
    ]

parseExpr :: Parser Expr
parseExpr = P.precedence BinOp parseBasic
    [ P.op "*" Multiply <|> P.op "/" Divide
    , P.op "+" Add <|> P.op "-" Subtract
    , P.op "==" Equal <|> P.op "!=" NotEqual
    , P.op "&&" And
    , try $ P.op "||" Or
    ]

parseIf :: Parser Stmt
parseIf = do
    P.kwIf >> spaces
    c <- parseParenthesized
    t <- parseStmt <* spaces
    e <- (Just <$> (P.kwElse >> spaces *> parseStmt)) <|> return Nothing
    return $ IfElse c t e

parseBlockStmts :: Parser [Stmt]
parseBlockStmts = P.surround '{' '}' (many parseStmt)

parseBlock :: Parser Stmt
parseBlock = Block <$> parseBlockStmts

parseAssign :: Parser Stmt
parseAssign = Assign <$>
    (parseVar <* char '=' <* spaces) <*>
    parseExpr <* (char ';' >> spaces)

parseStmt :: Parser Stmt
parseStmt = choice
    [ parseIf
    , parseAssign
    , parseBlock
    ]

parseProgram :: Parser Prog
parseProgram = do
    state <- P.kwState >> spaces *> parseExpr <* char ';' <* spaces
    effect <- P.kwEffect >> spaces *> parseBlockStmts <* spaces
    combined <- P.kwCombine >> spaces *> parseBlockStmts <* spaces
    return $ Prog state effect combined

parse :: String -> String -> Either ParseError Prog
parse = runParser parseProgram ()

{- Translation -}
baseFunction :: Py.PyExpr -> [Py.PyStmt] -> [Py.PyStmt] -> Py.PyStmt
baseFunction s e c = Py.FunctionDef "prog" ["xs"] $
    [ Py.IfElse
        (Py.BinOp Py.LessThan
            (Py.FunctionCall (Py.Var "len") [Py.Var "xs"])
            (Py.IntLiteral 2))
        [Py.Return $ Py.TupleLiteral [s, Py.Var "xs"]]
        []
        Nothing
    , Py.Assign (Py.VarPat "leng")
        (Py.BinOp Py.FloorDiv
            (Py.FunctionCall (Py.Var "len") [Py.Var "xs"])
            (Py.IntLiteral 2))
    , Py.Assign (Py.VarPat "left")
        (Py.Access
            (Py.Var "xs")
            [Py.Slice Nothing $ Just (Py.Var "leng")])
    , Py.Assign (Py.VarPat "right")
        (Py.Access
            (Py.Var "xs")
            [Py.Slice (Just (Py.Var "leng")) Nothing])
    , Py.Assign (Py.TuplePat [Py.VarPat "ls", Py.VarPat "left"])
        (Py.FunctionCall (Py.Var "prog") [Py.Var "left"])
    , Py.Assign (Py.TuplePat [Py.VarPat "rs", Py.VarPat "right"])
        (Py.FunctionCall (Py.Var "prog") [Py.Var "right"])
    , Py.Standalone $
        Py.FunctionCall (Py.Member (Py.Var "left") "reverse") []
    , Py.Standalone $
        Py.FunctionCall (Py.Member (Py.Var "right") "reverse") []
    , Py.Assign (Py.VarPat "state") s
    , Py.Assign (Py.VarPat "source") (Py.IntLiteral 0)
    , Py.Assign (Py.VarPat "total") (Py.ListLiteral [])
    , Py.While
        (Py.BinOp Py.And
            (Py.BinOp Py.NotEqual (Py.Var "left") (Py.ListLiteral []))
            (Py.BinOp Py.NotEqual (Py.Var "right") (Py.ListLiteral []))) $
        [ Py.IfElse
            (Py.BinOp Py.LessThanEq
                (Py.Access (Py.Var "left") [Py.IntLiteral $ -1])
                (Py.Access (Py.Var "right") [Py.IntLiteral $ -1]))
            [ Py.Standalone $
                Py.FunctionCall (Py.Member (Py.Var "total") "append")
                    [Py.FunctionCall (Py.Member (Py.Var "left") "pop") []]
            , Py.Assign (Py.VarPat "source") (Py.IntLiteral 1)
            ]
            [] $
            Just
            [ Py.Standalone $
                Py.FunctionCall (Py.Member (Py.Var "total") "append")
                    [Py.FunctionCall (Py.Member (Py.Var "right") "pop") []]
            , Py.Assign (Py.VarPat "source") (Py.IntLiteral 2)
            ]
        ] ++ e
    ] ++ c ++
    [ Py.Standalone $ Py.FunctionCall (Py.Member (Py.Var "left") "reverse") []
    , Py.Standalone $ Py.FunctionCall (Py.Member (Py.Var "right") "reverse") []
    , Py.Return $ Py.TupleLiteral
        [ Py.Var "state"
        , foldl (Py.BinOp Py.Add) (Py.Var "total") [Py.Var "left", Py.Var "right"]
        ]
    ]

translateExpr :: Expr -> Py.PyExpr
translateExpr (IntLiteral i) = Py.IntLiteral i
translateExpr (BinOp op l r) =
    Py.BinOp (translateOp op) (translateExpr l) (translateExpr r)
translateExpr (Var s)
    | s == "SOURCE" = Py.Var "source"
    | s == "LEFT" = Py.Var "left"
    | s == "RIGHT" = Py.Var "right"
    | s == "STATE" = Py.Var "state"
    | s == "LSTATE" = Py.Var "ls"
    | s == "RSTATE" = Py.Var "rs"
    | s == "L" = Py.IntLiteral 1
    | s == "R" = Py.IntLiteral 2
    | otherwise = Py.Var s
translateExpr (Length e) = Py.FunctionCall (Py.Var "len") [translateExpr e]

translateOp :: Op -> Py.PyBinOp
translateOp Add = Py.Add
translateOp Subtract = Py.Subtract
translateOp Multiply = Py.Multiply
translateOp Divide = Py.Divide
translateOp Equal = Py.Equal
translateOp NotEqual = Py.NotEqual
translateOp And = Py.And
translateOp Or = Py.Or

translateStmt :: Stmt -> [Py.PyStmt]
translateStmt (IfElse c t e) =
    [Py.IfElse (translateExpr c) (translateStmt t) [] (translateStmt <$> e)]
translateStmt (Assign "STATE" e) = [Py.Assign (Py.VarPat "state") (translateExpr e)]
translateStmt (Assign v e) = [Py.Assign (Py.VarPat v) (translateExpr e)]
translateStmt (Block s) = concatMap translateStmt s

translate :: Prog -> [Py.PyStmt]
translate (Prog s e c) =
    [baseFunction (translateExpr s) (concatMap translateStmt e) (concatMap translateStmt c)]
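A LanguageTwo program only fills three holes of the fixed merge-sort skeleton built by baseFunction: the initial state expression, the per-comparison "effect" block, and the post-merge "combine" block. A sketch of what a source program might look like (hypothetical text; exact whitespace handling depends on the CommonParsing module, which is not shown in this diff):

-- Hypothetical LanguageTwo source: counts comparisons during the merge and
-- sums the left/right counters when the two halves are combined.
example :: String
example = unlines
    [ "state 0;"
    , "effect { STATE = STATE + 1; }"
    , "combine { STATE = LSTATE + RSTATE; }"
    ]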
code/cs325-langs/src/PythonAst.hs
Normal file
@@ -0,0 +1,52 @@
module PythonAst where

data PyBinOp
    = Add
    | Subtract
    | Multiply
    | Divide
    | FloorDiv
    | LessThan
    | LessThanEq
    | GreaterThan
    | GreaterThanEq
    | Equal
    | NotEqual
    | And
    | Or

data PyExpr
    = BinOp PyBinOp PyExpr PyExpr
    | IntLiteral Int
    | StrLiteral String
    | BoolLiteral Bool
    | ListLiteral [PyExpr]
    | DictLiteral [(PyExpr, PyExpr)]
    | Lambda [PyPat] PyExpr
    | Var String
    | TupleLiteral [PyExpr]
    | FunctionCall PyExpr [PyExpr]
    | Access PyExpr [PyExpr]
    | Ternary PyExpr PyExpr PyExpr
    | Member PyExpr String
    | In PyExpr PyExpr
    | NotIn PyExpr PyExpr
    | Slice (Maybe PyExpr) (Maybe PyExpr)

data PyPat
    = VarPat String
    | IgnorePat
    | TuplePat [PyPat]
    | AccessPat PyExpr [PyExpr]

data PyStmt
    = Assign PyPat PyExpr
    | IfElse PyExpr [PyStmt] [(PyExpr, [PyStmt])] (Maybe [PyStmt])
    | While PyExpr [PyStmt]
    | For PyPat PyExpr [PyStmt]
    | FunctionDef String [String] [PyStmt]
    | Return PyExpr
    | Standalone PyExpr
    | Import String
    | FromImport String [String]
    | Nonlocal [String]
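As a quick sanity check of the constructors above (a sketch, not part of the commit), the Python statement x = len(xs) corresponds to this AST value:

example :: PyStmt
example = Assign (VarPat "x") (FunctionCall (Var "len") [Var "xs"])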
code/cs325-langs/src/PythonGen.hs
Normal file
@@ -0,0 +1,142 @@
module PythonGen where

import PythonAst
import Data.List
import Data.Bifunctor
import Data.Maybe

indent :: String -> String
indent = ("    " ++)

stmtBlock :: [PyStmt] -> [String]
stmtBlock = concatMap translateStmt

block :: String -> [String] -> [String]
block s ss = (s ++ ":") : map indent ss

prefix :: String -> PyExpr -> [PyStmt] -> [String]
prefix s e sts = block (s ++ " " ++ translateExpr e) $ stmtBlock sts

if_ :: PyExpr -> [PyStmt] -> [String]
if_ = prefix "if"

elif :: PyExpr -> [PyStmt] -> [String]
elif = prefix "elif"

else_ :: [PyStmt] -> [String]
else_ = block "else" . stmtBlock

while :: PyExpr -> [PyStmt] -> [String]
while = prefix "while"

parenth :: String -> String
parenth s = "(" ++ s ++ ")"

translateStmt :: PyStmt -> [String]
translateStmt (Assign p e) = [translatePat p ++ " = " ++ translateExpr e]
translateStmt (IfElse i t es e) =
    if_ i t ++ concatMap (uncurry elif) es ++ maybe [] else_ e
translateStmt (While c t) = while c t
translateStmt (For x in_ b) = block head body
    where
        head = "for " ++ translatePat x ++ " in " ++ translateExpr in_
        body = stmtBlock b
translateStmt (FunctionDef s ps b) = block head body
    where
        head = "def " ++ s ++ "(" ++ intercalate "," ps ++ ")"
        body = stmtBlock b
translateStmt (Return e) = ["return " ++ translateExpr e]
translateStmt (Standalone e) = [translateExpr e]
translateStmt (Import s) = ["import " ++ s]
translateStmt (FromImport s ss) =
    ["from " ++ s ++ " import " ++ intercalate "," ss]
translateStmt (Nonlocal vs) =
    ["nonlocal " ++ intercalate "," vs]

precedence :: PyBinOp -> Int
precedence Add = 3
precedence Subtract = 3
precedence Multiply = 4
precedence Divide = 4
precedence FloorDiv = 4
precedence LessThan = 2
precedence LessThanEq = 2
precedence GreaterThan = 2
precedence GreaterThanEq = 2
precedence Equal = 2
precedence NotEqual = 2
precedence And = 1
precedence Or = 0

opString :: PyBinOp -> String
opString Add = "+"
opString Subtract = "-"
opString Multiply = "*"
opString Divide = "/"
opString FloorDiv = "//"
opString LessThan = "<"
opString LessThanEq = "<="
opString GreaterThan = ">"
opString GreaterThanEq = ">="
opString Equal = "=="
opString NotEqual = "!="
opString And = " and "
opString Or = " or "

translateOp :: PyBinOp -> PyBinOp -> PyExpr -> String
translateOp o o' =
    if precedence o > precedence o'
    then parenth . translateExpr
    else translateExpr

dictMapping :: PyExpr -> PyExpr -> String
dictMapping f t = translateExpr f ++ ": " ++ translateExpr t

list :: String -> String -> [PyExpr] -> String
list o c es = o ++ intercalate ", " (map translateExpr es) ++ c

translateExpr :: PyExpr -> String
translateExpr (BinOp o l@(BinOp o1 _ _) r@(BinOp o2 _ _)) =
    translateOp o o1 l ++ opString o ++ translateOp o o2 r
translateExpr (BinOp o l@(BinOp o1 _ _) r) =
    translateOp o o1 l ++ opString o ++ translateExpr r
translateExpr (BinOp o l r@(BinOp o2 _ _)) =
    translateExpr l ++ opString o ++ translateOp o o2 r
translateExpr (BinOp o l r) =
    translateExpr l ++ opString o ++ translateExpr r
translateExpr (IntLiteral i) = show i
translateExpr (StrLiteral s) = "\"" ++ s ++ "\""
-- Python spells its booleans "True" and "False".
translateExpr (BoolLiteral b) = if b then "True" else "False"
translateExpr (ListLiteral l) = list "[" "]" l
translateExpr (DictLiteral l) =
    "{" ++ intercalate ", " (map (uncurry dictMapping) l) ++ "}"
translateExpr (Lambda ps e) = parenth (head ++ ": " ++ body)
    where
        head = "lambda " ++ intercalate ", " (map translatePat ps)
        body = translateExpr e
translateExpr (Var s) = s
translateExpr (TupleLiteral es) = list "(" ")" es
translateExpr (FunctionCall f ps) = translateExpr f ++ list "(" ")" ps
translateExpr (Access (Var s) e) = s ++ list "[" "]" e
translateExpr (Access e@Access{} i) = translateExpr e ++ list "[" "]" i
translateExpr (Access e i) = "(" ++ translateExpr e ++ ")" ++ list "[" "]" i
translateExpr (Ternary c t e) =
    translateExpr t ++ " if " ++ translateExpr c ++ " else " ++ translateExpr e
translateExpr (Member (Var s) m) = s ++ "." ++ m
translateExpr (Member e@Member{} m) = translateExpr e ++ "." ++ m
translateExpr (Member e m) = "(" ++ translateExpr e ++ ")." ++ m
translateExpr (In m c) =
    "(" ++ translateExpr m ++ ") in (" ++ translateExpr c ++ ")"
translateExpr (NotIn m c) =
    "(" ++ translateExpr m ++ ") not in (" ++ translateExpr c ++ ")"
translateExpr (Slice l r) =
    maybe [] (parenth . translateExpr) l ++ ":" ++ maybe [] (parenth . translateExpr) r

translatePat :: PyPat -> String
translatePat (VarPat s) = s
translatePat IgnorePat = "_"
translatePat (TuplePat ps) =
    "(" ++ intercalate "," (map translatePat ps) ++ ")"
translatePat (AccessPat e es) = translateExpr (Access e es)

translate :: [PyStmt] -> String
translate = intercalate "\n" . concatMap translateStmt
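Tying the two previous files together (a sketch, not part of the commit): rendering a one-statement AST through translate. Note from the opString table that arithmetic operators are printed without surrounding spaces, so this yields the single line "x = 1+2".

demo :: String
demo = translate [Assign (VarPat "x") (BinOp Add (IntLiteral 1) (IntLiteral 2))]
-- demo == "x = 1+2"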
code/dawn/Dawn.v
Normal file
@@ -0,0 +1,179 @@
Require Import Coq.Lists.List.
From Ltac2 Require Import Ltac2.

Inductive intrinsic :=
  | swap
  | clone
  | drop
  | quote
  | compose
  | apply.

Inductive expr :=
  | e_int (i : intrinsic)
  | e_quote (e : expr)
  | e_comp (e1 e2 : expr).

Definition e_compose (e : expr) (es : list expr) := fold_left e_comp es e.

Inductive IsValue : expr -> Prop :=
  | Val_quote : forall {e : expr}, IsValue (e_quote e).

Definition value := { v : expr & IsValue v }.
Definition value_stack := list value.

Definition v_quote (e : expr) := existT IsValue (e_quote e) Val_quote.

Inductive Sem_int : value_stack -> intrinsic -> value_stack -> Prop :=
  | Sem_swap : forall (v v' : value) (vs : value_stack), Sem_int (v' :: v :: vs) swap (v :: v' :: vs)
  | Sem_clone : forall (v : value) (vs : value_stack), Sem_int (v :: vs) clone (v :: v :: vs)
  | Sem_drop : forall (v : value) (vs : value_stack), Sem_int (v :: vs) drop vs
  | Sem_quote : forall (v : value) (vs : value_stack), Sem_int (v :: vs) quote ((v_quote (projT1 v)) :: vs)
  | Sem_compose : forall (e e' : expr) (vs : value_stack), Sem_int (v_quote e' :: v_quote e :: vs) compose (v_quote (e_comp e e') :: vs)
  | Sem_apply : forall (e : expr) (vs vs': value_stack), Sem_expr vs e vs' -> Sem_int (v_quote e :: vs) apply vs'

with Sem_expr : value_stack -> expr -> value_stack -> Prop :=
  | Sem_e_int : forall (i : intrinsic) (vs vs' : value_stack), Sem_int vs i vs' -> Sem_expr vs (e_int i) vs'
  | Sem_e_quote : forall (e : expr) (vs : value_stack), Sem_expr vs (e_quote e) (v_quote e :: vs)
  | Sem_e_comp : forall (e1 e2 : expr) (vs1 vs2 vs3 : value_stack),
      Sem_expr vs1 e1 vs2 -> Sem_expr vs2 e2 vs3 -> Sem_expr vs1 (e_comp e1 e2) vs3.

Definition false : expr := e_quote (e_int drop).
Definition false_v : value := v_quote (e_int drop).

Definition true : expr := e_quote (e_comp (e_int swap) (e_int drop)).
Definition true_v : value := v_quote (e_comp (e_int swap) (e_int drop)).

Theorem false_correct : forall (v v' : value) (vs : value_stack), Sem_expr (v' :: v :: vs) (e_comp false (e_int apply)) (v :: vs).
Proof.
  intros v v' vs.
  eapply Sem_e_comp.
  - apply Sem_e_quote.
  - apply Sem_e_int. apply Sem_apply. apply Sem_e_int. apply Sem_drop.
Qed.

Theorem true_correct : forall (v v' : value) (vs : value_stack), Sem_expr (v' :: v :: vs) (e_comp true (e_int apply)) (v' :: vs).
Proof.
  intros v v' vs.
  eapply Sem_e_comp.
  - apply Sem_e_quote.
  - apply Sem_e_int. apply Sem_apply. eapply Sem_e_comp.
    * apply Sem_e_int. apply Sem_swap.
    * apply Sem_e_int. apply Sem_drop.
Qed.

Definition or : expr := e_comp (e_int clone) (e_int apply).

Theorem or_false_v : forall (v : value) (vs : value_stack), Sem_expr (false_v :: v :: vs) or (v :: vs).
Proof with apply Sem_e_int.
  intros v vs.
  eapply Sem_e_comp...
  - apply Sem_clone.
  - apply Sem_apply... apply Sem_drop.
Qed.

Theorem or_true : forall (v : value) (vs : value_stack), Sem_expr (true_v :: v :: vs) or (true_v :: vs).
Proof with apply Sem_e_int.
  intros v vs.
  eapply Sem_e_comp...
  - apply Sem_clone...
  - apply Sem_apply. eapply Sem_e_comp...
    * apply Sem_swap.
    * apply Sem_drop.
Qed.

Definition or_false_false := or_false_v false_v.
Definition or_false_true := or_false_v true_v.
Definition or_true_false := or_true false_v.
Definition or_true_true := or_true true_v.

Fixpoint quote_n (n : nat) :=
  match n with
  | O => e_int quote
  | S n' => e_compose (quote_n n') (e_int swap :: e_int quote :: e_int swap :: e_int compose :: nil)
  end.

Theorem quote_2_correct : forall (v1 v2 : value) (vs : value_stack),
  Sem_expr (v2 :: v1 :: vs) (quote_n 1) (v_quote (e_comp (projT1 v1) (projT1 v2)) :: vs).
Proof with apply Sem_e_int.
  intros v1 v2 vs. simpl.
  repeat (eapply Sem_e_comp)...
  - apply Sem_quote.
  - apply Sem_swap.
  - apply Sem_quote.
  - apply Sem_swap.
  - apply Sem_compose.
Qed.

Theorem quote_3_correct : forall (v1 v2 v3 : value) (vs : value_stack),
  Sem_expr (v3 :: v2 :: v1 :: vs) (quote_n 2) (v_quote (e_comp (projT1 v1) (e_comp (projT1 v2) (projT1 v3))) :: vs).
Proof with apply Sem_e_int.
  intros v1 v2 v3 vs. simpl.
  repeat (eapply Sem_e_comp)...
  - apply Sem_quote.
  - apply Sem_swap.
  - apply Sem_quote.
  - apply Sem_swap.
  - apply Sem_compose.
  - apply Sem_swap.
  - apply Sem_quote.
  - apply Sem_swap.
  - apply Sem_compose.
Qed.

Ltac2 rec solve_basic () := Control.enter (fun _ =>
  match! goal with
  | [|- Sem_int ?vs1 swap ?vs2] => apply Sem_swap
  | [|- Sem_int ?vs1 clone ?vs2] => apply Sem_clone
  | [|- Sem_int ?vs1 drop ?vs2] => apply Sem_drop
  | [|- Sem_int ?vs1 quote ?vs2] => apply Sem_quote
  | [|- Sem_int ?vs1 compose ?vs2] => apply Sem_compose
  | [|- Sem_int ?vs1 apply ?vs2] => apply Sem_apply
  | [|- Sem_expr ?vs1 (e_comp ?e1 ?e2) ?vs2] => eapply Sem_e_comp; solve_basic ()
  | [|- Sem_expr ?vs1 (e_int ?e) ?vs2] => apply Sem_e_int; solve_basic ()
  | [|- Sem_expr ?vs1 (e_quote ?e) ?vs2] => apply Sem_e_quote
  | [_ : _ |- _] => ()
  end).

Theorem quote_2_correct' : forall (v1 v2 : value) (vs : value_stack),
  Sem_expr (v2 :: v1 :: vs) (quote_n 1) (v_quote (e_comp (projT1 v1) (projT1 v2)) :: vs).
Proof. intros. simpl. solve_basic (). Qed.

Theorem quote_3_correct' : forall (v1 v2 v3 : value) (vs : value_stack),
  Sem_expr (v3 :: v2 :: v1 :: vs) (quote_n 2) (v_quote (e_comp (projT1 v1) (e_comp (projT1 v2) (projT1 v3))) :: vs).
Proof. intros. simpl. solve_basic (). Qed.

Definition rotate_n (n : nat) := e_compose (quote_n n) (e_int swap :: e_int quote :: e_int compose :: e_int apply :: nil).

Lemma eval_value : forall (v : value) (vs : value_stack),
  Sem_expr vs (projT1 v) (v :: vs).
Proof.
  intros v vs.
  destruct v. destruct i.
  simpl. apply Sem_e_quote.
Qed.

Theorem rotate_3_correct : forall (v1 v2 v3 : value) (vs : value_stack),
  Sem_expr (v3 :: v2 :: v1 :: vs) (rotate_n 1) (v1 :: v3 :: v2 :: vs).
Proof.
  intros. unfold rotate_n. simpl. solve_basic ().
  repeat (eapply Sem_e_comp); apply eval_value.
Qed.

Theorem rotate_4_correct : forall (v1 v2 v3 v4 : value) (vs : value_stack),
  Sem_expr (v4 :: v3 :: v2 :: v1 :: vs) (rotate_n 2) (v1 :: v4 :: v3 :: v2 :: vs).
Proof.
  intros. unfold rotate_n. simpl. solve_basic ().
  repeat (eapply Sem_e_comp); apply eval_value.
Qed.

Theorem e_comp_assoc : forall (e1 e2 e3 : expr) (vs vs' : value_stack),
  Sem_expr vs (e_comp e1 (e_comp e2 e3)) vs' <-> Sem_expr vs (e_comp (e_comp e1 e2) e3) vs'.
Proof.
  intros e1 e2 e3 vs vs'.
  split; intros Heval.
  - inversion Heval; subst. inversion H4; subst.
    eapply Sem_e_comp. eapply Sem_e_comp. apply H2. apply H3. apply H6.
  - inversion Heval; subst. inversion H2; subst.
    eapply Sem_e_comp. apply H3. eapply Sem_e_comp. apply H6. apply H4.
Qed.
code/dawn/DawnEval.v
Normal file
@@ -0,0 +1,254 @@
Require Import Coq.Lists.List.
Require Import DawnV2.
Require Import Coq.Program.Equality.
From Ltac2 Require Import Ltac2.

Inductive step_result :=
  | err
  | middle (e : expr) (s : value_stack)
  | final (s : value_stack).

Fixpoint eval_step (s : value_stack) (e : expr) : step_result :=
  match e, s with
  | e_int swap, v' :: v :: vs => final (v :: v' :: vs)
  | e_int clone, v :: vs => final (v :: v :: vs)
  | e_int drop, v :: vs => final vs
  | e_int quote, v :: vs => final (v_quote (value_to_expr v) :: vs)
  | e_int compose, (v_quote v2) :: (v_quote v1) :: vs => final (v_quote (e_comp v1 v2) :: vs)
  | e_int apply, (v_quote v1) :: vs => middle v1 vs
  | e_quote e', vs => final (v_quote e' :: vs)
  | e_comp e1 e2, vs =>
      match eval_step vs e1 with
      | final vs' => middle e2 vs'
      | middle e1' vs' => middle (e_comp e1' e2) vs'
      | err => err
      end
  | _, _ => err
  end.

Theorem eval_step_correct : forall (e : expr) (vs vs' : value_stack), Sem_expr vs e vs' ->
  (eval_step vs e = final vs') \/
  (exists (ei : expr) (vsi : value_stack),
    eval_step vs e = middle ei vsi /\
    Sem_expr vsi ei vs').
Proof.
  intros e vs vs' Hsem.
  (* Proceed by induction on the semantics. *)
  induction Hsem.
  - inversion H; (* The expression is just an intrinsic. *)
    (* Dismiss all the straightforward "final" cases,
       of which most intrinsics are. *)
    try (left; reflexivity).
    (* Only apply remains; we are in an intermediate / middle case. *)
    right.
    (* The semantics guarantee that the expression in the
       quote evaluates to the final state. *)
    exists e, vs0. auto.
  - (* The expression is a quote. This is yet another final case. *)
    left; reflexivity.
  - (* The composition is never a final step, since we have to evaluate both
       branches to "finish up". *)
    destruct IHHsem1; right.
    + (* If the left branch finished, only the right branch needs to be evaluated. *)
      simpl. rewrite H. exists e2, vs2. auto.
    + (* Otherwise, the left branch has an intermediate evaluation, guaranteed
         by induction to be consistent. *)
      destruct H as [ei [vsi [Heval Hsem']]].
      (* We compose the remaining part of the left branch with the right branch. *)
      exists (e_comp ei e2), vsi. simpl.
      (* The evaluation is trivially to a "middle" state. *)
      rewrite Heval. split. auto.
      eapply Sem_e_comp. apply Hsem'. apply Hsem2.
Qed.

Inductive eval_chain (vs : value_stack) (e : expr) (vs' : value_stack) : Prop :=
  | chain_final (P : eval_step vs e = final vs')
  | chain_middle (ei : expr) (vsi : value_stack)
      (P : eval_step vs e = middle ei vsi) (rest : eval_chain vsi ei vs').

Lemma eval_chain_merge : forall (e1 e2 : expr) (vs vs' vs'' : value_stack),
  eval_chain vs e1 vs' -> eval_chain vs' e2 vs'' -> eval_chain vs (e_comp e1 e2) vs''.
Proof.
  intros e1 e2 vs vs' vs'' ch1 ch2.
  induction ch1;
  eapply chain_middle; simpl; try (rewrite P); auto.
Qed.

Lemma eval_chain_split : forall (e1 e2 : expr) (vs vs'' : value_stack),
  eval_chain vs (e_comp e1 e2) vs'' -> exists vs', (eval_chain vs e1 vs') /\ (eval_chain vs' e2 vs'').
Proof.
  intros e1 e2 vs vss'' ch.
  ltac1:(dependent induction ch).
  - simpl in P. destruct (eval_step vs e1); inversion P.
  - simpl in P. destruct (eval_step vs e1) eqn:Hval; try (inversion P).
    + injection P as Hinj; subst. specialize (IHch e e2 H0) as [s'0 [ch1 ch2]].
      eexists. split.
      * eapply chain_middle. apply Hval. apply ch1.
      * apply ch2.
    + subst. eexists. split.
      * eapply chain_final. apply Hval.
      * apply ch.
Qed.

Theorem val_step_sem : forall (e : expr) (vs vs' : value_stack),
  Sem_expr vs e vs' -> eval_chain vs e vs'
with eval_step_int : forall (i : intrinsic) (vs vs' : value_stack),
  Sem_int vs i vs' -> eval_chain vs (e_int i) vs'.
Proof.
  - intros e vs vs' Hsem.
    induction Hsem.
    + (* This is an intrinsic, which is handled by the second
         theorem, eval_step_int. This lemma is used here. *)
      auto.
    + (* A quote doesn't have a next step, and so is final. *)
      apply chain_final. auto.
    + (* In composition, by induction, we know that the two sub-expressions produce
         proper evaluation chains. Chains can be composed (via eval_chain_merge). *)
      eapply eval_chain_merge; eauto.
  - intros i vs vs' Hsem.
    (* The evaluation chain depends on the specific intrinsic in use. *)
    inversion Hsem; subst;
    (* Most intrinsics produce a final value, and the evaluation chain is trivial. *)
    try (apply chain_final; auto; fail).
    (* Only apply is non-final. The first step is popping the quote from the stack,
       and the rest of the steps are given by the evaluation of the code in the quote. *)
    apply chain_middle with e vs0; auto.
Qed.

Ltac2 Type exn ::= [ | Not_intrinsic ].

Ltac2 rec destruct_n (n : int) (vs : constr) : unit :=
  if Int.le n 0 then () else
  let v := Fresh.in_goal @v in
  let vs' := Fresh.in_goal @vs in
  destruct $vs as [|$v $vs']; Control.enter (fun () =>
    try (destruct_n (Int.sub n 1) (Control.hyp vs'))
  ).

Ltac2 int_arity (int : constr) : int :=
  match! int with
  | swap => 2
  | clone => 1
  | drop => 1
  | quote => 1
  | compose => 2
  | apply => 1
  | _ => Control.throw Not_intrinsic
  end.

Ltac2 destruct_int_stack (int : constr) (va: constr) := destruct_n (int_arity int) va.

Ltac2 ensure_valid_stack () := Control.enter (fun () =>
  match! goal with
  | [h : eval_step ?a (e_int ?b) = ?c |- _] =>
    let h := Control.hyp h in
    destruct_int_stack b a;
    try (inversion $h; fail)
  | [|- _ ] => ()
  end).

Theorem test : forall (vs vs': value_stack), eval_step vs (e_int swap) = final vs' ->
  exists v1 v2 vs'', vs = v1 :: v2 :: vs'' /\ vs' = v2 :: v1 :: vs''.
Proof.
  intros s s' Heq.
  ensure_valid_stack ().
  simpl in Heq. injection Heq as Hinj. subst. eauto.
Qed.

Theorem eval_step_final_sem : forall (e : expr) (vs vs' : value_stack),
  eval_step vs e = final vs' -> Sem_expr vs e vs'.
Proof.
  intros e vs vs' Hev. destruct e.
  - destruct i; ensure_valid_stack ();
    (* Get rid of trivial cases that match one-to-one. *)
    simpl in Hev; try (injection Hev as Hinj; subst; solve_basic ()).
    + (* compose with one quoted value is not final, but an error. *)
      destruct v. inversion Hev.
    + (* compose with two quoted values. *)
      destruct v; destruct v0.
      injection Hev as Hinj; subst; solve_basic ().
    + (* Apply is not final. *) destruct v. inversion Hev.
  - (* Quote is always final, trivially, and the semantics match easily. *)
    simpl in Hev. injection Hev as Hinj; subst. solve_basic ().
  - (* Compose is never final, so we don't need to handle it here. *)
    simpl in Hev. destruct (eval_step vs e1); inversion Hev.
Qed.

Theorem eval_step_middle_sem : forall (e ei: expr) (vs vsi vs' : value_stack),
  eval_step vs e = middle ei vsi ->
  Sem_expr vsi ei vs' ->
  Sem_expr vs e vs'.
Proof.
  intros e. induction e; intros ei vs vsi vs' Hev Hsem.
  - destruct i; ensure_valid_stack ().
    + (* compose with one quoted value; invalid. *)
      destruct v. inversion Hev.
    + (* compose with two quoted values; not a middle step. *)
      destruct v; destruct v0. inversion Hev.
    + (* Apply *)
      destruct v. injection Hev as Hinj; subst.
      solve_basic (). auto.
  - (* Quoting an expression is not a middle step. *)
    inversion Hev.
  - simpl in Hev.
    destruct (eval_step vs e1) eqn:Hev1.
    + (* Step led to an error, which can't happen in a chain. *)
      inversion Hev.
    + (* Left expression makes a non-final step. Milk this for equalities first. *)
      injection Hev as Hinj; subst.
      (* The rest of the program (e_comp e e2) evaluates using our semantics,
         which means that both e and e2 evaluate using our semantics. *)
      inversion Hsem; subst.
      (* By induction, e1 evaluates using our semantics if e does, which we just confirmed. *)
      specialize (IHe1 e vs vsi vs2 Hev1 H2).
      (* The composition rule can now be applied. *)
      eapply Sem_e_comp; eauto.
    + (* Left expression makes a final step. Milk this for equalities first. *)
      injection Hev as Hinj; subst.
      (* Using eval_step_final_sem, we know that e1 evaluates to the intermediate
         state given our semantics. *)
      specialize (eval_step_final_sem e1 vs vsi Hev1) as Hsem1.
      (* The composition rule can now be applied. *)
      eapply Sem_e_comp; eauto.
Qed.

Theorem eval_step_sem_back : forall (e : expr) (vs vs' : value_stack),
  eval_chain vs e vs' -> Sem_expr vs e vs'.
Proof.
  intros e vs vs' ch.
  ltac1:(dependent induction ch).
  - apply eval_step_final_sem. auto.
  - specialize (eval_step_middle_sem e ei vs vsi vs' P IHch). auto.
Qed.

Corollary eval_step_no_sem : forall (e : expr) (vs vs' : value_stack),
  ~(Sem_expr vs e vs') -> ~(eval_chain vs e vs').
Proof.
  intros e vs vs' Hnsem Hch.
  specialize (eval_step_sem_back _ _ _ Hch). auto.
Qed.

Require Extraction.
Require Import ExtrHaskellBasic.
Extraction Language Haskell.
Set Extraction KeepSingleton.
Extraction "UccGen.hs" expr eval_step true false or.

Remark eval_swap_two_values : forall (vs vs' : value_stack),
  eval_step vs (e_int swap) = final vs' -> exists v1 v2 vst, vs = v1 :: v2 :: vst /\ vs' = v2 :: v1 :: vst.
Proof.
  intros vs vs' Hev.
  (* Can't proceed until we know more about the stack. *)
  destruct vs as [|v1 [|v2 vs]].
  - (* Invalid case; empty stack. *) inversion Hev.
  - (* Invalid case; stack only has one value. *) inversion Hev.
  - (* Valid case: the stack has two values. *) injection Hev. eauto.
Qed.

Remark eval_swap_two_values' : forall (vs vs' : value_stack),
  eval_step vs (e_int swap) = final vs' -> exists v1 v2 vst, vs = v1 :: v2 :: vst /\ vs' = v2 :: v1 :: vst.
Proof.
  intros vs vs' Hev.
  ensure_valid_stack ().
  injection Hev. eauto.
Qed.
code/dawn/DawnV2.v
Normal file
@@ -0,0 +1,179 @@
Require Import Coq.Lists.List.
From Ltac2 Require Import Ltac2.

Inductive intrinsic :=
  | swap
  | clone
  | drop
  | quote
  | compose
  | apply.

Inductive expr :=
  | e_int (i : intrinsic)
  | e_quote (e : expr)
  | e_comp (e1 e2 : expr).

Definition e_compose (e : expr) (es : list expr) := fold_left e_comp es e.

Inductive value := v_quote (e : expr).
Definition value_stack := list value.

Definition value_to_expr (v : value) : expr :=
  match v with
  | v_quote e => e_quote e
  end.

Inductive Sem_int : value_stack -> intrinsic -> value_stack -> Prop :=
  | Sem_swap : forall (v v' : value) (vs : value_stack), Sem_int (v' :: v :: vs) swap (v :: v' :: vs)
  | Sem_clone : forall (v : value) (vs : value_stack), Sem_int (v :: vs) clone (v :: v :: vs)
  | Sem_drop : forall (v : value) (vs : value_stack), Sem_int (v :: vs) drop vs
  | Sem_quote : forall (v : value) (vs : value_stack), Sem_int (v :: vs) quote ((v_quote (value_to_expr v)) :: vs)
  | Sem_compose : forall (e e' : expr) (vs : value_stack), Sem_int (v_quote e' :: v_quote e :: vs) compose (v_quote (e_comp e e') :: vs)
  | Sem_apply : forall (e : expr) (vs vs': value_stack), Sem_expr vs e vs' -> Sem_int (v_quote e :: vs) apply vs'

with Sem_expr : value_stack -> expr -> value_stack -> Prop :=
  | Sem_e_int : forall (i : intrinsic) (vs vs' : value_stack), Sem_int vs i vs' -> Sem_expr vs (e_int i) vs'
  | Sem_e_quote : forall (e : expr) (vs : value_stack), Sem_expr vs (e_quote e) (v_quote e :: vs)
  | Sem_e_comp : forall (e1 e2 : expr) (vs1 vs2 vs3 : value_stack),
      Sem_expr vs1 e1 vs2 -> Sem_expr vs2 e2 vs3 -> Sem_expr vs1 (e_comp e1 e2) vs3.

Definition false : expr := e_quote (e_int drop).
Definition false_v : value := v_quote (e_int drop).

Definition true : expr := e_quote (e_comp (e_int swap) (e_int drop)).
Definition true_v : value := v_quote (e_comp (e_int swap) (e_int drop)).

Theorem false_correct : forall (v v' : value) (vs : value_stack), Sem_expr (v' :: v :: vs) (e_comp false (e_int apply)) (v :: vs).
Proof.
  intros v v' vs.
  eapply Sem_e_comp.
  - apply Sem_e_quote.
  - apply Sem_e_int. apply Sem_apply. apply Sem_e_int. apply Sem_drop.
Qed.

Theorem true_correct : forall (v v' : value) (vs : value_stack), Sem_expr (v' :: v :: vs) (e_comp true (e_int apply)) (v' :: vs).
Proof.
  intros v v' vs.
  eapply Sem_e_comp.
  - apply Sem_e_quote.
  - apply Sem_e_int. apply Sem_apply. eapply Sem_e_comp.
    * apply Sem_e_int. apply Sem_swap.
    * apply Sem_e_int. apply Sem_drop.
Qed.

Definition or : expr := e_comp (e_int clone) (e_int apply).

Theorem or_false_v : forall (v : value) (vs : value_stack), Sem_expr (false_v :: v :: vs) or (v :: vs).
Proof with apply Sem_e_int.
  intros v vs.
  eapply Sem_e_comp...
  - apply Sem_clone.
  - apply Sem_apply... apply Sem_drop.
Qed.

Theorem or_true : forall (v : value) (vs : value_stack), Sem_expr (true_v :: v :: vs) or (true_v :: vs).
Proof with apply Sem_e_int.
  intros v vs.
  eapply Sem_e_comp...
  - apply Sem_clone...
  - apply Sem_apply. eapply Sem_e_comp...
    * apply Sem_swap.
    * apply Sem_drop.
Qed.

Definition or_false_false := or_false_v false_v.
Definition or_false_true := or_false_v true_v.
Definition or_true_false := or_true false_v.
Definition or_true_true := or_true true_v.

Fixpoint quote_n (n : nat) :=
  match n with
  | O => e_int quote
  | S n' => e_compose (quote_n n') (e_int swap :: e_int quote :: e_int swap :: e_int compose :: nil)
  end.

Theorem quote_2_correct : forall (v1 v2 : value) (vs : value_stack),
  Sem_expr (v2 :: v1 :: vs) (quote_n 1) (v_quote (e_comp (value_to_expr v1) (value_to_expr v2)) :: vs).
Proof with apply Sem_e_int.
  intros v1 v2 vs. simpl.
  repeat (eapply Sem_e_comp)...
  - apply Sem_quote.
  - apply Sem_swap.
  - apply Sem_quote.
  - apply Sem_swap.
  - apply Sem_compose.
Qed.

Theorem quote_3_correct : forall (v1 v2 v3 : value) (vs : value_stack),
  Sem_expr (v3 :: v2 :: v1 :: vs) (quote_n 2) (v_quote (e_comp (value_to_expr v1) (e_comp (value_to_expr v2) (value_to_expr v3))) :: vs).
Proof with apply Sem_e_int.
  intros v1 v2 v3 vs. simpl.
  repeat (eapply Sem_e_comp)...
  - apply Sem_quote.
  - apply Sem_swap.
  - apply Sem_quote.
  - apply Sem_swap.
  - apply Sem_compose.
  - apply Sem_swap.
  - apply Sem_quote.
  - apply Sem_swap.
  - apply Sem_compose.
Qed.

Ltac2 rec solve_basic () := Control.enter (fun _ =>
  match! goal with
  | [|- Sem_int ?vs1 swap ?vs2] => apply Sem_swap
  | [|- Sem_int ?vs1 clone ?vs2] => apply Sem_clone
  | [|- Sem_int ?vs1 drop ?vs2] => apply Sem_drop
  | [|- Sem_int ?vs1 quote ?vs2] => apply Sem_quote
  | [|- Sem_int ?vs1 compose ?vs2] => apply Sem_compose
  | [|- Sem_int ?vs1 apply ?vs2] => apply Sem_apply
  | [|- Sem_expr ?vs1 (e_comp ?e1 ?e2) ?vs2] => eapply Sem_e_comp; solve_basic ()
  | [|- Sem_expr ?vs1 (e_int ?e) ?vs2] => apply Sem_e_int; solve_basic ()
  | [|- Sem_expr ?vs1 (e_quote ?e) ?vs2] => apply Sem_e_quote
  | [_ : _ |- _] => ()
  end).

Theorem quote_2_correct' : forall (v1 v2 : value) (vs : value_stack),
  Sem_expr (v2 :: v1 :: vs) (quote_n 1) (v_quote (e_comp (value_to_expr v1) (value_to_expr v2)) :: vs).
Proof. intros. simpl. solve_basic (). Qed.

Theorem quote_3_correct' : forall (v1 v2 v3 : value) (vs : value_stack),
  Sem_expr (v3 :: v2 :: v1 :: vs) (quote_n 2) (v_quote (e_comp (value_to_expr v1) (e_comp (value_to_expr v2) (value_to_expr v3))) :: vs).
Proof. intros. simpl. solve_basic (). Qed.

Definition rotate_n (n : nat) := e_compose (quote_n n) (e_int swap :: e_int quote :: e_int compose :: e_int apply :: nil).

Lemma eval_value : forall (v : value) (vs : value_stack),
  Sem_expr vs (value_to_expr v) (v :: vs).
Proof.
  intros v vs.
  destruct v.
  simpl. apply Sem_e_quote.
Qed.

Theorem rotate_3_correct : forall (v1 v2 v3 : value) (vs : value_stack),
  Sem_expr (v3 :: v2 :: v1 :: vs) (rotate_n 1) (v1 :: v3 :: v2 :: vs).
Proof.
  intros. unfold rotate_n. simpl. solve_basic ().
  repeat (eapply Sem_e_comp); apply eval_value.
Qed.

Theorem rotate_4_correct : forall (v1 v2 v3 v4 : value) (vs : value_stack),
  Sem_expr (v4 :: v3 :: v2 :: v1 :: vs) (rotate_n 2) (v1 :: v4 :: v3 :: v2 :: vs).
Proof.
  intros. unfold rotate_n. simpl. solve_basic ().
  repeat (eapply Sem_e_comp); apply eval_value.
Qed.

Theorem e_comp_assoc : forall (e1 e2 e3 : expr) (vs vs' : value_stack),
  Sem_expr vs (e_comp e1 (e_comp e2 e3)) vs' <-> Sem_expr vs (e_comp (e_comp e1 e2) e3) vs'.
Proof.
  intros e1 e2 e3 vs vs'.
  split; intros Heval.
  - inversion Heval; subst. inversion H4; subst.
    eapply Sem_e_comp. eapply Sem_e_comp. apply H2. apply H3. apply H6.
  - inversion Heval; subst. inversion H2; subst.
|
eapply Sem_e_comp. apply H3. eapply Sem_e_comp. apply H6. apply H4.
|
||||||
|
Qed.
|
||||||
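The proofs above pin down how the quoted booleans behave. As a cross-check, here is a self-contained Haskell sketch of the same semantics (my illustration, not part of the diff; names mirror the Coq definitions, and evalI/evalE are my executable stand-ins for the Sem_int/Sem_expr relations):

data Intr = Swap | Clone | Drop | Quote | Compose | Apply deriving Show
data Expr = EInt Intr | EQuote Expr | EComp Expr Expr deriving Show
newtype Value = VQuote Expr deriving Show

evalI :: Intr -> [Value] -> Maybe [Value]
evalI Swap    (a : b : vs)                = Just (b : a : vs)
evalI Clone   (a : vs)                    = Just (a : a : vs)
evalI Drop    (_ : vs)                    = Just vs
evalI Quote   (VQuote e : vs)             = Just (VQuote (EQuote e) : vs)
evalI Compose (VQuote e' : VQuote e : vs) = Just (VQuote (EComp e e') : vs)
evalI Apply   (VQuote e : vs)             = evalE e vs
evalI _       _                           = Nothing

evalE :: Expr -> [Value] -> Maybe [Value]
evalE (EInt i)      vs = evalI i vs
evalE (EQuote e)    vs = Just (VQuote e : vs)
evalE (EComp e1 e2) vs = evalE e1 vs >>= evalE e2

falseE, trueE, orE :: Expr
falseE = EQuote (EInt Drop)                     -- false = [drop]
trueE  = EQuote (EComp (EInt Swap) (EInt Drop)) -- true  = [swap drop]
orE    = EComp (EInt Clone) (EInt Apply)        -- or    = clone apply

For example, evalE orE [VQuote (EInt Drop), VQuote (EComp (EInt Swap) (EInt Drop))] yields Just [VQuote (EComp (EInt Swap) (EInt Drop))], i.e. "or false true = true": clone copies the boolean on top and apply runs one copy against the other, matching or_false_v and or_true above.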
code/dawn/Ucc.hs (new file, 64 lines)
@@ -0,0 +1,64 @@
module Ucc where

import UccGen

import Text.Parsec
import Data.Functor.Identity
import Control.Applicative hiding ((<|>))
import System.IO

instance Show Intrinsic where
    show Swap = "swap"
    show Clone = "clone"
    show Drop = "drop"
    show Quote = "quote"
    show Compose = "compose"
    show Apply = "apply"

instance Show Expr where
    show (E_int i) = show i
    show (E_quote e) = "[" ++ show e ++ "]"
    show (E_comp e1 e2) = show e1 ++ " " ++ show e2

instance Show Value where
    show (V_quote e) = show (E_quote e)

type Parser a = ParsecT String () Identity a

intrinsic :: Parser Intrinsic
intrinsic = (<* spaces) $ foldl1 (<|>) $ map (\(s, i) -> try (string s >> return i))
    [ ("swap", Swap)
    , ("clone", Clone)
    , ("drop", Drop)
    , ("quote", Quote)
    , ("compose", Compose)
    , ("apply", Apply)
    ]

expression :: Parser Expr
expression = foldl1 E_comp <$> many1 single
    where
        single
            = (E_int <$> intrinsic)
            <|> (fmap E_quote $ char '[' *> spaces *> expression <* char ']' <* spaces)

parseExpression :: String -> Either ParseError Expr
parseExpression = runParser expression () "<inline>"

eval :: [Value] -> Expr -> Maybe [Value]
eval s e =
    case eval_step s e of
        Err -> Nothing
        Final s' -> Just s'
        Middle e' s' -> eval s' e'

main :: IO ()
main = do
    putStr "> "
    hFlush stdout
    str <- getLine
    case parseExpression str of
        Right e ->
            case eval [] e of
                Just st -> putStrLn $ show st
                _ -> putStrLn "Evaluation error"
        _ -> putStrLn "Parse error"
    main
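Ucc.hs leans on UccGen for its core types and single-step evaluator. Below is a sketch of the interface it assumes, reconstructed from the uses above (the constructor names and eval_step all appear in the code; the Result type name is my placeholder, and the real module is not shown in this diff):

data Intrinsic = Swap | Clone | Drop | Quote | Compose | Apply
data Expr = E_int Intrinsic | E_quote Expr | E_comp Expr Expr
data Value = V_quote Expr
data Result = Err | Final [Value] | Middle Expr [Value]

eval_step :: [Value] -> Expr -> Result
eval_step = undefined  -- supplied by UccGen; not shown in this diff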
code/dyno-alloy/DynoAlloy.als (new file, 202 lines)
@@ -0,0 +1,202 @@
enum Flag {Method, MethodOrField, Public}

/* There is a negative version of each flag (METHOD and NOT_METHOD).
   Model this as two sets, one of positive flags and one of negative flags,
   and interpret the bitfield to be a conjunction of both. */
sig Bitfield {
  , positiveFlags: set Flag
  , negativeFlags: set Flag
}

/* A filter state has filterFlags and excludeFlags, both represented as conjunctions. */
sig FilterState {
  , curFilter: Bitfield
}

/* Initially, no search has happened for a scope, so its 'found' is not set to anything. */
one sig NotSet {}

/* Finally, there's a search state (whether or not a particular scope has already been
   searched with a particular configuration). */
one sig SearchState {
  , var found: Bitfield + NotSet
}

pred bitfieldEmpty[b: Bitfield] {
  #b.positiveFlags = 0 and #b.negativeFlags = 0
}

pred bitfieldEqual[b1: Bitfield, b2: Bitfield] {
  b1.positiveFlags = b2.positiveFlags and b1.negativeFlags = b2.negativeFlags
}

pred bitfieldIntersection[b1: Bitfield, b2: Bitfield, b3: Bitfield] {
  b3.positiveFlags = b1.positiveFlags & b2.positiveFlags
  b3.negativeFlags = b1.negativeFlags & b2.negativeFlags
}

pred bitfieldSubset[b1: Bitfield, b2: Bitfield] {
  b1.positiveFlags in b2.positiveFlags
  b1.negativeFlags in b2.negativeFlags
}

pred bitfieldIncomparable[b1: Bitfield, b2: Bitfield] {
  not bitfieldSubset[b1, b2]
  not bitfieldSubset[b2, b1]
}

pred addBitfieldFlag[b1: Bitfield, b2: Bitfield, flag: Flag] {
  b2.positiveFlags = b1.positiveFlags + flag
  b2.negativeFlags = b1.negativeFlags
}

pred addBitfieldFlagNeg[b1: Bitfield, b2: Bitfield, flag: Flag] {
  b2.negativeFlags = b1.negativeFlags + flag
  b2.positiveFlags = b1.positiveFlags
}

enum Property { PMethod, PField, PPublic }

sig Symbol {
  properties: set Property
}

pred flagMatchesProperty[flag: Flag, property: Property] {
  (flag = Method and property = PMethod) or
  (flag = MethodOrField and (property = PMethod or property = PField)) or
  (flag = Public and property = PPublic)
}

pred bitfieldMatchesProperties[bitfield: Bitfield, symbol: Symbol] {
  all flag: bitfield.positiveFlags | some property: symbol.properties | flagMatchesProperty[flag, property]
  all flag: bitfield.negativeFlags | no property: symbol.properties | flagMatchesProperty[flag, property]
}

bitfieldExists: run {
  some Bitfield
}

matchingBitfieldExists: run {
  some bitfield : Bitfield, symbol : Symbol | bitfieldMatchesProperties[bitfield, symbol]
}

matchingBitfieldExists2: run {
  some bitfield : Bitfield, symbol : Symbol {
    #bitfield.positiveFlags = 1
    #bitfield.negativeFlags = 1
    #symbol.properties = 2
    bitfieldMatchesProperties[bitfield, symbol]
  }
}

fact "method and field are incompatible" {
  always no symbol: Symbol | {
    PMethod in symbol.properties and PField in symbol.properties
  }
}

fact "public and field are incompatible" {
  always no symbol: Symbol | {
    PPublic in symbol.properties and PField in symbol.properties
  }
}

matchingBitfieldExists3: run {
  some bitfield : Bitfield, symbol : Symbol {
    #bitfield.positiveFlags = 2
    #symbol.properties = 2
    bitfieldMatchesProperties[bitfield, symbol]
  }
}

pred possibleState[filterState: FilterState] {
  some initialState: FilterState {
    // Each lookup in scope starts with empty filter flags
    bitfieldEmpty[initialState.curFilter]

    // The intermediate states (bitfieldMiddle) are used for sequencing of operations.
    some bitfieldMiddle : Bitfield {
      // Add "Public" depending on skipPrivateVisibilities
      addBitfieldFlag[initialState.curFilter, bitfieldMiddle, Public] or
      bitfieldEqual[initialState.curFilter, bitfieldMiddle]

      // If it's a method receiver, add the method-or-field restriction
      addBitfieldFlag[bitfieldMiddle, filterState.curFilter, MethodOrField] or
      // if it's not a receiver, filter to non-methods (could be overridden)
      // addBitfieldFlagNeg[bitfieldMiddle, filterState.curFilter, Method] or
      // Maybe methods are not being filtered but it's not a receiver, so no change.
      bitfieldEqual[bitfieldMiddle, filterState.curFilter]
    }
  }
}

possibleStateExists: run {
  some filterState : FilterState | possibleState[filterState] and #filterState.curFilter.positiveFlags = 1
}

pred update[toSet: Bitfield + NotSet, setTo: FilterState] {
  toSet' in Bitfield and bitfieldIntersection[toSet, setTo.curFilter, toSet']
}

pred newUpdate[toSet: Bitfield + NotSet, setTo: FilterState] {
  (not bitfieldIncomparable[toSet, setTo.curFilter] and update[toSet, setTo]) or
  (bitfieldIncomparable[toSet, setTo.curFilter] and toSet = toSet')
}

pred updateOrSet[toSet: Bitfield + NotSet, setTo: FilterState] {
  (toSet in NotSet and toSet' = setTo.curFilter) or
  (toSet not in NotSet and update[toSet, setTo])
}

pred excludeBitfield[found: Bitfield + NotSet, exclude: Bitfield] {
  (found != NotSet and bitfieldEqual[found, exclude]) or
  (found = NotSet and bitfieldEmpty[exclude])
}

fact init {
  all searchState: SearchState | searchState.found = NotSet
}

fact step {
  always {
    // Model that a new doLookupInScope could've occurred, with any combination of flags.
    all searchState: SearchState {
      some fs: FilterState {
        // This is a possible combination of lookup flags
        possibleState[fs]

        // If a search has been performed before, take the intersection; otherwise,
        // just insert the current filter flags.
        updateOrSet[searchState.found, fs]
      }
    }
  }
}

counterexampleNotFound: run {
  all searchState: SearchState {
    // Look for a way that subsequent results of searching will miss things.
    eventually some symbol: Symbol,
      fs: FilterState, fsBroken: FilterState,
      exclude1: Bitfield, exclude2: Bitfield {
      // Some search (fs) will cause a transition / modification of the search state...
      possibleState[fs]
      updateOrSet[searchState.found, fs]
      excludeBitfield[searchState.found, exclude1]
      // Such that a later, valid search (fsBroken)...
      possibleState[fsBroken]
      excludeBitfield[searchState.found', exclude2]

      // ... will allow for a symbol ...
      // ... that was left out of the original search...
      not bitfieldMatchesProperties[searchState.found, symbol]
      // ... and out of the current search
      not (bitfieldMatchesProperties[fs.curFilter, symbol] and not bitfieldMatchesProperties[exclude1, symbol])
      // But would be matched by the broken search...
      bitfieldMatchesProperties[fsBroken.curFilter, symbol]
      // ... to not be matched by a search with the new state:
      not (bitfieldMatchesProperties[fsBroken.curFilter, symbol] and not bitfieldMatchesProperties[exclude2, symbol])
    }
  }
}
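The data being modeled is simple enough to restate concretely. A sketch (my illustration, not part of the diff) of the bitfield the Alloy predicates manipulate: two flag sets, where 'update' intersects componentwise, so a recorded search result can only ever shrink toward what every search so far had in common:

import qualified Data.Set as Set

data Flag = Method | MethodOrField | Public deriving (Eq, Ord, Show)

data Bitfield = Bitfield
  { positiveFlags :: Set.Set Flag
  , negativeFlags :: Set.Set Flag
  } deriving (Eq, Show)

-- mirrors bitfieldIntersection: intersect both components
intersection :: Bitfield -> Bitfield -> Bitfield
intersection b1 b2 = Bitfield
  (positiveFlags b1 `Set.intersection` positiveFlags b2)
  (negativeFlags b1 `Set.intersection` negativeFlags b2)

-- mirrors bitfieldSubset: both components contained
subset :: Bitfield -> Bitfield -> Bool
subset b1 b2 =
  positiveFlags b1 `Set.isSubsetOf` positiveFlags b2
    && negativeFlags b1 `Set.isSubsetOf` negativeFlags b2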
code/patterns/patterns.rb (new file, 68 lines)
@@ -0,0 +1,68 @@
require 'victor'

def sum_digits(n)
  while n > 9
    n = n.to_s.chars.map(&:to_i).sum
  end
  n
end

def step(x, y, n, dir)
  case dir
  when :top
    return [x,y+n,:right]
  when :right
    return [x+n,y,:bottom]
  when :bottom
    return [x,y-n,:left]
  when :left
    return [x-n,y,:top]
  end
end

def run_number(number)
  counter = 1
  x, y, dir = 0, 0, :top
  line_stack = [[0,0]]

  loop do
    x, y, dir = step(x, y, sum_digits(counter*number), dir)
    line_stack << [x,y]
    counter += 1
    break if x == 0 && y == 0
  end
  return make_svg(line_stack)
end

def make_svg(line_stack)
  line_length = 20
  xs = line_stack.map { |c| c[0] }
  ys = line_stack.map { |c| c[1] }

  x_offset = -xs.min
  y_offset = -ys.min
  svg_coords = ->(p) {
    nx, ny = p
    [(nx+x_offset)*line_length + line_length/2, (ny+y_offset)*line_length + line_length/2]
  }

  max_width = (xs.max - xs.min).abs * line_length + line_length
  max_height = (ys.max - ys.min).abs * line_length + line_length
  svg = Victor::SVG.new width: max_width, height: max_height

  style = { stroke: 'black', stroke_width: 5 }
  svg.build do
    line_stack.each_cons(2) do |pair|
      p1, p2 = pair
      x1, y1 = svg_coords.call(p1)
      x2, y2 = svg_coords.call(p2)
      line x1: x1, y1: y1, x2: x2, y2: y2, style: style
      circle cx: x2, cy: y2, r: line_length/6, style: style, fill: 'black'
    end
  end
  return svg
end

(1..9).each do |i|
  run_number(i).save "pattern_#{i}"
end
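The repeated digit sum computed by sum_digits is the digital root, which depends only on the input mod 9; a one-line equivalent for positive n (my illustration, not part of the diff):

digitalRoot :: Int -> Int
digitalRoot n = 1 + (n - 1) `mod` 9

Since the step length depends only on counter*number mod 9 and the direction cycles with period 4, the step sequence repeats every 36 steps, and over each full period the up/down and left/right steps cancel, so the loop's break condition is eventually reached.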
code/patterns/patterns_genbase.rb (new file, 62 lines)
@@ -0,0 +1,62 @@
require 'victor'

BASE = 4
DIRS = 7

def sum_digits(n)
  x = n % BASE
  x == 0 ? BASE : x
end

def step(x, y, n, dir)
  return [n*Math.cos(2*Math::PI/DIRS*dir), n*Math.sin(2*Math::PI/DIRS*dir), (dir+1) % DIRS]
end

def run_number(number)
  counter = 1
  x, y, dir = 0.0, 0.0, 0
  line_stack = [[0,0]]

  (BASE/BASE.gcd(number) * DIRS).times do |i|
    dx, dy, dir = step(x, y, sum_digits(i*number), dir)
    x += dx
    y += dy
    line_stack << [x,y]
  end

  puts line_stack.to_s
  return make_svg(line_stack)
end

def make_svg(line_stack)
  line_length = 20
  xs = line_stack.map { |c| c[0] }
  ys = line_stack.map { |c| c[1] }

  x_offset = -xs.min
  y_offset = -ys.min
  svg_coords = ->(p) {
    nx, ny = p
    [(nx+x_offset)*line_length + line_length/2, (ny+y_offset)*line_length + line_length/2]
  }

  max_width = (xs.max - xs.min).abs * line_length + line_length
  max_height = (ys.max - ys.min).abs * line_length + line_length
  svg = Victor::SVG.new width: max_width, height: max_height

  style = { stroke: 'black', stroke_width: 5 }
  svg.build do
    line_stack.each_cons(2) do |pair|
      p1, p2 = pair
      x1, y1 = svg_coords.call(p1)
      x2, y2 = svg_coords.call(p2)
      line x1: x1, y1: y1, x2: x2, y2: y2, style: style
      circle cx: x2, cy: y2, r: line_length/6, style: style, fill: 'black'
    end
  end
  return svg
end

(1..10).each do |i|
  run_number(i).save "pattern_#{i}"
end
code/server-config (submodule added at 98cffe0954)
code/time-traveling/TakeMax.hs (new file, 21 lines)
@@ -0,0 +1,21 @@
takeUntilMax :: [Int] -> Int -> (Int, [Int])
takeUntilMax [] m = (m, [])
takeUntilMax [x] _ = (x, [x])
takeUntilMax (x:xs) m
    | x == m = (x, [x])
    | otherwise =
        let (m', xs') = takeUntilMax xs m
         in (max m' x, x:xs')

doTakeUntilMax :: [Int] -> [Int]
doTakeUntilMax l = l'
    where (m, l') = takeUntilMax l m

takeUntilMax' :: [Int] -> Int -> (Int, [Int])
takeUntilMax' [] m = (m, [])
takeUntilMax' [x] _ = (x, [x])
takeUntilMax' (x:xs) m
    | x == m = (maximum (x:xs), [x])
    | otherwise =
        let (m', xs') = takeUntilMax' xs m
         in (max m' x, x:xs')
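Both definitions tie a lazy knot: the maximum ultimately returned is fed back in as the cutoff argument m. For reference, the specification they are aiming at, written as a straightforward two-pass program (my sketch, not part of the diff): keep elements up to and including the first occurrence of the list's maximum.

takeUntilMaxTwoPass :: [Int] -> [Int]
takeUntilMaxTwoPass [] = []
takeUntilMaxTwoPass l  = go l
  where
    m = maximum l  -- first pass: find the maximum
    go []          = []
    go (x : xs)    -- second pass: cut at its first occurrence
      | x == m    = [x]
      | otherwise = x : go xs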
code/time-traveling/ValueScore.hs (new file, 28 lines)
@@ -0,0 +1,28 @@
import Data.Map as Map
import Data.Maybe
import Control.Applicative

data Element = A | B | C | D
    deriving (Eq, Ord, Show)

addElement :: Element -> Map Element Int -> Map Element Int
addElement = alter ((<|> Just 1) . fmap (+1))

getScore :: Element -> Map Element Int -> Float
getScore e m = fromMaybe 1.0 $ ((1.0/) . fromIntegral) <$> Map.lookup e m

data BinaryTree a = Empty | Node a (BinaryTree a) (BinaryTree a) deriving Show
type ElementTree = BinaryTree Element
type ScoredElementTree = BinaryTree (Element, Float)

assignScores :: ElementTree -> Map Element Int -> (Map Element Int, ScoredElementTree)
assignScores Empty m = (Map.empty, Empty)
assignScores (Node e t1 t2) m = (m', Node (e, getScore e m) t1' t2')
    where
        (m1, t1') = assignScores t1 m
        (m2, t2') = assignScores t2 m
        m' = addElement e $ unionWith (+) m1 m2

doAssignScores :: ElementTree -> ScoredElementTree
doAssignScores t = t'
    where (m, t') = assignScores t m
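Here the circular definition in doAssignScores is benign: building the count map never inspects m, only the per-node score thunks do, and by the time they are forced the map is complete. A small worked example (my illustration, not part of the diff):

exampleTree :: ElementTree
exampleTree = Node A (Node B Empty Empty) (Node A Empty Empty)

-- doAssignScores exampleTree counts {A: 2, B: 1}, so it evaluates to
--   Node (A, 0.5) (Node (B, 1.0) Empty Empty) (Node (A, 0.5) Empty Empty)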
code/typeclass-prolog/kb.pl (new file, 7 lines)
@@ -0,0 +1,7 @@
show(unit).
show(list(X)) :- show(X).
show(pair(X,Y)) :- show(X), show(Y).

eq(X) :- ord(X).
eq(nat).
ord(nat).
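These clauses encode typeclass instances as a Prolog knowledge base: a body ":-" plays the role of an instance context ("=>" read right-to-left), and eq(X) :- ord(X) is the superclass rule that every Ord type is also Eq. A sketch of the instance declarations they mirror (my illustration, using stand-in classes, not part of the diff):

class MyShow a                                  -- show(...)
class MyEq a                                    -- eq(...)
class MyEq a => MyOrd a                         -- eq(X) :- ord(X).

data Unit = Unit
instance MyShow Unit                            -- show(unit).
instance MyShow a => MyShow [a]                 -- show(list(X)) :- show(X).
instance (MyShow a, MyShow b) => MyShow (a, b)  -- show(pair(X,Y)) :- show(X), show(Y).

newtype Nat = Nat Int
instance MyEq Nat                               -- eq(nat).
instance MyOrd Nat                              -- ord(nat).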
code/typesafe-imperative/TypesafeImp.idr (new file, 102 lines)
@@ -0,0 +1,102 @@
data Reg = A | B | R

data Ty = IntTy | BoolTy

TypeState : Type
TypeState = (Ty, Ty, Ty)

getRegTy : Reg -> TypeState -> Ty
getRegTy A (a, _, _) = a
getRegTy B (_, b, _) = b
getRegTy R (_, _, r) = r

setRegTy : Reg -> Ty -> TypeState -> TypeState
setRegTy A a (_, b, r) = (a, b, r)
setRegTy B b (a, _, r) = (a, b, r)
setRegTy R r (a, b, _) = (a, b, r)

data Expr : TypeState -> Ty -> Type where
  Lit : Int -> Expr s IntTy
  Load : (r : Reg) -> Expr s (getRegTy r s)
  Add : Expr s IntTy -> Expr s IntTy -> Expr s IntTy
  Leq : Expr s IntTy -> Expr s IntTy -> Expr s BoolTy
  Not : Expr s BoolTy -> Expr s BoolTy

mutual
  data Stmt : TypeState -> TypeState -> TypeState -> Type where
    Store : (r : Reg) -> Expr s t -> Stmt l s (setRegTy r t s)
    If : Expr s BoolTy -> Prog l s n -> Prog l s n -> Stmt l s n
    Loop : Prog s s s -> Stmt l s s
    Break : Stmt s s s

  data Prog : TypeState -> TypeState -> TypeState -> Type where
    Nil : Prog l s s
    (::) : Stmt l s n -> Prog l n m -> Prog l s m

initialState : TypeState
initialState = (IntTy, IntTy, IntTy)

testProg : Prog Main.initialState Main.initialState Main.initialState
testProg =
  [ Store A (Lit 1 `Leq` Lit 2)
  , If (Load A)
      [ Store A (Lit 1) ]
      [ Store A (Lit 2) ]
  , Store B (Lit 2)
  , Store R (Add (Load A) (Load B))
  ]

prodProg : Prog Main.initialState Main.initialState Main.initialState
prodProg =
  [ Store A (Lit 7)
  , Store B (Lit 9)
  , Store R (Lit 0)
  , Loop
      [ If (Load A `Leq` Lit 0)
          [ Break ]
          [ Store R (Load R `Add` Load B)
          , Store A (Load A `Add` Lit (-1))
          ]
      ]
  ]

repr : Ty -> Type
repr IntTy = Int
repr BoolTy = Bool

data State : TypeState -> Type where
  MkState : (repr a, repr b, repr c) -> State (a, b, c)

getReg : (r : Reg) -> State s -> repr (getRegTy r s)
getReg A (MkState (a, _, _)) = a
getReg B (MkState (_, b, _)) = b
getReg R (MkState (_, _, r)) = r

setReg : (r : Reg) -> repr t -> State s -> State (setRegTy r t s)
setReg A a (MkState (_, b, r)) = MkState (a, b, r)
setReg B b (MkState (a, _, r)) = MkState (a, b, r)
setReg R r (MkState (a, b, _)) = MkState (a, b, r)

expr : Expr s t -> State s -> repr t
expr (Lit i) _ = i
expr (Load r) s = getReg r s
expr (Add l r) s = expr l s + expr r s
expr (Leq l r) s = expr l s <= expr r s
expr (Not e) s = not $ expr e s

mutual
  stmt : Stmt l s n -> State s -> Either (State l) (State n)
  stmt (Store r e) s = Right $ setReg r (expr e s) s
  stmt (If c t e) s = if expr c s then prog t s else prog e s
  stmt (Loop p) s =
    case prog p s >>= stmt (Loop p) of
      Right s => Right s
      Left s => Right s
  stmt Break s = Left s

  prog : Prog l s n -> State s -> Either (State l) (State n)
  prog Nil s = Right s
  prog (st::p) s = stmt st s >>= prog p

run : Prog l s l -> State s -> State l
run p s = either id id $ prog p s
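The Either in stmt/prog encodes control flow: Left carries the state out of the enclosing Loop when Break runs, and the Loop case converts either outcome back into Right before continuing. The same break protocol in plain Haskell, stripped of the type-state indexing (my sketch, not part of the diff):

loopEither :: (s -> Either s s) -> s -> s
loopEither body s =
  case body s of
    Left s'  -> s'                  -- Break reached: exit the loop
    Right s' -> loopEither body s'  -- body finished normally: iterate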
code/typesafe-interpreter/TypesafeIntr.idr (new file, 64 lines)
@@ -0,0 +1,64 @@
data ExprType
  = IntType
  | BoolType
  | StringType

repr : ExprType -> Type
repr IntType = Int
repr BoolType = Bool
repr StringType = String

data Op
  = Add
  | Subtract
  | Multiply
  | Divide

data Expr
  = IntLit Int
  | BoolLit Bool
  | StringLit String
  | BinOp Op Expr Expr

data SafeExpr : ExprType -> Type where
  IntLiteral : Int -> SafeExpr IntType
  BoolLiteral : Bool -> SafeExpr BoolType
  StringLiteral : String -> SafeExpr StringType
  BinOperation : (repr a -> repr b -> repr c) -> SafeExpr a -> SafeExpr b -> SafeExpr c

typecheckOp : Op -> (a : ExprType) -> (b : ExprType) -> Either String (c : ExprType ** repr a -> repr b -> repr c)
typecheckOp Add IntType IntType = Right (IntType ** (+))
typecheckOp Subtract IntType IntType = Right (IntType ** (-))
typecheckOp Multiply IntType IntType = Right (IntType ** (*))
typecheckOp Divide IntType IntType = Right (IntType ** div)
typecheckOp _ _ _ = Left "Invalid binary operator application"

typecheck : Expr -> Either String (n : ExprType ** SafeExpr n)
typecheck (IntLit i) = Right (_ ** IntLiteral i)
typecheck (BoolLit b) = Right (_ ** BoolLiteral b)
typecheck (StringLit s) = Right (_ ** StringLiteral s)
typecheck (BinOp o l r) = do
  (lt ** le) <- typecheck l
  (rt ** re) <- typecheck r
  (ot ** f) <- typecheckOp o lt rt
  pure (_ ** BinOperation f le re)

eval : SafeExpr t -> repr t
eval (IntLiteral i) = i
eval (BoolLiteral b) = b
eval (StringLiteral s) = s
eval (BinOperation f l r) = f (eval l) (eval r)

resultStr : {t : ExprType} -> repr t -> String
resultStr {t=IntType} i = show i
resultStr {t=BoolType} b = show b
resultStr {t=StringType} s = show s

tryEval : Expr -> String
tryEval ex =
  case typecheck ex of
    Left err => "Type error: " ++ err
    Right (t ** e) => resultStr $ eval {t} e

main : IO ()
main = putStrLn $ tryEval $ BinOp Add (IntLit 6) (BinOp Multiply (IntLit 160) (IntLit 2))
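The SafeExpr index is what makes eval total: each constructor fixes the type its evaluation produces, so no error cases remain after typechecking. A rough Haskell analogue using a GADT (my sketch, not part of the diff; the type parameter stands in for the Idris ExprType index):

{-# LANGUAGE GADTs #-}

data SafeExpr t where
  IntLiteral    :: Int    -> SafeExpr Int
  BoolLiteral   :: Bool   -> SafeExpr Bool
  StringLiteral :: String -> SafeExpr String
  BinOperation  :: (a -> b -> c) -> SafeExpr a -> SafeExpr b -> SafeExpr c

-- total: the index t tells us exactly what each case returns
eval :: SafeExpr t -> t
eval (IntLiteral i)       = i
eval (BoolLiteral b)      = b
eval (StringLiteral s)    = s
eval (BinOperation f l r) = f (eval l) (eval r)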
code/typesafe-interpreter/TypesafeIntrV2.idr (new file, 99 lines)
@@ -0,0 +1,99 @@
data ExprType
  = IntType
  | BoolType
  | StringType

repr : ExprType -> Type
repr IntType = Int
repr BoolType = Bool
repr StringType = String

intBoolImpossible : IntType = BoolType -> Void
intBoolImpossible Refl impossible

intStringImpossible : IntType = StringType -> Void
intStringImpossible Refl impossible

boolStringImpossible : BoolType = StringType -> Void
boolStringImpossible Refl impossible

decEq : (a : ExprType) -> (b : ExprType) -> Dec (a = b)
decEq IntType IntType = Yes Refl
decEq BoolType BoolType = Yes Refl
decEq StringType StringType = Yes Refl
decEq IntType BoolType = No intBoolImpossible
decEq BoolType IntType = No $ intBoolImpossible . sym
decEq IntType StringType = No intStringImpossible
decEq StringType IntType = No $ intStringImpossible . sym
decEq BoolType StringType = No boolStringImpossible
decEq StringType BoolType = No $ boolStringImpossible . sym

data Op
  = Add
  | Subtract
  | Multiply
  | Divide

data Expr
  = IntLit Int
  | BoolLit Bool
  | StringLit String
  | BinOp Op Expr Expr
  | IfElse Expr Expr Expr

data SafeExpr : ExprType -> Type where
  IntLiteral : Int -> SafeExpr IntType
  BoolLiteral : Bool -> SafeExpr BoolType
  StringLiteral : String -> SafeExpr StringType
  BinOperation : (repr a -> repr b -> repr c) -> SafeExpr a -> SafeExpr b -> SafeExpr c
  IfThenElse : SafeExpr BoolType -> SafeExpr t -> SafeExpr t -> SafeExpr t

typecheckOp : Op -> (a : ExprType) -> (b : ExprType) -> Either String (c : ExprType ** repr a -> repr b -> repr c)
typecheckOp Add IntType IntType = Right (IntType ** (+))
typecheckOp Subtract IntType IntType = Right (IntType ** (-))
typecheckOp Multiply IntType IntType = Right (IntType ** (*))
typecheckOp Divide IntType IntType = Right (IntType ** div)
typecheckOp _ _ _ = Left "Invalid binary operator application"

requireBool : (n : ExprType ** SafeExpr n) -> Either String (SafeExpr BoolType)
requireBool (BoolType ** e) = Right e
requireBool _ = Left "Not a boolean."

typecheck : Expr -> Either String (n : ExprType ** SafeExpr n)
typecheck (IntLit i) = Right (_ ** IntLiteral i)
typecheck (BoolLit b) = Right (_ ** BoolLiteral b)
typecheck (StringLit s) = Right (_ ** StringLiteral s)
typecheck (BinOp o l r) = do
  (lt ** le) <- typecheck l
  (rt ** re) <- typecheck r
  (ot ** f) <- typecheckOp o lt rt
  pure (_ ** BinOperation f le re)
typecheck (IfElse c t e) =
  do
    ce <- typecheck c >>= requireBool
    (tt ** te) <- typecheck t
    (et ** ee) <- typecheck e
    case decEq tt et of
      Yes p => pure (_ ** IfThenElse ce (replace p te) ee)
      No _ => Left "Incompatible branch types."

eval : SafeExpr t -> repr t
eval (IntLiteral i) = i
eval (BoolLiteral b) = b
eval (StringLiteral s) = s
eval (BinOperation f l r) = f (eval l) (eval r)
eval (IfThenElse c t e) = if (eval c) then (eval t) else (eval e)

resultStr : {t : ExprType} -> repr t -> String
resultStr {t=IntType} i = show i
resultStr {t=BoolType} b = show b
resultStr {t=StringType} s = show s

tryEval : Expr -> String
tryEval ex =
  case typecheck ex of
    Left err => "Type error: " ++ err
    Right (t ** e) => resultStr $ eval {t} e

main : IO ()
main = putStrLn $ tryEval $ BinOp Add (IfElse (BoolLit True) (IntLit 6) (IntLit 7)) (BinOp Multiply (IntLit 160) (IntLit 2))
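The new ingredient over the first version is decEq plus replace: a runtime comparison of the two branch types that produces a proof of their equality, which typecheck then uses to coerce one branch to the other's type. In Haskell terms this corresponds to Data.Type.Equality's (:~:) applied to a singleton for ExprType (my sketch, not part of the diff):

{-# LANGUAGE DataKinds, GADTs, KindSignatures, TypeOperators #-}

import Data.Type.Equality ((:~:)(..))

data ExprType = IntType | BoolType | StringType

-- singleton connecting a runtime tag to the type-level ExprType
data STy (t :: ExprType) where
  SInt    :: STy 'IntType
  SBool   :: STy 'BoolType
  SString :: STy 'StringType

-- matching on Refl lets the typechecker unify the two indices,
-- playing the role of Idris's `replace`
decEq :: STy a -> STy b -> Maybe (a :~: b)
decEq SInt    SInt    = Just Refl
decEq SBool   SBool   = Just Refl
decEq SString SString = Just Refl
decEq _       _       = Nothing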
Some files were not shown because too many files have changed in this diff.