diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 4daaa986a2..37a217d2a0 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -15,7 +15,7 @@ links to the major sections: * [Helpful Links and Information](#helpful-links-and-information) If you have questions, please make a post on [internals.rust-lang.org][internals] or -hop on the [Rust Discord server][rust-discord], [Rust Zulip server][rust-zulip] or [#rust-internals][pound-rust-internals]. +hop on the [Rust Discord server][rust-discord] or [Rust Zulip server][rust-zulip]. As a reminder, all contributors are expected to follow our [Code of Conduct][coc]. @@ -25,7 +25,6 @@ to contribute to it in more detail than this document. If this is your first time contributing, the [walkthrough] chapter of the guide can give you a good example of how a typical contribution would go. -[pound-rust-internals]: https://chat.mibbit.com/?server=irc.mozilla.org&channel=%23rust-internals [internals]: https://internals.rust-lang.org [rust-discord]: http://discord.gg/rust-lang [rust-zulip]: https://rust-lang.zulipchat.com @@ -129,6 +128,14 @@ the master branch to your feature branch. Also, please make sure that fixup commits are squashed into other related commits with meaningful commit messages. +GitHub allows [closing issues using keywords][closing-keywords]. This feature +should be used to keep the issue tracker tidy. However, it is generally preferred +to put the "closes #123" text in the PR description rather than the issue commit; +particularly during rebasing, citing the issue number in the commit can "spam" +the issue in question. + +[closing-keywords]: https://help.github.com/en/articles/closing-issues-using-keywords + Please make sure your pull request is in compliance with Rust's style guidelines by running @@ -404,7 +411,7 @@ If you're looking for somewhere to start, check out the [E-easy][eeasy] tag. There are a number of other ways to contribute to Rust that don't deal with this repository. -Answer questions in [#rust][pound-rust], or on [users.rust-lang.org][users], +Answer questions in the _Get Help!_ channels from the [Rust Discord server][rust-discord], on [users.rust-lang.org][users], or on [StackOverflow][so]. Participate in the [RFC process](https://github.com/rust-lang/rfcs). @@ -413,7 +420,7 @@ Find a [requested community library][community-library], build it, and publish it to [Crates.io](http://crates.io). Easier said than done, but very, very valuable! 
-[pound-rust]: http://chat.mibbit.com/?server=irc.mozilla.org&channel=%23rust +[rust-discord]: https://discord.gg/rust-lang [users]: https://users.rust-lang.org/ [so]: http://stackoverflow.com/questions/tagged/rust [community-library]: https://github.com/rust-lang/rfcs/labels/A-community-library diff --git a/Cargo.lock b/Cargo.lock index 5bc1938fee..f44ed3d07f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -27,16 +27,16 @@ dependencies = [ [[package]] name = "ammonia" -version = "2.1.2" +version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "384d704f242a0a9faf793fff775a0be6ab9aa27edabffa097331d73779142520" +checksum = "9e266e1f4be5ffa05309f650e2586fe1d3ae6034eb24025a7ae1dfecc330823a" dependencies = [ "html5ever", "lazy_static 1.3.0", "maplit", "matches", "tendril", - "url 1.7.2", + "url 2.1.0", ] [[package]] @@ -108,10 +108,16 @@ dependencies = [ ] [[package]] -name = "backtrace" -version = "0.3.37" +name = "autocfg" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5180c5a20655b14a819b652fd2378fa5f1697b6c9ddad3e695c2f9cedf6df4e2" +checksum = "b671c8fb71b457dd4ae18c4ba1e59aa81793daacc361d82fcd410cef0d491875" + +[[package]] +name = "backtrace" +version = "0.3.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "924c76597f0d9ca25d762c25a4d369d51267536465dc5064bdf0eb073ed477ea" dependencies = [ "backtrace-sys", "cfg-if", @@ -123,9 +129,9 @@ dependencies = [ [[package]] name = "backtrace-sys" -version = "0.1.30" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b3a000b9c543553af61bc01cbfc403b04b5caa9e421033866f2e98061eb3e61" +checksum = "5d6575f128516de27e3ce99689419835fce9643a9b215a14d2b5b685be018491" dependencies = [ "cc", "compiler_builtins", @@ -133,20 +139,11 @@ dependencies = [ "rustc-std-workspace-core", ] -[[package]] -name = "base64" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b25d992356d2eb0ed82172f5248873db5560c4721f564b13cb5193bda5e668e" -dependencies = [ - "byteorder", -] - [[package]] name = "bitflags" -version = "1.1.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d155346769a6855b86399e9bc3814ab343cd3d62c7e985113d46a0ec3c281fd" +checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" [[package]] name = "blake2-rfc" @@ -243,7 +240,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40ade3d27603c2cb345eb0912aec461a6dec7e06a4ae48589904e808335c7afa" dependencies = [ "byteorder", - "either", "iovec", ] @@ -265,10 +261,11 @@ dependencies = [ [[package]] name = "cargo" -version = "0.40.0" +version = "0.41.0" dependencies = [ "atty", "bytesize", + "cargo-platform", "cargo-test-macro", "cargo-test-support", "clap", @@ -278,7 +275,7 @@ dependencies = [ "crypto-hash", "curl", "curl-sys", - "env_logger", + "env_logger 0.7.1", "failure", "filetime", "flate2", @@ -325,6 +322,13 @@ dependencies = [ "winapi 0.3.6", ] +[[package]] +name = "cargo-platform" +version = "0.1.0" +dependencies = [ + "serde", +] + [[package]] name = "cargo-test-macro" version = "0.1.0" @@ -359,15 +363,27 @@ dependencies = [ "serde_json", ] +[[package]] +name = "cargo_metadata" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d2d1617e838936c0d2323a65cc151e03ae19a7678dd24f72bccf27119b90a5d" +dependencies = [ + "semver", + "serde", + "serde_derive", + 
"serde_json", +] + [[package]] name = "cargotest2" version = "0.1.0" [[package]] name = "cc" -version = "1.0.35" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e5f3fee5eeb60324c2781f1e41286bdee933850fff9b3c672587fed5ec58c83" +checksum = "39f75544d7bbaf57560d2168f28fd649ff9c76153874db88bdbdfd839b1a7e7d" [[package]] name = "cfg-if" @@ -429,7 +445,7 @@ dependencies = [ name = "clippy" version = "0.0.212" dependencies = [ - "cargo_metadata", + "cargo_metadata 0.9.0", "clippy-mini-macro-test", "clippy_lints", "compiletest_rs", @@ -450,12 +466,12 @@ version = "0.2.0" name = "clippy_lints" version = "0.0.212" dependencies = [ - "cargo_metadata", + "cargo_metadata 0.9.0", "if_chain", "itertools 0.8.0", "lazy_static 1.3.0", "matches", - "pulldown-cmark 0.6.0", + "pulldown-cmark 0.6.1", "quine-mc_cluskey", "regex-syntax", "semver", @@ -526,7 +542,7 @@ name = "compiletest" version = "0.0.0" dependencies = [ "diff", - "env_logger", + "env_logger 0.7.1", "getopts", "lazy_static 1.3.0", "libc", @@ -542,9 +558,9 @@ dependencies = [ [[package]] name = "compiletest_rs" -version = "0.3.22" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f40ecc9332b68270998995c00f8051ee856121764a0d3230e64c9efd059d27b6" +checksum = "f75b10a18fb53549fdd090846eb01c7f8593914494d1faabc4d3005c436e417a" dependencies = [ "diff", "filetime", @@ -558,7 +574,6 @@ dependencies = [ "serde_derive", "serde_json", "tempfile", - "tester", "winapi 0.3.6", ] @@ -593,7 +608,7 @@ checksum = "e7ca8a5221364ef15ce201e8ed2f609fc312682a8f4e0e3d4aa5879764e0fa3b" [[package]] name = "crates-io" -version = "0.28.0" +version = "0.29.0" dependencies = [ "curl", "failure", @@ -652,6 +667,16 @@ dependencies = [ "crossbeam-utils 0.6.5", ] +[[package]] +name = "crossbeam-deque" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b18cd2e169ad86297e6bc0ad9aa679aee9daa4f19e8163860faf7c164e4f5a71" +dependencies = [ + "crossbeam-epoch 0.7.2", + "crossbeam-utils 0.6.5", +] + [[package]] name = "crossbeam-epoch" version = "0.3.1" @@ -723,25 +748,24 @@ dependencies = [ [[package]] name = "curl" -version = "0.4.21" +version = "0.4.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a85f2f95f2bd277d316d1aa8a477687ab4a6942258c7db7c89c187534669979c" +checksum = "d08ad3cb89d076a36b0ce5749eec2c9964f70c0c58480ab6b75a91ec4fc206d8" dependencies = [ "curl-sys", - "kernel32-sys", "libc", "openssl-probe", "openssl-sys", "schannel", "socket2", - "winapi 0.2.8", + "winapi 0.3.6", ] [[package]] name = "curl-sys" -version = "0.4.18" +version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d91a0052d5b982887d8e829bee0faffc7218ea3c6ebd3d6c2c8f678a93c9a42" +checksum = "2e9a9a4e417722876332136a00cacf92c2ceb331fab4b52b6a1ad16c6cd79255" dependencies = [ "cc", "libc", @@ -880,12 +904,6 @@ dependencies = [ "rustc-std-workspace-core", ] -[[package]] -name = "dtoa" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea57b42383d091c85abcc2706240b94ab2a8fa1fc81c10ff23c4de06e2a90b5e" - [[package]] name = "either" version = "1.5.0" @@ -909,22 +927,13 @@ dependencies = [ [[package]] name = "ena" -version = "0.13.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dc01d68e08ca384955a3aeba9217102ca1aa85b6e168639bf27739f1d749d87" +checksum = 
"8944dc8fa28ce4a38f778bd46bf7d923fe73eed5a439398507246c8e017e6f36" dependencies = [ "log", ] -[[package]] -name = "encoding_rs" -version = "0.8.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4155785c79f2f6701f185eb2e6b4caf0555ec03477cb4c70db67b465311620ed" -dependencies = [ - "cfg-if", -] - [[package]] name = "env_logger" version = "0.6.2" @@ -938,6 +947,19 @@ dependencies = [ "termcolor", ] +[[package]] +name = "env_logger" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36" +dependencies = [ + "atty", + "humantime", + "log", + "regex", + "termcolor", +] + [[package]] name = "error-chain" version = "0.12.0" @@ -974,7 +996,7 @@ dependencies = [ "proc-macro2 0.4.30", "quote 0.6.12", "syn 0.15.35", - "synstructure", + "synstructure 0.10.2", ] [[package]] @@ -1115,16 +1137,6 @@ version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "45dc39533a6cae6da2b56da48edae506bb767ec07370f86f70fc062e9d435869" -[[package]] -name = "futures-cpupool" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab90cde24b3319636588d0c35fe03b1333857621051837ed769faefb4c2162e4" -dependencies = [ - "futures", - "num_cpus", -] - [[package]] name = "fwdansi" version = "1.0.1" @@ -1216,31 +1228,13 @@ dependencies = [ name = "graphviz" version = "0.0.0" -[[package]] -name = "h2" -version = "0.1.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a539b63339fbbb00e081e84b6e11bd1d9634a82d91da2984a18ac74a8823f392" -dependencies = [ - "byteorder", - "bytes", - "fnv", - "futures", - "http", - "indexmap", - "log", - "slab", - "string", - "tokio-io", -] - [[package]] name = "handlebars" version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df044dd42cdb7e32f28557b661406fc0f2494be75199779998810dbc35030e0d" dependencies = [ - "hashbrown", + "hashbrown 0.5.0", "lazy_static 1.3.0", "log", "pest", @@ -1257,10 +1251,19 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1de41fb8dba9714efd92241565cdff73f78508c95697dd56787d3cba27e2353" dependencies = [ + "serde", +] + +[[package]] +name = "hashbrown" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3cd9867f119b19fecb08cd5c326ad4488d7a1da4bf75b4d95d71db742525aaab" +dependencies = [ + "autocfg", "compiler_builtins", "rustc-std-workspace-alloc", "rustc-std-workspace-core", - "serde", ] [[package]] @@ -1272,6 +1275,17 @@ dependencies = [ "unicode-segmentation", ] +[[package]] +name = "hermit-abi" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f22b8f315b98f415780ddbe9163c7dbbc5a07225b6d102ace1d8aeef85775140" +dependencies = [ + "compiler_builtins", + "libc", + "rustc-std-workspace-core", +] + [[package]] name = "hex" version = "0.3.2" @@ -1286,9 +1300,9 @@ checksum = "023b39be39e3a2da62a94feb433e91e8bcd37676fbc8bea371daf52b7a769a3e" [[package]] name = "home" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c07c315e106bd6f83f026a20ddaeef2706782e490db1dcdd37caad38a0e895b3" +checksum = "a3753954f7bd71f0e671afb8b5a992d1724cf43b7f95a563cd4a0bde94659ca8" dependencies = [ "scopeguard 1.0.0", "winapi 0.3.6", @@ -1296,99 +1310,27 @@ dependencies = [ [[package]] name = "html5ever" -version = 
"0.23.0" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ce65ac8028cf5a287a7dbf6c4e0a6cf2dcf022ed5b167a81bae66ebf599a8b7" +checksum = "025483b0a1e4577bb28578318c886ee5f817dda6eb62473269349044406644cb" dependencies = [ "log", "mac", "markup5ever", - "proc-macro2 0.4.30", - "quote 0.6.12", - "syn 0.15.35", + "proc-macro2 1.0.3", + "quote 1.0.2", + "syn 1.0.5", ] -[[package]] -name = "http" -version = "0.1.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe67e3678f2827030e89cc4b9e7ecd16d52f132c0b940ab5005f88e821500f6a" -dependencies = [ - "bytes", - "fnv", - "itoa", -] - -[[package]] -name = "http-body" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6741c859c1b2463a423a1dbce98d418e6c3c3fc720fb0d45528657320920292d" -dependencies = [ - "bytes", - "futures", - "http", - "tokio-buf", -] - -[[package]] -name = "httparse" -version = "1.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd179ae861f0c2e53da70d892f5f3029f9594be0c41dc5269cd371691b1dc2f9" - [[package]] name = "humantime" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ca7e5f2e110db35f93b837c81797f3714500b81d517bf20c431b16d3ca4f114" +checksum = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f" dependencies = [ "quick-error", ] -[[package]] -name = "hyper" -version = "0.12.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6481fff8269772d4463253ca83c788104a7305cb3fb9136bc651a6211e46e03f" -dependencies = [ - "bytes", - "futures", - "futures-cpupool", - "h2", - "http", - "http-body", - "httparse", - "iovec", - "itoa", - "log", - "net2", - "rustc_version", - "time", - "tokio", - "tokio-buf", - "tokio-executor", - "tokio-io", - "tokio-reactor", - "tokio-tcp", - "tokio-threadpool", - "tokio-timer", - "want", -] - -[[package]] -name = "hyper-tls" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a800d6aa50af4b5850b2b0f659625ce9504df908e9733b635720483be26174f" -dependencies = [ - "bytes", - "futures", - "hyper", - "native-tls", - "tokio-io", -] - [[package]] name = "ident_case" version = "1.0.1" @@ -1564,9 +1506,9 @@ dependencies = [ [[package]] name = "jsonrpc-core" -version = "13.1.0" +version = "13.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd42951eb35079520ee29b7efbac654d85821b397ef88c8151600ef7e2d00217" +checksum = "91d767c183a7e58618a609499d359ce3820700b3ebb4823a18c343b4a2a41a0d" dependencies = [ "futures", "log", @@ -1636,7 +1578,7 @@ dependencies = [ "num_cpus", "tokio", "tokio-codec", - "unicase 2.5.1", + "unicase", ] [[package]] @@ -1669,26 +1611,13 @@ checksum = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f" [[package]] name = "libc" -version = "0.2.62" +version = "0.2.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34fcd2c08d2f832f376f4173a231990fa5aef4e99fb569867318a227ef4c06ba" +checksum = "74dfca3d9957906e8d1e6a0b641dc9a59848e793f1da2165889fd4f62d10d79c" dependencies = [ "rustc-std-workspace-core", ] -[[package]] -name = "libflate" -version = "0.1.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90c6f86f4b0caa347206f916f8b687b51d77c6ef8ff18d52dd007491fd580529" -dependencies = [ - "adler32", - "byteorder", - "crc32fast", - "rle-decode-fast", - "take_mut", -] - [[package]] name 
= "libgit2-sys" version = "0.9.0" @@ -1835,9 +1764,9 @@ checksum = "08cbb6b4fef96b6d77bfc40ec491b1690c779e77b05cd9f07f787ed376fd4c43" [[package]] name = "markup5ever" -version = "0.8.1" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1af46a727284117e09780d05038b1ce6fc9c76cc6df183c3dae5a8955a25e21" +checksum = "65381d9d47506b8592b97c4efd936afcf673b09b059f2bef39c7211ee78b9d03" dependencies = [ "log", "phf", @@ -1858,15 +1787,15 @@ checksum = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08" [[package]] name = "mdbook" -version = "0.3.1" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "949bb2acb2cff9fa5c375cf9c43e70b3dba0a974d9fe01c31285d7a84d2a0fa2" +checksum = "031bdd9d4893c983e2f69ebc4b59070feee8276a584c4aabdcb351235ea28016" dependencies = [ "ammonia", "chrono", "clap", "elasticlunr-rs", - "env_logger", + "env_logger 0.6.2", "error-chain", "handlebars", "itertools 0.8.0", @@ -1874,7 +1803,7 @@ dependencies = [ "log", "memchr", "open", - "pulldown-cmark 0.5.3", + "pulldown-cmark 0.6.1", "regex", "serde", "serde_derive", @@ -1885,34 +1814,11 @@ dependencies = [ "toml-query", ] -[[package]] -name = "mdbook-linkcheck" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77d1f0ba4d1e6b86fa18e8853d026d7d76a97eb7eb5eb052ed80901e43b7fc10" -dependencies = [ - "env_logger", - "failure", - "log", - "mdbook", - "memchr", - "pulldown-cmark 0.5.3", - "rayon", - "regex", - "reqwest", - "semver", - "serde", - "serde_derive", - "serde_json", - "structopt 0.2.18", - "url 1.7.2", -] - [[package]] name = "measureme" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d09de7dafa3aa334bc806447c7e4de69419723312f4b88b80b561dea66601ce8" +checksum = "cd21b0e6e1af976b269ce062038fe5e1b9ca2f817ab7a3af09ec4210aebf0d30" dependencies = [ "byteorder", "memmap", @@ -1950,27 +1856,6 @@ dependencies = [ "rustc_version", ] -[[package]] -name = "mime" -version = "0.3.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e27ca21f40a310bd06d9031785f4801710d566c184a6e15bad4f1d9b65f9425" -dependencies = [ - "unicase 2.5.1", -] - -[[package]] -name = "mime_guess" -version = "2.0.0-alpha.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30de2e4613efcba1ec63d8133f344076952090c122992a903359be5a4f99c3ed" -dependencies = [ - "mime", - "phf", - "phf_codegen", - "unicase 1.4.2", -] - [[package]] name = "minifier" version = "0.0.33" @@ -2080,13 +1965,13 @@ name = "miri" version = "0.1.0" dependencies = [ "byteorder", - "cargo_metadata", + "cargo_metadata 0.9.0", "colored", "compiletest_rs", "directories", - "env_logger", + "env_logger 0.7.1", "getrandom", - "hex 0.3.2", + "hex 0.4.0", "log", "num-traits", "rand 0.7.0", @@ -2096,24 +1981,6 @@ dependencies = [ "vergen", ] -[[package]] -name = "native-tls" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b2df1a4c22fd44a62147fd8f13dd0f95c9d8ca7b2610299b2a2f9cf8964274e" -dependencies = [ - "lazy_static 1.3.0", - "libc", - "log", - "openssl", - "openssl-probe", - "openssl-sys", - "schannel", - "security-framework", - "security-framework-sys", - "tempfile", -] - [[package]] name = "net2" version = "0.2.33" @@ -2444,7 +2311,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"234f71a15de2288bcb7e3b6515828d22af7ec8598ee6d24c3b526fa0a80b67a0" dependencies = [ "siphasher", - "unicase 1.4.2", ] [[package]] @@ -2493,7 +2359,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df8b3f4e0475def7d9c2e5de8e5a1306949849761e107b360d03e98eafaffd61" dependencies = [ "chrono", - "env_logger", + "env_logger 0.6.2", "log", ] @@ -2558,21 +2424,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77043da1282374688ee212dc44b3f37ff929431de9c9adc3053bd3cee5630357" dependencies = [ "bitflags", - "getopts", "memchr", - "unicase 2.5.1", + "unicase", ] [[package]] name = "pulldown-cmark" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85b0ad0d4c1702965ee6bb5b4ff5e71f83850b497d497e9444302987bf9e26a4" +checksum = "1c205cc82214f3594e2d50686730314f817c67ffa80fe800cf0db78c3c2b9d9e" dependencies = [ "bitflags", "getopts", "memchr", - "unicase 2.5.1", + "unicase", ] [[package]] @@ -2613,14 +2478,14 @@ dependencies = [ [[package]] name = "racer" -version = "2.1.27" +version = "2.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dde22b84ab75220015cbd91240222402bf885cbe3a5dc856475771abb82533ae" +checksum = "acc70369054bad4ad0c16a3f45cd73e0695361a3af35c7b465e619ac2674f064" dependencies = [ "bitflags", "clap", "derive_more", - "env_logger", + "env_logger 0.6.2", "humantime", "lazy_static 1.3.0", "log", @@ -2772,22 +2637,22 @@ dependencies = [ [[package]] name = "rayon" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4b0186e22767d5b9738a05eab7c6ac90b15db17e5b5f9bd87976dd7d89a10a4" +checksum = "83a27732a533a1be0a0035a111fe76db89ad312f6f0347004c220c57f209a123" dependencies = [ - "crossbeam-deque 0.6.3", + "crossbeam-deque 0.7.1", "either", "rayon-core", ] [[package]] name = "rayon-core" -version = "1.5.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebbe0df8435ac0c397d467b6cad6d25543d06e8a019ef3f6af3c384597515bd2" +checksum = "98dcf634205083b17d0861252431eb2acbfb698ab7478a2d20de07954f47ec7b" dependencies = [ - "crossbeam-deque 0.6.3", + "crossbeam-deque 0.7.1", "crossbeam-queue", "crossbeam-utils 0.6.5", "lazy_static 1.3.0", @@ -2869,52 +2734,16 @@ dependencies = [ "winapi 0.3.6", ] -[[package]] -name = "reqwest" -version = "0.9.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e542d9f077c126af32536b6aacc75bb7325400eab8cd0743543be5d91660780d" -dependencies = [ - "base64", - "bytes", - "encoding_rs", - "futures", - "http", - "hyper", - "hyper-tls", - "libflate", - "log", - "mime", - "mime_guess", - "native-tls", - "serde", - "serde_json", - "serde_urlencoded", - "tokio", - "tokio-executor", - "tokio-io", - "tokio-threadpool", - "tokio-timer", - "url 1.7.2", - "uuid", -] - -[[package]] -name = "rle-decode-fast" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cabe4fa914dec5870285fa7f71f602645da47c486e68486d2b4ceb4a343e90ac" - [[package]] name = "rls" -version = "1.39.0" +version = "1.40.0" dependencies = [ "cargo", - "cargo_metadata", + "cargo_metadata 0.8.0", "clippy_lints", "crossbeam-channel", "difference", - "env_logger", + "env_logger 0.7.1", "failure", "futures", "heck", @@ -2928,7 +2757,7 @@ dependencies = [ "num_cpus", "ordslice", "racer", - "rand 0.6.1", + "rand 0.7.0", "rayon", "regex", "rls-analysis", @@ -2937,7 +2766,6 @@ 
dependencies = [ "rls-rustc", "rls-span", "rls-vfs", - "rustc-serialize", "rustc-workspace-hack", "rustc_tools_util 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "rustfmt-nightly", @@ -2998,11 +2826,11 @@ name = "rls-rustc" version = "0.6.0" dependencies = [ "clippy_lints", - "env_logger", + "env_logger 0.7.1", "failure", "futures", "log", - "rand 0.6.1", + "rand 0.7.0", "rls-data", "rls-ipc", "serde", @@ -3035,7 +2863,7 @@ dependencies = [ "clap", "failure", "mdbook", - "mdbook-linkcheck", + "rustc-workspace-hack", ] [[package]] @@ -3046,6 +2874,7 @@ dependencies = [ "backtrace", "bitflags", "byteorder", + "cc", "chalk-engine", "fmt_macros", "graphviz", @@ -3055,12 +2884,13 @@ dependencies = [ "num_cpus", "parking_lot 0.9.0", "polonius-engine", - "rustc-rayon", - "rustc-rayon-core", + "rustc-rayon 0.3.0", + "rustc-rayon-core 0.3.0", "rustc_apfloat", "rustc_data_structures", "rustc_errors", "rustc_fs_util", + "rustc_index", "rustc_macros", "rustc_target", "scoped-tls", @@ -3072,9 +2902,9 @@ dependencies = [ [[package]] name = "rustc-ap-arena" -version = "583.0.0" +version = "606.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f59b76d334bd533f3fdc5c651c27678c5e80fac67c6f7da22ba21a58878c55f5" +checksum = "a623fd4805842e9bd0bb6e6dace63efede0ee22de4522a0b03b7c3d15a22f009" dependencies = [ "rustc-ap-rustc_data_structures", "smallvec", @@ -3082,15 +2912,15 @@ dependencies = [ [[package]] name = "rustc-ap-graphviz" -version = "583.0.0" +version = "606.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e632ef08ca17458acfd46d2ead3d541a1c249586cd5329f5fe333dacfab6142" +checksum = "ee549ade784b444ef10c0240c3487ed785aa65d711071f7984246b15329a17b6" [[package]] name = "rustc-ap-rustc_data_structures" -version = "583.0.0" +version = "606.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89e2c7be68185418f3cd56af3df8b29007a59a1cebefa63612d055f9bcb1a36" +checksum = "ca545744a5a9b42e3d0410d6290d40de96dd567253fe77f310c1de4afd213dd4" dependencies = [ "cfg-if", "crossbeam-utils 0.6.5", @@ -3099,21 +2929,22 @@ dependencies = [ "jobserver", "lazy_static 1.3.0", "log", - "parking_lot 0.7.1", + "parking_lot 0.9.0", "rustc-ap-graphviz", + "rustc-ap-rustc_index", "rustc-ap-serialize", "rustc-hash", - "rustc-rayon", - "rustc-rayon-core", + "rustc-rayon 0.2.0", + "rustc-rayon-core 0.2.0", "smallvec", "stable_deref_trait", ] [[package]] name = "rustc-ap-rustc_errors" -version = "583.0.0" +version = "606.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e47cb380abeb72b01e42b2342d592f7eeea7d536c2f1f0d0e550dc509e46333" +checksum = "a6967a41ed38ef4bce0f559fe9a4801d8ba12ac032f40a12a55e72f79d52c9bb" dependencies = [ "annotate-snippets", "atty", @@ -3127,45 +2958,56 @@ dependencies = [ ] [[package]] -name = "rustc-ap-rustc_lexer" -version = "583.0.0" +name = "rustc-ap-rustc_index" +version = "606.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "494cfaf67f49217d67d0774eeecbba61ac89acf478db97ef11f113ed8a959305" +checksum = "457a5c204ae2fdaa5bdb5b196e58ca59896870d80445fe423063c9453496e3ea" +dependencies = [ + "rustc-ap-serialize", + "smallvec", +] + +[[package]] +name = "rustc-ap-rustc_lexer" +version = "606.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed0c064676f8a08e42a36b0d4e4a102465fb0f4b75e11436cb7f66d2c3fa7139" dependencies = [ "unicode-xid 0.2.0", ] [[package]] name = "rustc-ap-rustc_macros" -version 
= "583.0.0" +version = "606.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2e5d36becc59b4497f9cbd3ae0610081de0207a1d0e95c066369167b14f486f" +checksum = "b2d77e46159c5288c585decbcdc9d742889c65e307c31e104c7a36d63fe1f5d0" dependencies = [ "itertools 0.8.0", "proc-macro2 0.4.30", "quote 0.6.12", "syn 0.15.35", - "synstructure", + "synstructure 0.10.2", ] [[package]] name = "rustc-ap-rustc_target" -version = "583.0.0" +version = "606.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7bfc5f96dfc3b9f8d5b57884f7f37467ecff6776cd4b8b491a7daece6fdd7c2" +checksum = "86ca895350b0de14d064b499168c93fa183958d5462eb042c927d93623e41ec1" dependencies = [ "bitflags", "log", "rustc-ap-rustc_data_structures", + "rustc-ap-rustc_index", "rustc-ap-serialize", "rustc-ap-syntax_pos", ] [[package]] name = "rustc-ap-serialize" -version = "583.0.0" +version = "606.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bb9ee231cf79eded39c56647499f83d6136ff5c8c0baaa9e21b6febee00f4f6" +checksum = "92679240e86f4583cc05f8dcf6439bdab87bac9e6555718469176de9bd52ba20" dependencies = [ "indexmap", "smallvec", @@ -3173,17 +3015,17 @@ dependencies = [ [[package]] name = "rustc-ap-syntax" -version = "583.0.0" +version = "606.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3827fc208814efbde82d613e31d11b4250ce9e8cf8afe4a4d47bbbd099632c9" +checksum = "0a0c30f8e38c847dbfd9e2f1e472ab06d0bd0a23ab53ae4c5a44912842ce834e" dependencies = [ "bitflags", "lazy_static 1.3.0", "log", "rustc-ap-rustc_data_structures", "rustc-ap-rustc_errors", + "rustc-ap-rustc_index", "rustc-ap-rustc_lexer", - "rustc-ap-rustc_macros", "rustc-ap-rustc_target", "rustc-ap-serialize", "rustc-ap-syntax_pos", @@ -3193,13 +3035,14 @@ dependencies = [ [[package]] name = "rustc-ap-syntax_pos" -version = "583.0.0" +version = "606.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "930ed81c34f325e512cc315c04d676fa84a373879d5c43bb54054a0522b05213" +checksum = "2bdaa0fb40143b4b878256ac4e2b498885daafc269502504d91929eab4744bf4" dependencies = [ "cfg-if", "rustc-ap-arena", "rustc-ap-rustc_data_structures", + "rustc-ap-rustc_index", "rustc-ap-rustc_macros", "rustc-ap-serialize", "scoped-tls", @@ -3243,7 +3086,18 @@ checksum = "0d2e07e19601f21c59aad953c2632172ba70cb27e685771514ea66e4062b3363" dependencies = [ "crossbeam-deque 0.2.0", "either", - "rustc-rayon-core", + "rustc-rayon-core 0.2.0", +] + +[[package]] +name = "rustc-rayon" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f32767f90d938f1b7199a174ef249ae1924f6e5bbdb9d112fea141e016f25b3a" +dependencies = [ + "crossbeam-deque 0.7.1", + "either", + "rustc-rayon-core 0.3.0", ] [[package]] @@ -3259,10 +3113,17 @@ dependencies = [ ] [[package]] -name = "rustc-serialize" -version = "0.3.24" +name = "rustc-rayon-core" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" +checksum = "ea2427831f0053ea3ea73559c8eabd893133a51b251d142bacee53c62a288cb3" +dependencies = [ + "crossbeam-deque 0.7.1", + "crossbeam-queue", + "crossbeam-utils 0.6.5", + "lazy_static 1.3.0", + "num_cpus", +] [[package]] name = "rustc-std-workspace-alloc" @@ -3293,6 +3154,7 @@ dependencies = [ "serde", "serde_json", "smallvec", + "syn 0.15.35", "url 2.1.0", "winapi 0.3.6", ] @@ -3316,19 +3178,6 @@ dependencies = [ "core", ] -[[package]] -name = 
"rustc_ast_borrowck" -version = "0.0.0" -dependencies = [ - "graphviz", - "log", - "rustc", - "rustc_data_structures", - "rustc_errors", - "syntax", - "syntax_pos", -] - [[package]] name = "rustc_codegen_llvm" version = "0.0.0" @@ -3347,7 +3196,6 @@ dependencies = [ "log", "memmap", "num_cpus", - "parking_lot 0.9.0", "rustc", "rustc_apfloat", "rustc_codegen_utils", @@ -3355,6 +3203,7 @@ dependencies = [ "rustc_errors", "rustc_fs_util", "rustc_incremental", + "rustc_index", "rustc_target", "serialize", "syntax", @@ -3366,7 +3215,6 @@ dependencies = [ name = "rustc_codegen_utils" version = "0.0.0" dependencies = [ - "flate2", "log", "punycode", "rustc", @@ -3392,8 +3240,9 @@ dependencies = [ "log", "parking_lot 0.9.0", "rustc-hash", - "rustc-rayon", - "rustc-rayon-core", + "rustc-rayon 0.3.0", + "rustc-rayon-core 0.3.0", + "rustc_index", "serialize", "smallvec", "stable_deref_trait", @@ -3403,16 +3252,16 @@ dependencies = [ name = "rustc_driver" version = "0.0.0" dependencies = [ - "env_logger", + "env_logger 0.7.1", "graphviz", "lazy_static 1.3.0", "log", "rustc", - "rustc_ast_borrowck", "rustc_codegen_utils", "rustc_data_structures", "rustc_errors", "rustc_interface", + "rustc_lint", "rustc_metadata", "rustc_mir", "rustc_plugin", @@ -3458,6 +3307,14 @@ dependencies = [ "syntax_pos", ] +[[package]] +name = "rustc_index" +version = "0.0.0" +dependencies = [ + "serialize", + "smallvec", +] + [[package]] name = "rustc_interface" version = "0.0.0" @@ -3465,8 +3322,7 @@ dependencies = [ "log", "once_cell", "rustc", - "rustc-rayon", - "rustc_ast_borrowck", + "rustc-rayon 0.3.0", "rustc_codegen_ssa", "rustc_codegen_utils", "rustc_data_structures", @@ -3479,11 +3335,13 @@ dependencies = [ "rustc_plugin_impl", "rustc_privacy", "rustc_resolve", + "rustc_target", "rustc_traits", "rustc_typeck", "serialize", "smallvec", "syntax", + "syntax_expand", "syntax_ext", "syntax_pos", "tempfile", @@ -3503,6 +3361,7 @@ dependencies = [ "log", "rustc", "rustc_data_structures", + "rustc_index", "rustc_target", "syntax", "syntax_pos", @@ -3532,10 +3391,10 @@ name = "rustc_macros" version = "0.1.0" dependencies = [ "itertools 0.8.0", - "proc-macro2 0.4.30", - "quote 0.6.12", - "syn 0.15.35", - "synstructure", + "proc-macro2 1.0.3", + "quote 1.0.2", + "syn 1.0.5", + "synstructure 0.12.1", ] [[package]] @@ -3548,11 +3407,13 @@ dependencies = [ "rustc", "rustc_data_structures", "rustc_errors", + "rustc_index", "rustc_target", "serialize", "smallvec", "stable_deref_trait", "syntax", + "syntax_expand", "syntax_pos", ] @@ -3561,7 +3422,6 @@ name = "rustc_mir" version = "0.0.0" dependencies = [ "arena", - "byteorder", "either", "graphviz", "log", @@ -3571,6 +3431,7 @@ dependencies = [ "rustc_apfloat", "rustc_data_structures", "rustc_errors", + "rustc_index", "rustc_lexer", "rustc_target", "serialize", @@ -3598,6 +3459,8 @@ dependencies = [ "rustc", "rustc_data_structures", "rustc_errors", + "rustc_index", + "rustc_target", "syntax", "syntax_pos", ] @@ -3614,9 +3477,9 @@ name = "rustc_plugin_impl" version = "0.0.0" dependencies = [ "rustc", - "rustc_errors", "rustc_metadata", "syntax", + "syntax_expand", "syntax_pos", ] @@ -3638,7 +3501,6 @@ version = "0.0.0" dependencies = [ "arena", "bitflags", - "indexmap", "log", "rustc", "rustc_data_structures", @@ -3646,6 +3508,7 @@ dependencies = [ "rustc_metadata", "smallvec", "syntax", + "syntax_expand", "syntax_pos", ] @@ -3660,7 +3523,6 @@ dependencies = [ "rustc_codegen_utils", "rustc_data_structures", "rustc_target", - "rustc_typeck", "serde_json", "syntax", "syntax_pos", @@ 
-3673,6 +3535,7 @@ dependencies = [ "bitflags", "log", "rustc_data_structures", + "rustc_index", "serialize", "syntax_pos", ] @@ -3691,9 +3554,7 @@ checksum = "b725dadae9fabc488df69a287f5a99c5eaf5d10853842a8a3dfac52476f544ee" name = "rustc_traits" version = "0.0.0" dependencies = [ - "bitflags", "chalk-engine", - "graphviz", "log", "rustc", "rustc_data_structures", @@ -3723,6 +3584,7 @@ dependencies = [ "rustc", "rustc_data_structures", "rustc_errors", + "rustc_index", "rustc_target", "smallvec", "syntax", @@ -3744,7 +3606,7 @@ version = "0.0.0" dependencies = [ "minifier", "pulldown-cmark 0.5.3", - "rustc-rayon", + "rustc-rayon 0.3.0", "tempfile", ] @@ -3783,15 +3645,15 @@ dependencies = [ [[package]] name = "rustfmt-nightly" -version = "1.4.8" +version = "1.4.9" dependencies = [ "annotate-snippets", "bytecount", - "cargo_metadata", + "cargo_metadata 0.8.0", "derive-new", "diff", "dirs", - "env_logger", + "env_logger 0.6.2", "failure", "getopts", "ignore", @@ -3806,7 +3668,7 @@ dependencies = [ "rustfmt-config_proc_macro", "serde", "serde_json", - "structopt 0.3.1", + "structopt", "term 0.6.0", "toml", "unicode-segmentation", @@ -3863,27 +3725,6 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b42e15e59b18a828bbf5c58ea01debb36b9b096346de35d941dcb89009f24a0d" -[[package]] -name = "security-framework" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eee63d0f4a9ec776eeb30e220f0bc1e092c3ad744b2a379e3993070364d3adc2" -dependencies = [ - "core-foundation", - "core-foundation-sys", - "libc", - "security-framework-sys", -] - -[[package]] -name = "security-framework-sys" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9636f8989cbf61385ae4824b98c1aaa54c994d7d8b41f11c601ed799f0549a56" -dependencies = [ - "core-foundation-sys", -] - [[package]] name = "semver" version = "0.9.0" @@ -3951,18 +3792,6 @@ dependencies = [ "syn 1.0.5", ] -[[package]] -name = "serde_urlencoded" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "642dd69105886af2efd227f75a520ec9b44a820d65bc133a9131f7d229fd165a" -dependencies = [ - "dtoa", - "itoa", - "serde", - "url 1.7.2", -] - [[package]] name = "serialize" version = "0.0.0" @@ -4056,13 +3885,13 @@ version = "0.0.0" dependencies = [ "alloc", "backtrace", - "cc", "cfg-if", "compiler_builtins", "core", "dlmalloc", "fortanix-sgx-abi", - "hashbrown", + "hashbrown 0.6.2", + "hermit-abi", "libc", "panic_abort", "panic_unwind", @@ -4076,15 +3905,6 @@ dependencies = [ "wasi", ] -[[package]] -name = "string" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d24114bfcceb867ca7f71a0d3fe45d45619ec47a6fbfa98cb14e14250bfa5d6d" -dependencies = [ - "bytes", -] - [[package]] name = "string_cache" version = "0.7.3" @@ -4134,16 +3954,6 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" -[[package]] -name = "structopt" -version = "0.2.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16c2cdbf9cc375f15d1b4141bc48aeef444806655cd0e904207edc8d68d86ed7" -dependencies = [ - "clap", - "structopt-derive 0.2.18", -] - [[package]] name = "structopt" version = "0.3.1" @@ -4151,19 +3961,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"2ac9d6e93dd792b217bf89cda5c14566e3043960c6f9da890c2ba5d09d07804c" dependencies = [ "clap", - "structopt-derive 0.3.1", -] - -[[package]] -name = "structopt-derive" -version = "0.2.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53010261a84b37689f9ed7d395165029f9cc7abb9f56bbfe86bee2597ed25107" -dependencies = [ - "heck", - "proc-macro2 0.4.30", - "quote 0.6.12", - "syn 0.15.35", + "structopt-derive", ] [[package]] @@ -4231,6 +4029,18 @@ dependencies = [ "unicode-xid 0.1.0", ] +[[package]] +name = "synstructure" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f085a5855930c0441ca1288cf044ea4aecf4f43a91668abdb870b4ba546a203" +dependencies = [ + "proc-macro2 1.0.3", + "quote 1.0.2", + "syn 1.0.5", + "unicode-xid 0.2.0", +] + [[package]] name = "syntax" version = "0.0.0" @@ -4240,8 +4050,8 @@ dependencies = [ "log", "rustc_data_structures", "rustc_errors", + "rustc_index", "rustc_lexer", - "rustc_macros", "rustc_target", "scoped-tls", "serialize", @@ -4249,6 +4059,25 @@ dependencies = [ "syntax_pos", ] +[[package]] +name = "syntax_expand" +version = "0.0.0" +dependencies = [ + "bitflags", + "lazy_static 1.3.0", + "log", + "rustc_data_structures", + "rustc_errors", + "rustc_index", + "rustc_lexer", + "rustc_target", + "scoped-tls", + "serialize", + "smallvec", + "syntax", + "syntax_pos", +] + [[package]] name = "syntax_ext" version = "0.0.0" @@ -4257,10 +4086,10 @@ dependencies = [ "log", "rustc_data_structures", "rustc_errors", - "rustc_lexer", "rustc_target", "smallvec", "syntax", + "syntax_expand", "syntax_pos", ] @@ -4271,18 +4100,13 @@ dependencies = [ "arena", "cfg-if", "rustc_data_structures", + "rustc_index", "rustc_macros", "scoped-tls", "serialize", "unicode-width", ] -[[package]] -name = "take_mut" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60" - [[package]] name = "tar" version = "0.4.20" @@ -4328,16 +4152,6 @@ dependencies = [ "std", ] -[[package]] -name = "term" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa63644f74ce96fbeb9b794f66aff2a52d601cbd5e80f4b97123e3899f4570f1" -dependencies = [ - "kernel32-sys", - "winapi 0.2.8", -] - [[package]] name = "term" version = "0.6.0" @@ -4394,17 +4208,6 @@ dependencies = [ "term 0.0.0", ] -[[package]] -name = "tester" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e812cb26c597f86a49b26dbb58b878bd2a2b4b93fc069dc39499228fe556ff6" -dependencies = [ - "getopts", - "libc", - "term 0.4.6", -] - [[package]] name = "textwrap" version = "0.11.0" @@ -4468,17 +4271,6 @@ dependencies = [ "tokio-uds", ] -[[package]] -name = "tokio-buf" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fb220f46c53859a4b7ec083e41dec9778ff0b1851c0942b211edb89e0ccdc46" -dependencies = [ - "bytes", - "either", - "futures", -] - [[package]] name = "tokio-codec" version = "0.1.1" @@ -4715,12 +4507,6 @@ dependencies = [ "syn 0.15.35", ] -[[package]] -name = "try-lock" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e604eb7b43c06650e854be16a2a03155743d3752dd1c943f6829e26b7a36e382" - [[package]] name = "typenum" version = "1.10.0" @@ -4739,15 +4525,6 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"535c204ee4d8434478593480b8f86ab45ec9aae0e83c568ca81abf0fd0e88f86" -[[package]] -name = "unicase" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f4765f83163b74f957c797ad9253caf97f103fb064d3999aea9568d09fc8a33" -dependencies = [ - "version_check", -] - [[package]] name = "unicase" version = "2.5.1" @@ -4867,15 +4644,6 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8772a4ccbb4e89959023bc5b7cb8623a795caa7092d99f3aa9501b9484d4557d" -[[package]] -name = "uuid" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90dbc611eb48397705a6b0f6e917da23ae517e4d127123d2cf7674206627d32a" -dependencies = [ - "rand 0.6.1", -] - [[package]] name = "vcpkg" version = "0.2.6" @@ -4925,17 +4693,6 @@ dependencies = [ "winapi-util", ] -[[package]] -name = "want" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6395efa4784b027708f7451087e647ec73cc74f5d9bc2e418404248d679a230" -dependencies = [ - "futures", - "log", - "try-lock", -] - [[package]] name = "wasi" version = "0.7.0" diff --git a/README.md b/README.md index 96d7e938be..c5468a2924 100644 --- a/README.md +++ b/README.md @@ -33,6 +33,7 @@ or reading the [rustc guide][rustcguidebuild]. * `curl` * `git` * `ssl` which comes in `libssl-dev` or `openssl-devel` + * `pkg-config` if you are compiling on Linux and targeting Linux 2. Clone the [source] with `git`: @@ -243,19 +244,17 @@ The Rust community congregates in a few places: To contribute to Rust, please see [CONTRIBUTING](CONTRIBUTING.md). -Rust has an [IRC] culture and most real-time collaboration happens in a -variety of channels on Mozilla's IRC network, irc.mozilla.org. The -most popular channel is [#rust], a venue for general discussion about -Rust. And a good place to ask for help would be [#rust-beginners]. +Most real-time collaboration happens in a variety of channels on the +[Rust Discord server][rust-discord], with channels dedicated for getting help, +community, documentation, and all major contribution areas in the Rust ecosystem. +A good place to ask for help would be the #help channel. The [rustc guide] might be a good place to start if you want to find out how various parts of the compiler work. Also, you may find the [rustdocs for the compiler itself][rustdocs] useful. -[IRC]: https://en.wikipedia.org/wiki/Internet_Relay_Chat -[#rust]: irc://irc.mozilla.org/rust -[#rust-beginners]: irc://irc.mozilla.org/rust-beginners +[rust-discord]: https://discord.gg/rust-lang [rustc guide]: https://rust-lang.github.io/rustc-guide/about-this-guide.html [rustdocs]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc/ diff --git a/config.toml.example b/config.toml.example index 848147c297..e832570ed9 100644 --- a/config.toml.example +++ b/config.toml.example @@ -258,10 +258,9 @@ [rust] # Whether or not to optimize the compiler and standard library. -# -# Note: the slowness of the non optimized compiler compiling itself usually -# outweighs the time gains in not doing optimizations, therefore a -# full bootstrap takes much more time with `optimize` set to false. +# WARNING: Building with optimize = false is NOT SUPPORTED. Due to bootstrapping, +# building without optimizations takes much longer than optimizing. Further, some platforms +# fail to build without this optimization (c.f. #65352). #optimize = true # Indicates that the build should be configured for debugging Rust. 
A @@ -341,6 +340,9 @@ # nightly features #channel = "dev" +# The root location of the MUSL installation directory. +#musl-root = "..." + # By default the `rustc` executable is built with `-Wl,-rpath` flags on Unix # platforms to ensure that the compiler is usable by default from the build # directory (as it links to a number of dynamic libraries). This may not be @@ -374,9 +376,7 @@ # This is an array of the codegen backends that will be compiled for the rustc # that's being compiled. The default is to only build the LLVM codegen backend, -# but you can also optionally enable the "emscripten" backend for asm.js or -# make this an empty array (but that probably won't get too far in the -# bootstrap) +# and currently the only standard option supported is `"llvm"` #codegen-backends = ["llvm"] # This is the name of the directory in which codegen backends will get installed diff --git a/git-commit-hash b/git-commit-hash index c702c5c981..70479621bc 100644 --- a/git-commit-hash +++ b/git-commit-hash @@ -1 +1 @@ -4560ea788cb760f0a34127156c78e2552949f734 \ No newline at end of file +73528e339aae0f17a15ffa49a8ac608f50c6cf14 \ No newline at end of file diff --git a/src/README.md b/src/README.md index 32ca4a1057..2f7cf90c5f 100644 --- a/src/README.md +++ b/src/README.md @@ -5,7 +5,4 @@ This directory contains the source code of the rust project, including: For more information on how various parts of the compiler work, see the [rustc guide]. -There is also useful content in this README: -https://github.com/rust-lang/rust/tree/master/src/librustc/infer/lexical_region_resolve. - [rustc guide]: https://rust-lang.github.io/rustc-guide/about-this-guide.html diff --git a/src/bootstrap/README.md b/src/bootstrap/README.md index 3e877fc4e3..c501378bff 100644 --- a/src/bootstrap/README.md +++ b/src/bootstrap/README.md @@ -328,6 +328,8 @@ are: `Config` struct. * Adding a sanity check? Take a look at `bootstrap/sanity.rs`. -If you have any questions feel free to reach out on `#rust-infra` on IRC or ask on -internals.rust-lang.org. When you encounter bugs, please file issues on the -rust-lang/rust issue tracker. +If you have any questions feel free to reach out on `#infra` channel in the +[Rust Discord server][rust-discord] or ask on internals.rust-lang.org. When +you encounter bugs, please file issues on the rust-lang/rust issue tracker. 
+ +[rust-discord]: https://discord.gg/rust-lang diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py index 65129eeeec..4caf36a6f2 100644 --- a/src/bootstrap/bootstrap.py +++ b/src/bootstrap/bootstrap.py @@ -734,10 +734,6 @@ class RustBuild(object): if module.endswith("llvm-project"): if self.get_toml('llvm-config') and self.get_toml('lld') != 'true': continue - if module.endswith("llvm-emscripten"): - backends = self.get_toml('codegen-backends') - if backends is None or not 'emscripten' in backends: - continue check = self.check_submodule(module, slow_submodules) filtered_submodules.append((module, check)) submodules_names.append(module) diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs index 5d586f0c46..2edcef203a 100644 --- a/src/bootstrap/builder.rs +++ b/src/bootstrap/builder.rs @@ -443,6 +443,7 @@ impl<'a> Builder<'a> { dist::Rustc, dist::DebuggerScripts, dist::Std, + dist::RustcDev, dist::Analysis, dist::Src, dist::PlainSourceTarball, @@ -817,12 +818,22 @@ impl<'a> Builder<'a> { let mut rustflags = Rustflags::new(&target); if stage != 0 { + if let Ok(s) = env::var("CARGOFLAGS_NOT_BOOTSTRAP") { + cargo.args(s.split_whitespace()); + } rustflags.env("RUSTFLAGS_NOT_BOOTSTRAP"); } else { + if let Ok(s) = env::var("CARGOFLAGS_BOOTSTRAP") { + cargo.args(s.split_whitespace()); + } rustflags.env("RUSTFLAGS_BOOTSTRAP"); rustflags.arg("--cfg=bootstrap"); } + if let Ok(s) = env::var("CARGOFLAGS") { + cargo.args(s.split_whitespace()); + } + match mode { Mode::Std | Mode::ToolBootstrap | Mode::ToolStd => {}, Mode::Rustc | Mode::Codegen | Mode::ToolRustc => { @@ -875,7 +886,18 @@ impl<'a> Builder<'a> { // things still build right, please do! match mode { Mode::Std => metadata.push_str("std"), - _ => {}, + // When we're building rustc tools, they're built with a search path + // that contains things built during the rustc build. For example, + // bitflags is built during the rustc build, and is a dependency of + // rustdoc as well. We're building rustdoc in a different target + // directory, though, which means that Cargo will rebuild the + // dependency. When we go on to build rustdoc, we'll look for + // bitflags, and find two different copies: one built during the + // rustc step and one that we just built. This isn't always a + // problem, somehow -- not really clear why -- but we know that this + // fixes things. 
+ Mode::ToolRustc => metadata.push_str("tool-rustc"), + _ => {} } cargo.env("__CARGO_DEFAULT_LIB_METADATA", &metadata); @@ -970,6 +992,7 @@ impl<'a> Builder<'a> { Some("-Wl,-rpath,@loader_path/../lib") } else if !target.contains("windows") && !target.contains("wasm32") && + !target.contains("emscripten") && !target.contains("fuchsia") { Some("-Wl,-rpath,$ORIGIN/../lib") } else { diff --git a/src/bootstrap/cache.rs b/src/bootstrap/cache.rs index 53071df855..4310f2c6fa 100644 --- a/src/bootstrap/cache.rs +++ b/src/bootstrap/cache.rs @@ -161,7 +161,7 @@ impl Ord for Interned { } } -struct TyIntern { +struct TyIntern { items: Vec, set: HashMap>, } diff --git a/src/bootstrap/channel.rs b/src/bootstrap/channel.rs index caa4843da4..ef1b6e217a 100644 --- a/src/bootstrap/channel.rs +++ b/src/bootstrap/channel.rs @@ -13,7 +13,7 @@ use build_helper::output; use crate::Build; // The version number -pub const CFG_RELEASE_NUM: &str = "1.39.0"; +pub const CFG_RELEASE_NUM: &str = "1.40.0"; pub struct GitInfo { inner: Option, diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs index cadb9a7e44..df1c725758 100644 --- a/src/bootstrap/check.rs +++ b/src/bootstrap/check.rs @@ -55,6 +55,7 @@ impl Step for Std { cargo, args(builder.kind), &libstd_stamp(builder, compiler, target), + vec![], true); let libdir = builder.sysroot_libdir(compiler, target); @@ -103,6 +104,7 @@ impl Step for Rustc { cargo, args(builder.kind), &librustc_stamp(builder, compiler, target), + vec![], true); let libdir = builder.sysroot_libdir(compiler, target); @@ -155,6 +157,7 @@ impl Step for CodegenBackend { cargo, args(builder.kind), &codegen_backend_stamp(builder, compiler, target, backend), + vec![], true); } } @@ -199,6 +202,7 @@ impl Step for Rustdoc { cargo, args(builder.kind), &rustdoc_stamp(builder, compiler, target), + vec![], true); let libdir = builder.sysroot_libdir(compiler, target); diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs index 6ea32edfb2..8e5fe2520c 100644 --- a/src/bootstrap/compile.rs +++ b/src/bootstrap/compile.rs @@ -69,7 +69,7 @@ impl Step for Std { return; } - builder.ensure(StartupObjects { compiler, target }); + let mut target_deps = builder.ensure(StartupObjects { compiler, target }); let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target); if compiler_to_use != compiler { @@ -91,7 +91,7 @@ impl Step for Std { return; } - copy_third_party_objects(builder, &compiler, target); + target_deps.extend(copy_third_party_objects(builder, &compiler, target).into_iter()); let mut cargo = builder.cargo(compiler, Mode::Std, target, "build"); std_cargo(builder, &compiler, target, &mut cargo); @@ -102,6 +102,7 @@ impl Step for Std { cargo, vec![], &libstd_stamp(builder, compiler, target), + target_deps, false); builder.ensure(StdLink { @@ -113,9 +114,22 @@ impl Step for Std { } /// Copies third pary objects needed by various targets. 
-fn copy_third_party_objects(builder: &Builder<'_>, compiler: &Compiler, target: Interned) { +fn copy_third_party_objects(builder: &Builder<'_>, compiler: &Compiler, target: Interned) + -> Vec +{ let libdir = builder.sysroot_libdir(*compiler, target); + let mut target_deps = vec![]; + + let mut copy_and_stamp = |sourcedir: &Path, name: &str| { + let target = libdir.join(name); + builder.copy( + &sourcedir.join(name), + &target, + ); + target_deps.push(target); + }; + // Copies the crt(1,i,n).o startup objects // // Since musl supports fully static linking, we can cross link for it even @@ -123,19 +137,13 @@ fn copy_third_party_objects(builder: &Builder<'_>, compiler: &Compiler, target: // files. As those shipped with glibc won't work, copy the ones provided by // musl so we have them on linux-gnu hosts. if target.contains("musl") { + let srcdir = builder.musl_root(target).unwrap().join("lib"); for &obj in &["crt1.o", "crti.o", "crtn.o"] { - builder.copy( - &builder.musl_root(target).unwrap().join("lib").join(obj), - &libdir.join(obj), - ); + copy_and_stamp(&srcdir, obj); } } else if target.ends_with("-wasi") { - for &obj in &["crt1.o"] { - builder.copy( - &builder.wasi_root(target).unwrap().join("lib/wasm32-wasi").join(obj), - &libdir.join(obj), - ); - } + let srcdir = builder.wasi_root(target).unwrap().join("lib/wasm32-wasi"); + copy_and_stamp(&srcdir, "crt1.o"); } // Copies libunwind.a compiled to be linked wit x86_64-fortanix-unknown-sgx. @@ -145,11 +153,11 @@ fn copy_third_party_objects(builder: &Builder<'_>, compiler: &Compiler, target: // which is provided by std for this target. if target == "x86_64-fortanix-unknown-sgx" { let src_path_env = "X86_FORTANIX_SGX_LIBS"; - let obj = "libunwind.a"; let src = env::var(src_path_env).expect(&format!("{} not found in env", src_path_env)); - let src = Path::new(&src).join(obj); - builder.copy(&src, &libdir.join(obj)); + copy_and_stamp(Path::new(&src), "libunwind.a"); } + + target_deps } /// Configure cargo to compile the standard library, adding appropriate env vars @@ -210,7 +218,6 @@ pub fn std_cargo(builder: &Builder<'_>, // config.toml equivalent) is used let llvm_config = builder.ensure(native::Llvm { target: builder.config.build, - emscripten: false, }); cargo.env("LLVM_CONFIG", llvm_config); cargo.env("RUSTC_BUILD_SANITIZERS", "1"); @@ -307,7 +314,7 @@ pub struct StartupObjects { } impl Step for StartupObjects { - type Output = (); + type Output = Vec; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { run.path("src/rtstartup") @@ -326,13 +333,15 @@ impl Step for StartupObjects { /// They don't require any library support as they're just plain old object /// files, so we just use the nightly snapshot compiler to always build them (as /// no other compilers are guaranteed to be available). 
- fn run(self, builder: &Builder<'_>) { + fn run(self, builder: &Builder<'_>) -> Vec { let for_compiler = self.compiler; let target = self.target; if !target.contains("windows-gnu") { - return + return vec![] } + let mut target_deps = vec![]; + let src_dir = &builder.src.join("src/rtstartup"); let dst_dir = &builder.native_dir(target).join("rtstartup"); let sysroot_dir = &builder.sysroot_libdir(for_compiler, target); @@ -351,7 +360,9 @@ impl Step for StartupObjects { .arg(src_file)); } - builder.copy(dst_file, &sysroot_dir.join(file.to_string() + ".o")); + let target = sysroot_dir.join(file.to_string() + ".o"); + builder.copy(dst_file, &target); + target_deps.push(target); } for obj in ["crt2.o", "dllcrt2.o"].iter() { @@ -359,8 +370,12 @@ impl Step for StartupObjects { builder.cc(target), target, obj); - builder.copy(&src, &sysroot_dir.join(obj)); + let target = sysroot_dir.join(obj); + builder.copy(&src, &target); + target_deps.push(target); } + + target_deps } } @@ -438,6 +453,7 @@ impl Step for Rustc { cargo, vec![], &librustc_stamp(builder, compiler, target), + vec![], false); builder.ensure(RustcLink { @@ -586,7 +602,7 @@ impl Step for CodegenBackend { let tmp_stamp = out_dir.join(".tmp.stamp"); - let files = run_cargo(builder, cargo, vec![], &tmp_stamp, false); + let files = run_cargo(builder, cargo, vec![], &tmp_stamp, vec![], false); if builder.config.dry_run { return; } @@ -615,46 +631,37 @@ pub fn build_codegen_backend(builder: &Builder<'_>, compiler: &Compiler, target: Interned, backend: Interned) -> String { - let mut features = String::new(); - match &*backend { - "llvm" | "emscripten" => { + "llvm" => { // Build LLVM for our target. This will implicitly build the // host LLVM if necessary. let llvm_config = builder.ensure(native::Llvm { target, - emscripten: backend == "emscripten", }); - if backend == "emscripten" { - features.push_str(" emscripten"); - } - builder.info(&format!("Building stage{} codegen artifacts ({} -> {}, {})", compiler.stage, &compiler.host, target, backend)); // Pass down configuration from the LLVM build into the build of // librustc_llvm and librustc_codegen_llvm. - if builder.is_rust_llvm(target) && backend != "emscripten" { + if builder.is_rust_llvm(target) { cargo.env("LLVM_RUSTLLVM", "1"); } cargo.env("LLVM_CONFIG", &llvm_config); - if backend != "emscripten" { - let target_config = builder.config.target_config.get(&target); - if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) { - cargo.env("CFG_LLVM_ROOT", s); - } + let target_config = builder.config.target_config.get(&target); + if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) { + cargo.env("CFG_LLVM_ROOT", s); } // Some LLVM linker flags (-L and -l) may be needed to link librustc_llvm. 
if let Some(ref s) = builder.config.llvm_ldflags { cargo.env("LLVM_LINKER_FLAGS", s); } - // Building with a static libstdc++ is only supported on linux right now, + // Building with a static libstdc++ is only supported on linux and mingw right now, // not for MSVC or macOS if builder.config.llvm_static_stdcpp && !target.contains("freebsd") && - !target.contains("windows") && + !target.contains("msvc") && !target.contains("apple") { let file = compiler_file(builder, builder.cxx(target).unwrap(), @@ -662,9 +669,7 @@ pub fn build_codegen_backend(builder: &Builder<'_>, "libstdc++.a"); cargo.env("LLVM_STATIC_STDCPP", file); } - if builder.config.llvm_link_shared || - (builder.config.llvm_thin_lto && backend != "emscripten") - { + if builder.config.llvm_link_shared || builder.config.llvm_thin_lto { cargo.env("LLVM_LINK_SHARED", "1"); } if builder.config.llvm_use_libcxx { @@ -676,8 +681,7 @@ pub fn build_codegen_backend(builder: &Builder<'_>, } _ => panic!("unknown backend: {}", backend), } - - features + String::new() } /// Creates the `codegen-backends` folder for a compiler that's about to be @@ -954,6 +958,7 @@ pub fn run_cargo(builder: &Builder<'_>, cargo: Cargo, tail_args: Vec, stamp: &Path, + additional_target_deps: Vec, is_check: bool) -> Vec { @@ -1070,6 +1075,7 @@ pub fn run_cargo(builder: &Builder<'_>, deps.push((path_to_add.into(), false)); } + deps.extend(additional_target_deps.into_iter().map(|d| (d, false))); deps.sort(); let mut new_contents = Vec::new(); for (dep, proc_macro) in deps.iter() { diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs index 52b5cd888d..d1bdfa0a76 100644 --- a/src/bootstrap/config.rs +++ b/src/bootstrap/config.rs @@ -200,16 +200,15 @@ struct Build { target: Vec, cargo: Option, rustc: Option, - low_priority: Option, - compiler_docs: Option, docs: Option, + compiler_docs: Option, submodules: Option, fast_submodules: Option, gdb: Option, - locked_deps: Option, - vendor: Option, nodejs: Option, python: Option, + locked_deps: Option, + vendor: Option, full_bootstrap: Option, extended: Option, tools: Option>, @@ -217,6 +216,7 @@ struct Build { sanitizers: Option, profiler: Option, cargo_native_static: Option, + low_priority: Option, configure_args: Option>, local_rebuild: Option, print_step_timings: Option, @@ -228,11 +228,11 @@ struct Build { struct Install { prefix: Option, sysconfdir: Option, - datadir: Option, docdir: Option, bindir: Option, libdir: Option, mandir: Option, + datadir: Option, // standard paths, currently unused infodir: Option, @@ -243,14 +243,14 @@ struct Install { #[derive(Deserialize, Default)] #[serde(deny_unknown_fields, rename_all = "kebab-case")] struct Llvm { - ccache: Option, - ninja: Option, - assertions: Option, optimize: Option, thin_lto: Option, release_debuginfo: Option, + assertions: Option, + ccache: Option, version_check: Option, static_libstdcpp: Option, + ninja: Option, targets: Option, experimental_targets: Option, link_jobs: Option, @@ -293,6 +293,7 @@ impl Default for StringOrBool { #[serde(deny_unknown_fields, rename_all = "kebab-case")] struct Rust { optimize: Option, + debug: Option, codegen_units: Option, codegen_units_std: Option, debug_assertions: Option, @@ -301,25 +302,24 @@ struct Rust { debuginfo_level_std: Option, debuginfo_level_tools: Option, debuginfo_level_tests: Option, - parallel_compiler: Option, backtrace: Option, + incremental: Option, + parallel_compiler: Option, default_linker: Option, channel: Option, musl_root: Option, rpath: Option, + verbose_tests: Option, optimize_tests: Option, 
codegen_tests: Option, ignore_git: Option, - debug: Option, dist_src: Option, - verbose_tests: Option, - incremental: Option, save_toolstates: Option, codegen_backends: Option>, codegen_backends_dir: Option, lld: Option, - lldb: Option, llvm_tools: Option, + lldb: Option, deny_warnings: Option, backtrace_on_ice: Option, verify_llvm_ir: Option, @@ -333,13 +333,13 @@ struct Rust { #[derive(Deserialize, Default)] #[serde(deny_unknown_fields, rename_all = "kebab-case")] struct TomlTarget { - llvm_config: Option, - llvm_filecheck: Option, cc: Option, cxx: Option, ar: Option, ranlib: Option, linker: Option, + llvm_config: Option, + llvm_filecheck: Option, android_ndk: Option, crt_static: Option, musl_root: Option, @@ -668,7 +668,6 @@ impl Config { pub fn llvm_enabled(&self) -> bool { self.rust_codegen_backends.contains(&INTERNER.intern_str("llvm")) - || self.rust_codegen_backends.contains(&INTERNER.intern_str("emscripten")) } } diff --git a/src/bootstrap/configure.py b/src/bootstrap/configure.py index 346f0cb203..bb6041d7f3 100755 --- a/src/bootstrap/configure.py +++ b/src/bootstrap/configure.py @@ -55,7 +55,6 @@ o("sanitizers", "build.sanitizers", "build the sanitizer runtimes (asan, lsan, m o("dist-src", "rust.dist-src", "when building tarballs enables building a source tarball") o("cargo-native-static", "build.cargo-native-static", "static native libraries in cargo") o("profiler", "build.profiler", "build the profiler runtime") -o("emscripten", None, "compile the emscripten backend as well as LLVM") o("full-tools", None, "enable all tools") o("lld", "rust.lld", "build lld") o("lldb", "rust.lldb", "build lldb") @@ -134,6 +133,10 @@ v("musl-root-mips", "target.mips-unknown-linux-musl.musl-root", "mips-unknown-linux-musl install directory") v("musl-root-mipsel", "target.mipsel-unknown-linux-musl.musl-root", "mipsel-unknown-linux-musl install directory") +v("musl-root-mips64", "target.mips64-unknown-linux-muslabi64.musl-root", + "mips64-unknown-linux-muslabi64 install directory") +v("musl-root-mips64el", "target.mips64el-unknown-linux-muslabi64.musl-root", + "mips64el-unknown-linux-muslabi64 install directory") v("qemu-armhf-rootfs", "target.arm-unknown-linux-gnueabihf.qemu-rootfs", "rootfs in qemu testing, you probably don't want to use this") v("qemu-aarch64-rootfs", "target.aarch64-unknown-linux-gnu.qemu-rootfs", @@ -335,10 +338,8 @@ for key in known_args: set('build.host', value.split(',')) elif option.name == 'target': set('build.target', value.split(',')) - elif option.name == 'emscripten': - set('rust.codegen-backends', ['llvm', 'emscripten']) elif option.name == 'full-tools': - set('rust.codegen-backends', ['llvm', 'emscripten']) + set('rust.codegen-backends', ['llvm']) set('rust.lld', True) set('rust.llvm-tools', True) set('build.extended', True) diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs index d9dff77a30..67907bc8cb 100644 --- a/src/bootstrap/dist.rs +++ b/src/bootstrap/dist.rs @@ -236,7 +236,7 @@ fn make_win_dist( } let target_tools = ["gcc.exe", "ld.exe", "dlltool.exe", "libwinpthread-1.dll"]; - let mut rustc_dlls = vec!["libstdc++-6.dll", "libwinpthread-1.dll"]; + let mut rustc_dlls = vec!["libwinpthread-1.dll"]; if target_triple.starts_with("i686-") { rustc_dlls.push("libgcc_s_dw2-1.dll"); } else { @@ -637,6 +637,28 @@ impl Step for DebuggerScripts { } } +fn skip_host_target_lib(builder: &Builder<'_>, compiler: Compiler) -> bool { + // The only true set of target libraries came from the build triple, so + // let's reduce redundant work by only producing archives from 
that host. + if compiler.host != builder.config.build { + builder.info("\tskipping, not a build host"); + true + } else { + false + } +} + +/// Copy stamped files into an image's `target/lib` directory. +fn copy_target_libs(builder: &Builder<'_>, target: &str, image: &Path, stamp: &Path) { + let dst = image.join("lib/rustlib").join(target).join("lib"); + t!(fs::create_dir_all(&dst)); + for (path, host) in builder.read_stamp_file(stamp) { + if !host || builder.config.build == target { + builder.copy(&path, &dst.join(path.file_name().unwrap())); + } + } +} + #[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] pub struct Std { pub compiler: Compiler, @@ -667,44 +689,19 @@ impl Step for Std { let target = self.target; let name = pkgname(builder, "rust-std"); - - // The only true set of target libraries came from the build triple, so - // let's reduce redundant work by only producing archives from that host. - if compiler.host != builder.config.build { - builder.info("\tskipping, not a build host"); - return distdir(builder).join(format!("{}-{}.tar.gz", name, target)); + let archive = distdir(builder).join(format!("{}-{}.tar.gz", name, target)); + if skip_host_target_lib(builder, compiler) { + return archive; } - // We want to package up as many target libraries as possible - // for the `rust-std` package, so if this is a host target we - // depend on librustc and otherwise we just depend on libtest. - if builder.hosts.iter().any(|t| t == target) { - builder.ensure(compile::Rustc { compiler, target }); - } else { - builder.ensure(compile::Std { compiler, target }); - } + builder.ensure(compile::Std { compiler, target }); let image = tmpdir(builder).join(format!("{}-{}-image", name, target)); let _ = fs::remove_dir_all(&image); - let dst = image.join("lib/rustlib").join(target); - t!(fs::create_dir_all(&dst)); - let mut src = builder.sysroot_libdir(compiler, target).to_path_buf(); - src.pop(); // Remove the trailing /lib folder from the sysroot_libdir - builder.cp_filtered(&src, &dst, &|path| { - if let Some(name) = path.file_name().and_then(|s| s.to_str()) { - if name == builder.config.rust_codegen_backends_dir.as_str() { - return false - } - if name == "bin" { - return false - } - if name.contains("LLVM") { - return false - } - } - true - }); + let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target); + let stamp = compile::libstd_stamp(builder, compiler_to_use, target); + copy_target_libs(builder, &target, &image, &stamp); let mut cmd = rust_installer(builder); cmd.arg("generate") @@ -723,7 +720,73 @@ impl Step for Std { let _time = timeit(builder); builder.run(&mut cmd); builder.remove_dir(&image); - distdir(builder).join(format!("{}-{}.tar.gz", name, target)) + archive + } +} + +#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +pub struct RustcDev { + pub compiler: Compiler, + pub target: Interned, +} + +impl Step for RustcDev { + type Output = PathBuf; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("rustc-dev") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(RustcDev { + compiler: run.builder.compiler_for( + run.builder.top_stage, + run.builder.config.build, + run.target, + ), + target: run.target, + }); + } + + fn run(self, builder: &Builder<'_>) -> PathBuf { + let compiler = self.compiler; + let target = self.target; + + let name = pkgname(builder, "rustc-dev"); + let archive = distdir(builder).join(format!("{}-{}.tar.gz", name, 
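`copy_target_libs`, added just above, walks the entries recorded in a stamp file and copies the non-host ones into the image's `lib/rustlib/<target>/lib` directory. A rough standalone equivalent, assuming the stamp has already been parsed into `(path, is_host)` pairs rather than going through the bootstrap's `read_stamp_file`:

```rust
use std::fs;
use std::io;
use std::path::{Path, PathBuf};

/// Copy every dependency that belongs in the `rust-std` image for `target`.
/// Host-only artifacts are skipped unless we are dist'ing the build triple itself.
fn copy_target_libs(
    image: &Path,
    target: &str,
    build_triple: &str,
    deps: Vec<(PathBuf, bool)>, // (path, is_host) pairs read from the stamp file
) -> io::Result<()> {
    let dst = image.join("lib/rustlib").join(target).join("lib");
    fs::create_dir_all(&dst)?;
    for (path, host) in deps {
        if !host || build_triple == target {
            fs::copy(&path, dst.join(path.file_name().unwrap()))?;
        }
    }
    Ok(())
}
```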
target)); + if skip_host_target_lib(builder, compiler) { + return archive; + } + + builder.ensure(compile::Rustc { compiler, target }); + + let image = tmpdir(builder).join(format!("{}-{}-image", name, target)); + let _ = fs::remove_dir_all(&image); + + let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target); + let stamp = compile::librustc_stamp(builder, compiler_to_use, target); + copy_target_libs(builder, &target, &image, &stamp); + + let mut cmd = rust_installer(builder); + cmd.arg("generate") + .arg("--product-name=Rust") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=Rust-is-ready-to-develop.") + .arg("--image-dir").arg(&image) + .arg("--work-dir").arg(&tmpdir(builder)) + .arg("--output-dir").arg(&distdir(builder)) + .arg(format!("--package-name={}-{}", name, target)) + .arg(format!("--component-name=rustc-dev-{}", target)) + .arg("--legacy-manifest-dirs=rustlib,cargo"); + + builder.info(&format!("Dist rustc-dev stage{} ({} -> {})", + compiler.stage, &compiler.host, target)); + let _time = timeit(builder); + builder.run(&mut cmd); + builder.remove_dir(&image); + archive } } @@ -826,7 +889,6 @@ fn copy_src_dirs(builder: &Builder<'_>, src_dirs: &[&str], exclude_dirs: &[&str] const LLVM_TEST: &[&str] = &[ "llvm-project/llvm/test", "llvm-project\\llvm\\test", - "llvm-emscripten/test", "llvm-emscripten\\test", ]; if LLVM_TEST.iter().any(|path| spath.contains(path)) && (spath.ends_with(".ll") || @@ -834,9 +896,6 @@ fn copy_src_dirs(builder: &Builder<'_>, src_dirs: &[&str], exclude_dirs: &[&str] spath.ends_with(".s")) { return false } - if spath.contains("test/emscripten") || spath.contains("test\\emscripten") { - return false - } let full_path = Path::new(dir).join(path); if exclude_dirs.iter().any(|excl| full_path == Path::new(excl)) { diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs index 9203a558f6..39d7ea922b 100644 --- a/src/bootstrap/lib.rs +++ b/src/bootstrap/lib.rs @@ -160,7 +160,7 @@ mod job { } } -#[cfg(any(target_os = "haiku", not(any(unix, windows))))] +#[cfg(any(target_os = "haiku", target_os = "hermit", not(any(unix, windows))))] mod job { pub unsafe fn setup(_build: &mut crate::Build) { } @@ -232,7 +232,6 @@ pub struct Build { miri_info: channel::GitInfo, rustfmt_info: channel::GitInfo, in_tree_llvm_info: channel::GitInfo, - emscripten_llvm_info: channel::GitInfo, local_rebuild: bool, fail_fast: bool, doc_tests: DocTests, @@ -351,7 +350,6 @@ impl Build { // we always try to use git for LLVM builds let in_tree_llvm_info = channel::GitInfo::new(false, &src.join("src/llvm-project")); - let emscripten_llvm_info = channel::GitInfo::new(false, &src.join("src/llvm-emscripten")); let mut build = Build { initial_rustc: config.initial_rustc.clone(), @@ -376,7 +374,6 @@ impl Build { miri_info, rustfmt_info, in_tree_llvm_info, - emscripten_llvm_info, cc: HashMap::new(), cxx: HashMap::new(), ar: HashMap::new(), @@ -553,10 +550,6 @@ impl Build { self.out.join(&*target).join("llvm") } - fn emscripten_llvm_out(&self, target: Interned) -> PathBuf { - self.out.join(&*target).join("llvm-emscripten") - } - fn lld_out(&self, target: Interned) -> PathBuf { self.out.join(&*target).join("lld") } @@ -1087,6 +1080,10 @@ impl Build { /// done. The file is updated immediately after this function completes. 
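Like the other dist steps, the new `RustcDev` step follows the bootstrap `Step` shape: `should_run` names the path that triggers it, `make_run` resolves the compiler and target, and `run` returns the path of the generated tarball. The snippet below only illustrates that trait pattern with hypothetical, heavily simplified types; it is not the real bootstrap definition:

```rust
use std::path::PathBuf;

/// Toy stand-in for the bootstrap `Step` trait.
trait Step {
    type Output;
    fn run(self) -> Self::Output;
}

struct RustcDev {
    target: String,
}

impl Step for RustcDev {
    // As in the hunk above, running the step yields the tarball's path.
    type Output = PathBuf;

    fn run(self) -> PathBuf {
        // The real step compiles rustc, stages the stamped libraries into an
        // image directory, and invokes rust-installer; here we only compute
        // where the archive would end up.
        PathBuf::from(format!("build/dist/rustc-dev-{}.tar.gz", self.target))
    }
}
```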
pub fn save_toolstate(&self, tool: &str, state: ToolState) { if let Some(ref path) = self.config.save_toolstates { + if let Some(parent) = path.parent() { + // Ensure the parent directory always exists + t!(std::fs::create_dir_all(parent)); + } let mut file = t!(fs::OpenOptions::new() .create(true) .read(true) @@ -1126,7 +1123,7 @@ impl Build { } let mut paths = Vec::new(); - let contents = t!(fs::read(stamp)); + let contents = t!(fs::read(stamp), &stamp); // This is the method we use for extracting paths from the stamp file passed to us. See // run_cargo for more information (in compile.rs). for part in contents.split(|b| *b == 0) { @@ -1144,6 +1141,7 @@ impl Build { pub fn copy(&self, src: &Path, dst: &Path) { if self.config.dry_run { return; } self.verbose_than(1, &format!("Copy {:?} to {:?}", src, dst)); + if src == dst { return; } let _ = fs::remove_file(&dst); let metadata = t!(src.symlink_metadata()); if metadata.file_type().is_symlink() { diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs index 7bf9ea2688..2e89fd5398 100644 --- a/src/bootstrap/native.rs +++ b/src/bootstrap/native.rs @@ -28,7 +28,6 @@ use crate::GitRepo; #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] pub struct Llvm { pub target: Interned, - pub emscripten: bool, } impl Step for Llvm { @@ -40,46 +39,35 @@ impl Step for Llvm { run.path("src/llvm-project") .path("src/llvm-project/llvm") .path("src/llvm") - .path("src/llvm-emscripten") } fn make_run(run: RunConfig<'_>) { - let emscripten = run.path.ends_with("llvm-emscripten"); run.builder.ensure(Llvm { target: run.target, - emscripten, }); } /// Compile LLVM for `target`. fn run(self, builder: &Builder<'_>) -> PathBuf { let target = self.target; - let emscripten = self.emscripten; // If we're using a custom LLVM bail out here, but we can only use a // custom LLVM for the build triple. 
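Two small robustness fixes sit in this hunk: `save_toolstate` now creates the parent directory of the toolstates file before opening it, and `Build::copy` returns early when source and destination are the same path. A plain-`std::fs` sketch of the latter (the real helper additionally deals with symlinks, permissions, and dry runs):

```rust
use std::fs;
use std::io;
use std::path::Path;

/// Copy `src` over `dst`, doing nothing when both refer to the same path.
fn copy(src: &Path, dst: &Path) -> io::Result<()> {
    if src == dst {
        return Ok(());
    }
    // Remove any stale file first; ignore "not found" errors.
    let _ = fs::remove_file(dst);
    fs::copy(src, dst)?;
    Ok(())
}
```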
- if !self.emscripten { - if let Some(config) = builder.config.target_config.get(&target) { - if let Some(ref s) = config.llvm_config { - check_llvm_version(builder, s); - return s.to_path_buf() - } + if let Some(config) = builder.config.target_config.get(&target) { + if let Some(ref s) = config.llvm_config { + check_llvm_version(builder, s); + return s.to_path_buf() } } - let (llvm_info, root, out_dir, llvm_config_ret_dir) = if emscripten { - let info = &builder.emscripten_llvm_info; - let dir = builder.emscripten_llvm_out(target); - let config_dir = dir.join("bin"); - (info, "src/llvm-emscripten", dir, config_dir) - } else { - let info = &builder.in_tree_llvm_info; - let mut dir = builder.llvm_out(builder.config.build); - if !builder.config.build.contains("msvc") || builder.config.ninja { - dir.push("build"); - } - (info, "src/llvm-project/llvm", builder.llvm_out(target), dir.join("bin")) - }; + let llvm_info = &builder.in_tree_llvm_info; + let root = "src/llvm-project/llvm"; + let out_dir = builder.llvm_out(target); + let mut llvm_config_ret_dir = builder.llvm_out(builder.config.build); + if !builder.config.build.contains("msvc") || builder.config.ninja { + llvm_config_ret_dir.push("build"); + } + llvm_config_ret_dir.push("bin"); let build_llvm_config = llvm_config_ret_dir .join(exe("llvm-config", &*builder.config.build)); @@ -107,8 +95,7 @@ impl Step for Llvm { } } - let descriptor = if emscripten { "Emscripten " } else { "" }; - builder.info(&format!("Building {}LLVM for {}", descriptor, target)); + builder.info(&format!("Building LLVM for {}", target)); let _time = util::timeit(&builder); t!(fs::create_dir_all(&out_dir)); @@ -123,23 +110,15 @@ impl Step for Llvm { // NOTE: remember to also update `config.toml.example` when changing the // defaults! - let llvm_targets = if self.emscripten { - "JSBackend" - } else { - match builder.config.llvm_targets { - Some(ref s) => s, - None => "AArch64;ARM;Hexagon;MSP430;Mips;NVPTX;PowerPC;RISCV;\ - Sparc;SystemZ;WebAssembly;X86", - } + let llvm_targets = match &builder.config.llvm_targets { + Some(s) => s, + None => "AArch64;ARM;Hexagon;MSP430;Mips;NVPTX;PowerPC;RISCV;\ + Sparc;SystemZ;WebAssembly;X86", }; - let llvm_exp_targets = if self.emscripten { - "" - } else { - match builder.config.llvm_experimental_targets { - Some(ref s) => s, - None => "", - } + let llvm_exp_targets = match builder.config.llvm_experimental_targets { + Some(ref s) => s, + None => "", }; let assertions = if builder.config.llvm_assertions {"ON"} else {"OFF"}; @@ -157,40 +136,30 @@ impl Step for Llvm { .define("WITH_POLLY", "OFF") .define("LLVM_ENABLE_TERMINFO", "OFF") .define("LLVM_ENABLE_LIBEDIT", "OFF") + .define("LLVM_ENABLE_BINDINGS", "OFF") .define("LLVM_ENABLE_Z3_SOLVER", "OFF") .define("LLVM_PARALLEL_COMPILE_JOBS", builder.jobs().to_string()) .define("LLVM_TARGET_ARCH", target.split('-').next().unwrap()) .define("LLVM_DEFAULT_TARGET_TRIPLE", target); - if builder.config.llvm_thin_lto && !emscripten { + if builder.config.llvm_thin_lto { cfg.define("LLVM_ENABLE_LTO", "Thin"); if !target.contains("apple") { cfg.define("LLVM_ENABLE_LLD", "ON"); } } - // By default, LLVM will automatically find OCaml and, if it finds it, - // install the LLVM bindings in LLVM_OCAML_INSTALL_PATH, which defaults - // to /usr/bin/ocaml. - // This causes problem for non-root builds of Rust. Side-step the issue - // by setting LLVM_OCAML_INSTALL_PATH to a relative path, so it installs - // in the prefix. 
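With the Emscripten backend gone, choosing the LLVM target list collapses to a single match on the configured value. The helper below expresses the same logic with `Option::unwrap_or`, shown only to make the default explicit; the function name is illustrative:

```rust
/// LLVM backends to build: the `llvm-targets` value from config.toml when set,
/// otherwise the default list from the hunk above.
fn llvm_targets(configured: Option<&str>) -> &str {
    configured.unwrap_or(
        "AArch64;ARM;Hexagon;MSP430;Mips;NVPTX;PowerPC;RISCV;\
         Sparc;SystemZ;WebAssembly;X86",
    )
}
```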
- cfg.define("LLVM_OCAML_INSTALL_PATH", - env::var_os("LLVM_OCAML_INSTALL_PATH").unwrap_or_else(|| "usr/lib/ocaml".into())); - - let want_lldb = builder.config.lldb_enabled && !self.emscripten; - // This setting makes the LLVM tools link to the dynamic LLVM library, // which saves both memory during parallel links and overall disk space // for the tools. We don't do this on every platform as it doesn't work // equally well everywhere. - if builder.llvm_link_tools_dynamically(target) && !emscripten { + if builder.llvm_link_tools_dynamically(target) { cfg.define("LLVM_LINK_LLVM_DYLIB", "ON"); } // For distribution we want the LLVM tools to be *statically* linked to libstdc++ - if builder.config.llvm_tools_enabled || want_lldb { - if !target.contains("windows") { + if builder.config.llvm_tools_enabled || builder.config.lldb_enabled { + if !target.contains("msvc") { if target.contains("apple") { cfg.define("CMAKE_EXE_LINKER_FLAGS", "-static-libstdc++"); } else { @@ -217,7 +186,7 @@ impl Step for Llvm { enabled_llvm_projects.push("compiler-rt"); } - if want_lldb { + if builder.config.lldb_enabled { enabled_llvm_projects.push("clang"); enabled_llvm_projects.push("lldb"); // For the time being, disable code signing. @@ -242,10 +211,9 @@ impl Step for Llvm { } // http://llvm.org/docs/HowToCrossCompileLLVM.html - if target != builder.config.build && !emscripten { + if target != builder.config.build { builder.ensure(Llvm { target: builder.config.build, - emscripten: false, }); // FIXME: if the llvm root for the build triple is overridden then we // should use llvm-tblgen from there, also should verify that it @@ -427,7 +395,7 @@ fn configure_cmake(builder: &Builder<'_>, cfg.define("CMAKE_C_FLAGS", cflags); let mut cxxflags = builder.cflags(target, GitRepo::Llvm).join(" "); if builder.config.llvm_static_stdcpp && - !target.contains("windows") && + !target.contains("msvc") && !target.contains("netbsd") { cxxflags.push_str(" -static-libstdc++"); @@ -489,7 +457,6 @@ impl Step for Lld { let llvm_config = builder.ensure(Llvm { target: self.target, - emscripten: false, }); let out_dir = builder.lld_out(target); @@ -567,6 +534,10 @@ impl Step for TestHelpers { builder.info("Building test helpers"); t!(fs::create_dir_all(&dst)); let mut cfg = cc::Build::new(); + // FIXME: Workaround for https://github.com/emscripten-core/emscripten/issues/9013 + if target.contains("emscripten") { + cfg.pic(false); + } // We may have found various cross-compilers a little differently due to our // extra configuration, so inform gcc of these compilers. Note, though, that diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs index b7ce9c7b39..60f0dccdb0 100644 --- a/src/bootstrap/test.rs +++ b/src/bootstrap/test.rs @@ -386,8 +386,17 @@ impl Step for Miri { extra_features: Vec::new(), }); if let Some(miri) = miri { + let mut cargo = builder.cargo(compiler, Mode::ToolRustc, host, "install"); + cargo.arg("xargo"); + // Configure `cargo install` path. cargo adds a `bin/`. + cargo.env("CARGO_INSTALL_ROOT", &builder.out); + + let mut cargo = Command::from(cargo); + if !try_run(builder, &mut cargo) { + return; + } + // # Run `cargo miri setup`. - // As a side-effect, this will install xargo. let mut cargo = tool::prepare_tool_cargo( builder, compiler, @@ -412,9 +421,7 @@ impl Step for Miri { cargo.env("XARGO_RUST_SRC", builder.src.join("src")); // Debug things. cargo.env("RUST_BACKTRACE", "1"); - // Configure `cargo install` path, and let cargo-miri know that that's where - // xargo ends up. 
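The `TestHelpers` hunk above works around an Emscripten issue by disabling position-independent code through the `cc` crate. A hedged, build-script-style sketch of that configuration; the source file name is illustrative and only the workaround condition mirrors the diff:

```rust
// build.rs-style sketch using the `cc` crate.
fn main() {
    let target = std::env::var("TARGET").unwrap_or_default();
    let mut cfg = cc::Build::new();
    // Emscripten currently mis-handles PIC here, so turn it off for that target
    // (see the FIXME and linked issue in the hunk above).
    if target.contains("emscripten") {
        cfg.pic(false);
    }
    cfg.file("rust_test_helpers.c").compile("rust_test_helpers");
}
```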
- cargo.env("CARGO_INSTALL_ROOT", &builder.out); // cargo adds a `bin/` + // Let cargo-miri know where xargo ended up. cargo.env("XARGO", builder.out.join("bin").join("xargo")); let mut cargo = Command::from(cargo); @@ -427,7 +434,7 @@ impl Step for Miri { // (We do this separately from the above so that when the setup actually // happens we get some output.) // We re-use the `cargo` from above. - cargo.arg("--env"); + cargo.arg("--print-sysroot"); // FIXME: Is there a way in which we can re-use the usual `run` helpers? let miri_sysroot = if builder.config.dry_run { @@ -437,13 +444,11 @@ impl Step for Miri { let out = cargo.output() .expect("We already ran `cargo miri setup` before and that worked"); assert!(out.status.success(), "`cargo miri setup` returned with non-0 exit code"); - // Output is "MIRI_SYSROOT=\n". + // Output is "\n". let stdout = String::from_utf8(out.stdout) .expect("`cargo miri setup` stdout is not valid UTF-8"); - let stdout = stdout.trim(); - builder.verbose(&format!("`cargo miri setup --env` returned: {:?}", stdout)); - let sysroot = stdout.splitn(2, '=') - .nth(1).expect("`cargo miri setup` stdout did not contain '='"); + let sysroot = stdout.trim_end(); + builder.verbose(&format!("`cargo miri setup --print-sysroot` said: {:?}", sysroot)); sysroot.to_owned() }; @@ -1047,10 +1052,11 @@ impl Step for Compiletest { // Also provide `rust_test_helpers` for the host. builder.ensure(native::TestHelpers { target: compiler.host }); - // wasm32 can't build the test helpers - if !target.contains("wasm32") { + // As well as the target, except for plain wasm32, which can't build it + if !target.contains("wasm32") || target.contains("emscripten") { builder.ensure(native::TestHelpers { target }); } + builder.ensure(RemoteCopyLibs { compiler, target }); let mut cmd = builder.tool_cmd(Tool::Compiletest); @@ -1164,7 +1170,7 @@ impl Step for Compiletest { }).to_string() }) }; - let lldb_exe = if builder.config.lldb_enabled && !target.contains("emscripten") { + let lldb_exe = if builder.config.lldb_enabled { // Test against the lldb that was just built. builder.llvm_out(target).join("bin").join("lldb") } else { @@ -1233,7 +1239,6 @@ impl Step for Compiletest { if builder.config.llvm_enabled() { let llvm_config = builder.ensure(native::Llvm { target: builder.config.build, - emscripten: false, }); if !builder.config.dry_run { let llvm_version = output(Command::new(&llvm_config).arg("--version")); diff --git a/src/bootstrap/tool.rs b/src/bootstrap/tool.rs index f1baeafe26..815498047f 100644 --- a/src/bootstrap/tool.rs +++ b/src/bootstrap/tool.rs @@ -244,6 +244,7 @@ pub fn prepare_tool_cargo( path.ends_with("rls") || path.ends_with("clippy") || path.ends_with("miri") || + path.ends_with("rustbook") || path.ends_with("rustfmt") { cargo.env("LIBZ_SYS_STATIC", "1"); diff --git a/src/build_helper/lib.rs b/src/build_helper/lib.rs index f035a71191..bb94fb2b75 100644 --- a/src/build_helper/lib.rs +++ b/src/build_helper/lib.rs @@ -21,6 +21,13 @@ macro_rules! 
t { Err(e) => panic!("{} failed with {}", stringify!($e), e), } }; + // it can show extra info in the second parameter + ($e:expr, $extra:expr) => { + match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with {} ({:?})", stringify!($e), e, $extra), + } + }; } // Because Cargo adds the compiler's dylib path to our library search path, llvm-config may diff --git a/src/ci/azure-pipelines/auto.yml b/src/ci/azure-pipelines/auto.yml index e81b7ef598..271c325854 100644 --- a/src/ci/azure-pipelines/auto.yml +++ b/src/ci/azure-pipelines/auto.yml @@ -130,6 +130,8 @@ jobs: IMAGE: i686-gnu-nopt test-various: IMAGE: test-various + wasm32: + IMAGE: wasm32 x86_64-gnu: IMAGE: x86_64-gnu x86_64-gnu-full-bootstrap: @@ -138,6 +140,7 @@ jobs: IMAGE: x86_64-gnu-aux x86_64-gnu-tools: IMAGE: x86_64-gnu-tools + DEPLOY_TOOLSTATES_JSON: toolstates-linux.json x86_64-gnu-debug: IMAGE: x86_64-gnu-debug x86_64-gnu-nopt: @@ -260,8 +263,9 @@ jobs: # MSVC tools tests x86_64-msvc-tools: MSYS_BITS: 64 - SCRIPT: src/ci/docker/x86_64-gnu-tools/checktools.sh x.py /tmp/toolstates.json windows - RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --save-toolstates=/tmp/toolstates.json + SCRIPT: src/ci/docker/x86_64-gnu-tools/checktools.sh x.py /tmp/toolstate/toolstates.json windows + RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --save-toolstates=/tmp/toolstate/toolstates.json + DEPLOY_TOOLSTATES_JSON: toolstates-windows.json # 32/64-bit MinGW builds. # @@ -313,6 +317,7 @@ jobs: # 32/64 bit MSVC and GNU deployment dist-x86_64-msvc: + MSYS_BITS: 64 RUST_CONFIGURE_ARGS: > --build=x86_64-pc-windows-msvc --target=x86_64-pc-windows-msvc,aarch64-pc-windows-msvc @@ -322,6 +327,7 @@ jobs: DIST_REQUIRE_ALL_TOOLS: 1 DEPLOY: 1 dist-i686-msvc: + MSYS_BITS: 32 RUST_CONFIGURE_ARGS: > --build=i686-pc-windows-msvc --target=i586-pc-windows-msvc diff --git a/src/ci/azure-pipelines/pr.yml b/src/ci/azure-pipelines/pr.yml index 62e23efe1e..566e654fdb 100644 --- a/src/ci/azure-pipelines/pr.yml +++ b/src/ci/azure-pipelines/pr.yml @@ -22,14 +22,6 @@ jobs: IMAGE: x86_64-gnu-llvm-6.0 mingw-check: IMAGE: mingw-check - -- job: LinuxTools - timeoutInMinutes: 600 - pool: - vmImage: ubuntu-16.04 - steps: - - template: steps/run.yml - parameters: - only_on_updated_submodules: 'yes' - variables: - IMAGE: x86_64-gnu-tools + x86_64-gnu-tools: + IMAGE: x86_64-gnu-tools + CI_ONLY_WHEN_SUBMODULES_CHANGED: 1 diff --git a/src/ci/azure-pipelines/steps/install-clang.yml b/src/ci/azure-pipelines/steps/install-clang.yml deleted file mode 100644 index 14daf81b43..0000000000 --- a/src/ci/azure-pipelines/steps/install-clang.yml +++ /dev/null @@ -1,46 +0,0 @@ -steps: - -- bash: | - set -e - curl -f http://releases.llvm.org/7.0.0/clang+llvm-7.0.0-x86_64-apple-darwin.tar.xz | tar xJf - - - export CC=`pwd`/clang+llvm-7.0.0-x86_64-apple-darwin/bin/clang - echo "##vso[task.setvariable variable=CC]$CC" - - export CXX=`pwd`/clang+llvm-7.0.0-x86_64-apple-darwin/bin/clang++ - echo "##vso[task.setvariable variable=CXX]$CXX" - - # Configure `AR` specifically so rustbuild doesn't try to infer it as - # `clang-ar` by accident. - echo "##vso[task.setvariable variable=AR]ar" - displayName: Install clang (OSX) - condition: and(succeeded(), eq(variables['Agent.OS'], 'Darwin')) - -# If we're compiling for MSVC then we, like most other distribution builders, -# switch to clang as the compiler. This'll allow us eventually to enable LTO -# amongst LLVM and rustc. Note that we only do this on MSVC as I don't think -# clang has an output mode compatible with MinGW that we need. 
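The build_helper change at the start of this hunk gives the `t!` macro a second arm so failures can carry extra context, which is what makes the earlier `t!(fs::read(stamp), &stamp)` call possible. A standalone version of the two-arm macro with a small usage example:

```rust
macro_rules! t {
    ($e:expr) => {
        match $e {
            Ok(e) => e,
            Err(e) => panic!("{} failed with {}", stringify!($e), e),
        }
    };
    // Second arm: include extra context (e.g. the path involved) in the panic message.
    ($e:expr, $extra:expr) => {
        match $e {
            Ok(e) => e,
            Err(e) => panic!("{} failed with {} ({:?})", stringify!($e), e, $extra),
        }
    };
}

fn main() {
    let path = std::path::Path::new("Cargo.toml");
    // On failure this panics with both the failing expression and the path.
    let bytes = t!(std::fs::read(path), &path);
    println!("read {} bytes", bytes.len());
}
```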
If it does we -# should switch to clang for MinGW as well! -# -# Note that the LLVM installer is an NSIS installer -# -# Original downloaded here came from -# http://releases.llvm.org/7.0.0/LLVM-7.0.0-win64.exe -# That installer was run through `wine` on Linux and then the resulting -# installation directory (found in `$HOME/.wine/drive_c/Program Files/LLVM`) was -# packaged up into a tarball. We've had issues otherwise that the installer will -# randomly hang, provide not a lot of useful information, pollute global state, -# etc. In general the tarball is just more confined and easier to deal with when -# working with various CI environments. -- bash: | - set -e - mkdir -p citools - cd citools - curl -f https://rust-lang-ci-mirrors.s3-us-west-1.amazonaws.com/rustc/LLVM-7.0.0-win64.tar.gz | tar xzf - - echo "##vso[task.setvariable variable=RUST_CONFIGURE_ARGS]$RUST_CONFIGURE_ARGS --set llvm.clang-cl=`pwd`/clang-rust/bin/clang-cl.exe" - condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT'), eq(variables['MINGW_URL'],'')) - displayName: Install clang (Windows) - -# Note that we don't install clang on Linux since its compiler story is just so -# different. Each container has its own toolchain configured appropriately -# already. diff --git a/src/ci/azure-pipelines/steps/install-sccache.yml b/src/ci/azure-pipelines/steps/install-sccache.yml deleted file mode 100644 index d4679c1c67..0000000000 --- a/src/ci/azure-pipelines/steps/install-sccache.yml +++ /dev/null @@ -1,21 +0,0 @@ -steps: - -- bash: | - set -e - curl -fo /usr/local/bin/sccache https://rust-lang-ci-mirrors.s3-us-west-1.amazonaws.com/rustc/2018-04-02-sccache-x86_64-apple-darwin - chmod +x /usr/local/bin/sccache - displayName: Install sccache (OSX) - condition: and(succeeded(), eq(variables['Agent.OS'], 'Darwin')) - -- script: | - md sccache - powershell -Command "$ProgressPreference = 'SilentlyContinue'; iwr -outf sccache\sccache.exe https://rust-lang-ci-mirrors.s3-us-west-1.amazonaws.com/rustc/2018-04-26-sccache-x86_64-pc-windows-msvc" - echo ##vso[task.prependpath]%CD%\sccache - displayName: Install sccache (Windows) - condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT')) - -# Note that we don't install sccache on Linux since it's installed elsewhere -# through all the containers. -# -# FIXME: we should probably install sccache outside the containers and then -# mount it inside the containers so we can centralize all installation here. diff --git a/src/ci/azure-pipelines/steps/install-windows-build-deps.yml b/src/ci/azure-pipelines/steps/install-windows-build-deps.yml deleted file mode 100644 index bd4f1ed0ce..0000000000 --- a/src/ci/azure-pipelines/steps/install-windows-build-deps.yml +++ /dev/null @@ -1,120 +0,0 @@ -steps: -# We use the WIX toolset to create combined installers for Windows, and these -# binaries are downloaded from -# https://github.com/wixtoolset/wix3 originally -- bash: | - set -e - curl -O https://rust-lang-ci-mirrors.s3-us-west-1.amazonaws.com/rustc/wix311-binaries.zip - echo "##vso[task.setvariable variable=WIX]`pwd`/wix" - mkdir -p wix/bin - cd wix/bin - 7z x ../../wix311-binaries.zip - displayName: Install wix - condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT')) - -# We use InnoSetup and its `iscc` program to also create combined installers. -# Honestly at this point WIX above and `iscc` are just holdovers from -# oh-so-long-ago and are required for creating installers on Windows. 
I think -# one is MSI installers and one is EXE, but they're not used so frequently at -# this point anyway so perhaps it's a wash! -- script: | - echo ##vso[task.prependpath]C:\Program Files (x86)\Inno Setup 5 - curl.exe -o is-install.exe https://rust-lang-ci-mirrors.s3-us-west-1.amazonaws.com/rustc/2017-08-22-is.exe - is-install.exe /VERYSILENT /SUPPRESSMSGBOXES /NORESTART /SP- - displayName: Install InnoSetup - condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT')) - -# We've had issues with the default drive in use running out of space during a -# build, and it looks like the `C:` drive has more space than the default `D:` -# drive. We should probably confirm this with the azure pipelines team at some -# point, but this seems to fix our "disk space full" problems. -- script: | - mkdir c:\MORE_SPACE - mklink /J build c:\MORE_SPACE - displayName: "Ensure build happens on C:/ instead of D:/" - condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT')) - -- bash: git config --replace-all --global core.autocrlf false - displayName: "Disable git automatic line ending conversion (on C:/)" - -# Download and install MSYS2, needed primarily for the test suite (run-make) but -# also used by the MinGW toolchain for assembling things. -# -# FIXME: we should probe the default azure image and see if we can use the MSYS2 -# toolchain there. (if there's even one there). For now though this gets the job -# done. -- bash: | - set -e - choco install msys2 --params="/InstallDir:$(System.Workfolder)/msys2 /NoPath" -y --no-progress - echo "##vso[task.prependpath]$(System.Workfolder)/msys2/usr/bin" - mkdir -p "$(System.Workfolder)/msys2/home/$USERNAME" - displayName: Install msys2 - condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT')) - -- bash: pacman -S --noconfirm --needed base-devel ca-certificates make diffutils tar - displayName: Install msys2 base deps - condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT')) - -# If we need to download a custom MinGW, do so here and set the path -# appropriately. -# -# Here we also do a pretty heinous thing which is to mangle the MinGW -# installation we just downloaded. Currently, as of this writing, we're using -# MinGW-w64 builds of gcc, and that's currently at 6.3.0. We use 6.3.0 as it -# appears to be the first version which contains a fix for #40546, builds -# randomly failing during LLVM due to ar.exe/ranlib.exe failures. -# -# Unfortunately, though, 6.3.0 *also* is the first version of MinGW-w64 builds -# to contain a regression in gdb (#40184). As a result if we were to use the -# gdb provided (7.11.1) then we would fail all debuginfo tests. -# -# In order to fix spurious failures (pretty high priority) we use 6.3.0. To -# avoid disabling gdb tests we download an *old* version of gdb, specifically -# that found inside the 6.2.0 distribution. We then overwrite the 6.3.0 gdb -# with the 6.2.0 gdb to get tests passing. -# -# Note that we don't literally overwrite the gdb.exe binary because it appears -# to just use gdborig.exe, so that's the binary we deal with instead. 
-- bash: | - set -e - curl -o mingw.7z $MINGW_URL/$MINGW_ARCHIVE - 7z x -y mingw.7z > /dev/null - curl -o $MINGW_DIR/bin/gdborig.exe $MINGW_URL/2017-04-20-${MSYS_BITS}bit-gdborig.exe - echo "##vso[task.prependpath]`pwd`/$MINGW_DIR/bin" - condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT'), ne(variables['MINGW_URL'],'')) - displayName: Download custom MinGW - -# Otherwise install MinGW through `pacman` -- bash: | - set -e - arch=i686 - if [ "$MSYS_BITS" = "64" ]; then - arch=x86_64 - fi - pacman -S --noconfirm --needed mingw-w64-$arch-toolchain mingw-w64-$arch-cmake mingw-w64-$arch-gcc mingw-w64-$arch-python2 - echo "##vso[task.prependpath]$(System.Workfolder)/msys2/mingw$MSYS_BITS/bin" - condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT'), eq(variables['MINGW_URL'],'')) - displayName: Download standard MinGW - -# Make sure we use the native python interpreter instead of some msys equivalent -# one way or another. The msys interpreters seem to have weird path conversions -# baked in which break LLVM's build system one way or another, so let's use the -# native version which keeps everything as native as possible. -- bash: | - set -e - cp C:/Python27amd64/python.exe C:/Python27amd64/python2.7.exe - echo "##vso[task.prependpath]C:/Python27amd64" - displayName: Prefer the "native" Python as LLVM has trouble building with MSYS sometimes - condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT')) - -# Note that this is originally from the github releases patch of Ninja -- bash: | - set -e - mkdir ninja - curl -o ninja.zip https://rust-lang-ci-mirrors.s3-us-west-1.amazonaws.com/rustc/2017-03-15-ninja-win.zip - 7z x -oninja ninja.zip - rm ninja.zip - echo "##vso[task.setvariable variable=RUST_CONFIGURE_ARGS]$RUST_CONFIGURE_ARGS --enable-ninja" - echo "##vso[task.prependpath]`pwd`/ninja" - displayName: Download and install ninja - condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT')) diff --git a/src/ci/azure-pipelines/steps/run.yml b/src/ci/azure-pipelines/steps/run.yml index 15a2499e46..cef2d23560 100644 --- a/src/ci/azure-pipelines/steps/run.yml +++ b/src/ci/azure-pipelines/steps/run.yml @@ -6,11 +6,6 @@ # # Check travis config for `gdb --batch` command to print all crash logs -parameters: - # When this parameter is set to anything other than an empty string the tests - # will only be executed when the commit updates submodules - only_on_updated_submodules: '' - steps: # Disable automatic line ending conversion, which is enabled by default on @@ -26,21 +21,8 @@ steps: - checkout: self fetchDepth: 2 -# Set the SKIP_JOB environment variable if this job is supposed to only run -# when submodules are updated and they were not. The following time consuming -# tasks will be skipped when the environment variable is present. -- ${{ if parameters.only_on_updated_submodules }}: - - bash: | - set -e - # Submodules pseudo-files inside git have the 160000 permissions, so when - # those files are present in the diff a submodule was updated. - if git diff HEAD^ | grep "^index .* 160000" >/dev/null 2>&1; then - echo "Executing the job since submodules are updated" - else - echo "Not executing this job since no submodules were updated" - echo "##vso[task.setvariable variable=SKIP_JOB;]1" - fi - displayName: Decide whether to run this job +- bash: src/ci/scripts/should-skip-this.sh + displayName: Decide whether to run this job # Spawn a background process to collect CPU usage statistics which we'll upload # at the end of the build. 
See the comments in the script here for more @@ -48,86 +30,106 @@ steps: - bash: python src/ci/cpu-usage-over-time.py &> cpu-usage.csv & displayName: "Collect CPU-usage statistics in the background" -- bash: printenv | sort - displayName: Show environment variables +- bash: src/ci/scripts/dump-environment.sh + displayName: Show the current environment -- bash: | - set -e - df -h - du . | sort -nr | head -n100 - displayName: Show disk usage - # FIXME: this hasn't been tested, but maybe it works on Windows? Should test! - condition: and(succeeded(), ne(variables['Agent.OS'], 'Windows_NT')) +- bash: src/ci/scripts/install-sccache.sh + env: + AGENT_OS: $(Agent.OS) + displayName: Install sccache + condition: and(succeeded(), not(variables.SKIP_JOB)) -- template: install-sccache.yml -- template: install-clang.yml +- bash: src/ci/scripts/install-clang.sh + env: + AGENT_OS: $(Agent.OS) + displayName: Install clang + condition: and(succeeded(), not(variables.SKIP_JOB)) -# Switch to XCode 9.3 on OSX since it seems to be the last version that supports -# i686-apple-darwin. We'll eventually want to upgrade this and it will probably -# force us to drop i686-apple-darwin, but let's keep the wheels turning for now. -- bash: | - set -e - sudo xcode-select --switch /Applications/Xcode_9.3.app - displayName: Switch to Xcode 9.3 (OSX) - condition: and(succeeded(), eq(variables['Agent.OS'], 'Darwin')) +- bash: src/ci/scripts/switch-xcode.sh + env: + AGENT_OS: $(Agent.OS) + displayName: Switch to Xcode 9.3 + condition: and(succeeded(), not(variables.SKIP_JOB)) -- template: install-windows-build-deps.yml +- bash: src/ci/scripts/install-wix.sh + env: + AGENT_OS: $(Agent.OS) + displayName: Install wix + condition: and(succeeded(), not(variables.SKIP_JOB)) -# Looks like docker containers have IPv6 disabled by default, so let's turn it -# on since libstd tests require it -- bash: | - set -e - sudo mkdir -p /etc/docker - echo '{"ipv6":true,"fixed-cidr-v6":"fd9a:8454:6789:13f7::/64"}' | sudo tee /etc/docker/daemon.json - sudo service docker restart - displayName: Enable IPv6 - condition: and(succeeded(), not(variables.SKIP_JOB), eq(variables['Agent.OS'], 'Linux')) +- bash: src/ci/scripts/install-innosetup.sh + env: + AGENT_OS: $(Agent.OS) + displayName: Install InnoSetup + condition: and(succeeded(), not(variables.SKIP_JOB)) + +- bash: src/ci/scripts/windows-symlink-build-dir.sh + env: + AGENT_OS: $(Agent.OS) + displayName: Ensure the build happens on C:\ instead of D:\ + condition: and(succeeded(), not(variables.SKIP_JOB)) + +- bash: src/ci/scripts/disable-git-crlf-conversion.sh + displayName: "Disable git automatic line ending conversion (on C:/)" + condition: and(succeeded(), not(variables.SKIP_JOB)) + +- bash: src/ci/scripts/install-msys2.sh + env: + AGENT_OS: $(Agent.OS) + SYSTEM_WORKFOLDER: $(System.Workfolder) + displayName: Install msys2 + condition: and(succeeded(), not(variables.SKIP_JOB)) + +- bash: src/ci/scripts/install-msys2-packages.sh + env: + AGENT_OS: $(Agent.OS) + SYSTEM_WORKFOLDER: $(System.Workfolder) + displayName: Install msys2 packages + condition: and(succeeded(), not(variables.SKIP_JOB)) + +- bash: src/ci/scripts/install-mingw.sh + env: + AGENT_OS: $(Agent.OS) + SYSTEM_WORKFOLDER: $(System.Workfolder) + displayName: Install MinGW + condition: and(succeeded(), not(variables.SKIP_JOB)) + +- bash: src/ci/scripts/install-ninja.sh + env: + AGENT_OS: $(Agent.OS) + displayName: Install ninja + condition: and(succeeded(), not(variables.SKIP_JOB)) + +- bash: src/ci/scripts/enable-docker-ipv6.sh + 
env: + AGENT_OS: $(Agent.OS) + displayName: Enable IPv6 on Docker + condition: and(succeeded(), not(variables.SKIP_JOB)) # Disable automatic line ending conversion (again). On Windows, when we're # installing dependencies, something switches the git configuration directory or # re-enables autocrlf. We've not tracked down the exact cause -- and there may # be multiple -- but this should ensure submodules are checked out with the # appropriate line endings. -- bash: git config --replace-all --global core.autocrlf false - displayName: "Disable git automatic line ending conversion" +- bash: src/ci/scripts/disable-git-crlf-conversion.sh + displayName: Disable git automatic line ending conversion + condition: and(succeeded(), not(variables.SKIP_JOB)) -# Check out all our submodules, but more quickly than using git by using one of -# our custom scripts -- bash: | - set -e - mkdir -p $HOME/rustsrc - $BUILD_SOURCESDIRECTORY/src/ci/init_repo.sh . $HOME/rustsrc - condition: and(succeeded(), not(variables.SKIP_JOB), ne(variables['Agent.OS'], 'Windows_NT')) - displayName: Check out submodules (Unix) -- script: | - if not exist C:\cache\rustsrc\NUL mkdir C:\cache\rustsrc - sh src/ci/init_repo.sh . /c/cache/rustsrc - condition: and(succeeded(), not(variables.SKIP_JOB), eq(variables['Agent.OS'], 'Windows_NT')) - displayName: Check out submodules (Windows) +- bash: src/ci/scripts/checkout-submodules.sh + env: + AGENT_OS: $(Agent.OS) + displayName: Checkout submodules + condition: and(succeeded(), not(variables.SKIP_JOB)) -# See also the disable for autocrlf above, this just checks that it worked -# -# We check both in rust-lang/rust and in a submodule to make sure both are -# accurate. Submodules are checked out significantly later than the main -# repository in this script, so settings can (and do!) change between then. -# -# Linux (and maybe macOS) builders don't currently have dos2unix so just only -# run this step on Windows. -- bash: | - set -x - # print out the git configuration so we can better investigate failures in - # the following - git config --list --show-origin - dos2unix -ih Cargo.lock src/tools/rust-installer/install-template.sh - endings=$(dos2unix -ic Cargo.lock src/tools/rust-installer/install-template.sh) - # if endings has non-zero length, error out - if [ -n "$endings" ]; then exit 1 ; fi - condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT')) - displayName: Verify line endings are LF +- bash: src/ci/scripts/verify-line-endings.sh + env: + AGENT_OS: $(Agent.OS) + displayName: Verify line endings + condition: and(succeeded(), not(variables.SKIP_JOB)) # Ensure the `aws` CLI is installed so we can deploy later on, cache docker # images, etc. -- bash: src/ci/install-awscli.sh +- bash: src/ci/scripts/install-awscli.sh env: AGENT_OS: $(Agent.OS) condition: and(succeeded(), not(variables.SKIP_JOB)) @@ -181,37 +183,21 @@ steps: condition: and(succeeded(), not(variables.SKIP_JOB)) displayName: Run build -# If we're a deploy builder, use the `aws` command to publish everything to our -# bucket. 
-- bash: | - set -e - source src/ci/shared.sh - if [ "$AGENT_OS" = "Linux" ]; then - rm -rf obj/build/dist/doc - upload_dir=obj/build/dist - else - rm -rf build/dist/doc - upload_dir=build/dist - fi - ls -la $upload_dir - deploy_dir=rustc-builds - if [ "$DEPLOY_ALT" == "1" ]; then - deploy_dir=rustc-builds-alt - fi - retry aws s3 cp --no-progress --recursive --acl public-read ./$upload_dir s3://$DEPLOY_BUCKET/$deploy_dir/$BUILD_SOURCEVERSION +- bash: src/ci/scripts/upload-artifacts.sh env: AWS_ACCESS_KEY_ID: $(UPLOAD_AWS_ACCESS_KEY_ID) AWS_SECRET_ACCESS_KEY: $(UPLOAD_AWS_SECRET_ACCESS_KEY) - condition: and(succeeded(), not(variables.SKIP_JOB), or(eq(variables.DEPLOY, '1'), eq(variables.DEPLOY_ALT, '1'))) displayName: Upload artifacts - -# Upload CPU usage statistics that we've been gathering this whole time. Always -# execute this step in case we want to inspect failed builds, but don't let -# errors here ever fail the build since this is just informational. -- bash: aws s3 cp --acl public-read cpu-usage.csv s3://$DEPLOY_BUCKET/rustc-builds/$BUILD_SOURCEVERSION/cpu-$CI_JOB_NAME.csv - env: - AWS_ACCESS_KEY_ID: $(UPLOAD_AWS_ACCESS_KEY_ID) - AWS_SECRET_ACCESS_KEY: $(UPLOAD_AWS_SECRET_ACCESS_KEY) - condition: variables['UPLOAD_AWS_SECRET_ACCESS_KEY'] - continueOnError: true - displayName: Upload CPU usage statistics + # Adding a condition on DEPLOY=1 or DEPLOY_ALT=1 is not needed as all deploy + # builders *should* have the AWS credentials available. Still, explicitly + # adding the condition is helpful as this way CI will not silently skip + # deploying artifacts from a dist builder if the variables are misconfigured, + # erroring about invalid credentials instead. + condition: | + and( + succeeded(), not(variables.SKIP_JOB), + or( + variables.UPLOAD_AWS_SECRET_ACCESS_KEY, + eq(variables.DEPLOY, '1'), eq(variables.DEPLOY_ALT, '1') + ) + ) diff --git a/src/ci/docker/asmjs/Dockerfile b/src/ci/docker/asmjs/Dockerfile deleted file mode 100644 index 3abaab6b34..0000000000 --- a/src/ci/docker/asmjs/Dockerfile +++ /dev/null @@ -1,47 +0,0 @@ -FROM ubuntu:16.04 - -RUN apt-get update && apt-get install -y --no-install-recommends \ - g++ \ - make \ - file \ - curl \ - ca-certificates \ - python \ - git \ - cmake \ - sudo \ - gdb \ - xz-utils - -COPY scripts/emscripten.sh /scripts/ -RUN bash /scripts/emscripten.sh - -COPY scripts/sccache.sh /scripts/ -RUN sh /scripts/sccache.sh - -ENV PATH=$PATH:/emsdk-portable -ENV PATH=$PATH:/emsdk-portable/clang/e1.38.15_64bit/ -ENV PATH=$PATH:/emsdk-portable/emscripten/1.38.15/ -ENV PATH=$PATH:/emsdk-portable/node/8.9.1_64bit/bin/ -ENV EMSCRIPTEN=/emsdk-portable/emscripten/1.38.15/ -ENV BINARYEN_ROOT=/emsdk-portable/clang/e1.38.15_64bit/binaryen/ -ENV EM_CONFIG=/emsdk-portable/.emscripten - -ENV TARGETS=asmjs-unknown-emscripten - -ENV RUST_CONFIGURE_ARGS --enable-emscripten --disable-optimize-tests - -ENV SCRIPT python2.7 ../x.py test --target $TARGETS \ - src/test/ui \ - src/test/run-fail \ - src/libstd \ - src/liballoc \ - src/libcore - -# Debug assertions in rustc are largely covered by other builders, and LLVM -# assertions cause this builder to slow down by quite a large amount and don't -# buy us a huge amount over other builders (not sure if we've ever seen an -# asmjs-specific backend assertion trip), so disable assertions for these -# tests. 
-ENV NO_LLVM_ASSERTIONS=1 -ENV NO_DEBUG_ASSERTIONS=1 diff --git a/src/ci/docker/disabled/asmjs/Dockerfile b/src/ci/docker/disabled/asmjs/Dockerfile new file mode 100644 index 0000000000..e27a2a529a --- /dev/null +++ b/src/ci/docker/disabled/asmjs/Dockerfile @@ -0,0 +1,41 @@ +FROM ubuntu:16.04 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + g++ \ + make \ + file \ + curl \ + ca-certificates \ + python \ + git \ + cmake \ + sudo \ + gdb \ + xz-utils \ + bzip2 + +COPY scripts/emscripten.sh /scripts/ +RUN bash /scripts/emscripten.sh + +COPY scripts/sccache.sh /scripts/ +RUN sh /scripts/sccache.sh + +ENV PATH=$PATH:/emsdk-portable +ENV PATH=$PATH:/emsdk-portable/upstream/emscripten/ +ENV PATH=$PATH:/emsdk-portable/node/12.9.1_64bit/bin/ +ENV BINARYEN_ROOT=/emsdk-portable/upstream/ + +ENV TARGETS=asmjs-unknown-emscripten + +# Use -O1 optimizations in the link step to reduce time spent optimizing JS. +ENV EMCC_CFLAGS=-O1 + +# Emscripten installation is user-specific +ENV NO_CHANGE_USER=1 + +ENV SCRIPT python2.7 ../x.py test --target $TARGETS + +# This is almost identical to the wasm32-unknown-emscripten target, so +# running with assertions again is not useful +ENV NO_DEBUG_ASSERTIONS=1 +ENV NO_LLVM_ASSERTIONS=1 diff --git a/src/ci/docker/disabled/wasm32-exp/Dockerfile b/src/ci/docker/disabled/wasm32-exp/Dockerfile deleted file mode 100644 index 420d47b314..0000000000 --- a/src/ci/docker/disabled/wasm32-exp/Dockerfile +++ /dev/null @@ -1,35 +0,0 @@ -FROM ubuntu:16.04 - -RUN apt-get update && apt-get install -y --no-install-recommends \ - g++ \ - make \ - file \ - curl \ - ca-certificates \ - python \ - git \ - cmake \ - sudo \ - gdb \ - xz-utils \ - jq \ - bzip2 - -# emscripten -COPY scripts/emscripten-wasm.sh /scripts/ -COPY wasm32-exp/node.sh /usr/local/bin/node -RUN bash /scripts/emscripten-wasm.sh - -# cache -COPY scripts/sccache.sh /scripts/ -RUN sh /scripts/sccache.sh - -# env -ENV PATH=/wasm-install/emscripten:/wasm-install/bin:$PATH -ENV EM_CONFIG=/root/.emscripten - -ENV TARGETS=wasm32-experimental-emscripten - -ENV RUST_CONFIGURE_ARGS --experimental-targets=WebAssembly - -ENV SCRIPT python2.7 ../x.py test --target $TARGETS diff --git a/src/ci/docker/disabled/wasm32-exp/node.sh b/src/ci/docker/disabled/wasm32-exp/node.sh deleted file mode 100755 index aa938971c7..0000000000 --- a/src/ci/docker/disabled/wasm32-exp/node.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/env bash - -path="$(dirname $1)" -file="$(basename $1)" - -shift - -cd "$path" -exec /node-v8.0.0-linux-x64/bin/node "$file" "$@" diff --git a/src/ci/docker/disabled/wasm32/Dockerfile b/src/ci/docker/disabled/wasm32/Dockerfile deleted file mode 100644 index 0d2bd39303..0000000000 --- a/src/ci/docker/disabled/wasm32/Dockerfile +++ /dev/null @@ -1,32 +0,0 @@ -FROM ubuntu:16.04 - -RUN apt-get update && apt-get install -y --no-install-recommends \ - g++ \ - make \ - file \ - curl \ - ca-certificates \ - python \ - git \ - cmake \ - sudo \ - gdb \ - xz-utils - -# emscripten -COPY scripts/emscripten.sh /scripts/ -RUN bash /scripts/emscripten.sh - -COPY scripts/sccache.sh /scripts/ -RUN sh /scripts/sccache.sh - -ENV PATH=$PATH:/emsdk-portable -ENV PATH=$PATH:/emsdk-portable/clang/e1.38.15_64bit/ -ENV PATH=$PATH:/emsdk-portable/emscripten/1.38.15/ -ENV PATH=$PATH:/emsdk-portable/node/8.9.1_64bit/bin/ -ENV EMSCRIPTEN=/emsdk-portable/emscripten/1.38.15/ -ENV BINARYEN_ROOT=/emsdk-portable/clang/e1.38.15_64bit/binaryen/ -ENV EM_CONFIG=/emsdk-portable/.emscripten - -ENV TARGETS=wasm32-unknown-emscripten -ENV SCRIPT 
python2.7 ../x.py test --target $TARGETS diff --git a/src/ci/docker/dist-various-1/Dockerfile b/src/ci/docker/dist-various-1/Dockerfile index 1057911946..816bdb34a7 100644 --- a/src/ci/docker/dist-various-1/Dockerfile +++ b/src/ci/docker/dist-various-1/Dockerfile @@ -15,6 +15,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ g++-arm-linux-gnueabi \ g++-arm-linux-gnueabihf \ g++-aarch64-linux-gnu \ + g++-mips64-linux-gnuabi64 \ + g++-mips64el-linux-gnuabi64 \ gcc-sparc64-linux-gnu \ libc6-dev-sparc64-cross \ bzip2 \ @@ -77,6 +79,14 @@ RUN env \ CC=mipsel-openwrt-linux-gcc \ CXX=mipsel-openwrt-linux-g++ \ bash musl.sh mipsel && \ + env \ + CC=mips64-linux-gnuabi64-gcc \ + CXX=mips64-linux-gnuabi64-g++ \ + bash musl.sh mips64 && \ + env \ + CC=mips64el-linux-gnuabi64-gcc \ + CXX=mips64el-linux-gnuabi64-g++ \ + bash musl.sh mips64el && \ rm -rf /build/* # FIXME(mozilla/sccache#235) this shouldn't be necessary but is currently @@ -97,6 +107,8 @@ ENV TARGETS=$TARGETS,wasm32-unknown-emscripten ENV TARGETS=$TARGETS,x86_64-rumprun-netbsd ENV TARGETS=$TARGETS,mips-unknown-linux-musl ENV TARGETS=$TARGETS,mipsel-unknown-linux-musl +ENV TARGETS=$TARGETS,mips64-unknown-linux-muslabi64 +ENV TARGETS=$TARGETS,mips64el-unknown-linux-muslabi64 ENV TARGETS=$TARGETS,arm-unknown-linux-musleabi ENV TARGETS=$TARGETS,arm-unknown-linux-musleabihf ENV TARGETS=$TARGETS,armv5te-unknown-linux-gnueabi @@ -125,6 +137,8 @@ ENV TARGETS=$TARGETS,thumbv7neon-unknown-linux-gnueabihf ENV CC_mipsel_unknown_linux_musl=mipsel-openwrt-linux-gcc \ CC_mips_unknown_linux_musl=mips-openwrt-linux-gcc \ + CC_mips64el_unknown_linux_muslabi64=mips64el-linux-gnuabi64-gcc \ + CC_mips64_unknown_linux_muslabi64=mips64-linux-gnuabi64-gcc \ CC_sparc64_unknown_linux_gnu=sparc64-linux-gnu-gcc \ CC_x86_64_unknown_redox=x86_64-unknown-redox-gcc \ CC_thumbv7neon_unknown_linux_gnueabihf=arm-linux-gnueabihf-gcc \ @@ -139,7 +153,8 @@ ENV RUST_CONFIGURE_ARGS \ --musl-root-aarch64=/musl-aarch64 \ --musl-root-mips=/musl-mips \ --musl-root-mipsel=/musl-mipsel \ - --enable-emscripten \ + --musl-root-mips64=/musl-mips64 \ + --musl-root-mips64el=/musl-mips64el \ --disable-docs ENV SCRIPT \ diff --git a/src/ci/docker/run.sh b/src/ci/docker/run.sh index 415d6b63eb..cdafcbadc9 100755 --- a/src/ci/docker/run.sh +++ b/src/ci/docker/run.sh @@ -106,6 +106,7 @@ fi mkdir -p $HOME/.cargo mkdir -p $objdir/tmp mkdir -p $objdir/cores +mkdir -p /tmp/toolstate args= if [ "$SCCACHE_BUCKET" != "" ]; then @@ -156,6 +157,7 @@ else args="$args --volume $objdir:/checkout/obj" args="$args --volume $HOME/.cargo:/cargo" args="$args --volume $HOME/rustsrc:$HOME/rustsrc" + args="$args --volume /tmp/toolstate:/tmp/toolstate" args="$args --env LOCAL_USER_ID=`id -u`" fi diff --git a/src/ci/docker/scripts/emscripten-wasm.sh b/src/ci/docker/scripts/emscripten-wasm.sh deleted file mode 100644 index e4a93d7a10..0000000000 --- a/src/ci/docker/scripts/emscripten-wasm.sh +++ /dev/null @@ -1,37 +0,0 @@ -set -ex - -hide_output() { - set +x - on_err=" -echo ERROR: An error was encountered with the build. -cat /tmp/build.log -exit 1 -" - trap "$on_err" ERR - bash -c "while true; do sleep 30; echo \$(date) - building ...; done" & - PING_LOOP_PID=$! 
- $@ &> /tmp/build.log - trap - ERR - kill $PING_LOOP_PID - rm -f /tmp/build.log - set -x -} - -# Download last known good emscripten from WebAssembly waterfall -BUILD=$(curl -fL https://storage.googleapis.com/wasm-llvm/builds/linux/lkgr.json | \ - jq '.build | tonumber') -curl -sL https://storage.googleapis.com/wasm-llvm/builds/linux/$BUILD/wasm-binaries.tbz2 | \ - hide_output tar xvkj - -# node 8 is required to run wasm -cd / -curl -sL https://nodejs.org/dist/v8.0.0/node-v8.0.0-linux-x64.tar.xz | \ - tar -xJ - -# Make emscripten use wasm-ready node and LLVM tools -echo "EMSCRIPTEN_ROOT = '/wasm-install/emscripten'" >> /root/.emscripten -echo "NODE_JS='/usr/local/bin/node'" >> /root/.emscripten -echo "LLVM_ROOT='/wasm-install/bin'" >> /root/.emscripten -echo "BINARYEN_ROOT = '/wasm-install'" >> /root/.emscripten -echo "COMPILER_ENGINE = NODE_JS" >> /root/.emscripten -echo "JS_ENGINES = [NODE_JS]" >> /root/.emscripten diff --git a/src/ci/docker/scripts/emscripten.sh b/src/ci/docker/scripts/emscripten.sh index 47196e8939..1be8074159 100644 --- a/src/ci/docker/scripts/emscripten.sh +++ b/src/ci/docker/scripts/emscripten.sh @@ -17,22 +17,7 @@ exit 1 set -x } -cd / -curl -fL https://mozilla-games.s3.amazonaws.com/emscripten/releases/emsdk-portable.tar.gz | \ - tar -xz - +git clone https://github.com/emscripten-core/emsdk.git /emsdk-portable cd /emsdk-portable -./emsdk update -hide_output ./emsdk install sdk-1.38.15-64bit -./emsdk activate sdk-1.38.15-64bit - -# Compile and cache libc -source ./emsdk_env.sh -echo "main(){}" > a.c -HOME=/emsdk-portable/ emcc a.c -HOME=/emsdk-portable/ emcc -s BINARYEN=1 a.c -rm -f a.* - -# Make emsdk usable by any user -cp /root/.emscripten /emsdk-portable -chmod a+rxw -R /emsdk-portable +hide_output ./emsdk install 1.38.46-upstream +./emsdk activate 1.38.46-upstream diff --git a/src/ci/docker/wasm32/Dockerfile b/src/ci/docker/wasm32/Dockerfile new file mode 100644 index 0000000000..a0f35afd99 --- /dev/null +++ b/src/ci/docker/wasm32/Dockerfile @@ -0,0 +1,44 @@ +FROM ubuntu:16.04 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + g++ \ + make \ + file \ + curl \ + ca-certificates \ + python \ + git \ + cmake \ + sudo \ + gdb \ + xz-utils \ + bzip2 + +COPY scripts/emscripten.sh /scripts/ +RUN bash /scripts/emscripten.sh + +COPY scripts/sccache.sh /scripts/ +RUN sh /scripts/sccache.sh + +ENV PATH=$PATH:/emsdk-portable +ENV PATH=$PATH:/emsdk-portable/upstream/emscripten/ +ENV PATH=$PATH:/emsdk-portable/node/12.9.1_64bit/bin/ +ENV BINARYEN_ROOT=/emsdk-portable/upstream/ + +ENV TARGETS=wasm32-unknown-emscripten + +# Use -O1 optimizations in the link step to reduce time spent optimizing. 
+ENV EMCC_CFLAGS=-O1 + +# Emscripten installation is user-specific +ENV NO_CHANGE_USER=1 + +# FIXME: Re-enable these tests once https://github.com/rust-lang/cargo/pull/7476 +# is picked up by CI +ENV SCRIPT python2.7 ../x.py test --target $TARGETS \ + --exclude src/libcore \ + --exclude src/liballoc \ + --exclude src/libproc_macro \ + --exclude src/libstd \ + --exclude src/libterm \ + --exclude src/libtest diff --git a/src/ci/docker/x86_64-gnu-tools/Dockerfile b/src/ci/docker/x86_64-gnu-tools/Dockerfile index 8035195c6e..7687a6ca23 100644 --- a/src/ci/docker/x86_64-gnu-tools/Dockerfile +++ b/src/ci/docker/x86_64-gnu-tools/Dockerfile @@ -26,5 +26,5 @@ ENV CHECK_LINKS 1 ENV RUST_CONFIGURE_ARGS \ --build=x86_64-unknown-linux-gnu \ - --save-toolstates=/tmp/toolstates.json -ENV SCRIPT /tmp/checktools.sh ../x.py /tmp/toolstates.json linux + --save-toolstates=/tmp/toolstate/toolstates.json +ENV SCRIPT /tmp/checktools.sh ../x.py /tmp/toolstate/toolstates.json linux diff --git a/src/ci/docker/x86_64-gnu-tools/checktools.sh b/src/ci/docker/x86_64-gnu-tools/checktools.sh index 4243effdf9..ebb8c0bda5 100755 --- a/src/ci/docker/x86_64-gnu-tools/checktools.sh +++ b/src/ci/docker/x86_64-gnu-tools/checktools.sh @@ -3,7 +3,7 @@ set -eu X_PY="$1" -TOOLSTATE_FILE="$(realpath $2)" +TOOLSTATE_FILE="$(realpath -m $2)" OS="$3" COMMIT="$(git rev-parse HEAD)" CHANGED_FILES="$(git diff --name-status HEAD HEAD^)" @@ -13,6 +13,7 @@ SIX_WEEK_CYCLE="$(( ($(date +%s) / 86400 - 20) % 42 ))" # The Wednesday after this has value 0. # We track this value to prevent regressing tools in the last week of the 6-week cycle. +mkdir -p "$(dirname $TOOLSTATE_FILE)" touch "$TOOLSTATE_FILE" # Try to test all the tools and store the build/test success in the TOOLSTATE_FILE diff --git a/src/ci/init_repo.sh b/src/ci/init_repo.sh index c7c3b0a5fb..92c6e546a3 100755 --- a/src/ci/init_repo.sh +++ b/src/ci/init_repo.sh @@ -47,7 +47,7 @@ function fetch_github_commit_archive { rm $cached } -included="src/llvm-project src/llvm-emscripten src/doc/book src/doc/rust-by-example" +included="src/llvm-project src/doc/book src/doc/rust-by-example" modules="$(git config --file .gitmodules --get-regexp '\.path$' | cut -d' ' -f2)" modules=($modules) use_git="" diff --git a/src/ci/run.sh b/src/ci/run.sh index c3f8e4c270..7c5df63ec0 100755 --- a/src/ci/run.sh +++ b/src/ci/run.sh @@ -55,6 +55,9 @@ if [ "$DEPLOY$DEPLOY_ALT" = "1" ]; then if [ "$NO_LLVM_ASSERTIONS" = "1" ]; then RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --disable-llvm-assertions" elif [ "$DEPLOY_ALT" != "" ]; then + if [ "$NO_PARALLEL_COMPILER" = "" ]; then + RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set rust.parallel-compiler" + fi RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-llvm-assertions" RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set rust.verify-llvm-ir" fi @@ -114,7 +117,7 @@ make check-bootstrap # Display the CPU and memory information. This helps us know why the CI timing # is fluctuating. 
-if isOSX; then +if isMacOS; then system_profiler SPHardwareDataType || true sysctl hw || true ncpus=$(sysctl -n hw.ncpu) diff --git a/src/ci/scripts/checkout-submodules.sh b/src/ci/scripts/checkout-submodules.sh new file mode 100755 index 0000000000..0b44ea3c90 --- /dev/null +++ b/src/ci/scripts/checkout-submodules.sh @@ -0,0 +1,17 @@ +#!/bin/bash +# Check out all our submodules, but more quickly than using git by using one of +# our custom scripts + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isWindows; then + path="/c/cache/rustsrc" +else + path="${HOME}/rustsrc" +fi + +mkdir -p "${path}" +"$(cd "$(dirname "$0")" && pwd)/../init_repo.sh" . "${path}" diff --git a/src/ci/scripts/disable-git-crlf-conversion.sh b/src/ci/scripts/disable-git-crlf-conversion.sh new file mode 100755 index 0000000000..836145fbb8 --- /dev/null +++ b/src/ci/scripts/disable-git-crlf-conversion.sh @@ -0,0 +1,13 @@ +#!/bin/bash +# Disable automatic line ending conversion, which is enabled by default on +# Azure's Windows image. Having the conversion enabled caused regressions both +# in our test suite (it broke miri tests) and in the ecosystem, since we +# started shipping install scripts with CRLF endings instead of the old LF. +# +# Note that we do this a couple times during the build as the PATH and current +# user/directory change, e.g. when mingw is enabled. + +set -euo pipefail +IFS=$'\n\t' + +git config --replace-all --global core.autocrlf false diff --git a/src/ci/scripts/dump-environment.sh b/src/ci/scripts/dump-environment.sh new file mode 100755 index 0000000000..c6774b52ab --- /dev/null +++ b/src/ci/scripts/dump-environment.sh @@ -0,0 +1,19 @@ +#!/bin/bash +# This script dumps information about the build environment to stdout. + +set -euo pipefail +IFS=$'\n\t' + +echo "environment variables:" +printenv | sort +echo + +echo "disk usage:" +df -h +echo + +echo "biggest files in the working dir:" +set +o pipefail +du . 
| sort -nr | head -n100 +set -o pipefail +echo diff --git a/src/ci/scripts/enable-docker-ipv6.sh b/src/ci/scripts/enable-docker-ipv6.sh new file mode 100755 index 0000000000..03d5a75e24 --- /dev/null +++ b/src/ci/scripts/enable-docker-ipv6.sh @@ -0,0 +1,15 @@ +#!/bin/bash +# Looks like docker containers have IPv6 disabled by default, so let's turn it +# on since libstd tests require it + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isLinux; then + sudo mkdir -p /etc/docker + echo '{"ipv6":true,"fixed-cidr-v6":"fd9a:8454:6789:13f7::/64"}' \ + | sudo tee /etc/docker/daemon.json + sudo service docker restart +fi diff --git a/src/ci/install-awscli.sh b/src/ci/scripts/install-awscli.sh similarity index 88% rename from src/ci/install-awscli.sh rename to src/ci/scripts/install-awscli.sh index 69c8d2e309..e211879385 100755 --- a/src/ci/install-awscli.sh +++ b/src/ci/scripts/install-awscli.sh @@ -16,12 +16,14 @@ set -euo pipefail IFS=$'\n\t' -MIRROR="https://rust-lang-ci-mirrors.s3-us-west-1.amazonaws.com/rustc/2019-07-27-awscli.tar" +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +MIRROR="${MIRRORS_BASE}/2019-07-27-awscli.tar" DEPS_DIR="/tmp/awscli-deps" pip="pip" pipflags="" -if [[ "${AGENT_OS}" == "Linux" ]]; then +if isLinux; then pip="pip3" pipflags="--user" diff --git a/src/ci/scripts/install-clang.sh b/src/ci/scripts/install-clang.sh new file mode 100755 index 0000000000..e9b685718e --- /dev/null +++ b/src/ci/scripts/install-clang.sh @@ -0,0 +1,43 @@ +#!/bin/bash +# This script installs clang on the local machine. Note that we don't install +# clang on Linux since its compiler story is just so different. Each container +# has its own toolchain configured appropriately already. + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isMacOS; then + curl -f "${MIRRORS_BASE}/clang%2Bllvm-7.0.0-x86_64-apple-darwin.tar.xz" | tar xJf - + + ciCommandSetEnv CC "$(pwd)/clang+llvm-7.0.0-x86_64-apple-darwin/bin/clang" + ciCommandSetEnv CXX "$(pwd)/clang+llvm-7.0.0-x86_64-apple-darwin/bin/clang++" + + # Configure `AR` specifically so rustbuild doesn't try to infer it as + # `clang-ar` by accident. + ciCommandSetEnv AR "ar" +elif isWindows && [[ -z ${MINGW_URL+x} ]]; then + # If we're compiling for MSVC then we, like most other distribution builders, + # switch to clang as the compiler. This'll allow us eventually to enable LTO + # amongst LLVM and rustc. Note that we only do this on MSVC as I don't think + # clang has an output mode compatible with MinGW that we need. If it does we + # should switch to clang for MinGW as well! + # + # Note that the LLVM installer is an NSIS installer + # + # Original downloaded here came from + # http://releases.llvm.org/7.0.0/LLVM-7.0.0-win64.exe + # That installer was run through `wine` on Linux and then the resulting + # installation directory (found in `$HOME/.wine/drive_c/Program Files/LLVM`) was + # packaged up into a tarball. We've had issues otherwise that the installer will + # randomly hang, provide not a lot of useful information, pollute global state, + # etc. In general the tarball is just more confined and easier to deal with when + # working with various CI environments. 
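+  # The tarball below unpacks into a `clang-rust` directory, which is what the
+  # `llvm.clang-cl` setting further down points rustbuild at.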
+ + mkdir -p citools + cd citools + curl -f "${MIRRORS_BASE}/LLVM-7.0.0-win64.tar.gz" | tar xzf - + ciCommandSetEnv RUST_CONFIGURE_ARGS \ + "${RUST_CONFIGURE_ARGS} --set llvm.clang-cl=$(pwd)/clang-rust/bin/clang-cl.exe" +fi diff --git a/src/ci/scripts/install-innosetup.sh b/src/ci/scripts/install-innosetup.sh new file mode 100755 index 0000000000..04ca249777 --- /dev/null +++ b/src/ci/scripts/install-innosetup.sh @@ -0,0 +1,18 @@ +#!/bin/bash +# We use InnoSetup and its `iscc` program to also create combined installers. +# Honestly at this point WIX above and `iscc` are just holdovers from +# oh-so-long-ago and are required for creating installers on Windows. I think +# one is MSI installers and one is EXE, but they're not used so frequently at +# this point anyway so perhaps it's a wash! + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isWindows; then + curl.exe -o is-install.exe "${MIRRORS_BASE}/2017-08-22-is.exe" + cmd.exe //c "is-install.exe /VERYSILENT /SUPPRESSMSGBOXES /NORESTART /SP-" + + ciCommandAddPath "C:\\Program Files (x86)\\Inno Setup 5" +fi diff --git a/src/ci/scripts/install-mingw.sh b/src/ci/scripts/install-mingw.sh new file mode 100755 index 0000000000..b4e8b889f5 --- /dev/null +++ b/src/ci/scripts/install-mingw.sh @@ -0,0 +1,45 @@ +#!/bin/bash +# If we need to download a custom MinGW, do so here and set the path +# appropriately. +# +# Here we also do a pretty heinous thing which is to mangle the MinGW +# installation we just downloaded. Currently, as of this writing, we're using +# MinGW-w64 builds of gcc, and that's currently at 6.3.0. We use 6.3.0 as it +# appears to be the first version which contains a fix for #40546, builds +# randomly failing during LLVM due to ar.exe/ranlib.exe failures. +# +# Unfortunately, though, 6.3.0 *also* is the first version of MinGW-w64 builds +# to contain a regression in gdb (#40184). As a result if we were to use the +# gdb provided (7.11.1) then we would fail all debuginfo tests. +# +# In order to fix spurious failures (pretty high priority) we use 6.3.0. To +# avoid disabling gdb tests we download an *old* version of gdb, specifically +# that found inside the 6.2.0 distribution. We then overwrite the 6.3.0 gdb +# with the 6.2.0 gdb to get tests passing. +# +# Note that we don't literally overwrite the gdb.exe binary because it appears +# to just use gdborig.exe, so that's the binary we deal with instead. 
+# +# Otherwise install MinGW through `pacman` + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isWindows; then + if [[ -z "${MINGW_URL+x}" ]]; then + arch=i686 + if [ "$MSYS_BITS" = "64" ]; then + arch=x86_64 + fi + pacman -S --noconfirm --needed mingw-w64-$arch-toolchain mingw-w64-$arch-cmake \ + mingw-w64-$arch-gcc mingw-w64-$arch-python2 + ciCommandAddPath "${SYSTEM_WORKFOLDER}/msys2/mingw${MSYS_BITS}/bin" + else + curl -o mingw.7z "${MINGW_URL}/${MINGW_ARCHIVE}" + 7z x -y mingw.7z > /dev/null + curl -o "${MINGW_DIR}/bin/gdborig.exe" "${MINGW_URL}/2017-04-20-${MSYS_BITS}bit-gdborig.exe" + ciCommandAddPath "$(pwd)/${MINGW_DIR}/bin" + fi +fi diff --git a/src/ci/scripts/install-msys2-packages.sh b/src/ci/scripts/install-msys2-packages.sh new file mode 100755 index 0000000000..36d9202f7a --- /dev/null +++ b/src/ci/scripts/install-msys2-packages.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isWindows; then + pacman -S --noconfirm --needed base-devel ca-certificates make diffutils tar + + # Make sure we use the native python interpreter instead of some msys equivalent + # one way or another. The msys interpreters seem to have weird path conversions + # baked in which break LLVM's build system one way or another, so let's use the + # native version which keeps everything as native as possible. + cp C:/Python27amd64/python.exe C:/Python27amd64/python2.7.exe + ciCommandAddPath "C:\\Python27amd64" +fi diff --git a/src/ci/scripts/install-msys2.sh b/src/ci/scripts/install-msys2.sh new file mode 100755 index 0000000000..ce37c3b146 --- /dev/null +++ b/src/ci/scripts/install-msys2.sh @@ -0,0 +1,19 @@ +#!/bin/bash +# Download and install MSYS2, needed primarily for the test suite (run-make) but +# also used by the MinGW toolchain for assembling things. +# +# FIXME: we should probe the default azure image and see if we can use the MSYS2 +# toolchain there. (if there's even one there). For now though this gets the job +# done. + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isWindows; then + choco install msys2 --params="/InstallDir:${SYSTEM_WORKFOLDER}/msys2 /NoPath" -y --no-progress + mkdir -p "${SYSTEM_WORKFOLDER}/msys2/home/${USERNAME}" + + ciCommandAddPath "${SYSTEM_WORKFOLDER}/msys2/usr/bin" +fi diff --git a/src/ci/scripts/install-ninja.sh b/src/ci/scripts/install-ninja.sh new file mode 100755 index 0000000000..b8261d8a6f --- /dev/null +++ b/src/ci/scripts/install-ninja.sh @@ -0,0 +1,16 @@ +#!/bin/bash +# Note that this is originally from the github releases patch of Ninja + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isWindows; then + mkdir ninja + curl -o ninja.zip "${MIRRORS_BASE}/2017-03-15-ninja-win.zip" + 7z x -oninja ninja.zip + rm ninja.zip + ciCommandSetEnv "RUST_CONFIGURE_ARGS" "${RUST_CONFIGURE_ARGS} --enable-ninja" + ciCommandAddPath "$(pwd)/ninja" +fi diff --git a/src/ci/scripts/install-sccache.sh b/src/ci/scripts/install-sccache.sh new file mode 100755 index 0000000000..d3c2989922 --- /dev/null +++ b/src/ci/scripts/install-sccache.sh @@ -0,0 +1,20 @@ +#!/bin/bash +# This script installs sccache on the local machine. Note that we don't install +# sccache on Linux since it's installed elsewhere through all the containers. 
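+# The macOS and Windows binaries below are prebuilt and served from the CI mirrors bucket.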
+ +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isMacOS; then + curl -fo /usr/local/bin/sccache "${MIRRORS_BASE}/2018-04-02-sccache-x86_64-apple-darwin" + chmod +x /usr/local/bin/sccache +elif isWindows; then + mkdir -p sccache + curl -fo sccache/sccache.exe "${MIRRORS_BASE}/2018-04-26-sccache-x86_64-pc-windows-msvc" + ciCommandAddPath "$(pwd)/sccache" +fi + +# FIXME: we should probably install sccache outside the containers and then +# mount it inside the containers so we can centralize all installation here. diff --git a/src/ci/scripts/install-wix.sh b/src/ci/scripts/install-wix.sh new file mode 100755 index 0000000000..688f1a49cb --- /dev/null +++ b/src/ci/scripts/install-wix.sh @@ -0,0 +1,17 @@ +#!/bin/bash +# We use the WIX toolset to create combined installers for Windows, and these +# binaries are downloaded from https://github.com/wixtoolset/wix3 originally + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isWindows; then + ciCommandSetEnv WIX "$(pwd)/wix" + + curl -O "${MIRRORS_BASE}/wix311-binaries.zip" + mkdir -p wix/bin + cd wix/bin + 7z x ../../wix311-binaries.zip +fi diff --git a/src/ci/scripts/should-skip-this.sh b/src/ci/scripts/should-skip-this.sh new file mode 100755 index 0000000000..f945db0ada --- /dev/null +++ b/src/ci/scripts/should-skip-this.sh @@ -0,0 +1,20 @@ +#!/bin/bash +# Set the SKIP_JOB environment variable if this job is supposed to only run +# when submodules are updated and they were not. The following time consuming +# tasks will be skipped when the environment variable is present. + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if [[ -z "${CI_ONLY_WHEN_SUBMODULES_CHANGED+x}" ]]; then + echo "Executing the job since there is no skip rule in effect" +elif git diff HEAD^ | grep --quiet "^index .* 160000"; then + # Submodules pseudo-files inside git have the 160000 permissions, so when + # those files are present in the diff a submodule was updated. + echo "Executing the job since submodules are updated" +else + echo "Not executing this job since no submodules were updated" + ciCommandSetEnv SKIP_JOB 1 +fi diff --git a/src/ci/scripts/switch-xcode.sh b/src/ci/scripts/switch-xcode.sh new file mode 100755 index 0000000000..2cbb2ddbc7 --- /dev/null +++ b/src/ci/scripts/switch-xcode.sh @@ -0,0 +1,13 @@ +#!/bin/bash +# Switch to XCode 9.3 on OSX since it seems to be the last version that supports +# i686-apple-darwin. We'll eventually want to upgrade this and it will probably +# force us to drop i686-apple-darwin, but let's keep the wheels turning for now. + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isMacOS; then + sudo xcode-select --switch /Applications/Xcode_9.3.app +fi diff --git a/src/ci/scripts/upload-artifacts.sh b/src/ci/scripts/upload-artifacts.sh new file mode 100755 index 0000000000..312ec9d805 --- /dev/null +++ b/src/ci/scripts/upload-artifacts.sh @@ -0,0 +1,41 @@ +#!/bin/bash +# Upload all the artifacts to our S3 bucket. All the files inside ${upload_dir} +# will be uploaded to the deploy bucket and eventually signed and released in +# static.rust-lang.org. + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +upload_dir="$(mktemp -d)" + +# Release tarballs produced by a dist builder. 
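+# DEPLOY and DEPLOY_ALT are expected to be provided by the CI configuration on dist builders;
+# other jobs skip this block and only upload the CPU usage and toolstate data below.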
+if [[ "${DEPLOY-0}" -eq "1" ]] || [[ "${DEPLOY_ALT-0}" -eq "1" ]]; then + dist_dir=build/dist + if isLinux; then + dist_dir=obj/build/dist + fi + rm -rf "${dist_dir}/doc" + cp -r "${dist_dir}"/* "${upload_dir}" +fi + +# CPU usage statistics. +cp cpu-usage.csv "${upload_dir}/cpu-${CI_JOB_NAME}.csv" + +# Toolstate data. +if [[ -n "${DEPLOY_TOOLSTATES_JSON+x}" ]]; then + cp /tmp/toolstate/toolstates.json "${upload_dir}/${DEPLOY_TOOLSTATES_JSON}" +fi + +echo "Files that will be uploaded:" +ls -lah "${upload_dir}" +echo + +deploy_dir="rustc-builds" +if [[ "${DEPLOY_ALT-0}" -eq "1" ]]; then + deploy_dir="rustc-builds-alt" +fi +deploy_url="s3://${DEPLOY_BUCKET}/${deploy_dir}/$(ciCommit)" + +retry aws s3 cp --no-progress --recursive --acl public-read "${upload_dir}" "${deploy_url}" diff --git a/src/ci/scripts/verify-line-endings.sh b/src/ci/scripts/verify-line-endings.sh new file mode 100755 index 0000000000..f3cac13ea4 --- /dev/null +++ b/src/ci/scripts/verify-line-endings.sh @@ -0,0 +1,24 @@ +#!/bin/bash +# See also the disable for autocrlf, this just checks that it worked. +# +# We check both in rust-lang/rust and in a submodule to make sure both are +# accurate. Submodules are checked out significantly later than the main +# repository in this script, so settings can (and do!) change between then. +# +# Linux (and maybe macOS) builders don't currently have dos2unix so just only +# run this step on Windows. + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isWindows; then + # print out the git configuration so we can better investigate failures in + # the following + git config --list --show-origin + dos2unix -ih Cargo.lock src/tools/rust-installer/install-template.sh + endings=$(dos2unix -ic Cargo.lock src/tools/rust-installer/install-template.sh) + # if endings has non-zero length, error out + if [ -n "$endings" ]; then exit 1 ; fi +fi diff --git a/src/ci/scripts/windows-symlink-build-dir.sh b/src/ci/scripts/windows-symlink-build-dir.sh new file mode 100755 index 0000000000..e57128c70f --- /dev/null +++ b/src/ci/scripts/windows-symlink-build-dir.sh @@ -0,0 +1,15 @@ +#!/bin/bash +# We've had issues with the default drive in use running out of space during a +# build, and it looks like the `C:` drive has more space than the default `D:` +# drive. We should probably confirm this with the azure pipelines team at some +# point, but this seems to fix our "disk space full" problems. + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isWindows; then + cmd //c "mkdir c:\\MORE_SPACE" + cmd //c "mklink /J build c:\\MORE_SPACE" +fi diff --git a/src/ci/shared.sh b/src/ci/shared.sh index b093a07ec5..718a5379ae 100644 --- a/src/ci/shared.sh +++ b/src/ci/shared.sh @@ -4,6 +4,8 @@ # `source shared.sh`, hence the invalid shebang and not being # marked as an executable file in git. 
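+# Base URL of the S3 bucket mirroring the external downloads used by the CI install scripts.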
+export MIRRORS_BASE="https://rust-lang-ci-mirrors.s3-us-west-1.amazonaws.com/rustc" + # See http://unix.stackexchange.com/questions/82598 # Duplicated in docker/dist-various-2/shared.sh function retry { @@ -28,10 +30,43 @@ function isCI { [ "$CI" = "true" ] || [ "$TF_BUILD" = "True" ] } -function isOSX { +function isMacOS { [ "$AGENT_OS" = "Darwin" ] } +function isWindows { + [ "$AGENT_OS" = "Windows_NT" ] +} + +function isLinux { + [ "$AGENT_OS" = "Linux" ] +} + function getCIBranch { echo "$BUILD_SOURCEBRANCHNAME" } + +function ciCommit { + echo "${BUILD_SOURCEVERSION}" +} + +function ciCommandAddPath { + if [[ $# -ne 1 ]]; then + echo "usage: $0 " + exit 1 + fi + path="$1" + + echo "##vso[task.prependpath]${path}" +} + +function ciCommandSetEnv { + if [[ $# -ne 2 ]]; then + echo "usage: $0 " + exit 1 + fi + name="$1" + value="$2" + + echo "##vso[task.setvariable variable=${name}]${value}" +} diff --git a/src/doc/book/Cargo.lock b/src/doc/book/Cargo.lock index 928f6b1244..dbf955717c 100644 --- a/src/doc/book/Cargo.lock +++ b/src/doc/book/Cargo.lock @@ -1,3 +1,5 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. [[package]] name = "aho-corasick" version = "0.5.3" diff --git a/src/doc/book/ci/build.sh b/src/doc/book/ci/build.sh old mode 100644 new mode 100755 index 9ad1781381..13f4509a5b --- a/src/doc/book/ci/build.sh +++ b/src/doc/book/ci/build.sh @@ -4,13 +4,6 @@ set -e export PATH=$PATH:/home/travis/.cargo/bin; -# Feature check -cd ci/stable-check - -cargo run -- ../../src - -cd ../.. - echo 'Spellchecking...' bash ci/spellcheck.sh list echo 'Testing...' @@ -19,3 +12,8 @@ echo 'Building...' mdbook build echo 'Linting for local file paths...' cargo run --bin lfp src +echo 'Validating references' +for file in src/*.md ; do + echo Checking references in $file + cargo run --quiet --bin link2print < $file > /dev/null +done diff --git a/src/doc/book/ci/stable-check/Cargo.lock b/src/doc/book/ci/stable-check/Cargo.lock deleted file mode 100644 index 9a3b307c96..0000000000 --- a/src/doc/book/ci/stable-check/Cargo.lock +++ /dev/null @@ -1,4 +0,0 @@ -[root] -name = "stable-check" -version = "0.1.0" - diff --git a/src/doc/book/ci/stable-check/Cargo.toml b/src/doc/book/ci/stable-check/Cargo.toml deleted file mode 100644 index 691722a0b2..0000000000 --- a/src/doc/book/ci/stable-check/Cargo.toml +++ /dev/null @@ -1,6 +0,0 @@ -[package] -name = "stable-check" -version = "0.1.0" -authors = ["steveklabnik "] - -[dependencies] diff --git a/src/doc/book/ci/stable-check/src/main.rs b/src/doc/book/ci/stable-check/src/main.rs deleted file mode 100644 index 167f1f883a..0000000000 --- a/src/doc/book/ci/stable-check/src/main.rs +++ /dev/null @@ -1,43 +0,0 @@ -use std::error::Error; -use std::env; -use std::fs; -use std::fs::File; -use std::io::prelude::*; -use std::path::Path; - -fn main() { - let arg = env::args().nth(1).unwrap_or_else(|| { - println!("Please pass a src directory as the first argument"); - std::process::exit(1); - }); - - match check_directory(&Path::new(&arg)) { - Ok(()) => println!("passed!"), - Err(e) => { - println!("Error: {}", e); - std::process::exit(1); - } - } - -} - -fn check_directory(dir: &Path) -> Result<(), Box> { - for entry in fs::read_dir(dir)? 
{ - let entry = entry?; - let path = entry.path(); - - if path.is_dir() { - continue; - } - - let mut file = File::open(&path)?; - let mut contents = String::new(); - file.read_to_string(&mut contents)?; - - if contents.contains("#![feature") { - return Err(From::from(format!("Feature flag found in {:?}", path))); - } - } - - Ok(()) -} diff --git a/src/doc/book/rust-toolchain b/src/doc/book/rust-toolchain new file mode 100644 index 0000000000..bf50e910e6 --- /dev/null +++ b/src/doc/book/rust-toolchain @@ -0,0 +1 @@ +1.37.0 diff --git a/src/doc/book/src/appendix-04-useful-development-tools.md b/src/doc/book/src/appendix-04-useful-development-tools.md index ec40d1cbef..33929bc8d0 100644 --- a/src/doc/book/src/appendix-04-useful-development-tools.md +++ b/src/doc/book/src/appendix-04-useful-development-tools.md @@ -1,10 +1,10 @@ -# Appendix D - Useful Development Tools +## Appendix D - Useful Development Tools In this appendix, we talk about some useful development tools that the Rust project provides. We’ll look at automatic formatting, quick ways to apply warning fixes, a linter, and integrating with IDEs. -## Automatic Formatting with `rustfmt` +### Automatic Formatting with `rustfmt` The `rustfmt` tool reformats your code according to the community code style. Many collaborative projects use `rustfmt` to prevent arguments about which @@ -29,7 +29,7 @@ on `rustfmt`, see [its documentation][rustfmt]. [rustfmt]: https://github.com/rust-lang/rustfmt -## Fix Your Code with `rustfix` +### Fix Your Code with `rustfix` The rustfix tool is included with Rust installations and can automatically fix some compiler warnings. If you’ve written code in Rust, you’ve probably seen @@ -96,7 +96,7 @@ The `for` loop variable is now named `_i`, and the warning no longer appears. You can also use the `cargo fix` command to transition your code between different Rust editions. Editions are covered in Appendix E. -## More Lints with Clippy +### More Lints with Clippy The Clippy tool is a collection of lints to analyze your code so you can catch common mistakes and improve your Rust code. @@ -158,7 +158,7 @@ For more information on Clippy, see [its documentation][clippy]. [clippy]: https://github.com/rust-lang/rust-clippy -## IDE Integration Using the Rust Language Server +### IDE Integration Using the Rust Language Server To help IDE integration, the Rust project distributes the *Rust Language Server* (`rls`). This tool speaks the [Language Server diff --git a/src/doc/book/src/appendix-05-editions.md b/src/doc/book/src/appendix-05-editions.md index ac75c4181d..db98ecc5ee 100644 --- a/src/doc/book/src/appendix-05-editions.md +++ b/src/doc/book/src/appendix-05-editions.md @@ -1,4 +1,4 @@ -# Appendix E - Editions +## Appendix E - Editions In Chapter 1, you saw that `cargo new` adds a bit of metadata to your *Cargo.toml* file about an edition. This appendix talks about what that means! diff --git a/src/doc/book/src/appendix-07-nightly-rust.md b/src/doc/book/src/appendix-07-nightly-rust.md index d8fd0da776..bace82f2e0 100644 --- a/src/doc/book/src/appendix-07-nightly-rust.md +++ b/src/doc/book/src/appendix-07-nightly-rust.md @@ -1,4 +1,4 @@ -# Appendix G - How Rust is Made and “Nightly Rust” +## Appendix G - How Rust is Made and “Nightly Rust” This appendix is about how Rust is made and how that affects you as a Rust developer. 
diff --git a/src/doc/book/src/ch00-00-introduction.md b/src/doc/book/src/ch00-00-introduction.md index 7a37ee4ef4..86fe469f9c 100644 --- a/src/doc/book/src/ch00-00-introduction.md +++ b/src/doc/book/src/ch00-00-introduction.md @@ -104,7 +104,7 @@ chapters. In concept chapters, you’ll learn about an aspect of Rust. In projec chapters, we’ll build small programs together, applying what you’ve learned so far. Chapters 2, 12, and 20 are project chapters; the rest are concept chapters. -Chapter 1 explains how to install Rust, how to write a Hello, world! program, +Chapter 1 explains how to install Rust, how to write a “Hello, world!” program, and how to use Cargo, Rust’s package manager and build tool. Chapter 2 is a hands-on introduction to the Rust language. Here we cover concepts at a high level, and later chapters will provide additional detail. If you want to get diff --git a/src/doc/book/src/ch01-01-installation.md b/src/doc/book/src/ch01-01-installation.md index 9061389e6d..d7659ebab8 100644 --- a/src/doc/book/src/ch01-01-installation.md +++ b/src/doc/book/src/ch01-01-installation.md @@ -126,9 +126,9 @@ resources include [the Users forum][users] and [Stack Overflow][stackoverflow]. ### Local Documentation -The installer also includes a copy of the documentation locally, so you can -read it offline. Run `rustup doc` to open the local documentation in your -browser. +The installation of Rust also includes a copy of the documentation locally, so +you can read it offline. Run `rustup doc` to open the local documentation in +your browser. Any time a type or function is provided by the standard library and you’re not sure what it does or how to use it, use the application programming interface diff --git a/src/doc/book/src/ch01-02-hello-world.md b/src/doc/book/src/ch01-02-hello-world.md index e82ba49670..82a545ab63 100644 --- a/src/doc/book/src/ch01-02-hello-world.md +++ b/src/doc/book/src/ch01-02-hello-world.md @@ -20,7 +20,7 @@ we suggest making a *projects* directory in your home directory and keeping all your projects there. Open a terminal and enter the following commands to make a *projects* directory -and a directory for the Hello, world! project within the *projects* directory. +and a directory for the “Hello, world!” project within the *projects* directory. For Linux, macOS, and PowerShell on Windows, enter this: @@ -86,7 +86,7 @@ program. That makes you a Rust programmer—welcome! ### Anatomy of a Rust Program -Let’s review in detail what just happened in your Hello, world! program. +Let’s review in detail what just happened in your “Hello, world!” program. Here’s the first piece of the puzzle: ```rust @@ -178,7 +178,7 @@ From here, you run the *main* or *main.exe* file, like this: $ ./main # or .\main.exe on Windows ``` -If *main.rs* was your Hello, world! program, this line would print `Hello, +If *main.rs* was your “Hello, world!” program, this line would print `Hello, world!` to your terminal. If you’re more familiar with a dynamic language, such as Ruby, Python, or diff --git a/src/doc/book/src/ch01-03-hello-cargo.md b/src/doc/book/src/ch01-03-hello-cargo.md index 34428e5f94..b40be39da5 100644 --- a/src/doc/book/src/ch01-03-hello-cargo.md +++ b/src/doc/book/src/ch01-03-hello-cargo.md @@ -6,9 +6,9 @@ such as building your code, downloading the libraries your code depends on, and building those libraries. (We call libraries your code needs *dependencies*.) The simplest Rust programs, like the one we’ve written so far, don’t have any -dependencies. 
So if we had built the Hello, world! project with Cargo, it would -only use the part of Cargo that handles building your code. As you write more -complex Rust programs, you’ll add dependencies, and if you start a project +dependencies. So if we had built the “Hello, world!” project with Cargo, it +would only use the part of Cargo that handles building your code. As you write +more complex Rust programs, you’ll add dependencies, and if you start a project using Cargo, adding dependencies will be much easier to do. Because the vast majority of Rust projects use Cargo, the rest of this book @@ -29,7 +29,7 @@ determine how to install Cargo separately. ### Creating a Project with Cargo Let’s create a new project using Cargo and look at how it differs from our -original Hello, world! project. Navigate back to your *projects* directory (or +original “Hello, world!” project. Navigate back to your *projects* directory (or wherever you decided to store your code). Then, on any operating system, run the following: @@ -99,10 +99,10 @@ fn main() { } ``` -Cargo has generated a Hello, world! program for you, just like the one we wrote -in Listing 1-1! So far, the differences between our previous project and the -project Cargo generates are that Cargo placed the code in the *src* directory, -and we have a *Cargo.toml* configuration file in the top directory. +Cargo has generated a “Hello, world!” program for you, just like the one we +wrote in Listing 1-1! So far, the differences between our previous project and +the project Cargo generates are that Cargo placed the code in the *src* +directory, and we have a *Cargo.toml* configuration file in the top directory. Cargo expects your source files to live inside the *src* directory. The top-level project directory is just for README files, license information, @@ -110,14 +110,14 @@ configuration files, and anything else not related to your code. Using Cargo helps you organize your projects. There’s a place for everything, and everything is in its place. -If you started a project that doesn’t use Cargo, as we did with the Hello, -world! project, you can convert it to a project that does use Cargo. Move the +If you started a project that doesn’t use Cargo, as we did with the “Hello, +world!” project, you can convert it to a project that does use Cargo. Move the project code into the *src* directory and create an appropriate *Cargo.toml* file. ### Building and Running a Cargo Project -Now let’s look at what’s different when we build and run the Hello, world! +Now let’s look at what’s different when we build and run the “Hello, world!” program with Cargo! From your *hello_cargo* directory, build your project by entering the following command: @@ -237,7 +237,7 @@ you’ve learned how to: * Install the latest stable version of Rust using `rustup` * Update to a newer Rust version * Open locally installed documentation -* Write and run a Hello, world! 
program using `rustc` directly +* Write and run a “Hello, world!” program using `rustc` directly * Create and run a new project using the conventions of Cargo This is a great time to build a more substantial program to get used to reading diff --git a/src/doc/book/src/ch02-00-guessing-game-tutorial.md b/src/doc/book/src/ch02-00-guessing-game-tutorial.md index 5651b68bd3..2ecc1520ea 100644 --- a/src/doc/book/src/ch02-00-guessing-game-tutorial.md +++ b/src/doc/book/src/ch02-00-guessing-game-tutorial.md @@ -201,7 +201,7 @@ io::stdin().read_line(&mut guess) .expect("Failed to read line"); ``` -If we hadn’t listed the `use std::io` line at the beginning of the program, we +If we hadn’t put the `use std::io` line at the beginning of the program, we could have written this function call as `std::io::stdin`. The `stdin` function returns an instance of [`std::io::Stdin`][iostdin], which is a type that represents a handle to the standard input for your terminal. @@ -373,23 +373,28 @@ code that uses `rand`, we need to modify the *Cargo.toml* file to include the the bottom beneath the `[dependencies]` section header that Cargo created for you: + + Filename: Cargo.toml ```toml [dependencies] - -rand = "0.3.14" +rand = "0.5.5" ``` In the *Cargo.toml* file, everything that follows a header is part of a section that continues until another section starts. The `[dependencies]` section is where you tell Cargo which external crates your project depends on and which versions of those crates you require. In this case, we’ll specify the `rand` -crate with the semantic version specifier `0.3.14`. Cargo understands [Semantic +crate with the semantic version specifier `0.5.5`. Cargo understands [Semantic Versioning][semver] (sometimes called *SemVer*), which is a -standard for writing version numbers. The number `0.3.14` is actually shorthand -for `^0.3.14`, which means “any version that has a public API compatible with -version 0.3.14.” +standard for writing version numbers. The number `0.5.5` is actually shorthand +for `^0.5.5`, which means “any version that has a public API compatible with +version 0.5.5.” [semver]: http://semver.org @@ -398,13 +403,19 @@ Listing 2-2. ```text $ cargo build - Updating registry `https://github.com/rust-lang/crates.io-index` - Downloading rand v0.3.14 - Downloading libc v0.2.14 - Compiling libc v0.2.14 - Compiling rand v0.3.14 + Updating crates.io index + Downloaded rand v0.5.5 + Downloaded libc v0.2.62 + Downloaded rand_core v0.2.2 + Downloaded rand_core v0.3.1 + Downloaded rand_core v0.4.2 + Compiling rand_core v0.4.2 + Compiling libc v0.2.62 + Compiling rand_core v0.3.1 + Compiling rand_core v0.2.2 + Compiling rand v0.5.5 Compiling guessing_game v0.1.0 (file:///projects/guessing_game) - Finished dev [unoptimized + debuginfo] target(s) in 2.53 secs + Finished dev [unoptimized + debuginfo] target(s) in 2.53 s ``` Listing 2-2: The output from running `cargo build` after @@ -422,8 +433,8 @@ their open source Rust projects for others to use. After updating the registry, Cargo checks the `[dependencies]` section and downloads any crates you don’t have yet. In this case, although we only listed -`rand` as a dependency, Cargo also grabbed a copy of `libc`, because `rand` -depends on `libc` to work. After downloading the crates, Rust compiles them and +`rand` as a dependency, Cargo also grabbed `libc` and `rand_core`, because `rand` +depends on those to work. After downloading the crates, Rust compiles them and then compiles the project with the dependencies available. 
If you immediately run `cargo build` again without making any changes, you @@ -439,7 +450,7 @@ and build again, you’ll only see two lines of output: ```text $ cargo build Compiling guessing_game v0.1.0 (file:///projects/guessing_game) - Finished dev [unoptimized + debuginfo] target(s) in 2.53 secs + Finished dev [unoptimized + debuginfo] target(s) in 2.53s ``` These lines show Cargo only updates the build with your tiny change to the @@ -452,7 +463,7 @@ your part of the code. Cargo has a mechanism that ensures you can rebuild the same artifact every time you or anyone else builds your code: Cargo will use only the versions of the dependencies you specified until you indicate otherwise. For example, what -happens if next week version 0.3.15 of the `rand` crate comes out and +happens if next week version 0.5.6 of the `rand` crate comes out and contains an important bug fix but also contains a regression that will break your code? @@ -464,7 +475,7 @@ the *Cargo.lock* file. When you build your project in the future, Cargo will see that the *Cargo.lock* file exists and use the versions specified there rather than doing all the work of figuring out versions again. This lets you have a reproducible build automatically. In other words, your project will -remain at `0.3.14` until you explicitly upgrade, thanks to the *Cargo.lock* +remain at `0.5.5` until you explicitly upgrade, thanks to the *Cargo.lock* file. #### Updating a Crate to Get a New Version @@ -474,26 +485,25 @@ which will ignore the *Cargo.lock* file and figure out all the latest versions that fit your specifications in *Cargo.toml*. If that works, Cargo will write those versions to the *Cargo.lock* file. -But by default, Cargo will only look for versions greater than `0.3.0` and less -than `0.4.0`. If the `rand` crate has released two new versions, `0.3.15` and -`0.4.0`, you would see the following if you ran `cargo update`: +But by default, Cargo will only look for versions greater than `0.5.5` and less +than `0.6.0`. If the `rand` crate has released two new versions, `0.5.6` and +`0.6.0`, you would see the following if you ran `cargo update`: ```text $ cargo update - Updating registry `https://github.com/rust-lang/crates.io-index` - Updating rand v0.3.14 -> v0.3.15 + Updating crates.io index + Updating rand v0.5.5 -> v0.5.6 ``` At this point, you would also notice a change in your *Cargo.lock* file noting -that the version of the `rand` crate you are now using is `0.3.15`. +that the version of the `rand` crate you are now using is `0.5.6`. -If you wanted to use `rand` version `0.4.0` or any version in the `0.4.x` +If you wanted to use `rand` version `0.6.0` or any version in the `0.6.x` series, you’d have to update the *Cargo.toml* file to look like this instead: ```toml [dependencies] - -rand = "0.4.0" +rand = "0.6.0" ``` The next time you run `cargo build`, Cargo will update the registry of crates diff --git a/src/doc/book/src/ch03-01-variables-and-mutability.md b/src/doc/book/src/ch03-01-variables-and-mutability.md index b14bba5244..d6a73a092e 100644 --- a/src/doc/book/src/ch03-01-variables-and-mutability.md +++ b/src/doc/book/src/ch03-01-variables-and-mutability.md @@ -65,7 +65,7 @@ But mutability can be very useful. Variables are immutable only by default; as you did in Chapter 2, you can make them mutable by adding `mut` in front of the variable name. 
In addition to allowing this value to change, `mut` conveys intent to future readers of the code by indicating that other parts of the code -will be changing this variable's value. +will be changing this variable’s value. For example, let’s change *src/main.rs* to the following: diff --git a/src/doc/book/src/ch03-02-data-types.md b/src/doc/book/src/ch03-02-data-types.md index 228e4316b2..482e69f73f 100644 --- a/src/doc/book/src/ch03-02-data-types.md +++ b/src/doc/book/src/ch03-02-data-types.md @@ -228,9 +228,9 @@ primitive compound types: tuples and arrays. #### The Tuple Type -A tuple is a general way of grouping together some number of other values -with a variety of types into one compound type. Tuples have a fixed length: -once declared, they cannot grow or shrink in size. +A tuple is a general way of grouping together a number of values with a variety +of types into one compound type. Tuples have a fixed length: once declared, +they cannot grow or shrink in size. We create a tuple by writing a comma-separated list of values inside parentheses. Each position in the tuple has a type, and the types of the @@ -286,8 +286,8 @@ fn main() { ``` This program creates a tuple, `x`, and then makes new variables for each -element by using their index. As with most programming languages, the first -index in a tuple is 0. +element by using their respective indices. As with most programming languages, +the first index in a tuple is 0. #### The Array Type @@ -318,7 +318,7 @@ vector. Chapter 8 discusses vectors in more detail. An example of when you might want to use an array rather than a vector is in a program that needs to know the names of the months of the year. It’s very unlikely that such a program will need to add or remove months, so you can use -an array because you know it will always contain 12 items: +an array because you know it will always contain 12 elements: ```rust let months = ["January", "February", "March", "April", "May", "June", "July", @@ -334,7 +334,7 @@ let a: [i32; 5] = [1, 2, 3, 4, 5]; ``` Here, `i32` is the type of each element. After the semicolon, the number `5` -indicates the element contains five items. +indicates the array contains five elements. Writing an array’s type this way looks similar to an alternative syntax for initializing an array: if you want to create an array that contains the same diff --git a/src/doc/book/src/ch06-00-enums.md b/src/doc/book/src/ch06-00-enums.md index 767f8668da..cf7ea67f60 100644 --- a/src/doc/book/src/ch06-00-enums.md +++ b/src/doc/book/src/ch06-00-enums.md @@ -1,7 +1,7 @@ # Enums and Pattern Matching In this chapter we’ll look at *enumerations*, also referred to as *enums*. -Enums allow you to define a type by enumerating its possible values. First, +Enums allow you to define a type by enumerating its possible *variants*. First, we’ll define and use an enum to show how an enum can encode meaning along with data. Next, we’ll explore a particularly useful enum, called `Option`, which expresses that a value can be either something or nothing. Then we’ll look at diff --git a/src/doc/book/src/ch06-01-defining-an-enum.md b/src/doc/book/src/ch06-01-defining-an-enum.md index 9a56af7372..0d25afbb68 100644 --- a/src/doc/book/src/ch06-01-defining-an-enum.md +++ b/src/doc/book/src/ch06-01-defining-an-enum.md @@ -5,18 +5,18 @@ are useful and more appropriate than structs in this case. Say we need to work with IP addresses. Currently, two major standards are used for IP addresses: version four and version six. 
These are the only possibilities for an IP address that our program will come across: we can *enumerate* all possible -values, which is where enumeration gets its name. +variants, which is where enumeration gets its name. Any IP address can be either a version four or a version six address, but not both at the same time. That property of IP addresses makes the enum data -structure appropriate, because enum values can only be one of the variants. +structure appropriate, because enum values can only be one of its variants. Both version four and version six addresses are still fundamentally IP addresses, so they should be treated as the same type when the code is handling situations that apply to any kind of IP address. We can express this concept in code by defining an `IpAddrKind` enumeration and -listing the possible kinds an IP address can be, `V4` and `V6`. These are known -as the *variants* of the enum: +listing the possible kinds an IP address can be, `V4` and `V6`. These are the +variants of the enum: ```rust enum IpAddrKind { diff --git a/src/doc/book/src/ch07-04-bringing-paths-into-scope-with-the-use-keyword.md b/src/doc/book/src/ch07-04-bringing-paths-into-scope-with-the-use-keyword.md index ea6e51716b..05af46cbdc 100644 --- a/src/doc/book/src/ch07-04-bringing-paths-into-scope-with-the-use-keyword.md +++ b/src/doc/book/src/ch07-04-bringing-paths-into-scope-with-the-use-keyword.md @@ -241,6 +241,12 @@ In Chapter 2, we programmed a guessing game project that used an external package called `rand` to get random numbers. To use `rand` in our project, we added this line to *Cargo.toml*: + + Filename: Cargo.toml ```toml diff --git a/src/doc/book/src/ch07-05-separating-modules-into-different-files.md b/src/doc/book/src/ch07-05-separating-modules-into-different-files.md index 4a039fa83e..6b51859afd 100644 --- a/src/doc/book/src/ch07-05-separating-modules-into-different-files.md +++ b/src/doc/book/src/ch07-05-separating-modules-into-different-files.md @@ -76,7 +76,7 @@ that module. ## Summary -Rust lets you organize your packages into crates and your crates into modules +Rust lets you split a package into multiple crates and a crate into modules so you can refer to items defined in one module from another module. You can do this by specifying absolute or relative paths. These paths can be brought into scope with a `use` statement so you can use a shorter path for multiple uses of diff --git a/src/doc/book/src/ch09-02-recoverable-errors-with-result.md b/src/doc/book/src/ch09-02-recoverable-errors-with-result.md index 7de45284fe..8461e2bf03 100644 --- a/src/doc/book/src/ch09-02-recoverable-errors-with-result.md +++ b/src/doc/book/src/ch09-02-recoverable-errors-with-result.md @@ -470,13 +470,13 @@ and returns it. Of course, using `fs::read_to_string` doesn’t give us the opportunity to explain all the error handling, so we did it the longer way first. -#### The `?` Operator Can Only Be Used in Functions That Return `Result` +#### The `?` Operator Can Be Used in Functions That Return `Result` -The `?` operator can only be used in functions that have a return type of +The `?` operator can be used in functions that have a return type of `Result`, because it is defined to work in the same way as the `match` expression we defined in Listing 9-6. The part of the `match` that requires a return type of `Result` is `return Err(e)`, so the return type of the function -must be a `Result` to be compatible with this `return`. +can be a `Result` to be compatible with this `return`. 
Let’s look at what happens if we use the `?` operator in the `main` function, which you’ll recall has a return type of `()`: @@ -505,8 +505,9 @@ error[E0277]: the `?` operator can only be used in a function that returns ``` This error points out that we’re only allowed to use the `?` operator in a -function that returns `Result`. When you’re writing code in a function -that doesn’t return `Result`, and you want to use `?` when you call other +function that returns `Result` or `Option` or another type that implements +`std::ops::Try`. When you’re writing code in a function +that doesn’t return one of these types, and you want to use `?` when you call other functions that return `Result`, you have two choices to fix this problem. One technique is to change the return type of your function to be `Result` if you have no restrictions preventing that. The other technique is to use diff --git a/src/doc/book/src/ch10-02-traits.md b/src/doc/book/src/ch10-02-traits.md index 8fcf15df01..19e873bbac 100644 --- a/src/doc/book/src/ch10-02-traits.md +++ b/src/doc/book/src/ch10-02-traits.md @@ -611,12 +611,12 @@ reduce duplication but also specify to the compiler that we want the generic type to have particular behavior. The compiler can then use the trait bound information to check that all the concrete types used with our code provide the correct behavior. In dynamically typed languages, we would get an error at -runtime if we called a method on a type that the type didn’t implement. But -Rust moves these errors to compile time so we’re forced to fix the problems -before our code is even able to run. Additionally, we don’t have to write code -that checks for behavior at runtime because we’ve already checked at compile -time. Doing so improves performance without having to give up the flexibility -of generics. +runtime if we called a method on a type which didn’t implement the type which +defines the method. But Rust moves these errors to compile time so we’re forced +to fix the problems before our code is even able to run. Additionally, we don’t +have to write code that checks for behavior at runtime because we’ve already +checked at compile time. Doing so improves performance without having to give +up the flexibility of generics. Another kind of generic that we’ve already been using is called *lifetimes*. Rather than ensuring that a type has the behavior we want, lifetimes ensure diff --git a/src/doc/book/src/ch12-04-testing-the-librarys-functionality.md b/src/doc/book/src/ch12-04-testing-the-librarys-functionality.md index 2d1eeb04d6..e8ed858dec 100644 --- a/src/doc/book/src/ch12-04-testing-the-librarys-functionality.md +++ b/src/doc/book/src/ch12-04-testing-the-librarys-functionality.md @@ -196,7 +196,7 @@ pub fn search<'a>(query: &str, contents: &'a str) -> Vec<&'a str> { The `lines` method returns an iterator. We’ll talk about iterators in depth in -[Chapter 13][ch13], but recall that you saw this way of using an +[Chapter 13][ch13-iterators], but recall that you saw this way of using an iterator in [Listing 3-5][ch3-iter], where we used a `for` loop with an iterator to run some code on each item in a collection. @@ -266,7 +266,7 @@ At this point, we could consider opportunities for refactoring the implementation of the search function while keeping the tests passing to maintain the same functionality. The code in the search function isn’t too bad, but it doesn’t take advantage of some useful features of iterators. 
We’ll -return to this example in [Chapter 13][ch13], where we’ll +return to this example in [Chapter 13][ch13-iterators], where we’ll explore iterators in detail, and look at how to improve it. #### Using the `search` Function in the `run` Function @@ -336,3 +336,4 @@ ch10-03-lifetime-syntax.html#validating-references-with-lifetimes [ch11-anatomy]: ch11-01-writing-tests.html#the-anatomy-of-a-test-function [ch10-lifetimes]: ch10-03-lifetime-syntax.html [ch3-iter]: ch03-05-control-flow.html#looping-through-a-collection-with-for +[ch13-iterators]: ch13-02-iterators.html diff --git a/src/doc/book/src/ch13-01-closures.md b/src/doc/book/src/ch13-01-closures.md index f679c1431e..56f8ed4586 100644 --- a/src/doc/book/src/ch13-01-closures.md +++ b/src/doc/book/src/ch13-01-closures.md @@ -133,11 +133,9 @@ The first `if` block calls `simulated_expensive_calculation` twice, the `if` inside the outer `else` doesn’t call it at all, and the code inside the second `else` case calls it once. - - The desired behavior of the `generate_workout` function is to first check -whether the user wants a low-intensity workout (indicated by a number less -than 25) or a high-intensity workout (a number of 25 or greater). +whether the user wants a low-intensity workout (indicated by a number less than +25) or a high-intensity workout (a number of 25 or greater). Low-intensity workout plans will recommend a number of push-ups and sit-ups based on the complex algorithm we’re simulating. diff --git a/src/doc/book/src/ch14-03-cargo-workspaces.md b/src/doc/book/src/ch14-03-cargo-workspaces.md index 8b8c078baf..a662ac219e 100644 --- a/src/doc/book/src/ch14-03-cargo-workspaces.md +++ b/src/doc/book/src/ch14-03-cargo-workspaces.md @@ -192,12 +192,17 @@ each other. Let’s add the `rand` crate to the `[dependencies]` section in the *add-one/Cargo.toml* file to be able to use the `rand` crate in the `add-one` crate: + + Filename: add-one/Cargo.toml ```toml [dependencies] - -rand = "0.3.14" +rand = "0.5.5" ``` We can now add `use rand;` to the *add-one/src/lib.rs* file, and building the @@ -206,10 +211,10 @@ and compile the `rand` crate: ```text $ cargo build - Updating registry `https://github.com/rust-lang/crates.io-index` - Downloading rand v0.3.14 + Updating crates.io index + Downloaded rand v0.5.5 --snip-- - Compiling rand v0.3.14 + Compiling rand v0.5.5 Compiling add-one v0.1.0 (file:///projects/add/add-one) Compiling adder v0.1.0 (file:///projects/add/adder) Finished dev [unoptimized + debuginfo] target(s) in 10.18 secs diff --git a/src/doc/book/src/ch15-03-drop.md b/src/doc/book/src/ch15-03-drop.md index 800de36bfc..333e4e2c1d 100644 --- a/src/doc/book/src/ch15-03-drop.md +++ b/src/doc/book/src/ch15-03-drop.md @@ -58,7 +58,7 @@ an instance of your type goes out of scope. We’re printing some text here to demonstrate when Rust will call `drop`. In `main`, we create two instances of `CustomSmartPointer` and then print -`CustomSmartPointers created.`. At the end of `main`, our instances of +`CustomSmartPointers created`. At the end of `main`, our instances of `CustomSmartPointer` will go out of scope, and Rust will call the code we put in the `drop` method, printing our final message. Note that we didn’t need to call the `drop` method explicitly. @@ -84,7 +84,7 @@ functionality. Disabling `drop` isn’t usually necessary; the whole point of th `Drop` trait is that it’s taken care of automatically. Occasionally, however, you might want to clean up a value early. 
One example is when using smart pointers that manage locks: you might want to force the `drop` method that -releases the lock to run so other code in the same scope can acquire the lock. +releases the lock so that other code in the same scope can acquire the lock. Rust doesn’t let you call the `Drop` trait’s `drop` method manually; instead you have to call the `std::mem::drop` function provided by the standard library if you want to force a value to be dropped before the end of its scope. @@ -146,7 +146,7 @@ an argument. The function is in the prelude, so we can modify `main` in Listing # # impl Drop for CustomSmartPointer { # fn drop(&mut self) { -# println!("Dropping CustomSmartPointer!"); +# println!("Dropping CustomSmartPointer with data `{}`!", self.data); # } # } # diff --git a/src/doc/book/src/ch15-05-interior-mutability.md b/src/doc/book/src/ch15-05-interior-mutability.md index f43d549244..34c002b2fa 100644 --- a/src/doc/book/src/ch15-05-interior-mutability.md +++ b/src/doc/book/src/ch15-05-interior-mutability.md @@ -1,16 +1,14 @@ ## `RefCell` and the Interior Mutability Pattern - - *Interior mutability* is a design pattern in Rust that allows you to mutate data even when there are immutable references to that data; normally, this action is disallowed by the borrowing rules. To mutate data, the pattern uses `unsafe` code inside a data structure to bend Rust’s usual rules that govern -mutation and borrowing. We haven’t yet covered unsafe code; we will in -Chapter 19. We can use types that use the interior mutability pattern when we -can ensure that the borrowing rules will be followed at runtime, even though -the compiler can’t guarantee that. The `unsafe` code involved is then wrapped -in a safe API, and the outer type is still immutable. +mutation and borrowing. We haven’t yet covered unsafe code; we will in Chapter +19. We can use types that use the interior mutability pattern when we can +ensure that the borrowing rules will be followed at runtime, even though the +compiler can’t guarantee that. The `unsafe` code involved is then wrapped in a +safe API, and the outer type is still immutable. Let’s explore this concept by looking at the `RefCell` type that follows the interior mutability pattern. diff --git a/src/doc/book/src/ch16-02-message-passing.md b/src/doc/book/src/ch16-02-message-passing.md index 56181eaf79..6b5c23f87e 100644 --- a/src/doc/book/src/ch16-02-message-passing.md +++ b/src/doc/book/src/ch16-02-message-passing.md @@ -55,16 +55,14 @@ of the streams will end up in one river at the end. We’ll start with a single producer for now, but we’ll add multiple producers when we get this example working. - - The `mpsc::channel` function returns a tuple, the first element of which is the sending end and the second element is the receiving end. The abbreviations `tx` and `rx` are traditionally used in many fields for *transmitter* and *receiver* respectively, so we name our variables as such to indicate each end. We’re using a `let` statement with a pattern that destructures the tuples; we’ll -discuss the use of patterns in `let` statements and destructuring in -Chapter 18. Using a `let` statement this way is a convenient approach to -extract the pieces of the tuple returned by `mpsc::channel`. +discuss the use of patterns in `let` statements and destructuring in Chapter +18. Using a `let` statement this way is a convenient approach to extract the +pieces of the tuple returned by `mpsc::channel`. 
Let’s move the transmitting end into a spawned thread and have it send one string so the spawned thread is communicating with the main thread, as shown in diff --git a/src/doc/book/src/ch17-02-trait-objects.md b/src/doc/book/src/ch17-02-trait-objects.md index 6145f21c52..d7369bffa1 100644 --- a/src/doc/book/src/ch17-02-trait-objects.md +++ b/src/doc/book/src/ch17-02-trait-objects.md @@ -275,14 +275,15 @@ new type and draw it because `SelectBox` implements the `Draw` trait, which means it implements the `draw` method. This concept—of being concerned only with the messages a value responds to -rather than the value’s concrete type—is similar to the concept *duck typing* -in dynamically typed languages: if it walks like a duck and quacks like a duck, -then it must be a duck! In the implementation of `run` on `Screen` in Listing -17-5, `run` doesn’t need to know what the concrete type of each component is. -It doesn’t check whether a component is an instance of a `Button` or a -`SelectBox`, it just calls the `draw` method on the component. By specifying -`Box` as the type of the values in the `components` vector, we’ve -defined `Screen` to need values that we can call the `draw` method on. +rather than the value’s concrete type—is similar to the concept of *duck +typing* in dynamically typed languages: if it walks like a duck and quacks +like a duck, then it must be a duck! In the implementation of `run` on `Screen` +in Listing 17-5, `run` doesn’t need to know what the concrete type of each +component is. It doesn’t check whether a component is an instance of a `Button` +or a `SelectBox`, it just calls the `draw` method on the component. By +specifying `Box` as the type of the values in the `components` +vector, we’ve defined `Screen` to need values that we can call the `draw` +method on. The advantage of using trait objects and Rust’s type system to write code similar to code using duck typing is that we never have to check whether a diff --git a/src/doc/book/src/ch18-02-refutability.md b/src/doc/book/src/ch18-02-refutability.md index 55cb03812d..36fa17d48b 100644 --- a/src/doc/book/src/ch18-02-refutability.md +++ b/src/doc/book/src/ch18-02-refutability.md @@ -10,8 +10,9 @@ a_value` because if the value in the `a_value` variable is `None` rather than Function parameters, `let` statements, and `for` loops can only accept irrefutable patterns, because the program cannot do anything meaningful when -values don’t match. The `if let` and `while let` expressions only accept -refutable patterns, because by definition they’re intended to handle possible +values don’t match. The `if let` and `while let` expressions accept +refutable and irrefutable patterns, but the compiler warns against +irrefutable patterns because by definition they’re intended to handle possible failure: the functionality of a conditional is in its ability to perform differently depending on success or failure. @@ -69,9 +70,9 @@ patterns instead of `let` We’ve given the code an out! This code is perfectly valid, although it means we cannot use an irrefutable pattern without receiving an error. If we give `if let` a pattern that will always match, such as `x`, as shown in Listing 18-10, -it will not compile. +the compiler will give a warning. 
-```rust,ignore,does_not_compile +```rust,ignore if let x = 5 { println!("{}", x); }; @@ -84,11 +85,15 @@ Rust complains that it doesn’t make sense to use `if let` with an irrefutable pattern: ```text -error[E0162]: irrefutable if-let pattern - --> :2:8 +warning: irrefutable if-let pattern + --> :2:5 | -2 | if let x = 5 { - | ^ irrefutable pattern +2 | / if let x = 5 { +3 | | println!("{}", x); +4 | | }; + | |_^ + | + = note: #[warn(irrefutable_let_patterns)] on by default ``` For this reason, match arms must use refutable patterns, except for the last diff --git a/src/doc/book/src/ch18-03-pattern-syntax.md b/src/doc/book/src/ch18-03-pattern-syntax.md index 63eab2f7b2..31b96a88b3 100644 --- a/src/doc/book/src/ch18-03-pattern-syntax.md +++ b/src/doc/book/src/ch18-03-pattern-syntax.md @@ -711,11 +711,11 @@ fn main() { match x { Some(50) => println!("Got 50"), - Some(n) if n == y => println!("Matched, n = {:?}", n), + Some(n) if n == y => println!("Matched, n = {}", n), _ => println!("Default case, x = {:?}", x), } - println!("at the end: x = {:?}, y = {:?}", x, y); + println!("at the end: x = {:?}, y = {}", x, y); } ``` diff --git a/src/doc/book/src/ch19-01-unsafe-rust.md b/src/doc/book/src/ch19-01-unsafe-rust.md index 8c32b3663e..c7956e8627 100644 --- a/src/doc/book/src/ch19-01-unsafe-rust.md +++ b/src/doc/book/src/ch19-01-unsafe-rust.md @@ -251,7 +251,7 @@ fn split_at_mut(slice: &mut [i32], mid: usize) -> (&mut [i32], &mut [i32]) { This function first gets the total length of the slice. Then it asserts that the index given as a parameter is within the slice by checking whether it’s less than or equal to the length. The assertion means that if we pass an index -that is greater than the index to split the slice at, the function will panic +that is greater than the length to split the slice at, the function will panic before it attempts to use that index. Then we return two mutable slices in a tuple: one from the start of the diff --git a/src/doc/book/src/ch20-02-multithreaded.md b/src/doc/book/src/ch20-02-multithreaded.md index 5ca9358c70..1f80bbb208 100644 --- a/src/doc/book/src/ch20-02-multithreaded.md +++ b/src/doc/book/src/ch20-02-multithreaded.md @@ -364,7 +364,7 @@ impl ThreadPool { ``` We still use the `()` after `FnOnce` because this `FnOnce` represents a closure -that takes no parameters and doesn’t return a value. Just like function +that takes no parameters and returns the unit type `()`. Just like function definitions, the return type can be omitted from the signature, but even if we have no parameters, we still need the parentheses. diff --git a/src/doc/book/convert-quotes.sh b/src/doc/book/tools/convert-quotes.sh similarity index 100% rename from src/doc/book/convert-quotes.sh rename to src/doc/book/tools/convert-quotes.sh diff --git a/src/doc/book/doc-to-md.sh b/src/doc/book/tools/doc-to-md.sh similarity index 100% rename from src/doc/book/doc-to-md.sh rename to src/doc/book/tools/doc-to-md.sh diff --git a/src/doc/book/nostarch.sh b/src/doc/book/tools/nostarch.sh similarity index 100% rename from src/doc/book/nostarch.sh rename to src/doc/book/tools/nostarch.sh diff --git a/src/doc/grammar.md b/src/doc/grammar.md index ee9135b657..4501d74073 100644 --- a/src/doc/grammar.md +++ b/src/doc/grammar.md @@ -1,812 +1,7 @@ % Grammar -# Introduction +The Rust grammar may now be found in the [reference]. Additionally, the [grammar +working group] is working on producing a testable grammar. -This document is the primary reference for the Rust programming language grammar. 
It -provides only one kind of material: - - - Chapters that formally define the language grammar. - -This document does not serve as an introduction to the language. Background -familiarity with the language is assumed. A separate [guide] is available to -help acquire such background. - -This document also does not serve as a reference to the [standard] library -included in the language distribution. Those libraries are documented -separately by extracting documentation attributes from their source code. Many -of the features that one might expect to be language features are library -features in Rust, so what you're looking for may be there, not here. - -[guide]: guide.html -[standard]: std/index.html - -# Notation - -Rust's grammar is defined over Unicode codepoints, each conventionally denoted -`U+XXXX`, for 4 or more hexadecimal digits `X`. _Most_ of Rust's grammar is -confined to the ASCII range of Unicode, and is described in this document by a -dialect of Extended Backus-Naur Form (EBNF), specifically a dialect of EBNF -supported by common automated LL(k) parsing tools such as `llgen`, rather than -the dialect given in ISO 14977. The dialect can be defined self-referentially -as follows: - -```antlr -grammar : rule + ; -rule : nonterminal ':' productionrule ';' ; -productionrule : production [ '|' production ] * ; -production : term * ; -term : element repeats ; -element : LITERAL | IDENTIFIER | '[' productionrule ']' ; -repeats : [ '*' | '+' ] NUMBER ? | NUMBER ? | '?' ; -``` - -Where: - -- Whitespace in the grammar is ignored. -- Square brackets are used to group rules. -- `LITERAL` is a single printable ASCII character, or an escaped hexadecimal - ASCII code of the form `\xQQ`, in single quotes, denoting the corresponding - Unicode codepoint `U+00QQ`. -- `IDENTIFIER` is a nonempty string of ASCII letters and underscores. -- The `repeat` forms apply to the adjacent `element`, and are as follows: - - `?` means zero or one repetition - - `*` means zero or more repetitions - - `+` means one or more repetitions - - NUMBER trailing a repeat symbol gives a maximum repetition count - - NUMBER on its own gives an exact repetition count - -This EBNF dialect should hopefully be familiar to many readers. - -## Unicode productions - -A few productions in Rust's grammar permit Unicode codepoints outside the ASCII -range. We define these productions in terms of character properties specified -in the Unicode standard, rather than in terms of ASCII-range codepoints. The -section [Special Unicode Productions](#special-unicode-productions) lists these -productions. - -## String table productions - -Some rules in the grammar — notably [unary -operators](#unary-operator-expressions), [binary -operators](#binary-operator-expressions), and [keywords](#keywords) — are -given in a simplified form: as a listing of a table of unquoted, printable -whitespace-separated strings. These cases form a subset of the rules regarding -the [token](#tokens) rule, and are assumed to be the result of a -lexical-analysis phase feeding the parser, driven by a DFA, operating over the -disjunction of all such string table entries. - -When such a string enclosed in double-quotes (`"`) occurs inside the grammar, -it is an implicit reference to a single member of such a string table -production. See [tokens](#tokens) for more information. - -# Lexical structure - -## Input format - -Rust input is interpreted as a sequence of Unicode codepoints encoded in UTF-8. 
-Most Rust grammar rules are defined in terms of printable ASCII-range -codepoints, but a small number are defined in terms of Unicode properties or -explicit codepoint lists. [^inputformat] - -[^inputformat]: Substitute definitions for the special Unicode productions are - provided to the grammar verifier, restricted to ASCII range, when verifying the - grammar in this document. - -## Special Unicode Productions - -The following productions in the Rust grammar are defined in terms of Unicode -properties: `ident`, `non_null`, `non_eol`, `non_single_quote` and -`non_double_quote`. - -### Identifiers - -The `ident` production is any nonempty Unicode string of -the following form: - -- The first character is in one of the following ranges `U+0041` to `U+005A` -("A" to "Z"), `U+0061` to `U+007A` ("a" to "z"), or `U+005F` ("\_"). -- The remaining characters are in the range `U+0030` to `U+0039` ("0" to "9"), -or any of the prior valid initial characters. - -as long as the identifier does _not_ occur in the set of [keywords](#keywords). - -### Delimiter-restricted productions - -Some productions are defined by exclusion of particular Unicode characters: - -- `non_null` is any single Unicode character aside from `U+0000` (null) -- `non_eol` is any single Unicode character aside from `U+000A` (`'\n'`) -- `non_single_quote` is any single Unicode character aside from `U+0027` (`'`) -- `non_double_quote` is any single Unicode character aside from `U+0022` (`"`) - -## Comments - -```antlr -comment : block_comment | line_comment ; -block_comment : "/*" block_comment_body * "*/" ; -block_comment_body : [block_comment | character] * ; -line_comment : "//" non_eol * ; -``` - -**FIXME:** add doc grammar? - -## Whitespace - -```antlr -whitespace_char : '\x20' | '\x09' | '\x0a' | '\x0d' ; -whitespace : [ whitespace_char | comment ] + ; -``` - -## Tokens - -```antlr -simple_token : keyword | unop | binop ; -token : simple_token | ident | literal | symbol | whitespace token ; -``` - -### Keywords - -

- -| | | | | | -|----------|----------|----------|----------|----------| -| _ | abstract | alignof | as | become | -| box | break | const | continue | crate | -| do | else | enum | extern | false | -| final | fn | for | if | impl | -| in | let | loop | macro | match | -| mod | move | mut | offsetof | override | -| priv | proc | pub | pure | ref | -| return | Self | self | sizeof | static | -| struct | super | trait | true | type | -| typeof | unsafe | unsized | use | virtual | -| where | while | yield | | | - - -Each of these keywords has special meaning in its grammar, and all of them are -excluded from the `ident` rule. - -Not all of these keywords are used by the language. Some of them were used -before Rust 1.0, and were left reserved once their implementations were -removed. Some of them were reserved before 1.0 to make space for possible -future features. - -### Literals - -```antlr -lit_suffix : ident; -literal : [ string_lit | char_lit | byte_string_lit | byte_lit | num_lit | bool_lit ] lit_suffix ?; -``` - -The optional `lit_suffix` production is only used for certain numeric literals, -but is reserved for future extension. That is, the above gives the lexical -grammar, but a Rust parser will reject everything but the 12 special cases -mentioned in [Number literals](reference/tokens.html#number-literals) in the -reference. - -#### Character and string literals - -```antlr -char_lit : '\x27' char_body '\x27' ; -string_lit : '"' string_body * '"' | 'r' raw_string ; - -char_body : non_single_quote - | '\x5c' [ '\x27' | common_escape | unicode_escape ] ; - -string_body : non_double_quote - | '\x5c' [ '\x22' | common_escape | unicode_escape ] ; -raw_string : '"' raw_string_body '"' | '#' raw_string '#' ; - -common_escape : '\x5c' - | 'n' | 'r' | 't' | '0' - | 'x' hex_digit 2 -unicode_escape : 'u' '{' hex_digit+ 6 '}'; - -hex_digit : 'a' | 'b' | 'c' | 'd' | 'e' | 'f' - | 'A' | 'B' | 'C' | 'D' | 'E' | 'F' - | dec_digit ; -oct_digit : '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' ; -dec_digit : '0' | nonzero_dec ; -nonzero_dec: '1' | '2' | '3' | '4' - | '5' | '6' | '7' | '8' | '9' ; -``` - -#### Byte and byte string literals - -```antlr -byte_lit : "b\x27" byte_body '\x27' ; -byte_string_lit : "b\x22" string_body * '\x22' | "br" raw_byte_string ; - -byte_body : ascii_non_single_quote - | '\x5c' [ '\x27' | common_escape ] ; - -byte_string_body : ascii_non_double_quote - | '\x5c' [ '\x22' | common_escape ] ; -raw_byte_string : '"' raw_byte_string_body '"' | '#' raw_byte_string '#' ; - -``` - -#### Number literals - -```antlr -num_lit : nonzero_dec [ dec_digit | '_' ] * float_suffix ? - | '0' [ [ dec_digit | '_' ] * float_suffix ? - | 'b' [ '1' | '0' | '_' ] + - | 'o' [ oct_digit | '_' ] + - | 'x' [ hex_digit | '_' ] + ] ; - -float_suffix : [ exponent | '.' dec_lit exponent ? ] ? ; - -exponent : ['E' | 'e'] ['-' | '+' ] ? dec_lit ; -dec_lit : [ dec_digit | '_' ] + ; -``` - -#### Boolean literals - -```antlr -bool_lit : [ "true" | "false" ] ; -``` - -The two values of the boolean type are written `true` and `false`. - -### Symbols - -```antlr -symbol : "::" | "->" - | '#' | '[' | ']' | '(' | ')' | '{' | '}' - | ',' | ';' ; -``` - -Symbols are a general class of printable [tokens](#tokens) that play structural -roles in a variety of grammar productions. 
They are cataloged here for -completeness as the set of remaining miscellaneous printable tokens that do not -otherwise appear as [unary operators](#unary-operator-expressions), [binary -operators](#binary-operator-expressions), or [keywords](#keywords). - -## Paths - -```antlr -expr_path : [ "::" ] ident [ "::" expr_path_tail ] + ; -expr_path_tail : '<' type_expr [ ',' type_expr ] + '>' - | expr_path ; - -type_path : ident [ type_path_tail ] + ; -type_path_tail : '<' type_expr [ ',' type_expr ] + '>' - | "::" type_path ; -``` - -# Syntax extensions - -## Macros - -```antlr -expr_macro_rules : "macro_rules" '!' ident '(' macro_rule * ')' ';' - | "macro_rules" '!' ident '{' macro_rule * '}' ; -macro_rule : '(' matcher * ')' "=>" '(' transcriber * ')' ';' ; -matcher : '(' matcher * ')' | '[' matcher * ']' - | '{' matcher * '}' | '$' ident ':' ident - | '$' '(' matcher * ')' sep_token? [ '*' | '+' ] - | non_special_token ; -transcriber : '(' transcriber * ')' | '[' transcriber * ']' - | '{' transcriber * '}' | '$' ident - | '$' '(' transcriber * ')' sep_token? [ '*' | '+' ] - | non_special_token ; -``` - -# Crates and source files - -**FIXME:** grammar? What production covers #![crate_id = "foo"] ? - -# Items and attributes - -**FIXME:** grammar? - -## Items - -```antlr -item : vis ? mod_item | fn_item | type_item | struct_item | enum_item - | const_item | static_item | trait_item | impl_item | extern_block_item ; -``` - -### Type Parameters - -**FIXME:** grammar? - -### Modules - -```antlr -mod_item : "mod" ident ( ';' | '{' mod '}' ); -mod : [ view_item | item ] * ; -``` - -#### View items - -```antlr -view_item : extern_crate_decl | use_decl ';' ; -``` - -##### Extern crate declarations - -```antlr -extern_crate_decl : "extern" "crate" crate_name -crate_name: ident | ( ident "as" ident ) -``` - -##### Use declarations - -```antlr -use_decl : vis ? "use" [ path "as" ident - | path_glob ] ; - -path_glob : ident [ "::" [ path_glob - | '*' ] ] ? - | '{' path_item [ ',' path_item ] * '}' ; - -path_item : ident | "self" ; -``` - -### Functions - -**FIXME:** grammar? - -#### Generic functions - -**FIXME:** grammar? - -#### Unsafety - -**FIXME:** grammar? - -##### Unsafe functions - -**FIXME:** grammar? - -##### Unsafe blocks - -**FIXME:** grammar? - -#### Diverging functions - -**FIXME:** grammar? - -### Type definitions - -**FIXME:** grammar? - -### Structures - -**FIXME:** grammar? - -### Enumerations - -**FIXME:** grammar? - -### Constant items - -```antlr -const_item : "const" ident ':' type '=' expr ';' ; -``` - -### Static items - -```antlr -static_item : "static" ident ':' type '=' expr ';' ; -``` - -#### Mutable statics - -**FIXME:** grammar? - -### Traits - -**FIXME:** grammar? - -### Implementations - -**FIXME:** grammar? - -### External blocks - -```antlr -extern_block_item : "extern" '{' extern_block '}' ; -extern_block : [ foreign_fn ] * ; -``` - -## Visibility and Privacy - -```antlr -vis : "pub" ; -``` -### Re-exporting and Visibility - -See [Use declarations](#use-declarations). - -## Attributes - -```antlr -attribute : '#' '!' ? '[' meta_item ']' ; -meta_item : ident [ '=' literal - | '(' meta_seq ')' ] ? ; -meta_seq : meta_item [ ',' meta_seq ] ? ; -``` - -# Statements and expressions - -## Statements - -```antlr -stmt : decl_stmt | expr_stmt | ';' ; -``` - -### Declaration statements - -```antlr -decl_stmt : item | let_decl ; -``` - -#### Item declarations - -See [Items](#items). - -#### Variable declarations - -```antlr -let_decl : "let" pat [':' type ] ? [ init ] ? 
';' ; -init : [ '=' ] expr ; -``` - -### Expression statements - -```antlr -expr_stmt : expr ';' ; -``` - -## Expressions - -```antlr -expr : literal | path | tuple_expr | unit_expr | struct_expr - | block_expr | method_call_expr | field_expr | array_expr - | idx_expr | range_expr | unop_expr | binop_expr - | paren_expr | call_expr | lambda_expr | while_expr - | loop_expr | break_expr | continue_expr | for_expr - | if_expr | match_expr | if_let_expr | while_let_expr - | return_expr ; -``` - -#### Lvalues, rvalues and temporaries - -**FIXME:** grammar? - -#### Moved and copied types - -**FIXME:** Do we want to capture this in the grammar as different productions? - -### Literal expressions - -See [Literals](#literals). - -### Path expressions - -See [Paths](#paths). - -### Tuple expressions - -```antlr -tuple_expr : '(' [ expr [ ',' expr ] * | expr ',' ] ? ')' ; -``` - -### Unit expressions - -```antlr -unit_expr : "()" ; -``` - -### Structure expressions - -```antlr -struct_expr_field_init : ident | ident ':' expr ; -struct_expr : expr_path '{' struct_expr_field_init - [ ',' struct_expr_field_init ] * - [ ".." expr ] '}' | - expr_path '(' expr - [ ',' expr ] * ')' | - expr_path ; -``` - -### Block expressions - -```antlr -block_expr : '{' [ stmt | item ] * - [ expr ] '}' ; -``` - -### Method-call expressions - -```antlr -method_call_expr : expr '.' ident paren_expr_list ; -``` - -### Field expressions - -```antlr -field_expr : expr '.' ident ; -``` - -### Array expressions - -```antlr -array_expr : '[' "mut" ? array_elems? ']' ; - -array_elems : [expr [',' expr]*] | [expr ';' expr] ; -``` - -### Index expressions - -```antlr -idx_expr : expr '[' expr ']' ; -``` - -### Range expressions - -```antlr -range_expr : expr ".." expr | - expr ".." | - ".." expr | - ".." ; -``` - -### Unary operator expressions - -```antlr -unop_expr : unop expr ; -unop : '-' | '*' | '!' ; -``` - -### Binary operator expressions - -```antlr -binop_expr : expr binop expr | type_cast_expr - | assignment_expr | compound_assignment_expr ; -binop : arith_op | bitwise_op | lazy_bool_op | comp_op -``` - -#### Arithmetic operators - -```antlr -arith_op : '+' | '-' | '*' | '/' | '%' ; -``` - -#### Bitwise operators - -```antlr -bitwise_op : '&' | '|' | '^' | "<<" | ">>" ; -``` - -#### Lazy boolean operators - -```antlr -lazy_bool_op : "&&" | "||" ; -``` - -#### Comparison operators - -```antlr -comp_op : "==" | "!=" | '<' | '>' | "<=" | ">=" ; -``` - -#### Type cast expressions - -```antlr -type_cast_expr : value "as" type ; -``` - -#### Assignment expressions - -```antlr -assignment_expr : expr '=' expr ; -``` - -#### Compound assignment expressions - -```antlr -compound_assignment_expr : expr [ arith_op | bitwise_op ] '=' expr ; -``` - -### Grouped expressions - -```antlr -paren_expr : '(' expr ')' ; -``` - -### Call expressions - -```antlr -expr_list : [ expr [ ',' expr ]* ] ? ; -paren_expr_list : '(' expr_list ')' ; -call_expr : expr paren_expr_list ; -``` - -### Lambda expressions - -```antlr -ident_list : [ ident [ ',' ident ]* ] ? ; -lambda_expr : '|' ident_list '|' expr ; -``` - -### While loops - -```antlr -while_expr : [ lifetime ':' ] ? "while" no_struct_literal_expr '{' block '}' ; -``` - -### Infinite loops - -```antlr -loop_expr : [ lifetime ':' ] ? "loop" '{' block '}'; -``` - -### Break expressions - -```antlr -break_expr : "break" [ lifetime ] ?; -``` - -### Continue expressions - -```antlr -continue_expr : "continue" [ lifetime ] ?; -``` - -### For expressions - -```antlr -for_expr : [ lifetime ':' ] ? 
"for" pat "in" no_struct_literal_expr '{' block '}' ; -``` - -### If expressions - -```antlr -if_expr : "if" no_struct_literal_expr '{' block '}' - else_tail ? ; - -else_tail : "else" [ if_expr | if_let_expr - | '{' block '}' ] ; -``` - -### Match expressions - -```antlr -match_expr : "match" no_struct_literal_expr '{' match_arm * '}' ; - -match_arm : attribute * match_pat "=>" [ expr "," | '{' block '}' ] ; - -match_pat : pat [ '|' pat ] * [ "if" expr ] ? ; -``` - -### If let expressions - -```antlr -if_let_expr : "if" "let" pat '=' expr '{' block '}' - else_tail ? ; -``` - -### While let loops - -```antlr -while_let_expr : [ lifetime ':' ] ? "while" "let" pat '=' expr '{' block '}' ; -``` - -### Return expressions - -```antlr -return_expr : "return" expr ? ; -``` - -# Type system - -**FIXME:** is this entire chapter relevant here? Or should it all have been covered by some production already? - -## Types - -### Primitive types - -**FIXME:** grammar? - -#### Machine types - -**FIXME:** grammar? - -#### Machine-dependent integer types - -**FIXME:** grammar? - -### Textual types - -**FIXME:** grammar? - -### Tuple types - -**FIXME:** grammar? - -### Array, and Slice types - -**FIXME:** grammar? - -### Structure types - -**FIXME:** grammar? - -### Enumerated types - -**FIXME:** grammar? - -### Pointer types - -**FIXME:** grammar? - -### Function types - -**FIXME:** grammar? - -### Closure types - -```antlr -closure_type := [ 'unsafe' ] [ '<' lifetime-list '>' ] '|' arg-list '|' - [ ':' bound-list ] [ '->' type ] -lifetime-list := lifetime | lifetime ',' lifetime-list -arg-list := ident ':' type | ident ':' type ',' arg-list -``` - -### Never type -An empty type - -```antlr -never_type : "!" ; -``` - -### Object types - -**FIXME:** grammar? - -### Type parameters - -**FIXME:** grammar? - -### Type parameter bounds - -```antlr -bound-list := bound | bound '+' bound-list '+' ? -bound := ty_bound | lt_bound -lt_bound := lifetime -ty_bound := ty_bound_noparen | (ty_bound_noparen) -ty_bound_noparen := [?] [ for ] simple_path -``` - -### Self types - -**FIXME:** grammar? - -## Type kinds - -**FIXME:** this is probably not relevant to the grammar... - -# Memory and concurrency models - -**FIXME:** is this entire chapter relevant here? Or should it all have been covered by some production already? - -## Memory model - -### Memory allocation and lifetime - -### Memory ownership - -### Variables - -### Boxes - -## Threads - -### Communication between threads - -### Thread lifecycle +[reference]: https://doc.rust-lang.org/reference/ +[grammar working group]: https://github.com/rust-lang/wg-grammar diff --git a/src/doc/nomicon/src/atomics.md b/src/doc/nomicon/src/atomics.md index 4cd209a9d0..f750f096c5 100644 --- a/src/doc/nomicon/src/atomics.md +++ b/src/doc/nomicon/src/atomics.md @@ -1,20 +1,23 @@ # Atomics -Rust pretty blatantly just inherits C11's memory model for atomics. This is not +Rust pretty blatantly just inherits the memory model for atomics from C++20. This is not due to this model being particularly excellent or easy to understand. Indeed, this model is quite complex and known to have [several flaws][C11-busted]. Rather, it is a pragmatic concession to the fact that *everyone* is pretty bad at modeling atomics. At very least, we can benefit from existing tooling and -research around C. +research around the C/C++ memory model. +(You'll often see this model referred to as "C/C++11" or just "C11". 
C just copies +the C++ memory model; and C++11 was the first version of the model but it has +received some bugfixes since then.) Trying to fully explain the model in this book is fairly hopeless. It's defined in terms of madness-inducing causality graphs that require a full book to properly understand in a practical way. If you want all the nitty-gritty -details, you should check out [C's specification (Section 7.17)][C11-model]. +details, you should check out the [C++20 draft specification (Section 31)][C++-model]. Still, we'll try to cover the basics and some of the problems Rust developers face. -The C11 memory model is fundamentally about trying to bridge the gap between the +The C++ memory model is fundamentally about trying to bridge the gap between the semantics we want, the optimizations compilers want, and the inconsistent chaos our hardware wants. *We* would like to just write programs and have them do exactly what we said but, you know, fast. Wouldn't that be great? @@ -113,7 +116,7 @@ programming: # Data Accesses -The C11 memory model attempts to bridge the gap by allowing us to talk about the +The C++ memory model attempts to bridge the gap by allowing us to talk about the *causality* of our program. Generally, this is by establishing a *happens before* relationship between parts of the program and the threads that are running them. This gives the hardware and compiler room to optimize the program @@ -148,7 +151,7 @@ propagated to other threads. The set of orderings Rust exposes are: * Acquire * Relaxed -(Note: We explicitly do not expose the C11 *consume* ordering) +(Note: We explicitly do not expose the C++ *consume* ordering) TODO: negative reasoning vs positive reasoning? TODO: "can't forget to synchronize" @@ -252,4 +255,4 @@ relaxed operations can be cheaper on weakly-ordered platforms. [C11-busted]: http://plv.mpi-sws.org/c11comp/popl15.pdf -[C11-model]: http://www.open-std.org/jtc1/sc22/wg14/www/standards.html#9899 +[C++-model]: http://eel.is/c++draft/atomics.order diff --git a/src/doc/nomicon/src/working-with-unsafe.md b/src/doc/nomicon/src/working-with-unsafe.md index 1864dd10ca..c29a75a531 100644 --- a/src/doc/nomicon/src/working-with-unsafe.md +++ b/src/doc/nomicon/src/working-with-unsafe.md @@ -16,10 +16,14 @@ fn index(idx: usize, arr: &[u8]) -> Option { } ``` -This function is safe and correct. We check that the index is in bounds, and if it -is, index into the array in an unchecked manner. But even in such a trivial -function, the scope of the unsafe block is questionable. Consider changing the -`<` to a `<=`: +This function is safe and correct. We check that the index is in bounds, and if +it is, index into the array in an unchecked manner. We say that such a correct +unsafely implemented function is *sound*, meaning that safe code cannot cause +Undefined Behavior through it (which, remember, is the single fundamental +property of Safe Rust). + +But even in such a trivial function, the scope of the unsafe block is +questionable. Consider changing the `<` to a `<=`: ```rust fn index(idx: usize, arr: &[u8]) -> Option { @@ -33,10 +37,10 @@ fn index(idx: usize, arr: &[u8]) -> Option { } ``` -This program is now unsound, and yet *we only modified safe code*. This is the -fundamental problem of safety: it's non-local. The soundness of our unsafe -operations necessarily depends on the state established by otherwise -"safe" operations. +This program is now *unsound*, Safe Rust can cause Undefined Behavior, and yet +*we only modified safe code*. 
This is the fundamental problem of safety: it's +non-local. The soundness of our unsafe operations necessarily depends on the +state established by otherwise "safe" operations. Safety is modular in the sense that opting into unsafety doesn't require you to consider arbitrary other kinds of badness. For instance, doing an unchecked diff --git a/src/doc/reference/.travis.yml b/src/doc/reference/.travis.yml index 3d8979ffe0..73304507e5 100644 --- a/src/doc/reference/.travis.yml +++ b/src/doc/reference/.travis.yml @@ -1,7 +1,8 @@ -language: rust +language: shell -rust: - - nightly +before_install: + - curl -sSL https://sh.rustup.rs | sh -s -- -y --default-toolchain=nightly --profile=minimal -c rust-docs + - export PATH="$HOME/.cargo/bin:$PATH" install: - travis_retry curl -Lf https://github.com/rust-lang-nursery/mdBook/releases/download/v0.3.1/mdbook-v0.3.1-x86_64-unknown-linux-gnu.tar.gz | tar -xz --directory=$HOME/.cargo/bin diff --git a/src/doc/reference/src/SUMMARY.md b/src/doc/reference/src/SUMMARY.md index b1ba241375..fb0e6574d8 100644 --- a/src/doc/reference/src/SUMMARY.md +++ b/src/doc/reference/src/SUMMARY.md @@ -45,6 +45,7 @@ - [Diagnostics](attributes/diagnostics.md) - [Code generation](attributes/codegen.md) - [Limits](attributes/limits.md) + - [Type System](attributes/type_system.md) - [Statements and expressions](statements-and-expressions.md) - [Statements](statements.md) diff --git a/src/doc/reference/src/attributes.md b/src/doc/reference/src/attributes.md index 059d4dedf5..dfffe5ca01 100644 --- a/src/doc/reference/src/attributes.md +++ b/src/doc/reference/src/attributes.md @@ -50,6 +50,9 @@ Attributes may be applied to many things in the language: * [Generic lifetime or type parameter][generics] accept outer attributes. * Expressions accept outer attributes in limited situations, see [Expression Attributes] for details. +* [Function][functions], [closure]] and [function pointer] + parameters accept outer attributes. This includes attributes on variadic parameters + denoted with `...` in function pointers and [external blocks][variadic functions]. Some examples of attributes: @@ -86,8 +89,7 @@ fn some_unused_variables() { ## Meta Item Attribute Syntax A "meta item" is the syntax used for the _Attr_ rule by most [built-in -attributes] and the [`meta` macro fragment specifier]. It has the following -grammar: +attributes]. It has the following grammar: > **Syntax**\ > _MetaItem_ :\ @@ -236,6 +238,9 @@ The following is an index of all built-in attributes. - Features - `feature` — Used to enable unstable or experimental compiler features. See [The Unstable Book] for features implemented in `rustc`. +- Type System + - [`non_exhaustive`] — Indicate that a type will have more fields/variants + added in future. [Doc comments]: comments.md#doc-comments [ECMA-334]: https://www.ecma-international.org/publications/standards/Ecma-334.htm @@ -268,7 +273,6 @@ The following is an index of all built-in attributes. [`link`]: items/external-blocks.md#the-link-attribute [`macro_export`]: macros-by-example.md#path-based-scope [`macro_use`]: macros-by-example.md#the-macro_use-attribute -[`meta` macro fragment specifier]: macros-by-example.md [`must_use`]: attributes/diagnostics.md#the-must_use-attribute [`no_builtins`]: attributes/codegen.md#the-no_builtins-attribute [`no_implicit_prelude`]: items/modules.md#prelude-items @@ -276,6 +280,7 @@ The following is an index of all built-in attributes. 
[`no_main`]: crates-and-source-files.md#the-no_main-attribute [`no_mangle`]: abi.md#the-no_mangle-attribute [`no_std`]: crates-and-source-files.md#preludes-and-no_std +[`non_exhaustive`]: attributes/type_system.md#the-non_exhaustive-attribute [`panic_handler`]: runtime.md#the-panic_handler-attribute [`path`]: items/modules.md#the-path-attribute [`proc_macro_attribute`]: procedural-macros.md#attribute-macros @@ -306,3 +311,6 @@ The following is an index of all built-in attributes. [statements]: statements.md [struct]: items/structs.md [union]: items/unions.md +[closure]: expressions/closure-expr.md +[function pointer]: types/function-pointer.md +[variadic functions]: items/external-blocks.html#variadic-functions diff --git a/src/doc/reference/src/attributes/type_system.md b/src/doc/reference/src/attributes/type_system.md new file mode 100644 index 0000000000..52bbb68f18 --- /dev/null +++ b/src/doc/reference/src/attributes/type_system.md @@ -0,0 +1,140 @@ +# Type system attributes + +The following [attributes] are used for changing how a type can be used. + +## The `non_exhaustive` attribute + +The *`non_exhaustive` attribute* indicates that a type or variant may have +more fields or variants added in the future. It can be applied to +[`struct`s][struct], [`enum`s][enum], and `enum` variants. + +The `non_exhaustive` attribute uses the [_MetaWord_] syntax and thus does not +take any inputs. + +Within the defining crate, `non_exhaustive` has no effect. + +```rust +#[non_exhaustive] +pub struct Config { + pub window_width: u16, + pub window_height: u16, +} + +#[non_exhaustive] +pub enum Error { + Message(String), + Other, +} + +pub enum Message { + #[non_exhaustive] Send { from: u32, to: u32, contents: String }, + #[non_exhaustive] Reaction(u32), + #[non_exhaustive] Quit, +} + +// Non-exhaustive structs can be constructed as normal within the defining crate. +let config = Config { window_width: 640, window_height: 480 }; + +// Non-exhaustive structs can be matched on exhaustively within the defining crate. +if let Config { window_width, window_height } = config { + // ... +} + +let error = Error::Other; +let message = Message::Reaction(3); + +// Non-exhaustive enums can be matched on exhaustively within the defining crate. +match error { + Error::Message(ref s) => { }, + Error::Other => { }, +} + +match message { + // Non-exhaustive variants can be matched on exhaustively within the defining crate. + Message::Send { from, to, contents } => { }, + Message::Reaction(id) => { }, + Message::Quit => { }, +} +``` + +Outside of the defining crate, types annotated with `non_exhaustive` have limitations that +preserve backwards compatibility when new fields or variants are added. + +Non-exhaustive types cannot be constructed outside of the defining crate: + +- Non-exhaustive variants ([`struct`][struct] or [`enum` variant][enum]) cannot be constructed + with a [_StructExpression_] \(including with [functional update syntax]). +- [`enum`][enum] instances can be constructed in an [_EnumerationVariantExpression_]. + +```rust,ignore (requires multiple crates) +// `Config`, `Error`, and `Message` are types defined in an upstream crate that have been +// annotated as `#[non_exhaustive]`. +use upstream::{Config, Error, Message}; + +// Cannot construct an instance of `Config`, if new fields were added in +// a new version of `upstream` then this would fail to compile, so it is +// disallowed. 
+let config = Config { window_width: 640, window_height: 480 }; + +// Can construct an instance of `Error`, new variants being introduced would +// not result in this failing to compile. +let error = Error::Message("foo".to_string()); + +// Cannot construct an instance of `Message::Send` or `Message::Reaction`, +// if new fields were added in a new version of `upstream` then this would +// fail to compile, so it is disallowed. +let message = Message::Send { from: 0, to: 1, contents: "foo".to_string(), }; +let message = Message::Reaction(0); + +// Cannot construct an instance of `Message::Quit`, if this were converted to +// a tuple-variant `upstream` then this would fail to compile. +let message = Message::Quit; +``` + +There are limitations when matching on non-exhaustive types outside of the defining crate: + +- When pattern matching on a non-exhaustive variant ([`struct`][struct] or [`enum` variant][enum]), + a [_StructPattern_] must be used which must include a `..`. Tuple variant constructor visibility + is lowered to `min($vis, pub(crate))`. +- When pattern matching on a non-exhaustive [`enum`][enum], matching on a variant does not + contribute towards the exhaustiveness of the arms. + +```rust, ignore (requires multiple crates) +// `Config`, `Error`, and `Message` are types defined in an upstream crate that have been +// annotated as `#[non_exhaustive]`. +use upstream::{Config, Error, Message}; + +// Cannot match on a non-exhaustive enum without including a wildcard arm. +match error { + Error::Message(ref s) => { }, + Error::Other => { }, + // would compile with: `_ => {},` +} + +// Cannot match on a non-exhaustive struct without a wildcard. +if let Ok(Config { window_width, window_height }) = config { + // would compile with: `..` +} + +match message { + // Cannot match on a non-exhaustive struct enum variant without including a wildcard. + Message::Send { from, to, contents } => { }, + // Cannot match on a non-exhaustive tuple or unit enum variant. + Message::Reaction(type) => { }, + Message::Quit => { }, +} +``` + +Non-exhaustive types are always considered inhabited in downstream crates. 
+ +[_EnumerationVariantExpression_]: ../expressions/enum-variant-expr.md +[_MetaWord_]: ../attributes.md#meta-item-attribute-syntax +[_StructExpression_]: ../expressions/struct-expr.md +[_StructPattern_]: ../patterns.md#struct-patterns +[_TupleStructPattern_]: ../patterns.md#tuple-struct-patterns +[`if let`]: ../expressions/if-expr.md#if-let-expressions +[`match`]: ../expressions/match-expr.md +[attributes]: ../attributes.md +[enum]: ../items/enumerations.md +[functional update syntax]: ../expressions/struct-expr.md#functional-update-syntax +[struct]: ../items/structs.md diff --git a/src/doc/reference/src/expressions/await-expr.md b/src/doc/reference/src/expressions/await-expr.md index 95037d73d3..a6e7dc5831 100644 --- a/src/doc/reference/src/expressions/await-expr.md +++ b/src/doc/reference/src/expressions/await-expr.md @@ -52,12 +52,13 @@ Effectively, an `.await` expression is roughly equivalent to the following (this desugaring is not normative): ```rust,ignore -let future = /* */; -loop { - let mut pin = unsafe { Pin::new_unchecked(&mut future) }; - match Pin::future::poll(Pin::borrow(&mut pin), &mut current_context) { - Poll::Ready(r) => break r, - Poll::Pending => yield Poll::Pending, +match /* */ { + mut pinned => loop { + let mut pin = unsafe { Pin::new_unchecked(&mut pinned) }; + match Pin::future::poll(Pin::borrow(&mut pin), &mut current_context) { + Poll::Ready(r) => break r, + Poll::Pending => yield Poll::Pending, + } } } ``` diff --git a/src/doc/reference/src/expressions/closure-expr.md b/src/doc/reference/src/expressions/closure-expr.md index aa9299bd6d..74b8e203c6 100644 --- a/src/doc/reference/src/expressions/closure-expr.md +++ b/src/doc/reference/src/expressions/closure-expr.md @@ -10,7 +10,7 @@ >    _ClosureParam_ (`,` _ClosureParam_)\* `,`? > > _ClosureParam_ :\ ->    [_Pattern_] ( `:` [_Type_] )? +>    [_OuterAttribute_]\* [_Pattern_] ( `:` [_Type_] )? A _closure expression_ defines a closure and denotes it as a value, in a single expression. A closure expression is a pipe-symbol-delimited (`|`) list of @@ -67,9 +67,15 @@ let word = "konnichiwa".to_owned(); ten_times(move |j| println!("{}, {}", word, j)); ``` +## Attributes on closure parameters + +Attributes on closure parameters follow the same rules and restrictions as +[regular function parameters]. + [block]: block-expr.md [function definitions]: ../items/functions.md [patterns]: ../patterns.md +[regular function parameters]: ../items/functions.md#attributes-on-function-parameters [_Expression_]: ../expressions.md [_BlockExpression_]: block-expr.md @@ -77,3 +83,4 @@ ten_times(move |j| println!("{}, {}", word, j)); [_Pattern_]: ../patterns.md [_Type_]: ../types.md#type-expressions [`let` binding]: ../statements.md#let-statements +[_OuterAttribute_]: ../attributes.md \ No newline at end of file diff --git a/src/doc/reference/src/glossary.md b/src/doc/reference/src/glossary.md index 6dc22b860f..36c0c9314a 100644 --- a/src/doc/reference/src/glossary.md +++ b/src/doc/reference/src/glossary.md @@ -65,6 +65,12 @@ For example, `2 + (3 * 4)` is an expression that returns the value 14. An [item] that is not a member of an [implementation], such as a *free function* or a *free const*. Contrast to an [associated item]. +### Inhabited + +A type is inhabited if it has constructors and therefore can be instantiated. An inhabited type is +not "empty" in the sense that there can be values of the type. Opposite of +[Uninhabited](#uninhabited). 
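
A small illustrative sketch of the distinction (the `Status` and `Void` types here are invented for the example and are not part of the glossary):

```rust
// Inhabited: `Status` has constructors, so values of the type can exist.
#[allow(dead_code)]
enum Status { Ok, Err }

// Uninhabited: `Void` has no constructors, so no value of it can ever exist.
enum Void {}

// Because no `Void` value can be built, this function can only ever return `Ok`.
fn always_ok() -> Result<u32, Void> {
    Ok(42)
}

fn main() {
    let _status = Status::Ok; // constructing a value of an inhabited type
    let answer = match always_ok() {
        Ok(v) => v,
        // This arm can never run; an empty match on the uninhabited value
        // satisfies the type checker.
        Err(void) => match void {},
    };
    println!("{}", answer);
}
```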
+ ### Inherent implementation An [implementation] that applies to a nominal type, not to a trait-type pair. @@ -159,6 +165,13 @@ but is not limited to: process termination or corruption; improper, incorrect, or unintended computation; or platform-specific results. [More][undefined-behavior]. +### Uninhabited + +A type is uninhabited if it has no constructors and therefore can never be instantiated. An +uninhabited type is "empty" in the sense that there are no values of the type. The canonical +example of an uninhabited type is the [never type] `!`, or an enum with no variants +`enum Never { }`. Opposite of [Inhabited](#inhabited). + [alignment]: type-layout.md#size-and-alignment [associated item]: #associated-item [enums]: items/enumerations.md @@ -168,6 +181,7 @@ or unintended computation; or platform-specific results. [inherent implementation]: items/implementations.md#inherent-implementations [item]: items.md [method]: items/associated-items.md#methods +[never type]: types/never.md [object safety]: items/traits.md#object-safety [structs]: items/structs.md [trait objects]: types/trait-object.md diff --git a/src/doc/reference/src/items/associated-items.md b/src/doc/reference/src/items/associated-items.md index 29ea6da9b9..a7bc3de86d 100644 --- a/src/doc/reference/src/items/associated-items.md +++ b/src/doc/reference/src/items/associated-items.md @@ -86,8 +86,13 @@ let _: f64 = f64::from_i32(42); >       [_BlockExpression_] > > _SelfParam_ :\ ->       (`&` | `&` [_Lifetime_])? `mut`? `self`\ ->    | `mut`? `self` (`:` [_Type_])? +>    [_OuterAttribute_]\* ( _ShorthandSelf_ | _TypedSelf_ ) +> +> _ShorthandSelf_ :\ +>    (`&` | `&` [_Lifetime_])? `mut`? `self` +> +> _TypedSelf_ :\ +>    `mut`? `self` `:` [_Type_] Associated functions whose first parameter is named `self` are called *methods* and may be invoked using the [method call operator], for example, `x.foo()`, as @@ -190,6 +195,11 @@ let bounding_box = circle_shape.bounding_box(); > methods with anonymous parameters (e.g. `fn foo(u8)`). This is deprecated and > an error as of the 2018 edition. All parameters must have an argument name. +#### Attributes on method parameters + +Attributes on method parameters follow the same rules and restrictions as +[regular function parameters]. + ## Associated Types *Associated types* are [type aliases] associated with another type. Associated @@ -336,6 +346,7 @@ fn main() { [`Box`]: ../special-types-and-traits.md#boxt [`Pin

`]: ../special-types-and-traits.md#pinp [`Rc`]: ../special-types-and-traits.md#rct +[_OuterAttribute_]: ../attributes.md [traits]: traits.md [type aliases]: type-aliases.md [inherent implementations]: implementations.md#inherent-implementations @@ -349,3 +360,4 @@ fn main() { [function item]: ../types/function-item.md [method call operator]: ../expressions/method-call-expr.md [path]: ../paths.md +[regular function parameters]: functions.md#attributes-on-function-parameters \ No newline at end of file diff --git a/src/doc/reference/src/items/external-blocks.md b/src/doc/reference/src/items/external-blocks.md index f3e692ac95..6537084279 100644 --- a/src/doc/reference/src/items/external-blocks.md +++ b/src/doc/reference/src/items/external-blocks.md @@ -8,9 +8,10 @@ >    `}` > > _ExternalItem_ :\ ->    [_OuterAttribute_]\*\ ->    [_Visibility_]?\ ->    ( _ExternalStaticItem_ | _ExternalFunctionItem_ ) +>    [_OuterAttribute_]\* (\ +>          [_MacroInvocationSemi_]\ +>       | ( [_Visibility_]? ( _ExternalStaticItem_ | _ExternalFunctionItem_ ) )\ +>    ) > > _ExternalStaticItem_ :\ >    `static` `mut`? [IDENTIFIER] `:` [_Type_] `;` @@ -24,14 +25,14 @@ >    _NamedFunctionParam_ ( `,` _NamedFunctionParam_ )\* `,`? > > _NamedFunctionParam_ :\ ->    ( [IDENTIFIER] | `_` ) `:` [_Type_] +>    [_OuterAttribute_]\* ( [IDENTIFIER] | `_` ) `:` [_Type_] > > _NamedFunctionParametersWithVariadics_ :\ ->    ( _NamedFunctionParam_ `,` )\* _NamedFunctionParam_ `,` `...` +>    ( _NamedFunctionParam_ `,` )\* _NamedFunctionParam_ `,` [_OuterAttribute_]\* `...` External blocks provide _declarations_ of items that are not _defined_ in the current crate and are the basis of Rust's foreign function interface. These are -akin to unchecked imports. +akin to unchecked imports. Two kind of item _declarations_ are allowed in external blocks: [functions] and [statics]. Calling functions or accessing statics that are declared in external @@ -162,6 +163,11 @@ extern { } ``` +### Attributes on function parameters + +Attributes on extern function parameters follow the same rules and +restrictions as [regular function parameters]. + [IDENTIFIER]: ../identifiers.md [WebAssembly module]: https://webassembly.github.io/spec/core/syntax/modules.html [functions]: functions.md @@ -170,6 +176,7 @@ extern { [_FunctionReturnType_]: functions.md [_Generics_]: generics.md [_InnerAttribute_]: ../attributes.md +[_MacroInvocationSemi_]: ../macros.md#macro-invocation [_MetaListNameValueStr_]: ../attributes.md#meta-item-attribute-syntax [_MetaNameValueStr_]: ../attributes.md#meta-item-attribute-syntax [_OuterAttribute_]: ../attributes.md @@ -177,3 +184,4 @@ extern { [_Visibility_]: ../visibility-and-privacy.md [_WhereClause_]: generics.md#where-clauses [attributes]: ../attributes.md +[regular function parameters]: functions.md#attributes-on-function-parameters \ No newline at end of file diff --git a/src/doc/reference/src/items/functions.md b/src/doc/reference/src/items/functions.md index 63f436fe46..4bc48518f6 100644 --- a/src/doc/reference/src/items/functions.md +++ b/src/doc/reference/src/items/functions.md @@ -20,7 +20,7 @@ >    _FunctionParam_ (`,` _FunctionParam_)\* `,`? > > _FunctionParam_ :\ ->    [_Pattern_] `:` [_Type_] +>    [_OuterAttribute_]\* [_Pattern_] `:` [_Type_] > > _FunctionReturnType_ :\ >    `->` [_Type_] @@ -180,12 +180,13 @@ aborts the process by executing an illegal instruction. ## Const functions -Functions qualified with the `const` keyword are const functions. 
_Const -functions_ can be called from within [const context]s. When called from a const -context, the function is interpreted by the compiler at compile time. The -interpretation happens in the environment of the compilation target and not the -host. So `usize` is `32` bits if you are compiling against a `32` bit system, -irrelevant of whether you are building on a `64` bit or a `32` bit system. +Functions qualified with the `const` keyword are const functions, as are +[tuple struct] and [tuple variant] constructors. _Const functions_ can be +called from within [const context]s. When called from a const context, the +function is interpreted by the compiler at compile time. The interpretation +happens in the environment of the compilation target and not the host. So +`usize` is `32` bits if you are compiling against a `32` bit system, irrelevant +of whether you are building on a `64` bit or a `32` bit system. If a const function is called outside a [const context], it is indistinguishable from any other function. You can freely do anything with a const function that @@ -214,7 +215,9 @@ Exhaustive list of permitted structures in const functions: are all permitted. This rule also applies to type parameters of impl blocks that - contain const methods + contain const methods. + + This does not apply to tuple struct and tuple variant constructors. * Arithmetic and comparison operators on integers * All boolean operators except for `&&` and `||` which are banned since @@ -345,12 +348,40 @@ fn test_only() { > Note: Except for lints, it is idiomatic to only use outer attributes on > function items. -The attributes that have meaning on a function are [`cfg`], [`deprecated`], +The attributes that have meaning on a function are [`cfg`], [`cfg_attr`], [`deprecated`], [`doc`], [`export_name`], [`link_section`], [`no_mangle`], [the lint check attributes], [`must_use`], [the procedural macro attributes], [the testing attributes], and [the optimization hint attributes]. Functions also accept attributes macros. +## Attributes on function parameters + +[Outer attributes][attributes] are allowed on function parameters and the +permitted [built-in attributes] are restricted to `cfg`, `cfg_attr`, `allow`, +`warn`, `deny`, and `forbid`. + +```rust +fn len( + #[cfg(windows)] slice: &[u16], + #[cfg(not(windows))] slice: &[u8], +) -> usize { + slice.len() +} +``` + +Inert helper attributes used by procedural macro attributes applied to items are also +allowed but be careful to not include these inert attributes in your final `TokenStream`. + +For example, the following code defines an inert `some_inert_attribute` attribute that +is not formally defined anywhere and the `some_proc_macro_attribute` procedural macro is +responsible for detecting its presence and removing it from the output token stream. + +```rust,ignore +#[some_proc_macro_attribute] +fn foo_oof(#[some_inert_attribute] arg: u8) { +} +``` + [IDENTIFIER]: ../identifiers.md [RAW_STRING_LITERAL]: ../tokens.md#raw-string-literals [STRING_LITERAL]: ../tokens.md#string-literals @@ -359,7 +390,10 @@ attributes macros. [_Pattern_]: ../patterns.md [_Type_]: ../types.md#type-expressions [_WhereClause_]: generics.md#where-clauses +[_OuterAttribute_]: ../attributes.md [const context]: ../const_eval.md#const-context +[tuple struct]: structs.md +[tuple variant]: enumerations.md [external block]: external-blocks.md [path]: ../paths.md [block]: ../expressions/block-expr.md @@ -368,7 +402,8 @@ attributes macros. 
[*function item type*]: ../types/function-item.md [Trait]: traits.md [attributes]: ../attributes.md -[`cfg`]: ../conditional-compilation.md +[`cfg`]: ../conditional-compilation.md#the-cfg-attribute +[`cfg_attr`]: ../conditional-compilation.md#the-cfg_attr-attribute [the lint check attributes]: ../attributes/diagnostics.md#lint-check-attributes [the procedural macro attributes]: ../procedural-macros.md [the testing attributes]: ../attributes/testing.md @@ -383,3 +418,4 @@ attributes macros. [`link_section`]: ../abi.md#the-link_section-attribute [`no_mangle`]: ../abi.md#the-no_mangle-attribute [external_block_abi]: external-blocks.md#abi +[built-in attributes]: ../attributes.html#built-in-attributes-index diff --git a/src/doc/reference/src/items/generics.md b/src/doc/reference/src/items/generics.md index 074b5cf366..6dc8f01d09 100644 --- a/src/doc/reference/src/items/generics.md +++ b/src/doc/reference/src/items/generics.md @@ -92,10 +92,11 @@ attributes may give meaning to it. This example shows using a custom derive attribute to modify the meaning of a generic parameter. -```ignore +```rust,ignore // Assume that the derive for MyFlexibleClone declared `my_flexible_clone` as // an attribute it understands. -#[derive(MyFlexibleClone)] struct Foo<#[my_flexible_clone(unbounded)] H> { +#[derive(MyFlexibleClone)] +struct Foo<#[my_flexible_clone(unbounded)] H> { a: *const H } ``` diff --git a/src/doc/reference/src/items/traits.md b/src/doc/reference/src/items/traits.md index e835a6097e..dea8ecad05 100644 --- a/src/doc/reference/src/items/traits.md +++ b/src/doc/reference/src/items/traits.md @@ -38,7 +38,7 @@ >    _TraitFunctionParam_ (`,` _TraitFunctionParam_)\* `,`? > > _TraitFunctionParam_[†](#parameter-patterns) :\ ->    ( [_Pattern_] `:` )? [_Type_] +>    [_OuterAttribute_]\* ( [_Pattern_] `:` )? [_Type_] > > _TraitConst_ :\ >    `const` [IDENTIFIER] `:` [_Type_] ( `=` [_Expression_] )? `;` diff --git a/src/doc/reference/src/items/unions.md b/src/doc/reference/src/items/unions.md index 22c40df7c2..d63bf1eec9 100644 --- a/src/doc/reference/src/items/unions.md +++ b/src/doc/reference/src/items/unions.md @@ -119,8 +119,8 @@ struct Value { fn is_zero(v: Value) -> bool { unsafe { match v { - Value { tag: I, u: U { i: 0 } } => true, - Value { tag: F, u: U { f: 0.0 } } => true, + Value { tag: Tag::I, u: U { i: 0 } } => true, + Value { tag: Tag::F, u: U { f: num } } if num == 0.0 => true, _ => false, } } diff --git a/src/doc/reference/src/keywords.md b/src/doc/reference/src/keywords.md index 4eac070553..9df5b2a58e 100644 --- a/src/doc/reference/src/keywords.md +++ b/src/doc/reference/src/keywords.md @@ -60,6 +60,8 @@ be used as the names of: The following keywords were added beginning in the 2018 edition. > **Lexer 2018+**\ +> KW_ASYNC : `async`\ +> KW_AWAIT : `await`\ > KW_DYN : `dyn` ## Reserved keywords @@ -86,8 +88,6 @@ them to use these keywords. The following keywords are reserved beginning in the 2018 edition. 
> **Lexer 2018+**\ -> KW_ASYNC : `async`\ -> KW_AWAIT : `await`\ > KW_TRY : `try` ## Weak keywords diff --git a/src/doc/reference/src/macros-by-example.md b/src/doc/reference/src/macros-by-example.md index 8cae03a8e3..fd7c33575a 100644 --- a/src/doc/reference/src/macros-by-example.md +++ b/src/doc/reference/src/macros-by-example.md @@ -128,7 +128,7 @@ fragment specifiers are: * `ident`: an [IDENTIFIER_OR_KEYWORD] * `path`: a [_TypePath_] style path * `tt`: a [_TokenTree_] (a single [token] or tokens in matching delimiters `()`, `[]`, or `{}`) - * `meta`: a [_MetaItem_], the contents of an attribute + * `meta`: an [_Attr_], the contents of an attribute * `lifetime`: a [LIFETIME_TOKEN] * `vis`: a possibly empty [_Visibility_] qualifier * `literal`: matches `-`?[_LiteralExpression_] @@ -477,12 +477,12 @@ For more detail, see the [formal specification]. [LIFETIME_TOKEN]: tokens.md#lifetimes-and-loop-labels [Metavariables]: #metavariables [Repetitions]: #repetitions +[_Attr_]: attributes.md [_BlockExpression_]: expressions/block-expr.md [_DelimTokenTree_]: macros.md [_Expression_]: expressions.md [_Item_]: items.md [_LiteralExpression_]: expressions/literal-expr.md -[_MetaItem_]: attributes.md#meta-item-attribute-syntax [_MetaListIdents_]: attributes.md#meta-item-attribute-syntax [_Pattern_]: patterns.md [_Statement_]: statements.md diff --git a/src/doc/reference/src/macros.md b/src/doc/reference/src/macros.md index cdac0b2bfc..f1ca1a9db6 100644 --- a/src/doc/reference/src/macros.md +++ b/src/doc/reference/src/macros.md @@ -37,6 +37,7 @@ following situations: * [Types] * [Items] including [associated items] * [`macro_rules`] transcribers +* [External blocks] When used as an item or a statement, the _MacroInvocationSemi_ form is used where a semicolon is required at the end when not using curly braces. @@ -99,3 +100,4 @@ example!(); [statements]: statements.md [types]: types.md [visibility qualifiers]: visibility-and-privacy.md +[External blocks]: items/external-blocks.md diff --git a/src/doc/reference/src/procedural-macros.md b/src/doc/reference/src/procedural-macros.md index 9818b26556..ff3f6df1d7 100644 --- a/src/doc/reference/src/procedural-macros.md +++ b/src/doc/reference/src/procedural-macros.md @@ -75,9 +75,7 @@ the macro invocation operator (`!`). These macros are defined by a [public] [function] with the `proc_macro` [attribute] and a signature of `(TokenStream) -> TokenStream`. The input [`TokenStream`] is what is inside the delimiters of the macro invocation and the -output [`TokenStream`] replaces the entire macro invocation. It may contain an -arbitrary number of [items]. These macros cannot expand to syntax that defines -new `macro_rules` style macros. +output [`TokenStream`] replaces the entire macro invocation. For example, the following macro definition ignores its input and outputs a function `answer` into its scope. @@ -105,11 +103,12 @@ fn main() { } ``` -These macros are only invokable in [modules]. They cannot even be invoked to -create [item declaration statements]. Furthermore, they must either be invoked -with curly braces and no semicolon or a different delimiter followed by a -semicolon. For example, `make_answer` from the previous example can be invoked -as `make_answer!{}`, `make_answer!();` or `make_answer![];`. +Function-like procedural macros may expand to a [type] or any number of +[items], including [`macro_rules`] definitions. 
They may be invoked in a [type +expression], [item] position (except as a [statement]), including items in +[`extern` blocks], inherent and trait [implementations], and [trait +definitions]. They cannot be used in a [statement], [expression], or +[pattern]. ### Derive macros @@ -192,7 +191,9 @@ struct Struct { ### Attribute macros -*Attribute macros* define new [attributes] which can be attached to [items]. +*Attribute macros* define new [outer attributes][attributes] which can be +attached to [items], including items in [`extern` blocks], inherent and trait +[implementations], and [trait definitions]. Attribute macros are defined by a [public] [function] with the `proc_macro_attribute` [attribute] that has a signature of `(TokenStream, @@ -201,8 +202,7 @@ tree following the attribute's name, not including the outer delimiters. If the attribute is written as a bare attribute name, the attribute [`TokenStream`] is empty. The second [`TokenStream`] is the rest of the [item] including other [attributes] on the [item]. The returned [`TokenStream`] -replaces the [item] with an arbitrary number of [items]. These macros cannot -expand to syntax that defines new `macro_rules` style macros. +replaces the [item] with an arbitrary number of [items]. For example, this attribute macro takes the input stream and returns it as is, effectively being the no-op of attributes. @@ -266,28 +266,35 @@ fn invoke4() {} // out: item: "fn invoke4() {}" ``` +[Attribute macros]: #attribute-macros +[Cargo's build scripts]: ../cargo/reference/build-scripts.html +[Derive macros]: #derive-macros +[Function-like macros]: #function-like-procedural-macros [`TokenStream`]: ../proc_macro/struct.TokenStream.html [`TokenStream`s]: ../proc_macro/struct.TokenStream.html [`compile_error`]: ../std/macro.compile_error.html [`derive` attribute]: attributes/derive.md +[`extern` blocks]: items/external-blocks.md +[`macro_rules`]: macros-by-example.md [`proc_macro` crate]: ../proc_macro/index.html -[Cargo's build scripts]: ../cargo/reference/build-scripts.html -[Derive macros]: #derive-macros -[Attribute macros]: #attribute-macros -[Function-like macros]: #function-like-procedural-macros [attribute]: attributes.md [attributes]: attributes.md [block]: expressions/block-expr.md [crate type]: linkage.md [derive macro helper attributes]: #derive-macro-helper-attributes [enum]: items/enumerations.md +[expression]: expressions.md +[function]: items/functions.md +[implementations]: items/implementations.md [inert]: attributes.md#active-and-inert-attributes [item]: items.md -[item declaration statements]: statements.md#item-declarations [items]: items.md -[function]: items/functions.md [module]: items/modules.md -[modules]: items/modules.md +[pattern]: patterns.md [public]: visibility-and-privacy.md +[statement]: statements.md [struct]: items/structs.md +[trait definitions]: items/traits.md +[type expression]: types.md#type-expressions +[type]: types.md [union]: items/unions.md diff --git a/src/doc/reference/src/type-layout.md b/src/doc/reference/src/type-layout.md index b0cc71a734..57e801e105 100644 --- a/src/doc/reference/src/type-layout.md +++ b/src/doc/reference/src/type-layout.md @@ -34,22 +34,17 @@ alignment of the type respectively. The size of most primitives is given in this table. 
-Type | `size_of::()` -- | - | - -bool | 1 -u8 | 1 -u16 | 2 -u32 | 4 -u64 | 8 -u128 | 16 -i8 | 1 -i16 | 2 -i32 | 4 -i64 | 8 -i128 | 16 -f32 | 4 -f64 | 8 -char | 4 +| Type | `size_of::()`| +|-- |-- | +| `bool` | 1 | +| `u8` / `i8` | 1 | +| `u16` / `i16` | 2 | +| `u32` / `i32` | 4 | +| `u64` / `i64` | 8 | +| `u128` / `i128` | 16 | +| `f32` | 4 | +| `f64` | 8 | +| `char` | 4 | `usize` and `isize` have a size big enough to contain every address on the target platform. For example, on a 32 bit target, this is 4 bytes and on a 64 diff --git a/src/doc/reference/src/types/function-pointer.md b/src/doc/reference/src/types/function-pointer.md index 88ef50c245..912ee932a0 100644 --- a/src/doc/reference/src/types/function-pointer.md +++ b/src/doc/reference/src/types/function-pointer.md @@ -15,10 +15,10 @@ >    _MaybeNamedParam_ ( `,` _MaybeNamedParam_ )\* `,`? > > _MaybeNamedParam_ :\ ->    ( ( [IDENTIFIER] | `_` ) `:` )? [_Type_] +>    [_OuterAttribute_]\* ( ( [IDENTIFIER] | `_` ) `:` )? [_Type_] > > _MaybeNamedFunctionParametersVariadic_ :\ ->    ( _MaybeNamedParam_ `,` )\* _MaybeNamedParam_ `,` `...` +>    ( _MaybeNamedParam_ `,` )\* _MaybeNamedParam_ `,` [_OuterAttribute_]\* `...` Function pointer types, written using the `fn` keyword, refer to a function whose identity is not necessarily known at compile-time. They can be created @@ -44,13 +44,20 @@ let bo: Binop = add; x = bo(5,7); ``` +## Attributes on function pointer parameters + +Attributes on function pointer parameters follow the same rules and +restrictions as [regular function parameters]. + [IDENTIFIER]: ../identifiers.md [_ForLifetimes_]: ../items/generics.md#where-clauses [_FunctionQualifiers_]: ../items/functions.md [_TypeNoBounds_]: ../types.md#type-expressions [_Type_]: ../types.md#type-expressions +[_OuterAttribute_]: ../attributes.md [`extern`]: ../items/external-blocks.md [closures]: closure.md [extern function]: ../items/functions.md#extern-function-qualifier [function items]: function-item.md [unsafe function]: ../unsafe-functions.md +[regular function parameters]: ../items/functions.md#attributes-on-function-parameters \ No newline at end of file diff --git a/src/doc/reference/theme/header.hbs b/src/doc/reference/theme/header.hbs deleted file mode 100644 index 85db69f844..0000000000 --- a/src/doc/reference/theme/header.hbs +++ /dev/null @@ -1,8 +0,0 @@ -

- For now, this reference is a best-effort document. We strive for validity - and completeness, but are not yet there. In the future, the docs and lang - teams will work together to figure out how best to do this. Until then, this - is a best-effort attempt. If you find something wrong or missing, file an - issue or - send in a pull request. -

\ No newline at end of file diff --git a/src/doc/reference/theme/reference.css b/src/doc/reference/theme/reference.css index e14630584f..06ea927b64 100644 --- a/src/doc/reference/theme/reference.css +++ b/src/doc/reference/theme/reference.css @@ -1,58 +1,3 @@ -/* These selectors moves things around to make space for the warning on the - top of each page. Get rid of it when the warning goes away. */ -.page-wrapper > .nav-chapters { - /* add height for warning content & margin */ - top: 120px; -} - -.sidebar-visible .content { - top: 120px; -} - -.nav-chapters { - top: 120px; -} - -p.warning { - background-color: rgb(242, 222, 222); - border-bottom-color: rgb(238, 211, 215); - border-bottom-left-radius: 4px; - border-bottom-right-radius: 4px; - border-bottom-style: solid; - border-bottom-width: 0.666667px; - border-image-outset: 0 0 0 0; - border-image-repeat: stretch stretch; - border-image-slice: 100% 100% 100% 100%; - border-image-source: none; - border-image-width: 1 1 1 1; - border-left-color: rgb(238, 211, 215); - border-left-style: solid; - border-left-width: 0.666667px; - border-right-color: rgb(238, 211, 215); - border-right-style: solid; - border-right-width: 0.666667px; - border-top-color: rgb(238, 211, 215); - border-top-left-radius: 4px; - border-top-right-radius: 4px; - border-top-style: solid; - border-top-width: 0.666667px; - color: rgb(185, 74, 72); - margin-bottom: 0px; - margin-left: 0px; - margin-right: 0px; - margin-top: 30px; - padding-bottom: 8px; - padding-left: 14px; - padding-right: 35px; - padding-top: 8px; -} -p.warning strong { - color: rgb(185, 74, 72) -} -p.warning a { - color: rgb(0, 136, 204) -} - /* .parenthetical class used to keep e.g. "less-than symbol (<)" from wrapping the end parenthesis onto its own line. Use in a span between the last word and diff --git a/src/doc/rust-by-example/src/SUMMARY.md b/src/doc/rust-by-example/src/SUMMARY.md index cd6bbc7b4f..85711fff22 100644 --- a/src/doc/rust-by-example/src/SUMMARY.md +++ b/src/doc/rust-by-example/src/SUMMARY.md @@ -69,7 +69,7 @@ - [As output parameters](fn/closures/output_parameters.md) - [Examples in `std`](fn/closures/closure_examples.md) - [Iterator::any](fn/closures/closure_examples/iter_any.md) - - [Iterator::find](fn/closures/closure_examples/iter_find.md) + - [Searching through iterators](fn/closures/closure_examples/iter_find.md) - [Higher Order Functions](fn/hof.md) - [Diverging functions](fn/diverging.md) @@ -139,8 +139,8 @@ - [Iterators](trait/iter.md) - [`impl Trait`](trait/impl_trait.md) - [Clone](trait/clone.md) - - [Supertraits](traits/supertraits.md) - - [Disambiguating overlapping traits](traits/disambiguating.md) + - [Supertraits](trait/supertraits.md) + - [Disambiguating overlapping traits](trait/disambiguating.md) - [macro_rules!](macros.md) - [Syntax](macros/syntax.md) diff --git a/src/doc/rust-by-example/src/cargo/build_scripts.md b/src/doc/rust-by-example/src/cargo/build_scripts.md index c5da00db6a..6db3afe90a 100644 --- a/src/doc/rust-by-example/src/cargo/build_scripts.md +++ b/src/doc/rust-by-example/src/cargo/build_scripts.md @@ -1,7 +1,7 @@ # Build Scripts -Sometimes a normal build from cargo is not enough. Perhaps your crate needs some -pre-requisites before cargo will successfully compile, things like code +Sometimes a normal build from `cargo` is not enough. Perhaps your crate needs +some pre-requisites before `cargo` will successfully compile, things like code generation, or some native code that needs to be compiled. 
To solve this problem we have build scripts that Cargo can run. @@ -20,7 +20,7 @@ default. ## How to use a build script The build script is simply another Rust file that will be compiled and invoked -prior to compiling anything else in the package. Hence it can be used to fulfil +prior to compiling anything else in the package. Hence it can be used to fulfill pre-requisites of your crate. Cargo provides the script with inputs via environment variables [specified @@ -29,10 +29,11 @@ here] that can be used. The script provides output via stdout. All lines printed are written to `target/debug/build//output`. Further, lines prefixed with `cargo:` will be interpreted by Cargo directly and hence can be used to define parameters for the -packages compilation. +package's compilation. -For further specification and examples have a read of the [cargo specification]. +For further specification and examples have a read of the +[Cargo specification][cargo_specification]. [specified here]: https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-build-scripts -[cargo specification]: https://doc.rust-lang.org/cargo/reference/build-scripts.html \ No newline at end of file +[cargo_specification]: https://doc.rust-lang.org/cargo/reference/build-scripts.html diff --git a/src/doc/rust-by-example/src/cargo/deps.md b/src/doc/rust-by-example/src/cargo/deps.md index 21db4c2a65..7403a2ae76 100644 --- a/src/doc/rust-by-example/src/cargo/deps.md +++ b/src/doc/rust-by-example/src/cargo/deps.md @@ -15,10 +15,10 @@ cargo new foo cargo new --lib foo ``` -For the rest of this chapter, I will assume we are making a binary, rather than +For the rest of this chapter, let's assume we are making a binary, rather than a library, but all of the concepts are the same. -After the above commands, you should see something like this: +After the above commands, you should see a file hierarchy like this: ```txt foo @@ -40,7 +40,7 @@ authors = ["mark"] [dependencies] ``` -The `name` field under `package` determines the name of the project. This is +The `name` field under `[package]` determines the name of the project. This is used by `crates.io` if you publish the crate (more later). It is also the name of the output binary when you compile. @@ -49,14 +49,14 @@ Versioning](http://semver.org/). The `authors` field is a list of authors used when publishing the crate. -The `dependencies` section lets you add a dependency for your project. +The `[dependencies]` section lets you add dependencies for your project. -For example, suppose that I want my program to have a great CLI. You can find +For example, suppose that we want our program to have a great CLI. You can find lots of great packages on [crates.io](https://crates.io) (the official Rust package registry). One popular choice is [clap](https://crates.io/crates/clap). As of this writing, the most recent published version of `clap` is `2.27.1`. To add a dependency to our program, we can simply add the following to our -`Cargo.toml` under `dependencies`: `clap = "2.27.1"`. And of course, `extern +`Cargo.toml` under `[dependencies]`: `clap = "2.27.1"`. And of course, `extern crate clap` in `main.rs`, just like normal. And that's it! You can start using `clap` in your program. 
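For illustration, here is a minimal sketch of what `main.rs` might look like once `clap = "2.27.1"` sits under `[dependencies]`; the program name and the `name` argument are made up for the example, and the builder calls assume clap's 2.x `App`/`Arg` API.

```rust,ignore
// Sketch only: assumes `clap = "2.27.1"` under `[dependencies]` in Cargo.toml.
extern crate clap;

use clap::{App, Arg};

fn main() {
    // Describe the command-line interface with clap's builder API.
    let matches = App::new("greet")
        .version("0.1.0")
        .about("A toy CLI used to illustrate adding a dependency")
        .arg(Arg::with_name("name")
            .short("n")
            .long("name")
            .takes_value(true)
            .help("Who to greet"))
        .get_matches();

    // Fall back to a default when the flag is not passed.
    let name = matches.value_of("name").unwrap_or("world");
    println!("Hello, {}!", name);
}
```

Running `cargo build` will download and compile `clap` before compiling the binary, and `cargo run -- --name Rust` exercises the flag.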
diff --git a/src/doc/rust-by-example/src/conversion/string.md b/src/doc/rust-by-example/src/conversion/string.md index 2bec02f6df..ab1452153a 100644 --- a/src/doc/rust-by-example/src/conversion/string.md +++ b/src/doc/rust-by-example/src/conversion/string.md @@ -29,16 +29,16 @@ fn main() { ## Parsing a String One of the more common types to convert a string into is a number. The idiomatic -approach to this is to use the [`parse`] function and provide the type for the -function to parse the string value into, this can be done either without type -inference or using the 'turbofish' syntax. +approach to this is to use the [`parse`] function and either to arrange for +type inference or to specify the type to parse using the 'turbofish' syntax. +Both alternatives are shown in the following example. This will convert the string into the type specified so long as the [`FromStr`] trait is implemented for that type. This is implemented for numerous types within the standard library. To obtain this functionality on a user defined type simply implement the [`FromStr`] trait for that type. -```rust +```rust,editable fn main() { let parsed: i32 = "5".parse().unwrap(); let turbo_parsed = "10".parse::().unwrap(); diff --git a/src/doc/rust-by-example/src/custom_types/enum.md b/src/doc/rust-by-example/src/custom_types/enum.md index 8580ce9f74..e861df9bfd 100644 --- a/src/doc/rust-by-example/src/custom_types/enum.md +++ b/src/doc/rust-by-example/src/custom_types/enum.md @@ -55,7 +55,7 @@ fn main() { ## Type aliases -If you use a type alias, you can refer to each enum variant via its alias. +If you use a type alias, you can refer to each enum variant via its alias. This might be useful if the enum's name is too long or too generic, and you want to rename it. @@ -93,16 +93,17 @@ impl VeryVerboseEnumOfThingsToDoWithNumbers { } ``` -To learn more about enums and type aliases, you can read the +To learn more about enums and type aliases, you can read the [stabilization report][aliasreport] from when this feature was stabilized into -Rust. +Rust. ### See also: -[`match`][match], [`fn`][fn], and [`String`][str], [] +[`match`][match], [`fn`][fn], and [`String`][str], ["Type alias enum variants" RFC][type_alias_rfc] [c_struct]: https://en.wikipedia.org/wiki/Struct_(C_programming_language) [match]: ../flow_control/match.md [fn]: ../fn.md [str]: ../std/str.md [aliasreport]: https://github.com/rust-lang/rust/pull/61682/#issuecomment-502472847 +[type_alias_rfc]: https://rust-lang.github.io/rfcs/2338-type-alias-enum-variants.html diff --git a/src/doc/rust-by-example/src/custom_types/structs.md b/src/doc/rust-by-example/src/custom_types/structs.md index 0b620bf38a..e863e0ee6b 100644 --- a/src/doc/rust-by-example/src/custom_types/structs.md +++ b/src/doc/rust-by-example/src/custom_types/structs.md @@ -10,6 +10,7 @@ There are three types of structures ("structs") that can be created using the ```rust,editable #[derive(Debug)] struct Person<'a> { + // The 'a defines a lifetime name: &'a str, age: u8, } @@ -29,8 +30,10 @@ struct Point { // Structs can be reused as fields of another struct #[allow(dead_code)] struct Rectangle { - p1: Point, - p2: Point, + // A rectangle can be specified by where the top left and bottom right + // corners are in space. 
+ top_left: Point, + bottom_right: Point, } fn main() { @@ -44,23 +47,26 @@ fn main() { // Instantiate a `Point` - let point: Point = Point { x: 0.3, y: 0.4 }; + let point: Point = Point { x: 10.3, y: 0.4 }; // Access the fields of the point println!("point coordinates: ({}, {})", point.x, point.y); - // Make a new point by using struct update syntax to use the fields of our other one - let new_point = Point { x: 0.1, ..point }; - // `new_point.y` will be the same as `point.y` because we used that field from `point` - println!("second point: ({}, {})", new_point.x, new_point.y); + // Make a new point by using struct update syntax to use the fields of our + // other one + let bottom_right = Point { x: 5.2, ..point }; + + // `bottom_right.y` will be the same as `point.y` because we used that field + // from `point` + println!("second point: ({}, {})", bottom_right.x, bottom_right.y); // Destructure the point using a `let` binding - let Point { x: my_x, y: my_y } = point; + let Point { x: top_edge, y: left_edge } = point; let _rectangle = Rectangle { // struct instantiation is an expression too - p1: Point { x: my_y, y: my_x }, - p2: point, + top_left: Point { x: left_edge, y: top_edge }, + bottom_right: bottom_right, }; // Instantiate a unit struct @@ -81,14 +87,15 @@ fn main() { ### Activity -1. Add a function `rect_area` which calculates the area of a rectangle (try - using nested destructuring). +1. Add a function `rect_area` which calculates the area of a rectangle (try + using nested destructuring). 2. Add a function `square` which takes a `Point` and a `f32` as arguments, and returns a `Rectangle` with its lower left corner on the point, and a width and height corresponding to the `f32`. ### See also: -[`attributes`][attributes] and [destructuring][destructuring] +[`attributes`][attributes], [lifetime][lifetime] and [destructuring][destructuring] [attributes]: ../attribute.md [c_struct]: https://en.wikipedia.org/wiki/Struct_(C_programming_language) [destructuring]: ../flow_control/match/destructuring.md +[lifetime]: ../scope/lifetime.md diff --git a/src/doc/rust-by-example/src/error/result.md b/src/doc/rust-by-example/src/error/result.md index 3202b314ea..d779bfdad1 100644 --- a/src/doc/rust-by-example/src/error/result.md +++ b/src/doc/rust-by-example/src/error/result.md @@ -5,8 +5,8 @@ describes possible *error* instead of possible *absence*. That is, `Result` could have one of two outcomes: -* `Ok`: An element `T` was found -* `Err`: An error was found with element `E` +* `Ok(T)`: An element `T` was found +* `Err(E)`: An error was found with element `E` By convention, the expected outcome is `Ok` while the unexpected outcome is `Err`. diff --git a/src/doc/rust-by-example/src/flow_control/if_let.md b/src/doc/rust-by-example/src/flow_control/if_let.md index a5671332b9..22bdde8d1b 100644 --- a/src/doc/rust-by-example/src/flow_control/if_let.md +++ b/src/doc/rust-by-example/src/flow_control/if_let.md @@ -103,7 +103,7 @@ Another benefit: `if let` allows to match enum non-parameterized variants, even Would you like a challenge? Fix the following example to use `if let`: -```rust,editable,ignore +```rust,editable,ignore,mdbook-runnable // This enum purposely doesn't #[derive(PartialEq)], // neither we implement PartialEq for it. That's why comparing Foo::Bar==a fails below. 
enum Foo {Bar} diff --git a/src/doc/rust-by-example/src/flow_control/match.md b/src/doc/rust-by-example/src/flow_control/match.md index fb0f56e3d2..02a80670ff 100644 --- a/src/doc/rust-by-example/src/flow_control/match.md +++ b/src/doc/rust-by-example/src/flow_control/match.md @@ -15,7 +15,7 @@ fn main() { // Match several values 2 | 3 | 5 | 7 | 11 => println!("This is a prime"), // Match an inclusive range - 13...19 => println!("A teen"), + 13..=19 => println!("A teen"), // Handle the rest of cases _ => println!("Ain't special"), } @@ -31,4 +31,4 @@ fn main() { println!("{} -> {}", boolean, binary); } -``` \ No newline at end of file +``` diff --git a/src/doc/rust-by-example/src/flow_control/match/binding.md b/src/doc/rust-by-example/src/flow_control/match/binding.md index e1aa232169..0407b37774 100644 --- a/src/doc/rust-by-example/src/flow_control/match/binding.md +++ b/src/doc/rust-by-example/src/flow_control/match/binding.md @@ -15,9 +15,9 @@ fn main() { match age() { 0 => println!("I'm not born yet I guess"), - // Could `match` 1 ... 12 directly but then what age + // Could `match` 1 ..= 12 directly but then what age // would the child be? Instead, bind to `n` for the - // sequence of 1 .. 12. Now the age can be reported. + // sequence of 1 ..= 12. Now the age can be reported. n @ 1 ..= 12 => println!("I'm a child of age {:?}", n), n @ 13 ..= 19 => println!("I'm a teen of age {:?}", n), // Nothing bound. Return the result. diff --git a/src/doc/rust-by-example/src/fn/closures/capture.md b/src/doc/rust-by-example/src/fn/closures/capture.md index 36468782f3..d4fa4eadc9 100644 --- a/src/doc/rust-by-example/src/fn/closures/capture.md +++ b/src/doc/rust-by-example/src/fn/closures/capture.md @@ -18,37 +18,51 @@ fn main() { let color = "green"; - // A closure to print `color` which immediately borrows (`&`) - // `color` and stores the borrow and closure in the `print` - // variable. It will remain borrowed until `print` goes out of - // scope. `println!` only requires `by reference` so it doesn't + // A closure to print `color` which immediately borrows (`&`) `color` and + // stores the borrow and closure in the `print` variable. It will remain + // borrowed until `print` is used the last time. + // + // `println!` only requires arguments by immutable reference so it doesn't // impose anything more restrictive. let print = || println!("`color`: {}", color); // Call the closure using the borrow. print(); + + // `color` can be borrowed immutably again, because the closure only holds + // an immutable reference to `color`. + let _reborrow = &color; print(); - let mut count = 0; + // A move or reborrow is allowed after the final use of `print` + let _color_moved = color; - // A closure to increment `count` could take either `&mut count` - // or `count` but `&mut count` is less restrictive so it takes - // that. Immediately borrows `count`. + + let mut count = 0; + // A closure to increment `count` could take either `&mut count` or `count` + // but `&mut count` is less restrictive so it takes that. Immediately + // borrows `count`. // - // A `mut` is required on `inc` because a `&mut` is stored inside. - // Thus, calling the closure mutates the closure which requires - // a `mut`. + // A `mut` is required on `inc` because a `&mut` is stored inside. Thus, + // calling the closure mutates the closure which requires a `mut`. let mut inc = || { count += 1; println!("`count`: {}", count); }; - // Call the closure. - inc(); + // Call the closure using a mutable borrow. 
inc(); - //let _reborrow = &mut count; + // The closure still mutably borrows `count` because it is called later. + // An attempt to reborrow will lead to an error. + // let _reborrow = &count; // ^ TODO: try uncommenting this line. + inc(); + + // The closure no longer needs to borrow `&mut count`. Therefore, it is + // possible to reborrow without an error + let _count_reborrowed = &mut count; + // A non-copy type. let movable = Box::new(3); @@ -64,7 +78,7 @@ fn main() { // `consume` consumes the variable so this can only be called once. consume(); - //consume(); + // consume(); // ^ TODO: Try uncommenting this line. } ``` @@ -82,7 +96,7 @@ fn main() { println!("{}", contains(&1)); println!("{}", contains(&4)); - // `println!("There're {} elements in vec", haystack.len());` + // println!("There're {} elements in vec", haystack.len()); // ^ Uncommenting above line will result in compile-time error // because borrow checker doesn't allow re-using variable after it // has been moved. diff --git a/src/doc/rust-by-example/src/fn/closures/closure_examples/iter_find.md b/src/doc/rust-by-example/src/fn/closures/closure_examples/iter_find.md index 77ddad2ce5..9eb5072e97 100644 --- a/src/doc/rust-by-example/src/fn/closures/closure_examples/iter_find.md +++ b/src/doc/rust-by-example/src/fn/closures/closure_examples/iter_find.md @@ -1,8 +1,8 @@ -# Iterator::find +# Searching through iterators -`Iterator::find` is a function which when passed an iterator, will return -the first element which satisfies the predicate as an `Option`. Its -signature: +`Iterator::find` is a function which iterates over an iterator and searches for the +first value which satisfies some condition. If none of the values satisfy the +condition, it returns `None`. Its signature: ```rust,ignore pub trait Iterator { @@ -29,9 +29,11 @@ fn main() { // `into_iter()` for vecs yields `i32`. let mut into_iter = vec2.into_iter(); - // A reference to what is yielded is `&&i32`. Destructure to `i32`. + // `iter()` for vecs yields `&i32`, and we want to reference one of its + // items, so we have to destructure `&&i32` to `i32` println!("Find 2 in vec1: {:?}", iter .find(|&&x| x == 2)); - // A reference to what is yielded is `&i32`. Destructure to `i32`. + // `into_iter()` for vecs yields `i32`, and we want to reference one of + // its items, so we have to destructure `&i32` to `i32` println!("Find 2 in vec2: {:?}", into_iter.find(| &x| x == 2)); let array1 = [1, 2, 3]; @@ -44,8 +46,33 @@ fn main() { } ``` +`Iterator::find` gives you a reference to the item. But if you want the _index_ of the +item, use `Iterator::position`. 
+ +```rust,editable +fn main() { + let vec = vec![1, 9, 3, 3, 13, 2]; + + let index_of_first_even_number = vec.iter().position(|x| x % 2 == 0); + assert_eq!(index_of_first_even_number, Some(5)); + + + let index_of_first_negative_number = vec.iter().position(|x| x < &0); + assert_eq!(index_of_first_negative_number, None); +} +``` + ### See also: [`std::iter::Iterator::find`][find] +[`std::iter::Iterator::find_map`][find_map] + +[`std::iter::Iterator::position`][position] + +[`std::iter::Iterator::rposition`][rposition] + [find]: https://doc.rust-lang.org/std/iter/trait.Iterator.html#method.find +[find_map]: https://doc.rust-lang.org/std/iter/trait.Iterator.html#method.find_map +[position]: https://doc.rust-lang.org/std/iter/trait.Iterator.html#method.position +[rposition]: https://doc.rust-lang.org/std/iter/trait.Iterator.html#method.rposition diff --git a/src/doc/rust-by-example/src/fn/closures/input_parameters.md b/src/doc/rust-by-example/src/fn/closures/input_parameters.md index c97a4d1390..be36c6d361 100644 --- a/src/doc/rust-by-example/src/fn/closures/input_parameters.md +++ b/src/doc/rust-by-example/src/fn/closures/input_parameters.md @@ -1,33 +1,34 @@ # As input parameters -While Rust chooses how to capture variables on the fly mostly without type -annotation, this ambiguity is not allowed when writing functions. When -taking a closure as an input parameter, the closure's complete type must be -annotated using one of a few `traits`. In order of decreasing restriction, +While Rust chooses how to capture variables on the fly mostly without type +annotation, this ambiguity is not allowed when writing functions. When +taking a closure as an input parameter, the closure's complete type must be +annotated using one of a few `traits`. In order of decreasing restriction, they are: * `Fn`: the closure captures by reference (`&T`) * `FnMut`: the closure captures by mutable reference (`&mut T`) * `FnOnce`: the closure captures by value (`T`) -On a variable-by-variable basis, the compiler will capture variables in the -least restrictive manner possible. +On a variable-by-variable basis, the compiler will capture variables in the +least restrictive manner possible. -For instance, consider a parameter annotated as `FnOnce`. This specifies -that the closure *may* capture by `&T`, `&mut T`, or `T`, but the compiler -will ultimately choose based on how the captured variables are used in the +For instance, consider a parameter annotated as `FnOnce`. This specifies +that the closure *may* capture by `&T`, `&mut T`, or `T`, but the compiler +will ultimately choose based on how the captured variables are used in the closure. -This is because if a move is possible, then any type of borrow should also -be possible. Note that the reverse is not true. If the parameter is -annotated as `Fn`, then capturing variables by `&mut T` or `T` are not +This is because if a move is possible, then any type of borrow should also +be possible. Note that the reverse is not true. If the parameter is +annotated as `Fn`, then capturing variables by `&mut T` or `T` are not allowed. -In the following example, try swapping the usage of `Fn`, `FnMut`, and +In the following example, try swapping the usage of `Fn`, `FnMut`, and `FnOnce` to see what happens: ```rust,editable // A function which takes a closure as an argument and calls it. +// denotes that F is a "Generic type parameter" fn apply(f: F) where // The closure takes no input and returns nothing. 
F: FnOnce() { @@ -81,9 +82,11 @@ fn main() { ### See also: -[`std::mem::drop`][drop], [`Fn`][fn], [`FnMut`][fnmut], and [`FnOnce`][fnonce] +[`std::mem::drop`][drop], [`Fn`][fn], [`FnMut`][fnmut], [Generics][generics], [where][where] and [`FnOnce`][fnonce] [drop]: https://doc.rust-lang.org/std/mem/fn.drop.html [fn]: https://doc.rust-lang.org/std/ops/trait.Fn.html [fnmut]: https://doc.rust-lang.org/std/ops/trait.FnMut.html [fnonce]: https://doc.rust-lang.org/std/ops/trait.FnOnce.html +[generics]: ../../generics.md +[where]: ../../generics/where.md diff --git a/src/doc/rust-by-example/src/fn/closures/output_parameters.md b/src/doc/rust-by-example/src/fn/closures/output_parameters.md index 4ac4dd0c4a..6cad1735b9 100644 --- a/src/doc/rust-by-example/src/fn/closures/output_parameters.md +++ b/src/doc/rust-by-example/src/fn/closures/output_parameters.md @@ -5,13 +5,11 @@ output parameters should also be possible. However, anonymous closure types are, by definition, unknown, so we have to use `impl Trait` to return them. -The valid traits for returns are slightly different than before: +The valid traits for returning a closure are: -* `Fn`: normal -* `FnMut`: normal -* `FnOnce`: There are some unusual things at play here, so the [`FnBox`][fnbox] - type is currently needed, and is unstable. This is expected to change in - the future. +* `Fn` +* `FnMut` +* `FnOnce` Beyond this, the `move` keyword must be used, which signals that all captures occur by value. This is required because any captures by reference would be @@ -31,12 +29,20 @@ fn create_fnmut() -> impl FnMut() { move || println!("This is a: {}", text) } +fn create_fnonce() -> impl FnOnce() { + let text = "FnOnce".to_owned(); + + move || println!("This is a: {}", text) +} + fn main() { let fn_plain = create_fn(); let mut fn_mut = create_fnmut(); + let fn_once = create_fnonce(); fn_plain(); fn_mut(); + fn_once(); } ``` @@ -46,6 +52,5 @@ fn main() { [fn]: https://doc.rust-lang.org/std/ops/trait.Fn.html [fnmut]: https://doc.rust-lang.org/std/ops/trait.FnMut.html -[fnbox]: https://doc.rust-lang.org/std/boxed/trait.FnBox.html [generics]: ../../generics.md [impltrait]: ../../trait/impl_trait.md diff --git a/src/doc/rust-by-example/src/macros/dry.md b/src/doc/rust-by-example/src/macros/dry.md index 7333d0e812..ca015272af 100644 --- a/src/doc/rust-by-example/src/macros/dry.md +++ b/src/doc/rust-by-example/src/macros/dry.md @@ -10,7 +10,7 @@ use std::ops::{Add, Mul, Sub}; macro_rules! assert_equal_len { // The `tt` (token tree) designator is used for // operators and tokens. - ($a:ident, $b:ident, $func:ident, $op:tt) => { + ($a:expr, $b:expr, $func:ident, $op:tt) => { assert!($a.len() == $b.len(), "{:?}: dimension mismatch: {:?} {:?} {:?}", stringify!($func), diff --git a/src/doc/rust-by-example/src/primitives/array.md b/src/doc/rust-by-example/src/primitives/array.md index fe4bee94c2..f612c1dbc2 100644 --- a/src/doc/rust-by-example/src/primitives/array.md +++ b/src/doc/rust-by-example/src/primitives/array.md @@ -42,6 +42,9 @@ fn main() { analyze_slice(&xs); // Slices can point to a section of an array + // They are of the form [starting_index..ending_index] + // starting_index is the first position in the slice + // ending_index is one more than the last position in the slice println!("borrow a section of the array as a slice"); analyze_slice(&ys[1 .. 
4]); diff --git a/src/doc/rust-by-example/src/scope/borrow/alias.md b/src/doc/rust-by-example/src/scope/borrow/alias.md index 2a12e605f4..9cfd5fe5dc 100644 --- a/src/doc/rust-by-example/src/scope/borrow/alias.md +++ b/src/doc/rust-by-example/src/scope/borrow/alias.md @@ -1,9 +1,9 @@ # Aliasing Data can be immutably borrowed any number of times, but while immutably -borrowed, the original data can't be mutably borrowed. On the other hand, -only *one* mutable borrow is allowed at a time. The original data can be -borrowed again only *after* the mutable reference goes out of scope. +borrowed, the original data can't be mutably borrowed. On the other hand, only +*one* mutable borrow is allowed at a time. The original data can be borrowed +again only *after* the mutable reference has been used for the last time. ```rust,editable struct Point { x: i32, y: i32, z: i32 } @@ -11,49 +11,48 @@ struct Point { x: i32, y: i32, z: i32 } fn main() { let mut point = Point { x: 0, y: 0, z: 0 }; - { - let borrowed_point = &point; - let another_borrow = &point; - - // Data can be accessed via the references and the original owner - println!("Point has coordinates: ({}, {}, {})", - borrowed_point.x, another_borrow.y, point.z); - - // Error! Can't borrow `point` as mutable because it's currently - // borrowed as immutable. - //let mutable_borrow = &mut point; - // TODO ^ Try uncommenting this line - - // Immutable references go out of scope - } - - { - let mutable_borrow = &mut point; - - // Change data via mutable reference - mutable_borrow.x = 5; - mutable_borrow.y = 2; - mutable_borrow.z = 1; - - // Error! Can't borrow `point` as immutable because it's currently - // borrowed as mutable. - //let y = &point.y; - // TODO ^ Try uncommenting this line - - // Error! Can't print because `println!` takes an immutable reference. - //println!("Point Z coordinate is {}", point.z); - // TODO ^ Try uncommenting this line - - // Ok! Mutable references can be passed as immutable to `println!` - println!("Point has coordinates: ({}, {}, {})", - mutable_borrow.x, mutable_borrow.y, mutable_borrow.z); - - // Mutable reference goes out of scope - } - - // Immutable references to `point` are allowed again let borrowed_point = &point; + let another_borrow = &point; + + // Data can be accessed via the references and the original owner + println!("Point has coordinates: ({}, {}, {})", + borrowed_point.x, another_borrow.y, point.z); + + // Error! Can't borrow `point` as mutable because it's currently + // borrowed as immutable. + // let mutable_borrow = &mut point; + // TODO ^ Try uncommenting this line + + // The borrowed values are used again here + println!("Point has coordinates: ({}, {}, {})", + borrowed_point.x, another_borrow.y, point.z); + + // The immutable references are no longer used for the rest of the code so + // it is possible to reborrow with a mutable reference. + let mutable_borrow = &mut point; + + // Change data via mutable reference + mutable_borrow.x = 5; + mutable_borrow.y = 2; + mutable_borrow.z = 1; + + // Error! Can't borrow `point` as immutable because it's currently + // borrowed as mutable. + // let y = &point.y; + // TODO ^ Try uncommenting this line + + // Error! Can't print because `println!` takes an immutable reference. + // println!("Point Z coordinate is {}", point.z); + // TODO ^ Try uncommenting this line + + // Ok! 
Mutable references can be passed as immutable to `println!` + println!("Point has coordinates: ({}, {}, {})", + mutable_borrow.x, mutable_borrow.y, mutable_borrow.z); + + // The mutable reference is no longer used for the rest of the code so it + // is possible to reborrow + let new_borrowed_point = &point; println!("Point now has coordinates: ({}, {}, {})", - borrowed_point.x, borrowed_point.y, borrowed_point.z); + new_borrowed_point.x, new_borrowed_point.y, new_borrowed_point.z); } ``` diff --git a/src/doc/rust-by-example/src/std/box.md b/src/doc/rust-by-example/src/std/box.md index 8d45e1a44e..ebc8ff430a 100644 --- a/src/doc/rust-by-example/src/std/box.md +++ b/src/doc/rust-by-example/src/std/box.md @@ -18,10 +18,12 @@ struct Point { y: f64, } +// A Rectangle can be specified by where its top left and bottom right +// corners are in space #[allow(dead_code)] struct Rectangle { - p1: Point, - p2: Point, + top_left: Point, + bottom_right: Point, } fn origin() -> Point { @@ -38,14 +40,14 @@ fn main() { // Stack allocated variables let point: Point = origin(); let rectangle: Rectangle = Rectangle { - p1: origin(), - p2: Point { x: 3.0, y: 4.0 } + top_left: origin(), + bottom_right: Point { x: 3.0, y: -4.0 } }; // Heap allocated rectangle let boxed_rectangle: Box = Box::new(Rectangle { - p1: origin(), - p2: origin() + top_left: origin(), + bottom_right: Point { x: 3.0, y: -4.0 }, }); // The output of functions can be boxed diff --git a/src/doc/rust-by-example/src/testing/dev_dependencies.md b/src/doc/rust-by-example/src/testing/dev_dependencies.md index 4c08979e60..c20decfa63 100644 --- a/src/doc/rust-by-example/src/testing/dev_dependencies.md +++ b/src/doc/rust-by-example/src/testing/dev_dependencies.md @@ -1,7 +1,7 @@ # Development dependencies -Sometimes there is a need to have a dependencies for tests (examples, -benchmarks) only. Such dependencies are added to `Cargo.toml` in +Sometimes there is a need to have dependencies for tests (examples, +benchmarks) only. Such dependencies are added to `Cargo.toml` in the `[dev-dependencies]` section. These dependencies are not propagated to other packages which depend on this package. 
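As a sketch of how such a test-only dependency is consumed, assume a crate whose `Cargo.toml` lists `pretty_assertions` under `[dev-dependencies]`; gating the `extern crate` on `cfg(test)` keeps it out of normal builds, so it is only compiled for `cargo test` and never reaches downstream users of the package.

```rust,ignore
// Sketch only: assumes `pretty_assertions` is listed under [dev-dependencies].
// The crate is only pulled in when compiling tests.
#[cfg(test)]
#[macro_use]
extern crate pretty_assertions; // drop-in `assert_eq!`/`assert_ne!` with diffs

pub fn add(a: i32, b: i32) -> i32 {
    a + b
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_add() {
        // Uses the dev-dependency's `assert_eq!`, which prints a diff on failure.
        assert_eq!(add(2, 3), 5);
    }
}
```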
diff --git a/src/doc/rust-by-example/src/traits/disambiguating.md b/src/doc/rust-by-example/src/trait/disambiguating.md similarity index 100% rename from src/doc/rust-by-example/src/traits/disambiguating.md rename to src/doc/rust-by-example/src/trait/disambiguating.md diff --git a/src/doc/rust-by-example/src/traits/supertraits.md b/src/doc/rust-by-example/src/trait/supertraits.md similarity index 100% rename from src/doc/rust-by-example/src/traits/supertraits.md rename to src/doc/rust-by-example/src/trait/supertraits.md diff --git a/src/doc/rustc/src/SUMMARY.md b/src/doc/rustc/src/SUMMARY.md index 3cda8d9279..b603c7b231 100644 --- a/src/doc/rustc/src/SUMMARY.md +++ b/src/doc/rustc/src/SUMMARY.md @@ -10,9 +10,11 @@ - [Warn-by-default lints](lints/listing/warn-by-default.md) - [Deny-by-default lints](lints/listing/deny-by-default.md) - [Codegen options](codegen-options/index.md) +- [JSON Output](json.md) - [Targets](targets/index.md) - [Built-in Targets](targets/built-in.md) - [Custom Targets](targets/custom.md) + - [Known Issues](targets/known-issues.md) - [Profile-guided Optimization](profile-guided-optimization.md) - [Linker-plugin based LTO](linker-plugin-lto.md) - [Contributing to `rustc`](contributing.md) diff --git a/src/doc/rustc/src/codegen-options/index.md b/src/doc/rustc/src/codegen-options/index.md index 5c41acc658..f5d5f2089d 100644 --- a/src/doc/rustc/src/codegen-options/index.md +++ b/src/doc/rustc/src/codegen-options/index.md @@ -61,6 +61,8 @@ enabling or disabling a feature. To see the valid options and an example of use, run `rustc --print target-features`. +Using this flag is unsafe and might result in [undefined runtime behavior](../targets/known-issues.md). + ## passes This flag can be used to add extra LLVM passes to the compilation. @@ -105,7 +107,7 @@ flag will turn that behavior off. ## no-vectorize-slp -By default, `rustc` will attempt to vectorize loops using [superword-level +By default, `rustc` will attempt to vectorize code using [superword-level parallelism](https://llvm.org/docs/Vectorizers.html#the-slp-vectorizer). This flag will turn that behavior off. diff --git a/src/doc/rustc/src/command-line-arguments.md b/src/doc/rustc/src/command-line-arguments.md index 5eea9c8687..bdb3c51965 100644 --- a/src/doc/rustc/src/command-line-arguments.md +++ b/src/doc/rustc/src/command-line-arguments.md @@ -92,6 +92,7 @@ information about editions may be found in the [edition guide]. [edition guide]: ../edition-guide/introduction.html ## `--emit`: specifies the types of output files to generate + This flag controls the types of output files generated by the compiler. It accepts a comma-separated list of values, and may be specified multiple times. @@ -144,7 +145,7 @@ of print values are: target CPU may be selected with the `-C target-cpu=val` flag. - `target-features` — List of available target features for the current target. Target features may be enabled with the `-C target-feature=val` - flag. + flag. This flag is unsafe. See [known issues](targets/known-issues.md) for more details. - `relocation-models` — List of relocation models. Relocation models may be selected with the `-C relocation-model=val` flag. - `code-models` — List of code models. Code models may be selected with the @@ -241,12 +242,13 @@ The "sysroot" is where `rustc` looks for the crates that come with the Rust distribution; this flag allows that to be overridden. ## `--error-format`: control how errors are produced + This flag lets you control the format of messages. Messages are printed to stderr. 
The valid options are: - `human` — Human-readable output. This is the default. -- `json` — Structured JSON output. +- `json` — Structured JSON output. See [the JSON chapter] for more detail. - `short` — Short, one-line messages. ## `--color`: configure coloring of output @@ -273,6 +275,7 @@ pathname syntax. For example `--remap-path-prefix foo=bar` will match `foo/lib.rs` but not `./foo/lib.rs`. ## `--json`: configure json messages printed by the compiler + When the `--error-format=json` option is passed to rustc then all of the compiler's diagnostic output will be emitted in the form of JSON blobs. The @@ -305,9 +308,13 @@ to customize the output: Note that it is invalid to combine the `--json` argument with the `--color` argument, and it is required to combine `--json` with `--error-format=json`. +See [the JSON chapter] for more detail. + ## `@path`: load command-line flags from a path If you specify `@path` on the command-line, then it will open `path` and read command line options from it. These options are one per line; a blank line indicates an empty option. The file can use Unix or Windows style line endings, and must be encoded as UTF-8. + +[the JSON chapter]: json.md diff --git a/src/doc/rustc/src/json.md b/src/doc/rustc/src/json.md new file mode 100644 index 0000000000..b737849516 --- /dev/null +++ b/src/doc/rustc/src/json.md @@ -0,0 +1,231 @@ +# JSON Output + +This chapter documents the JSON structures emitted by `rustc`. JSON may be +enabled with the [`--error-format=json` flag][option-error-format]. Additional +options may be specified with the [`--json` flag][option-json] which can +change which messages are generated, and the format of the messages. + +JSON messages are emitted one per line to stderr. + +If parsing the output with Rust, the +[`cargo_metadata`](https://crates.io/crates/cargo_metadata) crate provides +some support for parsing the messages. + +When parsing, care should be taken to be forwards-compatible with future changes +to the format. Optional values may be `null`. New fields may be added. Enumerated +fields like "level" or "suggestion_applicability" may add new values. + +## Diagnostics + +Diagnostic messages provide errors or possible concerns generated during +compilation. `rustc` provides detailed information about where the diagnostic +originates, along with hints and suggestions. + +Diagnostics are arranged in a parent/child relationship where the parent +diagnostic value is the core of the diagnostic, and the attached children +provide additional context, help, and information. + +Diagnostics have the following format: + +```javascript +{ + /* The primary message. */ + "message": "unused variable: `x`", + /* The diagnostic code. + Some messages may set this value to null. + */ + "code": { + /* A unique string identifying which diagnostic triggered. */ + "code": "unused_variables", + /* An optional string explaining more detail about the diagnostic code. */ + "explanation": null + }, + /* The severity of the diagnostic. + Values may be: + - "error": A fatal error that prevents compilation. + - "warning": A possible error or concern. + - "note": Additional information or context about the diagnostic. + - "help": A suggestion on how to resolve the diagnostic. + - "failure-note": A note attached to the message for further information. + - "error: internal compiler error": Indicates a bug within the compiler. + */ + "level": "warning", + /* An array of source code locations to point out specific details about + where the diagnostic originates from. 
This may be empty, for example + for some global messages, or child messages attached to a parent. + + Character offsets are offsets of Unicode Scalar Values. + */ + "spans": [ + { + /* The file where the span is located. + For spans located within a macro expansion, this will be the + name of the expanded macro in the format "". + */ + "file_name": "lib.rs", + /* The byte offset where the span starts (0-based, inclusive). */ + "byte_start": 21, + /* The byte offset where the span ends (0-based, exclusive). */ + "byte_end": 22, + /* The first line number of the span (1-based, inclusive). */ + "line_start": 2, + /* The last line number of the span (1-based, inclusive). */ + "line_end": 2, + /* The first character offset of the line_start (1-based, inclusive). */ + "column_start": 9, + /* The last character offset of the line_end (1-based, exclusive). */ + "column_end": 10, + /* Whether or not this is the "primary" span. + + This indicates that this span is the focal point of the + diagnostic. + + There are rare cases where multiple spans may be marked as + primary. For example, "immutable borrow occurs here" and + "mutable borrow ends here" can be two separate primary spans. + + The top (parent) message should always have at least one + primary span, unless it has zero spans. Child messages may have + zero or more primary spans. + */ + "is_primary": true, + /* An array of objects showing the original source code for this + span. This shows the entire lines of text where the span is + located. A span across multiple lines will have a separate + value for each line. + */ + "text": [ + { + /* The entire line of the original source code. */ + "text": " let x = 123;", + /* The first character offset of the line of + where the span covers this line (1-based, inclusive). */ + "highlight_start": 9, + /* The last character offset of the line of + where the span covers this line (1-based, exclusive). */ + "highlight_end": 10 + } + ], + /* An optional message to display at this span location. + This is typically null for primary spans. + */ + "label": null, + /* An optional string of a suggested replacement for this span to + solve the issue. Tools may try to replace the contents of the + span with this text. + */ + "suggested_replacement": null, + /* An optional string that indicates the confidence of the + "suggested_replacement". Tools may use this value to determine + whether or not suggestions should be automatically applied. + + Possible values may be: + - "MachineApplicable": The suggestion is definitely what the + user intended. This suggestion should be automatically + applied. + - "MaybeIncorrect": The suggestion may be what the user + intended, but it is uncertain. The suggestion should result + in valid Rust code if it is applied. + - "HasPlaceholders": The suggestion contains placeholders like + `(...)`. The suggestion cannot be applied automatically + because it will not result in valid Rust code. The user will + need to fill in the placeholders. + - "Unspecified": The applicability of the suggestion is unknown. + */ + "suggestion_applicability": null, + /* An optional object indicating the expansion of a macro within + this span. + + If a message occurs within a macro invocation, this object will + provide details of where within the macro expansion the message + is located. + */ + "expansion": { + /* The span of the macro invocation. + Uses the same span definition as the "spans" array. + */ + "span": {/*...*/} + /* Name of the macro, such as "foo!" or "#[derive(Eq)]". 
*/ + "macro_decl_name": "some_macro!", + /* Optional span where the relevant part of the macro is + defined. */ + "def_site_span": {/*...*/}, + } + } + ], + /* Array of attached diagnostic messages. + This is an array of objects using the same format as the parent + message. Children are not nested (children do not themselves + contain "children" definitions). + */ + "children": [ + { + "message": "`#[warn(unused_variables)]` on by default", + "code": null, + "level": "note", + "spans": [], + "children": [], + "rendered": null + }, + { + "message": "consider prefixing with an underscore", + "code": null, + "level": "help", + "spans": [ + { + "file_name": "lib.rs", + "byte_start": 21, + "byte_end": 22, + "line_start": 2, + "line_end": 2, + "column_start": 9, + "column_end": 10, + "is_primary": true, + "text": [ + { + "text": " let x = 123;", + "highlight_start": 9, + "highlight_end": 10 + } + ], + "label": null, + "suggested_replacement": "_x", + "suggestion_applicability": "MachineApplicable", + "expansion": null + } + ], + "children": [], + "rendered": null + } + ], + /* Optional string of the rendered version of the diagnostic as displayed + by rustc. Note that this may be influenced by the `--json` flag. + */ + "rendered": "warning: unused variable: `x`\n --> lib.rs:2:9\n |\n2 | let x = 123;\n | ^ help: consider prefixing with an underscore: `_x`\n |\n = note: `#[warn(unused_variables)]` on by default\n\n" +} +``` + +## Artifact notifications + +Artifact notifications are emitted when the [`--json=artifacts` +flag][option-json] is used. They indicate that a file artifact has been saved +to disk. More information about emit kinds may be found in the [`--emit` +flag][option-emit] documentation. + +```javascript +{ + /* The filename that was generated. */ + "artifact": "libfoo.rlib", + /* The kind of artifact that was generated. Possible values: + - "link": The generated crate as specified by the crate-type. + - "dep-info": The `.d` file with dependency information in a Makefile-like syntax. + - "metadata": The Rust `.rmeta` file containing metadata about the crate. + - "save-analysis": A JSON file emitted by the `-Zsave-analysis` feature. + */ + "emit": "link" +} +``` + +[option-emit]: command-line-arguments.md#option-emit +[option-error-format]: command-line-arguments.md#option-error-format +[option-json]: command-line-arguments.md#option-json diff --git a/src/doc/rustc/src/lints/listing/deny-by-default.md b/src/doc/rustc/src/lints/listing/deny-by-default.md index 6574267f18..5688e90ada 100644 --- a/src/doc/rustc/src/lints/listing/deny-by-default.md +++ b/src/doc/rustc/src/lints/listing/deny-by-default.md @@ -222,3 +222,28 @@ error: invalid `crate_type` value | ^^^^^^^^^^^^^^^^^^^^ | ``` + +## const-err + +This lint detects expressions that will always panic at runtime and would be an +error in a `const` context. + +```rust,ignore +let _ = [0; 4][4]; +``` + +This will produce: + +```text +error: index out of bounds: the len is 4 but the index is 4 + --> src/lib.rs:1:9 + | +1 | let _ = [0; 4][4]; + | ^^^^^^^^^ + | +``` + +## order-dependent-trait-objects + +This lint detects a trait coherency violation that would allow creating two +trait impls for the same dynamic trait object involving marker traits. 
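As a sketch of the kind of code this lint catches, consider two impls for trait objects that differ only in the order of their marker traits, so both describe the same dynamic type; the exact compiler output is omitted here.

```rust,ignore
trait Trait {}

// `dyn Send + Sync` and `dyn Sync + Send` are the same trait object type;
// only the textual order of the marker traits differs, so these impls overlap.
impl Trait for dyn Send + Sync {}
impl Trait for dyn Sync + Send {}
```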
diff --git a/src/doc/rustc/src/lints/listing/warn-by-default.md b/src/doc/rustc/src/lints/listing/warn-by-default.md index e486240fda..813d7c4baf 100644 --- a/src/doc/rustc/src/lints/listing/warn-by-default.md +++ b/src/doc/rustc/src/lints/listing/warn-by-default.md @@ -596,30 +596,6 @@ warning: function cannot return without recursing | ``` -## unions-with-drop-fields - -This lint detects use of unions that contain fields with possibly non-trivial drop code. Some -example code that triggers this lint: - -```rust -#![feature(untagged_unions)] - -union U { - s: String, -} -``` - -This will produce: - -```text -warning: union contains a field with possibly non-trivial drop code, drop code of union fields is ignored when dropping the union - --> src/main.rs:4:5 - | -4 | s: String, - | ^^^^^^^^^ - | -``` - ## unknown-lints This lint detects unrecognized lint attribute. Some diff --git a/src/doc/rustc/src/targets/index.md b/src/doc/rustc/src/targets/index.md index 3d63d072be..5859df83f6 100644 --- a/src/doc/rustc/src/targets/index.md +++ b/src/doc/rustc/src/targets/index.md @@ -11,3 +11,9 @@ To compile to a particular target, use the `--target` flag: ```bash $ rustc src/main.rs --target=wasm32-unknown-unknown ``` +## Target Features +`x86`, and `ARMv8` are two popular CPU architectures. Their instruction sets form a common baseline across most CPUs. However, some CPUs extend these with custom instruction sets, e.g. vector (`AVX`), bitwise manipulation (`BMI`) or cryptographic (`AES`). + +Developers, who know on which CPUs their compiled code is going to run can choose to add (or remove) CPU specific instruction sets via the `-C target-feature=val` flag. + +Please note, that this flag is generally considered as unsafe. More details can be found in [this section](known-issues.md). diff --git a/src/doc/rustc/src/targets/known-issues.md b/src/doc/rustc/src/targets/known-issues.md new file mode 100644 index 0000000000..89fd8ea6d3 --- /dev/null +++ b/src/doc/rustc/src/targets/known-issues.md @@ -0,0 +1,13 @@ +# Known Issues +This section informs you about known "gotchas". Keep in mind, that this section is (and always will be) incomplete. For suggestions and amendments, feel free to [contribute](../contributing.md) to this guide. + +## Target Features +Most target-feature problems arise, when mixing code that have the target-feature _enabled_ with code that have it _disabled_. If you want to avoid undefined behavior, it is recommended to build _all code_ (including the standard library and imported crates) with a common set of target-features. + +By default, compiling your code with the `-C target-feature` flag will not recompile the entire standard library and/or imported crates with matching target features. Therefore, target features are generally considered as unsafe. Using `#[target_feature]` on individual functions makes the function unsafe. + +Examples: + +| Target-Feature | Issue | Seen on | Description | Details | +| -------------- | ----- | ------- | ----------- | ------- | +| `+soft-float`
and `-sse` | Segfaults and ABI mismatches | `x86` and `x86-64` | The `x86` and `x86_64` architecture uses SSE registers (aka `xmm`) for floating point operations. Using software emulated floats ("soft-floats") disables usage of `xmm` registers, but parts of Rust's core libraries (e.g. `std::f32` or `std::f64`) are compiled without soft-floats and expect parameters to be passed in `xmm` registers. This leads to ABI mismatches.
Attempting to compile with disabled SSE causes the same error, too. | [#63466](https://github.com/rust-lang/rust/issues/63466) | diff --git a/src/doc/rustdoc/src/documentation-tests.md b/src/doc/rustdoc/src/documentation-tests.md index c9acd3c307..bc1da5ff15 100644 --- a/src/doc/rustdoc/src/documentation-tests.md +++ b/src/doc/rustdoc/src/documentation-tests.md @@ -379,3 +379,49 @@ However, it's preferable to use fenced code blocks over indented code blocks. Not only are fenced code blocks considered more idiomatic for Rust code, but there is no way to use directives such as `ignore` or `should_panic` with indented code blocks. + +### Include items only when collecting doctests + +Rustdoc's documentation tests can do some things that regular unit tests can't, so it can +sometimes be useful to extend your doctests with samples that wouldn't otherwise need to be in +documentation. To this end, Rustdoc allows you to have certain items only appear when it's +collecting doctests, so you can utilize doctest functionality without forcing the test to appear in +docs, or to find an arbitrary private item to include it on. + +When compiling a crate for use in doctests (with `--test` option), rustdoc will set `cfg(doctest)`. +Note that they will still link against only the public items of your crate; if you need to test +private items, you need to write a unit test. + +In this example, we're adding doctests that we know won't compile, to verify that our struct can +only take in valid data: + +```rust +/// We have a struct here. Remember it doesn't accept negative numbers! +pub struct MyStruct(pub usize); + +/// ```compile_fail +/// let x = my_crate::MyStruct(-5); +/// ``` +#[cfg(doctest)] +pub struct MyStructOnlyTakesUsize; +``` + +Note that the struct `MyStructOnlyTakesUsize` here isn't actually part of your public crate +API. The use of `#[cfg(doctest)]` makes sure that this struct only exists while rustdoc is +collecting doctests. This means that its doctest is executed when `--test` is passed to rustdoc, +but is hidden from the public documentation. + +Another possible use of `cfg(doctest)` is to test doctests that are included in your README file +without including it in your main documentation. For example, you could write this into your +`lib.rs` to test your README as part of your doctests: + +```rust,ignore +#![feature(extern_doc)] + +#[doc(include="../README.md")] +#[cfg(doctest)] +pub struct ReadmeDoctests; +``` + +This will include your README as documentation on the hidden struct `ReadmeDoctests`, which will +then be tested alongside the rest of your doctests. diff --git a/src/doc/rustdoc/src/unstable-features.md b/src/doc/rustdoc/src/unstable-features.md index 49d05b5038..3c3e72aa37 100644 --- a/src/doc/rustdoc/src/unstable-features.md +++ b/src/doc/rustdoc/src/unstable-features.md @@ -211,36 +211,6 @@ pub struct BigX; Then, when looking for it through the `rustdoc` search, if you enter "x" or "big", search will show the `BigX` struct first. -### Include items only when collecting doctests - -Rustdoc's [documentation tests] can do some things that regular unit tests can't, so it can -sometimes be useful to extend your doctests with samples that wouldn't otherwise need to be in -documentation. To this end, Rustdoc allows you to have certain items only appear when it's -collecting doctests, so you can utilize doctest functionality without forcing the test to appear in -docs, or to find an arbitrary private item to include it on. 
- -If you add `#![feature(cfg_doctest)]` to your crate, Rustdoc will set `cfg(doctest)` when collecting -doctests. Note that they will still link against only the public items of your crate; if you need to -test private items, unit tests are still the way to go. - -In this example, we're adding doctests that we know won't compile, to verify that our struct can -only take in valid data: - -```rust -#![feature(cfg_doctest)] - -/// We have a struct here. Remember it doesn't accept negative numbers! -pub struct MyStruct(usize); - -/// ```compile_fail -/// let x = my_crate::MyStruct(-5); -/// ``` -#[cfg(doctest)] -pub struct MyStructOnlyTakesUsize; -``` - -[documentation tests]: documentation-tests.html - ## Unstable command-line arguments These features are enabled by passing a command-line flag to Rustdoc, but the flags in question are diff --git a/src/doc/unstable-book/src/compiler-flags/report-time.md b/src/doc/unstable-book/src/compiler-flags/report-time.md new file mode 100644 index 0000000000..ed4e9c6b56 --- /dev/null +++ b/src/doc/unstable-book/src/compiler-flags/report-time.md @@ -0,0 +1,80 @@ +# `report-time` + +The tracking issue for this feature is: [#64888] + +[#64888]: https://github.com/rust-lang/rust/issues/64888 + +------------------------ + +The `report-time` feature adds the ability to report the execution time of +tests generated via `libtest`. + +This is an unstable feature, so you have to provide `-Zunstable-options` to get +this feature working. + +Sample usage command: + +```sh +./test_executable -Zunstable-options --report-time +``` + +Available options: + +```sh +--report-time [plain|colored] + Show execution time of each test. Available values: + plain = do not colorize the execution time (default); + colored = colorize output according to the `color` + parameter value; + Threshold values for colorized output can be + configured via + `RUST_TEST_TIME_UNIT`, `RUST_TEST_TIME_INTEGRATION` + and + `RUST_TEST_TIME_DOCTEST` environment variables. + Expected format of environment variable is + `VARIABLE=WARN_TIME,CRITICAL_TIME`. + Not available for --format=terse +--ensure-time + Treat excess of the test execution time limit as + an error. + Threshold values for this option can be configured via + `RUST_TEST_TIME_UNIT`, `RUST_TEST_TIME_INTEGRATION` + and + `RUST_TEST_TIME_DOCTEST` environment variables. + Expected format of environment variable is + `VARIABLE=WARN_TIME,CRITICAL_TIME`. + `CRITICAL_TIME` here means the limit that should not be + exceeded by the test. +``` + +Example of the environment variable format: + +```sh +RUST_TEST_TIME_UNIT=100,200 +``` + +where 100 stands for warn time, and 200 stands for critical time. + +## Examples + +```sh +cargo test --tests -- -Zunstable-options --report-time + Finished dev [unoptimized + debuginfo] target(s) in 0.02s + Running target/debug/deps/example-27fb188025bec02c + +running 3 tests +test tests::unit_test_quick ... ok <0.000s> +test tests::unit_test_warn ... ok <0.055s> +test tests::unit_test_critical ... ok <0.110s> + +test result: ok. 3 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out + + Running target/debug/deps/tests-cedb06f6526d15d9 + +running 3 tests +test unit_test_quick ... ok <0.000s> +test unit_test_warn ... ok <0.550s> +test unit_test_critical ... ok <1.100s> + +test result: ok.
3 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out +``` diff --git a/src/doc/unstable-book/src/language-features/lang-items.md b/src/doc/unstable-book/src/language-features/lang-items.md index 3ee024c6b5..d4ad65e84b 100644 --- a/src/doc/unstable-book/src/language-features/lang-items.md +++ b/src/doc/unstable-book/src/language-features/lang-items.md @@ -249,11 +249,11 @@ the source code. - Runtime - `start`: `libstd/rt.rs` - `eh_personality`: `libpanic_unwind/emcc.rs` (EMCC) - - `eh_personality`: `libpanic_unwind/seh64_gnu.rs` (SEH64 GNU) + - `eh_personality`: `libpanic_unwind/gcc.rs` (GNU) - `eh_personality`: `libpanic_unwind/seh.rs` (SEH) - - `eh_unwind_resume`: `libpanic_unwind/seh64_gnu.rs` (SEH64 GNU) - `eh_unwind_resume`: `libpanic_unwind/gcc.rs` (GCC) - - `msvc_try_filter`: `libpanic_unwind/seh.rs` (SEH) + - `eh_catch_typeinfo`: `libpanic_unwind/seh.rs` (SEH) + - `eh_catch_typeinfo`: `libpanic_unwind/emcc.rs` (EMCC) - `panic`: `libcore/panicking.rs` - `panic_bounds_check`: `libcore/panicking.rs` - `panic_impl`: `libcore/panicking.rs` diff --git a/src/doc/unstable-book/src/language-features/non-exhaustive.md b/src/doc/unstable-book/src/language-features/non-exhaustive.md deleted file mode 100644 index 907147c17e..0000000000 --- a/src/doc/unstable-book/src/language-features/non-exhaustive.md +++ /dev/null @@ -1,76 +0,0 @@ -# `non_exhaustive` - -The tracking issue for this feature is: [#44109] - -[#44109]: https://github.com/rust-lang/rust/issues/44109 - ------------------------- - -The `non_exhaustive` gate allows you to use the `#[non_exhaustive]` attribute -on structs, enums and enum variants. When applied within a crate, users of the -crate will need to use the `_` pattern when matching enums and use the `..` -pattern when matching structs. Enum variants cannot be matched against. -Structs and enum variants marked as `non_exhaustive` will not be able to -be created normally outside of the defining crate. This is demonstrated -below: - -```rust,ignore (pseudo-Rust) -use std::error::Error as StdError; - -#[non_exhaustive] -pub enum Error { - Message(String), - Other, -} -impl StdError for Error { - fn description(&self) -> &str { - // This will not error, despite being marked as non_exhaustive, as this - // enum is defined within the current crate, it can be matched - // exhaustively. - match *self { - Message(ref s) => s, - Other => "other or unknown error", - } - } -} -``` - -```rust,ignore (pseudo-Rust) -use mycrate::Error; - -// This will not error as the non_exhaustive Error enum has been matched with -// a wildcard. -match error { - Message(ref s) => ..., - Other => ..., - _ => ..., -} -``` - -```rust,ignore (pseudo-Rust) -#[non_exhaustive] -pub struct Config { - pub window_width: u16, - pub window_height: u16, -} - -// We can create structs as normal within the defining crate when marked as -// non_exhaustive. -let config = Config { window_width: 640, window_height: 480 }; - -// We can match structs exhaustively when within the defining crate. -if let Ok(Config { window_width, window_height }) = load_config() { - // ... -} -``` - -```rust,ignore (pseudo-Rust) -use mycrate::Config; - -// We cannot create a struct like normal if it has been marked as -// non_exhaustive. -let config = Config { window_width: 640, window_height: 480 }; -// By adding the `..` we can match the config as below outside of the crate -// when marked non_exhaustive. -let &Config { window_width, window_height, .. 
} = config; -``` diff --git a/src/doc/unstable-book/src/language-features/track-caller.md b/src/doc/unstable-book/src/language-features/track-caller.md new file mode 100644 index 0000000000..afc11a2b94 --- /dev/null +++ b/src/doc/unstable-book/src/language-features/track-caller.md @@ -0,0 +1,5 @@ +# `track_caller` + +The tracking issue for this feature is: [#47809](https://github.com/rust-lang/rust/issues/47809). + +------------------------ diff --git a/src/grammar/lexer.l b/src/grammar/lexer.l deleted file mode 100644 index 1feb781b2b..0000000000 --- a/src/grammar/lexer.l +++ /dev/null @@ -1,350 +0,0 @@ -%{ -#include -#include - -static int num_hashes; -static int end_hashes; -static int saw_non_hash; - -%} - -%option stack -%option yylineno - -%x str -%x rawstr -%x rawstr_esc_begin -%x rawstr_esc_body -%x rawstr_esc_end -%x byte -%x bytestr -%x rawbytestr -%x rawbytestr_nohash -%x pound -%x shebang_or_attr -%x ltorchar -%x linecomment -%x doc_line -%x blockcomment -%x doc_block -%x suffix - -ident [a-zA-Z\x80-\xff_][a-zA-Z0-9\x80-\xff_]* - -%% - -{ident} { BEGIN(INITIAL); } -(.|\n) { yyless(0); BEGIN(INITIAL); } - -[ \n\t\r] { } - -\xef\xbb\xbf { - // UTF-8 byte order mark (BOM), ignore if in line 1, error otherwise - if (yyget_lineno() != 1) { - return -1; - } -} - -\/\/(\/|\!) { BEGIN(doc_line); yymore(); } -\n { BEGIN(INITIAL); - yyleng--; - yytext[yyleng] = 0; - return ((yytext[2] == '!') ? INNER_DOC_COMMENT : OUTER_DOC_COMMENT); - } -[^\n]* { yymore(); } - -\/\/|\/\/\/\/ { BEGIN(linecomment); } -\n { BEGIN(INITIAL); } -[^\n]* { } - -\/\*(\*|\!)[^*] { yy_push_state(INITIAL); yy_push_state(doc_block); yymore(); } -\/\* { yy_push_state(doc_block); yymore(); } -\*\/ { - yy_pop_state(); - if (yy_top_state() == doc_block) { - yymore(); - } else { - return ((yytext[2] == '!') ? 
INNER_DOC_COMMENT : OUTER_DOC_COMMENT); - } -} -(.|\n) { yymore(); } - -\/\* { yy_push_state(blockcomment); } -\/\* { yy_push_state(blockcomment); } -\*\/ { yy_pop_state(); } -(.|\n) { } - -_ { return UNDERSCORE; } -abstract { return ABSTRACT; } -alignof { return ALIGNOF; } -as { return AS; } -become { return BECOME; } -box { return BOX; } -break { return BREAK; } -catch { return CATCH; } -const { return CONST; } -continue { return CONTINUE; } -crate { return CRATE; } -default { return DEFAULT; } -do { return DO; } -else { return ELSE; } -enum { return ENUM; } -extern { return EXTERN; } -false { return FALSE; } -final { return FINAL; } -fn { return FN; } -for { return FOR; } -if { return IF; } -impl { return IMPL; } -in { return IN; } -let { return LET; } -loop { return LOOP; } -macro { return MACRO; } -match { return MATCH; } -mod { return MOD; } -move { return MOVE; } -mut { return MUT; } -offsetof { return OFFSETOF; } -override { return OVERRIDE; } -priv { return PRIV; } -proc { return PROC; } -pure { return PURE; } -pub { return PUB; } -ref { return REF; } -return { return RETURN; } -self { return SELF; } -sizeof { return SIZEOF; } -static { return STATIC; } -struct { return STRUCT; } -super { return SUPER; } -trait { return TRAIT; } -true { return TRUE; } -type { return TYPE; } -typeof { return TYPEOF; } -union { return UNION; } -unsafe { return UNSAFE; } -unsized { return UNSIZED; } -use { return USE; } -virtual { return VIRTUAL; } -where { return WHERE; } -while { return WHILE; } -yield { return YIELD; } - -{ident} { return IDENT; } - -0x[0-9a-fA-F_]+ { BEGIN(suffix); return LIT_INTEGER; } -0o[0-7_]+ { BEGIN(suffix); return LIT_INTEGER; } -0b[01_]+ { BEGIN(suffix); return LIT_INTEGER; } -[0-9][0-9_]* { BEGIN(suffix); return LIT_INTEGER; } -[0-9][0-9_]*\.(\.|[a-zA-Z]) { yyless(yyleng - 2); BEGIN(suffix); return LIT_INTEGER; } - -[0-9][0-9_]*\.[0-9_]*([eE][-\+]?[0-9_]+)? { BEGIN(suffix); return LIT_FLOAT; } -[0-9][0-9_]*(\.[0-9_]*)?[eE][-\+]?[0-9_]+ { BEGIN(suffix); return LIT_FLOAT; } - -; { return ';'; } -, { return ','; } -\.\.\. { return DOTDOTDOT; } -\.\. { return DOTDOT; } -\. { return '.'; } -\( { return '('; } -\) { return ')'; } -\{ { return '{'; } -\} { return '}'; } -\[ { return '['; } -\] { return ']'; } -@ { return '@'; } -# { BEGIN(pound); yymore(); } -\! { BEGIN(shebang_or_attr); yymore(); } -\[ { - BEGIN(INITIAL); - yyless(2); - return SHEBANG; -} -[^\[\n]*\n { - // Since the \n was eaten as part of the token, yylineno will have - // been incremented to the value 2 if the shebang was on the first - // line. This yyless undoes that, setting yylineno back to 1. - yyless(yyleng - 1); - if (yyget_lineno() == 1) { - BEGIN(INITIAL); - return SHEBANG_LINE; - } else { - BEGIN(INITIAL); - yyless(2); - return SHEBANG; - } -} -. { BEGIN(INITIAL); yyless(1); return '#'; } - -\~ { return '~'; } -:: { return MOD_SEP; } -: { return ':'; } -\$ { return '$'; } -\? { return '?'; } - -== { return EQEQ; } -=> { return FAT_ARROW; } -= { return '='; } -\!= { return NE; } -\! 
{ return '!'; } -\<= { return LE; } -\<\< { return SHL; } -\<\<= { return SHLEQ; } -\< { return '<'; } -\>= { return GE; } -\>\> { return SHR; } -\>\>= { return SHREQ; } -\> { return '>'; } - -\x27 { BEGIN(ltorchar); yymore(); } -static { BEGIN(INITIAL); return STATIC_LIFETIME; } -{ident} { BEGIN(INITIAL); return LIFETIME; } -\\[nrt\\\x27\x220]\x27 { BEGIN(suffix); return LIT_CHAR; } -\\x[0-9a-fA-F]{2}\x27 { BEGIN(suffix); return LIT_CHAR; } -\\u\{([0-9a-fA-F]_*){1,6}\}\x27 { BEGIN(suffix); return LIT_CHAR; } -.\x27 { BEGIN(suffix); return LIT_CHAR; } -[\x80-\xff]{2,4}\x27 { BEGIN(suffix); return LIT_CHAR; } -<> { BEGIN(INITIAL); return -1; } - -b\x22 { BEGIN(bytestr); yymore(); } -\x22 { BEGIN(suffix); return LIT_BYTE_STR; } - -<> { return -1; } -\\[n\nrt\\\x27\x220] { yymore(); } -\\x[0-9a-fA-F]{2} { yymore(); } -\\u\{([0-9a-fA-F]_*){1,6}\} { yymore(); } -\\[^n\nrt\\\x27\x220] { return -1; } -(.|\n) { yymore(); } - -br\x22 { BEGIN(rawbytestr_nohash); yymore(); } -\x22 { BEGIN(suffix); return LIT_BYTE_STR_RAW; } -(.|\n) { yymore(); } -<> { return -1; } - -br/# { - BEGIN(rawbytestr); - yymore(); - num_hashes = 0; - saw_non_hash = 0; - end_hashes = 0; -} -# { - if (!saw_non_hash) { - num_hashes++; - } else if (end_hashes != 0) { - end_hashes++; - if (end_hashes == num_hashes) { - BEGIN(INITIAL); - return LIT_BYTE_STR_RAW; - } - } - yymore(); -} -\x22# { - end_hashes = 1; - if (end_hashes == num_hashes) { - BEGIN(INITIAL); - return LIT_BYTE_STR_RAW; - } - yymore(); -} -(.|\n) { - if (!saw_non_hash) { - saw_non_hash = 1; - } - if (end_hashes != 0) { - end_hashes = 0; - } - yymore(); -} -<> { return -1; } - -b\x27 { BEGIN(byte); yymore(); } -\\[nrt\\\x27\x220]\x27 { BEGIN(INITIAL); return LIT_BYTE; } -\\x[0-9a-fA-F]{2}\x27 { BEGIN(INITIAL); return LIT_BYTE; } -\\u([0-9a-fA-F]_*){4}\x27 { BEGIN(INITIAL); return LIT_BYTE; } -\\U([0-9a-fA-F]_*){8}\x27 { BEGIN(INITIAL); return LIT_BYTE; } -.\x27 { BEGIN(INITIAL); return LIT_BYTE; } -<> { BEGIN(INITIAL); return -1; } - -r\x22 { BEGIN(rawstr); yymore(); } -\x22 { BEGIN(suffix); return LIT_STR_RAW; } -(.|\n) { yymore(); } -<> { return -1; } - -r/# { - BEGIN(rawstr_esc_begin); - yymore(); - num_hashes = 0; - saw_non_hash = 0; - end_hashes = 0; -} - -# { - num_hashes++; - yymore(); -} -\x22 { - BEGIN(rawstr_esc_body); - yymore(); -} -(.|\n) { return -1; } - -\x22/# { - BEGIN(rawstr_esc_end); - yymore(); - } -(.|\n) { - yymore(); - } - -# { - end_hashes++; - if (end_hashes == num_hashes) { - BEGIN(INITIAL); - return LIT_STR_RAW; - } - yymore(); - } -[^#] { - end_hashes = 0; - BEGIN(rawstr_esc_body); - yymore(); - } - -<> { return -1; } - -\x22 { BEGIN(str); yymore(); } -\x22 { BEGIN(suffix); return LIT_STR; } - -<> { return -1; } -\\[n\nr\rt\\\x27\x220] { yymore(); } -\\x[0-9a-fA-F]{2} { yymore(); } -\\u\{([0-9a-fA-F]_*){1,6}\} { yymore(); } -\\[^n\nrt\\\x27\x220] { return -1; } -(.|\n) { yymore(); } - -\<- { return LARROW; } --\> { return RARROW; } -- { return '-'; } --= { return MINUSEQ; } -&& { return ANDAND; } -& { return '&'; } -&= { return ANDEQ; } -\|\| { return OROR; } -\| { return '|'; } -\|= { return OREQ; } -\+ { return '+'; } -\+= { return PLUSEQ; } -\* { return '*'; } -\*= { return STAREQ; } -\/ { return '/'; } -\/= { return SLASHEQ; } -\^ { return '^'; } -\^= { return CARETEQ; } -% { return '%'; } -%= { return PERCENTEQ; } - -<> { return 0; } - -%% diff --git a/src/grammar/parser-lalr-main.c b/src/grammar/parser-lalr-main.c deleted file mode 100644 index 6348190cc1..0000000000 --- a/src/grammar/parser-lalr-main.c +++ /dev/null @@ -1,193 
+0,0 @@ -#include -#include -#include -#include - -extern int yylex(); -extern int rsparse(); - -#define PUSHBACK_LEN 4 - -static char pushback[PUSHBACK_LEN]; -static int verbose; - -void print(const char* format, ...) { - va_list args; - va_start(args, format); - if (verbose) { - vprintf(format, args); - } - va_end(args); -} - -// If there is a non-null char at the head of the pushback queue, -// dequeue it and shift the rest of the queue forwards. Otherwise, -// return the token from calling yylex. -int rslex() { - if (pushback[0] == '\0') { - return yylex(); - } else { - char c = pushback[0]; - memmove(pushback, pushback + 1, PUSHBACK_LEN - 1); - pushback[PUSHBACK_LEN - 1] = '\0'; - return c; - } -} - -// Note: this does nothing if the pushback queue is full. As long as -// there aren't more than PUSHBACK_LEN consecutive calls to push_back -// in an action, this shouldn't be a problem. -void push_back(char c) { - for (int i = 0; i < PUSHBACK_LEN; ++i) { - if (pushback[i] == '\0') { - pushback[i] = c; - break; - } - } -} - -extern int rsdebug; - -struct node { - struct node *next; - struct node *prev; - int own_string; - char const *name; - int n_elems; - struct node *elems[]; -}; - -struct node *nodes = NULL; -int n_nodes; - -struct node *mk_node(char const *name, int n, ...) { - va_list ap; - int i = 0; - unsigned sz = sizeof(struct node) + (n * sizeof(struct node *)); - struct node *nn, *nd = (struct node *)malloc(sz); - - print("# New %d-ary node: %s = %p\n", n, name, nd); - - nd->own_string = 0; - nd->prev = NULL; - nd->next = nodes; - if (nodes) { - nodes->prev = nd; - } - nodes = nd; - - nd->name = name; - nd->n_elems = n; - - va_start(ap, n); - while (i < n) { - nn = va_arg(ap, struct node *); - print("# arg[%d]: %p\n", i, nn); - print("# (%s ...)\n", nn->name); - nd->elems[i++] = nn; - } - va_end(ap); - n_nodes++; - return nd; -} - -struct node *mk_atom(char *name) { - struct node *nd = mk_node((char const *)strdup(name), 0); - nd->own_string = 1; - return nd; -} - -struct node *mk_none() { - return mk_atom(""); -} - -struct node *ext_node(struct node *nd, int n, ...) 
{ - va_list ap; - int i = 0, c = nd->n_elems + n; - unsigned sz = sizeof(struct node) + (c * sizeof(struct node *)); - struct node *nn; - - print("# Extending %d-ary node by %d nodes: %s = %p", - nd->n_elems, c, nd->name, nd); - - if (nd->next) { - nd->next->prev = nd->prev; - } - if (nd->prev) { - nd->prev->next = nd->next; - } - nd = realloc(nd, sz); - nd->prev = NULL; - nd->next = nodes; - nodes->prev = nd; - nodes = nd; - - print(" ==> %p\n", nd); - - va_start(ap, n); - while (i < n) { - nn = va_arg(ap, struct node *); - print("# arg[%d]: %p\n", i, nn); - print("# (%s ...)\n", nn->name); - nd->elems[nd->n_elems++] = nn; - ++i; - } - va_end(ap); - return nd; -} - -int const indent_step = 4; - -void print_indent(int depth) { - while (depth) { - if (depth-- % indent_step == 0) { - print("|"); - } else { - print(" "); - } - } -} - -void print_node(struct node *n, int depth) { - int i = 0; - print_indent(depth); - if (n->n_elems == 0) { - print("%s\n", n->name); - } else { - print("(%s\n", n->name); - for (i = 0; i < n->n_elems; ++i) { - print_node(n->elems[i], depth + indent_step); - } - print_indent(depth); - print(")\n"); - } -} - -int main(int argc, char **argv) { - if (argc == 2 && strcmp(argv[1], "-v") == 0) { - verbose = 1; - } else { - verbose = 0; - } - int ret = 0; - struct node *tmp; - memset(pushback, '\0', PUSHBACK_LEN); - ret = rsparse(); - print("--- PARSE COMPLETE: ret:%d, n_nodes:%d ---\n", ret, n_nodes); - if (nodes) { - print_node(nodes, 0); - } - while (nodes) { - tmp = nodes; - nodes = tmp->next; - if (tmp->own_string) { - free((void*)tmp->name); - } - free(tmp); - } - return ret; -} - -void rserror(char const *s) { - fprintf(stderr, "%s\n", s); -} diff --git a/src/grammar/parser-lalr.y b/src/grammar/parser-lalr.y deleted file mode 100644 index 5585c95a5a..0000000000 --- a/src/grammar/parser-lalr.y +++ /dev/null @@ -1,1982 +0,0 @@ -%{ -#define YYERROR_VERBOSE -#define YYSTYPE struct node * -struct node; -extern int yylex(); -extern void yyerror(char const *s); -extern struct node *mk_node(char const *name, int n, ...); -extern struct node *mk_atom(char *text); -extern struct node *mk_none(); -extern struct node *ext_node(struct node *nd, int n, ...); -extern void push_back(char c); -extern char *yytext; -%} -%debug - -%token SHL -%token SHR -%token LE -%token EQEQ -%token NE -%token GE -%token ANDAND -%token OROR -%token SHLEQ -%token SHREQ -%token MINUSEQ -%token ANDEQ -%token OREQ -%token PLUSEQ -%token STAREQ -%token SLASHEQ -%token CARETEQ -%token PERCENTEQ -%token DOTDOT -%token DOTDOTDOT -%token MOD_SEP -%token RARROW -%token LARROW -%token FAT_ARROW -%token LIT_BYTE -%token LIT_CHAR -%token LIT_INTEGER -%token LIT_FLOAT -%token LIT_STR -%token LIT_STR_RAW -%token LIT_BYTE_STR -%token LIT_BYTE_STR_RAW -%token IDENT -%token UNDERSCORE -%token LIFETIME - -// keywords -%token SELF -%token STATIC -%token ABSTRACT -%token ALIGNOF -%token AS -%token BECOME -%token BREAK -%token CATCH -%token CRATE -%token DO -%token ELSE -%token ENUM -%token EXTERN -%token FALSE -%token FINAL -%token FN -%token FOR -%token IF -%token IMPL -%token IN -%token LET -%token LOOP -%token MACRO -%token MATCH -%token MOD -%token MOVE -%token MUT -%token OFFSETOF -%token OVERRIDE -%token PRIV -%token PUB -%token PURE -%token REF -%token RETURN -%token SIZEOF -%token STRUCT -%token SUPER -%token UNION -%token UNSIZED -%token TRUE -%token TRAIT -%token TYPE -%token UNSAFE -%token VIRTUAL -%token YIELD -%token DEFAULT -%token USE -%token WHILE -%token CONTINUE -%token PROC -%token BOX -%token CONST 
-%token WHERE -%token TYPEOF -%token INNER_DOC_COMMENT -%token OUTER_DOC_COMMENT - -%token SHEBANG -%token SHEBANG_LINE -%token STATIC_LIFETIME - - /* - Quoting from the Bison manual: - - "Finally, the resolution of conflicts works by comparing the precedence - of the rule being considered with that of the lookahead token. If the - token's precedence is higher, the choice is to shift. If the rule's - precedence is higher, the choice is to reduce. If they have equal - precedence, the choice is made based on the associativity of that - precedence level. The verbose output file made by ‘-v’ (see Invoking - Bison) says how each conflict was resolved" - */ - -// We expect no shift/reduce or reduce/reduce conflicts in this grammar; -// all potential ambiguities are scrutinized and eliminated manually. -%expect 0 - -// fake-precedence symbol to cause '|' bars in lambda context to parse -// at low precedence, permit things like |x| foo = bar, where '=' is -// otherwise lower-precedence than '|'. Also used for proc() to cause -// things like proc() a + b to parse as proc() { a + b }. -%precedence LAMBDA - -%precedence SELF - -// MUT should be lower precedence than IDENT so that in the pat rule, -// "& MUT pat" has higher precedence than "binding_mode ident [@ pat]" -%precedence MUT - -// IDENT needs to be lower than '{' so that 'foo {' is shifted when -// trying to decide if we've got a struct-construction expr (esp. in -// contexts like 'if foo { .') -// -// IDENT also needs to be lower precedence than '<' so that '<' in -// 'foo:bar . <' is shifted (in a trait reference occurring in a -// bounds list), parsing as foo:(bar) rather than (foo:bar). -%precedence IDENT - // Put the weak keywords that can be used as idents here as well -%precedence CATCH -%precedence DEFAULT -%precedence UNION - -// A couple fake-precedence symbols to use in rules associated with + -// and < in trailing type contexts. These come up when you have a type -// in the RHS of operator-AS, such as "foo as bar". The "<" there -// has to be shifted so the parser keeps trying to parse a type, even -// though it might well consider reducing the type "bar" and then -// going on to "<" as a subsequent binop. The "+" case is with -// trailing type-bounds ("foo as bar:A+B"), for the same reason. -%precedence SHIFTPLUS - -%precedence MOD_SEP -%precedence RARROW ':' - -// In where clauses, "for" should have greater precedence when used as -// a higher ranked constraint than when used as the beginning of a -// for_in_type (which is a ty) -%precedence FORTYPE -%precedence FOR - -// Binops & unops, and their precedences -%precedence '?' -%precedence BOX -%nonassoc DOTDOT - -// RETURN needs to be lower-precedence than tokens that start -// prefix_exprs -%precedence RETURN YIELD - -%right '=' SHLEQ SHREQ MINUSEQ ANDEQ OREQ PLUSEQ STAREQ SLASHEQ CARETEQ PERCENTEQ -%right LARROW -%left OROR -%left ANDAND -%left EQEQ NE -%left '<' '>' LE GE -%left '|' -%left '^' -%left '&' -%left SHL SHR -%left '+' '-' -%precedence AS -%left '*' '/' '%' -%precedence '!' - -%precedence '{' '[' '(' '.' 
- -%precedence RANGE - -%start crate - -%% - -//////////////////////////////////////////////////////////////////////// -// Part 1: Items and attributes -//////////////////////////////////////////////////////////////////////// - -crate -: maybe_shebang inner_attrs maybe_mod_items { mk_node("crate", 2, $2, $3); } -| maybe_shebang maybe_mod_items { mk_node("crate", 1, $2); } -; - -maybe_shebang -: SHEBANG_LINE -| %empty -; - -maybe_inner_attrs -: inner_attrs -| %empty { $$ = mk_none(); } -; - -inner_attrs -: inner_attr { $$ = mk_node("InnerAttrs", 1, $1); } -| inner_attrs inner_attr { $$ = ext_node($1, 1, $2); } -; - -inner_attr -: SHEBANG '[' meta_item ']' { $$ = mk_node("InnerAttr", 1, $3); } -| INNER_DOC_COMMENT { $$ = mk_node("InnerAttr", 1, mk_node("doc-comment", 1, mk_atom(yytext))); } -; - -maybe_outer_attrs -: outer_attrs -| %empty { $$ = mk_none(); } -; - -outer_attrs -: outer_attr { $$ = mk_node("OuterAttrs", 1, $1); } -| outer_attrs outer_attr { $$ = ext_node($1, 1, $2); } -; - -outer_attr -: '#' '[' meta_item ']' { $$ = $3; } -| OUTER_DOC_COMMENT { $$ = mk_node("doc-comment", 1, mk_atom(yytext)); } -; - -meta_item -: ident { $$ = mk_node("MetaWord", 1, $1); } -| ident '=' lit { $$ = mk_node("MetaNameValue", 2, $1, $3); } -| ident '(' meta_seq ')' { $$ = mk_node("MetaList", 2, $1, $3); } -| ident '(' meta_seq ',' ')' { $$ = mk_node("MetaList", 2, $1, $3); } -; - -meta_seq -: %empty { $$ = mk_none(); } -| meta_item { $$ = mk_node("MetaItems", 1, $1); } -| meta_seq ',' meta_item { $$ = ext_node($1, 1, $3); } -; - -maybe_mod_items -: mod_items -| %empty { $$ = mk_none(); } -; - -mod_items -: mod_item { $$ = mk_node("Items", 1, $1); } -| mod_items mod_item { $$ = ext_node($1, 1, $2); } -; - -attrs_and_vis -: maybe_outer_attrs visibility { $$ = mk_node("AttrsAndVis", 2, $1, $2); } -; - -mod_item -: attrs_and_vis item { $$ = mk_node("Item", 2, $1, $2); } -; - -// items that can appear outside of a fn block -item -: stmt_item -| item_macro -; - -// items that can appear in "stmts" -stmt_item -: item_static -| item_const -| item_type -| block_item -| view_item -; - -item_static -: STATIC ident ':' ty '=' expr ';' { $$ = mk_node("ItemStatic", 3, $2, $4, $6); } -| STATIC MUT ident ':' ty '=' expr ';' { $$ = mk_node("ItemStatic", 3, $3, $5, $7); } -; - -item_const -: CONST ident ':' ty '=' expr ';' { $$ = mk_node("ItemConst", 3, $2, $4, $6); } -; - -item_macro -: path_expr '!' maybe_ident parens_delimited_token_trees ';' { $$ = mk_node("ItemMacro", 3, $1, $3, $4); } -| path_expr '!' maybe_ident braces_delimited_token_trees { $$ = mk_node("ItemMacro", 3, $1, $3, $4); } -| path_expr '!' 
maybe_ident brackets_delimited_token_trees ';'{ $$ = mk_node("ItemMacro", 3, $1, $3, $4); } -; - -view_item -: use_item -| extern_fn_item -| EXTERN CRATE ident ';' { $$ = mk_node("ViewItemExternCrate", 1, $3); } -| EXTERN CRATE ident AS ident ';' { $$ = mk_node("ViewItemExternCrate", 2, $3, $5); } -; - -extern_fn_item -: EXTERN maybe_abi item_fn { $$ = mk_node("ViewItemExternFn", 2, $2, $3); } -; - -use_item -: USE view_path ';' { $$ = mk_node("ViewItemUse", 1, $2); } -; - -view_path -: path_no_types_allowed { $$ = mk_node("ViewPathSimple", 1, $1); } -| path_no_types_allowed MOD_SEP '{' '}' { $$ = mk_node("ViewPathList", 2, $1, mk_atom("ViewPathListEmpty")); } -| MOD_SEP '{' '}' { $$ = mk_node("ViewPathList", 1, mk_atom("ViewPathListEmpty")); } -| path_no_types_allowed MOD_SEP '{' idents_or_self '}' { $$ = mk_node("ViewPathList", 2, $1, $4); } -| MOD_SEP '{' idents_or_self '}' { $$ = mk_node("ViewPathList", 1, $3); } -| path_no_types_allowed MOD_SEP '{' idents_or_self ',' '}' { $$ = mk_node("ViewPathList", 2, $1, $4); } -| MOD_SEP '{' idents_or_self ',' '}' { $$ = mk_node("ViewPathList", 1, $3); } -| path_no_types_allowed MOD_SEP '*' { $$ = mk_node("ViewPathGlob", 1, $1); } -| MOD_SEP '*' { $$ = mk_atom("ViewPathGlob"); } -| '*' { $$ = mk_atom("ViewPathGlob"); } -| '{' '}' { $$ = mk_atom("ViewPathListEmpty"); } -| '{' idents_or_self '}' { $$ = mk_node("ViewPathList", 1, $2); } -| '{' idents_or_self ',' '}' { $$ = mk_node("ViewPathList", 1, $2); } -| path_no_types_allowed AS ident { $$ = mk_node("ViewPathSimple", 2, $1, $3); } -; - -block_item -: item_fn -| item_unsafe_fn -| item_mod -| item_foreign_mod { $$ = mk_node("ItemForeignMod", 1, $1); } -| item_struct -| item_enum -| item_union -| item_trait -| item_impl -; - -maybe_ty_ascription -: ':' ty_sum { $$ = $2; } -| %empty { $$ = mk_none(); } -; - -maybe_init_expr -: '=' expr { $$ = $2; } -| %empty { $$ = mk_none(); } -; - -// structs -item_struct -: STRUCT ident generic_params maybe_where_clause struct_decl_args -{ - $$ = mk_node("ItemStruct", 4, $2, $3, $4, $5); -} -| STRUCT ident generic_params struct_tuple_args maybe_where_clause ';' -{ - $$ = mk_node("ItemStruct", 4, $2, $3, $4, $5); -} -| STRUCT ident generic_params maybe_where_clause ';' -{ - $$ = mk_node("ItemStruct", 3, $2, $3, $4); -} -; - -struct_decl_args -: '{' struct_decl_fields '}' { $$ = $2; } -| '{' struct_decl_fields ',' '}' { $$ = $2; } -; - -struct_tuple_args -: '(' struct_tuple_fields ')' { $$ = $2; } -| '(' struct_tuple_fields ',' ')' { $$ = $2; } -; - -struct_decl_fields -: struct_decl_field { $$ = mk_node("StructFields", 1, $1); } -| struct_decl_fields ',' struct_decl_field { $$ = ext_node($1, 1, $3); } -| %empty { $$ = mk_none(); } -; - -struct_decl_field -: attrs_and_vis ident ':' ty_sum { $$ = mk_node("StructField", 3, $1, $2, $4); } -; - -struct_tuple_fields -: struct_tuple_field { $$ = mk_node("StructFields", 1, $1); } -| struct_tuple_fields ',' struct_tuple_field { $$ = ext_node($1, 1, $3); } -| %empty { $$ = mk_none(); } -; - -struct_tuple_field -: attrs_and_vis ty_sum { $$ = mk_node("StructField", 2, $1, $2); } -; - -// enums -item_enum -: ENUM ident generic_params maybe_where_clause '{' enum_defs '}' { $$ = mk_node("ItemEnum", 0); } -| ENUM ident generic_params maybe_where_clause '{' enum_defs ',' '}' { $$ = mk_node("ItemEnum", 0); } -; - -enum_defs -: enum_def { $$ = mk_node("EnumDefs", 1, $1); } -| enum_defs ',' enum_def { $$ = ext_node($1, 1, $3); } -| %empty { $$ = mk_none(); } -; - -enum_def -: attrs_and_vis ident enum_args { $$ = mk_node("EnumDef", 
3, $1, $2, $3); } -; - -enum_args -: '{' struct_decl_fields '}' { $$ = mk_node("EnumArgs", 1, $2); } -| '{' struct_decl_fields ',' '}' { $$ = mk_node("EnumArgs", 1, $2); } -| '(' maybe_ty_sums ')' { $$ = mk_node("EnumArgs", 1, $2); } -| '=' expr { $$ = mk_node("EnumArgs", 1, $2); } -| %empty { $$ = mk_none(); } -; - -// unions -item_union -: UNION ident generic_params maybe_where_clause '{' struct_decl_fields '}' { $$ = mk_node("ItemUnion", 0); } -| UNION ident generic_params maybe_where_clause '{' struct_decl_fields ',' '}' { $$ = mk_node("ItemUnion", 0); } - -item_mod -: MOD ident ';' { $$ = mk_node("ItemMod", 1, $2); } -| MOD ident '{' maybe_mod_items '}' { $$ = mk_node("ItemMod", 2, $2, $4); } -| MOD ident '{' inner_attrs maybe_mod_items '}' { $$ = mk_node("ItemMod", 3, $2, $4, $5); } -; - -item_foreign_mod -: EXTERN maybe_abi '{' maybe_foreign_items '}' { $$ = mk_node("ItemForeignMod", 1, $4); } -| EXTERN maybe_abi '{' inner_attrs maybe_foreign_items '}' { $$ = mk_node("ItemForeignMod", 2, $4, $5); } -; - -maybe_abi -: str -| %empty { $$ = mk_none(); } -; - -maybe_foreign_items -: foreign_items -| %empty { $$ = mk_none(); } -; - -foreign_items -: foreign_item { $$ = mk_node("ForeignItems", 1, $1); } -| foreign_items foreign_item { $$ = ext_node($1, 1, $2); } -; - -foreign_item -: attrs_and_vis STATIC item_foreign_static { $$ = mk_node("ForeignItem", 2, $1, $3); } -| attrs_and_vis item_foreign_fn { $$ = mk_node("ForeignItem", 2, $1, $2); } -| attrs_and_vis UNSAFE item_foreign_fn { $$ = mk_node("ForeignItem", 2, $1, $3); } -; - -item_foreign_static -: maybe_mut ident ':' ty ';' { $$ = mk_node("StaticItem", 3, $1, $2, $4); } -; - -item_foreign_fn -: FN ident generic_params fn_decl_allow_variadic maybe_where_clause ';' { $$ = mk_node("ForeignFn", 4, $2, $3, $4, $5); } -; - -fn_decl_allow_variadic -: fn_params_allow_variadic ret_ty { $$ = mk_node("FnDecl", 2, $1, $2); } -; - -fn_params_allow_variadic -: '(' ')' { $$ = mk_none(); } -| '(' params ')' { $$ = $2; } -| '(' params ',' ')' { $$ = $2; } -| '(' params ',' DOTDOTDOT ')' { $$ = $2; } -; - -visibility -: PUB { $$ = mk_atom("Public"); } -| %empty { $$ = mk_atom("Inherited"); } -; - -idents_or_self -: ident_or_self { $$ = mk_node("IdentsOrSelf", 1, $1); } -| idents_or_self AS ident { $$ = mk_node("IdentsOrSelf", 2, $1, $3); } -| idents_or_self ',' ident_or_self { $$ = ext_node($1, 1, $3); } -; - -ident_or_self -: ident -| SELF { $$ = mk_atom(yytext); } -; - -item_type -: TYPE ident generic_params maybe_where_clause '=' ty_sum ';' { $$ = mk_node("ItemTy", 4, $2, $3, $4, $6); } -; - -for_sized -: FOR '?' ident { $$ = mk_node("ForSized", 1, $3); } -| FOR ident '?' 
{ $$ = mk_node("ForSized", 1, $2); } -| %empty { $$ = mk_none(); } -; - -item_trait -: maybe_unsafe TRAIT ident generic_params for_sized maybe_ty_param_bounds maybe_where_clause '{' maybe_trait_items '}' -{ - $$ = mk_node("ItemTrait", 7, $1, $3, $4, $5, $6, $7, $9); -} -; - -maybe_trait_items -: trait_items -| %empty { $$ = mk_none(); } -; - -trait_items -: trait_item { $$ = mk_node("TraitItems", 1, $1); } -| trait_items trait_item { $$ = ext_node($1, 1, $2); } -; - -trait_item -: trait_const -| trait_type -| trait_method -| maybe_outer_attrs item_macro { $$ = mk_node("TraitMacroItem", 2, $1, $2); } -; - -trait_const -: maybe_outer_attrs CONST ident maybe_ty_ascription maybe_const_default ';' { $$ = mk_node("ConstTraitItem", 4, $1, $3, $4, $5); } -; - -maybe_const_default -: '=' expr { $$ = mk_node("ConstDefault", 1, $2); } -| %empty { $$ = mk_none(); } -; - -trait_type -: maybe_outer_attrs TYPE ty_param ';' { $$ = mk_node("TypeTraitItem", 2, $1, $3); } -; - -maybe_unsafe -: UNSAFE { $$ = mk_atom("Unsafe"); } -| %empty { $$ = mk_none(); } -; - -maybe_default_maybe_unsafe -: DEFAULT UNSAFE { $$ = mk_atom("DefaultUnsafe"); } -| DEFAULT { $$ = mk_atom("Default"); } -| UNSAFE { $$ = mk_atom("Unsafe"); } -| %empty { $$ = mk_none(); } - -trait_method -: type_method { $$ = mk_node("Required", 1, $1); } -| method { $$ = mk_node("Provided", 1, $1); } -; - -type_method -: maybe_outer_attrs maybe_unsafe FN ident generic_params fn_decl_with_self_allow_anon_params maybe_where_clause ';' -{ - $$ = mk_node("TypeMethod", 6, $1, $2, $4, $5, $6, $7); -} -| maybe_outer_attrs CONST maybe_unsafe FN ident generic_params fn_decl_with_self_allow_anon_params maybe_where_clause ';' -{ - $$ = mk_node("TypeMethod", 6, $1, $3, $5, $6, $7, $8); -} -| maybe_outer_attrs maybe_unsafe EXTERN maybe_abi FN ident generic_params fn_decl_with_self_allow_anon_params maybe_where_clause ';' -{ - $$ = mk_node("TypeMethod", 7, $1, $2, $4, $6, $7, $8, $9); -} -; - -method -: maybe_outer_attrs maybe_unsafe FN ident generic_params fn_decl_with_self_allow_anon_params maybe_where_clause inner_attrs_and_block -{ - $$ = mk_node("Method", 7, $1, $2, $4, $5, $6, $7, $8); -} -| maybe_outer_attrs CONST maybe_unsafe FN ident generic_params fn_decl_with_self_allow_anon_params maybe_where_clause inner_attrs_and_block -{ - $$ = mk_node("Method", 7, $1, $3, $5, $6, $7, $8, $9); -} -| maybe_outer_attrs maybe_unsafe EXTERN maybe_abi FN ident generic_params fn_decl_with_self_allow_anon_params maybe_where_clause inner_attrs_and_block -{ - $$ = mk_node("Method", 8, $1, $2, $4, $6, $7, $8, $9, $10); -} -; - -impl_method -: attrs_and_vis maybe_default maybe_unsafe FN ident generic_params fn_decl_with_self maybe_where_clause inner_attrs_and_block -{ - $$ = mk_node("Method", 8, $1, $2, $3, $5, $6, $7, $8, $9); -} -| attrs_and_vis maybe_default CONST maybe_unsafe FN ident generic_params fn_decl_with_self maybe_where_clause inner_attrs_and_block -{ - $$ = mk_node("Method", 8, $1, $2, $4, $6, $7, $8, $9, $10); -} -| attrs_and_vis maybe_default maybe_unsafe EXTERN maybe_abi FN ident generic_params fn_decl_with_self maybe_where_clause inner_attrs_and_block -{ - $$ = mk_node("Method", 9, $1, $2, $3, $5, $7, $8, $9, $10, $11); -} -; - -// There are two forms of impl: -// -// impl (<...>)? TY { ... } -// impl (<...>)? TRAIT for TY { ... 
} -// -// Unfortunately since TY can begin with '<' itself -- as part of a -// TyQualifiedPath type -- there's an s/r conflict when we see '<' after IMPL: -// should we reduce one of the early rules of TY (such as maybe_once) -// or shall we continue shifting into the generic_params list for the -// impl? -// -// The production parser disambiguates a different case here by -// permitting / requiring the user to provide parens around types when -// they are ambiguous with traits. We do the same here, regrettably, -// by splitting ty into ty and ty_prim. -item_impl -: maybe_default_maybe_unsafe IMPL generic_params ty_prim_sum maybe_where_clause '{' maybe_inner_attrs maybe_impl_items '}' -{ - $$ = mk_node("ItemImpl", 6, $1, $3, $4, $5, $7, $8); -} -| maybe_default_maybe_unsafe IMPL generic_params '(' ty ')' maybe_where_clause '{' maybe_inner_attrs maybe_impl_items '}' -{ - $$ = mk_node("ItemImpl", 6, $1, $3, 5, $6, $9, $10); -} -| maybe_default_maybe_unsafe IMPL generic_params trait_ref FOR ty_sum maybe_where_clause '{' maybe_inner_attrs maybe_impl_items '}' -{ - $$ = mk_node("ItemImpl", 6, $3, $4, $6, $7, $9, $10); -} -| maybe_default_maybe_unsafe IMPL generic_params '!' trait_ref FOR ty_sum maybe_where_clause '{' maybe_inner_attrs maybe_impl_items '}' -{ - $$ = mk_node("ItemImplNeg", 7, $1, $3, $5, $7, $8, $10, $11); -} -| maybe_default_maybe_unsafe IMPL generic_params trait_ref FOR DOTDOT '{' '}' -{ - $$ = mk_node("ItemImplDefault", 3, $1, $3, $4); -} -| maybe_default_maybe_unsafe IMPL generic_params '!' trait_ref FOR DOTDOT '{' '}' -{ - $$ = mk_node("ItemImplDefaultNeg", 3, $1, $3, $4); -} -; - -maybe_impl_items -: impl_items -| %empty { $$ = mk_none(); } -; - -impl_items -: impl_item { $$ = mk_node("ImplItems", 1, $1); } -| impl_item impl_items { $$ = ext_node($1, 1, $2); } -; - -impl_item -: impl_method -| attrs_and_vis item_macro { $$ = mk_node("ImplMacroItem", 2, $1, $2); } -| impl_const -| impl_type -; - -maybe_default -: DEFAULT { $$ = mk_atom("Default"); } -| %empty { $$ = mk_none(); } -; - -impl_const -: attrs_and_vis maybe_default item_const { $$ = mk_node("ImplConst", 3, $1, $2, $3); } -; - -impl_type -: attrs_and_vis maybe_default TYPE ident generic_params '=' ty_sum ';' { $$ = mk_node("ImplType", 5, $1, $2, $4, $5, $7); } -; - -item_fn -: FN ident generic_params fn_decl maybe_where_clause inner_attrs_and_block -{ - $$ = mk_node("ItemFn", 5, $2, $3, $4, $5, $6); -} -| CONST FN ident generic_params fn_decl maybe_where_clause inner_attrs_and_block -{ - $$ = mk_node("ItemFn", 5, $3, $4, $5, $6, $7); -} -; - -item_unsafe_fn -: UNSAFE FN ident generic_params fn_decl maybe_where_clause inner_attrs_and_block -{ - $$ = mk_node("ItemUnsafeFn", 5, $3, $4, $5, $6, $7); -} -| CONST UNSAFE FN ident generic_params fn_decl maybe_where_clause inner_attrs_and_block -{ - $$ = mk_node("ItemUnsafeFn", 5, $4, $5, $6, $7, $8); -} -| UNSAFE EXTERN maybe_abi FN ident generic_params fn_decl maybe_where_clause inner_attrs_and_block -{ - $$ = mk_node("ItemUnsafeFn", 6, $3, $5, $6, $7, $8, $9); -} -; - -fn_decl -: fn_params ret_ty { $$ = mk_node("FnDecl", 2, $1, $2); } -; - -fn_decl_with_self -: fn_params_with_self ret_ty { $$ = mk_node("FnDecl", 2, $1, $2); } -; - -fn_decl_with_self_allow_anon_params -: fn_anon_params_with_self ret_ty { $$ = mk_node("FnDecl", 2, $1, $2); } -; - -fn_params -: '(' maybe_params ')' { $$ = $2; } -; - -fn_anon_params -: '(' anon_param anon_params_allow_variadic_tail ')' { $$ = ext_node($2, 1, $3); } -| '(' ')' { $$ = mk_none(); } -; - -fn_params_with_self -: '(' maybe_mut 
SELF maybe_ty_ascription maybe_comma_params ')' { $$ = mk_node("SelfLower", 3, $2, $4, $5); } -| '(' '&' maybe_mut SELF maybe_ty_ascription maybe_comma_params ')' { $$ = mk_node("SelfRegion", 3, $3, $5, $6); } -| '(' '&' lifetime maybe_mut SELF maybe_ty_ascription maybe_comma_params ')' { $$ = mk_node("SelfRegion", 4, $3, $4, $6, $7); } -| '(' maybe_params ')' { $$ = mk_node("SelfStatic", 1, $2); } -; - -fn_anon_params_with_self -: '(' maybe_mut SELF maybe_ty_ascription maybe_comma_anon_params ')' { $$ = mk_node("SelfLower", 3, $2, $4, $5); } -| '(' '&' maybe_mut SELF maybe_ty_ascription maybe_comma_anon_params ')' { $$ = mk_node("SelfRegion", 3, $3, $5, $6); } -| '(' '&' lifetime maybe_mut SELF maybe_ty_ascription maybe_comma_anon_params ')' { $$ = mk_node("SelfRegion", 4, $3, $4, $6, $7); } -| '(' maybe_anon_params ')' { $$ = mk_node("SelfStatic", 1, $2); } -; - -maybe_params -: params -| params ',' -| %empty { $$ = mk_none(); } -; - -params -: param { $$ = mk_node("Args", 1, $1); } -| params ',' param { $$ = ext_node($1, 1, $3); } -; - -param -: pat ':' ty_sum { $$ = mk_node("Arg", 2, $1, $3); } -; - -inferrable_params -: inferrable_param { $$ = mk_node("InferrableParams", 1, $1); } -| inferrable_params ',' inferrable_param { $$ = ext_node($1, 1, $3); } -; - -inferrable_param -: pat maybe_ty_ascription { $$ = mk_node("InferrableParam", 2, $1, $2); } -; - -maybe_comma_params -: ',' { $$ = mk_none(); } -| ',' params { $$ = $2; } -| ',' params ',' { $$ = $2; } -| %empty { $$ = mk_none(); } -; - -maybe_comma_anon_params -: ',' { $$ = mk_none(); } -| ',' anon_params { $$ = $2; } -| ',' anon_params ',' { $$ = $2; } -| %empty { $$ = mk_none(); } -; - -maybe_anon_params -: anon_params -| anon_params ',' -| %empty { $$ = mk_none(); } -; - -anon_params -: anon_param { $$ = mk_node("Args", 1, $1); } -| anon_params ',' anon_param { $$ = ext_node($1, 1, $3); } -; - -// anon means it's allowed to be anonymous (type-only), but it can -// still have a name -anon_param -: named_arg ':' ty { $$ = mk_node("Arg", 2, $1, $3); } -| ty -; - -anon_params_allow_variadic_tail -: ',' DOTDOTDOT { $$ = mk_none(); } -| ',' anon_param anon_params_allow_variadic_tail { $$ = mk_node("Args", 2, $2, $3); } -| %empty { $$ = mk_none(); } -; - -named_arg -: ident -| UNDERSCORE { $$ = mk_atom("PatWild"); } -| '&' ident { $$ = $2; } -| '&' UNDERSCORE { $$ = mk_atom("PatWild"); } -| ANDAND ident { $$ = $2; } -| ANDAND UNDERSCORE { $$ = mk_atom("PatWild"); } -| MUT ident { $$ = $2; } -; - -ret_ty -: RARROW '!' 
{ $$ = mk_none(); } -| RARROW ty { $$ = mk_node("ret-ty", 1, $2); } -| %prec IDENT %empty { $$ = mk_none(); } -; - -generic_params -: '<' '>' { $$ = mk_node("Generics", 2, mk_none(), mk_none()); } -| '<' lifetimes '>' { $$ = mk_node("Generics", 2, $2, mk_none()); } -| '<' lifetimes ',' '>' { $$ = mk_node("Generics", 2, $2, mk_none()); } -| '<' lifetimes SHR { push_back('>'); $$ = mk_node("Generics", 2, $2, mk_none()); } -| '<' lifetimes ',' SHR { push_back('>'); $$ = mk_node("Generics", 2, $2, mk_none()); } -| '<' lifetimes ',' ty_params '>' { $$ = mk_node("Generics", 2, $2, $4); } -| '<' lifetimes ',' ty_params ',' '>' { $$ = mk_node("Generics", 2, $2, $4); } -| '<' lifetimes ',' ty_params SHR { push_back('>'); $$ = mk_node("Generics", 2, $2, $4); } -| '<' lifetimes ',' ty_params ',' SHR { push_back('>'); $$ = mk_node("Generics", 2, $2, $4); } -| '<' ty_params '>' { $$ = mk_node("Generics", 2, mk_none(), $2); } -| '<' ty_params ',' '>' { $$ = mk_node("Generics", 2, mk_none(), $2); } -| '<' ty_params SHR { push_back('>'); $$ = mk_node("Generics", 2, mk_none(), $2); } -| '<' ty_params ',' SHR { push_back('>'); $$ = mk_node("Generics", 2, mk_none(), $2); } -| %empty { $$ = mk_none(); } -; - -maybe_where_clause -: %empty { $$ = mk_none(); } -| where_clause -; - -where_clause -: WHERE where_predicates { $$ = mk_node("WhereClause", 1, $2); } -| WHERE where_predicates ',' { $$ = mk_node("WhereClause", 1, $2); } -; - -where_predicates -: where_predicate { $$ = mk_node("WherePredicates", 1, $1); } -| where_predicates ',' where_predicate { $$ = ext_node($1, 1, $3); } -; - -where_predicate -: maybe_for_lifetimes lifetime ':' bounds { $$ = mk_node("WherePredicate", 3, $1, $2, $4); } -| maybe_for_lifetimes ty ':' ty_param_bounds { $$ = mk_node("WherePredicate", 3, $1, $2, $4); } -; - -maybe_for_lifetimes -: FOR '<' lifetimes '>' { $$ = mk_none(); } -| %prec FORTYPE %empty { $$ = mk_none(); } - -ty_params -: ty_param { $$ = mk_node("TyParams", 1, $1); } -| ty_params ',' ty_param { $$ = ext_node($1, 1, $3); } -; - -// A path with no type parameters; e.g. `foo::bar::Baz` -// -// These show up in 'use' view-items, because these are processed -// without respect to types. -path_no_types_allowed -: ident { $$ = mk_node("ViewPath", 1, $1); } -| MOD_SEP ident { $$ = mk_node("ViewPath", 1, $2); } -| SELF { $$ = mk_node("ViewPath", 1, mk_atom("Self")); } -| MOD_SEP SELF { $$ = mk_node("ViewPath", 1, mk_atom("Self")); } -| SUPER { $$ = mk_node("ViewPath", 1, mk_atom("Super")); } -| MOD_SEP SUPER { $$ = mk_node("ViewPath", 1, mk_atom("Super")); } -| path_no_types_allowed MOD_SEP ident { $$ = ext_node($1, 1, $3); } -; - -// A path with a lifetime and type parameters, with no double colons -// before the type parameters; e.g. `foo::bar<'a>::Baz` -// -// These show up in "trait references", the components of -// type-parameter bounds lists, as well as in the prefix of the -// path_generic_args_and_bounds rule, which is the full form of a -// named typed expression. -// -// They do not have (nor need) an extra '::' before '<' because -// unlike in expr context, there are no "less-than" type exprs to -// be ambiguous with. 
-path_generic_args_without_colons -: %prec IDENT - ident { $$ = mk_node("components", 1, $1); } -| %prec IDENT - ident generic_args { $$ = mk_node("components", 2, $1, $2); } -| %prec IDENT - ident '(' maybe_ty_sums ')' ret_ty { $$ = mk_node("components", 2, $1, $3); } -| %prec IDENT - path_generic_args_without_colons MOD_SEP ident { $$ = ext_node($1, 1, $3); } -| %prec IDENT - path_generic_args_without_colons MOD_SEP ident generic_args { $$ = ext_node($1, 2, $3, $4); } -| %prec IDENT - path_generic_args_without_colons MOD_SEP ident '(' maybe_ty_sums ')' ret_ty { $$ = ext_node($1, 2, $3, $5); } -; - -generic_args -: '<' generic_values '>' { $$ = $2; } -| '<' generic_values SHR { push_back('>'); $$ = $2; } -| '<' generic_values GE { push_back('='); $$ = $2; } -| '<' generic_values SHREQ { push_back('>'); push_back('='); $$ = $2; } -// If generic_args starts with "<<", the first arg must be a -// TyQualifiedPath because that's the only type that can start with a -// '<'. This rule parses that as the first ty_sum and then continues -// with the rest of generic_values. -| SHL ty_qualified_path_and_generic_values '>' { $$ = $2; } -| SHL ty_qualified_path_and_generic_values SHR { push_back('>'); $$ = $2; } -| SHL ty_qualified_path_and_generic_values GE { push_back('='); $$ = $2; } -| SHL ty_qualified_path_and_generic_values SHREQ { push_back('>'); push_back('='); $$ = $2; } -; - -generic_values -: maybe_ty_sums_and_or_bindings { $$ = mk_node("GenericValues", 1, $1); } -; - -maybe_ty_sums_and_or_bindings -: ty_sums -| ty_sums ',' -| ty_sums ',' bindings { $$ = mk_node("TySumsAndBindings", 2, $1, $3); } -| bindings -| bindings ',' -| %empty { $$ = mk_none(); } -; - -maybe_bindings -: ',' bindings { $$ = $2; } -| %empty { $$ = mk_none(); } -; - -//////////////////////////////////////////////////////////////////////// -// Part 2: Patterns -//////////////////////////////////////////////////////////////////////// - -pat -: UNDERSCORE { $$ = mk_atom("PatWild"); } -| '&' pat { $$ = mk_node("PatRegion", 1, $2); } -| '&' MUT pat { $$ = mk_node("PatRegion", 1, $3); } -| ANDAND pat { $$ = mk_node("PatRegion", 1, mk_node("PatRegion", 1, $2)); } -| '(' ')' { $$ = mk_atom("PatUnit"); } -| '(' pat_tup ')' { $$ = mk_node("PatTup", 1, $2); } -| '[' pat_vec ']' { $$ = mk_node("PatVec", 1, $2); } -| lit_or_path -| lit_or_path DOTDOTDOT lit_or_path { $$ = mk_node("PatRange", 2, $1, $3); } -| path_expr '{' pat_struct '}' { $$ = mk_node("PatStruct", 2, $1, $3); } -| path_expr '(' ')' { $$ = mk_node("PatEnum", 2, $1, mk_none()); } -| path_expr '(' pat_tup ')' { $$ = mk_node("PatEnum", 2, $1, $3); } -| path_expr '!' 
maybe_ident delimited_token_trees { $$ = mk_node("PatMac", 3, $1, $3, $4); } -| binding_mode ident { $$ = mk_node("PatIdent", 2, $1, $2); } -| ident '@' pat { $$ = mk_node("PatIdent", 3, mk_node("BindByValue", 1, mk_atom("MutImmutable")), $1, $3); } -| binding_mode ident '@' pat { $$ = mk_node("PatIdent", 3, $1, $2, $4); } -| BOX pat { $$ = mk_node("PatUniq", 1, $2); } -| '<' ty_sum maybe_as_trait_ref '>' MOD_SEP ident { $$ = mk_node("PatQualifiedPath", 3, $2, $3, $6); } -| SHL ty_sum maybe_as_trait_ref '>' MOD_SEP ident maybe_as_trait_ref '>' MOD_SEP ident -{ - $$ = mk_node("PatQualifiedPath", 3, mk_node("PatQualifiedPath", 3, $2, $3, $6), $7, $10); -} -; - -pats_or -: pat { $$ = mk_node("Pats", 1, $1); } -| pats_or '|' pat { $$ = ext_node($1, 1, $3); } -; - -binding_mode -: REF { $$ = mk_node("BindByRef", 1, mk_atom("MutImmutable")); } -| REF MUT { $$ = mk_node("BindByRef", 1, mk_atom("MutMutable")); } -| MUT { $$ = mk_node("BindByValue", 1, mk_atom("MutMutable")); } -; - -lit_or_path -: path_expr { $$ = mk_node("PatLit", 1, $1); } -| lit { $$ = mk_node("PatLit", 1, $1); } -| '-' lit { $$ = mk_node("PatLit", 1, $2); } -; - -pat_field -: ident { $$ = mk_node("PatField", 1, $1); } -| binding_mode ident { $$ = mk_node("PatField", 2, $1, $2); } -| BOX ident { $$ = mk_node("PatField", 2, mk_atom("box"), $2); } -| BOX binding_mode ident { $$ = mk_node("PatField", 3, mk_atom("box"), $2, $3); } -| ident ':' pat { $$ = mk_node("PatField", 2, $1, $3); } -| binding_mode ident ':' pat { $$ = mk_node("PatField", 3, $1, $2, $4); } -| LIT_INTEGER ':' pat { $$ = mk_node("PatField", 2, mk_atom(yytext), $3); } -; - -pat_fields -: pat_field { $$ = mk_node("PatFields", 1, $1); } -| pat_fields ',' pat_field { $$ = ext_node($1, 1, $3); } -; - -pat_struct -: pat_fields { $$ = mk_node("PatStruct", 2, $1, mk_atom("false")); } -| pat_fields ',' { $$ = mk_node("PatStruct", 2, $1, mk_atom("false")); } -| pat_fields ',' DOTDOT { $$ = mk_node("PatStruct", 2, $1, mk_atom("true")); } -| DOTDOT { $$ = mk_node("PatStruct", 1, mk_atom("true")); } -| %empty { $$ = mk_node("PatStruct", 1, mk_none()); } -; - -pat_tup -: pat_tup_elts { $$ = mk_node("PatTup", 2, $1, mk_none()); } -| pat_tup_elts ',' { $$ = mk_node("PatTup", 2, $1, mk_none()); } -| pat_tup_elts DOTDOT { $$ = mk_node("PatTup", 2, $1, mk_none()); } -| pat_tup_elts ',' DOTDOT { $$ = mk_node("PatTup", 2, $1, mk_none()); } -| pat_tup_elts DOTDOT ',' pat_tup_elts { $$ = mk_node("PatTup", 2, $1, $4); } -| pat_tup_elts DOTDOT ',' pat_tup_elts ',' { $$ = mk_node("PatTup", 2, $1, $4); } -| pat_tup_elts ',' DOTDOT ',' pat_tup_elts { $$ = mk_node("PatTup", 2, $1, $5); } -| pat_tup_elts ',' DOTDOT ',' pat_tup_elts ',' { $$ = mk_node("PatTup", 2, $1, $5); } -| DOTDOT ',' pat_tup_elts { $$ = mk_node("PatTup", 2, mk_none(), $3); } -| DOTDOT ',' pat_tup_elts ',' { $$ = mk_node("PatTup", 2, mk_none(), $3); } -| DOTDOT { $$ = mk_node("PatTup", 2, mk_none(), mk_none()); } -; - -pat_tup_elts -: pat { $$ = mk_node("PatTupElts", 1, $1); } -| pat_tup_elts ',' pat { $$ = ext_node($1, 1, $3); } -; - -pat_vec -: pat_vec_elts { $$ = mk_node("PatVec", 2, $1, mk_none()); } -| pat_vec_elts ',' { $$ = mk_node("PatVec", 2, $1, mk_none()); } -| pat_vec_elts DOTDOT { $$ = mk_node("PatVec", 2, $1, mk_none()); } -| pat_vec_elts ',' DOTDOT { $$ = mk_node("PatVec", 2, $1, mk_none()); } -| pat_vec_elts DOTDOT ',' pat_vec_elts { $$ = mk_node("PatVec", 2, $1, $4); } -| pat_vec_elts DOTDOT ',' pat_vec_elts ',' { $$ = mk_node("PatVec", 2, $1, $4); } -| pat_vec_elts ',' DOTDOT ',' pat_vec_elts { $$ = 
mk_node("PatVec", 2, $1, $5); } -| pat_vec_elts ',' DOTDOT ',' pat_vec_elts ',' { $$ = mk_node("PatVec", 2, $1, $5); } -| DOTDOT ',' pat_vec_elts { $$ = mk_node("PatVec", 2, mk_none(), $3); } -| DOTDOT ',' pat_vec_elts ',' { $$ = mk_node("PatVec", 2, mk_none(), $3); } -| DOTDOT { $$ = mk_node("PatVec", 2, mk_none(), mk_none()); } -| %empty { $$ = mk_node("PatVec", 2, mk_none(), mk_none()); } -; - -pat_vec_elts -: pat { $$ = mk_node("PatVecElts", 1, $1); } -| pat_vec_elts ',' pat { $$ = ext_node($1, 1, $3); } -; - -//////////////////////////////////////////////////////////////////////// -// Part 3: Types -//////////////////////////////////////////////////////////////////////// - -ty -: ty_prim -| ty_closure -| '<' ty_sum maybe_as_trait_ref '>' MOD_SEP ident { $$ = mk_node("TyQualifiedPath", 3, $2, $3, $6); } -| SHL ty_sum maybe_as_trait_ref '>' MOD_SEP ident maybe_as_trait_ref '>' MOD_SEP ident { $$ = mk_node("TyQualifiedPath", 3, mk_node("TyQualifiedPath", 3, $2, $3, $6), $7, $10); } -| '(' ty_sums ')' { $$ = mk_node("TyTup", 1, $2); } -| '(' ty_sums ',' ')' { $$ = mk_node("TyTup", 1, $2); } -| '(' ')' { $$ = mk_atom("TyNil"); } -; - -ty_prim -: %prec IDENT path_generic_args_without_colons { $$ = mk_node("TyPath", 2, mk_node("global", 1, mk_atom("false")), $1); } -| %prec IDENT MOD_SEP path_generic_args_without_colons { $$ = mk_node("TyPath", 2, mk_node("global", 1, mk_atom("true")), $2); } -| %prec IDENT SELF MOD_SEP path_generic_args_without_colons { $$ = mk_node("TyPath", 2, mk_node("self", 1, mk_atom("true")), $3); } -| %prec IDENT path_generic_args_without_colons '!' maybe_ident delimited_token_trees { $$ = mk_node("TyMacro", 3, $1, $3, $4); } -| %prec IDENT MOD_SEP path_generic_args_without_colons '!' maybe_ident delimited_token_trees { $$ = mk_node("TyMacro", 3, $2, $4, $5); } -| BOX ty { $$ = mk_node("TyBox", 1, $2); } -| '*' maybe_mut_or_const ty { $$ = mk_node("TyPtr", 2, $2, $3); } -| '&' ty { $$ = mk_node("TyRptr", 2, mk_atom("MutImmutable"), $2); } -| '&' MUT ty { $$ = mk_node("TyRptr", 2, mk_atom("MutMutable"), $3); } -| ANDAND ty { $$ = mk_node("TyRptr", 1, mk_node("TyRptr", 2, mk_atom("MutImmutable"), $2)); } -| ANDAND MUT ty { $$ = mk_node("TyRptr", 1, mk_node("TyRptr", 2, mk_atom("MutMutable"), $3)); } -| '&' lifetime maybe_mut ty { $$ = mk_node("TyRptr", 3, $2, $3, $4); } -| ANDAND lifetime maybe_mut ty { $$ = mk_node("TyRptr", 1, mk_node("TyRptr", 3, $2, $3, $4)); } -| '[' ty ']' { $$ = mk_node("TyVec", 1, $2); } -| '[' ty ',' DOTDOT expr ']' { $$ = mk_node("TyFixedLengthVec", 2, $2, $5); } -| '[' ty ';' expr ']' { $$ = mk_node("TyFixedLengthVec", 2, $2, $4); } -| TYPEOF '(' expr ')' { $$ = mk_node("TyTypeof", 1, $3); } -| UNDERSCORE { $$ = mk_atom("TyInfer"); } -| ty_bare_fn -| for_in_type -; - -ty_bare_fn -: FN ty_fn_decl { $$ = $2; } -| UNSAFE FN ty_fn_decl { $$ = $3; } -| EXTERN maybe_abi FN ty_fn_decl { $$ = $4; } -| UNSAFE EXTERN maybe_abi FN ty_fn_decl { $$ = $5; } -; - -ty_fn_decl -: generic_params fn_anon_params ret_ty { $$ = mk_node("TyFnDecl", 3, $1, $2, $3); } -; - -ty_closure -: UNSAFE '|' anon_params '|' maybe_bounds ret_ty { $$ = mk_node("TyClosure", 3, $3, $5, $6); } -| '|' anon_params '|' maybe_bounds ret_ty { $$ = mk_node("TyClosure", 3, $2, $4, $5); } -| UNSAFE OROR maybe_bounds ret_ty { $$ = mk_node("TyClosure", 2, $3, $4); } -| OROR maybe_bounds ret_ty { $$ = mk_node("TyClosure", 2, $2, $3); } -; - -for_in_type -: FOR '<' maybe_lifetimes '>' for_in_type_suffix { $$ = mk_node("ForInType", 2, $3, $5); } -; - -for_in_type_suffix -: ty_bare_fn -| 
trait_ref -| ty_closure -; - -maybe_mut -: MUT { $$ = mk_atom("MutMutable"); } -| %prec MUT %empty { $$ = mk_atom("MutImmutable"); } -; - -maybe_mut_or_const -: MUT { $$ = mk_atom("MutMutable"); } -| CONST { $$ = mk_atom("MutImmutable"); } -| %empty { $$ = mk_atom("MutImmutable"); } -; - -ty_qualified_path_and_generic_values -: ty_qualified_path maybe_bindings -{ - $$ = mk_node("GenericValues", 3, mk_none(), mk_node("TySums", 1, mk_node("TySum", 1, $1)), $2); -} -| ty_qualified_path ',' ty_sums maybe_bindings -{ - $$ = mk_node("GenericValues", 3, mk_none(), mk_node("TySums", 2, $1, $3), $4); -} -; - -ty_qualified_path -: ty_sum AS trait_ref '>' MOD_SEP ident { $$ = mk_node("TyQualifiedPath", 3, $1, $3, $6); } -| ty_sum AS trait_ref '>' MOD_SEP ident '+' ty_param_bounds { $$ = mk_node("TyQualifiedPath", 3, $1, $3, $6); } -; - -maybe_ty_sums -: ty_sums -| ty_sums ',' -| %empty { $$ = mk_none(); } -; - -ty_sums -: ty_sum { $$ = mk_node("TySums", 1, $1); } -| ty_sums ',' ty_sum { $$ = ext_node($1, 1, $3); } -; - -ty_sum -: ty_sum_elt { $$ = mk_node("TySum", 1, $1); } -| ty_sum '+' ty_sum_elt { $$ = ext_node($1, 1, $3); } -; - -ty_sum_elt -: ty -| lifetime -; - -ty_prim_sum -: ty_prim_sum_elt { $$ = mk_node("TySum", 1, $1); } -| ty_prim_sum '+' ty_prim_sum_elt { $$ = ext_node($1, 1, $3); } -; - -ty_prim_sum_elt -: ty_prim -| lifetime -; - -maybe_ty_param_bounds -: ':' ty_param_bounds { $$ = $2; } -| %empty { $$ = mk_none(); } -; - -ty_param_bounds -: boundseq -| %empty { $$ = mk_none(); } -; - -boundseq -: polybound -| boundseq '+' polybound { $$ = ext_node($1, 1, $3); } -; - -polybound -: FOR '<' maybe_lifetimes '>' bound { $$ = mk_node("PolyBound", 2, $3, $5); } -| bound -| '?' FOR '<' maybe_lifetimes '>' bound { $$ = mk_node("PolyBound", 2, $4, $6); } -| '?' bound { $$ = $2; } -; - -bindings -: binding { $$ = mk_node("Bindings", 1, $1); } -| bindings ',' binding { $$ = ext_node($1, 1, $3); } -; - -binding -: ident '=' ty { mk_node("Binding", 2, $1, $3); } -; - -ty_param -: ident maybe_ty_param_bounds maybe_ty_default { $$ = mk_node("TyParam", 3, $1, $2, $3); } -| ident '?' 
ident maybe_ty_param_bounds maybe_ty_default { $$ = mk_node("TyParam", 4, $1, $3, $4, $5); } -; - -maybe_bounds -: %prec SHIFTPLUS - ':' bounds { $$ = $2; } -| %prec SHIFTPLUS %empty { $$ = mk_none(); } -; - -bounds -: bound { $$ = mk_node("bounds", 1, $1); } -| bounds '+' bound { $$ = ext_node($1, 1, $3); } -; - -bound -: lifetime -| trait_ref -; - -maybe_ltbounds -: %prec SHIFTPLUS - ':' ltbounds { $$ = $2; } -| %empty { $$ = mk_none(); } -; - -ltbounds -: lifetime { $$ = mk_node("ltbounds", 1, $1); } -| ltbounds '+' lifetime { $$ = ext_node($1, 1, $3); } -; - -maybe_ty_default -: '=' ty_sum { $$ = mk_node("TyDefault", 1, $2); } -| %empty { $$ = mk_none(); } -; - -maybe_lifetimes -: lifetimes -| lifetimes ',' -| %empty { $$ = mk_none(); } -; - -lifetimes -: lifetime_and_bounds { $$ = mk_node("Lifetimes", 1, $1); } -| lifetimes ',' lifetime_and_bounds { $$ = ext_node($1, 1, $3); } -; - -lifetime_and_bounds -: LIFETIME maybe_ltbounds { $$ = mk_node("lifetime", 2, mk_atom(yytext), $2); } -| STATIC_LIFETIME { $$ = mk_atom("static_lifetime"); } -; - -lifetime -: LIFETIME { $$ = mk_node("lifetime", 1, mk_atom(yytext)); } -| STATIC_LIFETIME { $$ = mk_atom("static_lifetime"); } -; - -trait_ref -: %prec IDENT path_generic_args_without_colons -| %prec IDENT MOD_SEP path_generic_args_without_colons { $$ = $2; } -; - -//////////////////////////////////////////////////////////////////////// -// Part 4: Blocks, statements, and expressions -//////////////////////////////////////////////////////////////////////// - -inner_attrs_and_block -: '{' maybe_inner_attrs maybe_stmts '}' { $$ = mk_node("ExprBlock", 2, $2, $3); } -; - -block -: '{' maybe_stmts '}' { $$ = mk_node("ExprBlock", 1, $2); } -; - -maybe_stmts -: stmts -| stmts nonblock_expr { $$ = ext_node($1, 1, $2); } -| nonblock_expr -| %empty { $$ = mk_none(); } -; - -// There are two sub-grammars within a "stmts: exprs" derivation -// depending on whether each stmt-expr is a block-expr form; this is to -// handle the "semicolon rule" for stmt sequencing that permits -// writing -// -// if foo { bar } 10 -// -// as a sequence of two stmts (one if-expr stmt, one lit-10-expr -// stmt). Unfortunately by permitting juxtaposition of exprs in -// sequence like that, the non-block expr grammar has to have a -// second limited sub-grammar that excludes the prefix exprs that -// are ambiguous with binops. That is to say: -// -// {10} - 1 -// -// should parse as (progn (progn 10) (- 1)) not (- (progn 10) 1), that -// is to say, two statements rather than one, at least according to -// the mainline rust parser. -// -// So we wind up with a 3-way split in exprs that occur in stmt lists: -// block, nonblock-prefix, and nonblock-nonprefix. -// -// In non-stmts contexts, expr can relax this trichotomy. 
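The removed grammar comment above is easier to see with concrete code. As a hedged illustration (ordinary Rust written for this note, not part of the deleted grammar file), both cases it mentions really do parse as two statements:

```rust
// Mirrors the `if foo { bar } 10` case: the `if` is parsed as its own
// statement, and `10` becomes the surrounding block's tail expression.
fn two_statements(foo: bool) -> i32 {
    if foo { println!("bar") }
    10
}

// Mirrors the `{10} - 1` case: a block in statement position ends the
// statement, so `- 1` is parsed as a separate unary-minus tail expression,
// not as a subtraction from the block's value.
fn block_then_negation() -> i32 {
    { println!("block statement"); }
    - 1
}

fn main() {
    assert_eq!(two_statements(false), 10);
    assert_eq!(block_then_negation(), -1);
}
```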
- -stmts -: stmt { $$ = mk_node("stmts", 1, $1); } -| stmts stmt { $$ = ext_node($1, 1, $2); } -; - -stmt -: maybe_outer_attrs let { $$ = $2; } -| stmt_item -| PUB stmt_item { $$ = $2; } -| outer_attrs stmt_item { $$ = $2; } -| outer_attrs PUB stmt_item { $$ = $3; } -| full_block_expr -| maybe_outer_attrs block { $$ = $2; } -| nonblock_expr ';' -| outer_attrs nonblock_expr ';' { $$ = $2; } -| ';' { $$ = mk_none(); } -; - -maybe_exprs -: exprs -| exprs ',' -| %empty { $$ = mk_none(); } -; - -maybe_expr -: expr -| %empty { $$ = mk_none(); } -; - -exprs -: expr { $$ = mk_node("exprs", 1, $1); } -| exprs ',' expr { $$ = ext_node($1, 1, $3); } -; - -path_expr -: path_generic_args_with_colons -| MOD_SEP path_generic_args_with_colons { $$ = $2; } -| SELF MOD_SEP path_generic_args_with_colons { $$ = mk_node("SelfPath", 1, $3); } -; - -// A path with a lifetime and type parameters with double colons before -// the type parameters; e.g. `foo::bar::<'a>::Baz::` -// -// These show up in expr context, in order to disambiguate from "less-than" -// expressions. -path_generic_args_with_colons -: ident { $$ = mk_node("components", 1, $1); } -| SUPER { $$ = mk_atom("Super"); } -| path_generic_args_with_colons MOD_SEP ident { $$ = ext_node($1, 1, $3); } -| path_generic_args_with_colons MOD_SEP SUPER { $$ = ext_node($1, 1, mk_atom("Super")); } -| path_generic_args_with_colons MOD_SEP generic_args { $$ = ext_node($1, 1, $3); } -; - -// the braces-delimited macro is a block_expr so it doesn't appear here -macro_expr -: path_expr '!' maybe_ident parens_delimited_token_trees { $$ = mk_node("MacroExpr", 3, $1, $3, $4); } -| path_expr '!' maybe_ident brackets_delimited_token_trees { $$ = mk_node("MacroExpr", 3, $1, $3, $4); } -; - -nonblock_expr -: lit { $$ = mk_node("ExprLit", 1, $1); } -| %prec IDENT - path_expr { $$ = mk_node("ExprPath", 1, $1); } -| SELF { $$ = mk_node("ExprPath", 1, mk_node("ident", 1, mk_atom("self"))); } -| macro_expr { $$ = mk_node("ExprMac", 1, $1); } -| path_expr '{' struct_expr_fields '}' { $$ = mk_node("ExprStruct", 2, $1, $3); } -| nonblock_expr '?' { $$ = mk_node("ExprTry", 1, $1); } -| nonblock_expr '.' path_generic_args_with_colons { $$ = mk_node("ExprField", 2, $1, $3); } -| nonblock_expr '.' 
LIT_INTEGER { $$ = mk_node("ExprTupleIndex", 1, $1); } -| nonblock_expr '[' maybe_expr ']' { $$ = mk_node("ExprIndex", 2, $1, $3); } -| nonblock_expr '(' maybe_exprs ')' { $$ = mk_node("ExprCall", 2, $1, $3); } -| '[' vec_expr ']' { $$ = mk_node("ExprVec", 1, $2); } -| '(' maybe_exprs ')' { $$ = mk_node("ExprParen", 1, $2); } -| CONTINUE { $$ = mk_node("ExprAgain", 0); } -| CONTINUE lifetime { $$ = mk_node("ExprAgain", 1, $2); } -| RETURN { $$ = mk_node("ExprRet", 0); } -| RETURN expr { $$ = mk_node("ExprRet", 1, $2); } -| BREAK { $$ = mk_node("ExprBreak", 0); } -| BREAK lifetime { $$ = mk_node("ExprBreak", 1, $2); } -| YIELD { $$ = mk_node("ExprYield", 0); } -| YIELD expr { $$ = mk_node("ExprYield", 1, $2); } -| nonblock_expr '=' expr { $$ = mk_node("ExprAssign", 2, $1, $3); } -| nonblock_expr SHLEQ expr { $$ = mk_node("ExprAssignShl", 2, $1, $3); } -| nonblock_expr SHREQ expr { $$ = mk_node("ExprAssignShr", 2, $1, $3); } -| nonblock_expr MINUSEQ expr { $$ = mk_node("ExprAssignSub", 2, $1, $3); } -| nonblock_expr ANDEQ expr { $$ = mk_node("ExprAssignBitAnd", 2, $1, $3); } -| nonblock_expr OREQ expr { $$ = mk_node("ExprAssignBitOr", 2, $1, $3); } -| nonblock_expr PLUSEQ expr { $$ = mk_node("ExprAssignAdd", 2, $1, $3); } -| nonblock_expr STAREQ expr { $$ = mk_node("ExprAssignMul", 2, $1, $3); } -| nonblock_expr SLASHEQ expr { $$ = mk_node("ExprAssignDiv", 2, $1, $3); } -| nonblock_expr CARETEQ expr { $$ = mk_node("ExprAssignBitXor", 2, $1, $3); } -| nonblock_expr PERCENTEQ expr { $$ = mk_node("ExprAssignRem", 2, $1, $3); } -| nonblock_expr OROR expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiOr"), $1, $3); } -| nonblock_expr ANDAND expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiAnd"), $1, $3); } -| nonblock_expr EQEQ expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiEq"), $1, $3); } -| nonblock_expr NE expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiNe"), $1, $3); } -| nonblock_expr '<' expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiLt"), $1, $3); } -| nonblock_expr '>' expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiGt"), $1, $3); } -| nonblock_expr LE expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiLe"), $1, $3); } -| nonblock_expr GE expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiGe"), $1, $3); } -| nonblock_expr '|' expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiBitOr"), $1, $3); } -| nonblock_expr '^' expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiBitXor"), $1, $3); } -| nonblock_expr '&' expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiBitAnd"), $1, $3); } -| nonblock_expr SHL expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiShl"), $1, $3); } -| nonblock_expr SHR expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiShr"), $1, $3); } -| nonblock_expr '+' expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiAdd"), $1, $3); } -| nonblock_expr '-' expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiSub"), $1, $3); } -| nonblock_expr '*' expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiMul"), $1, $3); } -| nonblock_expr '/' expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiDiv"), $1, $3); } -| nonblock_expr '%' expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiRem"), $1, $3); } -| nonblock_expr DOTDOT { $$ = mk_node("ExprRange", 2, $1, mk_none()); } -| nonblock_expr DOTDOT expr { $$ = mk_node("ExprRange", 2, $1, $3); } -| DOTDOT expr { $$ = mk_node("ExprRange", 2, mk_none(), $2); } -| DOTDOT { $$ = mk_node("ExprRange", 2, mk_none(), mk_none()); } -| nonblock_expr AS ty { $$ = mk_node("ExprCast", 2, $1, $3); } -| nonblock_expr ':' ty { $$ = mk_node("ExprTypeAscr", 2, $1, $3); } -| BOX expr { $$ = 
mk_node("ExprBox", 1, $2); } -| expr_qualified_path -| nonblock_prefix_expr -; - -expr -: lit { $$ = mk_node("ExprLit", 1, $1); } -| %prec IDENT - path_expr { $$ = mk_node("ExprPath", 1, $1); } -| SELF { $$ = mk_node("ExprPath", 1, mk_node("ident", 1, mk_atom("self"))); } -| macro_expr { $$ = mk_node("ExprMac", 1, $1); } -| path_expr '{' struct_expr_fields '}' { $$ = mk_node("ExprStruct", 2, $1, $3); } -| expr '?' { $$ = mk_node("ExprTry", 1, $1); } -| expr '.' path_generic_args_with_colons { $$ = mk_node("ExprField", 2, $1, $3); } -| expr '.' LIT_INTEGER { $$ = mk_node("ExprTupleIndex", 1, $1); } -| expr '[' maybe_expr ']' { $$ = mk_node("ExprIndex", 2, $1, $3); } -| expr '(' maybe_exprs ')' { $$ = mk_node("ExprCall", 2, $1, $3); } -| '(' maybe_exprs ')' { $$ = mk_node("ExprParen", 1, $2); } -| '[' vec_expr ']' { $$ = mk_node("ExprVec", 1, $2); } -| CONTINUE { $$ = mk_node("ExprAgain", 0); } -| CONTINUE ident { $$ = mk_node("ExprAgain", 1, $2); } -| RETURN { $$ = mk_node("ExprRet", 0); } -| RETURN expr { $$ = mk_node("ExprRet", 1, $2); } -| BREAK { $$ = mk_node("ExprBreak", 0); } -| BREAK ident { $$ = mk_node("ExprBreak", 1, $2); } -| YIELD { $$ = mk_node("ExprYield", 0); } -| YIELD expr { $$ = mk_node("ExprYield", 1, $2); } -| expr '=' expr { $$ = mk_node("ExprAssign", 2, $1, $3); } -| expr SHLEQ expr { $$ = mk_node("ExprAssignShl", 2, $1, $3); } -| expr SHREQ expr { $$ = mk_node("ExprAssignShr", 2, $1, $3); } -| expr MINUSEQ expr { $$ = mk_node("ExprAssignSub", 2, $1, $3); } -| expr ANDEQ expr { $$ = mk_node("ExprAssignBitAnd", 2, $1, $3); } -| expr OREQ expr { $$ = mk_node("ExprAssignBitOr", 2, $1, $3); } -| expr PLUSEQ expr { $$ = mk_node("ExprAssignAdd", 2, $1, $3); } -| expr STAREQ expr { $$ = mk_node("ExprAssignMul", 2, $1, $3); } -| expr SLASHEQ expr { $$ = mk_node("ExprAssignDiv", 2, $1, $3); } -| expr CARETEQ expr { $$ = mk_node("ExprAssignBitXor", 2, $1, $3); } -| expr PERCENTEQ expr { $$ = mk_node("ExprAssignRem", 2, $1, $3); } -| expr OROR expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiOr"), $1, $3); } -| expr ANDAND expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiAnd"), $1, $3); } -| expr EQEQ expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiEq"), $1, $3); } -| expr NE expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiNe"), $1, $3); } -| expr '<' expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiLt"), $1, $3); } -| expr '>' expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiGt"), $1, $3); } -| expr LE expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiLe"), $1, $3); } -| expr GE expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiGe"), $1, $3); } -| expr '|' expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiBitOr"), $1, $3); } -| expr '^' expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiBitXor"), $1, $3); } -| expr '&' expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiBitAnd"), $1, $3); } -| expr SHL expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiShl"), $1, $3); } -| expr SHR expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiShr"), $1, $3); } -| expr '+' expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiAdd"), $1, $3); } -| expr '-' expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiSub"), $1, $3); } -| expr '*' expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiMul"), $1, $3); } -| expr '/' expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiDiv"), $1, $3); } -| expr '%' expr { $$ = mk_node("ExprBinary", 3, mk_atom("BiRem"), $1, $3); } -| expr DOTDOT { $$ = mk_node("ExprRange", 2, $1, mk_none()); } -| expr DOTDOT expr { $$ = mk_node("ExprRange", 2, $1, $3); } -| DOTDOT expr { $$ = mk_node("ExprRange", 2, 
mk_none(), $2); } -| DOTDOT { $$ = mk_node("ExprRange", 2, mk_none(), mk_none()); } -| expr AS ty { $$ = mk_node("ExprCast", 2, $1, $3); } -| expr ':' ty { $$ = mk_node("ExprTypeAscr", 2, $1, $3); } -| BOX expr { $$ = mk_node("ExprBox", 1, $2); } -| expr_qualified_path -| block_expr -| block -| nonblock_prefix_expr -; - -expr_nostruct -: lit { $$ = mk_node("ExprLit", 1, $1); } -| %prec IDENT - path_expr { $$ = mk_node("ExprPath", 1, $1); } -| SELF { $$ = mk_node("ExprPath", 1, mk_node("ident", 1, mk_atom("self"))); } -| macro_expr { $$ = mk_node("ExprMac", 1, $1); } -| expr_nostruct '?' { $$ = mk_node("ExprTry", 1, $1); } -| expr_nostruct '.' path_generic_args_with_colons { $$ = mk_node("ExprField", 2, $1, $3); } -| expr_nostruct '.' LIT_INTEGER { $$ = mk_node("ExprTupleIndex", 1, $1); } -| expr_nostruct '[' maybe_expr ']' { $$ = mk_node("ExprIndex", 2, $1, $3); } -| expr_nostruct '(' maybe_exprs ')' { $$ = mk_node("ExprCall", 2, $1, $3); } -| '[' vec_expr ']' { $$ = mk_node("ExprVec", 1, $2); } -| '(' maybe_exprs ')' { $$ = mk_node("ExprParen", 1, $2); } -| CONTINUE { $$ = mk_node("ExprAgain", 0); } -| CONTINUE ident { $$ = mk_node("ExprAgain", 1, $2); } -| RETURN { $$ = mk_node("ExprRet", 0); } -| RETURN expr { $$ = mk_node("ExprRet", 1, $2); } -| BREAK { $$ = mk_node("ExprBreak", 0); } -| BREAK ident { $$ = mk_node("ExprBreak", 1, $2); } -| YIELD { $$ = mk_node("ExprYield", 0); } -| YIELD expr { $$ = mk_node("ExprYield", 1, $2); } -| expr_nostruct '=' expr_nostruct { $$ = mk_node("ExprAssign", 2, $1, $3); } -| expr_nostruct SHLEQ expr_nostruct { $$ = mk_node("ExprAssignShl", 2, $1, $3); } -| expr_nostruct SHREQ expr_nostruct { $$ = mk_node("ExprAssignShr", 2, $1, $3); } -| expr_nostruct MINUSEQ expr_nostruct { $$ = mk_node("ExprAssignSub", 2, $1, $3); } -| expr_nostruct ANDEQ expr_nostruct { $$ = mk_node("ExprAssignBitAnd", 2, $1, $3); } -| expr_nostruct OREQ expr_nostruct { $$ = mk_node("ExprAssignBitOr", 2, $1, $3); } -| expr_nostruct PLUSEQ expr_nostruct { $$ = mk_node("ExprAssignAdd", 2, $1, $3); } -| expr_nostruct STAREQ expr_nostruct { $$ = mk_node("ExprAssignMul", 2, $1, $3); } -| expr_nostruct SLASHEQ expr_nostruct { $$ = mk_node("ExprAssignDiv", 2, $1, $3); } -| expr_nostruct CARETEQ expr_nostruct { $$ = mk_node("ExprAssignBitXor", 2, $1, $3); } -| expr_nostruct PERCENTEQ expr_nostruct { $$ = mk_node("ExprAssignRem", 2, $1, $3); } -| expr_nostruct OROR expr_nostruct { $$ = mk_node("ExprBinary", 3, mk_atom("BiOr"), $1, $3); } -| expr_nostruct ANDAND expr_nostruct { $$ = mk_node("ExprBinary", 3, mk_atom("BiAnd"), $1, $3); } -| expr_nostruct EQEQ expr_nostruct { $$ = mk_node("ExprBinary", 3, mk_atom("BiEq"), $1, $3); } -| expr_nostruct NE expr_nostruct { $$ = mk_node("ExprBinary", 3, mk_atom("BiNe"), $1, $3); } -| expr_nostruct '<' expr_nostruct { $$ = mk_node("ExprBinary", 3, mk_atom("BiLt"), $1, $3); } -| expr_nostruct '>' expr_nostruct { $$ = mk_node("ExprBinary", 3, mk_atom("BiGt"), $1, $3); } -| expr_nostruct LE expr_nostruct { $$ = mk_node("ExprBinary", 3, mk_atom("BiLe"), $1, $3); } -| expr_nostruct GE expr_nostruct { $$ = mk_node("ExprBinary", 3, mk_atom("BiGe"), $1, $3); } -| expr_nostruct '|' expr_nostruct { $$ = mk_node("ExprBinary", 3, mk_atom("BiBitOr"), $1, $3); } -| expr_nostruct '^' expr_nostruct { $$ = mk_node("ExprBinary", 3, mk_atom("BiBitXor"), $1, $3); } -| expr_nostruct '&' expr_nostruct { $$ = mk_node("ExprBinary", 3, mk_atom("BiBitAnd"), $1, $3); } -| expr_nostruct SHL expr_nostruct { $$ = mk_node("ExprBinary", 3, mk_atom("BiShl"), $1, $3); } -| 
expr_nostruct SHR expr_nostruct { $$ = mk_node("ExprBinary", 3, mk_atom("BiShr"), $1, $3); } -| expr_nostruct '+' expr_nostruct { $$ = mk_node("ExprBinary", 3, mk_atom("BiAdd"), $1, $3); } -| expr_nostruct '-' expr_nostruct { $$ = mk_node("ExprBinary", 3, mk_atom("BiSub"), $1, $3); } -| expr_nostruct '*' expr_nostruct { $$ = mk_node("ExprBinary", 3, mk_atom("BiMul"), $1, $3); } -| expr_nostruct '/' expr_nostruct { $$ = mk_node("ExprBinary", 3, mk_atom("BiDiv"), $1, $3); } -| expr_nostruct '%' expr_nostruct { $$ = mk_node("ExprBinary", 3, mk_atom("BiRem"), $1, $3); } -| expr_nostruct DOTDOT %prec RANGE { $$ = mk_node("ExprRange", 2, $1, mk_none()); } -| expr_nostruct DOTDOT expr_nostruct { $$ = mk_node("ExprRange", 2, $1, $3); } -| DOTDOT expr_nostruct { $$ = mk_node("ExprRange", 2, mk_none(), $2); } -| DOTDOT { $$ = mk_node("ExprRange", 2, mk_none(), mk_none()); } -| expr_nostruct AS ty { $$ = mk_node("ExprCast", 2, $1, $3); } -| expr_nostruct ':' ty { $$ = mk_node("ExprTypeAscr", 2, $1, $3); } -| BOX expr { $$ = mk_node("ExprBox", 1, $2); } -| expr_qualified_path -| block_expr -| block -| nonblock_prefix_expr_nostruct -; - -nonblock_prefix_expr_nostruct -: '-' expr_nostruct { $$ = mk_node("ExprUnary", 2, mk_atom("UnNeg"), $2); } -| '!' expr_nostruct { $$ = mk_node("ExprUnary", 2, mk_atom("UnNot"), $2); } -| '*' expr_nostruct { $$ = mk_node("ExprUnary", 2, mk_atom("UnDeref"), $2); } -| '&' maybe_mut expr_nostruct { $$ = mk_node("ExprAddrOf", 2, $2, $3); } -| ANDAND maybe_mut expr_nostruct { $$ = mk_node("ExprAddrOf", 1, mk_node("ExprAddrOf", 2, $2, $3)); } -| lambda_expr_nostruct -| MOVE lambda_expr_nostruct { $$ = $2; } -; - -nonblock_prefix_expr -: '-' expr { $$ = mk_node("ExprUnary", 2, mk_atom("UnNeg"), $2); } -| '!' expr { $$ = mk_node("ExprUnary", 2, mk_atom("UnNot"), $2); } -| '*' expr { $$ = mk_node("ExprUnary", 2, mk_atom("UnDeref"), $2); } -| '&' maybe_mut expr { $$ = mk_node("ExprAddrOf", 2, $2, $3); } -| ANDAND maybe_mut expr { $$ = mk_node("ExprAddrOf", 1, mk_node("ExprAddrOf", 2, $2, $3)); } -| lambda_expr -| MOVE lambda_expr { $$ = $2; } -; - -expr_qualified_path -: '<' ty_sum maybe_as_trait_ref '>' MOD_SEP ident maybe_qpath_params -{ - $$ = mk_node("ExprQualifiedPath", 4, $2, $3, $6, $7); -} -| SHL ty_sum maybe_as_trait_ref '>' MOD_SEP ident maybe_as_trait_ref '>' MOD_SEP ident -{ - $$ = mk_node("ExprQualifiedPath", 3, mk_node("ExprQualifiedPath", 3, $2, $3, $6), $7, $10); -} -| SHL ty_sum maybe_as_trait_ref '>' MOD_SEP ident generic_args maybe_as_trait_ref '>' MOD_SEP ident -{ - $$ = mk_node("ExprQualifiedPath", 3, mk_node("ExprQualifiedPath", 4, $2, $3, $6, $7), $8, $11); -} -| SHL ty_sum maybe_as_trait_ref '>' MOD_SEP ident maybe_as_trait_ref '>' MOD_SEP ident generic_args -{ - $$ = mk_node("ExprQualifiedPath", 4, mk_node("ExprQualifiedPath", 3, $2, $3, $6), $7, $10, $11); -} -| SHL ty_sum maybe_as_trait_ref '>' MOD_SEP ident generic_args maybe_as_trait_ref '>' MOD_SEP ident generic_args -{ - $$ = mk_node("ExprQualifiedPath", 4, mk_node("ExprQualifiedPath", 4, $2, $3, $6, $7), $8, $11, $12); -} - -maybe_qpath_params -: MOD_SEP generic_args { $$ = $2; } -| %empty { $$ = mk_none(); } -; - -maybe_as_trait_ref -: AS trait_ref { $$ = $2; } -| %empty { $$ = mk_none(); } -; - -lambda_expr -: %prec LAMBDA - OROR ret_ty expr { $$ = mk_node("ExprFnBlock", 3, mk_none(), $2, $3); } -| %prec LAMBDA - '|' '|' ret_ty expr { $$ = mk_node("ExprFnBlock", 3, mk_none(), $3, $4); } -| %prec LAMBDA - '|' inferrable_params '|' ret_ty expr { $$ = mk_node("ExprFnBlock", 3, $2, $4, $5); } -| 
%prec LAMBDA - '|' inferrable_params OROR lambda_expr_no_first_bar { $$ = mk_node("ExprFnBlock", 3, $2, mk_none(), $4); } -; - -lambda_expr_no_first_bar -: %prec LAMBDA - '|' ret_ty expr { $$ = mk_node("ExprFnBlock", 3, mk_none(), $2, $3); } -| %prec LAMBDA - inferrable_params '|' ret_ty expr { $$ = mk_node("ExprFnBlock", 3, $1, $3, $4); } -| %prec LAMBDA - inferrable_params OROR lambda_expr_no_first_bar { $$ = mk_node("ExprFnBlock", 3, $1, mk_none(), $3); } -; - -lambda_expr_nostruct -: %prec LAMBDA - OROR expr_nostruct { $$ = mk_node("ExprFnBlock", 2, mk_none(), $2); } -| %prec LAMBDA - '|' '|' ret_ty expr_nostruct { $$ = mk_node("ExprFnBlock", 3, mk_none(), $3, $4); } -| %prec LAMBDA - '|' inferrable_params '|' expr_nostruct { $$ = mk_node("ExprFnBlock", 2, $2, $4); } -| %prec LAMBDA - '|' inferrable_params OROR lambda_expr_nostruct_no_first_bar { $$ = mk_node("ExprFnBlock", 3, $2, mk_none(), $4); } -; - -lambda_expr_nostruct_no_first_bar -: %prec LAMBDA - '|' ret_ty expr_nostruct { $$ = mk_node("ExprFnBlock", 3, mk_none(), $2, $3); } -| %prec LAMBDA - inferrable_params '|' ret_ty expr_nostruct { $$ = mk_node("ExprFnBlock", 3, $1, $3, $4); } -| %prec LAMBDA - inferrable_params OROR lambda_expr_nostruct_no_first_bar { $$ = mk_node("ExprFnBlock", 3, $1, mk_none(), $3); } -; - -vec_expr -: maybe_exprs -| exprs ';' expr { $$ = mk_node("VecRepeat", 2, $1, $3); } -; - -struct_expr_fields -: field_inits -| field_inits ',' -| maybe_field_inits default_field_init { $$ = ext_node($1, 1, $2); } -| %empty { $$ = mk_none(); } -; - -maybe_field_inits -: field_inits -| field_inits ',' -| %empty { $$ = mk_none(); } -; - -field_inits -: field_init { $$ = mk_node("FieldInits", 1, $1); } -| field_inits ',' field_init { $$ = ext_node($1, 1, $3); } -; - -field_init -: ident { $$ = mk_node("FieldInit", 1, $1); } -| ident ':' expr { $$ = mk_node("FieldInit", 2, $1, $3); } -| LIT_INTEGER ':' expr { $$ = mk_node("FieldInit", 2, mk_atom(yytext), $3); } -; - -default_field_init -: DOTDOT expr { $$ = mk_node("DefaultFieldInit", 1, $2); } -; - -block_expr -: expr_match -| expr_if -| expr_if_let -| expr_while -| expr_while_let -| expr_loop -| expr_for -| UNSAFE block { $$ = mk_node("UnsafeBlock", 1, $2); } -| path_expr '!' maybe_ident braces_delimited_token_trees { $$ = mk_node("Macro", 3, $1, $3, $4); } -; - -full_block_expr -: block_expr -| block_expr_dot -; - -block_expr_dot -: block_expr '.' path_generic_args_with_colons %prec IDENT { $$ = mk_node("ExprField", 2, $1, $3); } -| block_expr_dot '.' path_generic_args_with_colons %prec IDENT { $$ = mk_node("ExprField", 2, $1, $3); } -| block_expr '.' path_generic_args_with_colons '[' maybe_expr ']' { $$ = mk_node("ExprIndex", 3, $1, $3, $5); } -| block_expr_dot '.' path_generic_args_with_colons '[' maybe_expr ']' { $$ = mk_node("ExprIndex", 3, $1, $3, $5); } -| block_expr '.' path_generic_args_with_colons '(' maybe_exprs ')' { $$ = mk_node("ExprCall", 3, $1, $3, $5); } -| block_expr_dot '.' path_generic_args_with_colons '(' maybe_exprs ')' { $$ = mk_node("ExprCall", 3, $1, $3, $5); } -| block_expr '.' LIT_INTEGER { $$ = mk_node("ExprTupleIndex", 1, $1); } -| block_expr_dot '.' 
LIT_INTEGER { $$ = mk_node("ExprTupleIndex", 1, $1); } -; - -expr_match -: MATCH expr_nostruct '{' '}' { $$ = mk_node("ExprMatch", 1, $2); } -| MATCH expr_nostruct '{' match_clauses '}' { $$ = mk_node("ExprMatch", 2, $2, $4); } -| MATCH expr_nostruct '{' match_clauses nonblock_match_clause '}' { $$ = mk_node("ExprMatch", 2, $2, ext_node($4, 1, $5)); } -| MATCH expr_nostruct '{' nonblock_match_clause '}' { $$ = mk_node("ExprMatch", 2, $2, mk_node("Arms", 1, $4)); } -; - -match_clauses -: match_clause { $$ = mk_node("Arms", 1, $1); } -| match_clauses match_clause { $$ = ext_node($1, 1, $2); } -; - -match_clause -: nonblock_match_clause ',' -| block_match_clause -| block_match_clause ',' -; - -nonblock_match_clause -: maybe_outer_attrs pats_or maybe_guard FAT_ARROW nonblock_expr { $$ = mk_node("ArmNonblock", 4, $1, $2, $3, $5); } -| maybe_outer_attrs pats_or maybe_guard FAT_ARROW block_expr_dot { $$ = mk_node("ArmNonblock", 4, $1, $2, $3, $5); } -; - -block_match_clause -: maybe_outer_attrs pats_or maybe_guard FAT_ARROW block { $$ = mk_node("ArmBlock", 4, $1, $2, $3, $5); } -| maybe_outer_attrs pats_or maybe_guard FAT_ARROW block_expr { $$ = mk_node("ArmBlock", 4, $1, $2, $3, $5); } -; - -maybe_guard -: IF expr_nostruct { $$ = $2; } -| %empty { $$ = mk_none(); } -; - -expr_if -: IF expr_nostruct block { $$ = mk_node("ExprIf", 2, $2, $3); } -| IF expr_nostruct block ELSE block_or_if { $$ = mk_node("ExprIf", 3, $2, $3, $5); } -; - -expr_if_let -: IF LET pat '=' expr_nostruct block { $$ = mk_node("ExprIfLet", 3, $3, $5, $6); } -| IF LET pat '=' expr_nostruct block ELSE block_or_if { $$ = mk_node("ExprIfLet", 4, $3, $5, $6, $8); } -; - -block_or_if -: block -| expr_if -| expr_if_let -; - -expr_while -: maybe_label WHILE expr_nostruct block { $$ = mk_node("ExprWhile", 3, $1, $3, $4); } -; - -expr_while_let -: maybe_label WHILE LET pat '=' expr_nostruct block { $$ = mk_node("ExprWhileLet", 4, $1, $4, $6, $7); } -; - -expr_loop -: maybe_label LOOP block { $$ = mk_node("ExprLoop", 2, $1, $3); } -; - -expr_for -: maybe_label FOR pat IN expr_nostruct block { $$ = mk_node("ExprForLoop", 4, $1, $3, $5, $6); } -; - -maybe_label -: lifetime ':' -| %empty { $$ = mk_none(); } -; - -let -: LET pat maybe_ty_ascription maybe_init_expr ';' { $$ = mk_node("DeclLocal", 3, $2, $3, $4); } -; - -//////////////////////////////////////////////////////////////////////// -// Part 5: Macros and misc. 
rules -//////////////////////////////////////////////////////////////////////// - -lit -: LIT_BYTE { $$ = mk_node("LitByte", 1, mk_atom(yytext)); } -| LIT_CHAR { $$ = mk_node("LitChar", 1, mk_atom(yytext)); } -| LIT_INTEGER { $$ = mk_node("LitInteger", 1, mk_atom(yytext)); } -| LIT_FLOAT { $$ = mk_node("LitFloat", 1, mk_atom(yytext)); } -| TRUE { $$ = mk_node("LitBool", 1, mk_atom(yytext)); } -| FALSE { $$ = mk_node("LitBool", 1, mk_atom(yytext)); } -| str -; - -str -: LIT_STR { $$ = mk_node("LitStr", 1, mk_atom(yytext), mk_atom("CookedStr")); } -| LIT_STR_RAW { $$ = mk_node("LitStr", 1, mk_atom(yytext), mk_atom("RawStr")); } -| LIT_BYTE_STR { $$ = mk_node("LitByteStr", 1, mk_atom(yytext), mk_atom("ByteStr")); } -| LIT_BYTE_STR_RAW { $$ = mk_node("LitByteStr", 1, mk_atom(yytext), mk_atom("RawByteStr")); } -; - -maybe_ident -: %empty { $$ = mk_none(); } -| ident -; - -ident -: IDENT { $$ = mk_node("ident", 1, mk_atom(yytext)); } -// Weak keywords that can be used as identifiers -| CATCH { $$ = mk_node("ident", 1, mk_atom(yytext)); } -| DEFAULT { $$ = mk_node("ident", 1, mk_atom(yytext)); } -| UNION { $$ = mk_node("ident", 1, mk_atom(yytext)); } -; - -unpaired_token -: SHL { $$ = mk_atom(yytext); } -| SHR { $$ = mk_atom(yytext); } -| LE { $$ = mk_atom(yytext); } -| EQEQ { $$ = mk_atom(yytext); } -| NE { $$ = mk_atom(yytext); } -| GE { $$ = mk_atom(yytext); } -| ANDAND { $$ = mk_atom(yytext); } -| OROR { $$ = mk_atom(yytext); } -| LARROW { $$ = mk_atom(yytext); } -| SHLEQ { $$ = mk_atom(yytext); } -| SHREQ { $$ = mk_atom(yytext); } -| MINUSEQ { $$ = mk_atom(yytext); } -| ANDEQ { $$ = mk_atom(yytext); } -| OREQ { $$ = mk_atom(yytext); } -| PLUSEQ { $$ = mk_atom(yytext); } -| STAREQ { $$ = mk_atom(yytext); } -| SLASHEQ { $$ = mk_atom(yytext); } -| CARETEQ { $$ = mk_atom(yytext); } -| PERCENTEQ { $$ = mk_atom(yytext); } -| DOTDOT { $$ = mk_atom(yytext); } -| DOTDOTDOT { $$ = mk_atom(yytext); } -| MOD_SEP { $$ = mk_atom(yytext); } -| RARROW { $$ = mk_atom(yytext); } -| FAT_ARROW { $$ = mk_atom(yytext); } -| LIT_BYTE { $$ = mk_atom(yytext); } -| LIT_CHAR { $$ = mk_atom(yytext); } -| LIT_INTEGER { $$ = mk_atom(yytext); } -| LIT_FLOAT { $$ = mk_atom(yytext); } -| LIT_STR { $$ = mk_atom(yytext); } -| LIT_STR_RAW { $$ = mk_atom(yytext); } -| LIT_BYTE_STR { $$ = mk_atom(yytext); } -| LIT_BYTE_STR_RAW { $$ = mk_atom(yytext); } -| IDENT { $$ = mk_atom(yytext); } -| UNDERSCORE { $$ = mk_atom(yytext); } -| LIFETIME { $$ = mk_atom(yytext); } -| SELF { $$ = mk_atom(yytext); } -| STATIC { $$ = mk_atom(yytext); } -| ABSTRACT { $$ = mk_atom(yytext); } -| ALIGNOF { $$ = mk_atom(yytext); } -| AS { $$ = mk_atom(yytext); } -| BECOME { $$ = mk_atom(yytext); } -| BREAK { $$ = mk_atom(yytext); } -| CATCH { $$ = mk_atom(yytext); } -| CRATE { $$ = mk_atom(yytext); } -| DEFAULT { $$ = mk_atom(yytext); } -| DO { $$ = mk_atom(yytext); } -| ELSE { $$ = mk_atom(yytext); } -| ENUM { $$ = mk_atom(yytext); } -| EXTERN { $$ = mk_atom(yytext); } -| FALSE { $$ = mk_atom(yytext); } -| FINAL { $$ = mk_atom(yytext); } -| FN { $$ = mk_atom(yytext); } -| FOR { $$ = mk_atom(yytext); } -| IF { $$ = mk_atom(yytext); } -| IMPL { $$ = mk_atom(yytext); } -| IN { $$ = mk_atom(yytext); } -| LET { $$ = mk_atom(yytext); } -| LOOP { $$ = mk_atom(yytext); } -| MACRO { $$ = mk_atom(yytext); } -| MATCH { $$ = mk_atom(yytext); } -| MOD { $$ = mk_atom(yytext); } -| MOVE { $$ = mk_atom(yytext); } -| MUT { $$ = mk_atom(yytext); } -| OFFSETOF { $$ = mk_atom(yytext); } -| OVERRIDE { $$ = mk_atom(yytext); } -| PRIV { $$ = mk_atom(yytext); } -| PUB { $$ = 
mk_atom(yytext); } -| PURE { $$ = mk_atom(yytext); } -| REF { $$ = mk_atom(yytext); } -| RETURN { $$ = mk_atom(yytext); } -| STRUCT { $$ = mk_atom(yytext); } -| SIZEOF { $$ = mk_atom(yytext); } -| SUPER { $$ = mk_atom(yytext); } -| TRUE { $$ = mk_atom(yytext); } -| TRAIT { $$ = mk_atom(yytext); } -| TYPE { $$ = mk_atom(yytext); } -| UNION { $$ = mk_atom(yytext); } -| UNSAFE { $$ = mk_atom(yytext); } -| UNSIZED { $$ = mk_atom(yytext); } -| USE { $$ = mk_atom(yytext); } -| VIRTUAL { $$ = mk_atom(yytext); } -| WHILE { $$ = mk_atom(yytext); } -| YIELD { $$ = mk_atom(yytext); } -| CONTINUE { $$ = mk_atom(yytext); } -| PROC { $$ = mk_atom(yytext); } -| BOX { $$ = mk_atom(yytext); } -| CONST { $$ = mk_atom(yytext); } -| WHERE { $$ = mk_atom(yytext); } -| TYPEOF { $$ = mk_atom(yytext); } -| INNER_DOC_COMMENT { $$ = mk_atom(yytext); } -| OUTER_DOC_COMMENT { $$ = mk_atom(yytext); } -| SHEBANG { $$ = mk_atom(yytext); } -| STATIC_LIFETIME { $$ = mk_atom(yytext); } -| ';' { $$ = mk_atom(yytext); } -| ',' { $$ = mk_atom(yytext); } -| '.' { $$ = mk_atom(yytext); } -| '@' { $$ = mk_atom(yytext); } -| '#' { $$ = mk_atom(yytext); } -| '~' { $$ = mk_atom(yytext); } -| ':' { $$ = mk_atom(yytext); } -| '$' { $$ = mk_atom(yytext); } -| '=' { $$ = mk_atom(yytext); } -| '?' { $$ = mk_atom(yytext); } -| '!' { $$ = mk_atom(yytext); } -| '<' { $$ = mk_atom(yytext); } -| '>' { $$ = mk_atom(yytext); } -| '-' { $$ = mk_atom(yytext); } -| '&' { $$ = mk_atom(yytext); } -| '|' { $$ = mk_atom(yytext); } -| '+' { $$ = mk_atom(yytext); } -| '*' { $$ = mk_atom(yytext); } -| '/' { $$ = mk_atom(yytext); } -| '^' { $$ = mk_atom(yytext); } -| '%' { $$ = mk_atom(yytext); } -; - -token_trees -: %empty { $$ = mk_node("TokenTrees", 0); } -| token_trees token_tree { $$ = ext_node($1, 1, $2); } -; - -token_tree -: delimited_token_trees -| unpaired_token { $$ = mk_node("TTTok", 1, $1); } -; - -delimited_token_trees -: parens_delimited_token_trees -| braces_delimited_token_trees -| brackets_delimited_token_trees -; - -parens_delimited_token_trees -: '(' token_trees ')' -{ - $$ = mk_node("TTDelim", 3, - mk_node("TTTok", 1, mk_atom("(")), - $2, - mk_node("TTTok", 1, mk_atom(")"))); -} -; - -braces_delimited_token_trees -: '{' token_trees '}' -{ - $$ = mk_node("TTDelim", 3, - mk_node("TTTok", 1, mk_atom("{")), - $2, - mk_node("TTTok", 1, mk_atom("}"))); -} -; - -brackets_delimited_token_trees -: '[' token_trees ']' -{ - $$ = mk_node("TTDelim", 3, - mk_node("TTTok", 1, mk_atom("[")), - $2, - mk_node("TTTok", 1, mk_atom("]"))); -} -; diff --git a/src/grammar/raw-string-literal-ambiguity.md b/src/grammar/raw-string-literal-ambiguity.md deleted file mode 100644 index c909f23331..0000000000 --- a/src/grammar/raw-string-literal-ambiguity.md +++ /dev/null @@ -1,64 +0,0 @@ -Rust's lexical grammar is not context-free. Raw string literals are the source -of the problem. Informally, a raw string literal is an `r`, followed by `N` -hashes (where N can be zero), a quote, any characters, then a quote followed -by `N` hashes. Critically, once inside the first pair of quotes, -another quote cannot be followed by `N` consecutive hashes. e.g. -`r###""###"###` is invalid. - -This grammar describes this as best possible: - - R -> 'r' S - S -> '"' B '"' - S -> '#' S '#' - B -> . B - B -> ε - -Where `.` represents any character, and `ε` the empty string. Consider the -string `r#""#"#`. 
This string is not a valid raw string literal, but can be -accepted as one by the above grammar, using the derivation: - - R : #""#"# - S : ""#" - S : "# - B : # - B : ε - -(Where `T : U` means the rule `T` is applied, and `U` is the remainder of the -string.) The difficulty arises from the fact that it is fundamentally -context-sensitive. In particular, the context needed is the number of hashes. - -To prove that Rust's string literals are not context-free, we will use -the fact that context-free languages are closed under intersection with -regular languages, and the -[pumping lemma for context-free languages](https://en.wikipedia.org/wiki/Pumping_lemma_for_context-free_languages). - -Consider the regular language `R = r#+""#*"#+`. If Rust's raw string literals are -context-free, then their intersection with `R`, `R'`, should also be context-free. -Therefore, to prove that raw string literals are not context-free, -it is sufficient to prove that `R'` is not context-free. - -The language `R'` is `{r#^n""#^m"#^n | m < n}`. - -Assume `R'` *is* context-free. Then `R'` has some pumping length `p > 0` for which -the pumping lemma applies. Consider the following string `s` in `R'`: - -`r#^p""#^{p-1}"#^p` - -e.g. for `p = 2`: `s = r##""#"##` - -Then `s = uvwxy` for some choice of `uvwxy` such that `vx` is non-empty, -`|vwx| < p+1`, and `uv^iwx^iy` is in `R'` for all `i >= 0`. - -Neither `v` nor `x` can contain a `"` or `r`, as the number of these characters -in any string in `R'` is fixed. So `v` and `x` contain only hashes. -Consequently, of the three sequences of hashes, `v` and `x` combined -can only pump two of them. -If we ever choose the central sequence of hashes, then one of the outer sequences -will not grow when we pump, leading to an imbalance between the outer sequences. -Therefore, we must pump both outer sequences of hashes. However, -there are `p+2` characters between these two sequences of hashes, and `|vwx|` must -be less than `p+1`. Therefore we have a contradiction, and `R'` must not be -context-free. - -Since `R'` is not context-free, it follows that the Rust's raw string literals -must not be context-free. 
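For reference, the context-sensitive condition described in the removed note above can be sketched directly. The function below is a minimal, hypothetical checker (written for this note, not taken from any Rust lexer) of the informal rule: `r`, N hashes, a quote, a body that never contains a quote followed by N hashes, then a quote and N hashes. Counting N is exactly the context a context-free grammar cannot carry, and the checker rejects both `r#""#"#` and `r###""###"###` as the note states:

```rust
fn is_raw_string(s: &str) -> bool {
    let b = s.as_bytes();
    if b.first() != Some(&b'r') {
        return false;
    }
    // N = number of hashes after the leading `r`.
    let n = b[1..].iter().take_while(|&&c| c == b'#').count();
    let rest = &b[1 + n..];
    // Need an opening quote plus a closing quote followed by exactly N hashes.
    if rest.len() < 2 + n || rest[0] != b'"' {
        return false;
    }
    let (body, tail) = rest[1..].split_at(rest.len() - 2 - n);
    if tail[0] != b'"' || !tail[1..].iter().all(|&c| c == b'#') {
        return false;
    }
    // The context-sensitive condition: the body must not contain `"` + N hashes.
    let terminator: Vec<u8> =
        std::iter::once(b'"').chain(std::iter::repeat(b'#').take(n)).collect();
    !body.windows(terminator.len()).any(|w| w == &terminator[..])
}

fn main() {
    assert!(is_raw_string("r#\"\"#"));            // r#""#          -- valid
    assert!(!is_raw_string("r#\"\"#\"#"));        // r#""#"#        -- rejected: `"#` occurs in the body
    assert!(!is_raw_string("r###\"\"###\"###"));  // r###""###"###  -- invalid, as stated above
}
```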
diff --git a/src/grammar/testparser.py b/src/grammar/testparser.py deleted file mode 100755 index 4b5a7fb9e1..0000000000 --- a/src/grammar/testparser.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python - -# ignore-tidy-linelength - -import sys - -import os -import subprocess -import argparse - -# usage: testparser.py [-h] [-p PARSER [PARSER ...]] -s SOURCE_DIR - -# Parsers should read from stdin and return exit status 0 for a -# successful parse, and nonzero for an unsuccessful parse - -parser = argparse.ArgumentParser() -parser.add_argument('-p', '--parser', nargs='+') -parser.add_argument('-s', '--source-dir', nargs=1, required=True) -args = parser.parse_args(sys.argv[1:]) - -total = 0 -ok = {} -bad = {} -for parser in args.parser: - ok[parser] = 0 - bad[parser] = [] -devnull = open(os.devnull, 'w') -print("\n") - -for base, dirs, files in os.walk(args.source_dir[0]): - for f in filter(lambda p: p.endswith('.rs'), files): - p = os.path.join(base, f) - parse_fail = 'parse-fail' in p - if sys.version_info.major == 3: - lines = open(p, encoding='utf-8').readlines() - else: - lines = open(p).readlines() - if any('ignore-test' in line or 'ignore-lexer-test' in line for line in lines): - continue - total += 1 - for parser in args.parser: - if subprocess.call(parser, stdin=open(p), stderr=subprocess.STDOUT, stdout=devnull) == 0: - if parse_fail: - bad[parser].append(p) - else: - ok[parser] += 1 - else: - if parse_fail: - ok[parser] += 1 - else: - bad[parser].append(p) - parser_stats = ', '.join(['{}: {}'.format(parser, ok[parser]) for parser in args.parser]) - sys.stdout.write("\033[K\r total: {}, {}, scanned {}" - .format(total, os.path.relpath(parser_stats), os.path.relpath(p))) - -devnull.close() - -print("\n") - -for parser in args.parser: - filename = os.path.basename(parser) + '.bad' - print("writing {} files that did not yield the correct result with {} to {}".format(len(bad[parser]), parser, filename)) - with open(filename, "w") as f: - for p in bad[parser]: - f.write(p) - f.write("\n") diff --git a/src/grammar/tokens.h b/src/grammar/tokens.h deleted file mode 100644 index 297e3dc841..0000000000 --- a/src/grammar/tokens.h +++ /dev/null @@ -1,99 +0,0 @@ -enum Token { - SHL = 257, // Parser generators reserve 0-256 for char literals - SHR, - LE, - EQEQ, - NE, - GE, - ANDAND, - OROR, - SHLEQ, - SHREQ, - MINUSEQ, - ANDEQ, - OREQ, - PLUSEQ, - STAREQ, - SLASHEQ, - CARETEQ, - PERCENTEQ, - DOTDOT, - DOTDOTDOT, - MOD_SEP, - LARROW, - RARROW, - FAT_ARROW, - LIT_BYTE, - LIT_CHAR, - LIT_INTEGER, - LIT_FLOAT, - LIT_STR, - LIT_STR_RAW, - LIT_BYTE_STR, - LIT_BYTE_STR_RAW, - IDENT, - UNDERSCORE, - LIFETIME, - - // keywords - SELF, - STATIC, - ABSTRACT, - ALIGNOF, - AS, - BECOME, - BREAK, - CATCH, - CRATE, - DEFAULT, - DO, - ELSE, - ENUM, - EXTERN, - FALSE, - FINAL, - FN, - FOR, - IF, - IMPL, - IN, - LET, - LOOP, - MACRO, - MATCH, - MOD, - MOVE, - MUT, - OFFSETOF, - OVERRIDE, - PRIV, - PUB, - PURE, - REF, - RETURN, - SIZEOF, - STRUCT, - SUPER, - UNION, - TRUE, - TRAIT, - TYPE, - UNSAFE, - UNSIZED, - USE, - VIRTUAL, - WHILE, - YIELD, - CONTINUE, - PROC, - BOX, - CONST, - WHERE, - TYPEOF, - INNER_DOC_COMMENT, - OUTER_DOC_COMMENT, - - SHEBANG, - SHEBANG_LINE, - STATIC_LIFETIME -}; diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs index a39fcd5ad4..9bc76f5157 100644 --- a/src/liballoc/alloc.rs +++ b/src/liballoc/alloc.rs @@ -240,7 +240,6 @@ pub(crate) unsafe fn box_free(ptr: Unique) { #[stable(feature = "global_alloc", since = "1.28.0")] #[rustc_allocator_nounwind] pub fn 
handle_alloc_error(layout: Layout) -> ! { - #[cfg_attr(bootstrap, allow(improper_ctypes))] extern "Rust" { #[lang = "oom"] fn oom_impl(layout: Layout) -> !; diff --git a/src/liballoc/borrow.rs b/src/liballoc/borrow.rs index a9c5bce4c2..d2bdda83fa 100644 --- a/src/liballoc/borrow.rs +++ b/src/liballoc/borrow.rs @@ -207,6 +207,47 @@ impl Clone for Cow<'_, B> { } impl Cow<'_, B> { + /// Returns true if the data is borrowed, i.e. if `to_mut` would require additional work. + /// + /// # Examples + /// + /// ``` + /// #![feature(cow_is_borrowed)] + /// use std::borrow::Cow; + /// + /// let cow = Cow::Borrowed("moo"); + /// assert!(cow.is_borrowed()); + /// + /// let bull: Cow<'_, str> = Cow::Owned("...moo?".to_string()); + /// assert!(!bull.is_borrowed()); + /// ``` + #[unstable(feature = "cow_is_borrowed", issue = "65143")] + pub fn is_borrowed(&self) -> bool { + match *self { + Borrowed(_) => true, + Owned(_) => false, + } + } + + /// Returns true if the data is owned, i.e. if `to_mut` would be a no-op. + /// + /// # Examples + /// + /// ``` + /// #![feature(cow_is_borrowed)] + /// use std::borrow::Cow; + /// + /// let cow: Cow<'_, str> = Cow::Owned("moo".to_string()); + /// assert!(cow.is_owned()); + /// + /// let bull = Cow::Borrowed("...moo?"); + /// assert!(!bull.is_owned()); + /// ``` + #[unstable(feature = "cow_is_borrowed", issue = "65143")] + pub fn is_owned(&self) -> bool { + !self.is_borrowed() + } + /// Acquires a mutable reference to the owned form of the data. /// /// Clones the data if it is not already owned. diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs index c61e318340..567b8ea722 100644 --- a/src/liballoc/boxed.rs +++ b/src/liballoc/boxed.rs @@ -29,10 +29,8 @@ //! Nil, //! } //! -//! fn main() { -//! let list: List = List::Cons(1, Box::new(List::Cons(2, Box::new(List::Nil)))); -//! println!("{:?}", list); -//! } +//! let list: List = List::Cons(1, Box::new(List::Cons(2, Box::new(List::Nil)))); +//! println!("{:?}", list); //! ``` //! //! This will print `Cons(1, Cons(2, Nil))`. @@ -144,6 +142,9 @@ impl Box { #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit() -> Box> { let layout = alloc::Layout::new::>(); + if layout.size() == 0 { + return Box(NonNull::dangling().into()) + } let ptr = unsafe { Global.alloc(layout) .unwrap_or_else(|_| alloc::handle_alloc_error(layout)) @@ -184,9 +185,16 @@ impl Box<[T]> { #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit_slice(len: usize) -> Box<[mem::MaybeUninit]> { let layout = alloc::Layout::array::>(len).unwrap(); - let ptr = unsafe { alloc::alloc(layout) }; - let unique = Unique::new(ptr).unwrap_or_else(|| alloc::handle_alloc_error(layout)); - let slice = unsafe { slice::from_raw_parts_mut(unique.cast().as_ptr(), len) }; + let ptr = if layout.size() == 0 { + NonNull::dangling() + } else { + unsafe { + Global.alloc(layout) + .unwrap_or_else(|_| alloc::handle_alloc_error(layout)) + .cast() + } + }; + let slice = unsafe { slice::from_raw_parts_mut(ptr.as_ptr(), len) }; Box(Unique::from(slice)) } } @@ -375,14 +383,12 @@ impl Box { /// ``` /// #![feature(box_into_raw_non_null)] /// - /// fn main() { - /// let x = Box::new(5); - /// let ptr = Box::into_raw_non_null(x); + /// let x = Box::new(5); + /// let ptr = Box::into_raw_non_null(x); /// - /// // Clean up the memory by converting the NonNull pointer back - /// // into a Box and letting the Box be dropped. 
- /// let x = unsafe { Box::from_raw(ptr.as_ptr()) }; - /// } + /// // Clean up the memory by converting the NonNull pointer back + /// // into a Box and letting the Box be dropped. + /// let x = unsafe { Box::from_raw(ptr.as_ptr()) }; /// ``` #[unstable(feature = "box_into_raw_non_null", issue = "47336")] #[inline] @@ -428,23 +434,19 @@ impl Box { /// Simple usage: /// /// ``` - /// fn main() { - /// let x = Box::new(41); - /// let static_ref: &'static mut usize = Box::leak(x); - /// *static_ref += 1; - /// assert_eq!(*static_ref, 42); - /// } + /// let x = Box::new(41); + /// let static_ref: &'static mut usize = Box::leak(x); + /// *static_ref += 1; + /// assert_eq!(*static_ref, 42); /// ``` /// /// Unsized data: /// /// ``` - /// fn main() { - /// let x = vec![1, 2, 3].into_boxed_slice(); - /// let static_ref = Box::leak(x); - /// static_ref[0] = 4; - /// assert_eq!(*static_ref, [4, 2, 3]); - /// } + /// let x = vec![1, 2, 3].into_boxed_slice(); + /// let static_ref = Box::leak(x); + /// static_ref[0] = 4; + /// assert_eq!(*static_ref, [4, 2, 3]); /// ``` #[stable(feature = "box_leak", since = "1.26.0")] #[inline] @@ -780,11 +782,9 @@ impl Box { /// } /// } /// - /// fn main() { - /// let my_string = "Hello World".to_string(); - /// print_if_string(Box::new(my_string)); - /// print_if_string(Box::new(0i8)); - /// } + /// let my_string = "Hello World".to_string(); + /// print_if_string(Box::new(my_string)); + /// print_if_string(Box::new(0i8)); /// ``` pub fn downcast(self) -> Result, Box> { if self.is::() { @@ -814,11 +814,9 @@ impl Box { /// } /// } /// - /// fn main() { - /// let my_string = "Hello World".to_string(); - /// print_if_string(Box::new(my_string)); - /// print_if_string(Box::new(0i8)); - /// } + /// let my_string = "Hello World".to_string(); + /// print_if_string(Box::new(my_string)); + /// print_if_string(Box::new(0i8)); /// ``` pub fn downcast(self) -> Result, Box> { >::downcast(self).map_err(|s| unsafe { @@ -883,11 +881,33 @@ impl Iterator for Box { fn nth(&mut self, n: usize) -> Option { (**self).nth(n) } + fn last(self) -> Option { + BoxIter::last(self) + } } +trait BoxIter { + type Item; + fn last(self) -> Option; +} + +impl BoxIter for Box { + type Item = I::Item; + default fn last(self) -> Option { + #[inline] + fn some(_: Option, x: T) -> Option { + Some(x) + } + + self.fold(None, some) + } +} + +/// Specialization for sized `I`s that uses `I`s implementation of `last()` +/// instead of the default. #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Box { - fn last(self) -> Option where I: Sized { +impl BoxIter for Box { + fn last(self) -> Option { (*self).last() } } diff --git a/src/liballoc/collections/binary_heap.rs b/src/liballoc/collections/binary_heap.rs index 3d04f30e7b..fda6f090fd 100644 --- a/src/liballoc/collections/binary_heap.rs +++ b/src/liballoc/collections/binary_heap.rs @@ -146,7 +146,7 @@ #![stable(feature = "rust1", since = "1.0.0")] use core::ops::{Deref, DerefMut}; -use core::iter::{FromIterator, FusedIterator}; +use core::iter::{FromIterator, FusedIterator, TrustedLen}; use core::mem::{swap, size_of, ManuallyDrop}; use core::ptr; use core::fmt; @@ -648,6 +648,36 @@ impl BinaryHeap { self.extend(other.drain()); } } + + /// Returns an iterator which retrieves elements in heap order. + /// The retrieved elements are removed from the original heap. + /// The remaining elements will be removed on drop in heap order. + /// + /// Note: + /// * `.drain_sorted()` is O(n lg n); much slower than `.drain()`. 
+ /// You should use the latter for most cases. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// #![feature(binary_heap_drain_sorted)] + /// use std::collections::BinaryHeap; + /// + /// let mut heap = BinaryHeap::from(vec![1, 2, 3, 4, 5]); + /// assert_eq!(heap.len(), 5); + /// + /// drop(heap.drain_sorted()); // removes all elements in heap order + /// assert_eq!(heap.len(), 0); + /// ``` + #[inline] + #[unstable(feature = "binary_heap_drain_sorted", issue = "59278")] + pub fn drain_sorted(&mut self) -> DrainSorted<'_, T> { + DrainSorted { + inner: self, + } + } } impl BinaryHeap { @@ -672,6 +702,27 @@ impl BinaryHeap { Iter { iter: self.data.iter() } } + /// Returns an iterator which retrieves elements in heap order. + /// This method consumes the original heap. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// #![feature(binary_heap_into_iter_sorted)] + /// use std::collections::BinaryHeap; + /// let heap = BinaryHeap::from(vec![1, 2, 3, 4, 5]); + /// + /// assert_eq!(heap.into_iter_sorted().take(2).collect::>(), vec![5, 4]); + /// ``` + #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")] + pub fn into_iter_sorted(self) -> IntoIterSorted { + IntoIterSorted { + inner: self, + } + } + /// Returns the greatest item in the binary heap, or `None` if it is empty. /// /// # Examples @@ -1115,6 +1166,37 @@ impl ExactSizeIterator for IntoIter { #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for IntoIter {} +#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")] +#[derive(Clone, Debug)] +pub struct IntoIterSorted { + inner: BinaryHeap, +} + +#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")] +impl Iterator for IntoIterSorted { + type Item = T; + + #[inline] + fn next(&mut self) -> Option { + self.inner.pop() + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + let exact = self.inner.len(); + (exact, Some(exact)) + } +} + +#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")] +impl ExactSizeIterator for IntoIterSorted {} + +#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")] +impl FusedIterator for IntoIterSorted {} + +#[unstable(feature = "trusted_len", issue = "37572")] +unsafe impl TrustedLen for IntoIterSorted {} + /// A draining iterator over the elements of a `BinaryHeap`. /// /// This `struct` is created by the [`drain`] method on [`BinaryHeap`]. See its @@ -1161,6 +1243,52 @@ impl ExactSizeIterator for Drain<'_, T> { #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for Drain<'_, T> {} +/// A draining iterator over the elements of a `BinaryHeap`. +/// +/// This `struct` is created by the [`drain_sorted`] method on [`BinaryHeap`]. See its +/// documentation for more. +/// +/// [`drain_sorted`]: struct.BinaryHeap.html#method.drain_sorted +/// [`BinaryHeap`]: struct.BinaryHeap.html +#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")] +#[derive(Debug)] +pub struct DrainSorted<'a, T: Ord> { + inner: &'a mut BinaryHeap, +} + +#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")] +impl<'a, T: Ord> Drop for DrainSorted<'a, T> { + /// Removes heap elements in heap order. 
+ fn drop(&mut self) { + while let Some(_) = self.inner.pop() {} + } +} + +#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")] +impl Iterator for DrainSorted<'_, T> { + type Item = T; + + #[inline] + fn next(&mut self) -> Option { + self.inner.pop() + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + let exact = self.inner.len(); + (exact, Some(exact)) + } +} + +#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")] +impl ExactSizeIterator for DrainSorted<'_, T> { } + +#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")] +impl FusedIterator for DrainSorted<'_, T> {} + +#[unstable(feature = "trusted_len", issue = "37572")] +unsafe impl TrustedLen for DrainSorted<'_, T> {} + #[stable(feature = "binary_heap_extras_15", since = "1.5.0")] impl From> for BinaryHeap { /// Converts a `Vec` into a `BinaryHeap`. diff --git a/src/liballoc/collections/btree/map.rs b/src/liballoc/collections/btree/map.rs index 1683b81055..83fd4485f7 100644 --- a/src/liballoc/collections/btree/map.rs +++ b/src/liballoc/collections/btree/map.rs @@ -580,7 +580,6 @@ impl BTreeMap { /// # Examples /// /// ``` - /// #![feature(map_get_key_value)] /// use std::collections::BTreeMap; /// /// let mut map = BTreeMap::new(); @@ -588,7 +587,7 @@ impl BTreeMap { /// assert_eq!(map.get_key_value(&1), Some((&1, &"a"))); /// assert_eq!(map.get_key_value(&2), None); /// ``` - #[unstable(feature = "map_get_key_value", issue = "49347")] + #[stable(feature = "map_get_key_value", since = "1.40.0")] pub fn get_key_value(&self, k: &Q) -> Option<(&K, &V)> where K: Borrow, Q: Ord @@ -2227,14 +2226,12 @@ impl<'a, K: Ord, V: Default> Entry<'a, K, V> { /// # Examples /// /// ``` - /// # fn main() { /// use std::collections::BTreeMap; /// /// let mut map: BTreeMap<&str, Option> = BTreeMap::new(); /// map.entry("poneyland").or_default(); /// /// assert_eq!(map["poneyland"], None); - /// # } /// ``` pub fn or_default(self) -> &'a mut V { match self { diff --git a/src/liballoc/collections/btree/set.rs b/src/liballoc/collections/btree/set.rs index 0cb91ba4c8..f0796354e0 100644 --- a/src/liballoc/collections/btree/set.rs +++ b/src/liballoc/collections/btree/set.rs @@ -2,7 +2,7 @@ // to TreeMap use core::borrow::Borrow; -use core::cmp::Ordering::{self, Less, Greater, Equal}; +use core::cmp::Ordering::{Less, Greater, Equal}; use core::cmp::{max, min}; use core::fmt::{self, Debug}; use core::iter::{Peekable, FromIterator, FusedIterator}; @@ -109,6 +109,77 @@ pub struct Range<'a, T: 'a> { iter: btree_map::Range<'a, T, ()>, } +/// Core of SymmetricDifference and Union. +/// More efficient than btree.map.MergeIter, +/// and crucially for SymmetricDifference, nexts() reports on both sides. 
+#[derive(Clone)] +struct MergeIterInner + where I: Iterator, + I::Item: Copy, +{ + a: I, + b: I, + peeked: Option>, +} + +#[derive(Copy, Clone, Debug)] +enum MergeIterPeeked { + A(I::Item), + B(I::Item), +} + +impl MergeIterInner + where I: ExactSizeIterator + FusedIterator, + I::Item: Copy + Ord, +{ + fn new(a: I, b: I) -> Self { + MergeIterInner { a, b, peeked: None } + } + + fn nexts(&mut self) -> (Option, Option) { + let mut a_next = match self.peeked { + Some(MergeIterPeeked::A(next)) => Some(next), + _ => self.a.next(), + }; + let mut b_next = match self.peeked { + Some(MergeIterPeeked::B(next)) => Some(next), + _ => self.b.next(), + }; + let ord = match (a_next, b_next) { + (None, None) => Equal, + (_, None) => Less, + (None, _) => Greater, + (Some(a1), Some(b1)) => a1.cmp(&b1), + }; + self.peeked = match ord { + Less => b_next.take().map(MergeIterPeeked::B), + Equal => None, + Greater => a_next.take().map(MergeIterPeeked::A), + }; + (a_next, b_next) + } + + fn lens(&self) -> (usize, usize) { + match self.peeked { + Some(MergeIterPeeked::A(_)) => (1 + self.a.len(), self.b.len()), + Some(MergeIterPeeked::B(_)) => (self.a.len(), 1 + self.b.len()), + _ => (self.a.len(), self.b.len()), + } + } +} + +impl Debug for MergeIterInner + where I: Iterator + Debug, + I::Item: Copy + Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("MergeIterInner") + .field(&self.a) + .field(&self.b) + .finish() + } +} + /// A lazy iterator producing elements in the difference of `BTreeSet`s. /// /// This `struct` is created by the [`difference`] method on [`BTreeSet`]. @@ -120,34 +191,25 @@ pub struct Range<'a, T: 'a> { pub struct Difference<'a, T: 'a> { inner: DifferenceInner<'a, T>, } +#[derive(Debug)] enum DifferenceInner<'a, T: 'a> { Stitch { + // iterate all of self and some of other, spotting matches along the way self_iter: Iter<'a, T>, other_iter: Peekable>, }, Search { + // iterate a small set, look up in the large set self_iter: Iter<'a, T>, other_set: &'a BTreeSet, }, + Iterate(Iter<'a, T>), // simply stream self's elements } #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for Difference<'_, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match &self.inner { - DifferenceInner::Stitch { - self_iter, - other_iter, - } => f - .debug_tuple("Difference") - .field(&self_iter) - .field(&other_iter) - .finish(), - DifferenceInner::Search { - self_iter, - other_set: _, - } => f.debug_tuple("Difference").field(&self_iter).finish(), - } + f.debug_tuple("Difference").field(&self.inner).finish() } } @@ -159,18 +221,12 @@ impl fmt::Debug for Difference<'_, T> { /// [`BTreeSet`]: struct.BTreeSet.html /// [`symmetric_difference`]: struct.BTreeSet.html#method.symmetric_difference #[stable(feature = "rust1", since = "1.0.0")] -pub struct SymmetricDifference<'a, T: 'a> { - a: Peekable>, - b: Peekable>, -} +pub struct SymmetricDifference<'a, T: 'a>(MergeIterInner>); #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for SymmetricDifference<'_, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_tuple("SymmetricDifference") - .field(&self.a) - .field(&self.b) - .finish() + f.debug_tuple("SymmetricDifference").field(&self.0).finish() } } @@ -185,34 +241,25 @@ impl fmt::Debug for SymmetricDifference<'_, T> { pub struct Intersection<'a, T: 'a> { inner: IntersectionInner<'a, T>, } +#[derive(Debug)] enum IntersectionInner<'a, T: 'a> { Stitch { + // iterate similarly sized sets jointly, spotting 
matches along the way a: Iter<'a, T>, b: Iter<'a, T>, }, Search { + // iterate a small set, look up in the large set small_iter: Iter<'a, T>, large_set: &'a BTreeSet, }, + Answer(Option<&'a T>), // return a specific value or emptiness } #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for Intersection<'_, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match &self.inner { - IntersectionInner::Stitch { - a, - b, - } => f - .debug_tuple("Intersection") - .field(&a) - .field(&b) - .finish(), - IntersectionInner::Search { - small_iter, - large_set: _, - } => f.debug_tuple("Intersection").field(&small_iter).finish(), - } + f.debug_tuple("Intersection").field(&self.inner).finish() } } @@ -224,18 +271,12 @@ impl fmt::Debug for Intersection<'_, T> { /// [`BTreeSet`]: struct.BTreeSet.html /// [`union`]: struct.BTreeSet.html#method.union #[stable(feature = "rust1", since = "1.0.0")] -pub struct Union<'a, T: 'a> { - a: Peekable>, - b: Peekable>, -} +pub struct Union<'a, T: 'a>(MergeIterInner>); #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for Union<'_, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_tuple("Union") - .field(&self.a) - .field(&self.b) - .finish() + f.debug_tuple("Union").field(&self.0).finish() } } @@ -314,24 +355,48 @@ impl BTreeSet { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn difference<'a>(&'a self, other: &'a BTreeSet) -> Difference<'a, T> { - if self.len() > other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF { - // Self is bigger than or not much smaller than other set. - // Iterate both sets jointly, spotting matches along the way. - Difference { - inner: DifferenceInner::Stitch { + let (self_min, self_max) = if let (Some(self_min), Some(self_max)) = + (self.iter().next(), self.iter().next_back()) + { + (self_min, self_max) + } else { + return Difference { + inner: DifferenceInner::Iterate(self.iter()), + }; + }; + let (other_min, other_max) = if let (Some(other_min), Some(other_max)) = + (other.iter().next(), other.iter().next_back()) + { + (other_min, other_max) + } else { + return Difference { + inner: DifferenceInner::Iterate(self.iter()), + }; + }; + Difference { + inner: match (self_min.cmp(other_max), self_max.cmp(other_min)) { + (Greater, _) | (_, Less) => DifferenceInner::Iterate(self.iter()), + (Equal, _) => { + let mut self_iter = self.iter(); + self_iter.next(); + DifferenceInner::Iterate(self_iter) + } + (_, Equal) => { + let mut self_iter = self.iter(); + self_iter.next_back(); + DifferenceInner::Iterate(self_iter) + } + _ if self.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => { + DifferenceInner::Search { + self_iter: self.iter(), + other_set: other, + } + } + _ => DifferenceInner::Stitch { self_iter: self.iter(), other_iter: other.iter().peekable(), }, - } - } else { - // Self is much smaller than other set, or both sets are empty. - // Iterate the small set, searching for matches in the large set. 
- Difference { - inner: DifferenceInner::Search { - self_iter: self.iter(), - other_set: other, - }, - } + }, } } @@ -359,10 +424,7 @@ impl BTreeSet { pub fn symmetric_difference<'a>(&'a self, other: &'a BTreeSet) -> SymmetricDifference<'a, T> { - SymmetricDifference { - a: self.iter().peekable(), - b: other.iter().peekable(), - } + SymmetricDifference(MergeIterInner::new(self.iter(), other.iter())) } /// Visits the values representing the intersection, @@ -387,29 +449,46 @@ impl BTreeSet { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn intersection<'a>(&'a self, other: &'a BTreeSet) -> Intersection<'a, T> { - let (small, other) = if self.len() <= other.len() { - (self, other) + let (self_min, self_max) = if let (Some(self_min), Some(self_max)) = + (self.iter().next(), self.iter().next_back()) + { + (self_min, self_max) } else { - (other, self) + return Intersection { + inner: IntersectionInner::Answer(None), + }; }; - if small.len() > other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF { - // Small set is not much smaller than other set. - // Iterate both sets jointly, spotting matches along the way. - Intersection { - inner: IntersectionInner::Stitch { - a: small.iter(), + let (other_min, other_max) = if let (Some(other_min), Some(other_max)) = + (other.iter().next(), other.iter().next_back()) + { + (other_min, other_max) + } else { + return Intersection { + inner: IntersectionInner::Answer(None), + }; + }; + Intersection { + inner: match (self_min.cmp(other_max), self_max.cmp(other_min)) { + (Greater, _) | (_, Less) => IntersectionInner::Answer(None), + (Equal, _) => IntersectionInner::Answer(Some(self_min)), + (_, Equal) => IntersectionInner::Answer(Some(self_max)), + _ if self.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => { + IntersectionInner::Search { + small_iter: self.iter(), + large_set: other, + } + } + _ if other.len() <= self.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => { + IntersectionInner::Search { + small_iter: other.iter(), + large_set: self, + } + } + _ => IntersectionInner::Stitch { + a: self.iter(), b: other.iter(), }, - } - } else { - // Big difference in number of elements, or both sets are empty. - // Iterate the small set, searching for matches in the large set. - Intersection { - inner: IntersectionInner::Search { - small_iter: small.iter(), - large_set: other, - }, - } + }, } } @@ -433,10 +512,7 @@ impl BTreeSet { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn union<'a>(&'a self, other: &'a BTreeSet) -> Union<'a, T> { - Union { - a: self.iter().peekable(), - b: other.iter().peekable(), - } + Union(MergeIterInner::new(self.iter(), other.iter())) } /// Clears the set, removing all values. @@ -544,43 +620,61 @@ impl BTreeSet { #[stable(feature = "rust1", since = "1.0.0")] pub fn is_subset(&self, other: &BTreeSet) -> bool { // Same result as self.difference(other).next().is_none() - // but the 3 paths below are faster (in order: hugely, 20%, 5%). + // but the code below is faster (hugely in some cases). if self.len() > other.len() { - false - } else if self.len() > other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF { - // Self is not much smaller than other set. 
- // Stolen from TreeMap - let mut x = self.iter(); - let mut y = other.iter(); - let mut a = x.next(); - let mut b = y.next(); - while a.is_some() { - if b.is_none() { - return false; - } - - let a1 = a.unwrap(); - let b1 = b.unwrap(); - - match b1.cmp(a1) { - Less => (), - Greater => return false, - Equal => a = x.next(), - } - - b = y.next(); - } - true + return false; + } + let (self_min, self_max) = if let (Some(self_min), Some(self_max)) = + (self.iter().next(), self.iter().next_back()) + { + (self_min, self_max) } else { - // Big difference in number of elements, or both sets are empty. - // Iterate the small set, searching for matches in the large set. - for next in self { + return true; // self is empty + }; + let (other_min, other_max) = if let (Some(other_min), Some(other_max)) = + (other.iter().next(), other.iter().next_back()) + { + (other_min, other_max) + } else { + return false; // other is empty + }; + let mut self_iter = self.iter(); + match self_min.cmp(other_min) { + Less => return false, + Equal => { + self_iter.next(); + } + Greater => (), + } + match self_max.cmp(other_max) { + Greater => return false, + Equal => { + self_iter.next_back(); + } + Less => (), + } + if self_iter.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF { + // Big difference in number of elements. + for next in self_iter { if !other.contains(next) { return false; } } - true + } else { + // Self is not much smaller than other set. + let mut other_iter = other.iter(); + other_iter.next(); + other_iter.next_back(); + let mut self_next = self_iter.next(); + while let Some(self1) = self_next { + match other_iter.next().map_or(Less, |other1| self1.cmp(other1)) { + Less => return false, + Equal => self_next = self_iter.next(), + Greater => (), + } + } } + true } /// Returns `true` if the set is a superset of another, @@ -1092,15 +1186,6 @@ impl<'a, T> DoubleEndedIterator for Range<'a, T> { #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for Range<'_, T> {} -/// Compares `x` and `y`, but return `short` if x is None and `long` if y is None -fn cmp_opt(x: Option<&T>, y: Option<&T>, short: Ordering, long: Ordering) -> Ordering { - match (x, y) { - (None, _) => short, - (_, None) => long, - (Some(x1), Some(y1)) => x1.cmp(y1), - } -} - #[stable(feature = "rust1", since = "1.0.0")] impl Clone for Difference<'_, T> { fn clone(&self) -> Self { @@ -1120,6 +1205,7 @@ impl Clone for Difference<'_, T> { self_iter: self_iter.clone(), other_set, }, + DifferenceInner::Iterate(iter) => DifferenceInner::Iterate(iter.clone()), }, } } @@ -1138,7 +1224,7 @@ impl<'a, T: Ord> Iterator for Difference<'a, T> { loop { match other_iter .peek() - .map_or(Less, |other_next| Ord::cmp(self_next, other_next)) + .map_or(Less, |other_next| self_next.cmp(other_next)) { Less => return Some(self_next), Equal => { @@ -1160,6 +1246,7 @@ impl<'a, T: Ord> Iterator for Difference<'a, T> { return Some(self_next); } }, + DifferenceInner::Iterate(iter) => iter.next(), } } @@ -1167,12 +1254,13 @@ impl<'a, T: Ord> Iterator for Difference<'a, T> { let (self_len, other_len) = match &self.inner { DifferenceInner::Stitch { self_iter, - other_iter + other_iter, } => (self_iter.len(), other_iter.len()), DifferenceInner::Search { self_iter, - other_set + other_set, } => (self_iter.len(), other_set.len()), + DifferenceInner::Iterate(iter) => (iter.len(), 0), }; (self_len.saturating_sub(other_len), Some(self_len)) } @@ -1184,10 +1272,7 @@ impl FusedIterator for Difference<'_, T> {} #[stable(feature = "rust1", since = "1.0.0")] impl 
Clone for SymmetricDifference<'_, T> { fn clone(&self) -> Self { - SymmetricDifference { - a: self.a.clone(), - b: self.b.clone(), - } + SymmetricDifference(self.0.clone()) } } #[stable(feature = "rust1", since = "1.0.0")] @@ -1196,19 +1281,19 @@ impl<'a, T: Ord> Iterator for SymmetricDifference<'a, T> { fn next(&mut self) -> Option<&'a T> { loop { - match cmp_opt(self.a.peek(), self.b.peek(), Greater, Less) { - Less => return self.a.next(), - Equal => { - self.a.next(); - self.b.next(); - } - Greater => return self.b.next(), + let (a_next, b_next) = self.0.nexts(); + if a_next.and(b_next).is_none() { + return a_next.or(b_next); } } } fn size_hint(&self) -> (usize, Option) { - (0, Some(self.a.len() + self.b.len())) + let (a_len, b_len) = self.0.lens(); + // No checked_add, because even if a and b refer to the same set, + // and T is an empty type, the storage overhead of sets limits + // the number of elements to less than half the range of usize. + (0, Some(a_len + b_len)) } } @@ -1234,6 +1319,7 @@ impl Clone for Intersection<'_, T> { small_iter: small_iter.clone(), large_set, }, + IntersectionInner::Answer(answer) => IntersectionInner::Answer(*answer), }, } } @@ -1251,7 +1337,7 @@ impl<'a, T: Ord> Iterator for Intersection<'a, T> { let mut a_next = a.next()?; let mut b_next = b.next()?; loop { - match Ord::cmp(a_next, b_next) { + match a_next.cmp(b_next) { Less => a_next = a.next()?, Greater => b_next = b.next()?, Equal => return Some(a_next), @@ -1267,15 +1353,17 @@ impl<'a, T: Ord> Iterator for Intersection<'a, T> { return Some(small_next); } }, + IntersectionInner::Answer(answer) => answer.take(), } } fn size_hint(&self) -> (usize, Option) { - let min_len = match &self.inner { - IntersectionInner::Stitch { a, b } => min(a.len(), b.len()), - IntersectionInner::Search { small_iter, .. } => small_iter.len(), - }; - (0, Some(min_len)) + match &self.inner { + IntersectionInner::Stitch { a, b } => (0, Some(min(a.len(), b.len()))), + IntersectionInner::Search { small_iter, .. } => (0, Some(small_iter.len())), + IntersectionInner::Answer(None) => (0, Some(0)), + IntersectionInner::Answer(Some(_)) => (1, Some(1)), + } } } @@ -1285,10 +1373,7 @@ impl FusedIterator for Intersection<'_, T> {} #[stable(feature = "rust1", since = "1.0.0")] impl Clone for Union<'_, T> { fn clone(&self) -> Self { - Union { - a: self.a.clone(), - b: self.b.clone(), - } + Union(self.0.clone()) } } #[stable(feature = "rust1", since = "1.0.0")] @@ -1296,19 +1381,13 @@ impl<'a, T: Ord> Iterator for Union<'a, T> { type Item = &'a T; fn next(&mut self) -> Option<&'a T> { - match cmp_opt(self.a.peek(), self.b.peek(), Greater, Less) { - Less => self.a.next(), - Equal => { - self.b.next(); - self.a.next() - } - Greater => self.b.next(), - } + let (a_next, b_next) = self.0.nexts(); + a_next.or(b_next) } fn size_hint(&self) -> (usize, Option) { - let a_len = self.a.len(); - let b_len = self.b.len(); + let (a_len, b_len) = self.0.lens(); + // No checked_add - see SymmetricDifference::size_hint. 
(max(a_len, b_len), Some(a_len + b_len)) } } diff --git a/src/liballoc/collections/linked_list.rs b/src/liballoc/collections/linked_list.rs index 816a71f255..702df25099 100644 --- a/src/liballoc/collections/linked_list.rs +++ b/src/liballoc/collections/linked_list.rs @@ -1197,6 +1197,19 @@ impl Clone for LinkedList { fn clone(&self) -> Self { self.iter().cloned().collect() } + + fn clone_from(&mut self, other: &Self) { + let mut iter_other = other.iter(); + if self.len() > other.len() { + self.split_off(other.len()); + } + for (elem, elem_other) in self.iter_mut().zip(&mut iter_other) { + elem.clone_from(elem_other); + } + if !iter_other.is_empty() { + self.extend(iter_other.cloned()); + } + } } #[stable(feature = "rust1", since = "1.0.0")] diff --git a/src/liballoc/collections/linked_list/tests.rs b/src/liballoc/collections/linked_list/tests.rs index ecb5948f11..1001f6bba3 100644 --- a/src/liballoc/collections/linked_list/tests.rs +++ b/src/liballoc/collections/linked_list/tests.rs @@ -110,6 +110,49 @@ fn test_append() { check_links(&n); } +#[test] +fn test_clone_from() { + // Short cloned from long + { + let v = vec![1, 2, 3, 4, 5]; + let u = vec![8, 7, 6, 2, 3, 4, 5]; + let mut m = list_from(&v); + let n = list_from(&u); + m.clone_from(&n); + check_links(&m); + assert_eq!(m, n); + for elt in u { + assert_eq!(m.pop_front(), Some(elt)) + } + } + // Long cloned from short + { + let v = vec![1, 2, 3, 4, 5]; + let u = vec![6, 7, 8]; + let mut m = list_from(&v); + let n = list_from(&u); + m.clone_from(&n); + check_links(&m); + assert_eq!(m, n); + for elt in u { + assert_eq!(m.pop_front(), Some(elt)) + } + } + // Two equal length lists + { + let v = vec![1, 2, 3, 4, 5]; + let u = vec![9, 8, 1, 2, 3]; + let mut m = list_from(&v); + let n = list_from(&u); + m.clone_from(&n); + check_links(&m); + assert_eq!(m, n); + for elt in u { + assert_eq!(m.pop_front(), Some(elt)) + } + } +} + #[test] fn test_insert_prev() { let mut m = list_from(&[0, 2, 4, 6, 8]); diff --git a/src/liballoc/collections/vec_deque.rs b/src/liballoc/collections/vec_deque.rs index a4a0fbb194..8f3dfabd88 100644 --- a/src/liballoc/collections/vec_deque.rs +++ b/src/liballoc/collections/vec_deque.rs @@ -10,8 +10,8 @@ use core::array::LengthAtMost32; use core::cmp::{self, Ordering}; use core::fmt; -use core::iter::{repeat_with, FromIterator, FusedIterator}; -use core::mem; +use core::iter::{once, repeat_with, FromIterator, FusedIterator}; +use core::mem::{self, replace}; use core::ops::Bound::{Excluded, Included, Unbounded}; use core::ops::{Index, IndexMut, RangeBounds, Try}; use core::ptr::{self, NonNull}; @@ -57,11 +57,88 @@ pub struct VecDeque { buf: RawVec, } +/// PairSlices pairs up equal length slice parts of two deques +/// +/// For example, given deques "A" and "B" with the following division into slices: +/// +/// A: [0 1 2] [3 4 5] +/// B: [a b] [c d e] +/// +/// It produces the following sequence of matching slices: +/// +/// ([0 1], [a b]) +/// ([2], [c]) +/// ([3 4], [d e]) +/// +/// and the uneven remainder of either A or B is skipped. 
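Both new `clone_from` specializations reuse what the destination already owns: the linked list overwrites existing nodes and splits off or extends the tail, and the deque pairs its two slices with the source's (the `PairSlices` helper described above) and clones slot by slot. A minimal usage sketch with arbitrary contents:

```rust
use std::collections::{LinkedList, VecDeque};

let mut list: LinkedList<String> = ["a", "b", "c"].iter().map(|s| s.to_string()).collect();
let src: LinkedList<String> = ["x", "y"].iter().map(|s| s.to_string()).collect();
// Overwrites "a" and "b" in place and drops the surplus node instead of
// deallocating the whole list and cloning from scratch.
list.clone_from(&src);
assert_eq!(list, src);

let mut deque: VecDeque<i32> = (0..4).collect();
let other: VecDeque<i32> = (10..20).collect();
// Truncates or extends as needed; slots that already exist are reused.
deque.clone_from(&other);
assert_eq!(deque, other);
```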
+struct PairSlices<'a, 'b, T> { + a0: &'a mut [T], + a1: &'a mut [T], + b0: &'b [T], + b1: &'b [T], +} + +impl<'a, 'b, T> PairSlices<'a, 'b, T> { + fn from(to: &'a mut VecDeque, from: &'b VecDeque) -> Self { + let (a0, a1) = to.as_mut_slices(); + let (b0, b1) = from.as_slices(); + PairSlices { a0, a1, b0, b1 } + } + + fn has_remainder(&self) -> bool { + !self.b0.is_empty() + } + + fn remainder(self) -> impl Iterator { + once(self.b0).chain(once(self.b1)) + } +} + +impl<'a, 'b, T> Iterator for PairSlices<'a, 'b, T> +{ + type Item = (&'a mut [T], &'b [T]); + fn next(&mut self) -> Option { + // Get next part length + let part = cmp::min(self.a0.len(), self.b0.len()); + if part == 0 { + return None; + } + let (p0, p1) = replace(&mut self.a0, &mut []).split_at_mut(part); + let (q0, q1) = self.b0.split_at(part); + + // Move a1 into a0, if it's empty (and b1, b0 the same way). + self.a0 = p1; + self.b0 = q1; + if self.a0.is_empty() { + self.a0 = replace(&mut self.a1, &mut []); + } + if self.b0.is_empty() { + self.b0 = replace(&mut self.b1, &[]); + } + Some((p0, q0)) + } +} + #[stable(feature = "rust1", since = "1.0.0")] impl Clone for VecDeque { fn clone(&self) -> VecDeque { self.iter().cloned().collect() } + + fn clone_from(&mut self, other: &Self) { + self.truncate(other.len()); + + let mut iter = PairSlices::from(self, other); + while let Some((dst, src)) = iter.next() { + dst.clone_from_slice(&src); + } + + if iter.has_remainder() { + for remainder in iter.remainder() { + self.extend(remainder.iter().cloned()); + } + } + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -1740,7 +1817,7 @@ impl VecDeque { } } - return elem; + elem } /// Splits the `VecDeque` into two at the given index. @@ -2209,6 +2286,16 @@ impl<'a, T> Iterator for Iter<'a, T> { final_res } + fn nth(&mut self, n: usize) -> Option { + if n >= count(self.tail, self.head, self.ring.len()) { + self.tail = self.head; + None + } else { + self.tail = wrap_index(self.tail.wrapping_add(n), self.ring.len()); + self.next() + } + } + #[inline] fn last(mut self) -> Option<&'a T> { self.next_back() @@ -2327,6 +2414,16 @@ impl<'a, T> Iterator for IterMut<'a, T> { back.iter_mut().fold(accum, &mut f) } + fn nth(&mut self, n: usize) -> Option { + if n >= count(self.tail, self.head, self.ring.len()) { + self.tail = self.head; + None + } else { + self.tail = wrap_index(self.tail.wrapping_add(n), self.ring.len()); + self.next() + } + } + #[inline] fn last(mut self) -> Option<&'a mut T> { self.next_back() diff --git a/src/liballoc/collections/vec_deque/tests.rs b/src/liballoc/collections/vec_deque/tests.rs index d253523997..d578ee0dac 100644 --- a/src/liballoc/collections/vec_deque/tests.rs +++ b/src/liballoc/collections/vec_deque/tests.rs @@ -361,6 +361,29 @@ fn test_vec_from_vecdeque() { } } +#[test] +fn test_clone_from() { + let m = vec![1; 8]; + let n = vec![2; 12]; + for pfv in 0..8 { + for pfu in 0..8 { + for longer in 0..2 { + let (vr, ur) = if longer == 0 { (&m, &n) } else { (&n, &m) }; + let mut v = VecDeque::from(vr.clone()); + for _ in 0..pfv { + v.push_front(1); + } + let mut u = VecDeque::from(ur.clone()); + for _ in 0..pfu { + u.push_front(2); + } + v.clone_from(&u); + assert_eq!(&v, &u); + } + } + } +} + #[test] fn issue_53529() { use crate::boxed::Box; diff --git a/src/liballoc/fmt.rs b/src/liballoc/fmt.rs index 68cbc366d7..cbfc55233a 100644 --- a/src/liballoc/fmt.rs +++ b/src/liballoc/fmt.rs @@ -80,24 +80,210 @@ //! arguments which have names. Like with positional parameters, it is not //! 
valid to provide named parameters that are unused by the format string. //! -//! ## Argument types +//! # Formatting Parameters //! -//! Each argument's type is dictated by the format string. -//! There are various parameters which require a particular type, however. -//! An example is the `{:.*}` syntax, which sets the number of decimal places -//! in floating-point types: +//! Each argument being formatted can be transformed by a number of formatting +//! parameters (corresponding to `format_spec` in the syntax above). These +//! parameters affect the string representation of what's being formatted. +//! +//! ## Width //! //! ``` -//! let formatted_number = format!("{:.*}", 2, 1.234567); -//! -//! assert_eq!("1.23", formatted_number) +//! // All of these print "Hello x !" +//! println!("Hello {:5}!", "x"); +//! println!("Hello {:1$}!", "x", 5); +//! println!("Hello {1:0$}!", 5, "x"); +//! println!("Hello {:width$}!", "x", width = 5); //! ``` //! -//! If this syntax is used, then the number of characters to print precedes the -//! actual object being formatted, and the number of characters must have the -//! type [`usize`]. +//! This is a parameter for the "minimum width" that the format should take up. +//! If the value's string does not fill up this many characters, then the +//! padding specified by fill/alignment will be used to take up the required +//! space (see below). //! -//! ## Formatting traits +//! The value for the width can also be provided as a [`usize`] in the list of +//! parameters by adding a postfix `$`, indicating that the second argument is +//! a [`usize`] specifying the width. +//! +//! Referring to an argument with the dollar syntax does not affect the "next +//! argument" counter, so it's usually a good idea to refer to arguments by +//! position, or use named arguments. +//! +//! ## Fill/Alignment +//! +//! ``` +//! assert_eq!(format!("Hello {:<5}!", "x"), "Hello x !"); +//! assert_eq!(format!("Hello {:-<5}!", "x"), "Hello x----!"); +//! assert_eq!(format!("Hello {:^5}!", "x"), "Hello x !"); +//! assert_eq!(format!("Hello {:>5}!", "x"), "Hello x!"); +//! ``` +//! +//! The optional fill character and alignment is provided normally in conjunction with the +//! [`width`](#width) parameter. It must be defined before `width`, right after the `:`. +//! This indicates that if the value being formatted is smaller than +//! `width` some extra characters will be printed around it. +//! Filling comes in the following variants for different alignments: +//! +//! * `[fill]<` - the argument is left-aligned in `width` columns +//! * `[fill]^` - the argument is center-aligned in `width` columns +//! * `[fill]>` - the argument is right-aligned in `width` columns +//! +//! The default [fill/alignment](#fillalignment) for non-numerics is a space and +//! left-aligned. The +//! defaults for numeric formatters is also a space but with right-alignment. If +//! the `0` flag (see below) is specified for numerics, then the implicit fill character is +//! `0`. +//! +//! Note that alignment may not be implemented by some types. In particular, it +//! is not generally implemented for the `Debug` trait. A good way to ensure +//! padding is applied is to format your input, then pad this resulting string +//! to obtain your output: +//! +//! ``` +//! println!("Hello {:^15}!", format!("{:?}", Some("hi"))); // => "Hello Some("hi") !" +//! ``` +//! +//! ## Sign/`#`/`0` +//! +//! ``` +//! assert_eq!(format!("Hello {:+}!", 5), "Hello +5!"); +//! assert_eq!(format!("{:#x}!", 27), "0x1b!"); +//! 
assert_eq!(format!("Hello {:05}!", 5), "Hello 00005!"); +//! assert_eq!(format!("Hello {:05}!", -5), "Hello -0005!"); +//! assert_eq!(format!("{:#010x}!", 27), "0x0000001b!"); +//! ``` +//! +//! These are all flags altering the behavior of the formatter. +//! +//! * `+` - This is intended for numeric types and indicates that the sign +//! should always be printed. Positive signs are never printed by +//! default, and the negative sign is only printed by default for the +//! `Signed` trait. This flag indicates that the correct sign (`+` or `-`) +//! should always be printed. +//! * `-` - Currently not used +//! * `#` - This flag is indicates that the "alternate" form of printing should +//! be used. The alternate forms are: +//! * `#?` - pretty-print the [`Debug`] formatting +//! * `#x` - precedes the argument with a `0x` +//! * `#X` - precedes the argument with a `0x` +//! * `#b` - precedes the argument with a `0b` +//! * `#o` - precedes the argument with a `0o` +//! * `0` - This is used to indicate for integer formats that the padding to `width` should +//! both be done with a `0` character as well as be sign-aware. A format +//! like `{:08}` would yield `00000001` for the integer `1`, while the +//! same format would yield `-0000001` for the integer `-1`. Notice that +//! the negative version has one fewer zero than the positive version. +//! Note that padding zeroes are always placed after the sign (if any) +//! and before the digits. When used together with the `#` flag, a similar +//! rule applies: padding zeroes are inserted after the prefix but before +//! the digits. The prefix is included in the total width. +//! +//! ## Precision +//! +//! For non-numeric types, this can be considered a "maximum width". If the resulting string is +//! longer than this width, then it is truncated down to this many characters and that truncated +//! value is emitted with proper `fill`, `alignment` and `width` if those parameters are set. +//! +//! For integral types, this is ignored. +//! +//! For floating-point types, this indicates how many digits after the decimal point should be +//! printed. +//! +//! There are three possible ways to specify the desired `precision`: +//! +//! 1. An integer `.N`: +//! +//! the integer `N` itself is the precision. +//! +//! 2. An integer or name followed by dollar sign `.N$`: +//! +//! use format *argument* `N` (which must be a `usize`) as the precision. +//! +//! 3. An asterisk `.*`: +//! +//! `.*` means that this `{...}` is associated with *two* format inputs rather than one: the +//! first input holds the `usize` precision, and the second holds the value to print. Note that +//! in this case, if one uses the format string `{:.*}`, then the `` part refers +//! to the *value* to print, and the `precision` must come in the input preceding ``. +//! +//! For example, the following calls all print the same thing `Hello x is 0.01000`: +//! +//! ``` +//! // Hello {arg 0 ("x")} is {arg 1 (0.01) with precision specified inline (5)} +//! println!("Hello {0} is {1:.5}", "x", 0.01); +//! +//! // Hello {arg 1 ("x")} is {arg 2 (0.01) with precision specified in arg 0 (5)} +//! println!("Hello {1} is {2:.0$}", 5, "x", 0.01); +//! +//! // Hello {arg 0 ("x")} is {arg 2 (0.01) with precision specified in arg 1 (5)} +//! println!("Hello {0} is {2:.1$}", "x", 5, 0.01); +//! +//! // Hello {next arg ("x")} is {second of next two args (0.01) with precision +//! // specified in first of next two args (5)} +//! println!("Hello {} is {:.*}", "x", 5, 0.01); +//! +//! 
// Hello {next arg ("x")} is {arg 2 (0.01) with precision +//! // specified in its predecessor (5)} +//! println!("Hello {} is {2:.*}", "x", 5, 0.01); +//! +//! // Hello {next arg ("x")} is {arg "number" (0.01) with precision specified +//! // in arg "prec" (5)} +//! println!("Hello {} is {number:.prec$}", "x", prec = 5, number = 0.01); +//! ``` +//! +//! While these: +//! +//! ``` +//! println!("{}, `{name:.*}` has 3 fractional digits", "Hello", 3, name=1234.56); +//! println!("{}, `{name:.*}` has 3 characters", "Hello", 3, name="1234.56"); +//! println!("{}, `{name:>8.*}` has 3 right-aligned characters", "Hello", 3, name="1234.56"); +//! ``` +//! +//! print two significantly different things: +//! +//! ```text +//! Hello, `1234.560` has 3 fractional digits +//! Hello, `123` has 3 characters +//! Hello, ` 123` has 3 right-aligned characters +//! ``` +//! +//! # Escaping +//! +//! The literal characters `{` and `}` may be included in a string by preceding +//! them with the same character. For example, the `{` character is escaped with +//! `{{` and the `}` character is escaped with `}}`. +//! +//! ``` +//! assert_eq!(format!("Hello {{}}"), "Hello {}"); +//! assert_eq!(format!("{{ Hello"), "{ Hello"); +//! ``` +//! +//! # Syntax +//! +//! To summarize, here you can find the full grammar of format strings. +//! The syntax for the formatting language used is drawn from other languages, +//! so it should not be too alien. Arguments are formatted with Python-like +//! syntax, meaning that arguments are surrounded by `{}` instead of the C-like +//! `%`. The actual grammar for the formatting syntax is: +//! +//! ```text +//! format_string := [ maybe-format ] * +//! maybe-format := '{' '{' | '}' '}' | +//! format := '{' [ argument ] [ ':' format_spec ] '}' +//! argument := integer | identifier +//! +//! format_spec := [[fill]align][sign]['#']['0'][width]['.' precision][type] +//! fill := character +//! align := '<' | '^' | '>' +//! sign := '+' | '-' +//! width := count +//! precision := count | '*' +//! type := identifier | '?' | '' +//! count := parameter | integer +//! parameter := argument '$' +//! ``` +//! +//! # Formatting traits //! //! When requesting that an argument be formatted with a particular type, you //! are actually requesting that an argument ascribes to a particular trait. @@ -220,7 +406,7 @@ //! assert_eq!(format!("{} {:?}", "foo\n", "bar\n"), "foo\n \"bar\\n\""); //! ``` //! -//! ## Related macros +//! # Related macros //! //! There are a number of related macros in the [`format!`] family. The ones that //! are currently implemented are: @@ -300,185 +486,6 @@ //! it would internally pass around this structure until it has been determined //! where output should go to. //! -//! # Syntax -//! -//! The syntax for the formatting language used is drawn from other languages, -//! so it should not be too alien. Arguments are formatted with Python-like -//! syntax, meaning that arguments are surrounded by `{}` instead of the C-like -//! `%`. The actual grammar for the formatting syntax is: -//! -//! ```text -//! format_string := [ maybe-format ] * -//! maybe-format := '{' '{' | '}' '}' | -//! format := '{' [ argument ] [ ':' format_spec ] '}' -//! argument := integer | identifier -//! -//! format_spec := [[fill]align][sign]['#']['0'][width]['.' precision][type] -//! fill := character -//! align := '<' | '^' | '>' -//! sign := '+' | '-' -//! width := count -//! precision := count | '*' -//! type := identifier | '?' | '' -//! count := parameter | integer -//! 
parameter := argument '$' -//! ``` -//! -//! # Formatting Parameters -//! -//! Each argument being formatted can be transformed by a number of formatting -//! parameters (corresponding to `format_spec` in the syntax above). These -//! parameters affect the string representation of what's being formatted. -//! -//! ## Fill/Alignment -//! -//! The fill character is provided normally in conjunction with the -//! [`width`](#width) -//! parameter. This indicates that if the value being formatted is smaller than -//! `width` some extra characters will be printed around it. The extra -//! characters are specified by `fill`, and the alignment can be one of the -//! following options: -//! -//! * `<` - the argument is left-aligned in `width` columns -//! * `^` - the argument is center-aligned in `width` columns -//! * `>` - the argument is right-aligned in `width` columns -//! -//! Note that alignment may not be implemented by some types. In particular, it -//! is not generally implemented for the `Debug` trait. A good way to ensure -//! padding is applied is to format your input, then use this resulting string -//! to pad your output. -//! -//! ## Sign/`#`/`0` -//! -//! These can all be interpreted as flags for a particular formatter. -//! -//! * `+` - This is intended for numeric types and indicates that the sign -//! should always be printed. Positive signs are never printed by -//! default, and the negative sign is only printed by default for the -//! `Signed` trait. This flag indicates that the correct sign (`+` or `-`) -//! should always be printed. -//! * `-` - Currently not used -//! * `#` - This flag is indicates that the "alternate" form of printing should -//! be used. The alternate forms are: -//! * `#?` - pretty-print the [`Debug`] formatting -//! * `#x` - precedes the argument with a `0x` -//! * `#X` - precedes the argument with a `0x` -//! * `#b` - precedes the argument with a `0b` -//! * `#o` - precedes the argument with a `0o` -//! * `0` - This is used to indicate for integer formats that the padding should -//! both be done with a `0` character as well as be sign-aware. A format -//! like `{:08}` would yield `00000001` for the integer `1`, while the -//! same format would yield `-0000001` for the integer `-1`. Notice that -//! the negative version has one fewer zero than the positive version. -//! Note that padding zeroes are always placed after the sign (if any) -//! and before the digits. When used together with the `#` flag, a similar -//! rule applies: padding zeroes are inserted after the prefix but before -//! the digits. -//! -//! ## Width -//! -//! This is a parameter for the "minimum width" that the format should take up. -//! If the value's string does not fill up this many characters, then the -//! padding specified by fill/alignment will be used to take up the required -//! space. -//! -//! The default [fill/alignment](#fillalignment) for non-numerics is a space and -//! left-aligned. The -//! defaults for numeric formatters is also a space but with right-alignment. If -//! the `0` flag is specified for numerics, then the implicit fill character is -//! `0`. -//! -//! The value for the width can also be provided as a [`usize`] in the list of -//! parameters by using the dollar syntax indicating that the second argument is -//! a [`usize`] specifying the width, for example: -//! -//! ``` -//! // All of these print "Hello x !" -//! println!("Hello {:5}!", "x"); -//! println!("Hello {:1$}!", "x", 5); -//! println!("Hello {1:0$}!", 5, "x"); -//! 
println!("Hello {:width$}!", "x", width = 5); -//! ``` -//! -//! Referring to an argument with the dollar syntax does not affect the "next -//! argument" counter, so it's usually a good idea to refer to arguments by -//! position, or use named arguments. -//! -//! ## Precision -//! -//! For non-numeric types, this can be considered a "maximum width". If the resulting string is -//! longer than this width, then it is truncated down to this many characters and that truncated -//! value is emitted with proper `fill`, `alignment` and `width` if those parameters are set. -//! -//! For integral types, this is ignored. -//! -//! For floating-point types, this indicates how many digits after the decimal point should be -//! printed. -//! -//! There are three possible ways to specify the desired `precision`: -//! -//! 1. An integer `.N`: -//! -//! the integer `N` itself is the precision. -//! -//! 2. An integer or name followed by dollar sign `.N$`: -//! -//! use format *argument* `N` (which must be a `usize`) as the precision. -//! -//! 3. An asterisk `.*`: -//! -//! `.*` means that this `{...}` is associated with *two* format inputs rather than one: the -//! first input holds the `usize` precision, and the second holds the value to print. Note that -//! in this case, if one uses the format string `{:.*}`, then the `` part refers -//! to the *value* to print, and the `precision` must come in the input preceding ``. -//! -//! For example, the following calls all print the same thing `Hello x is 0.01000`: -//! -//! ``` -//! // Hello {arg 0 ("x")} is {arg 1 (0.01) with precision specified inline (5)} -//! println!("Hello {0} is {1:.5}", "x", 0.01); -//! -//! // Hello {arg 1 ("x")} is {arg 2 (0.01) with precision specified in arg 0 (5)} -//! println!("Hello {1} is {2:.0$}", 5, "x", 0.01); -//! -//! // Hello {arg 0 ("x")} is {arg 2 (0.01) with precision specified in arg 1 (5)} -//! println!("Hello {0} is {2:.1$}", "x", 5, 0.01); -//! -//! // Hello {next arg ("x")} is {second of next two args (0.01) with precision -//! // specified in first of next two args (5)} -//! println!("Hello {} is {:.*}", "x", 5, 0.01); -//! -//! // Hello {next arg ("x")} is {arg 2 (0.01) with precision -//! // specified in its predecessor (5)} -//! println!("Hello {} is {2:.*}", "x", 5, 0.01); -//! -//! // Hello {next arg ("x")} is {arg "number" (0.01) with precision specified -//! // in arg "prec" (5)} -//! println!("Hello {} is {number:.prec$}", "x", prec = 5, number = 0.01); -//! ``` -//! -//! While these: -//! -//! ``` -//! println!("{}, `{name:.*}` has 3 fractional digits", "Hello", 3, name=1234.56); -//! println!("{}, `{name:.*}` has 3 characters", "Hello", 3, name="1234.56"); -//! println!("{}, `{name:>8.*}` has 3 right-aligned characters", "Hello", 3, name="1234.56"); -//! ``` -//! -//! print two significantly different things: -//! -//! ```text -//! Hello, `1234.560` has 3 fractional digits -//! Hello, `123` has 3 characters -//! Hello, ` 123` has 3 right-aligned characters -//! ``` -//! -//! # Escaping -//! -//! The literal characters `{` and `}` may be included in a string by preceding -//! them with the same character. For example, the `{` character is escaped with -//! `{{` and the `}` character is escaped with `}}`. -//! //! [`usize`]: ../../std/primitive.usize.html //! [`isize`]: ../../std/primitive.isize.html //! 
[`i8`]: ../../std/primitive.i8.html diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs index 9e6ed92ffb..94379afc2b 100644 --- a/src/liballoc/lib.rs +++ b/src/liballoc/lib.rs @@ -85,6 +85,7 @@ #![feature(const_generic_impls_guard)] #![feature(const_generics)] #![feature(const_in_array_repeat_expressions)] +#![feature(cow_is_borrowed)] #![feature(dispatch_from_dyn)] #![feature(core_intrinsics)] #![feature(container_error_extra)] @@ -117,12 +118,10 @@ #![feature(allocator_internals)] #![feature(on_unimplemented)] #![feature(rustc_const_unstable)] -#![cfg_attr(bootstrap, feature(const_vec_new))] #![feature(slice_partition_dedup)] #![feature(maybe_uninit_extra, maybe_uninit_slice)] #![feature(alloc_layout_extra)] #![feature(try_trait)] -#![feature(mem_take)] #![feature(associated_type_bounds)] // Allow testing this library @@ -155,7 +154,7 @@ mod boxed { #[cfg(test)] mod tests; pub mod collections; -#[cfg(all(target_has_atomic = "ptr", target_has_atomic = "cas"))] +#[cfg(target_has_atomic = "ptr")] pub mod sync; pub mod rc; pub mod raw_vec; diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index f234ac5ebe..f1c4c32e11 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -3,8 +3,9 @@ //! //! The type [`Rc`][`Rc`] provides shared ownership of a value of type `T`, //! allocated in the heap. Invoking [`clone`][clone] on [`Rc`] produces a new -//! pointer to the same value in the heap. When the last [`Rc`] pointer to a -//! given value is destroyed, the pointed-to value is also destroyed. +//! pointer to the same allocation in the heap. When the last [`Rc`] pointer to a +//! given allocation is destroyed, the value stored in that allocation (often +//! referred to as "inner value") is also dropped. //! //! Shared references in Rust disallow mutation by default, and [`Rc`] //! is no exception: you cannot generally obtain a mutable reference to @@ -21,8 +22,10 @@ //! //! The [`downgrade`][downgrade] method can be used to create a non-owning //! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d -//! to an [`Rc`], but this will return [`None`] if the value has -//! already been dropped. +//! to an [`Rc`], but this will return [`None`] if the value stored in the allocation has +//! already been dropped. In other words, `Weak` pointers do not keep the value +//! inside the allocation alive; however, they *do* keep the allocation +//! (the backing store for the inner value) alive. //! //! A cycle between [`Rc`] pointers will never be deallocated. For this reason, //! [`Weak`] is used to break cycles. For example, a tree could have strong @@ -41,13 +44,13 @@ //! Rc::downgrade(&my_rc); //! ``` //! -//! [`Weak`][`Weak`] does not auto-dereference to `T`, because the value may have -//! already been destroyed. +//! [`Weak`][`Weak`] does not auto-dereference to `T`, because the inner value may have +//! already been dropped. //! //! # Cloning references //! -//! Creating a new reference from an existing reference counted pointer is done using the -//! `Clone` trait implemented for [`Rc`][`Rc`] and [`Weak`][`Weak`]. +//! Creating a new reference to the same allocation as an existing reference counted pointer +//! is done using the `Clone` trait implemented for [`Rc`][`Rc`] and [`Weak`][`Weak`]. //! //! ``` //! use std::rc::Rc; @@ -93,7 +96,7 @@ //! ); //! //! // Create `Gadget`s belonging to `gadget_owner`. Cloning the `Rc` -//! // value gives us a new pointer to the same `Owner` value, incrementing +//! // gives us a new pointer to the same `Owner` allocation, incrementing //! 
// the reference count in the process. //! let gadget1 = Gadget { //! id: 1, @@ -110,8 +113,8 @@ //! // Despite dropping `gadget_owner`, we're still able to print out the name //! // of the `Owner` of the `Gadget`s. This is because we've only dropped a //! // single `Rc`, not the `Owner` it points to. As long as there are -//! // other `Rc` values pointing at the same `Owner`, it will remain -//! // allocated. The field projection `gadget1.owner.name` works because +//! // other `Rc` pointing at the same `Owner` allocation, it will remain +//! // live. The field projection `gadget1.owner.name` works because //! // `Rc` automatically dereferences to `Owner`. //! println!("Gadget {} owned by {}", gadget1.id, gadget1.owner.name); //! println!("Gadget {} owned by {}", gadget2.id, gadget2.owner.name); @@ -124,9 +127,9 @@ //! //! If our requirements change, and we also need to be able to traverse from //! `Owner` to `Gadget`, we will run into problems. An [`Rc`] pointer from `Owner` -//! to `Gadget` introduces a cycle between the values. This means that their -//! reference counts can never reach 0, and the values will remain allocated -//! forever: a memory leak. In order to get around this, we can use [`Weak`] +//! to `Gadget` introduces a cycle. This means that their +//! reference counts can never reach 0, and the allocation will never be destroyed: +//! a memory leak. In order to get around this, we can use [`Weak`] //! pointers. //! //! Rust actually makes it somewhat difficult to produce this loop in the first @@ -193,10 +196,10 @@ //! for gadget_weak in gadget_owner.gadgets.borrow().iter() { //! //! // `gadget_weak` is a `Weak`. Since `Weak` pointers can't -//! // guarantee the value is still allocated, we need to call +//! // guarantee the allocation still exists, we need to call //! // `upgrade`, which returns an `Option>`. //! // -//! // In this case we know the value still exists, so we simply +//! // In this case we know the allocation still exists, so we simply //! // `unwrap` the `Option`. In a more complicated program, you might //! // need graceful error handling for a `None` result. //! @@ -365,7 +368,7 @@ impl Rc { unsafe { Pin::new_unchecked(Rc::new(value)) } } - /// Returns the contained value, if the `Rc` has exactly one strong reference. + /// Returns the inner value, if the `Rc` has exactly one strong reference. /// /// Otherwise, an [`Err`][result] is returned with the same `Rc` that was /// passed in. @@ -446,7 +449,7 @@ impl Rc> { /// # Safety /// /// As with [`MaybeUninit::assume_init`], - /// it is up to the caller to guarantee that the value + /// it is up to the caller to guarantee that the inner value /// really is in an initialized state. /// Calling this when the content is not yet fully initialized /// causes immediate undefined behavior. @@ -485,7 +488,7 @@ impl Rc<[mem::MaybeUninit]> { /// # Safety /// /// As with [`MaybeUninit::assume_init`], - /// it is up to the caller to guarantee that the value + /// it is up to the caller to guarantee that the inner value /// really is in an initialized state. /// Calling this when the content is not yet fully initialized /// causes immediate undefined behavior. @@ -604,7 +607,7 @@ impl Rc { unsafe { NonNull::new_unchecked(Rc::into_raw(this) as *mut _) } } - /// Creates a new [`Weak`][weak] pointer to this value. + /// Creates a new [`Weak`][weak] pointer to this allocation. 
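The reworded docs separate the allocation from the inner value: a `Weak` keeps the allocation (and its reference counts) alive, but not the value stored in it. A short sketch of the observable consequence, using an arbitrary `String` value:

```rust
use std::rc::Rc;

let strong = Rc::new("hello".to_string());
let weak = Rc::downgrade(&strong);

// While a strong pointer exists, the weak one can still be upgraded.
assert!(weak.upgrade().is_some());

// Dropping the last `Rc` drops the inner String immediately; the weak
// pointer only keeps the backing allocation alive, so upgrading fails.
drop(strong);
assert!(weak.upgrade().is_none());
```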
/// /// [weak]: struct.Weak.html /// @@ -625,7 +628,7 @@ impl Rc { Weak { ptr: this.ptr } } - /// Gets the number of [`Weak`][weak] pointers to this value. + /// Gets the number of [`Weak`][weak] pointers to this allocation. /// /// [weak]: struct.Weak.html /// @@ -645,7 +648,7 @@ impl Rc { this.weak() - 1 } - /// Gets the number of strong (`Rc`) pointers to this value. + /// Gets the number of strong (`Rc`) pointers to this allocation. /// /// # Examples /// @@ -664,7 +667,7 @@ impl Rc { } /// Returns `true` if there are no other `Rc` or [`Weak`][weak] pointers to - /// this inner value. + /// this allocation. /// /// [weak]: struct.Weak.html #[inline] @@ -672,14 +675,14 @@ impl Rc { Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1 } - /// Returns a mutable reference to the inner value, if there are - /// no other `Rc` or [`Weak`][weak] pointers to the same value. + /// Returns a mutable reference into the given `Rc`, if there are + /// no other `Rc` or [`Weak`][weak] pointers to the same allocation. /// /// Returns [`None`] otherwise, because it is not safe to /// mutate a shared value. /// /// See also [`make_mut`][make_mut], which will [`clone`][clone] - /// the inner value when it's shared. + /// the inner value when there are other pointers. /// /// [weak]: struct.Weak.html /// [`None`]: ../../std/option/enum.Option.html#variant.None @@ -710,7 +713,7 @@ impl Rc { } } - /// Returns a mutable reference to the inner value, + /// Returns a mutable reference into the given `Rc`, /// without any check. /// /// See also [`get_mut`], which is safe and does appropriate checks. @@ -719,7 +722,7 @@ impl Rc { /// /// # Safety /// - /// Any other `Rc` or [`Weak`] pointers to the same value must not be dereferenced + /// Any other `Rc` or [`Weak`] pointers to the same allocation must not be dereferenced /// for the duration of the returned borrow. /// This is trivially the case if no such pointers exist, /// for example immediately after `Rc::new`. @@ -745,8 +748,8 @@ impl Rc { #[inline] #[stable(feature = "ptr_eq", since = "1.17.0")] - /// Returns `true` if the two `Rc`s point to the same value (not - /// just values that compare as equal). + /// Returns `true` if the two `Rc`s point to the same allocation + /// (in a vein similar to [`ptr::eq`]). /// /// # Examples /// @@ -760,6 +763,8 @@ impl Rc { /// assert!(Rc::ptr_eq(&five, &same_five)); /// assert!(!Rc::ptr_eq(&five, &other_five)); /// ``` + /// + /// [`ptr::eq`]: ../../std/ptr/fn.eq.html pub fn ptr_eq(this: &Self, other: &Self) -> bool { this.ptr.as_ptr() == other.ptr.as_ptr() } @@ -768,12 +773,12 @@ impl Rc { impl Rc { /// Makes a mutable reference into the given `Rc`. /// - /// If there are other `Rc` pointers to the same value, then `make_mut` will - /// [`clone`] the inner value to ensure unique ownership. This is also + /// If there are other `Rc` pointers to the same allocation, then `make_mut` will + /// [`clone`] the inner value to a new allocation to ensure unique ownership. This is also /// referred to as clone-on-write. /// - /// If there are no other `Rc` pointers to this value, then [`Weak`] - /// pointers to this value will be dissassociated. + /// If there are no other `Rc` pointers to this allocation, then [`Weak`] + /// pointers to this allocation will be disassociated. /// /// See also [`get_mut`], which will fail rather than cloning. 
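A compact illustration of the `get_mut`/`make_mut` contract these docs describe, using an arbitrary integer value:

```rust
use std::rc::Rc;

let mut data = Rc::new(5);
// Unique allocation: a mutable reference is handed out directly.
*Rc::get_mut(&mut data).unwrap() += 1;
assert_eq!(*data, 6);

let other = Rc::clone(&data);
// Shared allocation: `get_mut` refuses...
assert!(Rc::get_mut(&mut data).is_none());
// ...while `make_mut` clones the value into a fresh allocation.
*Rc::make_mut(&mut data) += 1;
assert_eq!(*data, 7);
assert_eq!(*other, 6);
assert!(!Rc::ptr_eq(&data, &other));
```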
/// @@ -794,12 +799,12 @@ impl Rc { /// *Rc::make_mut(&mut data) += 1; // Won't clone anything /// *Rc::make_mut(&mut other_data) *= 2; // Won't clone anything /// - /// // Now `data` and `other_data` point to different values. + /// // Now `data` and `other_data` point to different allocations. /// assert_eq!(*data, 8); /// assert_eq!(*other_data, 12); /// ``` /// - /// [`Weak`] pointers will be dissassociated: + /// [`Weak`] pointers will be disassociated: /// /// ``` /// use std::rc::Rc; @@ -837,7 +842,7 @@ impl Rc { // returned is the *only* pointer that will ever be returned to T. Our // reference count is guaranteed to be 1 at this point, and we required // the `Rc` itself to be `mut`, so we're returning the only possible - // reference to the inner value. + // reference to the allocation. unsafe { &mut this.ptr.as_mut().value } @@ -861,11 +866,9 @@ impl Rc { /// } /// } /// - /// fn main() { - /// let my_string = "Hello World".to_string(); - /// print_if_string(Rc::new(my_string)); - /// print_if_string(Rc::new(0i8)); - /// } + /// let my_string = "Hello World".to_string(); + /// print_if_string(Rc::new(my_string)); + /// print_if_string(Rc::new(0i8)); /// ``` pub fn downcast(self) -> Result, Rc> { if (*self).is::() { @@ -880,7 +883,7 @@ impl Rc { impl Rc { /// Allocates an `RcBox` with sufficient space for - /// a possibly-unsized value where the value has the layout provided. + /// a possibly-unsized inner value where the value has the layout provided. /// /// The function `mem_to_rcbox` is called with the data pointer /// and must return back a (potentially fat)-pointer for the `RcBox`. @@ -910,7 +913,7 @@ impl Rc { inner } - /// Allocates an `RcBox` with sufficient space for an unsized value + /// Allocates an `RcBox` with sufficient space for an unsized inner value unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox { // Allocate for the `RcBox` using the given value. Self::allocate_for_layout( @@ -1113,7 +1116,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc { impl Clone for Rc { /// Makes a clone of the `Rc` pointer. /// - /// This creates another pointer to the same inner value, increasing the + /// This creates another pointer to the same allocation, increasing the /// strong reference count. /// /// # Examples @@ -1174,6 +1177,8 @@ impl RcEqIdent for Rc { /// store large values, that are slow to clone, but also heavy to check for equality, causing this /// cost to pay off more easily. It's also more likely to have two `Rc` clones, that point to /// the same value, than two `&T`s. +/// +/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive. #[stable(feature = "rust1", since = "1.0.0")] impl RcEqIdent for Rc { #[inline] @@ -1191,9 +1196,11 @@ impl RcEqIdent for Rc { impl PartialEq for Rc { /// Equality for two `Rc`s. /// - /// Two `Rc`s are equal if their inner values are equal. + /// Two `Rc`s are equal if their inner values are equal, even if they are + /// stored in different allocation. /// - /// If `T` also implements `Eq`, two `Rc`s that point to the same value are + /// If `T` also implements `Eq` (implying reflexivity of equality), + /// two `Rc`s that point to the same allocation are /// always equal. /// /// # Examples @@ -1214,7 +1221,8 @@ impl PartialEq for Rc { /// /// Two `Rc`s are unequal if their inner values are unequal. 
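The new `RcEqIdent` comment explains why the pointer-identity shortcut requires `T: Eq`: a `PartialEq` may be deliberately irreflexive, in which case two pointers to the same allocation must still compare their values. A sketch of the distinction, with arbitrary values:

```rust
use std::rc::Rc;

let a = Rc::new(vec![1, 2, 3]);
let b = Rc::new(vec![1, 2, 3]); // equal value, separate allocation
let c = Rc::clone(&a);          // same allocation as `a`

assert_eq!(a, b);             // `==` compares the inner values
assert!(!Rc::ptr_eq(&a, &b)); // ...even though the allocations differ
assert!(Rc::ptr_eq(&a, &c));

// With an irreflexive `PartialEq` (no `Eq`), pointer identity proves nothing:
let nan = Rc::new(std::f64::NAN);
assert_ne!(nan, Rc::clone(&nan));
```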
/// - /// If `T` also implements `Eq`, two `Rc`s that point to the same value are + /// If `T` also implements `Eq` (implying reflexivity of equality), + /// two `Rc`s that point to the same allocation are /// never unequal. /// /// # Examples @@ -1543,17 +1551,18 @@ impl<'a, T: 'a + Clone> RcFromIter<&'a T, slice::Iter<'a, T>> for Rc<[T]> { } /// `Weak` is a version of [`Rc`] that holds a non-owning reference to the -/// managed value. The value is accessed by calling [`upgrade`] on the `Weak` +/// managed allocation. The allocation is accessed by calling [`upgrade`] on the `Weak` /// pointer, which returns an [`Option`]`<`[`Rc`]`>`. /// /// Since a `Weak` reference does not count towards ownership, it will not -/// prevent the inner value from being dropped, and `Weak` itself makes no -/// guarantees about the value still being present and may return [`None`] -/// when [`upgrade`]d. +/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no +/// guarantees about the value still being present. Thus it may return [`None`] +/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation +/// itself (the backing store) from being deallocated. /// -/// A `Weak` pointer is useful for keeping a temporary reference to the value -/// within [`Rc`] without extending its lifetime. It is also used to prevent -/// circular references between [`Rc`] pointers, since mutual owning references +/// A `Weak` pointer is useful for keeping a temporary reference to the allocation +/// managed by [`Rc`] without preventing its inner value from being dropped. It is also used to +/// prevent circular references between [`Rc`] pointers, since mutual owning references /// would never allow either [`Rc`] to be dropped. For example, a tree could /// have strong [`Rc`] pointers from parent nodes to children, and `Weak` /// pointers from children back to their parents. @@ -1752,10 +1761,10 @@ pub(crate) fn is_dangling(ptr: NonNull) -> bool { } impl Weak { - /// Attempts to upgrade the `Weak` pointer to an [`Rc`], extending - /// the lifetime of the value if successful. + /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying + /// dropping of the inner value if successful. /// - /// Returns [`None`] if the value has since been dropped. + /// Returns [`None`] if the inner value has since been dropped. /// /// [`Rc`]: struct.Rc.html /// [`None`]: ../../std/option/enum.Option.html @@ -1789,7 +1798,7 @@ impl Weak { } } - /// Gets the number of strong (`Rc`) pointers pointing to this value. + /// Gets the number of strong (`Rc`) pointers pointing to this allocation. /// /// If `self` was created using [`Weak::new`], this will return 0. /// @@ -1803,11 +1812,11 @@ impl Weak { } } - /// Gets the number of `Weak` pointers pointing to this value. + /// Gets the number of `Weak` pointers pointing to this allocation. /// /// If `self` was created using [`Weak::new`], this will return `None`. If /// not, the returned value is at least 1, since `self` still points to the - /// value. + /// allocation. 
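These `Weak` docs distinguish a dangling `Weak::new()` from one downgraded out of a live `Rc`. A brief sketch using only the stable counters on `Rc` (the accessors on `Weak` itself are still unstable at this point):

```rust
use std::rc::{Rc, Weak};

// A `Weak::new()` points at no allocation at all and can never upgrade.
let dangling: Weak<i32> = Weak::new();
assert!(dangling.upgrade().is_none());

// A downgraded `Weak` shares the counts stored in the Rc's allocation.
let rc = Rc::new(1);
let weak = Rc::downgrade(&rc);
assert_eq!(Rc::strong_count(&rc), 1);
assert_eq!(Rc::weak_count(&rc), 1);

// Upgrading hands back a strong pointer and bumps the strong count.
let upgraded = weak.upgrade().unwrap();
assert_eq!(Rc::strong_count(&rc), 2);
drop(upgraded);
assert_eq!(Rc::strong_count(&rc), 1);
```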
/// /// [`Weak::new`]: #method.new #[unstable(feature = "weak_counts", issue = "57977")] @@ -1832,14 +1841,14 @@ impl Weak { } } - /// Returns `true` if the two `Weak`s point to the same value (not just - /// values that compare as equal), or if both don't point to any value + /// Returns `true` if the two `Weak`s point to the same allocation (similar to + /// [`ptr::eq`]), or if both don't point to any allocation /// (because they were created with `Weak::new()`). /// /// # Notes /// /// Since this compares pointers it means that `Weak::new()` will equal each - /// other, even though they don't point to any value. + /// other, even though they don't point to any allocation. /// /// # Examples /// @@ -1871,6 +1880,8 @@ impl Weak { /// let third = Rc::downgrade(&third_rc); /// assert!(!first.ptr_eq(&third)); /// ``` + /// + /// [`ptr::eq`]: ../../std/ptr/fn.eq.html #[inline] #[stable(feature = "weak_ptr_eq", since = "1.39.0")] pub fn ptr_eq(&self, other: &Self) -> bool { @@ -1920,7 +1931,7 @@ impl Drop for Weak { #[stable(feature = "rc_weak", since = "1.4.0")] impl Clone for Weak { - /// Makes a clone of the `Weak` pointer that points to the same value. + /// Makes a clone of the `Weak` pointer that points to the same allocation. /// /// # Examples /// diff --git a/src/liballoc/slice.rs b/src/liballoc/slice.rs index 881d499c07..08243ef7c5 100644 --- a/src/liballoc/slice.rs +++ b/src/liballoc/slice.rs @@ -411,25 +411,16 @@ impl [T] { /// Basic usage: /// /// ``` - /// #![feature(repeat_generic_slice)] - /// - /// fn main() { - /// assert_eq!([1, 2].repeat(3), vec![1, 2, 1, 2, 1, 2]); - /// } + /// assert_eq!([1, 2].repeat(3), vec![1, 2, 1, 2, 1, 2]); /// ``` /// /// A panic upon overflow: /// /// ```should_panic - /// #![feature(repeat_generic_slice)] - /// fn main() { - /// // this will panic at runtime - /// b"0123456789abcdef".repeat(usize::max_value()); - /// } + /// // this will panic at runtime + /// b"0123456789abcdef".repeat(usize::max_value()); /// ``` - #[unstable(feature = "repeat_generic_slice", - reason = "it's on str, why not on slice?", - issue = "48784")] + #[stable(feature = "repeat_generic_slice", since = "1.40.0")] pub fn repeat(&self, n: usize) -> Vec where T: Copy { if n == 0 { return Vec::new(); diff --git a/src/liballoc/str.rs b/src/liballoc/str.rs index 9a1342c30d..83816d8b95 100644 --- a/src/liballoc/str.rs +++ b/src/liballoc/str.rs @@ -456,7 +456,7 @@ impl str { } } } - return s; + s } /// Converts a [`Box`] into a [`String`] without copying or allocating. @@ -500,10 +500,8 @@ impl str { /// A panic upon overflow: /// /// ```should_panic - /// fn main() { - /// // this will panic at runtime - /// "0123456789abcdef".repeat(usize::max_value()); - /// } + /// // this will panic at runtime + /// "0123456789abcdef".repeat(usize::max_value()); /// ``` #[stable(feature = "repeat_str", since = "1.16.0")] pub fn repeat(&self, n: usize) -> String { diff --git a/src/liballoc/string.rs b/src/liballoc/string.rs index 1166e7b5df..d9927c642b 100644 --- a/src/liballoc/string.rs +++ b/src/liballoc/string.rs @@ -164,10 +164,8 @@ use crate::vec::Vec; /// /// fn example_func(example_arg: A) {} /// -/// fn main() { -/// let example_string = String::from("example_string"); -/// example_func(&example_string); -/// } +/// let example_string = String::from("example_string"); +/// example_func(&example_string); /// ``` /// /// There are two options that would work instead. 
The first would be to @@ -198,20 +196,21 @@ use crate::vec::Vec; /// /// let story = String::from("Once upon a time..."); /// -/// let ptr = story.as_ptr(); +// FIXME Update this when vec_into_raw_parts is stabilized +/// // Prevent automatically dropping the String's data +/// let mut story = mem::ManuallyDrop::new(story); +/// +/// let ptr = story.as_mut_ptr(); /// let len = story.len(); /// let capacity = story.capacity(); /// /// // story has nineteen bytes /// assert_eq!(19, len); /// -/// // Now that we have our parts, we throw the story away. -/// mem::forget(story); -/// /// // We can re-build a String out of ptr, len, and capacity. This is all /// // unsafe because we are responsible for making sure the components are /// // valid: -/// let s = unsafe { String::from_raw_parts(ptr as *mut _, len, capacity) } ; +/// let s = unsafe { String::from_raw_parts(ptr, len, capacity) } ; /// /// assert_eq!(String::from("Once upon a time..."), s); /// ``` @@ -369,7 +368,6 @@ impl String { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - #[cfg_attr(bootstrap, rustc_const_unstable(feature = "const_string_new"))] pub const fn new() -> String { String { vec: Vec::new() } } @@ -429,7 +427,7 @@ impl String { /// Converts a vector of bytes to a `String`. /// - /// A string slice ([`&str`]) is made of bytes ([`u8`]), and a vector of bytes + /// A string ([`String`]) is made of bytes ([`u8`]), and a vector of bytes /// ([`Vec`]) is made of bytes, so this function converts between the /// two. Not all byte slices are valid `String`s, however: `String` /// requires that it is valid UTF-8. `from_utf8()` checks to ensure that @@ -446,7 +444,7 @@ impl String { /// If you need a [`&str`] instead of a `String`, consider /// [`str::from_utf8`]. /// - /// The inverse of this method is [`as_bytes`]. + /// The inverse of this method is [`into_bytes`]. /// /// # Errors /// @@ -480,11 +478,11 @@ impl String { /// with this error. /// /// [`from_utf8_unchecked`]: struct.String.html#method.from_utf8_unchecked - /// [`&str`]: ../../std/primitive.str.html + /// [`String`]: struct.String.html /// [`u8`]: ../../std/primitive.u8.html /// [`Vec`]: ../../std/vec/struct.Vec.html /// [`str::from_utf8`]: ../../std/str/fn.from_utf8.html - /// [`as_bytes`]: struct.String.html#method.as_bytes + /// [`into_bytes`]: struct.String.html#method.into_bytes /// [`FromUtf8Error`]: struct.FromUtf8Error.html /// [`Err`]: ../../std/result/enum.Result.html#variant.Err #[inline] @@ -650,6 +648,37 @@ impl String { decode_utf16(v.iter().cloned()).map(|r| r.unwrap_or(REPLACEMENT_CHARACTER)).collect() } + /// Decomposes a `String` into its raw components. + /// + /// Returns the raw pointer to the underlying data, the length of + /// the string (in bytes), and the allocated capacity of the data + /// (in bytes). These are the same arguments in the same order as + /// the arguments to [`from_raw_parts`]. + /// + /// After calling this function, the caller is responsible for the + /// memory previously managed by the `String`. The only way to do + /// this is to convert the raw pointer, length, and capacity back + /// into a `String` with the [`from_raw_parts`] function, allowing + /// the destructor to perform the cleanup. 
+ /// + /// [`from_raw_parts`]: #method.from_raw_parts + /// + /// # Examples + /// + /// ``` + /// #![feature(vec_into_raw_parts)] + /// let s = String::from("hello"); + /// + /// let (ptr, len, cap) = s.into_raw_parts(); + /// + /// let rebuilt = unsafe { String::from_raw_parts(ptr, len, cap) }; + /// assert_eq!(rebuilt, "hello"); + /// ``` + #[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")] + pub fn into_raw_parts(self) -> (*mut u8, usize, usize) { + self.vec.into_raw_parts() + } + /// Creates a new `String` from a length, capacity, and pointer. /// /// # Safety @@ -680,13 +709,16 @@ impl String { /// /// unsafe { /// let s = String::from("hello"); - /// let ptr = s.as_ptr(); + /// + // FIXME Update this when vec_into_raw_parts is stabilized + /// // Prevent automatically dropping the String's data + /// let mut s = mem::ManuallyDrop::new(s); + /// + /// let ptr = s.as_mut_ptr(); /// let len = s.len(); /// let capacity = s.capacity(); /// - /// mem::forget(s); - /// - /// let s = String::from_raw_parts(ptr as *mut _, len, capacity); + /// let s = String::from_raw_parts(ptr, len, capacity); /// /// assert_eq!(String::from("hello"), s); /// } diff --git a/src/liballoc/sync.rs b/src/liballoc/sync.rs index 45f98162e4..80d6c6e0d4 100644 --- a/src/liballoc/sync.rs +++ b/src/liballoc/sync.rs @@ -45,10 +45,10 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize; /// /// The type `Arc` provides shared ownership of a value of type `T`, /// allocated in the heap. Invoking [`clone`][clone] on `Arc` produces -/// a new `Arc` instance, which points to the same value on the heap as the +/// a new `Arc` instance, which points to the same allocation on the heap as the /// source `Arc`, while increasing a reference count. When the last `Arc` -/// pointer to a given value is destroyed, the pointed-to value is also -/// destroyed. +/// pointer to a given allocation is destroyed, the value stored in that allocation (often +/// referred to as "inner value") is also dropped. /// /// Shared references in Rust disallow mutation by default, and `Arc` is no /// exception: you cannot generally obtain a mutable reference to something @@ -61,7 +61,7 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize; /// Unlike [`Rc`], `Arc` uses atomic operations for its reference /// counting. This means that it is thread-safe. The disadvantage is that /// atomic operations are more expensive than ordinary memory accesses. If you -/// are not sharing reference-counted values between threads, consider using +/// are not sharing reference-counted allocations between threads, consider using /// [`Rc`] for lower overhead. [`Rc`] is a safe default, because the /// compiler will catch any attempt to send an [`Rc`] between threads. /// However, a library might choose `Arc` in order to give library consumers @@ -85,8 +85,10 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize; /// /// The [`downgrade`][downgrade] method can be used to create a non-owning /// [`Weak`][weak] pointer. A [`Weak`][weak] pointer can be [`upgrade`][upgrade]d -/// to an `Arc`, but this will return [`None`] if the value has already been -/// dropped. +/// to an `Arc`, but this will return [`None`] if the value stored in the allocation has +/// already been dropped. In other words, `Weak` pointers do not keep the value +/// inside the allocation alive; however, they *do* keep the allocation +/// (the backing store for the value) alive. /// /// A cycle between `Arc` pointers will never be deallocated. 
For this reason, /// [`Weak`][weak] is used to break cycles. For example, a tree could have @@ -121,8 +123,8 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize; /// Arc::downgrade(&my_arc); /// ``` /// -/// [`Weak`][weak] does not auto-dereference to `T`, because the value may have -/// already been destroyed. +/// [`Weak`][weak] does not auto-dereference to `T`, because the inner value may have +/// already been dropped. /// /// [arc]: struct.Arc.html /// [weak]: struct.Weak.html @@ -221,17 +223,18 @@ impl Arc { } /// `Weak` is a version of [`Arc`] that holds a non-owning reference to the -/// managed value. The value is accessed by calling [`upgrade`] on the `Weak` +/// managed allocation. The allocation is accessed by calling [`upgrade`] on the `Weak` /// pointer, which returns an [`Option`]`<`[`Arc`]`>`. /// /// Since a `Weak` reference does not count towards ownership, it will not -/// prevent the inner value from being dropped, and `Weak` itself makes no -/// guarantees about the value still being present and may return [`None`] -/// when [`upgrade`]d. +/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no +/// guarantees about the value still being present. Thus it may return [`None`] +/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation +/// itself (the backing store) from being deallocated. /// -/// A `Weak` pointer is useful for keeping a temporary reference to the value -/// within [`Arc`] without extending its lifetime. It is also used to prevent -/// circular references between [`Arc`] pointers, since mutual owning references +/// A `Weak` pointer is useful for keeping a temporary reference to the allocation +/// managed by [`Arc`] without preventing its inner value from being dropped. It is also used to +/// prevent circular references between [`Arc`] pointers, since mutual owning references /// would never allow either [`Arc`] to be dropped. For example, a tree could /// have strong [`Arc`] pointers from parent nodes to children, and `Weak` /// pointers from children back to their parents. @@ -345,7 +348,7 @@ impl Arc { unsafe { Pin::new_unchecked(Arc::new(data)) } } - /// Returns the contained value, if the `Arc` has exactly one strong reference. + /// Returns the inner value, if the `Arc` has exactly one strong reference. /// /// Otherwise, an [`Err`][result] is returned with the same `Arc` that was /// passed in. @@ -426,7 +429,7 @@ impl Arc> { /// # Safety /// /// As with [`MaybeUninit::assume_init`], - /// it is up to the caller to guarantee that the value + /// it is up to the caller to guarantee that the inner value /// really is in an initialized state. /// Calling this when the content is not yet fully initialized /// causes immediate undefined behavior. @@ -465,7 +468,7 @@ impl Arc<[mem::MaybeUninit]> { /// # Safety /// /// As with [`MaybeUninit::assume_init`], - /// it is up to the caller to guarantee that the value + /// it is up to the caller to guarantee that the inner value /// really is in an initialized state. /// Calling this when the content is not yet fully initialized /// causes immediate undefined behavior. @@ -584,7 +587,7 @@ impl Arc { unsafe { NonNull::new_unchecked(Arc::into_raw(this) as *mut _) } } - /// Creates a new [`Weak`][weak] pointer to this value. + /// Creates a new [`Weak`][weak] pointer to this allocation. /// /// [weak]: struct.Weak.html /// @@ -628,7 +631,7 @@ impl Arc { } } - /// Gets the number of [`Weak`][weak] pointers to this value. 
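For `try_unwrap`, the "exactly one strong reference" condition can be demonstrated with a short sketch (illustrative only):

```rust
use std::sync::Arc;

fn main() {
    // Exactly one strong reference: the inner value is returned.
    let unique = Arc::new(3);
    assert_eq!(Arc::try_unwrap(unique), Ok(3));

    // A second strong reference exists: the same Arc is handed back as an Err.
    let shared = Arc::new(4);
    let _also_shared = Arc::clone(&shared);
    assert_eq!(*Arc::try_unwrap(shared).unwrap_err(), 4);
}
```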
+ /// Gets the number of [`Weak`][weak] pointers to this allocation. /// /// [weak]: struct.Weak.html /// @@ -659,7 +662,7 @@ impl Arc { if cnt == usize::MAX { 0 } else { cnt - 1 } } - /// Gets the number of strong (`Arc`) pointers to this value. + /// Gets the number of strong (`Arc`) pointers to this allocation. /// /// # Safety /// @@ -710,8 +713,8 @@ impl Arc { #[inline] #[stable(feature = "ptr_eq", since = "1.17.0")] - /// Returns `true` if the two `Arc`s point to the same value (not - /// just values that compare as equal). + /// Returns `true` if the two `Arc`s point to the same allocation + /// (in a vein similar to [`ptr::eq`]). /// /// # Examples /// @@ -725,6 +728,8 @@ impl Arc { /// assert!(Arc::ptr_eq(&five, &same_five)); /// assert!(!Arc::ptr_eq(&five, &other_five)); /// ``` + /// + /// [`ptr::eq`]: ../../std/ptr/fn.eq.html pub fn ptr_eq(this: &Self, other: &Self) -> bool { this.ptr.as_ptr() == other.ptr.as_ptr() } @@ -732,7 +737,7 @@ impl Arc { impl Arc { /// Allocates an `ArcInner` with sufficient space for - /// a possibly-unsized value where the value has the layout provided. + /// a possibly-unsized inner value where the value has the layout provided. /// /// The function `mem_to_arcinner` is called with the data pointer /// and must return back a (potentially fat)-pointer for the `ArcInner`. @@ -761,7 +766,7 @@ impl Arc { inner } - /// Allocates an `ArcInner` with sufficient space for an unsized value. + /// Allocates an `ArcInner` with sufficient space for an unsized inner value. unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner { // Allocate for the `ArcInner` using the given value. Self::allocate_for_layout( @@ -903,7 +908,7 @@ impl ArcFromSlice for Arc<[T]> { impl Clone for Arc { /// Makes a clone of the `Arc` pointer. /// - /// This creates another pointer to the same inner value, increasing the + /// This creates another pointer to the same allocation, increasing the /// strong reference count. /// /// # Examples @@ -965,15 +970,19 @@ impl Receiver for Arc {} impl Arc { /// Makes a mutable reference into the given `Arc`. /// - /// If there are other `Arc` or [`Weak`][weak] pointers to the same value, - /// then `make_mut` will invoke [`clone`][clone] on the inner value to - /// ensure unique ownership. This is also referred to as clone-on-write. + /// If there are other `Arc` or [`Weak`][weak] pointers to the same allocation, + /// then `make_mut` will create a new allocation and invoke [`clone`][clone] on the inner value + /// to ensure unique ownership. This is also referred to as clone-on-write. + /// + /// Note that this differs from the behavior of [`Rc::make_mut`] which disassociates + /// any remaining `Weak` pointers. /// /// See also [`get_mut`][get_mut], which will fail rather than cloning. /// /// [weak]: struct.Weak.html /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone /// [get_mut]: struct.Arc.html#method.get_mut + /// [`Rc::make_mut`]: ../rc/struct.Rc.html#method.make_mut /// /// # Examples /// @@ -988,7 +997,7 @@ impl Arc { /// *Arc::make_mut(&mut data) += 1; // Won't clone anything /// *Arc::make_mut(&mut other_data) *= 2; // Won't clone anything /// - /// // Now `data` and `other_data` point to different values. + /// // Now `data` and `other_data` point to different allocations. /// assert_eq!(*data, 8); /// assert_eq!(*other_data, 12); /// ``` @@ -1048,14 +1057,14 @@ impl Arc { } impl Arc { - /// Returns a mutable reference to the inner value, if there are - /// no other `Arc` or [`Weak`][weak] pointers to the same value. 
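A quick sketch of how `strong_count` and `weak_count` track the handles to one allocation:

```rust
use std::sync::Arc;

fn main() {
    let five = Arc::new(5);
    let also_five = Arc::clone(&five);
    let weak_five = Arc::downgrade(&five);

    assert_eq!(Arc::strong_count(&five), 2);
    assert_eq!(Arc::weak_count(&five), 1);

    drop(also_five);
    drop(weak_five);
    assert_eq!(Arc::strong_count(&five), 1);
    assert_eq!(Arc::weak_count(&five), 0);
}
```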
+ /// Returns a mutable reference into the given `Arc`, if there are + /// no other `Arc` or [`Weak`][weak] pointers to the same allocation. /// /// Returns [`None`][option] otherwise, because it is not safe to /// mutate a shared value. /// /// See also [`make_mut`][make_mut], which will [`clone`][clone] - /// the inner value when it's shared. + /// the inner value when there are other pointers. /// /// [weak]: struct.Weak.html /// [option]: ../../std/option/enum.Option.html @@ -1091,7 +1100,7 @@ impl Arc { } } - /// Returns a mutable reference to the inner value, + /// Returns a mutable reference into the given `Arc`, /// without any check. /// /// See also [`get_mut`], which is safe and does appropriate checks. @@ -1100,7 +1109,7 @@ impl Arc { /// /// # Safety /// - /// Any other `Arc` or [`Weak`] pointers to the same value must not be dereferenced + /// Any other `Arc` or [`Weak`] pointers to the same allocation must not be dereferenced /// for the duration of the returned borrow. /// This is trivially the case if no such pointers exist, /// for example immediately after `Arc::new`. @@ -1244,11 +1253,9 @@ impl Arc { /// } /// } /// - /// fn main() { - /// let my_string = "Hello World".to_string(); - /// print_if_string(Arc::new(my_string)); - /// print_if_string(Arc::new(0i8)); - /// } + /// let my_string = "Hello World".to_string(); + /// print_if_string(Arc::new(my_string)); + /// print_if_string(Arc::new(0i8)); /// ``` pub fn downcast(self) -> Result, Self> where @@ -1426,10 +1433,10 @@ impl Weak { } impl Weak { - /// Attempts to upgrade the `Weak` pointer to an [`Arc`], extending - /// the lifetime of the value if successful. + /// Attempts to upgrade the `Weak` pointer to an [`Arc`], delaying + /// dropping of the inner value if successful. /// - /// Returns [`None`] if the value has since been dropped. + /// Returns [`None`] if the inner value has since been dropped. /// /// [`Arc`]: struct.Arc.html /// [`None`]: ../../std/option/enum.Option.html#variant.None @@ -1484,7 +1491,7 @@ impl Weak { } } - /// Gets the number of strong (`Arc`) pointers pointing to this value. + /// Gets the number of strong (`Arc`) pointers pointing to this allocation. /// /// If `self` was created using [`Weak::new`], this will return 0. /// @@ -1499,17 +1506,17 @@ impl Weak { } /// Gets an approximation of the number of `Weak` pointers pointing to this - /// value. + /// allocation. /// /// If `self` was created using [`Weak::new`], this will return 0. If not, /// the returned value is at least 1, since `self` still points to the - /// value. + /// allocation. /// /// # Accuracy /// /// Due to implementation details, the returned value can be off by 1 in /// either direction when other threads are manipulating any `Arc`s or - /// `Weak`s pointing to the same value. + /// `Weak`s pointing to the same allocation. /// /// [`Weak::new`]: #method.new #[unstable(feature = "weak_counts", issue = "57977")] @@ -1550,14 +1557,14 @@ impl Weak { } } - /// Returns `true` if the two `Weak`s point to the same value (not just - /// values that compare as equal), or if both don't point to any value + /// Returns `true` if the two `Weak`s point to the same allocation (similar to + /// [`ptr::eq`]), or if both don't point to any allocation /// (because they were created with `Weak::new()`). /// /// # Notes /// /// Since this compares pointers it means that `Weak::new()` will equal each - /// other, even though they don't point to any value. + /// other, even though they don't point to any allocation. 
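The uniqueness requirement of `get_mut` in practice, as a standalone illustrative example:

```rust
use std::sync::Arc;

fn main() {
    let mut x = Arc::new(3);

    // Unique ownership: mutation in place is allowed.
    *Arc::get_mut(&mut x).unwrap() = 4;
    assert_eq!(*x, 4);

    // Any other Arc (or Weak) pointer to the same allocation blocks get_mut.
    let _y = Arc::clone(&x);
    assert!(Arc::get_mut(&mut x).is_none());
}
```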
/// /// # Examples /// @@ -1589,6 +1596,8 @@ impl Weak { /// let third = Arc::downgrade(&third_rc); /// assert!(!first.ptr_eq(&third)); /// ``` + /// + /// [`ptr::eq`]: ../../std/ptr/fn.eq.html #[inline] #[stable(feature = "weak_ptr_eq", since = "1.39.0")] pub fn ptr_eq(&self, other: &Self) -> bool { @@ -1598,7 +1607,7 @@ impl Weak { #[stable(feature = "arc_weak", since = "1.4.0")] impl Clone for Weak { - /// Makes a clone of the `Weak` pointer that points to the same value. + /// Makes a clone of the `Weak` pointer that points to the same allocation. /// /// # Examples /// @@ -1629,7 +1638,7 @@ impl Clone for Weak { } } - return Weak { ptr: self.ptr }; + Weak { ptr: self.ptr } } } @@ -1728,6 +1737,8 @@ impl ArcEqIdent for Arc { /// store large values, that are slow to clone, but also heavy to check for equality, causing this /// cost to pay off more easily. It's also more likely to have two `Arc` clones, that point to /// the same value, than two `&T`s. +/// +/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive. #[stable(feature = "rust1", since = "1.0.0")] impl ArcEqIdent for Arc { #[inline] @@ -1745,10 +1756,11 @@ impl ArcEqIdent for Arc { impl PartialEq for Arc { /// Equality for two `Arc`s. /// - /// Two `Arc`s are equal if their inner values are equal. + /// Two `Arc`s are equal if their inner values are equal, even if they are + /// stored in different allocation. /// - /// If `T` also implements `Eq`, two `Arc`s that point to the same value are - /// always equal. + /// If `T` also implements `Eq` (implying reflexivity of equality), + /// two `Arc`s that point to the same allocation are always equal. /// /// # Examples /// @@ -1768,8 +1780,8 @@ impl PartialEq for Arc { /// /// Two `Arc`s are unequal if their inner values are unequal. /// - /// If `T` also implements `Eq`, two `Arc`s that point to the same value are - /// never unequal. + /// If `T` also implements `Eq` (implying reflexivity of equality), + /// two `Arc`s that point to the same value are never unequal. 
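Why the pointer-equality shortcut is restricted to `T: Eq` can be seen with `f32`, whose `NaN` makes equality non-reflexive (illustrative example):

```rust
use std::sync::Arc;

fn main() {
    // f32 is only PartialEq: NaN != NaN, so even two handles to the same
    // allocation compare as unequal, and no pointer shortcut may be taken.
    let nan = Arc::new(f32::NAN);
    assert_ne!(nan, Arc::clone(&nan));

    // i32 is Eq (equality is reflexive), so two Arcs pointing to the same
    // allocation are always equal.
    let five = Arc::new(5);
    assert_eq!(five, Arc::clone(&five));
}
```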
/// /// # Examples /// diff --git a/src/liballoc/tests/binary_heap.rs b/src/liballoc/tests/binary_heap.rs index 0685fa943c..a44cf1eaf6 100644 --- a/src/liballoc/tests/binary_heap.rs +++ b/src/liballoc/tests/binary_heap.rs @@ -1,10 +1,6 @@ -use std::cmp; use std::collections::BinaryHeap; use std::collections::binary_heap::{Drain, PeekMut}; -use std::panic::{self, AssertUnwindSafe}; -use std::sync::atomic::{AtomicUsize, Ordering}; - -use rand::{thread_rng, seq::SliceRandom}; +use std::iter::TrustedLen; #[test] fn test_iterator() { @@ -19,7 +15,7 @@ fn test_iterator() { } #[test] -fn test_iterator_reverse() { +fn test_iter_rev_cloned_collect() { let data = vec![5, 9, 3]; let iterout = vec![3, 5, 9]; let pq = BinaryHeap::from(data); @@ -29,7 +25,7 @@ fn test_iterator_reverse() { } #[test] -fn test_move_iter() { +fn test_into_iter_collect() { let data = vec![5, 9, 3]; let iterout = vec![9, 5, 3]; let pq = BinaryHeap::from(data); @@ -39,7 +35,7 @@ fn test_move_iter() { } #[test] -fn test_move_iter_size_hint() { +fn test_into_iter_size_hint() { let data = vec![5, 9]; let pq = BinaryHeap::from(data); @@ -56,7 +52,7 @@ fn test_move_iter_size_hint() { } #[test] -fn test_move_iter_reverse() { +fn test_into_iter_rev_collect() { let data = vec![5, 9, 3]; let iterout = vec![3, 5, 9]; let pq = BinaryHeap::from(data); @@ -65,6 +61,65 @@ fn test_move_iter_reverse() { assert_eq!(v, iterout); } +#[test] +fn test_into_iter_sorted_collect() { + let heap = BinaryHeap::from(vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]); + let it = heap.into_iter_sorted(); + let sorted = it.collect::>(); + assert_eq!(sorted, vec![10, 9, 8, 7, 6, 5, 4, 3, 2, 2, 1, 1, 0]); +} + +#[test] +fn test_drain_sorted_collect() { + let mut heap = BinaryHeap::from(vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]); + let it = heap.drain_sorted(); + let sorted = it.collect::>(); + assert_eq!(sorted, vec![10, 9, 8, 7, 6, 5, 4, 3, 2, 2, 1, 1, 0]); +} + +fn check_exact_size_iterator(len: usize, it: I) { + let mut it = it; + + for i in 0..it.len() { + let (lower, upper) = it.size_hint(); + assert_eq!(Some(lower), upper); + assert_eq!(lower, len - i); + assert_eq!(it.len(), len - i); + it.next(); + } + assert_eq!(it.len(), 0); + assert!(it.is_empty()); +} + +#[test] +fn test_exact_size_iterator() { + let heap = BinaryHeap::from(vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]); + check_exact_size_iterator(heap.len(), heap.iter()); + check_exact_size_iterator(heap.len(), heap.clone().into_iter()); + check_exact_size_iterator(heap.len(), heap.clone().into_iter_sorted()); + check_exact_size_iterator(heap.len(), heap.clone().drain()); + check_exact_size_iterator(heap.len(), heap.clone().drain_sorted()); +} + +fn check_trusted_len(len: usize, it: I) { + let mut it = it; + for i in 0..len { + let (lower, upper) = it.size_hint(); + if upper.is_some() { + assert_eq!(Some(lower), upper); + assert_eq!(lower, len - i); + } + it.next(); + } +} + +#[test] +fn test_trusted_len() { + let heap = BinaryHeap::from(vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]); + check_trusted_len(heap.len(), heap.clone().into_iter_sorted()); + check_trusted_len(heap.len(), heap.clone().drain_sorted()); +} + #[test] fn test_peek_and_pop() { let data = vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]; @@ -211,6 +266,15 @@ fn test_drain() { assert!(q.is_empty()); } +#[test] +fn test_drain_sorted() { + let mut q: BinaryHeap<_> = [9, 8, 7, 6, 5, 4, 3, 2, 1].iter().cloned().collect(); + + assert_eq!(q.drain_sorted().take(5).collect::>(), vec![9, 8, 7, 6, 5]); + + assert!(q.is_empty()); +} + #[test] fn 
test_extend_ref() { let mut a = BinaryHeap::new(); @@ -281,9 +345,15 @@ fn assert_covariance() { // even if the order may not be correct. // // Destructors must be called exactly once per element. +// FIXME: re-enable emscripten once it can unwind again #[test] -#[cfg(not(miri))] // Miri does not support catching panics +#[cfg(not(any(miri, target_os = "emscripten")))] // Miri does not support catching panics fn panic_safe() { + use std::cmp; + use std::panic::{self, AssertUnwindSafe}; + use std::sync::atomic::{AtomicUsize, Ordering}; + use rand::{thread_rng, seq::SliceRandom}; + static DROP_COUNTER: AtomicUsize = AtomicUsize::new(0); #[derive(Eq, PartialEq, Ord, Clone, Debug)] diff --git a/src/liballoc/tests/boxed.rs b/src/liballoc/tests/boxed.rs new file mode 100644 index 0000000000..bc3d53bf30 --- /dev/null +++ b/src/liballoc/tests/boxed.rs @@ -0,0 +1,18 @@ +use std::ptr::NonNull; +use std::mem::MaybeUninit; + +#[test] +fn unitialized_zero_size_box() { + assert_eq!( + &*Box::<()>::new_uninit() as *const _, + NonNull::>::dangling().as_ptr(), + ); + assert_eq!( + Box::<[()]>::new_uninit_slice(4).as_ptr(), + NonNull::>::dangling().as_ptr(), + ); + assert_eq!( + Box::<[String]>::new_uninit_slice(0).as_ptr(), + NonNull::>::dangling().as_ptr(), + ); +} diff --git a/src/liballoc/tests/btree/set.rs b/src/liballoc/tests/btree/set.rs index 35db18c39c..e4883abc8b 100644 --- a/src/liballoc/tests/btree/set.rs +++ b/src/liballoc/tests/btree/set.rs @@ -48,7 +48,9 @@ fn check(a: &[i32], b: &[i32], expected: &[i32], f: F) f(&set_a, &set_b, &mut |&x| { - assert_eq!(x, expected[i]); + if i < expected.len() { + assert_eq!(x, expected[i]); + } i += 1; true }); @@ -74,20 +76,20 @@ fn test_intersection() { return; } - let large = (0..1000).collect::>(); + let large = (0..100).collect::>(); check_intersection(&[], &large, &[]); check_intersection(&large, &[], &[]); check_intersection(&[-1], &large, &[]); check_intersection(&large, &[-1], &[]); check_intersection(&[0], &large, &[0]); check_intersection(&large, &[0], &[0]); - check_intersection(&[999], &large, &[999]); - check_intersection(&large, &[999], &[999]); - check_intersection(&[1000], &large, &[]); - check_intersection(&large, &[1000], &[]); - check_intersection(&[11, 5000, 1, 3, 77, 8924, 103], + check_intersection(&[99], &large, &[99]); + check_intersection(&large, &[99], &[99]); + check_intersection(&[100], &large, &[]); + check_intersection(&large, &[100], &[]); + check_intersection(&[11, 5000, 1, 3, 77, 8924], &large, - &[1, 3, 11, 77, 103]); + &[1, 3, 11, 77]); } #[test] @@ -95,10 +97,15 @@ fn test_intersection_size_hint() { let x: BTreeSet = [3, 4].iter().copied().collect(); let y: BTreeSet = [1, 2, 3].iter().copied().collect(); let mut iter = x.intersection(&y); - assert_eq!(iter.size_hint(), (0, Some(2))); + assert_eq!(iter.size_hint(), (1, Some(1))); assert_eq!(iter.next(), Some(&3)); assert_eq!(iter.size_hint(), (0, Some(0))); assert_eq!(iter.next(), None); + + iter = y.intersection(&y); + assert_eq!(iter.size_hint(), (0, Some(3))); + assert_eq!(iter.next(), Some(&1)); + assert_eq!(iter.size_hint(), (0, Some(2))); } #[test] @@ -111,6 +118,9 @@ fn test_difference() { check_difference(&[1, 12], &[], &[1, 12]); check_difference(&[], &[1, 2, 3, 9], &[]); check_difference(&[1, 3, 5, 9, 11], &[3, 9], &[1, 5, 11]); + check_difference(&[1, 3, 5, 9, 11], &[3, 6, 9], &[1, 5, 11]); + check_difference(&[1, 3, 5, 9, 11], &[0, 1], &[3, 5, 9, 11]); + check_difference(&[1, 3, 5, 9, 11], &[11, 12], &[1, 3, 5, 9]); check_difference(&[-5, 11, 22, 33, 40, 42], 
&[-12, -5, 14, 23, 34, 38, 39, 50], &[11, 22, 33, 40, 42]); @@ -119,18 +129,82 @@ fn test_difference() { return; } - let large = (0..1000).collect::>(); + let large = (0..100).collect::>(); check_difference(&[], &large, &[]); check_difference(&[-1], &large, &[-1]); check_difference(&[0], &large, &[]); - check_difference(&[999], &large, &[]); - check_difference(&[1000], &large, &[1000]); - check_difference(&[11, 5000, 1, 3, 77, 8924, 103], + check_difference(&[99], &large, &[]); + check_difference(&[100], &large, &[100]); + check_difference(&[11, 5000, 1, 3, 77, 8924], &large, &[5000, 8924]); check_difference(&large, &[], &large); check_difference(&large, &[-1], &large); - check_difference(&large, &[1000], &large); + check_difference(&large, &[100], &large); +} + +#[test] +fn test_difference_size_hint() { + let s246: BTreeSet = [2, 4, 6].iter().copied().collect(); + let s23456: BTreeSet = (2..=6).collect(); + let mut iter = s246.difference(&s23456); + assert_eq!(iter.size_hint(), (0, Some(3))); + assert_eq!(iter.next(), None); + + let s12345: BTreeSet = (1..=5).collect(); + iter = s246.difference(&s12345); + assert_eq!(iter.size_hint(), (0, Some(3))); + assert_eq!(iter.next(), Some(&6)); + assert_eq!(iter.size_hint(), (0, Some(0))); + assert_eq!(iter.next(), None); + + let s34567: BTreeSet = (3..=7).collect(); + iter = s246.difference(&s34567); + assert_eq!(iter.size_hint(), (0, Some(3))); + assert_eq!(iter.next(), Some(&2)); + assert_eq!(iter.size_hint(), (0, Some(2))); + assert_eq!(iter.next(), None); + + let s1: BTreeSet = (-9..=1).collect(); + iter = s246.difference(&s1); + assert_eq!(iter.size_hint(), (3, Some(3))); + + let s2: BTreeSet = (-9..=2).collect(); + iter = s246.difference(&s2); + assert_eq!(iter.size_hint(), (2, Some(2))); + assert_eq!(iter.next(), Some(&4)); + assert_eq!(iter.size_hint(), (1, Some(1))); + + let s23: BTreeSet = (2..=3).collect(); + iter = s246.difference(&s23); + assert_eq!(iter.size_hint(), (1, Some(3))); + assert_eq!(iter.next(), Some(&4)); + assert_eq!(iter.size_hint(), (1, Some(1))); + + let s4: BTreeSet = (4..=4).collect(); + iter = s246.difference(&s4); + assert_eq!(iter.size_hint(), (2, Some(3))); + assert_eq!(iter.next(), Some(&2)); + assert_eq!(iter.size_hint(), (1, Some(2))); + assert_eq!(iter.next(), Some(&6)); + assert_eq!(iter.size_hint(), (0, Some(0))); + assert_eq!(iter.next(), None); + + let s56: BTreeSet = (5..=6).collect(); + iter = s246.difference(&s56); + assert_eq!(iter.size_hint(), (1, Some(3))); + assert_eq!(iter.next(), Some(&2)); + assert_eq!(iter.size_hint(), (0, Some(2))); + + let s6: BTreeSet = (6..=19).collect(); + iter = s246.difference(&s6); + assert_eq!(iter.size_hint(), (2, Some(2))); + assert_eq!(iter.next(), Some(&2)); + assert_eq!(iter.size_hint(), (1, Some(1))); + + let s7: BTreeSet = (7..=19).collect(); + iter = s246.difference(&s7); + assert_eq!(iter.size_hint(), (3, Some(3))); } #[test] @@ -147,6 +221,18 @@ fn test_symmetric_difference() { &[-2, 1, 5, 11, 14, 22]); } +#[test] +fn test_symmetric_difference_size_hint() { + let x: BTreeSet = [2, 4].iter().copied().collect(); + let y: BTreeSet = [1, 2, 3].iter().copied().collect(); + let mut iter = x.symmetric_difference(&y); + assert_eq!(iter.size_hint(), (0, Some(5))); + assert_eq!(iter.next(), Some(&1)); + assert_eq!(iter.size_hint(), (0, Some(4))); + assert_eq!(iter.next(), Some(&3)); + assert_eq!(iter.size_hint(), (0, Some(1))); +} + #[test] fn test_union() { fn check_union(a: &[i32], b: &[i32], expected: &[i32]) { @@ -161,6 +247,18 @@ fn test_union() { &[-2, 1, 3, 5, 
9, 11, 13, 16, 19, 24]); } +#[test] +fn test_union_size_hint() { + let x: BTreeSet = [2, 4].iter().copied().collect(); + let y: BTreeSet = [1, 2, 3].iter().copied().collect(); + let mut iter = x.union(&y); + assert_eq!(iter.size_hint(), (3, Some(5))); + assert_eq!(iter.next(), Some(&1)); + assert_eq!(iter.size_hint(), (2, Some(4))); + assert_eq!(iter.next(), Some(&2)); + assert_eq!(iter.size_hint(), (1, Some(2))); +} + #[test] // Only tests the simple function definition with respect to intersection fn test_is_disjoint() { @@ -170,7 +268,7 @@ fn test_is_disjoint() { } #[test] -// Also tests the trivial function definition of is_superset +// Also implicitly tests the trivial function definition of is_superset fn test_is_subset() { fn is_subset(a: &[i32], b: &[i32]) -> bool { let set_a = a.iter().collect::>(); @@ -188,23 +286,23 @@ fn test_is_subset() { assert_eq!(is_subset(&[1, 2], &[1, 2]), true); assert_eq!(is_subset(&[1, 2], &[2, 3]), false); assert_eq!(is_subset(&[-5, 11, 22, 33, 40, 42], - &[-12, -5, 14, 23, 11, 34, 22, 38, 33, 42, 39, 40]), + &[-12, -5, 11, 14, 22, 23, 33, 34, 38, 39, 40, 42]), true); assert_eq!(is_subset(&[-5, 11, 22, 33, 40, 42], - &[-12, -5, 14, 23, 34, 38, 22, 11]), + &[-12, -5, 11, 14, 22, 23, 34, 38]), false); if cfg!(miri) { // Miri is too slow return; } - let large = (0..1000).collect::>(); + let large = (0..100).collect::>(); assert_eq!(is_subset(&[], &large), true); assert_eq!(is_subset(&large, &[]), false); assert_eq!(is_subset(&[-1], &large), false); assert_eq!(is_subset(&[0], &large), true); assert_eq!(is_subset(&[1, 2], &large), true); - assert_eq!(is_subset(&[999, 1000], &large), false); + assert_eq!(is_subset(&[99, 100], &large), false); } #[test] diff --git a/src/liballoc/tests/lib.rs b/src/liballoc/tests/lib.rs index 5723a30c0f..3273feb7b5 100644 --- a/src/liballoc/tests/lib.rs +++ b/src/liballoc/tests/lib.rs @@ -2,19 +2,21 @@ #![feature(box_syntax)] #![feature(drain_filter)] #![feature(exact_size_is_empty)] -#![feature(option_flattening)] +#![feature(new_uninit)] #![feature(pattern)] -#![feature(repeat_generic_slice)] #![feature(trusted_len)] #![feature(try_reserve)] #![feature(unboxed_closures)] #![feature(associated_type_bounds)] +#![feature(binary_heap_into_iter_sorted)] +#![feature(binary_heap_drain_sorted)] use std::hash::{Hash, Hasher}; use std::collections::hash_map::DefaultHasher; mod arc; mod binary_heap; +mod boxed; mod btree; mod cow_str; mod fmt; diff --git a/src/liballoc/tests/str.rs b/src/liballoc/tests/str.rs index 4332b2e90f..cb73c7c179 100644 --- a/src/liballoc/tests/str.rs +++ b/src/liballoc/tests/str.rs @@ -483,7 +483,7 @@ mod slice_index { } #[test] - #[cfg(not(target_arch = "asmjs"))] // hits an OOM + #[cfg(not(target_os = "emscripten"))] // hits an OOM #[cfg(not(miri))] // Miri is too slow fn simple_big() { fn a_million_letter_x() -> String { diff --git a/src/liballoc/tests/vec.rs b/src/liballoc/tests/vec.rs index 29a22aa031..8053721769 100644 --- a/src/liballoc/tests/vec.rs +++ b/src/liballoc/tests/vec.rs @@ -944,8 +944,10 @@ fn drain_filter_complex() { } } +// Miri does not support catching panics +// FIXME: re-enable emscripten once it can unwind again #[test] -#[cfg(not(miri))] // Miri does not support catching panics +#[cfg(not(any(miri, target_os = "emscripten")))] fn drain_filter_consumed_panic() { use std::rc::Rc; use std::sync::Mutex; @@ -995,8 +997,9 @@ fn drain_filter_consumed_panic() { } } +// FIXME: Re-enable emscripten once it can catch panics #[test] -#[cfg(not(miri))] // Miri does not support catching panics 
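For orientation, the `BTreeSet` operations exercised by the set tests above, shown on a small stable-Rust example:

```rust
use std::collections::BTreeSet;

fn main() {
    let a: BTreeSet<i32> = [1, 2, 3].iter().copied().collect();
    let b: BTreeSet<i32> = [2, 3, 4].iter().copied().collect();

    assert_eq!(a.intersection(&b).copied().collect::<Vec<_>>(), [2, 3]);
    assert_eq!(a.difference(&b).copied().collect::<Vec<_>>(), [1]);
    assert_eq!(a.union(&b).copied().collect::<Vec<_>>(), [1, 2, 3, 4]);
    assert!(!a.is_disjoint(&b));
    assert!(a.intersection(&b).copied().collect::<BTreeSet<_>>().is_subset(&a));
}
```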
+#[cfg(not(any(miri, target_os = "emscripten")))] // Miri does not support catching panics fn drain_filter_unconsumed_panic() { use std::rc::Rc; use std::sync::Mutex; @@ -1281,3 +1284,51 @@ fn test_stable_push_pop() { v.pop().unwrap(); assert_eq!(*v0, 13); } + +// https://github.com/rust-lang/rust/pull/49496 introduced specialization based on: +// +// ``` +// unsafe impl IsZero for *mut T { +// fn is_zero(&self) -> bool { +// (*self).is_null() +// } +// } +// ``` +// +// … to call `RawVec::with_capacity_zeroed` for creating `Vec<*mut T>`, +// which is incorrect for fat pointers since `<*mut T>::is_null` only looks at the data component. +// That is, a fat pointer can be “null” without being made entirely of zero bits. +#[test] +fn vec_macro_repeating_null_raw_fat_pointer() { + let raw_dyn = &mut (|| ()) as &mut dyn Fn() as *mut dyn Fn(); + let vtable = dbg!(ptr_metadata(raw_dyn)); + let null_raw_dyn = ptr_from_raw_parts(std::ptr::null_mut(), vtable); + assert!(null_raw_dyn.is_null()); + + let vec = vec![null_raw_dyn; 1]; + dbg!(ptr_metadata(vec[0])); + assert!(vec[0] == null_raw_dyn); + + // Polyfill for https://github.com/rust-lang/rfcs/pull/2580 + + fn ptr_metadata(ptr: *mut dyn Fn()) -> *mut () { + unsafe { + std::mem::transmute::<*mut dyn Fn(), DynRepr>(ptr).vtable + } + } + + fn ptr_from_raw_parts(data: *mut (), vtable: *mut()) -> *mut dyn Fn() { + unsafe { + std::mem::transmute::(DynRepr { + data, + vtable + }) + } + } + + #[repr(C)] + struct DynRepr { + data: *mut (), + vtable: *mut (), + } +} diff --git a/src/liballoc/vec.rs b/src/liballoc/vec.rs index 405969a550..5b53a6a289 100644 --- a/src/liballoc/vec.rs +++ b/src/liballoc/vec.rs @@ -154,8 +154,8 @@ use crate::raw_vec::RawVec; /// println!("{}", v[6]); // it will panic! /// ``` /// -/// In conclusion: always check if the index you want to get really exists -/// before doing it. +/// Use [`get`] and [`get_mut`] if you want to check whether the index is in +/// the `Vec`. /// /// # Slicing /// @@ -277,6 +277,8 @@ use crate::raw_vec::RawVec; /// The order has changed in the past and may change again. /// /// [`vec!`]: ../../std/macro.vec.html +/// [`get`]: ../../std/vec/struct.Vec.html#method.get +/// [`get_mut`]: ../../std/vec/struct.Vec.html#method.get_mut /// [`Index`]: ../../std/ops/trait.Index.html /// [`String`]: ../../std/string/struct.String.html /// [`&str`]: ../../std/primitive.str.html @@ -291,7 +293,7 @@ use crate::raw_vec::RawVec; /// [`reserve`]: ../../std/vec/struct.Vec.html#method.reserve /// [owned slice]: ../../std/boxed/struct.Box.html #[stable(feature = "rust1", since = "1.0.0")] -#[cfg_attr(all(not(bootstrap), not(test)), rustc_diagnostic_item = "vec_type")] +#[cfg_attr(not(test), rustc_diagnostic_item = "vec_type")] pub struct Vec { buf: RawVec, len: usize, @@ -314,7 +316,6 @@ impl Vec { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - #[cfg_attr(bootstrap, rustc_const_unstable(feature = "const_vec_new"))] pub const fn new() -> Vec { Vec { buf: RawVec::NEW, @@ -359,6 +360,44 @@ impl Vec { } } + /// Decomposes a `Vec` into its raw components. + /// + /// Returns the raw pointer to the underlying data, the length of + /// the vector (in elements), and the allocated capacity of the + /// data (in elements). These are the same arguments in the same + /// order as the arguments to [`from_raw_parts`]. + /// + /// After calling this function, the caller is responsible for the + /// memory previously managed by the `Vec`. 
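The new `Vec::into_raw_parts` introduced below is a thin wrapper over `ManuallyDrop`; an equivalent sketch against stable Rust (the free-standing helper name is only for illustration):

```rust
use std::mem::ManuallyDrop;

// Hand out (pointer, length, capacity) without freeing the buffer,
// mirroring what the unstable method does internally.
fn into_raw_parts<T>(v: Vec<T>) -> (*mut T, usize, usize) {
    let mut v = ManuallyDrop::new(v);
    (v.as_mut_ptr(), v.len(), v.capacity())
}

fn main() {
    let v = vec![1, 2, 3];
    let (ptr, len, cap) = into_raw_parts(v);

    // Safety: the components came from a live Vec<i32> with the same element type.
    let rebuilt = unsafe { Vec::from_raw_parts(ptr, len, cap) };
    assert_eq!(rebuilt, [1, 2, 3]);
}
```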
The only way to do + /// this is to convert the raw pointer, length, and capacity back + /// into a `Vec` with the [`from_raw_parts`] function, allowing + /// the destructor to perform the cleanup. + /// + /// [`from_raw_parts`]: #method.from_raw_parts + /// + /// # Examples + /// + /// ``` + /// #![feature(vec_into_raw_parts)] + /// let v: Vec = vec![-1, 0, 1]; + /// + /// let (ptr, len, cap) = v.into_raw_parts(); + /// + /// let rebuilt = unsafe { + /// // We can now make changes to the components, such as + /// // transmuting the raw pointer to a compatible type. + /// let ptr = ptr as *mut u32; + /// + /// Vec::from_raw_parts(ptr, len, cap) + /// }; + /// assert_eq!(rebuilt, [4294967295, 0, 1]); + /// ``` + #[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")] + pub fn into_raw_parts(self) -> (*mut T, usize, usize) { + let mut me = mem::ManuallyDrop::new(self); + (me.as_mut_ptr(), me.len(), me.capacity()) + } + /// Creates a `Vec` directly from the raw components of another vector. /// /// # Safety @@ -374,7 +413,11 @@ impl Vec { /// /// Violating these may cause problems like corrupting the allocator's /// internal data structures. For example it is **not** safe - /// to build a `Vec` from a pointer to a C `char` array and a `size_t`. + /// to build a `Vec` from a pointer to a C `char` array with length `size_t`. + /// It's also not safe to build one from a `Vec` and its length, because + /// the allocator cares about the alignment, and these two types have different + /// alignments. The buffer was allocated with alignment 2 (for `u16`), but after + /// turning it into a `Vec` it'll be deallocated with alignment 1. /// /// The ownership of `ptr` is effectively transferred to the /// `Vec` which may then deallocate, reallocate or change the @@ -390,28 +433,27 @@ impl Vec { /// use std::ptr; /// use std::mem; /// - /// fn main() { - /// let mut v = vec![1, 2, 3]; + /// let v = vec![1, 2, 3]; /// - /// // Pull out the various important pieces of information about `v` - /// let p = v.as_mut_ptr(); - /// let len = v.len(); - /// let cap = v.capacity(); + // FIXME Update this when vec_into_raw_parts is stabilized + /// // Prevent running `v`'s destructor so we are in complete control + /// // of the allocation. + /// let mut v = mem::ManuallyDrop::new(v); /// - /// unsafe { - /// // Cast `v` into the void: no destructor run, so we are in - /// // complete control of the allocation to which `p` points. 
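To illustrate the alignment caveat added to the `from_raw_parts` docs: reinterpreting a `Vec<u16>` buffer as bytes must go through a copy rather than `from_raw_parts` (a sketch, assuming little-endian byte order is the desired result):

```rust
fn main() {
    let v16: Vec<u16> = vec![0x1234, 0x5678];

    // Reusing v16's buffer as a Vec<u8> via from_raw_parts would be unsound:
    // the allocation was made with u16's size and alignment. Copying into a
    // fresh Vec<u8> is the sound alternative.
    let bytes: Vec<u8> = v16.iter().flat_map(|x| x.to_le_bytes().to_vec()).collect();
    assert_eq!(bytes, [0x34, 0x12, 0x78, 0x56]);
}
```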
- /// mem::forget(v); + /// // Pull out the various important pieces of information about `v` + /// let p = v.as_mut_ptr(); + /// let len = v.len(); + /// let cap = v.capacity(); /// - /// // Overwrite memory with 4, 5, 6 - /// for i in 0..len as isize { - /// ptr::write(p.offset(i), 4 + i); - /// } - /// - /// // Put everything back together into a Vec - /// let rebuilt = Vec::from_raw_parts(p, len, cap); - /// assert_eq!(rebuilt, [4, 5, 6]); + /// unsafe { + /// // Overwrite memory with 4, 5, 6 + /// for i in 0..len as isize { + /// ptr::write(p.offset(i), 4 + i); /// } + /// + /// // Put everything back together into a Vec + /// let rebuilt = Vec::from_raw_parts(p, len, cap); + /// assert_eq!(rebuilt, [4, 5, 6]); /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] @@ -1392,12 +1434,10 @@ impl Vec { /// ``` /// #![feature(vec_leak)] /// - /// fn main() { - /// let x = vec![1, 2, 3]; - /// let static_ref: &'static mut [usize] = Vec::leak(x); - /// static_ref[0] += 1; - /// assert_eq!(static_ref, &[2, 2, 3]); - /// } + /// let x = vec![1, 2, 3]; + /// let static_ref: &'static mut [usize] = Vec::leak(x); + /// static_ref[0] += 1; + /// assert_eq!(static_ref, &[2, 2, 3]); /// ``` #[unstable(feature = "vec_leak", issue = "62195")] #[inline] @@ -1735,20 +1775,45 @@ impl_is_zero!(char, |x| x == '\0'); impl_is_zero!(f32, |x: f32| x.to_bits() == 0); impl_is_zero!(f64, |x: f64| x.to_bits() == 0); -unsafe impl IsZero for *const T { +unsafe impl IsZero for *const T { #[inline] fn is_zero(&self) -> bool { (*self).is_null() } } -unsafe impl IsZero for *mut T { +unsafe impl IsZero for *mut T { #[inline] fn is_zero(&self) -> bool { (*self).is_null() } } +// `Option<&T>`, `Option<&mut T>` and `Option>` are guaranteed to represent `None` as null. +// For fat pointers, the bytes that would be the pointer metadata in the `Some` variant +// are padding in the `None` variant, so ignoring them and zero-initializing instead is ok. + +unsafe impl IsZero for Option<&T> { + #[inline] + fn is_zero(&self) -> bool { + self.is_none() + } +} + +unsafe impl IsZero for Option<&mut T> { + #[inline] + fn is_zero(&self) -> bool { + self.is_none() + } +} + +unsafe impl IsZero for Option> { + #[inline] + fn is_zero(&self) -> bool { + self.is_none() + } +} + //////////////////////////////////////////////////////////////////////////////// // Common trait implementations for Vec diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs index 690d8344ac..66d27a2751 100644 --- a/src/libarena/lib.rs +++ b/src/libarena/lib.rs @@ -500,7 +500,7 @@ impl DroplessArena { // though it was supposed to give us `len` return slice::from_raw_parts_mut(mem, i); } - ptr::write(mem.offset(i as isize), value.unwrap()); + ptr::write(mem.add(i), value.unwrap()); i += 1; } } diff --git a/src/libcore/any.rs b/src/libcore/any.rs index 0afbf4f134..e2704e807d 100644 --- a/src/libcore/any.rs +++ b/src/libcore/any.rs @@ -2,14 +2,14 @@ //! of any `'static` type through runtime reflection. //! //! `Any` itself can be used to get a `TypeId`, and has more features when used -//! as a trait object. As `&Any` (a borrowed trait object), it has the `is` and -//! `downcast_ref` methods, to test if the contained value is of a given type, -//! and to get a reference to the inner value as a type. As `&mut Any`, there +//! as a trait object. As `&dyn Any` (a borrowed trait object), it has the `is` +//! and `downcast_ref` methods, to test if the contained value is of a given type, +//! and to get a reference to the inner value as a type. 
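The guarantee the new `IsZero for Option<...>` impls rely on can be observed through type sizes; a brief illustrative sketch:

```rust
use std::mem::size_of;

fn main() {
    // The null-pointer optimization: these Options are pointer-sized and
    // `None` is represented as a null pointer, which is what lets
    // `vec![None; n]` be served from a zeroed allocation after this change.
    assert_eq!(size_of::<Option<&u8>>(), size_of::<&u8>());
    assert_eq!(size_of::<Option<Box<u8>>>(), size_of::<Box<u8>>());

    let v: Vec<Option<Box<u8>>> = vec![None; 4];
    assert!(v.iter().all(|x| x.is_none()));
}
```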
As `&mut dyn Any`, there //! is also the `downcast_mut` method, for getting a mutable reference to the -//! inner value. `Box` adds the `downcast` method, which attempts to +//! inner value. `Box` adds the `downcast` method, which attempts to //! convert to a `Box`. See the [`Box`] documentation for the full details. //! -//! Note that &Any is limited to testing whether a value is of a specified +//! Note that `&dyn Any` is limited to testing whether a value is of a specified //! concrete type, and cannot be used to test whether a type implements a trait. //! //! [`Box`]: ../../std/boxed/struct.Box.html @@ -87,10 +87,8 @@ pub trait Any: 'static { /// TypeId::of::() == s.type_id() /// } /// - /// fn main() { - /// assert_eq!(is_string(&0), false); - /// assert_eq!(is_string(&"cookie monster".to_string()), true); - /// } + /// assert_eq!(is_string(&0), false); + /// assert_eq!(is_string(&"cookie monster".to_string()), true); /// ``` #[stable(feature = "get_type_id", since = "1.34.0")] fn type_id(&self) -> TypeId; @@ -145,10 +143,8 @@ impl dyn Any { /// } /// } /// - /// fn main() { - /// is_string(&0); - /// is_string(&"cookie monster".to_string()); - /// } + /// is_string(&0); + /// is_string(&"cookie monster".to_string()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] @@ -179,10 +175,8 @@ impl dyn Any { /// } /// } /// - /// fn main() { - /// print_if_string(&0); - /// print_if_string(&"cookie monster".to_string()); - /// } + /// print_if_string(&0); + /// print_if_string(&"cookie monster".to_string()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] @@ -210,16 +204,14 @@ impl dyn Any { /// } /// } /// - /// fn main() { - /// let mut x = 10u32; - /// let mut s = "starlord".to_string(); + /// let mut x = 10u32; + /// let mut s = "starlord".to_string(); /// - /// modify_if_u32(&mut x); - /// modify_if_u32(&mut s); + /// modify_if_u32(&mut x); + /// modify_if_u32(&mut s); /// - /// assert_eq!(x, 42); - /// assert_eq!(&s, "starlord"); - /// } + /// assert_eq!(x, 42); + /// assert_eq!(&s, "starlord"); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] @@ -250,10 +242,8 @@ impl dyn Any+Send { /// } /// } /// - /// fn main() { - /// is_string(&0); - /// is_string(&"cookie monster".to_string()); - /// } + /// is_string(&0); + /// is_string(&"cookie monster".to_string()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] @@ -276,10 +266,8 @@ impl dyn Any+Send { /// } /// } /// - /// fn main() { - /// print_if_string(&0); - /// print_if_string(&"cookie monster".to_string()); - /// } + /// print_if_string(&0); + /// print_if_string(&"cookie monster".to_string()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] @@ -300,16 +288,14 @@ impl dyn Any+Send { /// } /// } /// - /// fn main() { - /// let mut x = 10u32; - /// let mut s = "starlord".to_string(); + /// let mut x = 10u32; + /// let mut s = "starlord".to_string(); /// - /// modify_if_u32(&mut x); - /// modify_if_u32(&mut s); + /// modify_if_u32(&mut x); + /// modify_if_u32(&mut s); /// - /// assert_eq!(x, 42); - /// assert_eq!(&s, "starlord"); - /// } + /// assert_eq!(x, 42); + /// assert_eq!(&s, "starlord"); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] @@ -334,10 +320,8 @@ impl dyn Any+Send+Sync { /// } /// } /// - /// fn main() { - /// is_string(&0); - /// is_string(&"cookie monster".to_string()); - /// } + /// is_string(&0); + /// is_string(&"cookie monster".to_string()); /// ``` #[stable(feature = "any_send_sync_methods", since = "1.28.0")] #[inline] 
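A condensed, standalone version of the `dyn Any` pattern these reflowed doc examples use:

```rust
use std::any::Any;

fn print_if_string(value: &dyn Any) {
    if let Some(s) = value.downcast_ref::<String>() {
        println!("string ({} bytes): {}", s.len(), s);
    } else {
        println!("not a string");
    }
}

fn main() {
    print_if_string(&"cookie monster".to_string());
    print_if_string(&0i8);

    let x: &dyn Any = &1u32;
    assert!(x.is::<u32>());
    assert!(!x.is::<String>());
}
```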
@@ -360,10 +344,8 @@ impl dyn Any+Send+Sync { /// } /// } /// - /// fn main() { - /// print_if_string(&0); - /// print_if_string(&"cookie monster".to_string()); - /// } + /// print_if_string(&0); + /// print_if_string(&"cookie monster".to_string()); /// ``` #[stable(feature = "any_send_sync_methods", since = "1.28.0")] #[inline] @@ -384,16 +366,14 @@ impl dyn Any+Send+Sync { /// } /// } /// - /// fn main() { - /// let mut x = 10u32; - /// let mut s = "starlord".to_string(); + /// let mut x = 10u32; + /// let mut s = "starlord".to_string(); /// - /// modify_if_u32(&mut x); - /// modify_if_u32(&mut s); + /// modify_if_u32(&mut x); + /// modify_if_u32(&mut s); /// - /// assert_eq!(x, 42); - /// assert_eq!(&s, "starlord"); - /// } + /// assert_eq!(x, 42); + /// assert_eq!(&s, "starlord"); /// ``` #[stable(feature = "any_send_sync_methods", since = "1.28.0")] #[inline] @@ -437,10 +417,8 @@ impl TypeId { /// TypeId::of::() == TypeId::of::() /// } /// - /// fn main() { - /// assert_eq!(is_string(&0), false); - /// assert_eq!(is_string(&"cookie monster".to_string()), true); - /// } + /// assert_eq!(is_string(&0), false); + /// assert_eq!(is_string(&"cookie monster".to_string()), true); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_unstable(feature="const_type_id")] @@ -467,6 +445,15 @@ impl TypeId { /// /// The current implementation uses the same infrastructure as compiler /// diagnostics and debuginfo, but this is not guaranteed. +/// +/// # Example +/// +/// ```rust +/// assert_eq!( +/// std::any::type_name::>(), +/// "core::option::Option", +/// ); +/// ``` #[stable(feature = "type_name", since = "1.38.0")] #[rustc_const_unstable(feature = "const_type_name")] pub const fn type_name() -> &'static str { diff --git a/src/libcore/array/iter.rs b/src/libcore/array/iter.rs new file mode 100644 index 0000000000..1180323840 --- /dev/null +++ b/src/libcore/array/iter.rs @@ -0,0 +1,266 @@ +//! Defines the `IntoIter` owned iterator for arrays. + +use crate::{ + fmt, + iter::{ExactSizeIterator, FusedIterator, TrustedLen}, + mem::{self, MaybeUninit}, + ops::Range, + ptr, +}; +use super::LengthAtMost32; + + +/// A by-value [array] iterator. +/// +/// [array]: ../../std/primitive.array.html +#[unstable(feature = "array_value_iter", issue = "65798")] +pub struct IntoIter +where + [T; N]: LengthAtMost32, +{ + /// This is the array we are iterating over. + /// + /// Elements with index `i` where `alive.start <= i < alive.end` have not + /// been yielded yet and are valid array entries. Elements with indices `i + /// < alive.start` or `i >= alive.end` have been yielded already and must + /// not be accessed anymore! Those dead elements might even be in a + /// completely uninitialized state! + /// + /// So the invariants are: + /// - `data[alive]` is alive (i.e. contains valid elements) + /// - `data[..alive.start]` and `data[alive.end..]` are dead (i.e. the + /// elements were already read and must not be touched anymore!) + data: [MaybeUninit; N], + + /// The elements in `data` that have not been yielded yet. + /// + /// Invariants: + /// - `alive.start <= alive.end` + /// - `alive.end <= N` + alive: Range, +} + +impl IntoIter +where + [T; N]: LengthAtMost32, +{ + /// Creates a new iterator over the given `array`. + /// + /// *Note*: this method might never get stabilized and/or removed in the + /// future as there will likely be another, preferred way of obtaining this + /// iterator (either via `IntoIterator` for arrays or via another way). 
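For context, the by-value array iterator defined below is used roughly like this. At the time of this change it sits behind `#![feature(array_value_iter)]`; the constructor shown here was later stabilized, and current code would usually just call `.into_iter()` on the array (sketch assumes a toolchain where the type is available):

```rust
fn main() {
    // Each String is moved out of the array by value, not borrowed.
    let strings = [String::from("a"), String::from("b")];
    let owned: Vec<String> = std::array::IntoIter::new(strings).collect();
    assert_eq!(owned, ["a", "b"]);
}
```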
+ #[unstable(feature = "array_value_iter", issue = "65798")] + pub fn new(array: [T; N]) -> Self { + // The transmute here is actually safe. The docs of `MaybeUninit` + // promise: + // + // > `MaybeUninit` is guaranteed to have the same size and alignment + // > as `T`. + // + // The docs even show a transmute from an array of `MaybeUninit` to + // an array of `T`. + // + // With that, this initialization satisfies the invariants. + + // FIXME(LukasKalbertodt): actually use `mem::transmute` here, once it + // works with const generics: + // `mem::transmute::<[T; {N}], [MaybeUninit; {N}]>(array)` + // + // Until then, we do it manually here. We first create a bitwise copy + // but cast the pointer so that it is treated as a different type. Then + // we forget `array` so that it is not dropped. + let data = unsafe { + let data = ptr::read(&array as *const [T; N] as *const [MaybeUninit; N]); + mem::forget(array); + data + }; + + Self { + data, + alive: 0..N, + } + } + + /// Returns an immutable slice of all elements that have not been yielded + /// yet. + fn as_slice(&self) -> &[T] { + // This transmute is safe. As mentioned in `new`, `MaybeUninit` retains + // the size and alignment of `T`. Furthermore, we know that all + // elements within `alive` are properly initialized. + let slice = &self.data[self.alive.clone()]; + unsafe { + mem::transmute::<&[MaybeUninit], &[T]>(slice) + } + } +} + + +#[stable(feature = "array_value_iter_impls", since = "1.40.0")] +impl Iterator for IntoIter +where + [T; N]: LengthAtMost32, +{ + type Item = T; + fn next(&mut self) -> Option { + if self.alive.start == self.alive.end { + return None; + } + + // Bump start index. + // + // From the check above we know that `alive.start != alive.end`. + // Combine this with the invariant `alive.start <= alive.end`, we know + // that `alive.start < alive.end`. Increasing `alive.start` by 1 + // maintains the invariant regarding `alive`. However, due to this + // change, for a short time, the alive zone is not `data[alive]` + // anymore, but `data[idx..alive.end]`. + let idx = self.alive.start; + self.alive.start += 1; + + // Read the element from the array. This is safe: `idx` is an index + // into the "alive" region of the array. Reading this element means + // that `data[idx]` is regarded as dead now (i.e. do not touch). As + // `idx` was the start of the alive-zone, the alive zone is now + // `data[alive]` again, restoring all invariants. + let out = unsafe { self.data.get_unchecked(idx).read() }; + + Some(out) + } + + fn size_hint(&self) -> (usize, Option) { + let len = self.len(); + (len, Some(len)) + } + + fn count(self) -> usize { + self.len() + } + + fn last(mut self) -> Option { + self.next_back() + } +} + +#[stable(feature = "array_value_iter_impls", since = "1.40.0")] +impl DoubleEndedIterator for IntoIter +where + [T; N]: LengthAtMost32, +{ + fn next_back(&mut self) -> Option { + if self.alive.start == self.alive.end { + return None; + } + + // Decrease end index. + // + // From the check above we know that `alive.start != alive.end`. + // Combine this with the invariant `alive.start <= alive.end`, we know + // that `alive.start < alive.end`. As `alive.start` cannot be negative, + // `alive.end` is at least 1, meaning that we can safely decrement it + // by one. This also maintains the invariant `alive.start <= + // alive.end`. However, due to this change, for a short time, the alive + // zone is not `data[alive]` anymore, but `data[alive.start..alive.end + // + 1]`. 
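The `alive`-range bookkeeping described above can be mirrored in a fully safe toy iterator; a sketch using `Option<T>` in place of `MaybeUninit<T>` (names are illustrative only, and the extra `Option` space is the price of avoiding `unsafe`):

```rust
use std::ops::Range;

struct ToyIntoIter<T, const N: usize> {
    data: [Option<T>; N],
    alive: Range<usize>,
}

impl<T, const N: usize> Iterator for ToyIntoIter<T, N> {
    type Item = T;

    fn next(&mut self) -> Option<T> {
        let idx = self.alive.next()?; // bumps alive.start, like the real impl
        self.data[idx].take()         // element `idx` is now "dead"
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.alive.end - self.alive.start;
        (len, Some(len))
    }
}

fn main() {
    let it = ToyIntoIter { data: [Some(1), Some(2), Some(3)], alive: 0..3 };
    assert_eq!(it.collect::<Vec<_>>(), [1, 2, 3]);
}
```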
+ self.alive.end -= 1; + + // Read the element from the array. This is safe: `alive.end` is an + // index into the "alive" region of the array. Compare the previous + // comment that states that the alive region is + // `data[alive.start..alive.end + 1]`. Reading this element means that + // `data[alive.end]` is regarded as dead now (i.e. do not touch). As + // `alive.end` was the end of the alive-zone, the alive zone is now + // `data[alive]` again, restoring all invariants. + let out = unsafe { self.data.get_unchecked(self.alive.end).read() }; + + Some(out) + } +} + +#[stable(feature = "array_value_iter_impls", since = "1.40.0")] +impl Drop for IntoIter +where + [T; N]: LengthAtMost32, +{ + fn drop(&mut self) { + // We simply drop each element via `for_each`. This should not incur + // any significant runtime overhead and avoids adding another `unsafe` + // block. + self.by_ref().for_each(drop); + } +} + +#[stable(feature = "array_value_iter_impls", since = "1.40.0")] +impl ExactSizeIterator for IntoIter +where + [T; N]: LengthAtMost32, +{ + fn len(&self) -> usize { + // Will never underflow due to the invariant `alive.start <= + // alive.end`. + self.alive.end - self.alive.start + } + fn is_empty(&self) -> bool { + self.alive.is_empty() + } +} + +#[stable(feature = "array_value_iter_impls", since = "1.40.0")] +impl FusedIterator for IntoIter +where + [T; N]: LengthAtMost32, +{} + +// The iterator indeed reports the correct length. The number of "alive" +// elements (that will still be yielded) is the length of the range `alive`. +// This range is decremented in length in either `next` or `next_back`. It is +// always decremented by 1 in those methods, but only if `Some(_)` is returned. +#[stable(feature = "array_value_iter_impls", since = "1.40.0")] +unsafe impl TrustedLen for IntoIter +where + [T; N]: LengthAtMost32, +{} + +#[stable(feature = "array_value_iter_impls", since = "1.40.0")] +impl Clone for IntoIter +where + [T; N]: LengthAtMost32, +{ + fn clone(&self) -> Self { + unsafe { + // This creates a new uninitialized array. Note that the `assume_init` + // refers to the array, not the individual elements. And it is Ok if + // the array is in an uninitialized state as all elements may be + // uninitialized (all bit patterns are valid). Compare the + // `MaybeUninit` docs for more information. + let mut new_data: [MaybeUninit; N] = MaybeUninit::uninit().assume_init(); + + // Clone all alive elements. + for idx in self.alive.clone() { + // The element at `idx` in the old array is alive, so we can + // safely call `get_ref()`. We then clone it, and write the + // clone into the new array. + let clone = self.data.get_unchecked(idx).get_ref().clone(); + new_data.get_unchecked_mut(idx).write(clone); + } + + Self { + data: new_data, + alive: self.alive.clone(), + } + } + } +} + +#[stable(feature = "array_value_iter_impls", since = "1.40.0")] +impl fmt::Debug for IntoIter +where + [T; N]: LengthAtMost32, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + // Only print the elements that were not yielded yet: we cannot + // access the yielded elements anymore. 
+ f.debug_tuple("IntoIter") + .field(&self.as_slice()) + .finish() + } +} diff --git a/src/libcore/array.rs b/src/libcore/array/mod.rs similarity index 98% rename from src/libcore/array.rs rename to src/libcore/array/mod.rs index b5614010e5..e1ec8b795d 100644 --- a/src/libcore/array.rs +++ b/src/libcore/array/mod.rs @@ -14,6 +14,13 @@ use crate::hash::{Hash, self}; use crate::marker::Unsize; use crate::slice::{Iter, IterMut}; +#[cfg(not(bootstrap))] +mod iter; + +#[cfg(not(bootstrap))] +#[unstable(feature = "array_value_iter", issue = "65798")] +pub use iter::IntoIter; + /// Utility trait implemented only on arrays of fixed size /// /// This trait can be used to implement other traits on fixed-size arrays diff --git a/src/libcore/bool.rs b/src/libcore/bool.rs index f751ccb428..617bdd238f 100644 --- a/src/libcore/bool.rs +++ b/src/libcore/bool.rs @@ -1,6 +1,5 @@ //! impl bool {} -#[cfg(not(bootstrap))] #[lang = "bool"] impl bool { /// Returns `Some(t)` if the `bool` is `true`, or `None` otherwise. diff --git a/src/libcore/cell.rs b/src/libcore/cell.rs index 8579dbf353..fda103a52d 100644 --- a/src/libcore/cell.rs +++ b/src/libcore/cell.rs @@ -229,52 +229,6 @@ pub struct Cell { value: UnsafeCell, } -impl Cell { - /// Returns a copy of the contained value. - /// - /// # Examples - /// - /// ``` - /// use std::cell::Cell; - /// - /// let c = Cell::new(5); - /// - /// let five = c.get(); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - pub fn get(&self) -> T { - unsafe{ *self.value.get() } - } - - /// Updates the contained value using a function and returns the new value. - /// - /// # Examples - /// - /// ``` - /// #![feature(cell_update)] - /// - /// use std::cell::Cell; - /// - /// let c = Cell::new(5); - /// let new = c.update(|x| x + 1); - /// - /// assert_eq!(new, 6); - /// assert_eq!(c.get(), 6); - /// ``` - #[inline] - #[unstable(feature = "cell_update", issue = "50186")] - pub fn update(&self, f: F) -> T - where - F: FnOnce(T) -> T, - { - let old = self.get(); - let new = f(old); - self.set(new); - new - } -} - #[stable(feature = "rust1", since = "1.0.0")] unsafe impl Send for Cell where T: Send {} @@ -448,6 +402,52 @@ impl Cell { } } +impl Cell { + /// Returns a copy of the contained value. + /// + /// # Examples + /// + /// ``` + /// use std::cell::Cell; + /// + /// let c = Cell::new(5); + /// + /// let five = c.get(); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + pub fn get(&self) -> T { + unsafe{ *self.value.get() } + } + + /// Updates the contained value using a function and returns the new value. + /// + /// # Examples + /// + /// ``` + /// #![feature(cell_update)] + /// + /// use std::cell::Cell; + /// + /// let c = Cell::new(5); + /// let new = c.update(|x| x + 1); + /// + /// assert_eq!(new, 6); + /// assert_eq!(c.get(), 6); + /// ``` + #[inline] + #[unstable(feature = "cell_update", issue = "50186")] + pub fn update(&self, f: F) -> T + where + F: FnOnce(T) -> T, + { + let old = self.get(); + let new = f(old); + self.set(new); + new + } +} + impl Cell { /// Returns a raw pointer to the underlying data in this cell. 
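The relocated `Cell::get`/`Cell::update` methods in stable terms (since `cell_update` is unstable, this sketch spells the update as `get` plus `set`):

```rust
use std::cell::Cell;

fn main() {
    let c = Cell::new(5);
    assert_eq!(c.get(), 5);

    // The stable spelling of the unstable `c.update(|x| x + 1)`:
    c.set(c.get() + 1);
    assert_eq!(c.get(), 6);
}
```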
/// diff --git a/src/libcore/char/convert.rs b/src/libcore/char/convert.rs index 0a870c6751..c456e14db1 100644 --- a/src/libcore/char/convert.rs +++ b/src/libcore/char/convert.rs @@ -111,11 +111,9 @@ impl From for u32 { /// ``` /// use std::mem; /// - /// fn main() { - /// let c = 'c'; - /// let u = u32::from(c); - /// assert!(4 == mem::size_of_val(&u)) - /// } + /// let c = 'c'; + /// let u = u32::from(c); + /// assert!(4 == mem::size_of_val(&u)) /// ``` #[inline] fn from(c: char) -> Self { @@ -150,11 +148,9 @@ impl From for char { /// ``` /// use std::mem; /// - /// fn main() { - /// let u = 32 as u8; - /// let c = char::from(u); - /// assert!(4 == mem::size_of_val(&c)) - /// } + /// let u = 32 as u8; + /// let c = char::from(u); + /// assert!(4 == mem::size_of_val(&c)) /// ``` #[inline] fn from(i: u8) -> Self { diff --git a/src/libcore/char/decode.rs b/src/libcore/char/decode.rs index 23059243c6..b71c9c2c40 100644 --- a/src/libcore/char/decode.rs +++ b/src/libcore/char/decode.rs @@ -31,21 +31,23 @@ pub struct DecodeUtf16Error { /// ``` /// use std::char::decode_utf16; /// -/// fn main() { -/// // 𝄞music -/// let v = [0xD834, 0xDD1E, 0x006d, 0x0075, -/// 0x0073, 0xDD1E, 0x0069, 0x0063, -/// 0xD834]; +/// // 𝄞music +/// let v = [ +/// 0xD834, 0xDD1E, 0x006d, 0x0075, 0x0073, 0xDD1E, 0x0069, 0x0063, 0xD834, +/// ]; /// -/// assert_eq!(decode_utf16(v.iter().cloned()) -/// .map(|r| r.map_err(|e| e.unpaired_surrogate())) -/// .collect::>(), -/// vec![Ok('𝄞'), -/// Ok('m'), Ok('u'), Ok('s'), -/// Err(0xDD1E), -/// Ok('i'), Ok('c'), -/// Err(0xD834)]); -/// } +/// assert_eq!( +/// decode_utf16(v.iter().cloned()) +/// .map(|r| r.map_err(|e| e.unpaired_surrogate())) +/// .collect::>(), +/// vec![ +/// Ok('𝄞'), +/// Ok('m'), Ok('u'), Ok('s'), +/// Err(0xDD1E), +/// Ok('i'), Ok('c'), +/// Err(0xD834) +/// ] +/// ); /// ``` /// /// A lossy decoder can be obtained by replacing `Err` results with the replacement character: @@ -53,17 +55,17 @@ pub struct DecodeUtf16Error { /// ``` /// use std::char::{decode_utf16, REPLACEMENT_CHARACTER}; /// -/// fn main() { -/// // 𝄞music -/// let v = [0xD834, 0xDD1E, 0x006d, 0x0075, -/// 0x0073, 0xDD1E, 0x0069, 0x0063, -/// 0xD834]; +/// // 𝄞music +/// let v = [ +/// 0xD834, 0xDD1E, 0x006d, 0x0075, 0x0073, 0xDD1E, 0x0069, 0x0063, 0xD834, +/// ]; /// -/// assert_eq!(decode_utf16(v.iter().cloned()) -/// .map(|r| r.unwrap_or(REPLACEMENT_CHARACTER)) -/// .collect::(), -/// "𝄞mus�ic�"); -/// } +/// assert_eq!( +/// decode_utf16(v.iter().cloned()) +/// .map(|r| r.unwrap_or(REPLACEMENT_CHARACTER)) +/// .collect::(), +/// "𝄞mus�ic�" +/// ); /// ``` #[stable(feature = "decode_utf16", since = "1.9.0")] #[inline] diff --git a/src/libcore/char/methods.rs b/src/libcore/char/methods.rs index a69eb0f6d4..971d89e004 100644 --- a/src/libcore/char/methods.rs +++ b/src/libcore/char/methods.rs @@ -116,9 +116,9 @@ impl char { // the code is split up here to improve execution speed for cases where // the `radix` is constant and 10 or smaller - let val = if radix <= 10 { + let val = if radix <= 10 { match self { - '0' ..= '9' => self as u32 - '0' as u32, + '0'..='9' => self as u32 - '0' as u32, _ => return None, } } else { @@ -130,8 +130,11 @@ impl char { } }; - if val < radix { Some(val) } - else { None } + if val < radix { + Some(val) + } else { + None + } } /// Returns an iterator that yields the hexadecimal Unicode escape of a @@ -303,8 +306,8 @@ impl char { '\r' => EscapeDefaultState::Backslash('r'), '\n' => EscapeDefaultState::Backslash('n'), '\\' | '\'' | '"' => 
EscapeDefaultState::Backslash(self), - '\x20' ..= '\x7e' => EscapeDefaultState::Char(self), - _ => EscapeDefaultState::Unicode(self.escape_unicode()) + '\x20'..='\x7e' => EscapeDefaultState::Char(self), + _ => EscapeDefaultState::Unicode(self.escape_unicode()), }; EscapeDefault { state: init_state } } @@ -436,30 +439,31 @@ impl char { pub fn encode_utf8(self, dst: &mut [u8]) -> &mut str { let code = self as u32; unsafe { - let len = - if code < MAX_ONE_B && !dst.is_empty() { + let len = if code < MAX_ONE_B && !dst.is_empty() { *dst.get_unchecked_mut(0) = code as u8; 1 } else if code < MAX_TWO_B && dst.len() >= 2 { *dst.get_unchecked_mut(0) = (code >> 6 & 0x1F) as u8 | TAG_TWO_B; *dst.get_unchecked_mut(1) = (code & 0x3F) as u8 | TAG_CONT; 2 - } else if code < MAX_THREE_B && dst.len() >= 3 { + } else if code < MAX_THREE_B && dst.len() >= 3 { *dst.get_unchecked_mut(0) = (code >> 12 & 0x0F) as u8 | TAG_THREE_B; - *dst.get_unchecked_mut(1) = (code >> 6 & 0x3F) as u8 | TAG_CONT; + *dst.get_unchecked_mut(1) = (code >> 6 & 0x3F) as u8 | TAG_CONT; *dst.get_unchecked_mut(2) = (code & 0x3F) as u8 | TAG_CONT; 3 } else if dst.len() >= 4 { *dst.get_unchecked_mut(0) = (code >> 18 & 0x07) as u8 | TAG_FOUR_B; *dst.get_unchecked_mut(1) = (code >> 12 & 0x3F) as u8 | TAG_CONT; - *dst.get_unchecked_mut(2) = (code >> 6 & 0x3F) as u8 | TAG_CONT; + *dst.get_unchecked_mut(2) = (code >> 6 & 0x3F) as u8 | TAG_CONT; *dst.get_unchecked_mut(3) = (code & 0x3F) as u8 | TAG_CONT; 4 } else { - panic!("encode_utf8: need {} bytes to encode U+{:X}, but the buffer has {}", + panic!( + "encode_utf8: need {} bytes to encode U+{:X}, but the buffer has {}", from_u32_unchecked(code).len_utf8(), code, - dst.len()) + dst.len(), + ) }; from_utf8_unchecked_mut(dst.get_unchecked_mut(..len)) } @@ -515,15 +519,24 @@ impl char { *dst.get_unchecked_mut(1) = 0xDC00 | ((code as u16) & 0x3FF); slice::from_raw_parts_mut(dst.as_mut_ptr(), 2) } else { - panic!("encode_utf16: need {} units to encode U+{:X}, but the buffer has {}", + panic!( + "encode_utf16: need {} units to encode U+{:X}, but the buffer has {}", from_u32_unchecked(code).len_utf16(), code, - dst.len()) + dst.len(), + ) } } } - /// Returns `true` if this `char` is an alphabetic code point, and false if not. + /// Returns `true` if this `char` has the `Alphabetic` property. + /// + /// `Alphabetic` is described in Chapter 4 (Character Properties) of the [Unicode Standard] and + /// specified in the [Unicode Character Database][ucd] [`DerivedCoreProperties.txt`]. + /// + /// [Unicode Standard]: https://www.unicode.org/versions/latest/ + /// [ucd]: https://www.unicode.org/reports/tr44/ + /// [`DerivedCoreProperties.txt`]: https://www.unicode.org/Public/UCD/latest/ucd/DerivedCoreProperties.txt /// /// # Examples /// @@ -547,10 +560,14 @@ impl char { } } - /// Returns `true` if this `char` is lowercase. + /// Returns `true` if this `char` has the `Lowercase` property. /// - /// 'Lowercase' is defined according to the terms of the Unicode Derived Core - /// Property `Lowercase`. + /// `Lowercase` is described in Chapter 4 (Character Properties) of the [Unicode Standard] and + /// specified in the [Unicode Character Database][ucd] [`DerivedCoreProperties.txt`]. 
+ /// + /// [Unicode Standard]: https://www.unicode.org/versions/latest/ + /// [ucd]: https://www.unicode.org/reports/tr44/ + /// [`DerivedCoreProperties.txt`]: https://www.unicode.org/Public/UCD/latest/ucd/DerivedCoreProperties.txt /// /// # Examples /// @@ -575,10 +592,14 @@ impl char { } } - /// Returns `true` if this `char` is uppercase. + /// Returns `true` if this `char` has the `Uppercase` property. /// - /// 'Uppercase' is defined according to the terms of the Unicode Derived Core - /// Property `Uppercase`. + /// `Uppercase` is described in Chapter 4 (Character Properties) of the [Unicode Standard] and + /// specified in the [Unicode Character Database][ucd] [`DerivedCoreProperties.txt`]. + /// + /// [Unicode Standard]: https://www.unicode.org/versions/latest/ + /// [ucd]: https://www.unicode.org/reports/tr44/ + /// [`DerivedCoreProperties.txt`]: https://www.unicode.org/Public/UCD/latest/ucd/DerivedCoreProperties.txt /// /// # Examples /// @@ -603,10 +624,12 @@ impl char { } } - /// Returns `true` if this `char` is whitespace. + /// Returns `true` if this `char` has the `White_Space` property. /// - /// 'Whitespace' is defined according to the terms of the Unicode Derived Core - /// Property `White_Space`. + /// `White_Space` is specified in the [Unicode Character Database][ucd] [`PropList.txt`]. + /// + /// [ucd]: https://www.unicode.org/reports/tr44/ + /// [`PropList.txt`]: https://www.unicode.org/Public/UCD/latest/ucd/PropList.txt /// /// # Examples /// @@ -630,10 +653,10 @@ impl char { } } - /// Returns `true` if this `char` is alphanumeric. + /// Returns `true` if this `char` satisfies either [`is_alphabetic()`] or [`is_numeric()`]. /// - /// 'Alphanumeric'-ness is defined in terms of the Unicode General Categories - /// `Nd`, `Nl`, `No` and the Derived Core Property `Alphabetic`. + /// [`is_alphabetic()`]: #method.is_alphabetic + /// [`is_numeric()`]: #method.is_numeric /// /// # Examples /// @@ -655,10 +678,15 @@ impl char { self.is_alphabetic() || self.is_numeric() } - /// Returns `true` if this `char` is a control code point. + /// Returns `true` if this `char` has the general category for control codes. /// - /// 'Control code point' is defined in terms of the Unicode General - /// Category `Cc`. + /// Control codes (code points with the general category of `Cc`) are described in Chapter 4 + /// (Character Properties) of the [Unicode Standard] and specified in the [Unicode Character + /// Database][ucd] [`UnicodeData.txt`]. + /// + /// [Unicode Standard]: https://www.unicode.org/versions/latest/ + /// [ucd]: https://www.unicode.org/reports/tr44/ + /// [`UnicodeData.txt`]: https://www.unicode.org/Public/UCD/latest/ucd/UnicodeData.txt /// /// # Examples /// @@ -675,19 +703,29 @@ impl char { general_category::Cc(self) } - /// Returns `true` if this `char` is an extended grapheme character. + /// Returns `true` if this `char` has the `Grapheme_Extend` property. /// - /// 'Extended grapheme character' is defined in terms of the Unicode Shaping and Rendering - /// Category `Grapheme_Extend`. + /// `Grapheme_Extend` is described in [Unicode Standard Annex #29 (Unicode Text + /// Segmentation)][uax29] and specified in the [Unicode Character Database][ucd] + /// [`DerivedCoreProperties.txt`]. 
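The property-based wording introduced above can be exercised directly; a few hand-picked code points (chosen for illustration) covering `Uppercase`, `Lowercase`, `White_Space`, alphanumeric, and `Cc`:

```rust
fn main() {
    // `Uppercase` and `Lowercase` are Unicode properties, not just ASCII ranges.
    assert!('Δ'.is_uppercase());
    assert!('δ'.is_lowercase());

    // `White_Space` covers more than ' ' and '\n', e.g. U+1680 OGHAM SPACE MARK.
    assert!('\u{1680}'.is_whitespace());

    // Alphanumeric means alphabetic-or-numeric, so 'Ⅻ' (general category Nl) counts.
    assert!('Ⅻ'.is_alphanumeric());

    // Control codes (general category Cc) include both the C0 and C1 ranges.
    assert!('\u{009C}'.is_control());
    assert!(!'q'.is_control());
}
```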
+ /// + /// [uax29]: https://www.unicode.org/reports/tr29/ + /// [ucd]: https://www.unicode.org/reports/tr44/ + /// [`DerivedCoreProperties.txt`]: https://www.unicode.org/Public/UCD/latest/ucd/DerivedCoreProperties.txt #[inline] pub(crate) fn is_grapheme_extended(self) -> bool { derived_property::Grapheme_Extend(self) } - /// Returns `true` if this `char` is numeric. + /// Returns `true` if this `char` has one of the general categories for numbers. /// - /// 'Numeric'-ness is defined in terms of the Unicode General Categories - /// `Nd`, `Nl`, `No`. + /// The general categories for numbers (`Nd` for decimal digits, `Nl` for letter-like numeric + /// characters, and `No` for other numeric characters) are specified in the [Unicode Character + /// Database][ucd] [`UnicodeData.txt`]. + /// + /// [Unicode Standard]: https://www.unicode.org/versions/latest/ + /// [ucd]: https://www.unicode.org/reports/tr44/ + /// [`UnicodeData.txt`]: https://www.unicode.org/Public/UCD/latest/ucd/UnicodeData.txt /// /// # Examples /// @@ -713,25 +751,29 @@ impl char { } } - /// Returns an iterator that yields the lowercase equivalent of a `char` - /// as one or more `char`s. + /// Returns an iterator that yields the lowercase mapping of this `char` as one or more + /// `char`s. /// - /// If a character does not have a lowercase equivalent, the same character - /// will be returned back by the iterator. + /// If this `char` does not have a lowercase mapping, the iterator yields the same `char`. /// - /// This performs complex unconditional mappings with no tailoring: it maps - /// one Unicode character to its lowercase equivalent according to the - /// [Unicode database] and the additional complex mappings - /// [`SpecialCasing.txt`]. Conditional mappings (based on context or - /// language) are not considered here. + /// If this `char` has a one-to-one lowercase mapping given by the [Unicode Character + /// Database][ucd] [`UnicodeData.txt`], the iterator yields that `char`. /// - /// For a full reference, see [here][reference]. + /// [ucd]: https://www.unicode.org/reports/tr44/ + /// [`UnicodeData.txt`]: https://www.unicode.org/Public/UCD/latest/ucd/UnicodeData.txt /// - /// [Unicode database]: ftp://ftp.unicode.org/Public/UNIDATA/UnicodeData.txt + /// If this `char` requires special considerations (e.g. multiple `char`s) the iterator yields + /// the `char`(s) given by [`SpecialCasing.txt`]. /// - /// [`SpecialCasing.txt`]: ftp://ftp.unicode.org/Public/UNIDATA/SpecialCasing.txt + /// [`SpecialCasing.txt`]: https://www.unicode.org/Public/UCD/latest/ucd/SpecialCasing.txt /// - /// [reference]: http://www.unicode.org/versions/Unicode7.0.0/ch03.pdf#G33992 + /// This operation performs an unconditional mapping without tailoring. That is, the conversion + /// is independent of context and language. + /// + /// In the [Unicode Standard], Chapter 4 (Character Properties) discusses case mapping in + /// general and Chapter 3 (Conformance) discusses the default algorithm for case conversion. + /// + /// [Unicode Standard]: https://www.unicode.org/versions/latest/ /// /// # Examples /// @@ -774,25 +816,29 @@ impl char { ToLowercase(CaseMappingIter::new(conversions::to_lower(self))) } - /// Returns an iterator that yields the uppercase equivalent of a `char` - /// as one or more `char`s. + /// Returns an iterator that yields the uppercase mapping of this `char` as one or more + /// `char`s. /// - /// If a character does not have an uppercase equivalent, the same character - /// will be returned back by the iterator. 
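A runnable illustration of the one-to-one (`UnicodeData.txt`) versus multi-`char` (`SpecialCasing.txt`) lowercase mappings the rewritten docs describe; 'İ' is the classic special-cased example:

```rust
fn main() {
    // Most characters have a simple one-to-one mapping from UnicodeData.txt.
    assert_eq!('C'.to_lowercase().to_string(), "c");

    // 'İ' (U+0130) comes from SpecialCasing.txt: its lowercase form is "i"
    // followed by U+0307 COMBINING DOT ABOVE, i.e. two chars.
    let lower: String = 'İ'.to_lowercase().collect();
    assert_eq!(lower, "i\u{307}");
    assert_eq!('İ'.to_lowercase().count(), 2);
}
```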
+ /// If this `char` does not have a uppercase mapping, the iterator yields the same `char`. /// - /// This performs complex unconditional mappings with no tailoring: it maps - /// one Unicode character to its uppercase equivalent according to the - /// [Unicode database] and the additional complex mappings - /// [`SpecialCasing.txt`]. Conditional mappings (based on context or - /// language) are not considered here. + /// If this `char` has a one-to-one uppercase mapping given by the [Unicode Character + /// Database][ucd] [`UnicodeData.txt`], the iterator yields that `char`. /// - /// For a full reference, see [here][reference]. + /// [ucd]: https://www.unicode.org/reports/tr44/ + /// [`UnicodeData.txt`]: https://www.unicode.org/Public/UCD/latest/ucd/UnicodeData.txt /// - /// [Unicode database]: ftp://ftp.unicode.org/Public/UNIDATA/UnicodeData.txt + /// If this `char` requires special considerations (e.g. multiple `char`s) the iterator yields + /// the `char`(s) given by [`SpecialCasing.txt`]. /// - /// [`SpecialCasing.txt`]: ftp://ftp.unicode.org/Public/UNIDATA/SpecialCasing.txt + /// [`SpecialCasing.txt`]: https://www.unicode.org/Public/UCD/latest/ucd/SpecialCasing.txt /// - /// [reference]: http://www.unicode.org/versions/Unicode7.0.0/ch03.pdf#G33992 + /// This operation performs an unconditional mapping without tailoring. That is, the conversion + /// is independent of context and language. + /// + /// In the [Unicode Standard], Chapter 4 (Character Properties) discusses case mapping in + /// general and Chapter 3 (Conformance) discusses the default algorithm for case conversion. + /// + /// [Unicode Standard]: https://www.unicode.org/versions/latest/ /// /// # Examples /// diff --git a/src/libcore/clone.rs b/src/libcore/clone.rs index 6bdae1b557..14d947ccf2 100644 --- a/src/libcore/clone.rs +++ b/src/libcore/clone.rs @@ -135,7 +135,6 @@ pub trait Clone : Sized { /// Derive macro generating an impl of the trait `Clone`. #[rustc_builtin_macro] -#[cfg_attr(bootstrap, rustc_macro_transparency = "semitransparent")] #[stable(feature = "builtin_macro_prelude", since = "1.38.0")] #[allow_internal_unstable(core_intrinsics, derive_clone_copy)] pub macro Clone($item:item) { /* compiler built-in */ } diff --git a/src/libcore/cmp.rs b/src/libcore/cmp.rs index fc7329f57d..1ac51291b9 100644 --- a/src/libcore/cmp.rs +++ b/src/libcore/cmp.rs @@ -210,9 +210,8 @@ pub trait PartialEq { /// Derive macro generating an impl of the trait `PartialEq`. #[rustc_builtin_macro] -#[cfg_attr(bootstrap, rustc_macro_transparency = "semitransparent")] #[stable(feature = "builtin_macro_prelude", since = "1.38.0")] -#[allow_internal_unstable(core_intrinsics)] +#[allow_internal_unstable(core_intrinsics, structural_match)] pub macro PartialEq($item:item) { /* compiler built-in */ } /// Trait for equality comparisons which are [equivalence relations]( @@ -273,9 +272,8 @@ pub trait Eq: PartialEq { /// Derive macro generating an impl of the trait `Eq`. #[rustc_builtin_macro] -#[cfg_attr(bootstrap, rustc_macro_transparency = "semitransparent")] #[stable(feature = "builtin_macro_prelude", since = "1.38.0")] -#[allow_internal_unstable(core_intrinsics, derive_eq)] +#[allow_internal_unstable(core_intrinsics, derive_eq, structural_match)] pub macro Eq($item:item) { /* compiler built-in */ } // FIXME: this struct is used solely by #[derive] to @@ -624,7 +622,6 @@ pub trait Ord: Eq + PartialOrd { /// Derive macro generating an impl of the trait `Ord`. 
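The same split applies to `to_uppercase`; a short sketch using the well-known 'ß' special casing and a character with no uppercase mapping:

```rust
fn main() {
    // 'ß' has no one-to-one uppercase mapping; SpecialCasing.txt maps it to "SS".
    let upper: String = 'ß'.to_uppercase().collect();
    assert_eq!(upper, "SS");

    // Characters without an uppercase mapping are yielded unchanged.
    assert_eq!('山'.to_uppercase().to_string(), "山");
}
```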
#[rustc_builtin_macro] -#[cfg_attr(bootstrap, rustc_macro_transparency = "semitransparent")] #[stable(feature = "builtin_macro_prelude", since = "1.38.0")] #[allow_internal_unstable(core_intrinsics)] pub macro Ord($item:item) { /* compiler built-in */ } @@ -873,7 +870,6 @@ pub trait PartialOrd: PartialEq { /// Derive macro generating an impl of the trait `PartialOrd`. #[rustc_builtin_macro] -#[cfg_attr(bootstrap, rustc_macro_transparency = "semitransparent")] #[stable(feature = "builtin_macro_prelude", since = "1.38.0")] #[allow_internal_unstable(core_intrinsics)] pub macro PartialOrd($item:item) { /* compiler built-in */ } diff --git a/src/libcore/convert.rs b/src/libcore/convert.rs index 06f2b7bab1..3cd2337ee5 100644 --- a/src/libcore/convert.rs +++ b/src/libcore/convert.rs @@ -554,6 +554,18 @@ impl From for T { fn from(t: T) -> T { t } } +/// **Stability note:** This impl does not yet exist, but we are +/// "reserving space" to add it in the future. See +/// [rust-lang/rust#64715][#64715] for details. +/// +/// [#64715]: https://github.com/rust-lang/rust/issues/64715 +#[stable(feature = "convert_infallible", since = "1.34.0")] +#[cfg(not(bootstrap))] +#[rustc_reservation_impl="permitting this impl would forbid us from adding \ +`impl From for T` later; see rust-lang/rust#64715 for details"] +impl From for T { + fn from(t: !) -> T { t } +} // TryFrom implies TryInto #[stable(feature = "try_from", since = "1.34.0")] diff --git a/src/libcore/default.rs b/src/libcore/default.rs index 806d478310..1aadc77cfb 100644 --- a/src/libcore/default.rs +++ b/src/libcore/default.rs @@ -117,7 +117,6 @@ pub trait Default: Sized { /// Derive macro generating an impl of the trait `Default`. #[rustc_builtin_macro] -#[cfg_attr(bootstrap, rustc_macro_transparency = "semitransparent")] #[stable(feature = "builtin_macro_prelude", since = "1.38.0")] #[allow_internal_unstable(core_intrinsics)] pub macro Default($item:item) { /* compiler built-in */ } diff --git a/src/libcore/ffi.rs b/src/libcore/ffi.rs index eda0e7c518..569c667ac0 100644 --- a/src/libcore/ffi.rs +++ b/src/libcore/ffi.rs @@ -18,8 +18,13 @@ use crate::ops::{Deref, DerefMut}; /// stabilized, it is recommended to use a newtype wrapper around an empty /// byte array. See the [Nomicon] for details. /// +/// One could use `std::os::raw::c_void` if they want to support old Rust +/// compiler down to 1.1.0. After Rust 1.30.0, it was re-exported by +/// this definition. For more information, please read [RFC 2521]. +/// /// [pointer]: ../../std/primitive.pointer.html /// [Nomicon]: https://doc.rust-lang.org/nomicon/ffi.html#representing-opaque-structs +/// [RFC 2521]: https://github.com/rust-lang/rfcs/blob/master/text/2521-c_void-reunification.md // N.B., for LLVM to recognize the void pointer type and by extension // functions like malloc(), we need to have it represented as i8* in // LLVM bitcode. The enum used here ensures this and prevents misuse @@ -29,7 +34,7 @@ use crate::ops::{Deref, DerefMut}; // would be uninhabited and at least dereferencing such pointers would // be UB. #[repr(u8)] -#[stable(feature = "raw_os", since = "1.1.0")] +#[stable(feature = "core_c_void", since = "1.30.0")] pub enum c_void { #[unstable(feature = "c_void_variant", reason = "temporary implementation detail", issue = "0")] @@ -49,8 +54,10 @@ impl fmt::Debug for c_void { /// Basic implementation of a `va_list`. // The name is WIP, using `VaListImpl` for now. 
#[cfg(any(all(not(target_arch = "aarch64"), not(target_arch = "powerpc"), - not(target_arch = "x86_64"), not(target_arch = "asmjs")), + not(target_arch = "x86_64")), all(target_arch = "aarch64", target_os = "ios"), + target_arch = "wasm32", + target_arch = "asmjs", windows))] #[repr(transparent)] #[unstable(feature = "c_variadic", @@ -67,8 +74,10 @@ pub struct VaListImpl<'f> { } #[cfg(any(all(not(target_arch = "aarch64"), not(target_arch = "powerpc"), - not(target_arch = "x86_64"), not(target_arch = "asmjs")), + not(target_arch = "x86_64")), all(target_arch = "aarch64", target_os = "ios"), + target_arch = "wasm32", + target_arch = "asmjs", windows))] #[unstable(feature = "c_variadic", reason = "the `c_variadic` feature has not been properly tested on \ @@ -137,38 +146,6 @@ pub struct VaListImpl<'f> { _marker: PhantomData<&'f mut &'f c_void>, } -/// asm.js ABI implementation of a `va_list`. -// asm.js uses the PNaCl ABI, which specifies that a `va_list` is -// an array of 4 32-bit integers, according to the old PNaCl docs at -// https://web.archive.org/web/20130518054430/https://www.chromium.org/nativeclient/pnacl/bitcode-abi#TOC-Derived-Types -// and clang does the same in `CreatePNaClABIBuiltinVaListDecl` from `lib/AST/ASTContext.cpp` -#[cfg(all(target_arch = "asmjs", not(windows)))] -#[repr(C)] -#[unstable(feature = "c_variadic", - reason = "the `c_variadic` feature has not been properly tested on \ - all supported platforms", - issue = "44930")] -#[lang = "va_list"] -pub struct VaListImpl<'f> { - inner: [crate::mem::MaybeUninit; 4], - _marker: PhantomData<&'f mut &'f c_void>, -} - -#[cfg(all(target_arch = "asmjs", not(windows)))] -#[unstable(feature = "c_variadic", - reason = "the `c_variadic` feature has not been properly tested on \ - all supported platforms", - issue = "44930")] -impl<'f> fmt::Debug for VaListImpl<'f> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - unsafe { - write!(f, "va_list* [{:#x}, {:#x}, {:#x}, {:#x}]", - self.inner[0].read(), self.inner[1].read(), - self.inner[2].read(), self.inner[3].read()) - } - } -} - /// A wrapper for a `va_list` #[repr(transparent)] #[derive(Debug)] @@ -178,14 +155,18 @@ impl<'f> fmt::Debug for VaListImpl<'f> { issue = "44930")] pub struct VaList<'a, 'f: 'a> { #[cfg(any(all(not(target_arch = "aarch64"), not(target_arch = "powerpc"), - not(target_arch = "x86_64"), not(target_arch = "asmjs")), + not(target_arch = "x86_64")), all(target_arch = "aarch64", target_os = "ios"), + target_arch = "wasm32", + target_arch = "asmjs", windows))] inner: VaListImpl<'f>, #[cfg(all(any(target_arch = "aarch64", target_arch = "powerpc", - target_arch = "x86_64", target_arch = "asmjs"), + target_arch = "x86_64"), any(not(target_arch = "aarch64"), not(target_os = "ios")), + not(target_arch = "wasm32"), + not(target_arch = "asmjs"), not(windows)))] inner: &'a mut VaListImpl<'f>, @@ -193,8 +174,10 @@ pub struct VaList<'a, 'f: 'a> { } #[cfg(any(all(not(target_arch = "aarch64"), not(target_arch = "powerpc"), - not(target_arch = "x86_64"), not(target_arch = "asmjs")), + not(target_arch = "x86_64")), all(target_arch = "aarch64", target_os = "ios"), + target_arch = "wasm32", + target_arch = "asmjs", windows))] #[unstable(feature = "c_variadic", reason = "the `c_variadic` feature has not been properly tested on \ @@ -212,8 +195,10 @@ impl<'f> VaListImpl<'f> { } #[cfg(all(any(target_arch = "aarch64", target_arch = "powerpc", - target_arch = "x86_64", target_arch = "asmjs"), + target_arch = "x86_64"), any(not(target_arch = "aarch64"), not(target_os = 
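A tiny, self-contained illustration of the cfg-selection pattern the `VaListImpl` hunks above adjust: exactly one platform-specific representation is compiled in and the rest are omitted. The type names and the `target_pointer_width` split here are made up for the example, not taken from the patch.

```rust
// Hypothetical platform-dependent alias, selected at compile time.
#[cfg(target_pointer_width = "64")]
type SavedRegisters = [u64; 6];

#[cfg(not(target_pointer_width = "64"))]
type SavedRegisters = [u32; 6];

fn main() {
    // Whichever branch was compiled, the rest of the code sees one type.
    let regs: SavedRegisters = Default::default();
    assert_eq!(regs.len(), 6);
}
```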
"ios")), + not(target_arch = "wasm32"), + not(target_arch = "asmjs"), not(windows)))] #[unstable(feature = "c_variadic", reason = "the `c_variadic` feature has not been properly tested on \ diff --git a/src/libcore/fmt/builders.rs b/src/libcore/fmt/builders.rs index 15ce2277fa..e6e3454b36 100644 --- a/src/libcore/fmt/builders.rs +++ b/src/libcore/fmt/builders.rs @@ -775,10 +775,10 @@ impl<'a, 'b: 'a> DebugMap<'a, 'b> { reason = "recently added", issue = "62482")] pub fn key(&mut self, key: &dyn fmt::Debug) -> &mut DebugMap<'a, 'b> { - assert!(!self.has_key, "attempted to begin a new map entry \ - without completing the previous one"); - self.result = self.result.and_then(|_| { + assert!(!self.has_key, "attempted to begin a new map entry \ + without completing the previous one"); + if self.is_pretty() { if !self.has_fields { self.fmt.write_str("\n")?; @@ -839,9 +839,9 @@ impl<'a, 'b: 'a> DebugMap<'a, 'b> { reason = "recently added", issue = "62482")] pub fn value(&mut self, value: &dyn fmt::Debug) -> &mut DebugMap<'a, 'b> { - assert!(self.has_key, "attempted to format a map value before its key"); - self.result = self.result.and_then(|_| { + assert!(self.has_key, "attempted to format a map value before its key"); + if self.is_pretty() { let mut slot = None; let mut writer = PadAdapter::wrap(&mut self.fmt, &mut slot, &mut self.state); @@ -924,9 +924,11 @@ impl<'a, 'b: 'a> DebugMap<'a, 'b> { /// ``` #[stable(feature = "debug_builders", since = "1.2.0")] pub fn finish(&mut self) -> fmt::Result { - assert!(!self.has_key, "attempted to finish a map with a partial entry"); + self.result.and_then(|_| { + assert!(!self.has_key, "attempted to finish a map with a partial entry"); - self.result.and_then(|_| self.fmt.write_str("}")) + self.fmt.write_str("}") + }) } fn is_pretty(&self) -> bool { diff --git a/src/libcore/fmt/mod.rs b/src/libcore/fmt/mod.rs index 65e2f8b9be..0e83a282b1 100644 --- a/src/libcore/fmt/mod.rs +++ b/src/libcore/fmt/mod.rs @@ -108,10 +108,10 @@ pub struct Error; /// [`io::Write`]: ../../std/io/trait.Write.html #[stable(feature = "rust1", since = "1.0.0")] pub trait Write { - /// Writes a slice of bytes into this writer, returning whether the write + /// Writes a string slice into this writer, returning whether the write /// succeeded. /// - /// This method can only succeed if the entire byte slice was successfully + /// This method can only succeed if the entire string slice was successfully /// written, and this method will not return until all data has been /// written or an error occurs. /// @@ -518,8 +518,7 @@ impl Display for Arguments<'_> { label="`{Self}` cannot be formatted using `{{:?}}` because it doesn't implement `{Debug}`", )] #[doc(alias = "{:?}")] -#[cfg_attr(bootstrap, lang = "debug_trait")] -#[cfg_attr(not(bootstrap), rustc_diagnostic_item = "debug_trait")] +#[rustc_diagnostic_item = "debug_trait"] pub trait Debug { /// Formats the value using the given formatter. /// @@ -550,7 +549,6 @@ pub trait Debug { pub(crate) mod macros { /// Derive macro generating an impl of the trait `Debug`. 
#[rustc_builtin_macro] - #[cfg_attr(bootstrap, rustc_macro_transparency = "semitransparent")] #[stable(feature = "builtin_macro_prelude", since = "1.38.0")] #[allow_internal_unstable(core_intrinsics)] pub macro Debug($item:item) { /* compiler built-in */ } @@ -1534,12 +1532,10 @@ impl<'a> Formatter<'a> { /// } /// } /// - /// fn main() { - /// assert_eq!(&format!("{:<}", Foo), "left"); - /// assert_eq!(&format!("{:>}", Foo), "right"); - /// assert_eq!(&format!("{:^}", Foo), "center"); - /// assert_eq!(&format!("{}", Foo), "into the void"); - /// } + /// assert_eq!(&format!("{:<}", Foo), "left"); + /// assert_eq!(&format!("{:>}", Foo), "right"); + /// assert_eq!(&format!("{:^}", Foo), "center"); + /// assert_eq!(&format!("{}", Foo), "into the void"); /// ``` #[stable(feature = "fmt_flags_align", since = "1.28.0")] pub fn align(&self) -> Option { @@ -2029,7 +2025,7 @@ impl Pointer for *const T { if f.alternate() { f.flags |= 1 << (FlagV1::SignAwareZeroPad as u32); - if let None = f.width { + if f.width.is_none() { f.width = Some(((mem::size_of::() * 8) / 4) + 2); } } diff --git a/src/libcore/hash/mod.rs b/src/libcore/hash/mod.rs index aaaa6f9c57..020e085abf 100644 --- a/src/libcore/hash/mod.rs +++ b/src/libcore/hash/mod.rs @@ -202,7 +202,6 @@ pub trait Hash { pub(crate) mod macros { /// Derive macro generating an impl of the trait `Hash`. #[rustc_builtin_macro] - #[cfg_attr(bootstrap, rustc_macro_transparency = "semitransparent")] #[stable(feature = "builtin_macro_prelude", since = "1.38.0")] #[allow_internal_unstable(core_intrinsics)] pub macro Hash($item:item) { /* compiler built-in */ } diff --git a/src/libcore/hint.rs b/src/libcore/hint.rs index ee4be6c915..368a2f16b2 100644 --- a/src/libcore/hint.rs +++ b/src/libcore/hint.rs @@ -114,24 +114,8 @@ pub fn black_box(dummy: T) -> T { // this. LLVM's intepretation of inline assembly is that it's, well, a black // box. This isn't the greatest implementation since it probably deoptimizes // more than we want, but it's so far good enough. - #[cfg(not(any( - target_arch = "asmjs", - all( - target_arch = "wasm32", - target_os = "emscripten" - ) - )))] unsafe { asm!("" : : "r"(&dummy)); return dummy; } - - // Not all platforms support inline assembly so try to do something without - // inline assembly which in theory still hinders at least some optimizations - // on those targets. This is the "best effort" scenario. - unsafe { - let ret = crate::ptr::read_volatile(&dummy); - crate::mem::forget(dummy); - ret - } } diff --git a/src/libcore/intrinsics.rs b/src/libcore/intrinsics.rs index 905375eb60..3db85d05d7 100644 --- a/src/libcore/intrinsics.rs +++ b/src/libcore/intrinsics.rs @@ -696,6 +696,10 @@ extern "rust-intrinsic" { /// This will statically either panic, or do nothing. pub fn panic_if_uninhabited(); + /// Gets a reference to a static `Location` indicating where it was called. + #[cfg(not(bootstrap))] + pub fn caller_location() -> &'static crate::panic::Location<'static>; + /// Creates a value initialized to zero. /// /// `init` is unsafe because it returns a zeroed-out datum, @@ -874,6 +878,7 @@ extern "rust-intrinsic" { /// // the original inner type (`&i32`) to the converted inner type /// // (`Option<&i32>`), so read the nomicon pages linked above. /// let v_from_raw = unsafe { + // FIXME Update this when vec_into_raw_parts is stabilized /// // Ensure the original vector is not dropped. 
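Since the derive-macro plumbing for `Hash` appears above, a brief sketch of what the derive gives user code (the `Point` type and `hash_of` helper are illustrative only):

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// The derive expands to a Hash impl that feeds each field to the hasher in order.
#[derive(Hash)]
struct Point {
    x: i32,
    y: i32,
}

fn hash_of<T: Hash>(value: &T) -> u64 {
    let mut hasher = DefaultHasher::new();
    value.hash(&mut hasher);
    hasher.finish()
}

fn main() {
    // Equal values must produce equal hashes; that is the contract with Eq.
    assert_eq!(hash_of(&Point { x: 1, y: 2 }), hash_of(&Point { x: 1, y: 2 }));
}
```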
/// let mut v_clone = std::mem::ManuallyDrop::new(v_clone); /// Vec::from_raw_parts(v_clone.as_mut_ptr() as *mut Option<&i32>, @@ -1299,38 +1304,16 @@ extern "rust-intrinsic" { /// The stabilized versions of this intrinsic are available on the integer /// primitives via the `wrapping_add` method. For example, /// [`std::u32::wrapping_add`](../../std/primitive.u32.html#method.wrapping_add) - #[cfg(bootstrap)] - pub fn overflowing_add(a: T, b: T) -> T; - /// Returns (a - b) mod 2N, where N is the width of T in bits. - /// The stabilized versions of this intrinsic are available on the integer - /// primitives via the `wrapping_sub` method. For example, - /// [`std::u32::wrapping_sub`](../../std/primitive.u32.html#method.wrapping_sub) - #[cfg(bootstrap)] - pub fn overflowing_sub(a: T, b: T) -> T; - /// Returns (a * b) mod 2N, where N is the width of T in bits. - /// The stabilized versions of this intrinsic are available on the integer - /// primitives via the `wrapping_mul` method. For example, - /// [`std::u32::wrapping_mul`](../../std/primitive.u32.html#method.wrapping_mul) - #[cfg(bootstrap)] - pub fn overflowing_mul(a: T, b: T) -> T; - - /// Returns (a + b) mod 2N, where N is the width of T in bits. - /// The stabilized versions of this intrinsic are available on the integer - /// primitives via the `wrapping_add` method. For example, - /// [`std::u32::wrapping_add`](../../std/primitive.u32.html#method.wrapping_add) - #[cfg(not(bootstrap))] pub fn wrapping_add(a: T, b: T) -> T; /// Returns (a - b) mod 2N, where N is the width of T in bits. /// The stabilized versions of this intrinsic are available on the integer /// primitives via the `wrapping_sub` method. For example, /// [`std::u32::wrapping_sub`](../../std/primitive.u32.html#method.wrapping_sub) - #[cfg(not(bootstrap))] pub fn wrapping_sub(a: T, b: T) -> T; /// Returns (a * b) mod 2N, where N is the width of T in bits. /// The stabilized versions of this intrinsic are available on the integer /// primitives via the `wrapping_mul` method. For example, /// [`std::u32::wrapping_mul`](../../std/primitive.u32.html#method.wrapping_mul) - #[cfg(not(bootstrap))] pub fn wrapping_mul(a: T, b: T) -> T; /// Computes `a + b`, while saturating at numeric bounds. @@ -1361,6 +1344,10 @@ extern "rust-intrinsic" { /// Emits a `!nontemporal` store according to LLVM (see their docs). /// Probably will never become stable. pub fn nontemporal_store(ptr: *mut T, val: T); + + /// See documentation of `<*const T>::offset_from` for details. + #[cfg(not(bootstrap))] + pub fn ptr_offset_from(ptr: *const T, base: *const T) -> isize; } // Some functions are defined here because they accidentally got made diff --git a/src/libcore/iter/mod.rs b/src/libcore/iter/mod.rs index aba8e84d58..fac6ff0f06 100644 --- a/src/libcore/iter/mod.rs +++ b/src/libcore/iter/mod.rs @@ -118,26 +118,16 @@ //! //! let mut counter = Counter::new(); //! -//! let x = counter.next().unwrap(); -//! println!("{}", x); -//! -//! let x = counter.next().unwrap(); -//! println!("{}", x); -//! -//! let x = counter.next().unwrap(); -//! println!("{}", x); -//! -//! let x = counter.next().unwrap(); -//! println!("{}", x); -//! -//! let x = counter.next().unwrap(); -//! println!("{}", x); +//! assert_eq!(counter.next(), Some(1)); +//! assert_eq!(counter.next(), Some(2)); +//! assert_eq!(counter.next(), Some(3)); +//! assert_eq!(counter.next(), Some(4)); +//! assert_eq!(counter.next(), Some(5)); +//! assert_eq!(counter.next(), None); //! ``` //! -//! 
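A compact, compilable version of the `Counter` used in the iterator module docs above, checked with the same `assert_eq!` style the rewritten example adopts:

```rust
struct Counter {
    count: u32,
}

impl Iterator for Counter {
    type Item = u32;

    fn next(&mut self) -> Option<u32> {
        // Yield 1 through 5, then None forever.
        if self.count < 5 {
            self.count += 1;
            Some(self.count)
        } else {
            None
        }
    }
}

fn main() {
    let mut counter = Counter { count: 0 };
    assert_eq!(counter.next(), Some(1));
    // Default methods such as sum() come for free once next() is defined.
    assert_eq!(Counter { count: 0 }.sum::<u32>(), 15);
}
```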
This will print `1` through `5`, each on their own line. -//! -//! Calling `next()` this way gets repetitive. Rust has a construct which can -//! call `next()` on your iterator, until it reaches `None`. Let's go over that +//! Calling [`next`] this way gets repetitive. Rust has a construct which can +//! call [`next`] on your iterator, until it reaches `None`. Let's go over that //! next. //! //! Also note that `Iterator` provides a default implementation of methods such as `nth` and `fold` @@ -253,20 +243,23 @@ //! ``` //! //! The idiomatic way to write a [`map`] for its side effects is to use a -//! `for` loop instead: +//! `for` loop or call the [`for_each`] method: //! //! ``` //! let v = vec![1, 2, 3, 4, 5]; //! +//! v.iter().for_each(|x| println!("{}", x)); +//! // or //! for x in &v { //! println!("{}", x); //! } //! ``` //! //! [`map`]: trait.Iterator.html#method.map +//! [`for_each`]: trait.Iterator.html#method.for_each //! -//! The two most common ways to evaluate an iterator are to use a `for` loop -//! like this, or using the [`collect`] method to produce a new collection. +//! Another common way to evaluate an iterator is to use the [`collect`] +//! method to produce a new collection. //! //! [`collect`]: trait.Iterator.html#method.collect //! diff --git a/src/libcore/iter/traits/collect.rs b/src/libcore/iter/traits/collect.rs index 25439136b8..00a8641705 100644 --- a/src/libcore/iter/traits/collect.rs +++ b/src/libcore/iter/traits/collect.rs @@ -167,7 +167,7 @@ pub trait FromIterator: Sized { /// // and we'll implement IntoIterator /// impl IntoIterator for MyCollection { /// type Item = i32; -/// type IntoIter = ::std::vec::IntoIter; +/// type IntoIter = std::vec::IntoIter; /// /// fn into_iter(self) -> Self::IntoIter { /// self.0.into_iter() diff --git a/src/libcore/iter/traits/iterator.rs b/src/libcore/iter/traits/iterator.rs index da49223dfb..7ffc8b3729 100644 --- a/src/libcore/iter/traits/iterator.rs +++ b/src/libcore/iter/traits/iterator.rs @@ -384,6 +384,9 @@ pub trait Iterator { /// /// In other words, it links two iterators together, in a chain. 🔗 /// + /// [`once`] is commonly used to adapt a single value into a chain of + /// other kinds of iteration. + /// /// # Examples /// /// Basic usage: @@ -408,9 +411,6 @@ pub trait Iterator { /// [`Iterator`] itself. 
For example, slices (`&[T]`) implement /// [`IntoIterator`], and so can be passed to `chain()` directly: /// - /// [`IntoIterator`]: trait.IntoIterator.html - /// [`Iterator`]: trait.Iterator.html - /// /// ``` /// let s1 = &[1, 2, 3]; /// let s2 = &[4, 5, 6]; @@ -425,6 +425,21 @@ pub trait Iterator { /// assert_eq!(iter.next(), Some(&6)); /// assert_eq!(iter.next(), None); /// ``` + /// + /// If you work with Windows API, you may wish to convert [`OsStr`] to `Vec`: + /// + /// ``` + /// #[cfg(windows)] + /// fn os_str_to_utf16(s: &std::ffi::OsStr) -> Vec { + /// use std::os::windows::ffi::OsStrExt; + /// s.encode_wide().chain(std::iter::once(0)).collect() + /// } + /// ``` + /// + /// [`once`]: fn.once.html + /// [`Iterator`]: trait.Iterator.html + /// [`IntoIterator`]: trait.IntoIterator.html + /// [`OsStr`]: ../../std/ffi/struct.OsStr.html #[inline] #[stable(feature = "rust1", since = "1.0.0")] fn chain(self, other: U) -> Chain where @@ -1859,14 +1874,13 @@ pub trait Iterator { Self: Sized, F: FnMut(Self::Item) -> bool { #[inline] - fn check(mut f: impl FnMut(T) -> bool) -> impl FnMut(T) -> LoopState<(), ()> { - move |x| { + fn check(mut f: impl FnMut(T) -> bool) -> impl FnMut((), T) -> LoopState<(), ()> { + move |(), x| { if f(x) { LoopState::Continue(()) } else { LoopState::Break(()) } } } - - self.try_for_each(check(f)) == LoopState::Continue(()) + self.try_fold((), check(f)) == LoopState::Continue(()) } /// Tests if any element of the iterator matches a predicate. @@ -1913,14 +1927,14 @@ pub trait Iterator { F: FnMut(Self::Item) -> bool { #[inline] - fn check(mut f: impl FnMut(T) -> bool) -> impl FnMut(T) -> LoopState<(), ()> { - move |x| { + fn check(mut f: impl FnMut(T) -> bool) -> impl FnMut((), T) -> LoopState<(), ()> { + move |(), x| { if f(x) { LoopState::Break(()) } else { LoopState::Continue(()) } } } - self.try_for_each(check(f)) == LoopState::Break(()) + self.try_fold((), check(f)) == LoopState::Break(()) } /// Searches for an element of an iterator that satisfies a predicate. @@ -1972,14 +1986,16 @@ pub trait Iterator { P: FnMut(&Self::Item) -> bool, { #[inline] - fn check(mut predicate: impl FnMut(&T) -> bool) -> impl FnMut(T) -> LoopState<(), T> { - move |x| { + fn check( + mut predicate: impl FnMut(&T) -> bool + ) -> impl FnMut((), T) -> LoopState<(), T> { + move |(), x| { if predicate(&x) { LoopState::Break(x) } else { LoopState::Continue(()) } } } - self.try_for_each(check(predicate)).break_value() + self.try_fold((), check(predicate)).break_value() } /// Applies function to the elements of iterator and returns @@ -2004,14 +2020,14 @@ pub trait Iterator { F: FnMut(Self::Item) -> Option, { #[inline] - fn check(mut f: impl FnMut(T) -> Option) -> impl FnMut(T) -> LoopState<(), B> { - move |x| match f(x) { + fn check(mut f: impl FnMut(T) -> Option) -> impl FnMut((), T) -> LoopState<(), B> { + move |(), x| match f(x) { Some(x) => LoopState::Break(x), None => LoopState::Continue(()), } } - self.try_for_each(check(f)).break_value() + self.try_fold((), check(f)).break_value() } /// Searches for an element in an iterator, returning its index. 
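Since `all`, `any`, `find`, and `find_map` are rerouted through `try_fold` above, a small usage sketch showing the short-circuiting behavior callers observe (inputs are illustrative):

```rust
fn main() {
    let words = ["alpha", "beta", "gamma"];

    // `any` stops at the first matching element ...
    assert!(words.iter().any(|w| w.starts_with('b')));

    // ... and `find_map` returns the first successful mapping.
    let len = words.iter().find_map(|w| {
        if w.starts_with('g') { Some(w.len()) } else { None }
    });
    assert_eq!(len, Some(5));
}
```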
@@ -2581,7 +2597,7 @@ pub trait Iterator { /// assert_eq!(xs.iter().cmp_by(&ys, |&x, &y| (x * x).cmp(&y)), Ordering::Equal); /// assert_eq!(xs.iter().cmp_by(&ys, |&x, &y| (2 * x).cmp(&y)), Ordering::Greater); /// ``` - #[unstable(feature = "iter_order_by", issue = "0")] + #[unstable(feature = "iter_order_by", issue = "64295")] fn cmp_by(mut self, other: I, mut cmp: F) -> Ordering where Self: Sized, @@ -2664,7 +2680,7 @@ pub trait Iterator { /// Some(Ordering::Greater) /// ); /// ``` - #[unstable(feature = "iter_order_by", issue = "0")] + #[unstable(feature = "iter_order_by", issue = "64295")] fn partial_cmp_by(mut self, other: I, mut partial_cmp: F) -> Option where Self: Sized, @@ -2729,7 +2745,7 @@ pub trait Iterator { /// /// assert!(xs.iter().eq_by(&ys, |&x, &y| x * x == y)); /// ``` - #[unstable(feature = "iter_order_by", issue = "0")] + #[unstable(feature = "iter_order_by", issue = "64295")] fn eq_by(mut self, other: I, mut eq: F) -> bool where Self: Sized, diff --git a/src/libcore/lib.rs b/src/libcore/lib.rs index 8221df56a5..1b67b05c73 100644 --- a/src/libcore/lib.rs +++ b/src/libcore/lib.rs @@ -87,7 +87,6 @@ #![feature(link_llvm_intrinsics)] #![feature(never_type)] #![feature(nll)] -#![cfg_attr(bootstrap, feature(bind_by_move_pattern_guards))] #![feature(exhaustive_patterns)] #![feature(no_core)] #![feature(on_unimplemented)] @@ -120,24 +119,21 @@ #![feature(rtm_target_feature)] #![feature(f16c_target_feature)] #![feature(hexagon_target_feature)] -#![cfg_attr(bootstrap, feature(const_slice_len))] -#![cfg_attr(bootstrap, feature(const_str_as_bytes))] -#![cfg_attr(bootstrap, feature(const_str_len))] #![feature(const_int_conversion)] #![feature(const_transmute)] -#![feature(non_exhaustive)] +#![cfg_attr(bootstrap, feature(non_exhaustive))] #![feature(structural_match)] #![feature(abi_unadjusted)] #![feature(adx_target_feature)] #![feature(maybe_uninit_slice)] #![feature(external_doc)] -#![feature(mem_take)] #![feature(associated_type_bounds)] #[prelude_import] #[allow(unused)] use prelude::v1::*; +#[cfg(not(test))] // See #65860 #[macro_use] mod macros; @@ -185,10 +181,14 @@ pub mod hint; /* Core language traits */ +#[cfg(not(test))] // See #65860 pub mod marker; pub mod ops; +#[cfg(not(test))] // See #65860 pub mod cmp; +#[cfg(not(test))] // See #65860 pub mod clone; +#[cfg(not(test))] // See #65860 pub mod default; pub mod convert; pub mod borrow; @@ -196,6 +196,7 @@ pub mod borrow; /* Core types and methods on primitives */ pub mod any; +#[cfg(not(test))] // See #65860 pub mod array; pub mod ascii; pub mod sync; @@ -203,7 +204,9 @@ pub mod cell; pub mod char; pub mod panic; pub mod panicking; +#[cfg(not(test))] // See #65860 pub mod pin; +#[cfg(not(test))] // See #65860 pub mod iter; pub mod option; pub mod raw; @@ -211,14 +214,18 @@ pub mod result; pub mod ffi; pub mod slice; +#[cfg(not(test))] // See #65860 pub mod str; +#[cfg(not(test))] // See #65860 pub mod hash; +#[cfg(not(test))] // See #65860 pub mod fmt; pub mod time; pub mod unicode; /* Async */ +#[cfg(not(test))] // See #65860 pub mod future; pub mod task; diff --git a/src/libcore/macros.rs b/src/libcore/macros.rs index c6f5fb0b16..8ccd31c95d 100644 --- a/src/libcore/macros.rs +++ b/src/libcore/macros.rs @@ -1,8 +1,9 @@ /// Panics the current thread. /// /// For details, see `std::macros`. +#[cfg(bootstrap)] #[macro_export] -#[allow_internal_unstable(core_panic)] +#[allow_internal_unstable(core_panic, panic_internals)] #[stable(feature = "core", since = "1.6.0")] macro_rules! 
panic { () => ( @@ -20,6 +21,38 @@ macro_rules! panic { }); } +/// Panics the current thread. +/// +/// For details, see `std::macros`. +#[cfg(not(bootstrap))] +#[macro_export] +#[allow_internal_unstable(core_panic, panic_internals)] +#[stable(feature = "core", since = "1.6.0")] +macro_rules! panic { + () => ( + $crate::panic!("explicit panic") + ); + ($msg:expr) => ({ + const LOC: &$crate::panic::Location<'_> = &$crate::panic::Location::internal_constructor( + $crate::file!(), + $crate::line!(), + $crate::column!(), + ); + $crate::panicking::panic($msg, LOC) + }); + ($msg:expr,) => ( + $crate::panic!($msg) + ); + ($fmt:expr, $($arg:tt)+) => ({ + const LOC: &$crate::panic::Location<'_> = &$crate::panic::Location::internal_constructor( + $crate::file!(), + $crate::line!(), + $crate::column!(), + ); + $crate::panicking::panic_fmt($crate::format_args!($fmt, $($arg)+), LOC) + }); +} + /// Asserts that two expressions are equal to each other (using [`PartialEq`]). /// /// On panic, this macro will print the values of the expressions with their @@ -238,6 +271,33 @@ macro_rules! debug_assert_ne { ($($arg:tt)*) => (if $crate::cfg!(debug_assertions) { $crate::assert_ne!($($arg)*); }) } +/// Returns whether the given expression matches any of the given patterns. +/// +/// Like in a `match` expression, the pattern can be optionally followed by `if` +/// and a guard expression that has access to names bound by the pattern. +/// +/// # Examples +/// +/// ``` +/// #![feature(matches_macro)] +/// +/// let foo = 'f'; +/// assert!(matches!(foo, 'A'..='Z' | 'a'..='z')); +/// +/// let bar = Some(4); +/// assert!(matches!(bar, Some(x) if x > 2)); +/// ``` +#[macro_export] +#[unstable(feature = "matches_macro", issue = "65721")] +macro_rules! matches { + ($expression:expr, $( $pattern:pat )|+ $( if $guard: expr )?) => { + match $expression { + $( $pattern )|+ $( if $guard )? => true, + _ => false + } + } +} + /// Unwraps a result or propagates its error. /// /// The `?` operator was added to replace `try!` and should be used instead. @@ -465,7 +525,7 @@ macro_rules! writeln { /// The unsafe counterpart of this macro is the [`unreachable_unchecked`] function, which /// will cause undefined behavior if the code is reached. /// -/// [`panic!`]: ../std/macro.panic.html +/// [`panic!`]: ../std/macro.panic.html /// [`unreachable_unchecked`]: ../std/hint/fn.unreachable_unchecked.html /// [`std::hint`]: ../std/hint/index.html /// @@ -474,6 +534,7 @@ macro_rules! writeln { /// This will always [`panic!`] /// /// [`panic!`]: ../std/macro.panic.html +/// /// # Examples /// /// Match arms: @@ -519,15 +580,20 @@ macro_rules! unreachable { }); } -/// Indicates unfinished code. +/// Indicates unfinished code by panicking with a message of "not yet implemented". /// -/// This can be useful if you are prototyping and are just looking to have your -/// code type-check, or if you're implementing a trait that requires multiple -/// methods, and you're only planning on using one of them. +/// This allows the your code to type-check, which is useful if you are prototyping or +/// implementing a trait that requires multiple methods which you don't plan of using all of. +/// +/// There is no difference between `unimplemented!` and `todo!` apart from the +/// name. /// /// # Panics /// -/// This will always [panic!](macro.panic.html) +/// This will always [panic!](macro.panic.html) because `unimplemented!` is just a +/// shorthand for `panic!` with a fixed, specific message. 
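A quick before/after sketch of what the new `matches!` macro replaces. Note the macro is feature-gated (`matches_macro`) on the compiler this patch targets, though the code below compiles as-is on later stable toolchains:

```rust
fn is_vowel(c: char) -> bool {
    // The hand-written form matches! is meant to shorten.
    match c {
        'a' | 'e' | 'i' | 'o' | 'u' => true,
        _ => false,
    }
}

fn main() {
    assert!(is_vowel('e'));

    // The macro form, including an `if` guard over a name bound by the pattern.
    let n = Some(4);
    assert!(matches!(n, Some(x) if x % 2 == 0));
}
```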
+/// +/// Like `panic!`, this macro has a second form for displaying custom values. /// /// # Examples /// @@ -535,38 +601,53 @@ macro_rules! unreachable { /// /// ``` /// trait Foo { -/// fn bar(&self); +/// fn bar(&self) -> u8; /// fn baz(&self); +/// fn qux(&self) -> Result; /// } /// ``` /// -/// We want to implement `Foo` on one of our types, but we also want to work on -/// just `bar()` first. In order for our code to compile, we need to implement -/// `baz()`, so we can use `unimplemented!`: +/// We want to implement `Foo` for 'MyStruct', but so far we only know how to +/// implement the `bar()` function. `baz()` and `qux()` will still need to be defined +/// in our implementation of `Foo`, but we can use `unimplemented!` in their definitions +/// to allow our code to compile. +/// +/// In the meantime, we want to have our program stop running once these +/// unimplemented functions are reached. /// /// ``` /// # trait Foo { -/// # fn bar(&self); +/// # fn bar(&self) -> u8; /// # fn baz(&self); +/// # fn qux(&self) -> Result; /// # } /// struct MyStruct; /// /// impl Foo for MyStruct { -/// fn bar(&self) { -/// // implementation goes here +/// fn bar(&self) -> u8 { +/// 1 + 1 /// } /// /// fn baz(&self) { -/// // let's not worry about implementing baz() for now +/// // We aren't sure how to even start writing baz yet, +/// // so we have no logic here at all. +/// // This will display "thread 'main' panicked at 'not yet implemented'". /// unimplemented!(); /// } +/// +/// fn qux(&self) -> Result { +/// let n = self.bar(); +/// // We have some logic here, +/// // so we can use unimplemented! to display what we have so far. +/// // This will display: +/// // "thread 'main' panicked at 'not yet implemented: we need to divide by 2'". +/// unimplemented!("we need to divide by {}", n); +/// } /// } /// /// fn main() { /// let s = MyStruct; /// s.bar(); -/// -/// // we aren't even using baz() yet, so this is fine. /// } /// ``` #[macro_export] @@ -579,8 +660,10 @@ macro_rules! unimplemented { /// Indicates unfinished code. /// /// This can be useful if you are prototyping and are just looking to have your -/// code typecheck. `todo!` works exactly like `unimplemented!`. The only -/// difference between the two macros is the name. +/// code typecheck. +/// +/// There is no difference between `unimplemented!` and `todo!` apart from the +/// name. /// /// # Panics /// @@ -602,8 +685,6 @@ macro_rules! unimplemented { /// `baz()`, so we can use `todo!`: /// /// ``` -/// #![feature(todo_macro)] -/// /// # trait Foo { /// # fn bar(&self); /// # fn baz(&self); @@ -629,7 +710,7 @@ macro_rules! unimplemented { /// } /// ``` #[macro_export] -#[unstable(feature = "todo_macro", issue = "59277")] +#[stable(feature = "todo_macro", since = "1.39.0")] macro_rules! todo { () => (panic!("not yet implemented")); ($($arg:tt)+) => (panic!("not yet implemented: {}", $crate::format_args!($($arg)+))); @@ -1236,10 +1317,8 @@ pub(crate) mod builtin { pub macro test($item:item) { /* compiler built-in */ } /// Attribute macro applied to a function to turn it into a benchmark test. 
- #[cfg_attr(not(bootstrap), unstable(soft, feature = "test", issue = "50297", - reason = "`bench` is a part of custom test frameworks which are unstable"))] - #[cfg_attr(bootstrap, unstable(feature = "test", issue = "50297", - reason = "`bench` is a part of custom test frameworks which are unstable"))] + #[unstable(soft, feature = "test", issue = "50297", + reason = "`bench` is a part of custom test frameworks which are unstable")] #[allow_internal_unstable(test, rustc_attrs)] #[rustc_builtin_macro] pub macro bench($item:item) { /* compiler built-in */ } diff --git a/src/libcore/marker.rs b/src/libcore/marker.rs index a2cfb320e7..a25573feab 100644 --- a/src/libcore/marker.rs +++ b/src/libcore/marker.rs @@ -126,6 +126,85 @@ pub trait Unsize { // Empty. } +/// Required trait for constants used in pattern matches. +/// +/// Any type that derives `PartialEq` automatically implements this trait, +/// *regardless* of whether its type-parameters implement `Eq`. +/// +/// If a `const` item contains some type that does not implement this trait, +/// then that type either (1.) does not implement `PartialEq` (which means the +/// constant will not provide that comparison method, which code generation +/// assumes is available), or (2.) it implements *its own* version of +/// `PartialEq` (which we assume does not conform to a structural-equality +/// comparison). +/// +/// In either of the two scenarios above, we reject usage of such a constant in +/// a pattern match. +/// +/// See also the [structural match RFC][RFC1445], and [issue 63438][] which +/// motivated migrating from attribute-based design to this trait. +/// +/// [RFC1445]: https://github.com/rust-lang/rfcs/blob/master/text/1445-restrict-constants-in-patterns.md +/// [issue 63438]: https://github.com/rust-lang/rust/issues/63438 +#[cfg(not(bootstrap))] +#[unstable(feature = "structural_match", issue = "31434")] +#[rustc_on_unimplemented(message="the type `{Self}` does not `#[derive(PartialEq)]`")] +#[lang = "structural_peq"] +pub trait StructuralPartialEq { + // Empty. +} + +/// Required trait for constants used in pattern matches. +/// +/// Any type that derives `Eq` automatically implements this trait, *regardless* +/// of whether its type-parameters implement `Eq`. +/// +/// This is a hack to workaround a limitation in our type-system. +/// +/// Background: +/// +/// We want to require that types of consts used in pattern matches +/// have the attribute `#[derive(PartialEq, Eq)]`. +/// +/// In a more ideal world, we could check that requirement by just checking that +/// the given type implements both (1.) the `StructuralPartialEq` trait *and* +/// (2.) the `Eq` trait. However, you can have ADTs that *do* `derive(PartialEq, Eq)`, +/// and be a case that we want the compiler to accept, and yet the constant's +/// type fails to implement `Eq`. +/// +/// Namely, a case like this: +/// +/// ```rust +/// #[derive(PartialEq, Eq)] +/// struct Wrap(X); +/// fn higher_order(_: &()) { } +/// const CFN: Wrap = Wrap(higher_order); +/// fn main() { +/// match CFN { +/// CFN => {} +/// _ => {} +/// } +/// } +/// ``` +/// +/// (The problem in the above code is that `Wrap` does not implement +/// `PartialEq`, nor `Eq`, because `for<'a> fn(&'a _)` does not implement those +/// traits.) +/// +/// Therefore, we cannot rely on naive check for `StructuralPartialEq` and +/// mere `Eq`. 
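To ground the structural-match discussion above, a sketch of the happy path: deriving both `PartialEq` and `Eq` (which inject the two marker traits) is what lets a constant be used as a pattern. The `Version` type is invented for the example.

```rust
#[derive(PartialEq, Eq)]
struct Version {
    major: u32,
    minor: u32,
}

const V1_0: Version = Version { major: 1, minor: 0 };

fn is_v1_0(v: Version) -> bool {
    // Using the constant as a pattern relies on structural equality.
    match v {
        V1_0 => true,
        _ => false,
    }
}

fn main() {
    assert!(is_v1_0(Version { major: 1, minor: 0 }));
    assert!(!is_v1_0(Version { major: 2, minor: 0 }));
}
```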
+/// +/// As a hack to work around this, we use two separate traits injected by each +/// of the two derives (`#[derive(PartialEq)]` and `#[derive(Eq)]`) and check +/// that both of them are present as part of structural-match checking. +#[cfg(not(bootstrap))] +#[unstable(feature = "structural_match", issue = "31434")] +#[rustc_on_unimplemented(message="the type `{Self}` does not `#[derive(Eq)]`")] +#[lang = "structural_teq"] +pub trait StructuralEq { + // Empty. +} + /// Types whose values can be duplicated simply by copying bits. /// /// By default, variable bindings have 'move semantics.' In other @@ -290,7 +369,6 @@ pub trait Copy : Clone { /// Derive macro generating an impl of the trait `Copy`. #[rustc_builtin_macro] -#[cfg_attr(bootstrap, rustc_macro_transparency = "semitransparent")] #[stable(feature = "builtin_macro_prelude", since = "1.38.0")] #[allow_internal_unstable(core_intrinsics, derive_clone_copy)] pub macro Copy($item:item) { /* compiler built-in */ } @@ -438,6 +516,14 @@ macro_rules! impls{ $t } } + + #[cfg(not(bootstrap))] + #[unstable(feature = "structural_match", issue = "31434")] + impl StructuralPartialEq for $t { } + + #[cfg(not(bootstrap))] + #[unstable(feature = "structural_match", issue = "31434")] + impl StructuralEq for $t { } ) } diff --git a/src/libcore/mem/maybe_uninit.rs b/src/libcore/mem/maybe_uninit.rs index 9e9e901c76..792ce9dfad 100644 --- a/src/libcore/mem/maybe_uninit.rs +++ b/src/libcore/mem/maybe_uninit.rs @@ -5,12 +5,12 @@ use crate::mem::ManuallyDrop; /// /// # Initialization invariant /// -/// The compiler, in general, assumes that variables are properly initialized -/// at their respective type. For example, a variable of reference type must -/// be aligned and non-NULL. This is an invariant that must *always* be upheld, -/// even in unsafe code. As a consequence, zero-initializing a variable of reference -/// type causes instantaneous [undefined behavior][ub], no matter whether that reference -/// ever gets used to access memory: +/// The compiler, in general, assumes that a variable is properly initialized +/// according to the requirements of the variable's type. For example, a variable of +/// reference type must be aligned and non-NULL. This is an invariant that must +/// *always* be upheld, even in unsafe code. As a consequence, zero-initializing a +/// variable of reference type causes instantaneous [undefined behavior][ub], +/// no matter whether that reference ever gets used to access memory: /// /// ```rust,no_run /// # #![allow(invalid_value)] diff --git a/src/libcore/mem/mod.rs b/src/libcore/mem/mod.rs index 87ec05a243..c7da56aad3 100644 --- a/src/libcore/mem/mod.rs +++ b/src/libcore/mem/mod.rs @@ -236,7 +236,7 @@ pub fn forget_unsized(t: T) { /// ``` /// /// [alignment]: ./fn.align_of.html -#[inline] +#[inline(always)] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_promotable] pub const fn size_of() -> usize { @@ -328,7 +328,7 @@ pub fn min_align_of_val(val: &T) -> usize { /// /// assert_eq!(4, mem::align_of::()); /// ``` -#[inline] +#[inline(always)] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_promotable] pub const fn align_of() -> usize { @@ -368,15 +368,17 @@ pub fn align_of_val(val: &T) -> usize { /// make a difference in release builds (where a loop that has no side-effects /// is easily detected and eliminated), but is often a big win for debug builds. 
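A short sketch of the initialization invariant the reworded `MaybeUninit` docs describe: the uninitialized state lives behind `MaybeUninit`, and `assume_init` is only sound after a valid value has been written:

```rust
use std::mem::MaybeUninit;

fn main() {
    // Zero-initializing a reference directly would be instant UB, so the
    // "not yet initialized" state is represented with MaybeUninit instead.
    let mut slot: MaybeUninit<&'static str> = MaybeUninit::uninit();

    unsafe {
        // Write a valid value through the raw pointer before ever reading it.
        slot.as_mut_ptr().write("hello");
        // Only after that write is assume_init sound.
        assert_eq!(slot.assume_init(), "hello");
    }
}
```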
/// -/// Note that `ptr::drop_in_place` already performs this check, so if your workload -/// can be reduced to some small number of drop_in_place calls, using this is -/// unnecessary. In particular note that you can drop_in_place a slice, and that +/// Note that [`drop_in_place`] already performs this check, so if your workload +/// can be reduced to some small number of [`drop_in_place`] calls, using this is +/// unnecessary. In particular note that you can [`drop_in_place`] a slice, and that /// will do a single needs_drop check for all the values. /// /// Types like Vec therefore just `drop_in_place(&mut self[..])` without using -/// needs_drop explicitly. Types like `HashMap`, on the other hand, have to drop +/// `needs_drop` explicitly. Types like [`HashMap`], on the other hand, have to drop /// values one at a time and should use this API. /// +/// [`drop_in_place`]: ../ptr/fn.drop_in_place.html +/// [`HashMap`]: ../../std/collections/struct.HashMap.html /// /// # Examples /// @@ -518,8 +520,6 @@ pub fn swap(x: &mut T, y: &mut T) { /// A simple example: /// /// ``` -/// #![feature(mem_take)] -/// /// use std::mem; /// /// let mut v: Vec = vec![1, 2]; @@ -550,8 +550,6 @@ pub fn swap(x: &mut T, y: &mut T) { /// `self`, allowing it to be returned: /// /// ``` -/// #![feature(mem_take)] -/// /// use std::mem; /// /// # struct Buffer { buf: Vec } @@ -570,7 +568,7 @@ pub fn swap(x: &mut T, y: &mut T) { /// /// [`Clone`]: ../../std/clone/trait.Clone.html #[inline] -#[unstable(feature = "mem_take", issue = "61129")] +#[stable(feature = "mem_take", since = "1.40.0")] pub fn take(dest: &mut T) -> T { replace(dest, T::default()) } @@ -818,9 +816,9 @@ impl fmt::Debug for Discriminant { /// /// enum Foo { A(&'static str), B(i32), C(i32) } /// -/// assert!(mem::discriminant(&Foo::A("bar")) == mem::discriminant(&Foo::A("baz"))); -/// assert!(mem::discriminant(&Foo::B(1)) == mem::discriminant(&Foo::B(2))); -/// assert!(mem::discriminant(&Foo::B(3)) != mem::discriminant(&Foo::C(3))); +/// assert_eq!(mem::discriminant(&Foo::A("bar")), mem::discriminant(&Foo::A("baz"))); +/// assert_eq!(mem::discriminant(&Foo::B(1)), mem::discriminant(&Foo::B(2))); +/// assert_ne!(mem::discriminant(&Foo::B(3)), mem::discriminant(&Foo::C(3))); /// ``` #[stable(feature = "discriminant_value", since = "1.21.0")] pub fn discriminant(v: &T) -> Discriminant { diff --git a/src/libcore/num/dec2flt/algorithm.rs b/src/libcore/num/dec2flt/algorithm.rs index fa3c807537..ed89852dc4 100644 --- a/src/libcore/num/dec2flt/algorithm.rs +++ b/src/libcore/num/dec2flt/algorithm.rs @@ -143,13 +143,12 @@ pub fn fast_path(integral: &[u8], fractional: &[u8], e: i64) -> Opt /// > not a bound for the true error, but bounds the difference between the approximation z and /// > the best possible approximation that uses p bits of significand.) 
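Since `mem::take` is stabilized above, a small usage sketch of the pattern it enables: taking ownership of a field while leaving `Default::default()` behind (the `Buffer` type is illustrative):

```rust
use std::mem;

#[derive(Default)]
struct Buffer {
    data: Vec<u8>,
}

fn drain(buf: &mut Buffer) -> Vec<u8> {
    // The caller keeps a usable (empty) buffer; we get the old contents.
    mem::take(&mut buf.data)
}

fn main() {
    let mut buf = Buffer { data: vec![1, 2, 3] };
    assert_eq!(drain(&mut buf), vec![1, 2, 3]);
    assert!(buf.data.is_empty());
}
```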
pub fn bellerophon(f: &Big, e: i16) -> T { - let slop; - if f <= &Big::from_u64(T::MAX_SIG) { + let slop = if f <= &Big::from_u64(T::MAX_SIG) { // The cases abs(e) < log5(2^N) are in fast_path() - slop = if e >= 0 { 0 } else { 3 }; + if e >= 0 { 0 } else { 3 } } else { - slop = if e >= 0 { 1 } else { 4 }; - } + if e >= 0 { 1 } else { 4 } + }; let z = rawfp::big_to_fp(f).mul(&power_of_ten(e)).normalize(); let exp_p_n = 1 << (P - T::SIG_BITS as u32); let lowbits: i64 = (z.f % exp_p_n) as i64; diff --git a/src/libcore/num/f32.rs b/src/libcore/num/f32.rs index 22e7573eca..5730088c4d 100644 --- a/src/libcore/num/f32.rs +++ b/src/libcore/num/f32.rs @@ -466,11 +466,10 @@ impl f32 { /// # Examples /// /// ``` - /// #![feature(float_to_from_bytes)] /// let bytes = 12.5f32.to_be_bytes(); /// assert_eq!(bytes, [0x41, 0x48, 0x00, 0x00]); /// ``` - #[unstable(feature = "float_to_from_bytes", issue = "60446")] + #[stable(feature = "float_to_from_bytes", since = "1.40.0")] #[inline] pub fn to_be_bytes(self) -> [u8; 4] { self.to_bits().to_be_bytes() @@ -482,11 +481,10 @@ impl f32 { /// # Examples /// /// ``` - /// #![feature(float_to_from_bytes)] /// let bytes = 12.5f32.to_le_bytes(); /// assert_eq!(bytes, [0x00, 0x00, 0x48, 0x41]); /// ``` - #[unstable(feature = "float_to_from_bytes", issue = "60446")] + #[stable(feature = "float_to_from_bytes", since = "1.40.0")] #[inline] pub fn to_le_bytes(self) -> [u8; 4] { self.to_bits().to_le_bytes() @@ -504,7 +502,6 @@ impl f32 { /// # Examples /// /// ``` - /// #![feature(float_to_from_bytes)] /// let bytes = 12.5f32.to_ne_bytes(); /// assert_eq!( /// bytes, @@ -515,7 +512,7 @@ impl f32 { /// } /// ); /// ``` - #[unstable(feature = "float_to_from_bytes", issue = "60446")] + #[stable(feature = "float_to_from_bytes", since = "1.40.0")] #[inline] pub fn to_ne_bytes(self) -> [u8; 4] { self.to_bits().to_ne_bytes() @@ -526,11 +523,10 @@ impl f32 { /// # Examples /// /// ``` - /// #![feature(float_to_from_bytes)] /// let value = f32::from_be_bytes([0x41, 0x48, 0x00, 0x00]); /// assert_eq!(value, 12.5); /// ``` - #[unstable(feature = "float_to_from_bytes", issue = "60446")] + #[stable(feature = "float_to_from_bytes", since = "1.40.0")] #[inline] pub fn from_be_bytes(bytes: [u8; 4]) -> Self { Self::from_bits(u32::from_be_bytes(bytes)) @@ -541,11 +537,10 @@ impl f32 { /// # Examples /// /// ``` - /// #![feature(float_to_from_bytes)] /// let value = f32::from_le_bytes([0x00, 0x00, 0x48, 0x41]); /// assert_eq!(value, 12.5); /// ``` - #[unstable(feature = "float_to_from_bytes", issue = "60446")] + #[stable(feature = "float_to_from_bytes", since = "1.40.0")] #[inline] pub fn from_le_bytes(bytes: [u8; 4]) -> Self { Self::from_bits(u32::from_le_bytes(bytes)) @@ -563,7 +558,6 @@ impl f32 { /// # Examples /// /// ``` - /// #![feature(float_to_from_bytes)] /// let value = f32::from_ne_bytes(if cfg!(target_endian = "big") { /// [0x41, 0x48, 0x00, 0x00] /// } else { @@ -571,7 +565,7 @@ impl f32 { /// }); /// assert_eq!(value, 12.5); /// ``` - #[unstable(feature = "float_to_from_bytes", issue = "60446")] + #[stable(feature = "float_to_from_bytes", since = "1.40.0")] #[inline] pub fn from_ne_bytes(bytes: [u8; 4]) -> Self { Self::from_bits(u32::from_ne_bytes(bytes)) diff --git a/src/libcore/num/f64.rs b/src/libcore/num/f64.rs index bbe1d04078..2bdeda340d 100644 --- a/src/libcore/num/f64.rs +++ b/src/libcore/num/f64.rs @@ -479,11 +479,10 @@ impl f64 { /// # Examples /// /// ``` - /// #![feature(float_to_from_bytes)] /// let bytes = 12.5f64.to_be_bytes(); /// assert_eq!(bytes, [0x40, 
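A runnable round-trip of the newly stabilized float/byte conversions, reusing the `12.5f32` value and byte patterns from the examples above:

```rust
fn main() {
    let bytes = 12.5f32.to_be_bytes();
    assert_eq!(bytes, [0x41, 0x48, 0x00, 0x00]);
    assert_eq!(f32::from_be_bytes(bytes), 12.5);

    // Native-endian order depends on the target, so compare via cfg.
    let ne = 12.5f32.to_ne_bytes();
    if cfg!(target_endian = "big") {
        assert_eq!(ne, [0x41, 0x48, 0x00, 0x00]);
    } else {
        assert_eq!(ne, [0x00, 0x00, 0x48, 0x41]);
    }
}
```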
0x29, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]); /// ``` - #[unstable(feature = "float_to_from_bytes", issue = "60446")] + #[stable(feature = "float_to_from_bytes", since = "1.40.0")] #[inline] pub fn to_be_bytes(self) -> [u8; 8] { self.to_bits().to_be_bytes() @@ -495,11 +494,10 @@ impl f64 { /// # Examples /// /// ``` - /// #![feature(float_to_from_bytes)] /// let bytes = 12.5f64.to_le_bytes(); /// assert_eq!(bytes, [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x29, 0x40]); /// ``` - #[unstable(feature = "float_to_from_bytes", issue = "60446")] + #[stable(feature = "float_to_from_bytes", since = "1.40.0")] #[inline] pub fn to_le_bytes(self) -> [u8; 8] { self.to_bits().to_le_bytes() @@ -517,7 +515,6 @@ impl f64 { /// # Examples /// /// ``` - /// #![feature(float_to_from_bytes)] /// let bytes = 12.5f64.to_ne_bytes(); /// assert_eq!( /// bytes, @@ -528,7 +525,7 @@ impl f64 { /// } /// ); /// ``` - #[unstable(feature = "float_to_from_bytes", issue = "60446")] + #[stable(feature = "float_to_from_bytes", since = "1.40.0")] #[inline] pub fn to_ne_bytes(self) -> [u8; 8] { self.to_bits().to_ne_bytes() @@ -539,11 +536,10 @@ impl f64 { /// # Examples /// /// ``` - /// #![feature(float_to_from_bytes)] /// let value = f64::from_be_bytes([0x40, 0x29, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]); /// assert_eq!(value, 12.5); /// ``` - #[unstable(feature = "float_to_from_bytes", issue = "60446")] + #[stable(feature = "float_to_from_bytes", since = "1.40.0")] #[inline] pub fn from_be_bytes(bytes: [u8; 8]) -> Self { Self::from_bits(u64::from_be_bytes(bytes)) @@ -554,11 +550,10 @@ impl f64 { /// # Examples /// /// ``` - /// #![feature(float_to_from_bytes)] /// let value = f64::from_le_bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x29, 0x40]); /// assert_eq!(value, 12.5); /// ``` - #[unstable(feature = "float_to_from_bytes", issue = "60446")] + #[stable(feature = "float_to_from_bytes", since = "1.40.0")] #[inline] pub fn from_le_bytes(bytes: [u8; 8]) -> Self { Self::from_bits(u64::from_le_bytes(bytes)) @@ -576,7 +571,6 @@ impl f64 { /// # Examples /// /// ``` - /// #![feature(float_to_from_bytes)] /// let value = f64::from_ne_bytes(if cfg!(target_endian = "big") { /// [0x40, 0x29, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00] /// } else { @@ -584,7 +578,7 @@ impl f64 { /// }); /// assert_eq!(value, 12.5); /// ``` - #[unstable(feature = "float_to_from_bytes", issue = "60446")] + #[stable(feature = "float_to_from_bytes", since = "1.40.0")] #[inline] pub fn from_ne_bytes(bytes: [u8; 8]) -> Self { Self::from_bits(u64::from_ne_bytes(bytes)) diff --git a/src/libcore/num/mod.rs b/src/libcore/num/mod.rs index 5d99c10e73..b4ade70414 100644 --- a/src/libcore/num/mod.rs +++ b/src/libcore/num/mod.rs @@ -252,7 +252,7 @@ Basic usage: $EndFeature, " ```"), #[stable(feature = "rust1", since = "1.0.0")] - #[inline] + #[inline(always)] #[rustc_promotable] pub const fn min_value() -> Self { !0 ^ ((!0 as $UnsignedT) >> 1) as Self @@ -271,7 +271,7 @@ Basic usage: $EndFeature, " ```"), #[stable(feature = "rust1", since = "1.0.0")] - #[inline] + #[inline(always)] #[rustc_promotable] pub const fn max_value() -> Self { !Self::min_value() @@ -938,7 +938,9 @@ Basic usage: ``` ", $Feature, "assert_eq!(100", stringify!($SelfT), ".saturating_add(1), 101); assert_eq!(", stringify!($SelfT), "::max_value().saturating_add(100), ", stringify!($SelfT), -"::max_value());", +"::max_value()); +assert_eq!(", stringify!($SelfT), "::min_value().saturating_add(-1), ", stringify!($SelfT), +"::min_value());", $EndFeature, " ```"), @@ -952,7 +954,6 @@ $EndFeature, " } } - doc_comment! 
{ concat!("Saturating integer subtraction. Computes `self - rhs`, saturating at the numeric bounds instead of overflowing. @@ -964,7 +965,9 @@ Basic usage: ``` ", $Feature, "assert_eq!(100", stringify!($SelfT), ".saturating_sub(127), -27); assert_eq!(", stringify!($SelfT), "::min_value().saturating_sub(100), ", stringify!($SelfT), -"::min_value());", +"::min_value()); +assert_eq!(", stringify!($SelfT), "::max_value().saturating_sub(-1), ", stringify!($SelfT), +"::max_value());", $EndFeature, " ```"), #[stable(feature = "rust1", since = "1.0.0")] @@ -1055,7 +1058,7 @@ $EndFeature, " #[inline] pub fn saturating_mul(self, rhs: Self) -> Self { self.checked_mul(rhs).unwrap_or_else(|| { - if (self < 0 && rhs < 0) || (self > 0 && rhs > 0) { + if (self < 0) == (rhs < 0) { Self::max_value() } else { Self::min_value() @@ -1112,13 +1115,7 @@ $EndFeature, " without modifying the original"] #[inline] pub const fn wrapping_add(self, rhs: Self) -> Self { - #[cfg(bootstrap)] { - intrinsics::overflowing_add(self, rhs) - } - - #[cfg(not(bootstrap))] { - intrinsics::wrapping_add(self, rhs) - } + intrinsics::wrapping_add(self, rhs) } } @@ -1141,13 +1138,7 @@ $EndFeature, " without modifying the original"] #[inline] pub const fn wrapping_sub(self, rhs: Self) -> Self { - #[cfg(bootstrap)] { - intrinsics::overflowing_sub(self, rhs) - } - - #[cfg(not(bootstrap))] { - intrinsics::wrapping_sub(self, rhs) - } + intrinsics::wrapping_sub(self, rhs) } } @@ -1169,13 +1160,7 @@ $EndFeature, " without modifying the original"] #[inline] pub const fn wrapping_mul(self, rhs: Self) -> Self { - #[cfg(bootstrap)] { - intrinsics::overflowing_mul(self, rhs) - } - - #[cfg(not(bootstrap))] { - intrinsics::wrapping_mul(self, rhs) - } + intrinsics::wrapping_mul(self, rhs) } } @@ -1402,7 +1387,16 @@ $EndFeature, " #[stable(feature = "no_panic_abs", since = "1.13.0")] #[inline] pub const fn wrapping_abs(self) -> Self { - (self ^ (self >> ($BITS - 1))).wrapping_sub(self >> ($BITS - 1)) + // sign is -1 (all ones) for negative numbers, 0 otherwise. + let sign = self >> ($BITS - 1); + // For positive self, sign == 0 so the expression is simply + // (self ^ 0).wrapping_sub(0) == self == abs(self). + // + // For negative self, self ^ sign == self ^ all_ones. + // But all_ones ^ self == all_ones - self == -1 - self. + // So for negative numbers, (self ^ sign).wrapping_sub(sign) is + // (-1 - self).wrapping_sub(-1) == -self == abs(self). + (self ^ sign).wrapping_sub(sign) } } @@ -1761,7 +1755,7 @@ $EndFeature, " #[stable(feature = "no_panic_abs", since = "1.13.0")] #[inline] pub const fn overflowing_abs(self) -> (Self, bool) { - (self ^ (self >> ($BITS - 1))).overflowing_sub(self >> ($BITS - 1)) + (self.wrapping_abs(), self == Self::min_value()) } } @@ -1870,7 +1864,7 @@ if `self < 0`, this is equal to round towards +/- infinity. # Panics -This function will panic if `rhs` is 0. +This function will panic if `rhs` is 0 or the division results in overflow. # Examples @@ -1909,7 +1903,7 @@ This is done as if by the Euclidean division algorithm -- given # Panics -This function will panic if `rhs` is 0. +This function will panic if `rhs` is 0 or the division results in overflow. # Examples @@ -1969,7 +1963,21 @@ $EndFeature, " // Note that the #[inline] above means that the overflow // semantics of the subtraction depend on the crate we're being // inlined into. - (self ^ (self >> ($BITS - 1))) - (self >> ($BITS - 1)) + + // sign is -1 (all ones) for negative numbers, 0 otherwise. 
+ let sign = self >> ($BITS - 1); + // For positive self, sign == 0 so the expression is simply + // (self ^ 0) - 0 == self == abs(self). + // + // For negative self, self ^ sign == self ^ all_ones. + // But all_ones ^ self == all_ones - self == -1 - self. + // So for negative numbers, (self ^ sign) - sign is + // (-1 - self) - -1 == -self == abs(self). + // + // The subtraction overflows when self is min_value(), because + // (-1 - min_value()) - -1 is max_value() - -1 which overflows. + // This is exactly when we want self.abs() to overflow. + (self ^ sign) - sign } } @@ -2303,7 +2311,7 @@ Basic usage: ```"), #[stable(feature = "rust1", since = "1.0.0")] #[rustc_promotable] - #[inline] + #[inline(always)] pub const fn min_value() -> Self { 0 } } @@ -2320,7 +2328,7 @@ stringify!($MaxV), ");", $EndFeature, " ```"), #[stable(feature = "rust1", since = "1.0.0")] #[rustc_promotable] - #[inline] + #[inline(always)] pub const fn max_value() -> Self { !0 } } @@ -3040,13 +3048,7 @@ $EndFeature, " without modifying the original"] #[inline] pub const fn wrapping_add(self, rhs: Self) -> Self { - #[cfg(bootstrap)] { - intrinsics::overflowing_add(self, rhs) - } - - #[cfg(not(bootstrap))] { - intrinsics::wrapping_add(self, rhs) - } + intrinsics::wrapping_add(self, rhs) } } @@ -3068,13 +3070,7 @@ $EndFeature, " without modifying the original"] #[inline] pub const fn wrapping_sub(self, rhs: Self) -> Self { - #[cfg(bootstrap)] { - intrinsics::overflowing_sub(self, rhs) - } - - #[cfg(not(bootstrap))] { - intrinsics::wrapping_sub(self, rhs) - } + intrinsics::wrapping_sub(self, rhs) } } @@ -3097,13 +3093,7 @@ $EndFeature, " without modifying the original"] #[inline] pub const fn wrapping_mul(self, rhs: Self) -> Self { - #[cfg(bootstrap)] { - intrinsics::overflowing_mul(self, rhs) - } - - #[cfg(not(bootstrap))] { - intrinsics::wrapping_mul(self, rhs) - } + intrinsics::wrapping_mul(self, rhs) } doc_comment! { @@ -3704,6 +3694,10 @@ Since, for the positive integers, all common definitions of division are equal, this is exactly equal to `self / rhs`. +# Panics + +This function will panic if `rhs` is 0. + # Examples Basic usage: @@ -3729,6 +3723,10 @@ Since, for the positive integers, all common definitions of division are equal, this is exactly equal to `self % rhs`. +# Panics + +This function will panic if `rhs` is 0. + # Examples Basic usage: @@ -3759,8 +3757,8 @@ assert!(!10", stringify!($SelfT), ".is_power_of_two());", $EndFeature, " ```"), #[stable(feature = "rust1", since = "1.0.0")] #[inline] - pub fn is_power_of_two(self) -> bool { - (self.wrapping_sub(1)) & self == 0 && !(self == 0) + pub const fn is_power_of_two(self) -> bool { + self.count_ones() == 1 } } diff --git a/src/libcore/num/wrapping.rs b/src/libcore/num/wrapping.rs index 59a10ae99b..5fe9895d8d 100644 --- a/src/libcore/num/wrapping.rs +++ b/src/libcore/num/wrapping.rs @@ -437,7 +437,7 @@ assert_eq!(n.trailing_zeros(), 3); /// wrapping the truncated bits to the end of the resulting /// integer. /// - /// Please note this isn't the same operation as the `>>` shifting + /// Please note this isn't the same operation as the `<<` shifting /// operator! /// /// # Examples @@ -463,7 +463,7 @@ assert_eq!(n.trailing_zeros(), 3); /// wrapping the truncated bits to the beginning of the resulting /// integer. /// - /// Please note this isn't the same operation as the `<<` shifting + /// Please note this isn't the same operation as the `>>` shifting /// operator! 
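Several of the integer hunks above are behaviour-preserving rewrites: the `(self < 0) == (rhs < 0)` test in `saturating_mul`, the documented sign trick behind `wrapping_abs` and `abs`, `overflowing_abs` delegating to `wrapping_abs`, and `is_power_of_two` becoming a popcount. A small standalone program (not part of the patch) exercising the observable behaviour on concrete integer types:

```rust
fn main() {
    // Saturating arithmetic clamps at both numeric bounds, as the added doc
    // examples show.
    assert_eq!(i8::min_value().saturating_add(-1), i8::min_value());
    assert_eq!(i8::max_value().saturating_sub(-1), i8::max_value());

    // `saturating_mul` picks the bound from the sign of the product:
    // equal signs saturate at MAX, mixed signs at MIN.
    assert_eq!((-100i8).saturating_mul(-100), i8::max_value());
    assert_eq!((-100i8).saturating_mul(100), i8::min_value());

    // The sign trick documented above: with `sign = self >> (BITS - 1)`,
    // `(self ^ sign).wrapping_sub(sign)` equals `abs(self)` for every value
    // except MIN, which wraps back onto itself.
    let x: i32 = -7;
    let sign = x >> 31;
    assert_eq!((x ^ sign).wrapping_sub(sign), 7);
    assert_eq!(i32::min_value().wrapping_abs(), i32::min_value());

    // `overflowing_abs` now reports overflow exactly for MIN.
    assert_eq!(5i32.overflowing_abs(), (5, false));
    assert_eq!(i32::min_value().overflowing_abs(), (i32::min_value(), true));

    // `is_power_of_two` as a popcount: exactly one bit set.
    assert!(16u32.is_power_of_two());
    assert_eq!(16u32.count_ones(), 1);
    assert!(!10u32.is_power_of_two() && !0u32.is_power_of_two());

    // The `Wrapping` doc fix below: rotation is not the same as shifting,
    // because bits pushed out on one side reappear on the other. Shown here
    // on the plain integer counterparts.
    let n: u8 = 0b1000_0001;
    assert_eq!(n.rotate_left(1), 0b0000_0011);
    assert_eq!(n << 1, 0b0000_0010);
}
```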
/// /// # Examples diff --git a/src/libcore/ops/try.rs b/src/libcore/ops/try.rs index 76fec1020f..e8f35f8cf2 100644 --- a/src/libcore/ops/try.rs +++ b/src/libcore/ops/try.rs @@ -5,7 +5,7 @@ /// extracting those success or failure values from an existing instance and /// creating a new instance from a success or failure value. #[unstable(feature = "try_trait", issue = "42327")] -#[rustc_on_unimplemented( +#[cfg_attr(bootstrap, rustc_on_unimplemented( on(all( any(from_method="from_error", from_method="from_ok"), from_desugaring="QuestionMark"), @@ -17,7 +17,20 @@ message="the `?` operator can only be applied to values \ that implement `{Try}`", label="the `?` operator cannot be applied to type `{Self}`") -)] +))] +#[cfg_attr(not(bootstrap), rustc_on_unimplemented( +on(all( +any(from_method="from_error", from_method="from_ok"), +from_desugaring="QuestionMark"), +message="the `?` operator can only be used in {ItemContext} \ + that returns `Result` or `Option` \ + (or another type that implements `{Try}`)", +label="cannot use the `?` operator in {ItemContext} that returns `{Self}`"), +on(all(from_method="into_result", from_desugaring="QuestionMark"), +message="the `?` operator can only be applied to values \ + that implement `{Try}`", +label="the `?` operator cannot be applied to type `{Self}`") +))] #[doc(alias = "?")] pub trait Try { /// The type of this value when viewed as successful. diff --git a/src/libcore/ops/unsize.rs b/src/libcore/ops/unsize.rs index 8e46830084..d29147645f 100644 --- a/src/libcore/ops/unsize.rs +++ b/src/libcore/ops/unsize.rs @@ -76,7 +76,7 @@ impl, U: ?Sized> CoerceUnsized<*const U> for *const T {} /// ``` /// # #![feature(dispatch_from_dyn, unsize)] /// # use std::{ops::DispatchFromDyn, marker::Unsize}; -/// # struct Rc(::std::rc::Rc); +/// # struct Rc(std::rc::Rc); /// impl DispatchFromDyn> for Rc /// where /// T: Unsize, diff --git a/src/libcore/option.rs b/src/libcore/option.rs index 5569d99f8d..f0ac5e749f 100644 --- a/src/libcore/option.rs +++ b/src/libcore/option.rs @@ -46,7 +46,7 @@ //! # Options and pointers ("nullable" pointers) //! //! Rust's pointer types must always point to a valid location; there are -//! no "null" pointers. Instead, Rust has *optional* pointers, like +//! no "null" references. Instead, Rust has *optional* pointers, like //! the optional owned box, [`Option`]`<`[`Box`]`>`. //! //! The following example uses [`Option`] to create an optional box of @@ -64,7 +64,7 @@ //! //! fn check_optional(optional: Option>) { //! match optional { -//! Some(ref p) => println!("has value {}", p), +//! Some(p) => println!("has value {}", p), //! None => println!("has no value"), //! } //! } @@ -83,7 +83,7 @@ //! let msg = Some("howdy"); //! //! // Take a reference to the contained string -//! if let Some(ref m) = msg { +//! if let Some(m) = &msg { //! println!("{}", *m); //! } //! 
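The `option.rs` doc edits above drop the explicit `ref` patterns and borrow the `Option` instead, relying on match ergonomics. A standalone version of the updated examples follows; taking `check_optional` by reference is an adaptation for illustration, not the wording of the patch itself.

```rust
fn check_optional(optional: &Option<Box<i32>>) {
    // Matching on a reference binds `p` as `&Box<i32>` without a `ref` pattern.
    match optional {
        Some(p) => println!("has value {}", p),
        None => println!("has no value"),
    }
}

fn main() {
    let msg = Some("howdy");

    // Borrowing the Option keeps `msg` usable afterwards.
    if let Some(m) = &msg {
        println!("{}", m);
    }
    assert_eq!(msg, Some("howdy"));

    check_optional(&Some(Box::new(9000)));
    check_optional(&None);
}
```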
@@ -395,10 +395,10 @@ impl Option { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - pub fn unwrap_or(self, def: T) -> T { + pub fn unwrap_or(self, default: T) -> T { match self { Some(x) => x, - None => def, + None => default, } } @@ -837,9 +837,8 @@ impl Option { #[inline] #[stable(feature = "option_entry", since = "1.20.0")] pub fn get_or_insert_with T>(&mut self, f: F) -> &mut T { - match *self { - None => *self = Some(f()), - _ => (), + if let None = *self { + *self = Some(f()); } match *self { @@ -1102,7 +1101,6 @@ impl Option { } } -#[unstable(feature = "inner_deref", reason = "newly added", issue = "50264")] impl Option { /// Converts from `Option` (or `&Option`) to `Option<&T::Target>`. /// @@ -1114,20 +1112,18 @@ impl Option { /// # Examples /// /// ``` - /// #![feature(inner_deref)] - /// /// let x: Option = Some("hey".to_owned()); /// assert_eq!(x.as_deref(), Some("hey")); /// /// let x: Option = None; /// assert_eq!(x.as_deref(), None); /// ``` + #[stable(feature = "option_deref", since = "1.40.0")] pub fn as_deref(&self) -> Option<&T::Target> { self.as_ref().map(|t| t.deref()) } } -#[unstable(feature = "inner_deref", reason = "newly added", issue = "50264")] impl Option { /// Converts from `Option` (or `&mut Option`) to `Option<&mut T::Target>`. /// @@ -1137,14 +1133,13 @@ impl Option { /// # Examples /// /// ``` - /// #![feature(inner_deref)] - /// /// let mut x: Option = Some("hey".to_owned()); /// assert_eq!(x.as_deref_mut().map(|x| { /// x.make_ascii_uppercase(); /// x /// }), Some("HEY".to_owned().as_mut_str())); /// ``` + #[stable(feature = "option_deref", since = "1.40.0")] pub fn as_deref_mut(&mut self) -> Option<&mut T::Target> { self.as_mut().map(|t| t.deref_mut()) } @@ -1572,7 +1567,6 @@ impl Option> { /// # Examples /// Basic usage: /// ``` - /// #![feature(option_flattening)] /// let x: Option> = Some(Some(6)); /// assert_eq!(Some(6), x.flatten()); /// @@ -1584,13 +1578,12 @@ impl Option> { /// ``` /// Flattening once only removes one level of nesting: /// ``` - /// #![feature(option_flattening)] /// let x: Option>> = Some(Some(Some(6))); /// assert_eq!(Some(Some(6)), x.flatten()); /// assert_eq!(Some(6), x.flatten().flatten()); /// ``` #[inline] - #[unstable(feature = "option_flattening", issue = "60258")] + #[stable(feature = "option_flattening", since = "1.40.0")] pub fn flatten(self) -> Option { self.and_then(convert::identity) } diff --git a/src/libcore/panic.rs b/src/libcore/panic.rs index 989fc96732..51bbf3a8fd 100644 --- a/src/libcore/panic.rs +++ b/src/libcore/panic.rs @@ -35,7 +35,7 @@ use crate::fmt; pub struct PanicInfo<'a> { payload: &'a (dyn Any + Send), message: Option<&'a fmt::Arguments<'a>>, - location: Location<'a>, + location: &'a Location<'a>, } impl<'a> PanicInfo<'a> { @@ -45,11 +45,16 @@ impl<'a> PanicInfo<'a> { issue = "0")] #[doc(hidden)] #[inline] - pub fn internal_constructor(message: Option<&'a fmt::Arguments<'a>>, - location: Location<'a>) - -> Self { + pub fn internal_constructor( + message: Option<&'a fmt::Arguments<'a>>, + location: &'a Location<'a>, + ) -> Self { struct NoPayload; - PanicInfo { payload: &NoPayload, location, message } + PanicInfo { + location, + message, + payload: &NoPayload, + } } #[doc(hidden)] @@ -162,6 +167,7 @@ impl fmt::Display for PanicInfo<'_> { /// /// panic!("Normal panic"); /// ``` +#[cfg_attr(not(bootstrap), lang = "panic_location")] #[derive(Debug)] #[stable(feature = "panic_hooks", since = "1.10.0")] pub struct Location<'a> { @@ -176,7 +182,7 @@ impl<'a> Location<'a> { and related 
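The hunks above rename `unwrap_or`'s parameter, simplify `get_or_insert_with`, and stabilize `as_deref`, `as_deref_mut`, and `flatten`. A short self-contained demonstration of the now-stable surface (editorial example, not part of the patch):

```rust
fn main() {
    // `unwrap_or` is unchanged in behaviour; only the parameter name differs.
    assert_eq!(Some(3).unwrap_or(7), 3);
    assert_eq!(None.unwrap_or(7), 7);

    // `get_or_insert_with` lazily fills an empty Option and returns `&mut T`.
    let mut slot: Option<Vec<u32>> = None;
    slot.get_or_insert_with(Vec::new).push(1);
    assert_eq!(slot, Some(vec![1]));

    // `as_deref`: `&Option<String>` viewed as `Option<&str>`.
    let name: Option<String> = Some("hey".to_owned());
    assert_eq!(name.as_deref(), Some("hey"));

    // `as_deref_mut` gives mutable access through the Deref target.
    let mut shout: Option<String> = Some("hey".to_owned());
    if let Some(s) = shout.as_deref_mut() {
        s.make_ascii_uppercase();
    }
    assert_eq!(shout.as_deref(), Some("HEY"));

    // `flatten` removes exactly one level of nesting per call.
    let nested: Option<Option<Option<u32>>> = Some(Some(Some(6)));
    assert_eq!(nested.flatten(), Some(Some(6)));
    assert_eq!(nested.flatten().flatten(), Some(6));
}
```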
macros", issue = "0")] #[doc(hidden)] - pub fn internal_constructor(file: &'a str, line: u32, col: u32) -> Self { + pub const fn internal_constructor(file: &'a str, line: u32, col: u32) -> Self { Location { file, line, col } } diff --git a/src/libcore/panicking.rs b/src/libcore/panicking.rs index 7b7253419b..685b749776 100644 --- a/src/libcore/panicking.rs +++ b/src/libcore/panicking.rs @@ -29,6 +29,7 @@ use crate::fmt; use crate::panic::{Location, PanicInfo}; +#[cfg(bootstrap)] #[cold] // never inline unless panic_immediate_abort to avoid code // bloat at the call sites as much as possible @@ -49,6 +50,27 @@ pub fn panic(expr_file_line_col: &(&'static str, &'static str, u32, u32)) -> ! { panic_fmt(fmt::Arguments::new_v1(&[expr], &[]), &(file, line, col)) } +#[cfg(not(bootstrap))] +#[cold] +// never inline unless panic_immediate_abort to avoid code +// bloat at the call sites as much as possible +#[cfg_attr(not(feature="panic_immediate_abort"),inline(never))] +#[lang = "panic"] +pub fn panic(expr: &str, location: &Location<'_>) -> ! { + if cfg!(feature = "panic_immediate_abort") { + unsafe { super::intrinsics::abort() } + } + + // Use Arguments::new_v1 instead of format_args!("{}", expr) to potentially + // reduce size overhead. The format_args! macro uses str's Display trait to + // write expr, which calls Formatter::pad, which must accommodate string + // truncation and padding (even though none is used here). Using + // Arguments::new_v1 may allow the compiler to omit Formatter::pad from the + // output binary, saving up to a few kilobytes. + panic_fmt(fmt::Arguments::new_v1(&[expr], &[]), location) +} + +#[cfg(bootstrap)] #[cold] #[cfg_attr(not(feature="panic_immediate_abort"),inline(never))] #[lang = "panic_bounds_check"] @@ -62,6 +84,22 @@ fn panic_bounds_check(file_line_col: &(&'static str, u32, u32), len, index), file_line_col) } +#[cfg(not(bootstrap))] +#[cold] +#[cfg_attr(not(feature="panic_immediate_abort"),inline(never))] +#[lang = "panic_bounds_check"] +fn panic_bounds_check(location: &Location<'_>, index: usize, len: usize) -> ! { + if cfg!(feature = "panic_immediate_abort") { + unsafe { super::intrinsics::abort() } + } + + panic_fmt( + format_args!("index out of bounds: the len is {} but the index is {}", len, index), + location + ) +} + +#[cfg(bootstrap)] #[cold] #[cfg_attr(not(feature="panic_immediate_abort"),inline(never))] #[cfg_attr( feature="panic_immediate_abort" ,inline)] @@ -71,16 +109,32 @@ pub fn panic_fmt(fmt: fmt::Arguments<'_>, file_line_col: &(&'static str, u32, u3 } // NOTE This function never crosses the FFI boundary; it's a Rust-to-Rust call - #[cfg_attr(bootstrap, allow(improper_ctypes))] extern "Rust" { #[lang = "panic_impl"] fn panic_impl(pi: &PanicInfo<'_>) -> !; } let (file, line, col) = *file_line_col; - let pi = PanicInfo::internal_constructor( - Some(&fmt), - Location::internal_constructor(file, line, col), - ); + let location = Location::internal_constructor(file, line, col); + let pi = PanicInfo::internal_constructor(Some(&fmt), &location); + unsafe { panic_impl(&pi) } +} + +#[cfg(not(bootstrap))] +#[cold] +#[cfg_attr(not(feature="panic_immediate_abort"),inline(never))] +#[cfg_attr( feature="panic_immediate_abort" ,inline)] +pub fn panic_fmt(fmt: fmt::Arguments<'_>, location: &Location<'_>) -> ! 
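The panicking hunks above thread a `&Location<'_>` through `panic` and `panic_bounds_check` instead of `(file, line, col)` tuples. The same file/line/column data is what a user-level panic hook observes; here is a standalone illustration using only the stable `std::panic` API, not the internal entry points themselves:

```rust
use std::panic;

fn main() {
    // Install a hook that prints the Location carried by the panic.
    panic::set_hook(Box::new(|info| {
        if let Some(loc) = info.location() {
            eprintln!("panicked at {}:{}:{}", loc.file(), loc.line(), loc.column());
        }
    }));

    // An out-of-bounds index goes through the `panic_bounds_check` path above.
    let caught = panic::catch_unwind(|| {
        let v = vec![1, 2, 3];
        v[99]
    });
    assert!(caught.is_err());
}
```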
{ + if cfg!(feature = "panic_immediate_abort") { + unsafe { super::intrinsics::abort() } + } + + // NOTE This function never crosses the FFI boundary; it's a Rust-to-Rust call + extern "Rust" { + #[lang = "panic_impl"] + fn panic_impl(pi: &PanicInfo<'_>) -> !; + } + + let pi = PanicInfo::internal_constructor(Some(&fmt), location); unsafe { panic_impl(&pi) } } diff --git a/src/libcore/pin.rs b/src/libcore/pin.rs index 1dc6d54b08..be057ed6d5 100644 --- a/src/libcore/pin.rs +++ b/src/libcore/pin.rs @@ -369,6 +369,8 @@ //! [drop-guarantee]: #drop-guarantee //! [`poll`]: ../../std/future/trait.Future.html#tymethod.poll //! [`Pin::get_unchecked_mut`]: struct.Pin.html#method.get_unchecked_mut +//! [`bool`]: ../../std/primitive.bool.html +//! [`i32`]: ../../std/primitive.i32.html #![stable(feature = "pin", since = "1.33.0")] diff --git a/src/libcore/ptr/mod.rs b/src/libcore/ptr/mod.rs index 13ccc9b252..1355ce1aa4 100644 --- a/src/libcore/ptr/mod.rs +++ b/src/libcore/ptr/mod.rs @@ -188,7 +188,7 @@ unsafe fn real_drop_in_place(to_drop: &mut T) { /// let p: *const i32 = ptr::null(); /// assert!(p.is_null()); /// ``` -#[inline] +#[inline(always)] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_promotable] pub const fn null() -> *const T { 0 as *const T } @@ -203,7 +203,7 @@ pub const fn null() -> *const T { 0 as *const T } /// let p: *mut i32 = ptr::null_mut(); /// assert!(p.is_null()); /// ``` -#[inline] +#[inline(always)] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_promotable] pub const fn null_mut() -> *mut T { 0 as *mut T } @@ -1286,7 +1286,22 @@ impl *const T { /// } /// ``` #[unstable(feature = "ptr_offset_from", issue = "41079")] + #[cfg(not(bootstrap))] + #[rustc_const_unstable(feature = "const_ptr_offset_from")] #[inline] + pub const unsafe fn offset_from(self, origin: *const T) -> isize where T: Sized { + let pointee_size = mem::size_of::(); + let ok = 0 < pointee_size && pointee_size <= isize::max_value() as usize; + // assert that the pointee size is valid in a const eval compatible way + // FIXME: do this with a real assert at some point + [()][(!ok) as usize]; + intrinsics::ptr_offset_from(self, origin) + } + + #[unstable(feature = "ptr_offset_from", issue = "41079")] + #[inline] + #[cfg(bootstrap)] + /// bootstrap pub unsafe fn offset_from(self, origin: *const T) -> isize where T: Sized { let pointee_size = mem::size_of::(); assert!(0 < pointee_size && pointee_size <= isize::max_value() as usize); @@ -2013,8 +2028,9 @@ impl *mut T { /// } /// ``` #[unstable(feature = "ptr_offset_from", issue = "41079")] + #[rustc_const_unstable(feature = "const_ptr_offset_from")] #[inline] - pub unsafe fn offset_from(self, origin: *const T) -> isize where T: Sized { + pub const unsafe fn offset_from(self, origin: *const T) -> isize where T: Sized { (self as *const T).offset_from(origin) } @@ -2732,31 +2748,29 @@ impl Eq for *mut T {} /// impl Trait for Wrapper {} /// impl Trait for i32 {} /// -/// fn main() { -/// let wrapper = Wrapper { member: 10 }; +/// let wrapper = Wrapper { member: 10 }; /// -/// // Pointers have equal addresses. -/// assert!(std::ptr::eq( -/// &wrapper as *const Wrapper as *const u8, -/// &wrapper.member as *const i32 as *const u8 -/// )); +/// // Pointers have equal addresses. +/// assert!(std::ptr::eq( +/// &wrapper as *const Wrapper as *const u8, +/// &wrapper.member as *const i32 as *const u8 +/// )); /// -/// // Objects have equal addresses, but `Trait` has different implementations. 
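`offset_from` above becomes a `const unsafe fn` behind `const_ptr_offset_from`, while remaining gated on the unstable `ptr_offset_from` feature. What it computes can be checked with stable pointer arithmetic; the following is a small sketch (not part of the patch), with the unstable call itself only noted in a comment:

```rust
fn main() {
    let a: [u16; 6] = [0, 1, 2, 3, 4, 5];
    let first: *const u16 = &a[1];
    let second: *const u16 = &a[4];

    // `offset_from` measures the distance between two pointers into the same
    // allocation in units of T, not bytes. Computed manually here:
    let bytes = second as usize - first as usize;
    assert_eq!(bytes / std::mem::size_of::<u16>(), 3);

    // On a nightly compiler with `#![feature(ptr_offset_from)]` (as gated in
    // the hunk above), the equivalent call would be:
    //     unsafe { second.offset_from(first) } == 3
}
```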
-/// assert!(!std::ptr::eq( -/// &wrapper as &dyn Trait, -/// &wrapper.member as &dyn Trait, -/// )); -/// assert!(!std::ptr::eq( -/// &wrapper as &dyn Trait as *const dyn Trait, -/// &wrapper.member as &dyn Trait as *const dyn Trait, -/// )); +/// // Objects have equal addresses, but `Trait` has different implementations. +/// assert!(!std::ptr::eq( +/// &wrapper as &dyn Trait, +/// &wrapper.member as &dyn Trait, +/// )); +/// assert!(!std::ptr::eq( +/// &wrapper as &dyn Trait as *const dyn Trait, +/// &wrapper.member as &dyn Trait as *const dyn Trait, +/// )); /// -/// // Converting the reference to a `*const u8` compares by address. -/// assert!(std::ptr::eq( -/// &wrapper as &dyn Trait as *const dyn Trait as *const u8, -/// &wrapper.member as &dyn Trait as *const dyn Trait as *const u8, -/// )); -/// } +/// // Converting the reference to a `*const u8` compares by address. +/// assert!(std::ptr::eq( +/// &wrapper as &dyn Trait as *const dyn Trait as *const u8, +/// &wrapper.member as &dyn Trait as *const dyn Trait as *const u8, +/// )); /// ``` #[stable(feature = "ptr_eq", since = "1.17.0")] #[inline] diff --git a/src/libcore/slice/mod.rs b/src/libcore/slice/mod.rs index 0c2a4e0867..cdada1252d 100644 --- a/src/libcore/slice/mod.rs +++ b/src/libcore/slice/mod.rs @@ -28,7 +28,7 @@ use crate::fmt; use crate::intrinsics::{assume, exact_div, unchecked_sub, is_aligned_and_not_null}; use crate::isize; use crate::iter::*; -use crate::ops::{FnMut, Try, self}; +use crate::ops::{FnMut, Range, self}; use crate::option::Option; use crate::option::Option::{None, Some}; use crate::result::Result; @@ -62,9 +62,9 @@ impl [T] { /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] - #[cfg_attr(bootstrap, rustc_const_unstable(feature = "const_slice_len"))] // SAFETY: const sound because we transmute out the length field as a usize (which it must be) - #[cfg_attr(not(bootstrap), allow_internal_unstable(const_fn_union))] + #[allow(unused_attributes)] + #[allow_internal_unstable(const_fn_union)] pub const fn len(&self) -> usize { unsafe { crate::ptr::Repr { rust: self }.raw.len @@ -81,7 +81,6 @@ impl [T] { /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] - #[cfg_attr(bootstrap, rustc_const_unstable(feature = "const_slice_len"))] pub const fn is_empty(&self) -> bool { self.len() == 0 } @@ -408,6 +407,86 @@ impl [T] { self as *mut [T] as *mut T } + /// Returns the two raw pointers spanning the slice. + /// + /// The returned range is half-open, which means that the end pointer + /// points *one past* the last element of the slice. This way, an empty + /// slice is represented by two equal pointers, and the difference between + /// the two pointers represents the size of the size. + /// + /// See [`as_ptr`] for warnings on using these pointers. The end pointer + /// requires extra caution, as it does not point to a valid element in the + /// slice. + /// + /// This function is useful for interacting with foreign interfaces which + /// use two pointers to refer to a range of elements in memory, as is + /// common in C++. 
+ /// + /// It can also be useful to check if a pointer to an element refers to an + /// element of this slice: + /// + /// ``` + /// #![feature(slice_ptr_range)] + /// + /// let a = [1, 2, 3]; + /// let x = &a[1] as *const _; + /// let y = &5 as *const _; + /// + /// assert!(a.as_ptr_range().contains(&x)); + /// assert!(!a.as_ptr_range().contains(&y)); + /// ``` + /// + /// [`as_ptr`]: #method.as_ptr + #[unstable(feature = "slice_ptr_range", issue = "65807")] + #[inline] + pub fn as_ptr_range(&self) -> Range<*const T> { + // The `add` here is safe, because: + // + // - Both pointers are part of the same object, as pointing directly + // past the object also counts. + // + // - The size of the slice is never larger than isize::MAX bytes, as + // noted here: + // - https://github.com/rust-lang/unsafe-code-guidelines/issues/102#issuecomment-473340447 + // - https://doc.rust-lang.org/reference/behavior-considered-undefined.html + // - https://doc.rust-lang.org/core/slice/fn.from_raw_parts.html#safety + // (This doesn't seem normative yet, but the very same assumption is + // made in many places, including the Index implementation of slices.) + // + // - There is no wrapping around involved, as slices do not wrap past + // the end of the address space. + // + // See the documentation of pointer::add. + let start = self.as_ptr(); + let end = unsafe { start.add(self.len()) }; + start..end + } + + /// Returns the two unsafe mutable pointers spanning the slice. + /// + /// The returned range is half-open, which means that the end pointer + /// points *one past* the last element of the slice. This way, an empty + /// slice is represented by two equal pointers, and the difference between + /// the two pointers represents the size of the size. + /// + /// See [`as_mut_ptr`] for warnings on using these pointers. The end + /// pointer requires extra caution, as it does not point to a valid element + /// in the slice. + /// + /// This function is useful for interacting with foreign interfaces which + /// use two pointers to refer to a range of elements in memory, as is + /// common in C++. + /// + /// [`as_mut_ptr`]: #method.as_mut_ptr + #[unstable(feature = "slice_ptr_range", issue = "65807")] + #[inline] + pub fn as_mut_ptr_range(&mut self) -> Range<*mut T> { + // See as_ptr_range() above for why `add` here is safe. + let start = self.as_mut_ptr(); + let end = unsafe { start.add(self.len()) }; + start..end + } + /// Swaps two elements in the slice. /// /// # Arguments @@ -3182,39 +3261,6 @@ macro_rules! iterator { self.next_back() } - #[inline] - fn try_fold(&mut self, init: B, mut f: F) -> R where - Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try - { - // manual unrolling is needed when there are conditional exits from the loop - let mut accum = init; - unsafe { - while len!(self) >= 4 { - accum = f(accum, next_unchecked!(self))?; - accum = f(accum, next_unchecked!(self))?; - accum = f(accum, next_unchecked!(self))?; - accum = f(accum, next_unchecked!(self))?; - } - while !is_empty!(self) { - accum = f(accum, next_unchecked!(self))?; - } - } - Try::from_ok(accum) - } - - #[inline] - fn fold(mut self, init: Acc, mut f: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, - { - // Let LLVM unroll this, rather than using the default - // impl that would force the manual unrolling above - let mut accum = init; - while let Some(x) = self.next() { - accum = f(accum, x); - } - accum - } - #[inline] #[rustc_inherit_overflow_checks] fn position
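The new `as_ptr_range`/`as_mut_ptr_range` methods above are still unstable (`slice_ptr_range`), but the half-open range they return can be built today from stable API, which also makes the safety comment above concrete. A standalone sketch (not part of the patch):

```rust
fn main() {
    let a = [1, 2, 3];

    // The same `start..start.add(len)` range that `as_ptr_range` would return.
    let start = a.as_ptr();
    let end = unsafe { start.add(a.len()) };
    let range = start..end;

    // An empty slice collapses to two equal pointers.
    let empty: &[i32] = &[];
    assert_eq!(empty.as_ptr(), unsafe { empty.as_ptr().add(empty.len()) });

    // Pointers to elements of `a` fall inside the half-open range; the end
    // pointer itself and unrelated pointers do not.
    let inside = &a[1] as *const i32;
    let outside = &5 as *const i32;
    assert!(range.contains(&inside));
    assert!(!range.contains(&outside));
}
```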

) { - for lint in pass.get_lints() { - self.lints.push((lint, from_plugin)); + pub fn register_lints(&mut self, lints: &[&'static Lint]) { + for lint in lints { + self.lints.push(lint); let id = LintId::of(lint); if self.by_name.insert(lint.name_lower(), Id(id)).is_some() { - let msg = format!("duplicate specification of lint {}", lint.name_lower()); - match (sess, from_plugin) { - // We load builtin lints first, so a duplicate is a compiler bug. - // Use early_error when handling -W help with no crate. - (None, _) => early_error(config::ErrorOutputType::default(), &msg[..]), - (Some(_), false) => bug!("{}", msg), + bug!("duplicate specification of lint {}", lint.name_lower()) + } - // A duplicate name from a plugin is a user error. - (Some(sess), true) => sess.err(&msg[..]), + if let Some(FutureIncompatibleInfo { edition, .. }) = lint.future_incompatible { + if let Some(edition) = edition { + self.lint_groups.entry(edition.lint_name()) + .or_insert(LintGroup { + lint_ids: vec![], + from_plugin: lint.is_plugin, + depr: None, + }) + .lint_ids.push(id); } + + self.lint_groups.entry("future_incompatible") + .or_insert(LintGroup { + lint_ids: vec![], + from_plugin: lint.is_plugin, + depr: None, + }) + .lint_ids.push(id); } } } - pub fn register_future_incompatible(&mut self, - sess: Option<&Session>, - lints: Vec) { - - for edition in edition::ALL_EDITIONS { - let lints = lints.iter().filter(|f| f.edition == Some(*edition)).map(|f| f.id) - .collect::>(); - if !lints.is_empty() { - self.register_group(sess, false, edition.lint_name(), None, lints) - } - } - - let mut future_incompatible = Vec::with_capacity(lints.len()); - for lint in lints { - future_incompatible.push(lint.id); - self.future_incompatible.insert(lint.id, lint); - } - - self.register_group( - sess, - false, - "future_incompatible", - None, - future_incompatible, - ); - } - - pub fn future_incompatible(&self, id: LintId) -> Option<&FutureIncompatibleInfo> { - self.future_incompatible.get(&id) - } - pub fn register_group_alias( &mut self, lint_name: &'static str, @@ -277,7 +228,6 @@ impl LintStore { pub fn register_group( &mut self, - sess: Option<&Session>, from_plugin: bool, name: &'static str, deprecated_name: Option<&'static str>, @@ -300,16 +250,7 @@ impl LintStore { } if !new { - let msg = format!("duplicate specification of lint group {}", name); - match (sess, from_plugin) { - // We load builtin lints first, so a duplicate is a compiler bug. - // Use early_error when handling -W help with no crate. - (None, _) => early_error(config::ErrorOutputType::default(), &msg[..]), - (Some(_), false) => bug!("{}", msg), - - // A duplicate name from a plugin is a user error. - (Some(sess), true) => sess.err(&msg[..]), - } + bug!("duplicate specification of lint group {}", name); } } @@ -522,7 +463,7 @@ pub struct LateContext<'a, 'tcx> { pub access_levels: &'a AccessLevels, /// The store of registered lints and the lint levels. - lint_store: ReadGuard<'a, LintStore>, + lint_store: &'tcx LintStore, last_node_with_lint_attrs: hir::HirId, @@ -550,7 +491,7 @@ pub struct EarlyContext<'a> { builder: LintLevelsBuilder<'a>, /// The store of registered lints and the lint levels. 
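In the new `register_lints` above, a lint's `future_incompatible` data drives group membership directly: an edition-bound lint is added to its edition's group, and everything future-incompatible also lands in the `future_incompatible` group. The following is a self-contained model of just that grouping rule; the types and names are illustrative stand-ins, not rustc's own.

```rust
use std::collections::HashMap;

struct FakeLint {
    name: &'static str,
    future_incompatible: bool,
    // Lint-group name of the edition this lint is tied to, if any.
    edition_group: Option<&'static str>,
}

fn register(lints: &[FakeLint]) -> HashMap<&'static str, Vec<&'static str>> {
    let mut groups: HashMap<&'static str, Vec<&'static str>> = HashMap::new();
    for lint in lints {
        if lint.future_incompatible {
            if let Some(edition_group) = lint.edition_group {
                groups.entry(edition_group).or_default().push(lint.name);
            }
            groups.entry("future_incompatible").or_default().push(lint.name);
        }
    }
    groups
}

fn main() {
    let lints = [
        FakeLint {
            name: "keyword_idents",
            future_incompatible: true,
            edition_group: Some("rust_2018_compatibility"),
        },
        FakeLint {
            name: "order_dependent_trait_objects",
            future_incompatible: true,
            edition_group: None,
        },
        FakeLint { name: "dead_code", future_incompatible: false, edition_group: None },
    ];

    let groups = register(&lints);
    assert_eq!(groups["future_incompatible"].len(), 2);
    assert_eq!(groups["rust_2018_compatibility"], ["keyword_idents"]);
    assert!(groups.values().all(|v| !v.contains(&"dead_code")));
}
```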
- lint_store: ReadGuard<'a, LintStore>, + lint_store: &'a LintStore, buffered: LintBuffer, } @@ -639,14 +580,16 @@ pub trait LintContext: Sized { impl<'a> EarlyContext<'a> { fn new( sess: &'a Session, + lint_store: &'a LintStore, krate: &'a ast::Crate, buffered: LintBuffer, + warn_about_weird_lints: bool, ) -> EarlyContext<'a> { EarlyContext { sess, krate, - lint_store: sess.lint_store.borrow(), - builder: LintLevelSets::builder(sess), + lint_store, + builder: LintLevelSets::builder(sess, warn_about_weird_lints, lint_store), buffered, } } @@ -681,7 +624,7 @@ impl<'a, T: EarlyLintPass> EarlyContextAndPass<'a, T> { f: F) where F: FnOnce(&mut Self) { - let push = self.context.builder.push(attrs); + let push = self.context.builder.push(attrs, &self.context.lint_store); self.check_id(id); self.enter_attrs(attrs); f(self); @@ -829,7 +772,7 @@ impl<'a, 'tcx> LateContext<'a, 'tcx> { trait_ref: Option>, ) -> Result { if trait_ref.is_none() { - if let ty::Adt(def, substs) = self_ty.sty { + if let ty::Adt(def, substs) = self_ty.kind { return self.print_def_path(def.did, substs); } } @@ -875,14 +818,14 @@ impl<'a, 'tcx> LateContext<'a, 'tcx> { _ => {} } - path.push(disambiguated_data.data.as_interned_str().as_symbol()); + path.push(disambiguated_data.data.as_symbol()); Ok(path) } fn path_generic_args( self, print_prefix: impl FnOnce(Self) -> Result, - _args: &[Kind<'tcx>], + _args: &[GenericArg<'tcx>], ) -> Result { print_prefix(self) } @@ -981,7 +924,7 @@ for LateContextAndPass<'a, 'tcx, T> { fn visit_item(&mut self, it: &'tcx hir::Item) { let generics = self.context.generics.take(); - self.context.generics = it.node.generics(); + self.context.generics = it.kind.generics(); self.with_lint_attrs(it.hir_id, &it.attrs, |cx| { cx.with_param_env(it.hir_id, |cx| { lint_callback!(cx, check_item, it); @@ -1355,10 +1298,6 @@ impl LintPass for LateLintPassObjects<'_> { fn name(&self) -> &'static str { panic!() } - - fn get_lints(&self) -> LintArray { - panic!() - } } macro_rules! expand_late_lint_pass_impl_methods { @@ -1393,7 +1332,7 @@ fn late_lint_mod_pass<'tcx, T: for<'a> LateLintPass<'a, 'tcx>>( tables: &ty::TypeckTables::empty(None), param_env: ty::ParamEnv::empty(), access_levels, - lint_store: tcx.sess.lint_store.borrow(), + lint_store: &tcx.lint_store, last_node_with_lint_attrs: tcx.hir().as_local_hir_id(module_def_id).unwrap(), generics: None, only_module: true, @@ -1425,8 +1364,8 @@ pub fn late_lint_mod<'tcx, T: for<'a> LateLintPass<'a, 'tcx>>( late_lint_mod_pass(tcx, module_def_id, builtin_lints); - let mut passes: Vec<_> = tcx.sess.lint_store.borrow().late_module_passes - .iter().map(|pass| pass.fresh_late_pass()).collect(); + let mut passes: Vec<_> = tcx.lint_store.late_module_passes + .iter().map(|pass| (pass)()).collect(); if !passes.is_empty() { late_lint_mod_pass(tcx, module_def_id, LateLintPassObjects { lints: &mut passes[..] 
}); @@ -1443,7 +1382,7 @@ fn late_lint_pass_crate<'tcx, T: for<'a> LateLintPass<'a, 'tcx>>(tcx: TyCtxt<'tc tables: &ty::TypeckTables::empty(None), param_env: ty::ParamEnv::empty(), access_levels, - lint_store: tcx.sess.lint_store.borrow(), + lint_store: &tcx.lint_store, last_node_with_lint_attrs: hir::CRATE_HIR_ID, generics: None, only_module: false, @@ -1467,7 +1406,8 @@ fn late_lint_pass_crate<'tcx, T: for<'a> LateLintPass<'a, 'tcx>>(tcx: TyCtxt<'tc } fn late_lint_crate<'tcx, T: for<'a> LateLintPass<'a, 'tcx>>(tcx: TyCtxt<'tcx>, builtin_lints: T) { - let mut passes = tcx.sess.lint_store.borrow().late_passes.lock().take().unwrap(); + let mut passes = tcx.lint_store + .late_passes.iter().map(|p| (p)()).collect::>(); if !tcx.sess.opts.debugging_opts.no_interleave_lints { if !passes.is_empty() { @@ -1482,8 +1422,8 @@ fn late_lint_crate<'tcx, T: for<'a> LateLintPass<'a, 'tcx>>(tcx: TyCtxt<'tcx>, b }); } - let mut passes: Vec<_> = tcx.sess.lint_store.borrow().late_module_passes - .iter().map(|pass| pass.fresh_late_pass()).collect(); + let mut passes: Vec<_> = tcx.lint_store.late_module_passes + .iter().map(|pass| (pass)()).collect(); for pass in &mut passes { time(tcx.sess, &format!("running late module lint: {}", pass.name()), || { @@ -1491,9 +1431,6 @@ fn late_lint_crate<'tcx, T: for<'a> LateLintPass<'a, 'tcx>>(tcx: TyCtxt<'tcx>, b }); } } - - // Put the passes back in the session. - *tcx.sess.lint_store.borrow().late_passes.lock() = Some(passes); } /// Performs lint checking on a crate. @@ -1510,7 +1447,7 @@ pub fn check_crate<'tcx, T: for<'a> LateLintPass<'a, 'tcx>>( time(tcx.sess, "module lints", || { // Run per-module lints par_iter(&tcx.hir().krate().modules).for_each(|(&module, _)| { - tcx.ensure().lint_mod(tcx.hir().local_def_id_from_node_id(module)); + tcx.ensure().lint_mod(tcx.hir().local_def_id(module)); }); }); }); @@ -1525,10 +1462,6 @@ impl LintPass for EarlyLintPassObjects<'_> { fn name(&self) -> &'static str { panic!() } - - fn get_lints(&self) -> LintArray { - panic!() - } } macro_rules! expand_early_lint_pass_impl_methods { @@ -1553,12 +1486,14 @@ early_lint_methods!(early_lint_pass_impl, []); fn early_lint_crate( sess: &Session, + lint_store: &LintStore, krate: &ast::Crate, pass: T, buffered: LintBuffer, + warn_about_weird_lints: bool, ) -> LintBuffer { let mut cx = EarlyContextAndPass { - context: EarlyContext::new(sess, krate, buffered), + context: EarlyContext::new(sess, lint_store, krate, buffered, warn_about_weird_lints), pass, }; @@ -1577,31 +1512,31 @@ fn early_lint_crate( pub fn check_ast_crate( sess: &Session, + lint_store: &LintStore, krate: &ast::Crate, pre_expansion: bool, + lint_buffer: Option, builtin_lints: T, ) { - let (mut passes, mut buffered) = if pre_expansion { - ( - sess.lint_store.borrow_mut().pre_expansion_passes.take().unwrap(), - LintBuffer::default(), - ) + let mut passes: Vec<_> = if pre_expansion { + lint_store.pre_expansion_passes.iter().map(|p| (p)()).collect() } else { - ( - sess.lint_store.borrow_mut().early_passes.take().unwrap(), - sess.buffered_lints.borrow_mut().take().unwrap(), - ) + lint_store.early_passes.iter().map(|p| (p)()).collect() }; + let mut buffered = lint_buffer.unwrap_or_default(); if !sess.opts.debugging_opts.no_interleave_lints { - buffered = early_lint_crate(sess, krate, builtin_lints, buffered); + buffered = early_lint_crate(sess, lint_store, krate, builtin_lints, buffered, + pre_expansion); if !passes.is_empty() { buffered = early_lint_crate( sess, + lint_store, krate, EarlyLintPassObjects { lints: &mut passes[..] 
}, buffered, + pre_expansion, ); } } else { @@ -1609,21 +1544,16 @@ pub fn check_ast_crate( buffered = time(sess, &format!("running lint: {}", pass.name()), || { early_lint_crate( sess, + lint_store, krate, EarlyLintPassObjects { lints: slice::from_mut(pass) }, buffered, + pre_expansion, ) }); } } - // Put the lint store levels and passes back in the session. - if pre_expansion { - sess.lint_store.borrow_mut().pre_expansion_passes = Some(passes); - } else { - sess.lint_store.borrow_mut().early_passes = Some(passes); - } - // All of the buffered lints should have been emitted at this point. // If not, that means that we somehow buffered a lint for a node id // that was not lint-checked (perhaps it doesn't exist?). This is a bug. @@ -1653,7 +1583,7 @@ impl Decodable for LintId { fn decode(d: &mut D) -> Result { let s = d.read_str()?; ty::tls::with(|tcx| { - match tcx.sess.lint_store.borrow().find_lints(&s) { + match tcx.lint_store.find_lints(&s) { Ok(ids) => { if ids.len() != 0 { panic!("invalid lint-id `{}`", s); diff --git a/src/librustc/lint/internal.rs b/src/librustc/lint/internal.rs index 13834eaf40..a08722e940 100644 --- a/src/librustc/lint/internal.rs +++ b/src/librustc/lint/internal.rs @@ -94,7 +94,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for TyTyKind { } fn check_ty(&mut self, cx: &LateContext<'_, '_>, ty: &'tcx Ty) { - match &ty.node { + match &ty.kind { TyKind::Path(qpath) => { if let QPath::Resolved(_, path) = qpath { if let Some(last) = path.segments.iter().last() { @@ -169,7 +169,7 @@ fn lint_ty_kind_usage(cx: &LateContext<'_, '_>, segment: &PathSegment) -> bool { } fn is_ty_or_ty_ctxt(cx: &LateContext<'_, '_>, ty: &Ty) -> Option { - match &ty.node { + match &ty.kind { TyKind::Path(qpath) => { if let QPath::Resolved(_, path) = qpath { let did = path.res.opt_def_id()?; @@ -218,7 +218,7 @@ declare_lint_pass!(LintPassImpl => [LINT_PASS_IMPL_WITHOUT_MACRO]); impl EarlyLintPass for LintPassImpl { fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) { - if let ItemKind::Impl(_, _, _, _, Some(lint_pass), _, _) = &item.node { + if let ItemKind::Impl(_, _, _, _, Some(lint_pass), _, _) = &item.kind { if let Some(last) = lint_pass.path.segments.last() { if last.ident.name == sym::LintPass { let expn_data = lint_pass.path.span.ctxt().outer_expn_data(); diff --git a/src/librustc/lint/levels.rs b/src/librustc/lint/levels.rs index cbc6dbdba7..e470dbdf32 100644 --- a/src/librustc/lint/levels.rs +++ b/src/librustc/lint/levels.rs @@ -3,16 +3,16 @@ use std::cmp; use crate::hir::HirId; use crate::ich::StableHashingContext; use crate::lint::builtin; -use crate::lint::context::CheckLintNameResult; +use crate::lint::context::{LintStore, CheckLintNameResult}; use crate::lint::{self, Lint, LintId, Level, LintSource}; use crate::session::Session; use crate::util::nodemap::FxHashMap; use errors::{Applicability, DiagnosticBuilder}; -use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey, - StableHasher, StableHasherResult}; +use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey, StableHasher}; use syntax::ast; use syntax::attr; use syntax::feature_gate; +use syntax::print::pprust; use syntax::source_map::MultiSpan; use syntax::symbol::{Symbol, sym}; @@ -35,21 +35,24 @@ enum LintSet { } impl LintLevelSets { - pub fn new(sess: &Session) -> LintLevelSets { + pub fn new(sess: &Session, lint_store: &LintStore) -> LintLevelSets { let mut me = LintLevelSets { list: Vec::new(), lint_cap: Level::Forbid, }; - me.process_command_line(sess); + me.process_command_line(sess, 
lint_store); return me } - pub fn builder(sess: &Session) -> LintLevelsBuilder<'_> { - LintLevelsBuilder::new(sess, LintLevelSets::new(sess)) + pub fn builder<'a>( + sess: &'a Session, + warn_about_weird_lints: bool, + store: &LintStore, + ) -> LintLevelsBuilder<'a> { + LintLevelsBuilder::new(sess, warn_about_weird_lints, LintLevelSets::new(sess, store)) } - fn process_command_line(&mut self, sess: &Session) { - let store = sess.lint_store.borrow(); + fn process_command_line(&mut self, sess: &Session, store: &LintStore) { let mut specs = FxHashMap::default(); self.lint_cap = sess.opts.lint_cap.unwrap_or(Level::Forbid); @@ -161,14 +164,18 @@ pub struct BuilderPush { } impl<'a> LintLevelsBuilder<'a> { - pub fn new(sess: &'a Session, sets: LintLevelSets) -> LintLevelsBuilder<'a> { + pub fn new( + sess: &'a Session, + warn_about_weird_lints: bool, + sets: LintLevelSets, + ) -> LintLevelsBuilder<'a> { assert_eq!(sets.list.len(), 1); LintLevelsBuilder { sess, sets, cur: 0, id_to_set: Default::default(), - warn_about_weird_lints: sess.buffered_lints.borrow().is_some(), + warn_about_weird_lints, } } @@ -186,9 +193,8 @@ impl<'a> LintLevelsBuilder<'a> { /// #[allow] /// /// Don't forget to call `pop`! - pub fn push(&mut self, attrs: &[ast::Attribute]) -> BuilderPush { + pub fn push(&mut self, attrs: &[ast::Attribute], store: &LintStore) -> BuilderPush { let mut specs = FxHashMap::default(); - let store = self.sess.lint_store.borrow(); let sess = self.sess; let bad_attr = |span| { struct_span_err!(sess, span, E0452, "malformed lint attribute input") @@ -202,11 +208,7 @@ impl<'a> LintLevelsBuilder<'a> { let meta = unwrap_or!(attr.meta(), continue); attr::mark_used(attr); - let mut metas = if let Some(metas) = meta.meta_item_list() { - metas - } else { - continue; - }; + let mut metas = unwrap_or!(meta.meta_item_list(), continue); if metas.is_empty() { // FIXME (#55112): issue unused-attributes lint for `#[level()]` @@ -218,7 +220,7 @@ impl<'a> LintLevelsBuilder<'a> { let mut reason = None; let tail_li = &metas[metas.len()-1]; if let Some(item) = tail_li.meta_item() { - match item.node { + match item.kind { ast::MetaItemKind::Word => {} // actual lint names handled later ast::MetaItemKind::NameValue(ref name_value) => { if item.path == sym::reason { @@ -226,7 +228,7 @@ impl<'a> LintLevelsBuilder<'a> { metas = &metas[0..metas.len()-1]; // FIXME (#55112): issue unused-attributes lint if we thereby // don't have any lint names (`#[level(reason = "foo")]`) - if let ast::LitKind::Str(rationale, _) = name_value.node { + if let ast::LitKind::Str(rationale, _) = name_value.kind { if !self.sess.features_untracked().lint_reasons { feature_gate::emit_feature_err( &self.sess.parse_sess, @@ -264,7 +266,7 @@ impl<'a> LintLevelsBuilder<'a> { let mut err = bad_attr(sp); let mut add_label = true; if let Some(item) = li.meta_item() { - if let ast::MetaItemKind::NameValue(_) = item.node { + if let ast::MetaItemKind::NameValue(_) = item.kind { if item.path == sym::reason { err.span_label(sp, "reason in lint attribute must come last"); add_label = false; @@ -286,7 +288,7 @@ impl<'a> LintLevelsBuilder<'a> { tool_ident.span, E0710, "an unknown tool name found in scoped lint: `{}`", - meta_item.path + pprust::path_to_string(&meta_item.path), ); continue; } @@ -526,9 +528,7 @@ impl LintLevelMap { impl<'a> HashStable> for LintLevelMap { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { 
let LintLevelMap { ref sets, ref id_to_set, @@ -567,9 +567,7 @@ impl<'a> HashStable> for LintLevelMap { impl HashStable for LintId { #[inline] - fn hash_stable(&self, - hcx: &mut HCX, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) { self.lint_name_raw().hash_stable(hcx, hasher); } } diff --git a/src/librustc/lint/mod.rs b/src/librustc/lint/mod.rs index 5b490b7012..11d0d0d90f 100644 --- a/src/librustc/lint/mod.rs +++ b/src/librustc/lint/mod.rs @@ -28,6 +28,7 @@ use crate::hir::intravisit; use crate::hir; use crate::lint::builtin::BuiltinLintDiagnostics; use crate::lint::builtin::parser::{ILL_FORMED_ATTRIBUTE_INPUT, META_VARIABLE_MISUSE}; +use crate::lint::builtin::parser::INCOMPLETE_INCLUDE; use crate::session::{Session, DiagnosticMessageId}; use crate::ty::TyCtxt; use crate::ty::query::Providers; @@ -38,13 +39,13 @@ use syntax::ast; use syntax::source_map::{MultiSpan, ExpnKind, DesugaringKind}; use syntax::early_buffered_lints::BufferedEarlyLintId; use syntax::edition::Edition; -use syntax::ext::base::MacroKind; use syntax::symbol::{Symbol, sym}; +use syntax_pos::hygiene::MacroKind; use syntax_pos::Span; pub use crate::lint::context::{LateContext, EarlyContext, LintContext, LintStore, check_crate, check_ast_crate, late_lint_mod, CheckLintNameResult, - FutureIncompatibleInfo, BufferedEarlyLint,}; + BufferedEarlyLint,}; /// Specification of a single lint. #[derive(Copy, Clone, Debug)] @@ -75,14 +76,41 @@ pub struct Lint { /// `true` if this lint is reported even inside expansions of external macros. pub report_in_external_macro: bool, + + pub future_incompatible: Option, + + pub is_plugin: bool, +} + +/// Extra information for a future incompatibility lint. +#[derive(Copy, Clone, Debug)] +pub struct FutureIncompatibleInfo { + /// e.g., a URL for an issue/PR/RFC or error code + pub reference: &'static str, + /// If this is an edition fixing lint, the edition in which + /// this lint becomes obsolete + pub edition: Option, } impl Lint { + pub const fn default_fields_for_macro() -> Self { + Lint { + name: "", + default_level: Level::Forbid, + desc: "", + edition_lint_opts: None, + is_plugin: false, + report_in_external_macro: false, + future_incompatible: None, + } + } + /// Returns the `rust::lint::Lint` for a `syntax::early_buffered_lints::BufferedEarlyLintId`. pub fn from_parser_lint_id(lint_id: BufferedEarlyLintId) -> &'static Self { match lint_id { BufferedEarlyLintId::IllFormedAttributeInput => ILL_FORMED_ATTRIBUTE_INPUT, BufferedEarlyLintId::MetaVariableMisuse => META_VARIABLE_MISUSE, + BufferedEarlyLintId::IncompleteInclude => INCOMPLETE_INCLUDE, } } @@ -103,18 +131,21 @@ impl Lint { #[macro_export] macro_rules! declare_lint { ($vis: vis $NAME: ident, $Level: ident, $desc: expr) => ( - declare_lint!{$vis $NAME, $Level, $desc, false} + declare_lint!( + $vis $NAME, $Level, $desc, + ); ); - ($vis: vis $NAME: ident, $Level: ident, $desc: expr, report_in_external_macro: $rep: expr) => ( - declare_lint!{$vis $NAME, $Level, $desc, $rep} - ); - ($vis: vis $NAME: ident, $Level: ident, $desc: expr, $external: expr) => ( + ($vis: vis $NAME: ident, $Level: ident, $desc: expr, + $(@future_incompatible = $fi:expr;)? 
$($v:ident),*) => ( $vis static $NAME: &$crate::lint::Lint = &$crate::lint::Lint { name: stringify!($NAME), default_level: $crate::lint::$Level, desc: $desc, edition_lint_opts: None, - report_in_external_macro: $external, + is_plugin: false, + $($v: true,)* + $(future_incompatible: Some($fi),)* + ..$crate::lint::Lint::default_fields_for_macro() }; ); ($vis: vis $NAME: ident, $Level: ident, $desc: expr, @@ -126,6 +157,7 @@ macro_rules! declare_lint { desc: $desc, edition_lint_opts: Some(($lint_edition, $crate::lint::Level::$edition_level)), report_in_external_macro: false, + is_plugin: false, }; ); } @@ -154,6 +186,8 @@ macro_rules! declare_tool_lint { desc: $desc, edition_lint_opts: None, report_in_external_macro: $external, + future_incompatible: None, + is_plugin: true, }; ); } @@ -171,14 +205,6 @@ pub type LintArray = Vec<&'static Lint>; pub trait LintPass { fn name(&self) -> &'static str; - - /// Gets descriptions of the lints this `LintPass` object can emit. - /// - /// N.B., there is no enforcement that the object only emits lints it registered. - /// And some `rustc` internal `LintPass`es register lints to be emitted by other - /// parts of the compiler. If you want enforced access restrictions for your - /// `Lint`, make it a private `static` item in its own module. - fn get_lints(&self) -> LintArray; } /// Implements `LintPass for $name` with the given list of `Lint` statics. @@ -187,7 +213,9 @@ macro_rules! impl_lint_pass { ($name:ident => [$($lint:expr),* $(,)?]) => { impl LintPass for $name { fn name(&self) -> &'static str { stringify!($name) } - fn get_lints(&self) -> LintArray { $crate::lint_array!($($lint),*) } + } + impl $name { + pub fn get_lints() -> LintArray { $crate::lint_array!($($lint),*) } } }; } @@ -285,9 +313,6 @@ macro_rules! expand_lint_pass_methods { macro_rules! declare_late_lint_pass { ([], [$hir:tt], [$($methods:tt)*]) => ( pub trait LateLintPass<'a, $hir>: LintPass { - fn fresh_late_pass(&self) -> LateLintPassObject { - panic!() - } expand_lint_pass_methods!(&LateContext<'a, $hir>, [$($methods)*]); } ) @@ -325,6 +350,12 @@ macro_rules! declare_combined_late_lint_pass { $($passes: $constructor,)* } } + + $v fn get_lints() -> LintArray { + let mut lints = Vec::new(); + $(lints.extend_from_slice(&$passes::get_lints());)* + lints + } } impl<'a, 'tcx> LateLintPass<'a, 'tcx> for $name { @@ -335,12 +366,6 @@ macro_rules! declare_combined_late_lint_pass { fn name(&self) -> &'static str { panic!() } - - fn get_lints(&self) -> LintArray { - let mut lints = Vec::new(); - $(lints.extend_from_slice(&self.$passes.get_lints());)* - lints - } } ) } @@ -452,6 +477,12 @@ macro_rules! declare_combined_early_lint_pass { $($passes: $constructor,)* } } + + $v fn get_lints() -> LintArray { + let mut lints = Vec::new(); + $(lints.extend_from_slice(&$passes::get_lints());)* + lints + } } impl EarlyLintPass for $name { @@ -462,12 +493,6 @@ macro_rules! 
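The `declare_lint!` changes above lean on `Lint::default_fields_for_macro()` plus struct-update syntax, so the macro only spells out the fields a given lint actually sets even though the result lives in a `static`. The same pattern in a self-contained form, using a hypothetical `Settings` type rather than rustc's `Lint`:

```rust
struct Settings {
    name: &'static str,
    level: u8,
    report_in_external_macro: bool,
    is_plugin: bool,
}

impl Settings {
    // A `const fn` supplying defaults, usable from a `static` initializer.
    const fn default_fields() -> Self {
        Settings {
            name: "",
            level: 0,
            report_in_external_macro: false,
            is_plugin: false,
        }
    }
}

// Only the interesting fields are written out; the rest come from the defaults,
// mirroring the `..$crate::lint::Lint::default_fields_for_macro()` expansion.
static EXAMPLE: Settings = Settings {
    name: "example",
    level: 3,
    ..Settings::default_fields()
};

fn main() {
    assert_eq!(EXAMPLE.name, "example");
    assert_eq!(EXAMPLE.level, 3);
    assert!(!EXAMPLE.is_plugin && !EXAMPLE.report_in_external_macro);
}
```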
declare_combined_early_lint_pass { fn name(&self) -> &'static str { panic!() } - - fn get_lints(&self) -> LintArray { - let mut lints = Vec::new(); - $(lints.extend_from_slice(&self.$passes.get_lints());)* - lints - } } ) } @@ -618,13 +643,29 @@ impl LintBuffer { } } - pub fn take(&mut self, id: ast::NodeId) -> Vec { + fn take(&mut self, id: ast::NodeId) -> Vec { self.map.remove(&id).unwrap_or_default() } - pub fn get_any(&self) -> Option<&[BufferedEarlyLint]> { - let key = self.map.keys().next().map(|k| *k); - key.map(|k| &self.map[&k][..]) + pub fn buffer_lint>( + &mut self, + lint: &'static Lint, + id: ast::NodeId, + sp: S, + msg: &str, + ) { + self.add_lint(lint, id, sp.into(), msg, BuiltinLintDiagnostics::Normal) + } + + pub fn buffer_lint_with_diagnostic>( + &mut self, + lint: &'static Lint, + id: ast::NodeId, + sp: S, + msg: &str, + diagnostic: BuiltinLintDiagnostics, + ) { + self.add_lint(lint, id, sp.into(), msg, diagnostic) } } @@ -647,9 +688,8 @@ pub fn struct_lint_level<'a>(sess: &'a Session, }; // Check for future incompatibility lints and issue a stronger warning. - let lints = sess.lint_store.borrow(); let lint_id = LintId::of(lint); - let future_incompatible = lints.future_incompatible(lint_id); + let future_incompatible = lint.future_incompatible; // If this code originates in a foreign macro, aka something that this crate // did not itself author, then it's likely that there's nothing this crate @@ -753,13 +793,15 @@ pub fn maybe_lint_level_root(tcx: TyCtxt<'_>, id: hir::HirId) -> bool { fn lint_levels(tcx: TyCtxt<'_>, cnum: CrateNum) -> &LintLevelMap { assert_eq!(cnum, LOCAL_CRATE); + let store = &tcx.lint_store; let mut builder = LintLevelMapBuilder { - levels: LintLevelSets::builder(tcx.sess), + levels: LintLevelSets::builder(tcx.sess, false, &store), tcx: tcx, + store: store, }; let krate = tcx.hir().krate(); - let push = builder.levels.push(&krate.attrs); + let push = builder.levels.push(&krate.attrs, &store); builder.levels.register_id(hir::CRATE_HIR_ID); for macro_def in &krate.exported_macros { builder.levels.register_id(macro_def.hir_id); @@ -770,19 +812,20 @@ fn lint_levels(tcx: TyCtxt<'_>, cnum: CrateNum) -> &LintLevelMap { tcx.arena.alloc(builder.levels.build_map()) } -struct LintLevelMapBuilder<'tcx> { +struct LintLevelMapBuilder<'a, 'tcx> { levels: levels::LintLevelsBuilder<'tcx>, tcx: TyCtxt<'tcx>, + store: &'a LintStore, } -impl LintLevelMapBuilder<'tcx> { +impl LintLevelMapBuilder<'_, '_> { fn with_lint_attrs(&mut self, id: hir::HirId, attrs: &[ast::Attribute], f: F) where F: FnOnce(&mut Self) { - let push = self.levels.push(attrs); + let push = self.levels.push(attrs, self.store); if push.changed { self.levels.register_id(id); } @@ -791,7 +834,7 @@ impl LintLevelMapBuilder<'tcx> { } } -impl intravisit::Visitor<'tcx> for LintLevelMapBuilder<'tcx> { +impl intravisit::Visitor<'tcx> for LintLevelMapBuilder<'_, 'tcx> { fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'tcx> { intravisit::NestedVisitorMap::All(&self.tcx.hir()) } diff --git a/src/librustc/macros.rs b/src/librustc/macros.rs index 09fa924efc..256a08d7e9 100644 --- a/src/librustc/macros.rs +++ b/src/librustc/macros.rs @@ -97,9 +97,9 @@ macro_rules! 
impl_stable_hash_for { where $($T: ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>>),* { #[inline] - fn hash_stable(&self, - __ctx: &mut $crate::ich::StableHashingContext<'a>, - __hasher: &mut ::rustc_data_structures::stable_hasher::StableHasher) { + fn hash_stable(&self, + __ctx: &mut $crate::ich::StableHashingContext<'a>, + __hasher: &mut ::rustc_data_structures::stable_hasher::StableHasher) { use $enum_path::*; ::std::mem::discriminant(self).hash_stable(__ctx, __hasher); @@ -128,9 +128,9 @@ macro_rules! impl_stable_hash_for { where $($T: ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>>),* { #[inline] - fn hash_stable(&self, - __ctx: &mut $crate::ich::StableHashingContext<'a>, - __hasher: &mut ::rustc_data_structures::stable_hasher::StableHasher) { + fn hash_stable(&self, + __ctx: &mut $crate::ich::StableHashingContext<'a>, + __hasher: &mut ::rustc_data_structures::stable_hasher::StableHasher) { let $struct_name { $(ref $field),* } = *self; @@ -153,9 +153,9 @@ macro_rules! impl_stable_hash_for { where $($T: ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>>),* { #[inline] - fn hash_stable(&self, - __ctx: &mut $crate::ich::StableHashingContext<'a>, - __hasher: &mut ::rustc_data_structures::stable_hasher::StableHasher) { + fn hash_stable(&self, + __ctx: &mut $crate::ich::StableHashingContext<'a>, + __hasher: &mut ::rustc_data_structures::stable_hasher::StableHasher) { let $struct_name ( $(ref $field),* ) = *self; @@ -173,9 +173,9 @@ macro_rules! impl_stable_hash_for_spanned { impl HashStable> for ::syntax::source_map::Spanned<$T> { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher) { self.node.hash_stable(hcx, hasher); self.span.hash_stable(hcx, hasher); } diff --git a/src/librustc/middle/borrowck.rs b/src/librustc/middle/borrowck.rs deleted file mode 100644 index 60c24eeae7..0000000000 --- a/src/librustc/middle/borrowck.rs +++ /dev/null @@ -1,31 +0,0 @@ -use crate::ich::StableHashingContext; - -use rustc_data_structures::stable_hasher::{HashStable, StableHasher, - StableHasherResult}; - -#[derive(Copy, Clone, Debug, RustcEncodable, RustcDecodable)] -pub enum SignalledError { SawSomeError, NoErrorsSeen } - -impl Default for SignalledError { - fn default() -> SignalledError { - SignalledError::NoErrorsSeen - } -} - -impl_stable_hash_for!(enum self::SignalledError { SawSomeError, NoErrorsSeen }); - -#[derive(Debug, Default, RustcEncodable, RustcDecodable)] -pub struct BorrowCheckResult { - pub signalled_any_error: SignalledError, -} - -impl<'a> HashStable> for BorrowCheckResult { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { - let BorrowCheckResult { - ref signalled_any_error, - } = *self; - signalled_any_error.hash_stable(hcx, hasher); - } -} diff --git a/src/librustc/middle/cstore.rs b/src/librustc/middle/cstore.rs index de84fcd716..1f40862273 100644 --- a/src/librustc/middle/cstore.rs +++ b/src/librustc/middle/cstore.rs @@ -16,7 +16,7 @@ use syntax::ast; use syntax::symbol::Symbol; use syntax_pos::Span; use rustc_target::spec::Target; -use rustc_data_structures::sync::{self, MetadataRef, Lrc}; +use rustc_data_structures::sync::{self, MetadataRef}; use rustc_macros::HashStable; pub use self::NativeLibraryKind::*; @@ -32,6 +32,12 @@ pub struct CrateSource { pub rmeta: Option<(PathBuf, 
PathKind)>, } +impl CrateSource { + pub fn paths(&self) -> impl Iterator { + self.dylib.iter().chain(self.rlib.iter()).chain(self.rmeta.iter()).map(|p| &p.0) + } +} + #[derive(RustcEncodable, RustcDecodable, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Debug, HashStable)] pub enum DepKind { @@ -96,6 +102,8 @@ pub enum NativeLibraryKind { NativeStaticNobundle, /// macOS-specific NativeFramework, + /// Windows dynamic library without import library. + NativeRawDylib, /// default way to specify a dynamic library NativeUnknown, } @@ -109,7 +117,7 @@ pub struct NativeLibrary { pub wasm_import_module: Option, } -#[derive(Clone, Hash, RustcEncodable, RustcDecodable, HashStable)] +#[derive(Clone, RustcEncodable, RustcDecodable, HashStable)] pub struct ForeignModule { pub foreign_items: Vec, pub def_id: DefId, @@ -126,10 +134,17 @@ pub struct ExternCrate { /// used to select the extern with the shortest path pub path_len: usize, + /// Crate that depends on this crate + pub dependency_of: CrateNum, +} + +impl ExternCrate { /// If true, then this crate is the crate named by the extern /// crate referenced above. If false, then this crate is a dep /// of the crate. - pub direct: bool, + pub fn is_direct(&self) -> bool { + self.dependency_of == LOCAL_CRATE + } } #[derive(Copy, Clone, Debug, HashStable)] @@ -141,9 +156,7 @@ pub enum ExternCrateSource { /// such ids DefId, ), - // Crate is loaded by `use`. - Use, - /// Crate is implicitly loaded by an absolute path. + /// Crate is implicitly loaded by a path resolving through extern prelude. Path, } @@ -178,6 +191,8 @@ pub trait MetadataLoader { -> Result; } +pub type MetadataLoaderDyn = dyn MetadataLoader + Sync; + /// A store of Rust crates, through which their metadata can be accessed. /// /// Note that this trait should probably not be expanding today. All new @@ -188,20 +203,20 @@ pub trait MetadataLoader { /// (it'd break incremental compilation) and should only be called pre-HIR (e.g. /// during resolve) pub trait CrateStore { - fn crate_data_as_rc_any(&self, krate: CrateNum) -> Lrc; + fn crate_data_as_any(&self, cnum: CrateNum) -> &dyn Any; // resolve fn def_key(&self, def: DefId) -> DefKey; fn def_path(&self, def: DefId) -> hir_map::DefPath; fn def_path_hash(&self, def: DefId) -> hir_map::DefPathHash; - fn def_path_table(&self, cnum: CrateNum) -> Lrc; + fn def_path_table(&self, cnum: CrateNum) -> &DefPathTable; // "queries" used in resolve that aren't tracked for incremental compilation fn crate_name_untracked(&self, cnum: CrateNum) -> Symbol; fn crate_is_private_dep_untracked(&self, cnum: CrateNum) -> bool; fn crate_disambiguator_untracked(&self, cnum: CrateNum) -> CrateDisambiguator; fn crate_hash_untracked(&self, cnum: CrateNum) -> Svh; - fn extern_mod_stmt_cnum_untracked(&self, emod_id: ast::NodeId) -> Option; + fn crate_host_hash_untracked(&self, cnum: CrateNum) -> Option; fn item_generics_cloned_untracked(&self, def: DefId, sess: &Session) -> ty::Generics; fn postorder_cnums_untracked(&self) -> Vec; diff --git a/src/librustc/middle/dependency_format.rs b/src/librustc/middle/dependency_format.rs index 96b99fe4cd..8b2bf55ccc 100644 --- a/src/librustc/middle/dependency_format.rs +++ b/src/librustc/middle/dependency_format.rs @@ -1,64 +1,10 @@ -//! Resolution of mixing rlibs and dylibs +//! Type definitions for learning about the dependency formats of all upstream +//! crates (rlibs/dylibs/oh my). //! -//! When producing a final artifact, such as a dynamic library, the compiler has -//! 
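The `CrateSource::paths` helper added above is just an iterator chain over three optional sources. The shape of that chain, extracted into a standalone function over a stand-in tuple type (the real second tuple field is `PathKind`, replaced by `u32` here for illustration):

```rust
use std::path::PathBuf;

// Each source is optional; `Option::iter` yields zero or one items, so the
// chain enumerates exactly the paths that are present.
fn paths<'a>(
    dylib: &'a Option<(PathBuf, u32)>,
    rlib: &'a Option<(PathBuf, u32)>,
    rmeta: &'a Option<(PathBuf, u32)>,
) -> impl Iterator<Item = &'a PathBuf> {
    dylib.iter().chain(rlib.iter()).chain(rmeta.iter()).map(|p| &p.0)
}

fn main() {
    let dylib: Option<(PathBuf, u32)> = None;
    let rlib = Some((PathBuf::from("libfoo.rlib"), 0u32));
    let rmeta = Some((PathBuf::from("libfoo.rmeta"), 0u32));

    let found: Vec<&PathBuf> = paths(&dylib, &rlib, &rmeta).collect();
    assert_eq!(found.len(), 2);
}
```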
a choice between linking an rlib or linking a dylib of all upstream -//! dependencies. The linking phase must guarantee, however, that a library only -//! show up once in the object file. For example, it is illegal for library A to -//! be statically linked to B and C in separate dylibs, and then link B and C -//! into a crate D (because library A appears twice). -//! -//! The job of this module is to calculate what format each upstream crate -//! should be used when linking each output type requested in this session. This -//! generally follows this set of rules: -//! -//! 1. Each library must appear exactly once in the output. -//! 2. Each rlib contains only one library (it's just an object file) -//! 3. Each dylib can contain more than one library (due to static linking), -//! and can also bring in many dynamic dependencies. -//! -//! With these constraints in mind, it's generally a very difficult problem to -//! find a solution that's not "all rlibs" or "all dylibs". I have suspicions -//! that NP-ness may come into the picture here... -//! -//! The current selection algorithm below looks mostly similar to: -//! -//! 1. If static linking is required, then require all upstream dependencies -//! to be available as rlibs. If not, generate an error. -//! 2. If static linking is requested (generating an executable), then -//! attempt to use all upstream dependencies as rlibs. If any are not -//! found, bail out and continue to step 3. -//! 3. Static linking has failed, at least one library must be dynamically -//! linked. Apply a heuristic by greedily maximizing the number of -//! dynamically linked libraries. -//! 4. Each upstream dependency available as a dynamic library is -//! registered. The dependencies all propagate, adding to a map. It is -//! possible for a dylib to add a static library as a dependency, but it -//! is illegal for two dylibs to add the same static library as a -//! dependency. The same dylib can be added twice. Additionally, it is -//! illegal to add a static dependency when it was previously found as a -//! dylib (and vice versa) -//! 5. After all dynamic dependencies have been traversed, re-traverse the -//! remaining dependencies and add them statically (if they haven't been -//! added already). -//! -//! While not perfect, this algorithm should help support use-cases such as leaf -//! dependencies being static while the larger tree of inner dependencies are -//! all dynamic. This isn't currently very well battle tested, so it will likely -//! fall short in some use cases. -//! -//! Currently, there is no way to specify the preference of linkage with a -//! particular library (other than a global dynamic/static switch). -//! Additionally, the algorithm is geared towards finding *any* solution rather -//! than finding a number of solutions (there are normally quite a few). - -use crate::hir::def_id::CrateNum; +//! For all the gory details, see the provider of the `dependency_formats` +//! query. use crate::session::config; -use crate::ty::TyCtxt; -use crate::middle::cstore::{self, DepKind}; -use crate::middle::cstore::LinkagePreference::{self, RequireStatic, RequireDynamic}; -use crate::util::nodemap::FxHashMap; -use rustc_target::spec::PanicStrategy; /// A list of dependencies for a certain crate type. /// @@ -71,324 +17,12 @@ pub type DependencyList = Vec; /// A mapping of all required dependencies for a particular flavor of output. /// /// This is local to the tcx, and is generally relevant to one session. 
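The module docs removed above spell out the old selection rules (every library must appear exactly once in the output; prefer an all-rlib plan, otherwise greedily link available dylibs and fill in the rest statically). A minimal, self-contained sketch of that heuristic, using toy types rather than the compiler's (`Crate`, `select_linkage`, and the availability flags are hypothetical, and the conflict checks for static libraries reached through two dylibs are omitted):

```rust
// Toy sketch of the removed selection rules; not rustc's actual implementation.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Linkage { Static, Dynamic }

struct Crate {
    name: &'static str,
    has_rlib: bool,
    has_dylib: bool,
}

fn select_linkage(deps: &[Crate]) -> Result<Vec<Linkage>, String> {
    // Rule: if every dependency ships an rlib, an all-static plan always works.
    if deps.iter().all(|c| c.has_rlib) {
        return Ok(vec![Linkage::Static; deps.len()]);
    }
    // Otherwise, greedily take dylibs where available and rlibs elsewhere;
    // a crate available in neither format cannot be linked at all.
    deps.iter()
        .map(|c| {
            if c.has_dylib {
                Ok(Linkage::Dynamic)
            } else if c.has_rlib {
                Ok(Linkage::Static)
            } else {
                Err(format!("crate `{}` is not available in any linkable format", c.name))
            }
        })
        .collect()
}

fn main() {
    let deps = [
        Crate { name: "a", has_rlib: true, has_dylib: false },
        Crate { name: "b", has_rlib: false, has_dylib: true },
    ];
    println!("{:?}", select_linkage(&deps));
}
```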
-pub type Dependencies = FxHashMap; +pub type Dependencies = Vec<(config::CrateType, DependencyList)>; -#[derive(Copy, Clone, PartialEq, Debug)] +#[derive(Copy, Clone, PartialEq, Debug, HashStable)] pub enum Linkage { NotLinked, IncludedFromDylib, Static, Dynamic, } - -pub fn calculate(tcx: TyCtxt<'_>) { - let sess = &tcx.sess; - let fmts = sess.crate_types.borrow().iter().map(|&ty| { - let linkage = calculate_type(tcx, ty); - verify_ok(tcx, &linkage); - (ty, linkage) - }).collect::>(); - sess.abort_if_errors(); - sess.dependency_formats.set(fmts); -} - -fn calculate_type(tcx: TyCtxt<'_>, ty: config::CrateType) -> DependencyList { - let sess = &tcx.sess; - - if !sess.opts.output_types.should_codegen() { - return Vec::new(); - } - - let preferred_linkage = match ty { - // cdylibs must have all static dependencies. - config::CrateType::Cdylib => Linkage::Static, - - // Generating a dylib without `-C prefer-dynamic` means that we're going - // to try to eagerly statically link all dependencies. This is normally - // done for end-product dylibs, not intermediate products. - config::CrateType::Dylib if !sess.opts.cg.prefer_dynamic => Linkage::Static, - config::CrateType::Dylib => Linkage::Dynamic, - - // If the global prefer_dynamic switch is turned off, or the final - // executable will be statically linked, prefer static crate linkage. - config::CrateType::Executable if !sess.opts.cg.prefer_dynamic || - sess.crt_static() => Linkage::Static, - config::CrateType::Executable => Linkage::Dynamic, - - // proc-macro crates are mostly cdylibs, but we also need metadata. - config::CrateType::ProcMacro => Linkage::Static, - - // No linkage happens with rlibs, we just needed the metadata (which we - // got long ago), so don't bother with anything. - config::CrateType::Rlib => Linkage::NotLinked, - - // staticlibs must have all static dependencies. - config::CrateType::Staticlib => Linkage::Static, - }; - - if preferred_linkage == Linkage::NotLinked { - // If the crate is not linked, there are no link-time dependencies. - return Vec::new(); - } - - if preferred_linkage == Linkage::Static { - // Attempt static linkage first. For dylibs and executables, we may be - // able to retry below with dynamic linkage. - if let Some(v) = attempt_static(tcx) { - return v; - } - - // Staticlibs, cdylibs, and static executables must have all static - // dependencies. If any are not found, generate some nice pretty errors. - if ty == config::CrateType::Cdylib || ty == config::CrateType::Staticlib || - (ty == config::CrateType::Executable && sess.crt_static() && - !sess.target.target.options.crt_static_allows_dylibs) { - for &cnum in tcx.crates().iter() { - if tcx.dep_kind(cnum).macros_only() { continue } - let src = tcx.used_crate_source(cnum); - if src.rlib.is_some() { continue } - sess.err(&format!("crate `{}` required to be available in rlib format, \ - but was not found in this form", - tcx.crate_name(cnum))); - } - return Vec::new(); - } - } - - let mut formats = FxHashMap::default(); - - // Sweep all crates for found dylibs. Add all dylibs, as well as their - // dependencies, ensuring there are no conflicts. The only valid case for a - // dependency to be relied upon twice is for both cases to rely on a dylib. 
- for &cnum in tcx.crates().iter() { - if tcx.dep_kind(cnum).macros_only() { continue } - let name = tcx.crate_name(cnum); - let src = tcx.used_crate_source(cnum); - if src.dylib.is_some() { - info!("adding dylib: {}", name); - add_library(tcx, cnum, RequireDynamic, &mut formats); - let deps = tcx.dylib_dependency_formats(cnum); - for &(depnum, style) in deps.iter() { - info!("adding {:?}: {}", style, tcx.crate_name(depnum)); - add_library(tcx, depnum, style, &mut formats); - } - } - } - - // Collect what we've got so far in the return vector. - let last_crate = tcx.crates().len(); - let mut ret = (1..last_crate+1).map(|cnum| { - match formats.get(&CrateNum::new(cnum)) { - Some(&RequireDynamic) => Linkage::Dynamic, - Some(&RequireStatic) => Linkage::IncludedFromDylib, - None => Linkage::NotLinked, - } - }).collect::>(); - - // Run through the dependency list again, and add any missing libraries as - // static libraries. - // - // If the crate hasn't been included yet and it's not actually required - // (e.g., it's an allocator) then we skip it here as well. - for &cnum in tcx.crates().iter() { - let src = tcx.used_crate_source(cnum); - if src.dylib.is_none() && - !formats.contains_key(&cnum) && - tcx.dep_kind(cnum) == DepKind::Explicit { - assert!(src.rlib.is_some() || src.rmeta.is_some()); - info!("adding staticlib: {}", tcx.crate_name(cnum)); - add_library(tcx, cnum, RequireStatic, &mut formats); - ret[cnum.as_usize() - 1] = Linkage::Static; - } - } - - // We've gotten this far because we're emitting some form of a final - // artifact which means that we may need to inject dependencies of some - // form. - // - // Things like allocators and panic runtimes may not have been activated - // quite yet, so do so here. - activate_injected_dep(*sess.injected_panic_runtime.get(), &mut ret, - &|cnum| tcx.is_panic_runtime(cnum)); - - // When dylib B links to dylib A, then when using B we must also link to A. - // It could be the case, however, that the rlib for A is present (hence we - // found metadata), but the dylib for A has since been removed. - // - // For situations like this, we perform one last pass over the dependencies, - // making sure that everything is available in the requested format. - for (cnum, kind) in ret.iter().enumerate() { - let cnum = CrateNum::new(cnum + 1); - let src = tcx.used_crate_source(cnum); - match *kind { - Linkage::NotLinked | - Linkage::IncludedFromDylib => {} - Linkage::Static if src.rlib.is_some() => continue, - Linkage::Dynamic if src.dylib.is_some() => continue, - kind => { - let kind = match kind { - Linkage::Static => "rlib", - _ => "dylib", - }; - sess.err(&format!("crate `{}` required to be available in {} format, \ - but was not found in this form", - tcx.crate_name(cnum), kind)); - } - } - } - - ret -} - -fn add_library( - tcx: TyCtxt<'_>, - cnum: CrateNum, - link: LinkagePreference, - m: &mut FxHashMap, -) { - match m.get(&cnum) { - Some(&link2) => { - // If the linkages differ, then we'd have two copies of the library - // if we continued linking. If the linkages are both static, then we - // would also have two copies of the library (static from two - // different locations). - // - // This error is probably a little obscure, but I imagine that it - // can be refined over time. 
- if link2 != link || link == RequireStatic { - tcx.sess.struct_err(&format!("cannot satisfy dependencies so `{}` only \ - shows up once", tcx.crate_name(cnum))) - .help("having upstream crates all available in one format \ - will likely make this go away") - .emit(); - } - } - None => { m.insert(cnum, link); } - } -} - -fn attempt_static(tcx: TyCtxt<'_>) -> Option { - let sess = &tcx.sess; - let crates = cstore::used_crates(tcx, RequireStatic); - if !crates.iter().by_ref().all(|&(_, ref p)| p.is_some()) { - return None - } - - // All crates are available in an rlib format, so we're just going to link - // everything in explicitly so long as it's actually required. - let last_crate = tcx.crates().len(); - let mut ret = (1..last_crate+1).map(|cnum| { - if tcx.dep_kind(CrateNum::new(cnum)) == DepKind::Explicit { - Linkage::Static - } else { - Linkage::NotLinked - } - }).collect::>(); - - // Our allocator/panic runtime may not have been linked above if it wasn't - // explicitly linked, which is the case for any injected dependency. Handle - // that here and activate them. - activate_injected_dep(*sess.injected_panic_runtime.get(), &mut ret, - &|cnum| tcx.is_panic_runtime(cnum)); - - Some(ret) -} - -// Given a list of how to link upstream dependencies so far, ensure that an -// injected dependency is activated. This will not do anything if one was -// transitively included already (e.g., via a dylib or explicitly so). -// -// If an injected dependency was not found then we're guaranteed the -// metadata::creader module has injected that dependency (not listed as -// a required dependency) in one of the session's field. If this field is not -// set then this compilation doesn't actually need the dependency and we can -// also skip this step entirely. -fn activate_injected_dep(injected: Option, - list: &mut DependencyList, - replaces_injected: &dyn Fn(CrateNum) -> bool) { - for (i, slot) in list.iter().enumerate() { - let cnum = CrateNum::new(i + 1); - if !replaces_injected(cnum) { - continue - } - if *slot != Linkage::NotLinked { - return - } - } - if let Some(injected) = injected { - let idx = injected.as_usize() - 1; - assert_eq!(list[idx], Linkage::NotLinked); - list[idx] = Linkage::Static; - } -} - -// After the linkage for a crate has been determined we need to verify that -// there's only going to be one allocator in the output. -fn verify_ok(tcx: TyCtxt<'_>, list: &[Linkage]) { - let sess = &tcx.sess; - if list.len() == 0 { - return - } - let mut panic_runtime = None; - for (i, linkage) in list.iter().enumerate() { - if let Linkage::NotLinked = *linkage { - continue - } - let cnum = CrateNum::new(i + 1); - - if tcx.is_panic_runtime(cnum) { - if let Some((prev, _)) = panic_runtime { - let prev_name = tcx.crate_name(prev); - let cur_name = tcx.crate_name(cnum); - sess.err(&format!("cannot link together two \ - panic runtimes: {} and {}", - prev_name, cur_name)); - } - panic_runtime = Some((cnum, tcx.panic_strategy(cnum))); - } - } - - // If we found a panic runtime, then we know by this point that it's the - // only one, but we perform validation here that all the panic strategy - // compilation modes for the whole DAG are valid. - if let Some((cnum, found_strategy)) = panic_runtime { - let desired_strategy = sess.panic_strategy(); - - // First up, validate that our selected panic runtime is indeed exactly - // our same strategy. 
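The removed `verify_ok` pass enforces a single panic runtime and then checks panic-strategy compatibility across the crate graph. The rule it applies, sketched with stand-in types (`PanicStrategy` here is a toy enum, not the `rustc_target` one): a crate is acceptable if the strategies match, if the product's desired strategy is `abort`, or if the crate is `compiler_builtins`.

```rust
// Minimal sketch of the compatibility rule enforced by the removed `verify_ok`.
#[derive(Clone, Copy, PartialEq, Debug)]
enum PanicStrategy { Unwind, Abort }

fn panic_strategy_compatible(
    desired: PanicStrategy,
    found: PanicStrategy,
    is_compiler_builtins: bool,
) -> bool {
    // An abort product never needs landing pads, so anything links against it,
    // and compiler_builtins is treated as strategy-agnostic.
    desired == PanicStrategy::Abort || is_compiler_builtins || desired == found
}

fn main() {
    assert!(panic_strategy_compatible(PanicStrategy::Abort, PanicStrategy::Unwind, false));
    assert!(!panic_strategy_compatible(PanicStrategy::Unwind, PanicStrategy::Abort, false));
}
```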
- if found_strategy != desired_strategy { - sess.err(&format!("the linked panic runtime `{}` is \ - not compiled with this crate's \ - panic strategy `{}`", - tcx.crate_name(cnum), - desired_strategy.desc())); - } - - // Next up, verify that all other crates are compatible with this panic - // strategy. If the dep isn't linked, we ignore it, and if our strategy - // is abort then it's compatible with everything. Otherwise all crates' - // panic strategy must match our own. - for (i, linkage) in list.iter().enumerate() { - if let Linkage::NotLinked = *linkage { - continue - } - if desired_strategy == PanicStrategy::Abort { - continue - } - let cnum = CrateNum::new(i + 1); - let found_strategy = tcx.panic_strategy(cnum); - let is_compiler_builtins = tcx.is_compiler_builtins(cnum); - if is_compiler_builtins || desired_strategy == found_strategy { - continue - } - - sess.err(&format!("the crate `{}` is compiled with the \ - panic strategy `{}` which is \ - incompatible with this crate's \ - strategy of `{}`", - tcx.crate_name(cnum), - found_strategy.desc(), - desired_strategy.desc())); - } - } -} diff --git a/src/librustc/middle/exported_symbols.rs b/src/librustc/middle/exported_symbols.rs index 2027880930..4d14299751 100644 --- a/src/librustc/middle/exported_symbols.rs +++ b/src/librustc/middle/exported_symbols.rs @@ -1,7 +1,6 @@ use crate::hir::def_id::{DefId, LOCAL_CRATE}; use crate::ich::StableHashingContext; -use rustc_data_structures::stable_hasher::{StableHasher, HashStable, - StableHasherResult}; +use rustc_data_structures::stable_hasher::{StableHasher, HashStable}; use std::cmp; use std::mem; use crate::ty::{self, TyCtxt}; @@ -94,9 +93,7 @@ pub fn metadata_symbol_name(tcx: TyCtxt<'_>) -> String { } impl<'a, 'tcx> HashStable> for ExportedSymbol<'tcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { ExportedSymbol::NonGeneric(def_id) => { diff --git a/src/librustc/middle/expr_use_visitor.rs b/src/librustc/middle/expr_use_visitor.rs index de6dadabcb..bb7ac5d8db 100644 --- a/src/librustc/middle/expr_use_visitor.rs +++ b/src/librustc/middle/expr_use_visitor.rs @@ -2,25 +2,20 @@ //! normal visitor, which just walks the entire body in one shot, the //! `ExprUseVisitor` determines how expressions are being used. -pub use self::LoanCause::*; pub use self::ConsumeMode::*; -pub use self::MoveReason::*; -pub use self::MatchMode::*; -use self::TrackMatchMode::*; use self::OverloadedCallType::*; -use crate::hir::def::{CtorOf, Res, DefKind}; +use crate::hir::def::Res; use crate::hir::def_id::DefId; use crate::hir::ptr::P; use crate::infer::InferCtxt; use crate::middle::mem_categorization as mc; use crate::middle::region; -use crate::ty::{self, DefIdTree, TyCtxt, adjustment}; +use crate::ty::{self, TyCtxt, adjustment}; use crate::hir::{self, PatKind}; use std::rc::Rc; use syntax_pos::Span; -use crate::util::nodemap::ItemLocalSet; /////////////////////////////////////////////////////////////////////////// // The Delegate trait @@ -30,161 +25,19 @@ use crate::util::nodemap::ItemLocalSet; pub trait Delegate<'tcx> { // The value found at `cmt` is either copied or moved, depending // on mode. 
- fn consume(&mut self, - consume_id: hir::HirId, - consume_span: Span, - cmt: &mc::cmt_<'tcx>, - mode: ConsumeMode); + fn consume(&mut self, cmt: &mc::cmt_<'tcx>, mode: ConsumeMode); - // The value found at `cmt` has been determined to match the - // pattern binding `matched_pat`, and its subparts are being - // copied or moved depending on `mode`. Note that `matched_pat` - // is called on all variant/structs in the pattern (i.e., the - // interior nodes of the pattern's tree structure) while - // consume_pat is called on the binding identifiers in the pattern - // (which are leaves of the pattern's tree structure). - // - // Note that variants/structs and identifiers are disjoint; thus - // `matched_pat` and `consume_pat` are never both called on the - // same input pattern structure (though of `consume_pat` can be - // called on a subpart of an input passed to `matched_pat). - fn matched_pat(&mut self, - matched_pat: &hir::Pat, - cmt: &mc::cmt_<'tcx>, - mode: MatchMode); - - // The value found at `cmt` is either copied or moved via the - // pattern binding `consume_pat`, depending on mode. - fn consume_pat(&mut self, - consume_pat: &hir::Pat, - cmt: &mc::cmt_<'tcx>, - mode: ConsumeMode); - - // The value found at `borrow` is being borrowed at the point - // `borrow_id` for the region `loan_region` with kind `bk`. - fn borrow(&mut self, - borrow_id: hir::HirId, - borrow_span: Span, - cmt: &mc::cmt_<'tcx>, - loan_region: ty::Region<'tcx>, - bk: ty::BorrowKind, - loan_cause: LoanCause); - - // The local variable `id` is declared but not initialized. - fn decl_without_init(&mut self, - id: hir::HirId, - span: Span); + // The value found at `cmt` is being borrowed with kind `bk`. + fn borrow(&mut self, cmt: &mc::cmt_<'tcx>, bk: ty::BorrowKind); // The path at `cmt` is being assigned to. - fn mutate(&mut self, - assignment_id: hir::HirId, - assignment_span: Span, - assignee_cmt: &mc::cmt_<'tcx>, - mode: MutateMode); - - // A nested closure or generator - only one layer deep. - fn nested_body(&mut self, _body_id: hir::BodyId) {} -} - -#[derive(Copy, Clone, PartialEq, Debug)] -pub enum LoanCause { - ClosureCapture(Span), - AddrOf, - AutoRef, - AutoUnsafe, - RefBinding, - OverloadedOperator, - ClosureInvocation, - ForLoop, - MatchDiscriminant + fn mutate(&mut self, assignee_cmt: &mc::cmt_<'tcx>); } #[derive(Copy, Clone, PartialEq, Debug)] pub enum ConsumeMode { Copy, // reference to x where x has a type that copies - Move(MoveReason), // reference to x where x has a type that moves -} - -#[derive(Copy, Clone, PartialEq, Debug)] -pub enum MoveReason { - DirectRefMove, - PatBindingMove, - CaptureMove, -} - -#[derive(Copy, Clone, PartialEq, Debug)] -pub enum MatchMode { - NonBindingMatch, - BorrowingMatch, - CopyingMatch, - MovingMatch, -} - -#[derive(Copy, Clone, PartialEq, Debug)] -enum TrackMatchMode { - Unknown, - Definite(MatchMode), - Conflicting, -} - -impl TrackMatchMode { - // Builds up the whole match mode for a pattern from its constituent - // parts. 
The lattice looks like this: - // - // Conflicting - // / \ - // / \ - // Borrowing Moving - // \ / - // \ / - // Copying - // | - // NonBinding - // | - // Unknown - // - // examples: - // - // * `(_, some_int)` pattern is Copying, since - // NonBinding + Copying => Copying - // - // * `(some_int, some_box)` pattern is Moving, since - // Copying + Moving => Moving - // - // * `(ref x, some_box)` pattern is Conflicting, since - // Borrowing + Moving => Conflicting - // - // Note that the `Unknown` and `Conflicting` states are - // represented separately from the other more interesting - // `Definite` states, which simplifies logic here somewhat. - fn lub(&mut self, mode: MatchMode) { - *self = match (*self, mode) { - // Note that clause order below is very significant. - (Unknown, new) => Definite(new), - (Definite(old), new) if old == new => Definite(old), - - (Definite(old), NonBindingMatch) => Definite(old), - (Definite(NonBindingMatch), new) => Definite(new), - - (Definite(old), CopyingMatch) => Definite(old), - (Definite(CopyingMatch), new) => Definite(new), - - (Definite(_), _) => Conflicting, - (Conflicting, _) => *self, - }; - } - - fn match_mode(&self) -> MatchMode { - match *self { - Unknown => NonBindingMatch, - Definite(mode) => mode, - Conflicting => { - // Conservatively return MovingMatch to let the - // compiler continue to make progress. - MovingMatch - } - } - } + Move, // reference to x where x has a type that moves } #[derive(Copy, Clone, PartialEq, Debug)] @@ -261,9 +114,6 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { /// - `param_env` --- parameter environment for trait lookups (esp. pertaining to `Copy`) /// - `region_scope_tree` --- region scope tree for the code being analyzed /// - `tables` --- typeck results for the code being analyzed - /// - `rvalue_promotable_map` --- if you care about rvalue promotion, then provide - /// the map here (it can be computed with `tcx.rvalue_promotable_map(def_id)`). - /// `None` means that rvalues will be given more conservative lifetimes. /// /// See also `with_infer`, which is used *during* typeck. pub fn new( @@ -273,15 +123,13 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { param_env: ty::ParamEnv<'tcx>, region_scope_tree: &'a region::ScopeTree, tables: &'a ty::TypeckTables<'tcx>, - rvalue_promotable_map: Option<&'tcx ItemLocalSet>, ) -> Self { ExprUseVisitor { mc: mc::MemCategorizationContext::new(tcx, param_env, body_owner, region_scope_tree, - tables, - rvalue_promotable_map), + tables), delegate, param_env, } @@ -317,16 +165,9 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { let param_ty = return_if_err!(self.mc.pat_ty_adjusted(¶m.pat)); debug!("consume_body: param_ty = {:?}", param_ty); - let fn_body_scope_r = - self.tcx().mk_region(ty::ReScope( - region::Scope { - id: body.value.hir_id.local_id, - data: region::ScopeData::Node - })); let param_cmt = Rc::new(self.mc.cat_rvalue( param.hir_id, param.pat.span, - fn_body_scope_r, // Parameters live only as long as the fn body. 
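For downstream users of `ExprUseVisitor`, the slimmed-down `Delegate` trait shown a little earlier now reports only the place plus a consume mode or borrow kind; the HirId, Span, region, and `LoanCause` plumbing is gone. A toy analogue of what an implementation might look like (the `Place` struct and the two enums are hypothetical stand-ins for the compiler-internal `cmt_`, `ConsumeMode`, and `ty::BorrowKind`):

```rust
// Toy analogue of the simplified Delegate trait; hypothetical types throughout.
#[derive(Debug)]
struct Place(String);

#[derive(Clone, Copy, Debug)]
enum ConsumeMode { Copy, Move }

#[derive(Clone, Copy, Debug)]
enum BorrowKind { Immutable, Mutable }

trait Delegate {
    fn consume(&mut self, place: &Place, mode: ConsumeMode);
    fn borrow(&mut self, place: &Place, bk: BorrowKind);
    fn mutate(&mut self, assignee: &Place);
}

/// A delegate that simply records what the walker reported.
#[derive(Default)]
struct Recorder {
    events: Vec<String>,
}

impl Delegate for Recorder {
    fn consume(&mut self, place: &Place, mode: ConsumeMode) {
        self.events.push(format!("consume {:?} as {:?}", place, mode));
    }
    fn borrow(&mut self, place: &Place, bk: BorrowKind) {
        self.events.push(format!("borrow {:?} as {:?}", place, bk));
    }
    fn mutate(&mut self, assignee: &Place) {
        self.events.push(format!("mutate {:?}", assignee));
    }
}

fn main() {
    let mut rec = Recorder::default();
    rec.borrow(&Place("x".into()), BorrowKind::Immutable);
    rec.consume(&Place("y".into()), ConsumeMode::Move);
    rec.mutate(&Place("z".into()));
    println!("{:#?}", rec.events);
}
```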
param_ty)); self.walk_irrefutable_pat(param_cmt, ¶m.pat); @@ -339,15 +180,11 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { self.mc.tcx } - fn delegate_consume(&mut self, - consume_id: hir::HirId, - consume_span: Span, - cmt: &mc::cmt_<'tcx>) { - debug!("delegate_consume(consume_id={}, cmt={:?})", - consume_id, cmt); + fn delegate_consume(&mut self, cmt: &mc::cmt_<'tcx>) { + debug!("delegate_consume(cmt={:?})", cmt); - let mode = copy_or_move(&self.mc, self.param_env, cmt, DirectRefMove); - self.delegate.consume(consume_id, consume_span, cmt, mode); + let mode = copy_or_move(&self.mc, self.param_env, cmt); + self.delegate.consume(cmt, mode); } fn consume_exprs(&mut self, exprs: &[hir::Expr]) { @@ -360,30 +197,21 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { debug!("consume_expr(expr={:?})", expr); let cmt = return_if_err!(self.mc.cat_expr(expr)); - self.delegate_consume(expr.hir_id, expr.span, &cmt); + self.delegate_consume(&cmt); self.walk_expr(expr); } - fn mutate_expr(&mut self, - span: Span, - assignment_expr: &hir::Expr, - expr: &hir::Expr, - mode: MutateMode) { + fn mutate_expr(&mut self, expr: &hir::Expr) { let cmt = return_if_err!(self.mc.cat_expr(expr)); - self.delegate.mutate(assignment_expr.hir_id, span, &cmt, mode); + self.delegate.mutate(&cmt); self.walk_expr(expr); } - fn borrow_expr(&mut self, - expr: &hir::Expr, - r: ty::Region<'tcx>, - bk: ty::BorrowKind, - cause: LoanCause) { - debug!("borrow_expr(expr={:?}, r={:?}, bk={:?})", - expr, r, bk); + fn borrow_expr(&mut self, expr: &hir::Expr, bk: ty::BorrowKind) { + debug!("borrow_expr(expr={:?}, bk={:?})", expr, bk); let cmt = return_if_err!(self.mc.cat_expr(expr)); - self.delegate.borrow(expr.hir_id, expr.span, &cmt, r, bk, cause); + self.delegate.borrow(&cmt, bk); self.walk_expr(expr) } @@ -397,28 +225,28 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { self.walk_adjustment(expr); - match expr.node { + match expr.kind { hir::ExprKind::Path(_) => { } hir::ExprKind::Type(ref subexpr, _) => { - self.walk_expr(&subexpr) + self.walk_expr(subexpr) } hir::ExprKind::Unary(hir::UnDeref, ref base) => { // *base - self.select_from_expr(&base); + self.select_from_expr(base); } hir::ExprKind::Field(ref base, _) => { // base.f - self.select_from_expr(&base); + self.select_from_expr(base); } hir::ExprKind::Index(ref lhs, ref rhs) => { // lhs[rhs] - self.select_from_expr(&lhs); - self.consume_expr(&rhs); + self.select_from_expr(lhs); + self.consume_expr(rhs); } hir::ExprKind::Call(ref callee, ref args) => { // callee(args) - self.walk_callee(expr, &callee); + self.walk_callee(expr, callee); self.consume_exprs(args); } @@ -436,14 +264,11 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { hir::ExprKind::Match(ref discr, ref arms, _) => { let discr_cmt = Rc::new(return_if_err!(self.mc.cat_expr(&discr))); - let r = self.tcx().lifetimes.re_empty; - self.borrow_expr(&discr, r, ty::ImmBorrow, MatchDiscriminant); + self.borrow_expr(&discr, ty::ImmBorrow); // treatment of the discriminant is handled while walking the arms. 
for arm in arms { - let mode = self.arm_move_mode(discr_cmt.clone(), arm); - let mode = mode.match_mode(); - self.walk_arm(discr_cmt.clone(), arm, mode); + self.walk_arm(discr_cmt.clone(), arm); } } @@ -454,11 +279,8 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { hir::ExprKind::AddrOf(m, ref base) => { // &base // make sure that the thing we are pointing out stays valid // for the lifetime `scope_r` of the resulting ptr: - let expr_ty = return_if_err!(self.mc.expr_ty(expr)); - if let ty::Ref(r, _, _) = expr_ty.sty { - let bk = ty::BorrowKind::from_mutbl(m); - self.borrow_expr(&base, r, bk, AddrOf); - } + let bk = ty::BorrowKind::from_mutbl(m); + self.borrow_expr(&base, bk); } hir::ExprKind::InlineAsm(ref ia, ref outputs, ref inputs) => { @@ -466,16 +288,7 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { if o.is_indirect { self.consume_expr(output); } else { - self.mutate_expr( - output.span, - expr, - output, - if o.is_rw { - MutateMode::WriteAndRead - } else { - MutateMode::JustWrite - }, - ); + self.mutate_expr(output); } } self.consume_exprs(inputs); @@ -486,65 +299,64 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { hir::ExprKind::Err => {} hir::ExprKind::Loop(ref blk, _, _) => { - self.walk_block(&blk); + self.walk_block(blk); } hir::ExprKind::Unary(_, ref lhs) => { - self.consume_expr(&lhs); + self.consume_expr(lhs); } hir::ExprKind::Binary(_, ref lhs, ref rhs) => { - self.consume_expr(&lhs); - self.consume_expr(&rhs); + self.consume_expr(lhs); + self.consume_expr(rhs); } hir::ExprKind::Block(ref blk, _) => { - self.walk_block(&blk); + self.walk_block(blk); } hir::ExprKind::Break(_, ref opt_expr) | hir::ExprKind::Ret(ref opt_expr) => { if let Some(ref expr) = *opt_expr { - self.consume_expr(&expr); + self.consume_expr(expr); } } hir::ExprKind::Assign(ref lhs, ref rhs) => { - self.mutate_expr(expr.span, expr, &lhs, MutateMode::JustWrite); - self.consume_expr(&rhs); + self.mutate_expr(lhs); + self.consume_expr(rhs); } hir::ExprKind::Cast(ref base, _) => { - self.consume_expr(&base); + self.consume_expr(base); } hir::ExprKind::DropTemps(ref expr) => { - self.consume_expr(&expr); + self.consume_expr(expr); } hir::ExprKind::AssignOp(_, ref lhs, ref rhs) => { if self.mc.tables.is_method_call(expr) { self.consume_expr(lhs); } else { - self.mutate_expr(expr.span, expr, &lhs, MutateMode::WriteAndRead); + self.mutate_expr(lhs); } - self.consume_expr(&rhs); + self.consume_expr(rhs); } hir::ExprKind::Repeat(ref base, _) => { - self.consume_expr(&base); + self.consume_expr(base); } - hir::ExprKind::Closure(_, _, body_id, fn_decl_span, _) => { - self.delegate.nested_body(body_id); + hir::ExprKind::Closure(_, _, _, fn_decl_span, _) => { self.walk_captures(expr, fn_decl_span); } hir::ExprKind::Box(ref base) => { - self.consume_expr(&base); + self.consume_expr(base); } hir::ExprKind::Yield(ref value, _) => { - self.consume_expr(&value); + self.consume_expr(value); } } } @@ -553,31 +365,19 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { let callee_ty = return_if_err!(self.mc.expr_ty_adjusted(callee)); debug!("walk_callee: callee={:?} callee_ty={:?}", callee, callee_ty); - match callee_ty.sty { + match callee_ty.kind { ty::FnDef(..) 
| ty::FnPtr(_) => { self.consume_expr(callee); } ty::Error => { } _ => { if let Some(def_id) = self.mc.tables.type_dependent_def_id(call.hir_id) { - let call_scope = region::Scope { - id: call.hir_id.local_id, - data: region::ScopeData::Node - }; match OverloadedCallType::from_method_id(self.tcx(), def_id) { FnMutOverloadedCall => { - let call_scope_r = self.tcx().mk_region(ty::ReScope(call_scope)); - self.borrow_expr(callee, - call_scope_r, - ty::MutBorrow, - ClosureInvocation); + self.borrow_expr(callee, ty::MutBorrow); } FnOverloadedCall => { - let call_scope_r = self.tcx().mk_region(ty::ReScope(call_scope)); - self.borrow_expr(callee, - call_scope_r, - ty::ImmBorrow, - ClosureInvocation); + self.borrow_expr(callee, ty::ImmBorrow); } FnOnceOverloadedCall => self.consume_expr(callee), } @@ -590,7 +390,7 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { } fn walk_stmt(&mut self, stmt: &hir::Stmt) { - match stmt.node { + match stmt.kind { hir::StmtKind::Local(ref local) => { self.walk_local(&local); } @@ -608,22 +408,14 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { } fn walk_local(&mut self, local: &hir::Local) { - match local.init { - None => { - local.pat.each_binding(|_, hir_id, span, _| { - self.delegate.decl_without_init(hir_id, span); - }) - } - - Some(ref expr) => { - // Variable declarations with - // initializers are considered - // "assigns", which is handled by - // `walk_pat`: - self.walk_expr(&expr); - let init_cmt = Rc::new(return_if_err!(self.mc.cat_expr(&expr))); - self.walk_irrefutable_pat(init_cmt, &local.pat); - } + if let Some(ref expr) = local.init { + // Variable declarations with + // initializers are considered + // "assigns", which is handled by + // `walk_pat`: + self.walk_expr(&expr); + let init_cmt = Rc::new(return_if_err!(self.mc.cat_expr(&expr))); + self.walk_irrefutable_pat(init_cmt, &local.pat); } } @@ -658,7 +450,7 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { // Select just those fields of the `with` // expression that will actually be used - match with_cmt.ty.sty { + match with_cmt.ty.kind { ty::Adt(adt, substs) if adt.is_struct() => { // Consume those fields of the with expression that are needed. for (f_index, with_field) in adt.non_enum_variant().fields.iter().enumerate() { @@ -673,7 +465,7 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { with_field.ident, with_field.ty(self.tcx(), substs) ); - self.delegate_consume(with_expr.hir_id, with_expr.span, &cmt_field); + self.delegate_consume(&cmt_field); } } } @@ -708,7 +500,7 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { adjustment::Adjust::Pointer(_) => { // Creating a closure/fn-pointer or unsizing consumes // the input and stores it into the resulting rvalue. - self.delegate_consume(expr.hir_id, expr.span, &cmt); + self.delegate_consume(&cmt); } adjustment::Adjust::Deref(None) => {} @@ -720,7 +512,7 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { // this is an autoref of `x`. 
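The adjustment handling in this area records a borrow for every autoref the type checker inserts. At the language level that is the implicit `&`/`&mut` added around method receivers, for example:

```rust
// Surface-level view of an "autoref" adjustment: the method call on `v`
// implicitly borrows the receiver, which is the borrow the visitor reports.
fn main() {
    let v = vec![1, 2, 3];
    let n = v.len();          // effectively `Vec::len(&v)`
    let same = Vec::len(&v);  // the explicit form of the same call
    assert_eq!(n, same);
    println!("{}", n);
}
```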
adjustment::Adjust::Deref(Some(ref deref)) => { let bk = ty::BorrowKind::from_mutbl(deref.mutbl); - self.delegate.borrow(expr.hir_id, expr.span, &cmt, deref.region, bk, AutoRef); + self.delegate.borrow(&cmt, bk); } adjustment::Adjust::Borrow(ref autoref) => { @@ -744,13 +536,8 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { autoref); match *autoref { - adjustment::AutoBorrow::Ref(r, m) => { - self.delegate.borrow(expr.hir_id, - expr.span, - cmt_base, - r, - ty::BorrowKind::from_mutbl(m.into()), - AutoRef); + adjustment::AutoBorrow::Ref(_, m) => { + self.delegate.borrow(cmt_base, ty::BorrowKind::from_mutbl(m.into())); } adjustment::AutoBorrow::RawPtr(m) => { @@ -758,37 +545,14 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { expr.hir_id, cmt_base); - // Converting from a &T to *T (or &mut T to *mut T) is - // treated as borrowing it for the enclosing temporary - // scope. - let r = self.tcx().mk_region(ty::ReScope( - region::Scope { - id: expr.hir_id.local_id, - data: region::ScopeData::Node - })); - self.delegate.borrow(expr.hir_id, - expr.span, - cmt_base, - r, - ty::BorrowKind::from_mutbl(m), - AutoUnsafe); + self.delegate.borrow(cmt_base, ty::BorrowKind::from_mutbl(m)); } } } - fn arm_move_mode(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm) -> TrackMatchMode { - let mut mode = Unknown; - for pat in &arm.pats { - self.determine_pat_move_mode(discr_cmt.clone(), &pat, &mut mode); - } - mode - } - - fn walk_arm(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm, mode: MatchMode) { - for pat in &arm.pats { - self.walk_pat(discr_cmt.clone(), &pat, mode); - } + fn walk_arm(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm) { + self.walk_pat(discr_cmt.clone(), &arm.pat); if let Some(hir::Guard::If(ref e)) = arm.guard { self.consume_expr(e) @@ -800,55 +564,22 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { /// Walks a pat that occurs in isolation (i.e., top-level of fn argument or /// let binding, and *not* a match arm or nested pat.) fn walk_irrefutable_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat) { - let mut mode = Unknown; - self.determine_pat_move_mode(cmt_discr.clone(), pat, &mut mode); - let mode = mode.match_mode(); - self.walk_pat(cmt_discr, pat, mode); + self.walk_pat(cmt_discr, pat); } - /// Identifies any bindings within `pat` and accumulates within - /// `mode` whether the overall pattern/match structure is a move, - /// copy, or borrow. - fn determine_pat_move_mode(&mut self, - cmt_discr: mc::cmt<'tcx>, - pat: &hir::Pat, - mode: &mut TrackMatchMode) { - debug!("determine_pat_move_mode cmt_discr={:?} pat={:?}", cmt_discr, pat); - return_if_err!(self.mc.cat_pattern(cmt_discr, pat, |cmt_pat, pat| { - if let PatKind::Binding(..) = pat.node { - let bm = *self.mc.tables.pat_binding_modes() - .get(pat.hir_id) - .expect("missing binding mode"); - match bm { - ty::BindByReference(..) => - mode.lub(BorrowingMatch), - ty::BindByValue(..) => { - match copy_or_move(&self.mc, self.param_env, &cmt_pat, PatBindingMove) { - Copy => mode.lub(CopyingMatch), - Move(..) => mode.lub(MovingMatch), - } - } - } - } - })); - } - - /// The core driver for walking a pattern; `match_mode` must be - /// established up front, e.g., via `determine_pat_move_mode` (see - /// also `walk_irrefutable_pat` for patterns that stand alone). 
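The pattern-walking code that follows reports each binding as either a borrow (for by-reference bindings) or a copy/move (for by-value bindings, depending on whether the bound type is `Copy`). In surface Rust terms:

```rust
// Illustration of the cases the pattern walker distinguishes: by-value
// bindings copy `Copy` types and move everything else, while by-reference
// bindings only borrow the matched value.
fn main() {
    let pair = (1u32, String::from("moved"));
    // `n` has a `Copy` type, so that binding copies; `s` does not, so the
    // `String` is moved out by the destructuring.
    let (n, s) = pair;
    println!("{} {}", n, s);

    let text = String::from("borrowed");
    // A by-reference binding only borrows, so `text` remains usable afterwards.
    let ref borrowed = text;
    println!("{} {}", borrowed, text);
}
```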
- fn walk_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat, match_mode: MatchMode) { + /// The core driver for walking a pattern + fn walk_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat) { debug!("walk_pat(cmt_discr={:?}, pat={:?})", cmt_discr, pat); let tcx = self.tcx(); let ExprUseVisitor { ref mc, ref mut delegate, param_env } = *self; return_if_err!(mc.cat_pattern(cmt_discr.clone(), pat, |cmt_pat, pat| { - if let PatKind::Binding(_, canonical_id, ..) = pat.node { + if let PatKind::Binding(_, canonical_id, ..) = pat.kind { debug!( - "walk_pat: binding cmt_pat={:?} pat={:?} match_mode={:?}", + "walk_pat: binding cmt_pat={:?} pat={:?}", cmt_pat, pat, - match_mode, ); if let Some(&bm) = mc.tables.pat_binding_modes().get(pat.hir_id) { debug!("walk_pat: pat.hir_id={:?} bm={:?}", pat.hir_id, bm); @@ -861,21 +592,19 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { // binding being produced. let def = Res::Local(canonical_id); if let Ok(ref binding_cmt) = mc.cat_res(pat.hir_id, pat.span, pat_ty, def) { - delegate.mutate(pat.hir_id, pat.span, binding_cmt, MutateMode::Init); + delegate.mutate(binding_cmt); } // It is also a borrow or copy/move of the value being matched. match bm { ty::BindByReference(m) => { - if let ty::Ref(r, _, _) = pat_ty.sty { - let bk = ty::BorrowKind::from_mutbl(m); - delegate.borrow(pat.hir_id, pat.span, &cmt_pat, r, bk, RefBinding); - } + let bk = ty::BorrowKind::from_mutbl(m); + delegate.borrow(&cmt_pat, bk); } ty::BindByValue(..) => { - let mode = copy_or_move(mc, param_env, &cmt_pat, PatBindingMove); + let mode = copy_or_move(mc, param_env, &cmt_pat); debug!("walk_pat binding consuming pat"); - delegate.consume_pat(pat, &cmt_pat, mode); + delegate.consume(&cmt_pat, mode); } } } else { @@ -883,45 +612,6 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { } } })); - - // Do a second pass over the pattern, calling `matched_pat` on - // the interior nodes (enum variants and structs), as opposed - // to the above loop's visit of than the bindings that form - // the leaves of the pattern tree structure. - return_if_err!(mc.cat_pattern(cmt_discr, pat, |cmt_pat, pat| { - let qpath = match pat.node { - PatKind::Path(ref qpath) | - PatKind::TupleStruct(ref qpath, ..) | - PatKind::Struct(ref qpath, ..) => qpath, - _ => return - }; - let res = mc.tables.qpath_res(qpath, pat.hir_id); - match res { - Res::Def(DefKind::Ctor(CtorOf::Variant, ..), variant_ctor_did) => { - let variant_did = mc.tcx.parent(variant_ctor_did).unwrap(); - let downcast_cmt = mc.cat_downcast_if_needed(pat, cmt_pat, variant_did); - - debug!("variantctor downcast_cmt={:?} pat={:?}", downcast_cmt, pat); - delegate.matched_pat(pat, &downcast_cmt, match_mode); - } - Res::Def(DefKind::Variant, variant_did) => { - let downcast_cmt = mc.cat_downcast_if_needed(pat, cmt_pat, variant_did); - - debug!("variant downcast_cmt={:?} pat={:?}", downcast_cmt, pat); - delegate.matched_pat(pat, &downcast_cmt, match_mode); - } - Res::Def(DefKind::Struct, _) - | Res::Def(DefKind::Ctor(..), _) - | Res::Def(DefKind::Union, _) - | Res::Def(DefKind::TyAlias, _) - | Res::Def(DefKind::AssocTy, _) - | Res::SelfTy(..) 
=> { - debug!("struct cmt_pat={:?} pat={:?}", cmt_pat, pat); - delegate.matched_pat(pat, &cmt_pat, match_mode); - } - _ => {} - } - })); } fn walk_captures(&mut self, closure_expr: &hir::Expr, fn_decl_span: Span) { @@ -929,7 +619,7 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { let closure_def_id = self.tcx().hir().local_def_id(closure_expr.hir_id); if let Some(upvars) = self.tcx().upvars(closure_def_id) { - for (&var_id, upvar) in upvars.iter() { + for &var_id in upvars.keys() { let upvar_id = ty::UpvarId { var_path: ty::UpvarPath { hir_id: var_id }, closure_expr_id: closure_def_id.to_local(), @@ -940,19 +630,11 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { var_id)); match upvar_capture { ty::UpvarCapture::ByValue => { - let mode = copy_or_move(&self.mc, - self.param_env, - &cmt_var, - CaptureMove); - self.delegate.consume(closure_expr.hir_id, upvar.span, &cmt_var, mode); + let mode = copy_or_move(&self.mc, self.param_env, &cmt_var); + self.delegate.consume(&cmt_var, mode); } ty::UpvarCapture::ByRef(upvar_borrow) => { - self.delegate.borrow(closure_expr.hir_id, - fn_decl_span, - &cmt_var, - upvar_borrow.region, - upvar_borrow.kind, - ClosureCapture(upvar.span)); + self.delegate.borrow(&cmt_var, upvar_borrow.kind); } } } @@ -975,10 +657,9 @@ fn copy_or_move<'a, 'tcx>( mc: &mc::MemCategorizationContext<'a, 'tcx>, param_env: ty::ParamEnv<'tcx>, cmt: &mc::cmt_<'tcx>, - move_reason: MoveReason, ) -> ConsumeMode { if !mc.type_is_copy_modulo_regions(param_env, cmt.ty, cmt.span) { - Move(move_reason) + Move } else { Copy } diff --git a/src/librustc/middle/lang_items.rs b/src/librustc/middle/lang_items.rs index c5d9a722ae..41f02a876f 100644 --- a/src/librustc/middle/lang_items.rs +++ b/src/librustc/middle/lang_items.rs @@ -13,6 +13,7 @@ use crate::hir::def_id::DefId; use crate::hir::check_attr::Target; use crate::ty::{self, TyCtxt}; use crate::middle::weak_lang_items; +use crate::middle::cstore::ExternCrate; use crate::util::nodemap::FxHashMap; use syntax::ast; @@ -182,16 +183,39 @@ impl LanguageItemCollector<'tcx> { E0152, "duplicate lang item found: `{}`.", name), - None => self.tcx.sess.struct_err(&format!( - "duplicate lang item in crate `{}`: `{}`.", - self.tcx.crate_name(item_def_id.krate), - name)), + None => { + match self.tcx.extern_crate(item_def_id) { + Some(ExternCrate {dependency_of, ..}) => { + self.tcx.sess.struct_err(&format!( + "duplicate lang item in crate `{}` (which `{}` depends on): `{}`.", + self.tcx.crate_name(item_def_id.krate), + self.tcx.crate_name(*dependency_of), + name)) + }, + _ => { + self.tcx.sess.struct_err(&format!( + "duplicate lang item in crate `{}`: `{}`.", + self.tcx.crate_name(item_def_id.krate), + name)) + } + } + }, }; if let Some(span) = self.tcx.hir().span_if_local(original_def_id) { span_note!(&mut err, span, "first defined here."); } else { - err.note(&format!("first defined in crate `{}`.", + match self.tcx.extern_crate(original_def_id) { + Some(ExternCrate {dependency_of, ..}) => { + err.note(&format!( + "first defined in crate `{}` (which `{}` depends on).", + self.tcx.crate_name(original_def_id.krate), + self.tcx.crate_name(*dependency_of))); + }, + _ => { + err.note(&format!("first defined in crate `{}`.", self.tcx.crate_name(original_def_id.krate))); + } + } } err.emit(); } @@ -273,6 +297,10 @@ language_item_table! { SizedTraitLangItem, "sized", sized_trait, Target::Trait; UnsizeTraitLangItem, "unsize", unsize_trait, Target::Trait; + // trait injected by #[derive(PartialEq)], (i.e. "Partial EQ"). 
+ StructuralPeqTraitLangItem, "structural_peq", structural_peq_trait, Target::Trait; + // trait injected by #[derive(Eq)], (i.e. "Total EQ"; no, I will not apologize). + StructuralTeqTraitLangItem, "structural_teq", structural_teq_trait, Target::Trait; CopyTraitLangItem, "copy", copy_trait, Target::Trait; CloneTraitLangItem, "clone", clone_trait, Target::Trait; SyncTraitLangItem, "sync", sync_trait, Target::Trait; @@ -342,6 +370,7 @@ language_item_table! { PanicFnLangItem, "panic", panic_fn, Target::Fn; PanicBoundsCheckFnLangItem, "panic_bounds_check", panic_bounds_check_fn, Target::Fn; PanicInfoLangItem, "panic_info", panic_info, Target::Struct; + PanicLocationLangItem, "panic_location", panic_location, Target::Struct; PanicImplLangItem, "panic_impl", panic_impl, Target::Fn; // Libstd panic entry point. Necessary for const eval to be able to catch it BeginPanicFnLangItem, "begin_panic", begin_panic_fn, Target::Fn; @@ -356,7 +385,7 @@ language_item_table! { EhPersonalityLangItem, "eh_personality", eh_personality, Target::Fn; EhUnwindResumeLangItem, "eh_unwind_resume", eh_unwind_resume, Target::Fn; - MSVCTryFilterLangItem, "msvc_try_filter", msvc_try_filter, Target::Static; + EhCatchTypeinfoLangItem, "eh_catch_typeinfo", eh_catch_typeinfo, Target::Static; OwnedBoxLangItem, "owned_box", owned_box, Target::Struct; diff --git a/src/librustc/middle/lib_features.rs b/src/librustc/middle/lib_features.rs index 0d6d016e50..2d726fcd17 100644 --- a/src/librustc/middle/lib_features.rs +++ b/src/librustc/middle/lib_features.rs @@ -59,7 +59,7 @@ impl LibFeatureCollector<'tcx> { attr.check_name(**stab_attr) }) { let meta_item = attr.meta(); - if let Some(MetaItem { node: MetaItemKind::List(ref metas), .. }) = meta_item { + if let Some(MetaItem { kind: MetaItemKind::List(ref metas), .. }) = meta_item { let mut feature = None; let mut since = None; for meta in metas { diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index 73ca981bbe..cbf336fdbe 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -79,12 +79,11 @@ use std::fmt; use std::hash::{Hash, Hasher}; use rustc_data_structures::fx::FxIndexMap; use std::rc::Rc; -use crate::util::nodemap::ItemLocalSet; #[derive(Clone, Debug, PartialEq)] pub enum Categorization<'tcx> { - Rvalue(ty::Region<'tcx>), // temporary val, argument is its scope - ThreadLocal(ty::Region<'tcx>), // value that cannot move, but still restricted in scope + Rvalue, // temporary val + ThreadLocal, // value that cannot move, but still restricted in scope StaticItem, Upvar(Upvar), // upvar referenced by closure env Local(hir::HirId), // local variable @@ -103,7 +102,7 @@ pub struct Upvar { } // different kinds of pointers: -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +#[derive(Clone, Copy, Debug, PartialEq)] pub enum PointerKind<'tcx> { /// `Box` Unique, @@ -117,7 +116,7 @@ pub enum PointerKind<'tcx> { // We use the term "interior" to mean "something reachable from the // base without a pointer dereference", e.g., a field -#[derive(Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Clone, PartialEq)] pub enum InteriorKind { InteriorField(FieldIndex), InteriorElement(InteriorOffsetKind), @@ -140,13 +139,13 @@ impl Hash for FieldIndex { } } -#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] +#[derive(Clone, PartialEq)] pub enum InteriorOffsetKind { Index, // e.g., `array_expr[index_expr]` Pattern, // e.g., `fn foo([_, a, _, _]: [A; 4]) { ... 
}` } -#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] +#[derive(Clone, Copy, PartialEq, Debug)] pub enum MutabilityCategory { McImmutable, // Immutable. McDeclared, // Directly declared as mutable. @@ -219,7 +218,6 @@ pub struct MemCategorizationContext<'a, 'tcx> { pub upvars: Option<&'tcx FxIndexMap>, pub region_scope_tree: &'a region::ScopeTree, pub tables: &'a ty::TypeckTables<'tcx>, - rvalue_promotable_map: Option<&'tcx ItemLocalSet>, infcx: Option<&'a InferCtxt<'a, 'tcx>>, } @@ -271,7 +269,7 @@ impl MutabilityCategory { id: hir::HirId, ) -> MutabilityCategory { let ret = match tcx.hir().get(id) { - Node::Binding(p) => match p.node { + Node::Binding(p) => match p.kind { PatKind::Binding(..) => { let bm = *tables.pat_binding_modes() .get(p.hir_id) @@ -335,7 +333,6 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> { body_owner: DefId, region_scope_tree: &'a region::ScopeTree, tables: &'a ty::TypeckTables<'tcx>, - rvalue_promotable_map: Option<&'tcx ItemLocalSet>, ) -> MemCategorizationContext<'a, 'tcx> { MemCategorizationContext { tcx, @@ -343,7 +340,6 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> { upvars: tcx.upvars(body_owner), region_scope_tree, tables, - rvalue_promotable_map, infcx: None, param_env, } @@ -369,19 +365,12 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> { ) -> MemCategorizationContext<'a, 'tcx> { let tcx = infcx.tcx; - // Subtle: we can't do rvalue promotion analysis until the - // typeck phase is complete, which means that you can't trust - // the rvalue lifetimes that result, but that's ok, since we - // don't need to know those during type inference. - let rvalue_promotable_map = None; - MemCategorizationContext { tcx, body_owner, upvars: tcx.upvars(body_owner), region_scope_tree, tables, - rvalue_promotable_map, infcx: Some(infcx), param_env, } @@ -486,7 +475,7 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> { // This code detects whether we are looking at a `ref x`, // and if so, figures out what the type *being borrowed* is. - let ret_ty = match pat.node { + let ret_ty = match pat.kind { PatKind::Binding(..) => { let bm = *self.tables .pat_binding_modes() @@ -577,7 +566,7 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> { debug!("cat_expr: id={} expr={:?}", expr.hir_id, expr); let expr_ty = self.expr_ty(expr)?; - match expr.node { + match expr.kind { hir::ExprKind::Unary(hir::UnDeref, ref e_base) => { if self.tables.is_method_call(expr) { self.cat_overloaded_place(expr, e_base, NoteNone) @@ -664,8 +653,7 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> { .any(|attr| attr.check_name(sym::thread_local)); let cat = if is_thread_local { - let re = self.temporary_scope(hir_id.local_id); - Categorization::ThreadLocal(re) + Categorization::ThreadLocal } else { Categorization::StaticItem }; @@ -738,18 +726,20 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> { LocalDefId::from_def_id(closure_expr_def_id), ); let ty = self.node_ty(fn_hir_id)?; - let kind = match ty.sty { + let kind = match ty.kind { ty::Generator(..) => ty::ClosureKind::FnOnce, - ty::Closure(closure_def_id, closure_substs) => { + ty::Closure(closure_def_id, substs) => { match self.infcx { // During upvar inference we may not know the // closure kind, just use the LATTICE_BOTTOM value. 
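The closure-kind lookup above (with `LATTICE_BOTTOM` as the fallback while upvar inference is still running) corresponds to which of `Fn`, `FnMut`, or `FnOnce` a closure ends up implementing, which the compiler infers from how the closure uses its captures. A small surface-level illustration:

```rust
// How capture usage maps onto the three closure kinds.
fn main() {
    let mut count = 0;
    let mut bump = || count += 1;          // mutates a capture      -> FnMut
    bump();
    bump();
    println!("count = {}", count);

    let s = String::from("hi");
    let read_only = || println!("{}", s);  // only reads its capture -> Fn
    read_only();
    read_only();                           // Fn closures can be called repeatedly

    let consume = move || drop(s);         // consumes `s` by value  -> FnOnce
    consume();                             // and can only be called once
}
```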
Some(infcx) => - infcx.closure_kind(closure_def_id, closure_substs) - .unwrap_or(ty::ClosureKind::LATTICE_BOTTOM), + infcx.closure_kind( + closure_def_id, + substs + ).unwrap_or(ty::ClosureKind::LATTICE_BOTTOM), None => - closure_substs.closure_kind(closure_def_id, self.tcx.global_tcx()), + substs.as_closure().kind(closure_def_id, self.tcx), } } _ => span_bug!(span, "unexpected type for fn in mem_categorization: {:?}", ty), @@ -876,16 +866,6 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> { ret } - /// Returns the lifetime of a temporary created by expr with id `id`. - /// This could be `'static` if `id` is part of a constant expression. - pub fn temporary_scope(&self, id: hir::ItemLocalId) -> ty::Region<'tcx> { - let scope = self.region_scope_tree.temporary_scope(id); - self.tcx.mk_region(match scope { - Some(scope) => ty::ReScope(scope), - None => ty::ReStatic - }) - } - pub fn cat_rvalue_node(&self, hir_id: hir::HirId, span: Span, @@ -894,28 +874,7 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> { debug!("cat_rvalue_node(id={:?}, span={:?}, expr_ty={:?})", hir_id, span, expr_ty); - let promotable = self.rvalue_promotable_map.as_ref().map(|m| m.contains(&hir_id.local_id)) - .unwrap_or(false); - - debug!("cat_rvalue_node: promotable = {:?}", promotable); - - // Always promote `[T; 0]` (even when e.g., borrowed mutably). - let promotable = match expr_ty.sty { - ty::Array(_, len) if len.try_eval_usize(self.tcx, self.param_env) == Some(0) => true, - _ => promotable, - }; - - debug!("cat_rvalue_node: promotable = {:?} (2)", promotable); - - // Compute maximum lifetime of this rvalue. This is 'static if - // we can promote to a constant, otherwise equal to enclosing temp - // lifetime. - let re = if promotable { - self.tcx.lifetimes.re_static - } else { - self.temporary_scope(hir_id.local_id) - }; - let ret = self.cat_rvalue(hir_id, span, re, expr_ty); + let ret = self.cat_rvalue(hir_id, span, expr_ty); debug!("cat_rvalue_node ret {:?}", ret); ret } @@ -923,12 +882,11 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> { pub fn cat_rvalue(&self, cmt_hir_id: hir::HirId, span: Span, - temp_scope: ty::Region<'tcx>, expr_ty: Ty<'tcx>) -> cmt_<'tcx> { let ret = cmt_ { hir_id: cmt_hir_id, span:span, - cat:Categorization::Rvalue(temp_scope), + cat:Categorization::Rvalue, mutbl:McDeclared, ty:expr_ty, note: NoteNone @@ -974,7 +932,7 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> { let place_ty = self.expr_ty(expr)?; let base_ty = self.expr_ty_adjusted(base)?; - let (region, mutbl) = match base_ty.sty { + let (region, mutbl) = match base_ty.kind { ty::Ref(region, _, mutbl) => (region, mutbl), _ => span_bug!(expr.span, "cat_overloaded_place: base is not a reference") }; @@ -1004,7 +962,7 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> { } }; - let ptr = match base_cmt.ty.sty { + let ptr = match base_cmt.ty.kind { ty::Adt(def, ..) if def.is_box() => Unique, ty::RawPtr(ref mt) => UnsafePtr(mt.mutbl), ty::Ref(r, _, mutbl) => { @@ -1212,7 +1170,7 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> { // that (where the `ref` on `x` is implied). op(cmt.clone(), pat); - match pat.node { + match pat.kind { PatKind::TupleStruct(ref qpath, ref subpats, ddpos) => { let res = self.tables.qpath_res(qpath, pat.hir_id); let (cmt, expected_len) = match res { @@ -1230,7 +1188,7 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> { Res::Def(DefKind::Ctor(CtorOf::Struct, CtorKind::Fn), _) | Res::SelfCtor(..) 
=> { let ty = self.pat_ty_unadjusted(&pat)?; - match ty.sty { + match ty.kind { ty::Adt(adt_def, _) => { (cmt, adt_def.non_enum_variant().fields.len()) } @@ -1303,7 +1261,7 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> { PatKind::Tuple(ref subpats, ddpos) => { // (p1, ..., pN) let ty = self.pat_ty_unadjusted(&pat)?; - let expected_len = match ty.sty { + let expected_len = match ty.kind { ty::Tuple(ref tys) => tys.len(), _ => span_bug!(pat.span, "tuple pattern unexpected type {:?}", ty), }; @@ -1376,9 +1334,9 @@ impl<'tcx> cmt_<'tcx> { //! determines how long the value in `self` remains live. match self.cat { - Categorization::Rvalue(..) | + Categorization::Rvalue | Categorization::StaticItem | - Categorization::ThreadLocal(..) | + Categorization::ThreadLocal | Categorization::Local(..) | Categorization::Deref(_, UnsafePtr(..)) | Categorization::Deref(_, BorrowedPtr(..)) | @@ -1409,8 +1367,8 @@ impl<'tcx> cmt_<'tcx> { b.freely_aliasable() } - Categorization::Rvalue(..) | - Categorization::ThreadLocal(..) | + Categorization::Rvalue | + Categorization::ThreadLocal | Categorization::Local(..) | Categorization::Upvar(..) | Categorization::Deref(_, UnsafePtr(..)) => { // yes, it's aliasable, but... @@ -1457,10 +1415,10 @@ impl<'tcx> cmt_<'tcx> { Categorization::StaticItem => { "static item".into() } - Categorization::ThreadLocal(..) => { + Categorization::ThreadLocal => { "thread-local static item".into() } - Categorization::Rvalue(..) => { + Categorization::Rvalue => { "non-place".into() } Categorization::Local(vid) => { diff --git a/src/librustc/middle/reachable.rs b/src/librustc/middle/reachable.rs index c2bcd46216..8be64bf64b 100644 --- a/src/librustc/middle/reachable.rs +++ b/src/librustc/middle/reachable.rs @@ -32,7 +32,7 @@ fn item_might_be_inlined(tcx: TyCtxt<'tcx>, item: &hir::Item, attrs: CodegenFnAt return true } - match item.node { + match item.kind { hir::ItemKind::Fn(_, header, ..) if header.is_const() => { return true; } @@ -55,7 +55,7 @@ fn method_might_be_inlined( if codegen_fn_attrs.requests_inline() || generics.requires_monomorphization(tcx) { return true } - if let hir::ImplItemKind::Method(method_sig, _) = &impl_item.node { + if let hir::ImplItemKind::Method(method_sig, _) = &impl_item.kind { if method_sig.header.is_const() { return true } @@ -100,7 +100,7 @@ impl<'a, 'tcx> Visitor<'tcx> for ReachableContext<'a, 'tcx> { } fn visit_expr(&mut self, expr: &'tcx hir::Expr) { - let res = match expr.node { + let res = match expr.kind { hir::ExprKind::Path(ref qpath) => { Some(self.tables.qpath_res(qpath, expr.hir_id)) } @@ -157,14 +157,14 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { match self.tcx.hir().find(hir_id) { Some(Node::Item(item)) => { - match item.node { + match item.kind { hir::ItemKind::Fn(..) => item_might_be_inlined(self.tcx, &item, self.tcx.codegen_fn_attrs(def_id)), _ => false, } } Some(Node::TraitItem(trait_method)) => { - match trait_method.node { + match trait_method.kind { hir::TraitItemKind::Const(_, ref default) => default.is_some(), hir::TraitItemKind::Method(_, hir::TraitMethod::Provided(_)) => true, hir::TraitItemKind::Method(_, hir::TraitMethod::Required(_)) | @@ -172,7 +172,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { } } Some(Node::ImplItem(impl_item)) => { - match impl_item.node { + match impl_item.kind { hir::ImplItemKind::Const(..) => true, hir::ImplItemKind::Method(..) 
=> { let attrs = self.tcx.codegen_fn_attrs(def_id); @@ -187,7 +187,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { // type of the impl require inlining, this method // does too. let impl_hir_id = self.tcx.hir().as_local_hir_id(impl_did).unwrap(); - match self.tcx.hir().expect_item(impl_hir_id).node { + match self.tcx.hir().expect_item(impl_hir_id).kind { hir::ItemKind::Impl(..) => { let generics = self.tcx.generics_of(impl_did); generics.requires_monomorphization(self.tcx) @@ -225,7 +225,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { // If we are building an executable, only explicitly extern // types need to be exported. if let Node::Item(item) = *node { - let reachable = if let hir::ItemKind::Fn(_, header, ..) = item.node { + let reachable = if let hir::ItemKind::Fn(_, header, ..) = item.kind { header.abi != Abi::Rust } else { false @@ -249,7 +249,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { match *node { Node::Item(item) => { - match item.node { + match item.kind { hir::ItemKind::Fn(.., body) => { let def_id = self.tcx.hir().local_def_id(item.hir_id); if item_might_be_inlined(self.tcx, @@ -286,7 +286,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { } } Node::TraitItem(trait_method) => { - match trait_method.node { + match trait_method.kind { hir::TraitItemKind::Const(_, None) | hir::TraitItemKind::Method(_, hir::TraitMethod::Required(_)) => { // Keep going, nothing to get exported @@ -299,7 +299,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { } } Node::ImplItem(impl_item) => { - match impl_item.node { + match impl_item.kind { hir::ImplItemKind::Const(_, body) => { self.visit_nested_body(body); } @@ -313,7 +313,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { hir::ImplItemKind::TyAlias(_) => {} } } - Node::Expr(&hir::Expr { node: hir::ExprKind::Closure(.., body, _, _), .. }) => { + Node::Expr(&hir::Expr { kind: hir::ExprKind::Closure(.., body, _, _), .. }) => { self.visit_nested_body(body); } // Nothing to recurse on for these @@ -361,7 +361,7 @@ impl<'a, 'tcx> ItemLikeVisitor<'tcx> for CollectPrivateImplItemsVisitor<'a, 'tcx } // We need only trait impls here, not inherent impls, and only non-exported ones - if let hir::ItemKind::Impl(.., Some(ref trait_ref), _, ref impl_item_refs) = item.node { + if let hir::ItemKind::Impl(.., Some(ref trait_ref), _, ref impl_item_refs) = item.kind { if !self.access_levels.is_reachable(item.hir_id) { self.worklist.extend(impl_item_refs.iter().map(|ii_ref| ii_ref.id.hir_id)); diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index 87470140e3..9ff205228a 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -16,8 +16,8 @@ use crate::util::nodemap::{FxHashMap, FxHashSet}; use crate::ty::{self, DefIdTree, TyCtxt}; use crate::ty::query::Providers; -use rustc_data_structures::indexed_vec::Idx; -use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult}; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; +use rustc_index::vec::Idx; use rustc_macros::HashStable; use syntax::source_map; use syntax_pos::{Span, DUMMY_SP}; @@ -131,7 +131,7 @@ pub enum ScopeData { Remainder(FirstStatementIndex) } -newtype_index! { +rustc_index::newtype_index! { /// Represents a subscope of `block` for a binding that is introduced /// by `block.stmts[first_statement_index]`. Such subscopes represent /// a suffix of the block. 
Note that each subscope does not include @@ -796,7 +796,7 @@ fn resolve_block<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, blk: &'tcx h // index information.) for (i, statement) in blk.stmts.iter().enumerate() { - match statement.node { + match statement.kind { hir::StmtKind::Local(..) | hir::StmtKind::Item(..) => { // Each declaration introduces a subscope for bindings @@ -850,7 +850,7 @@ fn resolve_pat<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, pat: &'tcx hir visitor.record_child_scope(Scope { id: pat.hir_id.local_id, data: ScopeData::Node }); // If this is a binding then record the lifetime of that binding. - if let PatKind::Binding(..) = pat.node { + if let PatKind::Binding(..) = pat.kind { record_var_lifetime(visitor, pat.hir_id.local_id, pat.span); } @@ -893,7 +893,7 @@ fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx h let mut terminating = |id: hir::ItemLocalId| { terminating_scopes.insert(id); }; - match expr.node { + match expr.kind { // Conditional or repeating scopes are always terminating // scopes, meaning that temporaries cannot outlive them. // This ensures fixed size stacks. @@ -996,7 +996,7 @@ fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx h // properly, we can't miss any types. - match expr.node { + match expr.kind { // Manually recurse over closures, because they are the only // case of nested bodies that share the parent environment. hir::ExprKind::Closure(.., body, _, _) => { @@ -1053,7 +1053,7 @@ fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx h debug!("resolve_expr post-increment {}, expr = {:?}", visitor.expr_and_pat_count, expr); - if let hir::ExprKind::Yield(_, source) = &expr.node { + if let hir::ExprKind::Yield(_, source) = &expr.kind { // Mark this expr's scope and all parent scopes as containing `yield`. let mut scope = Scope { id: expr.hir_id.local_id, data: ScopeData::Node }; loop { @@ -1198,7 +1198,7 @@ fn resolve_local<'tcx>( // In the former case (the implicit ref version), the temporary is created by the // & expression, and its lifetime would be extended to the end of the block (due // to a different rule, not the below code). - match pat.node { + match pat.kind { PatKind::Binding(hir::BindingAnnotation::Ref, ..) | PatKind::Binding(hir::BindingAnnotation::RefMut, ..) => true, @@ -1240,7 +1240,7 @@ fn resolve_local<'tcx>( expr: &hir::Expr, blk_id: Option, ) { - match expr.node { + match expr.kind { hir::ExprKind::AddrOf(_, ref subexpr) => { record_rvalue_scope_if_borrow_expr(visitor, &subexpr, blk_id); record_rvalue_scope(visitor, &subexpr, blk_id); @@ -1300,7 +1300,7 @@ fn resolve_local<'tcx>( // outer expression. 
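As an aside for readers not steeped in these rules: `resolve_local` is computing, for ordinary Rust programs, when a temporary created in a `let` initializer outlives the statement. A minimal surface-level illustration of the behaviour being encoded (plain Rust with a made-up helper, not compiler-internal code):

```rust
fn make() -> String {
    String::from("hello")
}

fn main() {
    // Borrowing the temporary directly in the `let` initializer extends its
    // lifetime to the end of the enclosing block, so this compiles and runs.
    let extended: &String = &make();
    println!("{}", extended.len());

    // A `ref` binding has the same effect: the matched rvalue lives as long
    // as the binding itself.
    let ref by_ref = make();
    println!("{}", by_ref.len());
}
```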
visitor.scope_tree.record_rvalue_scope(expr.hir_id.local_id, blk_scope); - match expr.node { + match expr.kind { hir::ExprKind::AddrOf(_, ref subexpr) | hir::ExprKind::Unary(hir::UnDeref, ref subexpr) | hir::ExprKind::Field(ref subexpr, _) | @@ -1491,9 +1491,7 @@ pub fn provide(providers: &mut Providers<'_>) { } impl<'a> HashStable> for ScopeTree { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { let ScopeTree { root_body, root_parent, diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs index d833a34385..a122d84a5a 100644 --- a/src/librustc/middle/resolve_lifetime.rs +++ b/src/librustc/middle/resolve_lifetime.rs @@ -459,7 +459,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> { } fn visit_item(&mut self, item: &'tcx hir::Item) { - match item.node { + match item.kind { hir::ItemKind::Fn(ref decl, _, ref generics, _) => { self.visit_early_late(None, decl, generics, |this| { intravisit::walk_item(this, item); @@ -504,12 +504,12 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> { | hir::ItemKind::Impl(_, _, _, ref generics, ..) => { // Impls permit `'_` to be used and it is equivalent to "some fresh lifetime name". // This is not true for other kinds of items.x - let track_lifetime_uses = match item.node { + let track_lifetime_uses = match item.kind { hir::ItemKind::Impl(..) => true, _ => false, }; // These kinds of items have only early-bound lifetime parameters. - let mut index = if sub_items_have_self_param(&item.node) { + let mut index = if sub_items_have_self_param(&item.kind) { 1 // Self comes before lifetimes } else { 0 @@ -541,7 +541,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> { } fn visit_foreign_item(&mut self, item: &'tcx hir::ForeignItem) { - match item.node { + match item.kind { hir::ForeignItemKind::Fn(ref decl, _, ref generics) => { self.visit_early_late(None, decl, generics, |this| { intravisit::walk_foreign_item(this, item); @@ -558,8 +558,8 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> { fn visit_ty(&mut self, ty: &'tcx hir::Ty) { debug!("visit_ty: id={:?} ty={:?}", ty.hir_id, ty); - debug!("visit_ty: ty.node={:?}", ty.node); - match ty.node { + debug!("visit_ty: ty.kind={:?}", ty.kind); + match ty.kind { hir::TyKind::BareFn(ref c) => { let next_early_index = self.next_early_index(); let was_in_fn_syntax = self.is_in_fn_syntax; @@ -637,8 +637,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> { // `type MyAnonTy<'b> = impl MyTrait<'b>;` // ^ ^ this gets resolved in the scope of // the opaque_ty generics - let (generics, bounds) = match self.tcx.hir().expect_item(item_id.id).node - { + let (generics, bounds) = match self.tcx.hir().expect_item(item_id.id).kind { // Named opaque `impl Trait` types are reached via `TyKind::Path`. // This arm is for `impl Trait` in the types of statics, constants and locals. hir::ItemKind::OpaqueTy(hir::OpaqueTy { @@ -709,15 +708,22 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> { match param.kind { GenericParamKind::Lifetime { .. 
} => { let (name, reg) = Region::early(&self.tcx.hir(), &mut index, ¶m); + let def_id = if let Region::EarlyBound(_ ,def_id , _) = reg { + def_id + } else { + bug!(); + }; if let hir::ParamName::Plain(param_name) = name { if param_name.name == kw::UnderscoreLifetime { // Pick the elided lifetime "definition" if one exists // and use it to make an elision scope. + self.lifetime_uses.insert(def_id.clone(), LifetimeUseSet::Many); elision = Some(reg); } else { lifetimes.insert(name, reg); } } else { + self.lifetime_uses.insert(def_id.clone(), LifetimeUseSet::Many); lifetimes.insert(name, reg); } } @@ -765,20 +771,13 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> { }); } } - hir::TyKind::CVarArgs(ref lt) => { - // Resolve the generated lifetime for the C-variadic arguments. - // The lifetime is generated in AST -> HIR lowering. - if lt.name.is_elided() { - self.resolve_elided_lifetimes(vec![lt]) - } - } _ => intravisit::walk_ty(self, ty), } } fn visit_trait_item(&mut self, trait_item: &'tcx hir::TraitItem) { use self::hir::TraitItemKind::*; - match trait_item.node { + match trait_item.kind { Method(ref sig, _) => { let tcx = self.tcx; self.visit_early_late( @@ -830,7 +829,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> { fn visit_impl_item(&mut self, impl_item: &'tcx hir::ImplItem) { use self::hir::ImplItemKind::*; - match impl_item.node { + match impl_item.kind { Method(ref sig, _) => { let tcx = self.tcx; self.visit_early_late( @@ -1214,7 +1213,7 @@ fn extract_labels(ctxt: &mut LifetimeContext<'_, '_>, body: &hir::Body) { } fn expression_label(ex: &hir::Expr) -> Option { - if let hir::ExprKind::Loop(_, Some(label), _) = ex.node { + if let hir::ExprKind::Loop(_, Some(label), _) = ex.kind { Some(label.ident) } else { None @@ -1263,7 +1262,7 @@ fn extract_labels(ctxt: &mut LifetimeContext<'_, '_>, body: &hir::Body) { fn compute_object_lifetime_defaults(tcx: TyCtxt<'_>) -> HirIdMap> { let mut map = HirIdMap::default(); for item in tcx.hir().krate().items.values() { - match item.node { + match item.kind { hir::ItemKind::Struct(_, ref generics) | hir::ItemKind::Union(_, ref generics) | hir::ItemKind::Enum(_, ref generics) @@ -1352,7 +1351,7 @@ fn object_lifetime_defaults_for_item( continue; } - let res = match data.bounded_ty.node { + let res = match data.bounded_ty.kind { hir::TyKind::Path(hir::QPath::Resolved(None, ref path)) => path.res, _ => continue, }; @@ -1487,7 +1486,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { let mut elide_use = None; let mut find_arg_use_span = |inputs: &hir::HirVec| { for input in inputs { - match input.node { + match input.kind { hir::TyKind::Rptr(lt, _) => { if lt.name.ident() == name { // include the trailing whitespace between the lifetime and type names @@ -1525,12 +1524,12 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { { match parent { Node::Item(item) => { - if let hir::ItemKind::Fn(decl, _, _, _) = &item.node { + if let hir::ItemKind::Fn(decl, _, _, _) = &item.kind { find_arg_use_span(&decl.inputs); } }, Node::ImplItem(impl_item) => { - if let hir::ImplItemKind::Method(sig, _) = &impl_item.node { + if let hir::ImplItemKind::Method(sig, _) = &impl_item.kind { find_arg_use_span(&sig.decl.inputs); } } @@ -1623,7 +1622,6 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { _ => None, } { debug!("id = {:?} span = {:?} name = {:?}", id, span, name); - if name.name == kw::UnderscoreLifetime { continue; } @@ -1733,10 +1731,10 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { let mut index = 0; if let Some(parent_id) = parent_id { let parent = 
self.tcx.hir().expect_item(parent_id); - if sub_items_have_self_param(&parent.node) { + if sub_items_have_self_param(&parent.kind) { index += 1; // Self comes before lifetimes } - match parent.node { + match parent.kind { hir::ItemKind::Trait(_, _, ref generics, ..) | hir::ItemKind::Impl(_, _, _, ref generics, ..) => { index += generics.params.len() as u32; @@ -1867,15 +1865,15 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { let fn_id = self.tcx.hir().body_owner(body_id); match self.tcx.hir().get(fn_id) { Node::Item(&hir::Item { - node: hir::ItemKind::Fn(..), + kind: hir::ItemKind::Fn(..), .. }) | Node::TraitItem(&hir::TraitItem { - node: hir::TraitItemKind::Method(..), + kind: hir::TraitItemKind::Method(..), .. }) | Node::ImplItem(&hir::ImplItem { - node: hir::ImplItemKind::Method(..), + kind: hir::ImplItemKind::Method(..), .. }) => { let scope = self.tcx.hir().local_def_id(fn_id); @@ -2165,18 +2163,18 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { let body = match self.tcx.hir().get(parent) { // `fn` definitions and methods. Node::Item(&hir::Item { - node: hir::ItemKind::Fn(.., body), + kind: hir::ItemKind::Fn(.., body), .. }) => Some(body), Node::TraitItem(&hir::TraitItem { - node: hir::TraitItemKind::Method(_, ref m), + kind: hir::TraitItemKind::Method(_, ref m), .. }) => { if let hir::ItemKind::Trait(.., ref trait_items) = self.tcx .hir() .expect_item(self.tcx.hir().get_parent_item(parent)) - .node + .kind { assoc_item_kind = trait_items .iter() @@ -2190,13 +2188,13 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { } Node::ImplItem(&hir::ImplItem { - node: hir::ImplItemKind::Method(_, body), + kind: hir::ImplItemKind::Method(_, body), .. }) => { if let hir::ItemKind::Impl(.., ref self_ty, ref impl_items) = self.tcx .hir() .expect_item(self.tcx.hir().get_parent_item(parent)) - .node + .kind { impl_self = Some(self_ty); assoc_item_kind = impl_items @@ -2270,8 +2268,8 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { } fn visit_ty(&mut self, ty: &'a hir::Ty) { - if let hir::TyKind::Rptr(lifetime_ref, ref mt) = ty.node { - if let hir::TyKind::Path(hir::QPath::Resolved(None, ref path)) = mt.ty.node + if let hir::TyKind::Rptr(lifetime_ref, ref mt) = ty.kind { + if let hir::TyKind::Path(hir::QPath::Resolved(None, ref path)) = mt.ty.kind { if self.is_self_ty(path.res) { if let Some(lifetime) = self.map.defs.get(&lifetime_ref.hir_id) { @@ -2286,7 +2284,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { let mut visitor = SelfVisitor { map: self.map, - impl_self: impl_self.map(|ty| &ty.node), + impl_self: impl_self.map(|ty| &ty.kind), lifetime: Set1::Empty, }; visitor.visit_ty(&inputs[0]); @@ -2364,10 +2362,10 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { } fn visit_ty(&mut self, ty: &hir::Ty) { - if let hir::TyKind::BareFn(_) = ty.node { + if let hir::TyKind::BareFn(_) = ty.kind { self.outer_index.shift_in(1); } - match ty.node { + match ty.kind { hir::TyKind::TraitObject(ref bounds, ref lifetime) => { for bound in bounds { self.visit_poly_trait_ref(bound, hir::TraitBoundModifier::None); @@ -2379,12 +2377,11 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { self.visit_lifetime(lifetime); } } - hir::TyKind::CVarArgs(_) => {} _ => { intravisit::walk_ty(self, ty); } } - if let hir::TyKind::BareFn(_) = ty.node { + if let hir::TyKind::BareFn(_) = ty.kind { self.outer_index.shift_out(1); } } @@ -2991,7 +2988,7 @@ fn insert_late_bound_lifetimes( } fn visit_ty(&mut self, ty: &'v hir::Ty) { - match ty.node { + match ty.kind { hir::TyKind::Path(hir::QPath::Resolved(Some(_), _)) | 
hir::TyKind::Path(hir::QPath::TypeRelative(..)) => { // ignore lifetimes appearing in associated type diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index c06a0feb6a..93d0627ac6 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -25,7 +25,7 @@ use crate::util::nodemap::{FxHashSet, FxHashMap}; use std::mem::replace; use std::cmp::Ordering; -#[derive(RustcEncodable, RustcDecodable, PartialEq, PartialOrd, Clone, Copy, Debug, Eq, Hash)] +#[derive(PartialEq, Clone, Copy, Debug)] pub enum StabilityLevel { Unstable, Stable, @@ -199,8 +199,12 @@ impl<'a, 'tcx> Annotator<'a, 'tcx> { let name = attr.name_or_empty(); if [sym::unstable, sym::stable, sym::rustc_deprecated].contains(&name) { attr::mark_used(attr); - self.tcx.sess.span_err(attr.span, "stability attributes may not be used \ - outside of the standard library"); + struct_span_err!( + self.tcx.sess, + attr.span, + E0734, + "stability attributes may not be used outside of the standard library", + ).emit(); } } @@ -246,7 +250,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Annotator<'a, 'tcx> { fn visit_item(&mut self, i: &'tcx Item) { let orig_in_trait_impl = self.in_trait_impl; let mut kind = AnnotationKind::Required; - match i.node { + match i.kind { // Inherent impls and foreign modules serve only as containers for other items, // they don't have their own stability. They still can be annotated as unstable // and propagate this unstability to children, but this annotation is completely @@ -344,14 +348,14 @@ impl<'a, 'tcx> Visitor<'tcx> for MissingStabilityAnnotations<'a, 'tcx> { } fn visit_item(&mut self, i: &'tcx Item) { - match i.node { + match i.kind { // Inherent impls and foreign modules serve only as containers for other items, // they don't have their own stability. They still can be annotated as unstable // and propagate this unstability to children, but this annotation is completely // optional. They inherit stability from their parents when unannotated. hir::ItemKind::Impl(.., None, _, _) | hir::ItemKind::ForeignMod(..) 
=> {} - _ => self.check_missing_stability(i.hir_id, i.span, i.node.descriptive_variant()) + _ => self.check_missing_stability(i.hir_id, i.span, i.kind.descriptive_variant()) } intravisit::walk_item(self, i) @@ -382,7 +386,7 @@ impl<'a, 'tcx> Visitor<'tcx> for MissingStabilityAnnotations<'a, 'tcx> { } fn visit_foreign_item(&mut self, i: &'tcx hir::ForeignItem) { - self.check_missing_stability(i.hir_id, i.span, i.node.descriptive_variant()); + self.check_missing_stability(i.hir_id, i.span, i.kind.descriptive_variant()); intravisit::walk_foreign_item(self, i); } @@ -481,7 +485,13 @@ pub fn provide(providers: &mut Providers<'_>) { } pub fn report_unstable( - sess: &Session, feature: Symbol, reason: Option, issue: u32, is_soft: bool, span: Span + sess: &Session, + feature: Symbol, + reason: Option, + issue: u32, + is_soft: bool, + span: Span, + soft_handler: impl FnOnce(&'static lint::Lint, Span, &str), ) { let msg = match reason { Some(r) => format!("use of unstable library feature '{}': {}", feature, r), @@ -507,7 +517,7 @@ pub fn report_unstable( let fresh = sess.one_time_diagnostics.borrow_mut().insert(error_id); if fresh { if is_soft { - sess.buffer_lint(lint::builtin::SOFT_UNSTABLE, CRATE_NODE_ID, span, &msg); + soft_handler(lint::builtin::SOFT_UNSTABLE, span, &msg) } else { emit_feature_err( &sess.parse_sess, feature, span, GateIssue::Library(Some(issue)), &msg @@ -576,7 +586,7 @@ pub fn rustc_deprecation_message(depr: &RustcDeprecation, path: &str) -> (String } pub fn early_report_deprecation( - sess: &Session, + lint_buffer: &'a mut lint::LintBuffer, message: &str, suggestion: Option, lint: &'static Lint, @@ -587,7 +597,7 @@ pub fn early_report_deprecation( } let diag = BuiltinLintDiagnostics::DeprecatedMacro(suggestion, span); - sess.buffer_lint_with_diagnostic(lint, CRATE_NODE_ID, span, message, diag); + lint_buffer.buffer_lint_with_diagnostic(lint, CRATE_NODE_ID, span, message, diag); } fn late_report_deprecation( @@ -775,10 +785,12 @@ impl<'tcx> TyCtxt<'tcx> { /// Additionally, this function will also check if the item is deprecated. If so, and `id` is /// not `None`, a deprecated lint attached to `id` will be emitted. pub fn check_stability(self, def_id: DefId, id: Option, span: Span) { + let soft_handler = + |lint, span, msg: &_| self.lint_hir(lint, id.unwrap_or(hir::CRATE_HIR_ID), span, msg); match self.eval_stability(def_id, id, span) { EvalResult::Allow => {} EvalResult::Deny { feature, reason, issue, is_soft } => - report_unstable(self.sess, feature, reason, issue, is_soft, span), + report_unstable(self.sess, feature, reason, issue, is_soft, span, soft_handler), EvalResult::Unmarked => { // The API could be uncallable for other reasons, for example when a private module // was referenced. @@ -797,7 +809,7 @@ impl Visitor<'tcx> for Checker<'tcx> { } fn visit_item(&mut self, item: &'tcx hir::Item) { - match item.node { + match item.kind { hir::ItemKind::ExternCrate(_) => { // compiler-generated `extern crate` items have a dummy span. if item.span.is_dummy() { return } @@ -893,11 +905,10 @@ pub fn check_unused_or_stable_features(tcx: TyCtxt<'_>) { // Warn if the user has enabled an already-stable lang feature. unnecessary_stable_feature_lint(tcx, span, feature, since); } - if lang_features.contains(&feature) { + if !lang_features.insert(feature) { // Warn if the user enables a lang feature multiple times. 
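The `if !lang_features.insert(feature)` change in the hunk above leans on `HashSet::insert` returning `false` when the value is already present, folding the old `contains` check and the later `insert` into a single lookup. A small self-contained sketch of the same pattern, with invented feature names:

```rust
use std::collections::HashSet;

fn main() {
    let declared = ["async_await", "never_type", "async_await"];
    let mut seen = HashSet::new();

    for &feature in &declared {
        // `insert` returns `false` if the feature was already recorded, so a
        // single call both registers the feature and flags the duplicate.
        if !seen.insert(feature) {
            eprintln!("warning: feature `{}` declared more than once", feature);
        }
    }
}
```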
duplicate_feature_err(tcx.sess, span, feature); } - lang_features.insert(feature); } let declared_lib_features = &tcx.features().declared_lib_features; diff --git a/src/librustc/mir/cache.rs b/src/librustc/mir/cache.rs index 1f60487784..9b41366741 100644 --- a/src/librustc/mir/cache.rs +++ b/src/librustc/mir/cache.rs @@ -1,6 +1,6 @@ -use rustc_data_structures::indexed_vec::IndexVec; +use rustc_index::vec::IndexVec; use rustc_data_structures::sync::{RwLock, MappedReadGuard, ReadGuard}; -use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult}; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_serialize::{Encodable, Encoder, Decodable, Decoder}; use crate::ich::StableHashingContext; use crate::mir::{Body, BasicBlock}; @@ -24,9 +24,7 @@ impl rustc_serialize::Decodable for Cache { } impl<'a> HashStable> for Cache { - fn hash_stable(&self, - _: &mut StableHashingContext<'a>, - _: &mut StableHasher) { + fn hash_stable(&self, _: &mut StableHashingContext<'a>, _: &mut StableHasher) { // Do nothing. } } diff --git a/src/librustc/mir/interpret/allocation.rs b/src/librustc/mir/interpret/allocation.rs index 15e6cb6bca..aa8ac4902a 100644 --- a/src/librustc/mir/interpret/allocation.rs +++ b/src/librustc/mir/interpret/allocation.rs @@ -245,6 +245,8 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra> Allocation { /// as a slice. /// /// It is the caller's responsibility to check bounds and alignment beforehand. + /// Most likely, you want to use the `PlaceTy` and `OperandTy`-based methods + /// on `InterpCx` instead. #[inline] pub fn get_bytes( &self, @@ -275,6 +277,8 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra> Allocation { /// so be sure to actually put data there! /// /// It is the caller's responsibility to check bounds and alignment beforehand. + /// Most likely, you want to use the `PlaceTy` and `OperandTy`-based methods + /// on `InterpCx` instead. pub fn get_bytes_mut( &mut self, cx: &impl HasDataLayout, @@ -297,6 +301,8 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra> Allocation { impl<'tcx, Tag: Copy, Extra: AllocationExtra> Allocation { /// Reads bytes until a `0` is encountered. Will error if the end of the allocation is reached /// before a `0` is found. + /// + /// Most likely, you want to call `Memory::read_c_str` instead of this method. pub fn read_c_str( &self, cx: &impl HasDataLayout, @@ -342,33 +348,25 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra> Allocation { /// Writes `src` to the memory starting at `ptr.offset`. /// /// It is the caller's responsibility to check bounds and alignment beforehand. + /// Most likely, you want to call `Memory::write_bytes` instead of this method. pub fn write_bytes( &mut self, cx: &impl HasDataLayout, ptr: Pointer, - src: &[u8], + src: impl IntoIterator, ) -> InterpResult<'tcx> { - let bytes = self.get_bytes_mut(cx, ptr, Size::from_bytes(src.len() as u64))?; - bytes.clone_from_slice(src); - Ok(()) - } - - /// Sets `count` bytes starting at `ptr.offset` with `val`. Basically `memset`. - /// - /// It is the caller's responsibility to check bounds and alignment beforehand. 
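The doc comments added above point callers at the higher-level `Memory`/`InterpCx` helpers; the heart of a `read_c_str`-style routine is only a scan for the first `0` byte. A simplified stand-alone sketch (plain slices instead of allocations, and `Option` instead of the interpreter's error type):

```rust
/// Returns the bytes before the first NUL, or `None` if the end of the
/// buffer is reached first (the "ran off the end of the allocation" case).
fn read_c_str(buf: &[u8]) -> Option<&[u8]> {
    buf.iter().position(|&b| b == 0).map(|nul| &buf[..nul])
}

fn main() {
    assert_eq!(read_c_str(b"hello\0world"), Some(&b"hello"[..]));
    assert_eq!(read_c_str(b"no terminator"), None);
}
```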
- pub fn write_repeat( - &mut self, - cx: &impl HasDataLayout, - ptr: Pointer, - val: u8, - count: Size - ) -> InterpResult<'tcx> - { - let bytes = self.get_bytes_mut(cx, ptr, count)?; - for b in bytes { - *b = val; + let mut src = src.into_iter(); + let (lower, upper) = src.size_hint(); + let len = upper.expect("can only write bounded iterators"); + assert_eq!(lower, len, "can only write iterators with a precise length"); + let bytes = self.get_bytes_mut(cx, ptr, Size::from_bytes(len as u64))?; + // `zip` would stop when the first iterator ends; we want to definitely + // cover all of `bytes`. + for dest in bytes { + *dest = src.next().expect("iterator was shorter than it said it would be"); } + src.next().expect_none("iterator was longer than it said it would be"); Ok(()) } @@ -380,6 +378,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra> Allocation { /// pointers being valid for ZSTs. /// /// It is the caller's responsibility to check bounds and alignment beforehand. + /// Most likely, you want to call `InterpCx::read_scalar` instead of this method. pub fn read_scalar( &self, cx: &impl HasDataLayout, @@ -418,6 +417,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra> Allocation { /// Reads a pointer-sized scalar. /// /// It is the caller's responsibility to check bounds and alignment beforehand. + /// Most likely, you want to call `InterpCx::read_scalar` instead of this method. pub fn read_ptr_sized( &self, cx: &impl HasDataLayout, @@ -435,6 +435,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra> Allocation { /// pointers being valid for ZSTs. /// /// It is the caller's responsibility to check bounds and alignment beforehand. + /// Most likely, you want to call `InterpCx::write_scalar` instead of this method. pub fn write_scalar( &mut self, cx: &impl HasDataLayout, @@ -477,6 +478,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra> Allocation { /// Writes a pointer-sized scalar. /// /// It is the caller's responsibility to check bounds and alignment beforehand. + /// Most likely, you want to call `InterpCx::write_scalar` instead of this method. pub fn write_ptr_sized( &mut self, cx: &impl HasDataLayout, diff --git a/src/librustc/mir/interpret/error.rs b/src/librustc/mir/interpret/error.rs index ac99ccd45e..d918b9ee67 100644 --- a/src/librustc/mir/interpret/error.rs +++ b/src/librustc/mir/interpret/error.rs @@ -363,6 +363,8 @@ pub enum UndefinedBehaviorInfo { UbExperimental(String), /// Unreachable code was executed. Unreachable, + /// An enum discriminant was set to a value which was outside the range of valid values. 
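The `write_bytes` rewrite just above accepts any `impl IntoIterator<Item = u8>` but uses `size_hint` to insist that the iterator declares an exact length, and then checks that it actually yields that many bytes. A stand-alone sketch of that checking pattern, substituting stable `assert!`s for the unstable `Option::expect_none` used in the hunk:

```rust
fn write_exact(dest: &mut [u8], src: impl IntoIterator<Item = u8>) {
    let mut src = src.into_iter();
    let (lower, upper) = src.size_hint();
    let len = upper.expect("can only write bounded iterators");
    assert_eq!(lower, len, "can only write iterators with a precise length");
    assert_eq!(dest.len(), len, "destination must match the declared length");

    // Walk `dest` explicitly instead of using `zip`, so that an iterator that
    // ends early is caught rather than silently leaving bytes untouched.
    for byte in dest.iter_mut() {
        *byte = src.next().expect("iterator was shorter than it said it would be");
    }
    assert!(src.next().is_none(), "iterator was longer than it said it would be");
}

fn main() {
    let mut buf = [0u8; 4];
    write_exact(&mut buf, vec![1u8, 2, 3, 4]);
    assert_eq!(buf, [1, 2, 3, 4]);
}
```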
+ InvalidDiscriminant(ScalarMaybeUndef), } impl fmt::Debug for UndefinedBehaviorInfo { @@ -373,6 +375,8 @@ impl fmt::Debug for UndefinedBehaviorInfo { write!(f, "{}", msg), Unreachable => write!(f, "entered unreachable code"), + InvalidDiscriminant(val) => + write!(f, "encountered invalid enum discriminant {}", val), } } } @@ -400,7 +404,6 @@ pub enum UnsupportedOpInfo<'tcx> { InvalidMemoryAccess, InvalidFunctionPointer, InvalidBool, - InvalidDiscriminant(ScalarMaybeUndef), PointerOutOfBounds { ptr: Pointer, msg: CheckInAllocMsg, @@ -485,8 +488,6 @@ impl fmt::Debug for UnsupportedOpInfo<'tcx> { write!(f, "incorrect alloc info: expected size {} and align {}, \ got size {} and align {}", size.bytes(), align.bytes(), size2.bytes(), align2.bytes()), - InvalidDiscriminant(val) => - write!(f, "encountered invalid enum discriminant {}", val), InvalidMemoryAccess => write!(f, "tried to access memory through an invalid pointer"), DanglingPointerDeref => diff --git a/src/librustc/mir/interpret/mod.rs b/src/librustc/mir/interpret/mod.rs index 23433c2e88..6c31d54e08 100644 --- a/src/librustc/mir/interpret/mod.rs +++ b/src/librustc/mir/interpret/mod.rs @@ -101,7 +101,7 @@ pub use self::error::{ InvalidProgramInfo, ResourceExhaustionInfo, UndefinedBehaviorInfo, }; -pub use self::value::{Scalar, ScalarMaybeUndef, RawConst, ConstValue}; +pub use self::value::{Scalar, ScalarMaybeUndef, RawConst, ConstValue, get_slice_bytes}; pub use self::allocation::{Allocation, AllocationExtra, Relocations, UndefMask}; @@ -109,7 +109,7 @@ pub use self::pointer::{Pointer, PointerArithmetic, CheckInAllocMsg}; use crate::mir; use crate::hir::def_id::DefId; -use crate::ty::{self, TyCtxt, Instance, subst::UnpackedKind}; +use crate::ty::{self, TyCtxt, Instance, subst::GenericArgKind}; use crate::ty::codec::TyDecoder; use crate::ty::layout::{self, Size}; use std::io; @@ -426,7 +426,7 @@ impl<'tcx> AllocMap<'tcx> { // this for generic functions. Lifetime parameters are ignored. let is_generic = instance.substs.into_iter().any(|kind| { match kind.unpack() { - UnpackedKind::Lifetime(_) => false, + GenericArgKind::Lifetime(_) => false, _ => true, } }); @@ -470,6 +470,14 @@ impl<'tcx> AllocMap<'tcx> { } } + /// Panics if the `AllocId` does not refer to a function + pub fn unwrap_fn(&self, id: AllocId) -> Instance<'tcx> { + match self.get(id) { + Some(GlobalAlloc::Function(instance)) => instance, + _ => bug!("expected allocation ID {} to point to a function", id), + } + } + /// Freezes an `AllocId` created with `reserve` by pointing it at an `Allocation`. Trying to /// call this function twice, even with the same `Allocation` will ICE the compiler. pub fn set_alloc_id_memory(&mut self, id: AllocId, mem: &'tcx Allocation) { diff --git a/src/librustc/mir/interpret/value.rs b/src/librustc/mir/interpret/value.rs index b8bc741419..ac16b8b884 100644 --- a/src/librustc/mir/interpret/value.rs +++ b/src/librustc/mir/interpret/value.rs @@ -5,11 +5,12 @@ use rustc_apfloat::{Float, ieee::{Double, Single}}; use crate::ty::{Ty, InferConst, ParamConst, layout::{HasDataLayout, Size}, subst::SubstsRef}; use crate::ty::PlaceholderConst; use crate::hir::def_id::DefId; +use crate::ty::{BoundVar, DebruijnIndex}; use super::{InterpResult, Pointer, PointerArithmetic, Allocation, AllocId, sign_extend, truncate}; /// Represents the result of a raw const operation, pre-validation. 
-#[derive(Copy, Clone, Debug, Eq, PartialEq, RustcEncodable, RustcDecodable, Hash, HashStable)] +#[derive(Clone, HashStable)] pub struct RawConst<'tcx> { // the value lives here, at offset 0, and that allocation definitely is a `AllocKind::Memory` // (so you can use `AllocMap::unwrap_memory`). @@ -28,6 +29,9 @@ pub enum ConstValue<'tcx> { /// Infer the value of the const. Infer(InferConst<'tcx>), + /// Bound const variable, used only when preparing a trait query. + Bound(DebruijnIndex, BoundVar), + /// A placeholder const - universally quantified higher-ranked const. Placeholder(PlaceholderConst), @@ -66,8 +70,9 @@ impl<'tcx> ConstValue<'tcx> { match *self { ConstValue::Param(_) | ConstValue::Infer(_) | + ConstValue::Bound(..) | ConstValue::Placeholder(_) | - ConstValue::ByRef{ .. } | + ConstValue::ByRef { .. } | ConstValue::Unevaluated(..) | ConstValue::Slice { .. } => None, ConstValue::Scalar(val) => Some(val), @@ -343,14 +348,19 @@ impl<'tcx, Tag> Scalar { } } + #[inline(always)] + pub fn check_raw(data: u128, size: u8, target_size: Size) { + assert_eq!(target_size.bytes(), size as u64); + assert_ne!(size, 0, "you should never look at the bits of a ZST"); + Scalar::check_data(data, size); + } + /// Do not call this method! Use either `assert_bits` or `force_bits`. #[inline] pub fn to_bits(self, target_size: Size) -> InterpResult<'tcx, u128> { match self { Scalar::Raw { data, size } => { - assert_eq!(target_size.bytes(), size as u64); - assert_ne!(size, 0, "you should never look at the bits of a ZST"); - Scalar::check_data(data, size); + Self::check_raw(data, size, target_size); Ok(data) } Scalar::Ptr(_) => throw_unsup!(ReadPointerAsBytes), @@ -482,7 +492,7 @@ impl From> for Scalar { } } -#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash, RustcEncodable, RustcDecodable)] +#[derive(Clone, Copy, Eq, PartialEq, RustcEncodable, RustcDecodable)] pub enum ScalarMaybeUndef { Scalar(Scalar), Undef, @@ -611,3 +621,18 @@ impl_stable_hash_for!(enum crate::mir::interpret::ScalarMaybeUndef { Scalar(v), Undef }); + +/// Gets the bytes of a constant slice value. 
+pub fn get_slice_bytes<'tcx>(cx: &impl HasDataLayout, val: ConstValue<'tcx>) -> &'tcx [u8] { + if let ConstValue::Slice { data, start, end } = val { + let len = end - start; + data.get_bytes( + cx, + // invent a pointer, only the offset is relevant anyway + Pointer::new(AllocId(0), Size::from_bytes(start as u64)), + Size::from_bytes(len as u64), + ).unwrap_or_else(|err| bug!("const slice is invalid: {:?}", err)) + } else { + bug!("expected const slice, but found another const value"); + } +} diff --git a/src/librustc/mir/mod.rs b/src/librustc/mir/mod.rs index 92efcf44de..f7e0d0131d 100644 --- a/src/librustc/mir/mod.rs +++ b/src/librustc/mir/mod.rs @@ -15,16 +15,15 @@ use crate::ty::layout::VariantIdx; use crate::ty::print::{FmtPrinter, Printer}; use crate::ty::subst::{Subst, SubstsRef}; use crate::ty::{ - self, AdtDef, CanonicalUserTypeAnnotations, ClosureSubsts, GeneratorSubsts, Region, Ty, TyCtxt, - UserTypeAnnotationIndex, + self, AdtDef, CanonicalUserTypeAnnotations, List, Region, Ty, TyCtxt, UserTypeAnnotationIndex, }; use polonius_engine::Atom; -use rustc_data_structures::bit_set::BitMatrix; +use rustc_index::bit_set::BitMatrix; use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::graph::dominators::{dominators, Dominators}; use rustc_data_structures::graph::{self, GraphPredecessors, GraphSuccessors}; -use rustc_data_structures::indexed_vec::{Idx, IndexVec}; +use rustc_index::vec::{Idx, IndexVec}; use rustc_data_structures::sync::Lrc; use rustc_data_structures::sync::MappedReadGuard; use rustc_macros::HashStable; @@ -37,7 +36,7 @@ use std::slice; use std::vec::IntoIter; use std::{iter, mem, option, u32}; use syntax::ast::Name; -use syntax::symbol::{InternedString, Symbol}; +use syntax::symbol::Symbol; use syntax_pos::{Span, DUMMY_SP}; pub use crate::mir::interpret::AssertMessage; @@ -262,6 +261,12 @@ impl<'tcx> Body<'tcx> { dominators(self) } + /// Returns `true` if a cycle exists in the control-flow graph that is reachable from the + /// `START_BLOCK`. + pub fn is_cfg_cyclic(&self) -> bool { + graph::is_cyclic(self) + } + #[inline] pub fn local_kind(&self, local: Local) -> LocalKind { let index = local.as_usize(); @@ -462,7 +467,9 @@ impl rustc_serialize::UseSpecializedDecodable for ClearCrossCrate< /// Grouped information about the source code origin of a MIR entity. /// Intended to be inspected by diagnostics and debuginfo. /// Most passes can work with it as a whole, within a single function. -#[derive(Copy, Clone, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, HashStable)] +// The unoffical Cranelift backend, at least as of #65828, needs `SourceInfo` to implement `Eq` and +// `Hash`. Please ping @bjorn3 if removing them. +#[derive(Copy, Clone, Debug, Eq, PartialEq, RustcEncodable, RustcDecodable, Hash, HashStable)] pub struct SourceInfo { /// The source span for the AST pertaining to this MIR entity. pub span: Span, @@ -575,7 +582,7 @@ impl BorrowKind { /////////////////////////////////////////////////////////////////////////// // Variables and temps -newtype_index! { +rustc_index::newtype_index! { pub struct Local { derive [HashStable] DEBUG_FORMAT = "_{}", @@ -602,7 +609,7 @@ pub enum LocalKind { ReturnPointer, } -#[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] +#[derive(Clone, Debug, RustcEncodable, RustcDecodable)] pub struct VarBindingForm<'tcx> { /// Is variable bound via `x`, `mut x`, `ref x`, or `ref mut x`? 
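The new `Body::is_cfg_cyclic` helper added above simply delegates to the generic graph code to ask whether any cycle is reachable from `START_BLOCK`. As a rough, self-contained illustration of what such a query computes (a three-colour depth-first search over a toy adjacency list, not the actual `rustc_data_structures::graph` implementation):

```rust
fn is_cyclic_from(start: usize, succ: &[Vec<usize>]) -> bool {
    // 0 = unvisited, 1 = on the current DFS path, 2 = finished.
    fn dfs(node: usize, succ: &[Vec<usize>], state: &mut [u8]) -> bool {
        state[node] = 1;
        for &next in &succ[node] {
            if state[next] == 1 {
                return true; // back edge: a cycle reachable from `start`
            }
            if state[next] == 0 && dfs(next, succ, state) {
                return true;
            }
        }
        state[node] = 2;
        false
    }
    let mut state = vec![0u8; succ.len()];
    dfs(start, succ, &mut state)
}

fn main() {
    // bb0 -> bb1 -> bb2 -> bb1 contains a loop reachable from the start block.
    let cyclic = vec![vec![1], vec![2], vec![1]];
    assert!(is_cyclic_from(0, &cyclic));

    // bb0 -> bb1 -> bb2 is a straight line.
    let acyclic = vec![vec![1], vec![2], vec![]];
    assert!(!is_cyclic_from(0, &acyclic));
}
```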
pub binding_mode: ty::BindingMode, @@ -624,7 +631,7 @@ pub struct VarBindingForm<'tcx> { pub pat_span: Span, } -#[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] +#[derive(Clone, Debug, RustcEncodable, RustcDecodable)] pub enum BindingForm<'tcx> { /// This is a binding for a non-`self` binding, or a `self` that has an explicit type. Var(VarBindingForm<'tcx>), @@ -635,7 +642,7 @@ pub enum BindingForm<'tcx> { } /// Represents what type of implicit self a function has, if any. -#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] +#[derive(Clone, Copy, PartialEq, Debug, RustcEncodable, RustcDecodable)] pub enum ImplicitSelfKind { /// Represents a `fn x(self);`. Imm, @@ -676,14 +683,10 @@ impl_stable_hash_for!(enum self::MirPhase { mod binding_form_impl { use crate::ich::StableHashingContext; - use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult}; + use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; impl<'a, 'tcx> HashStable> for super::BindingForm<'tcx> { - fn hash_stable( - &self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher, - ) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { use super::BindingForm::*; ::std::mem::discriminant(self).hash_stable(hcx, hasher); @@ -992,7 +995,7 @@ pub struct UpvarDebuginfo { /////////////////////////////////////////////////////////////////////////// // BasicBlock -newtype_index! { +rustc_index::newtype_index! { pub struct BasicBlock { derive [HashStable] DEBUG_FORMAT = "bb{}", @@ -1498,7 +1501,7 @@ impl<'tcx> TerminatorKind<'tcx> { Goto { .. } => vec!["".into()], SwitchInt { ref values, switch_ty, .. } => ty::tls::with(|tcx| { let param_env = ty::ParamEnv::empty(); - let switch_ty = tcx.lift_to_global(&switch_ty).unwrap(); + let switch_ty = tcx.lift(&switch_ty).unwrap(); let size = tcx.layout_of(param_env.and(switch_ty)).unwrap().size; values .iter() @@ -1710,15 +1713,17 @@ impl Debug for Statement<'_> { /// A path to a value; something that can be evaluated without /// changing or disturbing program state. #[derive( - Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable, HashStable, + Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, HashStable, )] pub struct Place<'tcx> { pub base: PlaceBase<'tcx>, /// projection out of a place (access a field, deref a pointer, etc) - pub projection: Box<[PlaceElem<'tcx>]>, + pub projection: &'tcx List>, } +impl<'tcx> rustc_serialize::UseSpecializedDecodable for Place<'tcx> {} + #[derive( Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable, HashStable, )] @@ -1822,6 +1827,8 @@ impl ProjectionElem { /// and the index is a local. pub type PlaceElem<'tcx> = ProjectionElem>; +impl<'tcx> Copy for PlaceElem<'tcx> { } + // At least on 64 bit systems, `PlaceElem` should not be larger than two pointers. #[cfg(target_arch = "x86_64")] static_assert_size!(PlaceElem<'_>, 16); @@ -1830,7 +1837,7 @@ static_assert_size!(PlaceElem<'_>, 16); /// need neither the `V` parameter for `Index` nor the `T` for `Field`. pub type ProjectionKind = ProjectionElem<(), ()>; -newtype_index! { +rustc_index::newtype_index! { pub struct Field { derive [HashStable] DEBUG_FORMAT = "field[{}]" @@ -1844,50 +1851,11 @@ pub struct PlaceRef<'a, 'tcx> { } impl<'tcx> Place<'tcx> { - // FIXME change this back to a const when projection is a shared slice. 
- // - // pub const RETURN_PLACE: Place<'tcx> = Place { - // base: PlaceBase::Local(RETURN_PLACE), - // projection: &[], - // }; + // FIXME change this to a const fn by also making List::empty a const fn. pub fn return_place() -> Place<'tcx> { Place { base: PlaceBase::Local(RETURN_PLACE), - projection: Box::new([]), - } - } - - pub fn field(self, f: Field, ty: Ty<'tcx>) -> Place<'tcx> { - self.elem(ProjectionElem::Field(f, ty)) - } - - pub fn deref(self) -> Place<'tcx> { - self.elem(ProjectionElem::Deref) - } - - pub fn downcast(self, adt_def: &'tcx AdtDef, variant_index: VariantIdx) -> Place<'tcx> { - self.elem(ProjectionElem::Downcast( - Some(adt_def.variants[variant_index].ident.name), - variant_index, - )) - } - - pub fn downcast_unnamed(self, variant_index: VariantIdx) -> Place<'tcx> { - self.elem(ProjectionElem::Downcast(None, variant_index)) - } - - pub fn index(self, index: Local) -> Place<'tcx> { - self.elem(ProjectionElem::Index(index)) - } - - pub fn elem(self, elem: PlaceElem<'tcx>) -> Place<'tcx> { - // FIXME(spastorino): revisit this again once projection is not a Box<[T]> anymore - let mut projection = self.projection.into_vec(); - projection.push(elem); - - Place { - base: self.base, - projection: projection.into_boxed_slice(), + projection: List::empty(), } } @@ -1904,15 +1872,15 @@ impl<'tcx> Place<'tcx> { // // FIXME: can we safely swap the semantics of `fn base_local` below in here instead? pub fn local_or_deref_local(&self) -> Option { - match self { - Place { - base: PlaceBase::Local(local), - projection: box [], + match self.as_ref() { + PlaceRef { + base: &PlaceBase::Local(local), + projection: &[], } | - Place { - base: PlaceBase::Local(local), - projection: box [ProjectionElem::Deref], - } => Some(*local), + PlaceRef { + base: &PlaceBase::Local(local), + projection: &[ProjectionElem::Deref], + } => Some(local), _ => None, } } @@ -1920,10 +1888,7 @@ impl<'tcx> Place<'tcx> { /// If this place represents a local variable like `_X` with no /// projections, return `Some(_X)`. pub fn as_local(&self) -> Option { - match self { - Place { projection: box [], base: PlaceBase::Local(l) } => Some(*l), - _ => None, - } + self.as_ref().as_local() } pub fn as_ref(&self) -> PlaceRef<'_, 'tcx> { @@ -1938,7 +1903,7 @@ impl From for Place<'_> { fn from(local: Local) -> Self { Place { base: local.into(), - projection: Box::new([]), + projection: List::empty(), } } } @@ -1967,6 +1932,15 @@ impl<'a, 'tcx> PlaceRef<'a, 'tcx> { _ => None, } } + + /// If this place represents a local variable like `_X` with no + /// projections, return `Some(_X)`. + pub fn as_local(&self) -> Option { + match self { + PlaceRef { base: PlaceBase::Local(l), projection: [] } => Some(*l), + _ => None, + } + } } impl Debug for Place<'_> { @@ -2045,7 +2019,7 @@ impl Debug for PlaceBase<'_> { /////////////////////////////////////////////////////////////////////////// // Scopes -newtype_index! { +rustc_index::newtype_index! 
{ pub struct SourceScope { derive [HashStable] DEBUG_FORMAT = "scope[{}]", @@ -2186,8 +2160,8 @@ pub enum AggregateKind<'tcx> { /// active field index would identity the field `c` Adt(&'tcx AdtDef, VariantIdx, SubstsRef<'tcx>, Option, Option), - Closure(DefId, ClosureSubsts<'tcx>), - Generator(DefId, GeneratorSubsts<'tcx>, hir::GeneratorMovability), + Closure(DefId, SubstsRef<'tcx>), + Generator(DefId, SubstsRef<'tcx>, hir::GeneratorMovability), } #[derive(Copy, Clone, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable, HashStable)] @@ -2390,7 +2364,7 @@ impl<'tcx> Debug for Rvalue<'tcx> { /// this does not necessarily mean that they are "==" in Rust -- in /// particular one must be wary of `NaN`! -#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)] +#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, HashStable)] pub struct Constant<'tcx> { pub span: Span, @@ -2436,7 +2410,7 @@ pub struct Constant<'tcx> { /// The first will lead to the constraint `w: &'1 str` (for some /// inferred region `'1`). The second will lead to the constraint `w: /// &'static str`. -#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)] +#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable)] pub struct UserTypeProjections { pub(crate) contents: Vec<(UserTypeProjection, Span)>, } @@ -2513,7 +2487,7 @@ impl<'tcx> UserTypeProjections { /// * `let (x, _): T = ...` -- here, the `projs` vector would contain /// `field[0]` (aka `.0`), indicating that the type of `s` is /// determined by finding the type of the `.0` field from `T`. -#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)] +#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable)] pub struct UserTypeProjection { pub base: UserTypeAnnotationIndex, pub projs: Vec, @@ -2584,7 +2558,7 @@ impl<'tcx> TypeFoldable<'tcx> for UserTypeProjection { } } -newtype_index! { +rustc_index::newtype_index! { pub struct Promoted { derive [HashStable] DEBUG_FORMAT = "promoted[{}]" @@ -2600,7 +2574,14 @@ impl<'tcx> Debug for Constant<'tcx> { impl<'tcx> Display for Constant<'tcx> { fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result { write!(fmt, "const ")?; - write!(fmt, "{}", self.literal) + // FIXME make the default pretty printing of raw pointers more detailed. Here we output the + // debug representation of raw pointers, so that the raw pointers in the mir dump output are + // detailed and just not '{pointer}'. + if let ty::RawPtr(_) = self.literal.ty.kind { + write!(fmt, "{:?} : {}", self.literal.val, self.literal.ty) + } else { + write!(fmt, "{}", self.literal) + } } } @@ -2715,7 +2696,7 @@ impl Location { } } -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)] +#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, HashStable)] pub enum UnsafetyViolationKind { General, /// Permitted both in `const fn`s and regular `fn`s. 
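The `Display for Constant` change a little above special-cases raw-pointer constants so the MIR dump prints their `Debug` form instead of the opaque `{pointer}` placeholder. A toy version of that 'dispatch between `Display` and `Debug` on the value's type' pattern, with names invented for the example:

```rust
use std::fmt::{self, Display, Formatter};

#[derive(Debug)]
enum Literal {
    Int(i64),
    RawPtr(usize),
}

impl Display for Literal {
    fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
        write!(fmt, "const ")?;
        match self {
            // Raw pointers have no informative Display form, so fall back to
            // the more detailed derived Debug representation.
            Literal::RawPtr(_) => write!(fmt, "{:?}", self),
            Literal::Int(i) => write!(fmt, "{}", i),
        }
    }
}

fn main() {
    println!("{}", Literal::Int(42));        // const 42
    println!("{}", Literal::RawPtr(0xdead)); // const RawPtr(57005)
}
```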
@@ -2724,15 +2705,15 @@ pub enum UnsafetyViolationKind { BorrowPacked(hir::HirId), } -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)] +#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, HashStable)] pub struct UnsafetyViolation { pub source_info: SourceInfo, - pub description: InternedString, - pub details: InternedString, + pub description: Symbol, + pub details: Symbol, pub kind: UnsafetyViolationKind, } -#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)] +#[derive(Clone, RustcEncodable, RustcDecodable, HashStable)] pub struct UnsafetyCheckResult { /// Violations that are propagated *upwards* from this function. pub violations: Lrc<[UnsafetyViolation]>, @@ -2741,7 +2722,7 @@ pub struct UnsafetyCheckResult { pub unsafe_blocks: Lrc<[(hir::HirId, bool)]>, } -newtype_index! { +rustc_index::newtype_index! { pub struct GeneratorSavedLocal { derive [HashStable] DEBUG_FORMAT = "_{}", @@ -3173,6 +3154,17 @@ impl<'tcx> TypeFoldable<'tcx> for PlaceBase<'tcx> { } } +impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List> { + fn super_fold_with>(&self, folder: &mut F) -> Self { + let v = self.iter().map(|t| t.fold_with(folder)).collect::>(); + folder.tcx().intern_place_elems(&v) + } + + fn super_visit_with>(&self, visitor: &mut V) -> bool { + self.iter().any(|t| t.visit_with(visitor)) + } +} + impl<'tcx> TypeFoldable<'tcx> for Static<'tcx> { fn super_fold_with>(&self, folder: &mut F) -> Self { Static { diff --git a/src/librustc/mir/mono.rs b/src/librustc/mir/mono.rs index a061e6f48f..58f99667cb 100644 --- a/src/librustc/mir/mono.rs +++ b/src/librustc/mir/mono.rs @@ -1,6 +1,6 @@ use crate::hir::def_id::{DefId, CrateNum, LOCAL_CRATE}; use crate::hir::HirId; -use syntax::symbol::InternedString; +use syntax::symbol::Symbol; use syntax::attr::InlineAttr; use syntax::source_map::Span; use crate::ty::{Instance, InstanceDef, TyCtxt, SymbolName, subst::InternalSubsts}; @@ -8,15 +8,14 @@ use crate::util::nodemap::FxHashMap; use crate::ty::print::obsolete::DefPathBasedNames; use crate::dep_graph::{WorkProductId, DepNode, WorkProduct, DepConstructor}; use rustc_data_structures::base_n; -use rustc_data_structures::stable_hasher::{HashStable, StableHasherResult, - StableHasher}; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use crate::ich::{Fingerprint, StableHashingContext, NodeIdHashingMode}; use crate::session::config::OptLevel; use std::fmt; use std::hash::Hash; /// Describes how a monomorphization will be instantiated in object files. -#[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)] +#[derive(PartialEq)] pub enum InstantiationMode { /// There will be exactly one instance of the given MonoItem. It will have /// external linkage so that it can be linked to from other codegen units. @@ -81,7 +80,7 @@ impl<'tcx> MonoItem<'tcx> { MonoItem::GlobalAsm(hir_id) => { let def_id = tcx.hir().local_def_id(hir_id); SymbolName { - name: InternedString::intern(&format!("global_asm_{:?}", def_id)) + name: Symbol::intern(&format!("global_asm_{:?}", def_id)) } } } @@ -223,9 +222,7 @@ impl<'tcx> MonoItem<'tcx> { } impl<'a, 'tcx> HashStable> for MonoItem<'tcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { ::std::mem::discriminant(self).hash_stable(hcx, hasher); match *self { @@ -249,12 +246,12 @@ pub struct CodegenUnit<'tcx> { /// name be unique amongst **all** crates. 
Therefore, it should /// contain something unique to this crate (e.g., a module path) /// as well as the crate name and disambiguator. - name: InternedString, + name: Symbol, items: FxHashMap, (Linkage, Visibility)>, size_estimate: Option, } -#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] +#[derive(Copy, Clone, PartialEq, Debug, RustcEncodable, RustcDecodable)] pub enum Linkage { External, AvailableExternally, @@ -283,7 +280,7 @@ impl_stable_hash_for!(enum self::Linkage { Common }); -#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +#[derive(Copy, Clone, PartialEq, Debug)] pub enum Visibility { Default, Hidden, @@ -297,7 +294,7 @@ impl_stable_hash_for!(enum self::Visibility { }); impl<'tcx> CodegenUnit<'tcx> { - pub fn new(name: InternedString) -> CodegenUnit<'tcx> { + pub fn new(name: Symbol) -> CodegenUnit<'tcx> { CodegenUnit { name: name, items: Default::default(), @@ -305,11 +302,11 @@ impl<'tcx> CodegenUnit<'tcx> { } } - pub fn name(&self) -> &InternedString { - &self.name + pub fn name(&self) -> Symbol { + self.name } - pub fn set_name(&mut self, name: InternedString) { + pub fn set_name(&mut self, name: Symbol) { self.name = name; } @@ -389,6 +386,7 @@ impl<'tcx> CodegenUnit<'tcx> { tcx.hir().as_local_hir_id(def_id) } InstanceDef::VtableShim(..) | + InstanceDef::ReifyShim(..) | InstanceDef::Intrinsic(..) | InstanceDef::FnPtrShim(..) | InstanceDef::Virtual(..) | @@ -419,9 +417,7 @@ impl<'tcx> CodegenUnit<'tcx> { } impl<'a, 'tcx> HashStable> for CodegenUnit<'tcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { let CodegenUnit { ref items, name, @@ -478,7 +474,7 @@ impl CodegenUnitNameBuilder<'tcx> { cnum: CrateNum, components: I, special_suffix: Option) - -> InternedString + -> Symbol where I: IntoIterator, C: fmt::Display, S: fmt::Display, @@ -491,7 +487,7 @@ impl CodegenUnitNameBuilder<'tcx> { cgu_name } else { let cgu_name = &cgu_name.as_str()[..]; - InternedString::intern(&CodegenUnit::mangle_name(cgu_name)) + Symbol::intern(&CodegenUnit::mangle_name(cgu_name)) } } @@ -501,7 +497,7 @@ impl CodegenUnitNameBuilder<'tcx> { cnum: CrateNum, components: I, special_suffix: Option) - -> InternedString + -> Symbol where I: IntoIterator, C: fmt::Display, S: fmt::Display, @@ -547,6 +543,6 @@ impl CodegenUnitNameBuilder<'tcx> { write!(cgu_name, ".{}", special_suffix).unwrap(); } - InternedString::intern(&cgu_name[..]) + Symbol::intern(&cgu_name[..]) } } diff --git a/src/librustc/mir/tcx.rs b/src/librustc/mir/tcx.rs index d776809839..e87aabf9a0 100644 --- a/src/librustc/mir/tcx.rs +++ b/src/librustc/mir/tcx.rs @@ -34,7 +34,7 @@ impl<'tcx> PlaceTy<'tcx> { /// /// Note that the resulting type has not been normalized. pub fn field_ty(self, tcx: TyCtxt<'tcx>, f: &Field) -> Ty<'tcx> { - let answer = match self.ty.sty { + let answer = match self.ty.kind { ty::Adt(adt_def, substs) => { let variant_def = match self.variant_index { None => adt_def.non_enum_variant(), @@ -89,7 +89,7 @@ impl<'tcx> PlaceTy<'tcx> { ProjectionElem::Index(_) | ProjectionElem::ConstantIndex { .. 
} => PlaceTy::from_ty(self.ty.builtin_index().unwrap()), ProjectionElem::Subslice { from, to } => { - PlaceTy::from_ty(match self.ty.sty { + PlaceTy::from_ty(match self.ty.kind { ty::Array(inner, size) => { let size = size.eval_usize(tcx, param_env); let len = size - (from as u64) - (to as u64); @@ -195,9 +195,9 @@ impl<'tcx> Rvalue<'tcx> { } Rvalue::Discriminant(ref place) => { let ty = place.ty(local_decls, tcx).ty; - match ty.sty { + match ty.kind { ty::Adt(adt_def, _) => adt_def.repr.discr_type().to_ty(tcx), - ty::Generator(_, substs, _) => substs.discr_ty(tcx), + ty::Generator(_, substs, _) => substs.as_generator().discr_ty(tcx), _ => { // This can only be `0`, for now, so `u8` will suffice. tcx.types.u8 diff --git a/src/librustc/mir/traversal.rs b/src/librustc/mir/traversal.rs index 1416a5f0a6..f129dd3abe 100644 --- a/src/librustc/mir/traversal.rs +++ b/src/librustc/mir/traversal.rs @@ -1,4 +1,4 @@ -use rustc_data_structures::bit_set::BitSet; +use rustc_index::bit_set::BitSet; use super::*; diff --git a/src/librustc/mir/visit.rs b/src/librustc/mir/visit.rs index 1e3b9eb29c..6a41b843e5 100644 --- a/src/librustc/mir/visit.rs +++ b/src/librustc/mir/visit.rs @@ -1,5 +1,5 @@ use crate::ty::subst::SubstsRef; -use crate::ty::{CanonicalUserTypeAnnotation, ClosureSubsts, GeneratorSubsts, Ty}; +use crate::ty::{CanonicalUserTypeAnnotation, Ty}; use crate::mir::*; use syntax_pos::Span; @@ -158,13 +158,7 @@ macro_rules! make_mir_visitor { self.super_place_base(base, context, location); } - fn visit_projection(&mut self, - base: & $($mutability)? PlaceBase<'tcx>, - projection: & $($mutability)? [PlaceElem<'tcx>], - context: PlaceContext, - location: Location) { - self.super_projection(base, projection, context, location); - } + visit_place_fns!($($mutability)?); fn visit_constant(&mut self, constant: & $($mutability)? Constant<'tcx>, @@ -221,18 +215,6 @@ macro_rules! make_mir_visitor { self.super_substs(substs); } - fn visit_closure_substs(&mut self, - substs: & $($mutability)? ClosureSubsts<'tcx>, - _: Location) { - self.super_closure_substs(substs); - } - - fn visit_generator_substs(&mut self, - substs: & $($mutability)? GeneratorSubsts<'tcx>, - _: Location) { - self.super_generator_substs(substs); - } - fn visit_local_decl(&mut self, local: Local, local_decl: & $($mutability)? LocalDecl<'tcx>) { @@ -618,14 +600,14 @@ macro_rules! make_mir_visitor { _, closure_substs ) => { - self.visit_closure_substs(closure_substs, location); + self.visit_substs(closure_substs, location); } AggregateKind::Generator( _, generator_substs, _movability, ) => { - self.visit_generator_substs(generator_substs, location); + self.visit_substs(generator_substs, location); } } @@ -684,28 +666,6 @@ macro_rules! make_mir_visitor { ); } - fn super_place(&mut self, - place: & $($mutability)? Place<'tcx>, - context: PlaceContext, - location: Location) { - let mut context = context; - - if !place.projection.is_empty() { - context = if context.is_mutating_use() { - PlaceContext::MutatingUse(MutatingUseContext::Projection) - } else { - PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection) - }; - } - - self.visit_place_base(& $($mutability)? place.base, context, location); - - self.visit_projection(& $($mutability)? place.base, - & $($mutability)? place.projection, - context, - location); - } - fn super_place_base(&mut self, place_base: & $($mutability)? PlaceBase<'tcx>, context: PlaceContext, @@ -720,36 +680,6 @@ macro_rules! make_mir_visitor { } } - fn super_projection(&mut self, - base: & $($mutability)? 
PlaceBase<'tcx>, - projection: & $($mutability)? [PlaceElem<'tcx>], - context: PlaceContext, - location: Location) { - if let [proj_base @ .., elem] = projection { - self.visit_projection(base, proj_base, context, location); - - match elem { - ProjectionElem::Field(_field, ty) => { - self.visit_ty(ty, TyContext::Location(location)); - } - ProjectionElem::Index(local) => { - self.visit_local( - local, - PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy), - location - ); - } - ProjectionElem::Deref | - ProjectionElem::Subslice { from: _, to: _ } | - ProjectionElem::ConstantIndex { offset: _, - min_length: _, - from_end: _ } | - ProjectionElem::Downcast(_, _) => { - } - } - } - } - fn super_local_decl(&mut self, local: Local, local_decl: & $($mutability)? LocalDecl<'tcx>) { @@ -834,14 +764,6 @@ macro_rules! make_mir_visitor { fn super_substs(&mut self, _substs: & $($mutability)? SubstsRef<'tcx>) { } - fn super_generator_substs(&mut self, - _substs: & $($mutability)? GeneratorSubsts<'tcx>) { - } - - fn super_closure_substs(&mut self, - _substs: & $($mutability)? ClosureSubsts<'tcx>) { - } - // Convenience methods fn visit_location(&mut self, body: & $($mutability)? Body<'tcx>, location: Location) { @@ -860,6 +782,145 @@ macro_rules! make_mir_visitor { } } +macro_rules! visit_place_fns { + (mut) => ( + fn tcx<'a>(&'a self) -> TyCtxt<'tcx>; + + fn super_place( + &mut self, + place: &mut Place<'tcx>, + context: PlaceContext, + location: Location, + ) { + self.visit_place_base(&mut place.base, context, location); + + if let Some(new_projection) = self.process_projection(&place.projection) { + place.projection = self.tcx().intern_place_elems(&new_projection); + } + } + + fn process_projection( + &mut self, + projection: &'a [PlaceElem<'tcx>], + ) -> Option>> { + let mut projection = Cow::Borrowed(projection); + + for i in 0..projection.len() { + if let Some(elem) = projection.get(i) { + if let Some(elem) = self.process_projection_elem(elem) { + // This converts the borrowed projection into `Cow::Owned(_)` and returns a + // clone of the projection so we can mutate and reintern later. 
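The `process_projection` body being added here (continued on the following lines) uses `Cow` so the projection list is cloned, and later re-interned, only if some element actually changes. A self-contained sketch of that copy-on-write idiom over a plain slice standing in for `&List<PlaceElem<'tcx>>`:

```rust
use std::borrow::Cow;

/// Applies `rewrite` to every element, cloning the slice only when at least
/// one element is replaced. Returns `None` if nothing changed.
fn process<T: Clone>(
    elems: &[T],
    mut rewrite: impl FnMut(&T) -> Option<T>,
) -> Option<Vec<T>> {
    let mut elems = Cow::Borrowed(elems);

    for i in 0..elems.len() {
        if let Some(new) = rewrite(&elems[i]) {
            // First replacement: `to_mut` clones into `Cow::Owned(Vec<T>)`.
            elems.to_mut()[i] = new;
        }
    }

    match elems {
        Cow::Borrowed(_) => None,     // untouched; caller keeps the old slice
        Cow::Owned(vec) => Some(vec), // changed; caller would re-intern this
    }
}

fn main() {
    let projection = vec![1, 2, 3];
    // Nothing matches, so no clone is made and `None` comes back.
    assert_eq!(process(&projection, |&e| (e > 10).then(|| e * 2)), None);
    // One element is rewritten, so an owned copy is returned.
    assert_eq!(process(&projection, |&e| (e == 2).then(|| 20)), Some(vec![1, 20, 3]));
}
```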
+ let vec = projection.to_mut(); + vec[i] = elem; + } + } + } + + match projection { + Cow::Borrowed(_) => None, + Cow::Owned(vec) => Some(vec), + } + } + + fn process_projection_elem( + &mut self, + _elem: &PlaceElem<'tcx>, + ) -> Option> { + None + } + ); + + () => ( + fn visit_projection( + &mut self, + base: &PlaceBase<'tcx>, + projection: &[PlaceElem<'tcx>], + context: PlaceContext, + location: Location, + ) { + self.super_projection(base, projection, context, location); + } + + fn visit_projection_elem( + &mut self, + base: &PlaceBase<'tcx>, + proj_base: &[PlaceElem<'tcx>], + elem: &PlaceElem<'tcx>, + context: PlaceContext, + location: Location, + ) { + self.super_projection_elem(base, proj_base, elem, context, location); + } + + fn super_place( + &mut self, + place: &Place<'tcx>, + context: PlaceContext, + location: Location, + ) { + let mut context = context; + + if !place.projection.is_empty() { + context = if context.is_mutating_use() { + PlaceContext::MutatingUse(MutatingUseContext::Projection) + } else { + PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection) + }; + } + + self.visit_place_base(&place.base, context, location); + + self.visit_projection(&place.base, + &place.projection, + context, + location); + } + + fn super_projection( + &mut self, + base: &PlaceBase<'tcx>, + projection: &[PlaceElem<'tcx>], + context: PlaceContext, + location: Location, + ) { + let mut cursor = projection; + while let [proj_base @ .., elem] = cursor { + cursor = proj_base; + self.visit_projection_elem(base, cursor, elem, context, location); + } + } + + fn super_projection_elem( + &mut self, + _base: &PlaceBase<'tcx>, + _proj_base: &[PlaceElem<'tcx>], + elem: &PlaceElem<'tcx>, + _context: PlaceContext, + location: Location, + ) { + match elem { + ProjectionElem::Field(_field, ty) => { + self.visit_ty(ty, TyContext::Location(location)); + } + ProjectionElem::Index(local) => { + self.visit_local( + local, + PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy), + location + ); + } + ProjectionElem::Deref | + ProjectionElem::Subslice { from: _, to: _ } | + ProjectionElem::ConstantIndex { offset: _, + min_length: _, + from_end: _ } | + ProjectionElem::Downcast(_, _) => { + } + } + } + ); +} + make_mir_visitor!(Visitor,); make_mir_visitor!(MutVisitor,mut); @@ -890,7 +951,7 @@ impl<'tcx> MirVisitable<'tcx> for Option> { /// Extra information passed to `visit_ty` and friends to give context /// about where the type etc appears. -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +#[derive(Debug)] pub enum TyContext { LocalDecl { /// The index of the local variable we are visiting. diff --git a/src/librustc/query/mod.rs b/src/librustc/query/mod.rs index 252e49d5d1..86ed2419e0 100644 --- a/src/librustc/query/mod.rs +++ b/src/librustc/query/mod.rs @@ -15,7 +15,7 @@ use crate::traits::query::{ }; use std::borrow::Cow; -use syntax_pos::symbol::InternedString; +use syntax_pos::symbol::Symbol; // Each of these queries corresponds to a function pointer field in the // `Providers` struct for requesting a value of that type, and a method @@ -61,7 +61,7 @@ rustc_queries! { /// predicate gets in the way of some checks, which are intended /// to operate over only the actual where-clauses written by the /// user.) - query predicates_of(key: DefId) -> &'tcx ty::GenericPredicates<'tcx> { + query predicates_of(key: DefId) -> ty::GenericPredicates<'tcx> { cache_on_disk_if { key.is_local() } } @@ -94,6 +94,7 @@ rustc_queries! { /// of the MIR qualify_consts pass. 
The actual meaning of /// the value isn't known except to the pass itself. query mir_const_qualif(key: DefId) -> (u8, &'tcx BitSet) { + desc { |tcx| "const checking `{}`", tcx.def_path_str(key) } cache_on_disk_if { key.is_local() } } @@ -132,7 +133,7 @@ rustc_queries! { cache_on_disk_if { key.is_local() } load_cached(tcx, id) { let promoted: Option< - rustc_data_structures::indexed_vec::IndexVec< + rustc_index::vec::IndexVec< crate::mir::Promoted, crate::mir::Body<'tcx> >> = tcx.queries.on_disk_cache.try_load_query_result(tcx, id); @@ -183,16 +184,14 @@ rustc_queries! { /// predicates (where-clauses) directly defined on it. This is /// equal to the `explicit_predicates_of` predicates plus the /// `inferred_outlives_of` predicates. - query predicates_defined_on(_: DefId) - -> &'tcx ty::GenericPredicates<'tcx> {} + query predicates_defined_on(_: DefId) -> ty::GenericPredicates<'tcx> {} /// Returns the predicates written explicitly by the user. - query explicit_predicates_of(_: DefId) - -> &'tcx ty::GenericPredicates<'tcx> {} + query explicit_predicates_of(_: DefId) -> ty::GenericPredicates<'tcx> {} /// Returns the inferred outlives predicates (e.g., for `struct /// Foo<'a, T> { x: &'a T }`, this would return `T: 'a`). - query inferred_outlives_of(_: DefId) -> &'tcx [ty::Predicate<'tcx>] {} + query inferred_outlives_of(_: DefId) -> &'tcx [(ty::Predicate<'tcx>, Span)] {} /// Maps from the `DefId` of a trait to the list of /// super-predicates. This is a subset of the full list of @@ -200,14 +199,13 @@ rustc_queries! { /// evaluate them even during type conversion, often before the /// full predicates are available (note that supertraits have /// additional acyclicity requirements). - query super_predicates_of(key: DefId) -> &'tcx ty::GenericPredicates<'tcx> { + query super_predicates_of(key: DefId) -> ty::GenericPredicates<'tcx> { desc { |tcx| "computing the supertraits of `{}`", tcx.def_path_str(key) } } /// To avoid cycles within the predicates of a single item we compute /// per-type-parameter predicates for resolving `T::AssocTy`. - query type_param_predicates(key: (DefId, DefId)) - -> &'tcx ty::GenericPredicates<'tcx> { + query type_param_predicates(key: (DefId, DefId)) -> ty::GenericPredicates<'tcx> { no_force desc { |tcx| "computing the bounds for type parameter `{}`", { let id = tcx.hir().as_local_hir_id(key.1).unwrap(); @@ -230,6 +228,12 @@ rustc_queries! { cycle_delay_bug } + query trivial_dropck_outlives(ty: Ty<'tcx>) -> bool { + anon + no_force + desc { "checking if `{:?}` has trivial dropck", ty } + } + query adt_dtorck_constraint( _: DefId ) -> Result, NoSolution> {} @@ -290,7 +294,7 @@ rustc_queries! { query associated_item(_: DefId) -> ty::AssocItem {} query impl_trait_ref(_: DefId) -> Option> {} - query impl_polarity(_: DefId) -> hir::ImplPolarity {} + query impl_polarity(_: DefId) -> ty::ImplPolarity {} query issue33140_self_ty(_: DefId) -> Option> {} } @@ -397,10 +401,6 @@ rustc_queries! { } BorrowChecking { - query borrowck(key: DefId) -> &'tcx BorrowCheckResult { - cache_on_disk_if { key.is_local() } - } - /// Borrow-checks the function body. If this is a closure, returns /// additional requirements that the closure's creator must verify. query mir_borrowck(key: DefId) -> mir::BorrowCheckResult<'tcx> { @@ -462,14 +462,18 @@ rustc_queries! 
{ query const_field( key: ty::ParamEnvAnd<'tcx, (&'tcx ty::Const<'tcx>, mir::Field)> ) -> &'tcx ty::Const<'tcx> { - eval_always no_force desc { "extract field of const" } } + + query const_caller_location(key: (syntax_pos::Symbol, u32, u32)) -> &'tcx ty::Const<'tcx> { + no_force + desc { "get a &core::panic::Location referring to a span" } + } } TypeChecking { - query check_match(key: DefId) -> SignalledError { + query check_match(key: DefId) { cache_on_disk_if { key.is_local() } } @@ -534,19 +538,6 @@ rustc_queries! { TypeChecking { query trait_of_item(_: DefId) -> Option {} - query const_is_rvalue_promotable_to_static(key: DefId) -> bool { - desc { |tcx| - "const checking if rvalue is promotable to static `{}`", - tcx.def_path_str(key) - } - cache_on_disk_if { true } - } - query rvalue_promotable_map(key: DefId) -> &'tcx ItemLocalSet { - desc { |tcx| - "checking which parts of `{}` are promotable to static", - tcx.def_path_str(key) - } - } } Codegen { @@ -634,6 +625,12 @@ rustc_queries! { -> &'tcx [(CrateNum, LinkagePreference)] { desc { "dylib dependency formats of crate" } } + + query dependency_formats(_: CrateNum) + -> Lrc + { + desc { "get the linkage format of all dependencies" } + } } Codegen { @@ -757,6 +754,10 @@ rustc_queries! { eval_always desc { "looking up the hash a crate" } } + query crate_host_hash(_: CrateNum) -> Option { + eval_always + desc { "looking up the hash of a host version of a crate" } + } query original_crate_name(_: CrateNum) -> Symbol { eval_always desc { "looking up the original name a crate" } @@ -931,7 +932,7 @@ rustc_queries! { desc { "collect_and_partition_mono_items" } } query is_codegened_item(_: DefId) -> bool {} - query codegen_unit(_: InternedString) -> Arc> { + query codegen_unit(_: Symbol) -> Arc> { no_force desc { "codegen_unit" } } diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index 5eda3df378..2bcddeaf19 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -7,20 +7,16 @@ use crate::session::{early_error, early_warn, Session}; use crate::session::search_paths::SearchPath; use rustc_data_structures::fx::FxHashSet; -use rustc_data_structures::sync::Lrc; use rustc_target::spec::{LinkerFlavor, MergeFunctions, PanicStrategy, RelroLevel}; use rustc_target::spec::{Target, TargetTriple}; use syntax; -use syntax::ast::{self, IntTy, UintTy, MetaItemKind}; +use syntax::ast::{self, IntTy, UintTy}; use syntax::source_map::{FileName, FilePathMapping}; use syntax::edition::{Edition, EDITION_NAME_LIST, DEFAULT_EDITION}; -use syntax::parse::{ParseSess, new_parser_from_source_str}; -use syntax::parse::token; use syntax::symbol::{sym, Symbol}; use syntax::feature_gate::UnstableFeatures; -use syntax::source_map::SourceMap; use errors::emitter::HumanReadableErrorType; use errors::{ColorConfig, FatalError, Handler}; @@ -66,7 +62,7 @@ impl_stable_hash_via_hash!(OptLevel); /// This is what the `LtoCli` values get mapped to after resolving defaults and /// and taking other command line options into account. -#[derive(Clone, Copy, PartialEq, Hash, Debug)] +#[derive(Clone, PartialEq)] pub enum Lto { /// Don't do any LTO whatsoever No, @@ -300,10 +296,10 @@ impl OutputTypes { /// Use tree-based collections to cheaply get a deterministic `Hash` implementation. /// *Do not* switch `BTreeMap` or `BTreeSet` out for an unsorted container type! That /// would break dependency tracking for command-line arguments. 
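// A quick illustration of why the tree-based containers matter here
// (editorial sketch, not part of this patch): `BTreeMap`/`BTreeSet` iterate
// in key order, so anything derived from their contents, such as a hash used
// for dependency tracking, is independent of insertion order and of any
// per-process hasher seed.
//
//     use std::collections::BTreeMap;
//
//     let mut a = BTreeMap::new();
//     a.insert("crate_a", "path1");
//     a.insert("crate_b", "path2");
//
//     let mut b = BTreeMap::new();
//     b.insert("crate_b", "path2");
//     b.insert("crate_a", "path1");
//
//     // Same contents, same iteration order, regardless of the order the
//     // corresponding command-line flags appeared in.
//     assert!(a.iter().eq(b.iter()));
//
// `std::collections::HashMap` does not even implement `Hash`, and its
// iteration order changes from run to run, which is exactly what would break
// dependency tracking.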
-#[derive(Clone, Hash)] +#[derive(Clone)] pub struct Externs(BTreeMap); -#[derive(Clone, Hash, Eq, PartialEq, Ord, PartialOrd, Debug, Default)] +#[derive(Clone, Debug, Default)] pub struct ExternEntry { pub locations: BTreeSet>, pub is_private_dep: bool @@ -463,7 +459,7 @@ pub enum PrintRequest { NativeStaticLibs, } -#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[derive(Copy, Clone)] pub enum BorrowckMode { Mir, Migrate, @@ -478,14 +474,6 @@ impl BorrowckMode { BorrowckMode::Migrate => true, } } - - /// Returns whether we should emit the AST-based borrow checker errors. - pub fn use_ast(self) -> bool { - match self { - BorrowckMode::Mir => false, - BorrowckMode::Migrate => false, - } - } } pub enum Input { @@ -687,7 +675,7 @@ pub enum EntryFnType { impl_stable_hash_via_hash!(EntryFnType); -#[derive(Copy, PartialEq, PartialOrd, Clone, Ord, Eq, Hash, Debug)] +#[derive(Copy, PartialEq, PartialOrd, Clone, Ord, Eq, Hash, Debug, HashStable)] pub enum CrateType { Executable, Dylib, @@ -813,6 +801,7 @@ macro_rules! options { pub const parse_list: Option<&str> = Some("a space-separated list of strings"); pub const parse_opt_list: Option<&str> = Some("a space-separated list of strings"); pub const parse_opt_comma_list: Option<&str> = Some("a comma-separated list of strings"); + pub const parse_threads: Option<&str> = Some("a number"); pub const parse_uint: Option<&str> = Some("a number"); pub const parse_passes: Option<&str> = Some("a space-separated list of passes, or `all`"); @@ -956,6 +945,14 @@ macro_rules! options { } } + fn parse_threads(slot: &mut usize, v: Option<&str>) -> bool { + match v.and_then(|s| s.parse().ok()) { + Some(0) => { *slot = ::num_cpus::get(); true }, + Some(i) => { *slot = i; true }, + None => false + } + } + fn parse_uint(slot: &mut usize, v: Option<&str>) -> bool { match v.and_then(|s| s.parse().ok()) { Some(i) => { *slot = i; true }, @@ -1152,7 +1149,8 @@ options! {CodegenOptions, CodegenSetter, basic_codegen_options, target_cpu: Option = (None, parse_opt_string, [TRACKED], "select target processor (`rustc --print target-cpus` for details)"), target_feature: String = (String::new(), parse_string, [TRACKED], - "target specific attributes (`rustc --print target-features` for details)"), + "target specific attributes. (`rustc --print target-features` for details). \ + This feature is unsafe."), passes: Vec = (Vec::new(), parse_list, [TRACKED], "a list of extra LLVM passes to run (space separated)"), llvm_args: Vec = (Vec::new(), parse_list, [TRACKED], @@ -1259,7 +1257,11 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options, "prints the LLVM optimization passes being run"), ast_json: bool = (false, parse_bool, [UNTRACKED], "print the AST as JSON and halt"), - threads: Option = (None, parse_opt_uint, [UNTRACKED], + // We default to 1 here since we want to behave like + // a sequential compiler for now. This'll likely be adjusted + // in the future. Note that -Zthreads=0 is the way to get + // the num_cpus behavior. + threads: usize = (1, parse_threads, [UNTRACKED], "use a thread pool with N threads"), ast_json_noexpand: bool = (false, parse_bool, [UNTRACKED], "print the pre-expansion AST as JSON and halt"), @@ -1268,14 +1270,6 @@ options! 
{DebuggingOptions, DebuggingSetter, basic_debugging_options, save_analysis: bool = (false, parse_bool, [UNTRACKED], "write syntax and type analysis (in JSON format) information, in \ addition to normal output"), - flowgraph_print_loans: bool = (false, parse_bool, [UNTRACKED], - "include loan analysis data in -Z unpretty flowgraph output"), - flowgraph_print_moves: bool = (false, parse_bool, [UNTRACKED], - "include move analysis data in -Z unpretty flowgraph output"), - flowgraph_print_assigns: bool = (false, parse_bool, [UNTRACKED], - "include assignment analysis data in -Z unpretty flowgraph output"), - flowgraph_print_all: bool = (false, parse_bool, [UNTRACKED], - "include all dataflow analysis data in -Z unpretty flowgraph output"), print_region_graph: bool = (false, parse_bool, [UNTRACKED], "prints region inference graph. \ Use with RUST_REGION_GRAPH=help for more info"), @@ -1295,6 +1289,8 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options, "show extended diagnostic help"), terminal_width: Option = (None, parse_opt_uint, [UNTRACKED], "set the current terminal width"), + panic_abort_tests: bool = (false, parse_bool, [TRACKED], + "support compiling tests with panic=abort"), continue_parse_after_error: bool = (false, parse_bool, [TRACKED], "attempt to recover from parse errors (experimental)"), dep_tasks: bool = (false, parse_bool, [UNTRACKED], @@ -1317,10 +1313,6 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options, "dump the dependency graph to $RUST_DEP_GRAPH (default: /tmp/dep_graph.gv)"), query_dep_graph: bool = (false, parse_bool, [UNTRACKED], "enable queries of the dependency graph for regression testing"), - profile_queries: bool = (false, parse_bool, [UNTRACKED], - "trace and profile the queries of the incremental compilation framework"), - profile_queries_and_keys: bool = (false, parse_bool, [UNTRACKED], - "trace and profile the queries and keys of the incremental compilation framework"), no_analysis: bool = (false, parse_bool, [UNTRACKED], "parse and expand the source, but run no analysis"), extra_plugins: Vec = (Vec::new(), parse_list, [TRACKED], @@ -1346,7 +1338,7 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options, mir_opt_level: usize = (1, parse_uint, [TRACKED], "set the MIR optimization level (0-3, default: 1)"), mutable_noalias: Option = (None, parse_opt_bool, [TRACKED], - "emit noalias metadata for mutable references (default: yes on LLVM >= 6)"), + "emit noalias metadata for mutable references (default: no)"), dump_mir: Option = (None, parse_opt_string, [UNTRACKED], "dump MIR state to file. `val` is used to select which passes and functions to dump. For example: @@ -1375,6 +1367,8 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options, "describes how to render the `rendered` field of json diagnostics"), unleash_the_miri_inside_of_you: bool = (false, parse_bool, [TRACKED], "take the breaks off const evaluation. NOTE: this is unsound"), + suppress_const_validation_back_compat_ice: bool = (false, parse_bool, [TRACKED], + "silence ICE triggered when the new const validator disagrees with the old"), osx_rpath_install_name: bool = (false, parse_bool, [TRACKED], "pass `-install_name @rpath/...` to the macOS linker"), sanitizer: Option = (None, parse_sanitizer, [TRACKED], @@ -1424,8 +1418,6 @@ options! 
{DebuggingOptions, DebuggingSetter, basic_debugging_options, valid types are any of the types for `--pretty`, as well as: `expanded`, `expanded,identified`, `expanded,hygiene` (with internal representations), - `flowgraph=` (graphviz formatted flowgraph for node), - `flowgraph,unlabelled=` (unlabelled graphviz formatted flowgraph for node), `everybody_loops` (all function bodies replaced with `loop {}`), `hir` (the HIR), `hir,identified`, `hir,typed` (HIR with types for each node), @@ -1472,9 +1464,12 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options, "which mangling version to use for symbol names"), binary_dep_depinfo: bool = (false, parse_bool, [TRACKED], "include artifacts (sysroot, crate dependencies) used during compilation in dep-info"), + insert_sideeffect: bool = (false, parse_bool, [TRACKED], + "fix undefined behavior when a thread doesn't eventually make progress \ + (such as entering an empty infinite loop) by inserting llvm.sideeffect"), } -pub fn default_lib_output() -> CrateType { +pub const fn default_lib_output() -> CrateType { CrateType::Rlib } @@ -1515,22 +1510,25 @@ pub fn default_configuration(sess: &Session) -> ast::CrateConfig { } for &i in &[8, 16, 32, 64, 128] { if i >= min_atomic_width && i <= max_atomic_width { - let s = i.to_string(); - ret.insert(( - sym::target_has_atomic, - Some(Symbol::intern(&s)), - )); - if &s == wordsz { + let mut insert_atomic = |s| { ret.insert(( - sym::target_has_atomic, - Some(Symbol::intern("ptr")), + sym::target_has_atomic_load_store, + Some(Symbol::intern(s)), )); + if atomic_cas { + ret.insert(( + sym::target_has_atomic, + Some(Symbol::intern(s)) + )); + } + }; + let s = i.to_string(); + insert_atomic(&s); + if &s == wordsz { + insert_atomic("ptr"); } } } - if atomic_cas { - ret.insert((sym::target_has_atomic, Some(Symbol::intern("cas")))); - } if sess.opts.debug_assertions { ret.insert((Symbol::intern("debug_assertions"), None)); } @@ -1852,58 +1850,6 @@ pub fn rustc_optgroups() -> Vec { opts } -struct NullEmitter; - -impl errors::emitter::Emitter for NullEmitter { - fn emit_diagnostic(&mut self, _: &errors::Diagnostic) {} -} - -// Converts strings provided as `--cfg [cfgspec]` into a `crate_cfg`. -pub fn parse_cfgspecs(cfgspecs: Vec) -> FxHashSet<(String, Option)> { - syntax::with_default_globals(move || { - let cfg = cfgspecs.into_iter().map(|s| { - - let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); - let handler = Handler::with_emitter(false, None, Box::new(NullEmitter)); - let sess = ParseSess::with_span_handler(handler, cm); - let filename = FileName::cfg_spec_source_code(&s); - let mut parser = new_parser_from_source_str(&sess, filename, s.to_string()); - - macro_rules! error {($reason: expr) => { - early_error(ErrorOutputType::default(), - &format!(concat!("invalid `--cfg` argument: `{}` (", $reason, ")"), s)); - }} - - match &mut parser.parse_meta_item() { - Ok(meta_item) if parser.token == token::Eof => { - if meta_item.path.segments.len() != 1 { - error!("argument key must be an identifier"); - } - match &meta_item.node { - MetaItemKind::List(..) => { - error!(r#"expected `key` or `key="value"`"#); - } - MetaItemKind::NameValue(lit) if !lit.node.is_str() => { - error!("argument value must be a string"); - } - MetaItemKind::NameValue(..) | MetaItemKind::Word => { - let ident = meta_item.ident().expect("multi-segment cfg key"); - return (ident.name, meta_item.value_str()); - } - } - } - Ok(..) 
=> {} - Err(err) => err.cancel(), - } - - error!(r#"expected `key` or `key="value"`"#); - }).collect::(); - cfg.into_iter().map(|(a, b)| { - (a.to_string(), b.map(|b| b.to_string())) - }).collect() - }) -} - pub fn get_cmd_lint_options(matches: &getopts::Matches, error_format: ErrorOutputType) -> (Vec<(String, lint::Level)>, bool, Option) { @@ -2037,11 +1983,7 @@ pub fn parse_error_format( return error_format; } -pub fn build_session_options_and_crate_config( - matches: &getopts::Matches, -) -> (Options, FxHashSet<(String, Option)>) { - let color = parse_color(matches); - +fn parse_crate_edition(matches: &getopts::Matches) -> Edition { let edition = match matches.opt_str("edition") { Some(arg) => Edition::from_str(&arg).unwrap_or_else(|_| early_error( @@ -2068,19 +2010,14 @@ pub fn build_session_options_and_crate_config( ) } - let (json_rendered, json_artifact_notifications) = parse_json(matches); - - let error_format = parse_error_format(matches, color, json_rendered); - - let unparsed_crate_types = matches.opt_strs("crate-type"); - let crate_types = parse_crate_types_from_list(unparsed_crate_types) - .unwrap_or_else(|e| early_error(error_format, &e[..])); - - - let (lint_opts, describe_lints, lint_cap) = get_cmd_lint_options(matches, error_format); - - let mut debugging_opts = build_debugging_options(matches, error_format); + edition +} +fn check_debug_option_stability( + debugging_opts: &DebuggingOptions, + error_format: ErrorOutputType, + json_rendered: HumanReadableErrorType, +) { if !debugging_opts.unstable_options { if let ErrorOutputType::Json { pretty: true, json_rendered } = error_format { early_error( @@ -2096,7 +2033,13 @@ pub fn build_session_options_and_crate_config( ); } } +} +fn parse_output_types( + debugging_opts: &DebuggingOptions, + matches: &getopts::Matches, + error_format: ErrorOutputType, +) -> OutputTypes { let mut output_types = BTreeMap::new(); if !debugging_opts.parse_only { for list in matches.opt_strs("emit") { @@ -2121,14 +2064,19 @@ pub fn build_session_options_and_crate_config( if output_types.is_empty() { output_types.insert(OutputType::Exe, None); } + OutputTypes(output_types) +} - let mut cg = build_codegen_options(matches, error_format); - let mut codegen_units = cg.codegen_units; +fn should_override_cgus_and_disable_thinlto( + output_types: &OutputTypes, + matches: &getopts::Matches, + error_format: ErrorOutputType, + mut codegen_units: Option, +) -> (bool, Option) { let mut disable_thinlto = false; - // Issue #30063: if user requests LLVM-related output to one // particular path, disable codegen-units. 
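// In concrete terms (illustrative, not part of this patch): an invocation
// along the lines of `rustc --emit=llvm-ir -o out.ll -C codegen-units=16 lib.rs`
// names a single output file, but more than one codegen unit would produce
// several LLVM modules with nowhere to put them, so the explicit
// `-C codegen-units` value is overridden (with a warning) and ThinLTO is
// disabled for the same reason.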
- let incompatible: Vec<_> = output_types + let incompatible: Vec<_> = output_types.0 .iter() .map(|ot_path| ot_path.0) .filter(|ot| !ot.is_compatible_with_codegen_units_and_single_output_file()) @@ -2160,20 +2108,6 @@ pub fn build_session_options_and_crate_config( } } - if debugging_opts.threads == Some(0) { - early_error( - error_format, - "value for threads must be a positive non-zero integer", - ); - } - - if debugging_opts.threads.unwrap_or(1) > 1 && debugging_opts.fuel.is_some() { - early_error( - error_format, - "optimization fuel is incompatible with multiple threads", - ); - } - if codegen_units == Some(0) { early_error( error_format, @@ -2181,8 +2115,32 @@ pub fn build_session_options_and_crate_config( ); } - let incremental = match (&debugging_opts.incremental, &cg.incremental) { - (&Some(ref path1), &Some(ref path2)) => { + (disable_thinlto, codegen_units) +} + +fn check_thread_count(debugging_opts: &DebuggingOptions, error_format: ErrorOutputType) { + if debugging_opts.threads == 0 { + early_error( + error_format, + "value for threads must be a positive non-zero integer", + ); + } + + if debugging_opts.threads > 1 && debugging_opts.fuel.is_some() { + early_error( + error_format, + "optimization fuel is incompatible with multiple threads", + ); + } +} + +fn select_incremental_path( + debugging_opts: &DebuggingOptions, + cg: &CodegenOptions, + error_format: ErrorOutputType, +) -> Option { + match (&debugging_opts.incremental, &cg.incremental) { + (Some(path1), Some(path2)) => { if path1 != path2 { early_error( error_format, @@ -2196,25 +2154,19 @@ pub fn build_session_options_and_crate_config( Some(path1) } } - (&Some(ref path), &None) => Some(path), - (&None, &Some(ref path)) => Some(path), - (&None, &None) => None, - }.map(|m| PathBuf::from(m)); - - if debugging_opts.profile && incremental.is_some() { - early_error( - error_format, - "can't instrument with gcov profiling when compiling incrementally", - ); - } - - if cg.profile_generate.enabled() && cg.profile_use.is_some() { - early_error( - error_format, - "options `-C profile-generate` and `-C profile-use` are exclusive", - ); - } + (Some(path), None) => Some(path), + (None, Some(path)) => Some(path), + (None, None) => None, + }.map(|m| PathBuf::from(m)) +} +fn collect_print_requests( + cg: &mut CodegenOptions, + dopts: &mut DebuggingOptions, + matches: &getopts::Matches, + is_unstable_enabled: bool, + error_format: ErrorOutputType, +) -> Vec { let mut prints = Vec::::new(); if cg.target_cpu.as_ref().map_or(false, |s| s == "help") { prints.push(PrintRequest::TargetCPUs); @@ -2232,72 +2184,105 @@ pub fn build_session_options_and_crate_config( prints.push(PrintRequest::CodeModels); cg.code_model = None; } - if debugging_opts + if dopts .tls_model .as_ref() .map_or(false, |s| s == "help") { prints.push(PrintRequest::TlsModels); - debugging_opts.tls_model = None; + dopts.tls_model = None; } - let cg = cg; + prints.extend(matches.opt_strs("print").into_iter().map(|s| match &*s { + "crate-name" => PrintRequest::CrateName, + "file-names" => PrintRequest::FileNames, + "sysroot" => PrintRequest::Sysroot, + "cfg" => PrintRequest::Cfg, + "target-list" => PrintRequest::TargetList, + "target-cpus" => PrintRequest::TargetCPUs, + "target-features" => PrintRequest::TargetFeatures, + "relocation-models" => PrintRequest::RelocationModels, + "code-models" => PrintRequest::CodeModels, + "tls-models" => PrintRequest::TlsModels, + "native-static-libs" => PrintRequest::NativeStaticLibs, + "target-spec-json" => { + if is_unstable_enabled { + 
PrintRequest::TargetSpec + } else { + early_error( + error_format, + "the `-Z unstable-options` flag must also be passed to \ + enable the target-spec-json print option", + ); + } + } + req => early_error(error_format, &format!("unknown print request `{}`", req)), + })); - let sysroot_opt = matches.opt_str("sysroot").map(|m| PathBuf::from(&m)); - let target_triple = if let Some(target) = matches.opt_str("target") { - if target.ends_with(".json") { + prints +} + +fn parse_target_triple(matches: &getopts::Matches, error_format: ErrorOutputType) -> TargetTriple { + match matches.opt_str("target") { + Some(target) if target.ends_with(".json") => { let path = Path::new(&target); TargetTriple::from_path(&path).unwrap_or_else(|_| early_error(error_format, &format!("target file {:?} does not exist", path))) - } else { - TargetTriple::TargetTriple(target) } + Some(target) => TargetTriple::TargetTriple(target), + _ => TargetTriple::from_triple(host_triple()), + } +} + +fn parse_opt_level( + matches: &getopts::Matches, + cg: &CodegenOptions, + error_format: ErrorOutputType, +) -> OptLevel { + // The `-O` and `-C opt-level` flags specify the same setting, so we want to be able + // to use them interchangeably. However, because they're technically different flags, + // we need to work out manually which should take precedence if both are supplied (i.e. + // the rightmost flag). We do this by finding the (rightmost) position of both flags and + // comparing them. Note that if a flag is not found, its position will be `None`, which + // always compared less than `Some(_)`. + let max_o = matches.opt_positions("O").into_iter().max(); + let max_c = matches.opt_strs_pos("C").into_iter().flat_map(|(i, s)| { + if let Some("opt-level") = s.splitn(2, '=').next() { + Some(i) + } else { + None + } + }).max(); + if max_o > max_c { + OptLevel::Default } else { - TargetTriple::from_triple(host_triple()) - }; - let opt_level = { - // The `-O` and `-C opt-level` flags specify the same setting, so we want to be able - // to use them interchangeably. However, because they're technically different flags, - // we need to work out manually which should take precedence if both are supplied (i.e. - // the rightmost flag). We do this by finding the (rightmost) position of both flags and - // comparing them. Note that if a flag is not found, its position will be `None`, which - // always compared less than `Some(_)`. 
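// Worked example of the precedence rule (editorial note, not part of this
// patch): for `rustc -O -C opt-level=z foo.rs`, `-O` occupies an earlier
// argument position than `-C opt-level`, so `max_o < max_c` and the later
// `-C opt-level=z` wins, giving `OptLevel::SizeMin`. For
// `rustc -C opt-level=1 -O foo.rs` the comparison flips and the trailing
// `-O` wins, giving `OptLevel::Default` (`-O` behaves like `opt-level=2`).
// If neither flag is passed, both positions are `None`, the `>` comparison
// is false, and the `match` on `cg.opt_level` falls through to `OptLevel::No`.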
- let max_o = matches.opt_positions("O").into_iter().max(); - let max_c = matches.opt_strs_pos("C").into_iter().flat_map(|(i, s)| { - if let Some("opt-level") = s.splitn(2, '=').next() { - Some(i) - } else { - None - } - }).max(); - if max_o > max_c { - OptLevel::Default - } else { - match cg.opt_level.as_ref().map(String::as_ref) { - None => OptLevel::No, - Some("0") => OptLevel::No, - Some("1") => OptLevel::Less, - Some("2") => OptLevel::Default, - Some("3") => OptLevel::Aggressive, - Some("s") => OptLevel::Size, - Some("z") => OptLevel::SizeMin, - Some(arg) => { - early_error( - error_format, - &format!( - "optimization level needs to be \ - between 0-3, s or z (instead was `{}`)", - arg - ), - ); - } + match cg.opt_level.as_ref().map(String::as_ref) { + None => OptLevel::No, + Some("0") => OptLevel::No, + Some("1") => OptLevel::Less, + Some("2") => OptLevel::Default, + Some("3") => OptLevel::Aggressive, + Some("s") => OptLevel::Size, + Some("z") => OptLevel::SizeMin, + Some(arg) => { + early_error( + error_format, + &format!( + "optimization level needs to be \ + between 0-3, s or z (instead was `{}`)", + arg + ), + ); } } - }; - // The `-g` and `-C debuginfo` flags specify the same setting, so we want to be able - // to use them interchangeably. See the note above (regarding `-O` and `-C opt-level`) - // for more details. - let debug_assertions = cg.debug_assertions.unwrap_or(opt_level == OptLevel::No); + } +} + +fn select_debuginfo( + matches: &getopts::Matches, + cg: &CodegenOptions, + error_format: ErrorOutputType, +) -> DebugInfo { let max_g = matches.opt_positions("g").into_iter().max(); let max_c = matches.opt_strs_pos("C").into_iter().flat_map(|(i, s)| { if let Some("debuginfo") = s.splitn(2, '=').next() { @@ -2306,7 +2291,7 @@ pub fn build_session_options_and_crate_config( None } }).max(); - let debuginfo = if max_g > max_c { + if max_g > max_c { DebugInfo::Full } else { match cg.debuginfo { @@ -2324,14 +2309,14 @@ pub fn build_session_options_and_crate_config( ); } } - }; - - let mut search_paths = vec![]; - for s in &matches.opt_strs("L") { - search_paths.push(SearchPath::from_cli_opt(&s[..], error_format)); } +} - let libs = matches +fn parse_libs( + matches: &getopts::Matches, + error_format: ErrorOutputType, +) -> Vec<(String, Option, Option)> { + matches .opt_strs("l") .into_iter() .map(|s| { @@ -2370,52 +2355,23 @@ pub fn build_session_options_and_crate_config( let new_name = name_parts.next(); (name.to_owned(), new_name.map(|n| n.to_owned()), kind) }) - .collect(); + .collect() +} - let cfg = parse_cfgspecs(matches.opt_strs("cfg")); - let test = matches.opt_present("test"); - - let is_unstable_enabled = nightly_options::is_unstable_enabled(matches); - - prints.extend(matches.opt_strs("print").into_iter().map(|s| match &*s { - "crate-name" => PrintRequest::CrateName, - "file-names" => PrintRequest::FileNames, - "sysroot" => PrintRequest::Sysroot, - "cfg" => PrintRequest::Cfg, - "target-list" => PrintRequest::TargetList, - "target-cpus" => PrintRequest::TargetCPUs, - "target-features" => PrintRequest::TargetFeatures, - "relocation-models" => PrintRequest::RelocationModels, - "code-models" => PrintRequest::CodeModels, - "tls-models" => PrintRequest::TlsModels, - "native-static-libs" => PrintRequest::NativeStaticLibs, - "target-spec-json" => { - if is_unstable_enabled { - PrintRequest::TargetSpec - } else { - early_error( - error_format, - "the `-Z unstable-options` flag must also be passed to \ - enable the target-spec-json print option", - ); - } - } - req => 
early_error(error_format, &format!("unknown print request `{}`", req)), - })); - - let borrowck_mode = match debugging_opts.borrowck.as_ref().map(|s| &s[..]) { +fn parse_borrowck_mode(dopts: &DebuggingOptions, error_format: ErrorOutputType) -> BorrowckMode { + match dopts.borrowck.as_ref().map(|s| &s[..]) { None | Some("migrate") => BorrowckMode::Migrate, Some("mir") => BorrowckMode::Mir, Some(m) => early_error(error_format, &format!("unknown borrowck mode `{}`", m)), - }; - - if !cg.remark.is_empty() && debuginfo == DebugInfo::None { - early_warn( - error_format, - "-C remark requires \"-C debuginfo=n\" to show source locations", - ); } +} +fn parse_externs( + matches: &getopts::Matches, + debugging_opts: &DebuggingOptions, + error_format: ErrorOutputType, + is_unstable_enabled: bool, +) -> Externs { if matches.opt_present("extern-private") && !debugging_opts.unstable_options { early_error( ErrorOutputType::default(), @@ -2456,10 +2412,14 @@ pub fn build_session_options_and_crate_config( // flag entry.is_private_dep |= private; } + Externs(externs) +} - let crate_name = matches.opt_str("crate-name"); - - let remap_path_prefix = matches +fn parse_remap_path_prefix( + matches: &getopts::Matches, + error_format: ErrorOutputType +) -> Vec<(PathBuf, PathBuf)> { + matches .opt_strs("remap-path-prefix") .into_iter() .map(|remap| { @@ -2474,42 +2434,130 @@ pub fn build_session_options_and_crate_config( ), } }) - .collect(); + .collect() +} - ( - Options { - crate_types, - optimize: opt_level, - debuginfo, - lint_opts, - lint_cap, - describe_lints, - output_types: OutputTypes(output_types), - search_paths, - maybe_sysroot: sysroot_opt, - target_triple, - test, - incremental, - debugging_opts, - prints, - borrowck_mode, - cg, +pub fn build_session_options(matches: &getopts::Matches) -> Options { + let color = parse_color(matches); + + let edition = parse_crate_edition(matches); + + let (json_rendered, json_artifact_notifications) = parse_json(matches); + + let error_format = parse_error_format(matches, color, json_rendered); + + let unparsed_crate_types = matches.opt_strs("crate-type"); + let crate_types = parse_crate_types_from_list(unparsed_crate_types) + .unwrap_or_else(|e| early_error(error_format, &e[..])); + + let (lint_opts, describe_lints, lint_cap) = get_cmd_lint_options(matches, error_format); + + let mut debugging_opts = build_debugging_options(matches, error_format); + check_debug_option_stability(&debugging_opts, error_format, json_rendered); + + let output_types = parse_output_types(&debugging_opts, matches, error_format); + + let mut cg = build_codegen_options(matches, error_format); + let (disable_thinlto, codegen_units) = should_override_cgus_and_disable_thinlto( + &output_types, + matches, + error_format, + cg.codegen_units, + ); + + check_thread_count(&debugging_opts, error_format); + + let incremental = select_incremental_path(&debugging_opts, &cg, error_format); + + if debugging_opts.profile && incremental.is_some() { + early_error( error_format, - externs: Externs(externs), - crate_name, - alt_std_name: None, - libs, - unstable_features: UnstableFeatures::from_environment(), - debug_assertions, - actually_rustdoc: false, - cli_forced_codegen_units: codegen_units, - cli_forced_thinlto_off: disable_thinlto, - remap_path_prefix, - edition, - json_artifact_notifications, - }, - cfg, - ) + "can't instrument with gcov profiling when compiling incrementally", + ); + } + + if cg.profile_generate.enabled() && cg.profile_use.is_some() { + early_error( + error_format, + "options `-C 
profile-generate` and `-C profile-use` are exclusive", + ); + } + + let is_unstable_enabled = nightly_options::is_unstable_enabled(matches); + let prints = collect_print_requests( + &mut cg, + &mut debugging_opts, + matches, + is_unstable_enabled, + error_format, + ); + + let cg = cg; + + let sysroot_opt = matches.opt_str("sysroot").map(|m| PathBuf::from(&m)); + let target_triple = parse_target_triple(matches, error_format); + let opt_level = parse_opt_level(matches, &cg, error_format); + // The `-g` and `-C debuginfo` flags specify the same setting, so we want to be able + // to use them interchangeably. See the note above (regarding `-O` and `-C opt-level`) + // for more details. + let debug_assertions = cg.debug_assertions.unwrap_or(opt_level == OptLevel::No); + let debuginfo = select_debuginfo(matches, &cg, error_format); + + let mut search_paths = vec![]; + for s in &matches.opt_strs("L") { + search_paths.push(SearchPath::from_cli_opt(&s[..], error_format)); + } + + let libs = parse_libs(matches, error_format); + + let test = matches.opt_present("test"); + + let borrowck_mode = parse_borrowck_mode(&debugging_opts, error_format); + + if !cg.remark.is_empty() && debuginfo == DebugInfo::None { + early_warn( + error_format, + "-C remark requires \"-C debuginfo=n\" to show source locations", + ); + } + + let externs = parse_externs(matches, &debugging_opts, error_format, is_unstable_enabled); + + let crate_name = matches.opt_str("crate-name"); + + let remap_path_prefix = parse_remap_path_prefix(matches, error_format); + + Options { + crate_types, + optimize: opt_level, + debuginfo, + lint_opts, + lint_cap, + describe_lints, + output_types, + search_paths, + maybe_sysroot: sysroot_opt, + target_triple, + test, + incremental, + debugging_opts, + prints, + borrowck_mode, + cg, + error_format, + externs, + crate_name, + alt_std_name: None, + libs, + unstable_features: UnstableFeatures::from_environment(), + debug_assertions, + actually_rustdoc: false, + cli_forced_codegen_units: codegen_units, + cli_forced_thinlto_off: disable_thinlto, + remap_path_prefix, + edition, + json_artifact_notifications, + } } pub fn make_crate_type_option() -> RustcOptGroup { @@ -2772,6 +2820,3 @@ mod dep_tracking { } } } - -#[cfg(test)] -mod tests; diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index a24fed8f21..13b76b79b3 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -6,17 +6,14 @@ use crate::hir::def_id::CrateNum; use rustc_data_structures::fingerprint::Fingerprint; use crate::lint; -use crate::lint::builtin::BuiltinLintDiagnostics; -use crate::middle::dependency_format; -use crate::session::config::{OutputType, PrintRequest, SwitchWithOptPath}; +use crate::session::config::{OutputType, PrintRequest, Sanitizer, SwitchWithOptPath}; use crate::session::search_paths::{PathKind, SearchPath}; use crate::util::nodemap::{FxHashMap, FxHashSet}; use crate::util::common::{duration_to_secs_str, ErrorReported}; -use crate::util::common::ProfileQueriesMsg; use rustc_data_structures::base_n; use rustc_data_structures::sync::{ - self, Lrc, Lock, OneThread, Once, RwLock, AtomicU64, AtomicUsize, Ordering, + self, Lrc, Lock, OneThread, Once, AtomicU64, AtomicUsize, Ordering, Ordering::SeqCst, }; @@ -26,14 +23,14 @@ use errors::emitter::HumanReadableErrorType; use errors::annotate_snippet_emitter_writer::{AnnotateSnippetEmitterWriter}; use syntax::ast::{self, NodeId}; use syntax::edition::Edition; -use syntax::ext::allocator::AllocatorKind; +use 
syntax::expand::allocator::AllocatorKind; use syntax::feature_gate::{self, AttributeType}; use syntax::json::JsonEmitter; use syntax::source_map; -use syntax::parse::{self, ParseSess}; +use syntax::sess::ParseSess; use syntax::symbol::Symbol; use syntax_pos::{MultiSpan, Span}; -use crate::util::profiling::SelfProfiler; +use crate::util::profiling::{SelfProfiler, SelfProfilerRef}; use rustc_target::spec::{PanicStrategy, RelroLevel, Target, TargetTriple}; use rustc_data_structures::flock; @@ -47,7 +44,7 @@ use std::fmt; use std::io::Write; use std::path::PathBuf; use std::time::Duration; -use std::sync::{Arc, mpsc}; +use std::sync::Arc; mod code_stats; pub mod config; @@ -79,11 +76,6 @@ pub struct Session { /// if the value stored here has been affected by path remapping. pub working_dir: (PathBuf, bool), - // FIXME: `lint_store` and `buffered_lints` are not thread-safe, - // but are only used in a single thread. - pub lint_store: RwLock, - pub buffered_lints: Lock>, - /// Set of `(DiagnosticId, Option, message)` tuples tracking /// (sub)diagnostics that have been set once, but should not be set again, /// in order to avoid redundantly verbose output (Issue #24690, #44953). @@ -91,7 +83,6 @@ pub struct Session { pub plugin_llvm_passes: OneThread>>, pub plugin_attributes: Lock>, pub crate_types: Once>, - pub dependency_formats: Once, /// The `crate_disambiguator` is constructed out of all the `-C metadata` /// arguments passed to the compiler. Its value together with the crate-name /// forms a unique global identifier for the crate. It is used to allow @@ -127,11 +118,8 @@ pub struct Session { /// `-Zquery-dep-graph` is specified. pub cgu_reuse_tracker: CguReuseTracker, - /// Used by `-Z profile-queries` in `util::common`. - pub profile_channel: Lock>>, - /// Used by `-Z self-profile`. - pub self_profiling: Option>, + pub prof: SelfProfilerRef, /// Some measurements that are being gathered during compilation. 
pub perf_stats: PerfStats, @@ -316,11 +304,15 @@ impl Session { pub fn has_errors(&self) -> bool { self.diagnostic().has_errors() } + pub fn has_errors_or_delayed_span_bugs(&self) -> bool { + self.diagnostic().has_errors_or_delayed_span_bugs() + } pub fn abort_if_errors(&self) { self.diagnostic().abort_if_errors(); } pub fn compile_status(&self) -> Result<(), ErrorReported> { if self.has_errors() { + self.diagnostic().emit_stashed_diagnostics(); Err(ErrorReported) } else { Ok(()) @@ -366,35 +358,6 @@ impl Session { self.diagnostic().span_note_without_error(sp, msg) } - pub fn buffer_lint>( - &self, - lint: &'static lint::Lint, - id: ast::NodeId, - sp: S, - msg: &str, - ) { - match *self.buffered_lints.borrow_mut() { - Some(ref mut buffer) => { - buffer.add_lint(lint, id, sp.into(), msg, BuiltinLintDiagnostics::Normal) - } - None => bug!("can't buffer lints after HIR lowering"), - } - } - - pub fn buffer_lint_with_diagnostic>( - &self, - lint: &'static lint::Lint, - id: ast::NodeId, - sp: S, - msg: &str, - diagnostic: BuiltinLintDiagnostics, - ) { - match *self.buffered_lints.borrow_mut() { - Some(ref mut buffer) => buffer.add_lint(lint, id, sp.into(), msg, diagnostic), - None => bug!("can't buffer lints after HIR lowering"), - } - } - pub fn reserve_node_ids(&self, count: usize) -> ast::NodeId { let id = self.next_node_id.get(); @@ -510,13 +473,6 @@ impl Session { pub fn time_extended(&self) -> bool { self.opts.debugging_opts.time_passes } - pub fn profile_queries(&self) -> bool { - self.opts.debugging_opts.profile_queries - || self.opts.debugging_opts.profile_queries_and_keys - } - pub fn profile_queries_and_keys(&self) -> bool { - self.opts.debugging_opts.profile_queries_and_keys - } pub fn instrument_mcount(&self) -> bool { self.opts.debugging_opts.instrument_mcount } @@ -638,6 +594,14 @@ impl Session { .output_types .contains_key(&OutputType::LlvmAssembly) || self.opts.output_types.contains_key(&OutputType::Bitcode); + + // Address sanitizer and memory sanitizer use alloca name when reporting an issue. 
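// Put differently (editorial note): after this change the LLVM value names
// are kept whenever LLVM IR or bitcode is being emitted *or* AddressSanitizer /
// MemorySanitizer is enabled, since both sanitizers print the alloca name in
// their reports; an explicit `-Z fewer-names` still forces the short names.
//
//     sanitizer    --emit       -Z fewer-names    value names kept?
//     (none)       link         (unset)           no
//     (none)       llvm-ir      (unset)           yes
//     address      link         (unset)           yes
//     address      link         yes               no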
+ let more_names = match self.opts.debugging_opts.sanitizer { + Some(Sanitizer::Address) => true, + Some(Sanitizer::Memory) => true, + _ => more_names, + }; + self.opts.debugging_opts.fewer_names || !more_names } @@ -836,24 +800,6 @@ impl Session { } } - #[inline(never)] - #[cold] - fn profiler_active ()>(&self, f: F) { - match &self.self_profiling { - None => bug!("profiler_active() called but there was no profiler active"), - Some(profiler) => { - f(&profiler); - } - } - } - - #[inline(always)] - pub fn profiler ()>(&self, f: F) { - if unlikely!(self.self_profiling.is_some()) { - self.profiler_active(f) - } - } - pub fn print_perf_stats(&self) { println!( "Total time spent computing symbol hashes: {}", @@ -897,16 +843,10 @@ impl Session { ret } - /// Returns the number of query threads that should be used for this - /// compilation - pub fn threads_from_count(query_threads: Option) -> usize { - query_threads.unwrap_or(::num_cpus::get()) - } - /// Returns the number of query threads that should be used for this /// compilation pub fn threads(&self) -> usize { - Self::threads_from_count(self.opts.debugging_opts.threads) + self.opts.debugging_opts.threads } /// Returns the number of codegen units that should be used for this @@ -1187,7 +1127,7 @@ fn build_session_( ); let target_cfg = config::build_target_config(&sopts, &span_diagnostic); - let parse_sess = parse::ParseSess::with_span_handler( + let parse_sess = ParseSess::with_span_handler( span_diagnostic, source_map, ); @@ -1241,13 +1181,10 @@ fn build_session_( sysroot, local_crate_source_file, working_dir, - lint_store: RwLock::new(lint::LintStore::new()), - buffered_lints: Lock::new(Some(Default::default())), one_time_diagnostics: Default::default(), plugin_llvm_passes: OneThread::new(RefCell::new(Vec::new())), plugin_attributes: Lock::new(Vec::new()), crate_types: Once::new(), - dependency_formats: Once::new(), crate_disambiguator: Once::new(), features: Once::new(), recursion_limit: Once::new(), @@ -1259,8 +1196,7 @@ fn build_session_( imported_macro_spans: OneThread::new(RefCell::new(FxHashMap::default())), incr_comp_session: OneThread::new(RefCell::new(IncrCompSession::NotInitialized)), cgu_reuse_tracker, - self_profiling: self_profiler, - profile_channel: Lock::new(None), + prof: SelfProfilerRef::new(self_profiler), perf_stats: PerfStats { symbol_hash_time: Lock::new(Duration::from_secs(0)), decode_def_path_tables_time: Lock::new(Duration::from_secs(0)), diff --git a/src/librustc/session/search_paths.rs b/src/librustc/session/search_paths.rs index 3695f0a82f..949dad751a 100644 --- a/src/librustc/session/search_paths.rs +++ b/src/librustc/session/search_paths.rs @@ -1,5 +1,4 @@ use std::path::{Path, PathBuf}; -use rustc_macros::HashStable; use crate::session::{early_error, config}; use crate::session::filesearch::make_target_lib_path; @@ -10,7 +9,7 @@ pub struct SearchPath { pub files: Vec, } -#[derive(Eq, PartialEq, Clone, Copy, Debug, PartialOrd, Ord, Hash, HashStable)] +#[derive(PartialEq, Clone, Copy, Debug, HashStable)] pub enum PathKind { Native, Crate, diff --git a/src/librustc/traits/auto_trait.rs b/src/librustc/traits/auto_trait.rs index d89cf8eb3e..9faf58aee6 100644 --- a/src/librustc/traits/auto_trait.rs +++ b/src/librustc/traits/auto_trait.rs @@ -321,7 +321,7 @@ impl AutoTraitFinder<'tcx> { match vtable { Vtable::VtableImpl(VtableImplData { impl_def_id, .. 
}) => { // Blame tidy for the weird bracket placement - if infcx.tcx.impl_polarity(*impl_def_id) == hir::ImplPolarity::Negative + if infcx.tcx.impl_polarity(*impl_def_id) == ty::ImplPolarity::Negative { debug!("evaluate_nested_obligations: Found explicit negative impl\ {:?}, bailing out", impl_def_id); @@ -601,7 +601,7 @@ impl AutoTraitFinder<'tcx> { } pub fn is_of_param(&self, ty: Ty<'_>) -> bool { - return match ty.sty { + return match ty.kind { ty::Param(_) => true, ty::Projection(p) => self.is_of_param(p.self_ty()), _ => false, @@ -609,7 +609,7 @@ impl AutoTraitFinder<'tcx> { } fn is_self_referential_projection(&self, p: ty::PolyProjectionPredicate<'_>) -> bool { - match p.ty().skip_binder().sty { + match p.ty().skip_binder().kind { ty::Projection(proj) if proj == p.skip_binder().projection_ty => { true }, diff --git a/src/librustc/traits/chalk_fulfill.rs b/src/librustc/traits/chalk_fulfill.rs index a7e1f2a6a7..d9e83df7dd 100644 --- a/src/librustc/traits/chalk_fulfill.rs +++ b/src/librustc/traits/chalk_fulfill.rs @@ -108,7 +108,7 @@ impl TraitEngine<'tcx> for FulfillmentContext<'tcx> { goal: obligation.goal.predicate, }, &mut orig_values); - match infcx.tcx.global_tcx().evaluate_goal(canonical_goal) { + match infcx.tcx.evaluate_goal(canonical_goal) { Ok(response) => { if response.is_proven() { making_progress = true; diff --git a/src/librustc/traits/codegen/mod.rs b/src/librustc/traits/codegen/mod.rs index 97fb430a3e..9dff699deb 100644 --- a/src/librustc/traits/codegen/mod.rs +++ b/src/librustc/traits/codegen/mod.rs @@ -3,12 +3,10 @@ // seems likely that they should eventually be merged into more // general routines. -use crate::dep_graph::{DepKind, DepTrackingMapConfig}; -use std::marker::PhantomData; use crate::infer::InferCtxt; use crate::traits::{FulfillmentContext, Obligation, ObligationCause, SelectionContext, TraitEngine, Vtable}; -use crate::ty::{self, Ty, TyCtxt}; +use crate::ty::{self, TyCtxt}; use crate::ty::subst::{Subst, SubstsRef}; use crate::ty::fold::TypeFoldable; @@ -100,33 +98,8 @@ impl<'tcx> TyCtxt<'tcx> { } } -// Implement DepTrackingMapConfig for `trait_cache` -pub struct TraitSelectionCache<'tcx> { - data: PhantomData<&'tcx ()> -} - -impl<'tcx> DepTrackingMapConfig for TraitSelectionCache<'tcx> { - type Key = (ty::ParamEnv<'tcx>, ty::PolyTraitRef<'tcx>); - type Value = Vtable<'tcx, ()>; - fn to_dep_kind() -> DepKind { - DepKind::TraitSelect - } -} - // # Global Cache -pub struct ProjectionCache<'tcx> { - data: PhantomData<&'tcx ()>, -} - -impl<'tcx> DepTrackingMapConfig for ProjectionCache<'tcx> { - type Key = Ty<'tcx>; - type Value = Ty<'tcx>; - fn to_dep_kind() -> DepKind { - DepKind::TraitSelect - } -} - impl<'a, 'tcx> InferCtxt<'a, 'tcx> { /// Finishes processes any obligations that remain in the /// fulfillment context, and then returns the result with all type diff --git a/src/librustc/traits/coherence.rs b/src/librustc/traits/coherence.rs index b6f0addd77..49a4d17d88 100644 --- a/src/librustc/traits/coherence.rs +++ b/src/librustc/traits/coherence.rs @@ -237,7 +237,7 @@ pub fn trait_ref_is_local_or_fundamental<'tcx>( } pub enum OrphanCheckErr<'tcx> { - NoLocalInputType, + NonLocalInputType(Vec<(Ty<'tcx>, bool /* Is this the first input type? */)>), UncoveredTy(Ty<'tcx>), } @@ -355,7 +355,7 @@ pub fn orphan_check( /// Note that this function is never called for types that have both type /// parameters and inference variables. 
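// A rough illustration of what the `#[fundamental]` handling below buys
// (editorial sketch; `Local` and `RemoteTrait` are hypothetical, with
// `RemoteTrait` defined in an upstream crate and `Local` in the crate being
// checked):
//
//     struct Local;
//
//     // OK: `Box` and `&` are #[fundamental], so the orphan check looks
//     // through them and finds the local type `Local`.
//     impl RemoteTrait for Box<Local> {}
//     impl<'a> RemoteTrait for &'a Local {}
//
//     // Error (E0117): after looking through the fundamental constructor
//     // only non-local types remain. The new
//     // `OrphanCheckErr::NonLocalInputType` records each of them (here
//     // `u32`) together with whether it occurred in the first input type,
//     // so the error can point at the offending types individually.
//     impl RemoteTrait for Box<u32> {}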
fn orphan_check_trait_ref<'tcx>( - tcx: TyCtxt<'_>, + tcx: TyCtxt<'tcx>, trait_ref: ty::TraitRef<'tcx>, in_crate: InCrate, ) -> Result<(), OrphanCheckErr<'tcx>> { @@ -378,26 +378,51 @@ fn orphan_check_trait_ref<'tcx>( // Let Ti be the first such type. // - No uncovered type parameters P1..=Pn may appear in T0..Ti (excluding Ti) // - for input_ty in trait_ref.input_types() { + fn uncover_fundamental_ty<'tcx>( + tcx: TyCtxt<'tcx>, + ty: Ty<'tcx>, + in_crate: InCrate, + ) -> Vec> { + if fundamental_ty(ty) && ty_is_non_local(tcx, ty, in_crate).is_some() { + ty.walk_shallow().flat_map(|ty| uncover_fundamental_ty(tcx, ty, in_crate)).collect() + } else { + vec![ty] + } + } + + let mut non_local_spans = vec![]; + for (i, input_ty) in trait_ref + .input_types() + .flat_map(|ty| uncover_fundamental_ty(tcx, ty, in_crate)) + .enumerate() + { debug!("orphan_check_trait_ref: check ty `{:?}`", input_ty); - if ty_is_local(tcx, input_ty, in_crate) { + let non_local_tys = ty_is_non_local(tcx, input_ty, in_crate); + if non_local_tys.is_none() { debug!("orphan_check_trait_ref: ty_is_local `{:?}`", input_ty); return Ok(()); - } else if let ty::Param(_) = input_ty.sty { + } else if let ty::Param(_) = input_ty.kind { debug!("orphan_check_trait_ref: uncovered ty: `{:?}`", input_ty); return Err(OrphanCheckErr::UncoveredTy(input_ty)) } + if let Some(non_local_tys) = non_local_tys { + for input_ty in non_local_tys { + non_local_spans.push((input_ty, i == 0)); + } + } } // If we exit above loop, never found a local type. debug!("orphan_check_trait_ref: no local type"); - Err(OrphanCheckErr::NoLocalInputType) + Err(OrphanCheckErr::NonLocalInputType(non_local_spans)) } else { + let mut non_local_spans = vec![]; // First, create an ordered iterator over all the type // parameters to the trait, with the self type appearing // first. Find the first input type that either references a // type parameter OR some local type. - for input_ty in trait_ref.input_types() { - if ty_is_local(tcx, input_ty, in_crate) { + for (i, input_ty) in trait_ref.input_types().enumerate() { + let non_local_tys = ty_is_non_local(tcx, input_ty, in_crate); + if non_local_tys.is_none() { debug!("orphan_check_trait_ref: ty_is_local `{:?}`", input_ty); // First local input type. Check that there are no @@ -424,15 +449,21 @@ fn orphan_check_trait_ref<'tcx>( debug!("orphan_check_trait_ref: uncovered type `{:?}`", param); return Err(OrphanCheckErr::UncoveredTy(param)); } + + if let Some(non_local_tys) = non_local_tys { + for input_ty in non_local_tys { + non_local_spans.push((input_ty, i == 0)); + } + } } // If we exit above loop, never found a local type. debug!("orphan_check_trait_ref: no local type"); - Err(OrphanCheckErr::NoLocalInputType) + Err(OrphanCheckErr::NonLocalInputType(non_local_spans)) } } -fn uncovered_tys<'tcx>(tcx: TyCtxt<'_>, ty: Ty<'tcx>, in_crate: InCrate) -> Vec> { - if ty_is_local_constructor(ty, in_crate) { +fn uncovered_tys<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, in_crate: InCrate) -> Vec> { + if ty_is_non_local_constructor(tcx, ty, in_crate).is_none() { vec![] } else if fundamental_ty(ty) { ty.walk_shallow() @@ -444,19 +475,33 @@ fn uncovered_tys<'tcx>(tcx: TyCtxt<'_>, ty: Ty<'tcx>, in_crate: InCrate) -> Vec< } fn is_possibly_remote_type(ty: Ty<'_>, _in_crate: InCrate) -> bool { - match ty.sty { + match ty.kind { ty::Projection(..) | ty::Param(..) 
=> true, _ => false, } } -fn ty_is_local(tcx: TyCtxt<'_>, ty: Ty<'_>, in_crate: InCrate) -> bool { - ty_is_local_constructor(ty, in_crate) || - fundamental_ty(ty) && ty.walk_shallow().any(|t| ty_is_local(tcx, t, in_crate)) +fn ty_is_non_local<'t>(tcx: TyCtxt<'t>, ty: Ty<'t>, in_crate: InCrate) -> Option>> { + match ty_is_non_local_constructor(tcx, ty, in_crate) { + Some(ty) => if !fundamental_ty(ty) { + Some(vec![ty]) + } else { + let tys: Vec<_> = ty.walk_shallow() + .filter_map(|t| ty_is_non_local(tcx, t, in_crate)) + .flat_map(|i| i) + .collect(); + if tys.is_empty() { + None + } else { + Some(tys) + } + }, + None => None, + } } fn fundamental_ty(ty: Ty<'_>) -> bool { - match ty.sty { + match ty.kind { ty::Ref(..) => true, ty::Adt(def, _) => def.is_fundamental(), _ => false @@ -472,10 +517,14 @@ fn def_id_is_local(def_id: DefId, in_crate: InCrate) -> bool { } } -fn ty_is_local_constructor(ty: Ty<'_>, in_crate: InCrate) -> bool { - debug!("ty_is_local_constructor({:?})", ty); +fn ty_is_non_local_constructor<'tcx>( + tcx: TyCtxt<'tcx>, + ty: Ty<'tcx>, + in_crate: InCrate, +) -> Option> { + debug!("ty_is_non_local_constructor({:?})", ty); - match ty.sty { + match ty.kind { ty::Bool | ty::Char | ty::Int(..) | @@ -492,34 +541,54 @@ fn ty_is_local_constructor(ty: Ty<'_>, in_crate: InCrate) -> bool { ty::Tuple(..) | ty::Param(..) | ty::Projection(..) => { - false + Some(ty) } ty::Placeholder(..) | ty::Bound(..) | ty::Infer(..) => match in_crate { - InCrate::Local => false, + InCrate::Local => Some(ty), // The inference variable might be unified with a local // type in that remote crate. - InCrate::Remote => true, + InCrate::Remote => None, }, - ty::Adt(def, _) => def_id_is_local(def.did, in_crate), - ty::Foreign(did) => def_id_is_local(did, in_crate), + ty::Adt(def, _) => if def_id_is_local(def.did, in_crate) { + None + } else { + Some(ty) + }, + ty::Foreign(did) => if def_id_is_local(did, in_crate) { + None + } else { + Some(ty) + }, + ty::Opaque(did, _) => { + // Check the underlying type that this opaque + // type resolves to. + // This recursion will eventually terminate, + // since we've already managed to successfully + // resolve all opaque types by this point + let real_ty = tcx.type_of(did); + ty_is_non_local_constructor(tcx, real_ty, in_crate) + } ty::Dynamic(ref tt, ..) => { if let Some(principal) = tt.principal() { - def_id_is_local(principal.def_id(), in_crate) + if def_id_is_local(principal.def_id(), in_crate) { + None + } else { + Some(ty) + } } else { - false + Some(ty) } } - ty::Error => true, + ty::Error => None, ty::UnnormalizedProjection(..) | ty::Closure(..) | ty::Generator(..) | - ty::GeneratorWitness(..) | - ty::Opaque(..) => { + ty::GeneratorWitness(..) 
=> { bug!("ty_is_local invoked on unexpected type: {:?}", ty) } } diff --git a/src/librustc/traits/error_reporting.rs b/src/librustc/traits/error_reporting.rs index 7935c01f84..1f7bce1c64 100644 --- a/src/librustc/traits/error_reporting.rs +++ b/src/librustc/traits/error_reporting.rs @@ -195,8 +195,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { obligation: &PredicateObligation<'tcx>, error: &MismatchedProjectionTypes<'tcx>, ) { - let predicate = - self.resolve_vars_if_possible(&obligation.predicate); + let predicate = self.resolve_vars_if_possible(&obligation.predicate); if predicate.references_error() { return @@ -227,25 +226,45 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { 0, &mut obligations ); + + debug!("report_projection_error obligation.cause={:?} obligation.param_env={:?}", + obligation.cause, obligation.param_env); + + debug!("report_projection_error normalized_ty={:?} data.ty={:?}", + normalized_ty, data.ty); + + let is_normalized_ty_expected = match &obligation.cause.code { + ObligationCauseCode::ItemObligation(_) | + ObligationCauseCode::BindingObligation(_, _) | + ObligationCauseCode::ObjectCastObligation(_) => false, + _ => true, + }; + if let Err(error) = self.at(&obligation.cause, obligation.param_env) - .eq(normalized_ty, data.ty) { - values = Some(infer::ValuePairs::Types(ExpectedFound { - expected: normalized_ty, - found: data.ty, - })); + .eq_exp(is_normalized_ty_expected, normalized_ty, data.ty) + { + values = Some(infer::ValuePairs::Types( + ExpectedFound::new(is_normalized_ty_expected, normalized_ty, data.ty))); + err_buf = error; err = &err_buf; } } let msg = format!("type mismatch resolving `{}`", predicate); - let error_id = (DiagnosticMessageId::ErrorId(271), - Some(obligation.cause.span), msg); + let error_id = ( + DiagnosticMessageId::ErrorId(271), + Some(obligation.cause.span), + msg, + ); let fresh = self.tcx.sess.one_time_diagnostics.borrow_mut().insert(error_id); if fresh { let mut diag = struct_span_err!( - self.tcx.sess, obligation.cause.span, E0271, - "type mismatch resolving `{}`", predicate + self.tcx.sess, + obligation.cause.span, + E0271, + "type mismatch resolving `{}`", + predicate ); self.note_type_err(&mut diag, &obligation.cause, None, values, err); self.note_obligation_cause(&mut diag, obligation); @@ -258,7 +277,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { /// returns the fuzzy category of a given type, or None /// if the type can be equated to any type. 
fn type_category(t: Ty<'_>) -> Option { - match t.sty { + match t.kind { ty::Bool => Some(0), ty::Char => Some(1), ty::Str => Some(2), @@ -288,7 +307,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { } match (type_category(a), type_category(b)) { - (Some(cat_a), Some(cat_b)) => match (&a.sty, &b.sty) { + (Some(cat_a), Some(cat_b)) => match (&a.kind, &b.kind) { (&ty::Adt(def_a, _), &ty::Adt(def_b, _)) => def_a == def_b, _ => cat_a == cat_b }, @@ -347,6 +366,52 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { } } + fn describe_generator(&self, body_id: hir::BodyId) -> Option<&'static str> { + self.tcx.hir().body(body_id).generator_kind.map(|gen_kind| { + match gen_kind { + hir::GeneratorKind::Gen => "a generator", + hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Block) => "an async block", + hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Fn) => "an async function", + hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Closure) => "an async closure", + } + }) + } + + /// Used to set on_unimplemented's `ItemContext` + /// to be the enclosing (async) block/function/closure + fn describe_enclosure(&self, hir_id: hir::HirId) -> Option<&'static str> { + let hir = &self.tcx.hir(); + let node = hir.find(hir_id)?; + if let hir::Node::Item( + hir::Item{kind: hir::ItemKind::Fn(_ ,fn_header ,_ , body_id), .. }) = &node { + self.describe_generator(*body_id).or_else(|| + Some(if let hir::FnHeader{ asyncness: hir::IsAsync::Async, .. } = fn_header { + "an async function" + } else { + "a function" + }) + ) + } else if let hir::Node::Expr(hir::Expr { + kind: hir::ExprKind::Closure(_is_move, _, body_id, _, gen_movability), .. }) = &node { + self.describe_generator(*body_id).or_else(|| + Some(if gen_movability.is_some() { + "an async closure" + } else { + "a closure" + }) + ) + } else if let hir::Node::Expr(hir::Expr { .. }) = &node { + let parent_hid = hir.get_parent_node(hir_id); + if parent_hid != hir_id { + return self.describe_enclosure(parent_hid); + } else { + None + } + } else { + None + } + } + fn on_unimplemented_note( &self, trait_ref: ty::PolyTraitRef<'tcx>, @@ -357,6 +422,9 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { let trait_ref = *trait_ref.skip_binder(); let mut flags = vec![]; + flags.push((sym::item_context, + self.describe_enclosure(obligation.cause.body_id).map(|s|s.to_owned()))); + match obligation.cause.code { ObligationCauseCode::BuiltinDerivedObligation(..) | ObligationCauseCode::ImplDerivedObligation(..) 
=> {} @@ -406,7 +474,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { }, GenericParamDefKind::Lifetime => continue, }; - let name = param.name.as_symbol(); + let name = param.name; flags.push((name, Some(value))); } @@ -419,7 +487,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { flags.push((sym::_Self, Some("{integral}".to_owned()))); } - if let ty::Array(aty, len) = self_ty.sty { + if let ty::Array(aty, len) = self_ty.kind { flags.push((sym::_Self, Some("[]".to_owned()))); flags.push((sym::_Self, Some(format!("[{}]", aty)))); if let Some(def) = aty.ty_adt_def() { @@ -453,21 +521,17 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { } } - fn find_similar_impl_candidates(&self, - trait_ref: ty::PolyTraitRef<'tcx>) - -> Vec> - { - let simp = fast_reject::simplify_type(self.tcx, - trait_ref.skip_binder().self_ty(), - true); + fn find_similar_impl_candidates( + &self, + trait_ref: ty::PolyTraitRef<'tcx>, + ) -> Vec> { + let simp = fast_reject::simplify_type(self.tcx, trait_ref.skip_binder().self_ty(), true); let all_impls = self.tcx.all_impls(trait_ref.def_id()); match simp { Some(simp) => all_impls.iter().filter_map(|&def_id| { let imp = self.tcx.impl_trait_ref(def_id).unwrap(); - let imp_simp = fast_reject::simplify_type(self.tcx, - imp.self_ty(), - true); + let imp_simp = fast_reject::simplify_type(self.tcx, imp.self_ty(), true); if let Some(imp_simp) = imp_simp { if simp != imp_simp { return None @@ -482,10 +546,11 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { } } - fn report_similar_impl_candidates(&self, - impl_candidates: Vec>, - err: &mut DiagnosticBuilder<'_>) - { + fn report_similar_impl_candidates( + &self, + impl_candidates: Vec>, + err: &mut DiagnosticBuilder<'_>, + ) { if impl_candidates.is_empty() { return; } @@ -497,7 +562,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { 4 }; - let normalize = |candidate| self.tcx.global_tcx().infer_ctxt().enter(|ref infcx| { + let normalize = |candidate| self.tcx.infer_ctxt().enter(|ref infcx| { let normalized = infcx .at(&ObligationCause::dummy(), ty::ParamEnv::empty()) .normalize(candidate) @@ -535,23 +600,33 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { /// whose result could not be truly determined and thus we can't say /// if the program type checks or not -- and they are unusual /// occurrences in any case. - pub fn report_overflow_error(&self, - obligation: &Obligation<'tcx, T>, - suggest_increasing_limit: bool) -> ! + pub fn report_overflow_error( + &self, + obligation: &Obligation<'tcx, T>, + suggest_increasing_limit: bool, + ) -> ! where T: fmt::Display + TypeFoldable<'tcx> { let predicate = self.resolve_vars_if_possible(&obligation.predicate); - let mut err = struct_span_err!(self.tcx.sess, obligation.cause.span, E0275, - "overflow evaluating the requirement `{}`", - predicate); + let mut err = struct_span_err!( + self.tcx.sess, + obligation.cause.span, + E0275, + "overflow evaluating the requirement `{}`", + predicate + ); if suggest_increasing_limit { self.suggest_new_overflow_limit(&mut err); } - self.note_obligation_cause_code(&mut err, &obligation.predicate, &obligation.cause.code, - &mut vec![]); + self.note_obligation_cause_code( + &mut err, + &obligation.predicate, + &obligation.cause.code, + &mut vec![], + ); err.emit(); self.tcx.sess.abort_if_errors(); @@ -718,12 +793,23 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { // these notes will often be of the form // "the type `T` can't be frobnicated" // which is somewhat confusing. 
- err.help(&format!("consider adding a `where {}` bound", - trait_ref.to_predicate())); - } else if !have_alt_message { - // Can't show anything else useful, try to find similar impls. - let impl_candidates = self.find_similar_impl_candidates(trait_ref); - self.report_similar_impl_candidates(impl_candidates, &mut err); + self.suggest_restricting_param_bound( + &mut err, + &trait_ref, + obligation.cause.body_id, + ); + } else { + if !have_alt_message { + // Can't show anything else useful, try to find similar impls. + let impl_candidates = self.find_similar_impl_candidates(trait_ref); + self.report_similar_impl_candidates(impl_candidates, &mut err); + } + self.suggest_change_mut( + &obligation, + &mut err, + &trait_ref, + points_at_arg, + ); } // If this error is due to `!: Trait` not implemented but `(): Trait` is @@ -784,17 +870,12 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { } ty::Predicate::ObjectSafe(trait_def_id) => { - let violations = self.tcx.global_tcx() - .object_safety_violations(trait_def_id); - if let Some(err) = self.tcx.report_object_safety_error( + let violations = self.tcx.object_safety_violations(trait_def_id); + self.tcx.report_object_safety_error( span, trait_def_id, violations, - ) { - err - } else { - return; - } + ) } ty::Predicate::ClosureKind(closure_def_id, closure_substs, kind) => { @@ -877,7 +958,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { let found_trait_ty = found_trait_ref.self_ty(); - let found_did = match found_trait_ty.sty { + let found_did = match found_trait_ty.kind { ty::Closure(did, _) | ty::Foreign(did) | ty::FnDef(did, _) => Some(did), ty::Adt(def, _) => Some(def.did), _ => None, @@ -887,13 +968,21 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { self.tcx.hir().span_if_local(did) ).map(|sp| self.tcx.sess.source_map().def_span(sp)); // the sp could be an fn def - let found = match found_trait_ref.skip_binder().substs.type_at(1).sty { + if self.reported_closure_mismatch.borrow().contains(&(span, found_span)) { + // We check closures twice, with obligations flowing in different directions, + // but we want to complain about them only once. 
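Around this point the error reporter gains a `reported_closure_mismatch` set so that a closure signature mismatch, which is checked once per obligation direction, is reported only once. A minimal standalone sketch of that dedup pattern, with plain `usize` values standing in for rustc's `Span` and `println!` standing in for emitting a diagnostic:

```rust
use std::cell::RefCell;
use std::collections::HashSet;

// Sketch only: the point is the remembered (use span, definition span) pair.
struct Reporter {
    reported_closure_mismatch: RefCell<HashSet<(usize, Option<usize>)>>,
}

impl Reporter {
    fn report_mismatch(&self, span: usize, found_span: Option<usize>) {
        if self.reported_closure_mismatch.borrow().contains(&(span, found_span)) {
            // Already reported for the other obligation direction; stay silent.
            return;
        }
        self.reported_closure_mismatch.borrow_mut().insert((span, found_span));
        println!("error: closure signature mismatch at {:?} / {:?}", span, found_span);
    }
}

fn main() {
    let r = Reporter { reported_closure_mismatch: RefCell::new(HashSet::new()) };
    r.report_mismatch(10, Some(3)); // reported
    r.report_mismatch(10, Some(3)); // deduplicated
}
```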
+ return; + } + + self.reported_closure_mismatch.borrow_mut().insert((span, found_span)); + + let found = match found_trait_ref.skip_binder().substs.type_at(1).kind { ty::Tuple(ref tys) => vec![ArgKind::empty(); tys.len()], _ => vec![ArgKind::empty()], }; let expected_ty = expected_trait_ref.skip_binder().substs.type_at(1); - let expected = match expected_ty.sty { + let expected = match expected_ty.kind { ty::Tuple(ref tys) => tys.iter() .map(|t| ArgKind::from_expected_ty(t.expect_ty(), Some(span))).collect(), _ => vec![ArgKind::Arg("_".to_owned(), expected_ty.to_string())], @@ -921,12 +1010,8 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { } TraitNotObjectSafe(did) => { - let violations = self.tcx.global_tcx().object_safety_violations(did); - if let Some(err) = self.tcx.report_object_safety_error(span, did, violations) { - err - } else { - return; - } + let violations = self.tcx.object_safety_violations(did); + self.tcx.report_object_safety_error(span, did, violations) } // already reported in the query @@ -948,6 +1033,175 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { err.emit(); } + fn suggest_restricting_param_bound( + &self, + err: &mut DiagnosticBuilder<'_>, + trait_ref: &ty::PolyTraitRef<'_>, + body_id: hir::HirId, + ) { + let self_ty = trait_ref.self_ty(); + let (param_ty, projection) = match &self_ty.kind { + ty::Param(_) => (true, None), + ty::Projection(projection) => (false, Some(projection)), + _ => return, + }; + + let mut suggest_restriction = |generics: &hir::Generics, msg| { + let span = generics.where_clause.span_for_predicates_or_empty_place(); + if !span.from_expansion() && span.desugaring_kind().is_none() { + err.span_suggestion( + generics.where_clause.span_for_predicates_or_empty_place().shrink_to_hi(), + &format!("consider further restricting {}", msg), + format!( + "{} {} ", + if !generics.where_clause.predicates.is_empty() { + "," + } else { + " where" + }, + trait_ref.to_predicate(), + ), + Applicability::MachineApplicable, + ); + } + }; + + // FIXME: Add check for trait bound that is already present, particularly `?Sized` so we + // don't suggest `T: Sized + ?Sized`. + let mut hir_id = body_id; + while let Some(node) = self.tcx.hir().find(hir_id) { + match node { + hir::Node::TraitItem(hir::TraitItem { + generics, + kind: hir::TraitItemKind::Method(..), .. + }) if param_ty && self_ty == self.tcx.types.self_param => { + // Restricting `Self` for a single method. + suggest_restriction(&generics, "`Self`"); + return; + } + + hir::Node::Item(hir::Item { + kind: hir::ItemKind::Fn(_, _, generics, _), .. + }) | + hir::Node::TraitItem(hir::TraitItem { + generics, + kind: hir::TraitItemKind::Method(..), .. + }) | + hir::Node::ImplItem(hir::ImplItem { + generics, + kind: hir::ImplItemKind::Method(..), .. + }) | + hir::Node::Item(hir::Item { + kind: hir::ItemKind::Trait(_, _, generics, _, _), .. + }) | + hir::Node::Item(hir::Item { + kind: hir::ItemKind::Impl(_, _, _, generics, ..), .. + }) if projection.is_some() => { + // Missing associated type bound. + suggest_restriction(&generics, "the associated type"); + return; + } + + hir::Node::Item(hir::Item { kind: hir::ItemKind::Struct(_, generics), span, .. }) | + hir::Node::Item(hir::Item { kind: hir::ItemKind::Enum(_, generics), span, .. }) | + hir::Node::Item(hir::Item { kind: hir::ItemKind::Union(_, generics), span, .. }) | + hir::Node::Item(hir::Item { + kind: hir::ItemKind::Trait(_, _, generics, ..), span, .. + }) | + hir::Node::Item(hir::Item { + kind: hir::ItemKind::Impl(_, _, _, generics, ..), span, .. 
+ }) | + hir::Node::Item(hir::Item { + kind: hir::ItemKind::Fn(_, _, generics, _), span, .. + }) | + hir::Node::Item(hir::Item { + kind: hir::ItemKind::TyAlias(_, generics), span, .. + }) | + hir::Node::Item(hir::Item { + kind: hir::ItemKind::TraitAlias(generics, _), span, .. + }) | + hir::Node::Item(hir::Item { + kind: hir::ItemKind::OpaqueTy(hir::OpaqueTy { generics, .. }), span, .. + }) | + hir::Node::TraitItem(hir::TraitItem { generics, span, .. }) | + hir::Node::ImplItem(hir::ImplItem { generics, span, .. }) + if param_ty => { + // Missing generic type parameter bound. + let restrict_msg = "consider further restricting this bound"; + let param_name = self_ty.to_string(); + for param in generics.params.iter().filter(|p| { + ¶m_name == std::convert::AsRef::::as_ref(&p.name.ident().as_str()) + }) { + if param_name.starts_with("impl ") { + // `impl Trait` in argument: + // `fn foo(x: impl Trait) {}` → `fn foo(t: impl Trait + Trait2) {}` + err.span_suggestion( + param.span, + restrict_msg, + // `impl CurrentTrait + MissingTrait` + format!("{} + {}", param.name.ident(), trait_ref), + Applicability::MachineApplicable, + ); + } else if generics.where_clause.predicates.is_empty() && + param.bounds.is_empty() + { + // If there are no bounds whatsoever, suggest adding a constraint + // to the type parameter: + // `fn foo(t: T) {}` → `fn foo(t: T) {}` + err.span_suggestion( + param.span, + "consider restricting this bound", + format!("{}", trait_ref.to_predicate()), + Applicability::MachineApplicable, + ); + } else if !generics.where_clause.predicates.is_empty() { + // There is a `where` clause, so suggest expanding it: + // `fn foo(t: T) where T: Debug {}` → + // `fn foo(t: T) where T: Debug, T: Trait {}` + err.span_suggestion( + generics.where_clause.span().unwrap().shrink_to_hi(), + &format!( + "consider further restricting type parameter `{}`", + param_name, + ), + format!(", {}", trait_ref.to_predicate()), + Applicability::MachineApplicable, + ); + } else { + // If there is no `where` clause lean towards constraining to the + // type parameter: + // `fn foo(t: T, x: X) {}` → `fn foo(t: T) {}` + // `fn foo(t: T) {}` → `fn foo(t: T) {}` + let sp = param.span.with_hi(span.hi()); + let span = self.tcx.sess.source_map() + .span_through_char(sp, ':'); + if sp != param.span && sp != span { + // Only suggest if we have high certainty that the span + // covers the colon in `foo`. + err.span_suggestion(span, restrict_msg, format!( + "{} + ", + trait_ref.to_predicate(), + ), Applicability::MachineApplicable); + } else { + err.span_label(param.span, &format!( + "consider adding a `where {}` bound", + trait_ref.to_predicate(), + )); + } + } + return; + } + } + + hir::Node::Crate => return, + + _ => {} + } + + hir_id = self.tcx.hir().get_parent_item(hir_id); + } + } + /// When encountering an assignment of an unsized trait, like `let x = ""[..];`, provide a /// suggestion to borrow the initializer in order to use have a slice instead. 
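The `suggest_restricting_param_bound` helper introduced above walks up from the obligation's body to the enclosing item and proposes where the missing bound should go. An illustrative example of the two most common shapes it targets (my own code, not taken from the diff): with no bounds at all, the fix lands on the parameter itself; with an existing `where` clause, the bound is appended to it.

```rust
use std::fmt::Debug;

// Case 1: no bounds at all, so the suggested fix constrains the parameter
// directly (`T` becomes `T: Debug`).
fn print_one<T: Debug>(t: T) {
    println!("{:?}", t);
}

// Case 2: a `where` clause already exists, so the suggested fix appends to it
// (`where T: Clone` becomes `where T: Clone, T: Debug`).
fn print_two<T>(t: T)
where
    T: Clone,
    T: Debug,
{
    let _copy = t.clone();
    println!("{:?}", t);
}

fn main() {
    print_one(1u32);
    print_two("hi");
}
```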
fn suggest_borrow_on_unsized_slice( @@ -959,7 +1213,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { let parent_node = self.tcx.hir().get_parent_node(hir_id); if let Some(Node::Local(ref local)) = self.tcx.hir().find(parent_node) { if let Some(ref expr) = local.init { - if let hir::ExprKind::Index(_, _) = expr.node { + if let hir::ExprKind::Index(_, _) = expr.kind { if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(expr.span) { err.span_suggestion( expr.span, @@ -982,7 +1236,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { points_at_arg: bool, ) { let self_ty = trait_ref.self_ty(); - match self_ty.sty { + match self_ty.kind { ty::FnDef(def_id, _) => { // We tried to apply the bound to an `fn`. Check whether calling it would evaluate // to a type that *would* satisfy the trait binding. If it would, suggest calling @@ -1004,7 +1258,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { Ok(EvaluationResult::EvaluatedToAmbig) => { if let Some(hir::Node::Item(hir::Item { ident, - node: hir::ItemKind::Fn(.., body_id), + kind: hir::ItemKind::Fn(.., body_id), .. })) = self.tcx.hir().get_if_local(def_id) { let body = self.tcx.hir().body(*body_id); @@ -1013,7 +1267,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { "{}({})", ident, body.params.iter() - .map(|arg| match &arg.pat.node { + .map(|arg| match &arg.pat.kind { hir::PatKind::Binding(_, _, ident, None) if ident.name != kw::SelfLower => ident.to_string(), _ => "_".to_string(), @@ -1069,14 +1323,16 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { let mut trait_type = trait_ref.self_ty(); for refs_remaining in 0..refs_number { - if let ty::Ref(_, t_type, _) = trait_type.sty { + if let ty::Ref(_, t_type, _) = trait_type.kind { trait_type = t_type; let substs = self.tcx.mk_substs_trait(trait_type, &[]); let new_trait_ref = ty::TraitRef::new(trait_ref.def_id, substs); - let new_obligation = Obligation::new(ObligationCause::dummy(), - obligation.param_env, - new_trait_ref.to_predicate()); + let new_obligation = Obligation::new( + ObligationCause::dummy(), + obligation.param_env, + new_trait_ref.to_predicate(), + ); if self.predicate_may_hold(&new_obligation) { let sp = self.tcx.sess.source_map() @@ -1098,6 +1354,77 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { } } + /// Check if the trait bound is implemented for a different mutability and note it in the + /// final error. + fn suggest_change_mut( + &self, + obligation: &PredicateObligation<'tcx>, + err: &mut DiagnosticBuilder<'tcx>, + trait_ref: &ty::Binder>, + points_at_arg: bool, + ) { + let span = obligation.cause.span; + if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) { + let refs_number = snippet.chars() + .filter(|c| !c.is_whitespace()) + .take_while(|c| *c == '&') + .count(); + if let Some('\'') = snippet.chars() + .filter(|c| !c.is_whitespace()) + .skip(refs_number) + .next() + { // Do not suggest removal of borrow from type arguments. + return; + } + let trait_ref = self.resolve_vars_if_possible(trait_ref); + if trait_ref.has_infer_types() { + // Do not ICE while trying to find if a reborrow would succeed on a trait with + // unresolved bindings. 
+ return; + } + + if let ty::Ref(region, t_type, mutability) = trait_ref.skip_binder().self_ty().kind { + let trait_type = match mutability { + hir::Mutability::MutMutable => self.tcx.mk_imm_ref(region, t_type), + hir::Mutability::MutImmutable => self.tcx.mk_mut_ref(region, t_type), + }; + + let substs = self.tcx.mk_substs_trait(&trait_type, &[]); + let new_trait_ref = ty::TraitRef::new(trait_ref.skip_binder().def_id, substs); + let new_obligation = Obligation::new( + ObligationCause::dummy(), + obligation.param_env, + new_trait_ref.to_predicate(), + ); + + if self.evaluate_obligation_no_overflow( + &new_obligation, + ).must_apply_modulo_regions() { + let sp = self.tcx.sess.source_map() + .span_take_while(span, |c| c.is_whitespace() || *c == '&'); + if points_at_arg && + mutability == hir::Mutability::MutImmutable && + refs_number > 0 + { + err.span_suggestion( + sp, + "consider changing this borrow's mutability", + "&mut ".to_string(), + Applicability::MachineApplicable, + ); + } else { + err.note(&format!( + "`{}` is implemented for `{:?}`, but not for `{:?}`", + trait_ref, + trait_type, + trait_ref.skip_binder().self_ty(), + )); + } + } + } + } + } + fn suggest_semicolon_removal( &self, obligation: &PredicateObligation<'tcx>, @@ -1109,11 +1436,11 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { let parent_node = hir.get_parent_node(obligation.cause.body_id); let node = hir.find(parent_node); if let Some(hir::Node::Item(hir::Item { - node: hir::ItemKind::Fn(decl, _, _, body_id), + kind: hir::ItemKind::Fn(decl, _, _, body_id), .. })) = node { let body = hir.body(*body_id); - if let hir::ExprKind::Block(blk, _) = &body.value.node { + if let hir::ExprKind::Block(blk, _) = &body.value.kind { if decl.output.span().overlaps(span) && blk.expr.is_none() && "()" == &trait_ref.self_ty().to_string() { @@ -1137,14 +1464,14 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { pub fn get_fn_like_arguments(&self, node: Node<'_>) -> (Span, Vec) { match node { Node::Expr(&hir::Expr { - node: hir::ExprKind::Closure(_, ref _decl, id, span, _), + kind: hir::ExprKind::Closure(_, ref _decl, id, span, _), .. }) => { (self.tcx.sess.source_map().def_span(span), self.tcx.hir().body(id).params.iter() .map(|arg| { if let hir::Pat { - node: hir::PatKind::Tuple(ref args, _), + kind: hir::PatKind::Tuple(ref args, _), span, .. } = *arg.pat { @@ -1166,21 +1493,21 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { } Node::Item(&hir::Item { span, - node: hir::ItemKind::Fn(ref decl, ..), + kind: hir::ItemKind::Fn(ref decl, ..), .. }) | Node::ImplItem(&hir::ImplItem { span, - node: hir::ImplItemKind::Method(hir::MethodSig { ref decl, .. }, _), + kind: hir::ImplItemKind::Method(hir::MethodSig { ref decl, .. }, _), .. }) | Node::TraitItem(&hir::TraitItem { span, - node: hir::TraitItemKind::Method(hir::MethodSig { ref decl, .. }, _), + kind: hir::TraitItemKind::Method(hir::MethodSig { ref decl, .. }, _), .. 
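The new `suggest_change_mut` helper checks whether the failing trait bound would hold for the same reference type with the opposite mutability and, when the obligation points at an argument, suggests rewriting the borrow. A small self-contained example of the situation it targets (illustrative only):

```rust
trait Frob {
    fn frob(&self);
}

// `Frob` is implemented for `&mut u32` only.
impl<'a> Frob for &'a mut u32 {
    fn frob(&self) {}
}

fn needs_frob<T: Frob>(t: T) {
    t.frob();
}

fn main() {
    let mut x = 1u32;
    // Writing `needs_frob(&x)` fails because `Frob` is not implemented for `&u32`;
    // the new suggestion notices that `&mut u32: Frob` does hold and proposes
    // changing the borrow's mutability to `&mut x`.
    needs_frob(&mut x);
}
```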
}) => { (self.tcx.sess.source_map().def_span(span), decl.inputs.iter() - .map(|arg| match arg.clone().node { + .map(|arg| match arg.clone().kind { hir::TyKind::Tup(ref tys) => ArgKind::Tuple( Some(arg.span), vec![("_".to_owned(), "_".to_owned()); tys.len()] @@ -1343,7 +1670,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { ) -> DiagnosticBuilder<'tcx> { fn build_fn_sig_string<'tcx>(tcx: TyCtxt<'tcx>, trait_ref: &ty::TraitRef<'tcx>) -> String { let inputs = trait_ref.substs.type_at(1); - let sig = if let ty::Tuple(inputs) = inputs.sty { + let sig = if let ty::Tuple(inputs) = inputs.kind { tcx.mk_fn_sig( inputs.iter().map(|k| k.expect_ty()), tcx.mk_ty_infer(ty::TyVar(ty::TyVid { index: 0 })), @@ -1408,11 +1735,7 @@ impl<'tcx> TyCtxt<'tcx> { span: Span, trait_def_id: DefId, violations: Vec, - ) -> Option> { - if self.sess.trait_methods_not_found.borrow().contains(&span) { - // Avoid emitting error caused by non-existing method (#58734) - return None; - } + ) -> DiagnosticBuilder<'tcx> { let trait_str = self.def_path_str(trait_def_id); let span = self.sess.source_map().def_span(span); let mut err = struct_span_err!( @@ -1430,13 +1753,22 @@ impl<'tcx> TyCtxt<'tcx> { }; } } - Some(err) + + if self.sess.trait_methods_not_found.borrow().contains(&span) { + // Avoid emitting error caused by non-existing method (#58734) + err.cancel(); + } + + err } } impl<'a, 'tcx> InferCtxt<'a, 'tcx> { - fn maybe_report_ambiguity(&self, obligation: &PredicateObligation<'tcx>, - body_id: Option) { + fn maybe_report_ambiguity( + &self, + obligation: &PredicateObligation<'tcx>, + body_id: Option, + ) { // Unable to successfully determine, probably means // insufficient type information, but could mean // ambiguous impls. The latter *ought* to be a @@ -1445,9 +1777,13 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { let predicate = self.resolve_vars_if_possible(&obligation.predicate); let span = obligation.cause.span; - debug!("maybe_report_ambiguity(predicate={:?}, obligation={:?})", - predicate, - obligation); + debug!( + "maybe_report_ambiguity(predicate={:?}, obligation={:?} body_id={:?}, code={:?})", + predicate, + obligation, + body_id, + obligation.cause.code, + ); // Ambiguity errors are often caused as fallout from earlier // errors. So just ignore them if this infcx is tainted. @@ -1459,6 +1795,8 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { ty::Predicate::Trait(ref data) => { let trait_ref = data.to_poly_trait_ref(); let self_ty = trait_ref.self_ty(); + debug!("self_ty {:?} {:?} trait_ref {:?}", self_ty, self_ty.kind, trait_ref); + if predicate.references_error() { return; } @@ -1483,24 +1821,25 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { // be ignoring the fact that we don't KNOW the type works // out. Though even that would probably be harmless, given that // we're only talking about builtin traits, which are known to be - // inhabited. But in any case I just threw in this check for - // has_errors() to be sure that compilation isn't happening - // anyway. In that case, why inundate the user. - if !self.tcx.sess.has_errors() { - if - self.tcx.lang_items().sized_trait() - .map_or(false, |sized_id| sized_id == trait_ref.def_id()) - { - self.need_type_info_err(body_id, span, self_ty).emit(); - } else { - let mut err = struct_span_err!(self.tcx.sess, - span, E0283, - "type annotations required: \ - cannot resolve `{}`", - predicate); - self.note_obligation_cause(&mut err, obligation); - err.emit(); - } + // inhabited. 
We used to check for `self.tcx.sess.has_errors()` to + // avoid inundating the user with unnecessary errors, but we now + // check upstream for type errors and dont add the obligations to + // begin with in those cases. + if + self.tcx.lang_items().sized_trait() + .map_or(false, |sized_id| sized_id == trait_ref.def_id()) + { + self.need_type_info_err(body_id, span, self_ty).emit(); + } else { + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0283, + "type annotations needed: cannot resolve `{}`", + predicate, + ); + self.note_obligation_cause(&mut err, obligation); + err.emit(); } } @@ -1527,11 +1866,13 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { _ => { if !self.tcx.sess.has_errors() { - let mut err = struct_span_err!(self.tcx.sess, - obligation.cause.span, E0284, - "type annotations required: \ - cannot resolve `{}`", - predicate); + let mut err = struct_span_err!( + self.tcx.sess, + obligation.cause.span, + E0284, + "type annotations needed: cannot resolve `{}`", + predicate, + ); self.note_obligation_cause(&mut err, obligation); err.emit(); } @@ -1555,7 +1896,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { fn tcx<'b>(&'b self) -> TyCtxt<'tcx> { self.infcx.tcx } fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { - if let ty::Param(ty::ParamTy {name, .. }) = ty.sty { + if let ty::Param(ty::ParamTy {name, .. }) = ty.kind { let infcx = self.infcx; self.var_map.entry(ty).or_insert_with(|| infcx.next_ty_var( @@ -1675,8 +2016,8 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { ObligationCauseCode::BuiltinDerivedObligation(derived_obligation) | ObligationCauseCode::ImplDerivedObligation(derived_obligation) => { debug!("note_obligation_cause_for_async_await: self_ty.kind={:?}", - derived_obligation.parent_trait_ref.self_ty().sty); - match derived_obligation.parent_trait_ref.self_ty().sty { + derived_obligation.parent_trait_ref.self_ty().kind); + match derived_obligation.parent_trait_ref.self_ty().kind { ty::Adt(ty::AdtDef { did, .. }, ..) if self.tcx.is_diagnostic_item(sym::gen_future, *did) => {}, ty::Generator(did, ..) => generator = generator.or(Some(did)), @@ -1699,7 +2040,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { .and_then(|parent_did| self.tcx.hir().get_if_local(parent_did)); debug!("note_obligation_cause_for_async_await: parent_node={:?}", parent_node); if let Some(hir::Node::Item(hir::Item { - node: hir::ItemKind::Fn(_, header, _, _), + kind: hir::ItemKind::Fn(_, header, _, _), .. 
})) = parent_node { debug!("note_obligation_cause_for_async_await: header={:?}", header); @@ -1829,9 +2170,22 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { err.note(&format!("required for the cast to the object type `{}`", self.ty_to_string(object_ty))); } - ObligationCauseCode::RepeatVec => { + ObligationCauseCode::Coercion { source: _, target } => { + err.note(&format!("required by cast to type `{}`", + self.ty_to_string(target))); + } + ObligationCauseCode::RepeatVec(suggest_const_in_array_repeat_expression) => { err.note("the `Copy` trait is required because the \ repeated element will be copied"); + if suggest_const_in_array_repeat_expression { + err.note("this array initializer can be evaluated at compile-time, for more \ + information, see issue \ + https://github.com/rust-lang/rust/issues/49147"); + if tcx.sess.opts.unstable_features.is_nightly_build() { + err.help("add `#![feature(const_in_array_repeat_expression)]` to the \ + crate attributes to enable"); + } + } } ObligationCauseCode::VariableType(_) => { err.note("all local variables must have a statically known size"); @@ -1884,6 +2238,9 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { ObligationCauseCode::ConstSized => { err.note("constant expressions must have a statically known size"); } + ObligationCauseCode::ConstPatternStructural => { + err.note("constants used for pattern-matching must derive `PartialEq` and `Eq`"); + } ObligationCauseCode::SharedStatic => { err.note("shared static variables must have a type that implements `Sync`"); } @@ -1919,7 +2276,8 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { but not on the corresponding trait method", predicate)); } - ObligationCauseCode::ReturnType(_) | + ObligationCauseCode::ReturnType | + ObligationCauseCode::ReturnValue(_) | ObligationCauseCode::BlockTailExpression(_) => (), ObligationCauseCode::TrivialBound => { err.help("see issue #48214"); @@ -1929,6 +2287,12 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { ); } } + ObligationCauseCode::AssocTypeBound(impl_span, orig) => { + err.span_label(orig, "associated type defined here"); + if let Some(sp) = impl_span { + err.span_label(sp, "in this `impl` item"); + } + } } } @@ -1974,7 +2338,7 @@ impl ArgKind { /// Creates an `ArgKind` from the expected type of an /// argument. It has no name (`_`) and an optional source span. pub fn from_expected_ty(t: Ty<'_>, span: Option) -> ArgKind { - match t.sty { + match t.kind { ty::Tuple(ref tys) => ArgKind::Tuple( span, tys.iter() diff --git a/src/librustc/traits/fulfill.rs b/src/librustc/traits/fulfill.rs index 805727b6ce..a981162fdc 100644 --- a/src/librustc/traits/fulfill.rs +++ b/src/librustc/traits/fulfill.rs @@ -256,29 +256,46 @@ impl<'a, 'b, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'b, 'tcx> { &mut self, pending_obligation: &mut Self::Obligation, ) -> ProcessResult { - // If we were stalled on some unresolved variables, first check - // whether any of them have been resolved; if not, don't bother - // doing more work yet - if !pending_obligation.stalled_on.is_empty() { - let mut changed = false; - // This `for` loop was once a call to `all()`, but this lower-level - // form was a perf win. See #64545 for details. - for &ty in &pending_obligation.stalled_on { - if ShallowResolver::new(self.selcx.infcx()).shallow_resolve_changed(ty) { - changed = true; - break; - } + // If we were stalled on some unresolved variables, first check whether + // any of them have been resolved; if not, don't bother doing more work + // yet. 
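The rewritten fast path that follows special-cases `stalled_on` lists of length one and zero before falling back to a loop, because this code is extremely hot and single-variable stalls dominate. A standalone sketch of the same control flow, with plain integers standing in for inference variables and `resolved` standing in for `shallow_resolve_changed` (the real code uses a manual loop rather than an iterator adapter for performance, see #64545):

```rust
// Stand-in for `shallow_resolve_changed`: has this "type variable" been resolved
// since the obligation stalled on it?
fn resolved(var: u32, resolved_vars: &[u32]) -> bool {
    resolved_vars.contains(&var)
}

// Returns true if the obligation should be re-processed.
fn should_reprocess(stalled_on: &[u32], resolved_vars: &[u32]) -> bool {
    match stalled_on.len() {
        // Most obligations stall on exactly one variable: check it directly.
        1 => resolved(stalled_on[0], resolved_vars),
        // Not stalled on anything: always process.
        0 => true,
        // The rare general case: any resolved variable unstalls the obligation.
        _ => stalled_on.iter().any(|&v| resolved(v, resolved_vars)),
    }
}

fn main() {
    assert!(should_reprocess(&[], &[]));
    assert!(should_reprocess(&[3], &[3, 7]));
    assert!(!should_reprocess(&[3, 5], &[7]));
}
```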
+ let change = match pending_obligation.stalled_on.len() { + // Match arms are in order of frequency, which matters because this + // code is so hot. 1 and 0 dominate; 2+ is fairly rare. + 1 => { + let ty = pending_obligation.stalled_on[0]; + ShallowResolver::new(self.selcx.infcx()).shallow_resolve_changed(ty) } - if !changed { - debug!("process_predicate: pending obligation {:?} still stalled on {:?}", - self.selcx.infcx() - .resolve_vars_if_possible(&pending_obligation.obligation), - pending_obligation.stalled_on); - return ProcessResult::Unchanged; + 0 => { + // In this case we haven't changed, but wish to make a change. + true } - pending_obligation.stalled_on = vec![]; + _ => { + // This `for` loop was once a call to `all()`, but this lower-level + // form was a perf win. See #64545 for details. + (|| { + for &ty in &pending_obligation.stalled_on { + if ShallowResolver::new(self.selcx.infcx()).shallow_resolve_changed(ty) { + return true; + } + } + false + })() + } + }; + + if !change { + debug!("process_predicate: pending obligation {:?} still stalled on {:?}", + self.selcx.infcx() + .resolve_vars_if_possible(&pending_obligation.obligation), + pending_obligation.stalled_on); + return ProcessResult::Unchanged; } + // This part of the code is much colder. + + pending_obligation.stalled_on.truncate(0); + let obligation = &mut pending_obligation.obligation; if obligation.predicate.has_infer_types() { @@ -478,7 +495,7 @@ impl<'a, 'b, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'b, 'tcx> { } else { if !substs.has_local_value() { let instance = ty::Instance::resolve( - self.selcx.tcx().global_tcx(), + self.selcx.tcx(), obligation.param_env, def_id, substs, @@ -531,7 +548,7 @@ fn trait_ref_type_vars<'a, 'tcx>( .map(|t| selcx.infcx().resolve_vars_if_possible(&t)) .filter(|t| t.has_infer_types()) .flat_map(|t| t.walk()) - .filter(|t| match t.sty { ty::Infer(_) => true, _ => false }) + .filter(|t| match t.kind { ty::Infer(_) => true, _ => false }) .collect() } diff --git a/src/librustc/traits/mod.rs b/src/librustc/traits/mod.rs index 1123422ad6..b827529956 100644 --- a/src/librustc/traits/mod.rs +++ b/src/librustc/traits/mod.rs @@ -188,6 +188,9 @@ pub enum ObligationCauseCode<'tcx> { /// Obligation incurred due to an object cast. ObjectCastObligation(/* Object type */ Ty<'tcx>), + /// Obligation incurred due to a coercion. + Coercion { source: Ty<'tcx>, target: Ty<'tcx> }, + // Various cases where expressions must be sized/copy/etc: /// L = X implies that L is Sized AssignmentLhsSized, @@ -203,8 +206,9 @@ pub enum ObligationCauseCode<'tcx> { SizedReturnType, /// Yield type must be Sized SizedYieldType, - /// [T,..n] --> T must be Copy - RepeatVec, + /// [T,..n] --> T must be Copy. If `true`, suggest `const_in_array_repeat_expression` feature + /// flag. + RepeatVec(bool), /// Types of fields (other than the last, except for packed structs) in a struct must be sized. FieldSized { adt_kind: AdtKind, last: bool }, @@ -212,14 +216,14 @@ pub enum ObligationCauseCode<'tcx> { /// Constant expressions must be sized. 
ConstSized, - /// static items must have `Sync` type + /// Static items must have `Sync` type SharedStatic, BuiltinDerivedObligation(DerivedObligationCause<'tcx>), ImplDerivedObligation(DerivedObligationCause<'tcx>), - /// error derived when matching traits/impls; see ObligationCause for more details + /// Error derived when matching traits/impls; see ObligationCause for more details CompareImplMethodObligation { item_name: ast::Name, impl_item_def_id: DefId, @@ -236,6 +240,9 @@ pub enum ObligationCauseCode<'tcx> { /// Computing common supertype in the pattern guard for the arms of a match expression MatchExpressionArmPattern { span: Span, ty: Ty<'tcx> }, + /// Constants in patterns must have `Structural` type. + ConstPatternStructural, + /// Computing common supertype in an if expression IfExpression(Box), @@ -248,23 +255,28 @@ pub enum ObligationCauseCode<'tcx> { /// `start` has wrong type StartFunctionType, - /// intrinsic has wrong type + /// Intrinsic has wrong type IntrinsicType, - /// method receiver + /// Method receiver MethodReceiver, /// `return` with no expression ReturnNoExpression, /// `return` with an expression - ReturnType(hir::HirId), + ReturnValue(hir::HirId), + + /// Return type of this function + ReturnType, /// Block implicit return BlockTailExpression(hir::HirId), /// #[feature(trivial_bounds)] is not enabled TrivialBound, + + AssocTypeBound(/*impl*/ Option, /*original*/ Span), } // `ObligationCauseCode` is used a lot. Make sure it doesn't unintentionally get bigger. @@ -607,7 +619,7 @@ pub struct VtableImplData<'tcx, N> { #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, HashStable)] pub struct VtableGeneratorData<'tcx, N> { pub generator_def_id: DefId, - pub substs: ty::GeneratorSubsts<'tcx>, + pub substs: SubstsRef<'tcx>, /// Nested obligations. This can be non-empty if the generator /// signature contains associated types. pub nested: Vec @@ -616,7 +628,7 @@ pub struct VtableGeneratorData<'tcx, N> { #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, HashStable)] pub struct VtableClosureData<'tcx, N> { pub closure_def_id: DefId, - pub substs: ty::ClosureSubsts<'tcx>, + pub substs: SubstsRef<'tcx>, /// Nested obligations. This can be non-empty if the closure /// signature contains associated types. pub nested: Vec diff --git a/src/librustc/traits/object_safety.rs b/src/librustc/traits/object_safety.rs index a7990c4af6..8ded1417ee 100644 --- a/src/librustc/traits/object_safety.rs +++ b/src/librustc/traits/object_safety.rs @@ -19,7 +19,7 @@ use crate::ty::subst::{Subst, InternalSubsts}; use std::borrow::Cow; use std::iter::{self}; use syntax::ast::{self}; -use syntax::symbol::InternedString; +use syntax::symbol::Symbol; use syntax_pos::{Span, DUMMY_SP}; #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] @@ -520,9 +520,11 @@ impl<'tcx> TyCtxt<'tcx> { /// a pointer. /// /// In practice, we cannot use `dyn Trait` explicitly in the obligation because it would result - /// in a new check that `Trait` is object safe, creating a cycle. So instead, we fudge a little - /// by introducing a new type parameter `U` such that `Self: Unsize` and `U: Trait + ?Sized`, - /// and use `U` in place of `dyn Trait`. Written as a chalk-style query: + /// in a new check that `Trait` is object safe, creating a cycle (until object_safe_for_dispatch + /// is stabilized, see tracking issue https://github.com/rust-lang/rust/issues/43561). 
+ /// Instead, we fudge a little by introducing a new type parameter `U` such that + /// `Self: Unsize` and `U: Trait + ?Sized`, and use `U` in place of `dyn Trait`. + /// Written as a chalk-style query: /// /// forall (U: Trait + ?Sized) { /// if (Self: Unsize) { @@ -556,11 +558,11 @@ impl<'tcx> TyCtxt<'tcx> { // the type `U` in the query // use a bogus type parameter to mimick a forall(U) query using u32::MAX for now. - // FIXME(mikeyhew) this is a total hack, and we should replace it when real forall queries - // are implemented + // FIXME(mikeyhew) this is a total hack. Once object_safe_for_dispatch is stabilized, we can + // replace this with `dyn Trait` let unsized_self_ty: Ty<'tcx> = self.mk_ty_param( ::std::u32::MAX, - InternedString::intern("RustaceansAreAwesome"), + Symbol::intern("RustaceansAreAwesome"), ); // `Receiver[Self => U]` @@ -677,7 +679,7 @@ impl<'tcx> TyCtxt<'tcx> { let mut error = false; let self_ty = self.types.self_param; ty.maybe_walk(|ty| { - match ty.sty { + match ty.kind { ty::Param(_) => { if ty == self_ty { error = true; diff --git a/src/librustc/traits/on_unimplemented.rs b/src/librustc/traits/on_unimplemented.rs index 5a988d9509..b39c00a56e 100644 --- a/src/librustc/traits/on_unimplemented.rs +++ b/src/librustc/traits/on_unimplemented.rs @@ -248,9 +248,11 @@ impl<'tcx> OnUnimplementedFormatString { Position::ArgumentNamed(s) if s == sym::from_method => (), // `{from_desugaring}` is allowed Position::ArgumentNamed(s) if s == sym::from_desugaring => (), + // `{ItemContext}` is allowed + Position::ArgumentNamed(s) if s == sym::item_context => (), // So is `{A}` if A is a type parameter Position::ArgumentNamed(s) => match generics.params.iter().find(|param| { - param.name.as_symbol() == s + param.name == s }) { Some(_) => (), None => { @@ -289,13 +291,14 @@ impl<'tcx> OnUnimplementedFormatString { }, GenericParamDefKind::Lifetime => return None }; - let name = param.name.as_symbol(); + let name = param.name; Some((name, value)) }).collect::>(); let empty_string = String::new(); let s = self.0.as_str(); let parser = Parser::new(&s, None, vec![], false); + let item_context = (options.get(&sym::item_context)).unwrap_or(&empty_string); parser.map(|p| match p { Piece::String(s) => s, @@ -311,6 +314,8 @@ impl<'tcx> OnUnimplementedFormatString { } else if s == sym::from_desugaring || s == sym::from_method { // don't break messages using these two arguments incorrectly &empty_string + } else if s == sym::item_context { + &item_context } else { bug!("broken on_unimplemented {:?} for {:?}: \ no argument matching {:?}", diff --git a/src/librustc/traits/project.rs b/src/librustc/traits/project.rs index 87a23f655a..d88bbe145d 100644 --- a/src/librustc/traits/project.rs +++ b/src/librustc/traits/project.rs @@ -337,7 +337,7 @@ impl<'a, 'b, 'tcx> TypeFolder<'tcx> for AssocTypeNormalizer<'a, 'b, 'tcx> { // should occur eventually). let ty = ty.super_fold_with(self); - match ty.sty { + match ty.kind { ty::Opaque(def_id, substs) if !substs.has_escaping_bound_vars() => { // (*) // Only normalize `impl Trait` after type-checking, usually in codegen. match self.param_env.reveal { @@ -921,7 +921,7 @@ fn assemble_candidates_from_trait_def<'cx, 'tcx>( let tcx = selcx.tcx(); // Check whether the self-type is itself a projection. 
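The `receiver_is_dispatchable` documentation rewritten above concerns the checks that, on stable Rust, make examples like the following work: `Rc<Self>` must remain a valid, dispatchable receiver once `Self` is erased to `dyn Greet`. (User-level illustration, not taken from the diff.)

```rust
use std::rc::Rc;

trait Greet {
    // `Rc<Self>` must stay dispatchable for `Self = dyn Greet`; that is what the
    // unsize/dispatchability checks verify.
    fn greet(self: Rc<Self>) -> String;
}

struct World;

impl Greet for World {
    fn greet(self: Rc<Self>) -> String {
        "hello, world".to_string()
    }
}

fn main() {
    let obj: Rc<dyn Greet> = Rc::new(World);
    assert_eq!(obj.greet(), "hello, world");
}
```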
- let (def_id, substs) = match obligation_trait_ref.self_ty().sty { + let (def_id, substs) = match obligation_trait_ref.self_ty().kind { ty::Projection(ref data) => { (data.trait_ref(tcx).def_id, data.substs) } @@ -1199,7 +1199,7 @@ fn confirm_object_candidate<'cx, 'tcx>( let object_ty = selcx.infcx().shallow_resolve(self_ty); debug!("confirm_object_candidate(object_ty={:?})", object_ty); - let data = match object_ty.sty { + let data = match object_ty.kind { ty::Dynamic(ref data, ..) => data, _ => { span_bug!( @@ -1259,7 +1259,7 @@ fn confirm_generator_candidate<'cx, 'tcx>( obligation: &ProjectionTyObligation<'tcx>, vtable: VtableGeneratorData<'tcx, PredicateObligation<'tcx>>, ) -> Progress<'tcx> { - let gen_sig = vtable.substs.poly_sig(vtable.generator_def_id, selcx.tcx()); + let gen_sig = vtable.substs.as_generator().poly_sig(vtable.generator_def_id, selcx.tcx()); let Normalized { value: gen_sig, obligations @@ -1334,7 +1334,8 @@ fn confirm_closure_candidate<'cx, 'tcx>( ) -> Progress<'tcx> { let tcx = selcx.tcx(); let infcx = selcx.infcx(); - let closure_sig_ty = vtable.substs.closure_sig_ty(vtable.closure_def_id, tcx); + let closure_sig_ty = vtable.substs + .as_closure().sig_ty(vtable.closure_def_id, tcx); let closure_sig = infcx.shallow_resolve(closure_sig_ty).fn_sig(tcx); let Normalized { value: closure_sig, @@ -1504,8 +1505,8 @@ fn assoc_ty_def( if let Some(assoc_item) = trait_def .ancestors(tcx, impl_def_id) - .defs(tcx, assoc_ty_name, ty::AssocKind::Type, trait_def_id) - .next() { + .leaf_def(tcx, assoc_ty_name, ty::AssocKind::Type) { + assoc_item } else { // This is saying that neither the trait nor diff --git a/src/librustc/traits/query/dropck_outlives.rs b/src/librustc/traits/query/dropck_outlives.rs index 46403a38c9..e84c91daf2 100644 --- a/src/librustc/traits/query/dropck_outlives.rs +++ b/src/librustc/traits/query/dropck_outlives.rs @@ -3,8 +3,9 @@ use crate::infer::InferOk; use crate::infer::canonical::OriginalQueryValues; use std::iter::FromIterator; use syntax::source_map::Span; -use crate::ty::subst::Kind; +use crate::ty::subst::GenericArg; use crate::ty::{self, Ty, TyCtxt}; +use crate::ty::query::Providers; impl<'cx, 'tcx> At<'cx, 'tcx> { /// Given a type `ty` of some value being dropped, computes a set @@ -24,7 +25,7 @@ impl<'cx, 'tcx> At<'cx, 'tcx> { /// /// [#1238]: https://github.com/rust-lang/rfcs/blob/master/text/1238-nonparametric-dropck.md /// [#1327]: https://github.com/rust-lang/rfcs/blob/master/text/1327-dropck-param-eyepatch.md - pub fn dropck_outlives(&self, ty: Ty<'tcx>) -> InferOk<'tcx, Vec>> { + pub fn dropck_outlives(&self, ty: Ty<'tcx>) -> InferOk<'tcx, Vec>> { debug!( "dropck_outlives(ty={:?}, param_env={:?})", ty, self.param_env, @@ -33,19 +34,18 @@ impl<'cx, 'tcx> At<'cx, 'tcx> { // Quick check: there are a number of cases that we know do not require // any destructor. 
let tcx = self.infcx.tcx; - if trivial_dropck_outlives(tcx, ty) { + if tcx.trivial_dropck_outlives(ty) { return InferOk { value: vec![], obligations: vec![], }; } - let gcx = tcx.global_tcx(); let mut orig_values = OriginalQueryValues::default(); let c_ty = self.infcx.canonicalize_query(&self.param_env.and(ty), &mut orig_values); let span = self.cause.span; debug!("c_ty = {:?}", c_ty); - if let Ok(result) = &gcx.dropck_outlives(c_ty) { + if let Ok(result) = &tcx.dropck_outlives(c_ty) { if result.is_proven() { if let Ok(InferOk { value, obligations }) = self.infcx.instantiate_query_response_and_region_obligations( @@ -80,7 +80,7 @@ impl<'cx, 'tcx> At<'cx, 'tcx> { #[derive(Clone, Debug, Default)] pub struct DropckOutlivesResult<'tcx> { - pub kinds: Vec>, + pub kinds: Vec>, pub overflows: Vec>, } @@ -104,7 +104,7 @@ impl<'tcx> DropckOutlivesResult<'tcx> { tcx: TyCtxt<'tcx>, span: Span, ty: Ty<'tcx>, - ) -> Vec> { + ) -> Vec> { self.report_overflows(tcx, span, ty); let DropckOutlivesResult { kinds, overflows: _ } = self; kinds @@ -117,7 +117,7 @@ impl<'tcx> DropckOutlivesResult<'tcx> { pub struct DtorckConstraint<'tcx> { /// Types that are required to be alive in order for this /// type to be valid for destruction. - pub outlives: Vec>, + pub outlives: Vec>, /// Types that could not be resolved: projections and params. pub dtorck_types: Vec>, @@ -186,7 +186,7 @@ impl_stable_hash_for!(struct DtorckConstraint<'tcx> { /// Note also that `needs_drop` requires a "global" type (i.e., one /// with erased regions), but this function does not. pub fn trivial_dropck_outlives<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool { - match ty.sty { + match ty.kind { // None of these types have a destructor and hence they do not // require anything in particular to outlive the dtor's // execution. @@ -208,14 +208,15 @@ pub fn trivial_dropck_outlives<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool { | ty::Error => true, // [T; N] and [T] have same properties as T. - ty::Array(ty, _) | ty::Slice(ty) => trivial_dropck_outlives(tcx, ty), + ty::Array(ty, _) | ty::Slice(ty) => tcx.trivial_dropck_outlives(ty), // (T1..Tn) and closures have same properties as T1..Tn -- // check if *any* of those are trivial. - ty::Tuple(ref tys) => tys.iter().all(|t| trivial_dropck_outlives(tcx, t.expect_ty())), + ty::Tuple(ref tys) => tys.iter().all(|t| tcx.trivial_dropck_outlives(t.expect_ty())), ty::Closure(def_id, ref substs) => substs + .as_closure() .upvar_tys(def_id, tcx) - .all(|t| trivial_dropck_outlives(tcx, t)), + .all(|t| tcx.trivial_dropck_outlives(t)), ty::Adt(def, _) => { if Some(def.did) == tcx.lang_items().manually_drop() { @@ -243,3 +244,10 @@ pub fn trivial_dropck_outlives<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool { ty::UnnormalizedProjection(..) => bug!("only used with chalk-engine"), } } + +crate fn provide(p: &mut Providers<'_>) { + *p = Providers { + trivial_dropck_outlives, + ..*p + }; +} diff --git a/src/librustc/traits/query/evaluate_obligation.rs b/src/librustc/traits/query/evaluate_obligation.rs index b9557ceaa6..0d426cab9b 100644 --- a/src/librustc/traits/query/evaluate_obligation.rs +++ b/src/librustc/traits/query/evaluate_obligation.rs @@ -50,13 +50,13 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> { // Run canonical query. If overflow occurs, rerun from scratch but this time // in standard trait query mode so that overflow is handled appropriately // within `SelectionContext`. 
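The `dropck_outlives` plumbing touched above (now keyed on `GenericArg` and exposed through a `trivial_dropck_outlives` query) is what ultimately enforces the user-visible rule illustrated below: data borrowed by a value with a destructor must strictly outlive that value, since the destructor may observe it. (Illustrative example, not from the diff.)

```rust
struct Holder<'a>(&'a String);

impl<'a> Drop for Holder<'a> {
    fn drop(&mut self) {
        // The destructor can still read the borrowed data, so drop-check requires
        // that `*self.0` be alive whenever a `Holder` is dropped.
        println!("dropping holder of {:?}", self.0);
    }
}

fn main() {
    let s = String::from("hi");
    let holder = Holder(&s);
    drop(holder);
    // Reordering the declarations so that `s` were dropped before `holder` would
    // be rejected by drop-check.
}
```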
- self.tcx.global_tcx().evaluate_obligation(c_pred) + self.tcx.evaluate_obligation(c_pred) } // Helper function that canonicalizes and runs the query. If an // overflow results, we re-run it in the local context so we can // report a nice error. - fn evaluate_obligation_no_overflow( + crate fn evaluate_obligation_no_overflow( &self, obligation: &PredicateObligation<'tcx>, ) -> EvaluationResult { diff --git a/src/librustc/traits/query/mod.rs b/src/librustc/traits/query/mod.rs index 112a1d0e09..f6ea77dc5c 100644 --- a/src/librustc/traits/query/mod.rs +++ b/src/librustc/traits/query/mod.rs @@ -40,7 +40,7 @@ pub type CanonicalTypeOpProvePredicateGoal<'tcx> = pub type CanonicalTypeOpNormalizeGoal<'tcx, T> = Canonical<'tcx, ty::ParamEnvAnd<'tcx, type_op::normalize::Normalize>>; -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +#[derive(Clone, Debug)] pub struct NoSolution; pub type Fallible = Result; diff --git a/src/librustc/traits/query/normalize.rs b/src/librustc/traits/query/normalize.rs index c31ff3ab1b..ab42eab284 100644 --- a/src/librustc/traits/query/normalize.rs +++ b/src/librustc/traits/query/normalize.rs @@ -88,7 +88,7 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for QueryNormalizer<'cx, 'tcx> { fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { let ty = ty.super_fold_with(self); - match ty.sty { + match ty.kind { ty::Opaque(def_id, substs) if !substs.has_escaping_bound_vars() => { // (*) // Only normalize `impl Trait` after type-checking, usually in codegen. @@ -141,7 +141,7 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for QueryNormalizer<'cx, 'tcx> { // binder). It would be better to normalize in a // binding-aware fashion. - let gcx = self.infcx.tcx.global_tcx(); + let tcx = self.infcx.tcx; let mut orig_values = OriginalQueryValues::default(); // HACK(matthewjasper) `'static` is special-cased in selection, @@ -150,7 +150,7 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for QueryNormalizer<'cx, 'tcx> { &self.param_env.and(*data), &mut orig_values); debug!("QueryNormalizer: c_data = {:#?}", c_data); debug!("QueryNormalizer: orig_values = {:#?}", orig_values); - match gcx.normalize_projection_ty(c_data) { + match tcx.normalize_projection_ty(c_data) { Ok(result) => { // We don't expect ambiguity. if result.is_ambiguous() { diff --git a/src/librustc/traits/query/outlives_bounds.rs b/src/librustc/traits/query/outlives_bounds.rs index 40bd18738b..eee084b789 100644 --- a/src/librustc/traits/query/outlives_bounds.rs +++ b/src/librustc/traits/query/outlives_bounds.rs @@ -7,8 +7,7 @@ use crate::traits::query::NoSolution; use crate::ty::{self, Ty, TyCtxt}; use crate::ich::StableHashingContext; -use rustc_data_structures::stable_hasher::{HashStable, StableHasher, - StableHasherResult}; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use std::mem; /// Outlives bounds are relationships between generic parameters, @@ -43,9 +42,7 @@ EnumTypeFoldableImpl! 
{ } impl<'a, 'tcx> HashStable> for OutlivesBound<'tcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { OutlivesBound::RegionSubRegion(ref a, ref b) => { @@ -97,7 +94,7 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> { let mut orig_values = OriginalQueryValues::default(); let key = self.canonicalize_query(¶m_env.and(ty), &mut orig_values); - let result = match self.tcx.global_tcx().implied_outlives_bounds(key) { + let result = match self.tcx.implied_outlives_bounds(key) { Ok(r) => r, Err(NoSolution) => { self.tcx.sess.delay_span_bug( diff --git a/src/librustc/traits/query/type_op/ascribe_user_type.rs b/src/librustc/traits/query/type_op/ascribe_user_type.rs index 05a4d4336a..34aa4ee78d 100644 --- a/src/librustc/traits/query/type_op/ascribe_user_type.rs +++ b/src/librustc/traits/query/type_op/ascribe_user_type.rs @@ -1,4 +1,4 @@ -use crate::infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse}; +use crate::infer::canonical::{Canonicalized, CanonicalizedQueryResponse}; use crate::traits::query::Fallible; use crate::hir::def_id::DefId; use crate::ty::{ParamEnvAnd, Ty, TyCtxt}; @@ -37,12 +37,6 @@ impl<'tcx> super::QueryTypeOp<'tcx> for AscribeUserType<'tcx> { ) -> Fallible> { tcx.type_op_ascribe_user_type(canonicalized) } - - fn shrink_to_tcx_lifetime( - v: &'a CanonicalizedQueryResponse<'tcx, ()>, - ) -> &'a Canonical<'tcx, QueryResponse<'tcx, ()>> { - v - } } BraceStructTypeFoldableImpl! { diff --git a/src/librustc/traits/query/type_op/eq.rs b/src/librustc/traits/query/type_op/eq.rs index e8ec304f91..3653f9268d 100644 --- a/src/librustc/traits/query/type_op/eq.rs +++ b/src/librustc/traits/query/type_op/eq.rs @@ -1,4 +1,4 @@ -use crate::infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse}; +use crate::infer::canonical::{Canonicalized, CanonicalizedQueryResponse}; use crate::traits::query::Fallible; use crate::ty::{ParamEnvAnd, Ty, TyCtxt}; @@ -34,12 +34,6 @@ impl<'tcx> super::QueryTypeOp<'tcx> for Eq<'tcx> { ) -> Fallible> { tcx.type_op_eq(canonicalized) } - - fn shrink_to_tcx_lifetime( - v: &'a CanonicalizedQueryResponse<'tcx, ()>, - ) -> &'a Canonical<'tcx, QueryResponse<'tcx, ()>> { - v - } } BraceStructTypeFoldableImpl! { diff --git a/src/librustc/traits/query/type_op/implied_outlives_bounds.rs b/src/librustc/traits/query/type_op/implied_outlives_bounds.rs index 3beb4d6465..7aa9870341 100644 --- a/src/librustc/traits/query/type_op/implied_outlives_bounds.rs +++ b/src/librustc/traits/query/type_op/implied_outlives_bounds.rs @@ -1,9 +1,9 @@ -use crate::infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse}; +use crate::infer::canonical::{Canonicalized, CanonicalizedQueryResponse}; use crate::traits::query::outlives_bounds::OutlivesBound; use crate::traits::query::Fallible; use crate::ty::{ParamEnvAnd, Ty, TyCtxt}; -#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +#[derive(Clone, Debug)] pub struct ImpliedOutlivesBounds<'tcx> { pub ty: Ty<'tcx>, } @@ -38,12 +38,6 @@ impl<'tcx> super::QueryTypeOp<'tcx> for ImpliedOutlivesBounds<'tcx> { tcx.implied_outlives_bounds(canonicalized) } - - fn shrink_to_tcx_lifetime( - v: &'a CanonicalizedQueryResponse<'tcx, Self::QueryResponse>, - ) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self::QueryResponse>> { - v - } } BraceStructTypeFoldableImpl! 
{ diff --git a/src/librustc/traits/query/type_op/mod.rs b/src/librustc/traits/query/type_op/mod.rs index e2a5cd9670..98e535234b 100644 --- a/src/librustc/traits/query/type_op/mod.rs +++ b/src/librustc/traits/query/type_op/mod.rs @@ -1,6 +1,6 @@ use crate::infer::canonical::{ - Canonical, Canonicalized, CanonicalizedQueryResponse, OriginalQueryValues, - QueryRegionConstraints, QueryResponse, + Canonicalized, CanonicalizedQueryResponse, OriginalQueryValues, + QueryRegionConstraints, }; use crate::infer::{InferCtxt, InferOk}; use std::fmt; @@ -66,22 +66,6 @@ pub trait QueryTypeOp<'tcx>: fmt::Debug + Sized + TypeFoldable<'tcx> + 'tcx { canonicalized: Canonicalized<'tcx, ParamEnvAnd<'tcx, Self>>, ) -> Fallible>; - /// Casts a lifted query result (which is in the gcx lifetime) - /// into the tcx lifetime. This is always just an identity cast, - /// but the generic code doesn't realize it -- put another way, in - /// the generic code, we have a `Lifted<'tcx, Self::QueryResponse>` - /// and we want to convert that to a `Self::QueryResponse`. This is - /// not a priori valid, so we can't do it -- but in practice, it - /// is always a no-op (e.g., the lifted form of a type, - /// `Ty<'tcx>`, is a subtype of `Ty<'tcx>`). So we have to push - /// the operation into the impls that know more specifically what - /// `QueryResponse` is. This operation would (maybe) be nicer with - /// something like HKTs or GATs, since then we could make - /// `QueryResponse` parametric and `'tcx` and `'tcx` etc. - fn shrink_to_tcx_lifetime( - lifted_query_result: &'a CanonicalizedQueryResponse<'tcx, Self::QueryResponse>, - ) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self::QueryResponse>>; - fn fully_perform_into( query_key: ParamEnvAnd<'tcx, Self>, infcx: &InferCtxt<'_, 'tcx>, @@ -99,7 +83,6 @@ pub trait QueryTypeOp<'tcx>: fmt::Debug + Sized + TypeFoldable<'tcx> + 'tcx { let canonical_self = infcx.canonicalize_hr_query_hack(&query_key, &mut canonical_var_values); let canonical_result = Self::perform_query(infcx.tcx, canonical_self)?; - let canonical_result = Self::shrink_to_tcx_lifetime(&canonical_result); let param_env = query_key.param_env; diff --git a/src/librustc/traits/query/type_op/normalize.rs b/src/librustc/traits/query/type_op/normalize.rs index 3fe85d8d83..2138f792d4 100644 --- a/src/librustc/traits/query/type_op/normalize.rs +++ b/src/librustc/traits/query/type_op/normalize.rs @@ -1,4 +1,4 @@ -use crate::infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse}; +use crate::infer::canonical::{Canonicalized, CanonicalizedQueryResponse}; use std::fmt; use crate::traits::query::Fallible; use crate::ty::fold::TypeFoldable; @@ -38,12 +38,6 @@ where ) -> Fallible> { T::type_op_method(tcx, canonicalized) } - - fn shrink_to_tcx_lifetime( - v: &'a CanonicalizedQueryResponse<'tcx, T>, - ) -> &'a Canonical<'tcx, QueryResponse<'tcx, T>> { - T::shrink_to_tcx_lifetime(v) - } } pub trait Normalizable<'tcx>: fmt::Debug + TypeFoldable<'tcx> + Lift<'tcx> + Copy { @@ -51,12 +45,6 @@ pub trait Normalizable<'tcx>: fmt::Debug + TypeFoldable<'tcx> + Lift<'tcx> + Cop tcx: TyCtxt<'tcx>, canonicalized: Canonicalized<'tcx, ParamEnvAnd<'tcx, Normalize>>, ) -> Fallible>; - - /// Converts from the `'tcx` (lifted) form of `Self` into the `tcx` - /// form of `Self`. 
- fn shrink_to_tcx_lifetime( - v: &'a CanonicalizedQueryResponse<'tcx, Self>, - ) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>>; } impl Normalizable<'tcx> for Ty<'tcx> { @@ -66,12 +54,6 @@ impl Normalizable<'tcx> for Ty<'tcx> { ) -> Fallible> { tcx.type_op_normalize_ty(canonicalized) } - - fn shrink_to_tcx_lifetime( - v: &'a CanonicalizedQueryResponse<'tcx, Self>, - ) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>> { - v - } } impl Normalizable<'tcx> for ty::Predicate<'tcx> { @@ -81,12 +63,6 @@ impl Normalizable<'tcx> for ty::Predicate<'tcx> { ) -> Fallible> { tcx.type_op_normalize_predicate(canonicalized) } - - fn shrink_to_tcx_lifetime( - v: &'a CanonicalizedQueryResponse<'tcx, Self>, - ) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>> { - v - } } impl Normalizable<'tcx> for ty::PolyFnSig<'tcx> { @@ -96,12 +72,6 @@ impl Normalizable<'tcx> for ty::PolyFnSig<'tcx> { ) -> Fallible> { tcx.type_op_normalize_poly_fn_sig(canonicalized) } - - fn shrink_to_tcx_lifetime( - v: &'a CanonicalizedQueryResponse<'tcx, Self>, - ) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>> { - v - } } impl Normalizable<'tcx> for ty::FnSig<'tcx> { @@ -111,12 +81,6 @@ impl Normalizable<'tcx> for ty::FnSig<'tcx> { ) -> Fallible> { tcx.type_op_normalize_fn_sig(canonicalized) } - - fn shrink_to_tcx_lifetime( - v: &'a CanonicalizedQueryResponse<'tcx, Self>, - ) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>> { - v - } } BraceStructTypeFoldableImpl! { diff --git a/src/librustc/traits/query/type_op/outlives.rs b/src/librustc/traits/query/type_op/outlives.rs index d4b36356ff..86a32d68fc 100644 --- a/src/librustc/traits/query/type_op/outlives.rs +++ b/src/librustc/traits/query/type_op/outlives.rs @@ -1,5 +1,4 @@ -use crate::infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse}; -use crate::traits::query::dropck_outlives::trivial_dropck_outlives; +use crate::infer::canonical::{Canonicalized, CanonicalizedQueryResponse}; use crate::traits::query::dropck_outlives::DropckOutlivesResult; use crate::traits::query::Fallible; use crate::ty::{ParamEnvAnd, Ty, TyCtxt}; @@ -22,7 +21,7 @@ impl super::QueryTypeOp<'tcx> for DropckOutlives<'tcx> { tcx: TyCtxt<'tcx>, key: &ParamEnvAnd<'tcx, Self>, ) -> Option { - if trivial_dropck_outlives(tcx, key.value.dropped_ty) { + if tcx.trivial_dropck_outlives(key.value.dropped_ty) { Some(DropckOutlivesResult::default()) } else { None @@ -53,12 +52,6 @@ impl super::QueryTypeOp<'tcx> for DropckOutlives<'tcx> { tcx.dropck_outlives(canonicalized) } - - fn shrink_to_tcx_lifetime( - lifted_query_result: &'a CanonicalizedQueryResponse<'tcx, Self::QueryResponse>, - ) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self::QueryResponse>> { - lifted_query_result - } } BraceStructTypeFoldableImpl! 
{ diff --git a/src/librustc/traits/query/type_op/prove_predicate.rs b/src/librustc/traits/query/type_op/prove_predicate.rs index 1efe66326d..2a908d0f66 100644 --- a/src/librustc/traits/query/type_op/prove_predicate.rs +++ b/src/librustc/traits/query/type_op/prove_predicate.rs @@ -1,4 +1,4 @@ -use crate::infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse}; +use crate::infer::canonical::{Canonicalized, CanonicalizedQueryResponse}; use crate::traits::query::Fallible; use crate::ty::{ParamEnvAnd, Predicate, TyCtxt}; @@ -43,12 +43,6 @@ impl<'tcx> super::QueryTypeOp<'tcx> for ProvePredicate<'tcx> { ) -> Fallible> { tcx.type_op_prove_predicate(canonicalized) } - - fn shrink_to_tcx_lifetime( - v: &'a CanonicalizedQueryResponse<'tcx, ()>, - ) -> &'a Canonical<'tcx, QueryResponse<'tcx, ()>> { - v - } } BraceStructTypeFoldableImpl! { diff --git a/src/librustc/traits/query/type_op/subtype.rs b/src/librustc/traits/query/type_op/subtype.rs index 71c74999c2..c89a55daa0 100644 --- a/src/librustc/traits/query/type_op/subtype.rs +++ b/src/librustc/traits/query/type_op/subtype.rs @@ -1,4 +1,4 @@ -use crate::infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse}; +use crate::infer::canonical::{Canonicalized, CanonicalizedQueryResponse}; use crate::traits::query::Fallible; use crate::ty::{ParamEnvAnd, Ty, TyCtxt}; @@ -34,12 +34,6 @@ impl<'tcx> super::QueryTypeOp<'tcx> for Subtype<'tcx> { ) -> Fallible> { tcx.type_op_subtype(canonicalized) } - - fn shrink_to_tcx_lifetime( - v: &'a CanonicalizedQueryResponse<'tcx, ()>, - ) -> &'a Canonical<'tcx, QueryResponse<'tcx, ()>> { - v - } } BraceStructTypeFoldableImpl! { diff --git a/src/librustc/traits/select.rs b/src/librustc/traits/select.rs index a54bc05f16..d8a27f1e04 100644 --- a/src/librustc/traits/select.rs +++ b/src/librustc/traits/select.rs @@ -40,9 +40,11 @@ use crate::ty::subst::{Subst, SubstsRef}; use crate::ty::{self, ToPolyTraitRef, ToPredicate, Ty, TyCtxt, TypeFoldable}; use crate::hir; -use rustc_data_structures::bit_set::GrowableBitSet; +use rustc_index::bit_set::GrowableBitSet; use rustc_data_structures::sync::Lock; use rustc_target::spec::abi::Abi; +use syntax::attr; +use syntax::symbol::sym; use std::cell::{Cell, RefCell}; use std::cmp; use std::fmt::{self, Display}; @@ -99,6 +101,9 @@ pub enum IntercrateAmbiguityCause { trait_desc: String, self_desc: Option, }, + ReservationImpl { + message: String + }, } impl IntercrateAmbiguityCause { @@ -139,6 +144,11 @@ impl IntercrateAmbiguityCause { trait_desc, self_desc ) } + &IntercrateAmbiguityCause::ReservationImpl { + ref message + } => { + message.clone() + } } } } @@ -214,7 +224,7 @@ pub struct SelectionCache<'tcx> { /// of type variables - it just means the obligation isn't sufficiently /// elaborated. In that case we report an ambiguity, and the caller can /// try again after more type information has been gathered or report a -/// "type annotations required" error. +/// "type annotations needed" error. /// /// However, with type parameters, this can be a real problem - type /// parameters don't unify with regular types, but they *can* unify @@ -1326,17 +1336,38 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { (result, dep_node) } - // Treat negative impls as unimplemented - fn filter_negative_impls( - &self, + // Treat negative impls as unimplemented, and reservation impls as ambiguity. 
+ fn filter_negative_and_reservation_impls( + &mut self, candidate: SelectionCandidate<'tcx>, ) -> SelectionResult<'tcx, SelectionCandidate<'tcx>> { if let ImplCandidate(def_id) = candidate { - if !self.allow_negative_impls - && self.tcx().impl_polarity(def_id) == hir::ImplPolarity::Negative - { - return Err(Unimplemented); - } + let tcx = self.tcx(); + match tcx.impl_polarity(def_id) { + ty::ImplPolarity::Negative if !self.allow_negative_impls => { + return Err(Unimplemented); + } + ty::ImplPolarity::Reservation => { + if let Some(intercrate_ambiguity_clauses) + = &mut self.intercrate_ambiguity_causes + { + let attrs = tcx.get_attrs(def_id); + let attr = attr::find_by_name(&attrs, sym::rustc_reservation_impl); + let value = attr.and_then(|a| a.value_str()); + if let Some(value) = value { + debug!("filter_negative_and_reservation_impls: \ + reservation impl ambiguity on {:?}", def_id); + intercrate_ambiguity_clauses.push( + IntercrateAmbiguityCause::ReservationImpl { + message: value.to_string() + } + ); + } + } + return Ok(None); + } + _ => {} + }; } Ok(Some(candidate)) } @@ -1453,7 +1484,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // Instead, we select the right impl now but report `Bar does // not implement Clone`. if candidates.len() == 1 { - return self.filter_negative_impls(candidates.pop().unwrap()); + return self.filter_negative_and_reservation_impls(candidates.pop().unwrap()); } // Winnow, but record the exact outcome of evaluation, which @@ -1528,7 +1559,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { } // Just one candidate left. - self.filter_negative_impls(candidates.pop().unwrap().candidate) + self.filter_negative_and_reservation_impls(candidates.pop().unwrap().candidate) } fn is_knowable<'o>(&mut self, stack: &TraitObligationStack<'o, 'tcx>) -> Option { @@ -1785,7 +1816,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // before we go into the whole placeholder thing, just // quickly check if the self-type is a projection at all. - match obligation.predicate.skip_binder().trait_ref.self_ty().sty { + match obligation.predicate.skip_binder().trait_ref.self_ty().kind { ty::Projection(_) | ty::Opaque(..) => {} ty::Infer(ty::TyVar(_)) => { span_bug!( @@ -1823,7 +1854,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { placeholder_trait_predicate, ); - let (def_id, substs) = match placeholder_trait_predicate.trait_ref.self_ty().sty { + let (def_id, substs) = match placeholder_trait_predicate.trait_ref.self_ty().kind { ty::Projection(ref data) => (data.trait_ref(self.tcx()).def_id, data.substs), ty::Opaque(def_id, substs) => (def_id, substs), _ => { @@ -1971,7 +2002,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // touch bound regions, they just capture the in-scope // type/region parameters. let self_ty = *obligation.self_ty().skip_binder(); - match self_ty.sty { + match self_ty.kind { ty::Generator(..) 
=> { debug!( "assemble_generator_candidates: self_ty={:?} obligation={:?}", @@ -2014,13 +2045,16 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // Okay to skip binder because the substs on closure types never // touch bound regions, they just capture the in-scope // type/region parameters - match obligation.self_ty().skip_binder().sty { + match obligation.self_ty().skip_binder().kind { ty::Closure(closure_def_id, closure_substs) => { debug!( "assemble_unboxed_candidates: kind={:?} obligation={:?}", kind, obligation ); - match self.infcx.closure_kind(closure_def_id, closure_substs) { + match self.infcx.closure_kind( + closure_def_id, + closure_substs + ) { Some(closure_kind) => { debug!( "assemble_unboxed_candidates: closure_kind = {:?}", @@ -2063,7 +2097,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // Okay to skip binder because what we are inspecting doesn't involve bound regions let self_ty = *obligation.self_ty().skip_binder(); - match self_ty.sty { + match self_ty.kind { ty::Infer(ty::TyVar(_)) => { debug!("assemble_fn_pointer_candidates: ambiguous self-type"); candidates.ambiguous = true; // could wind up being a fn() type @@ -2125,7 +2159,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let def_id = obligation.predicate.def_id(); if self.tcx().trait_is_auto(def_id) { - match self_ty.sty { + match self_ty.kind { ty::Dynamic(..) => { // For object types, we don't know what the closed // over types are. This means we conservatively @@ -2198,7 +2232,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // self-ty here doesn't escape this probe, so just erase // any LBR. let self_ty = self.tcx().erase_late_bound_regions(&obligation.self_ty()); - let poly_trait_ref = match self_ty.sty { + let poly_trait_ref = match self_ty.kind { ty::Dynamic(ref data, ..) => { if data.auto_traits() .any(|did| did == obligation.predicate.def_id()) @@ -2212,7 +2246,13 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { } if let Some(principal) = data.principal() { - principal.with_self_ty(self.tcx(), self_ty) + if !self.infcx.tcx.features().object_safe_for_dispatch { + principal.with_self_ty(self.tcx(), self_ty) + } else if self.tcx().is_object_safe(principal.def_id()) { + principal.with_self_ty(self.tcx(), self_ty) + } else { + return; + } } else { // Only auto-trait bounds exist. return; @@ -2294,7 +2334,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { source, target ); - let may_apply = match (&source.sty, &target.sty) { + let may_apply = match (&source.kind, &target.kind) { // Trait+Kx+'a -> Trait+Ky+'b (upcasts). (&ty::Dynamic(ref data_a, ..), &ty::Dynamic(ref data_b, ..)) => { // Upcasts permit two things: @@ -2460,7 +2500,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { if other.evaluation.must_apply_modulo_regions() { match victim.candidate { ImplCandidate(victim_def) => { - let tcx = self.tcx().global_tcx(); + let tcx = self.tcx(); return tcx.specializes((other_def, victim_def)) || tcx.impls_are_allowed_to_overlap( other_def, victim_def).is_some(); @@ -2532,7 +2572,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let self_ty = self.infcx .shallow_resolve(obligation.predicate.skip_binder().self_ty()); - match self_ty.sty { + match self_ty.kind { ty::Infer(ty::IntVar(_)) | ty::Infer(ty::FloatVar(_)) | ty::Uint(_) @@ -2598,7 +2638,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { use self::BuiltinImplConditions::{Ambiguous, None, Where}; - match self_ty.sty { + match self_ty.kind { ty::Infer(ty::IntVar(_)) | ty::Infer(ty::FloatVar(_)) | ty::FnDef(..) 
@@ -2638,7 +2678,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { ty::Closure(def_id, substs) => { // (*) binder moved here Where(ty::Binder::bind( - substs.upvar_tys(def_id, self.tcx()).collect(), + substs.as_closure().upvar_tys(def_id, self.tcx()).collect(), )) } @@ -2680,7 +2720,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { /// Zed where enum Zed { A(T), B(u32) } -> [i32, u32] /// ``` fn constituent_types_for_ty(&self, t: Ty<'tcx>) -> Vec> { - match t.sty { + match t.kind { ty::Uint(_) | ty::Int(_) | ty::Bool @@ -2722,11 +2762,14 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { tys.iter().map(|k| k.expect_ty()).collect() } - ty::Closure(def_id, ref substs) => substs.upvar_tys(def_id, self.tcx()).collect(), + ty::Closure(def_id, ref substs) => substs.as_closure() + .upvar_tys(def_id, self.tcx()) + .collect(), ty::Generator(def_id, ref substs, _) => { - let witness = substs.witness(def_id, self.tcx()); + let witness = substs.as_generator().witness(def_id, self.tcx()); substs + .as_generator() .upvar_tys(def_id, self.tcx()) .chain(iter::once(witness)) .collect() @@ -2782,7 +2825,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // binder moved -\ let ty: ty::Binder> = ty::Binder::bind(ty); // <----/ - self.infcx.in_snapshot(|_| { + self.infcx.commit_unconditionally(|_| { let (skol_ty, _) = self.infcx .replace_bound_vars_with_placeholders(&ty); let Normalized { @@ -2895,7 +2938,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { } fn confirm_projection_candidate(&mut self, obligation: &TraitObligation<'tcx>) { - self.infcx.in_snapshot(|snapshot| { + self.infcx.commit_unconditionally(|snapshot| { let result = self.match_projection_obligation_against_definition_bounds( obligation, @@ -3017,19 +3060,20 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { nested, ); - let trait_obligations: Vec> = self.infcx.in_snapshot(|_| { - let poly_trait_ref = obligation.predicate.to_poly_trait_ref(); - let (trait_ref, _) = self.infcx - .replace_bound_vars_with_placeholders(&poly_trait_ref); - let cause = obligation.derived_cause(ImplDerivedObligation); - self.impl_or_trait_obligations( - cause, - obligation.recursion_depth + 1, - obligation.param_env, - trait_def_id, - &trait_ref.substs, - ) - }); + let trait_obligations: Vec> = + self.infcx.commit_unconditionally(|_| { + let poly_trait_ref = obligation.predicate.to_poly_trait_ref(); + let (trait_ref, _) = self.infcx + .replace_bound_vars_with_placeholders(&poly_trait_ref); + let cause = obligation.derived_cause(ImplDerivedObligation); + self.impl_or_trait_obligations( + cause, + obligation.recursion_depth + 1, + obligation.param_env, + trait_def_id, + &trait_ref.substs, + ) + }); // Adds the predicates from the trait. Note that this contains a `Self: Trait` // predicate as usual. It won't have any effect since auto traits are coinductive. @@ -3052,7 +3096,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // First, create the substitutions by matching the impl again, // this time not in a probe. - self.infcx.in_snapshot(|snapshot| { + self.infcx.commit_unconditionally(|snapshot| { let substs = self.rematch_impl(impl_def_id, obligation, snapshot); debug!("confirm_impl_candidate: substs={:?}", substs); let cause = obligation.derived_cause(ImplDerivedObligation); @@ -3118,7 +3162,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // results. let self_ty = self.infcx .shallow_resolve(*obligation.self_ty().skip_binder()); - let poly_trait_ref = match self_ty.sty { + let poly_trait_ref = match self_ty.kind { ty::Dynamic(ref data, ..) 
=> data.principal().unwrap_or_else(|| { span_bug!(obligation.cause.span, "object candidate with no principal") @@ -3216,7 +3260,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { obligation, alias_def_id ); - self.infcx.in_snapshot(|_| { + self.infcx.commit_unconditionally(|_| { let (predicate, _) = self.infcx() .replace_bound_vars_with_placeholders(&obligation.predicate); let trait_ref = predicate.trait_ref; @@ -3252,7 +3296,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // touch bound regions, they just capture the in-scope // type/region parameters. let self_ty = self.infcx.shallow_resolve(*obligation.self_ty().skip_binder()); - let (generator_def_id, substs) = match self_ty.sty { + let (generator_def_id, substs) = match self_ty.kind { ty::Generator(id, substs, _) => (id, substs), _ => bug!("closure candidate for non-closure {:?}", obligation), }; @@ -3288,8 +3332,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { )?); Ok(VtableGeneratorData { - generator_def_id: generator_def_id, - substs: substs.clone(), + generator_def_id, + substs, nested: obligations, }) } @@ -3309,7 +3353,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // touch bound regions, they just capture the in-scope // type/region parameters. let self_ty = self.infcx.shallow_resolve(*obligation.self_ty().skip_binder()); - let (closure_def_id, substs) = match self_ty.sty { + let (closure_def_id, substs) = match self_ty.kind { ty::Closure(id, substs) => (id, substs), _ => bug!("closure candidate for non-closure {:?}", obligation), }; @@ -3339,17 +3383,22 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { )?); // FIXME: chalk + if !self.tcx().sess.opts.debugging_opts.chalk { obligations.push(Obligation::new( obligation.cause.clone(), obligation.param_env, - ty::Predicate::ClosureKind(closure_def_id, substs, kind), + ty::Predicate::ClosureKind( + closure_def_id, + substs, + kind + ), )); } Ok(VtableClosureData { closure_def_id, - substs: substs.clone(), + substs: substs, nested: obligations, }) } @@ -3418,7 +3467,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { ); let mut nested = vec![]; - match (&source.sty, &target.sty) { + match (&source.kind, &target.kind) { // Trait+Kx+'a -> Trait+Ky+'b (upcasts). (&ty::Dynamic(ref data_a, r_a), &ty::Dynamic(ref data_b, r_b)) => { // See assemble_candidates_for_unsizing for more info. 
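The `(&source.kind, &target.kind)` match just above handles unsizing between trait objects, where, as the "Trait+Kx+'a -> Trait+Ky+'b (upcasts)" comment says, the coercion may drop auto-trait bounds and tighten the object lifetime. A surface-level example of those two coercions, independent of rustc internals:

```rust
use std::fmt::Debug;

// Dropping an auto-trait bound: `dyn Debug + Send` coerces to `dyn Debug`.
fn drop_auto_trait<'a>(x: Box<dyn Debug + Send + 'a>) -> Box<dyn Debug + 'a> {
    x
}

// Tightening the object lifetime: `'static` outlives `'a`.
fn shorten_lifetime<'a>(x: Box<dyn Debug + 'static>) -> Box<dyn Debug + 'a> {
    x
}

fn main() {
    let boxed: Box<dyn Debug + Send + 'static> = Box::new(42u32);
    let debug_only = drop_auto_trait(boxed);
    let short = shorten_lifetime(debug_only);
    println!("{:?}", short);
}
```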
@@ -3550,7 +3599,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let mut ty_params = GrowableBitSet::new_empty(); let mut found = false; for ty in field.walk() { - if let ty::Param(p) = ty.sty { + if let ty::Param(p) = ty.kind { ty_params.insert(p.index as usize); found = true; } @@ -3728,6 +3777,13 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { return Err(()); } + if self.intercrate.is_none() + && self.tcx().impl_polarity(impl_def_id) == ty::ImplPolarity::Reservation + { + debug!("match_impl: reservation impls only apply in intercrate mode"); + return Err(()); + } + debug!("match_impl: success impl_substs={:?}", impl_substs); Ok(Normalized { value: impl_substs, @@ -3831,7 +3887,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { &mut self, obligation: &TraitObligation<'tcx>, closure_def_id: DefId, - substs: ty::ClosureSubsts<'tcx>, + substs: SubstsRef<'tcx>, ) -> ty::PolyTraitRef<'tcx> { debug!( "closure_trait_ref_unnormalized(obligation={:?}, closure_def_id={:?}, substs={:?})", @@ -3863,9 +3919,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { &mut self, obligation: &TraitObligation<'tcx>, closure_def_id: DefId, - substs: ty::GeneratorSubsts<'tcx>, + substs: SubstsRef<'tcx>, ) -> ty::PolyTraitRef<'tcx> { - let gen_sig = substs.poly_sig(closure_def_id, self.tcx()); + let gen_sig = substs.as_generator().poly_sig(closure_def_id, self.tcx()); // (1) Feels icky to skip the binder here, but OTOH we know // that the self-type is an generator type and hence is diff --git a/src/librustc/traits/specialize/mod.rs b/src/librustc/traits/specialize/mod.rs index f0389bb037..c1c6eb850f 100644 --- a/src/librustc/traits/specialize/mod.rs +++ b/src/librustc/traits/specialize/mod.rs @@ -125,7 +125,7 @@ pub fn find_associated_item<'tcx>( let trait_def = tcx.trait_def(trait_def_id); let ancestors = trait_def.ancestors(tcx, impl_data.impl_def_id); - match ancestors.defs(tcx, item.ident, item.kind, trait_def_id).next() { + match ancestors.leaf_def(tcx, item.ident, item.kind) { Some(node_item) => { let substs = tcx.infer_ctxt().enter(|infcx| { let param_env = param_env.with_reveal_all(); @@ -419,7 +419,7 @@ fn to_pretty_impl_header(tcx: TyCtxt<'_>, impl_def_id: DefId) -> Option // The predicates will contain default bounds like `T: Sized`. We need to // remove these bounds, and add `T: ?Sized` to any untouched type parameters. - let predicates = &tcx.predicates_of(impl_def_id).predicates; + let predicates = tcx.predicates_of(impl_def_id).predicates; let mut pretty_predicates = Vec::with_capacity( predicates.len() + types_without_default_bounds.len()); diff --git a/src/librustc/traits/specialize/specialization_graph.rs b/src/librustc/traits/specialize/specialization_graph.rs index b43881defd..c64d6748ea 100644 --- a/src/librustc/traits/specialize/specialization_graph.rs +++ b/src/librustc/traits/specialize/specialization_graph.rs @@ -2,13 +2,11 @@ use super::OverlapError; use crate::hir::def_id::DefId; use crate::ich::{self, StableHashingContext}; -use rustc_data_structures::stable_hasher::{HashStable, StableHasher, - StableHasherResult}; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use crate::traits; use crate::ty::{self, TyCtxt, TypeFoldable}; use crate::ty::fast_reject::{self, SimplifiedType}; use syntax::ast::Ident; -use crate::util::captures::Captures; use crate::util::nodemap::{DefIdMap, FxHashMap}; /// A per-trait graph of impls in specialization order. 
At the moment, this @@ -85,11 +83,11 @@ impl<'tcx> Children { /// Insert an impl into this set of children without comparing to any existing impls. fn insert_blindly(&mut self, tcx: TyCtxt<'tcx>, impl_def_id: DefId) { let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap(); - if let Some(sty) = fast_reject::simplify_type(tcx, trait_ref.self_ty(), false) { - debug!("insert_blindly: impl_def_id={:?} sty={:?}", impl_def_id, sty); - self.nonblanket_impls.entry(sty).or_default().push(impl_def_id) + if let Some(st) = fast_reject::simplify_type(tcx, trait_ref.self_ty(), false) { + debug!("insert_blindly: impl_def_id={:?} st={:?}", impl_def_id, st); + self.nonblanket_impls.entry(st).or_default().push(impl_def_id) } else { - debug!("insert_blindly: impl_def_id={:?} sty=None", impl_def_id); + debug!("insert_blindly: impl_def_id={:?} st=None", impl_def_id); self.blanket_impls.push(impl_def_id) } } @@ -100,11 +98,11 @@ impl<'tcx> Children { fn remove_existing(&mut self, tcx: TyCtxt<'tcx>, impl_def_id: DefId) { let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap(); let vec: &mut Vec; - if let Some(sty) = fast_reject::simplify_type(tcx, trait_ref.self_ty(), false) { - debug!("remove_existing: impl_def_id={:?} sty={:?}", impl_def_id, sty); - vec = self.nonblanket_impls.get_mut(&sty).unwrap(); + if let Some(st) = fast_reject::simplify_type(tcx, trait_ref.self_ty(), false) { + debug!("remove_existing: impl_def_id={:?} st={:?}", impl_def_id, st); + vec = self.nonblanket_impls.get_mut(&st).unwrap(); } else { - debug!("remove_existing: impl_def_id={:?} sty=None", impl_def_id); + debug!("remove_existing: impl_def_id={:?} st=None", impl_def_id); vec = &mut self.blanket_impls; } @@ -130,7 +128,7 @@ impl<'tcx> Children { ); let possible_siblings = match simplified_self { - Some(sty) => PotentialSiblings::Filtered(self.filtered(sty)), + Some(st) => PotentialSiblings::Filtered(self.filtered(st)), None => PotentialSiblings::Unfiltered(self.iter()), }; @@ -162,7 +160,6 @@ impl<'tcx> Children { } }; - let tcx = tcx.global_tcx(); let (le, ge) = traits::overlapping_impls( tcx, possible_sibling, @@ -249,8 +246,8 @@ impl<'tcx> Children { self.blanket_impls.iter().chain(nonblanket).cloned() } - fn filtered(&mut self, sty: SimplifiedType) -> impl Iterator + '_ { - let nonblanket = self.nonblanket_impls.entry(sty).or_default().iter(); + fn filtered(&mut self, st: SimplifiedType) -> impl Iterator + '_ { + let nonblanket = self.nonblanket_impls.entry(st).or_default().iter(); self.blanket_impls.iter().chain(nonblanket).cloned() } } @@ -395,7 +392,7 @@ impl<'tcx> Graph { /// The parent of a given impl, which is the `DefId` of the trait when the /// impl is a "specialization root". pub fn parent(&self, child: DefId) -> DefId { - *self.parent.get(&child).unwrap() + *self.parent.get(&child).unwrap_or_else(|| panic!("Failed to get parent for {:?}", child)) } } @@ -421,6 +418,35 @@ impl<'tcx> Node { tcx.associated_items(self.def_id()) } + /// Finds an associated item defined in this node. + /// + /// If this returns `None`, the item can potentially still be found in + /// parents of this node. + pub fn item( + &self, + tcx: TyCtxt<'tcx>, + trait_item_name: Ident, + trait_item_kind: ty::AssocKind, + trait_def_id: DefId, + ) -> Option { + use crate::ty::AssocKind::*; + + tcx.associated_items(self.def_id()) + .find(move |impl_item| match (trait_item_kind, impl_item.kind) { + | (Const, Const) + | (Method, Method) + | (Type, Type) + | (Type, OpaqueTy) // assoc. 
types can be made opaque in impls + => tcx.hygienic_eq(impl_item.ident, trait_item_name, trait_def_id), + + | (Const, _) + | (Method, _) + | (Type, _) + | (OpaqueTy, _) + => false, + }) + } + pub fn def_id(&self) -> DefId { match *self { Node::Impl(did) => did, @@ -429,6 +455,7 @@ impl<'tcx> Node { } } +#[derive(Copy, Clone)] pub struct Ancestors<'tcx> { trait_def_id: DefId, specialization_graph: &'tcx Graph, @@ -467,32 +494,18 @@ impl NodeItem { } impl<'tcx> Ancestors<'tcx> { - /// Search the items from the given ancestors, returning each definition - /// with the given name and the given kind. - // FIXME(#35870): avoid closures being unexported due to `impl Trait`. - #[inline] - pub fn defs( - self, + /// Finds the bottom-most (ie. most specialized) definition of an associated + /// item. + pub fn leaf_def( + mut self, tcx: TyCtxt<'tcx>, trait_item_name: Ident, trait_item_kind: ty::AssocKind, - trait_def_id: DefId, - ) -> impl Iterator> + Captures<'tcx> + 'tcx { - self.flat_map(move |node| { - use crate::ty::AssocKind::*; - node.items(tcx).filter(move |impl_item| match (trait_item_kind, impl_item.kind) { - | (Const, Const) - | (Method, Method) - | (Type, Type) - | (Type, OpaqueTy) - => tcx.hygienic_eq(impl_item.ident, trait_item_name, trait_def_id), - - | (Const, _) - | (Method, _) - | (Type, _) - | (OpaqueTy, _) - => false, - }).map(move |item| NodeItem { node: node, item: item }) + ) -> Option> { + let trait_def_id = self.trait_def_id; + self.find_map(|node| { + node.item(tcx, trait_item_name, trait_item_kind, trait_def_id) + .map(|item| NodeItem { node, item }) }) } } @@ -513,9 +526,7 @@ pub fn ancestors( } impl<'a> HashStable> for Children { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { let Children { ref nonblanket_impls, ref blanket_impls, diff --git a/src/librustc/traits/structural_impls.rs b/src/librustc/traits/structural_impls.rs index 68c97226f8..109e884f8b 100644 --- a/src/librustc/traits/structural_impls.rs +++ b/src/librustc/traits/structural_impls.rs @@ -4,7 +4,7 @@ use crate::traits; use crate::traits::project::Normalized; use crate::ty::fold::{TypeFoldable, TypeFolder, TypeVisitor}; use crate::ty::{self, Lift, Ty, TyCtxt}; -use syntax::symbol::InternedString; +use syntax::symbol::Symbol; use std::fmt; use std::rc::Rc; @@ -261,11 +261,11 @@ impl fmt::Display for traits::QuantifierKind { /// for debug output in tests anyway. struct BoundNamesCollector { // Just sort by name because `BoundRegion::BrNamed` does not have a `BoundVar` index anyway. - regions: BTreeSet, + regions: BTreeSet, // Sort by `BoundVar` index, so usually this should be equivalent to the order given // by the list of type parameters. 
- types: BTreeMap, + types: BTreeMap, binder_index: ty::DebruijnIndex, } @@ -312,14 +312,14 @@ impl<'tcx> TypeVisitor<'tcx> for BoundNamesCollector { } fn visit_ty(&mut self, t: Ty<'tcx>) -> bool { - match t.sty { + match t.kind { ty::Bound(debruijn, bound_ty) if debruijn == self.binder_index => { self.types.insert( bound_ty.var.as_u32(), match bound_ty.kind { ty::BoundTyKind::Param(name) => name, ty::BoundTyKind::Anon => - InternedString::intern(&format!("^{}", bound_ty.var.as_u32()), + Symbol::intern(&format!("^{}", bound_ty.var.as_u32()), ), } ); @@ -340,7 +340,7 @@ impl<'tcx> TypeVisitor<'tcx> for BoundNamesCollector { } ty::BoundRegion::BrAnon(var) => { - self.regions.insert(InternedString::intern(&format!("'^{}", var))); + self.regions.insert(Symbol::intern(&format!("'^{}", var))); } _ => (), @@ -481,17 +481,23 @@ impl<'a, 'tcx> Lift<'tcx> for traits::ObligationCauseCode<'a> { .and_then(|r| Some(super::ObjectTypeBound(ty, r))) ), super::ObjectCastObligation(ty) => tcx.lift(&ty).map(super::ObjectCastObligation), + super::Coercion { source, target } => Some(super::Coercion { + source: tcx.lift(&source)?, + target: tcx.lift(&target)?, + }), super::AssignmentLhsSized => Some(super::AssignmentLhsSized), super::TupleInitializerSized => Some(super::TupleInitializerSized), super::StructInitializerSized => Some(super::StructInitializerSized), super::VariableType(id) => Some(super::VariableType(id)), - super::ReturnType(id) => Some(super::ReturnType(id)), + super::ReturnValue(id) => Some(super::ReturnValue(id)), + super::ReturnType => Some(super::ReturnType), super::SizedArgumentType => Some(super::SizedArgumentType), super::SizedReturnType => Some(super::SizedReturnType), super::SizedYieldType => Some(super::SizedYieldType), - super::RepeatVec => Some(super::RepeatVec), + super::RepeatVec(suggest_flag) => Some(super::RepeatVec(suggest_flag)), super::FieldSized { adt_kind, last } => Some(super::FieldSized { adt_kind, last }), super::ConstSized => Some(super::ConstSized), + super::ConstPatternStructural => Some(super::ConstPatternStructural), super::SharedStatic => Some(super::SharedStatic), super::BuiltinDerivedObligation(ref cause) => { tcx.lift(cause).map(super::BuiltinDerivedObligation) @@ -543,6 +549,7 @@ impl<'a, 'tcx> Lift<'tcx> for traits::ObligationCauseCode<'a> { super::MethodReceiver => Some(super::MethodReceiver), super::BlockTailExpression(id) => Some(super::BlockTailExpression(id)), super::TrivialBound => Some(super::TrivialBound), + super::AssocTypeBound(impl_sp, sp) => Some(super::AssocTypeBound(impl_sp, sp)), } } } diff --git a/src/librustc/traits/util.rs b/src/librustc/traits/util.rs index 3e5520dd46..d8b1effe09 100644 --- a/src/librustc/traits/util.rs +++ b/src/librustc/traits/util.rs @@ -4,10 +4,9 @@ use syntax_pos::Span; use crate::hir; use crate::hir::def_id::DefId; -use crate::traits::specialize::specialization_graph::NodeItem; use crate::ty::{self, Ty, TyCtxt, ToPredicate, ToPolyTraitRef}; use crate::ty::outlives::Component; -use crate::ty::subst::{Kind, Subst, SubstsRef}; +use crate::ty::subst::{GenericArg, Subst, SubstsRef}; use crate::util::nodemap::FxHashSet; use super::{Obligation, ObligationCause, PredicateObligation, SelectionContext, Normalized}; @@ -551,7 +550,7 @@ impl<'tcx> TyCtxt<'tcx> { trait_def_id: DefId, recursion_depth: usize, self_ty: Ty<'tcx>, - params: &[Kind<'tcx>]) + params: &[GenericArg<'tcx>]) -> PredicateObligation<'tcx> { let trait_ref = ty::TraitRef { @@ -654,22 +653,21 @@ impl<'tcx> TyCtxt<'tcx> { match 
self.hir().as_local_hir_id(node_item_def_id) { Some(hir_id) => { let item = self.hir().expect_item(hir_id); - if let hir::ItemKind::Impl(_, _, defaultness, ..) = item.node { + if let hir::ItemKind::Impl(_, _, defaultness, ..) = item.kind { defaultness.is_default() } else { false } } None => { - self.global_tcx() - .impl_defaultness(node_item_def_id) + self.impl_defaultness(node_item_def_id) .is_default() } } } - pub fn impl_item_is_final(self, node_item: &NodeItem) -> bool { - node_item.item.is_final() && !self.impl_is_default(node_item.node.def_id()) + pub fn impl_item_is_final(self, assoc_item: &ty::AssocItem) -> bool { + assoc_item.defaultness.is_final() && !self.impl_is_default(assoc_item.container.id()) } } diff --git a/src/librustc/ty/_match.rs b/src/librustc/ty/_match.rs index f800a70e0b..a0d22789da 100644 --- a/src/librustc/ty/_match.rs +++ b/src/librustc/ty/_match.rs @@ -59,7 +59,7 @@ impl TypeRelation<'tcx> for Match<'tcx> { a, b); if a == b { return Ok(a); } - match (&a.sty, &b.sty) { + match (&a.kind, &b.kind) { (_, &ty::Infer(ty::FreshTy(_))) | (_, &ty::Infer(ty::FreshIntTy(_))) | (_, &ty::Infer(ty::FreshFloatTy(_))) => { diff --git a/src/librustc/ty/binding.rs b/src/librustc/ty/binding.rs index 1290141b0a..5570144489 100644 --- a/src/librustc/ty/binding.rs +++ b/src/librustc/ty/binding.rs @@ -2,7 +2,7 @@ use crate::hir::BindingAnnotation::*; use crate::hir::BindingAnnotation; use crate::hir::Mutability; -#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] +#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)] pub enum BindingMode { BindByReference(Mutability), BindByValue(Mutability), diff --git a/src/librustc/ty/cast.rs b/src/librustc/ty/cast.rs index 7ea5c73c5b..bc12412312 100644 --- a/src/librustc/ty/cast.rs +++ b/src/librustc/ty/cast.rs @@ -52,7 +52,7 @@ impl<'tcx> CastTy<'tcx> { /// Returns `Some` for integral/pointer casts. /// casts like unsizing casts will return `None` pub fn from_ty(t: Ty<'tcx>) -> Option> { - match t.sty { + match t.kind { ty::Bool => Some(CastTy::Int(IntTy::Bool)), ty::Char => Some(CastTy::Int(IntTy::Char)), ty::Int(_) => Some(CastTy::Int(IntTy::I)), diff --git a/src/librustc/ty/codec.rs b/src/librustc/ty/codec.rs index 1aa2150112..d5e7ac1926 100644 --- a/src/librustc/ty/codec.rs +++ b/src/librustc/ty/codec.rs @@ -13,9 +13,10 @@ use rustc_data_structures::fx::FxHashMap; use rustc_serialize::{Decodable, Decoder, Encoder, Encodable, opaque}; use std::hash::Hash; use std::intrinsics; -use crate::ty::{self, Ty, TyCtxt}; +use crate::ty::{self, List, Ty, TyCtxt}; use crate::ty::subst::SubstsRef; -use crate::mir::interpret::Allocation; +use crate::mir::{self, interpret::Allocation}; +use syntax_pos::Span; /// The shorthand encoding uses an enum's variant index `usize` /// and is offset by this value so it never matches a real variant. 
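The shorthand encoding described just above works by back-referencing: the first time a value is serialized its stream position is remembered, and later occurrences emit `SHORTHAND_OFFSET + position` instead of re-encoding, which is why the decoder below checks for a `usize > 0x80`. A self-contained sketch of the idea (not the real `TyEncoder` API; the "encoding" here is just a placeholder):

```rust
use std::collections::HashMap;

const SHORTHAND_OFFSET: usize = 0x80;

#[derive(Default)]
struct Encoder {
    out: Vec<usize>,               // stand-in for the real byte stream
    cache: HashMap<String, usize>, // value -> position of its first encoding
}

impl Encoder {
    fn encode(&mut self, value: &str) {
        if let Some(&pos) = self.cache.get(value) {
            // Already encoded once: emit a back-reference instead.
            self.out.push(SHORTHAND_OFFSET + pos);
            return;
        }
        let pos = self.out.len();
        // "Full" encoding, represented here by the value's length.
        self.out.push(value.len());
        self.cache.insert(value.to_owned(), pos);
    }
}

fn main() {
    let mut enc = Encoder::default();
    enc.encode("Vec<u32>");
    enc.encode("String");
    enc.encode("Vec<u32>"); // second occurrence becomes a shorthand
    assert_eq!(enc.out, vec![8, 6, SHORTHAND_OFFSET]);
}
```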
@@ -31,7 +32,7 @@ pub trait EncodableWithShorthand: Clone + Eq + Hash { impl<'tcx> EncodableWithShorthand for Ty<'tcx> { type Variant = ty::TyKind<'tcx>; fn variant(&self) -> &Self::Variant { - &self.sty + &self.kind } } @@ -92,16 +93,16 @@ pub fn encode_with_shorthand(encoder: &mut E, Ok(()) } -pub fn encode_predicates<'tcx, E, C>(encoder: &mut E, - predicates: &ty::GenericPredicates<'tcx>, - cache: C) - -> Result<(), E::Error> +pub fn encode_spanned_predicates<'tcx, E, C>( + encoder: &mut E, + predicates: &'tcx [(ty::Predicate<'tcx>, Span)], + cache: C, +) -> Result<(), E::Error> where E: TyEncoder, C: for<'b> Fn(&'b mut E) -> &'b mut FxHashMap, usize>, { - predicates.parent.encode(encoder)?; - predicates.predicates.len().encode(encoder)?; - for (predicate, span) in &predicates.predicates { + predicates.len().encode(encoder)?; + for (predicate, span) in predicates { encode_with_shorthand(encoder, predicate, &cache)?; span.encode(encoder)?; } @@ -182,13 +183,15 @@ where } #[inline] -pub fn decode_predicates(decoder: &mut D) -> Result, D::Error> +pub fn decode_spanned_predicates( + decoder: &mut D, +) -> Result<&'tcx [(ty::Predicate<'tcx>, Span)], D::Error> where D: TyDecoder<'tcx>, { - Ok(ty::GenericPredicates { - parent: Decodable::decode(decoder)?, - predicates: (0..decoder.read_usize()?).map(|_| { + let tcx = decoder.tcx(); + Ok(tcx.arena.alloc_from_iter( + (0..decoder.read_usize()?).map(|_| { // Handle shorthands first, if we have an usize > 0x80. let predicate = if decoder.positioned_at_shorthand() { let pos = decoder.read_usize()?; @@ -202,7 +205,7 @@ where Ok((predicate, Decodable::decode(decoder)?)) }) .collect::, _>>()?, - }) + )) } #[inline] @@ -215,6 +218,18 @@ where Ok(tcx.mk_substs((0..len).map(|_| Decodable::decode(decoder)))?) } +#[inline] +pub fn decode_place(decoder: &mut D) -> Result, D::Error> +where + D: TyDecoder<'tcx>, +{ + let base: mir::PlaceBase<'tcx> = Decodable::decode(decoder)?; + let len = decoder.read_usize()?; + let projection: &'tcx List> = + decoder.tcx().mk_place_elems((0..len).map(|_| Decodable::decode(decoder)))?; + Ok(mir::Place { base, projection }) +} + #[inline] pub fn decode_region(decoder: &mut D) -> Result, D::Error> where @@ -339,6 +354,8 @@ macro_rules! implement_ty_decoder { use $crate::ty::subst::SubstsRef; use $crate::hir::def_id::{CrateNum}; + use syntax_pos::Span; + use super::$DecoderName; impl<$($typaram ),*> Decoder for $DecoderName<$($typaram),*> { @@ -393,11 +410,11 @@ macro_rules! implement_ty_decoder { } } - impl<$($typaram),*> SpecializedDecoder> + impl<$($typaram),*> SpecializedDecoder<&'tcx [(ty::Predicate<'tcx>, Span)]> for $DecoderName<$($typaram),*> { fn specialized_decode(&mut self) - -> Result, Self::Error> { - decode_predicates(self) + -> Result<&'tcx [(ty::Predicate<'tcx>, Span)], Self::Error> { + decode_spanned_predicates(self) } } @@ -408,6 +425,15 @@ macro_rules! 
implement_ty_decoder { } } + impl<$($typaram),*> SpecializedDecoder<$crate::mir::Place<'tcx>> + for $DecoderName<$($typaram),*> { + fn specialized_decode( + &mut self + ) -> Result<$crate::mir::Place<'tcx>, Self::Error> { + decode_place(self) + } + } + impl<$($typaram),*> SpecializedDecoder> for $DecoderName<$($typaram),*> { fn specialized_decode(&mut self) -> Result, Self::Error> { diff --git a/src/librustc/ty/constness.rs b/src/librustc/ty/constness.rs index e4022bb24a..676916f530 100644 --- a/src/librustc/ty/constness.rs +++ b/src/librustc/ty/constness.rs @@ -2,7 +2,7 @@ use crate::ty::query::Providers; use crate::hir::def_id::DefId; use crate::hir; use crate::ty::TyCtxt; -use syntax_pos::symbol::{sym, Symbol}; +use syntax_pos::symbol::Symbol; use crate::hir::map::blocks::FnLikeNode; use syntax::attr; @@ -13,14 +13,11 @@ impl<'tcx> TyCtxt<'tcx> { self.is_const_fn_raw(def_id) && match self.is_unstable_const_fn(def_id) { Some(feature_name) => { // has a `rustc_const_unstable` attribute, check whether the user enabled the - // corresponding feature gate, const_constructor is not a lib feature, so has - // to be checked separately. + // corresponding feature gate. self.features() .declared_lib_features .iter() .any(|&(sym, _)| sym == feature_name) - || (feature_name == sym::const_constructor - && self.features().const_constructor) }, // functions without const stability are either stable user written // const fn or the user is using feature gates and we thus don't @@ -31,9 +28,7 @@ impl<'tcx> TyCtxt<'tcx> { /// Whether the `def_id` is an unstable const fn and what feature gate is necessary to enable it pub fn is_unstable_const_fn(self, def_id: DefId) -> Option { - if self.is_constructor(def_id) { - Some(sym::const_constructor) - } else if self.is_const_fn_raw(def_id) { + if self.is_const_fn_raw(def_id) { self.lookup_stability(def_id)?.const_stability } else { None diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index 785f45e46b..3d28beefb3 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -1,6 +1,5 @@ -//! Type context book-keeping. - // ignore-tidy-filelength +//! Type context book-keeping. 
use crate::arena::Arena; use crate::dep_graph::DepGraph; @@ -23,15 +22,15 @@ use crate::middle::cstore::EncodedMetadata; use crate::middle::lang_items; use crate::middle::resolve_lifetime::{self, ObjectLifetimeDefault}; use crate::middle::stability; -use crate::mir::{Body, interpret, ProjectionKind, Promoted}; +use crate::mir::{Body, Field, interpret, Local, Place, PlaceElem, ProjectionKind, Promoted}; use crate::mir::interpret::{ConstValue, Allocation, Scalar}; -use crate::ty::subst::{Kind, InternalSubsts, SubstsRef, Subst}; +use crate::ty::subst::{GenericArg, InternalSubsts, SubstsRef, Subst}; use crate::ty::ReprOptions; use crate::traits; use crate::traits::{Clause, Clauses, GoalKind, Goal, Goals}; use crate::ty::{self, DefIdTree, Ty, TypeAndMut}; use crate::ty::{TyS, TyKind, List}; -use crate::ty::{AdtKind, AdtDef, ClosureSubsts, GeneratorSubsts, Region, Const}; +use crate::ty::{AdtKind, AdtDef, Region, Const}; use crate::ty::{PolyFnSig, InferTy, ParamTy, ProjectionTy, ExistentialPredicate, Predicate}; use crate::ty::RegionKind; use crate::ty::{TyVar, TyVid, IntVar, IntVid, FloatVar, FloatVid, ConstVid}; @@ -41,20 +40,21 @@ use crate::ty::GenericParamDefKind; use crate::ty::layout::{LayoutDetails, TargetDataLayout, VariantIdx}; use crate::ty::query; use crate::ty::steal::Steal; -use crate::ty::subst::{UserSubsts, UnpackedKind}; +use crate::ty::subst::{UserSubsts, GenericArgKind}; use crate::ty::{BoundVar, BindingMode}; use crate::ty::CanonicalPolyFnSig; use crate::util::common::ErrorReported; -use crate::util::nodemap::{DefIdMap, DefIdSet, ItemLocalMap, ItemLocalSet}; +use crate::util::nodemap::{DefIdMap, DefIdSet, ItemLocalMap, ItemLocalSet, NodeMap}; use crate::util::nodemap::{FxHashMap, FxHashSet}; +use crate::util::profiling::SelfProfilerRef; use errors::DiagnosticBuilder; use arena::SyncDroplessArena; use smallvec::SmallVec; use rustc_data_structures::stable_hasher::{ - HashStable, StableHasher, StableHasherResult, StableVec, hash_stable_hashmap, + HashStable, StableHasher, StableVec, hash_stable_hashmap, }; -use rustc_data_structures::indexed_vec::{Idx, IndexVec}; +use rustc_index::vec::{Idx, IndexVec}; use rustc_data_structures::sharded::ShardedHashMap; use rustc_data_structures::sync::{Lrc, Lock, WorkerLocal}; use std::any::Any; @@ -66,7 +66,6 @@ use std::fmt; use std::mem; use std::ops::{Deref, Bound}; use std::iter; -use std::sync::mpsc; use std::sync::Arc; use rustc_target::spec::abi; use rustc_macros::HashStable; @@ -74,7 +73,7 @@ use syntax::ast; use syntax::attr; use syntax::source_map::MultiSpan; use syntax::feature_gate; -use syntax::symbol::{Symbol, InternedString, kw, sym}; +use syntax::symbol::{Symbol, kw, sym}; use syntax_pos::Span; pub struct AllArenas { @@ -108,6 +107,7 @@ pub struct CtxtInterners<'tcx> { goal: InternedSet<'tcx, GoalKind<'tcx>>, goal_list: InternedSet<'tcx, List>>, projs: InternedSet<'tcx, List>, + place_elems: InternedSet<'tcx, List>>, const_: InternedSet<'tcx, Const<'tcx>>, } @@ -126,6 +126,7 @@ impl<'tcx> CtxtInterners<'tcx> { goal: Default::default(), goal_list: Default::default(), projs: Default::default(), + place_elems: Default::default(), const_: Default::default(), } } @@ -134,13 +135,13 @@ impl<'tcx> CtxtInterners<'tcx> { #[allow(rustc::usage_of_ty_tykind)] #[inline(never)] fn intern_ty(&self, - st: TyKind<'tcx> + kind: TyKind<'tcx> ) -> Ty<'tcx> { - self.type_.intern(st, |st| { - let flags = super::flags::FlagComputation::for_sty(&st); + self.type_.intern(kind, |kind| { + let flags = super::flags::FlagComputation::for_kind(&kind); let 
ty_struct = TyS { - sty: st, + kind, flags: flags.flags, outer_exclusive_binder: flags.outer_exclusive_binder, }; @@ -150,10 +151,6 @@ impl<'tcx> CtxtInterners<'tcx> { } } -pub struct Common<'tcx> { - pub empty_predicates: ty::GenericPredicates<'tcx>, -} - pub struct CommonTypes<'tcx> { pub unit: Ty<'tcx>, pub bool: Ty<'tcx>, @@ -645,7 +642,7 @@ impl<'tcx> TypeckTables<'tcx> { pub fn is_method_call(&self, expr: &hir::Expr) -> bool { // Only paths and method calls/overloaded operators have // entries in type_dependent_defs, ignore the former here. - if let hir::ExprKind::Path(_) = expr.node { + if let hir::ExprKind::Path(_) = expr.kind { return false; } @@ -747,9 +744,7 @@ impl<'tcx> TypeckTables<'tcx> { } impl<'a, 'tcx> HashStable> for TypeckTables<'tcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { let ty::TypeckTables { local_id_root, ref type_dependent_defs, @@ -823,7 +818,7 @@ impl<'a, 'tcx> HashStable> for TypeckTables<'tcx> { } } -newtype_index! { +rustc_index::newtype_index! { pub struct UserTypeAnnotationIndex { derive [HashStable] DEBUG_FORMAT = "UserType({})", @@ -835,7 +830,7 @@ newtype_index! { pub type CanonicalUserTypeAnnotations<'tcx> = IndexVec>; -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)] +#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable)] pub struct CanonicalUserTypeAnnotation<'tcx> { pub user_ty: CanonicalUserType<'tcx>, pub span: Span, @@ -871,7 +866,7 @@ impl CanonicalUserType<'tcx> { user_substs.substs.iter().zip(BoundVar::new(0)..).all(|(kind, cvar)| { match kind.unpack() { - UnpackedKind::Type(ty) => match ty.sty { + GenericArgKind::Type(ty) => match ty.kind { ty::Bound(debruijn, b) => { // We only allow a `ty::INNERMOST` index in substitutions. assert_eq!(debruijn, ty::INNERMOST); @@ -880,7 +875,7 @@ impl CanonicalUserType<'tcx> { _ => false, }, - UnpackedKind::Lifetime(r) => match r { + GenericArgKind::Lifetime(r) => match r { ty::ReLateBound(debruijn, br) => { // We only allow a `ty::INNERMOST` index in substitutions. assert_eq!(*debruijn, ty::INNERMOST); @@ -889,8 +884,8 @@ impl CanonicalUserType<'tcx> { _ => false, }, - UnpackedKind::Const(ct) => match ct.val { - ConstValue::Infer(InferConst::Canonical(debruijn, b)) => { + GenericArgKind::Const(ct) => match ct.val { + ConstValue::Bound(debruijn, b) => { // We only allow a `ty::INNERMOST` index in substitutions. assert_eq!(debruijn, ty::INNERMOST); cvar == b @@ -907,7 +902,7 @@ impl CanonicalUserType<'tcx> { /// A user-given type annotation attached to a constant. These arise /// from constants that are named via paths, like `Foo::::new` and /// so forth. -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)] +#[derive(Copy, Clone, Debug, PartialEq, RustcEncodable, RustcDecodable, HashStable)] pub enum UserType<'tcx> { Ty(Ty<'tcx>), @@ -933,7 +928,7 @@ EnumLiftImpl! 
{ impl<'tcx> CommonTypes<'tcx> { fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> { - let mk = |sty| interners.intern_ty(sty); + let mk = |ty| interners.intern_ty(ty); CommonTypes { unit: mk(Tuple(List::empty())), @@ -957,7 +952,7 @@ impl<'tcx> CommonTypes<'tcx> { f64: mk(Float(ast::FloatTy::F64)), self_param: mk(ty::Param(ty::ParamTy { index: 0, - name: kw::SelfUpper.as_interned_str(), + name: kw::SelfUpper, })), trait_object_dummy_self: mk(Infer(ty::FreshTy(0))), @@ -1017,7 +1012,7 @@ pub struct FreeRegionInfo { /// /// [rustc guide]: https://rust-lang.github.io/rustc-guide/ty.html #[derive(Copy, Clone)] -#[cfg_attr(not(bootstrap), rustc_diagnostic_item = "TyCtxt")] +#[rustc_diagnostic_item = "TyCtxt"] pub struct TyCtxt<'tcx> { gcx: &'tcx GlobalCtxt<'tcx>, } @@ -1035,14 +1030,15 @@ pub struct GlobalCtxt<'tcx> { interners: CtxtInterners<'tcx>, - cstore: &'tcx CrateStoreDyn, + cstore: Box, pub sess: &'tcx Session, + pub lint_store: Lrc, + pub dep_graph: DepGraph, - /// Common objects. - pub common: Common<'tcx>, + pub prof: SelfProfilerRef, /// Common types, pre-interned for your convenience. pub types: CommonTypes<'tcx>, @@ -1053,6 +1049,9 @@ pub struct GlobalCtxt<'tcx> { /// Common consts, pre-interned for your convenience. pub consts: CommonConsts<'tcx>, + /// Resolutions of `extern crate` items produced by resolver. + extern_crate_map: NodeMap, + /// Map indicating what traits are in scope for places where this /// is relevant; generated by resolve. trait_map: FxHashMap { layout_interner: ShardedHashMap<&'tcx LayoutDetails, ()>, - /// A general purpose channel to throw data out the back towards LLVM worker - /// threads. - /// - /// This is intended to only get used during the codegen phase of the compiler - /// when satisfying the query for a particular codegen unit. Internally in - /// the query it'll send data along this channel to get processed later. - pub tx_to_llvm_workers: Lock>>, - output_filenames: Arc, } impl<'tcx> TyCtxt<'tcx> { - /// Gets the global `TyCtxt`. - #[inline] - pub fn global_tcx(self) -> TyCtxt<'tcx> { - TyCtxt { - gcx: self.gcx, - } - } - #[inline(always)] pub fn hir(self) -> &'tcx hir_map::Map<'tcx> { &self.hir_map @@ -1193,7 +1176,7 @@ impl<'tcx> TyCtxt<'tcx> { None => return Bound::Unbounded, }; for meta in attr.meta_item_list().expect("rustc_layout_scalar_valid_range takes args") { - match meta.literal().expect("attribute takes lit").node { + match meta.literal().expect("attribute takes lit").kind { ast::LitKind::Int(a, _) => return Bound::Included(a), _ => span_bug!(attr.span, "rustc_layout_scalar_valid_range expects int arg"), } @@ -1208,70 +1191,52 @@ impl<'tcx> TyCtxt<'tcx> { value.lift_to_tcx(self) } - /// Like lift, but only tries in the global tcx. - pub fn lift_to_global>(self, value: &T) -> Option { - value.lift_to_tcx(self.global_tcx()) - } - /// Creates a type context and call the closure with a `TyCtxt` reference /// to the context. The closure enforces that the type context and any interned /// value (types, substs, etc.) can only be used while `ty::tls` has a valid /// reference to the context, to allow formatting values that need it. 
pub fn create_global_ctxt( s: &'tcx Session, - cstore: &'tcx CrateStoreDyn, + lint_store: Lrc, local_providers: ty::query::Providers<'tcx>, extern_providers: ty::query::Providers<'tcx>, arenas: &'tcx AllArenas, - resolutions: ty::Resolutions, + resolutions: ty::ResolverOutputs, hir: hir_map::Map<'tcx>, on_disk_query_result_cache: query::OnDiskCache<'tcx>, crate_name: &str, - tx: mpsc::Sender>, output_filenames: &OutputFilenames, ) -> GlobalCtxt<'tcx> { let data_layout = TargetDataLayout::parse(&s.target.target).unwrap_or_else(|err| { s.fatal(&err); }); let interners = CtxtInterners::new(&arenas.interner); - let common = Common { - empty_predicates: ty::GenericPredicates { - parent: None, - predicates: vec![], - }, - }; let common_types = CommonTypes::new(&interners); let common_lifetimes = CommonLifetimes::new(&interners); let common_consts = CommonConsts::new(&interners, &common_types); let dep_graph = hir.dep_graph.clone(); - let max_cnum = cstore.crates_untracked().iter().map(|c| c.as_usize()).max().unwrap_or(0); + let cstore = resolutions.cstore; + let crates = cstore.crates_untracked(); + let max_cnum = crates.iter().map(|c| c.as_usize()).max().unwrap_or(0); let mut providers = IndexVec::from_elem_n(extern_providers, max_cnum + 1); providers[LOCAL_CRATE] = local_providers; let def_path_hash_to_def_id = if s.opts.build_dep_graph() { - let upstream_def_path_tables: Vec<(CrateNum, Lrc<_>)> = cstore - .crates_untracked() + let def_path_tables = crates .iter() .map(|&cnum| (cnum, cstore.def_path_table(cnum))) - .collect(); - - let def_path_tables = || { - upstream_def_path_tables - .iter() - .map(|&(cnum, ref rc)| (cnum, &**rc)) - .chain(iter::once((LOCAL_CRATE, hir.definitions().def_path_table()))) - }; + .chain(iter::once((LOCAL_CRATE, hir.definitions().def_path_table()))); // Precompute the capacity of the hashmap so we don't have to // re-allocate when populating it. 
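The `capacity` computation in this hunk sums the sizes of all def-path tables up front so the map is allocated once and never reallocates while being populated. The same pattern with plain std types, as a small sketch:

```rust
use std::collections::HashMap;

fn merge_tables(tables: &[Vec<(u64, String)>]) -> HashMap<u64, String> {
    // Size the map from the inputs before inserting anything.
    let capacity: usize = tables.iter().map(|t| t.len()).sum();
    let mut map = HashMap::with_capacity(capacity);
    for table in tables {
        for (hash, path) in table {
            map.insert(*hash, path.clone());
        }
    }
    map
}

fn main() {
    let tables = vec![
        vec![(1, "core::option::Option".to_owned())],
        vec![(2, "std::vec::Vec".to_owned()), (3, "std::string::String".to_owned())],
    ];
    assert_eq!(merge_tables(&tables).len(), 3);
}
```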
- let capacity = def_path_tables().map(|(_, t)| t.size()).sum::(); + let capacity = def_path_tables.clone().map(|(_, t)| t.size()).sum::(); let mut map: FxHashMap<_, _> = FxHashMap::with_capacity_and_hasher( capacity, ::std::default::Default::default() ); - for (cnum, def_path_table) in def_path_tables() { + for (cnum, def_path_table) in def_path_tables { def_path_table.add_def_path_hashes_to(cnum, &mut map); } @@ -1289,14 +1254,16 @@ impl<'tcx> TyCtxt<'tcx> { GlobalCtxt { sess: s, + lint_store, cstore, arena: WorkerLocal::new(|_| Arena::default()), interners, dep_graph, - common, + prof: s.prof.clone(), types: common_types, lifetimes: common_lifetimes, consts: common_consts, + extern_crate_map: resolutions.extern_crate_map, trait_map, export_map: resolutions.export_map.into_iter().map(|(k, v)| { let exports: Vec<_> = v.into_iter().map(|e| { @@ -1334,7 +1301,6 @@ impl<'tcx> TyCtxt<'tcx> { stability_interner: Default::default(), allocation_interner: Default::default(), alloc_map: Lock::new(interpret::AllocMap::new()), - tx_to_llvm_workers: Lock::new(tx), output_filenames: Arc::new(output_filenames.clone()), } } @@ -1442,13 +1408,14 @@ impl<'tcx> TyCtxt<'tcx> { } pub fn encode_metadata(self)-> EncodedMetadata { + let _prof_timer = self.prof.generic_activity("generate_crate_metadata"); self.cstore.encode_metadata(self) } // Note that this is *untracked* and should only be used within the query // system if the result is otherwise tracked through queries - pub fn crate_data_as_rc_any(self, cnum: CrateNum) -> Lrc { - self.cstore.crate_data_as_rc_any(cnum) + pub fn crate_data_as_any(self, cnum: CrateNum) -> &'tcx dyn Any { + self.cstore.crate_data_as_any(cnum) } #[inline(always)] @@ -1458,7 +1425,7 @@ impl<'tcx> TyCtxt<'tcx> { StableHashingContext::new(self.sess, krate, self.hir().definitions(), - self.cstore) + &*self.cstore) } // This method makes sure that we have a DepNode and a Fingerprint for @@ -1486,13 +1453,7 @@ impl<'tcx> TyCtxt<'tcx> { -> Result<(), E::Error> where E: ty::codec::TyEncoder { - self.queries.on_disk_cache.serialize(self.global_tcx(), encoder) - } - - /// If `true`, we should use the AST-based borrowck (we may *also* use - /// the MIR-based borrowck). - pub fn use_ast_borrowck(self) -> bool { - self.borrowck_mode().use_ast() + self.queries.on_disk_cache.serialize(self, encoder) } /// If `true`, we should use the MIR-based borrowck, but also @@ -1554,8 +1515,14 @@ impl<'tcx> TyCtxt<'tcx> { CrateType::Staticlib | CrateType::ProcMacro | CrateType::Cdylib => false, - CrateType::Rlib | + + // FIXME rust-lang/rust#64319, rust-lang/rust#64872: + // We want to block export of generics from dylibs, + // but we must fix rust-lang/rust#65890 before we can + // do that robustly. CrateType::Dylib => true, + + CrateType::Rlib => true, } }) } @@ -1597,7 +1564,7 @@ impl<'tcx> TyCtxt<'tcx> { let hir_id = self.hir().as_local_hir_id(scope_def_id).unwrap(); match self.hir().get(hir_id) { Node::Item(item) => { - match item.node { + match item.kind { ItemKind::Fn(..) 
=> { /* `type_of_def_id()` will work */ } _ => { return None; @@ -1608,7 +1575,7 @@ impl<'tcx> TyCtxt<'tcx> { } let ret_ty = self.type_of(scope_def_id); - match ret_ty.sty { + match ret_ty.kind { ty::FnDef(_, _) => { let sig = ret_ty.fn_sig(*self); let output = self.erase_late_bound_regions(&sig.output()); @@ -1660,7 +1627,7 @@ impl<'tcx> GlobalCtxt<'tcx> { let tcx = TyCtxt { gcx: self, }; - ty::tls::with_related_context(tcx.global_tcx(), |icx| { + ty::tls::with_related_context(tcx, |icx| { let new_icx = ty::tls::ImplicitCtxt { tcx, query: icx.query.clone(), @@ -1744,7 +1711,7 @@ nop_list_lift!{CanonicalVarInfo => CanonicalVarInfo} nop_list_lift!{ProjectionKind => ProjectionKind} // This is the impl for `&'a InternalSubsts<'a>`. -nop_list_lift!{Kind<'a> => Kind<'tcx>} +nop_list_lift!{GenericArg<'a> => GenericArg<'tcx>} pub mod tls { use super::{GlobalCtxt, TyCtxt, ptr_eq}; @@ -2054,7 +2021,7 @@ macro_rules! sty_debug_print { let shards = tcx.interners.type_.lock_shards(); let types = shards.iter().flat_map(|shard| shard.keys()); for &Interned(t) in types { - let variant = match t.sty { + let variant = match t.kind { ty::Bool | ty::Char | ty::Int(..) | ty::Uint(..) | ty::Float(..) | ty::Str | ty::Never => continue, ty::Error => /* unimportant */ continue, @@ -2123,10 +2090,10 @@ impl<'tcx, T: 'tcx+?Sized> Clone for Interned<'tcx, T> { } impl<'tcx, T: 'tcx+?Sized> Copy for Interned<'tcx, T> {} -// N.B., an `Interned` compares and hashes as a sty. +// N.B., an `Interned` compares and hashes as a `TyKind`. impl<'tcx> PartialEq for Interned<'tcx, TyS<'tcx>> { fn eq(&self, other: &Interned<'tcx, TyS<'tcx>>) -> bool { - self.0.sty == other.0.sty + self.0.kind == other.0.kind } } @@ -2134,14 +2101,14 @@ impl<'tcx> Eq for Interned<'tcx, TyS<'tcx>> {} impl<'tcx> Hash for Interned<'tcx, TyS<'tcx>> { fn hash(&self, s: &mut H) { - self.0.sty.hash(s) + self.0.kind.hash(s) } } #[allow(rustc::usage_of_ty_tykind)] impl<'tcx> Borrow> for Interned<'tcx, TyS<'tcx>> { fn borrow<'a>(&'a self) -> &'a TyKind<'tcx> { - &self.0.sty + &self.0.kind } } @@ -2172,8 +2139,8 @@ impl<'tcx> Borrow<[CanonicalVarInfo]> for Interned<'tcx, List> } } -impl<'tcx> Borrow<[Kind<'tcx>]> for Interned<'tcx, InternalSubsts<'tcx>> { - fn borrow<'a>(&'a self) -> &'a [Kind<'tcx>] { +impl<'tcx> Borrow<[GenericArg<'tcx>]> for Interned<'tcx, InternalSubsts<'tcx>> { + fn borrow<'a>(&'a self) -> &'a [GenericArg<'tcx>] { &self.0[..] } } @@ -2185,6 +2152,13 @@ impl<'tcx> Borrow<[ProjectionKind]> } } +impl<'tcx> Borrow<[PlaceElem<'tcx>]> + for Interned<'tcx, List>> { + fn borrow(&self) -> &[PlaceElem<'tcx>] { + &self.0[..] + } +} + impl<'tcx> Borrow for Interned<'tcx, RegionKind> { fn borrow(&self) -> &RegionKind { &self.0 @@ -2229,44 +2203,29 @@ impl<'tcx> Borrow<[Goal<'tcx>]> for Interned<'tcx, List>> { } } -macro_rules! intern_method { - ($lt_tcx:tt, $name:ident: $method:ident($alloc:ty, - $alloc_method:expr, - $alloc_to_key:expr) -> $ty:ty) => { - impl<$lt_tcx> TyCtxt<$lt_tcx> { - pub fn $method(self, v: $alloc) -> &$lt_tcx $ty { - let key = ($alloc_to_key)(&v); - - self.interners.$name.intern_ref(key, || { - Interned($alloc_method(&self.interners.arena, v)) - - }).0 - } - } - } -} - macro_rules! 
direct_interners { - ($lt_tcx:tt, $($name:ident: $method:ident($ty:ty)),+) => { - $(impl<$lt_tcx> PartialEq for Interned<$lt_tcx, $ty> { + ($($name:ident: $method:ident($ty:ty)),+) => { + $(impl<'tcx> PartialEq for Interned<'tcx, $ty> { fn eq(&self, other: &Self) -> bool { self.0 == other.0 } } - impl<$lt_tcx> Eq for Interned<$lt_tcx, $ty> {} + impl<'tcx> Eq for Interned<'tcx, $ty> {} - impl<$lt_tcx> Hash for Interned<$lt_tcx, $ty> { + impl<'tcx> Hash for Interned<'tcx, $ty> { fn hash(&self, s: &mut H) { self.0.hash(s) } } - intern_method!( - $lt_tcx, - $name: $method($ty, - |a: &$lt_tcx SyncDroplessArena, v| -> &$lt_tcx $ty { a.alloc(v) }, - |x| x) -> $ty);)+ + impl<'tcx> TyCtxt<'tcx> { + pub fn $method(self, v: $ty) -> &'tcx $ty { + self.interners.$name.intern_ref(&v, || { + Interned(self.interners.arena.alloc(v)) + }).0 + } + })+ } } @@ -2274,7 +2233,7 @@ pub fn keep_local<'tcx, T: ty::TypeFoldable<'tcx>>(x: &T) -> bool { x.has_type_flags(ty::TypeFlags::KEEP_IN_LOCAL_TCX) } -direct_interners!('tcx, +direct_interners!( region: mk_region(RegionKind), goal: mk_goal(GoalKind<'tcx>), const_: mk_const(Const<'tcx>) @@ -2282,37 +2241,28 @@ direct_interners!('tcx, macro_rules! slice_interners { ($($field:ident: $method:ident($ty:ty)),+) => ( - $(intern_method!( 'tcx, $field: $method( - &[$ty], - |a, v| List::from_arena(a, v), - Deref::deref) -> List<$ty>);)+ + $(impl<'tcx> TyCtxt<'tcx> { + pub fn $method(self, v: &[$ty]) -> &'tcx List<$ty> { + self.interners.$field.intern_ref(v, || { + Interned(List::from_arena(&self.interners.arena, v)) + }).0 + } + })+ ); } slice_interners!( + type_list: _intern_type_list(Ty<'tcx>), + substs: _intern_substs(GenericArg<'tcx>), + canonical_var_infos: _intern_canonical_var_infos(CanonicalVarInfo), existential_predicates: _intern_existential_predicates(ExistentialPredicate<'tcx>), predicates: _intern_predicates(Predicate<'tcx>), - type_list: _intern_type_list(Ty<'tcx>), - substs: _intern_substs(Kind<'tcx>), clauses: _intern_clauses(Clause<'tcx>), goal_list: _intern_goals(Goal<'tcx>), - projs: _intern_projs(ProjectionKind) + projs: _intern_projs(ProjectionKind), + place_elems: _intern_place_elems(PlaceElem<'tcx>) ); -// This isn't a perfect fit: `CanonicalVarInfo` slices are always -// allocated in the global arena, so this `intern_method!` macro is -// overly general. However, we just return `false` for the code that checks -// whether they belong in the thread-local arena, so no harm done, and -// seems better than open-coding the rest. -intern_method! { - 'tcx, - canonical_var_infos: _intern_canonical_var_infos( - &[CanonicalVarInfo], - |a, v| List::from_arena(a, v), - Deref::deref - ) -> List -} - impl<'tcx> TyCtxt<'tcx> { /// Given a `fn` type, returns an equivalent `unsafe fn` type; /// that is, a `fn` type that is equivalent in every way for being @@ -2335,7 +2285,7 @@ impl<'tcx> TyCtxt<'tcx> { /// It cannot convert a closure that requires unsafe. 
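The doc comments just above describe producing an equivalent `unsafe fn` type and converting a non-capturing closure's signature to a `fn` signature. At the surface level these correspond to two ordinary coercions, shown in this minimal example (plain Rust, not compiler internals):

```rust
fn main() {
    let add = |a: u32, b: u32| a + b;

    // Closure -> fn pointer (only possible because `add` captures nothing).
    let as_fn: fn(u32, u32) -> u32 = add;

    // Safe fn pointer -> unsafe fn pointer of the same signature.
    let as_unsafe_fn: unsafe fn(u32, u32) -> u32 = as_fn;

    assert_eq!(as_fn(1, 2), 3);
    assert_eq!(unsafe { as_unsafe_fn(3, 4) }, 7);
}
```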
pub fn coerce_closure_fn_ty(self, sig: PolyFnSig<'tcx>, unsafety: hir::Unsafety) -> Ty<'tcx> { let converted_sig = sig.map_bound(|s| { - let params_iter = match s.inputs()[0].sty { + let params_iter = match s.inputs()[0].kind { ty::Tuple(params) => { params.into_iter().map(|k| k.expect_ty()) } @@ -2485,7 +2435,7 @@ impl<'tcx> TyCtxt<'tcx> { #[inline] pub fn mk_array(self, ty: Ty<'tcx>, n: u64) -> Ty<'tcx> { - self.mk_ty(Array(ty, ty::Const::from_usize(self.global_tcx(), n))) + self.mk_ty(Array(ty, ty::Const::from_usize(self, n))) } #[inline] @@ -2495,13 +2445,13 @@ impl<'tcx> TyCtxt<'tcx> { #[inline] pub fn intern_tup(self, ts: &[Ty<'tcx>]) -> Ty<'tcx> { - let kinds: Vec<_> = ts.into_iter().map(|&t| Kind::from(t)).collect(); + let kinds: Vec<_> = ts.into_iter().map(|&t| GenericArg::from(t)).collect(); self.mk_ty(Tuple(self.intern_substs(&kinds))) } pub fn mk_tup], Ty<'tcx>>>(self, iter: I) -> I::Output { iter.intern_with(|ts| { - let kinds: Vec<_> = ts.into_iter().map(|&t| Kind::from(t)).collect(); + let kinds: Vec<_> = ts.into_iter().map(|&t| GenericArg::from(t)).collect(); self.mk_ty(Tuple(self.intern_substs(&kinds))) }) } @@ -2557,7 +2507,7 @@ impl<'tcx> TyCtxt<'tcx> { } #[inline] - pub fn mk_closure(self, closure_id: DefId, closure_substs: ClosureSubsts<'tcx>) + pub fn mk_closure(self, closure_id: DefId, closure_substs: SubstsRef<'tcx>) -> Ty<'tcx> { self.mk_ty(Closure(closure_id, closure_substs)) } @@ -2565,7 +2515,7 @@ impl<'tcx> TyCtxt<'tcx> { #[inline] pub fn mk_generator(self, id: DefId, - generator_substs: GeneratorSubsts<'tcx>, + generator_substs: SubstsRef<'tcx>, movability: hir::GeneratorMovability) -> Ty<'tcx> { self.mk_ty(Generator(id, generator_substs, movability)) @@ -2617,7 +2567,7 @@ impl<'tcx> TyCtxt<'tcx> { } #[inline] - pub fn mk_ty_param(self, index: u32, name: InternedString) -> Ty<'tcx> { + pub fn mk_ty_param(self, index: u32, name: Symbol) -> Ty<'tcx> { self.mk_ty(Param(ParamTy { index, name: name })) } @@ -2625,7 +2575,7 @@ impl<'tcx> TyCtxt<'tcx> { pub fn mk_const_param( self, index: u32, - name: InternedString, + name: Symbol, ty: Ty<'tcx> ) -> &'tcx Const<'tcx> { self.mk_const(ty::Const { @@ -2635,7 +2585,7 @@ impl<'tcx> TyCtxt<'tcx> { } - pub fn mk_param_from_def(self, param: &ty::GenericParamDef) -> Kind<'tcx> { + pub fn mk_param_from_def(self, param: &ty::GenericParamDef) -> GenericArg<'tcx> { match param.kind { GenericParamDefKind::Lifetime => { self.mk_region(ty::ReEarlyBound(param.to_early_bound_region_data())).into() @@ -2652,6 +2602,48 @@ impl<'tcx> TyCtxt<'tcx> { self.mk_ty(Opaque(def_id, substs)) } + pub fn mk_place_field(self, place: Place<'tcx>, f: Field, ty: Ty<'tcx>) -> Place<'tcx> { + self.mk_place_elem(place, PlaceElem::Field(f, ty)) + } + + pub fn mk_place_deref(self, place: Place<'tcx>) -> Place<'tcx> { + self.mk_place_elem(place, PlaceElem::Deref) + } + + pub fn mk_place_downcast( + self, + place: Place<'tcx>, + adt_def: &'tcx AdtDef, + variant_index: VariantIdx, + ) -> Place<'tcx> { + self.mk_place_elem( + place, + PlaceElem::Downcast(Some(adt_def.variants[variant_index].ident.name), variant_index), + ) + } + + pub fn mk_place_downcast_unnamed( + self, + place: Place<'tcx>, + variant_index: VariantIdx, + ) -> Place<'tcx> { + self.mk_place_elem(place, PlaceElem::Downcast(None, variant_index)) + } + + pub fn mk_place_index(self, place: Place<'tcx>, index: Local) -> Place<'tcx> { + self.mk_place_elem(place, PlaceElem::Index(index)) + } + + /// This method copies `Place`'s projection, add an element and reintern it. 
Should not be used + /// to build a full `Place` it's just a convenient way to grab a projection and modify it in + /// flight. + pub fn mk_place_elem(self, place: Place<'tcx>, elem: PlaceElem<'tcx>) -> Place<'tcx> { + let mut projection = place.projection.to_vec(); + projection.push(elem); + + Place { base: place.base, projection: self.intern_place_elems(&projection) } + } + pub fn intern_existential_predicates(self, eps: &[ExistentialPredicate<'tcx>]) -> &'tcx List> { assert!(!eps.is_empty()); @@ -2680,7 +2672,7 @@ impl<'tcx> TyCtxt<'tcx> { } } - pub fn intern_substs(self, ts: &[Kind<'tcx>]) -> &'tcx List> { + pub fn intern_substs(self, ts: &[GenericArg<'tcx>]) -> &'tcx List> { if ts.len() == 0 { List::empty() } else { @@ -2696,11 +2688,19 @@ impl<'tcx> TyCtxt<'tcx> { } } + pub fn intern_place_elems(self, ts: &[PlaceElem<'tcx>]) -> &'tcx List> { + if ts.len() == 0 { + List::empty() + } else { + self._intern_place_elems(ts) + } + } + pub fn intern_canonical_var_infos(self, ts: &[CanonicalVarInfo]) -> CanonicalVarInfos<'tcx> { if ts.len() == 0 { List::empty() } else { - self.global_tcx()._intern_canonical_var_infos(ts) + self._intern_canonical_var_infos(ts) } } @@ -2753,14 +2753,19 @@ impl<'tcx> TyCtxt<'tcx> { iter.intern_with(|xs| self.intern_type_list(xs)) } - pub fn mk_substs], - &'tcx List>>>(self, iter: I) -> I::Output { + pub fn mk_substs], + &'tcx List>>>(self, iter: I) -> I::Output { iter.intern_with(|xs| self.intern_substs(xs)) } + pub fn mk_place_elems], + &'tcx List>>>(self, iter: I) -> I::Output { + iter.intern_with(|xs| self.intern_place_elems(xs)) + } + pub fn mk_substs_trait(self, self_ty: Ty<'tcx>, - rest: &[Kind<'tcx>]) + rest: &[GenericArg<'tcx>]) -> SubstsRef<'tcx> { self.mk_substs(iter::once(self_ty.into()).chain(rest.iter().cloned())) @@ -2923,8 +2928,33 @@ impl<'a, T, R> InternIteratorElement for &'a T impl InternIteratorElement for Result { type Output = Result; - fn intern_with, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output { - Ok(f(&iter.collect::, _>>()?)) + fn intern_with, F: FnOnce(&[T]) -> R>(mut iter: I, f: F) + -> Self::Output { + // This code is hot enough that it's worth specializing for the most + // common length lists, to avoid the overhead of `SmallVec` creation. + // The match arms are in order of frequency. The 1, 2, and 0 cases are + // typically hit in ~95% of cases. We assume that if the upper and + // lower bounds from `size_hint` agree they are correct. + Ok(match iter.size_hint() { + (1, Some(1)) => { + let t0 = iter.next().unwrap()?; + assert!(iter.next().is_none()); + f(&[t0]) + } + (2, Some(2)) => { + let t0 = iter.next().unwrap()?; + let t1 = iter.next().unwrap()?; + assert!(iter.next().is_none()); + f(&[t0, t1]) + } + (0, Some(0)) => { + assert!(iter.next().is_none()); + f(&[]) + } + _ => { + f(&iter.collect::, _>>()?) 
+ } + }) } } @@ -2985,12 +3015,16 @@ pub fn provide(providers: &mut ty::query::Providers<'_>) { }; providers.extern_mod_stmt_cnum = |tcx, id| { let id = tcx.hir().as_local_node_id(id).unwrap(); - tcx.cstore.extern_mod_stmt_cnum_untracked(id) + tcx.extern_crate_map.get(&id).cloned() }; providers.all_crate_nums = |tcx, cnum| { assert_eq!(cnum, LOCAL_CRATE); tcx.arena.alloc_slice(&tcx.cstore.crates_untracked()) }; + providers.crate_host_hash = |tcx, cnum| { + assert_ne!(cnum, LOCAL_CRATE); + tcx.cstore.crate_host_hash_untracked(cnum) + }; providers.postorder_cnums = |tcx, cnum| { assert_eq!(cnum, LOCAL_CRATE); tcx.arena.alloc_slice(&tcx.cstore.postorder_cnums_untracked()) diff --git a/src/librustc/ty/error.rs b/src/librustc/ty/error.rs index 125c48f5f3..77613b548c 100644 --- a/src/librustc/ty/error.rs +++ b/src/librustc/ty/error.rs @@ -45,13 +45,12 @@ pub enum TypeError<'tcx> { ProjectionMismatched(ExpectedFound), ProjectionBoundsLength(ExpectedFound), ExistentialMismatch(ExpectedFound<&'tcx ty::List>>), - + ObjectUnsafeCoercion(DefId), ConstMismatch(ExpectedFound<&'tcx ty::Const<'tcx>>), IntrinsicCast, } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)] pub enum UnconstrainedNumeric { UnconstrainedFloat, UnconstrainedInt, @@ -179,13 +178,14 @@ impl<'tcx> fmt::Display for TypeError<'tcx> { IntrinsicCast => { write!(f, "cannot coerce intrinsics to function pointers") } + ObjectUnsafeCoercion(_) => write!(f, "coercion to object-unsafe trait object"), } } } impl<'tcx> ty::TyS<'tcx> { pub fn sort_string(&self, tcx: TyCtxt<'_>) -> Cow<'static, str> { - match self.sty { + match self.kind { ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Str | ty::Never => self.to_string().into(), ty::Tuple(ref tys) if tys.is_empty() => self.to_string().into(), @@ -193,7 +193,7 @@ impl<'tcx> ty::TyS<'tcx> { ty::Adt(def, _) => format!("{} `{}`", def.descr(), tcx.def_path_str(def.did)).into(), ty::Foreign(def_id) => format!("extern type `{}`", tcx.def_path_str(def_id)).into(), ty::Array(_, n) => { - let n = tcx.lift_to_global(&n).unwrap(); + let n = tcx.lift(&n).unwrap(); match n.try_eval_usize(tcx, ty::ParamEnv::empty()) { Some(n) => { format!("array of {} element{}", n, pluralise!(n)).into() @@ -275,7 +275,7 @@ impl<'tcx> TyCtxt<'tcx> { `.await`ing on both of them"); } } - match (&values.expected.sty, &values.found.sty) { + match (&values.expected.kind, &values.found.kind) { (ty::Float(_), ty::Infer(ty::IntVar(_))) => if let Ok( // Issue #53280 snippet, ) = self.sess.source_map().span_to_snippet(sp) { @@ -373,9 +373,9 @@ impl Trait for X { debug!( "note_and_explain_type_err expected={:?} ({:?}) found={:?} ({:?})", values.expected, - values.expected.sty, + values.expected.kind, values.found, - values.found.sty, + values.found.kind, ); }, CyclicTy(ty) => { diff --git a/src/librustc/ty/fast_reject.rs b/src/librustc/ty/fast_reject.rs index ee0d33dbe3..27a09b394b 100644 --- a/src/librustc/ty/fast_reject.rs +++ b/src/librustc/ty/fast_reject.rs @@ -1,7 +1,6 @@ use crate::hir::def_id::DefId; use crate::ich::StableHashingContext; -use rustc_data_structures::stable_hasher::{StableHasher, StableHasherResult, - HashStable}; +use rustc_data_structures::stable_hasher::{StableHasher, HashStable}; use std::fmt::Debug; use std::hash::Hash; use std::mem; @@ -20,7 +19,7 @@ pub type SimplifiedType = SimplifiedTypeGen; /// the non-stable but fast to construct DefId-version is the better choice. 
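The `InternIteratorElement for Result` change above specializes on `size_hint()` so that the 0-, 1-, and 2-element lists that dominate in practice never go through a heap/`SmallVec` collection. A standalone sketch of the same pattern, using a plain `Vec` for the fallback and illustrative names rather than rustc's actual types:

// Sketch (not rustc's code) of the `size_hint` specialization pattern: when an
// exact-size iterator yields 0, 1, or 2 items, hand a stack-allocated slice to
// the callback instead of collecting into a Vec first.
fn intern_with<T, R, E>(
    mut iter: impl Iterator<Item = Result<T, E>>,
    f: impl FnOnce(&[T]) -> R,
) -> Result<R, E> {
    Ok(match iter.size_hint() {
        // Trust the hint only when the lower and upper bounds agree.
        (1, Some(1)) => {
            let t0 = iter.next().unwrap()?;
            debug_assert!(iter.next().is_none());
            f(&[t0])
        }
        (2, Some(2)) => {
            let t0 = iter.next().unwrap()?;
            let t1 = iter.next().unwrap()?;
            debug_assert!(iter.next().is_none());
            f(&[t0, t1])
        }
        (0, Some(0)) => f(&[]),
        // General case: collect, propagating the first error.
        _ => f(&iter.collect::<Result<Vec<_>, _>>()?),
    })
}

fn main() {
    let tys = ["u8", "i32"];
    let len: Result<usize, ()> = intern_with(tys.iter().map(|t| Ok(*t)), |xs| xs.len());
    assert_eq!(len, Ok(2));
}
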
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, RustcEncodable, RustcDecodable)] pub enum SimplifiedTypeGen - where D: Copy + Debug + Ord + Eq + Hash + where D: Copy + Debug + Ord + Eq { BoolSimplifiedType, CharSimplifiedType, @@ -60,7 +59,7 @@ pub fn simplify_type( ty: Ty<'_>, can_simplify_params: bool, ) -> Option { - match ty.sty { + match ty.kind { ty::Bool => Some(BoolSimplifiedType), ty::Char => Some(CharSimplifiedType), ty::Int(int_type) => Some(IntSimplifiedType(int_type)), @@ -124,10 +123,10 @@ pub fn simplify_type( } } -impl SimplifiedTypeGen { +impl SimplifiedTypeGen { pub fn map_def(self, map: F) -> SimplifiedTypeGen where F: Fn(D) -> U, - U: Copy + Debug + Ord + Eq + Hash, + U: Copy + Debug + Ord + Eq, { match self { BoolSimplifiedType => BoolSimplifiedType, @@ -156,11 +155,9 @@ impl SimplifiedTypeGen { impl<'a, D> HashStable> for SimplifiedTypeGen where - D: Copy + Debug + Ord + Eq + Hash + HashStable>, + D: Copy + Debug + Ord + Eq + HashStable>, { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { BoolSimplifiedType | diff --git a/src/librustc/ty/flags.rs b/src/librustc/ty/flags.rs index b2d74f963b..d4b7f37b12 100644 --- a/src/librustc/ty/flags.rs +++ b/src/librustc/ty/flags.rs @@ -1,4 +1,4 @@ -use crate::ty::subst::{SubstsRef, UnpackedKind}; +use crate::ty::subst::{SubstsRef, GenericArgKind}; use crate::ty::{self, Ty, TypeFlags, InferConst}; use crate::mir::interpret::ConstValue; @@ -19,9 +19,9 @@ impl FlagComputation { } #[allow(rustc::usage_of_ty_tykind)] - pub fn for_sty(st: &ty::TyKind<'_>) -> FlagComputation { + pub fn for_kind(kind: &ty::TyKind<'_>) -> FlagComputation { let mut result = FlagComputation::new(); - result.add_sty(st); + result.add_kind(kind); result } @@ -63,8 +63,8 @@ impl FlagComputation { } #[allow(rustc::usage_of_ty_tykind)] - fn add_sty(&mut self, st: &ty::TyKind<'_>) { - match st { + fn add_kind(&mut self, kind: &ty::TyKind<'_>) { + match kind { &ty::Bool | &ty::Char | &ty::Int(_) | @@ -94,7 +94,7 @@ impl FlagComputation { &ty::Generator(_, ref substs, _) => { self.add_flags(TypeFlags::HAS_TY_CLOSURE); self.add_flags(TypeFlags::HAS_FREE_LOCAL_NAMES); - self.add_substs(&substs.substs); + self.add_substs(substs); } &ty::GeneratorWitness(ref ts) => { @@ -106,7 +106,7 @@ impl FlagComputation { &ty::Closure(_, ref substs) => { self.add_flags(TypeFlags::HAS_TY_CLOSURE); self.add_flags(TypeFlags::HAS_FREE_LOCAL_NAMES); - self.add_substs(&substs.substs); + self.add_substs(substs); } &ty::Bound(debruijn, _) => { @@ -114,6 +114,7 @@ impl FlagComputation { } &ty::Placeholder(..) 
=> { + self.add_flags(TypeFlags::HAS_FREE_LOCAL_NAMES); self.add_flags(TypeFlags::HAS_TY_PLACEHOLDER); } @@ -123,8 +124,7 @@ impl FlagComputation { match infer { ty::FreshTy(_) | ty::FreshIntTy(_) | - ty::FreshFloatTy(_) => { - } + ty::FreshFloatTy(_) => {} ty::TyVar(_) | ty::IntVar(_) | @@ -240,17 +240,21 @@ impl FlagComputation { self.add_flags(TypeFlags::HAS_FREE_LOCAL_NAMES | TypeFlags::HAS_CT_INFER); match infer { InferConst::Fresh(_) => {} - InferConst::Canonical(debruijn, _) => self.add_binder(debruijn), InferConst::Var(_) => self.add_flags(TypeFlags::KEEP_IN_LOCAL_TCX), } } + ConstValue::Bound(debruijn, _) => self.add_binder(debruijn), ConstValue::Param(_) => { - self.add_flags(TypeFlags::HAS_FREE_LOCAL_NAMES | TypeFlags::HAS_PARAMS); + self.add_flags(TypeFlags::HAS_FREE_LOCAL_NAMES); + self.add_flags(TypeFlags::HAS_PARAMS); } ConstValue::Placeholder(_) => { - self.add_flags(TypeFlags::HAS_FREE_REGIONS | TypeFlags::HAS_CT_PLACEHOLDER); + self.add_flags(TypeFlags::HAS_FREE_LOCAL_NAMES); + self.add_flags(TypeFlags::HAS_CT_PLACEHOLDER); } - _ => {}, + ConstValue::Scalar(_) => {} + ConstValue::Slice { .. } => {} + ConstValue::ByRef { .. } => {} } } @@ -266,9 +270,9 @@ impl FlagComputation { fn add_substs(&mut self, substs: SubstsRef<'_>) { for kind in substs { match kind.unpack() { - UnpackedKind::Type(ty) => self.add_ty(ty), - UnpackedKind::Lifetime(lt) => self.add_region(lt), - UnpackedKind::Const(ct) => self.add_const(ct), + GenericArgKind::Type(ty) => self.add_ty(ty), + GenericArgKind::Lifetime(lt) => self.add_region(lt), + GenericArgKind::Const(ct) => self.add_const(ct), } } } diff --git a/src/librustc/ty/fold.rs b/src/librustc/ty/fold.rs index 1e08ae4595..bacf3d42f0 100644 --- a/src/librustc/ty/fold.rs +++ b/src/librustc/ty/fold.rs @@ -88,6 +88,9 @@ pub trait TypeFoldable<'tcx>: fmt::Debug + Clone { fn has_infer_types(&self) -> bool { self.has_type_flags(TypeFlags::HAS_TY_INFER) } + fn has_infer_consts(&self) -> bool { + self.has_type_flags(TypeFlags::HAS_CT_INFER) + } fn has_local_value(&self) -> bool { self.has_type_flags(TypeFlags::KEEP_IN_LOCAL_TCX) } @@ -472,7 +475,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for BoundVarReplacer<'a, 'tcx> { } fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> { - match t.sty { + match t.kind { ty::Bound(debruijn, bound_ty) => { if debruijn == self.current_index { let fld_t = &mut self.fld_t; @@ -518,10 +521,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for BoundVarReplacer<'a, 'tcx> { } fn fold_const(&mut self, ct: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> { - if let ty::Const { - val: ConstValue::Infer(ty::InferConst::Canonical(debruijn, bound_const)), - ty, - } = *ct { + if let ty::Const { val: ConstValue::Bound(debruijn, bound_const), ty } = *ct { if debruijn == self.current_index { let fld_c = &mut self.fld_c; let ct = fld_c(bound_const, ty); @@ -567,7 +567,10 @@ impl<'tcx> TyCtxt<'tcx> { // identity for bound types and consts let fld_t = |bound_ty| self.mk_ty(ty::Bound(ty::INNERMOST, bound_ty)); let fld_c = |bound_ct, ty| { - self.mk_const_infer(ty::InferConst::Canonical(ty::INNERMOST, bound_ct), ty) + self.mk_const(ty::Const { + val: ConstValue::Bound(ty::INNERMOST, bound_ct), + ty, + }) }; self.replace_escaping_bound_vars(value.skip_binder(), fld_r, fld_t, fld_c) } @@ -718,7 +721,6 @@ impl<'tcx> TyCtxt<'tcx> { // vars. See comment on `shift_vars_through_binders` method in // `subst.rs` for more details. 
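The `Shifter` folder below adjusts De Bruijn indices when a value moves across binders: indices bound inside the value being folded are left alone, while indices that refer to outer binders are shifted by `amount`. A minimal, self-contained illustration of that idea on a toy term type (the `Term` enum and `shift_in` function are hypothetical, not rustc APIs):

// Toy illustration of De Bruijn shifting; not rustc's `ty::Bound` machinery.
#[derive(Debug, PartialEq)]
enum Term {
    Bound(u32),        // a variable, as a De Bruijn index
    Lambda(Box<Term>), // one binder
}

// Shift free variables up by `amount` when a term is placed under extra
// binders. `depth` counts binders already entered, so indices smaller than
// `depth` are bound locally and must not be touched.
fn shift_in(term: &Term, amount: u32, depth: u32) -> Term {
    match term {
        Term::Bound(i) if *i >= depth => Term::Bound(i + amount),
        Term::Bound(i) => Term::Bound(*i),
        Term::Lambda(body) => Term::Lambda(Box::new(shift_in(body, amount, depth + 1))),
    }
}

fn main() {
    // `λ. 1` refers to a variable one binder outside the lambda; wrapping the
    // whole term under one more binder bumps that reference to `λ. 2`.
    let t = Term::Lambda(Box::new(Term::Bound(1)));
    assert_eq!(shift_in(&t, 1, 0), Term::Lambda(Box::new(Term::Bound(2))));
}
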
-#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] enum Direction { In, Out, @@ -776,7 +778,7 @@ impl TypeFolder<'tcx> for Shifter<'tcx> { } fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { - match ty.sty { + match ty.kind { ty::Bound(debruijn, bound_ty) => { if self.amount == 0 || debruijn < self.current_index { ty @@ -799,10 +801,7 @@ impl TypeFolder<'tcx> for Shifter<'tcx> { } fn fold_const(&mut self, ct: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> { - if let ty::Const { - val: ConstValue::Infer(ty::InferConst::Canonical(debruijn, bound_const)), - ty, - } = *ct { + if let ty::Const { val: ConstValue::Bound(debruijn, bound_ct), ty } = *ct { if self.amount == 0 || debruijn < self.current_index { ct } else { @@ -813,7 +812,10 @@ impl TypeFolder<'tcx> for Shifter<'tcx> { debruijn.shifted_out(self.amount) } }; - self.tcx.mk_const_infer(ty::InferConst::Canonical(debruijn, bound_const), ty) + self.tcx.mk_const(ty::Const { + val: ConstValue::Bound(debruijn, bound_ct), + ty, + }) } } else { ct.super_fold_with(self) @@ -911,13 +913,14 @@ impl<'tcx> TypeVisitor<'tcx> for HasEscapingVarsVisitor { } fn visit_const(&mut self, ct: &'tcx ty::Const<'tcx>) -> bool { - if let ty::Const { - val: ConstValue::Infer(ty::InferConst::Canonical(debruijn, _)), - .. - } = *ct { - debruijn >= self.outer_index - } else { - false + // we don't have a `visit_infer_const` callback, so we have to + // hook in here to catch this case (annoying...), but + // otherwise we do want to remember to visit the rest of the + // const, as it has types/regions embedded in a lot of other + // places. + match ct.val { + ConstValue::Bound(debruijn, _) if debruijn >= self.outer_index => true, + _ => ct.super_visit_with(self), } } } @@ -985,7 +988,7 @@ impl<'tcx> TypeVisitor<'tcx> for LateBoundRegionsCollector { // ignore the inputs to a projection, as they may not appear // in the normalized form if self.just_constrained { - match t.sty { + match t.kind { ty::Projection(..) | ty::Opaque(..) => { return false; } _ => { } } diff --git a/src/librustc/ty/inhabitedness/mod.rs b/src/librustc/ty/inhabitedness/mod.rs index 1a0e351733..bc0cf4deaa 100644 --- a/src/librustc/ty/inhabitedness/mod.rs +++ b/src/librustc/ty/inhabitedness/mod.rs @@ -181,7 +181,7 @@ impl<'tcx> FieldDef { impl<'tcx> TyS<'tcx> { /// Calculates the forest of `DefId`s from which this type is visibly uninhabited. fn uninhabited_from(&self, tcx: TyCtxt<'tcx>) -> DefIdForest { - match self.sty { + match self.kind { Adt(def, substs) => def.uninhabited_from(tcx, substs), Never => DefIdForest::full(tcx), diff --git a/src/librustc/ty/instance.rs b/src/librustc/ty/instance.rs index a26fa72f33..5139c8085a 100644 --- a/src/librustc/ty/instance.rs +++ b/src/librustc/ty/instance.rs @@ -1,3 +1,4 @@ +use crate::hir::CodegenFnAttrFlags; use crate::hir::Unsafety; use crate::hir::def::Namespace; use crate::hir::def_id::DefId; @@ -25,6 +26,14 @@ pub enum InstanceDef<'tcx> { /// `::method` where `method` receives unsizeable `self: Self`. VtableShim(DefId), + /// `fn()` pointer where the function itself cannot be turned into a pointer. + /// + /// One example in the compiler today is functions annotated with `#[track_caller]`, which + /// must have their implicit caller location argument populated for a call. Because this is a + /// required part of the function's ABI but can't be tracked as a property of the function + /// pointer, we create a single "caller location" at the site where the function is reified. 
+ ReifyShim(DefId), + /// `::call_*` /// `DefId` is `FnTrait::call_*` FnPtrShim(DefId, Ty<'tcx>), @@ -54,12 +63,12 @@ impl<'tcx> Instance<'tcx> { fn fn_sig_noadjust(&self, tcx: TyCtxt<'tcx>) -> PolyFnSig<'tcx> { let ty = self.ty(tcx); - match ty.sty { + match ty.kind { ty::FnDef(..) | // Shims currently have type FnPtr. Not sure this should remain. ty::FnPtr(_) => ty.fn_sig(tcx), ty::Closure(def_id, substs) => { - let sig = substs.closure_sig(def_id, tcx); + let sig = substs.as_closure().sig(def_id, tcx); let env_ty = tcx.closure_env_ty(def_id, substs).unwrap(); sig.map_bound(|sig| tcx.mk_fn_sig( @@ -71,7 +80,7 @@ impl<'tcx> Instance<'tcx> { )) } ty::Generator(def_id, substs, _) => { - let sig = substs.poly_sig(def_id, tcx); + let sig = substs.as_generator().poly_sig(def_id, tcx); let env_region = ty::ReLateBound(ty::INNERMOST, ty::BrEnv); let env_ty = tcx.mk_mut_ref(tcx.mk_region(env_region), ty); @@ -123,6 +132,7 @@ impl<'tcx> InstanceDef<'tcx> { match *self { InstanceDef::Item(def_id) | InstanceDef::VtableShim(def_id) | + InstanceDef::ReifyShim(def_id) | InstanceDef::FnPtrShim(def_id, _) | InstanceDef::Virtual(def_id, _) | InstanceDef::Intrinsic(def_id, ) | @@ -178,6 +188,9 @@ impl<'tcx> fmt::Display for Instance<'tcx> { InstanceDef::VtableShim(_) => { write!(f, " - shim(vtable)") } + InstanceDef::ReifyShim(_) => { + write!(f, " - shim(reify)") + } InstanceDef::Intrinsic(_) => { write!(f, " - intrinsic") } @@ -210,7 +223,7 @@ impl<'tcx> Instance<'tcx> { } pub fn mono(tcx: TyCtxt<'tcx>, def_id: DefId) -> Instance<'tcx> { - Instance::new(def_id, tcx.global_tcx().empty_substs_for_def_id(def_id)) + Instance::new(def_id, tcx.empty_substs_for_def_id(def_id)) } #[inline] @@ -255,7 +268,7 @@ impl<'tcx> Instance<'tcx> { &ty, ); - let def = match item_type.sty { + let def = match item_type.kind { ty::FnDef(..) 
if { let f = item_type.fn_sig(tcx); f.abi() == Abi::RustIntrinsic || @@ -290,6 +303,30 @@ impl<'tcx> Instance<'tcx> { result } + pub fn resolve_for_fn_ptr( + tcx: TyCtxt<'tcx>, + param_env: ty::ParamEnv<'tcx>, + def_id: DefId, + substs: SubstsRef<'tcx>, + ) -> Option> { + debug!("resolve(def_id={:?}, substs={:?})", def_id, substs); + Instance::resolve(tcx, param_env, def_id, substs).map(|resolved| { + let has_track_caller = |def| tcx.codegen_fn_attrs(def).flags + .contains(CodegenFnAttrFlags::TRACK_CALLER); + + match resolved.def { + InstanceDef::Item(def_id) if has_track_caller(def_id) => { + debug!(" => fn pointer created for function with #[track_caller]"); + Instance { + def: InstanceDef::ReifyShim(def_id), + substs, + } + }, + _ => resolved, + } + }) + } + pub fn resolve_for_vtable( tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, @@ -315,14 +352,14 @@ impl<'tcx> Instance<'tcx> { pub fn resolve_closure( tcx: TyCtxt<'tcx>, def_id: DefId, - substs: ty::ClosureSubsts<'tcx>, + substs: ty::SubstsRef<'tcx>, requested_kind: ty::ClosureKind, ) -> Instance<'tcx> { - let actual_kind = substs.closure_kind(def_id, tcx); + let actual_kind = substs.as_closure().kind(def_id, tcx); match needs_fn_once_adapter_shim(actual_kind, requested_kind) { Ok(true) => Instance::fn_once_adapter_instance(tcx, def_id, substs), - _ => Instance::new(def_id, substs.substs) + _ => Instance::new(def_id, substs) } } @@ -335,7 +372,7 @@ impl<'tcx> Instance<'tcx> { pub fn fn_once_adapter_instance( tcx: TyCtxt<'tcx>, closure_did: DefId, - substs: ty::ClosureSubsts<'tcx>, + substs: ty::SubstsRef<'tcx>, ) -> Instance<'tcx> { debug!("fn_once_adapter_shim({:?}, {:?})", closure_did, @@ -348,7 +385,7 @@ impl<'tcx> Instance<'tcx> { let self_ty = tcx.mk_closure(closure_did, substs); - let sig = substs.closure_sig(closure_did, tcx); + let sig = substs.as_closure().sig(closure_did, tcx); let sig = tcx.normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), &sig); assert_eq!(sig.inputs().len(), 1); let substs = tcx.mk_substs_trait(self_ty, &[sig.inputs()[0].into()]); @@ -395,7 +432,7 @@ fn resolve_associated_item<'tcx>( traits::VtableGenerator(generator_data) => { Some(Instance { def: ty::InstanceDef::Item(generator_data.generator_def_id), - substs: generator_data.substs.substs + substs: generator_data.substs }) } traits::VtableClosure(closure_data) => { diff --git a/src/librustc/ty/layout.rs b/src/librustc/ty/layout.rs index e52feea162..e82232ac10 100644 --- a/src/librustc/ty/layout.rs +++ b/src/librustc/ty/layout.rs @@ -1,5 +1,5 @@ use crate::session::{self, DataTypeKind}; -use crate::ty::{self, Ty, TyCtxt, TypeFoldable, ReprOptions}; +use crate::ty::{self, Ty, TyCtxt, TypeFoldable, ReprOptions, subst::SubstsRef}; use syntax::ast::{self, Ident, IntTy, UintTy}; use syntax::attr; @@ -15,17 +15,15 @@ use std::ops::Bound; use crate::hir; use crate::ich::StableHashingContext; use crate::mir::{GeneratorLayout, GeneratorSavedLocal}; -use crate::ty::GeneratorSubsts; use crate::ty::subst::Subst; -use rustc_data_structures::bit_set::BitSet; -use rustc_data_structures::indexed_vec::{IndexVec, Idx}; -use rustc_data_structures::stable_hasher::{HashStable, StableHasher, - StableHasherResult}; +use rustc_index::bit_set::BitSet; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; +use rustc_index::vec::{IndexVec, Idx}; pub use rustc_target::abi::*; use rustc_target::spec::{HasTargetSpec, abi::Abi as SpecAbi}; use rustc_target::abi::call::{ - ArgAttribute, ArgAttributes, ArgType, Conv, FnType, IgnoreMode, PassMode, Reg, 
RegKind + ArgAttribute, ArgAttributes, ArgType, Conv, FnType, PassMode, Reg, RegKind }; pub trait IntegerExt { @@ -520,7 +518,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> { }; debug_assert!(!ty.has_infer_types()); - Ok(match ty.sty { + Ok(match ty.kind { // Basic scalars. ty::Bool => { tcx.intern_layout(LayoutDetails::scalar(self, Scalar { @@ -573,7 +571,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> { } let unsized_part = tcx.struct_tail_erasing_lifetimes(pointee, param_env); - let metadata = match unsized_part.sty { + let metadata = match unsized_part.kind { ty::Foreign(..) => { return Ok(tcx.intern_layout(LayoutDetails::scalar(self, data_ptr))); } @@ -672,10 +670,10 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> { tcx.intern_layout(unit) } - ty::Generator(def_id, substs, _) => self.generator_layout(ty, def_id, &substs)?, + ty::Generator(def_id, substs, _) => self.generator_layout(ty, def_id, substs)?, ty::Closure(def_id, ref substs) => { - let tys = substs.upvar_tys(def_id, tcx); + let tys = substs.as_closure().upvar_tys(def_id, tcx); univariant(&tys.map(|ty| self.layout_of(ty)).collect::, _>>()?, &ReprOptions::default(), StructKind::AlwaysSized)? @@ -826,10 +824,14 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> { }); (present_variants.next(), present_variants.next()) }; - if present_first.is_none() { + let present_first = match present_first { + present_first @ Some(_) => present_first, // Uninhabited because it has no variants, or only absent ones. - return tcx.layout_raw(param_env.and(tcx.types.never)); - } + None if def.is_enum() => return tcx.layout_raw(param_env.and(tcx.types.never)), + // if it's a struct, still compute a layout so that we can still compute the + // field offsets + None => Some(VariantIdx::new(0)), + }; let is_struct = !def.is_enum() || // Only one variant is present. @@ -1407,12 +1409,12 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> { &self, ty: Ty<'tcx>, def_id: hir::def_id::DefId, - substs: &GeneratorSubsts<'tcx>, + substs: SubstsRef<'tcx>, ) -> Result<&'tcx LayoutDetails, LayoutError<'tcx>> { use SavedLocalEligibility::*; let tcx = self.tcx; - let subst_field = |ty: Ty<'tcx>| { ty.subst(tcx, substs.substs) }; + let subst_field = |ty: Ty<'tcx>| { ty.subst(tcx, substs) }; let info = tcx.generator_layout(def_id); let (ineligible_locals, assignments) = self.generator_saved_local_eligibility(&info); @@ -1420,9 +1422,9 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> { // Build a prefix layout, including "promoting" all ineligible // locals as part of the prefix. We compute the layout of all of // these fields at once to get optimal packing. - let discr_index = substs.prefix_tys(def_id, tcx).count(); + let discr_index = substs.as_generator().prefix_tys(def_id, tcx).count(); // FIXME(eddyb) set the correct vaidity range for the discriminant. 
- let discr_layout = self.layout_of(substs.discr_ty(tcx))?; + let discr_layout = self.layout_of(substs.as_generator().discr_ty(tcx))?; let discr = match &discr_layout.abi { Abi::Scalar(s) => s.clone(), _ => bug!(), @@ -1431,7 +1433,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> { .map(|local| subst_field(info.field_tys[local])) .map(|ty| tcx.mk_maybe_uninit(ty)) .map(|ty| self.layout_of(ty)); - let prefix_layouts = substs.prefix_tys(def_id, tcx) + let prefix_layouts = substs.as_generator().prefix_tys(def_id, tcx) .map(|ty| self.layout_of(ty)) .chain(iter::once(Ok(discr_layout))) .chain(promoted_layouts) @@ -1618,7 +1620,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> { variants); }; - let adt_def = match layout.ty.sty { + let adt_def = match layout.ty.kind { ty::Adt(ref adt_def, _) => { debug!("print-type-size t: `{:?}` process adt", layout.ty); adt_def @@ -1759,12 +1761,12 @@ impl<'tcx> SizeSkeleton<'tcx> { Err(err) => err }; - match ty.sty { + match ty.kind { ty::Ref(_, pointee, _) | ty::RawPtr(ty::TypeAndMut { ty: pointee, .. }) => { let non_zero = !ty.is_unsafe_ptr(); let tail = tcx.struct_tail_erasing_lifetimes(pointee, param_env); - match tail.sty { + match tail.kind { ty::Param(_) | ty::Projection(_) => { debug_assert!(tail.has_param_types()); Ok(SizeSkeleton::Pointer { @@ -1883,7 +1885,7 @@ impl<'tcx> HasDataLayout for TyCtxt<'tcx> { impl<'tcx> HasTyCtxt<'tcx> for TyCtxt<'tcx> { fn tcx(&self) -> TyCtxt<'tcx> { - self.global_tcx() + *self } } @@ -2003,7 +2005,7 @@ impl TyCtxt<'tcx> { pub fn layout_of(self, param_env_and_ty: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> Result, LayoutError<'tcx>> { let cx = LayoutCx { - tcx: self.global_tcx(), + tcx: self, param_env: param_env_and_ty.param_env }; cx.layout_of(param_env_and_ty.value) @@ -2017,7 +2019,7 @@ impl ty::query::TyCtxtAt<'tcx> { pub fn layout_of(self, param_env_and_ty: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> Result, LayoutError<'tcx>> { let cx = LayoutCx { - tcx: self.global_tcx().at(self.span), + tcx: self.at(self.span), param_env: param_env_and_ty.param_env }; cx.layout_of(param_env_and_ty.value) @@ -2040,7 +2042,7 @@ where assert_eq!(layout.variants, Variants::Single { index }); } - let fields = match this.ty.sty { + let fields = match this.ty.kind { ty::Adt(def, _) => def.variants[variant_index].fields.len(), _ => bug!() }; @@ -2078,7 +2080,7 @@ where })) }; - cx.layout_of(match this.ty.sty { + cx.layout_of(match this.ty.kind { ty::Bool | ty::Char | ty::Int(_) | @@ -2115,7 +2117,7 @@ where })); } - match tcx.struct_tail_erasing_lifetimes(pointee, cx.param_env()).sty { + match tcx.struct_tail_erasing_lifetimes(pointee, cx.param_env()).kind { ty::Slice(_) | ty::Str => tcx.types.usize, ty::Dynamic(_, _) => { @@ -2148,13 +2150,13 @@ where // Tuples, generators and closures. 
ty::Closure(def_id, ref substs) => { - substs.upvar_tys(def_id, tcx).nth(i).unwrap() + substs.as_closure().upvar_tys(def_id, tcx).nth(i).unwrap() } ty::Generator(def_id, ref substs, _) => { match this.variants { Variants::Single { index } => { - substs.state_tys(def_id, tcx) + substs.as_generator().state_tys(def_id, tcx) .nth(index.as_usize()).unwrap() .nth(i).unwrap() } @@ -2162,7 +2164,7 @@ where if i == discr_index { return discr_layout(discr); } - substs.prefix_tys(def_id, tcx).nth(i).unwrap() + substs.as_generator().prefix_tys(def_id, tcx).nth(i).unwrap() } } } @@ -2202,7 +2204,7 @@ where cx: &C, offset: Size, ) -> Option { - match this.ty.sty { + match this.ty.kind { ty::RawPtr(mt) if offset.bytes() == 0 => { cx.layout_of(mt.ty).to_result().ok() .map(|layout| PointeeInfo { @@ -2309,7 +2311,7 @@ where // FIXME(eddyb) This should be for `ptr::Unique`, not `Box`. if let Some(ref mut pointee) = result { - if let ty::Adt(def, _) = this.ty.sty { + if let ty::Adt(def, _) = this.ty.kind { if def.is_box() && offset.bytes() == 0 { pointee.safe = Some(PointerKind::UniqueOwned); } @@ -2323,9 +2325,7 @@ where } impl<'a> HashStable> for Variants { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { use crate::ty::layout::Variants::*; mem::discriminant(self).hash_stable(hcx, hasher); @@ -2349,9 +2349,7 @@ impl<'a> HashStable> for Variants { } impl<'a> HashStable> for DiscriminantKind { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { use crate::ty::layout::DiscriminantKind::*; mem::discriminant(self).hash_stable(hcx, hasher); @@ -2372,9 +2370,7 @@ impl<'a> HashStable> for DiscriminantKind { } impl<'a> HashStable> for FieldPlacement { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { use crate::ty::layout::FieldPlacement::*; mem::discriminant(self).hash_stable(hcx, hasher); @@ -2395,19 +2391,13 @@ impl<'a> HashStable> for FieldPlacement { } impl<'a> HashStable> for VariantIdx { - fn hash_stable( - &self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher, - ) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { self.as_u32().hash_stable(hcx, hasher) } } impl<'a> HashStable> for Abi { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { use crate::ty::layout::Abi::*; mem::discriminant(self).hash_stable(hcx, hasher); @@ -2432,9 +2422,7 @@ impl<'a> HashStable> for Abi { } impl<'a> HashStable> for Scalar { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { let Scalar { value, ref valid_range } = *self; value.hash_stable(hcx, hasher); valid_range.start().hash_stable(hcx, hasher); @@ -2476,29 +2464,19 @@ impl_stable_hash_for!(struct crate::ty::layout::AbiAndPrefAlign { }); impl<'tcx> HashStable> for Align { - fn hash_stable( - &self, - hcx: &mut StableHashingContext<'tcx>, - hasher: &mut StableHasher, - ) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'tcx>, hasher: &mut StableHasher) { 
self.bytes().hash_stable(hcx, hasher); } } impl<'tcx> HashStable> for Size { - fn hash_stable( - &self, - hcx: &mut StableHashingContext<'tcx>, - hasher: &mut StableHasher, - ) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'tcx>, hasher: &mut StableHasher) { self.bytes().hash_stable(hcx, hasher); } } impl<'a, 'tcx> HashStable> for LayoutError<'tcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { use crate::ty::layout::LayoutError::*; mem::discriminant(self).hash_stable(hcx, hasher); @@ -2618,6 +2596,7 @@ where // It's the ABI's job to select this, not ours. System => bug!("system abi should be selected elsewhere"), + EfiApi => bug!("eficall abi should be selected elsewhere"), Stdcall => Conv::X86Stdcall, Fastcall => Conv::X86Fastcall, @@ -2641,7 +2620,7 @@ where let extra_args = if sig.abi == RustCall { assert!(!sig.c_variadic && extra_args.is_empty()); - match sig.inputs().last().unwrap().sty { + match sig.inputs().last().unwrap().kind { ty::Tuple(tupled_arguments) => { inputs = &sig.inputs()[0..sig.inputs().len() - 1]; tupled_arguments.iter().map(|k| k.expect_ty()).collect() @@ -2722,14 +2701,6 @@ where } }; - // Store the index of the last argument. This is useful for working with - // C-compatible variadic arguments. - let last_arg_idx = if sig.inputs().is_empty() { - None - } else { - Some(sig.inputs().len() - 1) - }; - let arg_of = |ty: Ty<'tcx>, arg_idx: Option| { let is_return = arg_idx.is_none(); let mut arg = mk_arg_type(ty, arg_idx); @@ -2739,30 +2710,7 @@ where // The same is true for s390x-unknown-linux-gnu // and sparc64-unknown-linux-gnu. if is_return || rust_abi || (!win_x64_gnu && !linux_s390x && !linux_sparc64) { - arg.mode = PassMode::Ignore(IgnoreMode::Zst); - } - } - - // If this is a C-variadic function, this is not the return value, - // and there is one or more fixed arguments; ensure that the `VaListImpl` - // is ignored as an argument. - if sig.c_variadic { - match (last_arg_idx, arg_idx) { - (Some(last_idx), Some(cur_idx)) if last_idx == cur_idx => { - let va_list_did = match cx.tcx().lang_items().va_list() { - Some(did) => did, - None => bug!("`va_list` lang item required for C-variadic functions"), - }; - match ty.sty { - ty::Adt(def, _) if def.did == va_list_did => { - // This is the "spoofed" `VaListImpl`. Set the arguments mode - // so that it will be ignored. 
- arg.mode = PassMode::Ignore(IgnoreMode::CVarArgs); - } - _ => (), - } - } - _ => {} + arg.mode = PassMode::Ignore; } } diff --git a/src/librustc/ty/mod.rs b/src/librustc/ty/mod.rs index f879693eee..60028f2488 100644 --- a/src/librustc/ty/mod.rs +++ b/src/librustc/ty/mod.rs @@ -4,7 +4,7 @@ pub use self::Variance::*; pub use self::AssocItemContainer::*; pub use self::BorrowKind::*; pub use self::IntVarValue::*; -pub use self::fold::TypeFoldable; +pub use self::fold::{TypeFoldable, TypeVisitor}; use crate::hir::{map as hir_map, GlobMap, TraitMap}; use crate::hir::Node; @@ -15,6 +15,7 @@ use rustc_macros::HashStable; use crate::ich::Fingerprint; use crate::ich::StableHashingContext; use crate::infer::canonical::Canonical; +use crate::middle::cstore::CrateStoreDyn; use crate::middle::lang_items::{FnTraitLangItem, FnMutTraitLangItem, FnOnceTraitLangItem}; use crate::middle::resolve_lifetime::ObjectLifetimeDefault; use crate::mir::Body; @@ -28,7 +29,7 @@ use crate::ty::subst::{Subst, InternalSubsts, SubstsRef}; use crate::ty::util::{IntTypeExt, Discr}; use crate::ty::walk::TypeWalker; use crate::util::captures::Captures; -use crate::util::nodemap::{NodeSet, DefIdMap, FxHashMap}; +use crate::util::nodemap::{NodeMap, NodeSet, DefIdMap, FxHashMap}; use arena::SyncDroplessArena; use crate::session::DataTypeKind; @@ -45,15 +46,14 @@ use std::{mem, ptr}; use std::ops::Range; use syntax::ast::{self, Name, Ident, NodeId}; use syntax::attr; -use syntax::ext::hygiene::ExpnId; -use syntax::symbol::{kw, sym, Symbol, InternedString}; +use syntax_pos::symbol::{kw, sym, Symbol}; +use syntax_pos::hygiene::ExpnId; use syntax_pos::Span; use smallvec; -use rustc_data_structures::fx::FxIndexMap; -use rustc_data_structures::indexed_vec::{Idx, IndexVec}; -use rustc_data_structures::stable_hasher::{StableHasher, StableHasherResult, - HashStable}; +use rustc_data_structures::fx::{FxIndexMap}; +use rustc_data_structures::stable_hasher::{StableHasher, HashStable}; +use rustc_index::vec::{Idx, IndexVec}; use crate::hir; @@ -84,6 +84,10 @@ pub use self::context::{ pub use self::instance::{Instance, InstanceDef}; +pub use self::structural_match::search_for_structural_match_violation; +pub use self::structural_match::type_marked_structural; +pub use self::structural_match::NonStructuralMatchTy; + pub use self::trait_def::TraitDef; pub use self::query::queries; @@ -116,12 +120,15 @@ pub mod util; mod context; mod instance; mod structural_impls; +mod structural_match; mod sty; // Data types -#[derive(Clone)] -pub struct Resolutions { +pub struct ResolverOutputs { + pub definitions: hir_map::Definitions, + pub cstore: Box, + pub extern_crate_map: NodeMap, pub trait_map: TraitMap, pub maybe_unused_trait_imports: NodeSet, pub maybe_unused_extern_crates: Vec<(NodeId, Span)>, @@ -159,7 +166,7 @@ impl AssocItemContainer { /// The "header" of an impl is everything outside the body: a Self type, a trait /// ref (in the case of a trait impl), and a set of predicates (from the /// bounds / where-clauses). -#[derive(Clone, PartialEq, Eq, Hash, Debug)] +#[derive(Clone, Debug)] pub struct ImplHeader<'tcx> { pub impl_def_id: DefId, pub self_ty: Ty<'tcx>, @@ -167,6 +174,19 @@ pub struct ImplHeader<'tcx> { pub predicates: Vec>, } +#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, HashStable)] +pub enum ImplPolarity { + /// `impl Trait for Type` + Positive, + /// `impl !Trait for Type` + Negative, + /// `#[rustc_reservation_impl] impl Trait for Type` + /// + /// This is a "stability hack", not a real Rust feature. 
+ /// See #64631 for details. + Reservation, +} + #[derive(Copy, Clone, Debug, PartialEq, HashStable)] pub struct AssocItem { pub def_id: DefId, @@ -182,7 +202,7 @@ pub struct AssocItem { pub method_has_self_argument: bool, } -#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, RustcEncodable, RustcDecodable, HashStable)] +#[derive(Copy, Clone, PartialEq, Debug, HashStable)] pub enum AssocKind { Const, Method, @@ -318,7 +338,7 @@ impl Visibility { } } -#[derive(Copy, Clone, PartialEq, Eq, RustcDecodable, RustcEncodable, Hash, HashStable)] +#[derive(Copy, Clone, PartialEq, RustcDecodable, RustcEncodable, HashStable)] pub enum Variance { Covariant, // T <: T iff A <: B -- e.g., function return type Invariant, // T <: T iff B == A -- e.g., type of mutable cell @@ -438,7 +458,7 @@ bitflags! { /// `true` if there are "names" of types and regions and so forth /// that are local to a particular fn - const HAS_FREE_LOCAL_NAMES = 1 << 9; + const HAS_FREE_LOCAL_NAMES = 1 << 9; /// Present if the type belongs in a local type context. /// Only set for Infer other than Fresh. @@ -446,11 +466,11 @@ bitflags! { /// Does this have any `ReLateBound` regions? Used to check /// if a global bound is safe to evaluate. - const HAS_RE_LATE_BOUND = 1 << 11; + const HAS_RE_LATE_BOUND = 1 << 11; const HAS_TY_PLACEHOLDER = 1 << 12; - const HAS_CT_INFER = 1 << 13; + const HAS_CT_INFER = 1 << 13; const HAS_CT_PLACEHOLDER = 1 << 14; const NEEDS_SUBST = TypeFlags::HAS_PARAMS.bits | @@ -479,7 +499,7 @@ bitflags! { #[allow(rustc::usage_of_ty_tykind)] pub struct TyS<'tcx> { - pub sty: TyKind<'tcx>, + pub kind: TyKind<'tcx>, pub flags: TypeFlags, /// This is a kind of confusing thing: it stores the smallest @@ -508,13 +528,13 @@ static_assert_size!(TyS<'_>, 32); impl<'tcx> Ord for TyS<'tcx> { fn cmp(&self, other: &TyS<'tcx>) -> Ordering { - self.sty.cmp(&other.sty) + self.kind.cmp(&other.kind) } } impl<'tcx> PartialOrd for TyS<'tcx> { fn partial_cmp(&self, other: &TyS<'tcx>) -> Option { - Some(self.sty.cmp(&other.sty)) + Some(self.kind.cmp(&other.kind)) } } @@ -534,7 +554,7 @@ impl<'tcx> Hash for TyS<'tcx> { impl<'tcx> TyS<'tcx> { pub fn is_primitive_ty(&self) -> bool { - match self.sty { + match self.kind { Bool | Char | Int(_) | @@ -550,7 +570,7 @@ impl<'tcx> TyS<'tcx> { } pub fn is_suggestable(&self) -> bool { - match self.sty { + match self.kind { Opaque(..) | FnDef(..) | FnPtr(..) | @@ -564,24 +584,22 @@ impl<'tcx> TyS<'tcx> { } impl<'a, 'tcx> HashStable> for ty::TyS<'tcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { let ty::TyS { - ref sty, + ref kind, // The other fields just provide fast access to information that is - // also contained in `sty`, so no need to hash them. + // also contained in `kind`, so no need to hash them. flags: _, outer_exclusive_binder: _, } = *self; - sty.hash_stable(hcx, hasher); + kind.hash_stable(hcx, hasher); } } -#[cfg_attr(not(bootstrap), rustc_diagnostic_item = "Ty")] +#[rustc_diagnostic_item = "Ty"] pub type Ty<'tcx> = &'tcx TyS<'tcx>; impl<'tcx> rustc_serialize::UseSpecializedEncodable for Ty<'tcx> {} @@ -590,7 +608,8 @@ impl<'tcx> rustc_serialize::UseSpecializedDecodable for Ty<'tcx> {} pub type CanonicalTy<'tcx> = Canonical<'tcx, Ty<'tcx>>; extern { - /// A dummy type used to force `List` to by unsized without requiring fat pointers. + /// A dummy type used to force `List` to be unsized while not requiring references to it be wide + /// pointers. 
type OpaqueListContents; } @@ -689,6 +708,13 @@ impl Deref for List { type Target = [T]; #[inline(always)] fn deref(&self) -> &[T] { + self.as_ref() + } +} + +impl AsRef<[T]> for List { + #[inline(always)] + fn as_ref(&self) -> &[T] { unsafe { slice::from_raw_parts(self.data.as_ptr(), self.len) } @@ -733,7 +759,7 @@ pub struct UpvarId { pub closure_expr_id: LocalDefId, } -#[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable, Copy, HashStable)] +#[derive(Clone, PartialEq, Debug, RustcEncodable, RustcDecodable, Copy, HashStable)] pub enum BorrowKind { /// Data must be immutable and is aliasable. ImmBorrow, @@ -830,7 +856,7 @@ impl ty::EarlyBoundRegion { /// Does this early bound region have a name? Early bound regions normally /// always have names except when using anonymous lifetimes (`'_`). pub fn has_name(&self) -> bool { - self.name != kw::UnderscoreLifetime.as_interned_str() + self.name != kw::UnderscoreLifetime } } @@ -847,7 +873,7 @@ pub enum GenericParamDefKind { #[derive(Clone, RustcEncodable, RustcDecodable, HashStable)] pub struct GenericParamDef { - pub name: InternedString, + pub name: Symbol, pub def_id: DefId, pub index: u32, @@ -999,15 +1025,12 @@ impl<'tcx> Generics { } /// Bounds on generics. -#[derive(Clone, Default, Debug, HashStable)] +#[derive(Copy, Clone, Default, Debug, RustcEncodable, RustcDecodable, HashStable)] pub struct GenericPredicates<'tcx> { pub parent: Option, - pub predicates: Vec<(Predicate<'tcx>, Span)>, + pub predicates: &'tcx [(Predicate<'tcx>, Span)], } -impl<'tcx> rustc_serialize::UseSpecializedEncodable for GenericPredicates<'tcx> {} -impl<'tcx> rustc_serialize::UseSpecializedDecodable for GenericPredicates<'tcx> {} - impl<'tcx> GenericPredicates<'tcx> { pub fn instantiate( &self, @@ -1100,7 +1123,7 @@ pub enum Predicate<'tcx> { /// No direct syntax. May be thought of as `where T: FnFoo<...>` /// for some substitutions `...` and `T` being a closure type. /// Satisfied (or refuted) once we know the closure's kind. - ClosureKind(DefId, ClosureSubsts<'tcx>, ClosureKind), + ClosureKind(DefId, SubstsRef<'tcx>, ClosureKind), /// `T1 <: T2` Subtype(PolySubtypePredicate<'tcx>), @@ -1120,7 +1143,7 @@ pub struct CratePredicatesMap<'tcx> { /// For each struct with outlive bounds, maps to a vector of the /// predicate of its outlive bounds. If an item has no outlives /// bounds, it will have no entry. - pub predicates: FxHashMap]>, + pub predicates: FxHashMap, Span)]>, } impl<'tcx> AsRef> for Predicate<'tcx> { @@ -1447,7 +1470,7 @@ impl<'tcx> Predicate<'tcx> { WalkTysIter::None } ty::Predicate::ClosureKind(_closure_def_id, closure_substs, _kind) => { - WalkTysIter::Types(closure_substs.substs.types()) + WalkTysIter::Types(closure_substs.types()) } ty::Predicate::ConstEvaluatable(_, substs) => { WalkTysIter::Types(substs.types()) @@ -1526,7 +1549,7 @@ impl<'tcx> InstantiatedPredicates<'tcx> { } } -newtype_index! { +rustc_index::newtype_index! { /// "Universes" are used during type- and trait-checking in the /// presence of `for<..>` binders to control what sets of names are /// visible. 
Universes are arranged into a tree: the root universe @@ -1620,11 +1643,7 @@ impl<'a, T> HashStable> for Placeholder where T: HashStable>, { - fn hash_stable( - &self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher - ) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { self.universe.hash_stable(hcx, hasher); self.name.hash_stable(hcx, hasher); } @@ -1761,9 +1780,7 @@ impl<'a, 'tcx, T> HashStable> for ParamEnvAnd<'tcx, T> where T: HashStable>, { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { let ParamEnvAnd { ref param_env, ref value @@ -1997,9 +2014,7 @@ impl<'tcx> rustc_serialize::UseSpecializedDecodable for &'tcx AdtDef {} impl<'a> HashStable> for AdtDef { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { thread_local! { static CACHE: RefCell> = Default::default(); } @@ -2310,7 +2325,7 @@ impl<'tcx> AdtDef { } #[inline] - pub fn predicates(&self, tcx: TyCtxt<'tcx>) -> &'tcx GenericPredicates<'tcx> { + pub fn predicates(&self, tcx: TyCtxt<'tcx>) -> GenericPredicates<'tcx> { tcx.predicates_of(self.did) } @@ -2365,7 +2380,7 @@ impl<'tcx> AdtDef { pub fn eval_explicit_discr(&self, tcx: TyCtxt<'tcx>, expr_did: DefId) -> Option> { let param_env = tcx.param_env(expr_did); let repr_type = self.repr.discr_type(); - let substs = InternalSubsts::identity_for_item(tcx.global_tcx(), expr_did); + let substs = InternalSubsts::identity_for_item(tcx, expr_did); let instance = ty::Instance::new(expr_did, substs); let cid = GlobalId { instance, @@ -2374,7 +2389,7 @@ impl<'tcx> AdtDef { match tcx.const_eval(param_env.and(cid)) { Ok(val) => { // FIXME: Find the right type and use it instead of `val.ty` here - if let Some(b) = val.try_eval_bits(tcx.global_tcx(), param_env, val.ty) { + if let Some(b) = val.try_eval_bits(tcx, param_env, val.ty) { trace!("discriminants: {} ({:?})", b, repr_type); Some(Discr { val: b, @@ -2410,7 +2425,7 @@ impl<'tcx> AdtDef { tcx: TyCtxt<'tcx>, ) -> impl Iterator)> + Captures<'tcx> { let repr_type = self.repr.discr_type(); - let initial = repr_type.initial_discriminant(tcx.global_tcx()); + let initial = repr_type.initial_discriminant(tcx); let mut prev_discr = None::>; self.variants.iter_enumerated().map(move |(i, v)| { let mut discr = prev_discr.map_or(initial, |d| d.wrap_incr(tcx)); @@ -2444,7 +2459,7 @@ impl<'tcx> AdtDef { let (val, offset) = self.discriminant_def_for_variant(variant_index); let explicit_value = val .and_then(|expr_did| self.eval_explicit_discr(tcx, expr_did)) - .unwrap_or_else(|| self.repr.discr_type().initial_discriminant(tcx.global_tcx())); + .unwrap_or_else(|| self.repr.discr_type().initial_discriminant(tcx)); explicit_value.checked_add(tcx, offset as u128).0 } @@ -2494,7 +2509,7 @@ impl<'tcx> AdtDef { } fn sized_constraint_for_ty(&self, tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Vec> { - let result = match ty.sty { + let result = match ty.kind { Bool | Char | Int(..) | Uint(..) | Float(..) | RawPtr(..) | Ref(..) | FnDef(..) | FnPtr(_) | Array(..) | Closure(..) | Generator(..) 
| Never => { @@ -2550,7 +2565,7 @@ impl<'tcx> AdtDef { def_id: sized_trait, substs: tcx.mk_substs_trait(ty, &[]) }).to_predicate(); - let predicates = &tcx.predicates_of(self.did).predicates; + let predicates = tcx.predicates_of(self.did).predicates; if predicates.iter().any(|(p, _)| *p == sized_predicate) { vec![] } else { @@ -2911,7 +2926,26 @@ impl<'tcx> TyCtxt<'tcx> { return Some(ImplOverlapKind::Permitted); } - let is_legit = if self.features().overlapping_marker_traits { + match (self.impl_polarity(def_id1), self.impl_polarity(def_id2)) { + (ImplPolarity::Reservation, _) | + (_, ImplPolarity::Reservation) => { + // `#[rustc_reservation_impl]` impls don't overlap with anything + debug!("impls_are_allowed_to_overlap({:?}, {:?}) = Some(Permitted) (reservations)", + def_id1, def_id2); + return Some(ImplOverlapKind::Permitted); + } + (ImplPolarity::Positive, ImplPolarity::Negative) | + (ImplPolarity::Negative, ImplPolarity::Positive) => { + // `impl AutoTrait for Type` + `impl !AutoTrait for Type` + debug!("impls_are_allowed_to_overlap({:?}, {:?}) - None (differing polarities)", + def_id1, def_id2); + return None; + } + (ImplPolarity::Positive, ImplPolarity::Positive) | + (ImplPolarity::Negative, ImplPolarity::Negative) => {} + }; + + let is_marker_overlap = if self.features().overlapping_marker_traits { let trait1_is_empty = self.impl_trait_ref(def_id1) .map_or(false, |trait_ref| { self.associated_item_def_ids(trait_ref.def_id).is_empty() @@ -2920,22 +2954,19 @@ impl<'tcx> TyCtxt<'tcx> { .map_or(false, |trait_ref| { self.associated_item_def_ids(trait_ref.def_id).is_empty() }); - self.impl_polarity(def_id1) == self.impl_polarity(def_id2) - && trait1_is_empty - && trait2_is_empty + trait1_is_empty && trait2_is_empty } else { let is_marker_impl = |def_id: DefId| -> bool { let trait_ref = self.impl_trait_ref(def_id); trait_ref.map_or(false, |tr| self.trait_def(tr.def_id).is_marker) }; - self.impl_polarity(def_id1) == self.impl_polarity(def_id2) - && is_marker_impl(def_id1) - && is_marker_impl(def_id2) + is_marker_impl(def_id1) && is_marker_impl(def_id2) }; - if is_legit { - debug!("impls_are_allowed_to_overlap({:?}, {:?}) = Some(Permitted)", - def_id1, def_id2); + + if is_marker_overlap { + debug!("impls_are_allowed_to_overlap({:?}, {:?}) = Some(Permitted) (marker overlap)", + def_id1, def_id2); Some(ImplOverlapKind::Permitted) } else { if let Some(self_ty1) = self.issue33140_self_ty(def_id1) { @@ -2995,7 +3026,7 @@ impl<'tcx> TyCtxt<'tcx> { }), _ => def_key.disambiguated_data.data.get_opt_name().unwrap_or_else(|| { bug!("item_name: no name for {:?}", self.def_path(id)); - }).as_symbol(), + }), } } } @@ -3007,6 +3038,7 @@ impl<'tcx> TyCtxt<'tcx> { self.optimized_mir(did) } ty::InstanceDef::VtableShim(..) | + ty::InstanceDef::ReifyShim(..) | ty::InstanceDef::Intrinsic(..) | ty::InstanceDef::FnPtrShim(..) | ty::InstanceDef::Virtual(..) 
| @@ -3114,6 +3146,7 @@ impl<'tcx> TyCtxt<'tcx> { } } +#[derive(Clone)] pub struct AssocItemsIterator<'tcx> { tcx: TyCtxt<'tcx>, def_ids: &'tcx [DefId], @@ -3135,7 +3168,7 @@ fn associated_item(tcx: TyCtxt<'_>, def_id: DefId) -> AssocItem { let parent_id = tcx.hir().get_parent_item(id); let parent_def_id = tcx.hir().local_def_id(parent_id); let parent_item = tcx.hir().expect_item(parent_id); - match parent_item.node { + match parent_item.kind { hir::ItemKind::Impl(.., ref impl_item_refs) => { if let Some(impl_item_ref) = impl_item_refs.iter().find(|i| i.id.hir_id == id) { let assoc_item = tcx.associated_item_from_impl_item_ref(parent_def_id, @@ -3160,7 +3193,7 @@ fn associated_item(tcx: TyCtxt<'_>, def_id: DefId) -> AssocItem { span_bug!(parent_item.span, "unexpected parent of trait or impl item or item not found: {:?}", - parent_item.node) + parent_item.kind) } #[derive(Clone, HashStable)] @@ -3192,7 +3225,7 @@ fn adt_sized_constraint(tcx: TyCtxt<'_>, def_id: DefId) -> AdtSizedConstraint<'_ fn associated_item_def_ids(tcx: TyCtxt<'_>, def_id: DefId) -> &[DefId] { let id = tcx.hir().as_local_hir_id(def_id).unwrap(); let item = tcx.hir().expect_item(id); - match item.node { + match item.kind { hir::ItemKind::Trait(.., ref trait_item_refs) => { tcx.arena.alloc_from_iter( trait_item_refs.iter() @@ -3233,7 +3266,7 @@ fn trait_of_item(tcx: TyCtxt<'_>, def_id: DefId) -> Option { pub fn is_impl_trait_defn(tcx: TyCtxt<'_>, def_id: DefId) -> Option { if let Some(hir_id) = tcx.hir().as_local_hir_id(def_id) { if let Node::Item(item) = tcx.hir().get(hir_id) { - if let hir::ItemKind::OpaqueTy(ref opaque_ty) = item.node { + if let hir::ItemKind::OpaqueTy(ref opaque_ty) = item.kind { return opaque_ty.impl_trait_fn; } } @@ -3317,7 +3350,7 @@ fn issue33140_self_ty(tcx: TyCtxt<'_>, def_id: DefId) -> Option> { debug!("issue33140_self_ty({:?}), trait-ref={:?}", def_id, trait_ref); let is_marker_like = - tcx.impl_polarity(def_id) == hir::ImplPolarity::Positive && + tcx.impl_polarity(def_id) == ty::ImplPolarity::Positive && tcx.associated_item_def_ids(trait_ref.def_id).is_empty(); // Check whether these impls would be ok for a marker trait. @@ -3339,7 +3372,7 @@ fn issue33140_self_ty(tcx: TyCtxt<'_>, def_id: DefId) -> Option> { } let self_ty = trait_ref.self_ty(); - let self_ty_matches = match self_ty.sty { + let self_ty_matches = match self_ty.kind { ty::Dynamic(ref data, ty::ReStatic) => data.principal().is_none(), _ => false }; @@ -3368,13 +3401,13 @@ fn asyncness(tcx: TyCtxt<'_>, def_id: DefId) -> hir::IsAsync { fn_like.asyncness() } - pub fn provide(providers: &mut ty::query::Providers<'_>) { context::provide(providers); erase_regions::provide(providers); layout::provide(providers); util::provide(providers); constness::provide(providers); + crate::traits::query::dropck_outlives::provide(providers); *providers = ty::query::Providers { asyncness, associated_item, @@ -3403,11 +3436,11 @@ pub struct CrateInherentImpls { pub inherent_impls: DefIdMap>, } -#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, RustcEncodable, RustcDecodable)] +#[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable)] pub struct SymbolName { // FIXME: we don't rely on interning or equality here - better have // this be a `&'tcx str`. 
- pub name: InternedString + pub name: Symbol } impl_stable_hash_for!(struct self::SymbolName { @@ -3417,11 +3450,24 @@ impl_stable_hash_for!(struct self::SymbolName { impl SymbolName { pub fn new(name: &str) -> SymbolName { SymbolName { - name: InternedString::intern(name) + name: Symbol::intern(name) } } } +impl PartialOrd for SymbolName { + fn partial_cmp(&self, other: &SymbolName) -> Option { + self.name.as_str().partial_cmp(&other.name.as_str()) + } +} + +/// Ordering must use the chars to ensure reproducible builds. +impl Ord for SymbolName { + fn cmp(&self, other: &SymbolName) -> Ordering { + self.name.as_str().cmp(&other.name.as_str()) + } +} + impl fmt::Display for SymbolName { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&self.name, fmt) diff --git a/src/librustc/ty/outlives.rs b/src/librustc/ty/outlives.rs index 7d1403d1e9..80e77cdfad 100644 --- a/src/librustc/ty/outlives.rs +++ b/src/librustc/ty/outlives.rs @@ -60,16 +60,16 @@ impl<'tcx> TyCtxt<'tcx> { // with `collect()` because of the need to sometimes skip subtrees // in the `subtys` iterator (e.g., when encountering a // projection). - match ty.sty { + match ty.kind { ty::Closure(def_id, ref substs) => { - for upvar_ty in substs.upvar_tys(def_id, *self) { + for upvar_ty in substs.as_closure().upvar_tys(def_id, *self) { self.compute_components(upvar_ty, out); } } ty::Generator(def_id, ref substs, _) => { // Same as the closure case - for upvar_ty in substs.upvar_tys(def_id, *self) { + for upvar_ty in substs.as_generator().upvar_tys(def_id, *self) { self.compute_components(upvar_ty, out); } diff --git a/src/librustc/ty/print/mod.rs b/src/librustc/ty/print/mod.rs index 50789bf621..d216c81f8a 100644 --- a/src/librustc/ty/print/mod.rs +++ b/src/librustc/ty/print/mod.rs @@ -1,7 +1,7 @@ use crate::hir::map::{DefPathData, DisambiguatedDefPathData}; use crate::hir::def_id::{CrateNum, DefId}; use crate::ty::{self, DefIdTree, Ty, TyCtxt}; -use crate::ty::subst::{Kind, Subst}; +use crate::ty::subst::{GenericArg, Subst}; use rustc_data_structures::fx::FxHashSet; @@ -43,7 +43,7 @@ pub trait Printer<'tcx>: Sized { fn print_def_path( self, def_id: DefId, - substs: &'tcx [Kind<'tcx>], + substs: &'tcx [GenericArg<'tcx>], ) -> Result { self.default_print_def_path(def_id, substs) } @@ -51,7 +51,7 @@ pub trait Printer<'tcx>: Sized { fn print_impl_path( self, impl_def_id: DefId, - substs: &'tcx [Kind<'tcx>], + substs: &'tcx [GenericArg<'tcx>], self_ty: Ty<'tcx>, trait_ref: Option>, ) -> Result { @@ -106,7 +106,7 @@ pub trait Printer<'tcx>: Sized { fn path_generic_args( self, print_prefix: impl FnOnce(Self) -> Result, - args: &[Kind<'tcx>], + args: &[GenericArg<'tcx>], ) -> Result; // Defaults (should not be overriden): @@ -114,7 +114,7 @@ pub trait Printer<'tcx>: Sized { fn default_print_def_path( self, def_id: DefId, - substs: &'tcx [Kind<'tcx>], + substs: &'tcx [GenericArg<'tcx>], ) -> Result { debug!("default_print_def_path: def_id={:?}, substs={:?}", def_id, substs); let key = self.tcx().def_key(def_id); @@ -189,8 +189,8 @@ pub trait Printer<'tcx>: Sized { fn generic_args_to_print( &self, generics: &'tcx ty::Generics, - substs: &'tcx [Kind<'tcx>], - ) -> &'tcx [Kind<'tcx>] { + substs: &'tcx [GenericArg<'tcx>], + ) -> &'tcx [GenericArg<'tcx>] { let mut own_params = generics.parent_count..generics.count(); // Don't print args for `Self` parameters (of traits). 
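The new `Ord` impl for `SymbolName` above deliberately compares the underlying strings rather than the interned `Symbol`, because a symbol's interner index depends on the order in which names happen to be interned and would make sorted output differ between otherwise identical compilations. A small stand-alone sketch of that distinction (the `intern` helper is illustrative, not rustc's interner):

// Sketch: index-based ordering is run-dependent, char-based ordering is not.
use std::collections::HashMap;

fn intern<'a>(table: &mut HashMap<&'a str, u32>, s: &'a str) -> u32 {
    let next = table.len() as u32;
    *table.entry(s).or_insert(next)
}

fn main() {
    // Two runs that intern the same symbols in a different order assign
    // different indices ...
    let mut run_a = HashMap::new();
    let mut run_b = HashMap::new();
    let (a_foo, a_bar) = (intern(&mut run_a, "foo"), intern(&mut run_a, "bar"));
    let (b_bar, b_foo) = (intern(&mut run_b, "bar"), intern(&mut run_b, "foo"));
    assert_ne!(a_foo < a_bar, b_foo < b_bar); // index order flips between runs

    // ... but comparing the symbol text itself gives the same order every time.
    let mut names = vec!["foo", "bar"];
    names.sort(); // by chars
    assert_eq!(names, vec!["bar", "foo"]);
}
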
@@ -203,7 +203,7 @@ pub trait Printer<'tcx>: Sized { match param.kind { ty::GenericParamDefKind::Lifetime => false, ty::GenericParamDefKind::Type { has_default, .. } => { - has_default && substs[param.index as usize] == Kind::from( + has_default && substs[param.index as usize] == GenericArg::from( self.tcx().type_of(param.def_id).subst(self.tcx(), substs) ) } @@ -217,7 +217,7 @@ pub trait Printer<'tcx>: Sized { fn default_print_impl_path( self, impl_def_id: DefId, - _substs: &'tcx [Kind<'tcx>], + _substs: &'tcx [GenericArg<'tcx>], self_ty: Ty<'tcx>, impl_trait_ref: Option>, ) -> Result { @@ -266,7 +266,7 @@ pub trait Printer<'tcx>: Sized { /// type. It's just a heuristic so it makes some questionable /// decisions and we may want to adjust it later. pub fn characteristic_def_id_of_type(ty: Ty<'_>) -> Option { - match ty.sty { + match ty.kind { ty::Adt(adt_def, _) => Some(adt_def.did), ty::Dynamic(data, ..) => data.principal_def_id(), diff --git a/src/librustc/ty/print/obsolete.rs b/src/librustc/ty/print/obsolete.rs index b68e6a7448..e72916de6a 100644 --- a/src/librustc/ty/print/obsolete.rs +++ b/src/librustc/ty/print/obsolete.rs @@ -8,7 +8,7 @@ use rustc::hir::def_id::DefId; use rustc::mir::interpret::ConstValue; use rustc::ty::subst::SubstsRef; -use rustc::ty::{self, ClosureSubsts, Const, GeneratorSubsts, Instance, Ty, TyCtxt}; +use rustc::ty::{self, Const, Instance, Ty, TyCtxt}; use rustc::{bug, hir}; use std::fmt::Write; use std::iter; @@ -34,7 +34,7 @@ impl DefPathBasedNames<'tcx> { // When being used for codegen purposes, `debug` should be set to `false` // in order to catch unexpected types that should never end up in a type name. pub fn push_type_name(&self, t: Ty<'tcx>, output: &mut String, debug: bool) { - match t.sty { + match t.kind { ty::Bool => output.push_str("bool"), ty::Char => output.push_str("char"), ty::Str => output.push_str("str"), @@ -154,8 +154,8 @@ impl DefPathBasedNames<'tcx> { self.push_type_name(sig.output(), output, debug); } } - ty::Generator(def_id, GeneratorSubsts { ref substs }, _) - | ty::Closure(def_id, ClosureSubsts { ref substs }) => { + ty::Generator(def_id, substs, _) + | ty::Closure(def_id, substs) => { self.push_def_path(def_id, output); let generics = self.tcx.generics_of(self.tcx.closure_base_def_id(def_id)); let substs = substs.truncate_to(self.tcx, generics); @@ -218,9 +218,9 @@ impl DefPathBasedNames<'tcx> { // foo::bar::ItemName:: for part in self.tcx.def_path(def_id).data { if self.omit_disambiguators { - write!(output, "{}::", part.data.as_interned_str()).unwrap(); + write!(output, "{}::", part.data.as_symbol()).unwrap(); } else { - write!(output, "{}[{}]::", part.data.as_interned_str(), part.disambiguator) + write!(output, "{}[{}]::", part.data.as_symbol(), part.disambiguator) .unwrap(); } } diff --git a/src/librustc/ty/print/pretty.rs b/src/librustc/ty/print/pretty.rs index d99580116e..8a98a5d836 100644 --- a/src/librustc/ty/print/pretty.rs +++ b/src/librustc/ty/print/pretty.rs @@ -5,7 +5,7 @@ use crate::hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE}; use crate::middle::cstore::{ExternCrate, ExternCrateSource}; use crate::middle::region; use crate::ty::{self, DefIdTree, ParamConst, Ty, TyCtxt, TypeFoldable}; -use crate::ty::subst::{Kind, Subst, UnpackedKind}; +use crate::ty::subst::{GenericArg, Subst, GenericArgKind}; use crate::ty::layout::{Integer, IntegerExt, Size}; use crate::mir::interpret::{ConstValue, sign_extend, Scalar, truncate}; @@ -14,7 +14,7 @@ use rustc_apfloat::Float; use rustc_target::spec::abi::Abi; use 
syntax::ast; use syntax::attr::{SignedInt, UnsignedInt}; -use syntax::symbol::{kw, InternedString}; +use syntax::symbol::{kw, Symbol}; use std::cell::Cell; use std::fmt::{self, Write as _}; @@ -183,7 +183,7 @@ pub trait PrettyPrinter<'tcx>: fn print_value_path( self, def_id: DefId, - substs: &'tcx [Kind<'tcx>], + substs: &'tcx [GenericArg<'tcx>], ) -> Result { self.print_def_path(def_id, substs) } @@ -278,7 +278,7 @@ pub trait PrettyPrinter<'tcx>: match self.tcx().extern_crate(def_id) { Some(&ExternCrate { src: ExternCrateSource::Extern(def_id), - direct: true, + dependency_of: LOCAL_CRATE, span, .. }) => { @@ -384,7 +384,7 @@ pub trait PrettyPrinter<'tcx>: let reexport = self.tcx().item_children(visible_parent) .iter() .find(|child| child.res.def_id() == def_id) - .map(|child| child.ident.as_interned_str()); + .map(|child| child.ident.name); if let Some(reexport) = reexport { *name = reexport; } @@ -392,7 +392,7 @@ pub trait PrettyPrinter<'tcx>: // Re-exported `extern crate` (#43189). DefPathData::CrateRoot => { data = DefPathData::TypeNs( - self.tcx().original_crate_name(def_id.krate).as_interned_str(), + self.tcx().original_crate_name(def_id.krate), ); } _ => {} @@ -414,7 +414,7 @@ pub trait PrettyPrinter<'tcx>: // Inherent impls. Try to print `Foo::bar` for an inherent // impl on `Foo`, but fallback to `::bar` if self-type is // anything other than a simple path. - match self_ty.sty { + match self_ty.kind { ty::Adt(..) | ty::Foreign(_) | ty::Bool | ty::Char | ty::Str | ty::Int(_) | ty::Uint(_) | ty::Float(_) => { @@ -463,7 +463,7 @@ pub trait PrettyPrinter<'tcx>: ) -> Result { define_scoped_cx!(self); - match ty.sty { + match ty.kind { ty::Bool => p!(write("bool")), ty::Char => p!(write("char")), ty::Int(t) => p!(write("{}", t.ty_to_string())), @@ -605,8 +605,8 @@ pub trait PrettyPrinter<'tcx>: } ty::Str => p!(write("str")), ty::Generator(did, substs, movability) => { - let upvar_tys = substs.upvar_tys(did, self.tcx()); - let witness = substs.witness(did, self.tcx()); + let upvar_tys = substs.as_generator().upvar_tys(did, self.tcx()); + let witness = substs.as_generator().witness(did, self.tcx()); if movability == hir::GeneratorMovability::Movable { p!(write("[generator")); } else { @@ -649,7 +649,7 @@ pub trait PrettyPrinter<'tcx>: p!(in_binder(&types)); } ty::Closure(did, substs) => { - let upvar_tys = substs.upvar_tys(did, self.tcx()); + let upvar_tys = substs.as_closure().upvar_tys(did, self.tcx()); p!(write("[closure")); // FIXME(eddyb) should use `def_span`. @@ -689,8 +689,8 @@ pub trait PrettyPrinter<'tcx>: if self.tcx().sess.verbose() { p!(write( " closure_kind_ty={:?} closure_sig_ty={:?}", - substs.closure_kind_ty(did, self.tcx()), - substs.closure_sig_ty(did, self.tcx()) + substs.as_closure().kind_ty(did, self.tcx()), + substs.as_closure().sig_ty(did, self.tcx()) )); } @@ -698,7 +698,9 @@ pub trait PrettyPrinter<'tcx>: }, ty::Array(ty, sz) => { p!(write("["), print(ty), write("; ")); - if let ConstValue::Unevaluated(..) = sz.val { + if self.tcx().sess.verbose() { + p!(write("{:?}", sz)); + } else if let ConstValue::Unevaluated(..) = sz.val { // do not try to evalute unevaluated constants. If we are const evaluating an // array length anon const, rustc will (with debug assertions) print the // constant's path. Which will end up here again. @@ -739,7 +741,7 @@ pub trait PrettyPrinter<'tcx>: // Special-case `Fn(...) -> ...` and resugar it. 
let fn_trait_kind = self.tcx().lang_items().fn_trait_kind(principal.def_id); if !self.tcx().sess.verbose() && fn_trait_kind.is_some() { - if let ty::Tuple(ref args) = principal.substs.type_at(0).sty { + if let ty::Tuple(ref args) = principal.substs.type_at(0).kind { let mut projections = predicates.projection_bounds(); if let (Some(proj), None) = (projections.next(), projections.next()) { let tys: Vec<_> = args.iter().map(|k| k.expect_ty()).collect(); @@ -764,13 +766,13 @@ pub trait PrettyPrinter<'tcx>: // Don't print `'_` if there's no unerased regions. let print_regions = args.iter().any(|arg| { match arg.unpack() { - UnpackedKind::Lifetime(r) => *r != ty::ReErased, + GenericArgKind::Lifetime(r) => *r != ty::ReErased, _ => false, } }); let mut args = args.iter().cloned().filter(|arg| { match arg.unpack() { - UnpackedKind::Lifetime(_) => print_regions, + GenericArgKind::Lifetime(_) => print_regions, _ => true, } }); @@ -855,126 +857,127 @@ pub trait PrettyPrinter<'tcx>: ) -> Result { define_scoped_cx!(self); - let u8 = self.tcx().types.u8; - if let ty::FnDef(did, substs) = ct.ty.sty { - p!(print_value_path(did, substs)); + if self.tcx().sess.verbose() { + p!(write("Const({:?}: {:?})", ct.val, ct.ty)); return Ok(self); } - if let ConstValue::Unevaluated(did, substs) = ct.val { - match self.tcx().def_kind(did) { - | Some(DefKind::Static) - | Some(DefKind::Const) - | Some(DefKind::AssocConst) => p!(print_value_path(did, substs)), - _ => if did.is_local() { - let span = self.tcx().def_span(did); - if let Ok(snip) = self.tcx().sess.source_map().span_to_snippet(span) { - p!(write("{}", snip)) + + let u8 = self.tcx().types.u8; + + match (ct.val, &ct.ty.kind) { + (_, ty::FnDef(did, substs)) => p!(print_value_path(*did, substs)), + (ConstValue::Unevaluated(did, substs), _) => { + match self.tcx().def_kind(did) { + | Some(DefKind::Static) + | Some(DefKind::Const) + | Some(DefKind::AssocConst) => p!(print_value_path(did, substs)), + _ => if did.is_local() { + let span = self.tcx().def_span(did); + if let Ok(snip) = self.tcx().sess.source_map().span_to_snippet(span) { + p!(write("{}", snip)) + } else { + p!(write("_: "), print(ct.ty)) + } } else { p!(write("_: "), print(ct.ty)) + }, + } + }, + (ConstValue::Infer(..), _) => p!(write("_: "), print(ct.ty)), + (ConstValue::Param(ParamConst { name, .. }), _) => p!(write("{}", name)), + (ConstValue::Scalar(Scalar::Raw { data, .. }), ty::Bool) => + p!(write("{}", if data == 0 { "false" } else { "true" })), + (ConstValue::Scalar(Scalar::Raw { data, .. }), ty::Float(ast::FloatTy::F32)) => + p!(write("{}f32", Single::from_bits(data))), + (ConstValue::Scalar(Scalar::Raw { data, .. }), ty::Float(ast::FloatTy::F64)) => + p!(write("{}f64", Double::from_bits(data))), + (ConstValue::Scalar(Scalar::Raw { data, .. }), ty::Uint(ui)) => { + let bit_size = Integer::from_attr(&self.tcx(), UnsignedInt(*ui)).size(); + let max = truncate(u128::max_value(), bit_size); + + if data == max { + p!(write("std::{}::MAX", ui)) + } else { + p!(write("{}{}", data, ui)) + }; + }, + (ConstValue::Scalar(Scalar::Raw { data, .. 
}), ty::Int(i)) => { + let bit_size = Integer::from_attr(&self.tcx(), SignedInt(*i)) + .size().bits() as u128; + let min = 1u128 << (bit_size - 1); + let max = min - 1; + + let ty = self.tcx().lift(&ct.ty).unwrap(); + let size = self.tcx().layout_of(ty::ParamEnv::empty().and(ty)) + .unwrap() + .size; + match data { + d if d == min => p!(write("std::{}::MIN", i)), + d if d == max => p!(write("std::{}::MAX", i)), + _ => p!(write("{}{}", sign_extend(data, size) as i128, i)) + } + }, + (ConstValue::Scalar(Scalar::Raw { data, .. }), ty::Char) => + p!(write("{:?}", ::std::char::from_u32(data as u32).unwrap())), + (ConstValue::Scalar(_), ty::RawPtr(_)) => p!(write("{{pointer}}")), + (ConstValue::Scalar(Scalar::Ptr(ptr)), ty::FnPtr(_)) => { + let instance = { + let alloc_map = self.tcx().alloc_map.lock(); + alloc_map.unwrap_fn(ptr.alloc_id) + }; + p!(print_value_path(instance.def_id(), instance.substs)); + }, + _ => { + let printed = if let ty::Ref(_, ref_ty, _) = ct.ty.kind { + let byte_str = match (ct.val, &ref_ty.kind) { + (ConstValue::Scalar(Scalar::Ptr(ptr)), ty::Array(t, n)) if *t == u8 => { + let n = n.eval_usize(self.tcx(), ty::ParamEnv::empty()); + Some(self.tcx() + .alloc_map.lock() + .unwrap_memory(ptr.alloc_id) + .get_bytes(&self.tcx(), ptr, Size::from_bytes(n)).unwrap()) + }, + (ConstValue::Slice { data, start, end }, ty::Slice(t)) if *t == u8 => { + // The `inspect` here is okay since we checked the bounds, and there are + // no relocations (we have an active slice reference here). We don't use + // this result to affect interpreter execution. + Some(data.inspect_with_undef_and_ptr_outside_interpreter(start..end)) + }, + _ => None, + }; + + if let Some(byte_str) = byte_str { + p!(write("b\"")); + for &c in byte_str { + for e in std::ascii::escape_default(c) { + self.write_char(e as char)?; + } + } + p!(write("\"")); + true + } else if let (ConstValue::Slice { data, start, end }, ty::Str) = + (ct.val, &ref_ty.kind) + { + // The `inspect` here is okay since we checked the bounds, and there are no + // relocations (we have an active `str` reference here). We don't use this + // result to affect interpreter execution. + let slice = data.inspect_with_undef_and_ptr_outside_interpreter(start..end); + let s = ::std::str::from_utf8(slice) + .expect("non utf8 str from miri"); + p!(write("{:?}", s)); + true + } else { + false } } else { - p!(write("_: "), print(ct.ty)) - }, - } - return Ok(self); - } - if let ConstValue::Infer(..) = ct.val { - p!(write("_: "), print(ct.ty)); - return Ok(self); - } - if let ConstValue::Param(ParamConst { name, .. }) = ct.val { - p!(write("{}", name)); - return Ok(self); - } - if let ConstValue::Scalar(Scalar::Raw { data, .. 
}) = ct.val { - match ct.ty.sty { - ty::Bool => { - p!(write("{}", if data == 0 { "false" } else { "true" })); - return Ok(self); - }, - ty::Float(ast::FloatTy::F32) => { - p!(write("{}f32", Single::from_bits(data))); - return Ok(self); - }, - ty::Float(ast::FloatTy::F64) => { - p!(write("{}f64", Double::from_bits(data))); - return Ok(self); - }, - ty::Uint(ui) => { - let bit_size = Integer::from_attr(&self.tcx(), UnsignedInt(ui)).size(); - let max = truncate(u128::max_value(), bit_size); - - if data == max { - p!(write("std::{}::MAX", ui)) - } else { - p!(write("{}{}", data, ui)) - }; - return Ok(self); - }, - ty::Int(i) =>{ - let bit_size = Integer::from_attr(&self.tcx(), SignedInt(i)) - .size().bits() as u128; - let min = 1u128 << (bit_size - 1); - let max = min - 1; - - let ty = self.tcx().lift_to_global(&ct.ty).unwrap(); - let size = self.tcx().layout_of(ty::ParamEnv::empty().and(ty)) - .unwrap() - .size; - match data { - d if d == min => p!(write("std::{}::MIN", i)), - d if d == max => p!(write("std::{}::MAX", i)), - _ => p!(write("{}{}", sign_extend(data, size) as i128, i)) - } - return Ok(self); - }, - ty::Char => { - p!(write("{:?}", ::std::char::from_u32(data as u32).unwrap())); - return Ok(self); + false + }; + if !printed { + // fallback + p!(write("{:?} : ", ct.val), print(ct.ty)) } - _ => {}, } - } - if let ty::Ref(_, ref_ty, _) = ct.ty.sty { - let byte_str = match (ct.val, &ref_ty.sty) { - (ConstValue::Scalar(Scalar::Ptr(ptr)), ty::Array(t, n)) if *t == u8 => { - let n = n.eval_usize(self.tcx(), ty::ParamEnv::empty()); - Some(self.tcx() - .alloc_map.lock() - .unwrap_memory(ptr.alloc_id) - .get_bytes(&self.tcx(), ptr, Size::from_bytes(n)).unwrap()) - }, - (ConstValue::Slice { data, start, end }, ty::Slice(t)) if *t == u8 => { - // The `inspect` here is okay since we checked the bounds, and there are no - // relocations (we have an active slice reference here). We don't use this - // result to affect interpreter execution. - Some(data.inspect_with_undef_and_ptr_outside_interpreter(start..end)) - }, - (ConstValue::Slice { data, start, end }, ty::Str) => { - // The `inspect` here is okay since we checked the bounds, and there are no - // relocations (we have an active `str` reference here). We don't use this - // result to affect interpreter execution. - let slice = data.inspect_with_undef_and_ptr_outside_interpreter(start..end); - let s = ::std::str::from_utf8(slice) - .expect("non utf8 str from miri"); - p!(write("{:?}", s)); - return Ok(self); - }, - _ => None, - }; - if let Some(byte_str) = byte_str { - p!(write("b\"")); - for &c in byte_str { - for e in std::ascii::escape_default(c) { - self.write_char(e as char)?; - } - } - p!(write("\"")); - return Ok(self); - } - } - p!(write("{:?} : ", ct.val), print(ct.ty)); - + }; Ok(self) } } @@ -989,7 +992,7 @@ pub struct FmtPrinterData<'a, 'tcx, F> { empty_path: bool, in_value: bool, - used_region_names: FxHashSet, + used_region_names: FxHashSet, region_index: usize, binder_depth: usize, @@ -1081,7 +1084,7 @@ impl Printer<'tcx> for FmtPrinter<'_, 'tcx, F> { fn print_def_path( mut self, def_id: DefId, - substs: &'tcx [Kind<'tcx>], + substs: &'tcx [GenericArg<'tcx>], ) -> Result { define_scoped_cx!(self); @@ -1219,7 +1222,7 @@ impl Printer<'tcx> for FmtPrinter<'_, 'tcx, F> { // FIXME(eddyb) `name` should never be empty, but it // currently is for `extern { ... }` "foreign modules". 
- let name = disambiguated_data.data.as_interned_str().as_str(); + let name = disambiguated_data.data.as_symbol().as_str(); if !name.is_empty() { if !self.empty_path { write!(self, "::")?; @@ -1245,20 +1248,20 @@ impl Printer<'tcx> for FmtPrinter<'_, 'tcx, F> { fn path_generic_args( mut self, print_prefix: impl FnOnce(Self) -> Result, - args: &[Kind<'tcx>], + args: &[GenericArg<'tcx>], ) -> Result { self = print_prefix(self)?; // Don't print `'_` if there's no unerased regions. let print_regions = args.iter().any(|arg| { match arg.unpack() { - UnpackedKind::Lifetime(r) => *r != ty::ReErased, + GenericArgKind::Lifetime(r) => *r != ty::ReErased, _ => false, } }); let args = args.iter().cloned().filter(|arg| { match arg.unpack() { - UnpackedKind::Lifetime(_) => print_regions, + GenericArgKind::Lifetime(_) => print_regions, _ => true, } }); @@ -1282,7 +1285,7 @@ impl PrettyPrinter<'tcx> for FmtPrinter<'_, 'tcx, F> { fn print_value_path( mut self, def_id: DefId, - substs: &'tcx [Kind<'tcx>], + substs: &'tcx [GenericArg<'tcx>], ) -> Result { let was_in_value = std::mem::replace(&mut self.in_value, true); self = self.print_def_path(def_id, substs)?; @@ -1329,16 +1332,16 @@ impl PrettyPrinter<'tcx> for FmtPrinter<'_, 'tcx, F> { match *region { ty::ReEarlyBound(ref data) => { - data.name.as_symbol() != kw::Invalid && - data.name.as_symbol() != kw::UnderscoreLifetime + data.name != kw::Invalid && + data.name != kw::UnderscoreLifetime } ty::ReLateBound(_, br) | ty::ReFree(ty::FreeRegion { bound_region: br, .. }) | ty::RePlaceholder(ty::Placeholder { name: br, .. }) => { if let ty::BrNamed(_, name) = br { - if name.as_symbol() != kw::Invalid && - name.as_symbol() != kw::UnderscoreLifetime { + if name != kw::Invalid && + name != kw::UnderscoreLifetime { return true; } } @@ -1394,7 +1397,7 @@ impl FmtPrinter<'_, '_, F> { // `explain_region()` or `note_and_explain_region()`. match *region { ty::ReEarlyBound(ref data) => { - if data.name.as_symbol() != kw::Invalid { + if data.name != kw::Invalid { p!(write("{}", data.name)); return Ok(self); } @@ -1403,8 +1406,8 @@ impl FmtPrinter<'_, '_, F> { ty::ReFree(ty::FreeRegion { bound_region: br, .. }) | ty::RePlaceholder(ty::Placeholder { name: br, .. }) => { if let ty::BrNamed(_, name) = br { - if name.as_symbol() != kw::Invalid && - name.as_symbol() != kw::UnderscoreLifetime { + if name != kw::Invalid && + name != kw::UnderscoreLifetime { p!(write("{}", name)); return Ok(self); } @@ -1471,16 +1474,16 @@ impl FmtPrinter<'_, 'tcx, F> { where T: Print<'tcx, Self, Output = Self, Error = fmt::Error> + TypeFoldable<'tcx>, { - fn name_by_region_index(index: usize) -> InternedString { + fn name_by_region_index(index: usize) -> Symbol { match index { - 0 => InternedString::intern("'r"), - 1 => InternedString::intern("'s"), - i => InternedString::intern(&format!("'t{}", i-2)), + 0 => Symbol::intern("'r"), + 1 => Symbol::intern("'s"), + i => Symbol::intern(&format!("'t{}", i-2)), } } // Replace any anonymous late-bound regions with named - // variants, using gensym'd identifiers, so that we can + // variants, using new unique identifiers, so that we can // clearly differentiate between named and unnamed regions in // the output. We'll probably want to tweak this over time to // decide just how much information to give. 
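As an aside for readers of this hunk: the region-naming scheme above is easy to try in isolation. Below is a minimal standalone sketch (not part of the patch) that mirrors `name_by_region_index`, with a plain `String` standing in for the compiler's `Symbol` type.

// Standalone sketch: how anonymous late-bound regions get display names.
fn name_by_region_index(index: usize) -> String {
    match index {
        0 => "'r".to_string(),
        1 => "'s".to_string(),
        i => format!("'t{}", i - 2),
    }
}

fn main() {
    // The first two anonymous regions print as 'r and 's; later ones as 't0, 't1, ...
    assert_eq!(name_by_region_index(0), "'r");
    assert_eq!(name_by_region_index(1), "'s");
    assert_eq!(name_by_region_index(4), "'t2");
}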
@@ -1538,7 +1541,7 @@ impl FmtPrinter<'_, 'tcx, F> { where T: TypeFoldable<'tcx> { - struct LateBoundRegionNameCollector<'a>(&'a mut FxHashSet); + struct LateBoundRegionNameCollector<'a>(&'a mut FxHashSet); impl<'tcx> ty::fold::TypeVisitor<'tcx> for LateBoundRegionNameCollector<'_> { fn visit_region(&mut self, r: ty::Region<'tcx>) -> bool { match *r { @@ -1778,11 +1781,11 @@ define_print_and_forward_display! { } } - Kind<'tcx> { + GenericArg<'tcx> { match self.unpack() { - UnpackedKind::Lifetime(lt) => p!(print(lt)), - UnpackedKind::Type(ty) => p!(print(ty)), - UnpackedKind::Const(ct) => p!(print(ct)), + GenericArgKind::Lifetime(lt) => p!(print(lt)), + GenericArgKind::Type(ty) => p!(print(ty)), + GenericArgKind::Const(ct) => p!(print(ct)), } } } diff --git a/src/librustc/ty/query/config.rs b/src/librustc/ty/query/config.rs index 91082c59ba..c1c6a655d9 100644 --- a/src/librustc/ty/query/config.rs +++ b/src/librustc/ty/query/config.rs @@ -73,6 +73,17 @@ impl<'tcx, M: QueryAccessors<'tcx, Key = DefId>> QueryDescription<'tcx> for M { format!("processing {:?} with query `{}`", def_id, name).into() } } + + default fn cache_on_disk(_: TyCtxt<'tcx>, _: Self::Key, _: Option<&Self::Value>) -> bool { + false + } + + default fn try_load_from_disk( + _: TyCtxt<'tcx>, + _: SerializedDepNodeIndex, + ) -> Option { + bug!("QueryDescription::load_from_disk() called for an unsupported query.") + } } impl<'tcx> QueryDescription<'tcx> for queries::analysis<'tcx> { diff --git a/src/librustc/ty/query/job.rs b/src/librustc/ty/query/job.rs index a25560ff76..391ea762a0 100644 --- a/src/librustc/ty/query/job.rs +++ b/src/librustc/ty/query/job.rs @@ -334,13 +334,13 @@ fn pick_query<'a, 'tcx, T, F: Fn(&T) -> (Span, Lrc>)>( let mut hcx = tcx.create_stable_hashing_context(); queries.iter().min_by_key(|v| { let (span, query) = f(v); - let mut stable_hasher = StableHasher::::new(); + let mut stable_hasher = StableHasher::new(); query.info.query.hash_stable(&mut hcx, &mut stable_hasher); // Prefer entry points which have valid spans for nicer error messages // We add an integer to the tuple ensuring that entry points // with valid spans are picked first let span_cmp = if span == DUMMY_SP { 1 } else { 0 }; - (span_cmp, stable_hasher.finish()) + (span_cmp, stable_hasher.finish::()) }).unwrap() } diff --git a/src/librustc/ty/query/keys.rs b/src/librustc/ty/query/keys.rs index 30a3e53ddd..a9e0a5d6ab 100644 --- a/src/librustc/ty/query/keys.rs +++ b/src/librustc/ty/query/keys.rs @@ -8,14 +8,12 @@ use crate::ty::subst::SubstsRef; use crate::ty::fast_reject::SimplifiedType; use crate::mir; -use std::fmt::Debug; -use std::hash::Hash; use syntax_pos::{Span, DUMMY_SP}; -use syntax_pos::symbol::InternedString; +use syntax_pos::symbol::Symbol; /// The `Key` trait controls what types can legally be used as the key /// for a query. -pub(super) trait Key: Clone + Hash + Eq + Debug { +pub(super) trait Key { /// Given an instance of this key, what crate is it referring to? /// This is used to find the provider. fn query_crate(&self) -> CrateNum; @@ -190,7 +188,7 @@ impl<'tcx> Key for traits::Environment<'tcx> { } } -impl Key for InternedString { +impl Key for Symbol { fn query_crate(&self) -> CrateNum { LOCAL_CRATE } @@ -201,10 +199,17 @@ impl Key for InternedString { /// Canonical query goals correspond to abstract trait operations that /// are not tied to any crate in particular. 
-impl<'tcx, T> Key for Canonical<'tcx, T> -where - T: Debug + Hash + Clone + Eq, -{ +impl<'tcx, T> Key for Canonical<'tcx, T> { + fn query_crate(&self) -> CrateNum { + LOCAL_CRATE + } + + fn default_span(&self, _tcx: TyCtxt<'_>) -> Span { + DUMMY_SP + } +} + +impl Key for (Symbol, u32, u32) { fn query_crate(&self) -> CrateNum { LOCAL_CRATE } diff --git a/src/librustc/ty/query/mod.rs b/src/librustc/ty/query/mod.rs index fb2ad2aa54..9b15ad560b 100644 --- a/src/librustc/ty/query/mod.rs +++ b/src/librustc/ty/query/mod.rs @@ -4,7 +4,6 @@ use crate::hir::def::{DefKind, Export}; use crate::hir::{self, TraitCandidate, ItemLocalId, CodegenFnAttrs}; use crate::infer::canonical::{self, Canonical}; use crate::lint; -use crate::middle::borrowck::{BorrowCheckResult, SignalledError}; use crate::middle::cstore::{ExternCrate, LinkagePreference, NativeLibrary, ForeignModule}; use crate::middle::cstore::{NativeLibraryKind, DepKind, CrateSource}; use crate::middle::privacy::AccessLevels; @@ -38,13 +37,13 @@ use crate::ty::{self, CrateInherentImpls, ParamEnvAnd, Ty, TyCtxt, AdtSizedConst use crate::ty::steal::Steal; use crate::ty::util::NeedsDrop; use crate::ty::subst::SubstsRef; -use crate::util::nodemap::{DefIdSet, DefIdMap, ItemLocalSet}; -use crate::util::common::{ErrorReported}; +use crate::util::nodemap::{DefIdSet, DefIdMap}; +use crate::util::common::ErrorReported; use crate::util::profiling::ProfileCategory::*; use rustc_data_structures::svh::Svh; -use rustc_data_structures::bit_set::BitSet; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc_index::bit_set::BitSet; +use rustc_index::vec::IndexVec; use rustc_data_structures::fx::{FxIndexMap, FxHashMap, FxHashSet}; use rustc_data_structures::stable_hasher::StableVec; use rustc_data_structures::sync::Lrc; @@ -56,7 +55,6 @@ use std::ops::Deref; use std::sync::Arc; use std::any::type_name; use syntax_pos::{Span, DUMMY_SP}; -use syntax_pos::symbol::InternedString; use syntax::attr; use syntax::ast; use syntax::feature_gate; diff --git a/src/librustc/ty/query/on_disk_cache.rs b/src/librustc/ty/query/on_disk_cache.rs index c20e758688..21a7cf00b2 100644 --- a/src/librustc/ty/query/on_disk_cache.rs +++ b/src/librustc/ty/query/on_disk_cache.rs @@ -15,7 +15,7 @@ use errors::Diagnostic; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::thin_vec::ThinVec; use rustc_data_structures::sync::{Lrc, Lock, HashMapExt, Once}; -use rustc_data_structures::indexed_vec::{IndexVec, Idx}; +use rustc_index::vec::{IndexVec, Idx}; use rustc_serialize::{ Decodable, Decoder, Encodable, Encoder, SpecializedDecoder, SpecializedEncoder, UseSpecializedDecodable, UseSpecializedEncodable, opaque, @@ -882,15 +882,16 @@ where } } -impl<'a, 'tcx, E> SpecializedEncoder> for CacheEncoder<'a, 'tcx, E> +impl<'a, 'tcx, E> SpecializedEncoder<&'tcx [(ty::Predicate<'tcx>, Span)]> + for CacheEncoder<'a, 'tcx, E> where E: 'a + TyEncoder, { #[inline] fn specialized_encode(&mut self, - predicates: &ty::GenericPredicates<'tcx>) + predicates: &&'tcx [(ty::Predicate<'tcx>, Span)]) -> Result<(), Self::Error> { - ty_codec::encode_predicates(self, predicates, + ty_codec::encode_spanned_predicates(self, predicates, |encoder| &mut encoder.predicate_shorthands) } } @@ -1075,7 +1076,7 @@ where let desc = &format!("encode_query_results for {}", ::std::any::type_name::()); - time_ext(tcx.sess.time_extended(), Some(tcx.sess), desc, || { + time_ext(tcx.sess.time_extended(), desc, || { let shards = Q::query_cache(tcx).lock_shards(); assert!(shards.iter().all(|shard| 
shard.active.is_empty())); for (key, entry) in shards.iter().flat_map(|shard| shard.results.iter()) { diff --git a/src/librustc/ty/query/plumbing.rs b/src/librustc/ty/query/plumbing.rs index a1828bb5ab..538154b035 100644 --- a/src/librustc/ty/query/plumbing.rs +++ b/src/librustc/ty/query/plumbing.rs @@ -9,12 +9,11 @@ use crate::ty::query::Query; use crate::ty::query::config::{QueryConfig, QueryDescription}; use crate::ty::query::job::{QueryJob, QueryResult, QueryInfo}; -use crate::util::common::{profq_msg, ProfileQueriesMsg, QueryMsg}; - use errors::DiagnosticBuilder; use errors::Level; use errors::Diagnostic; use errors::FatalError; +use errors::Handler; use rustc_data_structures::fx::{FxHashMap}; use rustc_data_structures::sync::{Lrc, Lock}; use rustc_data_structures::sharded::Sharded; @@ -61,33 +60,6 @@ impl<'tcx, M: QueryConfig<'tcx>> Default for QueryCache<'tcx, M> { } } -// If enabled, sends a message to the profile-queries thread. -macro_rules! profq_msg { - ($tcx:expr, $msg:expr) => { - if cfg!(debug_assertions) { - if $tcx.sess.profile_queries() { - profq_msg($tcx.sess, $msg) - } - } - } -} - -// If enabled, formats a key using its debug string, which can be -// expensive to compute (in terms of time). -macro_rules! profq_query_msg { - ($query:expr, $tcx:expr, $key:expr) => {{ - let msg = if cfg!(debug_assertions) { - if $tcx.sess.profile_queries_and_keys() { - Some(format!("{:?}", $key)) - } else { None } - } else { None }; - QueryMsg { - query: $query, - msg, - } - }} -} - /// A type representing the responsibility to execute the job in the `job` field. /// This will poison the relevant query if dropped. pub(super) struct JobOwner<'a, 'tcx, Q: QueryDescription<'tcx>> { @@ -110,8 +82,7 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> { loop { let mut lock = cache.get_shard_by_value(key).lock(); if let Some(value) = lock.results.get(key) { - profq_msg!(tcx, ProfileQueriesMsg::CacheHit); - tcx.sess.profiler(|p| p.record_query_hit(Q::NAME)); + tcx.prof.query_cache_hit(Q::NAME); let result = (value.value.clone(), value.index); #[cfg(debug_assertions)] { @@ -119,6 +90,10 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> { } return TryGetJob::JobCompleted(result); } + + #[cfg(parallel_compiler)] + let query_blocked_prof_timer; + let job = match lock.active.entry((*key).clone()) { Entry::Occupied(entry) => { match *entry.get() { @@ -127,7 +102,9 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> { // in another thread has completed. Record how long we wait in the // self-profiler. #[cfg(parallel_compiler)] - tcx.sess.profiler(|p| p.query_blocked_start(Q::NAME)); + { + query_blocked_prof_timer = tcx.prof.query_blocked(Q::NAME); + } job.clone() }, @@ -169,7 +146,11 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> { #[cfg(parallel_compiler)] { let result = job.r#await(tcx, span); - tcx.sess.profiler(|p| p.query_blocked_end(Q::NAME)); + + // This `drop()` is not strictly necessary as the binding + // would go out of scope anyway. But it's good to have an + // explicit marker of how far the measurement goes. + drop(query_blocked_prof_timer); if let Err(cycle) = result { return TryGetJob::Cycle(Q::handle_cycle_error(tcx, cycle)); @@ -265,7 +246,7 @@ impl<'tcx> TyCtxt<'tcx> { tls::with_related_context(self, move |current_icx| { // Update the `ImplicitCtxt` to point to our new query job. 
let new_icx = tls::ImplicitCtxt { - tcx: self.global_tcx(), + tcx: self, query: Some(job), diagnostics, layout_depth: current_icx.layout_depth, @@ -274,7 +255,7 @@ impl<'tcx> TyCtxt<'tcx> { // Use the `ImplicitCtxt` while we execute the query. tls::enter_context(&new_icx, |_| { - compute(self.global_tcx()) + compute(self) }) }) } @@ -321,9 +302,12 @@ impl<'tcx> TyCtxt<'tcx> { }) } - pub fn try_print_query_stack() { + pub fn try_print_query_stack(handler: &Handler) { eprintln!("query stack during panic:"); + // Be careful relying on global state here: this code is called from + // a panic hook, which means that the global `Handler` may be in a weird + // state if it was responsible for triggering the panic. tls::with_context_opt(|icx| { if let Some(icx) = icx { let mut current_query = icx.query.clone(); @@ -336,7 +320,7 @@ impl<'tcx> TyCtxt<'tcx> { query.info.query.name(), query.info.query.describe(icx.tcx))); diag.span = icx.tcx.sess.source_map().def_span(query.info.span).into(); - icx.tcx.sess.diagnostic().force_print_diagnostic(diag); + handler.force_print_diagnostic(diag); current_query = query.parent.clone(); i += 1; @@ -354,13 +338,6 @@ impl<'tcx> TyCtxt<'tcx> { key, span); - profq_msg!(self, - ProfileQueriesMsg::QueryBegin( - span.data(), - profq_query_msg!(Q::NAME.as_str(), self, key), - ) - ); - let job = match JobOwner::try_get(self, span, &key) { TryGetJob::NotYetStarted(job) => job, TryGetJob::Cycle(result) => return result, @@ -378,19 +355,18 @@ impl<'tcx> TyCtxt<'tcx> { } if Q::ANON { - profq_msg!(self, ProfileQueriesMsg::ProviderBegin); - self.sess.profiler(|p| p.start_query(Q::NAME)); + + let prof_timer = self.prof.query_provider(Q::NAME); let ((result, dep_node_index), diagnostics) = with_diagnostics(|diagnostics| { self.start_query(job.job.clone(), diagnostics, |tcx| { tcx.dep_graph.with_anon_task(Q::dep_kind(), || { - Q::compute(tcx.global_tcx(), key) + Q::compute(tcx, key) }) }) }); - self.sess.profiler(|p| p.end_query(Q::NAME)); - profq_msg!(self, ProfileQueriesMsg::ProviderEnd); + drop(prof_timer); self.dep_graph.read_index(dep_node_index); @@ -445,11 +421,10 @@ impl<'tcx> TyCtxt<'tcx> { debug_assert!(self.dep_graph.is_green(dep_node)); // First we try to load the result from the on-disk cache. - let result = if Q::cache_on_disk(self.global_tcx(), key.clone(), None) && + let result = if Q::cache_on_disk(self, key.clone(), None) && self.sess.opts.debugging_opts.incremental_queries { - self.sess.profiler(|p| p.incremental_load_result_start(Q::NAME)); - let result = Q::try_load_from_disk(self.global_tcx(), prev_dep_node_index); - self.sess.profiler(|p| p.incremental_load_result_end(Q::NAME)); + let _prof_timer = self.prof.incr_cache_loading(Q::NAME); + let result = Q::try_load_from_disk(self, prev_dep_node_index); // We always expect to find a cached result for things that // can be forced from `DepNode`. @@ -464,22 +439,17 @@ impl<'tcx> TyCtxt<'tcx> { }; let result = if let Some(result) = result { - profq_msg!(self, ProfileQueriesMsg::CacheHit); - self.sess.profiler(|p| p.record_query_hit(Q::NAME)); - result } else { // We could not load a result from the on-disk cache, so // recompute. - - self.sess.profiler(|p| p.start_query(Q::NAME)); + let _prof_timer = self.prof.query_provider(Q::NAME); // The dep-graph for this computation is already in-place. 
let result = self.dep_graph.with_ignore(|| { Q::compute(self, key) }); - self.sess.profiler(|p| p.end_query(Q::NAME)); result }; @@ -489,10 +459,6 @@ impl<'tcx> TyCtxt<'tcx> { self.incremental_verify_ich::(&result, dep_node, dep_node_index); } - if unlikely!(self.sess.opts.debugging_opts.query_dep_graph) { - self.dep_graph.mark_loaded_from_cache(dep_node_index, true); - } - result } @@ -546,8 +512,7 @@ impl<'tcx> TyCtxt<'tcx> { - dep-node: {:?}", key, dep_node); - profq_msg!(self, ProfileQueriesMsg::ProviderBegin); - self.sess.profiler(|p| p.start_query(Q::NAME)); + let prof_timer = self.prof.query_provider(Q::NAME); let ((result, dep_node_index), diagnostics) = with_diagnostics(|diagnostics| { self.start_query(job.job.clone(), diagnostics, |tcx| { @@ -567,12 +532,7 @@ impl<'tcx> TyCtxt<'tcx> { }) }); - self.sess.profiler(|p| p.end_query(Q::NAME)); - profq_msg!(self, ProfileQueriesMsg::ProviderEnd); - - if unlikely!(self.sess.opts.debugging_opts.query_dep_graph) { - self.dep_graph.mark_loaded_from_cache(dep_node_index, false); - } + drop(prof_timer); if unlikely!(!diagnostics.is_empty()) { if dep_node.kind != crate::dep_graph::DepKind::Null { @@ -614,19 +574,12 @@ impl<'tcx> TyCtxt<'tcx> { let _ = self.get_query::(DUMMY_SP, key); } else { - profq_msg!(self, ProfileQueriesMsg::CacheHit); - self.sess.profiler(|p| p.record_query_hit(Q::NAME)); + self.prof.query_cache_hit(Q::NAME); } } #[allow(dead_code)] fn force_query>(self, key: Q::Key, span: Span, dep_node: DepNode) { - profq_msg!( - self, - ProfileQueriesMsg::QueryBegin(span.data(), - profq_query_msg!(Q::NAME.as_str(), self, key)) - ); - // We may be concurrently trying both execute and force a query. // Ensure that only one of them runs the query. let job = match JobOwner::try_get(self, span, &key) { @@ -643,7 +596,7 @@ impl<'tcx> TyCtxt<'tcx> { macro_rules! handle_cycle_error { ([][$tcx: expr, $error:expr]) => {{ $tcx.report_cycle($error).emit(); - Value::from_cycle_error($tcx.global_tcx()) + Value::from_cycle_error($tcx) }}; ([fatal_cycle$(, $modifiers:ident)*][$tcx:expr, $error:expr]) => {{ $tcx.report_cycle($error).emit(); @@ -652,7 +605,7 @@ macro_rules! handle_cycle_error { }}; ([cycle_delay_bug$(, $modifiers:ident)*][$tcx:expr, $error:expr]) => {{ $tcx.report_cycle($error).delay_as_bug(); - Value::from_cycle_error($tcx.global_tcx()) + Value::from_cycle_error($tcx) }}; ([$other:ident$(, $modifiers:ident)*][$($args:tt)*]) => { handle_cycle_error!([$($modifiers),*][$($args)*]) @@ -716,7 +669,6 @@ macro_rules! define_queries_inner { use rustc_data_structures::sharded::Sharded; use crate::{ rustc_data_structures::stable_hasher::HashStable, - rustc_data_structures::stable_hasher::StableHasherResult, rustc_data_structures::stable_hasher::StableHasher, ich::StableHashingContext }; @@ -859,7 +811,7 @@ macro_rules! define_queries_inner { } #[allow(nonstandard_style)] - #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] + #[derive(Clone, Copy)] pub enum QueryName { $($name),* } @@ -877,7 +829,7 @@ macro_rules! define_queries_inner { } #[allow(nonstandard_style)] - #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] + #[derive(Clone, Debug)] pub enum Query<$tcx> { $($(#[$attr])* $name($K)),* } @@ -925,9 +877,7 @@ macro_rules! 
define_queries_inner { } impl<'a, $tcx> HashStable> for Query<$tcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { $(Query::$name(key) => key.hash_stable(hcx, hasher),)* @@ -999,7 +949,7 @@ macro_rules! define_queries_inner { // would be missing appropriate entries in `providers`. .unwrap_or(&tcx.queries.fallback_extern_providers) .$name; - provider(tcx.global_tcx(), key) + provider(tcx, key) }) } @@ -1194,37 +1144,6 @@ pub fn force_from_dep_node(tcx: TyCtxt<'_>, dep_node: &DepNode) -> bool { return false } - macro_rules! def_id { - () => { - if let Some(def_id) = dep_node.extract_def_id(tcx) { - def_id - } else { - // Return from the whole function. - return false - } - } - }; - - macro_rules! krate { - () => { (def_id!()).krate } - }; - - macro_rules! force_ex { - ($tcx:expr, $query:ident, $key:expr) => { - { - $tcx.force_query::>( - $key, - DUMMY_SP, - *dep_node - ); - } - } - }; - - macro_rules! force { - ($query:ident, $key:expr) => { force_ex!(tcx, $query, $key) } - }; - rustc_dep_node_force!([dep_node, tcx] // These are inputs that are expected to be pre-allocated and that // should therefore always be red or green already. @@ -1243,7 +1162,19 @@ pub fn force_from_dep_node(tcx: TyCtxt<'_>, dep_node: &DepNode) -> bool { bug!("force_from_dep_node: encountered {:?}", dep_node) } - DepKind::Analysis => { force!(analysis, krate!()); } + DepKind::Analysis => { + let def_id = if let Some(def_id) = dep_node.extract_def_id(tcx) { + def_id + } else { + // Return from the whole function. + return false + }; + tcx.force_query::>( + def_id.krate, + DUMMY_SP, + *dep_node + ); + } ); true diff --git a/src/librustc/ty/query/values.rs b/src/librustc/ty/query/values.rs index 0149f75716..f0d1639f72 100644 --- a/src/librustc/ty/query/values.rs +++ b/src/librustc/ty/query/values.rs @@ -1,7 +1,7 @@ use crate::ty::{self, Ty, TyCtxt, AdtSizedConstraint}; use crate::ty::util::NeedsDrop; -use syntax::symbol::InternedString; +use syntax::symbol::Symbol; pub(super) trait Value<'tcx>: Sized { fn from_cycle_error(tcx: TyCtxt<'tcx>) -> Self; @@ -22,7 +22,7 @@ impl<'tcx> Value<'tcx> for Ty<'tcx> { impl<'tcx> Value<'tcx> for ty::SymbolName { fn from_cycle_error(_: TyCtxt<'tcx>) -> Self { - ty::SymbolName { name: InternedString::intern("") } + ty::SymbolName { name: Symbol::intern("") } } } diff --git a/src/librustc/ty/relate.rs b/src/librustc/ty/relate.rs index 565447dd7e..1da65f4b51 100644 --- a/src/librustc/ty/relate.rs +++ b/src/librustc/ty/relate.rs @@ -5,10 +5,10 @@ //! subtyping, type equality, etc. use crate::hir::def_id::DefId; -use crate::ty::subst::{Kind, UnpackedKind, SubstsRef}; +use crate::ty::subst::{GenericArg, GenericArgKind, SubstsRef}; use crate::ty::{self, Ty, TyCtxt, TypeFoldable}; use crate::ty::error::{ExpectedFound, TypeError}; -use crate::mir::interpret::{ConstValue, Scalar}; +use crate::mir::interpret::{ConstValue, get_slice_bytes}; use std::rc::Rc; use std::iter; use rustc_target::spec::abi; @@ -349,7 +349,7 @@ pub fn super_relate_tys>( ) -> RelateResult<'tcx, Ty<'tcx>> { let tcx = relation.tcx(); debug!("super_relate_tys: a={:?} b={:?}", a, b); - match (&a.sty, &b.sty) { + match (&a.kind, &b.kind) { (&ty::Infer(_), _) | (_, &ty::Infer(_)) => { @@ -442,7 +442,7 @@ pub fn super_relate_tys>( // the (anonymous) type of the same closure expression. So // all of their regions should be equated. 
let substs = relation.relate(&a_substs, &b_substs)?; - Ok(tcx.mk_closure(a_id, substs)) + Ok(tcx.mk_closure(a_id, &substs)) } (&ty::RawPtr(ref a_mt), &ty::RawPtr(ref b_mt)) => @@ -557,48 +557,62 @@ pub fn super_relate_consts>( x.val }; - // Currently, the values that can be unified are those that - // implement both `PartialEq` and `Eq`, corresponding to - // `structural_match` types. - // FIXME(const_generics): check for `structural_match` synthetic attribute. - match (eagerly_eval(a), eagerly_eval(b)) { + // Currently, the values that can be unified are primitive types, + // and those that derive both `PartialEq` and `Eq`, corresponding + // to `structural_match` types. + let new_const_val = match (eagerly_eval(a), eagerly_eval(b)) { (ConstValue::Infer(_), _) | (_, ConstValue::Infer(_)) => { // The caller should handle these cases! bug!("var types encountered in super_relate_consts: {:?} {:?}", a, b) } (ConstValue::Param(a_p), ConstValue::Param(b_p)) if a_p.index == b_p.index => { - Ok(a) + return Ok(a); } (ConstValue::Placeholder(p1), ConstValue::Placeholder(p2)) if p1 == p2 => { - Ok(a) + return Ok(a); } - (a_val @ ConstValue::Scalar(Scalar::Raw { .. }), b_val @ _) - if a.ty == b.ty && a_val == b_val => - { - Ok(tcx.mk_const(ty::Const { - val: a_val, - ty: a.ty, - })) + (ConstValue::Scalar(a_val), ConstValue::Scalar(b_val)) if a.ty == b.ty => { + if a_val == b_val { + Ok(ConstValue::Scalar(a_val)) + } else if let ty::FnPtr(_) = a.ty.kind { + let alloc_map = tcx.alloc_map.lock(); + let a_instance = alloc_map.unwrap_fn(a_val.to_ptr().unwrap().alloc_id); + let b_instance = alloc_map.unwrap_fn(b_val.to_ptr().unwrap().alloc_id); + if a_instance == b_instance { + Ok(ConstValue::Scalar(a_val)) + } else { + Err(TypeError::ConstMismatch(expected_found(relation, &a, &b))) + } + } else { + Err(TypeError::ConstMismatch(expected_found(relation, &a, &b))) + } } - // FIXME(const_generics): we should either handle `Scalar::Ptr` or add a comment - // saying that we're not handling it intentionally. + (a_val @ ConstValue::Slice { .. }, b_val @ ConstValue::Slice { .. }) => { + let a_bytes = get_slice_bytes(&tcx, a_val); + let b_bytes = get_slice_bytes(&tcx, b_val); + if a_bytes == b_bytes { + Ok(a_val) + } else { + Err(TypeError::ConstMismatch(expected_found(relation, &a, &b))) + } + } - // FIXME(const_generics): handle `ConstValue::ByRef` and `ConstValue::Slice`. + // FIXME(const_generics): handle `ConstValue::ByRef`. 
// FIXME(const_generics): this is wrong, as it is a projection (ConstValue::Unevaluated(a_def_id, a_substs), ConstValue::Unevaluated(b_def_id, b_substs)) if a_def_id == b_def_id => { - let substs = - relation.relate_with_variance(ty::Variance::Invariant, &a_substs, &b_substs)?; - Ok(tcx.mk_const(ty::Const { - val: ConstValue::Unevaluated(a_def_id, &substs), - ty: a.ty, - })) - } - - _ => Err(TypeError::ConstMismatch(expected_found(relation, &a, &b))), - } + let substs = + relation.relate_with_variance(ty::Variance::Invariant, &a_substs, &b_substs)?; + Ok(ConstValue::Unevaluated(a_def_id, &substs)) + } + _ => Err(TypeError::ConstMismatch(expected_found(relation, &a, &b))), + }; + new_const_val.map(|val| tcx.mk_const(ty::Const { + val, + ty: a.ty, + })) } impl<'tcx> Relate<'tcx> for &'tcx ty::List> { @@ -711,29 +725,29 @@ impl<'tcx, T: Relate<'tcx>> Relate<'tcx> for Box { } } -impl<'tcx> Relate<'tcx> for Kind<'tcx> { +impl<'tcx> Relate<'tcx> for GenericArg<'tcx> { fn relate>( relation: &mut R, - a: &Kind<'tcx>, - b: &Kind<'tcx>, - ) -> RelateResult<'tcx, Kind<'tcx>> { + a: &GenericArg<'tcx>, + b: &GenericArg<'tcx>, + ) -> RelateResult<'tcx, GenericArg<'tcx>> { match (a.unpack(), b.unpack()) { - (UnpackedKind::Lifetime(a_lt), UnpackedKind::Lifetime(b_lt)) => { + (GenericArgKind::Lifetime(a_lt), GenericArgKind::Lifetime(b_lt)) => { Ok(relation.relate(&a_lt, &b_lt)?.into()) } - (UnpackedKind::Type(a_ty), UnpackedKind::Type(b_ty)) => { + (GenericArgKind::Type(a_ty), GenericArgKind::Type(b_ty)) => { Ok(relation.relate(&a_ty, &b_ty)?.into()) } - (UnpackedKind::Const(a_ct), UnpackedKind::Const(b_ct)) => { + (GenericArgKind::Const(a_ct), GenericArgKind::Const(b_ct)) => { Ok(relation.relate(&a_ct, &b_ct)?.into()) } - (UnpackedKind::Lifetime(unpacked), x) => { + (GenericArgKind::Lifetime(unpacked), x) => { bug!("impossible case reached: can't relate: {:?} with {:?}", unpacked, x) } - (UnpackedKind::Type(unpacked), x) => { + (GenericArgKind::Type(unpacked), x) => { bug!("impossible case reached: can't relate: {:?} with {:?}", unpacked, x) } - (UnpackedKind::Const(unpacked), x) => { + (GenericArgKind::Const(unpacked), x) => { bug!("impossible case reached: can't relate: {:?} with {:?}", unpacked, x) } } diff --git a/src/librustc/ty/structural_impls.rs b/src/librustc/ty/structural_impls.rs index ec7cf1a13c..5d78d563e9 100644 --- a/src/librustc/ty/structural_impls.rs +++ b/src/librustc/ty/structural_impls.rs @@ -9,7 +9,7 @@ use crate::mir::interpret::ConstValue; use crate::ty::{self, Lift, Ty, TyCtxt, InferConst}; use crate::ty::fold::{TypeFoldable, TypeFolder, TypeVisitor}; use crate::ty::print::{FmtPrinter, Printer}; -use rustc_data_structures::indexed_vec::{IndexVec, Idx}; +use rustc_index::vec::{IndexVec, Idx}; use smallvec::SmallVec; use crate::mir::interpret; @@ -749,6 +749,7 @@ impl<'a, 'tcx> Lift<'tcx> for ty::error::TypeError<'a> { ExistentialMismatch(ref x) => return tcx.lift(x).map(ExistentialMismatch), ConstMismatch(ref x) => return tcx.lift(x).map(ConstMismatch), IntrinsicCast => IntrinsicCast, + ObjectUnsafeCoercion(ref x) => return tcx.lift(x).map(ObjectUnsafeCoercion), }) } } @@ -761,6 +762,8 @@ impl<'a, 'tcx> Lift<'tcx> for ty::InstanceDef<'a> { Some(ty::InstanceDef::Item(def_id)), ty::InstanceDef::VtableShim(def_id) => Some(ty::InstanceDef::VtableShim(def_id)), + ty::InstanceDef::ReifyShim(def_id) => + Some(ty::InstanceDef::ReifyShim(def_id)), ty::InstanceDef::Intrinsic(def_id) => Some(ty::InstanceDef::Intrinsic(def_id)), ty::InstanceDef::FnPtrShim(def_id, ref ty) => @@ -966,6 +969,7 @@ 
impl<'tcx> TypeFoldable<'tcx> for ty::instance::Instance<'tcx> { def: match self.def { Item(did) => Item(did.fold_with(folder)), VtableShim(did) => VtableShim(did.fold_with(folder)), + ReifyShim(did) => ReifyShim(did.fold_with(folder)), Intrinsic(did) => Intrinsic(did.fold_with(folder)), FnPtrShim(did, ty) => FnPtrShim( did.fold_with(folder), @@ -994,7 +998,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::instance::Instance<'tcx> { use crate::ty::InstanceDef::*; self.substs.visit_with(visitor) || match self.def { - Item(did) | VtableShim(did) | Intrinsic(did) | Virtual(did, _) => { + Item(did) | VtableShim(did) | ReifyShim(did) | Intrinsic(did) | Virtual(did, _) => { did.visit_with(visitor) }, FnPtrShim(did, ty) | CloneShim(did, ty) => { @@ -1023,7 +1027,7 @@ impl<'tcx> TypeFoldable<'tcx> for interpret::GlobalId<'tcx> { impl<'tcx> TypeFoldable<'tcx> for Ty<'tcx> { fn super_fold_with>(&self, folder: &mut F) -> Self { - let sty = match self.sty { + let kind = match self.kind { ty::RawPtr(tm) => ty::RawPtr(tm.fold_with(folder)), ty::Array(typ, sz) => ty::Array(typ.fold_with(folder), sz.fold_with(folder)), ty::Slice(typ) => ty::Slice(typ.fold_with(folder)), @@ -1064,13 +1068,13 @@ impl<'tcx> TypeFoldable<'tcx> for Ty<'tcx> { ty::Bound(..) | ty::Placeholder(..) | ty::Never | - ty::Foreign(..) => return self + ty::Foreign(..) => return self, }; - if self.sty == sty { + if self.kind == kind { self } else { - folder.tcx().mk_ty(sty) + folder.tcx().mk_ty(kind) } } @@ -1079,7 +1083,7 @@ impl<'tcx> TypeFoldable<'tcx> for Ty<'tcx> { } fn super_visit_with>(&self, visitor: &mut V) -> bool { - match self.sty { + match self.kind { ty::RawPtr(ref tm) => tm.visit_with(visitor), ty::Array(typ, sz) => typ.visit_with(visitor) || sz.visit_with(visitor), ty::Slice(typ) => typ.visit_with(visitor), @@ -1215,16 +1219,23 @@ EnumTypeFoldableImpl! { } } -BraceStructTypeFoldableImpl! { - impl<'tcx> TypeFoldable<'tcx> for ty::GenericPredicates<'tcx> { - parent, predicates - } -} - impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List> { fn super_fold_with>(&self, folder: &mut F) -> Self { - let v = self.iter().map(|p| p.fold_with(folder)).collect::>(); - folder.tcx().intern_predicates(&v) + // This code is hot enough that it's worth specializing for a list of + // length 0. (No other length is common enough to be worth singling + // out). + if self.len() == 0 { + self + } else { + // Don't bother interning if nothing changed, which is the common + // case. + let v = self.iter().map(|p| p.fold_with(folder)).collect::>(); + if v[..] == self[..] { + self + } else { + folder.tcx().intern_predicates(&v) + } + } } fn super_visit_with>(&self, visitor: &mut V) -> bool { @@ -1340,6 +1351,7 @@ EnumTypeFoldableImpl! 
{ (ty::error::TypeError::ExistentialMismatch)(x), (ty::error::TypeError::ConstMismatch)(x), (ty::error::TypeError::IntrinsicCast), + (ty::error::TypeError::ObjectUnsafeCoercion)(x), } } @@ -1369,27 +1381,23 @@ impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::Const<'tcx> { impl<'tcx> TypeFoldable<'tcx> for ConstValue<'tcx> { fn super_fold_with>(&self, folder: &mut F) -> Self { match *self { - ConstValue::ByRef { alloc, offset } => - ConstValue::ByRef { alloc, offset }, ConstValue::Infer(ic) => ConstValue::Infer(ic.fold_with(folder)), ConstValue::Param(p) => ConstValue::Param(p.fold_with(folder)), - ConstValue::Placeholder(p) => ConstValue::Placeholder(p), - ConstValue::Scalar(a) => ConstValue::Scalar(a), - ConstValue::Slice { data, start, end } => ConstValue::Slice { data, start, end }, ConstValue::Unevaluated(did, substs) => ConstValue::Unevaluated(did, substs.fold_with(folder)), + ConstValue::ByRef { .. } | ConstValue::Bound(..) | ConstValue::Placeholder(..) + | ConstValue::Scalar(..) | ConstValue::Slice { .. } => *self, + } } fn super_visit_with>(&self, visitor: &mut V) -> bool { match *self { - ConstValue::ByRef { .. } => false, ConstValue::Infer(ic) => ic.visit_with(visitor), ConstValue::Param(p) => p.visit_with(visitor), - ConstValue::Placeholder(_) => false, - ConstValue::Scalar(_) => false, - ConstValue::Slice { .. } => false, ConstValue::Unevaluated(_, substs) => substs.visit_with(visitor), + ConstValue::ByRef { .. } | ConstValue::Bound(..) | ConstValue::Placeholder(_) + | ConstValue::Scalar(_) | ConstValue::Slice { .. } => false, } } } diff --git a/src/librustc/ty/structural_match.rs b/src/librustc/ty/structural_match.rs new file mode 100644 index 0000000000..cdf5734f5a --- /dev/null +++ b/src/librustc/ty/structural_match.rs @@ -0,0 +1,209 @@ +use crate::hir; +use rustc::infer::InferCtxt; +use rustc::traits::{self, ConstPatternStructural, TraitEngine}; +use rustc::traits::ObligationCause; + +use rustc_data_structures::fx::{FxHashSet}; + +use syntax_pos::Span; + +use crate::ty::{self, AdtDef, Ty, TyCtxt}; +use crate::ty::fold::{TypeFoldable, TypeVisitor}; + +#[derive(Debug)] +pub enum NonStructuralMatchTy<'tcx> { + Adt(&'tcx AdtDef), + Param, +} + +/// This method traverses the structure of `ty`, trying to find an +/// instance of an ADT (i.e. struct or enum) that was declared without +/// the `#[structural_match]` attribute, or a generic type parameter +/// (which cannot be determined to be `structural_match`). +/// +/// The "structure of a type" includes all components that would be +/// considered when doing a pattern match on a constant of that +/// type. +/// +/// * This means this method descends into fields of structs/enums, +/// and also descends into the inner type `T` of `&T` and `&mut T` +/// +/// * The traversal doesn't dereference unsafe pointers (`*const T`, +/// `*mut T`), and it does not visit the type arguments of an +/// instantiated generic like `PhantomData`. +/// +/// The reason we do this search is that Rust currently requires all ADTs +/// reachable from a constant's type to be annotated with +/// `#[structural_match]`, an attribute which essentially says that +/// the implementation of `PartialEq::eq` behaves *equivalently* to a +/// comparison against the unfolded structure. +/// +/// For more background on why Rust has this requirement, and issues +/// that arose when the requirement was not enforced completely, see +/// Rust RFC 1445, rust-lang/rust#61188, and rust-lang/rust#62307. 
+pub fn search_for_structural_match_violation<'tcx>( + id: hir::HirId, + span: Span, + tcx: TyCtxt<'tcx>, + ty: Ty<'tcx>, +) -> Option> { + // FIXME: we should instead pass in an `infcx` from the outside. + tcx.infer_ctxt().enter(|infcx| { + let mut search = Search { id, span, infcx, found: None, seen: FxHashSet::default() }; + ty.visit_with(&mut search); + search.found + }) +} + +/// This method returns true if and only if `adt_ty` itself has been marked as +/// eligible for structural-match: namely, if it implements both +/// `StructuralPartialEq` and `StructuralEq` (which are respectively injected by +/// `#[derive(PartialEq)]` and `#[derive(Eq)]`). +/// +/// Note that this does *not* recursively check if the substructure of `adt_ty` +/// implements the traits. +pub fn type_marked_structural(id: hir::HirId, + span: Span, + infcx: &InferCtxt<'_, 'tcx>, + adt_ty: Ty<'tcx>) + -> bool +{ + let mut fulfillment_cx = traits::FulfillmentContext::new(); + let cause = ObligationCause::new(span, id, ConstPatternStructural); + // require `#[derive(PartialEq)]` + let structural_peq_def_id = infcx.tcx.lang_items().structural_peq_trait().unwrap(); + fulfillment_cx.register_bound( + infcx, ty::ParamEnv::empty(), adt_ty, structural_peq_def_id, cause); + // for now, require `#[derive(Eq)]`. (Doing so is a hack to work around + // the type `for<'a> fn(&'a ())` failing to implement `Eq` itself.) + let cause = ObligationCause::new(span, id, ConstPatternStructural); + let structural_teq_def_id = infcx.tcx.lang_items().structural_teq_trait().unwrap(); + fulfillment_cx.register_bound( + infcx, ty::ParamEnv::empty(), adt_ty, structural_teq_def_id, cause); + + // We deliberately skip *reporting* fulfillment errors (via + // `report_fulfillment_errors`), for two reasons: + // + // 1. The error messages would mention `std::marker::StructuralPartialEq` + // (a trait which is solely meant as an implementation detail + // for now), and + // + // 2. We are sometimes doing future-incompatibility lints for + // now, so we do not want unconditional errors here. + fulfillment_cx.select_all_or_error(infcx).is_ok() +} + +/// This implements the traversal over the structure of a given type to try to +/// find instances of ADTs (specifically structs or enums) that do not implement +/// the structural-match traits (`StructuralPartialEq` and `StructuralEq`). +struct Search<'a, 'tcx> { + id: hir::HirId, + span: Span, + + infcx: InferCtxt<'a, 'tcx>, + + /// Records first ADT that does not implement a structural-match trait. + found: Option>, + + /// Tracks ADTs previously encountered during search, so that + /// we will not recur on them again. + seen: FxHashSet, +} + +impl Search<'a, 'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.infcx.tcx + } + + fn type_marked_structural(&self, adt_ty: Ty<'tcx>) -> bool { + type_marked_structural(self.id, self.span, &self.infcx, adt_ty) + } +} + +impl<'a, 'tcx> TypeVisitor<'tcx> for Search<'a, 'tcx> { + fn visit_ty(&mut self, ty: Ty<'tcx>) -> bool { + debug!("Search visiting ty: {:?}", ty); + + let (adt_def, substs) = match ty.kind { + ty::Adt(adt_def, substs) => (adt_def, substs), + ty::Param(_) => { + self.found = Some(NonStructuralMatchTy::Param); + return true; // Stop visiting. + } + ty::RawPtr(..) => { + // structural-match ignores substructure of + // `*const _`/`*mut _`, so skip `super_visit_with`. 
+ // + // For example, if you have: + // ``` + // struct NonStructural; + // #[derive(PartialEq, Eq)] + // struct T(*const NonStructural); + // const C: T = T(std::ptr::null()); + // ``` + // + // Even though `NonStructural` does not implement `PartialEq`, + // structural equality on `T` does not recur into the raw + // pointer. Therefore, one can still use `C` in a pattern. + + // (But still tell caller to continue search.) + return false; + } + ty::FnDef(..) | ty::FnPtr(..) => { + // types of formals and return in `fn(_) -> _` are also irrelevant; + // so we do not recur into them via `super_visit_with` + // + // (But still tell caller to continue search.) + return false; + } + ty::Array(_, n) if { + n.try_eval_usize(self.tcx(), ty::ParamEnv::reveal_all()) == Some(0) + } => { + // rust-lang/rust#62336: ignore type of contents + // for empty array. + return false; + } + _ => { + ty.super_visit_with(self); + return false; + } + }; + + if !self.seen.insert(adt_def.did) { + debug!("Search already seen adt_def: {:?}", adt_def); + // let caller continue its search + return false; + } + + if !self.type_marked_structural(ty) { + debug!("Search found ty: {:?}", ty); + self.found = Some(NonStructuralMatchTy::Adt(&adt_def)); + return true; // Halt visiting! + } + + // structural-match does not care about the + // instantiation of the generics in an ADT (it + // instead looks directly at its fields outside + // this match), so we skip super_visit_with. + // + // (Must not recur on substs for `PhantomData` cf + // rust-lang/rust#55028 and rust-lang/rust#55837; but also + // want to skip substs when only uses of generic are + // behind unsafe pointers `*const T`/`*mut T`.) + + // even though we skip super_visit_with, we must recur on + // fields of ADT. + let tcx = self.tcx(); + for field_ty in adt_def.all_fields().map(|field| field.ty(tcx, substs)) { + if field_ty.visit_with(self) { + // found an ADT without structural-match; halt visiting! + assert!(self.found.is_some()); + return true; + } + } + + // Even though we do not want to recur on substs, we do + // want our caller to continue its own search. + false + } +} diff --git a/src/librustc/ty/sty.rs b/src/librustc/ty/sty.rs index e73a51e6f7..3a9994ac64 100644 --- a/src/librustc/ty/sty.rs +++ b/src/librustc/ty/sty.rs @@ -8,9 +8,9 @@ use crate::infer::canonical::Canonical; use crate::mir::interpret::ConstValue; use crate::middle::region; use polonius_engine::Atom; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::vec::Idx; use rustc_macros::HashStable; -use crate::ty::subst::{InternalSubsts, Subst, SubstsRef, Kind, UnpackedKind}; +use crate::ty::subst::{InternalSubsts, Subst, SubstsRef, GenericArg, GenericArgKind}; use crate::ty::{self, AdtDef, Discr, DefIdTree, TypeFlags, Ty, TyCtxt, TypeFoldable}; use crate::ty::{List, TyS, ParamEnvAnd, ParamEnv}; use crate::ty::layout::VariantIdx; @@ -24,7 +24,7 @@ use std::marker::PhantomData; use std::ops::Range; use rustc_target::spec::abi; use syntax::ast::{self, Ident}; -use syntax::symbol::{kw, InternedString}; +use syntax::symbol::{kw, Symbol}; use self::InferTy::*; use self::TyKind::*; @@ -55,7 +55,7 @@ pub enum BoundRegion { /// /// The `DefId` is needed to distinguish free regions in /// the event of shadowing. - BrNamed(DefId, InternedString), + BrNamed(DefId, Symbol), /// Anonymous region for the implicit env pointer parameter /// to a closure @@ -86,7 +86,7 @@ impl BoundRegion { /// AST structure in `libsyntax/ast.rs` as well. 
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable, HashStable, Debug)] -#[cfg_attr(not(bootstrap), rustc_diagnostic_item = "TyKind")] +#[rustc_diagnostic_item = "TyKind"] pub enum TyKind<'tcx> { /// The primitive boolean type. Written as `bool`. Bool, @@ -158,11 +158,11 @@ pub enum TyKind<'tcx> { /// The anonymous type of a closure. Used to represent the type of /// `|a| a`. - Closure(DefId, ClosureSubsts<'tcx>), + Closure(DefId, SubstsRef<'tcx>), /// The anonymous type of a generator. Used to represent the type of /// `|a| yield a`. - Generator(DefId, GeneratorSubsts<'tcx>, hir::GeneratorMovability), + Generator(DefId, SubstsRef<'tcx>, hir::GeneratorMovability), /// A type representin the types stored inside a generator. /// This should only appear in GeneratorInteriors. @@ -304,8 +304,7 @@ static_assert_size!(TyKind<'_>, 24); /// type parameters is similar, but the role of CK and CS are /// different. CK represents the "yield type" and CS represents the /// "return type" of the generator. -#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, - Debug, RustcEncodable, RustcDecodable, HashStable)] +#[derive(Copy, Clone, Debug)] pub struct ClosureSubsts<'tcx> { /// Lifetime and type parameters from the enclosing function, /// concatenated with the types of the upvars. @@ -320,7 +319,7 @@ pub struct ClosureSubsts<'tcx> { struct SplitClosureSubsts<'tcx> { closure_kind_ty: Ty<'tcx>, closure_sig_ty: Ty<'tcx>, - upvar_kinds: &'tcx [Kind<'tcx>], + upvar_kinds: &'tcx [GenericArg<'tcx>], } impl<'tcx> ClosureSubsts<'tcx> { @@ -345,7 +344,7 @@ impl<'tcx> ClosureSubsts<'tcx> { ) -> impl Iterator> + 'tcx { let SplitClosureSubsts { upvar_kinds, .. } = self.split(def_id, tcx); upvar_kinds.iter().map(|t| { - if let UnpackedKind::Type(ty) = t.unpack() { + if let GenericArgKind::Type(ty) = t.unpack() { ty } else { bug!("upvar should be type") @@ -356,7 +355,7 @@ impl<'tcx> ClosureSubsts<'tcx> { /// Returns the closure kind for this closure; may return a type /// variable during inference. To get the closure kind during /// inference, use `infcx.closure_kind(def_id, substs)`. - pub fn closure_kind_ty(self, def_id: DefId, tcx: TyCtxt<'_>) -> Ty<'tcx> { + pub fn kind_ty(self, def_id: DefId, tcx: TyCtxt<'_>) -> Ty<'tcx> { self.split(def_id, tcx).closure_kind_ty } @@ -364,7 +363,7 @@ impl<'tcx> ClosureSubsts<'tcx> { /// closure; may contain type variables during inference. To get /// the closure signature during inference, use /// `infcx.fn_sig(def_id)`. - pub fn closure_sig_ty(self, def_id: DefId, tcx: TyCtxt<'_>) -> Ty<'tcx> { + pub fn sig_ty(self, def_id: DefId, tcx: TyCtxt<'_>) -> Ty<'tcx> { self.split(def_id, tcx).closure_sig_ty } @@ -373,7 +372,7 @@ impl<'tcx> ClosureSubsts<'tcx> { /// there are no type variables. /// /// If you have an inference context, use `infcx.closure_kind()`. - pub fn closure_kind(self, def_id: DefId, tcx: TyCtxt<'tcx>) -> ty::ClosureKind { + pub fn kind(self, def_id: DefId, tcx: TyCtxt<'tcx>) -> ty::ClosureKind { self.split(def_id, tcx).closure_kind_ty.to_opt_closure_kind().unwrap() } @@ -382,18 +381,17 @@ impl<'tcx> ClosureSubsts<'tcx> { /// there are no type variables. /// /// If you have an inference context, use `infcx.closure_sig()`. 
- pub fn closure_sig(self, def_id: DefId, tcx: TyCtxt<'tcx>) -> ty::PolyFnSig<'tcx> { - let ty = self.closure_sig_ty(def_id, tcx); - match ty.sty { + pub fn sig(&self, def_id: DefId, tcx: TyCtxt<'tcx>) -> ty::PolyFnSig<'tcx> { + let ty = self.sig_ty(def_id, tcx); + match ty.kind { ty::FnPtr(sig) => sig, - _ => bug!("closure_sig_ty is not a fn-ptr: {:?}", ty.sty), + _ => bug!("closure_sig_ty is not a fn-ptr: {:?}", ty.kind), } } } /// Similar to `ClosureSubsts`; see the above documentation for more. -#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, - RustcEncodable, RustcDecodable, HashStable)] +#[derive(Copy, Clone, Debug)] pub struct GeneratorSubsts<'tcx> { pub substs: SubstsRef<'tcx>, } @@ -402,7 +400,7 @@ struct SplitGeneratorSubsts<'tcx> { yield_ty: Ty<'tcx>, return_ty: Ty<'tcx>, witness: Ty<'tcx>, - upvar_kinds: &'tcx [Kind<'tcx>], + upvar_kinds: &'tcx [GenericArg<'tcx>], } impl<'tcx> GeneratorSubsts<'tcx> { @@ -434,7 +432,7 @@ impl<'tcx> GeneratorSubsts<'tcx> { ) -> impl Iterator> + 'tcx { let SplitGeneratorSubsts { upvar_kinds, .. } = self.split(def_id, tcx); upvar_kinds.iter().map(|t| { - if let UnpackedKind::Type(ty) = t.unpack() { + if let GenericArgKind::Type(ty) = t.unpack() { ty } else { bug!("upvar should be type") @@ -511,7 +509,7 @@ impl<'tcx> GeneratorSubsts<'tcx> { /// variant indices. #[inline] pub fn discriminants( - &'tcx self, + self, def_id: DefId, tcx: TyCtxt<'tcx>, ) -> impl Iterator)> + Captures<'tcx> { @@ -523,7 +521,7 @@ impl<'tcx> GeneratorSubsts<'tcx> { /// Calls `f` with a reference to the name of the enumerator for the given /// variant `v`. #[inline] - pub fn variant_name(&self, v: VariantIdx) -> Cow<'static, str> { + pub fn variant_name(self, v: VariantIdx) -> Cow<'static, str> { match v.as_usize() { Self::UNRESUMED => Cow::from(Self::UNRESUMED_NAME), Self::RETURNED => Cow::from(Self::RETURNED_NAME), @@ -568,8 +566,8 @@ impl<'tcx> GeneratorSubsts<'tcx> { #[derive(Debug, Copy, Clone)] pub enum UpvarSubsts<'tcx> { - Closure(ClosureSubsts<'tcx>), - Generator(GeneratorSubsts<'tcx>), + Closure(SubstsRef<'tcx>), + Generator(SubstsRef<'tcx>), } impl<'tcx> UpvarSubsts<'tcx> { @@ -577,14 +575,14 @@ impl<'tcx> UpvarSubsts<'tcx> { pub fn upvar_tys( self, def_id: DefId, - tcx: TyCtxt<'_>, + tcx: TyCtxt<'tcx>, ) -> impl Iterator> + 'tcx { let upvar_kinds = match self { - UpvarSubsts::Closure(substs) => substs.split(def_id, tcx).upvar_kinds, - UpvarSubsts::Generator(substs) => substs.split(def_id, tcx).upvar_kinds, + UpvarSubsts::Closure(substs) => substs.as_closure().split(def_id, tcx).upvar_kinds, + UpvarSubsts::Generator(substs) => substs.as_generator().split(def_id, tcx).upvar_kinds, }; upvar_kinds.iter().map(|t| { - if let UnpackedKind::Type(ty) = t.unpack() { + if let GenericArgKind::Type(ty) = t.unpack() { ty } else { bug!("upvar should be type") @@ -1035,7 +1033,7 @@ impl<'tcx> ProjectionTy<'tcx> { } } -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)] +#[derive(Clone, Debug)] pub struct GenSig<'tcx> { pub yield_ty: Ty<'tcx>, pub return_ty: Ty<'tcx>, @@ -1123,16 +1121,16 @@ pub type CanonicalPolyFnSig<'tcx> = Canonical<'tcx, Binder>>; Hash, RustcEncodable, RustcDecodable, HashStable)] pub struct ParamTy { pub index: u32, - pub name: InternedString, + pub name: Symbol, } impl<'tcx> ParamTy { - pub fn new(index: u32, name: InternedString) -> ParamTy { + pub fn new(index: u32, name: Symbol) -> ParamTy { ParamTy { index, name: name } } pub fn for_self() -> ParamTy { - ParamTy::new(0, 
kw::SelfUpper.as_interned_str()) + ParamTy::new(0, kw::SelfUpper) } pub fn for_def(def: &ty::GenericParamDef) -> ParamTy { @@ -1148,11 +1146,11 @@ impl<'tcx> ParamTy { Eq, PartialEq, Ord, PartialOrd, HashStable)] pub struct ParamConst { pub index: u32, - pub name: InternedString, + pub name: Symbol, } impl<'tcx> ParamConst { - pub fn new(index: u32, name: InternedString) -> ParamConst { + pub fn new(index: u32, name: Symbol) -> ParamConst { ParamConst { index, name } } @@ -1165,7 +1163,7 @@ impl<'tcx> ParamConst { } } -newtype_index! { +rustc_index::newtype_index! { /// A [De Bruijn index][dbi] is a standard means of representing /// regions (and perhaps later types) in a higher-ranked setting. In /// particular, imagine a type like this: @@ -1325,7 +1323,7 @@ impl<'tcx> rustc_serialize::UseSpecializedDecodable for Region<'tcx> {} pub struct EarlyBoundRegion { pub def_id: DefId, pub index: u32, - pub name: InternedString, + pub name: Symbol, } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)] @@ -1349,7 +1347,7 @@ pub struct FloatVid { pub index: u32, } -newtype_index! { +rustc_index::newtype_index! { pub struct RegionVid { DEBUG_FORMAT = custom, } @@ -1376,7 +1374,7 @@ pub enum InferTy { FreshFloatTy(u32), } -newtype_index! { +rustc_index::newtype_index! { pub struct BoundVar { .. } } @@ -1389,7 +1387,7 @@ pub struct BoundTy { #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)] pub enum BoundTyKind { Anon, - Param(InternedString), + Param(Symbol), } impl_stable_hash_for!(struct BoundTy { var, kind }); @@ -1678,7 +1676,7 @@ impl RegionKind { impl<'tcx> TyS<'tcx> { #[inline] pub fn is_unit(&self) -> bool { - match self.sty { + match self.kind { Tuple(ref tys) => tys.is_empty(), _ => false, } @@ -1686,7 +1684,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_never(&self) -> bool { - match self.sty { + match self.kind { Never => true, _ => false, } @@ -1701,7 +1699,7 @@ impl<'tcx> TyS<'tcx> { pub fn conservative_is_privately_uninhabited(&self, tcx: TyCtxt<'tcx>) -> bool { // FIXME(varkor): we can make this less conversative by substituting concrete // type arguments. - match self.sty { + match self.kind { ty::Never => true, ty::Adt(def, _) if def.is_union() => { // For now, `union`s are never considered uninhabited. @@ -1741,7 +1739,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_primitive(&self) -> bool { - match self.sty { + match self.kind { Bool | Char | Int(_) | Uint(_) | Float(_) => true, _ => false, } @@ -1749,7 +1747,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_ty_var(&self) -> bool { - match self.sty { + match self.kind { Infer(TyVar(_)) => true, _ => false, } @@ -1757,7 +1755,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_ty_infer(&self) -> bool { - match self.sty { + match self.kind { Infer(_) => true, _ => false, } @@ -1765,7 +1763,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_phantom_data(&self) -> bool { - if let Adt(def, _) = self.sty { + if let Adt(def, _) = self.kind { def.is_phantom_data() } else { false @@ -1773,11 +1771,15 @@ impl<'tcx> TyS<'tcx> { } #[inline] - pub fn is_bool(&self) -> bool { self.sty == Bool } + pub fn is_bool(&self) -> bool { self.kind == Bool } + + /// Returns `true` if this type is a `str`. 
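
The `DebruijnIndex` introduced through `rustc_index::newtype_index!` in this hunk counts how many binders sit between a bound region and the binder that introduces it, which is what lets higher-ranked types such as `for<'a> fn(&'a u8)` be represented without names. A toy sketch of the one operation that usually trips people up, shifting indices when a value is moved under an extra binder (illustrative types only, not rustc's):

```rust
// Toy model of De Bruijn indices for region binders (illustrative only).
// `Bound(d)` names the binder `d` levels out from where it appears:
// 0 is the innermost enclosing binder, 1 the next one out, and so on.
#[derive(Debug, Clone, PartialEq)]
enum Region {
    Bound(u32),
    Free(&'static str),
}

#[derive(Debug, Clone, PartialEq)]
enum Ty {
    Ref(Region, Box<Ty>),
    // A `for<'r> ...`-style binder introducing one region.
    Binder(Box<Ty>),
    Unit,
}

// Adjust indices when `ty` is moved underneath `amount` extra binders.
// Only indices that point *past* the `depth` binders inside `ty` itself
// escape it, so only those need bumping.
fn shift(ty: &Ty, depth: u32, amount: u32) -> Ty {
    match ty {
        Ty::Unit => Ty::Unit,
        Ty::Binder(inner) => Ty::Binder(Box::new(shift(inner, depth + 1, amount))),
        Ty::Ref(r, inner) => {
            let r = match r {
                Region::Bound(d) if *d >= depth => Region::Bound(d + amount),
                other => other.clone(),
            };
            Ty::Ref(r, Box::new(shift(inner, depth, amount)))
        }
    }
}

fn main() {
    // `for<'a> fn(&'a ())` modelled as Binder(Ref(Bound(0), Unit)):
    // pushing it under another binder leaves the inner index alone, because
    // Bound(0) still names the binder directly around it.
    let hr = Ty::Binder(Box::new(Ty::Ref(Region::Bound(0), Box::new(Ty::Unit))));
    assert_eq!(shift(&hr, 0, 1), hr);

    // An index that escapes its type does get bumped...
    let escaping = Ty::Ref(Region::Bound(0), Box::new(Ty::Unit));
    assert_eq!(
        shift(&escaping, 0, 1),
        Ty::Ref(Region::Bound(1), Box::new(Ty::Unit))
    );

    // ...while free (named) regions are never touched by shifting.
    let free = Ty::Ref(Region::Free("'static"), Box::new(Ty::Unit));
    assert_eq!(shift(&free, 0, 1), free);
}
```
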
+ #[inline] + pub fn is_str(&self) -> bool { self.kind == Str } #[inline] pub fn is_param(&self, index: u32) -> bool { - match self.sty { + match self.kind { ty::Param(ref data) => data.index == index, _ => false, } @@ -1785,8 +1787,8 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_slice(&self) -> bool { - match self.sty { - RawPtr(TypeAndMut { ty, .. }) | Ref(_, ty, _) => match ty.sty { + match self.kind { + RawPtr(TypeAndMut { ty, .. }) | Ref(_, ty, _) => match ty.kind { Slice(_) | Str => true, _ => false, }, @@ -1796,14 +1798,14 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_simd(&self) -> bool { - match self.sty { + match self.kind { Adt(def, _) => def.repr.simd(), _ => false, } } pub fn sequence_element_type(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { - match self.sty { + match self.kind { Array(ty, _) | Slice(ty) => ty, Str => tcx.mk_mach_uint(ast::UintTy::U8), _ => bug!("sequence_element_type called on non-sequence value: {}", self), @@ -1811,7 +1813,7 @@ impl<'tcx> TyS<'tcx> { } pub fn simd_type(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { - match self.sty { + match self.kind { Adt(def, substs) => { def.non_enum_variant().fields[0].ty(tcx, substs) } @@ -1820,7 +1822,7 @@ impl<'tcx> TyS<'tcx> { } pub fn simd_size(&self, _cx: TyCtxt<'_>) -> usize { - match self.sty { + match self.kind { Adt(def, _) => def.non_enum_variant().fields.len(), _ => bug!("simd_size called on invalid type") } @@ -1828,7 +1830,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_region_ptr(&self) -> bool { - match self.sty { + match self.kind { Ref(..) => true, _ => false, } @@ -1836,7 +1838,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_mutable_ptr(&self) -> bool { - match self.sty { + match self.kind { RawPtr(TypeAndMut { mutbl: hir::Mutability::MutMutable, .. }) | Ref(_, _, hir::Mutability::MutMutable) => true, _ => false @@ -1845,7 +1847,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_unsafe_ptr(&self) -> bool { - match self.sty { + match self.kind { RawPtr(_) => return true, _ => return false, } @@ -1860,7 +1862,7 @@ impl<'tcx> TyS<'tcx> { /// Returns `true` if this type is an `Arc`. #[inline] pub fn is_arc(&self) -> bool { - match self.sty { + match self.kind { Adt(def, _) => def.is_arc(), _ => false, } @@ -1869,7 +1871,7 @@ impl<'tcx> TyS<'tcx> { /// Returns `true` if this type is an `Rc`. #[inline] pub fn is_rc(&self) -> bool { - match self.sty { + match self.kind { Adt(def, _) => def.is_rc(), _ => false, } @@ -1877,7 +1879,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_box(&self) -> bool { - match self.sty { + match self.kind { Adt(def, _) => def.is_box(), _ => false, } @@ -1885,7 +1887,7 @@ impl<'tcx> TyS<'tcx> { /// panics if called on any type other than `Box` pub fn boxed_ty(&self) -> Ty<'tcx> { - match self.sty { + match self.kind { Adt(def, substs) if def.is_box() => substs.type_at(0), _ => bug!("`boxed_ty` is called on non-box type {:?}", self), } @@ -1896,7 +1898,7 @@ impl<'tcx> TyS<'tcx> { /// contents are abstract to rustc.) #[inline] pub fn is_scalar(&self) -> bool { - match self.sty { + match self.kind { Bool | Char | Int(_) | Float(_) | Uint(_) | Infer(IntVar(_)) | Infer(FloatVar(_)) | FnDef(..) | FnPtr(_) | RawPtr(_) => true, @@ -1907,7 +1909,7 @@ impl<'tcx> TyS<'tcx> { /// Returns `true` if this type is a floating point type. 
#[inline] pub fn is_floating_point(&self) -> bool { - match self.sty { + match self.kind { Float(_) | Infer(FloatVar(_)) => true, _ => false, @@ -1916,7 +1918,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_trait(&self) -> bool { - match self.sty { + match self.kind { Dynamic(..) => true, _ => false, } @@ -1924,7 +1926,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_enum(&self) -> bool { - match self.sty { + match self.kind { Adt(adt_def, _) => { adt_def.is_enum() } @@ -1934,7 +1936,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_closure(&self) -> bool { - match self.sty { + match self.kind { Closure(..) => true, _ => false, } @@ -1942,7 +1944,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_generator(&self) -> bool { - match self.sty { + match self.kind { Generator(..) => true, _ => false, } @@ -1950,7 +1952,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_integral(&self) -> bool { - match self.sty { + match self.kind { Infer(IntVar(_)) | Int(_) | Uint(_) => true, _ => false } @@ -1958,7 +1960,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_fresh_ty(&self) -> bool { - match self.sty { + match self.kind { Infer(FreshTy(_)) => true, _ => false, } @@ -1966,7 +1968,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_fresh(&self) -> bool { - match self.sty { + match self.kind { Infer(FreshTy(_)) => true, Infer(FreshIntTy(_)) => true, Infer(FreshFloatTy(_)) => true, @@ -1976,7 +1978,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_char(&self) -> bool { - match self.sty { + match self.kind { Char => true, _ => false, } @@ -1989,7 +1991,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_signed(&self) -> bool { - match self.sty { + match self.kind { Int(_) => true, _ => false, } @@ -1997,7 +1999,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_ptr_sized_integral(&self) -> bool { - match self.sty { + match self.kind { Int(ast::IntTy::Isize) | Uint(ast::UintTy::Usize) => true, _ => false, } @@ -2005,7 +2007,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_machine(&self) -> bool { - match self.sty { + match self.kind { Int(..) | Uint(..) | Float(..) => true, _ => false, } @@ -2013,7 +2015,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn has_concrete_skeleton(&self) -> bool { - match self.sty { + match self.kind { Param(_) | Infer(_) | Error => false, _ => true, } @@ -2024,7 +2026,7 @@ impl<'tcx> TyS<'tcx> { /// The parameter `explicit` indicates if this is an *explicit* dereference. /// Some types -- notably unsafe ptrs -- can only be dereferenced explicitly. pub fn builtin_deref(&self, explicit: bool) -> Option> { - match self.sty { + match self.kind { Adt(def, _) if def.is_box() => { Some(TypeAndMut { ty: self.boxed_ty(), @@ -2039,14 +2041,14 @@ impl<'tcx> TyS<'tcx> { /// Returns the type of `ty[i]`. pub fn builtin_index(&self) -> Option> { - match self.sty { + match self.kind { Array(ty, _) | Slice(ty) => Some(ty), _ => None, } } pub fn fn_sig(&self, tcx: TyCtxt<'tcx>) -> PolyFnSig<'tcx> { - match self.sty { + match self.kind { FnDef(def_id, substs) => { tcx.fn_sig(def_id).subst(tcx, substs) } @@ -2063,7 +2065,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_fn(&self) -> bool { - match self.sty { + match self.kind { FnDef(..) | FnPtr(_) => true, _ => false, } @@ -2071,7 +2073,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_fn_ptr(&self) -> bool { - match self.sty { + match self.kind { FnPtr(_) => true, _ => false, } @@ -2079,7 +2081,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn is_impl_trait(&self) -> bool { - match self.sty { + match self.kind { Opaque(..) 
=> true, _ => false, } @@ -2087,7 +2089,7 @@ impl<'tcx> TyS<'tcx> { #[inline] pub fn ty_adt_def(&self) -> Option<&'tcx AdtDef> { - match self.sty { + match self.kind { Adt(adt, _) => Some(adt), _ => None, } @@ -2096,7 +2098,7 @@ impl<'tcx> TyS<'tcx> { /// Iterates over tuple fields. /// Panics when called on anything but a tuple. pub fn tuple_fields(&self) -> impl DoubleEndedIterator> { - match self.sty { + match self.kind { Tuple(substs) => substs.iter().map(|field| field.expect_ty()), _ => bug!("tuple_fields called on non-tuple"), } @@ -2106,9 +2108,10 @@ impl<'tcx> TyS<'tcx> { /// FIXME This requires the optimized MIR in the case of generators. #[inline] pub fn variant_range(&self, tcx: TyCtxt<'tcx>) -> Option> { - match self.sty { + match self.kind { TyKind::Adt(adt, _) => Some(adt.variant_range()), - TyKind::Generator(def_id, substs, _) => Some(substs.variant_range(def_id, tcx)), + TyKind::Generator(def_id, substs, _) => + Some(substs.as_generator().variant_range(def_id, tcx)), _ => None, } } @@ -2122,10 +2125,10 @@ impl<'tcx> TyS<'tcx> { tcx: TyCtxt<'tcx>, variant_index: VariantIdx, ) -> Option> { - match self.sty { + match self.kind { TyKind::Adt(adt, _) => Some(adt.discriminant_for_variant(tcx, variant_index)), TyKind::Generator(def_id, substs, _) => - Some(substs.discriminant_for_variant(def_id, tcx, variant_index)), + Some(substs.as_generator().discriminant_for_variant(def_id, tcx, variant_index)), _ => None, } } @@ -2134,7 +2137,7 @@ impl<'tcx> TyS<'tcx> { /// types reachable from this type via `walk_tys`). This ignores late-bound /// regions binders. pub fn push_regions(&self, out: &mut SmallVec<[ty::Region<'tcx>; 4]>) { - match self.sty { + match self.kind { Ref(region, _, _) => { out.push(region); } @@ -2147,8 +2150,8 @@ impl<'tcx> TyS<'tcx> { Adt(_, substs) | Opaque(_, substs) => { out.extend(substs.regions()) } - Closure(_, ClosureSubsts { ref substs }) | - Generator(_, GeneratorSubsts { ref substs }, _) => { + Closure(_, ref substs ) | + Generator(_, ref substs, _) => { out.extend(substs.regions()) } Projection(ref data) | UnnormalizedProjection(ref data) => { @@ -2190,7 +2193,7 @@ impl<'tcx> TyS<'tcx> { /// inferred. Once upvar inference (in `src/librustc_typeck/check/upvar.rs`) /// is complete, that type variable will be unified. pub fn to_opt_closure_kind(&self) -> Option { - match self.sty { + match self.kind { Int(int_ty) => match int_ty { ast::IntTy::I8 => Some(ty::ClosureKind::Fn), ast::IntTy::I16 => Some(ty::ClosureKind::FnMut), @@ -2198,7 +2201,9 @@ impl<'tcx> TyS<'tcx> { _ => bug!("cannot convert type `{:?}` to a closure kind", self), }, - Infer(_) => None, + // "Bound" types appear in canonical queries when the + // closure type is not yet known + Bound(..) | Infer(_) => None, Error => Some(ty::ClosureKind::Fn), @@ -2211,7 +2216,7 @@ impl<'tcx> TyS<'tcx> { /// Returning true means the type is known to be sized. Returning /// `false` means nothing -- could be sized, might not be. pub fn is_trivially_sized(&self, tcx: TyCtxt<'tcx>) -> bool { - match self.sty { + match self.kind { ty::Infer(ty::IntVar(_)) | ty::Infer(ty::FloatVar(_)) | ty::Uint(_) | ty::Int(_) | ty::Bool | ty::Float(_) | ty::FnDef(..) | ty::FnPtr(_) | ty::RawPtr(..) 
| @@ -2298,8 +2303,8 @@ impl<'tcx> Const<'tcx> { ty: Ty<'tcx>, ) -> Option { assert_eq!(self.ty, ty); - // if `ty` does not depend on generic parameters, use an empty param_env let size = tcx.layout_of(param_env.with_reveal_all().and(ty)).ok()?.size; + // if `ty` does not depend on generic parameters, use an empty param_env self.eval(tcx, param_env).val.try_to_bits(size) } @@ -2366,6 +2371,4 @@ pub enum InferConst<'tcx> { Var(ConstVid<'tcx>), /// A fresh const variable. See `infer::freshen` for more details. Fresh(u32), - /// Canonicalized const variable, used only when preparing a trait query. - Canonical(DebruijnIndex, BoundVar), } diff --git a/src/librustc/ty/subst.rs b/src/librustc/ty/subst.rs index ea829da783..2972197909 100644 --- a/src/librustc/ty/subst.rs +++ b/src/librustc/ty/subst.rs @@ -2,9 +2,10 @@ use crate::hir::def_id::DefId; use crate::infer::canonical::Canonical; -use crate::ty::{self, Lift, List, Ty, TyCtxt, InferConst, ParamConst}; +use crate::ty::{self, Lift, List, Ty, TyCtxt, ParamConst}; use crate::ty::fold::{TypeFoldable, TypeFolder, TypeVisitor}; use crate::mir::interpret::ConstValue; +use crate::ty::sty::{ClosureSubsts, GeneratorSubsts}; use rustc_serialize::{self, Encodable, Encoder, Decodable, Decoder}; use syntax_pos::{Span, DUMMY_SP}; @@ -20,11 +21,11 @@ use std::num::NonZeroUsize; /// An entity in the Rust type system, which can be one of /// several kinds (types, lifetimes, and consts). -/// To reduce memory usage, a `Kind` is a interned pointer, +/// To reduce memory usage, a `GenericArg` is a interned pointer, /// with the lowest 2 bits being reserved for a tag to /// indicate the type (`Ty`, `Region`, or `Const`) it points to. #[derive(Copy, Clone, PartialEq, Eq, Hash)] -pub struct Kind<'tcx> { +pub struct GenericArg<'tcx> { ptr: NonZeroUsize, marker: PhantomData<(Ty<'tcx>, ty::Region<'tcx>, &'tcx ty::Const<'tcx>)> } @@ -35,33 +36,33 @@ const REGION_TAG: usize = 0b01; const CONST_TAG: usize = 0b10; #[derive(Debug, RustcEncodable, RustcDecodable, PartialEq, Eq, PartialOrd, Ord, HashStable)] -pub enum UnpackedKind<'tcx> { +pub enum GenericArgKind<'tcx> { Lifetime(ty::Region<'tcx>), Type(Ty<'tcx>), Const(&'tcx ty::Const<'tcx>), } -impl<'tcx> UnpackedKind<'tcx> { - fn pack(self) -> Kind<'tcx> { +impl<'tcx> GenericArgKind<'tcx> { + fn pack(self) -> GenericArg<'tcx> { let (tag, ptr) = match self { - UnpackedKind::Lifetime(lt) => { + GenericArgKind::Lifetime(lt) => { // Ensure we can use the tag bits. assert_eq!(mem::align_of_val(lt) & TAG_MASK, 0); (REGION_TAG, lt as *const _ as usize) } - UnpackedKind::Type(ty) => { + GenericArgKind::Type(ty) => { // Ensure we can use the tag bits. assert_eq!(mem::align_of_val(ty) & TAG_MASK, 0); (TYPE_TAG, ty as *const _ as usize) } - UnpackedKind::Const(ct) => { + GenericArgKind::Const(ct) => { // Ensure we can use the tag bits. 
assert_eq!(mem::align_of_val(ct) & TAG_MASK, 0); (CONST_TAG, ct as *const _ as usize) } }; - Kind { + GenericArg { ptr: unsafe { NonZeroUsize::new_unchecked(ptr | tag) }, @@ -70,119 +71,137 @@ impl<'tcx> UnpackedKind<'tcx> { } } -impl fmt::Debug for Kind<'tcx> { +impl fmt::Debug for GenericArg<'tcx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self.unpack() { - UnpackedKind::Lifetime(lt) => lt.fmt(f), - UnpackedKind::Type(ty) => ty.fmt(f), - UnpackedKind::Const(ct) => ct.fmt(f), + GenericArgKind::Lifetime(lt) => lt.fmt(f), + GenericArgKind::Type(ty) => ty.fmt(f), + GenericArgKind::Const(ct) => ct.fmt(f), } } } -impl<'tcx> Ord for Kind<'tcx> { - fn cmp(&self, other: &Kind<'_>) -> Ordering { +impl<'tcx> Ord for GenericArg<'tcx> { + fn cmp(&self, other: &GenericArg<'_>) -> Ordering { self.unpack().cmp(&other.unpack()) } } -impl<'tcx> PartialOrd for Kind<'tcx> { - fn partial_cmp(&self, other: &Kind<'_>) -> Option { +impl<'tcx> PartialOrd for GenericArg<'tcx> { + fn partial_cmp(&self, other: &GenericArg<'_>) -> Option { Some(self.cmp(&other)) } } -impl<'tcx> From> for Kind<'tcx> { - fn from(r: ty::Region<'tcx>) -> Kind<'tcx> { - UnpackedKind::Lifetime(r).pack() +impl<'tcx> From> for GenericArg<'tcx> { + fn from(r: ty::Region<'tcx>) -> GenericArg<'tcx> { + GenericArgKind::Lifetime(r).pack() } } -impl<'tcx> From> for Kind<'tcx> { - fn from(ty: Ty<'tcx>) -> Kind<'tcx> { - UnpackedKind::Type(ty).pack() +impl<'tcx> From> for GenericArg<'tcx> { + fn from(ty: Ty<'tcx>) -> GenericArg<'tcx> { + GenericArgKind::Type(ty).pack() } } -impl<'tcx> From<&'tcx ty::Const<'tcx>> for Kind<'tcx> { - fn from(c: &'tcx ty::Const<'tcx>) -> Kind<'tcx> { - UnpackedKind::Const(c).pack() +impl<'tcx> From<&'tcx ty::Const<'tcx>> for GenericArg<'tcx> { + fn from(c: &'tcx ty::Const<'tcx>) -> GenericArg<'tcx> { + GenericArgKind::Const(c).pack() } } -impl<'tcx> Kind<'tcx> { +impl<'tcx> GenericArg<'tcx> { #[inline] - pub fn unpack(self) -> UnpackedKind<'tcx> { + pub fn unpack(self) -> GenericArgKind<'tcx> { let ptr = self.ptr.get(); unsafe { match ptr & TAG_MASK { - REGION_TAG => UnpackedKind::Lifetime(&*((ptr & !TAG_MASK) as *const _)), - TYPE_TAG => UnpackedKind::Type(&*((ptr & !TAG_MASK) as *const _)), - CONST_TAG => UnpackedKind::Const(&*((ptr & !TAG_MASK) as *const _)), + REGION_TAG => GenericArgKind::Lifetime(&*((ptr & !TAG_MASK) as *const _)), + TYPE_TAG => GenericArgKind::Type(&*((ptr & !TAG_MASK) as *const _)), + CONST_TAG => GenericArgKind::Const(&*((ptr & !TAG_MASK) as *const _)), _ => intrinsics::unreachable() } } } - /// Unpack the `Kind` as a type when it is known certainly to be a type. + /// Unpack the `GenericArg` as a type when it is known certainly to be a type. /// This is true in cases where `Substs` is used in places where the kinds are known /// to be limited (e.g. in tuples, where the only parameters are type parameters). 
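
The `pack`/`unpack` pair above works because every `Ty`, `Region` and `Const` is arena-interned with at least 4-byte alignment, so the low two bits of the pointer are free to carry the `GenericArgKind` tag. A stripped-down sketch of the same trick, with hypothetical payload types and `Box::leak` standing in for the `'tcx` arena:

```rust
use std::num::NonZeroUsize;

// Hypothetical payload types standing in for interned `Ty` / `Region` values.
#[derive(Debug)]
struct TyPayload(u32);
#[derive(Debug)]
struct RegionPayload(u32);

const TAG_MASK: usize = 0b11;
const TYPE_TAG: usize = 0b00;
const REGION_TAG: usize = 0b01;

#[derive(Copy, Clone)]
struct Arg {
    ptr: NonZeroUsize,
}

#[derive(Debug)]
enum ArgKind<'a> {
    Type(&'a TyPayload),
    Region(&'a RegionPayload),
}

impl<'a> ArgKind<'a> {
    fn pack(self) -> Arg {
        let (tag, addr) = match self {
            ArgKind::Type(t) => {
                // The payload must be aligned enough to leave the tag bits free.
                assert_eq!(std::mem::align_of_val(t) & TAG_MASK, 0);
                (TYPE_TAG, t as *const _ as usize)
            }
            ArgKind::Region(r) => {
                assert_eq!(std::mem::align_of_val(r) & TAG_MASK, 0);
                (REGION_TAG, r as *const _ as usize)
            }
        };
        Arg { ptr: NonZeroUsize::new(addr | tag).unwrap() }
    }
}

impl Arg {
    // In the compiler, the arena keeps every pointed-to value alive for `'tcx`;
    // this sketch fakes that below with `Box::leak`, hence the `unsafe`.
    unsafe fn unpack<'a>(self) -> ArgKind<'a> {
        let ptr = self.ptr.get();
        match ptr & TAG_MASK {
            TYPE_TAG => ArgKind::Type(&*((ptr & !TAG_MASK) as *const TyPayload)),
            REGION_TAG => ArgKind::Region(&*((ptr & !TAG_MASK) as *const RegionPayload)),
            _ => unreachable!(),
        }
    }
}

fn main() {
    let ty: &'static TyPayload = Box::leak(Box::new(TyPayload(42)));
    let packed = ArgKind::Type(ty).pack();
    match unsafe { packed.unpack() } {
        ArgKind::Type(t) => assert_eq!(t.0, 42),
        other => panic!("unexpected kind: {:?}", other),
    }
}
```

The payoff is that a whole substitution list fits in one pointer-sized word per entry, with the kind recovered branch-free from the tag bits.
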
pub fn expect_ty(self) -> Ty<'tcx> { match self.unpack() { - UnpackedKind::Type(ty) => ty, + GenericArgKind::Type(ty) => ty, _ => bug!("expected a type, but found another kind"), } } } -impl<'a, 'tcx> Lift<'tcx> for Kind<'a> { - type Lifted = Kind<'tcx>; +impl<'a, 'tcx> Lift<'tcx> for GenericArg<'a> { + type Lifted = GenericArg<'tcx>; fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { match self.unpack() { - UnpackedKind::Lifetime(lt) => tcx.lift(<).map(|lt| lt.into()), - UnpackedKind::Type(ty) => tcx.lift(&ty).map(|ty| ty.into()), - UnpackedKind::Const(ct) => tcx.lift(&ct).map(|ct| ct.into()), + GenericArgKind::Lifetime(lt) => tcx.lift(<).map(|lt| lt.into()), + GenericArgKind::Type(ty) => tcx.lift(&ty).map(|ty| ty.into()), + GenericArgKind::Const(ct) => tcx.lift(&ct).map(|ct| ct.into()), } } } -impl<'tcx> TypeFoldable<'tcx> for Kind<'tcx> { +impl<'tcx> TypeFoldable<'tcx> for GenericArg<'tcx> { fn super_fold_with>(&self, folder: &mut F) -> Self { match self.unpack() { - UnpackedKind::Lifetime(lt) => lt.fold_with(folder).into(), - UnpackedKind::Type(ty) => ty.fold_with(folder).into(), - UnpackedKind::Const(ct) => ct.fold_with(folder).into(), + GenericArgKind::Lifetime(lt) => lt.fold_with(folder).into(), + GenericArgKind::Type(ty) => ty.fold_with(folder).into(), + GenericArgKind::Const(ct) => ct.fold_with(folder).into(), } } fn super_visit_with>(&self, visitor: &mut V) -> bool { match self.unpack() { - UnpackedKind::Lifetime(lt) => lt.visit_with(visitor), - UnpackedKind::Type(ty) => ty.visit_with(visitor), - UnpackedKind::Const(ct) => ct.visit_with(visitor), + GenericArgKind::Lifetime(lt) => lt.visit_with(visitor), + GenericArgKind::Type(ty) => ty.visit_with(visitor), + GenericArgKind::Const(ct) => ct.visit_with(visitor), } } } -impl<'tcx> Encodable for Kind<'tcx> { +impl<'tcx> Encodable for GenericArg<'tcx> { fn encode(&self, e: &mut E) -> Result<(), E::Error> { self.unpack().encode(e) } } -impl<'tcx> Decodable for Kind<'tcx> { - fn decode(d: &mut D) -> Result, D::Error> { - Ok(UnpackedKind::decode(d)?.pack()) +impl<'tcx> Decodable for GenericArg<'tcx> { + fn decode(d: &mut D) -> Result, D::Error> { + Ok(GenericArgKind::decode(d)?.pack()) } } /// A substitution mapping generic parameters to new values. -pub type InternalSubsts<'tcx> = List>; +pub type InternalSubsts<'tcx> = List>; pub type SubstsRef<'tcx> = &'tcx InternalSubsts<'tcx>; impl<'a, 'tcx> InternalSubsts<'tcx> { + /// Interpret these substitutions as the substitutions of a closure type. + /// Closure substitutions have a particular structure controlled by the + /// compiler that encodes information like the signature and closure kind; + /// see `ty::ClosureSubsts` struct for more comments. + pub fn as_closure(&'a self) -> ClosureSubsts<'a> { + ClosureSubsts { + substs: self, + } + } + + /// Interpret these substitutions as the substitutions of a generator type. + /// Closure substitutions have a particular structure controlled by the + /// compiler that encodes information like the signature and generator kind; + /// see `ty::GeneratorSubsts` struct for more comments. + pub fn as_generator(&'tcx self) -> GeneratorSubsts<'tcx> { + GeneratorSubsts { substs: self } + } + /// Creates a `InternalSubsts` that maps each generic parameter to itself. 
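
`as_closure()` and `as_generator()` added above are free conversions: the data is still a plain `SubstsRef`, the wrapper merely records how the trailing entries are to be read (the exact layout lives in `SplitClosureSubsts`/`SplitGeneratorSubsts` earlier in this patch). Here is a self-contained sketch of this "typed view over a flat argument list" idea, with a layout simplified purely for illustration:

```rust
// A flat list of generic arguments, like `SubstsRef` (toy version).
#[derive(Debug, Clone, PartialEq)]
enum GenericArg {
    Type(&'static str),
    Lifetime(&'static str),
}

type Substs = Vec<GenericArg>;

// A view of the same list "as a closure": after the parent item's generics
// come the encoded closure kind and the signature, then the upvar types.
struct ClosureView<'a> {
    substs: &'a Substs,
    parent_count: usize,
}

impl<'a> ClosureView<'a> {
    fn kind_ty(&self) -> &'a GenericArg {
        &self.substs[self.parent_count]
    }

    fn sig_ty(&self) -> &'a GenericArg {
        &self.substs[self.parent_count + 1]
    }

    fn upvar_tys(&self) -> impl Iterator<Item = &'a GenericArg> + 'a {
        let substs: &'a [GenericArg] = self.substs;
        substs[self.parent_count + 2..].iter()
    }
}

trait AsClosure {
    fn as_closure(&self, parent_count: usize) -> ClosureView<'_>;
}

impl AsClosure for Substs {
    // The conversion is free: no data is copied, we only attach meaning.
    fn as_closure(&self, parent_count: usize) -> ClosureView<'_> {
        ClosureView { substs: self, parent_count }
    }
}

fn main() {
    let substs: Substs = vec![
        GenericArg::Lifetime("'a"),   // parent generics
        GenericArg::Type("i8"),       // closure kind, encoded as an integer type
        GenericArg::Type("fn(&str)"), // closure signature, as a fn-pointer type
        GenericArg::Type("String"),   // one captured upvar
    ];
    let view = substs.as_closure(1);
    assert_eq!(view.kind_ty(), &GenericArg::Type("i8"));
    assert_eq!(view.sig_ty(), &GenericArg::Type("fn(&str)"));
    assert_eq!(view.upvar_tys().count(), 1);
}
```

This is the point of the change to `TyKind::Closure(DefId, SubstsRef)` earlier in the patch: closures store the same untyped list as every other type, and callers opt into the structured reading only where they need it.
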
pub fn identity_for_item(tcx: TyCtxt<'tcx>, def_id: DefId) -> SubstsRef<'tcx> { Self::for_item(tcx, def_id, |param, _| { @@ -215,9 +234,7 @@ impl<'a, 'tcx> InternalSubsts<'tcx> { ty::GenericParamDefKind::Const => { tcx.mk_const(ty::Const { - val: ConstValue::Infer( - InferConst::Canonical(ty::INNERMOST, ty::BoundVar::from(param.index)) - ), + val: ConstValue::Bound(ty::INNERMOST, ty::BoundVar::from(param.index)), ty: tcx.type_of(def_id), }).into() } @@ -232,7 +249,7 @@ impl<'a, 'tcx> InternalSubsts<'tcx> { /// substitute defaults of generic parameters. pub fn for_item(tcx: TyCtxt<'tcx>, def_id: DefId, mut mk_kind: F) -> SubstsRef<'tcx> where - F: FnMut(&ty::GenericParamDef, &[Kind<'tcx>]) -> Kind<'tcx>, + F: FnMut(&ty::GenericParamDef, &[GenericArg<'tcx>]) -> GenericArg<'tcx>, { let defs = tcx.generics_of(def_id); let count = defs.count(); @@ -243,7 +260,7 @@ impl<'a, 'tcx> InternalSubsts<'tcx> { pub fn extend_to(&self, tcx: TyCtxt<'tcx>, def_id: DefId, mut mk_kind: F) -> SubstsRef<'tcx> where - F: FnMut(&ty::GenericParamDef, &[Kind<'tcx>]) -> Kind<'tcx>, + F: FnMut(&ty::GenericParamDef, &[GenericArg<'tcx>]) -> GenericArg<'tcx>, { Self::for_item(tcx, def_id, |param, substs| { self.get(param.index as usize) @@ -253,12 +270,12 @@ impl<'a, 'tcx> InternalSubsts<'tcx> { } fn fill_item( - substs: &mut SmallVec<[Kind<'tcx>; 8]>, + substs: &mut SmallVec<[GenericArg<'tcx>; 8]>, tcx: TyCtxt<'tcx>, defs: &ty::Generics, mk_kind: &mut F, ) where - F: FnMut(&ty::GenericParamDef, &[Kind<'tcx>]) -> Kind<'tcx>, + F: FnMut(&ty::GenericParamDef, &[GenericArg<'tcx>]) -> GenericArg<'tcx>, { if let Some(def_id) = defs.parent { let parent_defs = tcx.generics_of(def_id); @@ -267,10 +284,10 @@ impl<'a, 'tcx> InternalSubsts<'tcx> { Self::fill_single(substs, defs, mk_kind) } - fn fill_single(substs: &mut SmallVec<[Kind<'tcx>; 8]>, + fn fill_single(substs: &mut SmallVec<[GenericArg<'tcx>; 8]>, defs: &ty::Generics, mk_kind: &mut F) - where F: FnMut(&ty::GenericParamDef, &[Kind<'tcx>]) -> Kind<'tcx> + where F: FnMut(&ty::GenericParamDef, &[GenericArg<'tcx>]) -> GenericArg<'tcx> { substs.reserve(defs.params.len()); for param in &defs.params { @@ -287,7 +304,7 @@ impl<'a, 'tcx> InternalSubsts<'tcx> { #[inline] pub fn types(&'a self) -> impl DoubleEndedIterator> + 'a { self.iter().filter_map(|k| { - if let UnpackedKind::Type(ty) = k.unpack() { + if let GenericArgKind::Type(ty) = k.unpack() { Some(ty) } else { None @@ -298,7 +315,7 @@ impl<'a, 'tcx> InternalSubsts<'tcx> { #[inline] pub fn regions(&'a self) -> impl DoubleEndedIterator> + 'a { self.iter().filter_map(|k| { - if let UnpackedKind::Lifetime(lt) = k.unpack() { + if let GenericArgKind::Lifetime(lt) = k.unpack() { Some(lt) } else { None @@ -309,7 +326,7 @@ impl<'a, 'tcx> InternalSubsts<'tcx> { #[inline] pub fn consts(&'a self) -> impl DoubleEndedIterator> + 'a { self.iter().filter_map(|k| { - if let UnpackedKind::Const(ct) = k.unpack() { + if let GenericArgKind::Const(ct) = k.unpack() { Some(ct) } else { None @@ -320,10 +337,10 @@ impl<'a, 'tcx> InternalSubsts<'tcx> { #[inline] pub fn non_erasable_generics( &'a self - ) -> impl DoubleEndedIterator> + 'a { + ) -> impl DoubleEndedIterator> + 'a { self.iter().filter_map(|k| { match k.unpack() { - UnpackedKind::Lifetime(_) => None, + GenericArgKind::Lifetime(_) => None, generic => Some(generic), } }) @@ -331,7 +348,7 @@ impl<'a, 'tcx> InternalSubsts<'tcx> { #[inline] pub fn type_at(&self, i: usize) -> Ty<'tcx> { - if let UnpackedKind::Type(ty) = self[i].unpack() { + if let GenericArgKind::Type(ty) = self[i].unpack() { 
ty } else { bug!("expected type for param #{} in {:?}", i, self); @@ -340,7 +357,7 @@ impl<'a, 'tcx> InternalSubsts<'tcx> { #[inline] pub fn region_at(&self, i: usize) -> ty::Region<'tcx> { - if let UnpackedKind::Lifetime(lt) = self[i].unpack() { + if let GenericArgKind::Lifetime(lt) = self[i].unpack() { lt } else { bug!("expected region for param #{} in {:?}", i, self); @@ -349,7 +366,7 @@ impl<'a, 'tcx> InternalSubsts<'tcx> { #[inline] pub fn const_at(&self, i: usize) -> &'tcx ty::Const<'tcx> { - if let UnpackedKind::Const(ct) = self[i].unpack() { + if let GenericArgKind::Const(ct) = self[i].unpack() { ct } else { bug!("expected const for param #{} in {:?}", i, self); @@ -357,7 +374,7 @@ impl<'a, 'tcx> InternalSubsts<'tcx> { } #[inline] - pub fn type_for_def(&self, def: &ty::GenericParamDef) -> Kind<'tcx> { + pub fn type_for_def(&self, def: &ty::GenericParamDef) -> GenericArg<'tcx> { self.type_at(def.index as usize).into() } @@ -383,14 +400,41 @@ impl<'a, 'tcx> InternalSubsts<'tcx> { impl<'tcx> TypeFoldable<'tcx> for SubstsRef<'tcx> { fn super_fold_with>(&self, folder: &mut F) -> Self { - let params: SmallVec<[_; 8]> = self.iter().map(|k| k.fold_with(folder)).collect(); - - // If folding doesn't change the substs, it's faster to avoid - // calling `mk_substs` and instead reuse the existing substs. - if params[..] == self[..] { - self - } else { - folder.tcx().intern_substs(¶ms) + // This code is hot enough that it's worth specializing for the most + // common length lists, to avoid the overhead of `SmallVec` creation. + // The match arms are in order of frequency. The 1, 2, and 0 cases are + // typically hit in 90--99.99% of cases. When folding doesn't change + // the substs, it's faster to reuse the existing substs rather than + // calling `intern_substs`. + match self.len() { + 1 => { + let param0 = self[0].fold_with(folder); + if param0 == self[0] { + self + } else { + folder.tcx().intern_substs(&[param0]) + } + } + 2 => { + let param0 = self[0].fold_with(folder); + let param1 = self[1].fold_with(folder); + if param0 == self[0] && param1 == self[1] { + self + } else { + folder.tcx().intern_substs(&[param0, param1]) + } + } + 0 => { + self + } + _ => { + let params: SmallVec<[_; 8]> = self.iter().map(|k| k.fold_with(folder)).collect(); + if params[..] == self[..] { + self + } else { + folder.tcx().intern_substs(¶ms) + } + } } } @@ -409,15 +453,25 @@ impl<'tcx> rustc_serialize::UseSpecializedDecodable for SubstsRef<'tcx> {} // there is more information available (for better errors). pub trait Subst<'tcx>: Sized { - fn subst(&self, tcx: TyCtxt<'tcx>, substs: &[Kind<'tcx>]) -> Self { + fn subst(&self, tcx: TyCtxt<'tcx>, substs: &[GenericArg<'tcx>]) -> Self { self.subst_spanned(tcx, substs, None) } - fn subst_spanned(&self, tcx: TyCtxt<'tcx>, substs: &[Kind<'tcx>], span: Option) -> Self; + fn subst_spanned( + &self, + tcx: TyCtxt<'tcx>, + substs: &[GenericArg<'tcx>], + span: Option, + ) -> Self; } impl<'tcx, T: TypeFoldable<'tcx>> Subst<'tcx> for T { - fn subst_spanned(&self, tcx: TyCtxt<'tcx>, substs: &[Kind<'tcx>], span: Option) -> T { + fn subst_spanned( + &self, + tcx: TyCtxt<'tcx>, + substs: &[GenericArg<'tcx>], + span: Option, + ) -> T { let mut folder = SubstFolder { tcx, substs, span, @@ -433,7 +487,7 @@ impl<'tcx, T: TypeFoldable<'tcx>> Subst<'tcx> for T { struct SubstFolder<'a, 'tcx> { tcx: TyCtxt<'tcx>, - substs: &'a [Kind<'tcx>], + substs: &'a [GenericArg<'tcx>], /// The location for which the substitution is performed, if available. 
span: Option, @@ -468,7 +522,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for SubstFolder<'a, 'tcx> { ty::ReEarlyBound(data) => { let rk = self.substs.get(data.index as usize).map(|k| k.unpack()); match rk { - Some(UnpackedKind::Lifetime(lt)) => { + Some(GenericArgKind::Lifetime(lt)) => { self.shift_region_through_binders(lt) } _ => { @@ -501,7 +555,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for SubstFolder<'a, 'tcx> { } self.ty_stack_depth += 1; - let t1 = match t.sty { + let t1 = match t.kind { ty::Param(p) => { self.ty_for_param(p, t) } @@ -537,7 +591,7 @@ impl<'a, 'tcx> SubstFolder<'a, 'tcx> { // Look up the type in the substitutions. It really should be in there. let opt_ty = self.substs.get(p.index as usize).map(|k| k.unpack()); let ty = match opt_ty { - Some(UnpackedKind::Type(ty)) => ty, + Some(GenericArgKind::Type(ty)) => ty, Some(kind) => { let span = self.span.unwrap_or(DUMMY_SP); span_bug!( @@ -578,7 +632,7 @@ impl<'a, 'tcx> SubstFolder<'a, 'tcx> { // Look up the const in the substitutions. It really should be in there. let opt_ct = self.substs.get(p.index as usize).map(|k| k.unpack()); let ct = match opt_ct { - Some(UnpackedKind::Const(ct)) => ct, + Some(GenericArgKind::Const(ct)) => ct, Some(kind) => { let span = self.span.unwrap_or(DUMMY_SP); span_bug!( diff --git a/src/librustc/ty/trait_def.rs b/src/librustc/ty/trait_def.rs index 2bb9c258f8..49ec908231 100644 --- a/src/librustc/ty/trait_def.rs +++ b/src/librustc/ty/trait_def.rs @@ -8,8 +8,7 @@ use crate::ty::fold::TypeFoldable; use crate::ty::{Ty, TyCtxt}; use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::stable_hasher::{HashStable, StableHasher, - StableHasherResult}; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_macros::HashStable; /// A trait's definition with type information. 
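
`SubstFolder` above is the engine behind `Subst::subst`: when it meets a `ty::Param` or a `ReEarlyBound` region, it looks up the entry at that parameter's index in the supplied argument list, and it reuses any subtree that comes back unchanged rather than re-interning it (the length-specialized `super_fold_with` earlier in `subst.rs` exists to make that reuse cheap in the common 0-, 1- and 2-argument cases). A minimal sketch of the same idea over a toy type tree, with `Rc` standing in for interning:

```rust
use std::rc::Rc;

// Toy type tree: either a generic parameter (identified by index) or a
// reference type wrapping another type. `Rc` stands in for interning, so an
// unchanged subtree can be handed back instead of rebuilt.
#[derive(Debug, PartialEq)]
enum Ty {
    Param(usize),
    Ref(Rc<Ty>),
    Bool,
}

// Replace every `Param(i)` with `substs[i]`, reusing nodes that do not change.
fn subst(ty: &Rc<Ty>, substs: &[Rc<Ty>]) -> Rc<Ty> {
    match &**ty {
        Ty::Param(i) => substs
            .get(*i)
            .cloned()
            .unwrap_or_else(|| panic!("type parameter #{} out of range", i)),
        Ty::Ref(inner) => {
            let new_inner = subst(inner, substs);
            if Rc::ptr_eq(&new_inner, inner) {
                // Nothing changed below us: hand back the original allocation,
                // mirroring the "reuse the existing substs" fast path above.
                Rc::clone(ty)
            } else {
                Rc::new(Ty::Ref(new_inner))
            }
        }
        Ty::Bool => Rc::clone(ty),
    }
}

fn main() {
    // `&T0` with T0 := bool becomes `&bool`.
    let ty = Rc::new(Ty::Ref(Rc::new(Ty::Param(0))));
    let substs = vec![Rc::new(Ty::Bool)];
    assert_eq!(*subst(&ty, &substs), Ty::Ref(Rc::new(Ty::Bool)));

    // A type that mentions no parameters is returned as the very same allocation.
    let concrete = Rc::new(Ty::Ref(Rc::new(Ty::Bool)));
    assert!(Rc::ptr_eq(&subst(&concrete, &substs), &concrete));
}
```
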
@@ -194,9 +193,7 @@ pub(super) fn trait_impls_of_provider( } impl<'a> HashStable> for TraitImpls { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { let TraitImpls { ref blanket_impls, ref non_blanket_impls, diff --git a/src/librustc/ty/util.rs b/src/librustc/ty/util.rs index 2bb009ddf9..5555dace45 100644 --- a/src/librustc/ty/util.rs +++ b/src/librustc/ty/util.rs @@ -8,7 +8,7 @@ use crate::mir::interpret::{sign_extend, truncate}; use crate::ich::NodeIdHashingMode; use crate::traits::{self, ObligationCause}; use crate::ty::{self, DefIdTree, Ty, TyCtxt, GenericParamDefKind, TypeFoldable}; -use crate::ty::subst::{Subst, InternalSubsts, SubstsRef, UnpackedKind}; +use crate::ty::subst::{Subst, InternalSubsts, SubstsRef, GenericArgKind}; use crate::ty::query::TyCtxtAt; use crate::ty::TyKind::*; use crate::ty::layout::{Integer, IntegerExt}; @@ -33,7 +33,7 @@ pub struct Discr<'tcx> { impl<'tcx> fmt::Display for Discr<'tcx> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.ty.sty { + match self.ty.kind { ty::Int(ity) => { let size = ty::tls::with(|tcx| { Integer::from_attr(&tcx, SignedInt(ity)).size() @@ -54,7 +54,7 @@ impl<'tcx> Discr<'tcx> { self.checked_add(tcx, 1).0 } pub fn checked_add(self, tcx: TyCtxt<'tcx>, n: u128) -> (Self, bool) { - let (int, signed) = match self.ty.sty { + let (int, signed) = match self.ty.kind { Int(ity) => (Integer::from_attr(&tcx, SignedInt(ity)), true), Uint(uty) => (Integer::from_attr(&tcx, UnsignedInt(uty)), false), _ => bug!("non integer discriminant"), @@ -179,7 +179,7 @@ impl<'tcx> ty::ParamEnv<'tcx> { ) -> Result<(), CopyImplementationError<'tcx>> { // FIXME: (@jroesch) float this code up tcx.infer_ctxt().enter(|infcx| { - let (adt, substs) = match self_type.sty { + let (adt, substs) = match self_type.kind { // These types used to have a builtin impl. // Now libcore provides that impl. ty::Uint(_) | ty::Int(_) | ty::Bool | ty::Float(_) | @@ -246,10 +246,10 @@ impl<'tcx> TyCtxt<'tcx> { impl<'tcx> TyCtxt<'tcx> { pub fn has_error_field(self, ty: Ty<'tcx>) -> bool { - if let ty::Adt(def, substs) = ty.sty { + if let ty::Adt(def, substs) = ty.kind { for field in def.all_fields() { let field_ty = field.ty(self, substs); - if let Error = field_ty.sty { + if let Error = field_ty.kind { return true; } } @@ -298,7 +298,7 @@ impl<'tcx> TyCtxt<'tcx> { -> Ty<'tcx> { loop { - match ty.sty { + match ty.kind { ty::Adt(def, substs) => { if !def.is_struct() { break; @@ -370,7 +370,7 @@ impl<'tcx> TyCtxt<'tcx> { { let (mut a, mut b) = (source, target); loop { - match (&a.sty, &b.sty) { + match (&a.kind, &b.kind) { (&Adt(a_def, a_substs), &Adt(b_def, b_substs)) if a_def == b_def && a_def.is_struct() => { if let Some(f) = a_def.non_enum_variant().fields.last() { @@ -510,7 +510,7 @@ impl<'tcx> TyCtxt<'tcx> { /// destructor of `def` itself. For the destructors of the /// contents, you need `adt_dtorck_constraint`. pub fn destructor_constraints(self, def: &'tcx ty::AdtDef) - -> Vec> + -> Vec> { let dtor = match def.destructor(self) { None => { @@ -544,12 +544,12 @@ impl<'tcx> TyCtxt<'tcx> { // , and then look up which of the impl substs refer to // parameters marked as pure. 
- let impl_substs = match self.type_of(impl_def_id).sty { + let impl_substs = match self.type_of(impl_def_id).kind { ty::Adt(def_, substs) if def_ == def => substs, _ => bug!() }; - let item_substs = match self.type_of(def.did).sty { + let item_substs = match self.type_of(def.did).kind { ty::Adt(def_, substs) if def_ == def => substs, _ => bug!() }; @@ -557,23 +557,23 @@ impl<'tcx> TyCtxt<'tcx> { let result = item_substs.iter().zip(impl_substs.iter()) .filter(|&(_, &k)| { match k.unpack() { - UnpackedKind::Lifetime(&ty::RegionKind::ReEarlyBound(ref ebr)) => { + GenericArgKind::Lifetime(&ty::RegionKind::ReEarlyBound(ref ebr)) => { !impl_generics.region_param(ebr, self).pure_wrt_drop } - UnpackedKind::Type(&ty::TyS { - sty: ty::Param(ref pt), .. + GenericArgKind::Type(&ty::TyS { + kind: ty::Param(ref pt), .. }) => { !impl_generics.type_param(pt, self).pure_wrt_drop } - UnpackedKind::Const(&ty::Const { + GenericArgKind::Const(&ty::Const { val: ConstValue::Param(ref pc), .. }) => { !impl_generics.const_param(pc, self).pure_wrt_drop } - UnpackedKind::Lifetime(_) | - UnpackedKind::Type(_) | - UnpackedKind::Const(_) => { + GenericArgKind::Lifetime(_) | + GenericArgKind::Type(_) | + GenericArgKind::Const(_) => { // Not a type, const or region param: this should be reported // as an error. false @@ -642,12 +642,12 @@ impl<'tcx> TyCtxt<'tcx> { /// wrapped in a binder. pub fn closure_env_ty(self, closure_def_id: DefId, - closure_substs: ty::ClosureSubsts<'tcx>) + closure_substs: SubstsRef<'tcx>) -> Option>> { let closure_ty = self.mk_closure(closure_def_id, closure_substs); let env_region = ty::ReLateBound(ty::INNERMOST, ty::BrEnv); - let closure_kind_ty = closure_substs.closure_kind_ty(closure_def_id, self); + let closure_kind_ty = closure_substs.as_closure().kind_ty(closure_def_id, self); let closure_kind = closure_kind_ty.to_opt_closure_kind()?; let env_ty = match closure_kind { ty::ClosureKind::Fn => self.mk_imm_ref(self.mk_region(env_region), closure_ty), @@ -743,7 +743,7 @@ impl<'tcx> TyCtxt<'tcx> { } fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> { - if let ty::Opaque(def_id, substs) = t.sty { + if let ty::Opaque(def_id, substs) = t.kind { self.expand_opaque_ty(def_id, substs).unwrap_or(t) } else if t.has_projections() { t.super_fold_with(self) @@ -818,13 +818,15 @@ impl<'tcx> ty::TyS<'tcx> { /// /// (Note that this implies that if `ty` has a destructor attached, /// then `needs_drop` will definitely return `true` for `ty`.) + /// + /// Note that this method is used to check eligible types in unions. #[inline] pub fn needs_drop(&'tcx self, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool { tcx.needs_drop_raw(param_env.and(self)).0 } pub fn same_type(a: Ty<'tcx>, b: Ty<'tcx>) -> bool { - match (&a.sty, &b.sty) { + match (&a.kind, &b.kind) { (&Adt(did_a, substs_a), &Adt(did_b, substs_b)) => { if did_a != did_b { return false; @@ -859,7 +861,7 @@ impl<'tcx> ty::TyS<'tcx> { representable_cache: &mut FxHashMap, Representability>, ty: Ty<'tcx>, ) -> Representability { - match ty.sty { + match ty.kind { Tuple(..) 
=> { // Find non representable fold_repr(ty.tuple_fields().map(|ty| { @@ -902,7 +904,7 @@ impl<'tcx> ty::TyS<'tcx> { } fn same_struct_or_enum<'tcx>(ty: Ty<'tcx>, def: &'tcx ty::AdtDef) -> bool { - match ty.sty { + match ty.kind { Adt(ty_def, _) => { ty_def == def } @@ -940,7 +942,7 @@ impl<'tcx> ty::TyS<'tcx> { representable_cache: &mut FxHashMap, Representability>, ty: Ty<'tcx>, ) -> Representability { - match ty.sty { + match ty.kind { Adt(def, _) => { { // Iterate through stack of previously seen types. @@ -1022,7 +1024,7 @@ impl<'tcx> ty::TyS<'tcx> { /// - `&'a *const &'b u8 -> *const &'b u8` pub fn peel_refs(&'tcx self) -> Ty<'tcx> { let mut ty = self; - while let Ref(_, inner_ty, _) = ty.sty { + while let Ref(_, inner_ty, _) = ty.kind { ty = inner_ty; } ty @@ -1030,34 +1032,25 @@ impl<'tcx> ty::TyS<'tcx> { } fn is_copy_raw<'tcx>(tcx: TyCtxt<'tcx>, query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool { - let (param_env, ty) = query.into_parts(); - let trait_def_id = tcx.require_lang_item(lang_items::CopyTraitLangItem, None); - tcx.infer_ctxt() - .enter(|infcx| traits::type_known_to_meet_bound_modulo_regions( - &infcx, - param_env, - ty, - trait_def_id, - DUMMY_SP, - )) + is_item_raw(tcx, query, lang_items::CopyTraitLangItem) } fn is_sized_raw<'tcx>(tcx: TyCtxt<'tcx>, query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool { - let (param_env, ty) = query.into_parts(); - let trait_def_id = tcx.require_lang_item(lang_items::SizedTraitLangItem, None); - tcx.infer_ctxt() - .enter(|infcx| traits::type_known_to_meet_bound_modulo_regions( - &infcx, - param_env, - ty, - trait_def_id, - DUMMY_SP, - )) + is_item_raw(tcx, query, lang_items::SizedTraitLangItem) + } fn is_freeze_raw<'tcx>(tcx: TyCtxt<'tcx>, query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool { + is_item_raw(tcx, query, lang_items::FreezeTraitLangItem) +} + +fn is_item_raw<'tcx>( + tcx: TyCtxt<'tcx>, + query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>, + item: lang_items::LangItem, +) -> bool { let (param_env, ty) = query.into_parts(); - let trait_def_id = tcx.require_lang_item(lang_items::FreezeTraitLangItem, None); + let trait_def_id = tcx.require_lang_item(item, None); tcx.infer_ctxt() .enter(|infcx| traits::type_known_to_meet_bound_modulo_regions( &infcx, @@ -1080,7 +1073,7 @@ fn needs_drop_raw<'tcx>(tcx: TyCtxt<'tcx>, query: ty::ParamEnvAnd<'tcx, Ty<'tcx> assert!(!ty.needs_infer()); - NeedsDrop(match ty.sty { + NeedsDrop(match ty.kind { // Fast-path for primitive types ty::Infer(ty::FreshIntTy(_)) | ty::Infer(ty::FreshFloatTy(_)) | ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Never | @@ -1118,10 +1111,15 @@ fn needs_drop_raw<'tcx>(tcx: TyCtxt<'tcx>, query: ty::ParamEnvAnd<'tcx, Ty<'tcx> ty::UnnormalizedProjection(..) => bug!("only used with chalk-engine"), + // Zero-length arrays never contain anything to drop. + ty::Array(_, len) if len.try_eval_usize(tcx, param_env) == Some(0) => false, + // Structural recursion. 
ty::Array(ty, _) | ty::Slice(ty) => needs_drop(ty), - ty::Closure(def_id, ref substs) => substs.upvar_tys(def_id, tcx).any(needs_drop), + ty::Closure(def_id, ref substs) => { + substs.as_closure().upvar_tys(def_id, tcx).any(needs_drop) + } // Pessimistically assume that all generators will require destructors // as we don't know if a destructor is a noop or not until after the MIR @@ -1183,7 +1181,7 @@ impl<'tcx> ExplicitSelf<'tcx> { { use self::ExplicitSelf::*; - match self_arg_ty.sty { + match self_arg_ty.kind { _ if is_self_ty(self_arg_ty) => ByValue, ty::Ref(region, ty, mutbl) if is_self_ty(ty) => { ByReference(region, mutbl) diff --git a/src/librustc/ty/walk.rs b/src/librustc/ty/walk.rs index 8c3110792a..f5b1902e3c 100644 --- a/src/librustc/ty/walk.rs +++ b/src/librustc/ty/walk.rs @@ -69,7 +69,7 @@ pub fn walk_shallow(ty: Ty<'_>) -> smallvec::IntoIter> { // natural order one would expect (basically, the order of the // types as they are written). fn push_subtypes<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent_ty: Ty<'tcx>) { - match parent_ty.sty { + match parent_ty.kind { ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Str | ty::Infer(_) | ty::Param(_) | ty::Never | ty::Error | ty::Placeholder(..) | ty::Bound(..) | ty::Foreign(..) => { @@ -110,11 +110,9 @@ fn push_subtypes<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent_ty: Ty<'tcx>) { ty::Adt(_, substs) | ty::Opaque(_, substs) => { stack.extend(substs.types().rev()); } - ty::Closure(_, ref substs) => { - stack.extend(substs.substs.types().rev()); - } - ty::Generator(_, ref substs, _) => { - stack.extend(substs.substs.types().rev()); + ty::Closure(_, ref substs) + | ty::Generator(_, ref substs, _) => { + stack.extend(substs.types().rev()); } ty::GeneratorWitness(ts) => { stack.extend(ts.skip_binder().iter().cloned().rev()); diff --git a/src/librustc/ty/wf.rs b/src/librustc/ty/wf.rs index d6de217f79..4ea01bf964 100644 --- a/src/librustc/ty/wf.rs +++ b/src/librustc/ty/wf.rs @@ -22,11 +22,14 @@ pub fn obligations<'a, 'tcx>( ty: Ty<'tcx>, span: Span, ) -> Option>> { - let mut wf = WfPredicates { infcx, - param_env, - body_id, - span, - out: vec![] }; + let mut wf = WfPredicates { + infcx, + param_env, + body_id, + span, + out: vec![], + item: None, + }; if wf.compute(ty) { debug!("wf::obligations({:?}, body_id={:?}) = {:?}", ty, body_id, wf.out); let result = wf.normalize(); @@ -47,8 +50,9 @@ pub fn trait_obligations<'a, 'tcx>( body_id: hir::HirId, trait_ref: &ty::TraitRef<'tcx>, span: Span, + item: Option<&'tcx hir::Item>, ) -> Vec> { - let mut wf = WfPredicates { infcx, param_env, body_id, span, out: vec![] }; + let mut wf = WfPredicates { infcx, param_env, body_id, span, out: vec![], item }; wf.compute_trait_ref(trait_ref, Elaborate::All); wf.normalize() } @@ -60,7 +64,7 @@ pub fn predicate_obligations<'a, 'tcx>( predicate: &ty::Predicate<'tcx>, span: Span, ) -> Vec> { - let mut wf = WfPredicates { infcx, param_env, body_id, span, out: vec![] }; + let mut wf = WfPredicates { infcx, param_env, body_id, span, out: vec![], item: None }; // (*) ok to skip binders, because wf code is prepared for it match *predicate { @@ -107,6 +111,7 @@ struct WfPredicates<'a, 'tcx> { body_id: hir::HirId, span: Span, out: Vec>, + item: Option<&'tcx hir::Item>, } /// Controls whether we "elaborate" supertraits and so forth on the WF @@ -157,33 +162,162 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { .collect() } - /// Pushes the obligations required for `trait_ref` to be WF into - /// `self.out`. 
+ /// Pushes the obligations required for `trait_ref` to be WF into `self.out`. fn compute_trait_ref(&mut self, trait_ref: &ty::TraitRef<'tcx>, elaborate: Elaborate) { + let tcx = self.infcx.tcx; let obligations = self.nominal_obligations(trait_ref.def_id, trait_ref.substs); let cause = self.cause(traits::MiscObligation); let param_env = self.param_env; + let item = &self.item; + let extend_cause_with_original_assoc_item_obligation = | + cause: &mut traits::ObligationCause<'_>, + pred: &ty::Predicate<'_>, + trait_assoc_items: ty::AssocItemsIterator<'_>, + | { + let item_span = item.map(|i| tcx.sess.source_map().def_span(i.span)); + match pred { + ty::Predicate::Projection(proj) => { + // The obligation comes not from the current `impl` nor the `trait` being + // implemented, but rather from a "second order" obligation, like in + // `src/test/ui/associated-types/point-at-type-on-obligation-failure.rs`: + // + // error[E0271]: type mismatch resolving `::Ok == ()` + // --> $DIR/point-at-type-on-obligation-failure.rs:13:5 + // | + // LL | type Ok; + // | -- associated type defined here + // ... + // LL | impl Bar for Foo { + // | ---------------- in this `impl` item + // LL | type Ok = (); + // | ^^^^^^^^^^^^^ expected u32, found () + // | + // = note: expected type `u32` + // found type `()` + // + // FIXME: we would want to point a span to all places that contributed to this + // obligation. In the case above, it should be closer to: + // + // error[E0271]: type mismatch resolving `::Ok == ()` + // --> $DIR/point-at-type-on-obligation-failure.rs:13:5 + // | + // LL | type Ok; + // | -- associated type defined here + // LL | type Sibling: Bar2; + // | -------------------------------- obligation set here + // ... + // LL | impl Bar for Foo { + // | ---------------- in this `impl` item + // LL | type Ok = (); + // | ^^^^^^^^^^^^^ expected u32, found () + // ... + // LL | impl Bar2 for Foo2 { + // | ---------------- in this `impl` item + // LL | type Ok = u32; + // | -------------- obligation set here + // | + // = note: expected type `u32` + // found type `()` + if let Some(hir::ItemKind::Impl(.., impl_items)) = item.map(|i| &i.kind) { + let trait_assoc_item = tcx.associated_item(proj.projection_def_id()); + if let Some(impl_item) = impl_items.iter().filter(|item| { + item.ident == trait_assoc_item.ident + }).next() { + cause.span = impl_item.span; + cause.code = traits::AssocTypeBound( + item_span, + trait_assoc_item.ident.span, + ); + } + } + } + ty::Predicate::Trait(proj) => { + // An associated item obligation born out of the `trait` failed to be met. + // Point at the `impl` that failed the obligation, the associated item that + // needed to meet the obligation, and the definition of that associated item, + // which should hold the obligation in most cases. An example can be seen in + // `src/test/ui/associated-types/point-at-type-on-obligation-failure-2.rs`: + // + // error[E0277]: the trait bound `bool: Bar` is not satisfied + // --> $DIR/point-at-type-on-obligation-failure-2.rs:8:5 + // | + // LL | type Assoc: Bar; + // | ----- associated type defined here + // ... 
+ // LL | impl Foo for () { + // | --------------- in this `impl` item + // LL | type Assoc = bool; + // | ^^^^^^^^^^^^^^^^^^ the trait `Bar` is not implemented for `bool` + // + // FIXME: if the obligation comes from the where clause in the `trait`, we + // should point at it: + // + // error[E0277]: the trait bound `bool: Bar` is not satisfied + // --> $DIR/point-at-type-on-obligation-failure-2.rs:8:5 + // | + // | trait Foo where >::Assoc: Bar { + // | -------------------------- obligation set here + // LL | type Assoc; + // | ----- associated type defined here + // ... + // LL | impl Foo for () { + // | --------------- in this `impl` item + // LL | type Assoc = bool; + // | ^^^^^^^^^^^^^^^^^^ the trait `Bar` is not implemented for `bool` + if let ( + ty::Projection(ty::ProjectionTy { item_def_id, .. }), + Some(hir::ItemKind::Impl(.., impl_items)), + ) = (&proj.skip_binder().self_ty().kind, item.map(|i| &i.kind)) { + if let Some((impl_item, trait_assoc_item)) = trait_assoc_items + .filter(|i| i.def_id == *item_def_id) + .next() + .and_then(|trait_assoc_item| impl_items.iter() + .filter(|i| i.ident == trait_assoc_item.ident) + .next() + .map(|impl_item| (impl_item, trait_assoc_item))) + { + cause.span = impl_item.span; + cause.code = traits::AssocTypeBound( + item_span, + trait_assoc_item.ident.span, + ); + } + } + } + _ => {} + } + }; + if let Elaborate::All = elaborate { + let trait_assoc_items = tcx.associated_items(trait_ref.def_id); + let predicates = obligations.iter() - .map(|obligation| obligation.predicate.clone()) - .collect(); - let implied_obligations = traits::elaborate_predicates(self.infcx.tcx, predicates); + .map(|obligation| obligation.predicate.clone()) + .collect(); + let implied_obligations = traits::elaborate_predicates(tcx, predicates); let implied_obligations = implied_obligations.map(|pred| { - traits::Obligation::new(cause.clone(), param_env, pred) + let mut cause = cause.clone(); + extend_cause_with_original_assoc_item_obligation( + &mut cause, + &pred, + trait_assoc_items.clone(), + ); + traits::Obligation::new(cause, param_env, pred) }); self.out.extend(implied_obligations); } self.out.extend(obligations); - self.out.extend( - trait_ref.substs.types() - .filter(|ty| !ty.has_escaping_bound_vars()) - .map(|ty| traits::Obligation::new(cause.clone(), - param_env, - ty::Predicate::WellFormed(ty)))); + self.out.extend(trait_ref.substs.types() + .filter(|ty| !ty.has_escaping_bound_vars()) + .map(|ty| traits::Obligation::new( + cause.clone(), + param_env, + ty::Predicate::WellFormed(ty), + ))); } /// Pushes the obligations required for `trait_ref::Item` to be WF @@ -236,7 +370,7 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { let mut subtys = ty0.walk(); let param_env = self.param_env; while let Some(ty) = subtys.next() { - match ty.sty { + match ty.kind { ty::Bool | ty::Char | ty::Int(..) | @@ -347,7 +481,7 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { // anyway, except via auto trait matching (which // only inspects the upvar types). 
subtys.skip_current_subtree(); // subtree handled by compute_projection - for upvar_ty in substs.upvar_tys(def_id, self.infcx.tcx) { + for upvar_ty in substs.as_closure().upvar_tys(def_id, self.infcx.tcx) { self.compute(upvar_ty); } } @@ -380,16 +514,21 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { // obligations that don't refer to Self and // checking those - let cause = self.cause(traits::MiscObligation); - let component_traits = - data.auto_traits().chain(data.principal_def_id()); - self.out.extend( - component_traits.map(|did| traits::Obligation::new( - cause.clone(), - param_env, - ty::Predicate::ObjectSafe(did) - )) - ); + let defer_to_coercion = + self.infcx.tcx.features().object_safe_for_dispatch; + + if !defer_to_coercion { + let cause = self.cause(traits::MiscObligation); + let component_traits = + data.auto_traits().chain(data.principal_def_id()); + self.out.extend( + component_traits.map(|did| traits::Obligation::new( + cause.clone(), + param_env, + ty::Predicate::ObjectSafe(did) + )) + ); + } } // Inference variables are the complicated case, since we don't @@ -407,7 +546,7 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { // is satisfied to ensure termination.) ty::Infer(_) => { let ty = self.infcx.shallow_resolve(ty); - if let ty::Infer(_) = ty.sty { // not yet resolved... + if let ty::Infer(_) = ty.kind { // not yet resolved... if ty == ty0 { // ...this is the type we started from! no progress. return false; } diff --git a/src/librustc/util/common.rs b/src/librustc/util/common.rs index 2475b93d95..3e52a6aa50 100644 --- a/src/librustc/util/common.rs +++ b/src/librustc/util/common.rs @@ -1,17 +1,13 @@ #![allow(non_camel_case_types)] -use rustc_data_structures::{fx::FxHashMap, sync::Lock}; +use rustc_data_structures::sync::Lock; -use std::cell::{RefCell, Cell}; +use std::cell::Cell; use std::fmt::Debug; -use std::hash::Hash; use std::time::{Duration, Instant}; -use std::sync::mpsc::{Sender}; -use syntax_pos::{SpanData}; use syntax::symbol::{Symbol, sym}; use rustc_macros::HashStable; -use crate::dep_graph::{DepNode}; use crate::session::Session; #[cfg(test)] @@ -27,17 +23,6 @@ pub struct ErrorReported; thread_local!(static TIME_DEPTH: Cell = Cell::new(0)); -/// Parameters to the `Dump` variant of type `ProfileQueriesMsg`. -#[derive(Clone,Debug)] -pub struct ProfQDumpParams { - /// A base path for the files we will dump. - pub path:String, - /// To ensure that the compiler waits for us to finish our dumps. - pub ack:Sender<()>, - /// Toggle dumping a log file with every `ProfileQueriesMsg`. - pub dump_profq_msg_log:bool, -} - #[allow(nonstandard_style)] #[derive(Clone, Debug, PartialEq, Eq)] pub struct QueryMsg { @@ -45,53 +30,6 @@ pub struct QueryMsg { pub msg: Option, } -/// A sequence of these messages induce a trace of query-based incremental compilation. -// FIXME(matthewhammer): Determine whether we should include cycle detection here or not. -#[derive(Clone,Debug)] -pub enum ProfileQueriesMsg { - /// Begin a timed pass. - TimeBegin(String), - /// End a timed pass. - TimeEnd, - /// Begin a task (see `dep_graph::graph::with_task`). - TaskBegin(DepNode), - /// End a task. - TaskEnd, - /// Begin a new query. - /// Cannot use `Span` because queries are sent to other thread. - QueryBegin(SpanData, QueryMsg), - /// Query is satisfied by using an already-known value for the given key. - CacheHit, - /// Query requires running a provider; providers may nest, permitting queries to nest. - ProviderBegin, - /// Query is satisfied by a provider terminating with a value. 
- ProviderEnd, - /// Dump a record of the queries to the given path. - Dump(ProfQDumpParams), - /// Halt the profiling/monitoring background thread. - Halt -} - -/// If enabled, send a message to the profile-queries thread. -pub fn profq_msg(sess: &Session, msg: ProfileQueriesMsg) { - if let Some(s) = sess.profile_channel.borrow().as_ref() { - s.send(msg).unwrap() - } else { - // Do nothing. - } -} - -/// Set channel for profile queries channel. -pub fn profq_set_chan(sess: &Session, s: Sender) -> bool { - let mut channel = sess.profile_channel.borrow_mut(); - if channel.is_none() { - *channel = Some(s); - true - } else { - false - } -} - /// Read the current depth of `time()` calls. This is used to /// encourage indentation across threads. pub fn time_depth() -> usize { @@ -108,10 +46,10 @@ pub fn set_time_depth(depth: usize) { pub fn time(sess: &Session, what: &str, f: F) -> T where F: FnOnce() -> T, { - time_ext(sess.time_passes(), Some(sess), what, f) + time_ext(sess.time_passes(), what, f) } -pub fn time_ext(do_it: bool, sess: Option<&Session>, what: &str, f: F) -> T where +pub fn time_ext(do_it: bool, what: &str, f: F) -> T where F: FnOnce() -> T, { if !do_it { return f(); } @@ -122,19 +60,9 @@ pub fn time_ext(do_it: bool, sess: Option<&Session>, what: &str, f: F) -> r }); - if let Some(sess) = sess { - if cfg!(debug_assertions) { - profq_msg(sess, ProfileQueriesMsg::TimeBegin(what.to_string())) - } - } let start = Instant::now(); let rv = f(); let dur = start.elapsed(); - if let Some(sess) = sess { - if cfg!(debug_assertions) { - profq_msg(sess, ProfileQueriesMsg::TimeEnd) - } - } print_time_passes_entry(true, what, dur); @@ -279,39 +207,3 @@ pub fn indenter() -> Indenter { debug!(">>"); Indenter { _cannot_construct_outside_of_this_module: () } } - -pub trait MemoizationMap { - type Key: Clone; - type Value: Clone; - - /// If `key` is present in the map, return the value, - /// otherwise invoke `op` and store the value in the map. - /// - /// N.B., if the receiver is a `DepTrackingMap`, special care is - /// needed in the `op` to ensure that the correct edges are - /// added into the dep graph. See the `DepTrackingMap` impl for - /// more details! 
- fn memoize(&self, key: Self::Key, op: OP) -> Self::Value - where OP: FnOnce() -> Self::Value; -} - -impl MemoizationMap for RefCell> - where K: Hash+Eq+Clone, V: Clone -{ - type Key = K; - type Value = V; - - fn memoize(&self, key: K, op: OP) -> V - where OP: FnOnce() -> V - { - let result = self.borrow().get(&key).cloned(); - match result { - Some(result) => result, - None => { - let result = op(); - self.borrow_mut().insert(key, result.clone()); - result - } - } - } -} diff --git a/src/librustc/util/profiling.rs b/src/librustc/util/profiling.rs index 8624856a4f..5a1b7f3aa4 100644 --- a/src/librustc/util/profiling.rs +++ b/src/librustc/util/profiling.rs @@ -1,9 +1,9 @@ -use std::borrow::Cow; use std::error::Error; use std::fs; use std::mem::{self, Discriminant}; use std::path::Path; use std::process; +use std::sync::Arc; use std::thread::ThreadId; use std::u32; @@ -14,9 +14,12 @@ use measureme::{StringId, TimestampKind}; /// MmapSerializatioSink is faster on macOS and Linux /// but FileSerializationSink is faster on Windows #[cfg(not(windows))] -type Profiler = measureme::Profiler; +type SerializationSink = measureme::MmapSerializationSink; #[cfg(windows)] -type Profiler = measureme::Profiler; +type SerializationSink = measureme::FileSerializationSink; + +type Profiler = measureme::Profiler; + #[derive(Clone, Copy, Debug, PartialEq, Eq, Ord, PartialOrd)] pub enum ProfileCategory { @@ -62,6 +65,146 @@ fn thread_id_to_u64(tid: ThreadId) -> u64 { unsafe { mem::transmute::(tid) } } + +/// A reference to the SelfProfiler. It can be cloned and sent across thread +/// boundaries at will. +#[derive(Clone)] +pub struct SelfProfilerRef { + // This field is `None` if self-profiling is disabled for the current + // compilation session. + profiler: Option>, + + // We store the filter mask directly in the reference because that doesn't + // cost anything and allows for filtering with checking if the profiler is + // actually enabled. + event_filter_mask: EventFilter, +} + +impl SelfProfilerRef { + + pub fn new(profiler: Option>) -> SelfProfilerRef { + // If there is no SelfProfiler then the filter mask is set to NONE, + // ensuring that nothing ever tries to actually access it. + let event_filter_mask = profiler + .as_ref() + .map(|p| p.event_filter_mask) + .unwrap_or(EventFilter::NONE); + + SelfProfilerRef { + profiler, + event_filter_mask, + } + } + + // This shim makes sure that calls only get executed if the filter mask + // lets them pass. It also contains some trickery to make sure that + // code is optimized for non-profiling compilation sessions, i.e. anything + // past the filter check is never inlined so it doesn't clutter the fast + // path. + #[inline(always)] + fn exec(&self, event_filter: EventFilter, f: F) -> TimingGuard<'_> + where F: for<'a> FnOnce(&'a SelfProfiler) -> TimingGuard<'a> + { + #[inline(never)] + fn cold_call(profiler_ref: &SelfProfilerRef, f: F) -> TimingGuard<'_> + where F: for<'a> FnOnce(&'a SelfProfiler) -> TimingGuard<'a> + { + let profiler = profiler_ref.profiler.as_ref().unwrap(); + f(&**profiler) + } + + if unlikely!(self.event_filter_mask.contains(event_filter)) { + cold_call(self, f) + } else { + TimingGuard::none() + } + } + + /// Start profiling a generic activity. Profiling continues until the + /// TimingGuard returned from this call is dropped. 
+ #[inline(always)] + pub fn generic_activity(&self, event_id: &str) -> TimingGuard<'_> { + self.exec(EventFilter::GENERIC_ACTIVITIES, |profiler| { + let event_id = profiler.profiler.alloc_string(event_id); + TimingGuard::start( + profiler, + profiler.generic_activity_event_kind, + event_id + ) + }) + } + + /// Start profiling a query provider. Profiling continues until the + /// TimingGuard returned from this call is dropped. + #[inline(always)] + pub fn query_provider(&self, query_name: QueryName) -> TimingGuard<'_> { + self.exec(EventFilter::QUERY_PROVIDERS, |profiler| { + let event_id = SelfProfiler::get_query_name_string_id(query_name); + TimingGuard::start(profiler, profiler.query_event_kind, event_id) + }) + } + + /// Record a query in-memory cache hit. + #[inline(always)] + pub fn query_cache_hit(&self, query_name: QueryName) { + self.non_guard_query_event( + |profiler| profiler.query_cache_hit_event_kind, + query_name, + EventFilter::QUERY_CACHE_HITS, + TimestampKind::Instant, + ); + } + + /// Start profiling a query being blocked on a concurrent execution. + /// Profiling continues until the TimingGuard returned from this call is + /// dropped. + #[inline(always)] + pub fn query_blocked(&self, query_name: QueryName) -> TimingGuard<'_> { + self.exec(EventFilter::QUERY_BLOCKED, |profiler| { + let event_id = SelfProfiler::get_query_name_string_id(query_name); + TimingGuard::start(profiler, profiler.query_blocked_event_kind, event_id) + }) + } + + /// Start profiling how long it takes to load a query result from the + /// incremental compilation on-disk cache. Profiling continues until the + /// TimingGuard returned from this call is dropped. + #[inline(always)] + pub fn incr_cache_loading(&self, query_name: QueryName) -> TimingGuard<'_> { + self.exec(EventFilter::INCR_CACHE_LOADS, |profiler| { + let event_id = SelfProfiler::get_query_name_string_id(query_name); + TimingGuard::start( + profiler, + profiler.incremental_load_result_event_kind, + event_id + ) + }) + } + + #[inline(always)] + fn non_guard_query_event( + &self, + event_kind: fn(&SelfProfiler) -> StringId, + query_name: QueryName, + event_filter: EventFilter, + timestamp_kind: TimestampKind + ) { + drop(self.exec(event_filter, |profiler| { + let event_id = SelfProfiler::get_query_name_string_id(query_name); + let thread_id = thread_id_to_u64(std::thread::current().id()); + + profiler.profiler.record_event( + event_kind(profiler), + event_id, + thread_id, + timestamp_kind, + ); + + TimingGuard::none() + })); + } +} + pub struct SelfProfiler { profiler: Profiler, event_filter_mask: EventFilter, @@ -143,103 +286,28 @@ impl SelfProfiler { let id = SelfProfiler::get_query_name_string_id(query_name); self.profiler.alloc_string_with_reserved_id(id, query_name.as_str()); } +} +#[must_use] +pub struct TimingGuard<'a>(Option>); + +impl<'a> TimingGuard<'a> { #[inline] - pub fn start_activity( - &self, - label: impl Into>, - ) { - if self.event_filter_mask.contains(EventFilter::GENERIC_ACTIVITIES) { - self.record(&label.into(), self.generic_activity_event_kind, TimestampKind::Start); - } - } - - #[inline] - pub fn end_activity( - &self, - label: impl Into>, - ) { - if self.event_filter_mask.contains(EventFilter::GENERIC_ACTIVITIES) { - self.record(&label.into(), self.generic_activity_event_kind, TimestampKind::End); - } - } - - #[inline] - pub fn record_query_hit(&self, query_name: QueryName) { - if self.event_filter_mask.contains(EventFilter::QUERY_CACHE_HITS) { - self.record_query(query_name, self.query_cache_hit_event_kind, 
TimestampKind::Instant); - } - } - - #[inline] - pub fn start_query(&self, query_name: QueryName) { - if self.event_filter_mask.contains(EventFilter::QUERY_PROVIDERS) { - self.record_query(query_name, self.query_event_kind, TimestampKind::Start); - } - } - - #[inline] - pub fn end_query(&self, query_name: QueryName) { - if self.event_filter_mask.contains(EventFilter::QUERY_PROVIDERS) { - self.record_query(query_name, self.query_event_kind, TimestampKind::End); - } - } - - #[inline] - pub fn incremental_load_result_start(&self, query_name: QueryName) { - if self.event_filter_mask.contains(EventFilter::INCR_CACHE_LOADS) { - self.record_query( - query_name, - self.incremental_load_result_event_kind, - TimestampKind::Start - ); - } - } - - #[inline] - pub fn incremental_load_result_end(&self, query_name: QueryName) { - if self.event_filter_mask.contains(EventFilter::INCR_CACHE_LOADS) { - self.record_query( - query_name, - self.incremental_load_result_event_kind, - TimestampKind::End - ); - } - } - - #[inline] - pub fn query_blocked_start(&self, query_name: QueryName) { - if self.event_filter_mask.contains(EventFilter::QUERY_BLOCKED) { - self.record_query(query_name, self.query_blocked_event_kind, TimestampKind::Start); - } - } - - #[inline] - pub fn query_blocked_end(&self, query_name: QueryName) { - if self.event_filter_mask.contains(EventFilter::QUERY_BLOCKED) { - self.record_query(query_name, self.query_blocked_event_kind, TimestampKind::End); - } - } - - #[inline] - fn record(&self, event_id: &str, event_kind: StringId, timestamp_kind: TimestampKind) { - let thread_id = thread_id_to_u64(std::thread::current().id()); - - let event_id = self.profiler.alloc_string(event_id); - self.profiler.record_event(event_kind, event_id, thread_id, timestamp_kind); - } - - #[inline] - fn record_query( - &self, - query_name: QueryName, + pub fn start( + profiler: &'a SelfProfiler, event_kind: StringId, - timestamp_kind: TimestampKind, - ) { - let dep_node_name = SelfProfiler::get_query_name_string_id(query_name); - + event_id: StringId, + ) -> TimingGuard<'a> { let thread_id = thread_id_to_u64(std::thread::current().id()); + let raw_profiler = &profiler.profiler; + let timing_guard = raw_profiler.start_recording_interval_event(event_kind, + event_id, + thread_id); + TimingGuard(Some(timing_guard)) + } - self.profiler.record_event(event_kind, dep_node_name, thread_id, timestamp_kind); + #[inline] + pub fn none() -> TimingGuard<'a> { + TimingGuard(None) } } diff --git a/src/librustc_apfloat/Cargo.toml b/src/librustc_apfloat/Cargo.toml index af6c2feed0..4fc15f99e4 100644 --- a/src/librustc_apfloat/Cargo.toml +++ b/src/librustc_apfloat/Cargo.toml @@ -9,5 +9,5 @@ name = "rustc_apfloat" path = "lib.rs" [dependencies] -bitflags = "1.0" +bitflags = "1.2.1" smallvec = { version = "0.6.7", features = ["union", "may_dangle"] } diff --git a/src/librustc_apfloat/ieee.rs b/src/librustc_apfloat/ieee.rs index 18d968fbdd..4abb86a525 100644 --- a/src/librustc_apfloat/ieee.rs +++ b/src/librustc_apfloat/ieee.rs @@ -1199,8 +1199,8 @@ impl Float for IeeeFloat { } // Handle a leading minus sign. 
- let minus = s.starts_with("-"); - if minus || s.starts_with("+") { + let minus = s.starts_with('-'); + if minus || s.starts_with('+') { s = &s[1..]; if s.is_empty() { return Err(ParseError("String has no digits")); diff --git a/src/librustc_apfloat/lib.rs b/src/librustc_apfloat/lib.rs index 1190cea21a..5efe4fda8c 100644 --- a/src/librustc_apfloat/lib.rs +++ b/src/librustc_apfloat/lib.rs @@ -555,12 +555,13 @@ pub trait Float fn ilogb(self) -> ExpInt; /// Returns: self * 2exp for integral exponents. + /// Equivalent to C standard library function `ldexp`. fn scalbn_r(self, exp: ExpInt, round: Round) -> Self; fn scalbn(self, exp: ExpInt) -> Self { self.scalbn_r(exp, Round::NearestTiesToEven) } - /// Equivalent of C standard library function. + /// Equivalent to C standard library function with the same name. /// /// While the C standard says exp is an unspecified value for infinity and nan, /// this returns INT_MAX for infinities, and INT_MIN for NaNs (see `ilogb`). diff --git a/src/librustc_ast_borrowck/Cargo.toml b/src/librustc_ast_borrowck/Cargo.toml deleted file mode 100644 index 024b2640e1..0000000000 --- a/src/librustc_ast_borrowck/Cargo.toml +++ /dev/null @@ -1,22 +0,0 @@ -[package] -authors = ["The Rust Project Developers"] -name = "rustc_ast_borrowck" -version = "0.0.0" -edition = "2018" - -[lib] -name = "rustc_ast_borrowck" -path = "lib.rs" -test = false -doctest = false - -[dependencies] -log = "0.4" -syntax = { path = "../libsyntax" } -syntax_pos = { path = "../libsyntax_pos" } -# for "clarity", rename the graphviz crate to dot; graphviz within `borrowck` -# refers to the borrowck-specific graphviz adapter traits. -dot = { path = "../libgraphviz", package = "graphviz" } -rustc = { path = "../librustc" } -errors = { path = "../librustc_errors", package = "rustc_errors" } -rustc_data_structures = { path = "../librustc_data_structures" } diff --git a/src/librustc_ast_borrowck/borrowck/README.md b/src/librustc_ast_borrowck/borrowck/README.md deleted file mode 100644 index 3f2175921d..0000000000 --- a/src/librustc_ast_borrowck/borrowck/README.md +++ /dev/null @@ -1,1167 +0,0 @@ -% The Borrow Checker - -> WARNING: This README is more or less obsolete, and will be removed -> soon! The new system is described in the [rustc guide]. - -[rustc guide]: https://rust-lang.github.io/rustc-guide/borrow_check.html - -This pass has the job of enforcing memory safety. This is a subtle -topic. This docs aim to explain both the practice and the theory -behind the borrow checker. They start with a high-level overview of -how it works, and then proceed to dive into the theoretical -background. Finally, they go into detail on some of the more subtle -aspects. - -# Table of contents - -These docs are long. Search for the section you are interested in. - -- Overview -- Formal model -- Borrowing and loans -- Moves and initialization -- Drop flags and structural fragments -- Future work - -# Overview - -The borrow checker checks one function at a time. It operates in two -passes. The first pass, called `gather_loans`, walks over the function -and identifies all of the places where borrows (e.g., `&` expressions -and `ref` bindings) and moves (copies or captures of a linear value) -occur. It also tracks initialization sites. For each borrow and move, -it checks various basic safety conditions at this time (for example, -that the lifetime of the borrow doesn't exceed the lifetime of the -value being borrowed, or that there is no move out of an `&T` -referent). 
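The two basic safety conditions just mentioned (a borrow whose lifetime exceeds the borrowed value, and a move out of an `&T` referent) are easy to reproduce. A minimal sketch of both, with hypothetical function names used only for illustration:

```rust
fn dangling<'a>() -> &'a i32 {
    let x = 1;
    &x // error: `x` does not live long enough
}

fn move_out_of_ref(r: &String) -> String {
    *r // error: cannot move out of `*r`, which is behind a `&` reference
}
```

Both functions are rejected by the borrow checker for exactly these reasons.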
- -It then uses the dataflow module to propagate which of those borrows -may be in scope at each point in the procedure. A loan is considered -to come into scope at the expression that caused it and to go out of -scope when the lifetime of the resulting reference expires. - -Once the in-scope loans are known for each point in the program, the -borrow checker walks the IR again in a second pass called -`check_loans`. This pass examines each statement and makes sure that -it is safe with respect to the in-scope loans. - -# Formal model - -Throughout the docs we'll consider a simple subset of Rust in which -you can only borrow from places, defined like so: - -```text -P = x | P.f | *P -``` - -Here `x` represents some variable, `P.f` is a field reference, -and `*P` is a pointer dereference. There is no auto-deref or other -niceties. This means that if you have a type like: - -```rust -struct S { f: i32 } -``` - -and a variable `a: Box`, then the rust expression `a.f` would correspond -to an `P` of `(*a).f`. - -Here is the formal grammar for the types we'll consider: - -```text -TY = i32 | bool | S<'LT...> | Box | & 'LT MQ TY -MQ = mut | imm -``` - -Most of these types should be pretty self explanatory. Here `S` is a -struct name and we assume structs are declared like so: - -```text -SD = struct S<'LT...> { (f: TY)... } -``` - -# Borrowing and loans - -## An intuitive explanation - -### Issuing loans - -Now, imagine we had a program like this: - -```rust -struct Foo { f: i32, g: i32 } -... -'a: { - let mut x: Box = ...; - let y = &mut (*x).f; - x = ...; -} -``` - -This is of course dangerous because mutating `x` will free the old -value and hence invalidate `y`. The borrow checker aims to prevent -this sort of thing. - -#### Loans and restrictions - -The way the borrow checker works is that it analyzes each borrow -expression (in our simple model, that's stuff like `&P`, though in -real life there are a few other cases to consider). For each borrow -expression, it computes a `Loan`, which is a data structure that -records (1) the value being borrowed, (2) the mutability and scope of -the borrow, and (3) a set of restrictions. In the code, `Loan` is a -struct defined in `middle::borrowck`. Formally, we define `LOAN` as -follows: - -```text -LOAN = (P, LT, MQ, RESTRICTION*) -RESTRICTION = (P, ACTION*) -ACTION = MUTATE | CLAIM | FREEZE -``` - -Here the `LOAN` tuple defines the place `P` being borrowed; the -lifetime `LT` of that borrow; the mutability `MQ` of the borrow; and a -list of restrictions. The restrictions indicate actions which, if -taken, could invalidate the loan and lead to type safety violations. - -Each `RESTRICTION` is a pair of a restrictive place `P` (which will -either be the path that was borrowed or some prefix of the path that -was borrowed) and a set of restricted actions. There are three kinds -of actions that may be restricted for the path `P`: - -- `MUTATE` means that `P` cannot be assigned to; -- `CLAIM` means that the `P` cannot be borrowed mutably; -- `FREEZE` means that the `P` cannot be borrowed immutably; - -Finally, it is never possible to move from a place that appears in a -restriction. This implies that the "empty restriction" `(P, [])`, -which contains an empty set of actions, still has a purpose---it -prevents moves from `P`. I chose not to make `MOVE` a fourth kind of -action because that would imply that sometimes moves are permitted -from restricted values, which is not the case. 
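The point that even an empty restriction set still forbids moves can be seen in a small, hypothetical example: borrowing a field issues restrictions on the field and on its owning prefixes, so the owner can no longer be moved while the loan is live.

```rust
struct Foo { f: String, g: String }

fn demo(x: Foo) {
    let r = &x.f;      // loan of `x.f`; restrictions are issued for `x.f` and for `x`
    let moved = x;     // error: cannot move out of `x` because it is borrowed
    println!("{}", r); // the loan is still live here
}
```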
- -#### Example - -To give you a better feeling for what kind of restrictions derived -from a loan, let's look at the loan `L` that would be issued as a -result of the borrow `&mut (*x).f` in the example above: - -```text -L = ((*x).f, 'a, mut, RS) where - RS = [((*x).f, [MUTATE, CLAIM, FREEZE]), - (*x, [MUTATE, CLAIM, FREEZE]), - (x, [MUTATE, CLAIM, FREEZE])] -``` - -The loan states that the expression `(*x).f` has been loaned as -mutable for the lifetime `'a`. Because the loan is mutable, that means -that the value `(*x).f` may be mutated via the newly created reference -(and *only* via that pointer). This is reflected in the -restrictions `RS` that accompany the loan. - -The first restriction `((*x).f, [MUTATE, CLAIM, FREEZE])` states that -the lender may not mutate, freeze, nor alias `(*x).f`. Mutation is -illegal because `(*x).f` is only supposed to be mutated via the new -reference, not by mutating the original path `(*x).f`. Freezing is -illegal because the path now has an `&mut` alias; so even if we the -lender were to consider `(*x).f` to be immutable, it might be mutated -via this alias. They will be enforced for the lifetime `'a` of the -loan. After the loan expires, the restrictions no longer apply. - -The second restriction on `*x` is interesting because it does not -apply to the path that was lent (`(*x).f`) but rather to a prefix of -the borrowed path. This is due to the rules of inherited mutability: -if the user were to assign to (or freeze) `*x`, they would indirectly -overwrite (or freeze) `(*x).f`, and thus invalidate the reference -that was created. In general it holds that when a path is -lent, restrictions are issued for all the owning prefixes of that -path. In this case, the path `*x` owns the path `(*x).f` and, -because `x` has ownership, the path `x` owns the path `*x`. -Therefore, borrowing `(*x).f` yields restrictions on both -`*x` and `x`. - -### Checking for illegal assignments, moves, and reborrows - -Once we have computed the loans introduced by each borrow, the borrow -checker uses a data flow propagation to compute the full set of loans -in scope at each expression and then uses that set to decide whether -that expression is legal. Remember that the scope of loan is defined -by its lifetime LT. We sometimes say that a loan which is in-scope at -a particular point is an "outstanding loan", and the set of -restrictions included in those loans as the "outstanding -restrictions". - -The kinds of expressions which in-scope loans can render illegal are: -- *assignments* (`lv = v`): illegal if there is an in-scope restriction - against mutating `lv`; -- *moves*: illegal if there is any in-scope restriction on `lv` at all; -- *mutable borrows* (`&mut lv`): illegal there is an in-scope restriction - against claiming `lv`; -- *immutable borrows* (`&lv`): illegal there is an in-scope restriction - against freezing `lv`. - -## Formal rules - -Now that we hopefully have some kind of intuitive feeling for how the -borrow checker works, let's look a bit more closely now at the precise -conditions that it uses. - -I will present the rules in a modified form of standard inference -rules, which looks as follows: - -```text -PREDICATE(X, Y, Z) // Rule-Name - Condition 1 - Condition 2 - Condition 3 -``` - -The initial line states the predicate that is to be satisfied. The -indented lines indicate the conditions that must be met for the -predicate to be satisfied. 
The right-justified comment states the name -of this rule: there are comments in the borrowck source referencing -these names, so that you can cross reference to find the actual code -that corresponds to the formal rule. - -### Invariants - -I want to collect, at a high-level, the invariants the borrow checker -maintains. I will give them names and refer to them throughout the -text. Together these invariants are crucial for the overall soundness -of the system. - -**Mutability requires uniqueness.** To mutate a path - -**Unique mutability.** There is only one *usable* mutable path to any -given memory at any given time. This implies that when claiming memory -with an expression like `p = &mut x`, the compiler must guarantee that -the borrowed value `x` can no longer be mutated so long as `p` is -live. (This is done via restrictions, read on.) - -**.** - - -### The `gather_loans` pass - -We start with the `gather_loans` pass, which walks the AST looking for -borrows. For each borrow, there are three bits of information: the -place `P` being borrowed and the mutability `MQ` and lifetime `LT` -of the resulting pointer. Given those, `gather_loans` applies four -validity tests: - -1. `MUTABILITY(P, MQ)`: The mutability of the reference is -compatible with the mutability of `P` (i.e., not borrowing immutable -data as mutable). - -2. `ALIASABLE(P, MQ)`: The aliasability of the reference is -compatible with the aliasability of `P`. The goal is to prevent -`&mut` borrows of aliasability data. - -3. `LIFETIME(P, LT, MQ)`: The lifetime of the borrow does not exceed -the lifetime of the value being borrowed. - -4. `RESTRICTIONS(P, LT, ACTIONS) = RS`: This pass checks and computes the -restrictions to maintain memory safety. These are the restrictions -that will go into the final loan. We'll discuss in more detail below. - -## Checking mutability - -Checking mutability is fairly straightforward. We just want to prevent -immutable data from being borrowed as mutable. Note that it is ok to borrow -mutable data as immutable, since that is simply a freeze. The judgement -`MUTABILITY(P, MQ)` means the mutability of `P` is compatible with a borrow -of mutability `MQ`. The Rust code corresponding to this predicate is the -function `check_mutability` in `middle::borrowck::gather_loans`. - -### Checking mutability of variables - -*Code pointer:* Function `check_mutability()` in `gather_loans/mod.rs`, -but also the code in `mem_categorization`. 
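As a concrete illustration of the `MUTABILITY(P, MQ)` judgement (the names below are only for illustration): borrowing immutable data as mutable is rejected, while borrowing mutable data as immutable is simply a freeze and is accepted.

```rust
fn demo() {
    let imm = 1;
    let _m = &mut imm; // error: cannot borrow `imm` as mutable, as it is not declared as mutable

    let mut x = 2;
    let _f = &x;       // ok: borrowing mutable data immutably is just a freeze
}
```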
- -Let's begin with the rules for variables, which state that if a -variable is declared as mutable, it may be borrowed any which way, but -otherwise the variable must be borrowed as immutable: - -```text -MUTABILITY(X, MQ) // M-Var-Mut - DECL(X) = mut - -MUTABILITY(X, imm) // M-Var-Imm - DECL(X) = imm -``` - -### Checking mutability of owned content - -Fields and boxes inherit their mutability from -their base expressions, so both of their rules basically -delegate the check to the base expression `P`: - -```text -MUTABILITY(P.f, MQ) // M-Field - MUTABILITY(P, MQ) - -MUTABILITY(*P, MQ) // M-Deref-Unique - TYPE(P) = Box - MUTABILITY(P, MQ) -``` - -### Checking mutability of immutable pointer types - -Immutable pointer types like `&T` can only -be borrowed if MQ is immutable: - -```text -MUTABILITY(*P, imm) // M-Deref-Borrowed-Imm - TYPE(P) = &Ty -``` - -### Checking mutability of mutable pointer types - -`&mut T` can be frozen, so it is acceptable to borrow it as either imm or mut: - -```text -MUTABILITY(*P, MQ) // M-Deref-Borrowed-Mut - TYPE(P) = &mut Ty -``` - -## Checking aliasability - -The goal of the aliasability check is to ensure that we never permit `&mut` -borrows of aliasable data. The judgement `ALIASABLE(P, MQ)` means the -aliasability of `P` is compatible with a borrow of mutability `MQ`. The Rust -code corresponding to this predicate is the function `check_aliasability()` in -`middle::borrowck::gather_loans`. - -### Checking aliasability of variables - -Local variables are never aliasable as they are accessible only within -the stack frame. - -```text - ALIASABLE(X, MQ) // M-Var-Mut -``` - -### Checking aliasable of owned content - -Owned content is aliasable if it is found in an aliasable location: - -```text -ALIASABLE(P.f, MQ) // M-Field - ALIASABLE(P, MQ) - -ALIASABLE(*P, MQ) // M-Deref-Unique - ALIASABLE(P, MQ) -``` - -### Checking aliasability of immutable pointer types - -Immutable pointer types like `&T` are aliasable, and hence can only be -borrowed immutably: - -```text -ALIASABLE(*P, imm) // M-Deref-Borrowed-Imm - TYPE(P) = &Ty -``` - -### Checking aliasability of mutable pointer types - -`&mut T` can be frozen, so it is acceptable to borrow it as either imm or mut: - -```text -ALIASABLE(*P, MQ) // M-Deref-Borrowed-Mut - TYPE(P) = &mut Ty -``` - -## Checking lifetime - -These rules aim to ensure that no data is borrowed for a scope that exceeds -its lifetime. These two computations wind up being intimately related. -Formally, we define a predicate `LIFETIME(P, LT, MQ)`, which states that -"the place `P` can be safely borrowed for the lifetime `LT` with mutability -`MQ`". The Rust code corresponding to this predicate is the module -`middle::borrowck::gather_loans::lifetime`. - -### Checking lifetime of variables - -The rule for variables states that a variable can only be borrowed a -lifetime `LT` that is a subregion of the variable's scope: - -```text -LIFETIME(X, LT, MQ) // L-Local - LT <= block where X is declared -``` - -### Checking lifetime for owned content - -The lifetime of a field or box is the same as the lifetime -of its owner: - -```text -LIFETIME(P.f, LT, MQ) // L-Field - LIFETIME(P, LT, MQ) - -LIFETIME(*P, LT, MQ) // L-Deref-Send - TYPE(P) = Box - LIFETIME(P, LT, MQ) -``` - -### Checking lifetime for derefs of references - -References have a lifetime `LT'` associated with them. The -data they point at has been guaranteed to be valid for at least this -lifetime. 
Therefore, the borrow is valid so long as the lifetime `LT` -of the borrow is shorter than the lifetime `LT'` of the pointer -itself: - -```text -LIFETIME(*P, LT, MQ) // L-Deref-Borrowed - TYPE(P) = <' Ty OR <' mut Ty - LT <= LT' -``` - -## Computing the restrictions - -The final rules govern the computation of *restrictions*, meaning that -we compute the set of actions that will be illegal for the life of the -loan. The predicate is written `RESTRICTIONS(P, LT, ACTIONS) = -RESTRICTION*`, which can be read "in order to prevent `ACTIONS` from -occurring on `P`, the restrictions `RESTRICTION*` must be respected -for the lifetime of the loan". - -Note that there is an initial set of restrictions: these restrictions -are computed based on the kind of borrow: - -```text -&mut P => RESTRICTIONS(P, LT, MUTATE|CLAIM|FREEZE) -&P => RESTRICTIONS(P, LT, MUTATE|CLAIM) -``` - -The reasoning here is that a mutable borrow must be the only writer, -therefore it prevents other writes (`MUTATE`), mutable borrows -(`CLAIM`), and immutable borrows (`FREEZE`). An immutable borrow -permits other immutable borrows but forbids writes and mutable borrows. - -### Restrictions for loans of a local variable - -The simplest case is a borrow of a local variable `X`: - -```text -RESTRICTIONS(X, LT, ACTIONS) = (X, ACTIONS) // R-Variable -``` - -In such cases we just record the actions that are not permitted. - -### Restrictions for loans of fields - -Restricting a field is the same as restricting the owner of that -field: - -```text -RESTRICTIONS(P.f, LT, ACTIONS) = RS, (P.f, ACTIONS) // R-Field - RESTRICTIONS(P, LT, ACTIONS) = RS -``` - -The reasoning here is as follows. If the field must not be mutated, -then you must not mutate the owner of the field either, since that -would indirectly modify the field. Similarly, if the field cannot be -frozen or aliased, we cannot allow the owner to be frozen or aliased, -since doing so indirectly freezes/aliases the field. This is the -origin of inherited mutability. - -### Restrictions for loans of owned referents - -Because the mutability of owned referents is inherited, restricting an -owned referent is similar to restricting a field, in that it implies -restrictions on the pointer. However, boxes have an important -twist: if the owner `P` is mutated, that causes the owned referent -`*P` to be freed! So whenever an owned referent `*P` is borrowed, we -must prevent the box `P` from being mutated, which means -that we always add `MUTATE` and `CLAIM` to the restriction set imposed -on `P`: - -```text -RESTRICTIONS(*P, LT, ACTIONS) = RS, (*P, ACTIONS) // R-Deref-Send-Pointer - TYPE(P) = Box - RESTRICTIONS(P, LT, ACTIONS|MUTATE|CLAIM) = RS -``` - -### Restrictions for loans of immutable borrowed referents - -Immutable borrowed referents are freely aliasable, meaning that -the compiler does not prevent you from copying the pointer. This -implies that issuing restrictions is useless. We might prevent the -user from acting on `*P` itself, but there could be another path -`*P1` that refers to the exact same memory, and we would not be -restricting that path. 
Therefore, the rule for `&Ty` pointers -always returns an empty set of restrictions, and it only permits -restricting `MUTATE` and `CLAIM` actions: - -```text -RESTRICTIONS(*P, LT, ACTIONS) = [] // R-Deref-Imm-Borrowed - TYPE(P) = <' Ty - LT <= LT' // (1) - ACTIONS subset of [MUTATE, CLAIM] -``` - -The reason that we can restrict `MUTATE` and `CLAIM` actions even -without a restrictions list is that it is never legal to mutate nor to -borrow mutably the contents of a `&Ty` pointer. In other words, -those restrictions are already inherent in the type. - -Clause (1) in the rule for `&Ty` deserves mention. Here I -specify that the lifetime of the loan must be less than the lifetime -of the `&Ty` pointer. In simple cases, this clause is redundant, since -the `LIFETIME()` function will already enforce the required rule: - -```rust -fn foo(point: &'a Point) -> &'static i32 { - &point.x // Error -} -``` - -The above example fails to compile both because of clause (1) above -but also by the basic `LIFETIME()` check. However, in more advanced -examples involving multiple nested pointers, clause (1) is needed: - -```rust -fn foo(point: &'a &'b mut Point) -> &'b i32 { - &point.x // Error -} -``` - -The `LIFETIME` rule here would accept `'b` because, in fact, the -*memory is* guaranteed to remain valid (i.e., not be freed) for the -lifetime `'b`, since the `&mut` pointer is valid for `'b`. However, we -are returning an immutable reference, so we need the memory to be both -valid and immutable. Even though `point.x` is referenced by an `&mut` -pointer, it can still be considered immutable so long as that `&mut` -pointer is found in an aliased location. That means the memory is -guaranteed to be *immutable* for the lifetime of the `&` pointer, -which is only `'a`, not `'b`. Hence this example yields an error. - -As a final twist, consider the case of two nested *immutable* -pointers, rather than a mutable pointer within an immutable one: - -```rust -fn foo(point: &'a &'b Point) -> &'b i32 { - &point.x // OK -} -``` - -This function is legal. The reason for this is that the inner pointer -(`*point : &'b Point`) is enough to guarantee the memory is immutable -and valid for the lifetime `'b`. This is reflected in -`RESTRICTIONS()` by the fact that we do not recurse (i.e., we impose -no restrictions on `P`, which in this particular case is the pointer -`point : &'a &'b Point`). - -#### Why both `LIFETIME()` and `RESTRICTIONS()`? - -Given the previous text, it might seem that `LIFETIME` and -`RESTRICTIONS` should be folded together into one check, but there is -a reason that they are separated. They answer separate concerns. -The rules pertaining to `LIFETIME` exist to ensure that we don't -create a borrowed pointer that outlives the memory it points at. So -`LIFETIME` prevents a function like this: - -```rust -fn get_1<'a>() -> &'a i32 { - let x = 1; - &x -} -``` - -Here we would be returning a pointer into the stack. Clearly bad. - -However, the `RESTRICTIONS` rules are more concerned with how memory -is used. The example above doesn't generate an error according to -`RESTRICTIONS` because, for local variables, we don't require that the -loan lifetime be a subset of the local variable lifetime. The idea -here is that we *can* guarantee that `x` is not (e.g.) mutated for the -lifetime `'a`, even though `'a` exceeds the function body and thus -involves unknown code in the caller -- after all, `x` ceases to exist -after we return and hence the remaining code in `'a` cannot possibly -mutate it. 
This distinction is important for type checking functions -like this one: - -```rust -fn inc_and_get<'a>(p: &'a mut Point) -> &'a i32 { - p.x += 1; - &p.x -} -``` - -In this case, we take in a `&mut` and return a frozen borrowed pointer -with the same lifetime. So long as the lifetime of the returned value -doesn't exceed the lifetime of the `&mut` we receive as input, this is -fine, though it may seem surprising at first (it surprised me when I -first worked it through). After all, we're guaranteeing that `*p` -won't be mutated for the lifetime `'a`, even though we can't "see" the -entirety of the code during that lifetime, since some of it occurs in -our caller. But we *do* know that nobody can mutate `*p` except -through `p`. So if we don't mutate `*p` and we don't return `p`, then -we know that the right to mutate `*p` has been lost to our caller -- -in terms of capability, the caller passed in the ability to mutate -`*p`, and we never gave it back. (Note that we can't return `p` while -`*p` is borrowed since that would be a move of `p`, as `&mut` pointers -are affine.) - -### Restrictions for loans of mutable borrowed referents - -Mutable borrowed pointers are guaranteed to be the only way to mutate -their referent. This permits us to take greater license with them; for -example, the referent can be frozen simply be ensuring that we do not -use the original pointer to perform mutate. Similarly, we can allow -the referent to be claimed, so long as the original pointer is unused -while the new claimant is live. - -The rule for mutable borrowed pointers is as follows: - -```text -RESTRICTIONS(*P, LT, ACTIONS) = RS, (*P, ACTIONS) // R-Deref-Mut-Borrowed - TYPE(P) = <' mut Ty - LT <= LT' // (1) - RESTRICTIONS(P, LT, ACTIONS) = RS // (2) -``` - -Let's examine the two numbered clauses: - -Clause (1) specifies that the lifetime of the loan (`LT`) cannot -exceed the lifetime of the `&mut` pointer (`LT'`). The reason for this -is that the `&mut` pointer is guaranteed to be the only legal way to -mutate its referent -- but only for the lifetime `LT'`. After that -lifetime, the loan on the referent expires and hence the data may be -modified by its owner again. This implies that we are only able to -guarantee that the referent will not be modified or aliased for a -maximum of `LT'`. - -Here is a concrete example of a bug this rule prevents: - -```rust -// Test region-reborrow-from-shorter-mut-ref.rs: -fn copy_borrowed_ptr<'a,'b,T>(x: &'a mut &'b mut T) -> &'b mut T { - &mut **p // ERROR due to clause (1) -} -fn main() { - let mut x = 1; - let mut y = &mut x; // <-'b-----------------------------+ - // +-'a--------------------+ | - // v v | - let z = copy_borrowed_ptr(&mut y); // y is lent | - *y += 1; // Here y==z, so both should not be usable... | - *z += 1; // ...and yet they would be, but for clause 1. | -} // <------------------------------------------------------+ -``` - -Clause (2) propagates the restrictions on the referent to the pointer -itself. This is the same as with an box, though the -reasoning is mildly different. The basic goal in all cases is to -prevent the user from establishing another route to the same data. To -see what I mean, let's examine various cases of what can go wrong and -show how it is prevented. - -**Example danger 1: Moving the base pointer.** One of the simplest -ways to violate the rules is to move the base pointer to a new name -and access it via that new name, thus bypassing the restrictions on -the old name. 
Here is an example: - -```rust -// src/test/compile-fail/borrowck-move-mut-base-ptr.rs -fn foo(t0: &mut i32) { - let p: &i32 = &*t0; // Freezes `*t0` - let t1 = t0; //~ ERROR cannot move out of `t0` - *t1 = 22; // OK, not a write through `*t0` -} -``` - -Remember that `&mut` pointers are linear, and hence `let t1 = t0` is a -move of `t0` -- or would be, if it were legal. Instead, we get an -error, because clause (2) imposes restrictions on `P` (`t0`, here), -and any restrictions on a path make it impossible to move from that -path. - -**Example danger 2: Claiming the base pointer.** Another possible -danger is to mutably borrow the base path. This can lead to two bad -scenarios. The most obvious is that the mutable borrow itself becomes -another path to access the same data, as shown here: - -```rust -// src/test/compile-fail/borrowck-mut-borrow-of-mut-base-ptr.rs -fn foo<'a>(mut t0: &'a mut i32, - mut t1: &'a mut i32) { - let p: &i32 = &*t0; // Freezes `*t0` - let mut t2 = &mut t0; //~ ERROR cannot borrow `t0` - **t2 += 1; // Mutates `*t0` -} -``` - -In this example, `**t2` is the same memory as `*t0`. Because `t2` is -an `&mut` pointer, `**t2` is a unique path and hence it would be -possible to mutate `**t2` even though that memory was supposed to be -frozen by the creation of `p`. However, an error is reported -- the -reason is that the freeze `&*t0` will restrict claims and mutation -against `*t0` which, by clause 2, in turn prevents claims and mutation -of `t0`. Hence the claim `&mut t0` is illegal. - -Another danger with an `&mut` pointer is that we could swap the `t0` -value away to create a new path: - -```rust -// src/test/compile-fail/borrowck-swap-mut-base-ptr.rs -fn foo<'a>(mut t0: &'a mut i32, - mut t1: &'a mut i32) { - let p: &i32 = &*t0; // Freezes `*t0` - swap(&mut t0, &mut t1); //~ ERROR cannot borrow `t0` - *t1 = 22; -} -``` - -This is illegal for the same reason as above. Note that if we added -back a swap operator -- as we used to have -- we would want to be very -careful to ensure this example is still illegal. - -**Example danger 3: Freeze the base pointer.** In the case where the -referent is claimed, even freezing the base pointer can be dangerous, -as shown in the following example: - -```rust -// src/test/compile-fail/borrowck-borrow-of-mut-base-ptr.rs -fn foo<'a>(mut t0: &'a mut i32, - mut t1: &'a mut i32) { - let p: &mut i32 = &mut *t0; // Claims `*t0` - let mut t2 = &t0; //~ ERROR cannot borrow `t0` - let q: &i32 = &*t2; // Freezes `*t0` but not through `*p` - *p += 1; // violates type of `*q` -} -``` - -Here the problem is that `*t0` is claimed by `p`, and hence `p` wants -to be the controlling pointer through which mutation or freezes occur. -But `t2` would -- if it were legal -- have the type `& &mut i32`, and -hence would be a mutable pointer in an aliasable location, which is -considered frozen (since no one can write to `**t2` as it is not a -unique path). Therefore, we could reasonably create a frozen `&i32` -pointer pointing at `*t0` that coexists with the mutable pointer `p`, -which is clearly unsound. - -However, it is not always unsafe to freeze the base pointer. In -particular, if the referent is frozen, there is no harm in it: - -```rust -// src/test/ui/borrowck-borrow-of-mut-base-ptr-safe.rs -fn foo<'a>(mut t0: &'a mut i32, - mut t1: &'a mut i32) { - let p: &i32 = &*t0; // Freezes `*t0` - let mut t2 = &t0; - let q: &i32 = &*t2; // Freezes `*t0`, but that's ok... - let r: &i32 = &*t0; // ...after all, could do same thing directly. 
-} -``` - -In this case, creating the alias `t2` of `t0` is safe because the only -thing `t2` can be used for is to further freeze `*t0`, which is -already frozen. In particular, we cannot assign to `*t0` through the -new alias `t2`, as demonstrated in this test case: - -```rust -// src/test/ui/borrowck-borrow-mut-base-ptr-in-aliasable-loc.rs -fn foo(t0: & &mut i32) { - let t1 = t0; - let p: &i32 = &**t0; - **t1 = 22; //~ ERROR cannot assign -} -``` - -This distinction is reflected in the rules. When doing an `&mut` -borrow -- as in the first example -- the set `ACTIONS` will be -`CLAIM|MUTATE|FREEZE`, because claiming the referent implies that it -cannot be claimed, mutated, or frozen by anyone else. These -restrictions are propagated back to the base path and hence the base -path is considered unfreezable. - -In contrast, when the referent is merely frozen -- as in the second -example -- the set `ACTIONS` will be `CLAIM|MUTATE`, because freezing -the referent implies that it cannot be claimed or mutated but permits -others to freeze. Hence when these restrictions are propagated back to -the base path, it will still be considered freezable. - - - -**FIXME [RFC 1751](https://github.com/rust-lang/rfcs/issues/1751) -Restrictions against mutating the base pointer.** -When an `&mut` pointer is frozen or claimed, we currently pass along the -restriction against MUTATE to the base pointer. I do not believe this -restriction is needed. It dates from the days when we had a way to -mutate that preserved the value being mutated (i.e., swap). Nowadays -the only form of mutation is assignment, which destroys the pointer -being mutated -- therefore, a mutation cannot create a new path to the -same data. Rather, it removes an existing path. This implies that not -only can we permit mutation, we can have mutation kill restrictions in -the dataflow sense. - -**WARNING:** We do not currently have `const` borrows in the -language. If they are added back in, we must ensure that they are -consistent with all of these examples. The crucial question will be -what sorts of actions are permitted with a `&const &mut` pointer. I -would suggest that an `&mut` referent found in an `&const` location be -prohibited from both freezes and claims. This would avoid the need to -prevent `const` borrows of the base pointer when the referent is -borrowed. - -[ Previous revisions of this document discussed `&const` in more detail. -See the revision history. ] - -# Moves and initialization - -The borrow checker is also in charge of ensuring that: - -- all memory which is accessed is initialized -- immutable local variables are assigned at most once. - -These are two separate dataflow analyses built on the same -framework. Let's look at checking that memory is initialized first; -the checking of immutable local variable assignments works in a very -similar way. - -To track the initialization of memory, we actually track all the -points in the program that *create uninitialized memory*, meaning -moves and the declaration of uninitialized variables. For each of -these points, we create a bit in the dataflow set. Assignments to a -variable `x` or path `a.b.c` kill the move/uninitialization bits for -those paths and any subpaths (e.g., `x`, `x.y`, `a.b.c`, `*a.b.c`). -Bits are unioned when two control-flow paths join. Thus, the -presence of a bit indicates that the move may have occurred without an -intervening assignment to the same memory. 
At each use of a variable, -we examine the bits in scope, and check that none of them are -moves/uninitializations of the variable that is being used. - -Let's look at a simple example: - -```rust -fn foo(a: Box) { - let b: Box; // Gen bit 0. - - if cond { // Bits: 0 - use(&*a); - b = a; // Gen bit 1, kill bit 0. - use(&*b); - } else { - // Bits: 0 - } - // Bits: 0,1 - use(&*a); // Error. - use(&*b); // Error. -} - -fn use(a: &i32) { } -``` - -In this example, the variable `b` is created uninitialized. In one -branch of an `if`, we then move the variable `a` into `b`. Once we -exit the `if`, therefore, it is an error to use `a` or `b` since both -are only conditionally initialized. I have annotated the dataflow -state using comments. There are two dataflow bits, with bit 0 -corresponding to the creation of `b` without an initializer, and bit 1 -corresponding to the move of `a`. The assignment `b = a` both -generates bit 1, because it is a move of `a`, and kills bit 0, because -`b` is now initialized. On the else branch, though, `b` is never -initialized, and so bit 0 remains untouched. When the two flows of -control join, we union the bits from both sides, resulting in both -bits 0 and 1 being set. Thus any attempt to use `a` uncovers the bit 1 -from the "then" branch, showing that `a` may be moved, and any attempt -to use `b` uncovers bit 0, from the "else" branch, showing that `b` -may not be initialized. - -## Initialization of immutable variables - -Initialization of immutable variables works in a very similar way, -except that: - -1. we generate bits for each assignment to a variable; -2. the bits are never killed except when the variable goes out of scope. - -Thus the presence of an assignment bit indicates that the assignment -may have occurred. Note that assignments are only killed when the -variable goes out of scope, as it is not relevant whether or not there -has been a move in the meantime. Using these bits, we can declare that -an assignment to an immutable variable is legal iff there is no other -assignment bit to that same variable in scope. - -## Why is the design made this way? - -It may seem surprising that we assign dataflow bits to *each move* -rather than *each path being moved*. This is somewhat less efficient, -since on each use, we must iterate through all moves and check whether -any of them correspond to the path in question. Similar concerns apply -to the analysis for double assignments to immutable variables. The -main reason to do it this way is that it allows us to print better -error messages, because when a use occurs, we can print out the -precise move that may be in scope, rather than simply having to say -"the variable may not be initialized". - -## Data structures used in the move analysis - -The move analysis maintains several data structures that enable it to -cross-reference moves and assignments to determine when they may be -moving/assigning the same memory. These are all collected into the -`MoveData` and `FlowedMoveData` structs. The former represents the set -of move paths, moves, and assignments, and the latter adds in the -results of a dataflow computation. - -### Move paths - -The `MovePath` tree tracks every path that is moved or assigned to. -These paths have the same form as the `LoanPath` data structure, which -in turn is the "real world version of the places `P` that we -introduced earlier. The difference between a `MovePath` and a `LoanPath` -is that move paths are: - -1. 
Canonicalized, so that we have exactly one copy of each, and - we can refer to move paths by index; -2. Cross-referenced with other paths into a tree, so that given a move - path we can efficiently find all parent move paths and all - extensions (e.g., given the `a.b` move path, we can easily find the - move path `a` and also the move paths `a.b.c`) -3. Cross-referenced with moves and assignments, so that we can - easily find all moves and assignments to a given path. - -The mechanism that we use is to create a `MovePath` record for each -move path. These are arranged in an array and are referenced using -`MovePathIndex` values, which are newtype'd indices. The `MovePath` -structs are arranged into a tree, representing using the standard -Knuth representation where each node has a child 'pointer' and a "next -sibling" 'pointer'. In addition, each `MovePath` has a parent -'pointer'. In this case, the 'pointers' are just `MovePathIndex` -values. - -In this way, if we want to find all base paths of a given move path, -we can just iterate up the parent pointers (see `each_base_path()` in -the `move_data` module). If we want to find all extensions, we can -iterate through the subtree (see `each_extending_path()`). - -### Moves and assignments - -There are structs to represent moves (`Move`) and assignments -(`Assignment`), and these are also placed into arrays and referenced -by index. All moves of a particular path are arranged into a linked -lists, beginning with `MovePath.first_move` and continuing through -`Move.next_move`. - -We distinguish between "var" assignments, which are assignments to a -variable like `x = foo`, and "path" assignments (`x.f = foo`). This -is because we need to assign dataflows to the former, but not the -latter, so as to check for double initialization of immutable -variables. - -### Gathering and checking moves - -Like loans, we distinguish two phases. The first, gathering, is where -we uncover all the moves and assignments. As with loans, we do some -basic sanity checking in this phase, so we'll report errors if you -attempt to move out of a borrowed pointer etc. Then we do the dataflow -(see `FlowedMoveData::new`). Finally, in the `check_loans.rs` code, we -walk back over, identify all uses, assignments, and captures, and -check that they are legal given the set of dataflow bits we have -computed for that program point. - -# Drop flags and structural fragments - -In addition to the job of enforcing memory safety, the borrow checker -code is also responsible for identifying the *structural fragments* of -data in the function, to support out-of-band dynamic drop flags -allocated on the stack. (For background, see [RFC PR #320].) - -[RFC PR #320]: https://github.com/rust-lang/rfcs/pull/320 - -Semantically, each piece of data that has a destructor may need a -boolean flag to indicate whether or not its destructor has been run -yet. However, in many cases there is no need to actually maintain such -a flag: It can be apparent from the code itself that a given path is -always initialized (or always deinitialized) when control reaches the -end of its owner's scope, and thus we can unconditionally emit (or -not) the destructor invocation for that path. - -A simple example of this is the following: - -```rust -struct D { p: i32 } -impl D { fn new(x: i32) -> D { ... } -impl Drop for D { ... 
} - -fn foo(a: D, b: D, t: || -> bool) { - let c: D; - let d: D; - if t() { c = b; } -} -``` - -At the end of the body of `foo`, the compiler knows that `a` is -initialized, introducing a drop obligation (deallocating the boxed -integer) for the end of `a`'s scope that is run unconditionally. -Likewise the compiler knows that `d` is not initialized, and thus it -leave out the drop code for `d`. - -The compiler cannot statically know the drop-state of `b` nor `c` at -the end of their scope, since that depends on the value of -`t`. Therefore, we need to insert boolean flags to track whether we -need to drop `b` and `c`. - -However, the matter is not as simple as just mapping local variables -to their corresponding drop flags when necessary. In particular, in -addition to being able to move data out of local variables, Rust -allows one to move values in and out of structured data. - -Consider the following: - -```rust -struct S { x: D, y: D, z: D } - -fn foo(a: S, mut b: S, t: || -> bool) { - let mut c: S; - let d: S; - let e: S = a.clone(); - if t() { - c = b; - b.x = e.y; - } - if t() { c.y = D::new(4); } -} -``` - -As before, the drop obligations of `a` and `d` can be statically -determined, and again the state of `b` and `c` depend on dynamic -state. But additionally, the dynamic drop obligations introduced by -`b` and `c` are not just per-local boolean flags. For example, if the -first call to `t` returns `false` and the second call `true`, then at -the end of their scope, `b` will be completely initialized, but only -`c.y` in `c` will be initialized. If both calls to `t` return `true`, -then at the end of their scope, `c` will be completely initialized, -but only `b.x` will be initialized in `b`, and only `e.x` and `e.z` -will be initialized in `e`. - -Note that we need to cover the `z` field in each case in some way, -since it may (or may not) need to be dropped, even though `z` is never -directly mentioned in the body of the `foo` function. We call a path -like `b.z` a *fragment sibling* of `b.x`, since the field `z` comes -from the same structure `S` that declared the field `x` in `b.x`. - -In general we need to maintain boolean flags that match the -`S`-structure of both `b` and `c`. In addition, we need to consult -such a flag when doing an assignment (such as `c.y = D::new(4);` -above), in order to know whether or not there is a previous value that -needs to be dropped before we do the assignment. - -So for any given function, we need to determine what flags are needed -to track its drop obligations. Our strategy for determining the set of -flags is to represent the fragmentation of the structure explicitly: -by starting initially from the paths that are explicitly mentioned in -moves and assignments (such as `b.x` and `c.y` above), and then -traversing the structure of the path's type to identify leftover -*unmoved fragments*: assigning into `c.y` means that `c.x` and `c.z` -are leftover unmoved fragments. Each fragment represents a drop -obligation that may need to be tracked. Paths that are only moved or -assigned in their entirety (like `a` and `d`) are treated as a single -drop obligation. - -The fragment construction process works by piggy-backing on the -existing `move_data` module. We already have callbacks that visit each -direct move and assignment; these form the basis for the sets of -moved_leaf_paths and assigned_leaf_paths. From these leaves, we can -walk up their parent chain to identify all of their parent paths. 
-We need to identify the parents because of cases like the following: - -```rust -struct Pair{ x: X, y: Y } -fn foo(dd_d_d: Pair, D>, D>) { - other_function(dd_d_d.x.y); -} -``` - -In this code, the move of the path `dd_d.x.y` leaves behind not only -the fragment drop-obligation `dd_d.x.x` but also `dd_d.y` as well. - -Once we have identified the directly-referenced leaves and their -parents, we compute the left-over fragments, in the function -`fragments::add_fragment_siblings`. As of this writing this works by -looking at each directly-moved or assigned path P, and blindly -gathering all sibling fields of P (as well as siblings for the parents -of P, etc). After accumulating all such siblings, we filter out the -entries added as siblings of P that turned out to be -directly-referenced paths (or parents of directly referenced paths) -themselves, thus leaving the never-referenced "left-overs" as the only -thing left from the gathering step. - -## Array structural fragments - -A special case of the structural fragments discussed above are -the elements of an array that has been passed by value, such as -the following: - -```rust -fn foo(a: [D; 10], i: i32) -> D { - a[i] -} -``` - -The above code moves a single element out of the input array `a`. -The remainder of the array still needs to be dropped; i.e., it -is a structural fragment. Note that after performing such a move, -it is not legal to read from the array `a`. There are a number of -ways to deal with this, but the important thing to note is that -the semantics needs to distinguish in some manner between a -fragment that is the *entire* array versus a fragment that represents -all-but-one element of the array. A place where that distinction -would arise is the following: - -```rust -fn foo(a: [D; 10], b: [D; 10], i: i32, t: bool) -> D { - if t { - a[i] - } else { - b[i] - } - - // When control exits, we will need either to drop all of `a` - // and all-but-one of `b`, or to drop all of `b` and all-but-one - // of `a`. -} -``` - -There are a number of ways that the codegen backend could choose to -compile this (e.g. a `[bool; 10]` array for each such moved array; -or an `Option` for each moved array). From the viewpoint of the -borrow-checker, the important thing is to record what kind of fragment -is implied by the relevant moves. - -# Future work - -While writing up these docs, I encountered some rules I believe to be -stricter than necessary: - -- I think restricting the `&mut` P against moves and `ALIAS` is sufficient, - `MUTATE` and `CLAIM` are overkill. `MUTATE` was necessary when swap was - a built-in operator, but as it is not, it is implied by `CLAIM`, - and `CLAIM` is implied by `ALIAS`. The only net effect of this is an - extra error message in some cases, though. -- I have not described how closures interact. Current code is unsound. - I am working on describing and implementing the fix. -- If we wish, we can easily extend the move checking to allow finer-grained - tracking of what is initialized and what is not, enabling code like - this: - - a = x.f.g; // x.f.g is now uninitialized - // here, x and x.f are not usable, but x.f.h *is* - x.f.g = b; // x.f.g is not initialized - // now x, x.f, x.f.g, x.f.h are all usable - - What needs to change here, most likely, is that the `moves` module - should record not only what paths are moved, but what expressions - are actual *uses*. For example, the reference to `x` in `x.f.g = b` - is not a true *use* in the sense that it requires `x` to be fully - initialized. 
This is in fact why the above code produces an error - today: the reference to `x` in `x.f.g = b` is considered illegal - because `x` is not fully initialized. - -There are also some possible refactorings: - -- It might be nice to replace all loan paths with the MovePath mechanism, - since they allow lightweight comparison using an integer. diff --git a/src/librustc_ast_borrowck/borrowck/check_loans.rs b/src/librustc_ast_borrowck/borrowck/check_loans.rs deleted file mode 100644 index 3d824ee6ce..0000000000 --- a/src/librustc_ast_borrowck/borrowck/check_loans.rs +++ /dev/null @@ -1,680 +0,0 @@ -// ---------------------------------------------------------------------- -// Checking loans -// -// Phase 2 of check: we walk down the tree and check that: -// 1. assignments are always made to mutable locations; -// 2. loans made in overlapping scopes do not conflict -// 3. assignments do not affect things loaned out as immutable -// 4. moves do not affect things loaned out in any way - -use crate::borrowck::*; -use crate::borrowck::InteriorKind::{InteriorElement, InteriorField}; -use rustc::middle::expr_use_visitor as euv; -use rustc::middle::expr_use_visitor::MutateMode; -use rustc::middle::mem_categorization as mc; -use rustc::middle::mem_categorization::Categorization; -use rustc::middle::region; -use rustc::ty::{self, TyCtxt, RegionKind}; -use syntax_pos::Span; -use rustc::hir; -use rustc::hir::Node; -use log::debug; - -use std::rc::Rc; - -// FIXME (#16118): These functions are intended to allow the borrow checker to -// be less precise in its handling of Box while still allowing moves out of a -// Box. They should be removed when Unique is removed from LoanPath. - -fn owned_ptr_base_path<'a, 'tcx>(loan_path: &'a LoanPath<'tcx>) -> &'a LoanPath<'tcx> { - //! Returns the base of the leftmost dereference of an Unique in - //! `loan_path`. If there is no dereference of an Unique in `loan_path`, - //! then it just returns `loan_path` itself. - - return match helper(loan_path) { - Some(new_loan_path) => new_loan_path, - None => loan_path, - }; - - fn helper<'a, 'tcx>(loan_path: &'a LoanPath<'tcx>) -> Option<&'a LoanPath<'tcx>> { - match loan_path.kind { - LpVar(_) | LpUpvar(_) => None, - LpExtend(ref lp_base, _, LpDeref(mc::Unique)) => { - match helper(&lp_base) { - v @ Some(_) => v, - None => Some(&lp_base) - } - } - LpDowncast(ref lp_base, _) | - LpExtend(ref lp_base, ..) => helper(&lp_base) - } - } -} - -fn owned_ptr_base_path_rc<'tcx>(loan_path: &Rc>) -> Rc> { - //! The equivalent of `owned_ptr_base_path` for an &Rc rather than - //! a &LoanPath. - - return match helper(loan_path) { - Some(new_loan_path) => new_loan_path, - None => loan_path.clone() - }; - - fn helper<'tcx>(loan_path: &Rc>) -> Option>> { - match loan_path.kind { - LpVar(_) | LpUpvar(_) => None, - LpExtend(ref lp_base, _, LpDeref(mc::Unique)) => { - match helper(lp_base) { - v @ Some(_) => v, - None => Some(lp_base.clone()) - } - } - LpDowncast(ref lp_base, _) | - LpExtend(ref lp_base, ..) 
=> helper(lp_base) - } - } -} - -struct CheckLoanCtxt<'a, 'tcx> { - bccx: &'a BorrowckCtxt<'a, 'tcx>, - dfcx_loans: &'a LoanDataFlow<'tcx>, - move_data: &'a move_data::FlowedMoveData<'tcx>, - all_loans: &'a [Loan<'tcx>], - movable_generator: bool, -} - -impl<'a, 'tcx> euv::Delegate<'tcx> for CheckLoanCtxt<'a, 'tcx> { - fn consume(&mut self, - consume_id: hir::HirId, - _: Span, - cmt: &mc::cmt_<'tcx>, - mode: euv::ConsumeMode) { - debug!("consume(consume_id={}, cmt={:?})", consume_id, cmt); - - self.consume_common(consume_id.local_id, cmt, mode); - } - - fn matched_pat(&mut self, - _matched_pat: &hir::Pat, - _cmt: &mc::cmt_<'_>, - _mode: euv::MatchMode) { } - - fn consume_pat(&mut self, - consume_pat: &hir::Pat, - cmt: &mc::cmt_<'tcx>, - mode: euv::ConsumeMode) { - debug!("consume_pat(consume_pat={:?}, cmt={:?})", consume_pat, cmt); - - self.consume_common(consume_pat.hir_id.local_id, cmt, mode); - } - - fn borrow(&mut self, - borrow_id: hir::HirId, - borrow_span: Span, - cmt: &mc::cmt_<'tcx>, - loan_region: ty::Region<'tcx>, - bk: ty::BorrowKind, - loan_cause: euv::LoanCause) - { - debug!("borrow(borrow_id={}, cmt={:?}, loan_region={:?}, \ - bk={:?}, loan_cause={:?})", - borrow_id, cmt, loan_region, - bk, loan_cause); - - if let Some(lp) = opt_loan_path(cmt) { - self.check_if_path_is_moved(borrow_id.local_id, &lp); - } - - self.check_for_conflicting_loans(borrow_id.local_id); - - self.check_for_loans_across_yields(cmt, loan_region, borrow_span); - } - - fn mutate(&mut self, - assignment_id: hir::HirId, - _: Span, - assignee_cmt: &mc::cmt_<'tcx>, - mode: euv::MutateMode) - { - debug!("mutate(assignment_id={}, assignee_cmt={:?})", - assignment_id, assignee_cmt); - - if let Some(lp) = opt_loan_path(assignee_cmt) { - match mode { - MutateMode::Init | MutateMode::JustWrite => { - // In a case like `path = 1`, then path does not - // have to be *FULLY* initialized, but we still - // must be careful lest it contains derefs of - // pointers. - self.check_if_assigned_path_is_moved(assignee_cmt.hir_id.local_id, &lp); - } - MutateMode::WriteAndRead => { - // In a case like `path += 1`, then path must be - // fully initialized, since we will read it before - // we write it. - self.check_if_path_is_moved(assignee_cmt.hir_id.local_id, - &lp); - } - } - } - self.check_assignment(assignment_id.local_id, assignee_cmt); - } - - fn decl_without_init(&mut self, _id: hir::HirId, _span: Span) { } -} - -pub fn check_loans<'a, 'tcx>( - bccx: &BorrowckCtxt<'a, 'tcx>, - dfcx_loans: &LoanDataFlow<'tcx>, - move_data: &move_data::FlowedMoveData<'tcx>, - all_loans: &[Loan<'tcx>], - body: &hir::Body, -) { - debug!("check_loans(body id={})", body.value.hir_id); - - let def_id = bccx.tcx.hir().body_owner_def_id(body.id()); - - let hir_id = bccx.tcx.hir().as_local_hir_id(def_id).unwrap(); - let movable_generator = !match bccx.tcx.hir().get(hir_id) { - Node::Expr(&hir::Expr { - node: hir::ExprKind::Closure(.., Some(hir::GeneratorMovability::Static)), - .. 
- }) => true, - _ => false, - }; - - let param_env = bccx.tcx.param_env(def_id); - let mut clcx = CheckLoanCtxt { - bccx, - dfcx_loans, - move_data, - all_loans, - movable_generator, - }; - let rvalue_promotable_map = bccx.tcx.rvalue_promotable_map(def_id); - euv::ExprUseVisitor::new(&mut clcx, - bccx.tcx, - def_id, - param_env, - &bccx.region_scope_tree, - bccx.tables, - Some(rvalue_promotable_map)) - .consume_body(body); -} - -fn compatible_borrow_kinds(borrow_kind1: ty::BorrowKind, - borrow_kind2: ty::BorrowKind) - -> bool { - borrow_kind1 == ty::ImmBorrow && borrow_kind2 == ty::ImmBorrow -} - -impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { - pub fn tcx(&self) -> TyCtxt<'tcx> { self.bccx.tcx } - - pub fn each_issued_loan(&self, node: hir::ItemLocalId, mut op: F) -> bool where - F: FnMut(&Loan<'tcx>) -> bool, - { - //! Iterates over each loan that has been issued - //! on entrance to `node`, regardless of whether it is - //! actually *in scope* at that point. Sometimes loans - //! are issued for future scopes and thus they may have been - //! *issued* but not yet be in effect. - - self.dfcx_loans.each_bit_on_entry(node, |loan_index| { - let loan = &self.all_loans[loan_index]; - op(loan) - }) - } - - pub fn each_in_scope_loan(&self, scope: region::Scope, mut op: F) -> bool where - F: FnMut(&Loan<'tcx>) -> bool, - { - //! Like `each_issued_loan()`, but only considers loans that are - //! currently in scope. - - self.each_issued_loan(scope.item_local_id(), |loan| { - if self.bccx.region_scope_tree.is_subscope_of(scope, loan.kill_scope) { - op(loan) - } else { - true - } - }) - } - - fn each_in_scope_loan_affecting_path(&self, - scope: region::Scope, - loan_path: &LoanPath<'tcx>, - mut op: F) - -> bool where - F: FnMut(&Loan<'tcx>) -> bool, - { - //! Iterates through all of the in-scope loans affecting `loan_path`, - //! calling `op`, and ceasing iteration if `false` is returned. - - // First, we check for a loan restricting the path P being used. This - // accounts for borrows of P but also borrows of subpaths, like P.a.b. - // Consider the following example: - // - // let x = &mut a.b.c; // Restricts a, a.b, and a.b.c - // let y = a; // Conflicts with restriction - - let loan_path = owned_ptr_base_path(loan_path); - let cont = self.each_in_scope_loan(scope, |loan| { - let mut ret = true; - for restr_path in &loan.restricted_paths { - if **restr_path == *loan_path { - if !op(loan) { - ret = false; - break; - } - } - } - ret - }); - - if !cont { - return false; - } - - // Next, we must check for *loans* (not restrictions) on the path P or - // any base path. This rejects examples like the following: - // - // let x = &mut a.b; - // let y = a.b.c; - // - // Limiting this search to *loans* and not *restrictions* means that - // examples like the following continue to work: - // - // let x = &mut a.b; - // let y = a.c; - - let mut loan_path = loan_path; - loop { - match loan_path.kind { - LpVar(_) | LpUpvar(_) => { - break; - } - LpDowncast(ref lp_base, _) | - LpExtend(ref lp_base, ..) => { - loan_path = &lp_base; - } - } - - let cont = self.each_in_scope_loan(scope, |loan| { - if *loan.loan_path == *loan_path { - op(loan) - } else { - true - } - }); - - if !cont { - return false; - } - } - - return true; - } - - pub fn loans_generated_by(&self, node: hir::ItemLocalId) -> Vec { - //! Returns a vector of the loans that are generated as - //! we enter `node`. 
- - let mut result = Vec::new(); - self.dfcx_loans.each_gen_bit(node, |loan_index| { - result.push(loan_index); - true - }); - return result; - } - - pub fn check_for_loans_across_yields(&self, - cmt: &mc::cmt_<'tcx>, - loan_region: ty::Region<'tcx>, - borrow_span: Span) { - pub fn borrow_of_local_data(cmt: &mc::cmt_<'_>) -> bool { - match cmt.cat { - // Borrows of static items is allowed - Categorization::StaticItem => false, - // Reborrow of already borrowed data is ignored - // Any errors will be caught on the initial borrow - Categorization::Deref(..) => false, - - // By-ref upvars has Derefs so they will get ignored. - // Generators counts as FnOnce so this leaves only - // by-move upvars, which is local data for generators - Categorization::Upvar(..) => true, - - Categorization::ThreadLocal(region) | - Categorization::Rvalue(region) => { - // Rvalues promoted to 'static are no longer local - if let RegionKind::ReStatic = *region { - false - } else { - true - } - } - - // Borrow of local data must be checked - Categorization::Local(..) => true, - - // For interior references and downcasts, find out if the base is local - Categorization::Downcast(ref cmt_base, _) | - Categorization::Interior(ref cmt_base, _) => borrow_of_local_data(&cmt_base), - } - } - - if !self.movable_generator { - return; - } - - if !borrow_of_local_data(cmt) { - return; - } - - let scope = match *loan_region { - // A concrete region in which we will look for a yield expression - RegionKind::ReScope(scope) => scope, - - // There cannot be yields inside an empty region - RegionKind::ReEmpty => return, - - // Local data cannot have these lifetimes - RegionKind::ReEarlyBound(..) | - RegionKind::ReLateBound(..) | - RegionKind::ReFree(..) | - RegionKind::ReStatic => { - self.bccx - .tcx - .sess.delay_span_bug(borrow_span, - &format!("unexpected region for local data {:?}", - loan_region)); - return - } - - // These cannot exist in borrowck - RegionKind::ReVar(..) | - RegionKind::RePlaceholder(..) | - RegionKind::ReClosureBound(..) | - RegionKind::ReErased => span_bug!(borrow_span, - "unexpected region in borrowck {:?}", - loan_region), - }; - - let body_id = self.bccx.body.value.hir_id.local_id; - - if self.bccx.region_scope_tree.containing_body(scope) != Some(body_id) { - // We are borrowing local data longer than its storage. - // This should result in other borrowck errors. - self.bccx.tcx.sess.delay_span_bug(borrow_span, - "borrowing local data longer than its storage"); - return; - } - - if let Some(_) = self.bccx.region_scope_tree - .yield_in_scope_for_expr(scope, cmt.hir_id, self.bccx.body) - { - self.bccx.signal_error(); - } - } - - pub fn check_for_conflicting_loans(&self, node: hir::ItemLocalId) { - //! Checks to see whether any of the loans that are issued - //! on entrance to `node` conflict with loans that have already been - //! issued when we enter `node` (for example, we do not - //! permit two `&mut` borrows of the same variable). - //! - //! (Note that some loans can be *issued* without necessarily - //! taking effect yet.) - - debug!("check_for_conflicting_loans(node={:?})", node); - - let new_loan_indices = self.loans_generated_by(node); - debug!("new_loan_indices = {:?}", new_loan_indices); - - for &new_loan_index in &new_loan_indices { - self.each_issued_loan(node, |issued_loan| { - let new_loan = &self.all_loans[new_loan_index]; - // Only report an error for the first issued loan that conflicts - // to avoid O(n^2) errors. 
- self.report_error_if_loans_conflict(issued_loan, new_loan) - }); - } - - for (i, &x) in new_loan_indices.iter().enumerate() { - let old_loan = &self.all_loans[x]; - for &y in &new_loan_indices[(i+1) ..] { - let new_loan = &self.all_loans[y]; - self.report_error_if_loans_conflict(old_loan, new_loan); - } - } - } - - pub fn report_error_if_loans_conflict( - &self, - old_loan: &Loan<'tcx>, - new_loan: &Loan<'tcx>, - ) -> bool { - //! Checks whether `old_loan` and `new_loan` can safely be issued - //! simultaneously. - - debug!("report_error_if_loans_conflict(old_loan={:?}, new_loan={:?})", - old_loan, - new_loan); - - // Should only be called for loans that are in scope at the same time. - assert!(self.bccx.region_scope_tree.scopes_intersect(old_loan.kill_scope, - new_loan.kill_scope)); - - self.report_error_if_loan_conflicts_with_restriction( - old_loan, new_loan) - && self.report_error_if_loan_conflicts_with_restriction( - new_loan, old_loan) - } - - pub fn report_error_if_loan_conflicts_with_restriction( - &self, - loan1: &Loan<'tcx>, - loan2: &Loan<'tcx>, - ) -> bool { - //! Checks whether the restrictions introduced by `loan1` would - //! prohibit `loan2`. - debug!("report_error_if_loan_conflicts_with_restriction(\ - loan1={:?}, loan2={:?})", - loan1, - loan2); - - if compatible_borrow_kinds(loan1.kind, loan2.kind) { - return true; - } - - let loan2_base_path = owned_ptr_base_path_rc(&loan2.loan_path); - for restr_path in &loan1.restricted_paths { - if *restr_path != loan2_base_path { continue; } - - self.bccx.signal_error(); - return false; - } - - true - } - - fn consume_common( - &self, - id: hir::ItemLocalId, - cmt: &mc::cmt_<'tcx>, - mode: euv::ConsumeMode, - ) { - if let Some(lp) = opt_loan_path(cmt) { - match mode { - euv::Copy => { - self.check_for_copy_of_frozen_path(id, &lp); - } - euv::Move(_) => { - // Sometimes moves aren't from a move path; - // this either means that the original move - // was from something illegal to move, - // or was moved from referent of an unsafe - // pointer or something like that. - if self.move_data.is_move_path(id, &lp) { - self.check_for_move_of_borrowed_path(id, &lp); - } - } - } - self.check_if_path_is_moved(id, &lp); - } - } - - fn check_for_copy_of_frozen_path(&self, - id: hir::ItemLocalId, - copy_path: &LoanPath<'tcx>) { - self.analyze_restrictions_on_use(id, copy_path, ty::ImmBorrow); - } - - fn check_for_move_of_borrowed_path(&self, - id: hir::ItemLocalId, - move_path: &LoanPath<'tcx>) { - // We want to detect if there are any loans at all, so we search for - // any loans incompatible with MutBorrrow, since all other kinds of - // loans are incompatible with that. 
- self.analyze_restrictions_on_use(id, move_path, ty::MutBorrow); - } - - fn analyze_restrictions_on_use(&self, - expr_id: hir::ItemLocalId, - use_path: &LoanPath<'tcx>, - borrow_kind: ty::BorrowKind) { - debug!("analyze_restrictions_on_use(expr_id={:?}, use_path={:?})", - expr_id, use_path); - - let scope = region::Scope { - id: expr_id, - data: region::ScopeData::Node - }; - self.each_in_scope_loan_affecting_path( - scope, use_path, |loan| { - if !compatible_borrow_kinds(loan.kind, borrow_kind) { - self.bccx.signal_error(); - false - } else { - true - } - }); - } - - /// Reports an error if `expr` (which should be a path) - /// is using a moved/uninitialized value - fn check_if_path_is_moved(&self, - id: hir::ItemLocalId, - lp: &Rc>) { - debug!("check_if_path_is_moved(id={:?}, lp={:?})", id, lp); - - // FIXME: if you find yourself tempted to cut and paste - // the body below and then specializing the error reporting, - // consider refactoring this instead! - - let base_lp = owned_ptr_base_path_rc(lp); - self.move_data.each_move_of(id, &base_lp, |_, _| { - self.bccx.signal_error(); - false - }); - } - - /// Reports an error if assigning to `lp` will use a - /// moved/uninitialized value. Mainly this is concerned with - /// detecting derefs of uninitialized pointers. - /// - /// For example: - /// - /// ``` - /// let a: i32; - /// a = 10; // ok, even though a is uninitialized - /// ``` - /// - /// ``` - /// struct Point { x: u32, y: u32 } - /// let mut p: Point; - /// p.x = 22; // ok, even though `p` is uninitialized - /// ``` - /// - /// ```compile_fail,E0381 - /// # struct Point { x: u32, y: u32 } - /// let mut p: Box; - /// (*p).x = 22; // not ok, p is uninitialized, can't deref - /// ``` - fn check_if_assigned_path_is_moved(&self, - id: hir::ItemLocalId, - lp: &Rc>) - { - match lp.kind { - LpVar(_) | LpUpvar(_) => { - // assigning to `x` does not require that `x` is initialized - } - LpDowncast(ref lp_base, _) => { - // assigning to `(P->Variant).f` is ok if assigning to `P` is ok - self.check_if_assigned_path_is_moved(id, lp_base); - } - LpExtend(ref lp_base, _, LpInterior(_, InteriorField(_))) => { - match lp_base.to_type().sty { - ty::Adt(def, _) if def.has_dtor(self.tcx()) => { - // In the case where the owner implements drop, then - // the path must be initialized to prevent a case of - // partial reinitialization - // - // FIXME: could refactor via hypothetical - // generalized check_if_path_is_moved - let loan_path = owned_ptr_base_path_rc(lp_base); - self.move_data.each_move_of(id, &loan_path, |_, _| { - self.bccx - .signal_error(); - false - }); - return; - }, - _ => {}, - } - - // assigning to `P.f` is ok if assigning to `P` is ok - self.check_if_assigned_path_is_moved(id, lp_base); - } - LpExtend(ref lp_base, _, LpInterior(_, InteriorElement)) | - LpExtend(ref lp_base, _, LpDeref(_)) => { - // assigning to `P[i]` requires `P` is initialized - // assigning to `(*P)` requires `P` is initialized - self.check_if_path_is_moved(id, lp_base); - } - } - } - - fn check_assignment(&self, - assignment_id: hir::ItemLocalId, - assignee_cmt: &mc::cmt_<'tcx>) { - debug!("check_assignment(assignee_cmt={:?})", assignee_cmt); - - // Check that we don't invalidate any outstanding loans - if let Some(loan_path) = opt_loan_path(assignee_cmt) { - let scope = region::Scope { - id: assignment_id, - data: region::ScopeData::Node - }; - self.each_in_scope_loan_affecting_path(scope, &loan_path, |_| { - self.bccx.signal_error(); - false - }); - } - - // Check for reassignments to (immutable) local 
variables. This - // needs to be done here instead of in check_loans because we - // depend on move data. - if let Categorization::Local(_) = assignee_cmt.cat { - let lp = opt_loan_path(assignee_cmt).unwrap(); - self.move_data.each_assignment_of(assignment_id, &lp, |_| { - if !assignee_cmt.mutbl.is_mutable() { - self.bccx.signal_error(); - } - false - }); - return - } - } -} diff --git a/src/librustc_ast_borrowck/borrowck/gather_loans/gather_moves.rs b/src/librustc_ast_borrowck/borrowck/gather_loans/gather_moves.rs deleted file mode 100644 index 617161109b..0000000000 --- a/src/librustc_ast_borrowck/borrowck/gather_loans/gather_moves.rs +++ /dev/null @@ -1,135 +0,0 @@ -//! Computes moves. - -use crate::borrowck::*; -use crate::borrowck::move_data::*; -use rustc::middle::mem_categorization as mc; -use rustc::middle::mem_categorization::Categorization; -use rustc::middle::mem_categorization::InteriorOffsetKind as Kind; -use rustc::ty::{self, Ty}; - -use std::rc::Rc; -use syntax_pos::Span; -use log::debug; - -struct GatherMoveInfo<'c, 'tcx> { - id: hir::ItemLocalId, - cmt: &'c mc::cmt_<'tcx>, -} - -pub fn gather_decl<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, - move_data: &MoveData<'tcx>, - var_id: hir::HirId, - var_ty: Ty<'tcx>) { - let loan_path = Rc::new(LoanPath::new(LpVar(var_id), var_ty)); - move_data.add_move(bccx.tcx, loan_path, var_id.local_id); -} - -pub fn gather_move_from_expr<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, - move_data: &MoveData<'tcx>, - move_expr_id: hir::ItemLocalId, - cmt: &mc::cmt_<'tcx>) { - let move_info = GatherMoveInfo { - id: move_expr_id, - cmt, - }; - gather_move(bccx, move_data, move_info); -} - -pub fn gather_move_from_pat<'a, 'c, 'tcx>( - bccx: &BorrowckCtxt<'a, 'tcx>, - move_data: &MoveData<'tcx>, - move_pat: &hir::Pat, - cmt: &'c mc::cmt_<'tcx>, -) { - let move_info = GatherMoveInfo { - id: move_pat.hir_id.local_id, - cmt, - }; - - debug!("gather_move_from_pat: move_pat={:?}", move_pat); - - gather_move(bccx, move_data, move_info); -} - -fn gather_move<'a, 'c, 'tcx>( - bccx: &BorrowckCtxt<'a, 'tcx>, - move_data: &MoveData<'tcx>, - move_info: GatherMoveInfo<'c, 'tcx>, -) { - debug!("gather_move(move_id={:?}, cmt={:?})", - move_info.id, move_info.cmt); - - let potentially_illegal_move = check_and_get_illegal_move_origin(bccx, move_info.cmt); - if let Some(_) = potentially_illegal_move { - bccx.signal_error(); - return; - } - - match opt_loan_path(&move_info.cmt) { - Some(loan_path) => { - move_data.add_move(bccx.tcx, loan_path, - move_info.id); - } - None => { - // move from rvalue or raw pointer, hence ok - } - } -} - -pub fn gather_assignment<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, - move_data: &MoveData<'tcx>, - assignment_id: hir::ItemLocalId, - assignment_span: Span, - assignee_loan_path: Rc>) { - move_data.add_assignment(bccx.tcx, - assignee_loan_path, - assignment_id, - assignment_span); -} - -// (keep in sync with move_error::report_cannot_move_out_of ) -fn check_and_get_illegal_move_origin<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, - cmt: &mc::cmt_<'tcx>) - -> Option> { - match cmt.cat { - Categorization::Deref(_, mc::BorrowedPtr(..)) | - Categorization::Deref(_, mc::UnsafePtr(..)) | - Categorization::ThreadLocal(..) | - Categorization::StaticItem => { - Some(cmt.clone()) - } - - Categorization::Rvalue(..) | - Categorization::Local(..) | - Categorization::Upvar(..) 
=> { - None - } - - Categorization::Downcast(ref b, _) | - Categorization::Interior(ref b, mc::InteriorField(_)) | - Categorization::Interior(ref b, mc::InteriorElement(Kind::Pattern)) => { - match b.ty.sty { - ty::Adt(def, _) => { - if def.has_dtor(bccx.tcx) { - Some(cmt.clone()) - } else { - check_and_get_illegal_move_origin(bccx, b) - } - } - ty::Slice(..) => Some(cmt.clone()), - _ => { - check_and_get_illegal_move_origin(bccx, b) - } - } - } - - Categorization::Interior(_, mc::InteriorElement(Kind::Index)) => { - // Forbid move of arr[i] for arr: [T; 3]; see RFC 533. - Some(cmt.clone()) - } - - Categorization::Deref(ref b, mc::Unique) => { - check_and_get_illegal_move_origin(bccx, b) - } - } -} diff --git a/src/librustc_ast_borrowck/borrowck/gather_loans/lifetime.rs b/src/librustc_ast_borrowck/borrowck/gather_loans/lifetime.rs deleted file mode 100644 index ff7dd66793..0000000000 --- a/src/librustc_ast_borrowck/borrowck/gather_loans/lifetime.rs +++ /dev/null @@ -1,113 +0,0 @@ -//! This module implements the check that the lifetime of a borrow -//! does not exceed the lifetime of the value being borrowed. - -use crate::borrowck::*; -use rustc::hir::HirId; -use rustc::middle::mem_categorization as mc; -use rustc::middle::mem_categorization::Categorization; -use rustc::middle::region; -use rustc::ty; - -use log::debug; - -type R = Result<(),()>; - -pub fn guarantee_lifetime<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, - item_scope: region::Scope, - cmt: &'a mc::cmt_<'tcx>, - loan_region: ty::Region<'tcx>) - -> Result<(),()> { - //! Reports error if `loan_region` is larger than S - //! where S is `item_scope` if `cmt` is an upvar, - //! and is scope of `cmt` otherwise. - debug!("guarantee_lifetime(cmt={:?}, loan_region={:?})", - cmt, loan_region); - let ctxt = GuaranteeLifetimeContext { bccx, item_scope, loan_region }; - ctxt.check(cmt, None) -} - -/////////////////////////////////////////////////////////////////////////// -// Private - -struct GuaranteeLifetimeContext<'a, 'tcx> { - bccx: &'a BorrowckCtxt<'a, 'tcx>, - - // the scope of the function body for the enclosing item - item_scope: region::Scope, - - loan_region: ty::Region<'tcx>, -} - -impl<'a, 'tcx> GuaranteeLifetimeContext<'a, 'tcx> { - fn check(&self, cmt: &mc::cmt_<'tcx>, discr_scope: Option) -> R { - //! Main routine. Walks down `cmt` until we find the - //! "guarantor". Reports an error if `self.loan_region` is - //! larger than scope of `cmt`. - debug!("guarantee_lifetime.check(cmt={:?}, loan_region={:?})", - cmt, - self.loan_region); - - match cmt.cat { - Categorization::Rvalue(..) | - Categorization::ThreadLocal(..) | - Categorization::Local(..) | // L-Local - Categorization::Upvar(..) | - Categorization::Deref(_, mc::BorrowedPtr(..)) | // L-Deref-Borrowed - Categorization::Deref(_, mc::UnsafePtr(..)) => { - self.check_scope(self.scope(cmt)) - } - - Categorization::StaticItem => { - Ok(()) - } - - Categorization::Downcast(ref base, _) | - Categorization::Deref(ref base, mc::Unique) | // L-Deref-Send - Categorization::Interior(ref base, _) => { // L-Field - self.check(base, discr_scope) - } - } - } - - fn check_scope(&self, max_scope: ty::Region<'tcx>) -> R { - //! Reports an error if `loan_region` is larger than `max_scope` - - if !self.bccx.is_subregion_of(self.loan_region, max_scope) { - Err(self.bccx.signal_error()) - } else { - Ok(()) - } - } - - fn scope(&self, cmt: &mc::cmt_<'tcx>) -> ty::Region<'tcx> { - //! Returns the maximal region scope for the which the - //! place `cmt` is guaranteed to be valid without any - //! 
rooting etc, and presuming `cmt` is not mutated. - - match cmt.cat { - Categorization::ThreadLocal(temp_scope) | - Categorization::Rvalue(temp_scope) => { - temp_scope - } - Categorization::Upvar(..) => { - self.bccx.tcx.mk_region(ty::ReScope(self.item_scope)) - } - Categorization::Local(hir_id) => { - self.bccx.tcx.mk_region(ty::ReScope( - self.bccx.region_scope_tree.var_scope(hir_id.local_id))) - } - Categorization::StaticItem | - Categorization::Deref(_, mc::UnsafePtr(..)) => { - self.bccx.tcx.lifetimes.re_static - } - Categorization::Deref(_, mc::BorrowedPtr(_, r)) => { - r - } - Categorization::Downcast(ref cmt, _) | - Categorization::Deref(ref cmt, mc::Unique) | - Categorization::Interior(ref cmt, _) => { - self.scope(cmt) - } - } - } -} diff --git a/src/librustc_ast_borrowck/borrowck/gather_loans/mod.rs b/src/librustc_ast_borrowck/borrowck/gather_loans/mod.rs deleted file mode 100644 index 16fef705ec..0000000000 --- a/src/librustc_ast_borrowck/borrowck/gather_loans/mod.rs +++ /dev/null @@ -1,433 +0,0 @@ -// ---------------------------------------------------------------------- -// Gathering loans -// -// The borrow check proceeds in two phases. In phase one, we gather the full -// set of loans that are required at any point. These are sorted according to -// their associated scopes. In phase two, checking loans, we will then make -// sure that all of these loans are honored. - -use crate::borrowck::*; -use crate::borrowck::move_data::MoveData; -use rustc::middle::expr_use_visitor as euv; -use rustc::middle::mem_categorization as mc; -use rustc::middle::mem_categorization::Categorization; -use rustc::middle::region; -use rustc::ty::{self, TyCtxt}; - -use syntax_pos::Span; -use rustc::hir; -use log::debug; - -use restrictions::RestrictionResult; - -mod lifetime; -mod restrictions; -mod gather_moves; - -pub fn gather_loans_in_fn<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, - body: hir::BodyId) - -> (Vec>, move_data::MoveData<'tcx>) { - let def_id = bccx.tcx.hir().body_owner_def_id(body); - let param_env = bccx.tcx.param_env(def_id); - let mut glcx = GatherLoanCtxt { - bccx, - all_loans: Vec::new(), - item_ub: region::Scope { - id: bccx.tcx.hir().body(body).value.hir_id.local_id, - data: region::ScopeData::Node - }, - move_data: MoveData::default(), - }; - - let rvalue_promotable_map = bccx.tcx.rvalue_promotable_map(def_id); - euv::ExprUseVisitor::new(&mut glcx, - bccx.tcx, - def_id, - param_env, - &bccx.region_scope_tree, - bccx.tables, - Some(rvalue_promotable_map)) - .consume_body(bccx.body); - - let GatherLoanCtxt { all_loans, move_data, .. } = glcx; - (all_loans, move_data) -} - -struct GatherLoanCtxt<'a, 'tcx> { - bccx: &'a BorrowckCtxt<'a, 'tcx>, - move_data: move_data::MoveData<'tcx>, - all_loans: Vec>, - /// `item_ub` is used as an upper-bound on the lifetime whenever we - /// ask for the scope of an expression categorized as an upvar. 
- item_ub: region::Scope, -} - -impl<'a, 'tcx> euv::Delegate<'tcx> for GatherLoanCtxt<'a, 'tcx> { - fn consume(&mut self, - consume_id: hir::HirId, - _consume_span: Span, - cmt: &mc::cmt_<'tcx>, - mode: euv::ConsumeMode) { - debug!("consume(consume_id={}, cmt={:?}, mode={:?})", - consume_id, cmt, mode); - - match mode { - euv::Move(_) => { - gather_moves::gather_move_from_expr( - self.bccx, &self.move_data, - consume_id.local_id, cmt); - } - euv::Copy => { } - } - } - - fn matched_pat(&mut self, - matched_pat: &hir::Pat, - cmt: &mc::cmt_<'tcx>, - mode: euv::MatchMode) { - debug!("matched_pat(matched_pat={:?}, cmt={:?}, mode={:?})", - matched_pat, - cmt, - mode); - } - - fn consume_pat(&mut self, - consume_pat: &hir::Pat, - cmt: &mc::cmt_<'tcx>, - mode: euv::ConsumeMode) { - debug!("consume_pat(consume_pat={:?}, cmt={:?}, mode={:?})", - consume_pat, - cmt, - mode); - - match mode { - euv::Copy => { return; } - euv::Move(_) => { } - } - - gather_moves::gather_move_from_pat( - self.bccx, &self.move_data, - consume_pat, cmt); - } - - fn borrow(&mut self, - borrow_id: hir::HirId, - _: Span, - cmt: &mc::cmt_<'tcx>, - loan_region: ty::Region<'tcx>, - bk: ty::BorrowKind, - loan_cause: euv::LoanCause) - { - debug!("borrow(borrow_id={}, cmt={:?}, loan_region={:?}, \ - bk={:?}, loan_cause={:?})", - borrow_id, cmt, loan_region, - bk, loan_cause); - - self.guarantee_valid(borrow_id.local_id, - cmt, - bk, - loan_region); - } - - fn mutate(&mut self, - assignment_id: hir::HirId, - assignment_span: Span, - assignee_cmt: &mc::cmt_<'tcx>, - _: euv::MutateMode) - { - self.guarantee_assignment_valid(assignment_id, - assignment_span, - assignee_cmt); - } - - fn decl_without_init(&mut self, id: hir::HirId, _span: Span) { - let ty = self.bccx - .tables - .node_type(id); - gather_moves::gather_decl(self.bccx, &self.move_data, id, ty); - } - - fn nested_body(&mut self, body_id: hir::BodyId) { - debug!("nested_body(body_id={:?})", body_id); - // rust-lang/rust#58776: MIR and AST borrow check disagree on where - // certain closure errors are reported. As such migrate borrowck has to - // operate at the level of items, rather than bodies. Check if the - // contained closure had any errors and set `signalled_any_error` if it - // has. - let bccx = self.bccx; - if bccx.tcx.migrate_borrowck() { - if let SignalledError::NoErrorsSeen = bccx.signalled_any_error.get() { - let closure_def_id = bccx.tcx.hir().body_owner_def_id(body_id); - debug!("checking closure: {:?}", closure_def_id); - - bccx.signalled_any_error.set(bccx.tcx.borrowck(closure_def_id).signalled_any_error); - } - } - } -} - -/// Implements the A-* rules in README.md. -fn check_aliasability<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, - cmt: &mc::cmt_<'tcx>, - req_kind: ty::BorrowKind) - -> Result<(),()> { - - let aliasability = cmt.freely_aliasable(); - debug!("check_aliasability aliasability={:?} req_kind={:?}", - aliasability, req_kind); - - match (aliasability, req_kind) { - (mc::Aliasability::NonAliasable, _) => { - /* Uniquely accessible path -- OK for `&` and `&mut` */ - Ok(()) - } - (mc::Aliasability::FreelyAliasable(mc::AliasableStatic), ty::ImmBorrow) => { - // Borrow of an immutable static item. - Ok(()) - } - (mc::Aliasability::FreelyAliasable(mc::AliasableStaticMut), _) => { - // Even touching a static mut is considered unsafe. We assume the - // user knows what they're doing in these cases. 
- Ok(()) - } - (mc::Aliasability::FreelyAliasable(_), ty::UniqueImmBorrow) | - (mc::Aliasability::FreelyAliasable(_), ty::MutBorrow) => { - bccx.signal_error(); - Err(()) - } - (..) => { - Ok(()) - } - } -} - -/// Implements the M-* rules in README.md. -fn check_mutability<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, - cmt: &mc::cmt_<'tcx>, - req_kind: ty::BorrowKind) - -> Result<(),()> { - debug!("check_mutability(cmt={:?} req_kind={:?}", cmt, req_kind); - match req_kind { - ty::UniqueImmBorrow | ty::ImmBorrow => { - match cmt.mutbl { - // I am intentionally leaving this here to help - // refactoring if, in the future, we should add new - // kinds of mutability. - mc::McImmutable | mc::McDeclared | mc::McInherited => { - // both imm and mut data can be lent as imm; - // for mutable data, this is a freeze - Ok(()) - } - } - } - - ty::MutBorrow => { - // Only mutable data can be lent as mutable. - if !cmt.mutbl.is_mutable() { - Err(bccx.signal_error()) - } else { - Ok(()) - } - } - } -} - -impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> { - pub fn tcx(&self) -> TyCtxt<'tcx> { self.bccx.tcx } - - /// Guarantees that `cmt` is assignable, or reports an error. - fn guarantee_assignment_valid(&mut self, - assignment_id: hir::HirId, - assignment_span: Span, - cmt: &mc::cmt_<'tcx>) { - - let opt_lp = opt_loan_path(cmt); - debug!("guarantee_assignment_valid(assignment_id={}, cmt={:?}) opt_lp={:?}", - assignment_id, cmt, opt_lp); - - if let Categorization::Local(..) = cmt.cat { - // Only re-assignments to locals require it to be - // mutable - this is checked in check_loans. - } else { - // Check that we don't allow assignments to non-mutable data. - if check_mutability(self.bccx, cmt, ty::MutBorrow).is_err() { - return; // reported an error, no sense in reporting more. - } - } - - // Check that we don't allow assignments to aliasable data - if check_aliasability(self.bccx, cmt, ty::MutBorrow).is_err() { - return; // reported an error, no sense in reporting more. - } - - match opt_lp { - Some(lp) => { - gather_moves::gather_assignment(self.bccx, &self.move_data, - assignment_id.local_id, - assignment_span, - lp); - } - None => { - // This can occur with e.g., `*foo() = 5`. In such - // cases, there is no need to check for conflicts - // with moves etc, just ignore. - } - } - } - - /// Guarantees that `addr_of(cmt)` will be valid for the duration of `static_scope_r`, or - /// reports an error. This may entail taking out loans, which will be added to the - /// `req_loan_map`. - fn guarantee_valid(&mut self, - borrow_id: hir::ItemLocalId, - cmt: &mc::cmt_<'tcx>, - req_kind: ty::BorrowKind, - loan_region: ty::Region<'tcx>) { - debug!("guarantee_valid(borrow_id={:?}, cmt={:?}, \ - req_mutbl={:?}, loan_region={:?})", - borrow_id, - cmt, - req_kind, - loan_region); - - // a loan for the empty region can never be dereferenced, so - // it is always safe - if *loan_region == ty::ReEmpty { - return; - } - - // Check that the lifetime of the borrow does not exceed - // the lifetime of the data being borrowed. - if lifetime::guarantee_lifetime(self.bccx, self.item_ub, cmt, loan_region).is_err() { - return; // reported an error, no sense in reporting more. - } - - // Check that we don't allow mutable borrows of non-mutable data. - if check_mutability(self.bccx, cmt, req_kind).is_err() { - return; // reported an error, no sense in reporting more. - } - - // Check that we don't allow mutable borrows of aliasable data. 
- if check_aliasability(self.bccx, cmt, req_kind).is_err() { - return; // reported an error, no sense in reporting more. - } - - // Compute the restrictions that are required to enforce the - // loan is safe. - let restr = restrictions::compute_restrictions(self.bccx, &cmt, loan_region); - - debug!("guarantee_valid(): restrictions={:?}", restr); - - // Create the loan record (if needed). - let loan = match restr { - RestrictionResult::Safe => { - // No restrictions---no loan record necessary - return; - } - - RestrictionResult::SafeIf(loan_path, restricted_paths) => { - let loan_scope = match *loan_region { - ty::ReScope(scope) => scope, - - ty::ReEarlyBound(ref br) => { - self.bccx.region_scope_tree.early_free_scope(self.tcx(), br) - } - - ty::ReFree(ref fr) => { - self.bccx.region_scope_tree.free_scope(self.tcx(), fr) - } - - ty::ReStatic => self.item_ub, - - ty::ReEmpty | - ty::ReClosureBound(..) | - ty::ReLateBound(..) | - ty::ReVar(..) | - ty::RePlaceholder(..) | - ty::ReErased => { - span_bug!( - cmt.span, - "invalid borrow lifetime: {:?}", - loan_region); - } - }; - debug!("loan_scope = {:?}", loan_scope); - - let borrow_scope = region::Scope { - id: borrow_id, - data: region::ScopeData::Node - }; - let gen_scope = self.compute_gen_scope(borrow_scope, loan_scope); - debug!("gen_scope = {:?}", gen_scope); - - let kill_scope = self.compute_kill_scope(loan_scope, &loan_path); - debug!("kill_scope = {:?}", kill_scope); - - Loan { - index: self.all_loans.len(), - loan_path, - kind: req_kind, - gen_scope, - kill_scope, - restricted_paths, - } - } - }; - - debug!("guarantee_valid(borrow_id={:?}), loan={:?}", - borrow_id, loan); - - // let loan_path = loan.loan_path; - // let loan_gen_scope = loan.gen_scope; - // let loan_kill_scope = loan.kill_scope; - self.all_loans.push(loan); - } - - pub fn compute_gen_scope(&self, - borrow_scope: region::Scope, - loan_scope: region::Scope) - -> region::Scope { - //! Determine when to introduce the loan. Typically the loan - //! is introduced at the point of the borrow, but in some cases, - //! notably method arguments, the loan may be introduced only - //! later, once it comes into scope. - - if self.bccx.region_scope_tree.is_subscope_of(borrow_scope, loan_scope) { - borrow_scope - } else { - loan_scope - } - } - - pub fn compute_kill_scope(&self, loan_scope: region::Scope, lp: &LoanPath<'tcx>) - -> region::Scope { - //! Determine when the loan restrictions go out of scope. - //! This is either when the lifetime expires or when the - //! local variable which roots the loan-path goes out of scope, - //! whichever happens faster. - //! - //! It may seem surprising that we might have a loan region - //! larger than the variable which roots the loan-path; this can - //! come about when variables of `&mut` type are re-borrowed, - //! as in this example: - //! - //! struct Foo { counter: u32 } - //! - //! fn counter<'a>(v: &'a mut Foo) -> &'a mut u32 { - //! &mut v.counter - //! } - //! - //! In this case, the reference (`'a`) outlives the - //! variable `v` that hosts it. Note that this doesn't come up - //! with immutable `&` pointers, because borrows of such pointers - //! do not require restrictions and hence do not cause a loan. 
- - let lexical_scope = lp.kill_scope(self.bccx); - if self.bccx.region_scope_tree.is_subscope_of(lexical_scope, loan_scope) { - lexical_scope - } else { - assert!(self.bccx.region_scope_tree.is_subscope_of(loan_scope, lexical_scope)); - loan_scope - } - } -} diff --git a/src/librustc_ast_borrowck/borrowck/gather_loans/restrictions.rs b/src/librustc_ast_borrowck/borrowck/gather_loans/restrictions.rs deleted file mode 100644 index 545c27b17b..0000000000 --- a/src/librustc_ast_borrowck/borrowck/gather_loans/restrictions.rs +++ /dev/null @@ -1,179 +0,0 @@ -//! Computes the restrictions that result from a borrow. - -use crate::borrowck::*; -use rustc::middle::mem_categorization as mc; -use rustc::middle::mem_categorization::Categorization; -use rustc::ty; -use log::debug; - -use crate::borrowck::ToInteriorKind; - -use std::rc::Rc; - -#[derive(Debug)] -pub enum RestrictionResult<'tcx> { - Safe, - SafeIf(Rc>, Vec>>) -} - -pub fn compute_restrictions<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, - cmt: &mc::cmt_<'tcx>, - loan_region: ty::Region<'tcx>) - -> RestrictionResult<'tcx> { - let ctxt = RestrictionsContext { bccx, loan_region }; - - ctxt.restrict(cmt) -} - -/////////////////////////////////////////////////////////////////////////// -// Private - -struct RestrictionsContext<'a, 'tcx> { - bccx: &'a BorrowckCtxt<'a, 'tcx>, - loan_region: ty::Region<'tcx>, -} - -impl<'a, 'tcx> RestrictionsContext<'a, 'tcx> { - fn restrict(&self, - cmt: &mc::cmt_<'tcx>) -> RestrictionResult<'tcx> { - debug!("restrict(cmt={:?})", cmt); - - let new_lp = |v: LoanPathKind<'tcx>| Rc::new(LoanPath::new(v, cmt.ty)); - - match cmt.cat.clone() { - Categorization::Rvalue(..) => { - // Effectively, rvalues are stored into a - // non-aliasable temporary on the stack. Since they - // are inherently non-aliasable, they can only be - // accessed later through the borrow itself and hence - // must inherently comply with its terms. - RestrictionResult::Safe - } - - Categorization::ThreadLocal(..) => { - // Thread-locals are statics that have a scope, with - // no underlying structure to provide restrictions. - RestrictionResult::Safe - } - - Categorization::Local(local_id) => { - // R-Variable, locally declared - let lp = new_lp(LpVar(local_id)); - RestrictionResult::SafeIf(lp.clone(), vec![lp]) - } - - Categorization::Upvar(mc::Upvar { id, .. }) => { - // R-Variable, captured into closure - let lp = new_lp(LpUpvar(id)); - RestrictionResult::SafeIf(lp.clone(), vec![lp]) - } - - Categorization::Downcast(cmt_base, _) => { - // When we borrow the interior of an enum, we have to - // ensure the enum itself is not mutated, because that - // could cause the type of the memory to change. - self.restrict(&cmt_base) - } - - Categorization::Interior(cmt_base, interior) => { - // R-Field - // - // Overwriting the base would not change the type of - // the memory, so no additional restrictions are - // needed. - let opt_variant_id = match cmt_base.cat { - Categorization::Downcast(_, variant_id) => Some(variant_id), - _ => None - }; - let interior = interior.cleaned(); - let base_ty = cmt_base.ty; - let result = self.restrict(&cmt_base); - // Borrowing one union field automatically borrows all its fields. 
- match base_ty.sty { - ty::Adt(adt_def, _) if adt_def.is_union() => match result { - RestrictionResult::Safe => RestrictionResult::Safe, - RestrictionResult::SafeIf(base_lp, mut base_vec) => { - for (i, field) in adt_def.non_enum_variant().fields.iter().enumerate() { - let field = InteriorKind::InteriorField( - mc::FieldIndex(i, field.ident.name) - ); - let field_ty = if field == interior { - cmt.ty - } else { - self.bccx.tcx.types.err // Doesn't matter - }; - let sibling_lp_kind = LpExtend(base_lp.clone(), cmt.mutbl, - LpInterior(opt_variant_id, field)); - let sibling_lp = Rc::new(LoanPath::new(sibling_lp_kind, field_ty)); - base_vec.push(sibling_lp); - } - - let lp = new_lp(LpExtend(base_lp, cmt.mutbl, - LpInterior(opt_variant_id, interior))); - RestrictionResult::SafeIf(lp, base_vec) - } - }, - _ => self.extend(result, &cmt, LpInterior(opt_variant_id, interior)) - } - } - - Categorization::StaticItem => { - RestrictionResult::Safe - } - - Categorization::Deref(cmt_base, pk) => { - match pk { - mc::Unique => { - // R-Deref-Send-Pointer - // - // When we borrow the interior of a box, we - // cannot permit the base to be mutated, because that - // would cause the unique pointer to be freed. - // - // Eventually we should make these non-special and - // just rely on Deref implementation. - let result = self.restrict(&cmt_base); - self.extend(result, &cmt, LpDeref(pk)) - } - mc::BorrowedPtr(bk, lt) => { - // R-Deref-[Mut-]Borrowed - if !self.bccx.is_subregion_of(self.loan_region, lt) { - self.bccx.signal_error(); - return RestrictionResult::Safe; - } - - match bk { - ty::ImmBorrow => RestrictionResult::Safe, - ty::MutBorrow | ty::UniqueImmBorrow => { - // R-Deref-Mut-Borrowed - // - // The referent can be aliased after the - // references lifetime ends (by a newly-unfrozen - // borrow). - let result = self.restrict(&cmt_base); - self.extend(result, &cmt, LpDeref(pk)) - } - } - } - // Borrowck is not relevant for raw pointers - mc::UnsafePtr(..) => RestrictionResult::Safe - } - } - } - } - - fn extend(&self, - result: RestrictionResult<'tcx>, - cmt: &mc::cmt_<'tcx>, - elem: LoanPathElem<'tcx>) -> RestrictionResult<'tcx> { - match result { - RestrictionResult::Safe => RestrictionResult::Safe, - RestrictionResult::SafeIf(base_lp, mut base_vec) => { - let v = LpExtend(base_lp, cmt.mutbl, elem); - let lp = Rc::new(LoanPath::new(v, cmt.ty)); - base_vec.push(lp.clone()); - RestrictionResult::SafeIf(lp, base_vec) - } - } - } -} diff --git a/src/librustc_ast_borrowck/borrowck/mod.rs b/src/librustc_ast_borrowck/borrowck/mod.rs deleted file mode 100644 index 23d5480c60..0000000000 --- a/src/librustc_ast_borrowck/borrowck/mod.rs +++ /dev/null @@ -1,621 +0,0 @@ -//! See The Book chapter on the borrow checker for more details. 
- -#![allow(non_camel_case_types)] - -pub use LoanPathKind::*; -pub use LoanPathElem::*; - -use InteriorKind::*; - -use rustc::hir::HirId; -use rustc::hir::Node; -use rustc::middle::borrowck::{BorrowCheckResult, SignalledError}; -use rustc::hir::def_id::{DefId, LocalDefId}; -use rustc::middle::mem_categorization as mc; -use rustc::middle::mem_categorization::Categorization; -use rustc::middle::region; -use rustc::middle::free_region::RegionRelations; -use rustc::ty::{self, Ty, TyCtxt}; -use rustc::ty::query::Providers; - -use std::borrow::Cow; -use std::cell::{Cell}; -use std::fmt; -use std::rc::Rc; -use std::hash::{Hash, Hasher}; -use log::debug; - -use rustc::hir; - -use crate::cfg; -use crate::dataflow::{DataFlowContext, BitwiseOperator, DataFlowOperator, KillFrom}; - -pub mod check_loans; - -pub mod gather_loans; - -pub mod move_data; - -#[derive(Clone, Copy)] -pub struct LoanDataFlowOperator; - -pub type LoanDataFlow<'tcx> = DataFlowContext<'tcx, LoanDataFlowOperator>; - -pub fn check_crate(tcx: TyCtxt<'_>) { - tcx.par_body_owners(|body_owner_def_id| { - tcx.ensure().borrowck(body_owner_def_id); - }); -} - -pub fn provide(providers: &mut Providers<'_>) { - *providers = Providers { - borrowck, - ..*providers - }; -} - -/// Collection of conclusions determined via borrow checker analyses. -pub struct AnalysisData<'tcx> { - pub all_loans: Vec>, - pub loans: DataFlowContext<'tcx, LoanDataFlowOperator>, - pub move_data: move_data::FlowedMoveData<'tcx>, -} - -fn borrowck(tcx: TyCtxt<'_>, owner_def_id: DefId) -> &BorrowCheckResult { - assert!(tcx.use_ast_borrowck() || tcx.migrate_borrowck()); - - debug!("borrowck(body_owner_def_id={:?})", owner_def_id); - - let signalled_error = tcx.check_match(owner_def_id); - if let SignalledError::SawSomeError = signalled_error { - return tcx.arena.alloc(BorrowCheckResult { - signalled_any_error: SignalledError::SawSomeError, - }) - } - - let owner_id = tcx.hir().as_local_hir_id(owner_def_id).unwrap(); - - match tcx.hir().get(owner_id) { - Node::Ctor(..) => { - // We get invoked with anything that has MIR, but some of - // those things (notably the synthesized constructors from - // tuple structs/variants) do not have an associated body - // and do not need borrowchecking. - return tcx.arena.alloc(BorrowCheckResult { - signalled_any_error: SignalledError::NoErrorsSeen, - }) - } - _ => { } - } - - let body_id = tcx.hir().body_owned_by(owner_id); - let tables = tcx.typeck_tables_of(owner_def_id); - let region_scope_tree = tcx.region_scope_tree(owner_def_id); - let body = tcx.hir().body(body_id); - let mut bccx = BorrowckCtxt { - tcx, - tables, - region_scope_tree, - owner_def_id, - body, - signalled_any_error: Cell::new(SignalledError::NoErrorsSeen), - }; - - // Eventually, borrowck will always read the MIR, but at the - // moment we do not. So, for now, we always force MIR to be - // constructed for a given fn, since this may result in errors - // being reported and we want that to happen. - // - // Note that `mir_validated` is a "stealable" result; the - // thief, `optimized_mir()`, forces borrowck, so we know that - // is not yet stolen. - tcx.ensure().mir_validated(owner_def_id); - - // option dance because you can't capture an uninitialized variable - // by mut-ref. 
- let mut cfg = None; - if let Some(AnalysisData { all_loans, - loans: loan_dfcx, - move_data: flowed_moves }) = - build_borrowck_dataflow_data(&mut bccx, false, body_id, - |bccx| { - cfg = Some(cfg::CFG::new(bccx.tcx, &body)); - cfg.as_mut().unwrap() - }) - { - check_loans::check_loans(&mut bccx, &loan_dfcx, &flowed_moves, &all_loans, body); - } - - tcx.arena.alloc(BorrowCheckResult { - signalled_any_error: bccx.signalled_any_error.into_inner(), - }) -} - -fn build_borrowck_dataflow_data<'a, 'c, 'tcx, F>(this: &mut BorrowckCtxt<'a, 'tcx>, - force_analysis: bool, - body_id: hir::BodyId, - get_cfg: F) - -> Option> - where F: FnOnce(&mut BorrowckCtxt<'a, 'tcx>) -> &'c cfg::CFG -{ - // Check the body of fn items. - let (all_loans, move_data) = - gather_loans::gather_loans_in_fn(this, body_id); - - if !force_analysis && move_data.is_empty() && all_loans.is_empty() { - // large arrays of data inserted as constants can take a lot of - // time and memory to borrow-check - see issue #36799. However, - // they don't have places, so no borrow-check is actually needed. - // Recognize that case and skip borrow-checking. - debug!("skipping loan propagation for {:?} because of no loans", body_id); - return None; - } else { - debug!("propagating loans in {:?}", body_id); - } - - let cfg = get_cfg(this); - let mut loan_dfcx = - DataFlowContext::new(this.tcx, - "borrowck", - Some(this.body), - cfg, - LoanDataFlowOperator, - all_loans.len()); - for (loan_idx, loan) in all_loans.iter().enumerate() { - loan_dfcx.add_gen(loan.gen_scope.item_local_id(), loan_idx); - loan_dfcx.add_kill(KillFrom::ScopeEnd, - loan.kill_scope.item_local_id(), - loan_idx); - } - loan_dfcx.add_kills_from_flow_exits(cfg); - loan_dfcx.propagate(cfg, this.body); - - let flowed_moves = move_data::FlowedMoveData::new(move_data, - this, - cfg, - this.body); - - Some(AnalysisData { all_loans, - loans: loan_dfcx, - move_data:flowed_moves }) -} - -/// Accessor for introspective clients inspecting `AnalysisData` and -/// the `BorrowckCtxt` itself , e.g., the flowgraph visualizer. -pub fn build_borrowck_dataflow_data_for_fn<'a, 'tcx>( - tcx: TyCtxt<'tcx>, - body_id: hir::BodyId, - cfg: &cfg::CFG) - -> (BorrowckCtxt<'a, 'tcx>, AnalysisData<'tcx>) -{ - let owner_id = tcx.hir().body_owner(body_id); - let owner_def_id = tcx.hir().local_def_id(owner_id); - let tables = tcx.typeck_tables_of(owner_def_id); - let region_scope_tree = tcx.region_scope_tree(owner_def_id); - let body = tcx.hir().body(body_id); - let mut bccx = BorrowckCtxt { - tcx, - tables, - region_scope_tree, - owner_def_id, - body, - signalled_any_error: Cell::new(SignalledError::NoErrorsSeen), - }; - - let dataflow_data = build_borrowck_dataflow_data(&mut bccx, true, body_id, |_| cfg); - (bccx, dataflow_data.unwrap()) -} - -// ---------------------------------------------------------------------- -// Type definitions - -pub struct BorrowckCtxt<'a, 'tcx> { - tcx: TyCtxt<'tcx>, - - // tables for the current thing we are checking; set to - // Some in `borrowck_fn` and cleared later - tables: &'a ty::TypeckTables<'tcx>, - - region_scope_tree: &'tcx region::ScopeTree, - - owner_def_id: DefId, - - body: &'tcx hir::Body, - - signalled_any_error: Cell, -} - - -impl<'a, 'tcx: 'a> BorrowckCtxt<'a, 'tcx> { - fn signal_error(&self) { - self.signalled_any_error.set(SignalledError::SawSomeError); - } -} - -/////////////////////////////////////////////////////////////////////////// -// Loans and loan paths - -/// Record of a loan that was issued. 
-pub struct Loan<'tcx> { - index: usize, - loan_path: Rc>, - kind: ty::BorrowKind, - restricted_paths: Vec>>, - - /// gen_scope indicates where loan is introduced. Typically the - /// loan is introduced at the point of the borrow, but in some - /// cases, notably method arguments, the loan may be introduced - /// only later, once it comes into scope. See also - /// `GatherLoanCtxt::compute_gen_scope`. - gen_scope: region::Scope, - - /// kill_scope indicates when the loan goes out of scope. This is - /// either when the lifetime expires or when the local variable - /// which roots the loan-path goes out of scope, whichever happens - /// faster. See also `GatherLoanCtxt::compute_kill_scope`. - kill_scope: region::Scope, -} - -impl<'tcx> Loan<'tcx> { - pub fn loan_path(&self) -> Rc> { - self.loan_path.clone() - } -} - -#[derive(Eq)] -pub struct LoanPath<'tcx> { - kind: LoanPathKind<'tcx>, - ty: Ty<'tcx>, -} - -impl<'tcx> PartialEq for LoanPath<'tcx> { - fn eq(&self, that: &LoanPath<'tcx>) -> bool { - self.kind == that.kind - } -} - -impl<'tcx> Hash for LoanPath<'tcx> { - fn hash(&self, state: &mut H) { - self.kind.hash(state); - } -} - -#[derive(PartialEq, Eq, Hash, Debug)] -pub enum LoanPathKind<'tcx> { - LpVar(hir::HirId), // `x` in README.md - LpUpvar(ty::UpvarId), // `x` captured by-value into closure - LpDowncast(Rc>, DefId), // `x` downcast to particular enum variant - LpExtend(Rc>, mc::MutabilityCategory, LoanPathElem<'tcx>) -} - -impl<'tcx> LoanPath<'tcx> { - fn new(kind: LoanPathKind<'tcx>, ty: Ty<'tcx>) -> LoanPath<'tcx> { - LoanPath { kind: kind, ty: ty } - } - - fn to_type(&self) -> Ty<'tcx> { self.ty } -} - -// FIXME (pnkfelix): See discussion here -// https://github.com/pnkfelix/rust/commit/ -// b2b39e8700e37ad32b486b9a8409b50a8a53aa51#commitcomment-7892003 -const DOWNCAST_PRINTED_OPERATOR: &'static str = " as "; - -// A local, "cleaned" version of `mc::InteriorKind` that drops -// information that is not relevant to loan-path analysis. (In -// particular, the distinction between how precisely an array-element -// is tracked is irrelevant here.) -#[derive(Clone, Copy, PartialEq, Eq, Hash)] -pub enum InteriorKind { - InteriorField(mc::FieldIndex), - InteriorElement, -} - -trait ToInteriorKind { fn cleaned(self) -> InteriorKind; } -impl ToInteriorKind for mc::InteriorKind { - fn cleaned(self) -> InteriorKind { - match self { - mc::InteriorField(name) => InteriorField(name), - mc::InteriorElement(_) => InteriorElement, - } - } -} - -// This can be: -// - a pointer dereference (`*P` in README.md) -// - a field reference, with an optional definition of the containing -// enum variant (`P.f` in README.md) -// `DefId` is present when the field is part of struct that is in -// a variant of an enum. For instance in: -// `enum E { X { foo: u32 }, Y { foo: u32 }}` -// each `foo` is qualified by the definitition id of the variant (`X` or `Y`). 
-#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] -pub enum LoanPathElem<'tcx> { - LpDeref(mc::PointerKind<'tcx>), - LpInterior(Option, InteriorKind), -} - -fn closure_to_block(closure_id: LocalDefId, tcx: TyCtxt<'_>) -> HirId { - let closure_id = tcx.hir().local_def_id_to_hir_id(closure_id); - match tcx.hir().get(closure_id) { - Node::Expr(expr) => match expr.node { - hir::ExprKind::Closure(.., body_id, _, _) => { - body_id.hir_id - } - _ => { - bug!("encountered non-closure id: {}", closure_id) - } - }, - _ => bug!("encountered non-expr id: {}", closure_id) - } -} - -impl<'a, 'tcx> LoanPath<'tcx> { - pub fn kill_scope(&self, bccx: &BorrowckCtxt<'a, 'tcx>) -> region::Scope { - match self.kind { - LpVar(hir_id) => { - bccx.region_scope_tree.var_scope(hir_id.local_id) - } - LpUpvar(upvar_id) => { - let block_id = closure_to_block(upvar_id.closure_expr_id, bccx.tcx); - region::Scope { id: block_id.local_id, data: region::ScopeData::Node } - } - LpDowncast(ref base, _) | - LpExtend(ref base, ..) => base.kill_scope(bccx), - } - } -} - -// Avoid "cannot borrow immutable field `self.x` as mutable" as that implies that a field *can* be -// mutable independently of the struct it belongs to. (#35937) -pub fn opt_loan_path_is_field<'tcx>(cmt: &mc::cmt_<'tcx>) -> (Option>>, bool) { - let new_lp = |v: LoanPathKind<'tcx>| Rc::new(LoanPath::new(v, cmt.ty)); - - match cmt.cat { - Categorization::Rvalue(..) | - Categorization::ThreadLocal(..) | - Categorization::StaticItem => { - (None, false) - } - - Categorization::Local(id) => { - (Some(new_lp(LpVar(id))), false) - } - - Categorization::Upvar(mc::Upvar { id, .. }) => { - (Some(new_lp(LpUpvar(id))), false) - } - - Categorization::Deref(ref cmt_base, pk) => { - let lp = opt_loan_path_is_field(cmt_base); - (lp.0.map(|lp| { - new_lp(LpExtend(lp, cmt.mutbl, LpDeref(pk))) - }), lp.1) - } - - Categorization::Interior(ref cmt_base, ik) => { - (opt_loan_path(cmt_base).map(|lp| { - let opt_variant_id = match cmt_base.cat { - Categorization::Downcast(_, did) => Some(did), - _ => None - }; - new_lp(LpExtend(lp, cmt.mutbl, LpInterior(opt_variant_id, ik.cleaned()))) - }), true) - } - - Categorization::Downcast(ref cmt_base, variant_def_id) => { - let lp = opt_loan_path_is_field(cmt_base); - (lp.0.map(|lp| { - new_lp(LpDowncast(lp, variant_def_id)) - }), lp.1) - } - } -} - -/// Computes the `LoanPath` (if any) for a `cmt`. -/// Note that this logic is somewhat duplicated in -/// the method `compute()` found in `gather_loans::restrictions`, -/// which allows it to share common loan path pieces as it -/// traverses the CMT. 
-pub fn opt_loan_path<'tcx>(cmt: &mc::cmt_<'tcx>) -> Option>> { - opt_loan_path_is_field(cmt).0 -} - -/////////////////////////////////////////////////////////////////////////// -// Misc - -impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { - pub fn is_subregion_of(&self, - r_sub: ty::Region<'tcx>, - r_sup: ty::Region<'tcx>) - -> bool - { - let region_rels = RegionRelations::new(self.tcx, - self.owner_def_id, - &self.region_scope_tree, - &self.tables.free_region_map); - region_rels.is_subregion_of(r_sub, r_sup) - } - - pub fn append_loan_path_to_string(&self, - loan_path: &LoanPath<'tcx>, - out: &mut String) { - match loan_path.kind { - LpUpvar(ty::UpvarId { var_path: ty::UpvarPath { hir_id: id }, closure_expr_id: _ }) => { - out.push_str(&self.tcx.hir().name(id).as_str()); - } - LpVar(id) => { - out.push_str(&self.tcx.hir().name(id).as_str()); - } - - LpDowncast(ref lp_base, variant_def_id) => { - out.push('('); - self.append_loan_path_to_string(&lp_base, out); - out.push_str(DOWNCAST_PRINTED_OPERATOR); - out.push_str(&self.tcx.def_path_str(variant_def_id)); - out.push(')'); - } - - LpExtend(ref lp_base, _, LpInterior(_, InteriorField(mc::FieldIndex(_, info)))) => { - self.append_autoderefd_loan_path_to_string(&lp_base, out); - out.push('.'); - out.push_str(&info.as_str()); - } - - LpExtend(ref lp_base, _, LpInterior(_, InteriorElement)) => { - self.append_autoderefd_loan_path_to_string(&lp_base, out); - out.push_str("[..]"); - } - - LpExtend(ref lp_base, _, LpDeref(_)) => { - out.push('*'); - self.append_loan_path_to_string(&lp_base, out); - } - } - } - - pub fn append_autoderefd_loan_path_to_string(&self, - loan_path: &LoanPath<'tcx>, - out: &mut String) { - match loan_path.kind { - LpExtend(ref lp_base, _, LpDeref(_)) => { - // For a path like `(*x).f` or `(*x)[3]`, autoderef - // rules would normally allow users to omit the `*x`. - // So just serialize such paths to `x.f` or x[3]` respectively. - self.append_autoderefd_loan_path_to_string(&lp_base, out) - } - - LpDowncast(ref lp_base, variant_def_id) => { - out.push('('); - self.append_autoderefd_loan_path_to_string(&lp_base, out); - out.push_str(DOWNCAST_PRINTED_OPERATOR); - out.push_str(&self.tcx.def_path_str(variant_def_id)); - out.push(')'); - } - - LpVar(..) | LpUpvar(..) 
| LpExtend(.., LpInterior(..)) => { - self.append_loan_path_to_string(loan_path, out) - } - } - } - - pub fn loan_path_to_string(&self, loan_path: &LoanPath<'tcx>) -> String { - let mut result = String::new(); - self.append_loan_path_to_string(loan_path, &mut result); - result - } - - pub fn cmt_to_cow_str(&self, cmt: &mc::cmt_<'tcx>) -> Cow<'static, str> { - cmt.descriptive_string(self.tcx) - } - - pub fn cmt_to_path_or_string(&self, cmt: &mc::cmt_<'tcx>) -> String { - match opt_loan_path(cmt) { - Some(lp) => format!("`{}`", self.loan_path_to_string(&lp)), - None => self.cmt_to_cow_str(cmt).into_owned(), - } - } -} - -impl BitwiseOperator for LoanDataFlowOperator { - #[inline] - fn join(&self, succ: usize, pred: usize) -> usize { - succ | pred // loans from both preds are in scope - } -} - -impl DataFlowOperator for LoanDataFlowOperator { - #[inline] - fn initial_value(&self) -> bool { - false // no loans in scope by default - } -} - -impl fmt::Debug for InteriorKind { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self { - InteriorField(mc::FieldIndex(_, info)) => write!(f, "{}", info), - InteriorElement => write!(f, "[]"), - } - } -} - -impl<'tcx> fmt::Debug for Loan<'tcx> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "Loan_{}({:?}, {:?}, {:?}-{:?}, {:?})", - self.index, - self.loan_path, - self.kind, - self.gen_scope, - self.kill_scope, - self.restricted_paths) - } -} - -impl<'tcx> fmt::Debug for LoanPath<'tcx> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.kind { - LpVar(id) => { - write!(f, "$({})", ty::tls::with(|tcx| tcx.hir().node_to_string(id))) - } - - LpUpvar(ty::UpvarId{ var_path: ty::UpvarPath {hir_id: var_id}, closure_expr_id }) => { - let s = ty::tls::with(|tcx| { - tcx.hir().node_to_string(var_id) - }); - write!(f, "$({} captured by id={:?})", s, closure_expr_id) - } - - LpDowncast(ref lp, variant_def_id) => { - let variant_str = if variant_def_id.is_local() { - ty::tls::with(|tcx| tcx.def_path_str(variant_def_id)) - } else { - format!("{:?}", variant_def_id) - }; - write!(f, "({:?}{}{})", lp, DOWNCAST_PRINTED_OPERATOR, variant_str) - } - - LpExtend(ref lp, _, LpDeref(_)) => { - write!(f, "{:?}.*", lp) - } - - LpExtend(ref lp, _, LpInterior(_, ref interior)) => { - write!(f, "{:?}.{:?}", lp, interior) - } - } - } -} - -impl<'tcx> fmt::Display for LoanPath<'tcx> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.kind { - LpVar(id) => { - write!(f, "$({})", ty::tls::with(|tcx| tcx.hir().hir_to_user_string(id))) - } - - LpUpvar(ty::UpvarId{ var_path: ty::UpvarPath { hir_id }, closure_expr_id: _ }) => { - let s = ty::tls::with(|tcx| { - tcx.hir().node_to_string(hir_id) - }); - write!(f, "$({} captured by closure)", s) - } - - LpDowncast(ref lp, variant_def_id) => { - let variant_str = if variant_def_id.is_local() { - ty::tls::with(|tcx| tcx.def_path_str(variant_def_id)) - } else { - format!("{:?}", variant_def_id) - }; - write!(f, "({}{}{})", lp, DOWNCAST_PRINTED_OPERATOR, variant_str) - } - - LpExtend(ref lp, _, LpDeref(_)) => { - write!(f, "{}.*", lp) - } - - LpExtend(ref lp, _, LpInterior(_, ref interior)) => { - write!(f, "{}.{:?}", lp, interior) - } - } - } -} diff --git a/src/librustc_ast_borrowck/borrowck/move_data.rs b/src/librustc_ast_borrowck/borrowck/move_data.rs deleted file mode 100644 index 67d818161b..0000000000 --- a/src/librustc_ast_borrowck/borrowck/move_data.rs +++ /dev/null @@ -1,730 +0,0 @@ -//! Data structures used for tracking moves. 
Please see the extensive -//! comments in the section "Moves and initialization" in `README.md`. - -use crate::dataflow::{DataFlowContext, BitwiseOperator, DataFlowOperator, KillFrom}; - -use crate::borrowck::*; -use crate::cfg; -use rustc::ty::{self, TyCtxt}; -use rustc::util::nodemap::FxHashMap; - -use std::cell::RefCell; -use std::rc::Rc; -use std::usize; -use syntax_pos::Span; -use rustc::hir; -use log::debug; - -#[derive(Default)] -pub struct MoveData<'tcx> { - /// Move paths. See section "Move paths" in `README.md`. - pub paths: RefCell>>, - - /// Cache of loan path to move path index, for easy lookup. - pub path_map: RefCell>, MovePathIndex>>, - - /// Each move or uninitialized variable gets an entry here. - pub moves: RefCell>, - - /// Assignments to a variable, like `x = foo`. These are assigned - /// bits for dataflow, since we must track them to ensure that - /// immutable variables are assigned at most once along each path. - pub var_assignments: RefCell>, - - /// Assignments to a path, like `x.f = foo`. These are not - /// assigned dataflow bits, but we track them because they still - /// kill move bits. - pub path_assignments: RefCell>, -} - -pub struct FlowedMoveData<'tcx> { - pub move_data: MoveData<'tcx>, - - pub dfcx_moves: MoveDataFlow<'tcx>, - - // We could (and maybe should, for efficiency) combine both move - // and assign data flow into one, but this way it's easier to - // distinguish the bits that correspond to moves and assignments. - pub dfcx_assign: AssignDataFlow<'tcx>, -} - -/// Index into `MoveData.paths`, used like a pointer -#[derive(Copy, PartialEq, Eq, PartialOrd, Ord, Debug)] -pub struct MovePathIndex(usize); - -impl MovePathIndex { - fn get(&self) -> usize { - let MovePathIndex(v) = *self; v - } -} - -impl Clone for MovePathIndex { - fn clone(&self) -> MovePathIndex { - MovePathIndex(self.get()) - } -} - -#[allow(non_upper_case_globals)] -const InvalidMovePathIndex: MovePathIndex = MovePathIndex(usize::MAX); - -/// Index into `MoveData.moves`, used like a pointer -#[derive(Copy, Clone, PartialEq)] -pub struct MoveIndex(usize); - -impl MoveIndex { - fn get(&self) -> usize { - let MoveIndex(v) = *self; v - } -} - -#[allow(non_upper_case_globals)] -const InvalidMoveIndex: MoveIndex = MoveIndex(usize::MAX); - -pub struct MovePath<'tcx> { - /// Loan path corresponding to this move path - pub loan_path: Rc>, - - /// Parent pointer, `InvalidMovePathIndex` if root - pub parent: MovePathIndex, - - /// Head of linked list of moves to this path, - /// `InvalidMoveIndex` if not moved - pub first_move: MoveIndex, - - /// First node in linked list of children, `InvalidMovePathIndex` if leaf - pub first_child: MovePathIndex, - - /// Next node in linked list of parent's children (siblings), - /// `InvalidMovePathIndex` if none. - pub next_sibling: MovePathIndex, -} - - -#[derive(Copy, Clone)] -pub struct Move { - /// Path being moved. - pub path: MovePathIndex, - - /// ID of node that is doing the move. - pub id: hir::ItemLocalId, - - /// Next node in linked list of moves from `path`, or `InvalidMoveIndex` - pub next_move: MoveIndex -} - -#[derive(Copy, Clone)] -pub struct Assignment { - /// Path being assigned. 
- pub path: MovePathIndex, - - /// ID where assignment occurs - pub id: hir::ItemLocalId, - - /// span of node where assignment occurs - pub span: Span, -} - -#[derive(Clone, Copy)] -pub struct MoveDataFlowOperator; - -pub type MoveDataFlow<'tcx> = DataFlowContext<'tcx, MoveDataFlowOperator>; - -#[derive(Clone, Copy)] -pub struct AssignDataFlowOperator; - -pub type AssignDataFlow<'tcx> = DataFlowContext<'tcx, AssignDataFlowOperator>; - -fn loan_path_is_precise(loan_path: &LoanPath<'_>) -> bool { - match loan_path.kind { - LpVar(_) | LpUpvar(_) => { - true - } - LpExtend(.., LpInterior(_, InteriorKind::InteriorElement)) => { - // Paths involving element accesses a[i] do not refer to a unique - // location, as there is no accurate tracking of the indices. - // - // (Paths involving element accesses via slice pattern bindings - // can in principle be tracked precisely, but that is future - // work. For now, continue claiming that they are imprecise.) - false - } - LpDowncast(ref lp_base, _) | - LpExtend(ref lp_base, ..) => { - loan_path_is_precise(&lp_base) - } - } -} - -impl MoveData<'tcx> { - /// Returns `true` if there are no trackable assignments or moves - /// in this move data -- that means that there is nothing that - /// could cause a borrow error. - pub fn is_empty(&self) -> bool { - self.moves.borrow().is_empty() && - self.path_assignments.borrow().is_empty() && - self.var_assignments.borrow().is_empty() - } - - pub fn path_loan_path(&self, index: MovePathIndex) -> Rc> { - (*self.paths.borrow())[index.get()].loan_path.clone() - } - - fn path_parent(&self, index: MovePathIndex) -> MovePathIndex { - (*self.paths.borrow())[index.get()].parent - } - - fn path_first_move(&self, index: MovePathIndex) -> MoveIndex { - (*self.paths.borrow())[index.get()].first_move - } - - /// Returns the index of first child, or `InvalidMovePathIndex` if - /// `index` is leaf. - fn path_first_child(&self, index: MovePathIndex) -> MovePathIndex { - (*self.paths.borrow())[index.get()].first_child - } - - fn path_next_sibling(&self, index: MovePathIndex) -> MovePathIndex { - (*self.paths.borrow())[index.get()].next_sibling - } - - fn set_path_first_move(&self, - index: MovePathIndex, - first_move: MoveIndex) { - (*self.paths.borrow_mut())[index.get()].first_move = first_move - } - - fn set_path_first_child(&self, - index: MovePathIndex, - first_child: MovePathIndex) { - (*self.paths.borrow_mut())[index.get()].first_child = first_child - } - - fn move_next_move(&self, index: MoveIndex) -> MoveIndex { - //! Type safe indexing operator - (*self.moves.borrow())[index.get()].next_move - } - - fn is_var_path(&self, index: MovePathIndex) -> bool { - //! True if `index` refers to a variable - self.path_parent(index) == InvalidMovePathIndex - } - - /// Returns the existing move path index for `lp`, if any, and otherwise adds a new index for - /// `lp` and any of its base paths that do not yet have an index. - pub fn move_path(&self, tcx: TyCtxt<'tcx>, lp: Rc>) -> MovePathIndex { - if let Some(&index) = self.path_map.borrow().get(&lp) { - return index; - } - - let index = match lp.kind { - LpVar(..) | LpUpvar(..) => { - let index = MovePathIndex(self.paths.borrow().len()); - - self.paths.borrow_mut().push(MovePath { - loan_path: lp.clone(), - parent: InvalidMovePathIndex, - first_move: InvalidMoveIndex, - first_child: InvalidMovePathIndex, - next_sibling: InvalidMovePathIndex, - }); - - index - } - - LpDowncast(ref base, _) | - LpExtend(ref base, ..) 
=> { - let parent_index = self.move_path(tcx, base.clone()); - - let index = MovePathIndex(self.paths.borrow().len()); - - let next_sibling = self.path_first_child(parent_index); - self.set_path_first_child(parent_index, index); - - self.paths.borrow_mut().push(MovePath { - loan_path: lp.clone(), - parent: parent_index, - first_move: InvalidMoveIndex, - first_child: InvalidMovePathIndex, - next_sibling, - }); - - index - } - }; - - debug!("move_path(lp={:?}, index={:?})", - lp, - index); - - assert_eq!(index.get(), self.paths.borrow().len() - 1); - self.path_map.borrow_mut().insert(lp, index); - return index; - } - - fn existing_move_path(&self, lp: &Rc>) - -> Option { - self.path_map.borrow().get(lp).cloned() - } - - fn existing_base_paths(&self, lp: &Rc>) - -> Vec { - let mut result = vec![]; - self.add_existing_base_paths(lp, &mut result); - result - } - - /// Adds any existing move path indices for `lp` and any base paths of `lp` to `result`, but - /// does not add new move paths - fn add_existing_base_paths(&self, lp: &Rc>, - result: &mut Vec) { - match self.path_map.borrow().get(lp).cloned() { - Some(index) => { - self.each_base_path(index, |p| { - result.push(p); - true - }); - } - None => { - match lp.kind { - LpVar(..) | LpUpvar(..) => { } - LpDowncast(ref b, _) | - LpExtend(ref b, ..) => { - self.add_existing_base_paths(b, result); - } - } - } - } - - } - - /// Adds a new move entry for a move of `lp` that occurs at location `id` with kind `kind`. - pub fn add_move( - &self, - tcx: TyCtxt<'tcx>, - orig_lp: Rc>, - id: hir::ItemLocalId, - ) { - // Moving one union field automatically moves all its fields. Also move siblings of - // all parent union fields, moves do not propagate upwards automatically. - let mut lp = orig_lp.clone(); - while let LpExtend(ref base_lp, mutbl, lp_elem) = lp.clone().kind { - if let (&ty::Adt(adt_def, _), LpInterior(opt_variant_id, interior)) - = (&base_lp.ty.sty, lp_elem) { - if adt_def.is_union() { - for (i, field) in adt_def.non_enum_variant().fields.iter().enumerate() { - let field = - InteriorKind::InteriorField(mc::FieldIndex(i, field.ident.name)); - if field != interior { - let sibling_lp_kind = - LpExtend(base_lp.clone(), mutbl, LpInterior(opt_variant_id, field)); - let sibling_lp = Rc::new(LoanPath::new(sibling_lp_kind, tcx.types.err)); - self.add_move_helper(tcx, sibling_lp, id); - } - } - } - } - lp = base_lp.clone(); - } - - self.add_move_helper(tcx, orig_lp, id); - } - - fn add_move_helper( - &self, - tcx: TyCtxt<'tcx>, - lp: Rc>, - id: hir::ItemLocalId, - ) { - debug!("add_move(lp={:?}, id={:?})", lp, id); - - let path_index = self.move_path(tcx, lp); - let move_index = MoveIndex(self.moves.borrow().len()); - - let next_move = self.path_first_move(path_index); - self.set_path_first_move(path_index, move_index); - - self.moves.borrow_mut().push(Move { - path: path_index, - id, - next_move, - }); - } - - /// Adds a new record for an assignment to `lp` that occurs at location `id` with the given - /// `span`. - pub fn add_assignment( - &self, - tcx: TyCtxt<'tcx>, - lp: Rc>, - assign_id: hir::ItemLocalId, - span: Span, - ) { - // Assigning to one union field automatically assigns to all its fields. 
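        // Illustrative note (added by the editor, not in the original source):
        // given
        //     union U { a: u32, b: f32 }
        // an assignment to `u.a` also records an assignment to the sibling
        // path `u.b`, because the fields share storage; the code below builds
        // one sibling loan path per field of the union.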
- if let LpExtend(ref base_lp, mutbl, LpInterior(opt_variant_id, interior)) = lp.kind { - if let ty::Adt(adt_def, _) = base_lp.ty.sty { - if adt_def.is_union() { - for (i, field) in adt_def.non_enum_variant().fields.iter().enumerate() { - let field = - InteriorKind::InteriorField(mc::FieldIndex(i, field.ident.name)); - let field_ty = if field == interior { - lp.ty - } else { - tcx.types.err // Doesn't matter - }; - let sibling_lp_kind = LpExtend(base_lp.clone(), mutbl, - LpInterior(opt_variant_id, field)); - let sibling_lp = Rc::new(LoanPath::new(sibling_lp_kind, field_ty)); - self.add_assignment_helper(tcx, sibling_lp, assign_id, - span); - } - return; - } - } - } - - self.add_assignment_helper(tcx, lp, assign_id, span); - } - - fn add_assignment_helper( - &self, - tcx: TyCtxt<'tcx>, - lp: Rc>, - assign_id: hir::ItemLocalId, - span: Span, - ) { - debug!("add_assignment(lp={:?}, assign_id={:?}", lp, assign_id); - - let path_index = self.move_path(tcx, lp.clone()); - - let assignment = Assignment { - path: path_index, - id: assign_id, - span, - }; - - if self.is_var_path(path_index) { - debug!("add_assignment[var](lp={:?}, assignment={}, path_index={:?})", - lp, self.var_assignments.borrow().len(), path_index); - - self.var_assignments.borrow_mut().push(assignment); - } else { - debug!("add_assignment[path](lp={:?}, path_index={:?})", - lp, path_index); - - self.path_assignments.borrow_mut().push(assignment); - } - } - - /// Adds the gen/kills for the various moves and - /// assignments into the provided data flow contexts. - /// Moves are generated by moves and killed by assignments and - /// scoping. Assignments are generated by assignment to variables and - /// killed by scoping. See `README.md` for more details. - fn add_gen_kills( - &self, - bccx: &BorrowckCtxt<'_, 'tcx>, - dfcx_moves: &mut MoveDataFlow<'_>, - dfcx_assign: &mut AssignDataFlow<'_>, - ) { - for (i, the_move) in self.moves.borrow().iter().enumerate() { - dfcx_moves.add_gen(the_move.id, i); - } - - for (i, assignment) in self.var_assignments.borrow().iter().enumerate() { - dfcx_assign.add_gen(assignment.id, i); - self.kill_moves(assignment.path, assignment.id, - KillFrom::Execution, dfcx_moves); - } - - for assignment in self.path_assignments.borrow().iter() { - self.kill_moves(assignment.path, assignment.id, - KillFrom::Execution, dfcx_moves); - } - - // Kill all moves related to a variable `x` when - // it goes out of scope: - for path in self.paths.borrow().iter() { - match path.loan_path.kind { - LpVar(..) | LpUpvar(..) | LpDowncast(..) => { - let kill_scope = path.loan_path.kill_scope(bccx); - let path = *self.path_map.borrow().get(&path.loan_path).unwrap(); - self.kill_moves(path, kill_scope.item_local_id(), - KillFrom::ScopeEnd, dfcx_moves); - } - LpExtend(..) => {} - } - } - - // Kill all assignments when the variable goes out of scope: - for (assignment_index, assignment) in - self.var_assignments.borrow().iter().enumerate() { - let lp = self.path_loan_path(assignment.path); - match lp.kind { - LpVar(..) | LpUpvar(..) | LpDowncast(..) => { - let kill_scope = lp.kill_scope(bccx); - dfcx_assign.add_kill(KillFrom::ScopeEnd, - kill_scope.item_local_id(), - assignment_index); - } - LpExtend(..) 
=> { - bug!("var assignment for non var path"); - } - } - } - } - - fn each_base_path(&self, index: MovePathIndex, mut f: F) -> bool where - F: FnMut(MovePathIndex) -> bool, - { - let mut p = index; - while p != InvalidMovePathIndex { - if !f(p) { - return false; - } - p = self.path_parent(p); - } - return true; - } - - // FIXME(#19596) This is a workaround, but there should be better way to do this - fn each_extending_path_(&self, index: MovePathIndex, f: &mut F) -> bool where - F: FnMut(MovePathIndex) -> bool, - { - if !(*f)(index) { - return false; - } - - let mut p = self.path_first_child(index); - while p != InvalidMovePathIndex { - if !self.each_extending_path_(p, f) { - return false; - } - p = self.path_next_sibling(p); - } - - return true; - } - - fn each_extending_path(&self, index: MovePathIndex, mut f: F) -> bool where - F: FnMut(MovePathIndex) -> bool, - { - self.each_extending_path_(index, &mut f) - } - - fn each_applicable_move(&self, index0: MovePathIndex, mut f: F) -> bool where - F: FnMut(MoveIndex) -> bool, - { - let mut ret = true; - self.each_extending_path(index0, |index| { - let mut p = self.path_first_move(index); - while p != InvalidMoveIndex { - if !f(p) { - ret = false; - break; - } - p = self.move_next_move(p); - } - ret - }); - ret - } - - fn kill_moves( - &self, - path: MovePathIndex, - kill_id: hir::ItemLocalId, - kill_kind: KillFrom, - dfcx_moves: &mut MoveDataFlow<'_>, - ) { - // We can only perform kills for paths that refer to a unique location, - // since otherwise we may kill a move from one location with an - // assignment referring to another location. - - let loan_path = self.path_loan_path(path); - if loan_path_is_precise(&loan_path) { - self.each_applicable_move(path, |move_index| { - debug!("kill_moves add_kill {:?} kill_id={:?} move_index={}", - kill_kind, kill_id, move_index.get()); - dfcx_moves.add_kill(kill_kind, kill_id, move_index.get()); - true - }); - } - } -} - -impl<'tcx> FlowedMoveData<'tcx> { - pub fn new( - move_data: MoveData<'tcx>, - bccx: &BorrowckCtxt<'_, 'tcx>, - cfg: &cfg::CFG, - body: &hir::Body, - ) -> FlowedMoveData<'tcx> { - let tcx = bccx.tcx; - - let mut dfcx_moves = - DataFlowContext::new(tcx, - "flowed_move_data_moves", - Some(body), - cfg, - MoveDataFlowOperator, - move_data.moves.borrow().len()); - let mut dfcx_assign = - DataFlowContext::new(tcx, - "flowed_move_data_assigns", - Some(body), - cfg, - AssignDataFlowOperator, - move_data.var_assignments.borrow().len()); - - move_data.add_gen_kills(bccx, - &mut dfcx_moves, - &mut dfcx_assign); - - dfcx_moves.add_kills_from_flow_exits(cfg); - dfcx_assign.add_kills_from_flow_exits(cfg); - - dfcx_moves.propagate(cfg, body); - dfcx_assign.propagate(cfg, body); - - FlowedMoveData { - move_data, - dfcx_moves, - dfcx_assign, - } - } - - pub fn is_move_path(&self, id: hir::ItemLocalId, loan_path: &Rc>) -> bool { - //! Returns the kind of a move of `loan_path` by `id`, if one exists. - - let mut ret = false; - if let Some(loan_path_index) = self.move_data.path_map.borrow().get(&*loan_path) { - self.dfcx_moves.each_gen_bit(id, |move_index| { - let the_move = self.move_data.moves.borrow(); - let the_move = (*the_move)[move_index]; - if the_move.path == *loan_path_index { - ret = true; - false - } else { - true - } - }); - } - ret - } - - /// Iterates through each move of `loan_path` (or some base path of `loan_path`) that *may* - /// have occurred on entry to `id` without an intervening assignment. 
In other words, any moves - /// that would invalidate a reference to `loan_path` at location `id`. - pub fn each_move_of(&self, - id: hir::ItemLocalId, - loan_path: &Rc>, - mut f: F) - -> bool where - F: FnMut(&Move, &LoanPath<'tcx>) -> bool, - { - // Bad scenarios: - // - // 1. Move of `a.b.c`, use of `a.b.c` - // 2. Move of `a.b.c`, use of `a.b.c.d` - // 3. Move of `a.b.c`, use of `a` or `a.b` - // - // OK scenario: - // - // 4. move of `a.b.c`, use of `a.b.d` - - let base_indices = self.move_data.existing_base_paths(loan_path); - if base_indices.is_empty() { - return true; - } - - let opt_loan_path_index = self.move_data.existing_move_path(loan_path); - - let mut ret = true; - - self.dfcx_moves.each_bit_on_entry(id, |index| { - let the_move = self.move_data.moves.borrow(); - let the_move = &(*the_move)[index]; - let moved_path = the_move.path; - if base_indices.iter().any(|x| x == &moved_path) { - // Scenario 1 or 2: `loan_path` or some base path of - // `loan_path` was moved. - if !f(the_move, &self.move_data.path_loan_path(moved_path)) { - ret = false; - } - } else { - if let Some(loan_path_index) = opt_loan_path_index { - let cont = self.move_data.each_base_path(moved_path, |p| { - if p == loan_path_index { - // Scenario 3: some extension of `loan_path` - // was moved - f(the_move, - &self.move_data.path_loan_path(moved_path)) - } else { - true - } - }); - if !cont { ret = false; } - } - } - ret - }) - } - - /// Iterates through every assignment to `loan_path` that may have occurred on entry to `id`. - /// `loan_path` must be a single variable. - pub fn each_assignment_of(&self, - id: hir::ItemLocalId, - loan_path: &Rc>, - mut f: F) - -> bool where - F: FnMut(&Assignment) -> bool, - { - let loan_path_index = { - match self.move_data.existing_move_path(loan_path) { - Some(i) => i, - None => { - // if there were any assignments, it'd have an index - return true; - } - } - }; - - self.dfcx_assign.each_bit_on_entry(id, |index| { - let assignment = self.move_data.var_assignments.borrow(); - let assignment = &(*assignment)[index]; - if assignment.path == loan_path_index && !f(assignment) { - false - } else { - true - } - }) - } -} - -impl BitwiseOperator for MoveDataFlowOperator { - #[inline] - fn join(&self, succ: usize, pred: usize) -> usize { - succ | pred // moves from both preds are in scope - } -} - -impl DataFlowOperator for MoveDataFlowOperator { - #[inline] - fn initial_value(&self) -> bool { - false // no loans in scope by default - } -} - -impl BitwiseOperator for AssignDataFlowOperator { - #[inline] - fn join(&self, succ: usize, pred: usize) -> usize { - succ | pred // moves from both preds are in scope - } -} - -impl DataFlowOperator for AssignDataFlowOperator { - #[inline] - fn initial_value(&self) -> bool { - false // no assignments in scope by default - } -} diff --git a/src/librustc_ast_borrowck/cfg/construct.rs b/src/librustc_ast_borrowck/cfg/construct.rs deleted file mode 100644 index 0dc999083a..0000000000 --- a/src/librustc_ast_borrowck/cfg/construct.rs +++ /dev/null @@ -1,545 +0,0 @@ -use crate::cfg::*; - -use rustc::hir::{self, PatKind}; -use rustc::hir::def_id::DefId; -use rustc::hir::ptr::P; -use rustc::middle::region; -use rustc::ty::{self, TyCtxt}; - -use rustc_data_structures::graph::implementation as graph; - -struct CFGBuilder<'a, 'tcx> { - tcx: TyCtxt<'tcx>, - owner_def_id: DefId, - tables: &'a ty::TypeckTables<'tcx>, - graph: CFGGraph, - fn_exit: CFGIndex, - loop_scopes: Vec, - breakable_block_scopes: Vec, -} - -#[derive(Copy, Clone)] -struct BlockScope { - 
block_expr_id: hir::ItemLocalId, // ID of breakable block expr node - break_index: CFGIndex, // where to go on `break` -} - -#[derive(Copy, Clone)] -struct LoopScope { - loop_id: hir::ItemLocalId, // ID of `loop`/`while` node - continue_index: CFGIndex, // where to go on a `loop` - break_index: CFGIndex, // where to go on a `break` -} - -pub(super) fn construct(tcx: TyCtxt<'_>, body: &hir::Body) -> CFG { - let mut graph = graph::Graph::new(); - let entry = graph.add_node(CFGNodeData::Entry); - - // `fn_exit` is target of return exprs, which lies somewhere - // outside input `body`. (Distinguishing `fn_exit` and `body_exit` - // also resolves chicken-and-egg problem that arises if you try to - // have return exprs jump to `body_exit` during construction.) - let fn_exit = graph.add_node(CFGNodeData::Exit); - let body_exit; - - // Find the tables for this body. - let owner_def_id = tcx.hir().body_owner_def_id(body.id()); - let tables = tcx.typeck_tables_of(owner_def_id); - - let mut cfg_builder = CFGBuilder { - tcx, - owner_def_id, - tables, - graph, - fn_exit, - loop_scopes: Vec::new(), - breakable_block_scopes: Vec::new(), - }; - body_exit = cfg_builder.expr(&body.value, entry); - cfg_builder.add_contained_edge(body_exit, fn_exit); - let CFGBuilder { graph, .. } = cfg_builder; - CFG { - owner_def_id, - graph, - entry, - exit: fn_exit, - } -} - -impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { - fn block(&mut self, blk: &hir::Block, pred: CFGIndex) -> CFGIndex { - if blk.targeted_by_break { - let expr_exit = self.add_ast_node(blk.hir_id.local_id, &[]); - - self.breakable_block_scopes.push(BlockScope { - block_expr_id: blk.hir_id.local_id, - break_index: expr_exit, - }); - - let mut stmts_exit = pred; - for stmt in &blk.stmts { - stmts_exit = self.stmt(stmt, stmts_exit); - } - let blk_expr_exit = self.opt_expr(&blk.expr, stmts_exit); - self.add_contained_edge(blk_expr_exit, expr_exit); - - self.breakable_block_scopes.pop(); - - expr_exit - } else { - let mut stmts_exit = pred; - for stmt in &blk.stmts { - stmts_exit = self.stmt(stmt, stmts_exit); - } - - let expr_exit = self.opt_expr(&blk.expr, stmts_exit); - - self.add_ast_node(blk.hir_id.local_id, &[expr_exit]) - } - } - - fn stmt(&mut self, stmt: &hir::Stmt, pred: CFGIndex) -> CFGIndex { - let exit = match stmt.node { - hir::StmtKind::Local(ref local) => { - let init_exit = self.opt_expr(&local.init, pred); - self.pat(&local.pat, init_exit) - } - hir::StmtKind::Item(_) => pred, - hir::StmtKind::Expr(ref expr) | - hir::StmtKind::Semi(ref expr) => { - self.expr(&expr, pred) - } - }; - self.add_ast_node(stmt.hir_id.local_id, &[exit]) - } - - fn pat(&mut self, pat: &hir::Pat, pred: CFGIndex) -> CFGIndex { - match pat.node { - PatKind::Binding(.., None) | - PatKind::Path(_) | - PatKind::Lit(..) | - PatKind::Range(..) 
| - PatKind::Wild => self.add_ast_node(pat.hir_id.local_id, &[pred]), - - PatKind::Box(ref subpat) | - PatKind::Ref(ref subpat, _) | - PatKind::Binding(.., Some(ref subpat)) => { - let subpat_exit = self.pat(&subpat, pred); - self.add_ast_node(pat.hir_id.local_id, &[subpat_exit]) - } - - PatKind::TupleStruct(_, ref subpats, _) | - PatKind::Tuple(ref subpats, _) => { - let pats_exit = self.pats_all(subpats.iter(), pred); - self.add_ast_node(pat.hir_id.local_id, &[pats_exit]) - } - - PatKind::Struct(_, ref subpats, _) => { - let pats_exit = self.pats_all(subpats.iter().map(|f| &f.pat), pred); - self.add_ast_node(pat.hir_id.local_id, &[pats_exit]) - } - - PatKind::Or(ref pats) => { - let branches: Vec<_> = pats.iter().map(|p| self.pat(p, pred)).collect(); - self.add_ast_node(pat.hir_id.local_id, &branches) - } - - PatKind::Slice(ref pre, ref vec, ref post) => { - let pre_exit = self.pats_all(pre.iter(), pred); - let vec_exit = self.pats_all(vec.iter(), pre_exit); - let post_exit = self.pats_all(post.iter(), vec_exit); - self.add_ast_node(pat.hir_id.local_id, &[post_exit]) - } - } - } - - /// Handles case where all of the patterns must match. - fn pats_all<'b, I: Iterator>>( - &mut self, - pats: I, - pred: CFGIndex, - ) -> CFGIndex { - pats.fold(pred, |pred, pat| self.pat(&pat, pred)) - } - - fn expr(&mut self, expr: &hir::Expr, pred: CFGIndex) -> CFGIndex { - match expr.node { - hir::ExprKind::Block(ref blk, _) => { - let blk_exit = self.block(&blk, pred); - self.add_ast_node(expr.hir_id.local_id, &[blk_exit]) - } - - hir::ExprKind::Loop(ref body, _, _) => { - // - // [pred] - // | - // v 1 - // [loopback] <---+ - // | 4 | - // v 3 | - // [body] ------+ - // - // [expr] 2 - // - // Note that `break` and `loop` statements - // may cause additional edges. - - let loopback = self.add_dummy_node(&[pred]); // 1 - let expr_exit = self.add_ast_node(expr.hir_id.local_id, &[]); // 2 - self.loop_scopes.push(LoopScope { - loop_id: expr.hir_id.local_id, - continue_index: loopback, - break_index: expr_exit, - }); - let body_exit = self.block(&body, loopback); // 3 - self.add_contained_edge(body_exit, loopback); // 4 - self.loop_scopes.pop(); - expr_exit - } - - hir::ExprKind::Match(ref discr, ref arms, _) => { - self.match_(expr.hir_id.local_id, &discr, &arms, pred) - } - - hir::ExprKind::Binary(op, ref l, ref r) if op.node.is_lazy() => { - // - // [pred] - // | - // v 1 - // [l] - // | - // / \ - // / \ - // v 2 * - // [r] | - // | | - // v 3 v 4 - // [..exit..] 
- // - let l_exit = self.expr(&l, pred); // 1 - let r_exit = self.expr(&r, l_exit); // 2 - self.add_ast_node(expr.hir_id.local_id, &[l_exit, r_exit]) // 3,4 - } - - hir::ExprKind::Ret(ref v) => { - let v_exit = self.opt_expr(v, pred); - let b = self.add_ast_node(expr.hir_id.local_id, &[v_exit]); - self.add_returning_edge(expr, b); - self.add_unreachable_node() - } - - hir::ExprKind::Break(destination, ref opt_expr) => { - let v = self.opt_expr(opt_expr, pred); - let (target_scope, break_dest) = - self.find_scope_edge(expr, destination, ScopeCfKind::Break); - let b = self.add_ast_node(expr.hir_id.local_id, &[v]); - self.add_exiting_edge(expr, b, target_scope, break_dest); - self.add_unreachable_node() - } - - hir::ExprKind::Continue(destination) => { - let (target_scope, cont_dest) = - self.find_scope_edge(expr, destination, ScopeCfKind::Continue); - let a = self.add_ast_node(expr.hir_id.local_id, &[pred]); - self.add_exiting_edge(expr, a, target_scope, cont_dest); - self.add_unreachable_node() - } - - hir::ExprKind::Array(ref elems) => { - self.straightline(expr, pred, elems.iter().map(|e| &*e)) - } - - hir::ExprKind::Call(ref func, ref args) => { - self.call(expr, pred, &func, args.iter().map(|e| &*e)) - } - - hir::ExprKind::MethodCall(.., ref args) => { - self.call(expr, pred, &args[0], args[1..].iter().map(|e| &*e)) - } - - hir::ExprKind::Index(ref l, ref r) | - hir::ExprKind::Binary(_, ref l, ref r) if self.tables.is_method_call(expr) => { - self.call(expr, pred, &l, Some(&**r).into_iter()) - } - - hir::ExprKind::Unary(_, ref e) if self.tables.is_method_call(expr) => { - self.call(expr, pred, &e, None::.iter()) - } - - hir::ExprKind::Tup(ref exprs) => { - self.straightline(expr, pred, exprs.iter().map(|e| &*e)) - } - - hir::ExprKind::Struct(_, ref fields, ref base) => { - let field_cfg = self.straightline(expr, pred, fields.iter().map(|f| &*f.expr)); - self.opt_expr(base, field_cfg) - } - - hir::ExprKind::Assign(ref l, ref r) | - hir::ExprKind::AssignOp(_, ref l, ref r) => { - self.straightline(expr, pred, [r, l].iter().map(|&e| &**e)) - } - - hir::ExprKind::Index(ref l, ref r) | - hir::ExprKind::Binary(_, ref l, ref r) => { // N.B., && and || handled earlier - self.straightline(expr, pred, [l, r].iter().map(|&e| &**e)) - } - - hir::ExprKind::Box(ref e) | - hir::ExprKind::AddrOf(_, ref e) | - hir::ExprKind::Cast(ref e, _) | - hir::ExprKind::Type(ref e, _) | - hir::ExprKind::DropTemps(ref e) | - hir::ExprKind::Unary(_, ref e) | - hir::ExprKind::Field(ref e, _) | - hir::ExprKind::Yield(ref e, _) | - hir::ExprKind::Repeat(ref e, _) => { - self.straightline(expr, pred, Some(&**e).into_iter()) - } - - hir::ExprKind::InlineAsm(_, ref outputs, ref inputs) => { - let post_outputs = self.exprs(outputs.iter().map(|e| &*e), pred); - let post_inputs = self.exprs(inputs.iter().map(|e| &*e), post_outputs); - self.add_ast_node(expr.hir_id.local_id, &[post_inputs]) - } - - hir::ExprKind::Closure(..) | - hir::ExprKind::Lit(..) 
| - hir::ExprKind::Path(_) | - hir::ExprKind::Err => { - self.straightline(expr, pred, None::.iter()) - } - } - } - - fn call<'b, I: Iterator>( - &mut self, - call_expr: &hir::Expr, - pred: CFGIndex, - func_or_rcvr: &hir::Expr, - args: I, - ) -> CFGIndex { - let func_or_rcvr_exit = self.expr(func_or_rcvr, pred); - let ret = self.straightline(call_expr, func_or_rcvr_exit, args); - let m = self.tcx.hir().get_module_parent(call_expr.hir_id); - if self.tcx.is_ty_uninhabited_from(m, self.tables.expr_ty(call_expr)) { - self.add_unreachable_node() - } else { - ret - } - } - - /// Constructs graph for `exprs` evaluated in order. - fn exprs<'b, I: Iterator>( - &mut self, - exprs: I, - pred: CFGIndex, - ) -> CFGIndex { - exprs.fold(pred, |p, e| self.expr(e, p)) - } - - /// Constructs graph for `opt_expr` evaluated, if `Some`. - fn opt_expr( - &mut self, - opt_expr: &Option>, - pred: CFGIndex, - ) -> CFGIndex { - opt_expr.iter().fold(pred, |p, e| self.expr(&e, p)) - } - - /// Handles case of an expression that evaluates `subexprs` in order. - fn straightline<'b, I: Iterator>( - &mut self, - expr: &hir::Expr, - pred: CFGIndex, - subexprs: I, - ) -> CFGIndex { - let subexprs_exit = self.exprs(subexprs, pred); - self.add_ast_node(expr.hir_id.local_id, &[subexprs_exit]) - } - - fn match_(&mut self, id: hir::ItemLocalId, discr: &hir::Expr, - arms: &[hir::Arm], pred: CFGIndex) -> CFGIndex { - // The CFG for match expressions is quite complex, so no ASCII - // art for it (yet). - // - // The CFG generated below matches roughly what MIR contains. - // Each pattern and guard is visited in parallel, with - // arms containing multiple patterns generating multiple nodes - // for the same guard expression. The guard expressions chain - // into each other from top to bottom, with a specific - // exception to allow some additional valid programs - // (explained below). MIR differs slightly in that the - // pattern matching may continue after a guard but the visible - // behaviour should be the same. - // - // What is going on is explained in further comments. - - // Visit the discriminant expression. - let discr_exit = self.expr(discr, pred); - - // Add a node for the exit of the match expression as a whole. - let expr_exit = self.add_ast_node(id, &[]); - - // Keep track of the previous guard expressions. - let mut prev_guard = None; - let match_scope = region::Scope { id, data: region::ScopeData::Node }; - - for arm in arms { - // Add an exit node for when we've visited all the - // patterns and the guard (if there is one) in the arm. - let bindings_exit = self.add_dummy_node(&[]); - - for pat in &arm.pats { - // Visit the pattern, coming from the discriminant exit - let mut pat_exit = self.pat(&pat, discr_exit); - - // If there is a guard expression, handle it here. - if let Some(ref guard) = arm.guard { - // Add a dummy node for the previous guard - // expression to target. - let guard_start = self.add_dummy_node(&[pat_exit]); - // Visit the guard expression. - let guard_exit = match guard { - hir::Guard::If(ref e) => (&**e, self.expr(e, guard_start)), - }; - // #47295: We used to have very special case code - // here for when a pair of arms are both formed - // solely from constants, and if so, not add these - // edges. But this was not actually sound without - // other constraints that we stopped enforcing at - // some point. 
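                    // Illustrative note (added by the editor, not in the
                    // original source): for
                    //     match v { A if g1 => .., B if g2 => .. }
                    // the edge added below runs from the exit of the previous
                    // arm's guard (g1) to the start of the current arm's
                    // guard (g2), modeling that an arm whose guard fails
                    // falls through to testing the next arm's guard.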
- if let Some((prev_guard, prev_index)) = prev_guard.take() { - self.add_exiting_edge(prev_guard, prev_index, match_scope, guard_start); - } - - // Push the guard onto the list of previous guards. - prev_guard = Some(guard_exit); - - // Update the exit node for the pattern. - pat_exit = guard_exit.1; - } - - // Add an edge from the exit of this pattern to the exit of the arm. - self.add_contained_edge(pat_exit, bindings_exit); - } - - // Visit the body of this arm. - let body_exit = self.expr(&arm.body, bindings_exit); - - let arm_exit = self.add_ast_node(arm.hir_id.local_id, &[body_exit]); - - // Link the body to the exit of the expression. - self.add_contained_edge(arm_exit, expr_exit); - } - - expr_exit - } - - fn add_dummy_node(&mut self, preds: &[CFGIndex]) -> CFGIndex { - self.add_node(CFGNodeData::Dummy, preds) - } - - fn add_ast_node(&mut self, id: hir::ItemLocalId, preds: &[CFGIndex]) -> CFGIndex { - self.add_node(CFGNodeData::AST(id), preds) - } - - fn add_unreachable_node(&mut self) -> CFGIndex { - self.add_node(CFGNodeData::Unreachable, &[]) - } - - fn add_node(&mut self, data: CFGNodeData, preds: &[CFGIndex]) -> CFGIndex { - let node = self.graph.add_node(data); - for &pred in preds { - self.add_contained_edge(pred, node); - } - node - } - - fn add_contained_edge( - &mut self, - source: CFGIndex, - target: CFGIndex, - ) { - let data = CFGEdgeData {exiting_scopes: vec![] }; - self.graph.add_edge(source, target, data); - } - - fn add_exiting_edge( - &mut self, - from_expr: &hir::Expr, - from_index: CFGIndex, - target_scope: region::Scope, - to_index: CFGIndex, - ) { - let mut data = CFGEdgeData { exiting_scopes: vec![] }; - let mut scope = region::Scope { - id: from_expr.hir_id.local_id, - data: region::ScopeData::Node - }; - let region_scope_tree = self.tcx.region_scope_tree(self.owner_def_id); - while scope != target_scope { - data.exiting_scopes.push(scope.item_local_id()); - scope = region_scope_tree.encl_scope(scope); - } - self.graph.add_edge(from_index, to_index, data); - } - - fn add_returning_edge( - &mut self, - _from_expr: &hir::Expr, - from_index: CFGIndex, - ) { - let data = CFGEdgeData { - exiting_scopes: self.loop_scopes.iter() - .rev() - .map(|&LoopScope { loop_id: id, .. 
}| id) - .collect() - }; - self.graph.add_edge(from_index, self.fn_exit, data); - } - - fn find_scope_edge( - &self, - expr: &hir::Expr, - destination: hir::Destination, - scope_cf_kind: ScopeCfKind, - ) -> (region::Scope, CFGIndex) { - match destination.target_id { - Ok(loop_id) => { - for b in &self.breakable_block_scopes { - if b.block_expr_id == loop_id.local_id { - let scope = region::Scope { - id: loop_id.local_id, - data: region::ScopeData::Node - }; - return (scope, match scope_cf_kind { - ScopeCfKind::Break => b.break_index, - ScopeCfKind::Continue => bug!("can't continue to block"), - }); - } - } - for l in &self.loop_scopes { - if l.loop_id == loop_id.local_id { - let scope = region::Scope { - id: loop_id.local_id, - data: region::ScopeData::Node - }; - return (scope, match scope_cf_kind { - ScopeCfKind::Break => l.break_index, - ScopeCfKind::Continue => l.continue_index, - }); - } - } - span_bug!(expr.span, "no scope for ID {}", loop_id); - } - Err(err) => span_bug!(expr.span, "scope error: {}", err), - } - } -} - -#[derive(Copy, Clone, Eq, PartialEq)] -enum ScopeCfKind { - Break, - Continue, -} diff --git a/src/librustc_ast_borrowck/cfg/graphviz.rs b/src/librustc_ast_borrowck/cfg/graphviz.rs deleted file mode 100644 index 99c6b49cad..0000000000 --- a/src/librustc_ast_borrowck/cfg/graphviz.rs +++ /dev/null @@ -1,119 +0,0 @@ -/// This module provides linkage between `rustc::middle::graph` and -/// libgraphviz traits. - -use crate::cfg; -use rustc::hir; -use rustc::ty::TyCtxt; - -pub(crate) type Node<'a> = (cfg::CFGIndex, &'a cfg::CFGNode); -pub(crate) type Edge<'a> = &'a cfg::CFGEdge; - -pub struct LabelledCFG<'a, 'tcx> { - pub tcx: TyCtxt<'tcx>, - pub cfg: &'a cfg::CFG, - pub name: String, - /// `labelled_edges` controls whether we emit labels on the edges. - pub labelled_edges: bool, -} - -impl<'a, 'tcx> LabelledCFG<'a, 'tcx> { - fn local_id_to_string(&self, local_id: hir::ItemLocalId) -> String { - assert!(self.cfg.owner_def_id.is_local()); - let hir_id = hir::HirId { - owner: self.tcx.hir().def_index_to_hir_id(self.cfg.owner_def_id.index).owner, - local_id - }; - let s = self.tcx.hir().node_to_string(hir_id); - - // Replacing newlines with `\\l` causes each line to be left-aligned, - // improving presentation of (long) pretty-printed expressions. - if s.contains("\n") { - let mut s = s.replace("\n", "\\l"); - // Apparently left-alignment applies to the line that precedes - // `\l`, not the line that follows; so, add `\l` at end of string - // if not already present, ensuring last line gets left-aligned - // as well. 
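            // Illustrative note (added by the editor, not in the original
            // source): e.g. "foo\nbar" becomes "foo\lbar" after the replace
            // above and "foo\lbar\l" after the check below.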
- let mut last_two: Vec<_> = - s.chars().rev().take(2).collect(); - last_two.reverse(); - if last_two != ['\\', 'l'] { - s.push_str("\\l"); - } - s - } else { - s - } - } -} - -impl<'a, 'hir> dot::Labeller<'a> for LabelledCFG<'a, 'hir> { - type Node = Node<'a>; - type Edge = Edge<'a>; - fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new(&self.name[..]).unwrap() } - - fn node_id(&'a self, &(i,_): &Node<'a>) -> dot::Id<'a> { - dot::Id::new(format!("N{}", i.node_id())).unwrap() - } - - fn node_label(&'a self, &(i, n): &Node<'a>) -> dot::LabelText<'a> { - if i == self.cfg.entry { - dot::LabelText::LabelStr("entry".into()) - } else if i == self.cfg.exit { - dot::LabelText::LabelStr("exit".into()) - } else if n.data.id() == hir::DUMMY_ITEM_LOCAL_ID { - dot::LabelText::LabelStr("(dummy_node)".into()) - } else { - let s = self.local_id_to_string(n.data.id()); - dot::LabelText::EscStr(s.into()) - } - } - - fn edge_label(&self, e: &Edge<'a>) -> dot::LabelText<'a> { - let mut label = String::new(); - if !self.labelled_edges { - return dot::LabelText::EscStr(label.into()); - } - let mut put_one = false; - for (i, &id) in e.data.exiting_scopes.iter().enumerate() { - if put_one { - label.push_str(",\\l"); - } else { - put_one = true; - } - let s = self.local_id_to_string(id); - label.push_str(&format!("exiting scope_{} {}", - i, - &s[..])); - } - dot::LabelText::EscStr(label.into()) - } -} - -impl<'a> dot::GraphWalk<'a> for &'a cfg::CFG { - type Node = Node<'a>; - type Edge = Edge<'a>; - fn nodes(&'a self) -> dot::Nodes<'a, Node<'a>> { - let v: Vec<_> = self.graph.enumerated_nodes().collect(); - v.into() - } - fn edges(&'a self) -> dot::Edges<'a, Edge<'a>> { - self.graph.all_edges().iter().collect() - } - fn source(&'a self, edge: &Edge<'a>) -> Node<'a> { - let i = edge.source(); - (i, self.graph.node(i)) - } - fn target(&'a self, edge: &Edge<'a>) -> Node<'a> { - let i = edge.target(); - (i, self.graph.node(i)) - } -} - -impl<'a, 'hir> dot::GraphWalk<'a> for LabelledCFG<'a, 'hir> { - type Node = Node<'a>; - type Edge = Edge<'a>; - fn nodes(&'a self) -> dot::Nodes<'a, Node<'a>> { self.cfg.nodes() } - fn edges(&'a self) -> dot::Edges<'a, Edge<'a>> { self.cfg.edges() } - fn source(&'a self, edge: &Edge<'a>) -> Node<'a> { self.cfg.source(edge) } - fn target(&'a self, edge: &Edge<'a>) -> Node<'a> { self.cfg.target(edge) } -} diff --git a/src/librustc_ast_borrowck/cfg/mod.rs b/src/librustc_ast_borrowck/cfg/mod.rs deleted file mode 100644 index 981199c91d..0000000000 --- a/src/librustc_ast_borrowck/cfg/mod.rs +++ /dev/null @@ -1,55 +0,0 @@ -//! Module that constructs a control-flow graph representing an item. -//! Uses `Graph` as the underlying representation. 
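// Illustrative sketch (added by the editor, not part of the original source;
// a plain adjacency list stands in for rustc's Graph): the construction
// pattern used by `construct()` in construct.rs above is to create the entry
// and exit nodes up front, lower the body between them, and finally wire the
// body's exit edge into the function exit.
struct SketchCfg {
    edges: Vec<(usize, usize)>, // (source node, target node)
    next_node: usize,
}

impl SketchCfg {
    fn add_node(&mut self) -> usize {
        let n = self.next_node;
        self.next_node += 1;
        n
    }

    fn add_edge(&mut self, from: usize, to: usize) {
        self.edges.push((from, to));
    }
}

fn sketch_construct() -> SketchCfg {
    let mut cfg = SketchCfg { edges: Vec::new(), next_node: 0 };
    let entry = cfg.add_node();
    let fn_exit = cfg.add_node();   // target of `return` expressions
    let body_exit = cfg.add_node(); // stands in for lowering the body from `entry`
    cfg.add_edge(entry, body_exit);
    cfg.add_edge(body_exit, fn_exit);
    cfg
}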
- -use rustc_data_structures::graph::implementation as graph; -use rustc::ty::TyCtxt; -use rustc::hir; -use rustc::hir::def_id::DefId; - -mod construct; -pub mod graphviz; - -pub struct CFG { - owner_def_id: DefId, - pub(crate) graph: CFGGraph, - pub(crate) entry: CFGIndex, - exit: CFGIndex, -} - -#[derive(Copy, Clone, Debug, PartialEq)] -pub enum CFGNodeData { - AST(hir::ItemLocalId), - Entry, - Exit, - Dummy, - Unreachable, -} - -impl CFGNodeData { - pub(crate) fn id(&self) -> hir::ItemLocalId { - if let CFGNodeData::AST(id) = *self { - id - } else { - hir::DUMMY_ITEM_LOCAL_ID - } - } -} - -#[derive(Debug)] -pub struct CFGEdgeData { - pub(crate) exiting_scopes: Vec -} - -pub(crate) type CFGIndex = graph::NodeIndex; - -pub(crate) type CFGGraph = graph::Graph; - -pub(crate) type CFGNode = graph::Node; - -pub(crate) type CFGEdge = graph::Edge; - -impl CFG { - pub fn new(tcx: TyCtxt<'_>, body: &hir::Body) -> CFG { - construct::construct(tcx, body) - } -} diff --git a/src/librustc_ast_borrowck/dataflow.rs b/src/librustc_ast_borrowck/dataflow.rs deleted file mode 100644 index a8562901d9..0000000000 --- a/src/librustc_ast_borrowck/dataflow.rs +++ /dev/null @@ -1,672 +0,0 @@ -//! A module for propagating forward dataflow information. The analysis -//! assumes that the items to be propagated can be represented as bits -//! and thus uses bitvectors. Your job is simply to specify the so-called -//! GEN and KILL bits for each expression. - -use crate::cfg::{self, CFGIndex}; -use std::mem; -use std::usize; -use log::debug; - -use rustc_data_structures::graph::implementation::OUTGOING; - -use rustc::util::nodemap::FxHashMap; -use rustc::hir; -use rustc::hir::intravisit; -use rustc::hir::print as pprust; -use rustc::ty::TyCtxt; - -#[derive(Copy, Clone, Debug)] -pub enum EntryOrExit { - Entry, - Exit, -} - -#[derive(Clone)] -pub struct DataFlowContext<'tcx, O> { - tcx: TyCtxt<'tcx>, - - /// a name for the analysis using this dataflow instance - analysis_name: &'static str, - - /// the data flow operator - oper: O, - - /// number of bits to propagate per id - bits_per_id: usize, - - /// number of words we will use to store bits_per_id. - /// equal to bits_per_id/usize::BITS rounded up. - words_per_id: usize, - - // mapping from node to cfg node index - // FIXME (#6298): Shouldn't this go with CFG? - local_id_to_index: FxHashMap>, - - // Bit sets per cfg node. The following three fields (`gens`, `kills`, - // and `on_entry`) all have the same structure. For each id in - // `id_range`, there is a range of words equal to `words_per_id`. - // So, to access the bits for any given id, you take a slice of - // the full vector (see the method `compute_id_range()`). - /// bits generated as we exit the cfg node. Updated by `add_gen()`. - gens: Vec, - - /// bits killed as we exit the cfg node, or non-locally jump over - /// it. Updated by `add_kill(KillFrom::ScopeEnd)`. - scope_kills: Vec, - - /// bits killed as we exit the cfg node directly; if it is jumped - /// over, e.g., via `break`, the kills are not reflected in the - /// jump's effects. Updated by `add_kill(KillFrom::Execution)`. - action_kills: Vec, - - /// bits that are valid on entry to the cfg node. Updated by - /// `propagate()`. - on_entry: Vec, -} - -pub trait BitwiseOperator { - /// Joins two predecessor bits together, typically either `|` or `&` - fn join(&self, succ: usize, pred: usize) -> usize; -} - -/// Parameterization for the precise form of data flow that is used. 
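// Illustrative sketch (added by the editor, not part of the original source;
// hypothetical free functions rather than the original API): the word-level
// behaviour documented for `gens`, the two kill sets, and `on_entry` above is
// that a node's exit bits are its entry bits plus its gen bits minus its kill
// bits, and that predecessor bits are merged into a successor's entry set with
// the operator's `join`, which is a plain bitwise OR for the loan and move
// analyses.
fn sketch_transfer(on_entry: usize, gens: usize, kills: usize) -> usize {
    (on_entry | gens) & !kills
}

fn sketch_union_join(succ: usize, pred: usize) -> usize {
    succ | pred
}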
-pub trait DataFlowOperator : BitwiseOperator { - /// Specifies the initial value for each bit in the `on_entry` set - fn initial_value(&self) -> bool; -} - -struct PropagationContext<'a, 'tcx, O> { - dfcx: &'a mut DataFlowContext<'tcx, O>, - changed: bool, -} - -fn get_cfg_indices(id: hir::ItemLocalId, - index: &FxHashMap>) - -> &[CFGIndex] { - index.get(&id).map_or(&[], |v| &v[..]) -} - -impl<'tcx, O: DataFlowOperator> DataFlowContext<'tcx, O> { - fn has_bitset_for_local_id(&self, n: hir::ItemLocalId) -> bool { - assert!(n != hir::DUMMY_ITEM_LOCAL_ID); - self.local_id_to_index.contains_key(&n) - } -} - -impl<'tcx, O: DataFlowOperator> pprust::PpAnn for DataFlowContext<'tcx, O> { - fn nested(&self, state: &mut pprust::State<'_>, nested: pprust::Nested) { - pprust::PpAnn::nested(self.tcx.hir(), state, nested) - } - fn pre(&self, - ps: &mut pprust::State<'_>, - node: pprust::AnnNode<'_>) { - let id = match node { - pprust::AnnNode::Name(_) => return, - pprust::AnnNode::Expr(expr) => expr.hir_id.local_id, - pprust::AnnNode::Block(blk) => blk.hir_id.local_id, - pprust::AnnNode::Item(_) | - pprust::AnnNode::SubItem(_) => return, - pprust::AnnNode::Pat(pat) => pat.hir_id.local_id, - pprust::AnnNode::Arm(arm) => arm.hir_id.local_id, - }; - - if !self.has_bitset_for_local_id(id) { - return; - } - - assert!(self.bits_per_id > 0); - let indices = get_cfg_indices(id, &self.local_id_to_index); - for &cfgidx in indices { - let (start, end) = self.compute_id_range(cfgidx); - let on_entry = &self.on_entry[start.. end]; - let entry_str = bits_to_string(on_entry); - - let gens = &self.gens[start.. end]; - let gens_str = if gens.iter().any(|&u| u != 0) { - format!(" gen: {}", bits_to_string(gens)) - } else { - String::new() - }; - - let action_kills = &self.action_kills[start .. end]; - let action_kills_str = if action_kills.iter().any(|&u| u != 0) { - format!(" action_kill: {}", bits_to_string(action_kills)) - } else { - String::new() - }; - - let scope_kills = &self.scope_kills[start .. end]; - let scope_kills_str = if scope_kills.iter().any(|&u| u != 0) { - format!(" scope_kill: {}", bits_to_string(scope_kills)) - } else { - String::new() - }; - - ps.synth_comment( - format!("id {}: {}{}{}{}", id.as_usize(), entry_str, - gens_str, action_kills_str, scope_kills_str)); - ps.s.space(); - } - } -} - -fn build_local_id_to_index(body: Option<&hir::Body>, - cfg: &cfg::CFG) - -> FxHashMap> { - let mut index = FxHashMap::default(); - - // FIXME(#15020) Would it be better to fold formals from decl - // into cfg itself? i.e., introduce a fn-based flow-graph in - // addition to the current block-based flow-graph, rather than - // have to put traversals like this here? - if let Some(body) = body { - add_entries_from_fn_body(&mut index, body, cfg.entry); - } - - cfg.graph.each_node(|node_idx, node| { - if let cfg::CFGNodeData::AST(id) = node.data { - index.entry(id).or_default().push(node_idx); - } - true - }); - - return index; - - /// Adds mappings from the ast nodes for the formal bindings to - /// the entry-node in the graph. 
- fn add_entries_from_fn_body(index: &mut FxHashMap>, - body: &hir::Body, - entry: CFGIndex) { - use rustc::hir::intravisit::Visitor; - - struct Formals<'a> { - entry: CFGIndex, - index: &'a mut FxHashMap>, - } - let mut formals = Formals { entry: entry, index: index }; - for param in &body.params { - formals.visit_pat(¶m.pat); - } - impl<'a, 'v> Visitor<'v> for Formals<'a> { - fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'v> { - intravisit::NestedVisitorMap::None - } - - fn visit_pat(&mut self, p: &hir::Pat) { - self.index.entry(p.hir_id.local_id).or_default().push(self.entry); - intravisit::walk_pat(self, p) - } - } - } -} - -/// Flag used by `add_kill` to indicate whether the provided kill -/// takes effect only when control flows directly through the node in -/// question, or if the kill's effect is associated with any -/// control-flow directly through or indirectly over the node. -#[derive(Copy, Clone, PartialEq, Debug)] -pub enum KillFrom { - /// A `ScopeEnd` kill is one that takes effect when any control - /// flow goes over the node. A kill associated with the end of the - /// scope of a variable declaration `let x;` is an example of a - /// `ScopeEnd` kill. - ScopeEnd, - - /// An `Execution` kill is one that takes effect only when control - /// flow goes through the node to completion. A kill associated - /// with an assignment statement `x = expr;` is an example of an - /// `Execution` kill. - Execution, -} - -impl<'tcx, O: DataFlowOperator> DataFlowContext<'tcx, O> { - pub fn new( - tcx: TyCtxt<'tcx>, - analysis_name: &'static str, - body: Option<&hir::Body>, - cfg: &cfg::CFG, - oper: O, - bits_per_id: usize, - ) -> DataFlowContext<'tcx, O> { - let usize_bits = mem::size_of::() * 8; - let words_per_id = (bits_per_id + usize_bits - 1) / usize_bits; - let num_nodes = cfg.graph.all_nodes().len(); - - debug!("DataFlowContext::new(analysis_name: {}, \ - bits_per_id={}, words_per_id={}) \ - num_nodes: {}", - analysis_name, bits_per_id, words_per_id, - num_nodes); - - let entry = if oper.initial_value() { usize::MAX } else {0}; - - let zeroes = vec![0; num_nodes * words_per_id]; - let gens = zeroes.clone(); - let kills1 = zeroes.clone(); - let kills2 = zeroes; - let on_entry = vec![entry; num_nodes * words_per_id]; - - let local_id_to_index = build_local_id_to_index(body, cfg); - - DataFlowContext { - tcx, - analysis_name, - words_per_id, - local_id_to_index, - bits_per_id, - oper, - gens, - action_kills: kills1, - scope_kills: kills2, - on_entry, - } - } - - pub fn add_gen(&mut self, id: hir::ItemLocalId, bit: usize) { - //! Indicates that `id` generates `bit` - debug!("{} add_gen(id={:?}, bit={})", - self.analysis_name, id, bit); - assert!(self.local_id_to_index.contains_key(&id)); - assert!(self.bits_per_id > 0); - - let indices = get_cfg_indices(id, &self.local_id_to_index); - for &cfgidx in indices { - let (start, end) = self.compute_id_range(cfgidx); - let gens = &mut self.gens[start.. end]; - set_bit(gens, bit); - } - } - - pub fn add_kill(&mut self, kind: KillFrom, id: hir::ItemLocalId, bit: usize) { - //! Indicates that `id` kills `bit` - debug!("{} add_kill(id={:?}, bit={})", - self.analysis_name, id, bit); - assert!(self.local_id_to_index.contains_key(&id)); - assert!(self.bits_per_id > 0); - - let indices = get_cfg_indices(id, &self.local_id_to_index); - for &cfgidx in indices { - let (start, end) = self.compute_id_range(cfgidx); - let kills = match kind { - KillFrom::Execution => &mut self.action_kills[start.. 
end], - KillFrom::ScopeEnd => &mut self.scope_kills[start.. end], - }; - set_bit(kills, bit); - } - } - - fn apply_gen_kill(&self, cfgidx: CFGIndex, bits: &mut [usize]) { - //! Applies the gen and kill sets for `cfgidx` to `bits` - debug!("{} apply_gen_kill(cfgidx={:?}, bits={}) [before]", - self.analysis_name, cfgidx, mut_bits_to_string(bits)); - assert!(self.bits_per_id > 0); - - let (start, end) = self.compute_id_range(cfgidx); - let gens = &self.gens[start.. end]; - bitwise(bits, gens, &Union); - let kills = &self.action_kills[start.. end]; - bitwise(bits, kills, &Subtract); - let kills = &self.scope_kills[start.. end]; - bitwise(bits, kills, &Subtract); - - debug!("{} apply_gen_kill(cfgidx={:?}, bits={}) [after]", - self.analysis_name, cfgidx, mut_bits_to_string(bits)); - } - - fn compute_id_range(&self, cfgidx: CFGIndex) -> (usize, usize) { - let n = cfgidx.node_id(); - let start = n * self.words_per_id; - let end = start + self.words_per_id; - - assert!(start < self.gens.len()); - assert!(end <= self.gens.len()); - assert!(self.gens.len() == self.action_kills.len()); - assert!(self.gens.len() == self.scope_kills.len()); - assert!(self.gens.len() == self.on_entry.len()); - - (start, end) - } - - - pub fn each_bit_on_entry(&self, id: hir::ItemLocalId, mut f: F) -> bool where - F: FnMut(usize) -> bool, - { - //! Iterates through each bit that is set on entry to `id`. - //! Only useful after `propagate()` has been called. - if !self.has_bitset_for_local_id(id) { - return true; - } - let indices = get_cfg_indices(id, &self.local_id_to_index); - for &cfgidx in indices { - if !self.each_bit_for_node(EntryOrExit::Entry, cfgidx, |i| f(i)) { - return false; - } - } - return true; - } - - pub fn each_bit_for_node(&self, e: EntryOrExit, cfgidx: CFGIndex, f: F) -> bool where - F: FnMut(usize) -> bool, - { - //! Iterates through each bit that is set on entry/exit to `cfgidx`. - //! Only useful after `propagate()` has been called. - - if self.bits_per_id == 0 { - // Skip the surprisingly common degenerate case. (Note - // compute_id_range requires self.words_per_id > 0.) - return true; - } - - let (start, end) = self.compute_id_range(cfgidx); - let on_entry = &self.on_entry[start.. end]; - let temp_bits; - let slice = match e { - EntryOrExit::Entry => on_entry, - EntryOrExit::Exit => { - let mut t = on_entry.to_vec(); - self.apply_gen_kill(cfgidx, &mut t); - temp_bits = t; - &temp_bits[..] - } - }; - debug!("{} each_bit_for_node({:?}, cfgidx={:?}) bits={}", - self.analysis_name, e, cfgidx, bits_to_string(slice)); - self.each_bit(slice, f) - } - - pub fn each_gen_bit(&self, id: hir::ItemLocalId, mut f: F) -> bool where - F: FnMut(usize) -> bool, - { - //! Iterates through each bit in the gen set for `id`. - if !self.has_bitset_for_local_id(id) { - return true; - } - - if self.bits_per_id == 0 { - // Skip the surprisingly common degenerate case. (Note - // compute_id_range requires self.words_per_id > 0.) - return true; - } - - let indices = get_cfg_indices(id, &self.local_id_to_index); - for &cfgidx in indices { - let (start, end) = self.compute_id_range(cfgidx); - let gens = &self.gens[start.. end]; - debug!("{} each_gen_bit(id={:?}, gens={})", - self.analysis_name, id, bits_to_string(gens)); - if !self.each_bit(gens, |i| f(i)) { - return false; - } - } - return true; - } - - fn each_bit(&self, words: &[usize], mut f: F) -> bool where - F: FnMut(usize) -> bool, - { - //! Helper for iterating over the bits in a bit set. - //! Returns false on the first call to `f` that returns false; - //! 
if all calls to `f` return true, then returns true. - - let usize_bits = mem::size_of::() * 8; - for (word_index, &word) in words.iter().enumerate() { - if word != 0 { - let base_index = word_index * usize_bits; - for offset in 0..usize_bits { - let bit = 1 << offset; - if (word & bit) != 0 { - // N.B., we round up the total number of bits - // that we store in any given bit set so that - // it is an even multiple of usize::BITS. This - // means that there may be some stray bits at - // the end that do not correspond to any - // actual value. So before we callback, check - // whether the bit_index is greater than the - // actual value the user specified and stop - // iterating if so. - let bit_index = base_index + offset as usize; - if bit_index >= self.bits_per_id { - return true; - } else if !f(bit_index) { - return false; - } - } - } - } - } - return true; - } - - pub fn add_kills_from_flow_exits(&mut self, cfg: &cfg::CFG) { - //! Whenever you have a `break` or `continue` statement, flow - //! exits through any number of enclosing scopes on its way to - //! the new destination. This function infers the kill bits of - //! those control operators based on the kill bits associated - //! with those scopes. - //! - //! This is usually called (if it is called at all), after - //! all add_gen and add_kill calls, but before propagate. - - debug!("{} add_kills_from_flow_exits", self.analysis_name); - if self.bits_per_id == 0 { - // Skip the surprisingly common degenerate case. (Note - // compute_id_range requires self.words_per_id > 0.) - return; - } - cfg.graph.each_edge(|_edge_index, edge| { - let flow_exit = edge.source(); - let (start, end) = self.compute_id_range(flow_exit); - let mut orig_kills = self.scope_kills[start.. end].to_vec(); - - let mut changed = false; - for &id in &edge.data.exiting_scopes { - let opt_cfg_idx = self.local_id_to_index.get(&id); - match opt_cfg_idx { - Some(indices) => { - for &cfg_idx in indices { - let (start, end) = self.compute_id_range(cfg_idx); - let kills = &self.scope_kills[start.. end]; - if bitwise(&mut orig_kills, kills, &Union) { - debug!("scope exits: scope id={:?} \ - (node={:?} of {:?}) added killset: {}", - id, cfg_idx, indices, - bits_to_string(kills)); - changed = true; - } - } - } - None => { - debug!("{} add_kills_from_flow_exits flow_exit={:?} \ - no cfg_idx for exiting_scope={:?}", - self.analysis_name, flow_exit, id); - } - } - } - - if changed { - let bits = &mut self.scope_kills[start.. end]; - debug!("{} add_kills_from_flow_exits flow_exit={:?} bits={} [before]", - self.analysis_name, flow_exit, mut_bits_to_string(bits)); - bits.copy_from_slice(&orig_kills[..]); - debug!("{} add_kills_from_flow_exits flow_exit={:?} bits={} [after]", - self.analysis_name, flow_exit, mut_bits_to_string(bits)); - } - true - }); - } -} - -// N.B. `Clone + 'static` only needed for pretty printing. -impl<'tcx, O: DataFlowOperator + Clone + 'static> DataFlowContext<'tcx, O> { - pub fn propagate(&mut self, cfg: &cfg::CFG, body: &hir::Body) { - //! Performs the data flow analysis. - - if self.bits_per_id == 0 { - // Optimize the surprisingly common degenerate case. 
- return; - } - - { - let words_per_id = self.words_per_id; - let mut propcx = PropagationContext { - dfcx: &mut *self, - changed: true - }; - - let nodes_po = cfg.graph.nodes_in_postorder(OUTGOING, cfg.entry); - let mut temp = vec![0; words_per_id]; - let mut num_passes = 0; - while propcx.changed { - num_passes += 1; - propcx.changed = false; - propcx.reset(&mut temp); - propcx.walk_cfg(cfg, &nodes_po, &mut temp); - } - debug!("finished in {} iterations", num_passes); - } - - debug!("Dataflow result for {}:", self.analysis_name); - debug!("{}", pprust::to_string(self, |s| { - s.cbox(pprust::INDENT_UNIT); - s.ibox(0); - s.print_expr(&body.value) - })); - } -} - -impl PropagationContext<'_, 'tcx, O> { - fn walk_cfg(&mut self, - cfg: &cfg::CFG, - nodes_po: &[CFGIndex], - in_out: &mut [usize]) { - debug!("DataFlowContext::walk_cfg(in_out={}) {}", - bits_to_string(in_out), self.dfcx.analysis_name); - assert!(self.dfcx.bits_per_id > 0); - - // Iterate over nodes in reverse post-order. - for &node_index in nodes_po.iter().rev() { - let node = cfg.graph.node(node_index); - debug!("DataFlowContext::walk_cfg idx={:?} id={:?} begin in_out={}", - node_index, node.data.id(), bits_to_string(in_out)); - - let (start, end) = self.dfcx.compute_id_range(node_index); - - // Initialize local bitvector with state on-entry. - in_out.copy_from_slice(&self.dfcx.on_entry[start.. end]); - - // Compute state on-exit by applying transfer function to - // state on-entry. - self.dfcx.apply_gen_kill(node_index, in_out); - - // Propagate state on-exit from node into its successors. - self.propagate_bits_into_graph_successors_of(in_out, cfg, node_index); - } - } - - fn reset(&mut self, bits: &mut [usize]) { - let e = if self.dfcx.oper.initial_value() {usize::MAX} else {0}; - for b in bits { - *b = e; - } - } - - fn propagate_bits_into_graph_successors_of(&mut self, - pred_bits: &[usize], - cfg: &cfg::CFG, - cfgidx: CFGIndex) { - for (_, edge) in cfg.graph.outgoing_edges(cfgidx) { - self.propagate_bits_into_entry_set_for(pred_bits, edge); - } - } - - fn propagate_bits_into_entry_set_for(&mut self, - pred_bits: &[usize], - edge: &cfg::CFGEdge) { - let source = edge.source(); - let cfgidx = edge.target(); - debug!("{} propagate_bits_into_entry_set_for(pred_bits={}, {:?} to {:?})", - self.dfcx.analysis_name, bits_to_string(pred_bits), source, cfgidx); - assert!(self.dfcx.bits_per_id > 0); - - let (start, end) = self.dfcx.compute_id_range(cfgidx); - let changed = { - // (scoping mutable borrow of self.dfcx.on_entry) - let on_entry = &mut self.dfcx.on_entry[start.. end]; - bitwise(on_entry, pred_bits, &self.dfcx.oper) - }; - if changed { - debug!("{} changed entry set for {:?} to {}", - self.dfcx.analysis_name, cfgidx, - bits_to_string(&self.dfcx.on_entry[start.. end])); - self.changed = true; - } - } -} - -fn mut_bits_to_string(words: &mut [usize]) -> String { - bits_to_string(words) -} - -fn bits_to_string(words: &[usize]) -> String { - let mut result = String::new(); - let mut sep = '['; - - // Note: this is a little endian printout of bytes. 
- - for &word in words { - let mut v = word; - for _ in 0..mem::size_of::() { - result.push(sep); - result.push_str(&format!("{:02x}", v & 0xFF)); - v >>= 8; - sep = '-'; - } - } - result.push(']'); - return result -} - -#[inline] -fn bitwise(out_vec: &mut [usize], - in_vec: &[usize], - op: &Op) -> bool { - assert_eq!(out_vec.len(), in_vec.len()); - let mut changed = false; - for (out_elt, in_elt) in out_vec.iter_mut().zip(in_vec) { - let old_val = *out_elt; - let new_val = op.join(old_val, *in_elt); - *out_elt = new_val; - changed |= old_val != new_val; - } - changed -} - -fn set_bit(words: &mut [usize], bit: usize) -> bool { - debug!("set_bit: words={} bit={}", - mut_bits_to_string(words), bit_str(bit)); - let usize_bits = mem::size_of::() * 8; - let word = bit / usize_bits; - let bit_in_word = bit % usize_bits; - let bit_mask = 1 << bit_in_word; - debug!("word={} bit_in_word={} bit_mask={}", word, bit_in_word, bit_mask); - let oldv = words[word]; - let newv = oldv | bit_mask; - words[word] = newv; - oldv != newv -} - -fn bit_str(bit: usize) -> String { - let byte = bit >> 3; - let lobits = 1 << (bit & 0b111); - format!("[{}:{}-{:02x}]", bit, byte, lobits) -} - -struct Union; -impl BitwiseOperator for Union { - fn join(&self, a: usize, b: usize) -> usize { a | b } -} -struct Subtract; -impl BitwiseOperator for Subtract { - fn join(&self, a: usize, b: usize) -> usize { a & !b } -} diff --git a/src/librustc_ast_borrowck/graphviz.rs b/src/librustc_ast_borrowck/graphviz.rs deleted file mode 100644 index c077dc828a..0000000000 --- a/src/librustc_ast_borrowck/graphviz.rs +++ /dev/null @@ -1,145 +0,0 @@ -//! This module provides linkage between rustc::middle::graph and -//! libgraphviz traits, specialized to attaching borrowck analysis -//! data to rendered labels. 
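
For readers skimming the deletion above: the per-node transfer function that the removed `dataflow.rs` implemented is the classic gen/kill update over word-packed `usize` bit vectors (`bitwise(bits, gens, &Union)` followed by subtracting the kill sets). Below is a minimal, self-contained sketch of that update; it is illustrative only and deliberately simpler than the deleted rustc code, which additionally splits kills into action kills and scope kills.

```rust
// Set a single bit in a word-packed bit vector.
fn set_bit(words: &mut [usize], bit: usize) {
    let usize_bits = std::mem::size_of::<usize>() * 8;
    words[bit / usize_bits] |= 1 << (bit % usize_bits);
}

/// bits := (bits ∪ gens) \ kills, returning whether anything changed.
fn apply_gen_kill(bits: &mut [usize], gens: &[usize], kills: &[usize]) -> bool {
    let mut changed = false;
    for ((b, &g), &k) in bits.iter_mut().zip(gens).zip(kills) {
        let old = *b;
        *b = (old | g) & !k;
        changed |= old != *b;
    }
    changed
}

fn main() {
    // One word is enough for a tiny example.
    let mut on_entry = vec![0usize; 1];
    let mut gens = vec![0usize; 1];
    let mut kills = vec![0usize; 1];

    set_bit(&mut gens, 3);     // this node generates bit 3
    set_bit(&mut kills, 7);    // this node kills bit 7
    set_bit(&mut on_entry, 7); // bit 7 was live on entry

    let changed = apply_gen_kill(&mut on_entry, &gens, &kills);
    assert!(changed);
    assert_eq!(on_entry[0], 1 << 3); // bit 7 killed, bit 3 generated
}
```

The fixed-point loop in the deleted `propagate`/`walk_cfg` simply re-applies this transfer function along CFG edges until no on-entry set changes.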
- -pub use Variant::*; - -pub(crate) use crate::cfg::graphviz::{Node, Edge}; -use crate::cfg::graphviz as cfg_dot; -use crate::cfg::CFGIndex; -use crate::borrowck::{self, BorrowckCtxt, LoanPath}; -use crate::dataflow::{DataFlowOperator, DataFlowContext, EntryOrExit}; -use log::debug; -use std::rc::Rc; - -#[derive(Debug, Copy, Clone)] -pub enum Variant { - Loans, - Moves, - Assigns, -} - -impl Variant { - pub fn short_name(&self) -> &'static str { - match *self { - Loans => "loans", - Moves => "moves", - Assigns => "assigns", - } - } -} - -pub struct DataflowLabeller<'a, 'tcx> { - pub inner: cfg_dot::LabelledCFG<'a, 'tcx>, - pub variants: Vec, - pub borrowck_ctxt: &'a BorrowckCtxt<'a, 'tcx>, - pub analysis_data: &'a borrowck::AnalysisData<'tcx>, -} - -impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> { - fn dataflow_for(&self, e: EntryOrExit, n: &Node<'a>) -> String { - let id = n.1.data.id(); - debug!("dataflow_for({:?}, id={:?}) {:?}", e, id, self.variants); - let mut sets = String::new(); - let mut seen_one = false; - for &variant in &self.variants { - if seen_one { sets.push_str(" "); } else { seen_one = true; } - sets.push_str(variant.short_name()); - sets.push_str(": "); - sets.push_str(&self.dataflow_for_variant(e, n, variant)); - } - sets - } - - fn dataflow_for_variant(&self, e: EntryOrExit, n: &Node<'_>, v: Variant) -> String { - let cfgidx = n.0; - match v { - Loans => self.dataflow_loans_for(e, cfgidx), - Moves => self.dataflow_moves_for(e, cfgidx), - Assigns => self.dataflow_assigns_for(e, cfgidx), - } - } - - fn build_set( - &self, - e: EntryOrExit, - cfgidx: CFGIndex, - dfcx: &DataFlowContext<'tcx, O>, - mut to_lp: F, - ) -> String - where - F: FnMut(usize) -> Rc>, - { - let mut saw_some = false; - let mut set = "{".to_string(); - dfcx.each_bit_for_node(e, cfgidx, |index| { - let lp = to_lp(index); - if saw_some { - set.push_str(", "); - } - let loan_str = self.borrowck_ctxt.loan_path_to_string(&lp); - set.push_str(&loan_str); - saw_some = true; - true - }); - set.push_str("}"); - set - } - - fn dataflow_loans_for(&self, e: EntryOrExit, cfgidx: CFGIndex) -> String { - let dfcx = &self.analysis_data.loans; - let loan_index_to_path = |loan_index| { - let all_loans = &self.analysis_data.all_loans; - let l: &borrowck::Loan<'_> = &all_loans[loan_index]; - l.loan_path() - }; - self.build_set(e, cfgidx, dfcx, loan_index_to_path) - } - - fn dataflow_moves_for(&self, e: EntryOrExit, cfgidx: CFGIndex) -> String { - let dfcx = &self.analysis_data.move_data.dfcx_moves; - let move_index_to_path = |move_index| { - let move_data = &self.analysis_data.move_data.move_data; - let moves = move_data.moves.borrow(); - let the_move: &borrowck::move_data::Move = &(*moves)[move_index]; - move_data.path_loan_path(the_move.path) - }; - self.build_set(e, cfgidx, dfcx, move_index_to_path) - } - - fn dataflow_assigns_for(&self, e: EntryOrExit, cfgidx: CFGIndex) -> String { - let dfcx = &self.analysis_data.move_data.dfcx_assign; - let assign_index_to_path = |assign_index| { - let move_data = &self.analysis_data.move_data.move_data; - let assignments = move_data.var_assignments.borrow(); - let assignment: &borrowck::move_data::Assignment = &(*assignments)[assign_index]; - move_data.path_loan_path(assignment.path) - }; - self.build_set(e, cfgidx, dfcx, assign_index_to_path) - } -} - -impl<'a, 'tcx> dot::Labeller<'a> for DataflowLabeller<'a, 'tcx> { - type Node = Node<'a>; - type Edge = Edge<'a>; - fn graph_id(&'a self) -> dot::Id<'a> { self.inner.graph_id() } - fn node_id(&'a self, n: &Node<'a>) -> dot::Id<'a> { 
self.inner.node_id(n) } - fn node_label(&'a self, n: &Node<'a>) -> dot::LabelText<'a> { - let prefix = self.dataflow_for(EntryOrExit::Entry, n); - let suffix = self.dataflow_for(EntryOrExit::Exit, n); - let inner_label = self.inner.node_label(n); - inner_label - .prefix_line(dot::LabelText::LabelStr(prefix.into())) - .suffix_line(dot::LabelText::LabelStr(suffix.into())) - } - fn edge_label(&'a self, e: &Edge<'a>) -> dot::LabelText<'a> { self.inner.edge_label(e) } -} - -impl<'a, 'tcx> dot::GraphWalk<'a> for DataflowLabeller<'a, 'tcx> { - type Node = Node<'a>; - type Edge = Edge<'a>; - fn nodes(&'a self) -> dot::Nodes<'a, Node<'a>> { self.inner.nodes() } - fn edges(&'a self) -> dot::Edges<'a, Edge<'a>> { self.inner.edges() } - fn source(&'a self, edge: &Edge<'a>) -> Node<'a> { self.inner.source(edge) } - fn target(&'a self, edge: &Edge<'a>) -> Node<'a> { self.inner.target(edge) } -} diff --git a/src/librustc_ast_borrowck/lib.rs b/src/librustc_ast_borrowck/lib.rs deleted file mode 100644 index aea97fea1a..0000000000 --- a/src/librustc_ast_borrowck/lib.rs +++ /dev/null @@ -1,23 +0,0 @@ -#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] - -#![allow(non_camel_case_types)] - -#![feature(in_band_lifetimes)] -#![feature(nll)] - -#![recursion_limit="256"] - -#[macro_use] -extern crate rustc; - -pub use borrowck::check_crate; -pub use borrowck::build_borrowck_dataflow_data_for_fn; - -mod borrowck; - -pub mod graphviz; - -mod dataflow; -pub mod cfg; - -pub use borrowck::provide; diff --git a/src/librustc_codegen_llvm/Cargo.toml b/src/librustc_codegen_llvm/Cargo.toml index 98efa6a580..867bbd22cf 100644 --- a/src/librustc_codegen_llvm/Cargo.toml +++ b/src/librustc_codegen_llvm/Cargo.toml @@ -12,9 +12,3 @@ test = false [dependencies] rustc_llvm = { path = "../librustc_llvm" } - -[features] -# This is used to convince Cargo to separately cache builds of `rustc_codegen_llvm` -# when this option is enabled or not. That way we can build two, cache two -# artifacts, and have nice speedy rebuilds. -emscripten = ["rustc_llvm/emscripten"] diff --git a/src/librustc_codegen_llvm/abi.rs b/src/librustc_codegen_llvm/abi.rs index 2ca517dc3b..ae5cfc4d97 100644 --- a/src/librustc_codegen_llvm/abi.rs +++ b/src/librustc_codegen_llvm/abi.rs @@ -264,7 +264,7 @@ impl ArgTypeExt<'ll, 'tcx> for ArgType<'tcx, Ty<'tcx>> { val }; match self.mode { - PassMode::Ignore(_) => {} + PassMode::Ignore => {} PassMode::Pair(..) => { OperandValue::Pair(next(), next()).store(bx, dst); } @@ -319,9 +319,7 @@ impl<'tcx> FnTypeLlvmExt<'tcx> for FnType<'tcx, Ty<'tcx>> { ); let llreturn_ty = match self.ret.mode { - PassMode::Ignore(IgnoreMode::Zst) => cx.type_void(), - PassMode::Ignore(IgnoreMode::CVarArgs) => - bug!("`va_list` should never be a return type"), + PassMode::Ignore => cx.type_void(), PassMode::Direct(_) | PassMode::Pair(..) => { self.ret.layout.immediate_llvm_type(cx) } @@ -339,7 +337,7 @@ impl<'tcx> FnTypeLlvmExt<'tcx> for FnType<'tcx, Ty<'tcx>> { } let llarg_ty = match arg.mode { - PassMode::Ignore(_) => continue, + PassMode::Ignore => continue, PassMode::Direct(_) => arg.layout.immediate_llvm_type(cx), PassMode::Pair(..) 
=> { llargument_tys.push(arg.layout.scalar_pair_element_llvm_type(cx, 0, true)); @@ -408,7 +406,7 @@ impl<'tcx> FnTypeLlvmExt<'tcx> for FnType<'tcx, Ty<'tcx>> { apply(&ArgAttributes::new(), None); } match arg.mode { - PassMode::Ignore(_) => {} + PassMode::Ignore => {} PassMode::Direct(ref attrs) | PassMode::Indirect(ref attrs, None) => apply(attrs, Some(arg.layout.llvm_type(cx))), PassMode::Indirect(ref attrs, Some(ref extra_attrs)) => { @@ -455,7 +453,7 @@ impl<'tcx> FnTypeLlvmExt<'tcx> for FnType<'tcx, Ty<'tcx>> { apply(&ArgAttributes::new(), None); } match arg.mode { - PassMode::Ignore(_) => {} + PassMode::Ignore => {} PassMode::Direct(ref attrs) | PassMode::Indirect(ref attrs, None) => apply(attrs, Some(arg.layout.llvm_type(bx))), PassMode::Indirect(ref attrs, Some(ref extra_attrs)) => { diff --git a/src/librustc_codegen_llvm/allocator.rs b/src/librustc_codegen_llvm/allocator.rs index 5d43bf6ae2..11b6e0befa 100644 --- a/src/librustc_codegen_llvm/allocator.rs +++ b/src/librustc_codegen_llvm/allocator.rs @@ -3,7 +3,7 @@ use std::ffi::CString; use crate::attributes; use libc::c_uint; use rustc::ty::TyCtxt; -use syntax::ext::allocator::{AllocatorKind, AllocatorTy, ALLOCATOR_METHODS}; +use syntax::expand::allocator::{AllocatorKind, AllocatorTy, ALLOCATOR_METHODS}; use crate::ModuleLlvm; use crate::llvm::{self, False, True}; @@ -68,7 +68,7 @@ pub(crate) unsafe fn codegen(tcx: TyCtxt<'_>, mods: &mut ModuleLlvm, kind: Alloc let llbb = llvm::LLVMAppendBasicBlockInContext(llcx, llfn, - "entry\0".as_ptr() as *const _); + "entry\0".as_ptr().cast()); let llbuilder = llvm::LLVMCreateBuilderInContext(llcx); llvm::LLVMPositionBuilderAtEnd(llbuilder, llbb); @@ -80,7 +80,7 @@ pub(crate) unsafe fn codegen(tcx: TyCtxt<'_>, mods: &mut ModuleLlvm, kind: Alloc args.as_ptr(), args.len() as c_uint, None, - "\0".as_ptr() as *const _); + "\0".as_ptr().cast()); llvm::LLVMSetTailCall(ret, True); if output.is_some() { llvm::LLVMBuildRet(llbuilder, ret); diff --git a/src/librustc_codegen_llvm/attributes.rs b/src/librustc_codegen_llvm/attributes.rs index 423a2df352..6a36a4a50c 100644 --- a/src/librustc_codegen_llvm/attributes.rs +++ b/src/librustc_codegen_llvm/attributes.rs @@ -96,10 +96,12 @@ pub fn set_probestack(cx: &CodegenCx<'ll, '_>, llfn: &'ll Value) { } // Currently stack probes seem somewhat incompatible with the address - // sanitizer. With asan we're already protected from stack overflow anyway - // so we don't really need stack probes regardless. - if let Some(Sanitizer::Address) = cx.sess().opts.debugging_opts.sanitizer { - return + // sanitizer and thread sanitizer. With asan we're already protected from + // stack overflow anyway so we don't really need stack probes regardless. + match cx.sess().opts.debugging_opts.sanitizer { + Some(Sanitizer::Address) | + Some(Sanitizer::Thread) => return, + _ => {}, } // probestack doesn't play nice either with `-C profile-generate`. @@ -268,23 +270,12 @@ pub fn from_fn_attrs( // optimize based on this! false } else if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::UNWIND) { - // If a specific #[unwind] attribute is present, use that + // If a specific #[unwind] attribute is present, use that. true } else if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_ALLOCATOR_NOUNWIND) { - // Special attribute for allocator functions, which can't unwind + // Special attribute for allocator functions, which can't unwind. 
false - } else if let Some(_) = id { - // rust-lang/rust#64655, rust-lang/rust#63909: to minimize - // risk associated with changing cases where nounwind - // attribute is attached, this code is deliberately mimicking - // old control flow based on whether `id` is `Some` or `None`. - // - // However, in the long term we should either: - // - fold this into final else (i.e. stop inspecting `id`) - // - or, adopt Rust PR #63909. - // - // see also Rust RFC 2753. - + } else { let sig = cx.tcx.normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), &sig); if sig.abi == Abi::Rust || sig.abi == Abi::RustCall { // Any Rust method (or `extern "Rust" fn` or `extern @@ -310,15 +301,6 @@ pub fn from_fn_attrs( // In either case, we mark item as explicitly nounwind. false } - } else { - // assume this can possibly unwind, avoiding the application of a - // `nounwind` attribute below. - // - // (But: See comments in previous branch. Specifically, it is - // unclear whether there is real value in the assumption this - // can unwind. The conservatism here may just be papering over - // a real problem by making some UB a bit harder to hit.) - true }); // Always annotate functions with the target-cpu they are compiled for. diff --git a/src/librustc_codegen_llvm/back/archive.rs b/src/librustc_codegen_llvm/back/archive.rs index 68d3f90cd3..e169cfc4cc 100644 --- a/src/librustc_codegen_llvm/back/archive.rs +++ b/src/librustc_codegen_llvm/back/archive.rs @@ -9,7 +9,9 @@ use std::str; use crate::llvm::archive_ro::{ArchiveRO, Child}; use crate::llvm::{self, ArchiveKind}; -use rustc_codegen_ssa::{METADATA_FILENAME, RLIB_BYTECODE_EXTENSION}; +use rustc_codegen_ssa::{ + METADATA_FILENAME, RLIB_BYTECODE_EXTENSION, looks_like_rust_object_file +}; use rustc_codegen_ssa::back::archive::{ArchiveBuilder, find_library}; use rustc::session::Session; use syntax::symbol::Symbol; @@ -141,7 +143,7 @@ impl<'a> ArchiveBuilder<'a> for LlvmArchiveBuilder<'a> { } // Don't include Rust objects if LTO is enabled - if lto && fname.starts_with(&obj_start) && fname.ends_with(".o") { + if lto && looks_like_rust_object_file(fname) { return true } diff --git a/src/librustc_codegen_llvm/back/lto.rs b/src/librustc_codegen_llvm/back/lto.rs index a43fbb68db..b3be3d09f1 100644 --- a/src/librustc_codegen_llvm/back/lto.rs +++ b/src/librustc_codegen_llvm/back/lto.rs @@ -53,20 +53,20 @@ fn prepare_lto(cgcx: &CodegenContext, let symbol_filter = &|&(ref name, level): &(String, SymbolExportLevel)| { if level.is_below_threshold(export_threshold) { - let mut bytes = Vec::with_capacity(name.len() + 1); - bytes.extend(name.bytes()); - Some(CString::new(bytes).unwrap()) + Some(CString::new(name.as_str()).unwrap()) } else { None } }; let exported_symbols = cgcx.exported_symbols .as_ref().expect("needs exported symbols for LTO"); - let mut symbol_white_list = exported_symbols[&LOCAL_CRATE] - .iter() - .filter_map(symbol_filter) - .collect::>(); - let _timer = cgcx.profile_activity("generate_symbol_white_list_for_thinlto"); + let mut symbol_white_list = { + let _timer = cgcx.prof.generic_activity("LLVM_lto_generate_symbol_white_list"); + exported_symbols[&LOCAL_CRATE] + .iter() + .filter_map(symbol_filter) + .collect::>() + }; info!("{} symbols to preserve in this crate", symbol_white_list.len()); // If we're performing LTO for the entire crate graph, then for each of our @@ -95,14 +95,17 @@ fn prepare_lto(cgcx: &CodegenContext, } for &(cnum, ref path) in cgcx.each_linked_rlib_for_lto.iter() { - let _timer = cgcx.profile_activity(format!("load: {}", 
path.display())); let exported_symbols = cgcx.exported_symbols .as_ref().expect("needs exported symbols for LTO"); - symbol_white_list.extend( - exported_symbols[&cnum] - .iter() - .filter_map(symbol_filter)); + { + let _timer = cgcx.prof.generic_activity("LLVM_lto_generate_symbol_white_list"); + symbol_white_list.extend( + exported_symbols[&cnum] + .iter() + .filter_map(symbol_filter)); + } + let _timer = cgcx.prof.generic_activity("LLVM_lto_load_upstream_bitcode"); let archive = ArchiveRO::open(&path).expect("wanted an rlib"); let bytecodes = archive.iter().filter_map(|child| { child.ok().and_then(|c| c.name().map(|name| (name, c))) @@ -111,7 +114,7 @@ fn prepare_lto(cgcx: &CodegenContext, info!("adding bytecode {}", name); let bc_encoded = data.data(); - let (bc, id) = time_ext(cgcx.time_passes, None, &format!("decode {}", name), || { + let (bc, id) = time_ext(cgcx.time_passes, &format!("decode {}", name), || { match DecodedBytecode::new(bc_encoded) { Ok(b) => Ok((b.bytecode(), b.identifier().to_string())), Err(e) => Err(diag_handler.fatal(&e)), @@ -189,6 +192,7 @@ fn fat_lto(cgcx: &CodegenContext, symbol_white_list: &[*const libc::c_char]) -> Result, FatalError> { + let _timer = cgcx.prof.generic_activity("LLVM_fat_lto_build_monolithic_module"); info!("going for a fat lto"); // Sort out all our lists of incoming modules into two lists. @@ -287,8 +291,9 @@ fn fat_lto(cgcx: &CodegenContext, // save and persist everything with the original module. let mut linker = Linker::new(llmod); for (bc_decoded, name) in serialized_modules { + let _timer = cgcx.prof.generic_activity("LLVM_fat_lto_link_module"); info!("linking {:?}", name); - time_ext(cgcx.time_passes, None, &format!("ll link {:?}", name), || { + time_ext(cgcx.time_passes, &format!("ll link {:?}", name), || { let data = bc_decoded.data(); linker.add(&data).map_err(|()| { let msg = format!("failed to load bc of {:?}", name); @@ -388,6 +393,7 @@ fn thin_lto(cgcx: &CodegenContext, symbol_white_list: &[*const libc::c_char]) -> Result<(Vec>, Vec), FatalError> { + let _timer = cgcx.prof.generic_activity("LLVM_thin_lto_global_analysis"); unsafe { info!("going for that thin, thin LTO"); @@ -538,7 +544,7 @@ pub(crate) fn run_pass_manager(cgcx: &CodegenContext, llvm::LLVMRustAddAnalysisPasses(module.module_llvm.tm, pm, module.module_llvm.llmod()); if config.verify_llvm_ir { - let pass = llvm::LLVMRustFindAndCreatePass("verify\0".as_ptr() as *const _); + let pass = llvm::LLVMRustFindAndCreatePass("verify\0".as_ptr().cast()); llvm::LLVMRustAddPass(pm, pass.unwrap()); } @@ -573,16 +579,16 @@ pub(crate) fn run_pass_manager(cgcx: &CodegenContext, // We always generate bitcode through ThinLTOBuffers, // which do not support anonymous globals if config.bitcode_needed() { - let pass = llvm::LLVMRustFindAndCreatePass("name-anon-globals\0".as_ptr() as *const _); + let pass = llvm::LLVMRustFindAndCreatePass("name-anon-globals\0".as_ptr().cast()); llvm::LLVMRustAddPass(pm, pass.unwrap()); } if config.verify_llvm_ir { - let pass = llvm::LLVMRustFindAndCreatePass("verify\0".as_ptr() as *const _); + let pass = llvm::LLVMRustFindAndCreatePass("verify\0".as_ptr().cast()); llvm::LLVMRustAddPass(pm, pass.unwrap()); } - time_ext(cgcx.time_passes, None, "LTO passes", || + time_ext(cgcx.time_passes, "LTO passes", || llvm::LLVMRunPassManager(pm, module.module_llvm.llmod())); llvm::LLVMDisposePassManager(pm); @@ -601,16 +607,6 @@ impl ModuleBuffer { llvm::LLVMRustModuleBufferCreate(m) }) } - - pub fn parse<'a>( - &self, - name: &str, - cx: &'a llvm::Context, - 
handler: &Handler, - ) -> Result<&'a llvm::Module, FatalError> { - let name = CString::new(name).unwrap(); - parse_module(cx, &name, self.data(), handler) - } } impl ModuleBufferMethods for ModuleBuffer { @@ -723,7 +719,7 @@ pub unsafe fn optimize_thin_module( // Like with "fat" LTO, get some better optimizations if landing pads // are disabled by removing all landing pads. if cgcx.no_landing_pads { - let _timer = cgcx.profile_activity("LLVM_remove_landing_pads"); + let _timer = cgcx.prof.generic_activity("LLVM_thin_lto_remove_landing_pads"); llvm::LLVMRustMarkAllFunctionsNounwind(llmod); save_temp_bitcode(&cgcx, &module, "thin-lto-after-nounwind"); } @@ -736,26 +732,41 @@ pub unsafe fn optimize_thin_module( // // You can find some more comments about these functions in the LLVM // bindings we've got (currently `PassWrapper.cpp`) - if !llvm::LLVMRustPrepareThinLTORename(thin_module.shared.data.0, llmod) { - let msg = "failed to prepare thin LTO module"; - return Err(write::llvm_err(&diag_handler, msg)) + { + let _timer = cgcx.prof.generic_activity("LLVM_thin_lto_rename"); + if !llvm::LLVMRustPrepareThinLTORename(thin_module.shared.data.0, llmod) { + let msg = "failed to prepare thin LTO module"; + return Err(write::llvm_err(&diag_handler, msg)) + } + save_temp_bitcode(cgcx, &module, "thin-lto-after-rename"); } - save_temp_bitcode(cgcx, &module, "thin-lto-after-rename"); - if !llvm::LLVMRustPrepareThinLTOResolveWeak(thin_module.shared.data.0, llmod) { - let msg = "failed to prepare thin LTO module"; - return Err(write::llvm_err(&diag_handler, msg)) + + { + let _timer = cgcx.prof.generic_activity("LLVM_thin_lto_resolve_weak"); + if !llvm::LLVMRustPrepareThinLTOResolveWeak(thin_module.shared.data.0, llmod) { + let msg = "failed to prepare thin LTO module"; + return Err(write::llvm_err(&diag_handler, msg)) + } + save_temp_bitcode(cgcx, &module, "thin-lto-after-resolve"); } - save_temp_bitcode(cgcx, &module, "thin-lto-after-resolve"); - if !llvm::LLVMRustPrepareThinLTOInternalize(thin_module.shared.data.0, llmod) { - let msg = "failed to prepare thin LTO module"; - return Err(write::llvm_err(&diag_handler, msg)) + + { + let _timer = cgcx.prof.generic_activity("LLVM_thin_lto_internalize"); + if !llvm::LLVMRustPrepareThinLTOInternalize(thin_module.shared.data.0, llmod) { + let msg = "failed to prepare thin LTO module"; + return Err(write::llvm_err(&diag_handler, msg)) + } + save_temp_bitcode(cgcx, &module, "thin-lto-after-internalize"); } - save_temp_bitcode(cgcx, &module, "thin-lto-after-internalize"); - if !llvm::LLVMRustPrepareThinLTOImport(thin_module.shared.data.0, llmod) { - let msg = "failed to prepare thin LTO module"; - return Err(write::llvm_err(&diag_handler, msg)) + + { + let _timer = cgcx.prof.generic_activity("LLVM_thin_lto_import"); + if !llvm::LLVMRustPrepareThinLTOImport(thin_module.shared.data.0, llmod) { + let msg = "failed to prepare thin LTO module"; + return Err(write::llvm_err(&diag_handler, msg)) + } + save_temp_bitcode(cgcx, &module, "thin-lto-after-import"); } - save_temp_bitcode(cgcx, &module, "thin-lto-after-import"); // Ok now this is a bit unfortunate. This is also something you won't // find upstream in LLVM's ThinLTO passes! This is a hack for now to @@ -786,18 +797,24 @@ pub unsafe fn optimize_thin_module( // not too much) but for now at least gets LLVM to emit valid DWARF (or // so it appears). Hopefully we can remove this once upstream bugs are // fixed in LLVM. 
- llvm::LLVMRustThinLTOPatchDICompileUnit(llmod, cu1); - save_temp_bitcode(cgcx, &module, "thin-lto-after-patch"); + { + let _timer = cgcx.prof.generic_activity("LLVM_thin_lto_patch_debuginfo"); + llvm::LLVMRustThinLTOPatchDICompileUnit(llmod, cu1); + save_temp_bitcode(cgcx, &module, "thin-lto-after-patch"); + } // Alright now that we've done everything related to the ThinLTO // analysis it's time to run some optimizations! Here we use the same // `run_pass_manager` as the "fat" LTO above except that we tell it to // populate a thin-specific pass manager, which presumably LLVM treats a // little differently. - info!("running thin lto passes over {}", module.name); - let config = cgcx.config(module.kind); - run_pass_manager(cgcx, &module, config, true); - save_temp_bitcode(cgcx, &module, "thin-lto-after-pm"); + { + let _timer = cgcx.prof.generic_activity("LLVM_thin_lto_optimize"); + info!("running thin lto passes over {}", module.name); + let config = cgcx.config(module.kind); + run_pass_manager(cgcx, &module, config, true); + save_temp_bitcode(cgcx, &module, "thin-lto-after-pm"); + } } Ok(module) } diff --git a/src/librustc_codegen_llvm/back/write.rs b/src/librustc_codegen_llvm/back/write.rs index 253110dcb3..52f3a1cbb5 100644 --- a/src/librustc_codegen_llvm/back/write.rs +++ b/src/librustc_codegen_llvm/back/write.rs @@ -221,8 +221,8 @@ impl<'a> DiagnosticHandlers<'a> { llcx: &'a llvm::Context) -> Self { let data = Box::into_raw(Box::new((cgcx, handler))); unsafe { - llvm::LLVMRustSetInlineAsmDiagnosticHandler(llcx, inline_asm_handler, data as *mut _); - llvm::LLVMContextSetDiagnosticHandler(llcx, diagnostic_handler, data as *mut _); + llvm::LLVMRustSetInlineAsmDiagnosticHandler(llcx, inline_asm_handler, data.cast()); + llvm::LLVMContextSetDiagnosticHandler(llcx, diagnostic_handler, data.cast()); } DiagnosticHandlers { data, llcx } } @@ -306,6 +306,8 @@ pub(crate) unsafe fn optimize(cgcx: &CodegenContext, config: &ModuleConfig) -> Result<(), FatalError> { + let _timer = cgcx.prof.generic_activity("LLVM_module_optimize"); + let llmod = module.module_llvm.llmod(); let llcx = &*module.module_llvm.llcx; let tm = &*module.module_llvm.tm; @@ -423,18 +425,16 @@ pub(crate) unsafe fn optimize(cgcx: &CodegenContext, // Finally, run the actual optimization passes { - let _timer = cgcx.profile_activity("LLVM_function_passes"); + let _timer = cgcx.prof.generic_activity("LLVM_module_optimize_function_passes"); time_ext(config.time_passes, - None, &format!("llvm function passes [{}]", module_name.unwrap()), || { llvm::LLVMRustRunFunctionPassManager(fpm, llmod) }); } { - let _timer = cgcx.profile_activity("LLVM_module_passes"); + let _timer = cgcx.prof.generic_activity("LLVM_module_optimize_module_passes"); time_ext(config.time_passes, - None, &format!("llvm module passes [{}]", module_name.unwrap()), || { llvm::LLVMRunPassManager(mpm, llmod) @@ -454,7 +454,7 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext, config: &ModuleConfig) -> Result { - let _timer = cgcx.profile_activity("codegen"); + let _timer = cgcx.prof.generic_activity("LLVM_module_codegen"); { let llmod = module.module_llvm.llmod(); let llcx = &*module.module_llvm.llcx; @@ -505,12 +505,12 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext, if write_bc || config.emit_bc_compressed || config.embed_bitcode { - let _timer = cgcx.profile_activity("LLVM_make_bitcode"); + let _timer = cgcx.prof.generic_activity("LLVM_module_codegen_make_bitcode"); let thin = ThinBuffer::new(llmod); let data = thin.data(); if write_bc { - let _timer = 
cgcx.profile_activity("LLVM_emit_bitcode"); + let _timer = cgcx.prof.generic_activity("LLVM_module_codegen_emit_bitcode"); if let Err(e) = fs::write(&bc_out, data) { let msg = format!("failed to write bytecode to {}: {}", bc_out.display(), e); diag_handler.err(&msg); @@ -518,12 +518,13 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext, } if config.embed_bitcode { - let _timer = cgcx.profile_activity("LLVM_embed_bitcode"); + let _timer = cgcx.prof.generic_activity("LLVM_module_codegen_embed_bitcode"); embed_bitcode(cgcx, llcx, llmod, Some(data)); } if config.emit_bc_compressed { - let _timer = cgcx.profile_activity("LLVM_compress_bitcode"); + let _timer = + cgcx.prof.generic_activity("LLVM_module_codegen_emit_compressed_bitcode"); let dst = bc_out.with_extension(RLIB_BYTECODE_EXTENSION); let data = bytecode::encode(&module.name, data); if let Err(e) = fs::write(&dst, data) { @@ -535,10 +536,10 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext, embed_bitcode(cgcx, llcx, llmod, None); } - time_ext(config.time_passes, None, &format!("codegen passes [{}]", module_name.unwrap()), + time_ext(config.time_passes, &format!("codegen passes [{}]", module_name.unwrap()), || -> Result<(), FatalError> { if config.emit_ir { - let _timer = cgcx.profile_activity("LLVM_emit_ir"); + let _timer = cgcx.prof.generic_activity("LLVM_module_codegen_emit_ir"); let out = cgcx.output_filenames.temp_path(OutputType::LlvmAssembly, module_name); let out_c = path_to_c_string(&out); @@ -585,7 +586,7 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext, } if config.emit_asm || asm_to_obj { - let _timer = cgcx.profile_activity("LLVM_emit_asm"); + let _timer = cgcx.prof.generic_activity("LLVM_module_codegen_emit_asm"); let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name); // We can't use the same module for asm and binary output, because that triggers @@ -603,13 +604,13 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext, } if write_obj { - let _timer = cgcx.profile_activity("LLVM_emit_obj"); + let _timer = cgcx.prof.generic_activity("LLVM_module_codegen_emit_obj"); with_codegen(tm, llmod, config.no_builtins, |cpm| { write_output_file(diag_handler, tm, cpm, llmod, &obj_out, llvm::FileType::ObjectFile) })?; } else if asm_to_obj { - let _timer = cgcx.profile_activity("LLVM_asm_to_obj"); + let _timer = cgcx.prof.generic_activity("LLVM_module_codegen_asm_to_obj"); let assembly = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name); run_assembler(cgcx, diag_handler, &assembly, &obj_out); @@ -669,7 +670,7 @@ unsafe fn embed_bitcode(cgcx: &CodegenContext, let llglobal = llvm::LLVMAddGlobal( llmod, common::val_ty(llconst), - "rustc.embedded.module\0".as_ptr() as *const _, + "rustc.embedded.module\0".as_ptr().cast(), ); llvm::LLVMSetInitializer(llglobal, llconst); @@ -681,7 +682,7 @@ unsafe fn embed_bitcode(cgcx: &CodegenContext, } else { ".llvmbc\0" }; - llvm::LLVMSetSection(llglobal, section.as_ptr() as *const _); + llvm::LLVMSetSection(llglobal, section.as_ptr().cast()); llvm::LLVMRustSetLinkage(llglobal, llvm::Linkage::PrivateLinkage); llvm::LLVMSetGlobalConstant(llglobal, llvm::True); @@ -689,7 +690,7 @@ unsafe fn embed_bitcode(cgcx: &CodegenContext, let llglobal = llvm::LLVMAddGlobal( llmod, common::val_ty(llconst), - "rustc.embedded.cmdline\0".as_ptr() as *const _, + "rustc.embedded.cmdline\0".as_ptr().cast(), ); llvm::LLVMSetInitializer(llglobal, llconst); let section = if is_apple { @@ -697,7 +698,7 @@ unsafe fn embed_bitcode(cgcx: &CodegenContext, } else { ".llvmcmd\0" }; - 
llvm::LLVMSetSection(llglobal, section.as_ptr() as *const _); + llvm::LLVMSetSection(llglobal, section.as_ptr().cast()); llvm::LLVMRustSetLinkage(llglobal, llvm::Linkage::PrivateLinkage); } @@ -839,7 +840,7 @@ fn create_msvc_imps( for (imp_name, val) in globals { let imp = llvm::LLVMAddGlobal(llmod, i8p_ty, - imp_name.as_ptr() as *const _); + imp_name.as_ptr().cast()); llvm::LLVMSetInitializer(imp, consts::ptrcast(val, i8p_ty)); llvm::LLVMRustSetLinkage(imp, llvm::Linkage::ExternalLinkage); } diff --git a/src/librustc_codegen_llvm/base.rs b/src/librustc_codegen_llvm/base.rs index 21c19e167c..edd34b52ea 100644 --- a/src/librustc_codegen_llvm/base.rs +++ b/src/librustc_codegen_llvm/base.rs @@ -36,7 +36,7 @@ use rustc_codegen_ssa::back::write::submit_codegened_module_to_llvm; use std::ffi::CString; use std::time::Instant; -use syntax_pos::symbol::InternedString; +use syntax_pos::symbol::Symbol; use rustc::hir::CodegenFnAttrs; use crate::value::Value; @@ -103,7 +103,12 @@ pub fn iter_globals(llmod: &'ll llvm::Module) -> ValueIter<'ll> { } } -pub fn compile_codegen_unit(tcx: TyCtxt<'tcx>, cgu_name: InternedString) { +pub fn compile_codegen_unit( + tcx: TyCtxt<'tcx>, + cgu_name: Symbol, + tx_to_llvm_workers: &std::sync::mpsc::Sender>, +) { + let prof_timer = tcx.prof.generic_activity("codegen_module"); let start_time = Instant::now(); let dep_node = tcx.codegen_unit(cgu_name).codegen_dep_node(tcx); @@ -115,17 +120,18 @@ pub fn compile_codegen_unit(tcx: TyCtxt<'tcx>, cgu_name: InternedString) { dep_graph::hash_result, ); let time_to_codegen = start_time.elapsed(); + drop(prof_timer); // We assume that the cost to run LLVM on a CGU is proportional to // the time we needed for codegenning it. let cost = time_to_codegen.as_secs() * 1_000_000_000 + time_to_codegen.subsec_nanos() as u64; - submit_codegened_module_to_llvm(&LlvmCodegenBackend(()), tcx, module, cost); + submit_codegened_module_to_llvm(&LlvmCodegenBackend(()), tx_to_llvm_workers, module, cost); fn module_codegen( tcx: TyCtxt<'_>, - cgu_name: InternedString, + cgu_name: Symbol, ) -> ModuleCodegen { let cgu = tcx.codegen_unit(cgu_name); // Instantiate monomorphizations without filling out definitions yet... 
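
The `base.rs` hunk above, like the earlier `lto.rs` and `write.rs` hunks, replaces ad-hoc `profile_activity` calls with `prof.generic_activity(...)` guards whose lifetime delimits the measured region. The pattern is RAII: the duration runs from the guard's creation to its drop, which is why the patch wraps individual steps in extra `{ ... }` blocks and why `compile_codegen_unit` explicitly `drop(prof_timer)`s before submitting the module to the LLVM workers. A rough sketch of how such a guard works (illustrative names only, not the actual rustc self-profiler API):

```rust
use std::time::Instant;

struct Profiler;

// Guard returned by the profiler; it records the elapsed time when dropped.
struct ActivityTimer {
    label: &'static str,
    start: Instant,
}

impl Profiler {
    fn generic_activity(&self, label: &'static str) -> ActivityTimer {
        ActivityTimer { label, start: Instant::now() }
    }
}

impl Drop for ActivityTimer {
    fn drop(&mut self) {
        println!("{} took {:?}", self.label, self.start.elapsed());
    }
}

fn main() {
    let prof = Profiler;
    {
        let _timer = prof.generic_activity("LLVM_thin_lto_rename");
        // ... work attributed to this activity happens here ...
    } // guard dropped here; the elapsed time is recorded for this scope only
}
```

Scoping the guard narrowly is what lets the new activity names ("LLVM_thin_lto_rename", "LLVM_module_codegen_emit_obj", and so on) attribute time to one step rather than to the whole surrounding function.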
diff --git a/src/librustc_codegen_llvm/builder.rs b/src/librustc_codegen_llvm/builder.rs index 423a01ad1f..312c41b88b 100644 --- a/src/librustc_codegen_llvm/builder.rs +++ b/src/librustc_codegen_llvm/builder.rs @@ -23,7 +23,6 @@ use std::ffi::CStr; use std::ops::{Deref, Range}; use std::ptr; use std::iter::TrustedLen; -use syntax::symbol::Symbol; // All Builders must have an llfn associated with them #[must_use] @@ -52,6 +51,7 @@ const UNNAMED: *const c_char = EMPTY_C_STR.as_ptr(); impl BackendTypes for Builder<'_, 'll, 'tcx> { type Value = as BackendTypes>::Value; + type Function = as BackendTypes>::Function; type BasicBlock = as BackendTypes>::BasicBlock; type Type = as BackendTypes>::Type; type Funclet = as BackendTypes>::Funclet; @@ -324,7 +324,7 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { use syntax::ast::UintTy::*; use rustc::ty::{Int, Uint}; - let new_sty = match ty.sty { + let new_kind = match ty.kind { Int(Isize) => Int(self.tcx.sess.target.isize_ty), Uint(Usize) => Uint(self.tcx.sess.target.usize_ty), ref t @ Uint(_) | ref t @ Int(_) => t.clone(), @@ -332,7 +332,7 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { }; let name = match oop { - OverflowOp::Add => match new_sty { + OverflowOp::Add => match new_kind { Int(I8) => "llvm.sadd.with.overflow.i8", Int(I16) => "llvm.sadd.with.overflow.i16", Int(I32) => "llvm.sadd.with.overflow.i32", @@ -347,7 +347,7 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { _ => unreachable!(), }, - OverflowOp::Sub => match new_sty { + OverflowOp::Sub => match new_kind { Int(I8) => "llvm.ssub.with.overflow.i8", Int(I16) => "llvm.ssub.with.overflow.i16", Int(I32) => "llvm.ssub.with.overflow.i32", @@ -362,7 +362,7 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { _ => unreachable!(), }, - OverflowOp::Mul => match new_sty { + OverflowOp::Mul => match new_kind { Int(I8) => "llvm.smul.with.overflow.i8", Int(I16) => "llvm.smul.with.overflow.i16", Int(I32) => "llvm.smul.with.overflow.i32", @@ -1066,36 +1066,6 @@ impl StaticBuilderMethods for Builder<'a, 'll, 'tcx> { // Forward to the `get_static` method of `CodegenCx` self.cx().get_static(def_id) } - - fn static_panic_msg( - &mut self, - msg: Option, - filename: Symbol, - line: Self::Value, - col: Self::Value, - kind: &str, - ) -> Self::Value { - let align = self.tcx.data_layout.aggregate_align.abi - .max(self.tcx.data_layout.i32_align.abi) - .max(self.tcx.data_layout.pointer_align.abi); - - let filename = self.const_str_slice(filename); - - let with_msg_components; - let without_msg_components; - - let components = if let Some(msg) = msg { - let msg = self.const_str_slice(msg); - with_msg_components = [msg, filename, line, col]; - &with_msg_components as &[_] - } else { - without_msg_components = [filename, line, col]; - &without_msg_components as &[_] - }; - - let struct_ = self.const_struct(&components, false); - self.static_addr_of(struct_, align, Some(kind)) - } } impl Builder<'a, 'll, 'tcx> { diff --git a/src/librustc_codegen_llvm/callee.rs b/src/librustc_codegen_llvm/callee.rs index 35d5107842..08fa23f2a7 100644 --- a/src/librustc_codegen_llvm/callee.rs +++ b/src/librustc_codegen_llvm/callee.rs @@ -33,7 +33,7 @@ pub fn get_fn( assert!(!instance.substs.has_param_types()); let sig = instance.fn_sig(cx.tcx()); - if let Some(&llfn) = cx.instances().borrow().get(&instance) { + if let Some(&llfn) = cx.instances.borrow().get(&instance) { return llfn; } diff --git a/src/librustc_codegen_llvm/common.rs b/src/librustc_codegen_llvm/common.rs index 
6fbea9646b..f38f9dfecd 100644 --- a/src/librustc_codegen_llvm/common.rs +++ b/src/librustc_codegen_llvm/common.rs @@ -2,8 +2,7 @@ //! Code that is useful in various codegen modules. -use crate::llvm::{self, True, False, Bool, BasicBlock, OperandBundleDef}; -use crate::abi; +use crate::llvm::{self, True, False, Bool, BasicBlock, OperandBundleDef, ConstantInt}; use crate::consts; use crate::type_::Type; use crate::type_of::LayoutLlvmExt; @@ -86,6 +85,8 @@ impl Funclet<'ll> { impl BackendTypes for CodegenCx<'ll, 'tcx> { type Value = &'ll Value; + type Function = &'ll Value; + type BasicBlock = &'ll BasicBlock; type Type = &'ll Type; type Funclet = Funclet<'ll>; @@ -94,16 +95,6 @@ impl BackendTypes for CodegenCx<'ll, 'tcx> { } impl CodegenCx<'ll, 'tcx> { - pub fn const_fat_ptr( - &self, - ptr: &'ll Value, - meta: &'ll Value - ) -> &'ll Value { - assert_eq!(abi::FAT_PTR_ADDR, 0); - assert_eq!(abi::FAT_PTR_EXTRA, 1); - self.const_struct(&[ptr, meta], false) - } - pub fn const_array(&self, ty: &'ll Type, elts: &[&'ll Value]) -> &'ll Value { unsafe { return llvm::LLVMConstArray(ty, elts.as_ptr(), elts.len() as c_uint); @@ -148,13 +139,6 @@ impl CodegenCx<'ll, 'tcx> { } } - pub fn const_str_slice(&self, s: Symbol) -> &'ll Value { - let len = s.as_str().len(); - let cs = consts::ptrcast(self.const_cstr(s, false), - self.type_ptr_to(self.layout_of(self.tcx.mk_str()).llvm_type(self))); - self.const_fat_ptr(cs, self.const_usize(len as u64)) - } - pub fn const_get_elt(&self, v: &'ll Value, idx: u64) -> &'ll Value { unsafe { assert_eq!(idx as c_uint as u64, idx); @@ -235,6 +219,13 @@ impl ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> { unsafe { llvm::LLVMConstReal(t, val) } } + fn const_str(&self, s: Symbol) -> (&'ll Value, &'ll Value) { + let len = s.as_str().len(); + let cs = consts::ptrcast(self.const_cstr(s, false), + self.type_ptr_to(self.layout_of(self.tcx.mk_str()).llvm_type(self))); + (cs, self.const_usize(len as u64)) + } + fn const_struct( &self, elts: &[&'ll Value], @@ -243,33 +234,23 @@ impl ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> { struct_in_context(self.llcx, elts, packed) } - fn const_to_uint(&self, v: &'ll Value) -> u64 { - unsafe { + fn const_to_opt_uint(&self, v: &'ll Value) -> Option { + try_as_const_integral(v).map(|v| unsafe { llvm::LLVMConstIntGetZExtValue(v) - } - } - - fn is_const_integral(&self, v: &'ll Value) -> bool { - unsafe { - llvm::LLVMIsAConstantInt(v).is_some() - } + }) } fn const_to_opt_u128(&self, v: &'ll Value, sign_ext: bool) -> Option { - unsafe { - if self.is_const_integral(v) { - let (mut lo, mut hi) = (0u64, 0u64); - let success = llvm::LLVMRustConstInt128Get(v, sign_ext, - &mut hi, &mut lo); - if success { - Some(hi_lo_to_u128(lo, hi)) - } else { - None - } + try_as_const_integral(v).and_then(|v| unsafe { + let (mut lo, mut hi) = (0u64, 0u64); + let success = llvm::LLVMRustConstInt128Get(v, sign_ext, + &mut hi, &mut lo); + if success { + Some(hi_lo_to_u128(lo, hi)) } else { None } - } + }) } fn scalar_to_backend( @@ -305,7 +286,7 @@ impl ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> { } } Some(GlobalAlloc::Function(fn_instance)) => { - self.get_fn(fn_instance) + self.get_fn_addr(fn_instance) } Some(GlobalAlloc::Static(def_id)) => { assert!(self.tcx.is_static(def_id)); @@ -386,3 +367,9 @@ pub fn struct_in_context( fn hi_lo_to_u128(lo: u64, hi: u64) -> u128 { ((hi as u128) << 64) | (lo as u128) } + +fn try_as_const_integral(v: &'ll Value) -> Option<&'ll ConstantInt> { + unsafe { + llvm::LLVMIsAConstantInt(v) + } +} diff --git a/src/librustc_codegen_llvm/consts.rs 
b/src/librustc_codegen_llvm/consts.rs index e71d1fc169..fd7054a5a0 100644 --- a/src/librustc_codegen_llvm/consts.rs +++ b/src/librustc_codegen_llvm/consts.rs @@ -134,7 +134,7 @@ fn check_and_apply_linkage( // extern "C" fn() from being non-null, so we can't just declare a // static and call it a day. Some linkages (like weak) will make it such // that the static actually has a null value. - let llty2 = if let ty::RawPtr(ref mt) = ty.sty { + let llty2 = if let ty::RawPtr(ref mt) = ty.kind { cx.layout_of(mt.ty).llvm_type(cx) } else { cx.sess().span_fatal( @@ -221,7 +221,7 @@ impl CodegenCx<'ll, 'tcx> { def_id); let ty = instance.ty(self.tcx); - let sym = self.tcx.symbol_name(instance).name.as_symbol(); + let sym = self.tcx.symbol_name(instance).name; debug!("get_static: sym={} instance={:?}", sym, instance); @@ -230,7 +230,7 @@ impl CodegenCx<'ll, 'tcx> { let llty = self.layout_of(ty).llvm_type(self); let (g, attrs) = match self.tcx.hir().get(id) { Node::Item(&hir::Item { - ref attrs, span, node: hir::ItemKind::Static(..), .. + ref attrs, span, kind: hir::ItemKind::Static(..), .. }) => { let sym_str = sym.as_str(); if self.get_declared_value(&sym_str).is_some() { @@ -249,7 +249,7 @@ impl CodegenCx<'ll, 'tcx> { } Node::ForeignItem(&hir::ForeignItem { - ref attrs, span, node: hir::ForeignItemKind::Static(..), .. + ref attrs, span, kind: hir::ForeignItemKind::Static(..), .. }) => { let fn_attrs = self.tcx.codegen_fn_attrs(def_id); (check_and_apply_linkage(&self, &fn_attrs, ty, sym, span), attrs) @@ -488,7 +488,7 @@ impl StaticMethods for CodegenCx<'ll, 'tcx> { if let Some(section) = attrs.link_section { let section = llvm::LLVMMDStringInContext( self.llcx, - section.as_str().as_ptr() as *const _, + section.as_str().as_ptr().cast(), section.as_str().len() as c_uint, ); assert!(alloc.relocations().is_empty()); @@ -500,14 +500,14 @@ impl StaticMethods for CodegenCx<'ll, 'tcx> { 0..alloc.len()); let alloc = llvm::LLVMMDStringInContext( self.llcx, - bytes.as_ptr() as *const _, + bytes.as_ptr().cast(), bytes.len() as c_uint, ); let data = [section, alloc]; let meta = llvm::LLVMMDNodeInContext(self.llcx, data.as_ptr(), 2); llvm::LLVMAddNamedMetadataOperand( self.llmod, - "wasm.custom_sections\0".as_ptr() as *const _, + "wasm.custom_sections\0".as_ptr().cast(), meta, ); } diff --git a/src/librustc_codegen_llvm/context.rs b/src/librustc_codegen_llvm/context.rs index 58ce970390..2da9387717 100644 --- a/src/librustc_codegen_llvm/context.rs +++ b/src/librustc_codegen_llvm/context.rs @@ -20,7 +20,6 @@ use rustc::ty::layout::{ use rustc::ty::{self, Ty, TyCtxt, Instance}; use rustc::util::nodemap::FxHashMap; use rustc_target::spec::{HasTargetSpec, Target}; -use rustc_codegen_ssa::callee::resolve_and_get_fn; use rustc_codegen_ssa::base::wants_msvc_seh; use crate::callee::get_fn; @@ -211,7 +210,7 @@ pub unsafe fn create_module( // If skipping the PLT is enabled, we need to add some module metadata // to ensure intrinsic calls don't use it. 
if !sess.needs_plt() { - let avoid_plt = "RtLibUseGOT\0".as_ptr() as *const _; + let avoid_plt = "RtLibUseGOT\0".as_ptr().cast(); llvm::LLVMRustAddModuleFlag(llmod, avoid_plt, 1); } @@ -327,11 +326,11 @@ impl MiscMethods<'tcx> for CodegenCx<'ll, 'tcx> { &self.vtables } - fn instances(&self) -> &RefCell, &'ll Value>> { - &self.instances + fn get_fn(&self, instance: Instance<'tcx>) -> &'ll Value { + get_fn(self, instance) } - fn get_fn(&self, instance: Instance<'tcx>) -> &'ll Value { + fn get_fn_addr(&self, instance: Instance<'tcx>) -> &'ll Value { get_fn(self, instance) } @@ -362,7 +361,14 @@ impl MiscMethods<'tcx> for CodegenCx<'ll, 'tcx> { let tcx = self.tcx; let llfn = match tcx.lang_items().eh_personality() { Some(def_id) if !wants_msvc_seh(self.sess()) => { - resolve_and_get_fn(self, def_id, tcx.intern_substs(&[])) + self.get_fn_addr( + ty::Instance::resolve( + tcx, + ty::ParamEnv::reveal_all(), + def_id, + tcx.intern_substs(&[]), + ).unwrap() + ) } _ => { let name = if wants_msvc_seh(self.sess()) { @@ -390,7 +396,14 @@ impl MiscMethods<'tcx> for CodegenCx<'ll, 'tcx> { let tcx = self.tcx; assert!(self.sess().target.target.options.custom_unwind_resume); if let Some(def_id) = tcx.lang_items().eh_unwind_resume() { - let llfn = resolve_and_get_fn(self, def_id, tcx.intern_substs(&[])); + let llfn = self.get_fn_addr( + ty::Instance::resolve( + tcx, + ty::ParamEnv::reveal_all(), + def_id, + tcx.intern_substs(&[]), + ).unwrap() + ); unwresume.set(Some(llfn)); return llfn; } @@ -537,6 +550,7 @@ impl CodegenCx<'b, 'tcx> { ifn!("llvm.trap", fn() -> void); ifn!("llvm.debugtrap", fn() -> void); ifn!("llvm.frameaddress", fn(t_i32) -> i8p); + ifn!("llvm.sideeffect", fn() -> void); ifn!("llvm.powi.f32", fn(t_f32, t_i32) -> t_f32); ifn!("llvm.powi.v2f32", fn(t_v2f32, t_i32) -> t_v2f32); diff --git a/src/librustc_codegen_llvm/debuginfo/create_scope_map.rs b/src/librustc_codegen_llvm/debuginfo/create_scope_map.rs index 8b3ed5b0c6..6ee76b71fc 100644 --- a/src/librustc_codegen_llvm/debuginfo/create_scope_map.rs +++ b/src/librustc_codegen_llvm/debuginfo/create_scope_map.rs @@ -1,4 +1,4 @@ -use rustc_codegen_ssa::debuginfo::{FunctionDebugContext, FunctionDebugContextData, MirDebugScope}; +use rustc_codegen_ssa::mir::debuginfo::{FunctionDebugContext, DebugScope}; use super::metadata::file_metadata; use super::utils::{DIB, span_start}; @@ -11,35 +11,21 @@ use libc::c_uint; use syntax_pos::Pos; -use rustc_data_structures::bit_set::BitSet; -use rustc_data_structures::indexed_vec::{Idx, IndexVec}; - -use syntax_pos::BytePos; +use rustc_index::bit_set::BitSet; +use rustc_index::vec::Idx; /// Produces DIScope DIEs for each MIR Scope which has variables defined in it. -/// If debuginfo is disabled, the returned vector is empty. -pub fn create_mir_scopes( +pub fn compute_mir_scopes( cx: &CodegenCx<'ll, '_>, mir: &Body<'_>, - debug_context: &FunctionDebugContext<&'ll DISubprogram>, -) -> IndexVec> { - let null_scope = MirDebugScope { - scope_metadata: None, - file_start_pos: BytePos(0), - file_end_pos: BytePos(0) - }; - let mut scopes = IndexVec::from_elem(null_scope, &mir.source_scopes); - - let debug_context = match *debug_context { - FunctionDebugContext::RegularContext(ref data) => data, - FunctionDebugContext::DebugInfoDisabled | - FunctionDebugContext::FunctionWithoutDebugInfo => { - return scopes; - } - }; - + fn_metadata: &'ll DISubprogram, + debug_context: &mut FunctionDebugContext<&'ll DIScope>, +) { // Find all the scopes with variables defined in them. 
let mut has_variables = BitSet::new_empty(mir.source_scopes.len()); + // FIXME(eddyb) base this on `decl.name`, or even better, on debuginfo. + // FIXME(eddyb) take into account that arguments always have debuginfo, + // irrespective of their name (assuming full debuginfo is enabled). for var in mir.vars_iter() { let decl = &mir.local_decls[var]; has_variables.insert(decl.visibility_scope); @@ -48,31 +34,29 @@ pub fn create_mir_scopes( // Instantiate all scopes. for idx in 0..mir.source_scopes.len() { let scope = SourceScope::new(idx); - make_mir_scope(cx, &mir, &has_variables, debug_context, scope, &mut scopes); + make_mir_scope(cx, &mir, fn_metadata, &has_variables, debug_context, scope); } - - scopes } fn make_mir_scope(cx: &CodegenCx<'ll, '_>, mir: &Body<'_>, + fn_metadata: &'ll DISubprogram, has_variables: &BitSet, - debug_context: &FunctionDebugContextData<&'ll DISubprogram>, - scope: SourceScope, - scopes: &mut IndexVec>) { - if scopes[scope].is_valid() { + debug_context: &mut FunctionDebugContext<&'ll DISubprogram>, + scope: SourceScope) { + if debug_context.scopes[scope].is_valid() { return; } let scope_data = &mir.source_scopes[scope]; let parent_scope = if let Some(parent) = scope_data.parent_scope { - make_mir_scope(cx, mir, has_variables, debug_context, parent, scopes); - scopes[parent] + make_mir_scope(cx, mir, fn_metadata, has_variables, debug_context, parent); + debug_context.scopes[parent] } else { // The root is the function itself. let loc = span_start(cx, mir.span); - scopes[scope] = MirDebugScope { - scope_metadata: Some(debug_context.fn_metadata), + debug_context.scopes[scope] = DebugScope { + scope_metadata: Some(fn_metadata), file_start_pos: loc.file.start_pos, file_end_pos: loc.file.end_pos, }; @@ -86,8 +70,8 @@ fn make_mir_scope(cx: &CodegenCx<'ll, '_>, // However, we don't skip creating a nested scope if // our parent is the root, because we might want to // put arguments in the root and not have shadowing. 
- if parent_scope.scope_metadata.unwrap() != debug_context.fn_metadata { - scopes[scope] = parent_scope; + if parent_scope.scope_metadata.unwrap() != fn_metadata { + debug_context.scopes[scope] = parent_scope; return; } } @@ -105,7 +89,7 @@ fn make_mir_scope(cx: &CodegenCx<'ll, '_>, loc.line as c_uint, loc.col.to_usize() as c_uint)) }; - scopes[scope] = MirDebugScope { + debug_context.scopes[scope] = DebugScope { scope_metadata, file_start_pos: loc.file.start_pos, file_end_pos: loc.file.end_pos, diff --git a/src/librustc_codegen_llvm/debuginfo/gdb.rs b/src/librustc_codegen_llvm/debuginfo/gdb.rs index 04c9e93c7a..9ed1c1730a 100644 --- a/src/librustc_codegen_llvm/debuginfo/gdb.rs +++ b/src/librustc_codegen_llvm/debuginfo/gdb.rs @@ -37,7 +37,7 @@ pub fn get_or_insert_gdb_debug_scripts_section_global(cx: &CodegenCx<'ll, '_>) let section_var = unsafe { llvm::LLVMGetNamedGlobal(cx.llmod, - c_section_var_name.as_ptr() as *const _) + c_section_var_name.as_ptr().cast()) }; section_var.unwrap_or_else(|| { @@ -52,7 +52,7 @@ pub fn get_or_insert_gdb_debug_scripts_section_global(cx: &CodegenCx<'ll, '_>) llvm_type).unwrap_or_else(||{ bug!("symbol `{}` is already defined", section_var_name) }); - llvm::LLVMSetSection(section_var, section_name.as_ptr() as *const _); + llvm::LLVMSetSection(section_var, section_name.as_ptr().cast()); llvm::LLVMSetInitializer(section_var, cx.const_bytes(section_contents)); llvm::LLVMSetGlobalConstant(section_var, llvm::True); llvm::LLVMSetUnnamedAddr(section_var, llvm::True); diff --git a/src/librustc_codegen_llvm/debuginfo/metadata.rs b/src/librustc_codegen_llvm/debuginfo/metadata.rs index d0b607bd88..f0148a21ae 100644 --- a/src/librustc_codegen_llvm/debuginfo/metadata.rs +++ b/src/librustc_codegen_llvm/debuginfo/metadata.rs @@ -6,7 +6,7 @@ use super::utils::{debug_context, DIB, span_start, get_namespace_for_item, create_DIArray, is_node_local_to_unit}; use super::namespace::mangled_name_of_instance; use super::type_names::compute_debuginfo_type_name; -use super::{CrateDebugContext}; +use super::CrateDebugContext; use crate::abi; use crate::value::Value; use rustc_codegen_ssa::traits::*; @@ -30,7 +30,7 @@ use rustc::ty::Instance; use rustc::ty::{self, AdtKind, ParamEnv, Ty, TyCtxt}; use rustc::ty::layout::{self, Align, Integer, IntegerExt, LayoutOf, PrimitiveExt, Size, TyLayout, VariantIdx}; -use rustc::ty::subst::UnpackedKind; +use rustc::ty::subst::{GenericArgKind, SubstsRef}; use rustc::session::config::{self, DebugInfo}; use rustc::util::nodemap::FxHashMap; use rustc_fs_util::path_to_c_string; @@ -46,7 +46,7 @@ use std::iter; use std::ptr; use std::path::{Path, PathBuf}; use syntax::ast; -use syntax::symbol::{Interner, InternedString}; +use syntax::symbol::{Interner, Symbol}; use syntax_pos::{self, Span, FileName}; impl PartialEq for llvm::Metadata { @@ -187,7 +187,7 @@ impl TypeMap<'ll, 'tcx> { // The hasher we are using to generate the UniqueTypeId. We want // something that provides more than the 64 bits of the DefaultHasher. 
- let mut hasher = StableHasher::::new(); + let mut hasher = StableHasher::new(); let mut hcx = cx.tcx.create_stable_hashing_context(); let type_ = cx.tcx.erase_regions(&type_); hcx.while_hashing_spans(false, |hcx| { @@ -195,7 +195,7 @@ impl TypeMap<'ll, 'tcx> { type_.hash_stable(hcx, &mut hasher); }); }); - let unique_type_id = hasher.finish().to_hex(); + let unique_type_id = hasher.finish::().to_hex(); let key = self.unique_id_interner.intern(&unique_type_id); self.type_to_unique_id.insert(type_, UniqueTypeId(key)); @@ -340,7 +340,7 @@ fn fixed_vec_metadata( let (size, align) = cx.size_and_align_of(array_or_slice_type); - let upper_bound = match array_or_slice_type.sty { + let upper_bound = match array_or_slice_type.kind { ty::Array(_, len) => len.eval_usize(cx.tcx, ty::ParamEnv::reveal_all()) as c_longlong, _ => -1 }; @@ -427,7 +427,7 @@ fn subroutine_type_metadata( let signature_metadata: Vec<_> = iter::once( // return type - match signature.output().sty { + match signature.output().kind { ty::Tuple(ref tys) if tys.is_empty() => None, _ => Some(type_metadata(cx, signature.output(), span)) } @@ -466,7 +466,7 @@ fn trait_pointer_metadata( // type is assigned the correct name, size, namespace, and source location. // However, it does not describe the trait's methods. - let containing_scope = match trait_type.sty { + let containing_scope = match trait_type.kind { ty::Dynamic(ref data, ..) => data.principal_def_id().map(|did| get_namespace_for_item(cx, did)), _ => { @@ -563,7 +563,7 @@ pub fn type_metadata( debug!("type_metadata: {:?}", t); let ptr_metadata = |ty: Ty<'tcx>| { - match ty.sty { + match ty.kind { ty::Slice(typ) => { Ok(vec_slice_metadata(cx, t, typ, unique_type_id, usage_site_span)) } @@ -591,7 +591,7 @@ pub fn type_metadata( } }; - let MetadataCreationResult { metadata, already_stored_in_typemap } = match t.sty { + let MetadataCreationResult { metadata, already_stored_in_typemap } = match t.kind { ty::Never | ty::Bool | ty::Char | @@ -682,7 +682,7 @@ pub fn type_metadata( } ty::Closure(def_id, substs) => { - let upvar_tys : Vec<_> = substs.upvar_tys(def_id, cx.tcx).collect(); + let upvar_tys : Vec<_> = substs.as_closure().upvar_tys(def_id, cx.tcx).collect(); let containing_scope = get_namespace_for_item(cx, def_id); prepare_tuple_metadata(cx, t, @@ -692,9 +692,10 @@ pub fn type_metadata( Some(containing_scope)).finalize(cx) } ty::Generator(def_id, substs, _) => { - let upvar_tys : Vec<_> = substs.prefix_tys(def_id, cx.tcx).map(|t| { - cx.tcx.normalize_erasing_regions(ParamEnv::reveal_all(), t) - }).collect(); + let upvar_tys : Vec<_> = substs + .as_generator().prefix_tys(def_id, cx.tcx).map(|t| { + cx.tcx.normalize_erasing_regions(ParamEnv::reveal_all(), t) + }).collect(); prepare_enum_metadata(cx, t, def_id, @@ -835,7 +836,7 @@ fn file_metadata_raw(cx: &CodegenCx<'ll, '_>, fn basic_type_metadata(cx: &CodegenCx<'ll, 'tcx>, t: Ty<'tcx>) -> &'ll DIType { debug!("basic_type_metadata: {:?}", t); - let (name, encoding) = match t.sty { + let (name, encoding) = match t.kind { ty::Never => ("!", DW_ATE_unsigned), ty::Tuple(ref elements) if elements.is_empty() => ("()", DW_ATE_unsigned), @@ -960,9 +961,9 @@ pub fn compile_unit_metadata( file_metadata, producer.as_ptr(), tcx.sess.opts.optimize != config::OptLevel::No, - flags.as_ptr() as *const _, + flags.as_ptr().cast(), 0, - split_name.as_ptr() as *const _, + split_name.as_ptr().cast(), kind); if tcx.sess.opts.debugging_opts.profile { @@ -991,7 +992,7 @@ pub fn compile_unit_metadata( if 
tcx.sess.opts.target_triple.triple().starts_with("wasm32") { let name_metadata = llvm::LLVMMDStringInContext( debug_context.llcontext, - rustc_producer.as_ptr() as *const _, + rustc_producer.as_ptr().cast(), rustc_producer.as_bytes().len() as c_uint, ); llvm::LLVMAddNamedMetadataOperand( @@ -1145,7 +1146,7 @@ fn prepare_struct_metadata( ) -> RecursiveTypeDescription<'ll, 'tcx> { let struct_name = compute_debuginfo_type_name(cx.tcx, struct_type, false); - let (struct_def_id, variant) = match struct_type.sty { + let (struct_def_id, variant) = match struct_type.kind { ty::Adt(def, _) => (def.did, def.non_enum_variant()), _ => bug!("prepare_struct_metadata on a non-ADT") }; @@ -1268,7 +1269,7 @@ fn prepare_union_metadata( ) -> RecursiveTypeDescription<'ll, 'tcx> { let union_name = compute_debuginfo_type_name(cx.tcx, union_type, false); - let (union_def_id, variant) = match union_type.sty { + let (union_def_id, variant) = match union_type.kind { ty::Adt(def, _) => (def.did, def.non_enum_variant()), _ => bug!("prepare_union_metadata on a non-ADT") }; @@ -1334,11 +1335,11 @@ impl EnumMemberDescriptionFactory<'ll, 'tcx> { fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>) -> Vec> { let variant_info_for = |index: VariantIdx| { - match &self.enum_type.sty { + match &self.enum_type.kind { ty::Adt(adt, _) => VariantInfo::Adt(&adt.variants[index]), ty::Generator(def_id, substs, _) => { let generator_layout = cx.tcx.generator_layout(*def_id); - VariantInfo::Generator(*substs, generator_layout, index) + VariantInfo::Generator(substs, generator_layout, index) } _ => bug!(), } @@ -1354,7 +1355,7 @@ impl EnumMemberDescriptionFactory<'ll, 'tcx> { match self.layout.variants { layout::Variants::Single { index } => { - if let ty::Adt(adt, _) = &self.enum_type.sty { + if let ty::Adt(adt, _) = &self.enum_type.kind { if adt.variants.is_empty() { return vec![]; } @@ -1611,7 +1612,7 @@ enum EnumDiscriminantInfo<'ll> { #[derive(Copy, Clone)] enum VariantInfo<'tcx> { Adt(&'tcx ty::VariantDef), - Generator(ty::GeneratorSubsts<'tcx>, &'tcx GeneratorLayout<'tcx>, VariantIdx), + Generator(SubstsRef<'tcx>, &'tcx GeneratorLayout<'tcx>, VariantIdx), } impl<'tcx> VariantInfo<'tcx> { @@ -1619,7 +1620,7 @@ impl<'tcx> VariantInfo<'tcx> { match self { VariantInfo::Adt(variant) => f(&variant.ident.as_str()), VariantInfo::Generator(substs, _, variant_index) => - f(&substs.variant_name(*variant_index)), + f(&substs.as_generator().variant_name(*variant_index)), } } @@ -1747,7 +1748,7 @@ fn prepare_enum_metadata( let file_metadata = unknown_file_metadata(cx); let discriminant_type_metadata = |discr: layout::Primitive| { - let enumerators_metadata: Vec<_> = match enum_type.sty { + let enumerators_metadata: Vec<_> = match enum_type.kind { ty::Adt(def, _) => def .discriminants(cx.tcx) .zip(&def.variants) @@ -1763,9 +1764,10 @@ fn prepare_enum_metadata( }) .collect(), ty::Generator(_, substs, _) => substs + .as_generator() .variant_range(enum_def_id, cx.tcx) .map(|variant_index| { - let name = SmallCStr::new(&substs.variant_name(variant_index)); + let name = SmallCStr::new(&substs.as_generator().variant_name(variant_index)); unsafe { Some(llvm::LLVMRustDIBuilderCreateEnumerator( DIB(cx), @@ -1790,7 +1792,7 @@ fn prepare_enum_metadata( let discriminant_base_type_metadata = type_metadata(cx, discr.to_ty(cx.tcx), syntax_pos::DUMMY_SP); - let discriminant_name = match enum_type.sty { + let discriminant_name = match enum_type.kind { ty::Adt(..) => SmallCStr::new(&cx.tcx.item_name(enum_def_id).as_str()), ty::Generator(..) 
=> SmallCStr::new(&enum_name), _ => bug!(), @@ -1881,7 +1883,7 @@ fn prepare_enum_metadata( ); } - let discriminator_name = match &enum_type.sty { + let discriminator_name = match &enum_type.kind { ty::Generator(..) => Some(SmallCStr::new(&"__state")), _ => None, }; @@ -2067,11 +2069,9 @@ fn set_members_of_composite_type(cx: &CodegenCx<'ll, 'tcx>, { let mut composite_types_completed = debug_context(cx).composite_types_completed.borrow_mut(); - if composite_types_completed.contains(&composite_type_metadata) { + if !composite_types_completed.insert(&composite_type_metadata) { bug!("debuginfo::set_members_of_composite_type() - \ Already completed forward declaration re-encountered."); - } else { - composite_types_completed.insert(composite_type_metadata); } } @@ -2091,12 +2091,12 @@ fn set_members_of_composite_type(cx: &CodegenCx<'ll, 'tcx>, // Compute the type parameters for a type, if any, for the given // metadata. fn compute_type_parameters(cx: &CodegenCx<'ll, 'tcx>, ty: Ty<'tcx>) -> Option<&'ll DIArray> { - if let ty::Adt(def, substs) = ty.sty { + if let ty::Adt(def, substs) = ty.kind { if !substs.types().next().is_none() { let generics = cx.tcx.generics_of(def.did); let names = get_parameter_names(cx, generics); let template_params: Vec<_> = substs.iter().zip(names).filter_map(|(kind, name)| { - if let UnpackedKind::Type(ty) = kind.unpack() { + if let GenericArgKind::Type(ty) = kind.unpack() { let actual_type = cx.tcx.normalize_erasing_regions(ParamEnv::reveal_all(), ty); let actual_type_metadata = type_metadata(cx, actual_type, syntax_pos::DUMMY_SP); @@ -2125,7 +2125,7 @@ fn compute_type_parameters(cx: &CodegenCx<'ll, 'tcx>, ty: Ty<'tcx>) -> Option<&' fn get_parameter_names(cx: &CodegenCx<'_, '_>, generics: &ty::Generics) - -> Vec { + -> Vec { let mut names = generics.parent.map_or(vec![], |def_id| { get_parameter_names(cx, cx.tcx.generics_of(def_id)) }); diff --git a/src/librustc_codegen_llvm/debuginfo/mod.rs b/src/librustc_codegen_llvm/debuginfo/mod.rs index 6dedf10f0a..7713fe4700 100644 --- a/src/librustc_codegen_llvm/debuginfo/mod.rs +++ b/src/librustc_codegen_llvm/debuginfo/mod.rs @@ -1,8 +1,7 @@ // See doc.rs for documentation. 
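The `set_members_of_composite_type` hunk above folds the separate contains-then-insert pair into a single call. It relies on `HashSet::insert` returning `false` when the value is already present, which becomes the condition for the `bug!`. A minimal sketch of that standard-library behaviour:

```rust
use std::collections::HashSet;

fn main() {
    let mut completed: HashSet<&str> = HashSet::new();
    // First completion is recorded; `insert` returns true.
    assert!(completed.insert("composite"));
    // Re-encountering the same entry: `insert` returns false, which is the
    // case the patched code turns into an internal compiler error.
    assert!(!completed.insert("composite"));
}
```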
mod doc; -use rustc_codegen_ssa::debuginfo::VariableAccess::*; -use rustc_codegen_ssa::debuginfo::VariableKind::*; +use rustc_codegen_ssa::mir::debuginfo::VariableKind::*; use self::utils::{DIB, span_start, create_DIArray, is_node_local_to_unit}; use self::namespace::mangled_name_of_instance; @@ -11,11 +10,11 @@ use self::metadata::{type_metadata, file_metadata, TypeMap}; use self::source_loc::InternalDebugLocation::{self, UnknownLocation}; use crate::llvm; -use crate::llvm::debuginfo::{DIFile, DIType, DIScope, DIBuilder, DISubprogram, DIArray, DIFlags, +use crate::llvm::debuginfo::{DIFile, DIType, DIScope, DIBuilder, DIArray, DIFlags, DISPFlags, DILexicalBlock}; use rustc::hir::CodegenFnAttrFlags; use rustc::hir::def_id::{DefId, CrateNum, LOCAL_CRATE}; -use rustc::ty::subst::{SubstsRef, UnpackedKind}; +use rustc::ty::subst::{SubstsRef, GenericArgKind}; use crate::abi::Abi; use crate::common::CodegenCx; @@ -26,18 +25,20 @@ use rustc::mir; use rustc::session::config::{self, DebugInfo}; use rustc::util::nodemap::{DefIdMap, FxHashMap, FxHashSet}; use rustc_data_structures::small_c_str::SmallCStr; -use rustc_data_structures::indexed_vec::IndexVec; -use rustc_codegen_ssa::debuginfo::{FunctionDebugContext, MirDebugScope, VariableAccess, - VariableKind, FunctionDebugContextData, type_names}; +use rustc_index::vec::IndexVec; +use rustc_codegen_ssa::debuginfo::type_names; +use rustc_codegen_ssa::mir::debuginfo::{FunctionDebugContext, DebugScope, + VariableKind}; use libc::c_uint; use std::cell::RefCell; use std::ffi::{CStr, CString}; -use syntax_pos::{self, Span, Pos}; +use smallvec::SmallVec; +use syntax_pos::{self, BytePos, Span, Pos}; use syntax::ast; -use syntax::symbol::InternedString; -use rustc::ty::layout::{self, LayoutOf, HasTyCtxt}; +use syntax::symbol::Symbol; +use rustc::ty::layout::{self, LayoutOf, HasTyCtxt, Size}; use rustc_codegen_ssa::traits::*; pub mod gdb; @@ -47,7 +48,7 @@ pub mod metadata; mod create_scope_map; mod source_loc; -pub use self::create_scope_map::{create_mir_scopes}; +pub use self::create_scope_map::compute_mir_scopes; pub use self::metadata::create_global_var_metadata; pub use self::metadata::extend_scope_to_file; pub use self::source_loc::set_source_location; @@ -127,20 +128,20 @@ pub fn finalize(cx: &CodegenCx<'_, '_>) { if cx.sess().target.target.options.is_like_osx || cx.sess().target.target.options.is_like_android { llvm::LLVMRustAddModuleFlag(cx.llmod, - "Dwarf Version\0".as_ptr() as *const _, + "Dwarf Version\0".as_ptr().cast(), 2) } // Indicate that we want CodeView debug information on MSVC if cx.sess().target.target.options.is_like_msvc { llvm::LLVMRustAddModuleFlag(cx.llmod, - "CodeView\0".as_ptr() as *const _, + "CodeView\0".as_ptr().cast(), 1) } // Prevent bitcode readers from deleting the debug info. 
let ptr = "Debug Info Version\0".as_ptr(); - llvm::LLVMRustAddModuleFlag(cx.llmod, ptr as *const _, + llvm::LLVMRustAddModuleFlag(cx.llmod, ptr.cast(), llvm::LLVMRustDebugMetadataVersion()); }; } @@ -148,21 +149,23 @@ pub fn finalize(cx: &CodegenCx<'_, '_>) { impl DebugInfoBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> { fn declare_local( &mut self, - dbg_context: &FunctionDebugContext<&'ll DISubprogram>, + dbg_context: &FunctionDebugContext<&'ll DIScope>, variable_name: ast::Name, variable_type: Ty<'tcx>, scope_metadata: &'ll DIScope, - variable_access: VariableAccess<'_, &'ll Value>, + variable_alloca: Self::Value, + direct_offset: Size, + indirect_offsets: &[Size], variable_kind: VariableKind, span: Span, ) { - assert!(!dbg_context.get_ref(span).source_locations_enabled); + assert!(!dbg_context.source_locations_enabled); let cx = self.cx(); let file = span_start(cx, span).file; let file_metadata = file_metadata(cx, &file.name, - dbg_context.get_ref(span).defining_crate); + dbg_context.defining_crate); let loc = span_start(cx, span); let type_metadata = type_metadata(cx, variable_type, span); @@ -173,49 +176,61 @@ impl DebugInfoBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> { }; let align = cx.align_of(variable_type); - let name = SmallCStr::new(&variable_name.as_str()); - match (variable_access, &[][..]) { - (DirectVariable { alloca }, address_operations) | - (IndirectVariable {alloca, address_operations}, _) => { - let metadata = unsafe { - llvm::LLVMRustDIBuilderCreateVariable( - DIB(cx), - dwarf_tag, - scope_metadata, - name.as_ptr(), - file_metadata, - loc.line as c_uint, - type_metadata, - cx.sess().opts.optimize != config::OptLevel::No, - DIFlags::FlagZero, - argument_index, - align.bytes() as u32, - ) - }; - source_loc::set_debug_location(self, - InternalDebugLocation::new(scope_metadata, loc.line, loc.col.to_usize())); - unsafe { - let debug_loc = llvm::LLVMGetCurrentDebugLocation(self.llbuilder); - let instr = llvm::LLVMRustDIBuilderInsertDeclareAtEnd( - DIB(cx), - alloca, - metadata, - address_operations.as_ptr(), - address_operations.len() as c_uint, - debug_loc, - self.llbb()); + // Convert the direct and indirect offsets to address ops. 
+ let op_deref = || unsafe { llvm::LLVMRustDIBuilderCreateOpDeref() }; + let op_plus_uconst = || unsafe { llvm::LLVMRustDIBuilderCreateOpPlusUconst() }; + let mut addr_ops = SmallVec::<[_; 8]>::new(); - llvm::LLVMSetInstDebugLocation(self.llbuilder, instr); - } - source_loc::set_debug_location(self, UnknownLocation); + if direct_offset.bytes() > 0 { + addr_ops.push(op_plus_uconst()); + addr_ops.push(direct_offset.bytes() as i64); + } + for &offset in indirect_offsets { + addr_ops.push(op_deref()); + if offset.bytes() > 0 { + addr_ops.push(op_plus_uconst()); + addr_ops.push(offset.bytes() as i64); } } + + let name = SmallCStr::new(&variable_name.as_str()); + let metadata = unsafe { + llvm::LLVMRustDIBuilderCreateVariable( + DIB(cx), + dwarf_tag, + scope_metadata, + name.as_ptr(), + file_metadata, + loc.line as c_uint, + type_metadata, + cx.sess().opts.optimize != config::OptLevel::No, + DIFlags::FlagZero, + argument_index, + align.bytes() as u32, + ) + }; + source_loc::set_debug_location(self, + InternalDebugLocation::new(scope_metadata, loc.line, loc.col.to_usize())); + unsafe { + let debug_loc = llvm::LLVMGetCurrentDebugLocation(self.llbuilder); + let instr = llvm::LLVMRustDIBuilderInsertDeclareAtEnd( + DIB(cx), + variable_alloca, + metadata, + addr_ops.as_ptr(), + addr_ops.len() as c_uint, + debug_loc, + self.llbb()); + + llvm::LLVMSetInstDebugLocation(self.llbuilder, instr); + } + source_loc::set_debug_location(self, UnknownLocation); } fn set_source_location( &mut self, - debug_context: &mut FunctionDebugContext<&'ll DISubprogram>, - scope: Option<&'ll DIScope>, + debug_context: &mut FunctionDebugContext<&'ll DIScope>, + scope: &'ll DIScope, span: Span, ) { set_source_location(debug_context, &self, scope, span) @@ -224,7 +239,7 @@ impl DebugInfoBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> { gdb::insert_reference_to_gdb_debug_scripts_section_global(self) } - fn set_var_name(&mut self, value: &'ll Value, name: impl ToString) { + fn set_var_name(&mut self, value: &'ll Value, name: &str) { // Avoid wasting time if LLVM value names aren't even enabled. if self.sess().fewer_names() { return; @@ -254,7 +269,7 @@ impl DebugInfoBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> { Err(_) => return, } - let cname = CString::new(name.to_string()).unwrap(); + let cname = SmallCStr::new(name); unsafe { llvm::LLVMSetValueName(value, cname.as_ptr()); } @@ -268,14 +283,14 @@ impl DebugInfoMethods<'tcx> for CodegenCx<'ll, 'tcx> { sig: ty::FnSig<'tcx>, llfn: &'ll Value, mir: &mir::Body<'_>, - ) -> FunctionDebugContext<&'ll DISubprogram> { + ) -> Option> { if self.sess().opts.debuginfo == DebugInfo::None { - return FunctionDebugContext::DebugInfoDisabled; + return None; } if let InstanceDef::Item(def_id) = instance.def { if self.tcx().codegen_fn_attrs(def_id).flags.contains(CodegenFnAttrFlags::NO_DEBUG) { - return FunctionDebugContext::FunctionWithoutDebugInfo; + return None; } } @@ -284,7 +299,7 @@ impl DebugInfoMethods<'tcx> for CodegenCx<'ll, 'tcx> { // This can be the case for functions inlined from another crate if span.is_dummy() { // FIXME(simulacrum): Probably can't happen; remove. - return FunctionDebugContext::FunctionWithoutDebugInfo; + return None; } let def_id = instance.def_id(); @@ -357,14 +372,23 @@ impl DebugInfoMethods<'tcx> for CodegenCx<'ll, 'tcx> { None) }; - // Initialize fn debug context (including scope map and namespace map) - let fn_debug_context = FunctionDebugContextData { - fn_metadata, + // Initialize fn debug context (including scopes). 
+ // FIXME(eddyb) figure out a way to not need `Option` for `scope_metadata`. + let null_scope = DebugScope { + scope_metadata: None, + file_start_pos: BytePos(0), + file_end_pos: BytePos(0) + }; + let mut fn_debug_context = FunctionDebugContext { + scopes: IndexVec::from_elem(null_scope, &mir.source_scopes), source_locations_enabled: false, defining_crate: def_id.krate, }; - return FunctionDebugContext::RegularContext(fn_debug_context); + // Fill in all the scopes, with the information from the MIR body. + compute_mir_scopes(self, mir, fn_metadata, &mut fn_debug_context); + + return Some(fn_debug_context); fn get_function_signature<'ll, 'tcx>( cx: &CodegenCx<'ll, 'tcx>, @@ -377,7 +401,7 @@ impl DebugInfoMethods<'tcx> for CodegenCx<'ll, 'tcx> { let mut signature = Vec::with_capacity(sig.inputs().len() + 1); // Return type -- llvm::DIBuilder wants this at index 0 - signature.push(match sig.output().sty { + signature.push(match sig.output().kind { ty::Tuple(ref tys) if tys.is_empty() => None, _ => Some(type_metadata(cx, sig.output(), syntax_pos::DUMMY_SP)) }); @@ -401,7 +425,7 @@ impl DebugInfoMethods<'tcx> for CodegenCx<'ll, 'tcx> { // This transformed type is wrong, but these function types are // already inaccurate due to ABI adjustments (see #42800). signature.extend(inputs.iter().map(|&t| { - let t = match t.sty { + let t = match t.kind { ty::Array(ct, _) if (ct == cx.tcx.types.u8) || cx.layout_of(ct).is_zst() => { cx.tcx.mk_imm_ptr(ct) @@ -417,7 +441,7 @@ impl DebugInfoMethods<'tcx> for CodegenCx<'ll, 'tcx> { } if sig.abi == Abi::RustCall && !sig.inputs().is_empty() { - if let ty::Tuple(args) = sig.inputs()[sig.inputs().len() - 1].sty { + if let ty::Tuple(args) = sig.inputs()[sig.inputs().len() - 1].kind { signature.extend( args.iter().map(|argument_type| { Some(type_metadata(cx, argument_type.expect_ty(), syntax_pos::DUMMY_SP)) @@ -460,7 +484,7 @@ impl DebugInfoMethods<'tcx> for CodegenCx<'ll, 'tcx> { let template_params: Vec<_> = if cx.sess().opts.debuginfo == DebugInfo::Full { let names = get_parameter_names(cx, generics); substs.iter().zip(names).filter_map(|(kind, name)| { - if let UnpackedKind::Type(ty) = kind.unpack() { + if let GenericArgKind::Type(ty) = kind.unpack() { let actual_type = cx.tcx.normalize_erasing_regions(ParamEnv::reveal_all(), ty); let actual_type_metadata = @@ -490,7 +514,7 @@ impl DebugInfoMethods<'tcx> for CodegenCx<'ll, 'tcx> { fn get_parameter_names(cx: &CodegenCx<'_, '_>, generics: &ty::Generics) - -> Vec { + -> Vec { let mut names = generics.parent.map_or(vec![], |def_id| { get_parameter_names(cx, cx.tcx.generics_of(def_id)) }); @@ -516,7 +540,7 @@ impl DebugInfoMethods<'tcx> for CodegenCx<'ll, 'tcx> { // Only "class" methods are generally understood by LLVM, // so avoid methods on other types (e.g., `<*mut T>::null`). - match impl_self_ty.sty { + match impl_self_ty.kind { ty::Adt(def, ..) 
if !def.is_box() => { Some(type_metadata(cx, impl_self_ty, syntax_pos::DUMMY_SP)) } @@ -549,14 +573,6 @@ impl DebugInfoMethods<'tcx> for CodegenCx<'ll, 'tcx> { metadata::create_vtable_metadata(self, ty, vtable) } - fn create_mir_scopes( - &self, - mir: &mir::Body<'_>, - debug_context: &mut FunctionDebugContext<&'ll DISubprogram>, - ) -> IndexVec> { - create_scope_map::create_mir_scopes(self, mir, debug_context) - } - fn extend_scope_to_file( &self, scope_metadata: &'ll DIScope, @@ -569,13 +585,4 @@ impl DebugInfoMethods<'tcx> for CodegenCx<'ll, 'tcx> { fn debuginfo_finalize(&self) { finalize(self) } - - fn debuginfo_upvar_ops_sequence(&self, byte_offset_of_var_in_env: u64) -> [i64; 4] { - unsafe { - [llvm::LLVMRustDIBuilderCreateOpDeref(), - llvm::LLVMRustDIBuilderCreateOpPlusUconst(), - byte_offset_of_var_in_env as i64, - llvm::LLVMRustDIBuilderCreateOpDeref()] - } - } } diff --git a/src/librustc_codegen_llvm/debuginfo/namespace.rs b/src/librustc_codegen_llvm/debuginfo/namespace.rs index 889984749f..628d1372b5 100644 --- a/src/librustc_codegen_llvm/debuginfo/namespace.rs +++ b/src/librustc_codegen_llvm/debuginfo/namespace.rs @@ -35,7 +35,7 @@ pub fn item_namespace(cx: &CodegenCx<'ll, '_>, def_id: DefId) -> &'ll DIScope { let namespace_name = match def_key.disambiguated_data.data { DefPathData::CrateRoot => cx.tcx.crate_name(def_id.krate).as_str(), - data => data.as_interned_str().as_str() + data => data.as_symbol().as_str() }; let namespace_name = SmallCStr::new(&namespace_name); diff --git a/src/librustc_codegen_llvm/debuginfo/source_loc.rs b/src/librustc_codegen_llvm/debuginfo/source_loc.rs index dec93a65db..ccb3bde1cb 100644 --- a/src/librustc_codegen_llvm/debuginfo/source_loc.rs +++ b/src/librustc_codegen_llvm/debuginfo/source_loc.rs @@ -2,7 +2,7 @@ use self::InternalDebugLocation::*; use super::utils::{debug_context, span_start}; use super::metadata::UNKNOWN_COLUMN_NUMBER; -use rustc_codegen_ssa::debuginfo::FunctionDebugContext; +use rustc_codegen_ssa::mir::debuginfo::FunctionDebugContext; use crate::llvm; use crate::llvm::debuginfo::DIScope; @@ -18,22 +18,13 @@ use syntax_pos::{Span, Pos}; pub fn set_source_location( debug_context: &FunctionDebugContext, bx: &Builder<'_, 'll, '_>, - scope: Option<&'ll DIScope>, + scope: &'ll DIScope, span: Span, ) { - let function_debug_context = match *debug_context { - FunctionDebugContext::DebugInfoDisabled => return, - FunctionDebugContext::FunctionWithoutDebugInfo => { - set_debug_location(bx, UnknownLocation); - return; - } - FunctionDebugContext::RegularContext(ref data) => data - }; - - let dbg_loc = if function_debug_context.source_locations_enabled { + let dbg_loc = if debug_context.source_locations_enabled { debug!("set_source_location: {}", bx.sess().source_map().span_to_string(span)); let loc = span_start(bx.cx(), span); - InternalDebugLocation::new(scope.unwrap(), loc.line, loc.col.to_usize()) + InternalDebugLocation::new(scope, loc.line, loc.col.to_usize()) } else { UnknownLocation }; diff --git a/src/librustc_codegen_llvm/error_codes.rs b/src/librustc_codegen_llvm/error_codes.rs deleted file mode 100644 index 042e51ed2b..0000000000 --- a/src/librustc_codegen_llvm/error_codes.rs +++ /dev/null @@ -1,38 +0,0 @@ -register_diagnostics! { - -E0511: r##" -Invalid monomorphization of an intrinsic function was used. 
Erroneous code -example: - -```ignore (error-emitted-at-codegen-which-cannot-be-handled-by-compile_fail) -#![feature(platform_intrinsics)] - -extern "platform-intrinsic" { - fn simd_add(a: T, b: T) -> T; -} - -fn main() { - unsafe { simd_add(0, 1); } - // error: invalid monomorphization of `simd_add` intrinsic -} -``` - -The generic type has to be a SIMD type. Example: - -``` -#![feature(repr_simd)] -#![feature(platform_intrinsics)] - -#[repr(simd)] -#[derive(Copy, Clone)] -struct i32x2(i32, i32); - -extern "platform-intrinsic" { - fn simd_add(a: T, b: T) -> T; -} - -unsafe { simd_add(i32x2(0, 0), i32x2(1, 2)); } // ok! -``` -"##, - -} diff --git a/src/librustc_codegen_llvm/intrinsic.rs b/src/librustc_codegen_llvm/intrinsic.rs index 3f3c5ac146..02424956b9 100644 --- a/src/librustc_codegen_llvm/intrinsic.rs +++ b/src/librustc_codegen_llvm/intrinsic.rs @@ -19,10 +19,11 @@ use rustc::mir::interpret::GlobalId; use rustc_codegen_ssa::common::{IntPredicate, TypeKind}; use rustc::hir; use syntax::ast::{self, FloatTy}; +use rustc_target::abi::HasDataLayout; +use rustc_codegen_ssa::common::span_invalid_monomorphization_error; use rustc_codegen_ssa::traits::*; -use rustc::session::Session; use syntax_pos::Span; use std::cmp::Ordering; @@ -91,7 +92,7 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> { let tcx = self.tcx; let callee_ty = instance.ty(tcx); - let (def_id, substs) = match callee_ty.sty { + let (def_id, substs) = match callee_ty.kind { ty::FnDef(def_id, substs) => (def_id, substs), _ => bug!("expected fn item type, found {}", callee_ty) }; @@ -694,6 +695,23 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> { return; } + "ptr_offset_from" => { + let ty = substs.type_at(0); + let pointee_size = self.size_of(ty); + + // This is the same sequence that Clang emits for pointer subtraction. + // It can be neither `nsw` nor `nuw` because the input is treated as + // unsigned but then the output is treated as signed, so neither works. 
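The `ptr_offset_from` arm added below computes a signed element distance: the byte distance between the two pointers, exactly divided by the pointee size (`exactsdiv`). This is the intrinsic presumably surfaced at the library level as `<*const T>::offset_from`; a small usage sketch of that semantics:

```rust
fn main() {
    let xs = [0u32; 8];
    let a: *const u32 = &xs[5];
    let b: *const u32 = &xs[2];
    // Conceptually: (a as isize - b as isize) / size_of::<u32>() as isize,
    // with the division known to be exact because both pointers point into
    // the same array of u32 elements.
    let distance = unsafe { a.offset_from(b) };
    assert_eq!(distance, 3);
}
```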
+ let a = args[0].immediate(); + let b = args[1].immediate(); + let a = self.ptrtoint(a, self.type_isize()); + let b = self.ptrtoint(b, self.type_isize()); + let d = self.sub(a, b); + let pointee_size = self.const_usize(pointee_size.bytes()); + // this is where the signed magic happens (notice the `s` in `exactsdiv`) + self.exactsdiv(d, pointee_size) + } + _ => bug!("unknown intrinsic '{}'", name), }; @@ -724,6 +742,13 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> { self.call(expect, &[cond, self.const_bool(expected)], None) } + fn sideeffect(&mut self) { + if self.tcx.sess.opts.debugging_opts.insert_sideeffect { + let fnname = self.get_intrinsic(&("llvm.sideeffect")); + self.call(fnname, &[], None); + } + } + fn va_start(&mut self, va_list: &'ll Value) -> &'ll Value { let intrinsic = self.cx().get_intrinsic("llvm.va_start"); self.call(intrinsic, &[va_list], None) @@ -810,6 +835,7 @@ fn codegen_msvc_try( ) { let llfn = get_rust_try_fn(bx, &mut |mut bx| { bx.set_personality_fn(bx.eh_personality()); + bx.sideeffect(); let mut normal = bx.build_sibling_block("normal"); let mut catchswitch = bx.build_sibling_block("catchswitch"); @@ -823,7 +849,7 @@ fn codegen_msvc_try( // We're generating an IR snippet that looks like: // // declare i32 @rust_try(%func, %data, %ptr) { - // %slot = alloca i64* + // %slot = alloca [2 x i64] // invoke %func(%data) to label %normal unwind label %catchswitch // // normal: @@ -847,21 +873,25 @@ fn codegen_msvc_try( // // #include // + // struct rust_panic { + // uint64_t x[2]; + // } + // // int bar(void (*foo)(void), uint64_t *ret) { // try { // foo(); // return 0; - // } catch(uint64_t a[2]) { - // ret[0] = a[0]; - // ret[1] = a[1]; + // } catch(rust_panic a) { + // ret[0] = a.x[0]; + // ret[1] = a.x[1]; // return 1; // } // } // // More information can be found in libstd's seh.rs implementation. 
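The C++ sketch above describes the catch payload as a two-word `rust_panic` struct; the next hunk accordingly switches the catch slot from an `i64*` to a whole `[2 x i64]`, so the payload is loaded and stored as one aggregate rather than word by word. A hypothetical Rust-side view of that layout (the type name is taken from the comment, not from the patch):

```rust
// Two 64-bit words, matching the `[2 x i64]` alloca used for the catch slot.
#[repr(C)]
struct RustPanic {
    x: [u64; 2],
}

fn main() {
    assert_eq!(std::mem::size_of::<RustPanic>(), 16);
}
```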
- let i64p = bx.type_ptr_to(bx.type_i64()); - let ptr_align = bx.tcx().data_layout.pointer_align.abi; - let slot = bx.alloca(i64p, ptr_align); + let i64_2 = bx.type_array(bx.type_i64(), 2); + let i64_align = bx.tcx().data_layout.i64_align.abi; + let slot = bx.alloca(i64_2, i64_align); bx.invoke(func, &[data], normal.llbb(), catchswitch.llbb(), None); normal.ret(bx.const_i32(0)); @@ -869,22 +899,15 @@ fn codegen_msvc_try( let cs = catchswitch.catch_switch(None, None, 1); catchswitch.add_handler(cs, catchpad.llbb()); - let tydesc = match bx.tcx().lang_items().msvc_try_filter() { + let tydesc = match bx.tcx().lang_items().eh_catch_typeinfo() { Some(did) => bx.get_static(did), - None => bug!("msvc_try_filter not defined"), + None => bug!("eh_catch_typeinfo not defined, but needed for SEH unwinding"), }; let funclet = catchpad.catch_pad(cs, &[tydesc, bx.const_i32(0), slot]); - let addr = catchpad.load(slot, ptr_align); - let i64_align = bx.tcx().data_layout.i64_align.abi; - let arg1 = catchpad.load(addr, i64_align); - let val1 = bx.const_i32(1); - let gep1 = catchpad.inbounds_gep(addr, &[val1]); - let arg2 = catchpad.load(gep1, i64_align); - let local_ptr = catchpad.bitcast(local_ptr, i64p); - let gep2 = catchpad.inbounds_gep(local_ptr, &[val1]); - catchpad.store(arg1, local_ptr, i64_align); - catchpad.store(arg2, gep2, i64_align); + let payload = catchpad.load(slot, i64_align); + let local_ptr = catchpad.bitcast(local_ptr, bx.type_ptr_to(i64_2)); + catchpad.store(payload, local_ptr, i64_align); catchpad.catch_ret(&funclet, caught.llbb()); caught.ret(bx.const_i32(1)); @@ -933,6 +956,8 @@ fn codegen_gnu_try( // expected to be `*mut *mut u8` for this to actually work, but that's // managed by the standard library. + bx.sideeffect(); + let mut then = bx.build_sibling_block("then"); let mut catch = bx.build_sibling_block("catch"); @@ -950,7 +975,14 @@ fn codegen_gnu_try( // rust_try ignores the selector. let lpad_ty = bx.type_struct(&[bx.type_i8p(), bx.type_i32()], false); let vals = catch.landing_pad(lpad_ty, bx.eh_personality(), 1); - catch.add_clause(vals, bx.const_null(bx.type_i8p())); + let tydesc = match bx.tcx().lang_items().eh_catch_typeinfo() { + Some(tydesc) => { + let tydesc = bx.get_static(tydesc); + bx.bitcast(tydesc, bx.type_i8p()) + } + None => bx.const_null(bx.type_i8p()), + }; + catch.add_clause(vals, tydesc); let ptr = catch.extract_value(vals, 0); let ptr_align = bx.tcx().data_layout.pointer_align.abi; let bitcast = catch.bitcast(local_ptr, bx.type_ptr_to(bx.type_i8p())); @@ -1016,10 +1048,6 @@ fn get_rust_try_fn<'ll, 'tcx>( rust_try } -fn span_invalid_monomorphization_error(a: &Session, b: Span, c: &str) { - span_err!(a, b, E0511, "{}", c); -} - fn generic_simd_intrinsic( bx: &mut Builder<'a, 'll, 'tcx>, name: &str, @@ -1074,7 +1102,7 @@ fn generic_simd_intrinsic( if name == "simd_select_bitmask" { let in_ty = arg_tys[0]; - let m_len = match in_ty.sty { + let m_len = match in_ty.kind { // Note that this `.unwrap()` crashes for isize/usize, that's sort // of intentional as there's not currently a use case for that. 
ty::Int(i) => i.bit_width().unwrap(), @@ -1203,7 +1231,7 @@ fn generic_simd_intrinsic( "mismatched lengths: mask length `{}` != other vector length `{}`", m_len, v_len ); - match m_elem_ty.sty { + match m_elem_ty.kind { ty::Int(_) => {}, _ => return_error!("mask element type is `{}`, expected `i_`", m_elem_ty) } @@ -1218,12 +1246,11 @@ fn generic_simd_intrinsic( // The `fn simd_bitmask(vector) -> unsigned integer` intrinsic takes a // vector mask and returns an unsigned integer containing the most // significant bit (MSB) of each lane. - use rustc_target::abi::HasDataLayout; // If the vector has less than 8 lanes, an u8 is returned with zeroed // trailing bits. let expected_int_bits = in_len.max(8); - match ret_ty.sty { + match ret_ty.kind { ty::Uint(i) if i.bit_width() == Some(expected_int_bits) => (), _ => return_error!( "bitmask `{}`, expected `u{}`", @@ -1232,7 +1259,7 @@ fn generic_simd_intrinsic( } // Integer vector : - let (i_xn, in_elem_bitwidth) = match in_elem.sty { + let (i_xn, in_elem_bitwidth) = match in_elem.kind { ty::Int(i) => ( args[0].immediate(), i.bit_width().unwrap_or(bx.data_layout().pointer_size.bits() as _) @@ -1288,7 +1315,7 @@ fn generic_simd_intrinsic( } } } - let ety = match in_elem.sty { + let ety = match in_elem.kind { ty::Float(f) if f.bit_width() == 32 => { if in_len < 2 || in_len > 16 { return_error!( @@ -1375,7 +1402,7 @@ fn generic_simd_intrinsic( // https://github.com/llvm-mirror/llvm/blob/master/include/llvm/IR/Intrinsics.h#L81 fn llvm_vector_str(elem_ty: Ty<'_>, vec_len: usize, no_pointers: usize) -> String { let p0s: String = "p0".repeat(no_pointers); - match elem_ty.sty { + match elem_ty.kind { ty::Int(v) => format!("v{}{}i{}", vec_len, p0s, v.bit_width().unwrap()), ty::Uint(v) => format!("v{}{}i{}", vec_len, p0s, v.bit_width().unwrap()), ty::Float(v) => format!("v{}{}f{}", vec_len, p0s, v.bit_width()), @@ -1386,7 +1413,7 @@ fn generic_simd_intrinsic( fn llvm_vector_ty(cx: &CodegenCx<'ll, '_>, elem_ty: Ty<'_>, vec_len: usize, mut no_pointers: usize) -> &'ll Type { // FIXME: use cx.layout_of(ty).llvm_type() ? 
- let mut elem_ty = match elem_ty.sty { + let mut elem_ty = match elem_ty.kind { ty::Int(v) => cx.type_int_from_ty( v), ty::Uint(v) => cx.type_uint_from_ty( v), ty::Float(v) => cx.type_float_from_ty( v), @@ -1430,7 +1457,7 @@ fn generic_simd_intrinsic( // This counts how many pointers fn ptr_count(t: Ty<'_>) -> usize { - match t.sty { + match t.kind { ty::RawPtr(p) => 1 + ptr_count(p.ty), _ => 0, } @@ -1438,7 +1465,7 @@ fn generic_simd_intrinsic( // Non-ptr type fn non_ptr(t: Ty<'_>) -> Ty<'_> { - match t.sty { + match t.kind { ty::RawPtr(p) => non_ptr(p.ty), _ => t, } @@ -1446,7 +1473,7 @@ fn generic_simd_intrinsic( // The second argument must be a simd vector with an element type that's a pointer // to the element type of the first argument - let (pointer_count, underlying_ty) = match arg_tys[1].simd_type(tcx).sty { + let (pointer_count, underlying_ty) = match arg_tys[1].simd_type(tcx).kind { ty::RawPtr(p) if p.ty == in_elem => (ptr_count(arg_tys[1].simd_type(tcx)), non_ptr(arg_tys[1].simd_type(tcx))), _ => { @@ -1463,7 +1490,7 @@ fn generic_simd_intrinsic( assert_eq!(underlying_ty, non_ptr(arg_tys[0].simd_type(tcx))); // The element type of the third argument must be a signed integer type of any width: - match arg_tys[2].simd_type(tcx).sty { + match arg_tys[2].simd_type(tcx).kind { ty::Int(_) => (), _ => { require!(false, "expected element type `{}` of third argument `{}` \ @@ -1529,7 +1556,7 @@ fn generic_simd_intrinsic( // This counts how many pointers fn ptr_count(t: Ty<'_>) -> usize { - match t.sty { + match t.kind { ty::RawPtr(p) => 1 + ptr_count(p.ty), _ => 0, } @@ -1537,7 +1564,7 @@ fn generic_simd_intrinsic( // Non-ptr type fn non_ptr(t: Ty<'_>) -> Ty<'_> { - match t.sty { + match t.kind { ty::RawPtr(p) => non_ptr(p.ty), _ => t, } @@ -1545,7 +1572,7 @@ fn generic_simd_intrinsic( // The second argument must be a simd vector with an element type that's a pointer // to the element type of the first argument - let (pointer_count, underlying_ty) = match arg_tys[1].simd_type(tcx).sty { + let (pointer_count, underlying_ty) = match arg_tys[1].simd_type(tcx).kind { ty::RawPtr(p) if p.ty == in_elem && p.mutbl == hir::MutMutable => (ptr_count(arg_tys[1].simd_type(tcx)), non_ptr(arg_tys[1].simd_type(tcx))), @@ -1563,7 +1590,7 @@ fn generic_simd_intrinsic( assert_eq!(underlying_ty, non_ptr(arg_tys[0].simd_type(tcx))); // The element type of the third argument must be a signed integer type of any width: - match arg_tys[2].simd_type(tcx).sty { + match arg_tys[2].simd_type(tcx).kind { ty::Int(_) => (), _ => { require!(false, "expected element type `{}` of third argument `{}` \ @@ -1612,7 +1639,7 @@ fn generic_simd_intrinsic( require!(ret_ty == in_elem, "expected return type `{}` (element of input `{}`), found `{}`", in_elem, in_ty, ret_ty); - return match in_elem.sty { + return match in_elem.kind { ty::Int(_) | ty::Uint(_) => { let r = bx.$integer_reduce(args[0].immediate()); if $ordered { @@ -1669,7 +1696,7 @@ unsupported {} from `{}` with element `{}` of size `{}` to `{}`"#, require!(ret_ty == in_elem, "expected return type `{}` (element of input `{}`), found `{}`", in_elem, in_ty, ret_ty); - return match in_elem.sty { + return match in_elem.kind { ty::Int(_i) => { Ok(bx.$int_red(args[0].immediate(), true)) }, @@ -1704,7 +1731,7 @@ unsupported {} from `{}` with element `{}` of size `{}` to `{}`"#, in_elem, in_ty, ret_ty); args[0].immediate() } else { - match in_elem.sty { + match in_elem.kind { ty::Int(_) | ty::Uint(_) => {}, _ => { return_error!("unsupported {} from `{}` with element `{}` to 
`{}`", @@ -1717,7 +1744,7 @@ unsupported {} from `{}` with element `{}` of size `{}` to `{}`"#, let i1xn = bx.type_vector(i1, in_len as u64); bx.trunc(args[0].immediate(), i1xn) }; - return match in_elem.sty { + return match in_elem.kind { ty::Int(_) | ty::Uint(_) => { let r = bx.$red(input); Ok( @@ -1758,7 +1785,7 @@ unsupported {} from `{}` with element `{}` of size `{}` to `{}`"#, enum Style { Float, Int(/* is signed? */ bool), Unsupported } - let (in_style, in_width) = match in_elem.sty { + let (in_style, in_width) = match in_elem.kind { // vectors of pointer-sized integers should've been // disallowed before here, so this unwrap is safe. ty::Int(i) => (Style::Int(true), i.bit_width().unwrap()), @@ -1766,7 +1793,7 @@ unsupported {} from `{}` with element `{}` of size `{}` to `{}`"#, ty::Float(f) => (Style::Float, f.bit_width()), _ => (Style::Unsupported, 0) }; - let (out_style, out_width) = match out_elem.sty { + let (out_style, out_width) = match out_elem.kind { ty::Int(i) => (Style::Int(true), i.bit_width().unwrap()), ty::Uint(u) => (Style::Int(false), u.bit_width().unwrap()), ty::Float(f) => (Style::Float, f.bit_width()), @@ -1816,7 +1843,7 @@ unsupported {} from `{}` with element `{}` of size `{}` to `{}`"#, macro_rules! arith { ($($name: ident: $($($p: ident),* => $call: ident),*;)*) => { $(if name == stringify!($name) { - match in_elem.sty { + match in_elem.kind { $($(ty::$p(_))|* => { return Ok(bx.$call(args[0].immediate(), args[1].immediate())) })* @@ -1850,7 +1877,7 @@ unsupported {} from `{}` with element `{}` of size `{}` to `{}`"#, let rhs = args[1].immediate(); let is_add = name == "simd_saturating_add"; let ptr_bits = bx.tcx().data_layout.pointer_size.bits() as _; - let (signed, elem_width, elem_ty) = match in_elem.sty { + let (signed, elem_width, elem_ty) = match in_elem.kind { ty::Int(i) => ( true, @@ -1896,7 +1923,7 @@ unsupported {} from `{}` with element `{}` of size `{}` to `{}`"#, // FIXME: there’s multiple of this functions, investigate using some of the already existing // stuffs. 
fn int_type_width_signed(ty: Ty<'_>, cx: &CodegenCx<'_, '_>) -> Option<(u64, bool)> { - match ty.sty { + match ty.kind { ty::Int(t) => Some((match t { ast::IntTy::Isize => cx.tcx.sess.target.isize_ty.bit_width().unwrap() as u64, ast::IntTy::I8 => 8, @@ -1920,7 +1947,7 @@ fn int_type_width_signed(ty: Ty<'_>, cx: &CodegenCx<'_, '_>) -> Option<(u64, boo // Returns the width of a float Ty // Returns None if the type is not a float fn float_type_width(ty: Ty<'_>) -> Option { - match ty.sty { + match ty.kind { ty::Float(t) => Some(t.bit_width() as u64), _ => None, } diff --git a/src/librustc_codegen_llvm/lib.rs b/src/librustc_codegen_llvm/lib.rs index 34e39af3c3..e7562c399b 100644 --- a/src/librustc_codegen_llvm/lib.rs +++ b/src/librustc_codegen_llvm/lib.rs @@ -19,7 +19,6 @@ #![feature(link_args)] #![feature(static_nobundle)] #![feature(trusted_len)] -#![feature(mem_take)] use back::write::{create_target_machine, create_informational_target_machine}; use syntax_pos::symbol::Symbol; @@ -31,6 +30,7 @@ extern crate libc; #[macro_use] extern crate rustc; extern crate rustc_target; #[macro_use] extern crate rustc_data_structures; +extern crate rustc_index; extern crate rustc_incremental; extern crate rustc_codegen_utils; extern crate rustc_codegen_ssa; @@ -38,7 +38,8 @@ extern crate rustc_fs_util; extern crate rustc_driver as _; #[macro_use] extern crate log; -#[macro_use] extern crate syntax; +extern crate smallvec; +extern crate syntax; extern crate syntax_pos; extern crate rustc_errors as errors; @@ -48,15 +49,14 @@ use rustc_codegen_ssa::back::lto::{SerializedModule, LtoModuleCodegen, ThinModul use rustc_codegen_ssa::CompiledModule; use errors::{FatalError, Handler}; use rustc::dep_graph::WorkProduct; -use syntax::ext::allocator::AllocatorKind; -use syntax_pos::symbol::InternedString; +use syntax::expand::allocator::AllocatorKind; pub use llvm_util::target_features; use std::any::Any; -use std::sync::{mpsc, Arc}; +use std::sync::Arc; use std::ffi::CStr; use rustc::dep_graph::DepGraph; -use rustc::middle::cstore::{EncodedMetadata, MetadataLoader}; +use rustc::middle::cstore::{EncodedMetadata, MetadataLoaderDyn}; use rustc::session::Session; use rustc::session::config::{OutputFilenames, OutputType, PrintRequest, OptLevel}; use rustc::ty::{self, TyCtxt}; @@ -64,8 +64,6 @@ use rustc::util::common::ErrorReported; use rustc_codegen_ssa::ModuleCodegen; use rustc_codegen_utils::codegen_backend::CodegenBackend; -mod error_codes; - mod back { pub mod archive; pub mod bytecode; @@ -122,8 +120,12 @@ impl ExtraBackendMethods for LlvmCodegenBackend { ) { unsafe { allocator::codegen(tcx, mods, kind) } } - fn compile_codegen_unit(&self, tcx: TyCtxt<'_>, cgu_name: InternedString) { - base::compile_codegen_unit(tcx, cgu_name); + fn compile_codegen_unit( + &self, tcx: TyCtxt<'_>, + cgu_name: Symbol, + tx: &std::sync::mpsc::Sender>, + ) { + base::compile_codegen_unit(tcx, cgu_name, tx); } fn target_machine_factory( &self, @@ -254,28 +256,19 @@ impl CodegenBackend for LlvmCodegenBackend { llvm_util::print_version(); } - fn diagnostics(&self) -> &[(&'static str, &'static str)] { - &error_codes::DIAGNOSTICS - } - fn target_features(&self, sess: &Session) -> Vec { target_features(sess) } - fn metadata_loader(&self) -> Box { + fn metadata_loader(&self) -> Box { box metadata::LlvmMetadataLoader } fn provide(&self, providers: &mut ty::query::Providers<'_>) { - rustc_codegen_utils::symbol_names::provide(providers); - rustc_codegen_ssa::back::symbol_export::provide(providers); - 
rustc_codegen_ssa::base::provide_both(providers); attributes::provide(providers); } fn provide_extern(&self, providers: &mut ty::query::Providers<'_>) { - rustc_codegen_ssa::back::symbol_export::provide_extern(providers); - rustc_codegen_ssa::base::provide_both(providers); attributes::provide_extern(providers); } @@ -284,10 +277,9 @@ impl CodegenBackend for LlvmCodegenBackend { tcx: TyCtxt<'tcx>, metadata: EncodedMetadata, need_metadata_module: bool, - rx: mpsc::Receiver>, ) -> Box { box rustc_codegen_ssa::base::codegen_crate( - LlvmCodegenBackend(()), tcx, metadata, need_metadata_module, rx) + LlvmCodegenBackend(()), tcx, metadata, need_metadata_module) } fn join_codegen_and_link( @@ -320,8 +312,9 @@ impl CodegenBackend for LlvmCodegenBackend { // Run the linker on any artifacts that resulted from the LLVM run. // This should produce either a finished executable or library. - sess.profiler(|p| p.start_activity("link_crate")); time(sess, "linking", || { + let _prof_timer = sess.prof.generic_activity("link_crate"); + use rustc_codegen_ssa::back::link::link_binary; use crate::back::archive::LlvmArchiveBuilder; @@ -334,7 +327,6 @@ impl CodegenBackend for LlvmCodegenBackend { target_cpu, ); }); - sess.profiler(|p| p.end_activity("link_crate")); // Now that we won't touch anything in the incremental compilation directory // any more, we can finalize it (which involves renaming it) diff --git a/src/librustc_codegen_llvm/llvm/ffi.rs b/src/librustc_codegen_llvm/llvm/ffi.rs index b07214fdc0..c69942ef3f 100644 --- a/src/librustc_codegen_llvm/llvm/ffi.rs +++ b/src/librustc_codegen_llvm/llvm/ffi.rs @@ -50,7 +50,7 @@ pub enum CallConv { } /// LLVMRustLinkage -#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +#[derive(PartialEq)] #[repr(C)] pub enum Linkage { ExternalLinkage = 0, @@ -67,7 +67,6 @@ pub enum Linkage { } // LLVMRustVisibility -#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] #[repr(C)] pub enum Visibility { Default = 0, @@ -510,6 +509,7 @@ extern { pub type Module; } extern { pub type Context; } extern { pub type Type; } extern { pub type Value; } +extern { pub type ConstantInt; } extern { pub type Metadata; } extern { pub type BasicBlock; } #[repr(C)] @@ -719,8 +719,8 @@ extern "C" { pub fn LLVMConstInt(IntTy: &Type, N: c_ulonglong, SignExtend: Bool) -> &Value; pub fn LLVMConstIntOfArbitraryPrecision(IntTy: &Type, Wn: c_uint, Ws: *const u64) -> &Value; pub fn LLVMConstReal(RealTy: &Type, N: f64) -> &Value; - pub fn LLVMConstIntGetZExtValue(ConstantVal: &Value) -> c_ulonglong; - pub fn LLVMRustConstInt128Get(ConstantVal: &Value, SExt: bool, + pub fn LLVMConstIntGetZExtValue(ConstantVal: &ConstantInt) -> c_ulonglong; + pub fn LLVMRustConstInt128Get(ConstantVal: &ConstantInt, SExt: bool, high: &mut u64, low: &mut u64) -> bool; @@ -1666,7 +1666,7 @@ extern "C" { #[allow(improper_ctypes)] pub fn LLVMRustWriteValueToString(value_ref: &Value, s: &RustString); - pub fn LLVMIsAConstantInt(value_ref: &Value) -> Option<&Value>; + pub fn LLVMIsAConstantInt(value_ref: &Value) -> Option<&ConstantInt>; pub fn LLVMRustPassKind(Pass: &Pass) -> PassKind; pub fn LLVMRustFindAndCreatePass(Pass: *const c_char) -> Option<&'static mut Pass>; diff --git a/src/librustc_codegen_llvm/llvm_util.rs b/src/librustc_codegen_llvm/llvm_util.rs index 541d3d98b7..85e0b6d465 100644 --- a/src/librustc_codegen_llvm/llvm_util.rs +++ b/src/librustc_codegen_llvm/llvm_util.rs @@ -3,7 +3,7 @@ use crate::llvm; use syntax_pos::symbol::Symbol; use rustc::session::Session; use rustc::session::config::PrintRequest; -use 
rustc_target::spec::MergeFunctions; +use rustc_target::spec::{MergeFunctions, PanicStrategy}; use libc::c_int; use std::ffi::CString; use syntax::feature_gate::UnstableFeatures; @@ -73,6 +73,11 @@ unsafe fn configure_llvm(sess: &Session) { } } + if sess.target.target.target_os == "emscripten" && + sess.panic_strategy() == PanicStrategy::Unwind { + add("-enable-emscripten-cxx-exceptions"); + } + // HACK(eddyb) LLVM inserts `llvm.assume` calls to preserve align attributes // during inlining. Unfortunately these may block other optimizations. add("-preserve-alignment-assumptions-during-inlining=false"); @@ -257,8 +262,7 @@ pub fn target_feature_whitelist(sess: &Session) "hexagon" => HEXAGON_WHITELIST, "mips" | "mips64" => MIPS_WHITELIST, "powerpc" | "powerpc64" => POWERPC_WHITELIST, - // wasm32 on emscripten does not support these target features - "wasm32" if !sess.target.target.options.is_like_emscripten => WASM_WHITELIST, + "wasm32" => WASM_WHITELIST, _ => &[], } } diff --git a/src/librustc_codegen_llvm/type_of.rs b/src/librustc_codegen_llvm/type_of.rs index 36a9ff0a2d..d921bbc96a 100644 --- a/src/librustc_codegen_llvm/type_of.rs +++ b/src/librustc_codegen_llvm/type_of.rs @@ -43,7 +43,7 @@ fn uncached_llvm_type<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, layout::Abi::Aggregate { .. } => {} } - let name = match layout.ty.sty { + let name = match layout.ty.kind { ty::Closure(..) | ty::Generator(..) | ty::Adt(..) | @@ -56,16 +56,16 @@ fn uncached_llvm_type<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, let printer = DefPathBasedNames::new(cx.tcx, true, true); printer.push_type_name(layout.ty, &mut name, false); if let (&ty::Adt(def, _), &layout::Variants::Single { index }) - = (&layout.ty.sty, &layout.variants) + = (&layout.ty.kind, &layout.variants) { if def.is_enum() && !def.variants.is_empty() { write!(&mut name, "::{}", def.variants[index].ident).unwrap(); } } if let (&ty::Generator(_, substs, _), &layout::Variants::Single { index }) - = (&layout.ty.sty, &layout.variants) + = (&layout.ty.kind, &layout.variants) { - write!(&mut name, "::{}", substs.variant_name(index)).unwrap(); + write!(&mut name, "::{}", substs.as_generator().variant_name(index)).unwrap(); } Some(name) } @@ -226,7 +226,7 @@ impl<'tcx> LayoutLlvmExt<'tcx> for TyLayout<'tcx> { if let Some(&llty) = cx.scalar_lltypes.borrow().get(&self.ty) { return llty; } - let llty = match self.ty.sty { + let llty = match self.ty.kind { ty::Ref(_, ty, _) | ty::RawPtr(ty::TypeAndMut { ty, .. }) => { cx.type_ptr_to(cx.layout_of(ty).llvm_type(cx)) @@ -318,7 +318,7 @@ impl<'tcx> LayoutLlvmExt<'tcx> for TyLayout<'tcx> { index: usize, immediate: bool) -> &'a Type { // HACK(eddyb) special-case fat pointers until LLVM removes // pointee types, to avoid bitcasting every `OperandRef::deref`. - match self.ty.sty { + match self.ty.kind { ty::Ref(..) 
| ty::RawPtr(_) => { return self.field(cx, index).llvm_type(cx); diff --git a/src/librustc_codegen_ssa/Cargo.toml b/src/librustc_codegen_ssa/Cargo.toml index bc028d6624..2eaae50591 100644 --- a/src/librustc_codegen_ssa/Cargo.toml +++ b/src/librustc_codegen_ssa/Cargo.toml @@ -10,14 +10,13 @@ path = "lib.rs" test = false [dependencies] -bitflags = "1.0.4" +bitflags = "1.2.1" cc = "1.0.1" num_cpus = "1.0" memmap = "0.6" log = "0.4.5" libc = "0.2.44" jobserver = "0.1.11" -parking_lot = "0.9" tempfile = "3.1" rustc_serialize = { path = "../libserialize", package = "serialize" } @@ -30,4 +29,5 @@ rustc_data_structures = { path = "../librustc_data_structures"} rustc_errors = { path = "../librustc_errors" } rustc_fs_util = { path = "../librustc_fs_util" } rustc_incremental = { path = "../librustc_incremental" } +rustc_index = { path = "../librustc_index" } rustc_target = { path = "../librustc_target" } diff --git a/src/librustc_codegen_ssa/README.md b/src/librustc_codegen_ssa/README.md index c8bb2e7ee9..2a3a4fcc5f 100644 --- a/src/librustc_codegen_ssa/README.md +++ b/src/librustc_codegen_ssa/README.md @@ -1,121 +1,3 @@ -# Refactoring of `rustc_codegen_llvm` -by Denis Merigoux, October 23rd 2018 +Please read the rustc-guide chapter on [Backend Agnostic Codegen][bac]. -## State of the code before the refactoring - -All the code related to the compilation of MIR into LLVM IR was contained inside the `rustc_codegen_llvm` crate. Here is the breakdown of the most important elements: -* the `back` folder (7,800 LOC) implements the mechanisms for creating the different object files and archive through LLVM, but also the communication mechanisms for parallel code generation; -* the `debuginfo` (3,200 LOC) folder contains all code that passes debug information down to LLVM; -* the `llvm` (2,200 LOC) folder defines the FFI necessary to communicate with LLVM using the C++ API; -* the `mir` (4,300 LOC) folder implements the actual lowering from MIR to LLVM IR; -* the `base.rs` (1,300 LOC) file contains some helper functions but also the high-level code that launches the code generation and distributes the work. -* the `builder.rs` (1,200 LOC) file contains all the functions generating individual LLVM IR instructions inside a basic block; -* the `common.rs` (450 LOC) contains various helper functions and all the functions generating LLVM static values; -* the `type_.rs` (300 LOC) defines most of the type translations to LLVM IR. - -The goal of this refactoring is to separate inside this crate code that is specific to the LLVM from code that can be reused for other rustc backends. For instance, the `mir` folder is almost entirely backend-specific but it relies heavily on other parts of the crate. The separation of the code must not affect the logic of the code nor its performance. - -For these reasons, the separation process involves two transformations that have to be done at the same time for the resulting code to compile : - -1. replace all the LLVM-specific types by generics inside function signatures and structure definitions; -2. encapsulate all functions calling the LLVM FFI inside a set of traits that will define the interface between backend-agnostic code and the backend. - -While the LLVM-specific code will be left in `rustc_codegen_llvm`, all the new traits and backend-agnostic code will be moved in `rustc_codegen_ssa` (name suggestion by @eddyb). 
- -## Generic types and structures - -@irinagpopa started to parametrize the types of `rustc_codegen_llvm` by a generic `Value` type, implemented in LLVM by a reference `&'ll Value`. This work has been extended to all structures inside the `mir` folder and elsewhere, as well as for LLVM's `BasicBlock` and `Type` types. - -The two most important structures for the LLVM codegen are `CodegenCx` and `Builder`. They are parametrized by multiple lifetime parameters and the type for `Value`. - -```rust -struct CodegenCx<'ll, 'tcx> { - /* ... */ -} - -struct Builder<'a, 'll, 'tcx> { - cx: &'a CodegenCx<'ll, 'tcx>, - /* ... */ -} -``` - -`CodegenCx` is used to compile one codegen-unit that can contain multiple functions, whereas `Builder` is created to compile one basic block. - -The code in `rustc_codegen_llvm` has to deal with multiple explicit lifetime parameters, that correspond to the following: -* `'tcx` is the longest lifetime, that corresponds to the original `TyCtxt` containing the program's information; -* `'a` is a short-lived reference of a `CodegenCx` or another object inside a struct; -* `'ll` is the lifetime of references to LLVM objects such as `Value` or `Type`. - -Although there are already many lifetime parameters in the code, making it generic uncovered situations where the borrow-checker was passing only due to the special nature of the LLVM objects manipulated (they are extern pointers). For instance, a additional lifetime parameter had to be added to `LocalAnalyser` in `analyse.rs`, leading to the definition: - -```rust -struct LocalAnalyzer<'mir, 'a, 'tcx> { - /* ... */ -} -``` - -However, the two most important structures `CodegenCx` and `Builder` are not defined in the backend-agnostic code. Indeed, their content is highly specific of the backend and it makes more sense to leave their definition to the backend implementor than to allow just a narrow spot via a generic field for the backend's context. - -## Traits and interface - -Because they have to be defined by the backend, `CodegenCx` and `Builder` will be the structures implementing all the traits defining the backend's interface. These traits are defined in the folder `rustc_codegen_ssa/traits` and all the backend-agnostic code is parametrized by them. For instance, let us explain how a function in `base.rs` is parametrized: - -```rust -pub fn codegen_instance<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( - cx: &'a Bx::CodegenCx, - instance: Instance<'tcx> -) { - /* ... */ -} -``` - -In this signature, we have the two lifetime parameters explained earlier and the master type `Bx` which satisfies the trait `BuilderMethods` corresponding to the interface satisfied by the `Builder` struct. The `BuilderMethods` defines an associated type `Bx::CodegenCx` that itself satisfies the `CodegenMethods` traits implemented by the struct `CodegenCx`. - -On the trait side, here is an example with part of the definition of `BuilderMethods` in `traits/builder.rs`: - -```rust -pub trait BuilderMethods<'a, 'tcx>: - HasCodegen<'tcx> - + DebugInfoBuilderMethods<'tcx> - + ArgTypeMethods<'tcx> - + AbiBuilderMethods<'tcx> - + IntrinsicCallMethods<'tcx> - + AsmBuilderMethods<'tcx> -{ - fn new_block<'b>( - cx: &'a Self::CodegenCx, - llfn: Self::Value, - name: &'b str - ) -> Self; - /* ... */ - fn cond_br( - &mut self, - cond: Self::Value, - then_llbb: Self::BasicBlock, - else_llbb: Self::BasicBlock, - ); - /* ... 
*/ -} -``` - -Finally, a master structure implementing the `ExtraBackendMethods` trait is used for high-level codegen-driving functions like `codegen_crate` in `base.rs`. For LLVM, it is the empty `LlvmCodegenBackend`. `ExtraBackendMethods` should be implemented by the same structure that implements the `CodegenBackend` defined in `rustc_codegen_utils/codegen_backend.rs`. - -During the traitification process, certain functions have been converted from methods of a local structure to methods of `CodegenCx` or `Builder` and a corresponding `self` parameter has been added. Indeed, LLVM stores information internally that it can access when called through its API. This information does not show up in a Rust data structure carried around when these methods are called. However, when implementing a Rust backend for `rustc`, these methods will need information from `CodegenCx`, hence the additional parameter (unused in the LLVM implementation of the trait). - -## State of the code after the refactoring - -The traits offer an API which is very similar to the API of LLVM. This is not the best solution since LLVM has a very special way of doing things: when addding another backend, the traits definition might be changed in order to offer more flexibility. - -However, the current separation between backend-agnostic and LLVM-specific code has allows the reuse of a significant part of the old `rustc_codegen_llvm`. Here is the new LOC breakdown between backend-agnostic (BA) and LLVM for the most important elements: - -* `back` folder: 3,800 (BA) vs 4,100 (LLVM); -* `mir` folder: 4,400 (BA) vs 0 (LLVM); -* `base.rs`: 1,100 (BA) vs 250 (LLVM); -* `builder.rs`: 1,400 (BA) vs 0 (LLVM); -* `common.rs`: 350 (BA) vs 350 (LLVM); - -The `debuginfo` folder has been left almost untouched by the splitting and is specific to LLVM. Only its high-level features have been traitified. - -The new `traits` folder has 1500 LOC only for trait definitions. Overall, the 27,000 LOC-sized old `rustc_codegen_llvm` code has been split into the new 18,500 LOC-sized new `rustc_codegen_llvm` and the 12,000 LOC-sized `rustc_codegen_ssa`. We can say that this refactoring allowed the reuse of approximately 10,000 LOC that would otherwise have had to be duplicated between the multiple backends of `rustc`. - -The refactored version of `rustc`'s backend introduced no regression over the test suite nor in performance benchmark, which is in coherence with the nature of the refactoring that used only compile-time parametricity (no trait objects). 
+[bac]: https://rust-lang.github.io/rustc-guide/codegen/backend-agnostic.html diff --git a/src/librustc_codegen_ssa/back/link.rs b/src/librustc_codegen_ssa/back/link.rs index 9b044d9b45..a2b50ea8e2 100644 --- a/src/librustc_codegen_ssa/back/link.rs +++ b/src/librustc_codegen_ssa/back/link.rs @@ -3,7 +3,7 @@ use rustc::session::{Session, filesearch}; use rustc::session::config::{ - self, RUST_CGU_EXT, DebugInfo, OutputFilenames, OutputType, PrintRequest, Sanitizer + self, DebugInfo, OutputFilenames, OutputType, PrintRequest, Sanitizer }; use rustc::session::search_paths::PathKind; use rustc::middle::dependency_format::Linkage; @@ -15,7 +15,8 @@ use rustc_fs_util::fix_windows_verbatim_for_gcc; use rustc_target::spec::{PanicStrategy, RelroLevel, LinkerFlavor}; use syntax::symbol::Symbol; -use crate::{METADATA_FILENAME, RLIB_BYTECODE_EXTENSION, CrateInfo, CodegenResults}; +use crate::{METADATA_FILENAME, RLIB_BYTECODE_EXTENSION, CrateInfo, + looks_like_rust_object_file, CodegenResults}; use super::archive::ArchiveBuilder; use super::command::Command; use super::linker::Linker; @@ -219,15 +220,24 @@ pub fn get_linker(sess: &Session, linker: &Path, flavor: LinkerFlavor) -> (PathB (linker.to_path_buf(), cmd) } -pub fn each_linked_rlib(sess: &Session, - info: &CrateInfo, - f: &mut dyn FnMut(CrateNum, &Path)) -> Result<(), String> { +pub fn each_linked_rlib( + info: &CrateInfo, + f: &mut dyn FnMut(CrateNum, &Path), +) -> Result<(), String> { let crates = info.used_crates_static.iter(); - let fmts = sess.dependency_formats.borrow(); - let fmts = fmts.get(&config::CrateType::Executable) - .or_else(|| fmts.get(&config::CrateType::Staticlib)) - .or_else(|| fmts.get(&config::CrateType::Cdylib)) - .or_else(|| fmts.get(&config::CrateType::ProcMacro)); + let mut fmts = None; + for (ty, list) in info.dependency_formats.iter() { + match ty { + config::CrateType::Executable | + config::CrateType::Staticlib | + config::CrateType::Cdylib | + config::CrateType::ProcMacro => { + fmts = Some(list); + break; + } + _ => {} + } + } let fmts = match fmts { Some(f) => f, None => return Err("could not find formats for rlibs".to_string()) @@ -314,6 +324,7 @@ fn link_rlib<'a, B: ArchiveBuilder<'a>>(sess: &'a Session, NativeLibraryKind::NativeStatic => {} NativeLibraryKind::NativeStaticNobundle | NativeLibraryKind::NativeFramework | + NativeLibraryKind::NativeRawDylib | NativeLibraryKind::NativeUnknown => continue, } if let Some(name) = lib.name { @@ -406,7 +417,7 @@ fn link_staticlib<'a, B: ArchiveBuilder<'a>>(sess: &'a Session, tempdir); let mut all_native_libs = vec![]; - let res = each_linked_rlib(sess, &codegen_results.crate_info, &mut |cnum, path| { + let res = each_linked_rlib(&codegen_results.crate_info, &mut |cnum, path| { let name = &codegen_results.crate_info.crate_name[&cnum]; let native_libs = &codegen_results.crate_info.native_libraries[&cnum]; @@ -874,7 +885,8 @@ pub fn print_native_static_libs(sess: &Session, all_native_libs: &[NativeLibrary Some(format!("-framework {}", name)) }, // These are included, no need to print them - NativeLibraryKind::NativeStatic => None, + NativeLibraryKind::NativeStatic | + NativeLibraryKind::NativeRawDylib => None, } }) .collect(); @@ -1284,7 +1296,11 @@ pub fn add_local_native_libraries(cmd: &mut dyn Linker, NativeLibraryKind::NativeUnknown => cmd.link_dylib(name), NativeLibraryKind::NativeFramework => cmd.link_framework(name), NativeLibraryKind::NativeStaticNobundle => cmd.link_staticlib(name), - NativeLibraryKind::NativeStatic => cmd.link_whole_staticlib(name, &search_path) 
+ NativeLibraryKind::NativeStatic => cmd.link_whole_staticlib(name, &search_path), + NativeLibraryKind::NativeRawDylib => { + // FIXME(#58713): Proper handling for raw dylibs. + bug!("raw_dylib feature not yet implemented"); + }, } } } @@ -1294,11 +1310,13 @@ pub fn add_local_native_libraries(cmd: &mut dyn Linker, // Rust crates are not considered at all when creating an rlib output. All // dependencies will be linked when producing the final output (instead of // the intermediate rlib version) -fn add_upstream_rust_crates<'a, B: ArchiveBuilder<'a>>(cmd: &mut dyn Linker, - sess: &'a Session, - codegen_results: &CodegenResults, - crate_type: config::CrateType, - tmpdir: &Path) { +fn add_upstream_rust_crates<'a, B: ArchiveBuilder<'a>>( + cmd: &mut dyn Linker, + sess: &'a Session, + codegen_results: &CodegenResults, + crate_type: config::CrateType, + tmpdir: &Path, +) { // All of the heavy lifting has previously been accomplished by the // dependency_format module of the compiler. This is just crawling the // output of that module, adding crates as necessary. @@ -1307,8 +1325,10 @@ fn add_upstream_rust_crates<'a, B: ArchiveBuilder<'a>>(cmd: &mut dyn Linker, // will slurp up the object files inside), and linking to a dynamic library // involves just passing the right -l flag. - let formats = sess.dependency_formats.borrow(); - let data = formats.get(&crate_type).unwrap(); + let (_, data) = codegen_results.crate_info.dependency_formats + .iter() + .find(|(ty, _)| *ty == crate_type) + .expect("failed to find crate type in dependency format list"); // Invoke get_used_crates to ensure that we get a topological sorting of // crates. @@ -1372,7 +1392,9 @@ fn add_upstream_rust_crates<'a, B: ArchiveBuilder<'a>>(cmd: &mut dyn Linker, _ if codegen_results.crate_info.profiler_runtime == Some(cnum) => { add_static_crate::(cmd, sess, codegen_results, tmpdir, crate_type, cnum); } - _ if codegen_results.crate_info.sanitizer_runtime == Some(cnum) => { + _ if codegen_results.crate_info.sanitizer_runtime == Some(cnum) && + crate_type == config::CrateType::Executable => { + // Link the sanitizer runtimes only if we are actually producing an executable link_sanitizer_runtime::(cmd, sess, codegen_results, tmpdir, cnum); } // compiler-builtins are always placed last to ensure that they're @@ -1514,7 +1536,7 @@ fn add_upstream_rust_crates<'a, B: ArchiveBuilder<'a>>(cmd: &mut dyn Linker, let name = cratepath.file_name().unwrap().to_str().unwrap(); let name = &name[3..name.len() - 5]; // chop off lib/.rlib - time_ext(sess.time_extended(), Some(sess), &format!("altering {}.rlib", name), || { + time_ext(sess.time_extended(), &format!("altering {}.rlib", name), || { let mut archive = ::new(sess, &dst, Some(cratepath)); archive.update_symbols(); @@ -1528,23 +1550,9 @@ fn add_upstream_rust_crates<'a, B: ArchiveBuilder<'a>>(cmd: &mut dyn Linker, let canonical = f.replace("-", "_"); let canonical_name = name.replace("-", "_"); - // Look for `.rcgu.o` at the end of the filename to conclude - // that this is a Rust-related object file. 
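The hunks above replace reads of `sess.dependency_formats` with lookups into `crate_info.dependency_formats`, scanning the list for the entry that matches the crate type being linked; keeping the formats in `CrateInfo` appears to be why `each_linked_rlib` no longer needs a `sess` parameter. A minimal sketch of that lookup pattern, using hypothetical simplified stand-ins for `CrateType` and `Linkage` rather than the real rustc types:

```rust
// Hedged sketch: `CrateType` and `Linkage` below are simplified stand-ins,
// not the real rustc definitions.
#[allow(dead_code)]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum CrateType { Executable, Staticlib, Cdylib, ProcMacro, Rlib, Dylib }

#[allow(dead_code)]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Linkage { NotLinked, Static, Dynamic }

/// Dependency formats are stored as `(crate type, per-dependency linkage)` pairs;
/// the linker code picks the entry that matches the output being produced.
fn formats_for(
    deps: &[(CrateType, Vec<Linkage>)],
    crate_type: CrateType,
) -> Option<&[Linkage]> {
    deps.iter()
        .find(|(ty, _)| *ty == crate_type)
        .map(|(_, list)| list.as_slice())
}

fn main() {
    let deps = vec![
        (CrateType::Executable, vec![Linkage::Static, Linkage::Dynamic]),
        (CrateType::Rlib, vec![Linkage::NotLinked, Linkage::NotLinked]),
    ];
    // Same shape as the `.find(|(ty, _)| *ty == crate_type)` lookups in the diff.
    assert_eq!(
        formats_for(&deps, CrateType::Executable),
        Some(&[Linkage::Static, Linkage::Dynamic][..]),
    );
}
```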
- fn looks_like_rust(s: &str) -> bool { - let path = Path::new(s); - let ext = path.extension().and_then(|s| s.to_str()); - if ext != Some(OutputType::Object.extension()) { - return false - } - let ext2 = path.file_stem() - .and_then(|s| Path::new(s).extension()) - .and_then(|s| s.to_str()); - ext2 == Some(RUST_CGU_EXT) - } - let is_rust_object = canonical.starts_with(&canonical_name) && - looks_like_rust(&f); + looks_like_rust_object_file(&f); // If we've been requested to skip all native object files // (those not generated by the rust compiler) then we can skip @@ -1620,10 +1628,12 @@ fn add_upstream_rust_crates<'a, B: ArchiveBuilder<'a>>(cmd: &mut dyn Linker, // generic function calls a native function, then the generic function must // be instantiated in the target crate, meaning that the native symbol must // also be resolved in the target crate. -pub fn add_upstream_native_libraries(cmd: &mut dyn Linker, - sess: &Session, - codegen_results: &CodegenResults, - crate_type: config::CrateType) { +pub fn add_upstream_native_libraries( + cmd: &mut dyn Linker, + sess: &Session, + codegen_results: &CodegenResults, + crate_type: config::CrateType, +) { // Be sure to use a topological sorting of crates because there may be // interdependencies between native libraries. When passing -nodefaultlibs, // for example, almost all native libraries depend on libc, so we have to @@ -1633,8 +1643,10 @@ pub fn add_upstream_native_libraries(cmd: &mut dyn Linker, // This passes RequireStatic, but the actual requirement doesn't matter, // we're just getting an ordering of crate numbers, we're not worried about // the paths. - let formats = sess.dependency_formats.borrow(); - let data = formats.get(&crate_type).unwrap(); + let (_, data) = codegen_results.crate_info.dependency_formats + .iter() + .find(|(ty, _)| *ty == crate_type) + .expect("failed to find crate type in dependency format list"); let crates = &codegen_results.crate_info.used_crates_static; for &(cnum, _) in crates { @@ -1661,7 +1673,11 @@ pub fn add_upstream_native_libraries(cmd: &mut dyn Linker, // ignore statically included native libraries here as we've // already included them when we included the rust library // previously - NativeLibraryKind::NativeStatic => {} + NativeLibraryKind::NativeStatic => {}, + NativeLibraryKind::NativeRawDylib => { + // FIXME(#58713): Proper handling for raw dylibs. + bug!("raw_dylib feature not yet implemented"); + }, } } } diff --git a/src/librustc_codegen_ssa/back/linker.rs b/src/librustc_codegen_ssa/back/linker.rs index c42cd02492..999cc40658 100644 --- a/src/librustc_codegen_ssa/back/linker.rs +++ b/src/librustc_codegen_ssa/back/linker.rs @@ -1092,18 +1092,31 @@ fn exported_symbols(tcx: TyCtxt<'_>, crate_type: CrateType) -> Vec { } } - let formats = tcx.sess.dependency_formats.borrow(); - let deps = formats[&crate_type].iter(); + let formats = tcx.dependency_formats(LOCAL_CRATE); + let deps = formats.iter().filter_map(|(t, list)| { + if *t == crate_type { + Some(list) + } else { + None + } + }).next().unwrap(); - for (index, dep_format) in deps.enumerate() { + for (index, dep_format) in deps.iter().enumerate() { let cnum = CrateNum::new(index + 1); // For each dependency that we are linking to statically ... if *dep_format == Linkage::Static { // ... we add its symbol list to our export list. 
for &(symbol, level) in tcx.exported_symbols(cnum).iter() { - if level.is_below_threshold(export_threshold) { - symbols.push(symbol.symbol_name(tcx).to_string()); + if !level.is_below_threshold(export_threshold) { + continue; } + + // FIXME rust-lang/rust#64319, rust-lang/rust#64872: + // We want to block export of generics from dylibs, + // but we must fix rust-lang/rust#65890 before we can + // do that robustly. + + symbols.push(symbol.symbol_name(tcx).to_string()); } } } diff --git a/src/librustc_codegen_ssa/back/symbol_export.rs b/src/librustc_codegen_ssa/back/symbol_export.rs index 7e700e6819..85a90459f5 100644 --- a/src/librustc_codegen_ssa/back/symbol_export.rs +++ b/src/librustc_codegen_ssa/back/symbol_export.rs @@ -13,8 +13,8 @@ use rustc::ty::{TyCtxt, SymbolName}; use rustc::ty::query::Providers; use rustc::ty::subst::SubstsRef; use rustc::util::nodemap::{FxHashMap, DefIdMap}; -use rustc_data_structures::indexed_vec::IndexVec; -use syntax::ext::allocator::ALLOCATOR_METHODS; +use rustc_index::vec::IndexVec; +use syntax::expand::allocator::ALLOCATOR_METHODS; pub type ExportedSymbols = FxHashMap< CrateNum, @@ -94,14 +94,14 @@ fn reachable_non_generics_provider( // Only consider nodes that actually have exported symbols. Node::Item(&hir::Item { - node: hir::ItemKind::Static(..), + kind: hir::ItemKind::Static(..), .. }) | Node::Item(&hir::Item { - node: hir::ItemKind::Fn(..), .. + kind: hir::ItemKind::Fn(..), .. }) | Node::ImplItem(&hir::ImplItem { - node: hir::ImplItemKind::Method(..), + kind: hir::ImplItemKind::Method(..), .. }) => { let def_id = tcx.hir().local_def_id(hir_id); @@ -298,7 +298,7 @@ fn upstream_monomorphizations_provider( }; for &cnum in cnums.iter() { - for &(ref exported_symbol, _) in tcx.exported_symbols(cnum).iter() { + for (exported_symbol, _) in tcx.exported_symbols(cnum).iter() { if let &ExportedSymbol::Generic(def_id, substs) = exported_symbol { let substs_map = instances.entry(def_id).or_default(); @@ -364,10 +364,11 @@ fn symbol_export_level(tcx: TyCtxt<'_>, sym_def_id: DefId) -> SymbolExportLevel codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL); if is_extern && !std_internal { - // Emscripten cannot export statics, so reduce their export level here - if tcx.sess.target.target.options.is_like_emscripten { + let target = &tcx.sess.target.target.llvm_target; + // WebAssembly cannot export data symbols, so reduce their export level + if target.contains("wasm32") || target.contains("emscripten") { if let Some(Node::Item(&hir::Item { - node: hir::ItemKind::Static(..), + kind: hir::ItemKind::Static(..), .. 
})) = tcx.hir().get_if_local(sym_def_id) { return SymbolExportLevel::Rust; diff --git a/src/librustc_codegen_ssa/back/write.rs b/src/librustc_codegen_ssa/back/write.rs index 1bba479c1f..b302b9ae7f 100644 --- a/src/librustc_codegen_ssa/back/write.rs +++ b/src/librustc_codegen_ssa/back/write.rs @@ -19,19 +19,19 @@ use rustc::util::nodemap::FxHashMap; use rustc::hir::def_id::{CrateNum, LOCAL_CRATE}; use rustc::ty::TyCtxt; use rustc::util::common::{time_depth, set_time_depth, print_time_passes_entry}; -use rustc::util::profiling::SelfProfiler; +use rustc::util::profiling::SelfProfilerRef; use rustc_fs_util::link_or_copy; use rustc_data_structures::svh::Svh; -use rustc_errors::{Handler, Level, FatalError, DiagnosticId}; +use rustc_data_structures::sync::Lrc; +use rustc_errors::{Handler, Level, FatalError, DiagnosticId, SourceMapperDyn}; use rustc_errors::emitter::{Emitter}; use rustc_target::spec::MergeFunctions; use syntax::attr; -use syntax::ext::hygiene::ExpnId; +use syntax_pos::hygiene::ExpnId; use syntax_pos::symbol::{Symbol, sym}; use jobserver::{Client, Acquired}; use std::any::Any; -use std::borrow::Cow; use std::fs; use std::io; use std::mem; @@ -143,15 +143,12 @@ impl ModuleConfig { // Copy what clang does by turning on loop vectorization at O2 and // slp vectorization at O3. Otherwise configure other optimization aspects // of this pass manager builder. - // Turn off vectorization for emscripten, as it's not very well supported. self.vectorize_loop = !sess.opts.cg.no_vectorize_loops && (sess.opts.optimize == config::OptLevel::Default || - sess.opts.optimize == config::OptLevel::Aggressive) && - !sess.target.target.options.is_like_emscripten; + sess.opts.optimize == config::OptLevel::Aggressive); self.vectorize_slp = !sess.opts.cg.no_vectorize_slp && - sess.opts.optimize == config::OptLevel::Aggressive && - !sess.target.target.options.is_like_emscripten; + sess.opts.optimize == config::OptLevel::Aggressive; // Some targets (namely, NVPTX) interact badly with the MergeFunctions // pass. 
This is because MergeFunctions can generate new function calls @@ -196,42 +193,13 @@ impl Clone for TargetMachineFactory { } } -pub struct ProfileGenericActivityTimer { - profiler: Option>, - label: Cow<'static, str>, -} - -impl ProfileGenericActivityTimer { - pub fn start( - profiler: Option>, - label: Cow<'static, str>, - ) -> ProfileGenericActivityTimer { - if let Some(profiler) = &profiler { - profiler.start_activity(label.clone()); - } - - ProfileGenericActivityTimer { - profiler, - label, - } - } -} - -impl Drop for ProfileGenericActivityTimer { - fn drop(&mut self) { - if let Some(profiler) = &self.profiler { - profiler.end_activity(self.label.clone()); - } - } -} - /// Additional resources used by optimize_and_codegen (not module specific) #[derive(Clone)] pub struct CodegenContext { // Resources needed when running LTO pub backend: B, pub time_passes: bool, - pub profiler: Option>, + pub prof: SelfProfilerRef, pub lto: Lto, pub no_landing_pads: bool, pub save_temps: bool, @@ -283,31 +251,6 @@ impl CodegenContext { ModuleKind::Allocator => &self.allocator_module_config, } } - - #[inline(never)] - #[cold] - fn profiler_active ()>(&self, f: F) { - match &self.profiler { - None => bug!("profiler_active() called but there was no profiler active"), - Some(profiler) => { - f(&*profiler); - } - } - } - - #[inline(always)] - pub fn profile ()>(&self, f: F) { - if unlikely!(self.profiler.is_some()) { - self.profiler_active(f) - } - } - - pub fn profile_activity( - &self, - label: impl Into>, - ) -> ProfileGenericActivityTimer { - ProfileGenericActivityTimer::start(self.profiler.clone(), label.into()) - } } fn generate_lto_work( @@ -316,7 +259,7 @@ fn generate_lto_work( needs_thin_lto: Vec<(String, B::ThinBuffer)>, import_only_modules: Vec<(SerializedModule, WorkProduct)> ) -> Vec<(WorkItem, u64)> { - cgcx.profile(|p| p.start_activity("codegen_run_lto")); + let _prof_timer = cgcx.prof.generic_activity("codegen_generate_lto_work"); let (lto_modules, copy_jobs) = if !needs_fat_lto.is_empty() { assert!(needs_thin_lto.is_empty()); @@ -343,8 +286,6 @@ fn generate_lto_work( }), 0) })).collect(); - cgcx.profile(|p| p.end_activity("codegen_run_lto")); - result } @@ -376,10 +317,11 @@ pub fn start_async_codegen( backend: B, tcx: TyCtxt<'_>, metadata: EncodedMetadata, - coordinator_receive: Receiver>, total_cgus: usize, ) -> OngoingCodegen { + let (coordinator_send, coordinator_receive) = channel(); let sess = tcx.sess; + let crate_name = tcx.crate_name(LOCAL_CRATE); let crate_hash = tcx.crate_hash(LOCAL_CRATE); let no_builtins = attr::contains_name(&tcx.hir().krate().attrs, sym::no_builtins); @@ -500,7 +442,8 @@ pub fn start_async_codegen( sess.jobserver.clone(), Arc::new(modules_config), Arc::new(metadata_config), - Arc::new(allocator_config)); + Arc::new(allocator_config), + coordinator_send.clone()); OngoingCodegen { backend, @@ -511,7 +454,7 @@ pub fn start_async_codegen( linker_info, crate_info, - coordinator_send: tcx.tx_to_llvm_workers.lock().clone(), + coordinator_send, codegen_worker_receive, shared_emitter_main, future: coordinator_thread, @@ -731,11 +674,11 @@ impl WorkItem { } } - pub fn name(&self) -> String { + fn profiling_event_id(&self) -> &'static str { match *self { - WorkItem::Optimize(ref m) => format!("optimize: {}", m.name), - WorkItem::CopyPostLtoArtifacts(ref m) => format!("copy post LTO artifacts: {}", m.name), - WorkItem::LTO(ref m) => format!("lto: {}", m.name()), + WorkItem::Optimize(_) => "codegen_module_optimize", + WorkItem::CopyPostLtoArtifacts(_) => 
"codegen_copy_artifacts_from_incr_cache", + WorkItem::LTO(_) => "codegen_module_perform_lto", } } } @@ -1005,8 +948,9 @@ fn start_executing_work( modules_config: Arc, metadata_config: Arc, allocator_config: Arc, + tx_to_llvm_workers: Sender>, ) -> thread::JoinHandle> { - let coordinator_send = tcx.tx_to_llvm_workers.lock().clone(); + let coordinator_send = tx_to_llvm_workers; let sess = tcx.sess; // Compute the set of symbols we need to retain when doing LTO (if we need to) @@ -1048,7 +992,7 @@ fn start_executing_work( }).expect("failed to spawn helper thread"); let mut each_linked_rlib_for_lto = Vec::new(); - drop(link::each_linked_rlib(sess, crate_info, &mut |cnum, path| { + drop(link::each_linked_rlib(crate_info, &mut |cnum, path| { if link::ignored_for_lto(sess, crate_info, cnum) { return } @@ -1086,7 +1030,7 @@ fn start_executing_work( save_temps: sess.opts.cg.save_temps, opts: Arc::new(sess.opts.clone()), time_passes: sess.time_extended(), - profiler: sess.self_profiling.clone(), + prof: sess.prof.clone(), exported_symbols, plugin_passes: sess.plugin_llvm_passes.borrow().clone(), remark: sess.opts.cg.remark.clone(), @@ -1643,12 +1587,8 @@ fn spawn_work( // as a diagnostic was already sent off to the main thread - just // surface that there was an error in this worker. bomb.result = { - let label = work.name(); - cgcx.profile(|p| p.start_activity(label.clone())); - let result = execute_work_item(&cgcx, work).ok(); - cgcx.profile(|p| p.end_activity(label)); - - result + let _prof_timer = cgcx.prof.generic_activity(work.profiling_event_id()); + execute_work_item(&cgcx, work).ok() }; }); } @@ -1724,13 +1664,13 @@ impl SharedEmitter { } impl Emitter for SharedEmitter { - fn emit_diagnostic(&mut self, db: &rustc_errors::Diagnostic) { + fn emit_diagnostic(&mut self, diag: &rustc_errors::Diagnostic) { drop(self.sender.send(SharedEmitterMessage::Diagnostic(Diagnostic { - msg: db.message(), - code: db.code.clone(), - lvl: db.level, + msg: diag.message(), + code: diag.code.clone(), + lvl: diag.level, }))); - for child in &db.children { + for child in &diag.children { drop(self.sender.send(SharedEmitterMessage::Diagnostic(Diagnostic { msg: child.message(), code: None, @@ -1739,6 +1679,9 @@ impl Emitter for SharedEmitter { } drop(self.sender.send(SharedEmitterMessage::AbortIfErrors)); } + fn source_map(&self) -> Option<&Lrc> { + None + } } impl SharedEmitterMain { @@ -1857,7 +1800,7 @@ impl OngoingCodegen { // These are generally cheap and won't throw off scheduling. 
let cost = 0; - submit_codegened_module_to_llvm(&self.backend, tcx, module, cost); + submit_codegened_module_to_llvm(&self.backend, &self.coordinator_send, module, cost); } pub fn codegen_finished(&self, tcx: TyCtxt<'_>) { @@ -1899,12 +1842,12 @@ impl OngoingCodegen { pub fn submit_codegened_module_to_llvm( _backend: &B, - tcx: TyCtxt<'_>, + tx_to_llvm_workers: &Sender>, module: ModuleCodegen, cost: u64, ) { let llvm_work_item = WorkItem::Optimize(module); - drop(tcx.tx_to_llvm_workers.lock().send(Box::new(Message::CodegenDone:: { + drop(tx_to_llvm_workers.send(Box::new(Message::CodegenDone:: { llvm_work_item, cost, }))); @@ -1912,11 +1855,11 @@ pub fn submit_codegened_module_to_llvm( pub fn submit_post_lto_module_to_llvm( _backend: &B, - tcx: TyCtxt<'_>, + tx_to_llvm_workers: &Sender>, module: CachedModuleCodegen, ) { let llvm_work_item = WorkItem::CopyPostLtoArtifacts(module); - drop(tcx.tx_to_llvm_workers.lock().send(Box::new(Message::CodegenDone:: { + drop(tx_to_llvm_workers.send(Box::new(Message::CodegenDone:: { llvm_work_item, cost: 0, }))); @@ -1925,6 +1868,7 @@ pub fn submit_post_lto_module_to_llvm( pub fn submit_pre_lto_module_to_llvm( _backend: &B, tcx: TyCtxt<'_>, + tx_to_llvm_workers: &Sender>, module: CachedModuleCodegen, ) { let filename = pre_lto_bitcode_filename(&module.name); @@ -1939,7 +1883,7 @@ pub fn submit_pre_lto_module_to_llvm( }) }; // Schedule the module to be loaded - drop(tcx.tx_to_llvm_workers.lock().send(Box::new(Message::AddImportOnlyModule:: { + drop(tx_to_llvm_workers.send(Box::new(Message::AddImportOnlyModule:: { module_data: SerializedModule::FromUncompressedFile(mmap), work_product: module.source, }))); diff --git a/src/librustc_codegen_ssa/base.rs b/src/librustc_codegen_ssa/base.rs index 4acbe0356b..ee4ec7fb41 100644 --- a/src/librustc_codegen_ssa/base.rs +++ b/src/librustc_codegen_ssa/base.rs @@ -29,25 +29,22 @@ use rustc::util::common::{time, print_time_passes_entry, set_time_depth, time_de use rustc::session::config::{self, EntryFnType, Lto}; use rustc::session::Session; use rustc::util::nodemap::FxHashMap; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::vec::Idx; use rustc_codegen_utils::{symbol_names_test, check_for_rustc_errors_attr}; use rustc::ty::layout::{FAT_PTR_ADDR, FAT_PTR_EXTRA}; use crate::mir::place::PlaceRef; use crate::back::write::{OngoingCodegen, start_async_codegen, submit_pre_lto_module_to_llvm, submit_post_lto_module_to_llvm}; use crate::{MemFlags, CrateInfo}; -use crate::callee; use crate::common::{RealPredicate, TypeKind, IntPredicate}; use crate::meth; use crate::mir; use crate::traits::*; -use std::any::Any; use std::cmp; use std::ops::{Deref, DerefMut}; use std::time::{Instant, Duration}; -use std::sync::mpsc; use syntax_pos::Span; use syntax::attr; use rustc::hir; @@ -96,7 +93,7 @@ pub fn compare_simd_types<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( ret_ty: Bx::Type, op: hir::BinOpKind, ) -> Bx::Value { - let signed = match t.sty { + let signed = match t.kind { ty::Float(_) => { let cmp = bin_op_to_fcmp_predicate(op); let cmp = bx.fcmp(cmp, lhs, rhs); @@ -130,7 +127,7 @@ pub fn unsized_info<'tcx, Cx: CodegenMethods<'tcx>>( ) -> Cx::Value { let (source, target) = cx.tcx().struct_lockstep_tails_erasing_lifetimes(source, target, cx.param_env()); - match (&source.sty, &target.sty) { + match (&source.kind, &target.kind) { (&ty::Array(_, len), &ty::Slice(_)) => { cx.const_usize(len.eval_usize(cx.tcx(), ty::ParamEnv::reveal_all())) } @@ -160,7 +157,7 @@ pub fn unsize_thin_ptr<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( 
dst_ty: Ty<'tcx>, ) -> (Bx::Value, Bx::Value) { debug!("unsize_thin_ptr: {:?} => {:?}", src_ty, dst_ty); - match (&src_ty.sty, &dst_ty.sty) { + match (&src_ty.kind, &dst_ty.kind) { (&ty::Ref(_, a, _), &ty::Ref(_, b, _)) | (&ty::Ref(_, a, _), @@ -171,12 +168,6 @@ pub fn unsize_thin_ptr<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( let ptr_ty = bx.cx().type_ptr_to(bx.cx().backend_type(bx.cx().layout_of(b))); (bx.pointercast(src, ptr_ty), unsized_info(bx.cx(), a, b, None)) } - (&ty::Adt(def_a, _), &ty::Adt(def_b, _)) if def_a.is_box() && def_b.is_box() => { - let (a, b) = (src_ty.boxed_ty(), dst_ty.boxed_ty()); - assert!(bx.cx().type_is_sized(a)); - let ptr_ty = bx.cx().type_ptr_to(bx.cx().backend_type(bx.cx().layout_of(b))); - (bx.pointercast(src, ptr_ty), unsized_info(bx.cx(), a, b, None)) - } (&ty::Adt(def_a, _), &ty::Adt(def_b, _)) => { assert_eq!(def_a, def_b); @@ -199,6 +190,8 @@ pub fn unsize_thin_ptr<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( } let (lldata, llextra) = result.unwrap(); // HACK(eddyb) have to bitcast pointers until LLVM removes pointee types. + // FIXME(eddyb) move these out of this `match` arm, so they're always + // applied, uniformly, no matter the source/destination types. (bx.bitcast(lldata, bx.cx().scalar_pair_element_backend_type(dst_layout, 0, true)), bx.bitcast(llextra, bx.cx().scalar_pair_element_backend_type(dst_layout, 1, true))) } @@ -215,31 +208,27 @@ pub fn coerce_unsized_into<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( ) { let src_ty = src.layout.ty; let dst_ty = dst.layout.ty; - let mut coerce_ptr = || { - let (base, info) = match bx.load_operand(src).val { - OperandValue::Pair(base, info) => { - // fat-ptr to fat-ptr unsize preserves the vtable - // i.e., &'a fmt::Debug+Send => &'a fmt::Debug - // So we need to pointercast the base to ensure - // the types match up. - let thin_ptr = dst.layout.field(bx.cx(), FAT_PTR_ADDR); - (bx.pointercast(base, bx.cx().backend_type(thin_ptr)), info) - } - OperandValue::Immediate(base) => { - unsize_thin_ptr(bx, base, src_ty, dst_ty) - } - OperandValue::Ref(..) => bug!() - }; - OperandValue::Pair(base, info).store(bx, dst); - }; - match (&src_ty.sty, &dst_ty.sty) { + match (&src_ty.kind, &dst_ty.kind) { (&ty::Ref(..), &ty::Ref(..)) | (&ty::Ref(..), &ty::RawPtr(..)) | (&ty::RawPtr(..), &ty::RawPtr(..)) => { - coerce_ptr() - } - (&ty::Adt(def_a, _), &ty::Adt(def_b, _)) if def_a.is_box() && def_b.is_box() => { - coerce_ptr() + let (base, info) = match bx.load_operand(src).val { + OperandValue::Pair(base, info) => { + // fat-ptr to fat-ptr unsize preserves the vtable + // i.e., &'a fmt::Debug+Send => &'a fmt::Debug + // So we need to pointercast the base to ensure + // the types match up. + // FIXME(eddyb) use `scalar_pair_element_backend_type` here, + // like `unsize_thin_ptr` does. + let thin_ptr = dst.layout.field(bx.cx(), FAT_PTR_ADDR); + (bx.pointercast(base, bx.cx().backend_type(thin_ptr)), info) + } + OperandValue::Immediate(base) => { + unsize_thin_ptr(bx, base, src_ty, dst_ty) + } + OperandValue::Ref(..) 
=> bug!() + }; + OperandValue::Pair(base, info).store(bx, dst); } (&ty::Adt(def_a, _), &ty::Adt(def_b, _)) => { @@ -379,8 +368,7 @@ pub fn codegen_instance<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>( let sig = instance.fn_sig(cx.tcx()); let sig = cx.tcx().normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), &sig); - let lldecl = cx.instances().borrow().get(&instance).cloned().unwrap_or_else(|| - bug!("Instance `{:?}` not already declared", instance)); + let lldecl = cx.get_fn(instance); let mir = cx.tcx().instance_mir(instance.def); mir::codegen_mir::(cx, lldecl, &mir, instance, sig); @@ -402,7 +390,7 @@ pub fn maybe_create_entry_wrapper<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(cx: &' return; } - let main_llfn = cx.get_fn(instance); + let main_llfn = cx.get_fn_addr(instance); let et = cx.tcx().entry_fn(LOCAL_CRATE).map(|e| e.1); match et { @@ -418,8 +406,13 @@ pub fn maybe_create_entry_wrapper<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(cx: &' rust_main_def_id: DefId, use_start_lang_item: bool, ) { - let llfty = - cx.type_func(&[cx.type_int(), cx.type_ptr_to(cx.type_i8p())], cx.type_int()); + // The entry function is either `int main(void)` or `int main(int argc, char **argv)`, + // depending on whether the target needs `argc` and `argv` to be passed in. + let llfty = if cx.sess().target.target.options.main_needs_argc_argv { + cx.type_func(&[cx.type_int(), cx.type_ptr_to(cx.type_i8p())], cx.type_int()) + } else { + cx.type_func(&[], cx.type_int()) + }; let main_ret_ty = cx.tcx().fn_sig(rust_main_def_id).output(); // Given that `main()` has no arguments, @@ -449,18 +442,17 @@ pub fn maybe_create_entry_wrapper<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(cx: &' bx.insert_reference_to_gdb_debug_scripts_section_global(); - // Params from native main() used as args for rust start function - let param_argc = bx.get_param(0); - let param_argv = bx.get_param(1); - let arg_argc = bx.intcast(param_argc, cx.type_isize(), true); - let arg_argv = param_argv; + let (arg_argc, arg_argv) = get_argc_argv(cx, &mut bx); let (start_fn, args) = if use_start_lang_item { let start_def_id = cx.tcx().require_lang_item(StartFnLangItem, None); - let start_fn = callee::resolve_and_get_fn( - cx, - start_def_id, - cx.tcx().intern_substs(&[main_ret_ty.into()]), + let start_fn = cx.get_fn_addr( + ty::Instance::resolve( + cx.tcx(), + ty::ParamEnv::reveal_all(), + start_def_id, + cx.tcx().intern_substs(&[main_ret_ty.into()]), + ).unwrap() ); (start_fn, vec![bx.pointercast(rust_main, cx.type_ptr_to(cx.type_i8p())), arg_argc, arg_argv]) @@ -475,6 +467,27 @@ pub fn maybe_create_entry_wrapper<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(cx: &' } } +/// Obtain the `argc` and `argv` values to pass to the rust start function. +fn get_argc_argv<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( + cx: &'a Bx::CodegenCx, + bx: &mut Bx +) -> (Bx::Value, Bx::Value) +{ + if cx.sess().target.target.options.main_needs_argc_argv { + // Params from native `main()` used as args for rust start function + let param_argc = bx.get_param(0); + let param_argv = bx.get_param(1); + let arg_argc = bx.intcast(param_argc, cx.type_isize(), true); + let arg_argv = param_argv; + (arg_argc, arg_argv) + } else { + // The Rust start function doesn't need `argc` and `argv`, so just pass zeros. 
+ let arg_argc = bx.const_int(cx.type_int(), 0); + let arg_argv = bx.const_null(cx.type_ptr_to(cx.type_i8p())); + (arg_argc, arg_argv) + } +} + pub const CODEGEN_WORKER_ID: usize = ::std::usize::MAX; pub fn codegen_crate( @@ -482,19 +495,13 @@ pub fn codegen_crate( tcx: TyCtxt<'tcx>, metadata: EncodedMetadata, need_metadata_module: bool, - rx: mpsc::Receiver>, ) -> OngoingCodegen { check_for_rustc_errors_attr(tcx); // Skip crate items and just output metadata in -Z no-codegen mode. if tcx.sess.opts.debugging_opts.no_codegen || !tcx.sess.opts.output_types.should_codegen() { - let ongoing_codegen = start_async_codegen( - backend, - tcx, - metadata, - rx, - 1); + let ongoing_codegen = start_async_codegen(backend, tcx, metadata, 1); ongoing_codegen.codegen_finished(tcx); @@ -519,16 +526,11 @@ pub fn codegen_crate( // unnecessarily. if tcx.dep_graph.is_fully_enabled() { for cgu in &codegen_units { - tcx.codegen_unit(cgu.name().clone()); + tcx.codegen_unit(cgu.name()); } } - let ongoing_codegen = start_async_codegen( - backend.clone(), - tcx, - metadata, - rx, - codegen_units.len()); + let ongoing_codegen = start_async_codegen(backend.clone(), tcx, metadata, codegen_units.len()); let ongoing_codegen = AbortCodegenOnDrop::(Some(ongoing_codegen)); // Codegen an allocator shim, if necessary. @@ -539,7 +541,7 @@ pub fn codegen_crate( // linkage, then it's already got an allocator shim and we'll be using that // one instead. If nothing exists then it's our job to generate the // allocator! - let any_dynamic_crate = tcx.sess.dependency_formats.borrow() + let any_dynamic_crate = tcx.dependency_formats(LOCAL_CRATE) .iter() .any(|(_, list)| { use rustc::middle::dependency_format::Linkage; @@ -572,8 +574,6 @@ pub fn codegen_crate( if need_metadata_module { // Codegen the encoded metadata. 
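`maybe_create_entry_wrapper` above now chooses between an `int main(void)` and an `int main(int argc, char **argv)` signature based on `main_needs_argc_argv`, and the new `get_argc_argv` helper either forwards the real parameters or substitutes zero and null. A simplified sketch of that branch, with a hypothetical `TargetOptions` stand-in and plain values instead of codegen IR:

```rust
// Simplified illustration of the argc/argv selection; `TargetOptions` here is a
// hypothetical stand-in, not the real rustc_target type.
struct TargetOptions {
    main_needs_argc_argv: bool,
}

/// Returns the (argc, argv) pair handed to the Rust start function.
fn get_argc_argv(
    opts: &TargetOptions,
    real_argc: i64,
    real_argv: *const *const u8,
) -> (i64, *const *const u8) {
    if opts.main_needs_argc_argv {
        // Forward the parameters of `int main(int argc, char **argv)`.
        (real_argc, real_argv)
    } else {
        // The generated entry point is `int main(void)`: pass zero and null.
        (0, std::ptr::null())
    }
}

fn main() {
    let target = TargetOptions { main_needs_argc_argv: false };
    let (argc, argv) = get_argc_argv(&target, 3, std::ptr::null());
    assert_eq!(argc, 0);
    assert!(argv.is_null());
}
```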
- tcx.sess.profiler(|p| p.start_activity("codegen crate metadata")); - let metadata_cgu_name = cgu_name_builder.build_cgu_name(LOCAL_CRATE, &["crate"], Some("metadata")).as_str() @@ -583,7 +583,6 @@ pub fn codegen_crate( backend.write_compressed_metadata(tcx, &ongoing_codegen.metadata, &mut metadata_llvm_module); }); - tcx.sess.profiler(|p| p.end_activity("codegen crate metadata")); let metadata_module = ModuleCodegen { name: metadata_cgu_name, @@ -612,22 +611,22 @@ pub fn codegen_crate( match cgu_reuse { CguReuse::No => { - tcx.sess.profiler(|p| p.start_activity(format!("codegen {}", cgu.name()))); let start_time = Instant::now(); - backend.compile_codegen_unit(tcx, *cgu.name()); + backend.compile_codegen_unit(tcx, cgu.name(), &ongoing_codegen.coordinator_send); total_codegen_time += start_time.elapsed(); - tcx.sess.profiler(|p| p.end_activity(format!("codegen {}", cgu.name()))); false } CguReuse::PreLto => { - submit_pre_lto_module_to_llvm(&backend, tcx, CachedModuleCodegen { + submit_pre_lto_module_to_llvm(&backend, tcx, &ongoing_codegen.coordinator_send, + CachedModuleCodegen { name: cgu.name().to_string(), source: cgu.work_product(tcx), }); true } CguReuse::PostLto => { - submit_post_lto_module_to_llvm(&backend, tcx, CachedModuleCodegen { + submit_post_lto_module_to_llvm(&backend, &ongoing_codegen.coordinator_send, + CachedModuleCodegen { name: cgu.name().to_string(), source: cgu.work_product(tcx), }); @@ -731,6 +730,7 @@ impl CrateInfo { used_crate_source: Default::default(), lang_item_to_crate: Default::default(), missing_lang_items: Default::default(), + dependency_formats: tcx.dependency_formats(LOCAL_CRATE), }; let lang_items = tcx.lang_items(); diff --git a/src/librustc_codegen_ssa/callee.rs b/src/librustc_codegen_ssa/callee.rs deleted file mode 100644 index 4744dd6302..0000000000 --- a/src/librustc_codegen_ssa/callee.rs +++ /dev/null @@ -1,36 +0,0 @@ -use crate::traits::*; -use rustc::ty; -use rustc::ty::subst::SubstsRef; -use rustc::hir::def_id::DefId; - -pub fn resolve_and_get_fn<'tcx, Cx: CodegenMethods<'tcx>>( - cx: &Cx, - def_id: DefId, - substs: SubstsRef<'tcx>, -) -> Cx::Value { - cx.get_fn( - ty::Instance::resolve( - cx.tcx(), - ty::ParamEnv::reveal_all(), - def_id, - substs - ).unwrap() - ) -} - -pub fn resolve_and_get_fn_for_vtable<'tcx, - Cx: Backend<'tcx> + MiscMethods<'tcx> + TypeMethods<'tcx> ->( - cx: &Cx, - def_id: DefId, - substs: SubstsRef<'tcx>, -) -> Cx::Value { - cx.get_fn( - ty::Instance::resolve_for_vtable( - cx.tcx(), - ty::ParamEnv::reveal_all(), - def_id, - substs - ).unwrap() - ) -} diff --git a/src/librustc_codegen_ssa/common.rs b/src/librustc_codegen_ssa/common.rs index 6376512ca4..ac39ca9847 100644 --- a/src/librustc_codegen_ssa/common.rs +++ b/src/librustc_codegen_ssa/common.rs @@ -1,6 +1,7 @@ #![allow(non_camel_case_types, non_snake_case)] use rustc::ty::{Ty, TyCtxt}; +use rustc::session::Session; use syntax_pos::Span; use rustc::hir::def_id::DefId; @@ -109,14 +110,11 @@ pub enum TypeKind { // for now we content ourselves with providing a no-op HashStable // implementation for CGUs. 
mod temp_stable_hash_impls { - use rustc_data_structures::stable_hasher::{StableHasherResult, StableHasher, - HashStable}; + use rustc_data_structures::stable_hasher::{StableHasher, HashStable}; use crate::ModuleCodegen; impl HashStable for ModuleCodegen { - fn hash_stable(&self, - _: &mut HCX, - _: &mut StableHasher) { + fn hash_stable(&self, _: &mut HCX, _: &mut StableHasher) { // do nothing } } @@ -203,3 +201,7 @@ pub fn shift_mask_val<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( _ => bug!("shift_mask_val: expected Integer or Vector, found {:?}", kind), } } + +pub fn span_invalid_monomorphization_error(a: &Session, b: Span, c: &str) { + span_err!(a, b, E0511, "{}", c); +} diff --git a/src/librustc_codegen_ssa/debuginfo/mod.rs b/src/librustc_codegen_ssa/debuginfo/mod.rs index c9b1c0260e..d1a0cf78d6 100644 --- a/src/librustc_codegen_ssa/debuginfo/mod.rs +++ b/src/librustc_codegen_ssa/debuginfo/mod.rs @@ -1,82 +1,2 @@ -use syntax_pos::{BytePos, Span}; -use rustc::hir::def_id::CrateNum; - +// FIXME(eddyb) find a place for this (or a way to replace it). pub mod type_names; - -pub enum FunctionDebugContext { - RegularContext(FunctionDebugContextData), - DebugInfoDisabled, - FunctionWithoutDebugInfo, -} - -impl FunctionDebugContext { - pub fn get_ref(&self, span: Span) -> &FunctionDebugContextData { - match *self { - FunctionDebugContext::RegularContext(ref data) => data, - FunctionDebugContext::DebugInfoDisabled => { - span_bug!( - span, - "debuginfo: Error trying to access FunctionDebugContext \ - although debug info is disabled!", - ); - } - FunctionDebugContext::FunctionWithoutDebugInfo => { - span_bug!( - span, - "debuginfo: Error trying to access FunctionDebugContext \ - for function that should be ignored by debug info!", - ); - } - } - } -} - -/// Enables emitting source locations for the given functions. -/// -/// Since we don't want source locations to be emitted for the function prelude, -/// they are disabled when beginning to codegen a new function. This functions -/// switches source location emitting on and must therefore be called before the -/// first real statement/expression of the function is codegened. -pub fn start_emitting_source_locations(dbg_context: &mut FunctionDebugContext) { - match *dbg_context { - FunctionDebugContext::RegularContext(ref mut data) => { - data.source_locations_enabled = true; - }, - _ => { /* safe to ignore */ } - } -} - -pub struct FunctionDebugContextData { - pub fn_metadata: D, - pub source_locations_enabled: bool, - pub defining_crate: CrateNum, -} - -pub enum VariableAccess<'a, V> { - // The llptr given is an alloca containing the variable's value - DirectVariable { alloca: V }, - // The llptr given is an alloca containing the start of some pointer chain - // leading to the variable's content. - IndirectVariable { alloca: V, address_operations: &'a [i64] } -} - -pub enum VariableKind { - ArgumentVariable(usize /*index*/), - LocalVariable, -} - - -#[derive(Clone, Copy, Debug)] -pub struct MirDebugScope { - pub scope_metadata: Option, - // Start and end offsets of the file to which this DIScope belongs. - // These are used to quickly determine whether some span refers to the same file. 
- pub file_start_pos: BytePos, - pub file_end_pos: BytePos, -} - -impl MirDebugScope { - pub fn is_valid(&self) -> bool { - !self.scope_metadata.is_none() - } -} diff --git a/src/librustc_codegen_ssa/debuginfo/type_names.rs b/src/librustc_codegen_ssa/debuginfo/type_names.rs index 9b5ad94ecd..166a74fe48 100644 --- a/src/librustc_codegen_ssa/debuginfo/type_names.rs +++ b/src/librustc_codegen_ssa/debuginfo/type_names.rs @@ -32,7 +32,7 @@ pub fn push_debuginfo_type_name<'tcx>( // .natvis visualizers (and perhaps other existing native debuggers?) let cpp_like_names = tcx.sess.target.target.options.is_like_msvc; - match t.sty { + match t.kind { ty::Bool => output.push_str("bool"), ty::Char => output.push_str("char"), ty::Str => output.push_str("str"), @@ -221,7 +221,7 @@ pub fn push_debuginfo_type_name<'tcx>( output.push_str(&tcx.crate_name(def_id.krate).as_str()); for path_element in tcx.def_path(def_id).data { output.push_str("::"); - output.push_str(&path_element.data.as_interned_str().as_str()); + output.push_str(&path_element.data.as_symbol().as_str()); } } else { output.push_str(&tcx.item_name(def_id).as_str()); diff --git a/src/librustc_codegen_ssa/error_codes.rs b/src/librustc_codegen_ssa/error_codes.rs index 8ff41c275a..02e26d8f6e 100644 --- a/src/librustc_codegen_ssa/error_codes.rs +++ b/src/librustc_codegen_ssa/error_codes.rs @@ -1,5 +1,40 @@ syntax::register_diagnostics! { +E0511: r##" +Invalid monomorphization of an intrinsic function was used. Erroneous code +example: + +```ignore (error-emitted-at-codegen-which-cannot-be-handled-by-compile_fail) +#![feature(platform_intrinsics)] + +extern "platform-intrinsic" { + fn simd_add(a: T, b: T) -> T; +} + +fn main() { + unsafe { simd_add(0, 1); } + // error: invalid monomorphization of `simd_add` intrinsic +} +``` + +The generic type has to be a SIMD type. Example: + +``` +#![feature(repr_simd)] +#![feature(platform_intrinsics)] + +#[repr(simd)] +#[derive(Copy, Clone)] +struct i32x2(i32, i32); + +extern "platform-intrinsic" { + fn simd_add(a: T, b: T) -> T; +} + +unsafe { simd_add(i32x2(0, 0), i32x2(1, 2)); } // ok! +``` +"##, + E0668: r##" Malformed inline assembly rejected by LLVM. diff --git a/src/librustc_codegen_ssa/glue.rs b/src/librustc_codegen_ssa/glue.rs index 7fd9f67e2f..9818bb78e7 100644 --- a/src/librustc_codegen_ssa/glue.rs +++ b/src/librustc_codegen_ssa/glue.rs @@ -20,7 +20,7 @@ pub fn size_and_align_of_dst<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( let align = bx.const_usize(layout.align.abi.bytes()); return (size, align); } - match t.sty { + match t.kind { ty::Dynamic(..) => { // load size/align from vtable let vtable = info.unwrap(); @@ -64,7 +64,7 @@ pub fn size_and_align_of_dst<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( let size = bx.add(sized_size, unsized_size); // Packed types ignore the alignment of their fields. 
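The glue.rs hunk above computes the dynamic size of an unsized struct from its sized prefix plus its unsized tail, and its alignment from the larger of the two, except that `#[repr(packed)]` types ignore field alignment. A back-of-the-envelope sketch of that arithmetic with plain integers, assuming the final size is rounded up to the chosen alignment (that rounding happens later in the real function and is not visible in this hunk):

```rust
// Minimal sketch of the size/align arithmetic described above, using plain
// integers instead of codegen IR values; parameter names are illustrative only.
fn dst_size_and_align(
    sized_size: u64,
    sized_align: u64,
    unsized_size: u64,
    unsized_align: u64,
    repr_packed: bool,
) -> (u64, u64) {
    // Packed types ignore the alignment of their fields.
    let align = if repr_packed {
        sized_align
    } else {
        sized_align.max(unsized_align)
    };
    // Sized prefix plus dynamically sized tail, rounded up to the alignment
    // (alignments are powers of two).
    let size = (sized_size + unsized_size + align - 1) & !(align - 1);
    (size, align)
}

fn main() {
    // E.g. an 8-byte sized prefix followed by a 20-byte `[u32]` tail:
    assert_eq!(dst_size_and_align(8, 8, 20, 4, false), (32, 8));
}
```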
- if let ty::Adt(def, _) = t.sty { + if let ty::Adt(def, _) = t.kind { if def.repr.packed() { unsized_align = sized_align; } diff --git a/src/librustc_codegen_ssa/lib.rs b/src/librustc_codegen_ssa/lib.rs index 1708d7235b..dd75883f97 100644 --- a/src/librustc_codegen_ssa/lib.rs +++ b/src/librustc_codegen_ssa/lib.rs @@ -10,7 +10,6 @@ #![feature(in_band_lifetimes)] #![feature(nll)] #![feature(trusted_len)] -#![feature(mem_take)] #![feature(associated_type_bounds)] #![recursion_limit="256"] @@ -21,18 +20,19 @@ #[macro_use] extern crate log; #[macro_use] extern crate rustc; -#[macro_use] extern crate rustc_data_structures; #[macro_use] extern crate syntax; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; use rustc::dep_graph::WorkProduct; -use rustc::session::config::{OutputFilenames, OutputType}; +use rustc::session::config::{OutputFilenames, OutputType, RUST_CGU_EXT}; use rustc::middle::lang_items::LangItem; use rustc::hir::def_id::CrateNum; +use rustc::ty::query::Providers; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_data_structures::sync::Lrc; use rustc_data_structures::svh::Svh; use rustc::middle::cstore::{LibSource, CrateSource, NativeLibrary}; +use rustc::middle::dependency_format::Dependencies; use syntax_pos::symbol::Symbol; mod error_codes; @@ -42,7 +42,6 @@ pub mod traits; pub mod mir; pub mod debuginfo; pub mod base; -pub mod callee; pub mod glue; pub mod meth; pub mod mono_item; @@ -63,6 +62,7 @@ pub struct ModuleCodegen { pub const METADATA_FILENAME: &str = "rust.metadata.bin"; pub const RLIB_BYTECODE_EXTENSION: &str = "bc.z"; + impl ModuleCodegen { pub fn into_compiled_module(self, emit_obj: bool, @@ -142,6 +142,7 @@ pub struct CrateInfo { pub used_crates_dynamic: Vec<(CrateNum, LibSource)>, pub lang_item_to_crate: FxHashMap, pub missing_lang_items: FxHashMap>, + pub dependency_formats: Lrc, } @@ -156,3 +157,32 @@ pub struct CodegenResults { pub linker_info: back::linker::LinkerInfo, pub crate_info: CrateInfo, } + +pub fn provide(providers: &mut Providers<'_>) { + crate::back::symbol_export::provide(providers); + crate::base::provide_both(providers); +} + +pub fn provide_extern(providers: &mut Providers<'_>) { + crate::back::symbol_export::provide_extern(providers); + crate::base::provide_both(providers); +} + +/// Checks if the given filename ends with the `.rcgu.o` extension that `rustc` +/// uses for the object files it generates. +pub fn looks_like_rust_object_file(filename: &str) -> bool { + let path = Path::new(filename); + let ext = path.extension().and_then(|s| s.to_str()); + if ext != Some(OutputType::Object.extension()) { + // The file name does not end with ".o", so it can't be an object file. 
+ return false + } + + // Strip the ".o" at the end + let ext2 = path.file_stem() + .and_then(|s| Path::new(s).extension()) + .and_then(|s| s.to_str()); + + // Check if the "inner" extension + ext2 == Some(RUST_CGU_EXT) +} diff --git a/src/librustc_codegen_ssa/meth.rs b/src/librustc_codegen_ssa/meth.rs index 7fe9f5f251..266d2e5b18 100644 --- a/src/librustc_codegen_ssa/meth.rs +++ b/src/librustc_codegen_ssa/meth.rs @@ -1,6 +1,5 @@ use rustc_target::abi::call::FnType; -use crate::callee; use crate::traits::*; use rustc::ty::{self, Ty, Instance}; @@ -92,7 +91,14 @@ pub fn get_vtable<'tcx, Cx: CodegenMethods<'tcx>>( let methods = methods.cloned().map(|opt_mth| { opt_mth.map_or(nullptr, |(def_id, substs)| { - callee::resolve_and_get_fn_for_vtable(cx, def_id, substs) + cx.get_fn_addr( + ty::Instance::resolve_for_vtable( + cx.tcx(), + ty::ParamEnv::reveal_all(), + def_id, + substs, + ).unwrap() + ) }) }); @@ -102,7 +108,7 @@ pub fn get_vtable<'tcx, Cx: CodegenMethods<'tcx>>( // `get_vtable` in rust_mir/interpret/traits.rs // ///////////////////////////////////////////////////////////////////////////////////////////// let components: Vec<_> = [ - cx.get_fn(Instance::resolve_drop_in_place(cx.tcx(), ty)), + cx.get_fn_addr(Instance::resolve_drop_in_place(cx.tcx(), ty)), cx.const_usize(layout.size.bytes()), cx.const_usize(layout.align.abi.bytes()) ].iter().cloned().chain(methods).collect(); diff --git a/src/librustc_codegen_ssa/mir/analyze.rs b/src/librustc_codegen_ssa/mir/analyze.rs index d192f2ffb6..2e5dc3db31 100644 --- a/src/librustc_codegen_ssa/mir/analyze.rs +++ b/src/librustc_codegen_ssa/mir/analyze.rs @@ -1,12 +1,13 @@ //! An analysis to determine which locals require allocas and //! which do not. -use rustc_data_structures::bit_set::BitSet; +use rustc_index::bit_set::BitSet; use rustc_data_structures::graph::dominators::Dominators; -use rustc_data_structures::indexed_vec::{Idx, IndexVec}; +use rustc_index::vec::{Idx, IndexVec}; use rustc::mir::{self, Location, TerminatorKind}; use rustc::mir::visit::{Visitor, PlaceContext, MutatingUseContext, NonMutatingUseContext}; use rustc::mir::traversal; +use rustc::session::config::DebugInfo; use rustc::ty; use rustc::ty::layout::{LayoutOf, HasTyCtxt}; use syntax_pos::DUMMY_SP; @@ -21,13 +22,20 @@ pub fn non_ssa_locals<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( analyzer.visit_body(mir); - for (index, (ty, span)) in mir.local_decls.iter() - .map(|l| (l.ty, l.source_info.span)) - .enumerate() + for (local, decl) in mir.local_decls.iter_enumerated() { - let ty = fx.monomorphize(&ty); - debug!("local {} has type {:?}", index, ty); - let layout = fx.cx.spanned_layout_of(ty, span); + // FIXME(eddyb): We should figure out how to use llvm.dbg.value instead + // of putting everything in allocas just so we can use llvm.dbg.declare. + if fx.cx.sess().opts.debuginfo == DebugInfo::Full { + if mir.local_kind(local) == mir::LocalKind::Arg || decl.name.is_some() { + analyzer.not_ssa(local); + continue; + } + } + + let ty = fx.monomorphize(&decl.ty); + debug!("local {:?} has type `{}`", local, ty); + let layout = fx.cx.spanned_layout_of(ty, decl.source_info.span); if fx.cx.is_backend_immediate(layout) { // These sorts of types are immediates that we can store // in an Value without an alloca. @@ -40,7 +48,7 @@ pub fn non_ssa_locals<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( // (e.g., structs) into an alloca unconditionally, just so // that we don't have to deal with having two pathways // (gep vs extractvalue etc). 
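The new `looks_like_rust_object_file` helper in the lib.rs hunk above centralizes the check that previously lived as a local `looks_like_rust` function in link.rs: an archive member counts as a rustc-emitted object when it carries the `.rcgu.o` double extension. A standalone re-creation for illustration, hard-coding `"o"` and `"rcgu"` instead of reading them from `OutputType::Object.extension()` and `RUST_CGU_EXT`:

```rust
use std::path::Path;

// Standalone sketch of the check; the real helper takes the extensions from
// `OutputType::Object.extension()` and `RUST_CGU_EXT` rather than literals.
fn looks_like_rust_object_file(filename: &str) -> bool {
    let path = Path::new(filename);
    // Outer extension must be the object-file extension ("o").
    if path.extension().and_then(|s| s.to_str()) != Some("o") {
        return false;
    }
    // Strip the ".o" and check that the remaining stem ends in ".rcgu".
    path.file_stem()
        .and_then(|s| Path::new(s).extension())
        .and_then(|s| s.to_str())
        == Some("rcgu")
}

fn main() {
    // Codegen-unit objects produced by rustc look like `<crate>.<cgu hash>.rcgu.o`.
    assert!(looks_like_rust_object_file("mycrate.3a1fbbbh.rcgu.o"));
    // Ordinary native objects bundled into an rlib do not match.
    assert!(!looks_like_rust_object_file("sqlite3.o"));
    assert!(!looks_like_rust_object_file("libfoo.a"));
}
```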
- analyzer.not_ssa(mir::Local::new(index)); + analyzer.not_ssa(local); } } @@ -191,10 +199,7 @@ impl<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> Visitor<'tcx> location: Location) { debug!("visit_assign(place={:?}, rvalue={:?})", place, rvalue); - if let mir::Place { - base: mir::PlaceBase::Local(index), - projection: box [], - } = *place { + if let Some(index) = place.as_local() { self.assign(index, location); let decl_span = self.fx.mir.local_decls[index].source_info.span; if !self.fx.rvalue_creates_operand(rvalue, decl_span) { @@ -218,7 +223,7 @@ impl<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> Visitor<'tcx> mir::TerminatorKind::Call { func: mir::Operand::Constant(ref c), ref args, .. - } => match c.literal.ty.sty { + } => match c.literal.ty.kind { ty::FnDef(did, _) => Some((did, args)), _ => None, }, diff --git a/src/librustc_codegen_ssa/mir/block.rs b/src/librustc_codegen_ssa/mir/block.rs index 1bb0ea5dae..79855311f3 100644 --- a/src/librustc_codegen_ssa/mir/block.rs +++ b/src/librustc_codegen_ssa/mir/block.rs @@ -1,9 +1,10 @@ +use rustc_index::vec::Idx; use rustc::middle::lang_items; use rustc::ty::{self, Ty, TypeFoldable, Instance}; use rustc::ty::layout::{self, LayoutOf, HasTyCtxt, FnTypeExt}; -use rustc::mir::{self, Place, PlaceBase, Static, StaticKind}; +use rustc::mir::{self, PlaceBase, Static, StaticKind}; use rustc::mir::interpret::PanicInfo; -use rustc_target::abi::call::{ArgType, FnType, PassMode, IgnoreMode}; +use rustc_target::abi::call::{ArgType, FnType, PassMode}; use rustc_target::spec::abi::Abi; use crate::base; use crate::MemFlags; @@ -14,8 +15,7 @@ use crate::traits::*; use std::borrow::Cow; -use syntax::symbol::Symbol; -use syntax_pos::Pos; +use syntax::{source_map::Span, symbol::Symbol}; use super::{FunctionCx, LocalRef}; use super::place::PlaceRef; @@ -148,6 +148,26 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'a, 'tcx> { } } } + + // Generate sideeffect intrinsic if jumping to any of the targets can form + // a loop. + fn maybe_sideeffect<'b, 'tcx2: 'b, Bx: BuilderMethods<'b, 'tcx2>>( + &self, + mir: &'b mir::Body<'tcx>, + bx: &mut Bx, + targets: &[mir::BasicBlock], + ) { + if bx.tcx().sess.opts.debugging_opts.insert_sideeffect { + if targets.iter().any(|target| { + *target <= *self.bb + && target + .start_location() + .is_predecessor_of(self.bb.start_location(), mir) + }) { + bx.sideeffect(); + } + } + } } /// Codegen implementations for some terminator variants. @@ -196,6 +216,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { let lltrue = helper.llblock(self, targets[0]); let llfalse = helper.llblock(self, targets[1]); if switch_ty == bx.tcx().types.bool { + helper.maybe_sideeffect(self.mir, &mut bx, targets.as_slice()); // Don't generate trivial icmps when switching on bool if let [0] = values[..] 
{ bx.cond_br(discr.immediate(), llfalse, lltrue); @@ -209,9 +230,11 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { ); let llval = bx.const_uint_big(switch_llty, values[0]); let cmp = bx.icmp(IntPredicate::IntEQ, discr.immediate(), llval); + helper.maybe_sideeffect(self.mir, &mut bx, targets.as_slice()); bx.cond_br(cmp, lltrue, llfalse); } } else { + helper.maybe_sideeffect(self.mir, &mut bx, targets.as_slice()); let (otherwise, targets) = targets.split_last().unwrap(); bx.switch( discr.immediate(), @@ -224,14 +247,15 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { } fn codegen_return_terminator(&mut self, mut bx: Bx) { + // Call `va_end` if this is the definition of a C-variadic function. if self.fn_ty.c_variadic { - match self.va_list_ref { - Some(va_list) => { + // The `VaList` "spoofed" argument is just after all the real arguments. + let va_list_arg_idx = self.fn_ty.args.len(); + match self.locals[mir::Local::new(1 + va_list_arg_idx)] { + LocalRef::Place(va_list) => { bx.va_end(va_list.llval); } - None => { - bug!("C-variadic function must have a `va_list_ref`"); - } + _ => bug!("C-variadic function must have a `VaList` place"), } } if self.fn_ty.ret.layout.abi.is_uninhabited() { @@ -242,15 +266,11 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { return; } let llval = match self.fn_ty.ret.mode { - PassMode::Ignore(IgnoreMode::Zst) | PassMode::Indirect(..) => { + PassMode::Ignore | PassMode::Indirect(..) => { bx.ret_void(); return; } - PassMode::Ignore(IgnoreMode::CVarArgs) => { - bug!("C-variadic arguments should never be the return type"); - } - PassMode::Direct(_) | PassMode::Pair(..) => { let op = self.codegen_consume(&mut bx, &mir::Place::return_place().as_ref()); @@ -310,6 +330,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def { // we don't actually need to drop anything. + helper.maybe_sideeffect(self.mir, &mut bx, &[target]); helper.funclet_br(self, &mut bx, target); return } @@ -323,7 +344,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { args1 = [place.llval]; &args1[..] }; - let (drop_fn, fn_ty) = match ty.sty { + let (drop_fn, fn_ty) = match ty.kind { ty::Dynamic(..) => { let sig = drop_fn.fn_sig(self.cx.tcx()); let sig = self.cx.tcx().normalize_erasing_late_bound_regions( @@ -336,10 +357,11 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { (meth::DESTRUCTOR.get_fn(&mut bx, vtable, &fn_ty), fn_ty) } _ => { - (bx.get_fn(drop_fn), + (bx.get_fn_addr(drop_fn), FnType::of_instance(&bx, drop_fn)) } }; + helper.maybe_sideeffect(self.mir, &mut bx, &[target]); helper.do_call(self, &mut bx, fn_ty, drop_fn, args, Some((ReturnDest::Nothing, target)), unwind); @@ -375,6 +397,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { // Don't codegen the panic block if success if known. if const_cond == Some(expected) { + helper.maybe_sideeffect(self.mir, &mut bx, &[target]); helper.funclet_br(self, &mut bx, target); return; } @@ -385,6 +408,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { // Create the failure block and the conditional branch to it. 
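The `maybe_sideeffect` helper added in the block.rs hunk, and called before most of the branches in this file, emits the `sideeffect` intrinsic when `-Zinsert-sideeffect` is enabled and some branch target could close a loop; it approximates that as a target block no later than the current one that is also a predecessor of it. A rough sketch of the index half of that test, with the MIR predecessor check omitted:

```rust
// Rough sketch of the "could this branch close a loop?" test, using bare block
// indices; the real code additionally consults MIR predecessor information.
fn branch_may_form_loop(current_block: usize, targets: &[usize]) -> bool {
    // A jump to a block at or before the current one is a potential back edge,
    // so codegen inserts a `sideeffect` call to keep infinite loops
    // well-defined for LLVM.
    targets.iter().any(|&target| target <= current_block)
}

fn main() {
    // Forward-only branch: no sideeffect needed.
    assert!(!branch_may_form_loop(3, &[4, 5]));
    // Branch back to an earlier (or the same) block: insert the sideeffect.
    assert!(branch_may_form_loop(3, &[1, 4]));
    assert!(branch_may_form_loop(3, &[3]));
}
```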
let lltarget = helper.llblock(self, target); let panic_block = self.new_block("panic"); + helper.maybe_sideeffect(self.mir, &mut bx, &[target]); if expected { bx.cond_br(cond, lltarget, panic_block.llbb()); } else { @@ -396,38 +420,19 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { self.set_debug_loc(&mut bx, terminator.source_info); // Get the location information. - let loc = bx.sess().source_map().lookup_char_pos(span.lo()); - let filename = Symbol::intern(&loc.file.name.to_string()); - let line = bx.const_u32(loc.line as u32); - let col = bx.const_u32(loc.col.to_usize() as u32 + 1); + let location = self.get_caller_location(&mut bx, span).immediate(); // Put together the arguments to the panic entry point. let (lang_item, args) = match msg { PanicInfo::BoundsCheck { ref len, ref index } => { let len = self.codegen_operand(&mut bx, len).immediate(); let index = self.codegen_operand(&mut bx, index).immediate(); - - let file_line_col = bx.static_panic_msg( - None, - filename, - line, - col, - "panic_bounds_check_loc", - ); - (lang_items::PanicBoundsCheckFnLangItem, - vec![file_line_col, index, len]) + (lang_items::PanicBoundsCheckFnLangItem, vec![location, index, len]) } _ => { let msg_str = Symbol::intern(msg.description()); - let msg_file_line_col = bx.static_panic_msg( - Some(msg_str), - filename, - line, - col, - "panic_loc", - ); - (lang_items::PanicFnLangItem, - vec![msg_file_line_col]) + let msg = bx.const_str(msg_str); + (lang_items::PanicFnLangItem, vec![msg.0, msg.1, location]) } }; @@ -435,7 +440,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { let def_id = common::langcall(bx.tcx(), Some(span), "", lang_item); let instance = ty::Instance::mono(bx.tcx(), def_id); let fn_ty = FnType::of_instance(&bx, instance); - let llfn = bx.get_fn(instance); + let llfn = bx.get_fn_addr(instance); // Codegen the actual panic invoke/call. helper.do_call(self, &mut bx, fn_ty, llfn, &args, None, cleanup); @@ -455,7 +460,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { // Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar. let callee = self.codegen_operand(&mut bx, func); - let (instance, mut llfn) = match callee.layout.ty.sty { + let (instance, mut llfn) = match callee.layout.ty.kind { ty::FnDef(def_id, substs) => { (Some(ty::Instance::resolve(bx.tcx(), ty::ParamEnv::reveal_all(), @@ -488,6 +493,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { if let Some(destination_ref) = destination.as_ref() { let &(ref dest, target) = destination_ref; self.codegen_transmute(&mut bx, &args[0], dest); + helper.maybe_sideeffect(self.mir, &mut bx, &[target]); helper.funclet_br(self, &mut bx, target); } else { // If we are trying to transmute to an uninhabited type, @@ -502,10 +508,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { return; } - // The "spoofed" `VaListImpl` added to a C-variadic functions signature - // should not be included in the `extra_args` calculation. - let extra_args_start_idx = sig.inputs().len() - if sig.c_variadic { 1 } else { 0 }; - let extra_args = &args[extra_args_start_idx..]; + let extra_args = &args[sig.inputs().len()..]; let extra_args = extra_args.iter().map(|op_arg| { let op_ty = op_arg.ty(self.mir, bx.tcx()); self.monomorphize(&op_ty) @@ -518,6 +521,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { Some(ty::InstanceDef::DropGlue(_, None)) => { // Empty drop glue; a no-op. 
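The panic paths above no longer assemble a static file/line/column message structure in codegen; they call `get_caller_location` (defined later in this diff), which uses the source map to turn the span into a `(file, line, column)` constant that is passed to the panic lang items. A sketch of just that span-to-triple step, with a simplified `CharPos` stand-in for the source-map result:

```rust
// Hedged sketch: `CharPos` and `caller_location` below are simplified stand-ins
// for the source-map lookup, not the real rustc_span API.
struct CharPos {
    file: String,
    line: usize,
    /// Zero-based display column of the start of the span.
    col_display: usize,
}

/// Build the (file, line, column) triple handed to the panic machinery.
/// Columns are reported one-based, hence the `+ 1`, mirroring the diff.
fn caller_location(pos: &CharPos) -> (String, u32, u32) {
    (pos.file.clone(), pos.line as u32, pos.col_display as u32 + 1)
}

fn main() {
    let pos = CharPos { file: "src/main.rs".to_string(), line: 10, col_display: 4 };
    assert_eq!(caller_location(&pos), ("src/main.rs".to_string(), 10, 5));
}
```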
let &(_, target) = destination.as_ref().unwrap(); + helper.maybe_sideeffect(self.mir, &mut bx, &[target]); helper.funclet_br(self, &mut bx, target); return; } @@ -529,44 +533,35 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { let ty = instance.unwrap().substs.type_at(0); let layout = bx.layout_of(ty); if layout.abi.is_uninhabited() { - let loc = bx.sess().source_map().lookup_char_pos(span.lo()); - let filename = Symbol::intern(&loc.file.name.to_string()); - let line = bx.const_u32(loc.line as u32); - let col = bx.const_u32(loc.col.to_usize() as u32 + 1); - - let str = format!( - "Attempted to instantiate uninhabited type {}", - ty - ); - let msg_str = Symbol::intern(&str); - let msg_file_line_col = bx.static_panic_msg( - Some(msg_str), - filename, - line, - col, - "panic_loc", - ); + let msg_str = format!("Attempted to instantiate uninhabited type {}", ty); + let msg = bx.const_str(Symbol::intern(&msg_str)); + let location = self.get_caller_location(&mut bx, span).immediate(); // Obtain the panic entry point. let def_id = common::langcall(bx.tcx(), Some(span), "", lang_items::PanicFnLangItem); let instance = ty::Instance::mono(bx.tcx(), def_id); let fn_ty = FnType::of_instance(&bx, instance); - let llfn = bx.get_fn(instance); + let llfn = bx.get_fn_addr(instance); + if let Some((_, target)) = destination.as_ref() { + helper.maybe_sideeffect(self.mir, &mut bx, &[*target]); + } // Codegen the actual panic invoke/call. helper.do_call( self, &mut bx, fn_ty, llfn, - &[msg_file_line_col], + &[msg.0, msg.1, location], destination.as_ref().map(|(_, bb)| (ReturnDest::Nothing, *bb)), cleanup, ); } else { // a NOP - helper.funclet_br(self, &mut bx, destination.as_ref().unwrap().1) + let target = destination.as_ref().unwrap().1; + helper.maybe_sideeffect(self.mir, &mut bx, &[target]); + helper.funclet_br(self, &mut bx, target); } return; } @@ -584,6 +579,21 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { ReturnDest::Nothing }; + if intrinsic == Some("caller_location") { + if let Some((_, target)) = destination.as_ref() { + let location = self.get_caller_location(&mut bx, span); + + if let ReturnDest::IndirectOperand(tmp, _) = ret_dest { + location.val.store(&mut bx, tmp); + } + self.store_return(&mut bx, ret_dest, &fn_ty.ret, location.immediate()); + + helper.maybe_sideeffect(self.mir, &mut bx, &[*target]); + helper.funclet_br(self, &mut bx, *target); + } + return; + } + if intrinsic.is_some() && intrinsic != Some("drop_in_place") { let dest = match ret_dest { _ if fn_ty.ret.is_indirect() => llargs[0], @@ -601,53 +611,43 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { // checked by const-qualification, which also // promotes any complex rvalues to constants. if i == 2 && intrinsic.unwrap().starts_with("simd_shuffle") { - match *arg { + match arg { // The shuffle array argument is usually not an explicit constant, // but specified directly in the code. This means it gets promoted // and we can then extract the value by evaluating the promoted. 
- mir::Operand::Copy( - Place { - base: PlaceBase::Static(box Static { - kind: StaticKind::Promoted(promoted, _), + mir::Operand::Copy(place) | mir::Operand::Move(place) => { + if let mir::PlaceRef { + base: + &PlaceBase::Static(box Static { + kind: StaticKind::Promoted(promoted, _), + ty, + def_id: _, + }), + projection: &[], + } = place.as_ref() + { + let param_env = ty::ParamEnv::reveal_all(); + let cid = mir::interpret::GlobalId { + instance: self.instance, + promoted: Some(promoted), + }; + let c = bx.tcx().const_eval(param_env.and(cid)); + let (llval, ty) = self.simd_shuffle_indices( + &bx, + terminator.source_info.span, ty, - def_id: _, - }), - projection: box [], + c, + ); + return OperandRef { + val: Immediate(llval), + layout: bx.layout_of(ty), + }; + } else { + span_bug!(span, "shuffle indices must be constant"); } - ) | - mir::Operand::Move( - Place { - base: PlaceBase::Static(box Static { - kind: StaticKind::Promoted(promoted, _), - ty, - def_id: _, - }), - projection: box [], - } - ) => { - let param_env = ty::ParamEnv::reveal_all(); - let cid = mir::interpret::GlobalId { - instance: self.instance, - promoted: Some(promoted), - }; - let c = bx.tcx().const_eval(param_env.and(cid)); - let (llval, ty) = self.simd_shuffle_indices( - &bx, - terminator.source_info.span, - ty, - c, - ); - return OperandRef { - val: Immediate(llval), - layout: bx.layout_of(ty), - }; + } - } - mir::Operand::Copy(_) | - mir::Operand::Move(_) => { - span_bug!(span, "shuffle indices must be constant"); - } - mir::Operand::Constant(ref constant) => { + mir::Operand::Constant(constant) => { let c = self.eval_mir_constant(constant); let (llval, ty) = self.simd_shuffle_indices( &bx, @@ -675,6 +675,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { } if let Some((_, target)) = *destination { + helper.maybe_sideeffect(self.mir, &mut bx, &[target]); helper.funclet_br(self, &mut bx, target); } else { bx.unreachable(); @@ -691,26 +692,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { (&args[..], None) }; - // Useful determining if the current argument is the "spoofed" `VaListImpl` - let last_arg_idx = if sig.inputs().is_empty() { - None - } else { - Some(sig.inputs().len() - 1) - }; 'make_args: for (i, arg) in first_args.iter().enumerate() { - // If this is a C-variadic function the function signature contains - // an "spoofed" `VaListImpl`. This argument is ignored, but we need to - // populate it with a dummy operand so that the users real arguments - // are not overwritten. 
- let i = if sig.c_variadic && last_arg_idx.map(|x| i >= x).unwrap_or(false) { - if i + 1 < fn_ty.args.len() { - i + 1 - } else { - break 'make_args - } - } else { - i - }; let mut op = self.codegen_operand(&mut bx, arg); if let (0, Some(ty::InstanceDef::Virtual(_, idx))) = (i, def) { @@ -782,10 +764,13 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { let fn_ptr = match (llfn, instance) { (Some(llfn), _) => llfn, - (None, Some(instance)) => bx.get_fn(instance), + (None, Some(instance)) => bx.get_fn_addr(instance), _ => span_bug!(span, "no llfn for call"), }; + if let Some((_, target)) = destination.as_ref() { + helper.maybe_sideeffect(self.mir, &mut bx, &[*target]); + } helper.do_call(self, &mut bx, fn_ty, fn_ptr, &llargs, destination.as_ref().map(|&(_, target)| (ret_dest, target)), cleanup); @@ -835,6 +820,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { } mir::TerminatorKind::Goto { target } => { + helper.maybe_sideeffect(self.mir, &mut bx, &[target]); helper.funclet_br(self, &mut bx, target); } @@ -1004,6 +990,20 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { } } + fn get_caller_location( + &mut self, + bx: &mut Bx, + span: Span, + ) -> OperandRef<'tcx, Bx::Value> { + let caller = bx.tcx().sess.source_map().lookup_char_pos(span.lo()); + let const_loc = bx.tcx().const_caller_location(( + Symbol::intern(&caller.file.name.to_string()), + caller.line as u32, + caller.col_display as u32 + 1, + )); + OperandRef::from_const(bx, const_loc) + } + fn get_personality_slot( &mut self, bx: &mut Bx @@ -1102,10 +1102,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { if fn_ret.is_ignore() { return ReturnDest::Nothing; } - let dest = if let mir::Place { - base: mir::PlaceBase::Local(index), - projection: box [], - } = *dest { + let dest = if let Some(index) = dest.as_local() { match self.locals[index] { LocalRef::Place(dest) => dest, LocalRef::UnsizedPlace(_) => bug!("return type must be sized"), @@ -1163,10 +1160,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { src: &mir::Operand<'tcx>, dst: &mir::Place<'tcx> ) { - if let mir::Place { - base: mir::PlaceBase::Local(index), - projection: box [], - } = *dst { + if let Some(index) = dst.as_local() { match self.locals[index] { LocalRef::Place(place) => self.codegen_transmute_into(bx, src, place), LocalRef::UnsizedPlace(_) => bug!("transmute must not involve unsized locals"), diff --git a/src/librustc_codegen_ssa/mir/constant.rs b/src/librustc_codegen_ssa/mir/constant.rs index 216e5a4645..72d098eb31 100644 --- a/src/librustc_codegen_ssa/mir/constant.rs +++ b/src/librustc_codegen_ssa/mir/constant.rs @@ -1,6 +1,6 @@ use rustc::mir::interpret::ErrorHandled; use rustc::mir; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::vec::Idx; use rustc::ty::{self, Ty}; use rustc::ty::layout::{self, HasTyCtxt}; use syntax::source_map::Span; @@ -23,7 +23,10 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { instance, promoted: None, }; - self.cx.tcx().const_eval(ty::ParamEnv::reveal_all().and(cid)) + self.cx.tcx().const_eval(ty::ParamEnv::reveal_all().and(cid)).map_err(|err| { + self.cx.tcx().sess.span_err(constant.span, "erroneous constant encountered"); + err + }) }, _ => Ok(self.monomorphize(&constant.literal)), } @@ -40,7 +43,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { constant .map(|c| { let field_ty = c.ty.builtin_index().unwrap(); - let fields = match c.ty.sty { 
+                let fields = match c.ty.kind {
                     ty::Array(_, n) => n.eval_usize(bx.tcx(), ty::ParamEnv::reveal_all()),
                     _ => bug!("invalid simd shuffle type: {}", c.ty),
                 };
diff --git a/src/librustc_codegen_ssa/mir/debuginfo.rs b/src/librustc_codegen_ssa/mir/debuginfo.rs
new file mode 100644
index 0000000000..c215db34cc
--- /dev/null
+++ b/src/librustc_codegen_ssa/mir/debuginfo.rs
@@ -0,0 +1,385 @@
+use rustc_index::vec::{Idx, IndexVec};
+use rustc::hir::def_id::CrateNum;
+use rustc::mir;
+use rustc::session::config::DebugInfo;
+use rustc::ty::{self, TyCtxt};
+use rustc::ty::layout::{LayoutOf, Size, VariantIdx};
+use crate::traits::*;
+
+use syntax_pos::{BytePos, Span, Symbol};
+use syntax::symbol::kw;
+
+use super::{FunctionCx, LocalRef};
+use super::OperandValue;
+
+pub struct FunctionDebugContext<D> {
+    pub scopes: IndexVec<mir::SourceScope, DebugScope<D>>,
+    pub source_locations_enabled: bool,
+    pub defining_crate: CrateNum,
+}
+
+#[derive(Copy, Clone)]
+pub enum VariableKind {
+    ArgumentVariable(usize /*index*/),
+    LocalVariable,
+}
+
+#[derive(Clone, Copy, Debug)]
+pub struct DebugScope<D> {
+    pub scope_metadata: Option<D>,
+    // Start and end offsets of the file to which this DIScope belongs.
+    // These are used to quickly determine whether some span refers to the same file.
+    pub file_start_pos: BytePos,
+    pub file_end_pos: BytePos,
+}
+
+impl<D> DebugScope<D> {
+    pub fn is_valid(&self) -> bool {
+        !self.scope_metadata.is_none()
+    }
+}
+
+impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
+    pub fn set_debug_loc(
+        &mut self,
+        bx: &mut Bx,
+        source_info: mir::SourceInfo
+    ) {
+        let (scope, span) = self.debug_loc(source_info);
+        if let Some(debug_context) = &mut self.debug_context {
+            // FIXME(eddyb) get rid of this unwrap somehow.
+            bx.set_source_location(debug_context, scope.unwrap(), span);
+        }
+    }
+
+    pub fn debug_loc(&self, source_info: mir::SourceInfo) -> (Option<Bx::DIScope>, Span) {
+        // Bail out if debug info emission is not enabled.
+        match self.debug_context {
+            None => return (None, source_info.span),
+            Some(_) => {}
+        }
+
+        // In order to have a good line stepping behavior in debugger, we overwrite debug
+        // locations of macro expansions with that of the outermost expansion site
+        // (unless the crate is being compiled with `-Z debug-macros`).
+        if !source_info.span.from_expansion() ||
+           self.cx.sess().opts.debugging_opts.debug_macros {
+            let scope = self.scope_metadata_for_loc(source_info.scope, source_info.span.lo());
+            (scope, source_info.span)
+        } else {
+            // Walk up the macro expansion chain until we reach a non-expanded span.
+            // We also stop at the function body level because no line stepping can occur
+            // at the level above that.
+            let span = syntax_pos::hygiene::walk_chain(source_info.span, self.mir.span.ctxt());
+            let scope = self.scope_metadata_for_loc(source_info.scope, span.lo());
+            // Use span of the outermost expansion site, while keeping the original lexical scope.
+            (scope, span)
+        }
+    }
+
+    // DILocations inherit source file name from the parent DIScope. Due to macro expansions
+    // it may so happen that the current span belongs to a different file than the DIScope
+    // corresponding to span's containing source scope. If so, we need to create a DIScope
+    // "extension" into that file.
+ fn scope_metadata_for_loc(&self, scope_id: mir::SourceScope, pos: BytePos) + -> Option { + let debug_context = self.debug_context.as_ref()?; + let scope_metadata = debug_context.scopes[scope_id].scope_metadata; + if pos < debug_context.scopes[scope_id].file_start_pos || + pos >= debug_context.scopes[scope_id].file_end_pos { + let sm = self.cx.sess().source_map(); + let defining_crate = debug_context.defining_crate; + Some(self.cx.extend_scope_to_file( + scope_metadata.unwrap(), + &sm.lookup_char_pos(pos).file, + defining_crate + )) + } else { + scope_metadata + } + } + + /// Apply debuginfo and/or name, after creating the `alloca` for a local, + /// or initializing the local with an operand (whichever applies). + // FIXME(eddyb) use `llvm.dbg.value` (which would work for operands), + // not just `llvm.dbg.declare` (which requires `alloca`). + pub fn debug_introduce_local(&self, bx: &mut Bx, local: mir::Local) { + // FIXME(eddyb) maybe name the return place as `_0` or `return`? + if local == mir::RETURN_PLACE { + return; + } + + let vars = match &self.per_local_var_debug_info { + Some(per_local) => &per_local[local], + None => return, + }; + let whole_local_var = vars.iter().find(|var| { + var.place.projection.is_empty() + }); + let has_proj = || vars.iter().any(|var| { + !var.place.projection.is_empty() + }); + + let (fallback_var, kind) = if self.mir.local_kind(local) == mir::LocalKind::Arg { + let arg_index = local.index() - 1; + + // Add debuginfo even to unnamed arguments. + // FIXME(eddyb) is this really needed? + let var = if arg_index == 0 && has_proj() { + // Hide closure environments from debuginfo. + // FIXME(eddyb) shouldn't `ArgumentVariable` indices + // be offset to account for the hidden environment? + None + } else { + Some(VarDebugInfo { + name: kw::Invalid, + source_info: self.mir.local_decls[local].source_info, + place: local.into(), + }) + }; + (var, VariableKind::ArgumentVariable(arg_index + 1)) + } else { + (None, VariableKind::LocalVariable) + }; + + let local_ref = &self.locals[local]; + + if !bx.sess().fewer_names() { + let name = match whole_local_var.or(fallback_var.as_ref()) { + Some(var) if var.name != kw::Invalid => var.name.to_string(), + _ => format!("{:?}", local), + }; + match local_ref { + LocalRef::Place(place) | + LocalRef::UnsizedPlace(place) => { + bx.set_var_name(place.llval, &name); + } + LocalRef::Operand(Some(operand)) => match operand.val { + OperandValue::Ref(x, ..) | + OperandValue::Immediate(x) => { + bx.set_var_name(x, &name); + } + OperandValue::Pair(a, b) => { + // FIXME(eddyb) these are scalar components, + // maybe extract the high-level fields? + bx.set_var_name(a, &(name.clone() + ".0")); + bx.set_var_name(b, &(name + ".1")); + } + } + LocalRef::Operand(None) => {} + } + } + + if bx.sess().opts.debuginfo != DebugInfo::Full { + return; + } + + let debug_context = match &self.debug_context { + Some(debug_context) => debug_context, + None => return, + }; + + // FIXME(eddyb) add debuginfo for unsized places too. + let base = match local_ref { + LocalRef::Place(place) => place, + _ => return, + }; + + let vars = vars.iter().chain(if whole_local_var.is_none() { + fallback_var.as_ref() + } else { + None + }); + + for var in vars { + let mut layout = base.layout; + let mut direct_offset = Size::ZERO; + // FIXME(eddyb) use smallvec here. + let mut indirect_offsets = vec![]; + + let kind = if var.place.projection.is_empty() { + kind + } else { + VariableKind::LocalVariable + }; + + for elem in &var.place.projection[..] 
{ + match *elem { + mir::ProjectionElem::Deref => { + indirect_offsets.push(Size::ZERO); + layout = bx.cx().layout_of( + layout.ty.builtin_deref(true) + .unwrap_or_else(|| { + span_bug!( + var.source_info.span, + "cannot deref `{}`", + layout.ty, + ) + }).ty, + ); + } + mir::ProjectionElem::Field(field, _) => { + let i = field.index(); + let offset = indirect_offsets.last_mut() + .unwrap_or(&mut direct_offset); + *offset += layout.fields.offset(i); + layout = layout.field(bx.cx(), i); + } + mir::ProjectionElem::Downcast(_, variant) => { + layout = layout.for_variant(bx.cx(), variant); + } + _ => span_bug!( + var.source_info.span, + "unsupported var debuginfo place `{:?}`", + var.place, + ), + } + } + + let (scope, span) = self.debug_loc(var.source_info); + if let Some(scope) = scope { + bx.declare_local(debug_context, var.name, layout.ty, scope, + base.llval, direct_offset, &indirect_offsets, kind, span); + } + } + } + + pub fn debug_introduce_locals(&self, bx: &mut Bx) { + if bx.sess().opts.debuginfo == DebugInfo::Full || !bx.sess().fewer_names() { + for local in self.locals.indices() { + self.debug_introduce_local(bx, local); + } + } + } +} + +pub fn per_local_var_debug_info( + tcx: TyCtxt<'tcx>, + body: &mir::Body<'tcx>, +) -> Option>>> { + if tcx.sess.opts.debuginfo == DebugInfo::Full || !tcx.sess.fewer_names() { + let mut per_local = IndexVec::from_elem(vec![], &body.local_decls); + for (local, decl) in body.local_decls.iter_enumerated() { + if let Some(name) = decl.name { + per_local[local].push(VarDebugInfo { + name, + source_info: mir::SourceInfo { + span: decl.source_info.span, + scope: decl.visibility_scope, + }, + place: local.into(), + }); + } + } + + let upvar_debuginfo = &body.__upvar_debuginfo_codegen_only_do_not_use; + if !upvar_debuginfo.is_empty() { + + let env_arg = mir::Local::new(1); + let mut env_projs = vec![]; + + let pin_did = tcx.lang_items().pin_type(); + match body.local_decls[env_arg].ty.kind { + ty::RawPtr(_) | + ty::Ref(..) => { + env_projs.push(mir::ProjectionElem::Deref); + } + ty::Adt(def, substs) if Some(def.did) == pin_did => { + if let ty::Ref(..) = substs.type_at(0).kind { + env_projs.push(mir::ProjectionElem::Field( + mir::Field::new(0), + // HACK(eddyb) field types aren't used or needed here. + tcx.types.err, + )); + env_projs.push(mir::ProjectionElem::Deref); + } + } + _ => {} + } + + let extra_locals = { + let upvars = upvar_debuginfo + .iter() + .enumerate() + .map(|(i, upvar)| { + let source_info = mir::SourceInfo { + span: body.span, + scope: mir::OUTERMOST_SOURCE_SCOPE, + }; + (None, i, upvar.debug_name, upvar.by_ref, source_info) + }); + + let generator_fields = body.generator_layout.as_ref().map(|generator_layout| { + generator_layout.variant_fields.iter() + .enumerate() + .flat_map(move |(variant_idx, fields)| { + let variant_idx = Some(VariantIdx::from(variant_idx)); + fields.iter() + .enumerate() + .filter_map(move |(i, field)| { + let decl = &generator_layout. 
+                                    __local_debuginfo_codegen_only_do_not_use[*field];
+                                if let Some(name) = decl.name {
+                                    let source_info = mir::SourceInfo {
+                                        span: decl.source_info.span,
+                                        scope: decl.visibility_scope,
+                                    };
+                                    Some((variant_idx, i, name, false, source_info))
+                                } else {
+                                    None
+                                }
+                            })
+                    })
+            }).into_iter().flatten();
+
+            upvars.chain(generator_fields)
+        };
+
+        for (variant_idx, field, name, by_ref, source_info) in extra_locals {
+            let mut projs = env_projs.clone();
+
+            if let Some(variant_idx) = variant_idx {
+                projs.push(mir::ProjectionElem::Downcast(None, variant_idx));
+            }
+
+            projs.push(mir::ProjectionElem::Field(
+                mir::Field::new(field),
+                // HACK(eddyb) field types aren't used or needed here.
+                tcx.types.err,
+            ));
+
+            if by_ref {
+                projs.push(mir::ProjectionElem::Deref);
+            }
+
+            per_local[env_arg].push(VarDebugInfo {
+                name,
+                source_info,
+                place: mir::Place {
+                    base: mir::PlaceBase::Local(env_arg),
+                    projection: tcx.intern_place_elems(&projs),
+                },
+            });
+        }
+    }
+
+        Some(per_local)
+    } else {
+        None
+    }
+}
+
+/// Debug information relating to a user variable.
+// FIXME(eddyb) move this to the MIR bodies themselves.
+#[derive(Clone)]
+pub struct VarDebugInfo<'tcx> {
+    pub name: Symbol,
+
+    /// Source info of the user variable, including the scope
+    /// within which the variable is visible (to debuginfo)
+    /// (see `LocalDecl`'s `source_info` field for more details).
+    pub source_info: mir::SourceInfo,
+
+    /// Where the data for this user variable is to be found.
+    pub place: mir::Place<'tcx>,
+}
diff --git a/src/librustc_codegen_ssa/mir/mod.rs b/src/librustc_codegen_ssa/mir/mod.rs
index aa3971a1da..5ad1445628 100644
--- a/src/librustc_codegen_ssa/mir/mod.rs
+++ b/src/librustc_codegen_ssa/mir/mod.rs
@@ -1,22 +1,17 @@
-use rustc::ty::{self, Ty, TypeFoldable, UpvarSubsts, Instance};
+use rustc::ty::{self, Ty, TypeFoldable, Instance};
 use rustc::ty::layout::{TyLayout, HasTyCtxt, FnTypeExt};
 use rustc::mir::{self, Body};
-use rustc::session::config::DebugInfo;
-use rustc_target::abi::call::{FnType, PassMode, IgnoreMode};
-use rustc_target::abi::{Variants, VariantIdx};
+use rustc_target::abi::call::{FnType, PassMode};
 use crate::base;
-use crate::debuginfo::{self, VariableAccess, VariableKind, FunctionDebugContext};
 use crate::traits::*;
-use syntax_pos::{DUMMY_SP, BytePos, Span};
-use syntax::symbol::kw;
-
 use std::iter;
-use rustc_data_structures::bit_set::BitSet;
-use rustc_data_structures::indexed_vec::IndexVec;
+use rustc_index::bit_set::BitSet;
+use rustc_index::vec::IndexVec;
 use self::analyze::CleanupKind;
+use self::debuginfo::FunctionDebugContext;
 use self::place::PlaceRef;
 use rustc::mir::traversal;
@@ -28,9 +23,9 @@ pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
     mir: &'a mir::Body<'tcx>,
-    debug_context: FunctionDebugContext<Bx::DIScope>,
+    debug_context: Option<FunctionDebugContext<Bx::DIScope>>,
-    llfn: Bx::Value,
+    llfn: Bx::Function,
     cx: &'a Bx::CodegenCx,
@@ -79,12 +74,7 @@ pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
     /// notably `expect`.
     locals: IndexVec<mir::Local, LocalRef<'tcx, Bx::Value>>,
-    /// Debug information for MIR scopes.
-    scopes: IndexVec<mir::SourceScope, debuginfo::MirDebugScope<Bx::DIScope>>,
-
-    /// If this function is a C-variadic function, this contains the `PlaceRef` of the
-    /// "spoofed" `VaListImpl`.
- va_list_ref: Option>, + per_local_var_debug_info: Option>>>, } impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { @@ -97,64 +87,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { value, ) } - - pub fn set_debug_loc( - &mut self, - bx: &mut Bx, - source_info: mir::SourceInfo - ) { - let (scope, span) = self.debug_loc(source_info); - bx.set_source_location(&mut self.debug_context, scope, span); - } - - pub fn debug_loc(&self, source_info: mir::SourceInfo) -> (Option, Span) { - // Bail out if debug info emission is not enabled. - match self.debug_context { - FunctionDebugContext::DebugInfoDisabled | - FunctionDebugContext::FunctionWithoutDebugInfo => { - return (self.scopes[source_info.scope].scope_metadata, source_info.span); - } - FunctionDebugContext::RegularContext(_) =>{} - } - - // In order to have a good line stepping behavior in debugger, we overwrite debug - // locations of macro expansions with that of the outermost expansion site - // (unless the crate is being compiled with `-Z debug-macros`). - if !source_info.span.from_expansion() || - self.cx.sess().opts.debugging_opts.debug_macros { - let scope = self.scope_metadata_for_loc(source_info.scope, source_info.span.lo()); - (scope, source_info.span) - } else { - // Walk up the macro expansion chain until we reach a non-expanded span. - // We also stop at the function body level because no line stepping can occur - // at the level above that. - let span = syntax_pos::hygiene::walk_chain(source_info.span, self.mir.span.ctxt()); - let scope = self.scope_metadata_for_loc(source_info.scope, span.lo()); - // Use span of the outermost expansion site, while keeping the original lexical scope. - (scope, span) - } - } - - // DILocations inherit source file name from the parent DIScope. Due to macro expansions - // it may so happen that the current span belongs to a different file than the DIScope - // corresponding to span's containing source scope. If so, we need to create a DIScope - // "extension" into that file. - fn scope_metadata_for_loc(&self, scope_id: mir::SourceScope, pos: BytePos) - -> Option { - let scope_metadata = self.scopes[scope_id].scope_metadata; - if pos < self.scopes[scope_id].file_start_pos || - pos >= self.scopes[scope_id].file_end_pos { - let sm = self.cx.sess().source_map(); - let defining_crate = self.debug_context.get_ref(DUMMY_SP).defining_crate; - Some(self.cx.extend_scope_to_file( - scope_metadata.unwrap(), - &sm.lookup_char_pos(pos).file, - defining_crate - )) - } else { - scope_metadata - } - } } enum LocalRef<'tcx, V> { @@ -187,7 +119,7 @@ impl<'a, 'tcx, V: CodegenObject> LocalRef<'tcx, V> { pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( cx: &'a Bx::CodegenCx, - llfn: Bx::Value, + llfn: Bx::Function, mir: &'a Body<'tcx>, instance: Instance<'tcx>, sig: ty::FnSig<'tcx>, @@ -196,14 +128,18 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( let fn_ty = FnType::new(cx, sig, &[]); debug!("fn_ty: {:?}", fn_ty); - let mut debug_context = + + let debug_context = cx.create_function_debug_context(instance, sig, llfn, mir); + let mut bx = Bx::new_block(cx, llfn, "start"); if mir.basic_blocks().iter().any(|bb| bb.is_cleanup) { bx.set_personality_fn(cx.eh_personality()); } + bx.sideeffect(); + let cleanup_kinds = analyze::cleanup_kinds(&mir); // Allocate a `Block` for every basic block, except // the start block, if nothing loops back to it. 
@@ -217,8 +153,6 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( } }).collect(); - // Compute debuginfo scopes from MIR scopes. - let scopes = cx.create_mir_scopes(mir, &mut debug_context); let (landing_pads, funclets) = create_funclets(mir, &mut bx, &cleanup_kinds, &block_bxs); let mut fx = FunctionCx { @@ -233,83 +167,38 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( cleanup_kinds, landing_pads, funclets, - scopes, locals: IndexVec::new(), debug_context, - va_list_ref: None, + per_local_var_debug_info: debuginfo::per_local_var_debug_info(cx.tcx(), mir), }; let memory_locals = analyze::non_ssa_locals(&fx); // Allocate variable and temp allocas fx.locals = { - // FIXME(dlrobertson): This is ugly. Find a better way of getting the `PlaceRef` or - // `LocalRef` from `arg_local_refs` - let mut va_list_ref = None; - let args = arg_local_refs(&mut bx, &fx, &memory_locals, &mut va_list_ref); - fx.va_list_ref = va_list_ref; + let args = arg_local_refs(&mut bx, &fx, &memory_locals); let mut allocate_local = |local| { let decl = &mir.local_decls[local]; let layout = bx.layout_of(fx.monomorphize(&decl.ty)); assert!(!layout.ty.has_erasable_regions()); - if let Some(name) = decl.name { - // User variable - let debug_scope = fx.scopes[decl.visibility_scope]; - let dbg = debug_scope.is_valid() && - bx.sess().opts.debuginfo == DebugInfo::Full; + if local == mir::RETURN_PLACE && fx.fn_ty.ret.is_indirect() { + debug!("alloc: {:?} (return place) -> place", local); + let llretptr = bx.get_param(0); + return LocalRef::Place(PlaceRef::new_sized(llretptr, layout)); + } - if !memory_locals.contains(local) && !dbg { - debug!("alloc: {:?} ({}) -> operand", local, name); - return LocalRef::new_operand(&mut bx, layout); - } - - debug!("alloc: {:?} ({}) -> place", local, name); + if memory_locals.contains(local) { + debug!("alloc: {:?} -> place", local); if layout.is_unsized() { - let indirect_place = - PlaceRef::alloca_unsized_indirect(&mut bx, layout); - bx.set_var_name(indirect_place.llval, name); - // FIXME: add an appropriate debuginfo - LocalRef::UnsizedPlace(indirect_place) + LocalRef::UnsizedPlace(PlaceRef::alloca_unsized_indirect(&mut bx, layout)) } else { - let place = PlaceRef::alloca(&mut bx, layout); - bx.set_var_name(place.llval, name); - if dbg { - let (scope, span) = fx.debug_loc(mir::SourceInfo { - span: decl.source_info.span, - scope: decl.visibility_scope, - }); - bx.declare_local(&fx.debug_context, name, layout.ty, scope.unwrap(), - VariableAccess::DirectVariable { alloca: place.llval }, - VariableKind::LocalVariable, span); - } - LocalRef::Place(place) + LocalRef::Place(PlaceRef::alloca(&mut bx, layout)) } } else { - // Temporary or return place - if local == mir::RETURN_PLACE && fx.fn_ty.ret.is_indirect() { - debug!("alloc: {:?} (return place) -> place", local); - let llretptr = bx.get_param(0); - LocalRef::Place(PlaceRef::new_sized(llretptr, layout)) - } else if memory_locals.contains(local) { - debug!("alloc: {:?} -> place", local); - if layout.is_unsized() { - let indirect_place = PlaceRef::alloca_unsized_indirect(&mut bx, layout); - bx.set_var_name(indirect_place.llval, format_args!("{:?}", local)); - LocalRef::UnsizedPlace(indirect_place) - } else { - let place = PlaceRef::alloca(&mut bx, layout); - bx.set_var_name(place.llval, format_args!("{:?}", local)); - LocalRef::Place(place) - } - } else { - // If this is an immediate local, we do not create an - // alloca in advance. Instead we wait until we see the - // definition and update the operand there. 
- debug!("alloc: {:?} -> operand", local); - LocalRef::new_operand(&mut bx, layout) - } + debug!("alloc: {:?} -> operand", local); + LocalRef::new_operand(&mut bx, layout) } }; @@ -320,6 +209,9 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( .collect() }; + // Apply debuginfo to the newly allocated locals. + fx.debug_introduce_locals(&mut bx); + // Branch to the START block, if it's not the entry block. if reentrant_start_block { bx.br(fx.blocks[mir::START_BLOCK]); @@ -328,7 +220,9 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( // Up until here, IR instructions for this function have explicitly not been annotated with // source code location, so we don't step into call setup code. From here on, source location // emitting should be enabled. - debuginfo::start_emitting_source_locations(&mut fx.debug_context); + if let Some(debug_context) = &mut fx.debug_context { + debug_context.source_locations_enabled = true; + } let rpo = traversal::reverse_postorder(&mir); let mut visited = BitSet::new_empty(mir.basic_blocks().len()); @@ -426,40 +320,14 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( bx: &mut Bx, fx: &FunctionCx<'a, 'tcx, Bx>, memory_locals: &BitSet, - va_list_ref: &mut Option>, ) -> Vec> { let mir = fx.mir; - let tcx = fx.cx.tcx(); let mut idx = 0; let mut llarg_idx = fx.fn_ty.ret.is_indirect() as usize; - // Get the argument scope, if it exists and if we need it. - let arg_scope = fx.scopes[mir::OUTERMOST_SOURCE_SCOPE]; - let arg_scope = if bx.sess().opts.debuginfo == DebugInfo::Full { - arg_scope.scope_metadata - } else { - None - }; - - // Store the index of the last argument. This is used to - // call va_start on the va_list instead of attempting - // to store_fn_arg. - let last_arg_idx = if fx.fn_ty.args.is_empty() { - None - } else { - Some(fx.fn_ty.args.len() - 1) - }; - mir.args_iter().enumerate().map(|(arg_index, local)| { let arg_decl = &mir.local_decls[local]; - // FIXME(eddyb) don't allocate a `String` unless it gets used. - let name = if let Some(name) = arg_decl.name { - name.as_str().to_string() - } else { - format!("{:?}", local) - }; - if Some(local) == mir.spread_arg { // This argument (e.g., the last argument in the "rust-call" ABI) // is a tuple that was spread at the ABI level and now we have @@ -467,13 +335,12 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( // individual LLVM function arguments. let arg_ty = fx.monomorphize(&arg_decl.ty); - let tupled_arg_tys = match arg_ty.sty { + let tupled_arg_tys = match arg_ty.kind { ty::Tuple(ref tys) => tys, _ => bug!("spread argument isn't a tuple?!") }; let place = PlaceRef::alloca(bx, bx.layout_of(arg_ty)); - bx.set_var_name(place.llval, name); for i in 0..tupled_arg_tys.len() { let arg = &fx.fn_ty.args[idx]; idx += 1; @@ -484,44 +351,35 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( bx.store_fn_arg(arg, &mut llarg_idx, pr_field); } - // Now that we have one alloca that contains the aggregate value, - // we can create one debuginfo entry for the argument. 
- arg_scope.map(|scope| { - let variable_access = VariableAccess::DirectVariable { - alloca: place.llval - }; - bx.declare_local( - &fx.debug_context, - arg_decl.name.unwrap_or(kw::Invalid), - arg_ty, scope, - variable_access, - VariableKind::ArgumentVariable(arg_index + 1), - DUMMY_SP - ); - }); - return LocalRef::Place(place); } + if fx.fn_ty.c_variadic && arg_index == fx.fn_ty.args.len() { + let arg_ty = fx.monomorphize(&arg_decl.ty); + + let va_list = PlaceRef::alloca(bx, bx.layout_of(arg_ty)); + bx.va_start(va_list.llval); + + return LocalRef::Place(va_list); + } + let arg = &fx.fn_ty.args[idx]; idx += 1; if arg.pad.is_some() { llarg_idx += 1; } - if arg_scope.is_none() && !memory_locals.contains(local) { + if !memory_locals.contains(local) { // We don't have to cast or keep the argument in the alloca. // FIXME(eddyb): We should figure out how to use llvm.dbg.value instead // of putting everything in allocas just so we can use llvm.dbg.declare. let local = |op| LocalRef::Operand(Some(op)); match arg.mode { - PassMode::Ignore(IgnoreMode::Zst) => { + PassMode::Ignore => { return local(OperandRef::new_zst(bx, arg.layout)); } - PassMode::Ignore(IgnoreMode::CVarArgs) => {} PassMode::Direct(_) => { let llarg = bx.get_param(llarg_idx); - bx.set_var_name(llarg, &name); llarg_idx += 1; return local( OperandRef::from_immediate_or_packed_pair(bx, llarg, arg.layout)); @@ -530,11 +388,6 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( let (a, b) = (bx.get_param(llarg_idx), bx.get_param(llarg_idx + 1)); llarg_idx += 2; - // FIXME(eddyb) these are scalar components, - // maybe extract the high-level fields? - bx.set_var_name(a, format_args!("{}.0", name)); - bx.set_var_name(b, format_args!("{}.1", name)); - return local(OperandRef { val: OperandValue::Pair(a, b), layout: arg.layout @@ -544,14 +397,13 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( } } - let place = if arg.is_sized_indirect() { + if arg.is_sized_indirect() { // Don't copy an indirect argument to an alloca, the caller // already put it in a temporary alloca and gave it up. // FIXME: lifetimes let llarg = bx.get_param(llarg_idx); - bx.set_var_name(llarg, &name); llarg_idx += 1; - PlaceRef::new_sized(llarg, arg.layout) + LocalRef::Place(PlaceRef::new_sized(llarg, arg.layout)) } else if arg.is_unsized_indirect() { // As the storage for the indirect argument lives during // the whole function call, we just copy the fat pointer. @@ -562,165 +414,12 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( let indirect_operand = OperandValue::Pair(llarg, llextra); let tmp = PlaceRef::alloca_unsized_indirect(bx, arg.layout); - bx.set_var_name(tmp.llval, name); indirect_operand.store(bx, tmp); - tmp + LocalRef::UnsizedPlace(tmp) } else { let tmp = PlaceRef::alloca(bx, arg.layout); - bx.set_var_name(tmp.llval, name); - if fx.fn_ty.c_variadic && last_arg_idx.map(|idx| arg_index == idx).unwrap_or(false) { - let va_list_did = match tcx.lang_items().va_list() { - Some(did) => did, - None => bug!("`va_list` lang item required for C-variadic functions"), - }; - match arg_decl.ty.sty { - ty::Adt(def, _) if def.did == va_list_did => { - // Call `va_start` on the spoofed `VaListImpl`. - bx.va_start(tmp.llval); - *va_list_ref = Some(tmp); - }, - _ => bug!("last argument of variadic function is not a `va_list`") - } - } else { - bx.store_fn_arg(arg, &mut llarg_idx, tmp); - } - tmp - }; - let upvar_debuginfo = &mir.__upvar_debuginfo_codegen_only_do_not_use; - arg_scope.map(|scope| { - // Is this a regular argument? 
- if arg_index > 0 || upvar_debuginfo.is_empty() { - // The Rust ABI passes indirect variables using a pointer and a manual copy, so we - // need to insert a deref here, but the C ABI uses a pointer and a copy using the - // byval attribute, for which LLVM always does the deref itself, - // so we must not add it. - let variable_access = VariableAccess::DirectVariable { - alloca: place.llval - }; - - bx.declare_local( - &fx.debug_context, - arg_decl.name.unwrap_or(kw::Invalid), - arg.layout.ty, - scope, - variable_access, - VariableKind::ArgumentVariable(arg_index + 1), - DUMMY_SP - ); - return; - } - - let pin_did = tcx.lang_items().pin_type(); - // Or is it the closure environment? - let (closure_layout, env_ref) = match arg.layout.ty.sty { - ty::RawPtr(ty::TypeAndMut { ty, .. }) | - ty::Ref(_, ty, _) => (bx.layout_of(ty), true), - ty::Adt(def, substs) if Some(def.did) == pin_did => { - match substs.type_at(0).sty { - ty::Ref(_, ty, _) => (bx.layout_of(ty), true), - _ => (arg.layout, false), - } - } - _ => (arg.layout, false) - }; - - let (def_id, upvar_substs) = match closure_layout.ty.sty { - ty::Closure(def_id, substs) => (def_id, UpvarSubsts::Closure(substs)), - ty::Generator(def_id, substs, _) => (def_id, UpvarSubsts::Generator(substs)), - _ => bug!("upvar debuginfo with non-closure arg0 type `{}`", closure_layout.ty) - }; - let upvar_tys = upvar_substs.upvar_tys(def_id, tcx); - - let extra_locals = { - let upvars = upvar_debuginfo - .iter() - .zip(upvar_tys) - .enumerate() - .map(|(i, (upvar, ty))| { - (None, i, upvar.debug_name, upvar.by_ref, ty, scope, DUMMY_SP) - }); - - let generator_fields = mir.generator_layout.as_ref().map(|generator_layout| { - let (def_id, gen_substs) = match closure_layout.ty.sty { - ty::Generator(def_id, substs, _) => (def_id, substs), - _ => bug!("generator layout without generator substs"), - }; - let state_tys = gen_substs.state_tys(def_id, tcx); - - generator_layout.variant_fields.iter() - .zip(state_tys) - .enumerate() - .flat_map(move |(variant_idx, (fields, tys))| { - let variant_idx = Some(VariantIdx::from(variant_idx)); - fields.iter() - .zip(tys) - .enumerate() - .filter_map(move |(i, (field, ty))| { - let decl = &generator_layout. - __local_debuginfo_codegen_only_do_not_use[*field]; - if let Some(name) = decl.name { - let ty = fx.monomorphize(&ty); - let (var_scope, var_span) = fx.debug_loc(mir::SourceInfo { - span: decl.source_info.span, - scope: decl.visibility_scope, - }); - let var_scope = var_scope.unwrap_or(scope); - Some((variant_idx, i, name, false, ty, var_scope, var_span)) - } else { - None - } - }) - }) - }).into_iter().flatten(); - - upvars.chain(generator_fields) - }; - - for (variant_idx, field, name, by_ref, ty, var_scope, var_span) in extra_locals { - let fields = match variant_idx { - Some(variant_idx) => { - match &closure_layout.variants { - Variants::Multiple { variants, .. } => { - &variants[variant_idx].fields - }, - _ => bug!("variant index on univariant layout"), - } - } - None => &closure_layout.fields, - }; - let byte_offset_of_var_in_env = fields.offset(field).bytes(); - - let ops = bx.debuginfo_upvar_ops_sequence(byte_offset_of_var_in_env); - - // The environment and the capture can each be indirect. - let mut ops = if env_ref { &ops[..] } else { &ops[1..] 
}; - - let ty = if let (true, &ty::Ref(_, ty, _)) = (by_ref, &ty.sty) { - ty - } else { - ops = &ops[..ops.len() - 1]; - ty - }; - - let variable_access = VariableAccess::IndirectVariable { - alloca: place.llval, - address_operations: &ops - }; - bx.declare_local( - &fx.debug_context, - name, - ty, - var_scope, - variable_access, - VariableKind::LocalVariable, - var_span - ); - } - }); - if arg.is_unsized_indirect() { - LocalRef::UnsizedPlace(place) - } else { - LocalRef::Place(place) + bx.store_fn_arg(arg, &mut llarg_idx, tmp); + LocalRef::Place(tmp) } }).collect() } @@ -728,6 +427,7 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( mod analyze; mod block; pub mod constant; +pub mod debuginfo; pub mod place; pub mod operand; mod rvalue; diff --git a/src/librustc_codegen_ssa/mir/operand.rs b/src/librustc_codegen_ssa/mir/operand.rs index daa25b2ea0..ba5e47aeed 100644 --- a/src/librustc_codegen_ssa/mir/operand.rs +++ b/src/librustc_codegen_ssa/mir/operand.rs @@ -79,6 +79,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> { ConstValue::Unevaluated(..) => bug!("unevaluated constant in `OperandRef::from_const`"), ConstValue::Param(_) => bug!("encountered a ConstValue::Param in codegen"), ConstValue::Infer(_) => bug!("encountered a ConstValue::Infer in codegen"), + ConstValue::Bound(..) => bug!("encountered a ConstValue::Bound in codegen"), ConstValue::Placeholder(_) => bug!("encountered a ConstValue::Placeholder in codegen"), ConstValue::Scalar(x) => { let scalar = match layout.abi { diff --git a/src/librustc_codegen_ssa/mir/place.rs b/src/librustc_codegen_ssa/mir/place.rs index a4b4cb53bb..3e7c4ef49f 100644 --- a/src/librustc_codegen_ssa/mir/place.rs +++ b/src/librustc_codegen_ssa/mir/place.rs @@ -68,6 +68,8 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> { } } + // FIXME(eddyb) pass something else for the name so no work is done + // unless LLVM IR names are turned on (e.g. for `--emit=llvm-ir`). pub fn alloca>( bx: &mut Bx, layout: TyLayout<'tcx>, @@ -78,6 +80,8 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> { } /// Returns a place for an indirect reference to an unsized place. + // FIXME(eddyb) pass something else for the name so no work is done + // unless LLVM IR names are turned on (e.g. for `--emit=llvm-ir`). pub fn alloca_unsized_indirect>( bx: &mut Bx, layout: TyLayout<'tcx>, @@ -144,7 +148,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> { // * no metadata available - just log the case // * known alignment - sized types, `[T]`, `str` or a foreign type // * packed struct - there is no alignment padding - match field.ty.sty { + match field.ty.kind { _ if self.llextra.is_none() => { debug!("unsized field `{}`, of `{:?}` has no metadata for adjustment", ix, self.llval); @@ -394,8 +398,8 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> { // Statically compute the offset if we can, otherwise just use the element size, // as this will yield the lowest alignment. 
let layout = self.layout.field(bx, 0); - let offset = if bx.is_const_integral(llindex) { - layout.size.checked_mul(bx.const_to_uint(llindex), bx).unwrap_or(layout.size) + let offset = if let Some(llindex) = bx.const_to_opt_uint(llindex) { + layout.size.checked_mul(llindex, bx).unwrap_or(layout.size) } else { layout.size }; diff --git a/src/librustc_codegen_ssa/mir/rvalue.rs b/src/librustc_codegen_ssa/mir/rvalue.rs index f21836a953..1608f222bc 100644 --- a/src/librustc_codegen_ssa/mir/rvalue.rs +++ b/src/librustc_codegen_ssa/mir/rvalue.rs @@ -10,7 +10,6 @@ use syntax::source_map::{DUMMY_SP, Span}; use crate::base; use crate::MemFlags; -use crate::callee; use crate::common::{self, RealPredicate, IntPredicate}; use crate::traits::*; @@ -95,7 +94,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { let size = bx.const_usize(dest.layout.size.bytes()); // Use llvm.memset.p0i8.* to initialize all zero arrays - if bx.cx().is_const_integral(v) && bx.cx().const_to_uint(v) == 0 { + if bx.cx().const_to_opt_uint(v) == Some(0) { let fill = bx.cx().const_u8(0); bx.memset(start, fill, size, dest.align, MemFlags::empty()); return bx; @@ -184,13 +183,21 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { let val = match *kind { mir::CastKind::Pointer(PointerCast::ReifyFnPointer) => { - match operand.layout.ty.sty { + match operand.layout.ty.kind { ty::FnDef(def_id, substs) => { if bx.cx().tcx().has_attr(def_id, sym::rustc_args_required_const) { bug!("reifying a fn ptr that requires const arguments"); } OperandValue::Immediate( - callee::resolve_and_get_fn(bx.cx(), def_id, substs)) + bx.get_fn_addr( + ty::Instance::resolve_for_fn_ptr( + bx.tcx(), + ty::ParamEnv::reveal_all(), + def_id, + substs + ).unwrap() + ) + ) } _ => { bug!("{} cannot be reified to a fn ptr", operand.layout.ty) @@ -198,11 +205,14 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { } } mir::CastKind::Pointer(PointerCast::ClosureFnPointer(_)) => { - match operand.layout.ty.sty { + match operand.layout.ty.kind { ty::Closure(def_id, substs) => { let instance = Instance::resolve_closure( - bx.cx().tcx(), def_id, substs, ty::ClosureKind::FnOnce); - OperandValue::Immediate(bx.cx().get_fn(instance)) + bx.cx().tcx(), + def_id, + substs, + ty::ClosureKind::FnOnce); + OperandValue::Immediate(bx.cx().get_fn_addr(instance)) } _ => { bug!("{} cannot be cast to a fn ptr", operand.layout.ty) @@ -485,7 +495,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { } }; let instance = ty::Instance::mono(bx.tcx(), def_id); - let r = bx.cx().get_fn(instance); + let r = bx.cx().get_fn_addr(instance); let call = bx.call(r, &[llsize, llalign], None); let val = bx.pointercast(call, llty_ptr); @@ -520,12 +530,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { ) -> Bx::Value { // ZST are passed as operands and require special handling // because codegen_place() panics if Local is operand. 
- if let mir::Place { - base: mir::PlaceBase::Local(index), - projection: box [], - } = *place { + if let Some(index) = place.as_local() { if let LocalRef::Operand(Some(op)) = self.locals[index] { - if let ty::Array(_, n) = op.layout.ty.sty { + if let ty::Array(_, n) = op.layout.ty.kind { let n = n.eval_usize(bx.cx().tcx(), ty::ParamEnv::reveal_all()); return bx.cx().const_usize(n); } @@ -546,7 +553,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { ) -> Bx::Value { let is_float = input_ty.is_floating_point(); let is_signed = input_ty.is_signed(); - let is_unit = input_ty.is_unit(); match op { mir::BinOp::Add => if is_float { bx.fadd(lhs, rhs) @@ -584,13 +590,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { mir::BinOp::Shl => common::build_unchecked_lshift(bx, lhs, rhs), mir::BinOp::Shr => common::build_unchecked_rshift(bx, input_ty, lhs, rhs), mir::BinOp::Ne | mir::BinOp::Lt | mir::BinOp::Gt | - mir::BinOp::Eq | mir::BinOp::Le | mir::BinOp::Ge => if is_unit { - bx.cx().const_bool(match op { - mir::BinOp::Ne | mir::BinOp::Lt | mir::BinOp::Gt => false, - mir::BinOp::Eq | mir::BinOp::Le | mir::BinOp::Ge => true, - _ => unreachable!() - }) - } else if is_float { + mir::BinOp::Eq | mir::BinOp::Le | mir::BinOp::Ge => if is_float { bx.fcmp( base::bin_op_to_fcmp_predicate(op.to_hir_binop()), lhs, rhs diff --git a/src/librustc_codegen_ssa/mir/statement.rs b/src/librustc_codegen_ssa/mir/statement.rs index dab7dfc041..d11601be0b 100644 --- a/src/librustc_codegen_ssa/mir/statement.rs +++ b/src/librustc_codegen_ssa/mir/statement.rs @@ -17,11 +17,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { self.set_debug_loc(&mut bx, statement.source_info); match statement.kind { mir::StatementKind::Assign(box(ref place, ref rvalue)) => { - if let mir::Place { - base: mir::PlaceBase::Local(index), - projection: box [], - } = place { - match self.locals[*index] { + if let Some(index) = place.as_local() { + match self.locals[index] { LocalRef::Place(cg_dest) => { self.codegen_rvalue(bx, cg_dest, rvalue) } @@ -30,21 +27,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { } LocalRef::Operand(None) => { let (mut bx, operand) = self.codegen_rvalue_operand(bx, rvalue); - if let Some(name) = self.mir.local_decls[*index].name { - match operand.val { - OperandValue::Ref(x, ..) | - OperandValue::Immediate(x) => { - bx.set_var_name(x, name); - } - OperandValue::Pair(a, b) => { - // FIXME(eddyb) these are scalar components, - // maybe extract the high-level fields? 
- bx.set_var_name(a, format_args!("{}.0", name)); - bx.set_var_name(b, format_args!("{}.1", name)); - } - } - } - self.locals[*index] = LocalRef::Operand(Some(operand)); + self.locals[index] = LocalRef::Operand(Some(operand)); + self.debug_introduce_local(&mut bx, index); bx } LocalRef::Operand(Some(op)) => { diff --git a/src/librustc_codegen_ssa/mono_item.rs b/src/librustc_codegen_ssa/mono_item.rs index 5801963c10..10177d2997 100644 --- a/src/librustc_codegen_ssa/mono_item.rs +++ b/src/librustc_codegen_ssa/mono_item.rs @@ -30,7 +30,7 @@ impl<'a, 'tcx: 'a> MonoItemExt<'a, 'tcx> for MonoItem<'tcx> { } MonoItem::GlobalAsm(hir_id) => { let item = cx.tcx().hir().expect_item(hir_id); - if let hir::ItemKind::GlobalAsm(ref ga) = item.node { + if let hir::ItemKind::GlobalAsm(ref ga) = item.kind { cx.codegen_global_asm(ga); } else { span_bug!(item.span, "Mismatch between hir::Item type and MonoItem type") diff --git a/src/librustc_codegen_ssa/traits/backend.rs b/src/librustc_codegen_ssa/traits/backend.rs index 9fbb44dcc9..8ab8243afd 100644 --- a/src/librustc_codegen_ssa/traits/backend.rs +++ b/src/librustc_codegen_ssa/traits/backend.rs @@ -8,11 +8,14 @@ use rustc::session::{Session, config}; use rustc::ty::TyCtxt; use rustc_codegen_utils::codegen_backend::CodegenBackend; use std::sync::Arc; -use syntax::ext::allocator::AllocatorKind; -use syntax_pos::symbol::InternedString; +use std::sync::mpsc; +use syntax::expand::allocator::AllocatorKind; +use syntax_pos::symbol::Symbol; pub trait BackendTypes { type Value: CodegenObject; + type Function: CodegenObject; + type BasicBlock: Copy; type Type: CodegenObject; type Funclet; @@ -44,7 +47,12 @@ pub trait ExtraBackendMethods: CodegenBackend + WriteBackendMethods + Sized + Se mods: &mut Self::Module, kind: AllocatorKind, ); - fn compile_codegen_unit(&self, tcx: TyCtxt<'_>, cgu_name: InternedString); + fn compile_codegen_unit( + &self, + tcx: TyCtxt<'_>, + cgu_name: Symbol, + tx_to_llvm_workers: &mpsc::Sender>, + ); // If find_features is true this won't access `sess.crate_types` by assuming // that `is_pie_binary` is false. When we discover LLVM target features // `sess.crate_types` is uninitialized so we cannot access it. 
diff --git a/src/librustc_codegen_ssa/traits/builder.rs b/src/librustc_codegen_ssa/traits/builder.rs index 1886701fb3..62b5bcbb6c 100644 --- a/src/librustc_codegen_ssa/traits/builder.rs +++ b/src/librustc_codegen_ssa/traits/builder.rs @@ -34,7 +34,7 @@ pub trait BuilderMethods<'a, 'tcx>: + HasTargetSpec { - fn new_block<'b>(cx: &'a Self::CodegenCx, llfn: Self::Value, name: &'b str) -> Self; + fn new_block<'b>(cx: &'a Self::CodegenCx, llfn: Self::Function, name: &'b str) -> Self; fn with_cx(cx: &'a Self::CodegenCx) -> Self; fn build_sibling_block(&self, name: &str) -> Self; fn cx(&self) -> &Self::CodegenCx; diff --git a/src/librustc_codegen_ssa/traits/consts.rs b/src/librustc_codegen_ssa/traits/consts.rs index e7ce03f183..8c462e77d5 100644 --- a/src/librustc_codegen_ssa/traits/consts.rs +++ b/src/librustc_codegen_ssa/traits/consts.rs @@ -3,6 +3,7 @@ use crate::mir::place::PlaceRef; use rustc::mir::interpret::Allocation; use rustc::mir::interpret::Scalar; use rustc::ty::layout; +use syntax_pos::Symbol; pub trait ConstMethods<'tcx>: BackendTypes { // Constant constructors @@ -19,13 +20,12 @@ pub trait ConstMethods<'tcx>: BackendTypes { fn const_u8(&self, i: u8) -> Self::Value; fn const_real(&self, t: Self::Type, val: f64) -> Self::Value; + fn const_str(&self, s: Symbol) -> (Self::Value, Self::Value); fn const_struct(&self, elts: &[Self::Value], packed: bool) -> Self::Value; - fn const_to_uint(&self, v: Self::Value) -> u64; + fn const_to_opt_uint(&self, v: Self::Value) -> Option; fn const_to_opt_u128(&self, v: Self::Value, sign_ext: bool) -> Option; - fn is_const_integral(&self, v: Self::Value) -> bool; - fn scalar_to_backend( &self, cv: Scalar, diff --git a/src/librustc_codegen_ssa/traits/debuginfo.rs b/src/librustc_codegen_ssa/traits/debuginfo.rs index 9c16b864ef..802eaaa357 100644 --- a/src/librustc_codegen_ssa/traits/debuginfo.rs +++ b/src/librustc_codegen_ssa/traits/debuginfo.rs @@ -1,9 +1,9 @@ use super::BackendTypes; -use crate::debuginfo::{FunctionDebugContext, MirDebugScope, VariableAccess, VariableKind}; +use crate::mir::debuginfo::{FunctionDebugContext, VariableKind}; use rustc::hir::def_id::CrateNum; use rustc::mir; use rustc::ty::{self, Ty, Instance}; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc::ty::layout::Size; use syntax::ast::Name; use syntax_pos::{SourceFile, Span}; @@ -13,22 +13,15 @@ pub trait DebugInfoMethods<'tcx>: BackendTypes { /// Creates the function-specific debug context. /// /// Returns the FunctionDebugContext for the function which holds state needed - /// for debug info creation. The function may also return another variant of the - /// FunctionDebugContext enum which indicates why no debuginfo should be created - /// for the function. + /// for debug info creation, if it is enabled. 
     fn create_function_debug_context(
         &self,
         instance: Instance<'tcx>,
         sig: ty::FnSig<'tcx>,
-        llfn: Self::Value,
+        llfn: Self::Function,
         mir: &mir::Body<'_>,
-    ) -> FunctionDebugContext<Self::DIScope>;
+    ) -> Option<FunctionDebugContext<Self::DIScope>>;
-    fn create_mir_scopes(
-        &self,
-        mir: &mir::Body<'_>,
-        debug_context: &mut FunctionDebugContext<Self::DIScope>,
-    ) -> IndexVec<mir::SourceScope, MirDebugScope<Self::DIScope>>;
     fn extend_scope_to_file(
         &self,
         scope_metadata: Self::DIScope,
@@ -36,7 +29,6 @@ pub trait DebugInfoMethods<'tcx>: BackendTypes {
         defining_crate: CrateNum,
     ) -> Self::DIScope;
     fn debuginfo_finalize(&self);
-    fn debuginfo_upvar_ops_sequence(&self, byte_offset_of_var_in_env: u64) -> [i64; 4];
 }
 
 pub trait DebugInfoBuilderMethods<'tcx>: BackendTypes {
@@ -46,16 +38,19 @@ pub trait DebugInfoBuilderMethods<'tcx>: BackendTypes {
         variable_name: Name,
         variable_type: Ty<'tcx>,
         scope_metadata: Self::DIScope,
-        variable_access: VariableAccess<'_, Self::Value>,
+        variable_alloca: Self::Value,
+        direct_offset: Size,
+        // NB: each offset implies a deref (i.e. they're steps in a pointer chain).
+        indirect_offsets: &[Size],
         variable_kind: VariableKind,
         span: Span,
     );
     fn set_source_location(
         &mut self,
         debug_context: &mut FunctionDebugContext<Self::DIScope>,
-        scope: Option<Self::DIScope>,
+        scope: Self::DIScope,
         span: Span,
     );
     fn insert_reference_to_gdb_debug_scripts_section_global(&mut self);
-    fn set_var_name(&mut self, value: Self::Value, name: impl ToString);
+    fn set_var_name(&mut self, value: Self::Value, name: &str);
 }
diff --git a/src/librustc_codegen_ssa/traits/declare.rs b/src/librustc_codegen_ssa/traits/declare.rs
index 624a982b61..cd42044e48 100644
--- a/src/librustc_codegen_ssa/traits/declare.rs
+++ b/src/librustc_codegen_ssa/traits/declare.rs
@@ -17,13 +17,13 @@ pub trait DeclareMethods<'tcx>: BackendTypes {
     ///
     /// If there’s a value with the same name already declared, the function will
     /// update the declaration and return existing Value instead.
-    fn declare_cfn(&self, name: &str, fn_type: Self::Type) -> Self::Value;
+    fn declare_cfn(&self, name: &str, fn_type: Self::Type) -> Self::Function;
 
     /// Declare a Rust function.
     ///
     /// If there’s a value with the same name already declared, the function will
     /// update the declaration and return existing Value instead.
-    fn declare_fn(&self, name: &str, sig: ty::PolyFnSig<'tcx>) -> Self::Value;
+    fn declare_fn(&self, name: &str, sig: ty::PolyFnSig<'tcx>) -> Self::Function;
 
     /// Declare a global with an intention to define it.
     ///
diff --git a/src/librustc_codegen_ssa/traits/intrinsic.rs b/src/librustc_codegen_ssa/traits/intrinsic.rs
index 7c79cd6021..2c484084c4 100644
--- a/src/librustc_codegen_ssa/traits/intrinsic.rs
+++ b/src/librustc_codegen_ssa/traits/intrinsic.rs
@@ -20,6 +20,7 @@ pub trait IntrinsicCallMethods<'tcx>: BackendTypes {
     fn abort(&mut self);
     fn assume(&mut self, val: Self::Value);
     fn expect(&mut self, cond: Self::Value, expected: bool) -> Self::Value;
+    fn sideeffect(&mut self);
 
     /// Trait method used to inject `va_start` on the "spoofed" `VaListImpl` in
     /// Rust defined C-variadic functions.
fn va_start(&mut self, val: Self::Value) -> Self::Value; diff --git a/src/librustc_codegen_ssa/traits/misc.rs b/src/librustc_codegen_ssa/traits/misc.rs index 46c88a6113..658ddd0028 100644 --- a/src/librustc_codegen_ssa/traits/misc.rs +++ b/src/librustc_codegen_ssa/traits/misc.rs @@ -11,14 +11,14 @@ pub trait MiscMethods<'tcx>: BackendTypes { &self, ) -> &RefCell, Option>), Self::Value>>; fn check_overflow(&self) -> bool; - fn instances(&self) -> &RefCell, Self::Value>>; - fn get_fn(&self, instance: Instance<'tcx>) -> Self::Value; + fn get_fn(&self, instance: Instance<'tcx>) -> Self::Function; + fn get_fn_addr(&self, instance: Instance<'tcx>) -> Self::Value; fn eh_personality(&self) -> Self::Value; fn eh_unwind_resume(&self) -> Self::Value; fn sess(&self) -> &Session; fn codegen_unit(&self) -> &Arc>; fn used_statics(&self) -> &RefCell>; - fn set_frame_pointer_elimination(&self, llfn: Self::Value); - fn apply_target_cpu_attr(&self, llfn: Self::Value); + fn set_frame_pointer_elimination(&self, llfn: Self::Function); + fn apply_target_cpu_attr(&self, llfn: Self::Function); fn create_used_variable(&self); } diff --git a/src/librustc_codegen_ssa/traits/mod.rs b/src/librustc_codegen_ssa/traits/mod.rs index efe4a25570..4318ef1649 100644 --- a/src/librustc_codegen_ssa/traits/mod.rs +++ b/src/librustc_codegen_ssa/traits/mod.rs @@ -88,6 +88,7 @@ pub trait HasCodegen<'tcx>: type CodegenCx: CodegenMethods<'tcx> + BackendTypes< Value = Self::Value, + Function = Self::Function, BasicBlock = Self::BasicBlock, Type = Self::Type, Funclet = Self::Funclet, diff --git a/src/librustc_codegen_ssa/traits/statics.rs b/src/librustc_codegen_ssa/traits/statics.rs index 73c4c05397..5c108f9fa6 100644 --- a/src/librustc_codegen_ssa/traits/statics.rs +++ b/src/librustc_codegen_ssa/traits/statics.rs @@ -1,5 +1,4 @@ use super::BackendTypes; -use syntax_pos::symbol::Symbol; use rustc::hir::def_id::DefId; use rustc::ty::layout::Align; @@ -10,12 +9,4 @@ pub trait StaticMethods: BackendTypes { pub trait StaticBuilderMethods: BackendTypes { fn get_static(&mut self, def_id: DefId) -> Self::Value; - fn static_panic_msg( - &mut self, - msg: Option, - filename: Symbol, - line: Self::Value, - col: Self::Value, - kind: &str, - ) -> Self::Value; } diff --git a/src/librustc_codegen_ssa/traits/type_.rs b/src/librustc_codegen_ssa/traits/type_.rs index 13f72e2381..19d41c6b37 100644 --- a/src/librustc_codegen_ssa/traits/type_.rs +++ b/src/librustc_codegen_ssa/traits/type_.rs @@ -83,7 +83,7 @@ pub trait DerivedTypeMethods<'tcx>: BaseTypeMethods<'tcx> + MiscMethods<'tcx> { } let tail = self.tcx().struct_tail_erasing_lifetimes(ty, param_env); - match tail.sty { + match tail.kind { ty::Foreign(..) => false, ty::Str | ty::Slice(..) | ty::Dynamic(..) 
=> true, _ => bug!("unexpected unsized tail: {:?}", tail), diff --git a/src/librustc_codegen_utils/Cargo.toml b/src/librustc_codegen_utils/Cargo.toml index 89b50c5dac..c8c219d039 100644 --- a/src/librustc_codegen_utils/Cargo.toml +++ b/src/librustc_codegen_utils/Cargo.toml @@ -10,7 +10,6 @@ path = "lib.rs" test = false [dependencies] -flate2 = "1.0" log = "0.4" punycode = "0.4.0" rustc-demangle = "0.1.16" diff --git a/src/librustc_codegen_utils/codegen_backend.rs b/src/librustc_codegen_utils/codegen_backend.rs index 262cfb1658..0e2c3731ea 100644 --- a/src/librustc_codegen_utils/codegen_backend.rs +++ b/src/librustc_codegen_utils/codegen_backend.rs @@ -7,7 +7,6 @@ #![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] use std::any::Any; -use std::sync::mpsc; use syntax::symbol::Symbol; use rustc::session::Session; @@ -15,7 +14,7 @@ use rustc::util::common::ErrorReported; use rustc::session::config::{OutputFilenames, PrintRequest}; use rustc::ty::TyCtxt; use rustc::ty::query::Providers; -use rustc::middle::cstore::{EncodedMetadata, MetadataLoader}; +use rustc::middle::cstore::{EncodedMetadata, MetadataLoaderDyn}; use rustc::dep_graph::DepGraph; pub use rustc_data_structures::sync::MetadataRef; @@ -26,9 +25,8 @@ pub trait CodegenBackend { fn target_features(&self, _sess: &Session) -> Vec { vec![] } fn print_passes(&self) {} fn print_version(&self) {} - fn diagnostics(&self) -> &[(&'static str, &'static str)] { &[] } - fn metadata_loader(&self) -> Box; + fn metadata_loader(&self) -> Box; fn provide(&self, _providers: &mut Providers<'_>); fn provide_extern(&self, _providers: &mut Providers<'_>); fn codegen_crate<'tcx>( @@ -36,7 +34,6 @@ pub trait CodegenBackend { tcx: TyCtxt<'tcx>, metadata: EncodedMetadata, need_metadata_module: bool, - rx: mpsc::Receiver>, ) -> Box; /// This is called on the returned `Box` from `codegen_backend` diff --git a/src/librustc_codegen_utils/lib.rs b/src/librustc_codegen_utils/lib.rs index 1201446afb..66920342ff 100644 --- a/src/librustc_codegen_utils/lib.rs +++ b/src/librustc_codegen_utils/lib.rs @@ -18,6 +18,7 @@ extern crate rustc; use rustc::ty::TyCtxt; +use rustc::ty::query::Providers; use rustc::hir::def_id::LOCAL_CRATE; use syntax::symbol::sym; @@ -37,3 +38,7 @@ pub fn check_for_rustc_errors_attr(tcx: TyCtxt<'_>) { } } } + +pub fn provide(providers: &mut Providers<'_>) { + crate::symbol_names::provide(providers); +} diff --git a/src/librustc_codegen_utils/symbol_names.rs b/src/librustc_codegen_utils/symbol_names.rs index 7ccd024769..c52c6cfa83 100644 --- a/src/librustc_codegen_utils/symbol_names.rs +++ b/src/librustc_codegen_utils/symbol_names.rs @@ -95,7 +95,7 @@ use rustc::ty::query::Providers; use rustc::ty::{self, TyCtxt, Instance}; use rustc::mir::mono::{MonoItem, InstantiationMode}; -use syntax_pos::symbol::InternedString; +use syntax_pos::symbol::Symbol; use log::debug; @@ -112,7 +112,7 @@ pub fn provide(providers: &mut Providers<'_>) { }; } -fn symbol_name(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> InternedString { +fn symbol_name(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> Symbol { let def_id = instance.def_id(); let substs = instance.substs; @@ -123,13 +123,11 @@ fn symbol_name(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> InternedString { if def_id.is_local() { if tcx.plugin_registrar_fn(LOCAL_CRATE) == Some(def_id) { let disambiguator = tcx.sess.local_crate_disambiguator(); - return - InternedString::intern(&tcx.sess.generate_plugin_registrar_symbol(disambiguator)); + return 
Symbol::intern(&tcx.sess.generate_plugin_registrar_symbol(disambiguator)); } if tcx.proc_macro_decls_static(LOCAL_CRATE) == Some(def_id) { let disambiguator = tcx.sess.local_crate_disambiguator(); - return - InternedString::intern(&tcx.sess.generate_proc_macro_decls_symbol(disambiguator)); + return Symbol::intern(&tcx.sess.generate_proc_macro_decls_symbol(disambiguator)); } } @@ -146,23 +144,22 @@ fn symbol_name(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> InternedString { let attrs = tcx.codegen_fn_attrs(def_id); if is_foreign { if let Some(name) = attrs.link_name { - return name.as_interned_str(); + return name; } // Don't mangle foreign items. - return tcx.item_name(def_id).as_interned_str(); + return tcx.item_name(def_id); } - if let Some(name) = &attrs.export_name { + if let Some(name) = attrs.export_name { // Use provided name - return name.as_interned_str(); + return name; } if attrs.flags.contains(CodegenFnAttrFlags::NO_MANGLE) { // Don't mangle - return tcx.item_name(def_id).as_interned_str(); + return tcx.item_name(def_id); } - let is_generic = substs.non_erasable_generics().next().is_some(); let avoid_cross_crate_conflicts = // If this is an instance of a generic function, we also hash in @@ -222,5 +219,5 @@ fn symbol_name(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> InternedString { SymbolManglingVersion::V0 => v0::mangle(tcx, instance, instantiating_crate), }; - InternedString::intern(&mangled) + Symbol::intern(&mangled) } diff --git a/src/librustc_codegen_utils/symbol_names/legacy.rs b/src/librustc_codegen_utils/symbol_names/legacy.rs index 22b7e0a2fb..601a33a66b 100644 --- a/src/librustc_codegen_utils/symbol_names/legacy.rs +++ b/src/librustc_codegen_utils/symbol_names/legacy.rs @@ -3,7 +3,7 @@ use rustc::hir::map::{DefPathData, DisambiguatedDefPathData}; use rustc::ich::NodeIdHashingMode; use rustc::mir::interpret::{ConstValue, Scalar}; use rustc::ty::print::{PrettyPrinter, Printer, Print}; -use rustc::ty::subst::{Kind, UnpackedKind}; +use rustc::ty::subst::{GenericArg, GenericArgKind}; use rustc::ty::{self, Ty, TyCtxt, TypeFoldable, Instance}; use rustc::util::common::record_time; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; @@ -89,7 +89,7 @@ fn get_symbol_hash<'tcx>( def_id, substs ); - let mut hasher = StableHasher::::new(); + let mut hasher = StableHasher::new(); let mut hcx = tcx.create_stable_hashing_context(); record_time(&tcx.sess.perf_stats.symbol_hash_time, || { @@ -111,7 +111,7 @@ fn get_symbol_hash<'tcx>( // If this is a function, we hash the signature as well. // This is not *strictly* needed, but it may help in some // situations, see the `run-make/a-b-a-linker-guard` test. - if let ty::FnDef(..) = item_type.sty { + if let ty::FnDef(..) = item_type.kind { item_type.fn_sig(tcx).hash_stable(&mut hcx, &mut hasher); } @@ -132,7 +132,7 @@ fn get_symbol_hash<'tcx>( }); // 64 bits should be enough to avoid collisions. - hasher.finish() + hasher.finish::() } // Follow C++ namespace-mangling style, see @@ -218,14 +218,14 @@ impl Printer<'tcx> for SymbolPrinter<'tcx> { self, ty: Ty<'tcx>, ) -> Result { - match ty.sty { + match ty.kind { // Print all nominal types as paths (unlike `pretty_print_type`). 
ty::FnDef(def_id, substs) | ty::Opaque(def_id, substs) | ty::Projection(ty::ProjectionTy { item_def_id: def_id, substs }) | ty::UnnormalizedProjection(ty::ProjectionTy { item_def_id: def_id, substs }) | - ty::Closure(def_id, ty::ClosureSubsts { substs }) | - ty::Generator(def_id, ty::GeneratorSubsts { substs }, _) => { + ty::Closure(def_id, substs) | + ty::Generator(def_id, substs, _) => { self.print_def_path(def_id, substs) } _ => self.pretty_print_type(ty), @@ -275,7 +275,7 @@ impl Printer<'tcx> for SymbolPrinter<'tcx> { ) -> Result { // Similar to `pretty_path_qualified`, but for the other // types that are printed as paths (see `print_type` above). - match self_ty.sty { + match self_ty.kind { ty::FnDef(..) | ty::Opaque(..) | ty::Projection(_) | @@ -335,19 +335,19 @@ impl Printer<'tcx> for SymbolPrinter<'tcx> { self.path.finalize_pending_component(); } - self.write_str(&disambiguated_data.data.as_interned_str().as_str())?; + self.write_str(&disambiguated_data.data.as_symbol().as_str())?; Ok(self) } fn path_generic_args( mut self, print_prefix: impl FnOnce(Self) -> Result, - args: &[Kind<'tcx>], + args: &[GenericArg<'tcx>], ) -> Result { self = print_prefix(self)?; let args = args.iter().cloned().filter(|arg| { match arg.unpack() { - UnpackedKind::Lifetime(_) => false, + GenericArgKind::Lifetime(_) => false, _ => true, } }); diff --git a/src/librustc_codegen_utils/symbol_names/v0.rs b/src/librustc_codegen_utils/symbol_names/v0.rs index 8d6a1d757e..55b148fceb 100644 --- a/src/librustc_codegen_utils/symbol_names/v0.rs +++ b/src/librustc_codegen_utils/symbol_names/v0.rs @@ -3,7 +3,7 @@ use rustc::hir::def_id::{CrateNum, DefId}; use rustc::hir::map::{DefPathData, DisambiguatedDefPathData}; use rustc::ty::{self, Ty, TyCtxt, TypeFoldable, Instance}; use rustc::ty::print::{Printer, Print}; -use rustc::ty::subst::{Kind, Subst, UnpackedKind}; +use rustc::ty::subst::{GenericArg, Subst, GenericArgKind}; use rustc_data_structures::base_n; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_target::spec::abi::Abi; @@ -56,7 +56,7 @@ struct CompressionCaches<'tcx> { start_offset: usize, // The values are start positions in `out`, in bytes. - paths: FxHashMap<(DefId, &'tcx [Kind<'tcx>]), usize>, + paths: FxHashMap<(DefId, &'tcx [GenericArg<'tcx>]), usize>, types: FxHashMap, usize>, consts: FxHashMap<&'tcx ty::Const<'tcx>, usize>, } @@ -234,7 +234,7 @@ impl Printer<'tcx> for SymbolMangler<'tcx> { fn print_def_path( mut self, def_id: DefId, - substs: &'tcx [Kind<'tcx>], + substs: &'tcx [GenericArg<'tcx>], ) -> Result { if let Some(&i) = self.compress.as_ref().and_then(|c| c.paths.get(&(def_id, substs))) { return self.print_backref(i); @@ -256,7 +256,7 @@ impl Printer<'tcx> for SymbolMangler<'tcx> { fn print_impl_path( self, impl_def_id: DefId, - substs: &'tcx [Kind<'tcx>], + substs: &'tcx [GenericArg<'tcx>], mut self_ty: Ty<'tcx>, mut impl_trait_ref: Option>, ) -> Result { @@ -323,7 +323,7 @@ impl Printer<'tcx> for SymbolMangler<'tcx> { ty: Ty<'tcx>, ) -> Result { // Basic types, never cached (single-character). - let basic_type = match ty.sty { + let basic_type = match ty.kind { ty::Bool => "b", ty::Char => "c", ty::Str => "e", @@ -360,7 +360,7 @@ impl Printer<'tcx> for SymbolMangler<'tcx> { } let start = self.out.len(); - match ty.sty { + match ty.kind { // Basic types, handled above. 
ty::Bool | ty::Char | ty::Str | ty::Int(_) | ty::Uint(_) | ty::Float(_) | @@ -414,8 +414,8 @@ impl Printer<'tcx> for SymbolMangler<'tcx> { ty::Opaque(def_id, substs) | ty::Projection(ty::ProjectionTy { item_def_id: def_id, substs }) | ty::UnnormalizedProjection(ty::ProjectionTy { item_def_id: def_id, substs }) | - ty::Closure(def_id, ty::ClosureSubsts { substs }) | - ty::Generator(def_id, ty::GeneratorSubsts { substs }, _) => { + ty::Closure(def_id, substs) | + ty::Generator(def_id, substs, _) => { self = self.print_def_path(def_id, substs)?; } ty::Foreign(def_id) => { @@ -511,7 +511,7 @@ impl Printer<'tcx> for SymbolMangler<'tcx> { } let start = self.out.len(); - match ct.ty.sty { + match ct.ty.kind { ty::Uint(_) => {} _ => { bug!("symbol_names: unsupported constant of type `{}` ({:?})", @@ -619,18 +619,18 @@ impl Printer<'tcx> for SymbolMangler<'tcx> { fn path_generic_args( mut self, print_prefix: impl FnOnce(Self) -> Result, - args: &[Kind<'tcx>], + args: &[GenericArg<'tcx>], ) -> Result { // Don't print any regions if they're all erased. let print_regions = args.iter().any(|arg| { match arg.unpack() { - UnpackedKind::Lifetime(r) => *r != ty::ReErased, + GenericArgKind::Lifetime(r) => *r != ty::ReErased, _ => false, } }); let args = args.iter().cloned().filter(|arg| { match arg.unpack() { - UnpackedKind::Lifetime(_) => print_regions, + GenericArgKind::Lifetime(_) => print_regions, _ => true, } }); @@ -643,13 +643,13 @@ impl Printer<'tcx> for SymbolMangler<'tcx> { self = print_prefix(self)?; for arg in args { match arg.unpack() { - UnpackedKind::Lifetime(lt) => { + GenericArgKind::Lifetime(lt) => { self = lt.print(self)?; } - UnpackedKind::Type(ty) => { + GenericArgKind::Type(ty) => { self = ty.print(self)?; } - UnpackedKind::Const(c) => { + GenericArgKind::Const(c) => { self.push("K"); // FIXME(const_generics) implement `ty::print::Print` on `ty::Const`. 
// self = c.print(self)?; diff --git a/src/librustc_data_structures/Cargo.toml b/src/librustc_data_structures/Cargo.toml index be9f79c83b..065c8436ae 100644 --- a/src/librustc_data_structures/Cargo.toml +++ b/src/librustc_data_structures/Cargo.toml @@ -10,7 +10,7 @@ path = "lib.rs" doctest = false [dependencies] -ena = "0.13" +ena = "0.13.1" indexmap = "1" log = "0.4" jobserver_crate = { version = "0.1.13", package = "jobserver" } @@ -20,10 +20,11 @@ graphviz = { path = "../libgraphviz" } cfg-if = "0.1.2" crossbeam-utils = { version = "0.6.5", features = ["nightly"] } stable_deref_trait = "1.0.0" -rayon = { version = "0.2.0", package = "rustc-rayon" } -rayon-core = { version = "0.2.0", package = "rustc-rayon-core" } +rayon = { version = "0.3.0", package = "rustc-rayon" } +rayon-core = { version = "0.3.0", package = "rustc-rayon-core" } rustc-hash = "1.0.1" smallvec = { version = "0.6.7", features = ["union", "may_dangle"] } +rustc_index = { path = "../librustc_index", package = "rustc_index" } [dependencies.parking_lot] version = "0.9" diff --git a/src/librustc_data_structures/fingerprint.rs b/src/librustc_data_structures/fingerprint.rs index c8012bb942..b43df6045d 100644 --- a/src/librustc_data_structures/fingerprint.rs +++ b/src/librustc_data_structures/fingerprint.rs @@ -76,7 +76,7 @@ impl ::std::fmt::Display for Fingerprint { impl stable_hasher::StableHasherResult for Fingerprint { #[inline] - fn finish(hasher: stable_hasher::StableHasher) -> Self { + fn finish(hasher: stable_hasher::StableHasher) -> Self { let (_0, _1) = hasher.finalize(); Fingerprint(_0, _1) } diff --git a/src/librustc_data_structures/graph/dominators/mod.rs b/src/librustc_data_structures/graph/dominators/mod.rs index 41e6b72953..444463c08e 100644 --- a/src/librustc_data_structures/graph/dominators/mod.rs +++ b/src/librustc_data_structures/graph/dominators/mod.rs @@ -4,7 +4,7 @@ //! Rice Computer Science TS-06-33870 //! -use super::super::indexed_vec::{Idx, IndexVec}; +use rustc_index::vec::{Idx, IndexVec}; use super::iterate::reverse_post_order; use super::ControlFlowGraph; @@ -17,7 +17,7 @@ pub fn dominators(graph: &G) -> Dominators { dominators_given_rpo(graph, &rpo) } -pub fn dominators_given_rpo( +fn dominators_given_rpo( graph: &G, rpo: &[G::Node], ) -> Dominators { @@ -43,14 +43,12 @@ pub fn dominators_given_rpo( let mut new_idom = None; for pred in graph.predecessors(node) { if immediate_dominators[pred].is_some() { - // (*) // (*) dominators for `pred` have been calculated - new_idom = intersect_opt( - &post_order_rank, - &immediate_dominators, - new_idom, - Some(pred), - ); + new_idom = Some(if let Some(new_idom) = new_idom { + intersect(&post_order_rank, &immediate_dominators, new_idom, pred) + } else { + pred + }); } } @@ -67,19 +65,6 @@ pub fn dominators_given_rpo( } } -fn intersect_opt( - post_order_rank: &IndexVec, - immediate_dominators: &IndexVec>, - node1: Option, - node2: Option, -) -> Option { - match (node1, node2) { - (None, None) => None, - (Some(n), None) | (None, Some(n)) => Some(n), - (Some(n1), Some(n2)) => Some(intersect(post_order_rank, immediate_dominators, n1, n2)), - } -} - fn intersect( post_order_rank: &IndexVec, immediate_dominators: &IndexVec>, diff --git a/src/librustc_data_structures/graph/implementation/mod.rs b/src/librustc_data_structures/graph/implementation/mod.rs index d2699004c8..9fdcea6df8 100644 --- a/src/librustc_data_structures/graph/implementation/mod.rs +++ b/src/librustc_data_structures/graph/implementation/mod.rs @@ -20,7 +20,7 @@ //! the field `next_edge`). 
Each of those fields is an array that should //! be indexed by the direction (see the type `Direction`). -use crate::bit_set::BitSet; +use rustc_index::bit_set::BitSet; use crate::snapshot_vec::{SnapshotVec, SnapshotVecDelegate}; use std::fmt::Debug; use std::usize; @@ -60,10 +60,10 @@ impl SnapshotVecDelegate for Edge { fn reverse(_: &mut Vec>, _: ()) {} } -#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] +#[derive(Copy, Clone, PartialEq, Debug)] pub struct NodeIndex(pub usize); -#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] +#[derive(Copy, Clone, PartialEq, Debug)] pub struct EdgeIndex(pub usize); pub const INVALID_EDGE_INDEX: EdgeIndex = EdgeIndex(usize::MAX); @@ -303,11 +303,11 @@ pub struct AdjacentEdges<'g, N, E> { impl<'g, N: Debug, E: Debug> AdjacentEdges<'g, N, E> { fn targets(self) -> impl Iterator + 'g { - self.into_iter().map(|(_, edge)| edge.target) + self.map(|(_, edge)| edge.target) } fn sources(self) -> impl Iterator + 'g { - self.into_iter().map(|(_, edge)| edge.source) + self.map(|(_, edge)| edge.source) } } diff --git a/src/librustc_data_structures/graph/iterate/mod.rs b/src/librustc_data_structures/graph/iterate/mod.rs index c4185fc7cd..e268b28174 100644 --- a/src/librustc_data_structures/graph/iterate/mod.rs +++ b/src/librustc_data_structures/graph/iterate/mod.rs @@ -1,6 +1,6 @@ -use super::super::indexed_vec::IndexVec; -use super::{DirectedGraph, WithNumNodes, WithSuccessors}; -use crate::bit_set::BitSet; +use rustc_index::vec::IndexVec; +use super::{DirectedGraph, WithNumNodes, WithSuccessors, WithStartNode}; +use rustc_index::bit_set::BitSet; #[cfg(test)] mod tests; @@ -85,3 +85,205 @@ where Some(n) } } + +/// Allows searches to terminate early with a value. +#[derive(Clone, Copy, Debug)] +pub enum ControlFlow { + Break(T), + Continue, +} + +/// The status of a node in the depth-first search. +/// +/// See the documentation of `TriColorDepthFirstSearch` to see how a node's status is updated +/// during DFS. +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum NodeStatus { + /// This node has been examined by the depth-first search but is not yet `Settled`. + /// + /// Also referred to as "gray" or "discovered" nodes in [CLR][]. + /// + /// [CLR]: https://en.wikipedia.org/wiki/Introduction_to_Algorithms + Visited, + + /// This node and all nodes reachable from it have been examined by the depth-first search. + /// + /// Also referred to as "black" or "finished" nodes in [CLR][]. + /// + /// [CLR]: https://en.wikipedia.org/wiki/Introduction_to_Algorithms + Settled, +} + +struct Event { + node: N, + becomes: NodeStatus, +} + +/// A depth-first search that also tracks when all successors of a node have been examined. +/// +/// This is based on the DFS described in [Introduction to Algorithms (1st ed.)][CLR], hereby +/// referred to as **CLR**. However, we use the terminology in [`NodeStatus`][] above instead of +/// "discovered"/"finished" or "white"/"grey"/"black". Each node begins the search with no status, +/// becomes `Visited` when it is first examined by the DFS and is `Settled` when all nodes +/// reachable from it have been examined. This allows us to differentiate between "tree", "back" +/// and "forward" edges (see [`TriColorVisitor::node_examined`]). +/// +/// Unlike the pseudocode in [CLR][], this implementation is iterative and does not use timestamps. +/// We accomplish this by storing `Event`s on the stack that result in a (possible) state change +/// for each node. 
A `Visited` event signifies that we should examine this node if it has not yet +/// been `Visited` or `Settled`. When a node is examined for the first time, we mark it as +/// `Visited` and push a `Settled` event for it on stack followed by `Visited` events for all of +/// its predecessors, scheduling them for examination. Multiple `Visited` events for a single node +/// may exist on the stack simultaneously if a node has multiple predecessors, but only one +/// `Settled` event will ever be created for each node. After all `Visited` events for a node's +/// successors have been popped off the stack (as well as any new events triggered by visiting +/// those successors), we will pop off that node's `Settled` event. +/// +/// [CLR]: https://en.wikipedia.org/wiki/Introduction_to_Algorithms +/// [`NodeStatus`]: ./enum.NodeStatus.html +/// [`TriColorVisitor::node_examined`]: ./trait.TriColorVisitor.html#method.node_examined +pub struct TriColorDepthFirstSearch<'graph, G> +where + G: ?Sized + DirectedGraph + WithNumNodes + WithSuccessors, +{ + graph: &'graph G, + stack: Vec>, + visited: BitSet, + settled: BitSet, +} + +impl TriColorDepthFirstSearch<'graph, G> +where + G: ?Sized + DirectedGraph + WithNumNodes + WithSuccessors, +{ + pub fn new(graph: &'graph G) -> Self { + TriColorDepthFirstSearch { + graph, + stack: vec![], + visited: BitSet::new_empty(graph.num_nodes()), + settled: BitSet::new_empty(graph.num_nodes()), + } + } + + /// Performs a depth-first search, starting from the given `root`. + /// + /// This won't visit nodes that are not reachable from `root`. + pub fn run_from(mut self, root: G::Node, visitor: &mut V) -> Option + where + V: TriColorVisitor, + { + use NodeStatus::{Visited, Settled}; + + self.stack.push(Event { node: root, becomes: Visited }); + + loop { + match self.stack.pop()? { + Event { node, becomes: Settled } => { + let not_previously_settled = self.settled.insert(node); + assert!(not_previously_settled, "A node should be settled exactly once"); + if let ControlFlow::Break(val) = visitor.node_settled(node) { + return Some(val); + } + } + + Event { node, becomes: Visited } => { + let not_previously_visited = self.visited.insert(node); + let prior_status = if not_previously_visited { + None + } else if self.settled.contains(node) { + Some(Settled) + } else { + Some(Visited) + }; + + if let ControlFlow::Break(val) = visitor.node_examined(node, prior_status) { + return Some(val); + } + + // If this node has already been examined, we are done. + if prior_status.is_some() { + continue; + } + + // Otherwise, push a `Settled` event for this node onto the stack, then + // schedule its successors for examination. + self.stack.push(Event { node, becomes: Settled }); + for succ in self.graph.successors(node) { + self.stack.push(Event { node: succ, becomes: Visited }); + } + } + } + } + } +} + +impl TriColorDepthFirstSearch<'graph, G> +where + G: ?Sized + DirectedGraph + WithNumNodes + WithSuccessors + WithStartNode, +{ + /// Performs a depth-first search, starting from `G::start_node()`. + /// + /// This won't visit nodes that are not reachable from the start node. + pub fn run_from_start(self, visitor: &mut V) -> Option + where + V: TriColorVisitor, + { + let root = self.graph.start_node(); + self.run_from(root, visitor) + } +} + +/// What to do when a node is examined or becomes `Settled` during DFS. +pub trait TriColorVisitor +where + G: ?Sized + DirectedGraph, +{ + /// The value returned by this search. 
+ type BreakVal; + + /// Called when a node is examined by the depth-first search. + /// + /// By checking the value of `prior_status`, this visitor can determine whether the edge + /// leading to this node was a tree edge (`None`), forward edge (`Some(Settled)`) or back edge + /// (`Some(Visited)`). For a full explanation of each edge type, see the "Depth-first Search" + /// chapter in [CLR][] or [wikipedia][]. + /// + /// If you want to know *both* nodes linked by each edge, you'll need to modify + /// `TriColorDepthFirstSearch` to store a `source` node for each `Visited` event. + /// + /// [wikipedia]: https://en.wikipedia.org/wiki/Depth-first_search#Output_of_a_depth-first_search + /// [CLR]: https://en.wikipedia.org/wiki/Introduction_to_Algorithms + fn node_examined( + &mut self, + _target: G::Node, + _prior_status: Option, + ) -> ControlFlow { + ControlFlow::Continue + } + + /// Called after all nodes reachable from this one have been examined. + fn node_settled(&mut self, _target: G::Node) -> ControlFlow { + ControlFlow::Continue + } +} + +/// This `TriColorVisitor` looks for back edges in a graph, which indicate that a cycle exists. +pub struct CycleDetector; + +impl TriColorVisitor for CycleDetector +where + G: ?Sized + DirectedGraph, +{ + type BreakVal = (); + + fn node_examined( + &mut self, + _node: G::Node, + prior_status: Option, + ) -> ControlFlow { + match prior_status { + Some(NodeStatus::Visited) => ControlFlow::Break(()), + _ => ControlFlow::Continue, + } + } +} diff --git a/src/librustc_data_structures/graph/iterate/tests.rs b/src/librustc_data_structures/graph/iterate/tests.rs index 6c7cfd6d8a..0e038e88b2 100644 --- a/src/librustc_data_structures/graph/iterate/tests.rs +++ b/src/librustc_data_structures/graph/iterate/tests.rs @@ -9,3 +9,14 @@ fn diamond_post_order() { let result = post_order_from(&graph, 0); assert_eq!(result, vec![3, 1, 2, 0]); } + +#[test] +fn is_cyclic() { + use super::super::is_cyclic; + + let diamond_acyclic = TestGraph::new(0, &[(0, 1), (0, 2), (1, 3), (2, 3)]); + let diamond_cyclic = TestGraph::new(0, &[(0, 1), (1, 2), (2, 3), (3, 0)]); + + assert!(!is_cyclic(&diamond_acyclic)); + assert!(is_cyclic(&diamond_cyclic)); +} diff --git a/src/librustc_data_structures/graph/mod.rs b/src/librustc_data_structures/graph/mod.rs index 662581ca1e..37335799d1 100644 --- a/src/librustc_data_structures/graph/mod.rs +++ b/src/librustc_data_structures/graph/mod.rs @@ -1,4 +1,4 @@ -use super::indexed_vec::Idx; +use rustc_index::vec::Idx; pub mod dominators; pub mod implementation; @@ -81,3 +81,13 @@ where + WithNumNodes, { } + +/// Returns `true` if the graph has a cycle that is reachable from the start node. 
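// Editor's illustrative sketch (not part of this patch): how a caller might drive the new
// tri-color DFS with a custom visitor, using the `TestGraph` helper from the `is_cyclic`
// test above. The visitor name `FindSettled` is hypothetical; it only demonstrates the
// `BreakVal` / `ControlFlow` protocol described in the doc comments.
struct FindSettled { target: usize }

impl<G> TriColorVisitor<G> for FindSettled
where
    G: ?Sized + DirectedGraph<Node = usize>,
{
    // Break out of the search with the node itself once it is fully settled.
    type BreakVal = usize;

    fn node_settled(&mut self, node: usize) -> ControlFlow<usize> {
        if node == self.target { ControlFlow::Break(node) } else { ControlFlow::Continue }
    }
}

// let graph = TestGraph::new(0, &[(0, 1), (1, 2)]);
// `run_from` returns Some(2) once node 2 and everything reachable from it has been examined:
// let settled = TriColorDepthFirstSearch::new(&graph).run_from(0, &mut FindSettled { target: 2 });
// assert_eq!(settled, Some(2));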
+pub fn is_cyclic(graph: &G) -> bool +where + G: ?Sized + DirectedGraph + WithStartNode + WithSuccessors + WithNumNodes, +{ + iterate::TriColorDepthFirstSearch::new(graph) + .run_from_start(&mut iterate::CycleDetector) + .is_some() +} diff --git a/src/librustc_data_structures/graph/scc/mod.rs b/src/librustc_data_structures/graph/scc/mod.rs index 23a1a2a90a..c214f66cd1 100644 --- a/src/librustc_data_structures/graph/scc/mod.rs +++ b/src/librustc_data_structures/graph/scc/mod.rs @@ -6,7 +6,7 @@ use crate::fx::FxHashSet; use crate::graph::{DirectedGraph, WithNumNodes, WithNumEdges, WithSuccessors, GraphSuccessors}; use crate::graph::vec_graph::VecGraph; -use crate::indexed_vec::{Idx, IndexVec}; +use rustc_index::vec::{Idx, IndexVec}; use std::ops::Range; #[cfg(test)] diff --git a/src/librustc_data_structures/graph/vec_graph/mod.rs b/src/librustc_data_structures/graph/vec_graph/mod.rs index 19c61f2680..aad5944dcd 100644 --- a/src/librustc_data_structures/graph/vec_graph/mod.rs +++ b/src/librustc_data_structures/graph/vec_graph/mod.rs @@ -1,4 +1,4 @@ -use crate::indexed_vec::{Idx, IndexVec}; +use rustc_index::vec::{Idx, IndexVec}; use crate::graph::{DirectedGraph, WithNumNodes, WithNumEdges, WithSuccessors, GraphSuccessors}; #[cfg(test)] diff --git a/src/librustc_data_structures/lib.rs b/src/librustc_data_structures/lib.rs index f759350195..474a42644d 100644 --- a/src/librustc_data_structures/lib.rs +++ b/src/librustc_data_structures/lib.rs @@ -68,13 +68,12 @@ pub mod macros; pub mod svh; pub mod base_n; pub mod binary_search_util; -pub mod bit_set; pub mod box_region; pub mod const_cstr; pub mod flock; pub mod fx; +pub mod stable_map; pub mod graph; -pub mod indexed_vec; pub mod jobserver; pub mod obligation_forest; pub mod owning_ref; @@ -84,6 +83,7 @@ pub mod small_c_str; pub mod snapshot_map; pub use ena::snapshot_vec; pub mod sorted_map; +pub mod stable_set; #[macro_use] pub mod stable_hasher; pub mod sync; pub mod sharded; diff --git a/src/librustc_data_structures/obligation_forest/mod.rs b/src/librustc_data_structures/obligation_forest/mod.rs index 98ae1a5832..958ab617cb 100644 --- a/src/librustc_data_structures/obligation_forest/mod.rs +++ b/src/librustc_data_structures/obligation_forest/mod.rs @@ -138,7 +138,7 @@ pub struct ObligationForest { /// call to `compress`. /// /// `usize` indices are used here and throughout this module, rather than - /// `newtype_index!` indices, because this code is hot enough that the + /// `rustc_index::newtype_index!` indices, because this code is hot enough that the /// `u32`-to-`usize` conversions that would be required are significant, /// and space considerations are not important. nodes: Vec>, @@ -149,11 +149,10 @@ pub struct ObligationForest { /// A cache of the nodes in `nodes`, indexed by predicate. Unfortunately, /// its contents are not guaranteed to match those of `nodes`. See the /// comments in `process_obligation` for details. - waiting_cache: FxHashMap, + active_cache: FxHashMap, - /// A scratch vector reused in various operations, to avoid allocating new - /// vectors. - scratch: RefCell>, + /// A vector reused in compress(), to avoid allocating new vectors. + node_rewrites: RefCell>, obligation_tree_id_generator: ObligationTreeIdGenerator, @@ -235,10 +234,6 @@ enum NodeState { /// This obligation was resolved to an error. Error nodes are /// removed from the vector by the compression step. Error, - - /// This is a temporary state used in DFS loops to detect cycles, - /// it should not exist outside of these DFSes. 
- OnDfsStack, } #[derive(Debug)] @@ -278,8 +273,8 @@ impl ObligationForest { ObligationForest { nodes: vec![], done_cache: Default::default(), - waiting_cache: Default::default(), - scratch: RefCell::new(vec![]), + active_cache: Default::default(), + node_rewrites: RefCell::new(vec![]), obligation_tree_id_generator: (0..).map(ObligationTreeId), error_cache: Default::default(), } @@ -303,15 +298,16 @@ impl ObligationForest { return Ok(()); } - match self.waiting_cache.entry(obligation.as_predicate().clone()) { + match self.active_cache.entry(obligation.as_predicate().clone()) { Entry::Occupied(o) => { + let index = *o.get(); debug!("register_obligation_at({:?}, {:?}) - duplicate of {:?}!", - obligation, parent, o.get()); - let node = &mut self.nodes[*o.get()]; + obligation, parent, index); + let node = &mut self.nodes[index]; if let Some(parent_index) = parent { - // If the node is already in `waiting_cache`, it has - // already had its chance to be marked with a parent. So if - // it's not already present, just dump `parent` into the + // If the node is already in `active_cache`, it has already + // had its chance to be marked with a parent. So if it's + // not already present, just dump `parent` into the // dependents as a non-parent. if !node.dependents.contains(&parent_index) { node.dependents.push(parent_index); @@ -342,7 +338,8 @@ impl ObligationForest { if already_failed { Err(()) } else { - v.insert(self.nodes.len()); + let new_index = self.nodes.len(); + v.insert(new_index); self.nodes.push(Node::new(parent, obligation, obligation_tree_id)); Ok(()) } @@ -352,16 +349,16 @@ impl ObligationForest { /// Converts all remaining obligations to the given error. pub fn to_errors(&mut self, error: E) -> Vec> { - let mut errors = vec![]; - for (index, node) in self.nodes.iter().enumerate() { - if let NodeState::Pending = node.state.get() { - let backtrace = self.error_at(index); - errors.push(Error { + let errors = self.nodes.iter().enumerate() + .filter(|(_index, node)| node.state.get() == NodeState::Pending) + .map(|(index, _node)| { + Error { error: error.clone(), - backtrace, - }); - } - } + backtrace: self.error_at(index), + } + }) + .collect(); + let successful_obligations = self.compress(DoCompleted::Yes); assert!(successful_obligations.unwrap().is_empty()); errors @@ -371,15 +368,14 @@ impl ObligationForest { pub fn map_pending_obligations(&self, f: F) -> Vec
<P>
where F: Fn(&O) -> P { - self.nodes - .iter() - .filter(|n| n.state.get() == NodeState::Pending) - .map(|n| f(&n.obligation)) + self.nodes.iter() + .filter(|node| node.state.get() == NodeState::Pending) + .map(|node| f(&node.obligation)) .collect() } - fn insert_into_error_cache(&mut self, node_index: usize) { - let node = &self.nodes[node_index]; + fn insert_into_error_cache(&mut self, index: usize) { + let node = &self.nodes[index]; self.error_cache .entry(node.obligation_tree_id) .or_default() @@ -406,13 +402,13 @@ impl ObligationForest { // `processor.process_obligation` can modify the predicate within // `node.obligation`, and that predicate is the key used for - // `self.waiting_cache`. This means that `self.waiting_cache` can - // get out of sync with `nodes`. It's not very common, but it does + // `self.active_cache`. This means that `self.active_cache` can get + // out of sync with `nodes`. It's not very common, but it does // happen, and code in `compress` has to allow for it. - let result = match node.state.get() { - NodeState::Pending => processor.process_obligation(&mut node.obligation), - _ => continue - }; + if node.state.get() != NodeState::Pending { + continue; + } + let result = processor.process_obligation(&mut node.obligation); debug!("process_obligations: node {} got result {:?}", index, result); @@ -439,10 +435,9 @@ impl ObligationForest { } ProcessResult::Error(err) => { stalled = false; - let backtrace = self.error_at(index); errors.push(Error { error: err, - backtrace, + backtrace: self.error_at(index), }); } } @@ -478,61 +473,53 @@ impl ObligationForest { fn process_cycles
<P>
(&self, processor: &mut P) where P: ObligationProcessor { - let mut stack = self.scratch.replace(vec![]); - debug_assert!(stack.is_empty()); + let mut stack = vec![]; debug!("process_cycles()"); for (index, node) in self.nodes.iter().enumerate() { - // For rustc-benchmarks/inflate-0.1.0 this state test is extremely - // hot and the state is almost always `Pending` or `Waiting`. It's - // a win to handle the no-op cases immediately to avoid the cost of - // the function call. - match node.state.get() { - NodeState::Waiting | NodeState::Pending | NodeState::Done | NodeState::Error => {}, - _ => self.find_cycles_from_node(&mut stack, processor, index), + // For some benchmarks this state test is extremely + // hot. It's a win to handle the no-op cases immediately to avoid + // the cost of the function call. + if node.state.get() == NodeState::Success { + self.find_cycles_from_node(&mut stack, processor, index); } } debug!("process_cycles: complete"); debug_assert!(stack.is_empty()); - self.scratch.replace(stack); } fn find_cycles_from_node
<P>
(&self, stack: &mut Vec, processor: &mut P, index: usize) where P: ObligationProcessor { let node = &self.nodes[index]; - match node.state.get() { - NodeState::OnDfsStack => { - let index = stack.iter().rposition(|&n| n == index).unwrap(); - processor.process_backedge(stack[index..].iter().map(GetObligation(&self.nodes)), - PhantomData); - } - NodeState::Success => { - node.state.set(NodeState::OnDfsStack); - stack.push(index); - for &index in node.dependents.iter() { - self.find_cycles_from_node(stack, processor, index); + if node.state.get() == NodeState::Success { + match stack.iter().rposition(|&n| n == index) { + None => { + stack.push(index); + for &index in node.dependents.iter() { + self.find_cycles_from_node(stack, processor, index); + } + stack.pop(); + node.state.set(NodeState::Done); + } + Some(rpos) => { + // Cycle detected. + processor.process_backedge( + stack[rpos..].iter().map(GetObligation(&self.nodes)), + PhantomData + ); } - stack.pop(); - node.state.set(NodeState::Done); - }, - NodeState::Waiting | NodeState::Pending => { - // This node is still reachable from some pending node. We - // will get to it when they are all processed. } - NodeState::Done | NodeState::Error => { - // Already processed that node. - } - }; + } } /// Returns a vector of obligations for `p` and all of its /// ancestors, putting them into the error state in the process. fn error_at(&self, mut index: usize) -> Vec { - let mut error_stack = self.scratch.replace(vec![]); + let mut error_stack: Vec = vec![]; let mut trace = vec![]; loop { @@ -553,15 +540,12 @@ impl ObligationForest { while let Some(index) = error_stack.pop() { let node = &self.nodes[index]; - match node.state.get() { - NodeState::Error => continue, - _ => node.state.set(NodeState::Error), + if node.state.get() != NodeState::Error { + node.state.set(NodeState::Error); + error_stack.extend(node.dependents.iter()); } - - error_stack.extend(node.dependents.iter()); } - self.scratch.replace(error_stack); trace } @@ -569,7 +553,19 @@ impl ObligationForest { #[inline(always)] fn inlined_mark_neighbors_as_waiting_from(&self, node: &Node) { for &index in node.dependents.iter() { - self.mark_as_waiting_from(&self.nodes[index]); + let node = &self.nodes[index]; + match node.state.get() { + NodeState::Waiting | NodeState::Error => {} + NodeState::Success => { + node.state.set(NodeState::Waiting); + // This call site is cold. + self.uninlined_mark_neighbors_as_waiting_from(node); + } + NodeState::Pending | NodeState::Done => { + // This call site is cold. + self.uninlined_mark_neighbors_as_waiting_from(node); + } + } } } @@ -595,37 +591,28 @@ impl ObligationForest { } } - fn mark_as_waiting_from(&self, node: &Node) { - match node.state.get() { - NodeState::Waiting | NodeState::Error | NodeState::OnDfsStack => return, - NodeState::Success => node.state.set(NodeState::Waiting), - NodeState::Pending | NodeState::Done => {}, - } - - // This call site is cold. - self.uninlined_mark_neighbors_as_waiting_from(node); - } - - /// Compresses the vector, removing all popped nodes. This adjusts - /// the indices and hence invalidates any outstanding - /// indices. Cannot be used during a transaction. + /// Compresses the vector, removing all popped nodes. This adjusts the + /// indices and hence invalidates any outstanding indices. /// /// Beforehand, all nodes must be marked as `Done` and no cycles /// on these nodes may be present. This is done by e.g., `process_cycles`. 
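// Editor's illustrative sketch (not part of this patch): the compaction-plus-index-rewrite
// pattern that `compress` implements, reduced to plain `Vec`s. All names here are hypothetical.
// Dead slots are mapped to `orig_len` so that outstanding indices into them can be detected and
// dropped after the live values have been shifted left in their original order. Assumes every
// index in `dependents` is < `values.len()` on entry.
fn compress_sketch(values: &mut Vec<i32>, dependents: &mut Vec<usize>, is_dead: impl Fn(i32) -> bool) {
    let orig_len = values.len();
    let mut rewrites: Vec<usize> = (0..orig_len).collect();
    let mut dead = 0;
    for i in 0..orig_len {
        if is_dead(values[i]) {
            rewrites[i] = orig_len; // "removed": points past the end
            dead += 1;
        } else if dead > 0 {
            rewrites[i] = i - dead;
            values.swap(i, i - dead); // keep live values in their original relative order
        }
    }
    values.truncate(orig_len - dead);
    // Rewrite indices that still point into `values`, dropping those that referred to dead slots.
    dependents.retain_mut(|d| {
        let new = rewrites[*d];
        if new >= orig_len { false } else { *d = new; true }
    });
}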
#[inline(never)] fn compress(&mut self, do_completed: DoCompleted) -> Option> { - let nodes_len = self.nodes.len(); - let mut node_rewrites: Vec<_> = self.scratch.replace(vec![]); - node_rewrites.extend(0..nodes_len); + let orig_nodes_len = self.nodes.len(); + let mut node_rewrites: Vec<_> = self.node_rewrites.replace(vec![]); + debug_assert!(node_rewrites.is_empty()); + node_rewrites.extend(0..orig_nodes_len); let mut dead_nodes = 0; + let mut removed_done_obligations: Vec = vec![]; - // Now move all popped nodes to the end. Try to keep the order. + // Now move all Done/Error nodes to the end, preserving the order of + // the Pending/Waiting nodes. // // LOOP INVARIANT: // self.nodes[0..index - dead_nodes] are the first remaining nodes // self.nodes[index - dead_nodes..index] are all dead // self.nodes[index..] are unchanged - for index in 0..self.nodes.len() { + for index in 0..orig_nodes_len { let node = &self.nodes[index]; match node.state.get() { NodeState::Pending | NodeState::Waiting => { @@ -636,88 +623,77 @@ impl ObligationForest { } NodeState::Done => { // This lookup can fail because the contents of - // `self.waiting_cache` is not guaranteed to match those of + // `self.active_cache` are not guaranteed to match those of // `self.nodes`. See the comment in `process_obligation` // for more details. - if let Some((predicate, _)) = self.waiting_cache - .remove_entry(node.obligation.as_predicate()) + if let Some((predicate, _)) = + self.active_cache.remove_entry(node.obligation.as_predicate()) { self.done_cache.insert(predicate); } else { self.done_cache.insert(node.obligation.as_predicate().clone()); } - node_rewrites[index] = nodes_len; + if do_completed == DoCompleted::Yes { + // Extract the success stories. + removed_done_obligations.push(node.obligation.clone()); + } + node_rewrites[index] = orig_nodes_len; dead_nodes += 1; } NodeState::Error => { // We *intentionally* remove the node from the cache at this point. Otherwise // tests must come up with a different type on every type error they // check against. - self.waiting_cache.remove(node.obligation.as_predicate()); - node_rewrites[index] = nodes_len; - dead_nodes += 1; + self.active_cache.remove(node.obligation.as_predicate()); self.insert_into_error_cache(index); + node_rewrites[index] = orig_nodes_len; + dead_nodes += 1; } - NodeState::OnDfsStack | NodeState::Success => unreachable!() + NodeState::Success => unreachable!() } } - // No compression needed. - if dead_nodes == 0 { - node_rewrites.truncate(0); - self.scratch.replace(node_rewrites); - return if do_completed == DoCompleted::Yes { Some(vec![]) } else { None }; + if dead_nodes > 0 { + // Remove the dead nodes and rewrite indices. + self.nodes.truncate(orig_nodes_len - dead_nodes); + self.apply_rewrites(&node_rewrites); } - // Pop off all the nodes we killed and extract the success stories. 
- let successful = if do_completed == DoCompleted::Yes { - Some((0..dead_nodes) - .map(|_| self.nodes.pop().unwrap()) - .flat_map(|node| { - match node.state.get() { - NodeState::Error => None, - NodeState::Done => Some(node.obligation), - _ => unreachable!() - } - }) - .collect()) - } else { - self.nodes.truncate(self.nodes.len() - dead_nodes); - None - }; - self.apply_rewrites(&node_rewrites); - node_rewrites.truncate(0); - self.scratch.replace(node_rewrites); + self.node_rewrites.replace(node_rewrites); - successful + if do_completed == DoCompleted::Yes { + Some(removed_done_obligations) + } else { + None + } } fn apply_rewrites(&mut self, node_rewrites: &[usize]) { - let nodes_len = node_rewrites.len(); + let orig_nodes_len = node_rewrites.len(); for node in &mut self.nodes { - let mut index = 0; - while index < node.dependents.len() { - let new_index = node_rewrites[node.dependents[index]]; - if new_index >= nodes_len { - node.dependents.swap_remove(index); - if index == 0 && node.has_parent { + let mut i = 0; + while i < node.dependents.len() { + let new_index = node_rewrites[node.dependents[i]]; + if new_index >= orig_nodes_len { + node.dependents.swap_remove(i); + if i == 0 && node.has_parent { // We just removed the parent. node.has_parent = false; } } else { - node.dependents[index] = new_index; - index += 1; + node.dependents[i] = new_index; + i += 1; } } } - // This updating of `self.waiting_cache` is necessary because the + // This updating of `self.active_cache` is necessary because the // removal of nodes within `compress` can fail. See above. - self.waiting_cache.retain(|_predicate, index| { + self.active_cache.retain(|_predicate, index| { let new_index = node_rewrites[*index]; - if new_index >= nodes_len { + if new_index >= orig_nodes_len { false } else { *index = new_index; diff --git a/src/librustc_data_structures/obligation_forest/tests.rs b/src/librustc_data_structures/obligation_forest/tests.rs index e20466572a..54b6f6d0ad 100644 --- a/src/librustc_data_structures/obligation_forest/tests.rs +++ b/src/librustc_data_structures/obligation_forest/tests.rs @@ -116,7 +116,9 @@ fn push_pop() { _ => unreachable!(), } }, |_| {}), DoCompleted::Yes); - assert_eq!(ok.unwrap(), vec!["A.3", "A.1", "A.3.i"]); + let mut ok = ok.unwrap(); + ok.sort(); + assert_eq!(ok, vec!["A.1", "A.3", "A.3.i"]); assert_eq!(err, vec![Error { error: "A is for apple", @@ -132,7 +134,9 @@ fn push_pop() { _ => panic!("unexpected obligation {:?}", obligation), } }, |_| {}), DoCompleted::Yes); - assert_eq!(ok.unwrap(), vec!["D.2.i", "D.2"]); + let mut ok = ok.unwrap(); + ok.sort(); + assert_eq!(ok, vec!["D.2", "D.2.i"]); assert_eq!(err, vec![Error { error: "D is for dumb", @@ -172,7 +176,9 @@ fn success_in_grandchildren() { _ => unreachable!(), } }, |_| {}), DoCompleted::Yes); - assert_eq!(ok.unwrap(), vec!["A.3", "A.1"]); + let mut ok = ok.unwrap(); + ok.sort(); + assert_eq!(ok, vec!["A.1", "A.3"]); assert!(err.is_empty()); let Outcome { completed: ok, errors: err, .. } = @@ -193,7 +199,9 @@ fn success_in_grandchildren() { _ => unreachable!(), } }, |_| {}), DoCompleted::Yes); - assert_eq!(ok.unwrap(), vec!["A.2.i.a", "A.2.i", "A.2", "A"]); + let mut ok = ok.unwrap(); + ok.sort(); + assert_eq!(ok, vec!["A", "A.2", "A.2.i", "A.2.i.a"]); assert!(err.is_empty()); let Outcome { completed: ok, errors: err, .. 
} = @@ -261,7 +269,9 @@ fn diamond() { } }, |_|{}), DoCompleted::Yes); assert_eq!(d_count, 1); - assert_eq!(ok.unwrap(), vec!["D", "A.2", "A.1", "A"]); + let mut ok = ok.unwrap(); + ok.sort(); + assert_eq!(ok, vec!["A", "A.1", "A.2", "D"]); assert_eq!(err.len(), 0); let errors = forest.to_errors(()); @@ -323,7 +333,9 @@ fn done_dependency() { _ => unreachable!(), } }, |_|{}), DoCompleted::Yes); - assert_eq!(ok.unwrap(), vec!["C: Sized", "B: Sized", "A: Sized"]); + let mut ok = ok.unwrap(); + ok.sort(); + assert_eq!(ok, vec!["A: Sized", "B: Sized", "C: Sized"]); assert_eq!(err.len(), 0); forest.register_obligation("(A,B,C): Sized"); @@ -361,7 +373,9 @@ fn orphan() { _ => unreachable!(), } }, |_|{}), DoCompleted::Yes); - assert_eq!(ok.unwrap(), vec!["C2", "C1"]); + let mut ok = ok.unwrap(); + ok.sort(); + assert_eq!(ok, vec!["C1", "C2"]); assert_eq!(err.len(), 0); let Outcome { completed: ok, errors: err, .. } = diff --git a/src/librustc_data_structures/owning_ref/mod.rs b/src/librustc_data_structures/owning_ref/mod.rs index b835b1706b..0213eb4f2a 100644 --- a/src/librustc_data_structures/owning_ref/mod.rs +++ b/src/librustc_data_structures/owning_ref/mod.rs @@ -1046,14 +1046,14 @@ unsafe impl CloneStableAddress for OwningRef where O: CloneStableAddress {} unsafe impl Send for OwningRef - where O: Send, for<'a> (&'a T): Send {} + where O: Send, for<'a> &'a T: Send {} unsafe impl Sync for OwningRef - where O: Sync, for<'a> (&'a T): Sync {} + where O: Sync, for<'a> &'a T: Sync {} unsafe impl Send for OwningRefMut - where O: Send, for<'a> (&'a mut T): Send {} + where O: Send, for<'a> &'a mut T: Send {} unsafe impl Sync for OwningRefMut - where O: Sync, for<'a> (&'a mut T): Sync {} + where O: Sync, for<'a> &'a mut T: Sync {} impl Debug for dyn Erased { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { diff --git a/src/librustc_data_structures/sharded.rs b/src/librustc_data_structures/sharded.rs index 31cb22098b..2f972eeccd 100644 --- a/src/librustc_data_structures/sharded.rs +++ b/src/librustc_data_structures/sharded.rs @@ -2,6 +2,7 @@ use std::hash::{Hasher, Hash}; use std::mem; use std::borrow::Borrow; use std::collections::hash_map::RawEntryMut; +use smallvec::SmallVec; use crate::fx::{FxHasher, FxHashMap}; use crate::sync::{Lock, LockGuard}; @@ -18,7 +19,7 @@ const SHARD_BITS: usize = 5; #[cfg(not(parallel_compiler))] const SHARD_BITS: usize = 0; -const SHARDS: usize = 1 << SHARD_BITS; +pub const SHARDS: usize = 1 << SHARD_BITS; /// An array of cache-line aligned inner locked structures with convenience methods. 
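// Editor's illustrative sketch (not part of this patch): the idea behind a sharded lock map.
// The key is hashed once and a few hash bits pick which of the `SHARDS` locks to take, so
// unrelated keys rarely contend on the same mutex. The shard-selection formula and the
// `ShardedMap` type here are hypothetical simplifications, not the real `Sharded` internals.
use std::collections::HashMap;
use std::hash::{Hash, Hasher};
use std::sync::Mutex;

const SHARD_BITS_SKETCH: usize = 5;
const SHARDS_SKETCH: usize = 1 << SHARD_BITS_SKETCH;

struct ShardedMap<K, V> {
    shards: Vec<Mutex<HashMap<K, V>>>, // the real type uses a fixed-size, cache-line-aligned array
}

impl<K: Hash + Eq, V> ShardedMap<K, V> {
    fn new() -> Self {
        ShardedMap { shards: (0..SHARDS_SKETCH).map(|_| Mutex::new(HashMap::new())).collect() }
    }

    fn shard_for(&self, key: &K) -> &Mutex<HashMap<K, V>> {
        let mut hasher = std::collections::hash_map::DefaultHasher::new();
        key.hash(&mut hasher);
        // Hypothetical choice: use the low SHARD_BITS bits of the hash as the shard index.
        let index = (hasher.finish() as usize) & (SHARDS_SKETCH - 1);
        &self.shards[index]
    }

    fn insert(&self, key: K, value: V) {
        self.shard_for(&key).lock().unwrap().insert(key, value);
    }
}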
#[derive(Clone)] @@ -29,21 +30,36 @@ pub struct Sharded { impl Default for Sharded { #[inline] fn default() -> Self { + Self::new(|| T::default()) + } +} + +impl Sharded { + #[inline] + pub fn new(mut value: impl FnMut() -> T) -> Self { + // Create a vector of the values we want + let mut values: SmallVec<[_; SHARDS]> = (0..SHARDS).map(|_| { + CacheAligned(Lock::new(value())) + }).collect(); + + // Create an unintialized array let mut shards: mem::MaybeUninit<[CacheAligned>; SHARDS]> = mem::MaybeUninit::uninit(); - let first = shards.as_mut_ptr() as *mut CacheAligned>; + unsafe { - for i in 0..SHARDS { - first.add(i).write(CacheAligned(Lock::new(T::default()))); - } + // Copy the values into our array + let first = shards.as_mut_ptr() as *mut CacheAligned>; + values.as_ptr().copy_to_nonoverlapping(first, SHARDS); + + // Ignore the content of the vector + values.set_len(0); + Sharded { shards: shards.assume_init(), } } } -} -impl Sharded { #[inline] pub fn get_shard_by_value(&self, val: &K) -> &Lock { if SHARDS == 1 { @@ -74,7 +90,7 @@ impl Sharded { pub type ShardedHashMap = Sharded>; -impl ShardedHashMap { +impl ShardedHashMap { pub fn len(&self) -> usize { self.lock_shards().iter().map(|shard| shard.len()).sum() } diff --git a/src/librustc_data_structures/snapshot_map/mod.rs b/src/librustc_data_structures/snapshot_map/mod.rs index ce0aa07cc2..bdd3dc9665 100644 --- a/src/librustc_data_structures/snapshot_map/mod.rs +++ b/src/librustc_data_structures/snapshot_map/mod.rs @@ -7,7 +7,7 @@ use std::mem; mod tests; pub struct SnapshotMap - where K: Hash + Clone + Eq + where K: Clone + Eq { map: FxHashMap, undo_log: Vec>, diff --git a/src/librustc_data_structures/stable_hasher.rs b/src/librustc_data_structures/stable_hasher.rs index 47dfc1d168..092208cfe1 100644 --- a/src/librustc_data_structures/stable_hasher.rs +++ b/src/librustc_data_structures/stable_hasher.rs @@ -1,10 +1,9 @@ use std::hash::{Hash, Hasher, BuildHasher}; -use std::marker::PhantomData; use std::mem; use smallvec::SmallVec; use crate::sip128::SipHasher128; -use crate::indexed_vec; -use crate::bit_set; +use rustc_index::vec; +use rustc_index::bit_set; /// When hashing something that ends up affecting properties like symbol names, /// we want these symbol names to be calculated independently of other factors @@ -13,55 +12,53 @@ use crate::bit_set; /// To that end we always convert integers to little-endian format before /// hashing and the architecture dependent `isize` and `usize` types are /// extended to 64 bits if needed. 
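// Editor's illustrative sketch (not part of this patch): why a "stable" hasher normalizes
// integers as the doc comment above describes. Hashing the raw in-memory bytes of a `usize`
// would differ between 32-bit and 64-bit hosts and between endiannesses, so the value is
// widened to u64 and emitted in little-endian byte order before it reaches the hash function.
fn write_usize_stably(buf: &mut Vec<u8>, value: usize) {
    buf.extend_from_slice(&(value as u64).to_le_bytes());
}
// e.g. write_usize_stably(&mut buf, slice.len()) feeds identical bytes on every host.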
-pub struct StableHasher { +pub struct StableHasher { state: SipHasher128, - width: PhantomData, } -impl ::std::fmt::Debug for StableHasher { +impl ::std::fmt::Debug for StableHasher { fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { write!(f, "{:?}", self.state) } } pub trait StableHasherResult: Sized { - fn finish(hasher: StableHasher) -> Self; + fn finish(hasher: StableHasher) -> Self; } -impl StableHasher { +impl StableHasher { pub fn new() -> Self { StableHasher { state: SipHasher128::new_with_keys(0, 0), - width: PhantomData, } } - pub fn finish(self) -> W { + pub fn finish(self) -> W { W::finish(self) } } impl StableHasherResult for u128 { - fn finish(hasher: StableHasher) -> Self { + fn finish(hasher: StableHasher) -> Self { let (_0, _1) = hasher.finalize(); u128::from(_0) | (u128::from(_1) << 64) } } impl StableHasherResult for u64 { - fn finish(hasher: StableHasher) -> Self { + fn finish(hasher: StableHasher) -> Self { hasher.finalize().0 } } -impl StableHasher { +impl StableHasher { #[inline] pub fn finalize(self) -> (u64, u64) { self.state.finish128() } } -impl Hasher for StableHasher { +impl Hasher for StableHasher { fn finish(&self) -> u64 { panic!("use StableHasher::finalize instead"); } @@ -165,16 +162,14 @@ impl Hasher for StableHasher { /// `StableHasher` takes care of endianness and `isize`/`usize` platform /// differences. pub trait HashStable { - fn hash_stable(&self, - hcx: &mut CTX, - hasher: &mut StableHasher); + fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher); } /// Implement this for types that can be turned into stable keys like, for /// example, for DefId that can be converted to a DefPathHash. This is used for /// bringing maps into a predictable order before hashing them. pub trait ToStableHashKey { - type KeyType: Ord + Clone + Sized + HashStable; + type KeyType: Ord + Sized + HashStable; fn to_stable_hash_key(&self, hcx: &HCX) -> Self::KeyType; } @@ -185,10 +180,10 @@ macro_rules! 
impl_stable_hash_via_hash { ($t:ty) => ( impl $crate::stable_hasher::HashStable for $t { #[inline] - fn hash_stable( + fn hash_stable( &self, _: &mut CTX, - hasher: &mut $crate::stable_hasher::StableHasher + hasher: &mut $crate::stable_hasher::StableHasher ) { ::std::hash::Hash::hash(self, hasher); } @@ -215,17 +210,13 @@ impl_stable_hash_via_hash!(char); impl_stable_hash_via_hash!(()); impl HashStable for ::std::num::NonZeroU32 { - fn hash_stable(&self, - ctx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { self.get().hash_stable(ctx, hasher) } } impl HashStable for f32 { - fn hash_stable(&self, - ctx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { let val: u32 = unsafe { ::std::mem::transmute(*self) }; @@ -234,9 +225,7 @@ impl HashStable for f32 { } impl HashStable for f64 { - fn hash_stable(&self, - ctx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { let val: u64 = unsafe { ::std::mem::transmute(*self) }; @@ -245,26 +234,20 @@ impl HashStable for f64 { } impl HashStable for ::std::cmp::Ordering { - fn hash_stable(&self, - ctx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { (*self as i8).hash_stable(ctx, hasher); } } impl, CTX> HashStable for (T1,) { - fn hash_stable(&self, - ctx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { let (ref _0,) = *self; _0.hash_stable(ctx, hasher); } } impl, T2: HashStable, CTX> HashStable for (T1, T2) { - fn hash_stable(&self, - ctx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { let (ref _0, ref _1) = *self; _0.hash_stable(ctx, hasher); _1.hash_stable(ctx, hasher); @@ -276,9 +259,7 @@ impl HashStable for (T1, T2, T3) T2: HashStable, T3: HashStable, { - fn hash_stable(&self, - ctx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { let (ref _0, ref _1, ref _2) = *self; _0.hash_stable(ctx, hasher); _1.hash_stable(ctx, hasher); @@ -292,9 +273,7 @@ impl HashStable for (T1, T2, T3, T4) T3: HashStable, T4: HashStable, { - fn hash_stable(&self, - ctx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { let (ref _0, ref _1, ref _2, ref _3) = *self; _0.hash_stable(ctx, hasher); _1.hash_stable(ctx, hasher); @@ -304,9 +283,7 @@ impl HashStable for (T1, T2, T3, T4) } impl, CTX> HashStable for [T] { - default fn hash_stable(&self, - ctx: &mut CTX, - hasher: &mut StableHasher) { + default fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { self.len().hash_stable(ctx, hasher); for item in self { item.hash_stable(ctx, hasher); @@ -316,9 +293,7 @@ impl, CTX> HashStable for [T] { impl, CTX> HashStable for Vec { #[inline] - fn hash_stable(&self, - ctx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { (&self[..]).hash_stable(ctx, hasher); } } @@ -329,9 +304,7 @@ impl HashStable for indexmap::IndexMap R: BuildHasher, { #[inline] - fn hash_stable(&self, - ctx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { self.len().hash_stable(ctx, hasher); for kv in self { kv.hash_stable(ctx, hasher); @@ -344,9 +317,7 @@ impl HashStable for indexmap::IndexSet R: BuildHasher, { 
#[inline] - fn hash_stable(&self, - ctx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { self.len().hash_stable(ctx, hasher); for key in self { key.hash_stable(ctx, hasher); @@ -356,45 +327,35 @@ impl HashStable for indexmap::IndexSet impl HashStable for SmallVec<[A; 1]> where A: HashStable { #[inline] - fn hash_stable(&self, - ctx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { (&self[..]).hash_stable(ctx, hasher); } } impl, CTX> HashStable for Box { #[inline] - fn hash_stable(&self, - ctx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { (**self).hash_stable(ctx, hasher); } } impl, CTX> HashStable for ::std::rc::Rc { #[inline] - fn hash_stable(&self, - ctx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { (**self).hash_stable(ctx, hasher); } } impl, CTX> HashStable for ::std::sync::Arc { #[inline] - fn hash_stable(&self, - ctx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { (**self).hash_stable(ctx, hasher); } } impl HashStable for str { #[inline] - fn hash_stable(&self, - _: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, _: &mut CTX, hasher: &mut StableHasher) { self.len().hash(hasher); self.as_bytes().hash(hasher); } @@ -403,9 +364,7 @@ impl HashStable for str { impl HashStable for String { #[inline] - fn hash_stable(&self, - hcx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) { (&self[..]).hash_stable(hcx, hasher); } } @@ -420,9 +379,7 @@ impl ToStableHashKey for String { impl HashStable for bool { #[inline] - fn hash_stable(&self, - ctx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { (if *self { 1u8 } else { 0u8 }).hash_stable(ctx, hasher); } } @@ -432,9 +389,7 @@ impl HashStable for Option where T: HashStable { #[inline] - fn hash_stable(&self, - ctx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { if let Some(ref value) = *self { 1u8.hash_stable(ctx, hasher); value.hash_stable(ctx, hasher); @@ -449,9 +404,7 @@ impl HashStable for Result T2: HashStable, { #[inline] - fn hash_stable(&self, - ctx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { mem::discriminant(self).hash_stable(ctx, hasher); match *self { Ok(ref x) => x.hash_stable(ctx, hasher), @@ -464,28 +417,22 @@ impl<'a, T, CTX> HashStable for &'a T where T: HashStable + ?Sized { #[inline] - fn hash_stable(&self, - ctx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { (**self).hash_stable(ctx, hasher); } } impl HashStable for ::std::mem::Discriminant { #[inline] - fn hash_stable(&self, - _: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, _: &mut CTX, hasher: &mut StableHasher) { ::std::hash::Hash::hash(self, hasher); } } -impl HashStable for indexed_vec::IndexVec +impl HashStable for vec::IndexVec where T: HashStable, { - fn hash_stable(&self, - ctx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { self.len().hash_stable(ctx, hasher); for v in &self.raw { v.hash_stable(ctx, hasher); @@ -494,21 +441,17 @@ impl HashStable for 
indexed_vec::IndexVec HashStable for bit_set::BitSet +impl HashStable for bit_set::BitSet { - fn hash_stable(&self, - ctx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { self.words().hash_stable(ctx, hasher); } } -impl HashStable +impl HashStable for bit_set::BitMatrix { - fn hash_stable(&self, - ctx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { self.words().hash_stable(ctx, hasher); } } @@ -517,25 +460,21 @@ impl_stable_hash_via_hash!(::std::path::Path); impl_stable_hash_via_hash!(::std::path::PathBuf); impl HashStable for ::std::collections::HashMap - where K: ToStableHashKey + Eq + Hash, + where K: ToStableHashKey + Eq, V: HashStable, R: BuildHasher, { #[inline] - fn hash_stable(&self, - hcx: &mut HCX, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) { hash_stable_hashmap(hcx, hasher, self, ToStableHashKey::to_stable_hash_key); } } impl HashStable for ::std::collections::HashSet - where K: ToStableHashKey + Eq + Hash, + where K: ToStableHashKey + Eq, R: BuildHasher, { - fn hash_stable(&self, - hcx: &mut HCX, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) { let mut keys: Vec<_> = self.iter() .map(|k| k.to_stable_hash_key(hcx)) .collect(); @@ -548,9 +487,7 @@ impl HashStable for ::std::collections::BTreeMap where K: ToStableHashKey, V: HashStable, { - fn hash_stable(&self, - hcx: &mut HCX, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) { let mut entries: Vec<_> = self.iter() .map(|(k, v)| (k.to_stable_hash_key(hcx), v)) .collect(); @@ -562,9 +499,7 @@ impl HashStable for ::std::collections::BTreeMap impl HashStable for ::std::collections::BTreeSet where K: ToStableHashKey, { - fn hash_stable(&self, - hcx: &mut HCX, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) { let mut keys: Vec<_> = self.iter() .map(|k| k.to_stable_hash_key(hcx)) .collect(); @@ -573,17 +508,16 @@ impl HashStable for ::std::collections::BTreeSet } } -pub fn hash_stable_hashmap( +pub fn hash_stable_hashmap( hcx: &mut HCX, - hasher: &mut StableHasher, + hasher: &mut StableHasher, map: &::std::collections::HashMap, to_stable_hash_key: F) - where K: Eq + Hash, + where K: Eq, V: HashStable, R: BuildHasher, - SK: HashStable + Ord + Clone, + SK: HashStable + Ord, F: Fn(&K, &HCX) -> SK, - W: StableHasherResult, { let mut entries: Vec<_> = map.iter() .map(|(k, v)| (to_stable_hash_key(k, hcx), v)) @@ -614,9 +548,7 @@ impl ::std::ops::Deref for StableVec { impl HashStable for StableVec where T: HashStable + ToStableHashKey { - fn hash_stable(&self, - hcx: &mut HCX, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) { let StableVec(ref v) = *self; let mut sorted: Vec<_> = v.iter() diff --git a/src/librustc_data_structures/stable_map.rs b/src/librustc_data_structures/stable_map.rs new file mode 100644 index 0000000000..f69f28e14b --- /dev/null +++ b/src/librustc_data_structures/stable_map.rs @@ -0,0 +1,99 @@ +pub use rustc_hash::FxHashMap; +use std::borrow::Borrow; +use std::collections::hash_map::Entry; +use std::fmt; +use std::hash::Hash; + +/// A deterministic wrapper around FxHashMap that does not provide iteration support. +/// +/// It supports insert, remove, get and get_mut functions from FxHashMap. 
+/// It also allows to convert hashmap to a sorted vector with the method `into_sorted_vector()`. +#[derive(Clone)] +pub struct StableMap { + base: FxHashMap, +} + +impl Default for StableMap +where + K: Eq + Hash, +{ + fn default() -> StableMap { + StableMap::new() + } +} + +impl fmt::Debug for StableMap +where + K: Eq + Hash + fmt::Debug, + V: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{:?}", self.base) + } +} + +impl PartialEq for StableMap +where + K: Eq + Hash, + V: PartialEq, +{ + fn eq(&self, other: &StableMap) -> bool { + self.base == other.base + } +} + +impl Eq for StableMap +where + K: Eq + Hash, + V: Eq, +{} + +impl StableMap +where + K: Eq + Hash, +{ + pub fn new() -> StableMap { + StableMap { base: FxHashMap::default() } + } + + pub fn into_sorted_vector(self) -> Vec<(K, V)> + where + K: Ord + Copy, + { + let mut vector = self.base.into_iter().collect::>(); + vector.sort_unstable_by_key(|pair| pair.0); + vector + } + + pub fn entry(&mut self, k: K) -> Entry<'_, K, V> { + self.base.entry(k) + } + + pub fn get(&self, k: &Q) -> Option<&V> + where + K: Borrow, + Q: Hash + Eq, + { + self.base.get(k) + } + + pub fn get_mut(&mut self, k: &Q) -> Option<&mut V> + where + K: Borrow, + Q: Hash + Eq, + { + self.base.get_mut(k) + } + + pub fn insert(&mut self, k: K, v: V) -> Option { + self.base.insert(k, v) + } + + pub fn remove(&mut self, k: &Q) -> Option + where + K: Borrow, + Q: Hash + Eq, + { + self.base.remove(k) + } +} diff --git a/src/librustc_data_structures/stable_set.rs b/src/librustc_data_structures/stable_set.rs new file mode 100644 index 0000000000..c7ca74f5fb --- /dev/null +++ b/src/librustc_data_structures/stable_set.rs @@ -0,0 +1,77 @@ +pub use rustc_hash::FxHashSet; +use std::borrow::Borrow; +use std::fmt; +use std::hash::Hash; + +/// A deterministic wrapper around FxHashSet that does not provide iteration support. +/// +/// It supports insert, remove, get functions from FxHashSet. +/// It also allows to convert hashset to a sorted vector with the method `into_sorted_vector()`. 
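// Editor's illustrative usage sketch (not part of this patch): what these wrappers buy you.
// Because neither StableMap nor StableSet exposes iteration, the only order-observable view is
// the sorted one, so downstream output cannot depend on FxHash* iteration order.
fn stable_map_demo() {
    let mut map: StableMap<u32, &str> = StableMap::new();
    map.insert(3, "c");
    map.insert(1, "a");
    assert_eq!(map.get(&1), Some(&"a"));
    // No `iter()` is available; ordering is only reachable through the sorted form.
    assert_eq!(map.into_sorted_vector(), vec![(1, "a"), (3, "c")]);
}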
+#[derive(Clone)] +pub struct StableSet { + base: FxHashSet, +} + +impl Default for StableSet +where + T: Eq + Hash, +{ + fn default() -> StableSet { + StableSet::new() + } +} + +impl fmt::Debug for StableSet +where + T: Eq + Hash + fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{:?}", self.base) + } +} + +impl PartialEq> for StableSet +where + T: Eq + Hash, +{ + fn eq(&self, other: &StableSet) -> bool { + self.base == other.base + } +} + +impl Eq for StableSet where T: Eq + Hash {} + +impl StableSet { + pub fn new() -> StableSet { + StableSet { base: FxHashSet::default() } + } + + pub fn into_sorted_vector(self) -> Vec + where + T: Ord, + { + let mut vector = self.base.into_iter().collect::>(); + vector.sort_unstable(); + vector + } + + pub fn get(&self, value: &Q) -> Option<&T> + where + T: Borrow, + Q: Hash + Eq, + { + self.base.get(value) + } + + pub fn insert(&mut self, value: T) -> bool { + self.base.insert(value) + } + + pub fn remove(&mut self, value: &Q) -> bool + where + T: Borrow, + Q: Hash + Eq, + { + self.base.remove(value) + } +} diff --git a/src/librustc_data_structures/svh.rs b/src/librustc_data_structures/svh.rs index 3123c182b0..64042264d7 100644 --- a/src/librustc_data_structures/svh.rs +++ b/src/librustc_data_structures/svh.rs @@ -61,11 +61,7 @@ impl Decodable for Svh { impl stable_hasher::HashStable for Svh { #[inline] - fn hash_stable( - &self, - ctx: &mut T, - hasher: &mut stable_hasher::StableHasher - ) { + fn hash_stable(&self, ctx: &mut T, hasher: &mut stable_hasher::StableHasher) { let Svh { hash } = *self; diff --git a/src/librustc_data_structures/sync.rs b/src/librustc_data_structures/sync.rs index 3277b85c28..6a19f52897 100644 --- a/src/librustc_data_structures/sync.rs +++ b/src/librustc_data_structures/sync.rs @@ -1,6 +1,6 @@ //! This module defines types which are thread safe if cfg!(parallel_compiler) is true. //! -//! `Lrc` is an alias of either Rc or Arc. +//! `Lrc` is an alias of `Arc` if cfg!(parallel_compiler) is true, `Rc` otherwise. //! //! `Lock` is a mutex. //! It internally uses `parking_lot::Mutex` if cfg!(parallel_compiler) is true, @@ -12,7 +12,7 @@ //! //! `MTLock` is a mutex which disappears if cfg!(parallel_compiler) is false. //! -//! `MTRef` is a immutable reference if cfg!(parallel_compiler), and an mutable reference otherwise. +//! `MTRef` is an immutable reference if cfg!(parallel_compiler), and a mutable reference otherwise. //! //! `rustc_erase_owner!` erases a OwningRef owner into Erased or Erased + Send + Sync //! depending on the value of cfg!(parallel_compiler). @@ -23,29 +23,6 @@ use std::marker::PhantomData; use std::ops::{Deref, DerefMut}; use crate::owning_ref::{Erased, OwningRef}; -pub fn serial_join(oper_a: A, oper_b: B) -> (RA, RB) - where A: FnOnce() -> RA, - B: FnOnce() -> RB -{ - (oper_a(), oper_b()) -} - -pub struct SerialScope; - -impl SerialScope { - pub fn spawn(&self, f: F) - where F: FnOnce(&SerialScope) - { - f(self) - } -} - -pub fn serial_scope(f: F) -> R - where F: FnOnce(&SerialScope) -> R -{ - f(&SerialScope) -} - pub use std::sync::atomic::Ordering::SeqCst; pub use std::sync::atomic::Ordering; @@ -176,8 +153,28 @@ cfg_if! 
{ pub type AtomicU32 = Atomic; pub type AtomicU64 = Atomic; - pub use self::serial_join as join; - pub use self::serial_scope as scope; + pub fn join(oper_a: A, oper_b: B) -> (RA, RB) + where A: FnOnce() -> RA, + B: FnOnce() -> RB + { + (oper_a(), oper_b()) + } + + pub struct SerialScope; + + impl SerialScope { + pub fn spawn(&self, f: F) + where F: FnOnce(&SerialScope) + { + f(self) + } + } + + pub fn scope(f: F) -> R + where F: FnOnce(&SerialScope) -> R + { + f(&SerialScope) + } #[macro_export] macro_rules! parallel { @@ -495,18 +492,20 @@ impl Once { assert!(self.try_set(value).is_none()); } - /// Tries to initialize the inner value by calling the closure while ensuring that no-one else - /// can access the value in the mean time by holding a lock for the duration of the closure. - /// If the value was already initialized the closure is not called and `false` is returned, - /// otherwise if the value from the closure initializes the inner value, `true` is returned + /// Initializes the inner value if it wasn't already done by calling the provided closure. It + /// ensures that no-one else can access the value in the mean time by holding a lock for the + /// duration of the closure. + /// A reference to the inner value is returned. #[inline] - pub fn init_locking T>(&self, f: F) -> bool { - let mut lock = self.0.lock(); - if lock.is_some() { - return false; + pub fn init_locking T>(&self, f: F) -> &T { + { + let mut lock = self.0.lock(); + if lock.is_none() { + *lock = Some(f()); + } } - *lock = Some(f()); - true + + self.borrow() } /// Tries to initialize the inner value by calling the closure without ensuring that no-one @@ -741,7 +740,7 @@ impl Clone for RwLock { /// A type which only allows its inner value to be used in one thread. /// It will panic if it is used on multiple threads. -#[derive(Copy, Clone, Hash, Debug, Eq, PartialEq)] +#[derive(Debug)] pub struct OneThread { #[cfg(parallel_compiler)] thread: thread::ThreadId, diff --git a/src/librustc_data_structures/thin_vec.rs b/src/librustc_data_structures/thin_vec.rs index 6692903cd4..d97da489db 100644 --- a/src/librustc_data_structures/thin_vec.rs +++ b/src/librustc_data_structures/thin_vec.rs @@ -1,9 +1,9 @@ -use crate::stable_hasher::{StableHasher, StableHasherResult, HashStable}; +use crate::stable_hasher::{StableHasher, HashStable}; /// A vector type optimized for cases where this size is usually 0 (cf. `SmallVector`). /// The `Option>` wrapping allows us to represent a zero sized vector with `None`, /// which uses only a single (null) pointer. 
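The `Once::init_locking` change above returns `&T` instead of a `bool`, so a caller initializes and reads the value in one step. The standard library's `OnceLock::get_or_init` (stable since Rust 1.70) has the same shape; this sketch illustrates only the calling pattern, not rustc's `Once` type:

use std::sync::OnceLock;

static CONFIG: OnceLock<String> = OnceLock::new();

fn config() -> &'static str {
    // The closure runs at most once, under the cell's own synchronization;
    // every caller gets a reference to the same stored value.
    CONFIG.get_or_init(|| "default-config".to_string()).as_str()
}

fn main() {
    assert_eq!(config(), "default-config");
    assert_eq!(config(), "default-config"); // second call: no re-initialization
}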
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct ThinVec(Option>>); impl ThinVec { @@ -60,9 +60,7 @@ impl Extend for ThinVec { } impl, CTX> HashStable for ThinVec { - fn hash_stable(&self, - hcx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) { (**self).hash_stable(hcx, hasher) } } diff --git a/src/librustc_data_structures/tiny_list.rs b/src/librustc_data_structures/tiny_list.rs index ea771d9f20..371f0f6fa0 100644 --- a/src/librustc_data_structures/tiny_list.rs +++ b/src/librustc_data_structures/tiny_list.rs @@ -14,7 +14,7 @@ #[cfg(test)] mod tests; -#[derive(Clone, Hash, Debug, PartialEq)] +#[derive(Clone)] pub struct TinyList { head: Option> } @@ -80,7 +80,7 @@ impl TinyList { } } -#[derive(Clone, Hash, Debug, PartialEq)] +#[derive(Clone)] struct Element { data: T, next: Option>>, diff --git a/src/librustc_data_structures/transitive_relation.rs b/src/librustc_data_structures/transitive_relation.rs index ffc964ddb5..a3926c1555 100644 --- a/src/librustc_data_structures/transitive_relation.rs +++ b/src/librustc_data_structures/transitive_relation.rs @@ -1,6 +1,6 @@ -use crate::bit_set::BitMatrix; +use rustc_index::bit_set::BitMatrix; use crate::fx::FxHashMap; -use crate::stable_hasher::{HashStable, StableHasher, StableHasherResult}; +use crate::stable_hasher::{HashStable, StableHasher}; use crate::sync::Lock; use rustc_serialize::{Encodable, Encoder, Decodable, Decoder}; use std::fmt::Debug; @@ -11,7 +11,7 @@ use std::mem; mod tests; #[derive(Clone, Debug)] -pub struct TransitiveRelation { +pub struct TransitiveRelation { // List of elements. This is used to map from a T to a usize. elements: Vec, @@ -35,7 +35,7 @@ pub struct TransitiveRelation { } // HACK(eddyb) manual impl avoids `Default` bound on `T`. -impl Default for TransitiveRelation { +impl Default for TransitiveRelation { fn default() -> Self { TransitiveRelation { elements: Default::default(), @@ -46,7 +46,7 @@ impl Default for TransitiveRelation { } } -#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable, Debug)] +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, RustcEncodable, RustcDecodable, Debug)] struct Index(usize); #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Debug)] @@ -442,9 +442,7 @@ impl Decodable for TransitiveRelation impl HashStable for TransitiveRelation where T: HashStable + Eq + Debug + Clone + Hash { - fn hash_stable(&self, - hcx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) { // We are assuming here that the relation graph has been built in a // deterministic way and we can just hash it the way it is. 
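The `ThinVec` doc comment above (thin_vec.rs) relies on the niche optimization: `Box` is never null, so `Option<Box<Vec<T>>>` is a single pointer and an empty vector costs one word. A quick self-contained check of that size claim; `Thin` is a stand-in struct, not the crate's `ThinVec`:

use std::mem::size_of;

// Same representation idea as ThinVec: `None` encodes the empty vector
// as a null pointer, so the whole wrapper stays pointer-sized.
#[allow(dead_code)]
struct Thin<T>(Option<Box<Vec<T>>>);

fn main() {
    assert_eq!(size_of::<Thin<u8>>(), size_of::<*const ()>());
    // By contrast, Vec<u8> itself is three words: pointer, capacity, length.
}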
let TransitiveRelation { @@ -462,9 +460,7 @@ impl HashStable for TransitiveRelation } impl HashStable for Edge { - fn hash_stable(&self, - hcx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) { let Edge { ref source, ref target, @@ -476,9 +472,7 @@ impl HashStable for Edge { } impl HashStable for Index { - fn hash_stable(&self, - hcx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) { let Index(idx) = *self; idx.hash_stable(hcx, hasher); } diff --git a/src/librustc_data_structures/vec_linked_list.rs b/src/librustc_data_structures/vec_linked_list.rs index 0fb8060031..7744c30655 100644 --- a/src/librustc_data_structures/vec_linked_list.rs +++ b/src/librustc_data_structures/vec_linked_list.rs @@ -1,4 +1,4 @@ -use crate::indexed_vec::{Idx, IndexVec}; +use rustc_index::vec::{Idx, IndexVec}; pub fn iter( first: Option, diff --git a/src/librustc_data_structures/work_queue.rs b/src/librustc_data_structures/work_queue.rs index 193025aafa..af63b18e9e 100644 --- a/src/librustc_data_structures/work_queue.rs +++ b/src/librustc_data_structures/work_queue.rs @@ -1,5 +1,5 @@ -use crate::bit_set::BitSet; -use crate::indexed_vec::Idx; +use rustc_index::bit_set::BitSet; +use rustc_index::vec::Idx; use std::collections::VecDeque; /// A work queue is a handy data structure for tracking work left to diff --git a/src/librustc_driver/Cargo.toml b/src/librustc_driver/Cargo.toml index 25f67b3046..a9e4e6db1c 100644 --- a/src/librustc_driver/Cargo.toml +++ b/src/librustc_driver/Cargo.toml @@ -13,10 +13,10 @@ crate-type = ["dylib"] graphviz = { path = "../libgraphviz" } lazy_static = "1.0" log = "0.4" -env_logger = { version = "0.6", default-features = false } +env_logger = { version = "0.7", default-features = false } rustc = { path = "../librustc" } rustc_target = { path = "../librustc_target" } -rustc_ast_borrowck = { path = "../librustc_ast_borrowck" } +rustc_lint = { path = "../librustc_lint" } rustc_data_structures = { path = "../librustc_data_structures" } errors = { path = "../librustc_errors", package = "rustc_errors" } rustc_metadata = { path = "../librustc_metadata" } diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index f99e65b449..6e8bc11162 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -36,11 +36,11 @@ use rustc::session::config::nightly_options; use rustc::session::{early_error, early_warn}; use rustc::lint::Lint; use rustc::lint; +use rustc::middle::cstore::MetadataLoader; use rustc::hir::def_id::LOCAL_CRATE; use rustc::ty::TyCtxt; use rustc::util::common::{set_time_depth, time, print_time_passes_entry, ErrorReported}; use rustc_metadata::locator; -use rustc_metadata::cstore::CStore; use rustc_codegen_utils::codegen_backend::CodegenBackend; use rustc_interface::interface; use rustc_interface::util::get_codegen_sysroot; @@ -166,7 +166,8 @@ pub fn run_compiler( None => return Ok(()), }; - let (sopts, cfg) = config::build_session_options_and_crate_config(&matches); + let sopts = config::build_session_options(&matches); + let cfg = interface::parse_cfgspecs(matches.opt_strs("cfg")); let mut dummy_config = |sopts, cfg, diagnostic_output| { let mut config = interface::Config { @@ -181,6 +182,7 @@ pub fn run_compiler( stderr: None, crate_name: None, lint_caps: Default::default(), + register_lints: None, }; callbacks.config(&mut config); config @@ -201,9 +203,13 @@ pub fn run_compiler( interface::run_compiler(config, |compiler| { let sopts = 
&compiler.session().opts; if sopts.describe_lints { + let lint_store = rustc_lint::new_lint_store( + sopts.debugging_opts.no_interleave_lints, + compiler.session().unstable_options(), + ); describe_lints( compiler.session(), - &*compiler.session().lint_store.borrow(), + &lint_store, false ); return; @@ -254,6 +260,7 @@ pub fn run_compiler( stderr: None, crate_name: None, lint_caps: Default::default(), + register_lints: None, }; callbacks.config(&mut config); @@ -268,7 +275,7 @@ pub fn run_compiler( compiler.output_file(), ).and_then(|| RustcDefaultCalls::list_metadata( sess, - compiler.cstore(), + &*compiler.codegen_backend().metadata_loader(), &matches, compiler.input() )); @@ -296,7 +303,6 @@ pub fn run_compiler( ); Ok(()) })?; - return sess.compile_status(); } else { let mut krate = compiler.parse()?.take(); pretty::visit_crate(sess, &mut krate, ppm); @@ -307,8 +313,8 @@ pub fn run_compiler( ppm, compiler.output_file().as_ref().map(|p| &**p), ); - return sess.compile_status(); } + return sess.compile_status(); } if callbacks.after_parsing(compiler) == Compilation::Stop { @@ -321,12 +327,14 @@ pub fn run_compiler( return sess.compile_status(); } - compiler.register_plugins()?; + { + let (_, _, lint_store) = &*compiler.register_plugins()?.peek(); - // Lint plugins are registered; now we can process command line flags. - if sess.opts.describe_lints { - describe_lints(&sess, &sess.lint_store.borrow(), true); - return sess.compile_status(); + // Lint plugins are registered; now we can process command line flags. + if sess.opts.describe_lints { + describe_lints(&sess, &lint_store, true); + return sess.compile_status(); + } } compiler.expansion()?; @@ -604,7 +612,7 @@ fn show_content_with_pager(content: &String) { impl RustcDefaultCalls { pub fn list_metadata(sess: &Session, - cstore: &CStore, + metadata_loader: &dyn MetadataLoader, matches: &getopts::Matches, input: &Input) -> Compilation { @@ -616,7 +624,7 @@ impl RustcDefaultCalls { let mut v = Vec::new(); locator::list_file_metadata(&sess.target.target, path, - &*cstore.metadata_loader, + metadata_loader, &mut v) .unwrap(); println!("{}", String::from_utf8(v).unwrap()); @@ -701,7 +709,7 @@ impl RustcDefaultCalls { let mut cfgs = sess.parse_sess.config.iter().filter_map(|&(name, ref value)| { let gated_cfg = GatedCfg::gate(&ast::MetaItem { path: ast::Path::from_ident(ast::Ident::with_dummy_span(name)), - node: ast::MetaItemKind::Word, + kind: ast::MetaItemKind::Word, span: DUMMY_SP, }); @@ -835,8 +843,7 @@ Available lint options: "); - fn sort_lints(sess: &Session, lints: Vec<(&'static Lint, bool)>) -> Vec<&'static Lint> { - let mut lints: Vec<_> = lints.into_iter().map(|(x, _)| x).collect(); + fn sort_lints(sess: &Session, mut lints: Vec<&'static Lint>) -> Vec<&'static Lint> { // The sort doesn't case-fold but it's doubtful we care. 
lints.sort_by_cached_key(|x: &&Lint| (x.default_level(sess), x.name)); lints @@ -852,7 +859,7 @@ Available lint options: let (plugin, builtin): (Vec<_>, _) = lint_store.get_lints() .iter() .cloned() - .partition(|&(_, p)| p); + .partition(|&lint| lint.is_plugin); let plugin = sort_lints(sess, plugin); let builtin = sort_lints(sess, builtin); @@ -1232,7 +1239,7 @@ pub fn report_ice(info: &panic::PanicInfo<'_>, bug_report_url: &str) { let backtrace = env::var_os("RUST_BACKTRACE").map(|x| &x != "0").unwrap_or(false); if backtrace { - TyCtxt::try_print_query_stack(); + TyCtxt::try_print_query_stack(&handler); } #[cfg(windows)] diff --git a/src/librustc_driver/pretty.rs b/src/librustc_driver/pretty.rs index fa9504e220..0de5b700b4 100644 --- a/src/librustc_driver/pretty.rs +++ b/src/librustc_driver/pretty.rs @@ -2,7 +2,6 @@ use rustc::hir; use rustc::hir::map as hir_map; -use rustc::hir::map::blocks; use rustc::hir::print as pprust_hir; use rustc::hir::def_id::LOCAL_CRATE; use rustc::session::Session; @@ -10,9 +9,6 @@ use rustc::session::config::Input; use rustc::ty::{self, TyCtxt}; use rustc::util::common::ErrorReported; use rustc_interface::util::ReplaceBodyWithLoop; -use rustc_ast_borrowck as borrowck; -use rustc_ast_borrowck::graphviz as borrowck_dot; -use rustc_ast_borrowck::cfg::{self, graphviz::LabelledCFG}; use rustc_mir::util::{write_mir_pretty, write_mir_graphviz}; use syntax::ast; @@ -20,11 +16,9 @@ use syntax::mut_visit::MutVisitor; use syntax::print::{pprust}; use syntax_pos::FileName; -use graphviz as dot; - use std::cell::Cell; use std::fs::File; -use std::io::{self, Write}; +use std::io::Write; use std::option; use std::path::Path; use std::str::FromStr; @@ -48,21 +42,11 @@ pub enum PpSourceMode { PpmTyped, } -#[derive(Copy, Clone, PartialEq, Debug)] -pub enum PpFlowGraphMode { - Default, - /// Drops the labels from the edges in the flowgraph output. This - /// is mostly for use in the -Z unpretty flowgraph run-make tests, - /// since the labels are largely uninteresting in those cases and - /// have become a pain to maintain. 
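The reworked `sort_lints` above takes the lint list directly and orders it with `sort_by_cached_key` on `(default_level, name)`, which evaluates each key once instead of on every comparison. The same call on plain tuples; the names and levels below are made up:

fn main() {
    let mut lints = vec![("unused_variables", 1), ("DEAD_CODE", 2), ("deprecated", 1)];
    // The key closure runs once per element and its result is cached,
    // which pays off when the key is more than a cheap field access.
    lints.sort_by_cached_key(|(name, level)| (*level, name.to_lowercase()));
    assert_eq!(
        lints.iter().map(|(n, _)| *n).collect::<Vec<_>>(),
        vec!["deprecated", "unused_variables", "DEAD_CODE"]
    );
}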
- UnlabelledEdges, -} #[derive(Copy, Clone, PartialEq, Debug)] pub enum PpMode { PpmSource(PpSourceMode), PpmHir(PpSourceMode), PpmHirTree(PpSourceMode), - PpmFlowGraph(PpFlowGraphMode), PpmMir, PpmMirCFG, } @@ -80,15 +64,14 @@ impl PpMode { PpmHir(_) | PpmHirTree(_) | PpmMir | - PpmMirCFG | - PpmFlowGraph(_) => true, + PpmMirCFG => true, PpmSource(PpmTyped) => panic!("invalid state"), } } pub fn needs_analysis(&self) -> bool { match *self { - PpmMir | PpmMirCFG | PpmFlowGraph(_) => true, + PpmMir | PpmMirCFG => true, _ => false, } } @@ -114,13 +97,11 @@ pub fn parse_pretty(sess: &Session, ("hir-tree", true) => PpmHirTree(PpmNormal), ("mir", true) => PpmMir, ("mir-cfg", true) => PpmMirCFG, - ("flowgraph", true) => PpmFlowGraph(PpFlowGraphMode::Default), - ("flowgraph,unlabelled", true) => PpmFlowGraph(PpFlowGraphMode::UnlabelledEdges), _ => { if extended { sess.fatal(&format!("argument to `unpretty` must be one of `normal`, \ - `expanded`, `flowgraph[,unlabelled]=`, \ - `identified`, `expanded,identified`, `everybody_loops`, \ + `expanded`, `identified`, `expanded,identified`, \ + `expanded,hygiene`, `everybody_loops`, \ `hir`, `hir,identified`, `hir,typed`, `hir-tree`, \ `mir` or `mir-cfg`; got {}", name)); @@ -501,24 +482,6 @@ impl<'a, 'tcx> pprust_hir::PpAnn for TypedAnnotation<'a, 'tcx> { } } -fn gather_flowgraph_variants(sess: &Session) -> Vec { - let print_loans = sess.opts.debugging_opts.flowgraph_print_loans; - let print_moves = sess.opts.debugging_opts.flowgraph_print_moves; - let print_assigns = sess.opts.debugging_opts.flowgraph_print_assigns; - let print_all = sess.opts.debugging_opts.flowgraph_print_all; - let mut variants = Vec::new(); - if print_all || print_loans { - variants.push(borrowck_dot::Loans); - } - if print_all || print_moves { - variants.push(borrowck_dot::Moves); - } - if print_all || print_assigns { - variants.push(borrowck_dot::Assigns); - } - variants -} - #[derive(Clone, Debug)] pub enum UserIdentifiedItem { ItemViaNode(ast::NodeId), @@ -609,81 +572,6 @@ impl UserIdentifiedItem { } } -fn print_flowgraph<'tcx, W: Write>( - variants: Vec, - tcx: TyCtxt<'tcx>, - code: blocks::Code<'tcx>, - mode: PpFlowGraphMode, - mut out: W, -) -> io::Result<()> { - let body_id = match code { - blocks::Code::Expr(expr) => { - // Find the function this expression is from. - let mut hir_id = expr.hir_id; - loop { - let node = tcx.hir().get(hir_id); - if let Some(n) = hir::map::blocks::FnLikeNode::from_node(node) { - break n.body(); - } - let parent = tcx.hir().get_parent_node(hir_id); - assert_ne!(hir_id, parent); - hir_id = parent; - } - } - blocks::Code::FnLike(fn_like) => fn_like.body(), - }; - let body = tcx.hir().body(body_id); - let cfg = cfg::CFG::new(tcx, &body); - let labelled_edges = mode != PpFlowGraphMode::UnlabelledEdges; - let hir_id = code.id(); - // We have to disassemble the hir_id because name must be ASCII - // alphanumeric. This does not appear in the rendered graph, so it does not - // have to be user friendly. 
- let name = format!( - "hir_id_{}_{}", - hir_id.owner.index(), - hir_id.local_id.index(), - ); - let lcfg = LabelledCFG { - tcx, - cfg: &cfg, - name, - labelled_edges, - }; - - match code { - _ if variants.is_empty() => { - let r = dot::render(&lcfg, &mut out); - return expand_err_details(r); - } - blocks::Code::Expr(_) => { - tcx.sess.err("--pretty flowgraph with -Z flowgraph-print annotations requires \ - fn-like node id."); - return Ok(()); - } - blocks::Code::FnLike(fn_like) => { - let (bccx, analysis_data) = - borrowck::build_borrowck_dataflow_data_for_fn(tcx, fn_like.body(), &cfg); - - let lcfg = borrowck_dot::DataflowLabeller { - inner: lcfg, - variants, - borrowck_ctxt: &bccx, - analysis_data: &analysis_data, - }; - let r = dot::render(&lcfg, &mut out); - return expand_err_details(r); - } - } - - fn expand_err_details(r: io::Result<()>) -> io::Result<()> { - r.map_err(|ioerr| { - io::Error::new(io::ErrorKind::Other, - format!("graphviz::render failed: {}", ioerr)) - }) - } -} - pub fn visit_crate(sess: &Session, krate: &mut ast::Crate, ppm: PpMode) { if let PpmSource(PpmEveryBodyLoops) = ppm { ReplaceBodyWithLoop::new(sess).visit_crate(krate); @@ -872,55 +760,17 @@ fn print_with_analysis( tcx.analysis(LOCAL_CRATE)?; - let mut print = || match ppm { + match ppm { PpmMir | PpmMirCFG => { - if let Some(nodeid) = nodeid { - let def_id = tcx.hir().local_def_id_from_node_id(nodeid); - match ppm { - PpmMir => write_mir_pretty(tcx, Some(def_id), &mut out), - PpmMirCFG => write_mir_graphviz(tcx, Some(def_id), &mut out), - _ => unreachable!(), - }?; - } else { - match ppm { - PpmMir => write_mir_pretty(tcx, None, &mut out), - PpmMirCFG => write_mir_graphviz(tcx, None, &mut out), - _ => unreachable!(), - }?; - } - Ok(()) - } - PpmFlowGraph(mode) => { - let nodeid = - nodeid.expect("`pretty flowgraph=..` needs NodeId (int) or unique path \ - suffix (b::c::d)"); - let hir_id = tcx.hir().node_to_hir_id(nodeid); - let node = tcx.hir().find(hir_id).unwrap_or_else(|| { - tcx.sess.fatal(&format!("`--pretty=flowgraph` couldn't find ID: {}", nodeid)) - }); - - match blocks::Code::from_node(&tcx.hir(), hir_id) { - Some(code) => { - let variants = gather_flowgraph_variants(tcx.sess); - - let out: &mut dyn Write = &mut out; - - print_flowgraph(variants, tcx, code, mode, out) - } - None => { - let message = format!("`--pretty=flowgraph` needs block, fn, or method; \ - got {:?}", - node); - - let hir_id = tcx.hir().node_to_hir_id(nodeid); - tcx.sess.span_fatal(tcx.hir().span(hir_id), &message) - } + let def_id = nodeid.map(|nid| tcx.hir().local_def_id_from_node_id(nid)); + match ppm { + PpmMir => write_mir_pretty(tcx, def_id, &mut out), + PpmMirCFG => write_mir_graphviz(tcx, def_id, &mut out), + _ => unreachable!(), } } _ => unreachable!(), - }; - - print().unwrap(); + }.unwrap(); write_output(out, ofile); diff --git a/src/librustc_errors/annotate_snippet_emitter_writer.rs b/src/librustc_errors/annotate_snippet_emitter_writer.rs index 0281d10fd9..491bc2aa6a 100644 --- a/src/librustc_errors/annotate_snippet_emitter_writer.rs +++ b/src/librustc_errors/annotate_snippet_emitter_writer.rs @@ -31,24 +31,28 @@ pub struct AnnotateSnippetEmitterWriter { impl Emitter for AnnotateSnippetEmitterWriter { /// The entry point for the diagnostics generation - fn emit_diagnostic(&mut self, db: &Diagnostic) { - let mut children = db.children.clone(); - let (mut primary_span, suggestions) = self.primary_span_formatted(&db); + fn emit_diagnostic(&mut self, diag: &Diagnostic) { + let mut children = diag.children.clone(); + 
let (mut primary_span, suggestions) = self.primary_span_formatted(&diag); self.fix_multispans_in_std_macros(&self.source_map, &mut primary_span, &mut children, - &db.level, + &diag.level, self.external_macro_backtrace); - self.emit_messages_default(&db.level, - db.message(), - &db.code, + self.emit_messages_default(&diag.level, + diag.message(), + &diag.code, &primary_span, &children, &suggestions); } + fn source_map(&self) -> Option<&Lrc> { + self.source_map.as_ref() + } + fn should_show_explain(&self) -> bool { !self.short_message } diff --git a/src/librustc_errors/diagnostic.rs b/src/librustc_errors/diagnostic.rs index 3f1b91256c..5a09898f18 100644 --- a/src/librustc_errors/diagnostic.rs +++ b/src/librustc_errors/diagnostic.rs @@ -34,7 +34,7 @@ pub struct SubDiagnostic { pub render_span: Option, } -#[derive(PartialEq, Eq)] +#[derive(Debug, PartialEq, Eq)] pub struct DiagnosticStyledString(pub Vec); impl DiagnosticStyledString { @@ -60,7 +60,7 @@ impl DiagnosticStyledString { } } -#[derive(PartialEq, Eq)] +#[derive(Debug, PartialEq, Eq)] pub enum StringPart { Normal(String), Highlighted(String), @@ -152,6 +152,32 @@ impl Diagnostic { self.note_expected_found_extra(label, expected, found, &"", &"") } + pub fn note_unsuccessfull_coercion(&mut self, + expected: DiagnosticStyledString, + found: DiagnosticStyledString) + -> &mut Self + { + let mut msg: Vec<_> = + vec![(format!("required when trying to coerce from type `"), + Style::NoStyle)]; + msg.extend(expected.0.iter() + .map(|x| match *x { + StringPart::Normal(ref s) => (s.to_owned(), Style::NoStyle), + StringPart::Highlighted(ref s) => (s.to_owned(), Style::Highlight), + })); + msg.push((format!("` to type '"), Style::NoStyle)); + msg.extend(found.0.iter() + .map(|x| match *x { + StringPart::Normal(ref s) => (s.to_owned(), Style::NoStyle), + StringPart::Highlighted(ref s) => (s.to_owned(), Style::Highlight), + })); + msg.push((format!("`"), Style::NoStyle)); + + // For now, just attach these as notes + self.highlighted_note(msg); + self + } + pub fn note_expected_found_extra(&mut self, label: &dyn fmt::Display, expected: DiagnosticStyledString, @@ -298,9 +324,31 @@ impl Diagnostic { /// * may contain a name of a function, variable, or type, but not whole expressions /// /// See `CodeSuggestion` for more information. - pub fn span_suggestion(&mut self, sp: Span, msg: &str, - suggestion: String, - applicability: Applicability) -> &mut Self { + pub fn span_suggestion( + &mut self, + sp: Span, + msg: &str, + suggestion: String, + applicability: Applicability, + ) -> &mut Self { + self.span_suggestion_with_style( + sp, + msg, + suggestion, + applicability, + SuggestionStyle::ShowCode, + ); + self + } + + pub fn span_suggestion_with_style( + &mut self, + sp: Span, + msg: &str, + suggestion: String, + applicability: Applicability, + style: SuggestionStyle, + ) -> &mut Self { self.suggestions.push(CodeSuggestion { substitutions: vec![Substitution { parts: vec![SubstitutionPart { @@ -309,16 +357,37 @@ impl Diagnostic { }], }], msg: msg.to_owned(), - style: SuggestionStyle::ShowCode, + style, applicability, }); self } + pub fn span_suggestion_verbose( + &mut self, + sp: Span, + msg: &str, + suggestion: String, + applicability: Applicability, + ) -> &mut Self { + self.span_suggestion_with_style( + sp, + msg, + suggestion, + applicability, + SuggestionStyle::ShowAlways, + ); + self + } + /// Prints out a message with multiple suggested edits of the code. 
- pub fn span_suggestions(&mut self, sp: Span, msg: &str, - suggestions: impl Iterator, applicability: Applicability) -> &mut Self - { + pub fn span_suggestions( + &mut self, + sp: Span, + msg: &str, + suggestions: impl Iterator, + applicability: Applicability, + ) -> &mut Self { self.suggestions.push(CodeSuggestion { substitutions: suggestions.map(|snippet| Substitution { parts: vec![SubstitutionPart { @@ -340,17 +409,13 @@ impl Diagnostic { pub fn span_suggestion_short( &mut self, sp: Span, msg: &str, suggestion: String, applicability: Applicability ) -> &mut Self { - self.suggestions.push(CodeSuggestion { - substitutions: vec![Substitution { - parts: vec![SubstitutionPart { - snippet: suggestion, - span: sp, - }], - }], - msg: msg.to_owned(), - style: SuggestionStyle::HideCodeInline, + self.span_suggestion_with_style( + sp, + msg, + suggestion, applicability, - }); + SuggestionStyle::HideCodeInline, + ); self } @@ -363,17 +428,13 @@ impl Diagnostic { pub fn span_suggestion_hidden( &mut self, sp: Span, msg: &str, suggestion: String, applicability: Applicability ) -> &mut Self { - self.suggestions.push(CodeSuggestion { - substitutions: vec![Substitution { - parts: vec![SubstitutionPart { - snippet: suggestion, - span: sp, - }], - }], - msg: msg.to_owned(), - style: SuggestionStyle::HideCodeAlways, + self.span_suggestion_with_style( + sp, + msg, + suggestion, applicability, - }); + SuggestionStyle::HideCodeAlways, + ); self } @@ -384,17 +445,13 @@ impl Diagnostic { pub fn tool_only_span_suggestion( &mut self, sp: Span, msg: &str, suggestion: String, applicability: Applicability ) -> &mut Self { - self.suggestions.push(CodeSuggestion { - substitutions: vec![Substitution { - parts: vec![SubstitutionPart { - snippet: suggestion, - span: sp, - }], - }], - msg: msg.to_owned(), - style: SuggestionStyle::CompletelyHidden, + self.span_suggestion_with_style( + sp, + msg, + suggestion, applicability, - }); + SuggestionStyle::CompletelyHidden, + ); self } diff --git a/src/librustc_errors/diagnostic_builder.rs b/src/librustc_errors/diagnostic_builder.rs index e85388bfea..40642dd14b 100644 --- a/src/librustc_errors/diagnostic_builder.rs +++ b/src/librustc_errors/diagnostic_builder.rs @@ -1,10 +1,6 @@ -use crate::Diagnostic; -use crate::DiagnosticId; -use crate::DiagnosticStyledString; -use crate::Applicability; +use crate::{Diagnostic, DiagnosticId, DiagnosticStyledString}; +use crate::{Applicability, Level, Handler, StashKey}; -use crate::Level; -use crate::Handler; use std::fmt::{self, Debug}; use std::ops::{Deref, DerefMut}; use std::thread::panicking; @@ -117,18 +113,30 @@ impl<'a> DiagnosticBuilder<'a> { } } - /// Buffers the diagnostic for later emission, unless handler - /// has disabled such buffering. - pub fn buffer(mut self, buffered_diagnostics: &mut Vec) { + /// Stashes diagnostic for possible later improvement in a different, + /// later stage of the compiler. The diagnostic can be accessed with + /// the provided `span` and `key` through `.steal_diagnostic` on `Handler`. + /// + /// As with `buffer`, this is unless the handler has disabled such buffering. + pub fn stash(self, span: Span, key: StashKey) { + if let Some((diag, handler)) = self.into_diagnostic() { + handler.stash_diagnostic(span, key, diag); + } + } + + /// Converts the builder to a `Diagnostic` for later emission, + /// unless handler has disabled such buffering. 
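The suggestion refactor above funnels `span_suggestion`, `span_suggestion_short`, `span_suggestion_hidden`, and `tool_only_span_suggestion` through a single `span_suggestion_with_style`, so the variants differ only in the `SuggestionStyle` they pass. A toy model of that "one parametrized core, thin wrappers" shape; the `Builder` and `Style` names are invented for illustration:

#[derive(Debug, PartialEq)]
enum Style {
    ShowCode,
    HideInline,
    HideAlways,
}

#[derive(Default)]
struct Builder {
    suggestions: Vec<(String, Style)>,
}

impl Builder {
    // The one place that actually records a suggestion.
    fn suggest_with_style(&mut self, msg: &str, style: Style) -> &mut Self {
        self.suggestions.push((msg.to_owned(), style));
        self
    }

    // Thin wrappers only choose a style; there is no duplicated bookkeeping.
    fn suggest(&mut self, msg: &str) -> &mut Self {
        self.suggest_with_style(msg, Style::ShowCode)
    }

    fn suggest_short(&mut self, msg: &str) -> &mut Self {
        self.suggest_with_style(msg, Style::HideInline)
    }

    fn suggest_hidden(&mut self, msg: &str) -> &mut Self {
        self.suggest_with_style(msg, Style::HideAlways)
    }
}

fn main() {
    let mut b = Builder::default();
    b.suggest("use `foo` here")
        .suggest_short("remove this")
        .suggest_hidden("machine-applicable rewrite");
    assert_eq!(b.suggestions.len(), 3);
    assert_eq!(b.suggestions[1].1, Style::HideInline);
}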
+ pub fn into_diagnostic(mut self) -> Option<(Diagnostic, &'a Handler)> { if self.0.handler.flags.dont_buffer_diagnostics || self.0.handler.flags.treat_err_as_bug.is_some() { self.emit(); - return; + return None; } - // We need to use `ptr::read` because `DiagnosticBuilder` - // implements `Drop`. + let handler = self.0.handler; + + // We need to use `ptr::read` because `DiagnosticBuilder` implements `Drop`. let diagnostic; unsafe { diagnostic = std::ptr::read(&self.0.diagnostic); @@ -137,7 +145,14 @@ impl<'a> DiagnosticBuilder<'a> { // Logging here is useful to help track down where in logs an error was // actually emitted. debug!("buffer: diagnostic={:?}", diagnostic); - buffered_diagnostics.push(diagnostic); + + Some((diagnostic, handler)) + } + + /// Buffers the diagnostic for later emission, + /// unless handler has disabled such buffering. + pub fn buffer(self, buffered_diagnostics: &mut Vec) { + buffered_diagnostics.extend(self.into_diagnostic().map(|(diag, _)| diag)); } /// Convenience function for internal use, clients should use one of the @@ -194,6 +209,11 @@ impl<'a> DiagnosticBuilder<'a> { found_extra: &dyn fmt::Display, ) -> &mut Self); + forward!(pub fn note_unsuccessfull_coercion(&mut self, + expected: DiagnosticStyledString, + found: DiagnosticStyledString, + ) -> &mut Self); + forward!(pub fn note(&mut self, msg: &str) -> &mut Self); forward!(pub fn span_note>(&mut self, sp: S, diff --git a/src/librustc_errors/emitter.rs b/src/librustc_errors/emitter.rs index fc441320e0..b153f0f0e8 100644 --- a/src/librustc_errors/emitter.rs +++ b/src/librustc_errors/emitter.rs @@ -12,8 +12,8 @@ use Destination::*; use syntax_pos::{SourceFile, Span, MultiSpan}; use crate::{ - Level, CodeSuggestion, Diagnostic, SubDiagnostic, - SuggestionStyle, SourceMapperDyn, DiagnosticId, + Level, CodeSuggestion, Diagnostic, SubDiagnostic, pluralise, + SuggestionStyle, SourceMapper, SourceMapperDyn, DiagnosticId, }; use crate::Level::Error; use crate::snippet::{Annotation, AnnotationType, Line, MultilineAnnotation, StyledString, Style}; @@ -99,8 +99,8 @@ impl Margin { // ``` let mut m = Margin { - whitespace_left: if whitespace_left >= 6 { whitespace_left - 6 } else { 0 }, - span_left: if span_left >= 6 { span_left - 6 } else { 0 }, + whitespace_left: whitespace_left.saturating_sub(6), + span_left: span_left.saturating_sub(6), span_right: span_right + 6, computed_left: 0, computed_right: 0, @@ -125,7 +125,7 @@ impl Margin { } else { self.computed_right }; - right < line_len && line_len > self.computed_left + self.column_width + right < line_len && self.computed_left + self.column_width < line_len } fn compute(&mut self, max_line_len: usize) { @@ -167,12 +167,10 @@ impl Margin { } fn right(&self, line_len: usize) -> usize { - if max(line_len, self.computed_left) - self.computed_left <= self.column_width { - line_len - } else if self.computed_right > line_len { + if line_len.saturating_sub(self.computed_left) <= self.column_width { line_len } else { - self.computed_right + min(line_len, self.computed_right) } } } @@ -182,7 +180,7 @@ const ANONYMIZED_LINE_NUM: &str = "LL"; /// Emitter trait for emitting errors. pub trait Emitter { /// Emit a structured diagnostic. - fn emit_diagnostic(&mut self, db: &Diagnostic); + fn emit_diagnostic(&mut self, diag: &Diagnostic); /// Emit a notification that an artifact has been output. 
/// This is currently only supported for the JSON format, @@ -194,6 +192,8 @@ pub trait Emitter { true } + fn source_map(&self) -> Option<&Lrc>; + /// Formats the substitutions of the primary_span /// /// The are a lot of conditions to this method, but in short: @@ -206,10 +206,10 @@ pub trait Emitter { /// we return the original `primary_span` and the original suggestions. fn primary_span_formatted<'a>( &mut self, - db: &'a Diagnostic + diag: &'a Diagnostic, ) -> (MultiSpan, &'a [CodeSuggestion]) { - let mut primary_span = db.span.clone(); - if let Some((sugg, rest)) = db.suggestions.split_first() { + let mut primary_span = diag.span.clone(); + if let Some((sugg, rest)) = diag.suggestions.split_first() { if rest.is_empty() && // ^ if there is only one suggestion // don't display multi-suggestions as labels @@ -220,10 +220,14 @@ pub trait Emitter { sugg.msg.split_whitespace().count() < 10 && // don't display multiline suggestions as labels !sugg.substitutions[0].parts[0].snippet.contains('\n') && - // when this style is set we want the suggestion to be a message, not inline - sugg.style != SuggestionStyle::HideCodeAlways && - // trivial suggestion for tooling's sake, never shown - sugg.style != SuggestionStyle::CompletelyHidden + ![ + // when this style is set we want the suggestion to be a message, not inline + SuggestionStyle::HideCodeAlways, + // trivial suggestion for tooling's sake, never shown + SuggestionStyle::CompletelyHidden, + // subtle suggestion, never shown inline + SuggestionStyle::ShowAlways, + ].contains(&sugg.style) { let substitution = &sugg.substitutions[0].parts[0].snippet.trim(); let msg = if substitution.len() == 0 || sugg.style.hide_inline() { @@ -232,7 +236,20 @@ pub trait Emitter { format!("help: {}", sugg.msg) } else { // Show the default suggestion text with the substitution - format!("help: {}: `{}`", sugg.msg, substitution) + format!( + "help: {}{}: `{}`", + sugg.msg, + if self.source_map().map(|sm| is_case_difference( + &**sm, + substitution, + sugg.substitutions[0].parts[0].span, + )).unwrap_or(false) { + " (notice the capitalization)" + } else { + "" + }, + substitution, + ) }; primary_span.push_span_label(sugg.substitutions[0].parts[0].span, msg); @@ -243,10 +260,10 @@ pub trait Emitter { // to be consistent. 
We could try to figure out if we can // make one (or the first one) inline, but that would give // undue importance to a semi-random suggestion - (primary_span, &db.suggestions) + (primary_span, &diag.suggestions) } } else { - (primary_span, &db.suggestions) + (primary_span, &diag.suggestions) } } @@ -297,81 +314,82 @@ pub trait Emitter { source_map: &Option>, span: &mut MultiSpan, always_backtrace: bool) -> bool { - let mut spans_updated = false; + let sm = match source_map { + Some(ref sm) => sm, + None => return false, + }; - if let Some(ref sm) = source_map { - let mut before_after: Vec<(Span, Span)> = vec![]; - let mut new_labels: Vec<(Span, String)> = vec![]; + let mut before_after: Vec<(Span, Span)> = vec![]; + let mut new_labels: Vec<(Span, String)> = vec![]; - // First, find all the spans in <*macros> and point instead at their use site - for sp in span.primary_spans() { - if sp.is_dummy() { + // First, find all the spans in <*macros> and point instead at their use site + for sp in span.primary_spans() { + if sp.is_dummy() { + continue; + } + let call_sp = sm.call_span_if_macro(*sp); + if call_sp != *sp && !always_backtrace { + before_after.push((*sp, call_sp)); + } + let backtrace_len = sp.macro_backtrace().len(); + for (i, trace) in sp.macro_backtrace().iter().rev().enumerate() { + // Only show macro locations that are local + // and display them like a span_note + if trace.def_site_span.is_dummy() { continue; } - let call_sp = sm.call_span_if_macro(*sp); - if call_sp != *sp && !always_backtrace { - before_after.push((*sp, call_sp)); + if always_backtrace { + new_labels.push((trace.def_site_span, + format!("in this expansion of `{}`{}", + trace.macro_decl_name, + if backtrace_len > 2 { + // if backtrace_len == 1 it'll be pointed + // at by "in this macro invocation" + format!(" (#{})", i + 1) + } else { + String::new() + }))); } - let backtrace_len = sp.macro_backtrace().len(); - for (i, trace) in sp.macro_backtrace().iter().rev().enumerate() { - // Only show macro locations that are local - // and display them like a span_note - if trace.def_site_span.is_dummy() { - continue; - } - if always_backtrace { - new_labels.push((trace.def_site_span, - format!("in this expansion of `{}`{}", - trace.macro_decl_name, - if backtrace_len > 2 { - // if backtrace_len == 1 it'll be pointed - // at by "in this macro invocation" - format!(" (#{})", i + 1) - } else { - String::new() - }))); - } - // Check to make sure we're not in any <*macros> - if !sm.span_to_filename(trace.def_site_span).is_macros() && - !trace.macro_decl_name.starts_with("desugaring of ") && - !trace.macro_decl_name.starts_with("#[") || - always_backtrace { - new_labels.push((trace.call_site, - format!("in this macro invocation{}", - if backtrace_len > 2 && always_backtrace { - // only specify order when the macro - // backtrace is multiple levels deep - format!(" (#{})", i + 1) - } else { - String::new() - }))); - if !always_backtrace { - break; - } + // Check to make sure we're not in any <*macros> + if !sm.span_to_filename(trace.def_site_span).is_macros() && + !trace.macro_decl_name.starts_with("desugaring of ") && + !trace.macro_decl_name.starts_with("#[") || + always_backtrace { + new_labels.push((trace.call_site, + format!("in this macro invocation{}", + if backtrace_len > 2 && always_backtrace { + // only specify order when the macro + // backtrace is multiple levels deep + format!(" (#{})", i + 1) + } else { + String::new() + }))); + if !always_backtrace { + break; } } } - for (label_span, label_text) in new_labels { 
- span.push_span_label(label_span, label_text); + } + for (label_span, label_text) in new_labels { + span.push_span_label(label_span, label_text); + } + for sp_label in span.span_labels() { + if sp_label.span.is_dummy() { + continue; } - for sp_label in span.span_labels() { - if sp_label.span.is_dummy() { - continue; - } - if sm.span_to_filename(sp_label.span.clone()).is_macros() && - !always_backtrace - { - let v = sp_label.span.macro_backtrace(); - if let Some(use_site) = v.last() { - before_after.push((sp_label.span.clone(), use_site.call_site.clone())); - } + if sm.span_to_filename(sp_label.span.clone()).is_macros() && + !always_backtrace + { + let v = sp_label.span.macro_backtrace(); + if let Some(use_site) = v.last() { + before_after.push((sp_label.span.clone(), use_site.call_site.clone())); } } - // After we have them, make sure we replace these 'bad' def sites with their use sites - for (before, after) in before_after { - span.replace(before, after); - spans_updated = true; - } + } + // After we have them, make sure we replace these 'bad' def sites with their use sites + let spans_updated = !before_after.is_empty(); + for (before, after) in before_after { + span.replace(before, after); } spans_updated @@ -379,19 +397,23 @@ pub trait Emitter { } impl Emitter for EmitterWriter { - fn emit_diagnostic(&mut self, db: &Diagnostic) { - let mut children = db.children.clone(); - let (mut primary_span, suggestions) = self.primary_span_formatted(&db); + fn source_map(&self) -> Option<&Lrc> { + self.sm.as_ref() + } + + fn emit_diagnostic(&mut self, diag: &Diagnostic) { + let mut children = diag.children.clone(); + let (mut primary_span, suggestions) = self.primary_span_formatted(&diag); self.fix_multispans_in_std_macros(&self.sm, &mut primary_span, &mut children, - &db.level, + &diag.level, self.external_macro_backtrace); - self.emit_messages_default(&db.level, - &db.styled_message(), - &db.code, + self.emit_messages_default(&diag.level, + &diag.styled_message(), + &diag.code, &primary_span, &children, &suggestions); @@ -593,9 +615,9 @@ impl EmitterWriter { let left = margin.left(source_string.len()); // Left trim // Account for unicode characters of width !=0 that were removed. - let left = source_string.chars().take(left).fold(0, |acc, ch| { - acc + unicode_width::UnicodeWidthChar::width(ch).unwrap_or(1) - }); + let left = source_string.chars().take(left) + .map(|ch| unicode_width::UnicodeWidthChar::width(ch).unwrap_or(1)) + .sum(); self.draw_line( buffer, @@ -623,18 +645,16 @@ impl EmitterWriter { // 3 | | // 4 | | } // | |_^ test - if line.annotations.len() == 1 { - if let Some(ref ann) = line.annotations.get(0) { - if let AnnotationType::MultilineStart(depth) = ann.annotation_type { - if source_string.chars().take(ann.start_col).all(|c| c.is_whitespace()) { - let style = if ann.is_primary { - Style::UnderlinePrimary - } else { - Style::UnderlineSecondary - }; - buffer.putc(line_offset, width_offset + depth - 1, '/', style); - return vec![(depth, style)]; - } + if let [ann] = &line.annotations[..] 
{ + if let AnnotationType::MultilineStart(depth) = ann.annotation_type { + if source_string.chars().take(ann.start_col).all(|c| c.is_whitespace()) { + let style = if ann.is_primary { + Style::UnderlinePrimary + } else { + Style::UnderlineSecondary + }; + buffer.putc(line_offset, width_offset + depth - 1, '/', style); + return vec![(depth, style)]; } } } @@ -763,11 +783,7 @@ impl EmitterWriter { annotations_position.push((p, annotation)); for (j, next) in annotations.iter().enumerate() { if j > i { - let l = if let Some(ref label) = next.label { - label.len() + 2 - } else { - 0 - }; + let l = next.label.as_ref().map_or(0, |label| label.len() + 2); if (overlaps(next, annotation, l) // Do not allow two labels to be in the same // line if they overlap including padding, to // avoid situations like: @@ -797,9 +813,7 @@ impl EmitterWriter { } } } - if line_len < p { - line_len = p; - } + line_len = max(line_len, p); } if line_len != 0 { @@ -941,17 +955,9 @@ impl EmitterWriter { Style::LabelSecondary }; let (pos, col) = if pos == 0 { - (pos + 1, if annotation.end_col + 1 > left { - annotation.end_col + 1 - left - } else { - 0 - }) + (pos + 1, (annotation.end_col + 1).saturating_sub(left)) } else { - (pos + 2, if annotation.start_col > left { - annotation.start_col - left - } else { - 0 - }) + (pos + 2, annotation.start_col.saturating_sub(left)) }; if let Some(ref label) = annotation.label { buffer.puts(line_offset + pos, code_offset + col, &label, style); @@ -966,9 +972,9 @@ impl EmitterWriter { // | | | // | | something about `foo` // | something about `fn foo()` - annotations_position.sort_by(|a, b| { - // Decreasing order. When `a` and `b` are the same length, prefer `Primary`. - (a.1.len(), !a.1.is_primary).cmp(&(b.1.len(), !b.1.is_primary)).reverse() + annotations_position.sort_by_key(|(_, ann)| { + // Decreasing order. When annotations share the same length, prefer `Primary`. + (Reverse(ann.len()), ann.is_primary) }); // Write the underlines. 
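The annotation ordering just above swaps a hand-written reversed comparator for `sort_by_key` over a `(Reverse(length), is_primary)` tuple: longest annotations sort first, and the boolean breaks ties. The same trick on plain data; the labels are made up:

use std::cmp::Reverse;

fn main() {
    let mut anns = vec![("b", true), ("longest", false), ("mid", false), ("a", false)];
    // Ascending order on Reverse(len) puts the longest label first; among
    // equal lengths, `false` sorts before `true`.
    anns.sort_by_key(|&(label, primary)| (Reverse(label.len()), primary));
    assert_eq!(
        anns,
        vec![("longest", false), ("mid", false), ("a", false), ("b", true)]
    );
}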
@@ -991,11 +997,7 @@ impl EmitterWriter { for p in annotation.start_col..annotation.end_col { buffer.putc( line_offset + 1, - if code_offset + p > left { - code_offset + p - left - } else { - 0 - }, + (code_offset + p).saturating_sub(left), underline, style, ); @@ -1018,41 +1020,37 @@ impl EmitterWriter { } fn get_multispan_max_line_num(&mut self, msp: &MultiSpan) -> usize { + let sm = match self.sm { + Some(ref sm) => sm, + None => return 0, + }; + let mut max = 0; - if let Some(ref sm) = self.sm { - for primary_span in msp.primary_spans() { - if !primary_span.is_dummy() { - let hi = sm.lookup_char_pos(primary_span.hi()); - if hi.line > max { - max = hi.line; - } - } + for primary_span in msp.primary_spans() { + if !primary_span.is_dummy() { + let hi = sm.lookup_char_pos(primary_span.hi()); + max = (hi.line).max(max); } - if !self.short_message { - for span_label in msp.span_labels() { - if !span_label.span.is_dummy() { - let hi = sm.lookup_char_pos(span_label.span.hi()); - if hi.line > max { - max = hi.line; - } - } + } + if !self.short_message { + for span_label in msp.span_labels() { + if !span_label.span.is_dummy() { + let hi = sm.lookup_char_pos(span_label.span.hi()); + max = (hi.line).max(max); } } } + max } fn get_max_line_num(&mut self, span: &MultiSpan, children: &[SubDiagnostic]) -> usize { - let mut max = 0; - let primary = self.get_multispan_max_line_num(span); - max = if primary > max { primary } else { max }; - - for sub in children { - let sub_result = self.get_multispan_max_line_num(&sub.span); - max = if sub_result > max { primary } else { max }; - } - max + children.iter() + .map(|sub| self.get_multispan_max_line_num(&sub.span)) + .max() + .unwrap_or(0) + .max(primary) } /// Adds a left margin to every line but the first, given a padding length and the label being @@ -1082,14 +1080,12 @@ impl EmitterWriter { // `max_line_num_len` let padding = " ".repeat(padding + label.len() + 5); - /// Returns `true` if `style`, or the override if present and the style is `NoStyle`. - fn style_or_override(style: Style, override_style: Option\n\n").unwrap(); - trace::write_traces(&mut html_file, &mut counts_file, &frame.traces); - writeln!(html_file, "\n").unwrap(); - - let ack_path = format!("{}.ack", params.path); - let ack_file = File::create(&ack_path).unwrap(); - drop(ack_file); - - // Tell main thread that we are done, e.g., so it can exit - params.ack.send(()).unwrap(); - } - // Actual query message: - msg => { - // Record msg in our log - profq_msgs.push(msg.clone()); - // Respond to the message, knowing that we've already handled Halt and Dump, above. 
- match (frame.parse_st.clone(), msg) { - (_, ProfileQueriesMsg::Halt) | (_, ProfileQueriesMsg::Dump(_)) => { - unreachable!(); - }, - // Parse State: Clear - (ParseState::Clear, - ProfileQueriesMsg::QueryBegin(span, querymsg)) => { - let start = Instant::now(); - frame.parse_st = ParseState::HaveQuery - (Query { span, msg: querymsg }, start) - }, - (ParseState::Clear, - ProfileQueriesMsg::CacheHit) => { - panic!("parse error: unexpected CacheHit; expected QueryBegin") - }, - (ParseState::Clear, - ProfileQueriesMsg::ProviderBegin) => { - panic!("parse error: expected QueryBegin before beginning a provider") - }, - (ParseState::Clear, - ProfileQueriesMsg::ProviderEnd) => { - let provider_extent = frame.traces; - match stack.pop() { - None => - panic!("parse error: expected a stack frame; found an empty stack"), - Some(old_frame) => { - match old_frame.parse_st { - ParseState::HaveQuery(q, start) => { - let duration = start.elapsed(); - frame = StackFrame{ - parse_st: ParseState::Clear, - traces: old_frame.traces - }; - let dur_extent = total_duration(&provider_extent); - let trace = Rec { - effect: Effect::QueryBegin(q, CacheCase::Miss), - extent: Box::new(provider_extent), - start: start, - dur_self: duration - dur_extent, - dur_total: duration, - }; - frame.traces.push( trace ); - }, - _ => panic!("internal parse error: malformed parse stack") - } - } - } - }, - (ParseState::Clear, - ProfileQueriesMsg::TimeBegin(msg)) => { - let start = Instant::now(); - frame.parse_st = ParseState::HaveTimeBegin(msg, start); - stack.push(frame); - frame = StackFrame{parse_st: ParseState::Clear, traces: vec![]}; - }, - (_, ProfileQueriesMsg::TimeBegin(_)) => { - panic!("parse error; did not expect time begin here"); - }, - (ParseState::Clear, - ProfileQueriesMsg::TimeEnd) => { - let provider_extent = frame.traces; - match stack.pop() { - None => - panic!("parse error: expected a stack frame; found an empty stack"), - Some(old_frame) => { - match old_frame.parse_st { - ParseState::HaveTimeBegin(msg, start) => { - let duration = start.elapsed(); - frame = StackFrame{ - parse_st: ParseState::Clear, - traces: old_frame.traces - }; - let dur_extent = total_duration(&provider_extent); - let trace = Rec { - effect: Effect::TimeBegin(msg), - extent: Box::new(provider_extent), - start: start, - dur_total: duration, - dur_self: duration - dur_extent, - }; - frame.traces.push( trace ); - }, - _ => panic!("internal parse error: malformed parse stack") - } - } - } - }, - (_, ProfileQueriesMsg::TimeEnd) => { - panic!("parse error") - }, - (ParseState::Clear, - ProfileQueriesMsg::TaskBegin(key)) => { - let start = Instant::now(); - frame.parse_st = ParseState::HaveTaskBegin(key, start); - stack.push(frame); - frame = StackFrame{ parse_st: ParseState::Clear, traces: vec![] }; - }, - (_, ProfileQueriesMsg::TaskBegin(_)) => { - panic!("parse error; did not expect time begin here"); - }, - (ParseState::Clear, - ProfileQueriesMsg::TaskEnd) => { - let provider_extent = frame.traces; - match stack.pop() { - None => - panic!("parse error: expected a stack frame; found an empty stack"), - Some(old_frame) => { - match old_frame.parse_st { - ParseState::HaveTaskBegin(key, start) => { - let duration = start.elapsed(); - frame = StackFrame{ - parse_st: ParseState::Clear, - traces: old_frame.traces - }; - let dur_extent = total_duration(&provider_extent); - let trace = Rec { - effect: Effect::TaskBegin(key), - extent: Box::new(provider_extent), - start: start, - dur_total: duration, - dur_self: duration - dur_extent, - }; - 
frame.traces.push( trace ); - }, - _ => panic!("internal parse error: malformed parse stack") - } - } - } - }, - (_, ProfileQueriesMsg::TaskEnd) => { - panic!("parse error") - }, - // Parse State: HaveQuery - (ParseState::HaveQuery(q,start), - ProfileQueriesMsg::CacheHit) => { - let duration = start.elapsed(); - let trace : Rec = Rec{ - effect: Effect::QueryBegin(q, CacheCase::Hit), - extent: Box::new(vec![]), - start: start, - dur_self: duration, - dur_total: duration, - }; - frame.traces.push( trace ); - frame.parse_st = ParseState::Clear; - }, - (ParseState::HaveQuery(_, _), - ProfileQueriesMsg::ProviderBegin) => { - stack.push(frame); - frame = StackFrame{ parse_st: ParseState::Clear, traces: vec![] }; - }, - - // Parse errors: - - (ParseState::HaveQuery(q, _), - ProfileQueriesMsg::ProviderEnd) => { - panic!("parse error: unexpected ProviderEnd; \ - expected something else to follow BeginQuery for {:?}", q) - }, - (ParseState::HaveQuery(q1, _), - ProfileQueriesMsg::QueryBegin(span2, querymsg2)) => { - panic!("parse error: unexpected QueryBegin; \ - earlier query is unfinished: {:?} and now {:?}", - q1, Query{span:span2, msg: querymsg2}) - }, - (ParseState::HaveTimeBegin(_, _), _) => { - unreachable!() - }, - (ParseState::HaveTaskBegin(_, _), _) => { - unreachable!() - }, - } - } - } - } -} diff --git a/src/librustc_interface/profile/trace.rs b/src/librustc_interface/profile/trace.rs deleted file mode 100644 index 95c4ea6ff2..0000000000 --- a/src/librustc_interface/profile/trace.rs +++ /dev/null @@ -1,304 +0,0 @@ -use super::*; -use syntax_pos::SpanData; -use rustc_data_structures::fx::FxHashMap; -use rustc::util::common::QueryMsg; -use std::fs::File; -use std::time::{Duration, Instant}; -use rustc::dep_graph::{DepNode}; - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct Query { - pub span: SpanData, - pub msg: QueryMsg, -} -pub enum Effect { - QueryBegin(Query, CacheCase), - TimeBegin(String), - TaskBegin(DepNode), -} -pub enum CacheCase { - Hit, Miss -} -/// Recursive trace structure -pub struct Rec { - pub effect: Effect, - pub start: Instant, - pub dur_self: Duration, - pub dur_total: Duration, - pub extent: Box>, -} -pub struct QueryMetric { - pub count: usize, - pub dur_self: Duration, - pub dur_total: Duration, -} - -fn cons(s: &str) -> String { - let first = s.split(|d| d == '(' || d == '{').next(); - assert!(first.is_some() && first != Some("")); - first.unwrap().to_owned() -} - -pub fn cons_of_query_msg(q: &trace::Query) -> String { - cons(&format!("{:?}", q.msg)) -} - -pub fn cons_of_key(k: &DepNode) -> String { - cons(&format!("{:?}", k)) -} - -// First return value is text; second return value is a CSS class -pub fn html_of_effect(eff: &Effect) -> (String, String) { - match *eff { - Effect::TimeBegin(ref msg) => { - (msg.clone(), - "time-begin".to_string()) - }, - Effect::TaskBegin(ref key) => { - let cons = cons_of_key(key); - (cons.clone(), format!("{} task-begin", cons)) - }, - Effect::QueryBegin(ref qmsg, ref cc) => { - let cons = cons_of_query_msg(qmsg); - (cons.clone(), - format!("{} {}", - cons, - match *cc { - CacheCase::Hit => "hit", - CacheCase::Miss => "miss", - })) - } - } -} - -// First return value is text; second return value is a CSS class -fn html_of_duration(_start: &Instant, dur: &Duration) -> (String, String) { - use rustc::util::common::duration_to_secs_str; - (duration_to_secs_str(dur.clone()), String::new()) -} - -fn html_of_fraction(frac: f64) -> (String, &'static str) { - let css = { - if frac > 0.50 { "frac-50" } - else if frac > 0.40 { 
"frac-40" } - else if frac > 0.30 { "frac-30" } - else if frac > 0.20 { "frac-20" } - else if frac > 0.10 { "frac-10" } - else if frac > 0.05 { "frac-05" } - else if frac > 0.02 { "frac-02" } - else if frac > 0.01 { "frac-01" } - else if frac > 0.001 { "frac-001" } - else { "frac-0" } - }; - let percent = frac * 100.0; - - if percent > 0.1 { - (format!("{:.1}%", percent), css) - } else { - ("< 0.1%".to_string(), css) - } -} - -fn total_duration(traces: &[Rec]) -> Duration { - Duration::new(0, 0) + traces.iter().map(|t| t.dur_total).sum() -} - -fn duration_div(nom: Duration, den: Duration) -> f64 { - fn to_nanos(d: Duration) -> u64 { - d.as_secs() * 1_000_000_000 + d.subsec_nanos() as u64 - } - - to_nanos(nom) as f64 / to_nanos(den) as f64 -} - -fn write_traces_rec(file: &mut File, traces: &[Rec], total: Duration, depth: usize) { - for t in traces { - let (eff_text, eff_css_classes) = html_of_effect(&t.effect); - let (dur_text, dur_css_classes) = html_of_duration(&t.start, &t.dur_total); - let fraction = duration_div(t.dur_total, total); - let percent = fraction * 100.0; - let (frc_text, frc_css_classes) = html_of_fraction(fraction); - writeln!(file, "

").unwrap(); - } -} - -fn compute_counts_rec(counts: &mut FxHashMap, traces: &[Rec]) { - counts.reserve(traces.len()); - for t in traces.iter() { - match t.effect { - Effect::TimeBegin(ref msg) => { - let qm = match counts.get(msg) { - Some(_qm) => panic!("TimeBegin with non-unique, repeat message"), - None => QueryMetric { - count: 1, - dur_self: t.dur_self, - dur_total: t.dur_total, - } - }; - counts.insert(msg.clone(), qm); - }, - Effect::TaskBegin(ref key) => { - let cons = cons_of_key(key); - let qm = match counts.get(&cons) { - Some(qm) => - QueryMetric { - count: qm.count + 1, - dur_self: qm.dur_self + t.dur_self, - dur_total: qm.dur_total + t.dur_total, - }, - None => QueryMetric { - count: 1, - dur_self: t.dur_self, - dur_total: t.dur_total, - } - }; - counts.insert(cons, qm); - }, - Effect::QueryBegin(ref qmsg, ref _cc) => { - let qcons = cons_of_query_msg(qmsg); - let qm = match counts.get(&qcons) { - Some(qm) => - QueryMetric { - count: qm.count + 1, - dur_total: qm.dur_total + t.dur_total, - dur_self: qm.dur_self + t.dur_self - }, - None => QueryMetric { - count: 1, - dur_total: t.dur_total, - dur_self: t.dur_self, - } - }; - counts.insert(qcons, qm); - } - } - compute_counts_rec(counts, &t.extent) - } -} - -pub fn write_counts(count_file: &mut File, counts: &mut FxHashMap) { - use rustc::util::common::duration_to_secs_str; - use std::cmp::Reverse; - - let mut data = counts.iter().map(|(ref cons, ref qm)| - (cons.clone(), qm.count.clone(), qm.dur_total.clone(), qm.dur_self.clone()) - ).collect::>(); - - data.sort_by_key(|k| Reverse(k.3)); - for (cons, count, dur_total, dur_self) in data { - writeln!(count_file, "{}, {}, {}, {}", - cons, count, - duration_to_secs_str(dur_total), - duration_to_secs_str(dur_self) - ).unwrap(); - } -} - -pub fn write_traces(html_file: &mut File, counts_file: &mut File, traces: &[Rec]) { - let capacity = traces.iter().fold(0, |acc, t| acc + 1 + t.extent.len()); - let mut counts = FxHashMap::with_capacity_and_hasher(capacity, Default::default()); - compute_counts_rec(&mut counts, traces); - write_counts(counts_file, &mut counts); - - let total: Duration = total_duration(traces); - write_traces_rec(html_file, traces, total, 0) -} - -pub fn write_style(html_file: &mut File) { - write!(html_file, "{}", " -body { - font-family: sans-serif; - background: black; -} -.trace { - color: black; - display: inline-block; - border-style: solid; - border-color: red; - border-width: 1px; - border-radius: 5px; - padding: 0px; - margin: 1px; - font-size: 0px; -} -.task-begin { - border-width: 1px; - color: white; - border-color: #ff8; - font-size: 0px; -} -.miss { - border-color: red; - border-width: 1px; -} -.extent-0 { - padding: 2px; -} -.time-begin { - border-width: 4px; - font-size: 12px; - color: white; - border-color: #afa; -} -.important { - border-width: 3px; - font-size: 12px; - color: white; - border-color: #f77; -} -.hit { - padding: 0px; - border-color: blue; - border-width: 3px; -} -.eff { - color: #fff; - display: inline-block; -} -.frc { - color: #7f7; - display: inline-block; -} -.dur { - display: none -} -.frac-50 { - padding: 10px; - border-width: 10px; - font-size: 32px; -} -.frac-40 { - padding: 8px; - border-width: 8px; - font-size: 24px; -} -.frac-30 { - padding: 6px; - border-width: 6px; - font-size: 18px; -} -.frac-20 { - padding: 4px; - border-width: 6px; - font-size: 16px; -} -.frac-10 { - padding: 2px; - border-width: 6px; - font-size: 14px; -} -").unwrap(); -} diff --git a/src/librustc_interface/queries.rs 
b/src/librustc_interface/queries.rs index ed50dadb60..ea51e63725 100644 --- a/src/librustc_interface/queries.rs +++ b/src/librustc_interface/queries.rs @@ -1,16 +1,20 @@ use crate::interface::{Compiler, Result}; -use crate::passes::{self, BoxedResolver, ExpansionResult, BoxedGlobalCtxt, PluginInfo}; +use crate::passes::{self, BoxedResolver, BoxedGlobalCtxt, PluginInfo}; use rustc_incremental::DepGraphFuture; +use rustc_data_structures::sync::Lrc; use rustc::session::config::{OutputFilenames, OutputType}; use rustc::util::common::{time, ErrorReported}; use rustc::hir; +use rustc::lint; +use rustc::session::Session; +use rustc::lint::LintStore; use rustc::hir::def_id::LOCAL_CRATE; use rustc::ty::steal::Steal; +use rustc::ty::ResolverOutputs; use rustc::dep_graph::DepGraph; use std::cell::{Ref, RefMut, RefCell}; use std::rc::Rc; -use std::sync::mpsc; use std::any::Any; use std::mem; use syntax::{self, ast}; @@ -75,13 +79,11 @@ pub(crate) struct Queries { dep_graph_future: Query>, parse: Query, crate_name: Query, - register_plugins: Query<(ast::Crate, PluginInfo)>, - expansion: Query<(ast::Crate, Steal>>)>, + register_plugins: Query<(ast::Crate, PluginInfo, Lrc)>, + expansion: Query<(ast::Crate, Steal>>, Lrc)>, dep_graph: Query, - lower_to_hir: Query<(Steal, ExpansionResult)>, + lower_to_hir: Query<(Steal, Steal)>, prepare_outputs: Query, - codegen_channel: Query<(Steal>>, - Steal>>)>, global_ctxt: Query, ongoing_codegen: Query>, link: Query<()>, @@ -109,50 +111,67 @@ impl Compiler { }) } - pub fn register_plugins(&self) -> Result<&Query<(ast::Crate, PluginInfo)>> { + pub fn register_plugins(&self) -> Result<&Query<(ast::Crate, PluginInfo, Lrc)>> { self.queries.register_plugins.compute(|| { let crate_name = self.crate_name()?.peek().clone(); let krate = self.parse()?.take(); - passes::register_plugins( - self, + let empty: &(dyn Fn(&Session, &mut lint::LintStore) + Sync + Send) = &|_, _| {}; + let result = passes::register_plugins( self.session(), - self.cstore(), + &*self.codegen_backend().metadata_loader(), + self.register_lints + .as_ref() + .map(|p| &**p) + .unwrap_or_else(|| empty), krate, &crate_name, - ) + ); + + // Compute the dependency graph (in the background). We want to do + // this as early as possible, to give the DepGraph maximum time to + // load before dep_graph() is called, but it also can't happen + // until after rustc_incremental::prepare_session_directory() is + // called, which happens within passes::register_plugins(). 
+ self.dep_graph_future().ok(); + + result }) } pub fn crate_name(&self) -> Result<&Query> { self.queries.crate_name.compute(|| { - let parse_result = self.parse()?; - let krate = parse_result.peek(); - let result = match self.crate_name { + Ok(match self.crate_name { Some(ref crate_name) => crate_name.clone(), - None => rustc_codegen_utils::link::find_crate_name( - Some(self.session()), - &krate.attrs, - &self.input - ), - }; - Ok(result) + None => { + let parse_result = self.parse()?; + let krate = parse_result.peek(); + rustc_codegen_utils::link::find_crate_name( + Some(self.session()), + &krate.attrs, + &self.input + ) + } + }) }) } pub fn expansion( &self - ) -> Result<&Query<(ast::Crate, Steal>>)>> { + ) -> Result<&Query<(ast::Crate, Steal>>, Lrc)>> { self.queries.expansion.compute(|| { let crate_name = self.crate_name()?.peek().clone(); - let (krate, plugin_info) = self.register_plugins()?.take(); + let (krate, plugin_info, lint_store) = self.register_plugins()?.take(); passes::configure_and_expand( self.sess.clone(), - self.cstore().clone(), + lint_store.clone(), + self.codegen_backend().metadata_loader(), krate, &crate_name, plugin_info, - ).map(|(krate, resolver)| (krate, Steal::new(Rc::new(RefCell::new(resolver))))) + ).map(|(krate, resolver)| { + (krate, Steal::new(Rc::new(RefCell::new(resolver))), lint_store) + }) }) } @@ -173,41 +192,35 @@ impl Compiler { }) } - pub fn lower_to_hir(&self) -> Result<&Query<(Steal, ExpansionResult)>> { + pub fn lower_to_hir( + &self, + ) -> Result<&Query<(Steal, Steal)>> { self.queries.lower_to_hir.compute(|| { let expansion_result = self.expansion()?; let peeked = expansion_result.peek(); let krate = &peeked.0; let resolver = peeked.1.steal(); + let lint_store = &peeked.2; let hir = Steal::new(resolver.borrow_mut().access(|resolver| { passes::lower_to_hir( self.session(), - self.cstore(), + lint_store, resolver, &*self.dep_graph()?.peek(), &krate ) })?); - Ok((hir, BoxedResolver::to_expansion_result(resolver))) + Ok((hir, Steal::new(BoxedResolver::to_resolver_outputs(resolver)))) }) } pub fn prepare_outputs(&self) -> Result<&Query> { self.queries.prepare_outputs.compute(|| { - self.lower_to_hir()?; - let krate = self.expansion()?; - let krate = krate.peek(); + let expansion_result = self.expansion()?; + let (krate, boxed_resolver, _) = &*expansion_result.peek(); let crate_name = self.crate_name()?; let crate_name = crate_name.peek(); - passes::prepare_outputs(self.session(), self, &krate.0, &*crate_name) - }) - } - - pub fn codegen_channel(&self) -> Result<&Query<(Steal>>, - Steal>>)>> { - self.queries.codegen_channel.compute(|| { - let (tx, rx) = mpsc::channel(); - Ok((Steal::new(tx), Steal::new(rx))) + passes::prepare_outputs(self.session(), self, &krate, &boxed_resolver, &crate_name) }) } @@ -215,24 +228,22 @@ impl Compiler { self.queries.global_ctxt.compute(|| { let crate_name = self.crate_name()?.peek().clone(); let outputs = self.prepare_outputs()?.peek().clone(); + let lint_store = self.expansion()?.peek().2.clone(); let hir = self.lower_to_hir()?; let hir = hir.peek(); - let (ref hir_forest, ref expansion) = *hir; - let tx = self.codegen_channel()?.peek().0.steal(); + let (hir_forest, resolver_outputs) = &*hir; Ok(passes::create_global_ctxt( self, + lint_store, hir_forest.steal(), - expansion.defs.steal(), - expansion.resolutions.steal(), + resolver_outputs.steal(), outputs, - tx, &crate_name)) }) } pub fn ongoing_codegen(&self) -> Result<&Query>> { self.queries.ongoing_codegen.compute(|| { - let rx = 
self.codegen_channel()?.peek().1.steal(); let outputs = self.prepare_outputs()?; self.global_ctxt()?.peek_mut().enter(|tcx| { tcx.analysis(LOCAL_CRATE).ok(); @@ -243,7 +254,6 @@ impl Compiler { Ok(passes::start_codegen( &***self.codegen_backend(), tcx, - rx, &*outputs.peek() )) }) @@ -267,6 +277,11 @@ impl Compiler { }) } + // This method is different to all the other methods in `Compiler` because + // it lacks a `Queries` entry. It's also not currently used. It does serve + // as an example of how `Compiler` can be used, with additional steps added + // between some passes. And see `rustc_driver::run_compiler` for a more + // complex example. pub fn compile(&self) -> Result<()> { self.prepare_outputs()?; @@ -278,12 +293,12 @@ impl Compiler { self.global_ctxt()?; - // Drop AST after creating GlobalCtxt to free memory + // Drop AST after creating GlobalCtxt to free memory. mem::drop(self.expansion()?.take()); self.ongoing_codegen()?; - // Drop GlobalCtxt after starting codegen to free memory + // Drop GlobalCtxt after starting codegen to free memory. mem::drop(self.global_ctxt()?.take()); self.link().map(|_| ()) diff --git a/src/librustc/session/config/tests.rs b/src/librustc_interface/tests.rs similarity index 91% rename from src/librustc/session/config/tests.rs rename to src/librustc_interface/tests.rs index 9eb68056bf..7a57605da5 100644 --- a/src/librustc/session/config/tests.rs +++ b/src/librustc_interface/tests.rs @@ -1,40 +1,51 @@ -use getopts; -use crate::lint; -use crate::middle::cstore; -use crate::session::config::{ - build_configuration, - build_session_options_and_crate_config, - to_crate_config -}; -use crate::session::config::{LtoCli, LinkerPluginLto, SwitchWithOptPath, ExternEntry}; -use crate::session::build_session; -use crate::session::search_paths::SearchPath; +extern crate getopts; + +use crate::interface::parse_cfgspecs; + +use rustc::lint; +use rustc::middle::cstore; +use rustc::session::config::{build_configuration, build_session_options, to_crate_config}; +use rustc::session::config::{LtoCli, LinkerPluginLto, SwitchWithOptPath, ExternEntry}; +use rustc::session::config::{Externs, OutputType, OutputTypes, SymbolManglingVersion}; +use rustc::session::config::{rustc_optgroups, Options, ErrorOutputType, Passes}; +use rustc::session::build_session; +use rustc::session::search_paths::SearchPath; use std::collections::{BTreeMap, BTreeSet}; use std::iter::FromIterator; use std::path::PathBuf; -use super::{Externs, OutputType, OutputTypes, SymbolManglingVersion}; use rustc_target::spec::{MergeFunctions, PanicStrategy, RelroLevel}; use syntax::symbol::sym; use syntax::edition::{Edition, DEFAULT_EDITION}; use syntax; -use super::Options; +use rustc_data_structures::fx::FxHashSet; +use rustc_errors::{ColorConfig, emitter::HumanReadableErrorType, registry}; -impl ExternEntry { - fn new_public, - I: IntoIterator>>(locations: I) -> ExternEntry { - let locations: BTreeSet<_> = locations.into_iter().map(|o| o.map(|s| s.into())) - .collect(); +pub fn build_session_options_and_crate_config( + matches: &getopts::Matches, +) -> (Options, FxHashSet<(String, Option)>) { + ( + build_session_options(matches), + parse_cfgspecs(matches.opt_strs("cfg")), + ) +} - ExternEntry { - locations, - is_private_dep: false - } +fn new_public_extern_entry(locations: I) -> ExternEntry +where + S: Into, + I: IntoIterator>, +{ + let locations: BTreeSet<_> = locations.into_iter().map(|o| o.map(|s| s.into())) + .collect(); + + ExternEntry { + locations, + is_private_dep: false } } fn optgroups() -> 
getopts::Options { let mut opts = getopts::Options::new(); - for group in super::rustc_optgroups() { + for group in rustc_optgroups() { (group.apply)(&mut opts); } return opts; @@ -52,7 +63,7 @@ fn test_switch_implies_cfg_test() { Ok(m) => m, Err(f) => panic!("test_switch_implies_cfg_test: {}", f), }; - let registry = errors::registry::Registry::new(&[]); + let registry = registry::Registry::new(&[]); let (sessopts, cfg) = build_session_options_and_crate_config(matches); let sess = build_session(sessopts, None, registry); let cfg = build_configuration(&sess, to_crate_config(cfg)); @@ -70,7 +81,7 @@ fn test_switch_implies_cfg_test_unless_cfg_test() { Ok(m) => m, Err(f) => panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f), }; - let registry = errors::registry::Registry::new(&[]); + let registry = registry::Registry::new(&[]); let (sessopts, cfg) = build_session_options_and_crate_config(matches); let sess = build_session(sessopts, None, registry); let cfg = build_configuration(&sess, to_crate_config(cfg)); @@ -84,7 +95,7 @@ fn test_switch_implies_cfg_test_unless_cfg_test() { fn test_can_print_warnings() { syntax::with_default_globals(|| { let matches = optgroups().parse(&["-Awarnings".to_string()]).unwrap(); - let registry = errors::registry::Registry::new(&[]); + let registry = registry::Registry::new(&[]); let (sessopts, _) = build_session_options_and_crate_config(&matches); let sess = build_session(sessopts, None, registry); assert!(!sess.diagnostic().can_emit_warnings()); @@ -94,7 +105,7 @@ fn test_can_print_warnings() { let matches = optgroups() .parse(&["-Awarnings".to_string(), "-Dwarnings".to_string()]) .unwrap(); - let registry = errors::registry::Registry::new(&[]); + let registry = registry::Registry::new(&[]); let (sessopts, _) = build_session_options_and_crate_config(&matches); let sess = build_session(sessopts, None, registry); assert!(sess.diagnostic().can_emit_warnings()); @@ -102,7 +113,7 @@ fn test_can_print_warnings() { syntax::with_default_globals(|| { let matches = optgroups().parse(&["-Adead_code".to_string()]).unwrap(); - let registry = errors::registry::Registry::new(&[]); + let registry = registry::Registry::new(&[]); let (sessopts, _) = build_session_options_and_crate_config(&matches); let sess = build_session(sessopts, None, registry); assert!(sess.diagnostic().can_emit_warnings()); @@ -161,33 +172,33 @@ fn test_externs_tracking_hash_different_construction_order() { v1.externs = Externs::new(mk_map(vec![ ( String::from("a"), - ExternEntry::new_public(vec![Some("b"), Some("c")]) + new_public_extern_entry(vec![Some("b"), Some("c")]) ), ( String::from("d"), - ExternEntry::new_public(vec![Some("e"), Some("f")]) + new_public_extern_entry(vec![Some("e"), Some("f")]) ), ])); v2.externs = Externs::new(mk_map(vec![ ( String::from("d"), - ExternEntry::new_public(vec![Some("e"), Some("f")]) + new_public_extern_entry(vec![Some("e"), Some("f")]) ), ( String::from("a"), - ExternEntry::new_public(vec![Some("b"), Some("c")]) + new_public_extern_entry(vec![Some("b"), Some("c")]) ), ])); v3.externs = Externs::new(mk_map(vec![ ( String::from("a"), - ExternEntry::new_public(vec![Some("b"), Some("c")]) + new_public_extern_entry(vec![Some("b"), Some("c")]) ), ( String::from("d"), - ExternEntry::new_public(vec![Some("f"), Some("e")]) + new_public_extern_entry(vec![Some("f"), Some("e")]) ), ])); @@ -271,9 +282,9 @@ fn test_search_paths_tracking_hash_different_order() { let mut v3 = Options::default(); let mut v4 = Options::default(); - const JSON: super::ErrorOutputType = 
super::ErrorOutputType::Json { + const JSON: ErrorOutputType = ErrorOutputType::Json { pretty: false, - json_rendered: super::HumanReadableErrorType::Default(super::ColorConfig::Never), + json_rendered: HumanReadableErrorType::Default(ColorConfig::Never), }; // Reference @@ -444,7 +455,7 @@ fn test_codegen_options_tracking_hash() { opts.cg.codegen_units = Some(42); assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash()); - opts.cg.remark = super::Passes::Some(vec![String::from("pass1"), String::from("pass2")]); + opts.cg.remark = Passes::Some(vec![String::from("pass1"), String::from("pass2")]); assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash()); opts.cg.save_temps = true; @@ -589,14 +600,6 @@ fn test_debugging_options_tracking_hash() { assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash()); opts.debugging_opts.save_analysis = true; assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash()); - opts.debugging_opts.flowgraph_print_loans = true; - assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash()); - opts.debugging_opts.flowgraph_print_moves = true; - assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash()); - opts.debugging_opts.flowgraph_print_assigns = true; - assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash()); - opts.debugging_opts.flowgraph_print_all = true; - assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash()); opts.debugging_opts.print_region_graph = true; assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash()); opts.debugging_opts.parse_only = true; diff --git a/src/librustc_interface/util.rs b/src/librustc_interface/util.rs index b81f814de0..d0c15073f1 100644 --- a/src/librustc_interface/util.rs +++ b/src/librustc_interface/util.rs @@ -13,7 +13,6 @@ use rustc_data_structures::fingerprint::Fingerprint; use rustc_data_structures::thin_vec::ThinVec; use rustc_data_structures::fx::{FxHashSet, FxHashMap}; use rustc_errors::registry::Registry; -use rustc_lint; use rustc_metadata::dynamic_lib::DynamicLibrary; use rustc_mir; use rustc_passes; @@ -108,11 +107,6 @@ pub fn create_session( let codegen_backend = get_codegen_backend(&sess); - rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); - if sess.unstable_options() { - rustc_lint::register_internals(&mut sess.lint_store.borrow_mut(), Some(&sess)); - } - let mut cfg = config::build_configuration(&sess, config::to_crate_config(cfg)); add_configuration(&mut cfg, &sess, &*codegen_backend); sess.parse_sess.config = cfg; @@ -173,7 +167,7 @@ pub fn scoped_thread R + Send, R: Send>(cfg: thread::Builder, f: #[cfg(not(parallel_compiler))] pub fn spawn_thread_pool R + Send, R: Send>( edition: Edition, - _threads: Option, + _threads: usize, stderr: &Option>>>, f: F, ) -> R { @@ -198,18 +192,19 @@ pub fn spawn_thread_pool R + Send, R: Send>( #[cfg(parallel_compiler)] pub fn spawn_thread_pool R + Send, R: Send>( edition: Edition, - threads: Option, + threads: usize, stderr: &Option>>>, f: F, ) -> R { - use rayon::{ThreadPool, ThreadPoolBuilder}; + use rayon::{ThreadBuilder, ThreadPool, ThreadPoolBuilder}; let gcx_ptr = &Lock::new(0); let mut config = ThreadPoolBuilder::new() + .thread_name(|_| "rustc".to_string()) .acquire_thread_handler(jobserver::acquire_thread) .release_thread_handler(jobserver::release_thread) - .num_threads(Session::threads_from_count(threads)) + .num_threads(threads) .deadlock_handler(|| unsafe { ty::query::handle_deadlock() }); if let Some(size) = get_stack_size() { @@ -225,20 
+220,20 @@ pub fn spawn_thread_pool R + Send, R: Send>( // the thread local rustc uses. syntax_globals and syntax_pos_globals are // captured and set on the new threads. ty::tls::with_thread_locals sets up // thread local callbacks from libsyntax - let main_handler = move |worker: &mut dyn FnMut()| { + let main_handler = move |thread: ThreadBuilder| { syntax::GLOBALS.set(syntax_globals, || { syntax_pos::GLOBALS.set(syntax_pos_globals, || { if let Some(stderr) = stderr { io::set_panic(Some(box Sink(stderr.clone()))); } ty::tls::with_thread_locals(|| { - ty::tls::GCX_PTR.set(gcx_ptr, || worker()) + ty::tls::GCX_PTR.set(gcx_ptr, || thread.run()) }) }) }) }; - ThreadPool::scoped_pool(config, main_handler, with_pool).unwrap() + config.build_scoped(main_handler, with_pool).unwrap() }) }) }) @@ -502,7 +497,7 @@ pub(crate) fn compute_crate_disambiguator(session: &Session) -> CrateDisambiguat // into various other hashes quite a bit (symbol hashes, incr. comp. hashes, // debuginfo type IDs, etc), so we don't want it to be too wide. 128 bits // should still be safe enough to avoid collisions in practice. - let mut hasher = StableHasher::::new(); + let mut hasher = StableHasher::new(); let mut metadata = session.opts.cg.metadata.clone(); // We don't want the crate_disambiguator to dependent on the order @@ -528,7 +523,64 @@ pub(crate) fn compute_crate_disambiguator(session: &Session) -> CrateDisambiguat .contains(&config::CrateType::Executable); hasher.write(if is_exe { b"exe" } else { b"lib" }); - CrateDisambiguator::from(hasher.finish()) + CrateDisambiguator::from(hasher.finish::()) +} + +pub(crate) fn check_attr_crate_type(attrs: &[ast::Attribute], lint_buffer: &mut lint::LintBuffer) { + // Unconditionally collect crate types from attributes to make them used + for a in attrs.iter() { + if a.check_name(sym::crate_type) { + if let Some(n) = a.value_str() { + if let Some(_) = categorize_crate_type(n) { + return; + } + + if let ast::MetaItemKind::NameValue(spanned) = a.meta().unwrap().kind { + let span = spanned.span; + let lev_candidate = find_best_match_for_name( + CRATE_TYPES.iter().map(|(k, _)| k), + &n.as_str(), + None + ); + if let Some(candidate) = lev_candidate { + lint_buffer.buffer_lint_with_diagnostic( + lint::builtin::UNKNOWN_CRATE_TYPES, + ast::CRATE_NODE_ID, + span, + "invalid `crate_type` value", + lint::builtin::BuiltinLintDiagnostics:: + UnknownCrateTypes( + span, + "did you mean".to_string(), + format!("\"{}\"", candidate) + ) + ); + } else { + lint_buffer.buffer_lint( + lint::builtin::UNKNOWN_CRATE_TYPES, + ast::CRATE_NODE_ID, + span, + "invalid `crate_type` value" + ); + } + } + } + } + } +} + +const CRATE_TYPES: &[(Symbol, config::CrateType)] = &[ + (sym::rlib, config::CrateType::Rlib), + (sym::dylib, config::CrateType::Dylib), + (sym::cdylib, config::CrateType::Cdylib), + (sym::lib, config::default_lib_output()), + (sym::staticlib, config::CrateType::Staticlib), + (sym::proc_dash_macro, config::CrateType::ProcMacro), + (sym::bin, config::CrateType::Executable), +]; + +fn categorize_crate_type(s: Symbol) -> Option { + Some(CRATE_TYPES.iter().find(|(key, _)| *key == s)?.1) } pub fn collect_crate_types(session: &Session, attrs: &[ast::Attribute]) -> Vec { @@ -538,56 +590,8 @@ pub fn collect_crate_types(session: &Session, attrs: &[ast::Attribute]) -> Vec Some(config::CrateType::Rlib), - Some(sym::dylib) => Some(config::CrateType::Dylib), - Some(sym::cdylib) => Some(config::CrateType::Cdylib), - Some(sym::lib) => Some(config::default_lib_output()), - Some(sym::staticlib) => 
Some(config::CrateType::Staticlib), - Some(sym::proc_dash_macro) => Some(config::CrateType::ProcMacro), - Some(sym::bin) => Some(config::CrateType::Executable), - Some(n) => { - let crate_types = vec![ - sym::rlib, - sym::dylib, - sym::cdylib, - sym::lib, - sym::staticlib, - sym::proc_dash_macro, - sym::bin - ]; - - if let ast::MetaItemKind::NameValue(spanned) = a.meta().unwrap().node { - let span = spanned.span; - let lev_candidate = find_best_match_for_name( - crate_types.iter(), - &n.as_str(), - None - ); - if let Some(candidate) = lev_candidate { - session.buffer_lint_with_diagnostic( - lint::builtin::UNKNOWN_CRATE_TYPES, - ast::CRATE_NODE_ID, - span, - "invalid `crate_type` value", - lint::builtin::BuiltinLintDiagnostics:: - UnknownCrateTypes( - span, - "did you mean".to_string(), - format!("\"{}\"", candidate) - ) - ); - } else { - session.buffer_lint( - lint::builtin::UNKNOWN_CRATE_TYPES, - ast::CRATE_NODE_ID, - span, - "invalid `crate_type` value" - ); - } - } - None - } - None => None + Some(s) => categorize_crate_type(s), + _ => None, } } else { None @@ -738,7 +742,7 @@ impl<'a> ReplaceBodyWithLoop<'a> { fn should_ignore_fn(ret_ty: &ast::FnDecl) -> bool { if let ast::FunctionRetTy::Ty(ref ty) = ret_ty.output { fn involves_impl_trait(ty: &ast::Ty) -> bool { - match ty.node { + match ty.kind { ast::TyKind::ImplTrait(..) => true, ast::TyKind::Slice(ref subty) | ast::TyKind::Array(ref subty, _) | @@ -796,7 +800,7 @@ impl<'a> MutVisitor for ReplaceBodyWithLoop<'a> { } fn flat_map_trait_item(&mut self, i: ast::TraitItem) -> SmallVec<[ast::TraitItem; 1]> { - let is_const = match i.node { + let is_const = match i.kind { ast::TraitItemKind::Const(..) => true, ast::TraitItemKind::Method(ast::MethodSig { ref decl, ref header, .. }, _) => header.constness.node == ast::Constness::Const || Self::should_ignore_fn(decl), @@ -806,7 +810,7 @@ impl<'a> MutVisitor for ReplaceBodyWithLoop<'a> { } fn flat_map_impl_item(&mut self, i: ast::ImplItem) -> SmallVec<[ast::ImplItem; 1]> { - let is_const = match i.node { + let is_const = match i.kind { ast::ImplItemKind::Const(..) => true, ast::ImplItemKind::Method(ast::MethodSig { ref decl, ref header, .. 
}, _) => header.constness.node == ast::Constness::Const || Self::should_ignore_fn(decl), @@ -834,21 +838,21 @@ impl<'a> MutVisitor for ReplaceBodyWithLoop<'a> { fn block_to_stmt(b: ast::Block, sess: &Session) -> ast::Stmt { let expr = P(ast::Expr { id: sess.next_node_id(), - node: ast::ExprKind::Block(P(b), None), + kind: ast::ExprKind::Block(P(b), None), span: syntax_pos::DUMMY_SP, attrs: ThinVec::new(), }); ast::Stmt { id: sess.next_node_id(), - node: ast::StmtKind::Expr(expr), + kind: ast::StmtKind::Expr(expr), span: syntax_pos::DUMMY_SP, } } let empty_block = stmt_to_block(BlockCheckMode::Default, None, self.sess); let loop_expr = P(ast::Expr { - node: ast::ExprKind::Loop(P(empty_block), None), + kind: ast::ExprKind::Loop(P(empty_block), None), id: self.sess.next_node_id(), span: syntax_pos::DUMMY_SP, attrs: ThinVec::new(), @@ -857,7 +861,7 @@ impl<'a> MutVisitor for ReplaceBodyWithLoop<'a> { let loop_stmt = ast::Stmt { id: self.sess.next_node_id(), span: syntax_pos::DUMMY_SP, - node: ast::StmtKind::Expr(loop_expr), + kind: ast::StmtKind::Expr(loop_expr), }; if self.within_static_or_const { diff --git a/src/librustc_lexer/src/cursor.rs b/src/librustc_lexer/src/cursor.rs index 5831159c34..73d305c6d4 100644 --- a/src/librustc_lexer/src/cursor.rs +++ b/src/librustc_lexer/src/cursor.rs @@ -1,5 +1,9 @@ use std::str::Chars; +/// Peekable iterator over a char sequence. +/// +/// Next characters can be peeked via `nth_char` method, +/// and position can be shifted forward via `bump` method. pub(crate) struct Cursor<'a> { initial_len: usize, chars: Chars<'a>, @@ -18,7 +22,9 @@ impl<'a> Cursor<'a> { prev: EOF_CHAR, } } + /// For debug assertions only + /// Returns the last eaten symbol (or '\0' in release builds). pub(crate) fn prev(&self) -> char { #[cfg(debug_assertions)] { @@ -30,19 +36,30 @@ impl<'a> Cursor<'a> { '\0' } } + + /// Returns nth character relative to the current cursor position. + /// If requested position doesn't exist, `EOF_CHAR` is returned. + /// However, getting `EOF_CHAR` doesn't always mean actual end of file, + /// it should be checked with `is_eof` method. pub(crate) fn nth_char(&self, n: usize) -> char { self.chars().nth(n).unwrap_or(EOF_CHAR) } + + /// Checks if there is nothing more to consume. pub(crate) fn is_eof(&self) -> bool { self.chars.as_str().is_empty() } + + /// Returns amount of already consumed symbols. pub(crate) fn len_consumed(&self) -> usize { self.initial_len - self.chars.as_str().len() } - /// Returns an iterator over the remaining characters. + + /// Returns a `Chars` iterator over the remaining characters. fn chars(&self) -> Chars<'a> { self.chars.clone() } + /// Moves to the next character. pub(crate) fn bump(&mut self) -> Option { let c = self.chars.next()?; diff --git a/src/librustc_lexer/src/lib.rs b/src/librustc_lexer/src/lib.rs index 08608cfe98..d55ef46d75 100644 --- a/src/librustc_lexer/src/lib.rs +++ b/src/librustc_lexer/src/lib.rs @@ -1,3 +1,16 @@ +//! Low-level Rust lexer. +//! +//! Tokens produced by this lexer are not yet ready for parsing the Rust syntax, +//! for that see `libsyntax::parse::lexer`, which converts this basic token stream +//! into wide tokens used by actual parser. +//! +//! The purpose of this crate is to convert raw sources into a labeled sequence +//! of well-known token types, so building an actual Rust token stream will +//! be easier. +//! +//! Main entity of this crate is [`TokenKind`] enum which represents common +//! lexeme types. 
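
Editorial note, not part of the patch: the doc comments added to `cursor.rs` above describe `Cursor` as a peekable `Chars` iterator that can report how many bytes it has consumed. A self-contained sketch of that pattern (simplified, not the real `rustc_lexer` code):

```rust
use std::str::Chars;

const EOF_CHAR: char = '\0';

/// Minimal stand-in for `rustc_lexer`'s cursor: peek with `nth_char`,
/// advance with `bump`, and measure progress with `len_consumed`.
struct Cursor<'a> {
    initial_len: usize,
    chars: Chars<'a>,
}

impl<'a> Cursor<'a> {
    fn new(input: &'a str) -> Cursor<'a> {
        Cursor { initial_len: input.len(), chars: input.chars() }
    }

    /// Peeks the nth character ahead without consuming anything.
    fn nth_char(&self, n: usize) -> char {
        self.chars.clone().nth(n).unwrap_or(EOF_CHAR)
    }

    /// Consumes and returns the next character, if any.
    fn bump(&mut self) -> Option<char> {
        self.chars.next()
    }

    /// Number of bytes consumed so far.
    fn len_consumed(&self) -> usize {
        self.initial_len - self.chars.as_str().len()
    }
}

fn main() {
    let mut cursor = Cursor::new("r#foo");
    assert_eq!(cursor.nth_char(0), 'r');
    assert_eq!(cursor.nth_char(1), '#');
    cursor.bump();
    cursor.bump();
    assert_eq!(cursor.len_consumed(), 2);
}
```
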
+ // We want to be able to build this crate with a stable compiler, so no // `#![feature]` attributes should be added. @@ -6,78 +19,144 @@ pub mod unescape; use crate::cursor::{Cursor, EOF_CHAR}; +/// Parsed token. +/// It doesn't contain information about data that has been parsed, +/// only the type of the token and its size. pub struct Token { pub kind: TokenKind, pub len: usize, } -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] -pub enum TokenKind { - LineComment, - BlockComment { terminated: bool }, - Whitespace, - Ident, - RawIdent, - Literal { kind: LiteralKind, suffix_start: usize }, - Lifetime { starts_with_number: bool }, - Semi, - Comma, - Dot, - OpenParen, - CloseParen, - OpenBrace, - CloseBrace, - OpenBracket, - CloseBracket, - At, - Pound, - Tilde, - Question, - Colon, - Dollar, - Eq, - Not, - Lt, - Gt, - Minus, - And, - Or, - Plus, - Star, - Slash, - Caret, - Percent, - Unknown, -} -use self::TokenKind::*; - -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] -pub enum LiteralKind { - Int { base: Base, empty_int: bool }, - Float { base: Base, empty_exponent: bool }, - Char { terminated: bool }, - Byte { terminated: bool }, - Str { terminated: bool }, - ByteStr { terminated: bool }, - RawStr { n_hashes: usize, started: bool, terminated: bool }, - RawByteStr { n_hashes: usize, started: bool, terminated: bool }, -} -use self::LiteralKind::*; - -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] -pub enum Base { - Binary, - Octal, - Hexadecimal, - Decimal, -} - impl Token { fn new(kind: TokenKind, len: usize) -> Token { Token { kind, len } } } +/// Enum represening common lexeme types. +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub enum TokenKind { + // Multi-char tokens: + + /// "// comment" + LineComment, + /// "/* block comment */" + /// Block comments can be recursive, so the sequence like "/* /* */" + /// will not be considered terminated and will result in a parsing error. + BlockComment { terminated: bool }, + /// Any whitespace characters sequence. + Whitespace, + /// "ident" or "continue" + /// At this step keywords are also considered identifiers. + Ident, + /// "r#ident" + RawIdent, + /// "12_u8", "1.0e-40", "b"123"". See `LiteralKind` for more details. + Literal { kind: LiteralKind, suffix_start: usize }, + /// "'a" + Lifetime { starts_with_number: bool }, + + // One-char tokens: + + /// ";" + Semi, + /// "," + Comma, + /// "." + Dot, + /// "(" + OpenParen, + /// ")" + CloseParen, + /// "{" + OpenBrace, + /// "}" + CloseBrace, + /// "[" + OpenBracket, + /// "]" + CloseBracket, + /// "@" + At, + /// "#" + Pound, + /// "~" + Tilde, + /// "?" + Question, + /// ":" + Colon, + /// "$" + Dollar, + /// "=" + Eq, + /// "!" + Not, + /// "<" + Lt, + /// ">" + Gt, + /// "-" + Minus, + /// "&" + And, + /// "|" + Or, + /// "+" + Plus, + /// "*" + Star, + /// "/" + Slash, + /// "^" + Caret, + /// "%" + Percent, + + /// Unknown token, not expected by the lexer, e.g. 
"№" + Unknown, +} +use self::TokenKind::*; + +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub enum LiteralKind { + /// "12_u8", "0o100", "0b120i99" + Int { base: Base, empty_int: bool }, + /// "12.34f32", "0b100.100" + Float { base: Base, empty_exponent: bool }, + /// "'a'", "'\\'", "'''", "';" + Char { terminated: bool }, + /// "b'a'", "b'\\'", "b'''", "b';" + Byte { terminated: bool }, + /// ""abc"", ""abc" + Str { terminated: bool }, + /// "b"abc"", "b"abc" + ByteStr { terminated: bool }, + /// "r"abc"", "r#"abc"#", "r####"ab"###"c"####", "r#"a" + RawStr { n_hashes: usize, started: bool, terminated: bool }, + /// "br"abc"", "br#"abc"#", "br####"ab"###"c"####", "br#"a" + RawByteStr { n_hashes: usize, started: bool, terminated: bool }, +} +use self::LiteralKind::*; + +/// Base of numeric literal encoding according to its prefix. +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub enum Base { + /// Literal starts with "0b". + Binary, + /// Literal starts with "0o". + Octal, + /// Literal starts with "0x". + Hexadecimal, + /// Literal doesn't contain a prefix. + Decimal, +} + +/// `rustc` allows files to have a shebang, e.g. "#!/usr/bin/rustrun", +/// but shebang isn't a part of rust syntax, so this function +/// skips the line if it starts with a shebang ("#!"). +/// Line won't be skipped if it represents a valid Rust syntax +/// (e.g. "#![deny(missing_docs)]"). pub fn strip_shebang(input: &str) -> Option { debug_assert!(!input.is_empty()); if !input.starts_with("#!") || input.starts_with("#![") { @@ -86,11 +165,13 @@ pub fn strip_shebang(input: &str) -> Option { Some(input.find('\n').unwrap_or(input.len())) } +/// Parses the first token from the provided input string. pub fn first_token(input: &str) -> Token { debug_assert!(!input.is_empty()); Cursor::new(input).advance_token() } +/// Creates an iterator that produces tokens from the input string. pub fn tokenize(mut input: &str) -> impl Iterator + '_ { std::iter::from_fn(move || { if input.is_empty() { @@ -102,10 +183,9 @@ pub fn tokenize(mut input: &str) -> impl Iterator + '_ { }) } -// See [UAX #31](http://unicode.org/reports/tr31) for definitions of these -// classes. - /// True if `c` is considered a whitespace according to Rust language definition. +/// See [Rust language reference](https://doc.rust-lang.org/reference/whitespace.html) +/// for definitions of these classes. pub fn is_whitespace(c: char) -> bool { // This is Pattern_White_Space. // @@ -137,6 +217,8 @@ pub fn is_whitespace(c: char) -> bool { } /// True if `c` is valid as a first character of an identifier. +/// See [Rust language reference](https://doc.rust-lang.org/reference/identifiers.html) for +/// a formal definition of valid identifier name. pub fn is_id_start(c: char) -> bool { // This is XID_Start OR '_' (which formally is not a XID_Start). // We also add fast-path for ascii idents @@ -147,6 +229,8 @@ pub fn is_id_start(c: char) -> bool { } /// True if `c` is valid as a non-first character of an identifier. +/// See [Rust language reference](https://doc.rust-lang.org/reference/identifiers.html) for +/// a formal definition of valid identifier name. pub fn is_id_continue(c: char) -> bool { // This is exactly XID_Continue. // We also add fast-path for ascii idents @@ -159,15 +243,21 @@ pub fn is_id_continue(c: char) -> bool { impl Cursor<'_> { + /// Parses a token from the input string. 
fn advance_token(&mut self) -> Token { let first_char = self.bump().unwrap(); let token_kind = match first_char { + // Slash, comment or block comment. '/' => match self.nth_char(0) { '/' => self.line_comment(), '*' => self.block_comment(), _ => Slash, }, + + // Whitespace sequence. c if is_whitespace(c) => self.whitespace(), + + // Raw string literal or identifier. 'r' => match (self.nth_char(0), self.nth_char(1)) { ('#', c1) if is_id_start(c1) => self.raw_ident(), ('#', _) | ('"', _) => { @@ -181,6 +271,8 @@ impl Cursor<'_> { } _ => self.ident(), }, + + // Byte literal, byte string literal, raw byte string literal or identifier. 'b' => match (self.nth_char(0), self.nth_char(1)) { ('\'', _) => { self.bump(); @@ -214,13 +306,20 @@ impl Cursor<'_> { } _ => self.ident(), }, + + // Identifier (this should be checked after other variant that can + // start as identifier). c if is_id_start(c) => self.ident(), + + // Numeric literal. c @ '0'..='9' => { let literal_kind = self.number(c); let suffix_start = self.len_consumed(); self.eat_literal_suffix(); TokenKind::Literal { kind: literal_kind, suffix_start } } + + // One-symbol tokens. ';' => Semi, ',' => Comma, '.' => Dot, @@ -247,7 +346,11 @@ impl Cursor<'_> { '*' => Star, '^' => Caret, '%' => Percent, + + // Lifetime or character literal. '\'' => self.lifetime_or_char(), + + // String literal. '"' => { let terminated = self.double_quoted_string(); let suffix_start = self.len_consumed(); @@ -268,7 +371,6 @@ impl Cursor<'_> { loop { match self.nth_char(0) { '\n' => break, - '\r' if self.nth_char(1) == '\n' => break, EOF_CHAR if self.is_eof() => break, _ => { self.bump(); @@ -292,6 +394,9 @@ impl Cursor<'_> { self.bump(); depth -= 1; if depth == 0 { + // This block comment is closed, so for a construction like "/* */ */" + // there will be a successfully parsed block comment "/* */" + // and " */" will be processed separately. break; } } @@ -336,6 +441,7 @@ impl Cursor<'_> { debug_assert!('0' <= self.prev() && self.prev() <= '9'); let mut base = Base::Decimal; if first_digit == '0' { + // Attempt to parse encoding base. let has_digits = match self.nth_char(0) { 'b' => { base = Base::Binary; @@ -352,17 +458,21 @@ impl Cursor<'_> { self.bump(); self.eat_hexadecimal_digits() } + // Not a base prefix. '0'..='9' | '_' | '.' | 'e' | 'E' => { self.eat_decimal_digits(); true } - // just a 0 + // Just a 0. _ => return Int { base, empty_int: false }, }; + // Base prefix was provided, but there were no digits + // after it, e.g. "0x". if !has_digits { return Int { base, empty_int: true }; } } else { + // No base prefix, parse number in the usual way. self.eat_decimal_digits(); }; @@ -401,6 +511,9 @@ impl Cursor<'_> { fn lifetime_or_char(&mut self) -> TokenKind { debug_assert!(self.prev() == '\''); let mut starts_with_number = false; + + // Check if the first symbol after '\'' is a valid identifier + // character or a number (not a digit followed by '\''). if (is_id_start(self.nth_char(0)) || self.nth_char(0).is_digit(10) && { starts_with_number = true; @@ -409,6 +522,8 @@ impl Cursor<'_> { && self.nth_char(1) != '\'' { self.bump(); + + // Skip the identifier. while is_id_continue(self.nth_char(0)) { self.bump(); } @@ -421,6 +536,8 @@ impl Cursor<'_> { Lifetime { starts_with_number } }; } + + // This is not a lifetime (checked above), parse a char literal. 
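
Editorial note, not part of the patch (the char-literal parse continues right below): the `number` method above decides the literal's `Base` from a `0b`/`0o`/`0x` prefix and records whether any digits actually follow it. A simplified, runnable sketch of that prefix handling, with hypothetical names:

```rust
/// Base of a numeric literal, mirroring the `Base` enum above.
#[derive(Debug, PartialEq)]
enum Base {
    Binary,
    Octal,
    Hexadecimal,
    Decimal,
}

/// Simplified version of the prefix handling in `number`: returns the
/// detected base plus an "empty int" flag set when a base prefix has no
/// digits after it (e.g. "0x" on its own).
fn classify_int(literal: &str) -> (Base, bool) {
    let mut chars = literal.chars();
    let first = chars.next();
    if first == Some('0') {
        match chars.next() {
            Some('b') => (Base::Binary, chars.next().is_none()),
            Some('o') => (Base::Octal, chars.next().is_none()),
            Some('x') => (Base::Hexadecimal, chars.next().is_none()),
            _ => (Base::Decimal, false),
        }
    } else {
        (Base::Decimal, literal.is_empty())
    }
}

fn main() {
    assert_eq!(classify_int("0b1010"), (Base::Binary, false));
    assert_eq!(classify_int("0x"), (Base::Hexadecimal, true));
    assert_eq!(classify_int("42"), (Base::Decimal, false));
}
```
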
let terminated = self.single_quoted_string(); let suffix_start = self.len_consumed(); if terminated { @@ -432,25 +549,32 @@ impl Cursor<'_> { fn single_quoted_string(&mut self) -> bool { debug_assert!(self.prev() == '\''); - // parse `'''` as a single char literal + // Parse `'''` as a single char literal. if self.nth_char(0) == '\'' && self.nth_char(1) == '\'' { self.bump(); } + // Parse until either quotes are terminated or error is detected. let mut first = true; loop { match self.nth_char(0) { + // Probably beginning of the comment, which we don't want to include + // to the error report. '/' if !first => break, + // Newline without following '\'' means unclosed quote, stop parsing. '\n' if self.nth_char(1) != '\'' => break, - '\r' if self.nth_char(1) == '\n' => break, + // End of file, stop parsing. EOF_CHAR if self.is_eof() => break, + // Quotes are terminated, finish parsing. '\'' => { self.bump(); return true; } + // Escaped slash is considered one character, so bump twice. '\\' => { self.bump(); self.bump(); } + // Skip the character. _ => { self.bump(); } @@ -460,6 +584,8 @@ impl Cursor<'_> { false } + /// Eats double-quoted string and returns true + /// if string is terminated. fn double_quoted_string(&mut self) -> bool { debug_assert!(self.prev() == '"'); loop { @@ -478,8 +604,11 @@ impl Cursor<'_> { } } + /// Eats the double-quoted string and returns a tuple of + /// (amount of the '#' symbols, raw string started, raw string terminated) fn raw_double_quoted_string(&mut self) -> (usize, bool, bool) { debug_assert!(self.prev() == 'r'); + // Count opening '#' symbols. let n_hashes = { let mut acc: usize = 0; loop { @@ -491,6 +620,8 @@ impl Cursor<'_> { } }; + // Skip the string itself and check that amount of closing '#' + // symbols is equal to the amount of opening ones. loop { match self.bump() { Some('"') => { @@ -551,6 +682,7 @@ impl Cursor<'_> { if self.eat_decimal_digits() { Ok(()) } else { Err(()) } } + // Eats the suffix if it's an identifier. fn eat_literal_suffix(&mut self) { if !is_id_start(self.nth_char(0)) { return; diff --git a/src/librustc_lexer/src/unescape.rs b/src/librustc_lexer/src/unescape.rs index d8e00d4c7c..dee7bc2260 100644 --- a/src/librustc_lexer/src/unescape.rs +++ b/src/librustc_lexer/src/unescape.rs @@ -7,32 +7,54 @@ use std::ops::Range; #[cfg(test)] mod tests; +/// Errors that can occur during string unescaping. #[derive(Debug, PartialEq, Eq)] pub enum EscapeError { + /// Expected 1 char, but 0 were found. ZeroChars, + /// Expected 1 char, but more than 1 were found. MoreThanOneChar, + /// Escaped '\' character without continuation. LoneSlash, + /// Invalid escape characted (e.g. '\z'). InvalidEscape, + /// Raw '\r' encountered. BareCarriageReturn, + /// Raw '\r' encountered in raw string. BareCarriageReturnInRawString, + /// Unescaped character that was expected to be escaped (e.g. raw '\t'). EscapeOnlyChar, + /// Numeric character escape is too short (e.g. '\x1'). TooShortHexEscape, + /// Invalid character in numeric escape (e.g. '\xz') InvalidCharInHexEscape, + /// Character code in numeric escape is non-ascii (e.g. '\xFF'). OutOfRangeHexEscape, + /// '\u' not followed by '{'. NoBraceInUnicodeEscape, + /// Non-hexadecimal value in '\u{..}'. InvalidCharInUnicodeEscape, + /// '\u{}' EmptyUnicodeEscape, + /// No closing brace in '\u{..}', e.g. '\u{12'. UnclosedUnicodeEscape, + /// '\u{_12}' LeadingUnderscoreUnicodeEscape, + /// More than 6 charactes in '\u{..}', e.g. 
'\u{10FFFF_FF}' OverlongUnicodeEscape, + /// Invalid in-bound unicode character code, e.g. '\u{DFFF}'. LoneSurrogateUnicodeEscape, + /// Out of bounds unicode character code, e.g. '\u{FFFFFF}'. OutOfRangeUnicodeEscape, + /// Unicode escape code in byte literal. UnicodeEscapeInByte, + /// Non-ascii character in byte literal. NonAsciiCharInByte, + /// Non-ascii character in byte string literal. NonAsciiCharInByteString, } @@ -44,15 +66,8 @@ pub fn unescape_char(literal_text: &str) -> Result { .map_err(|err| (literal_text.len() - chars.as_str().len(), err)) } -/// Takes a contents of a string literal (without quotes) and produces a -/// sequence of escaped characters or errors. -pub fn unescape_str(literal_text: &str, callback: &mut F) -where - F: FnMut(Range, Result), -{ - unescape_str_or_byte_str(literal_text, Mode::Str, callback) -} - +/// Takes a contents of a byte literal (without quotes), and returns an +/// unescaped byte or an error. pub fn unescape_byte(literal_text: &str) -> Result { let mut chars = literal_text.chars(); unescape_char_or_byte(&mut chars, Mode::Byte) @@ -62,6 +77,17 @@ pub fn unescape_byte(literal_text: &str) -> Result { /// Takes a contents of a string literal (without quotes) and produces a /// sequence of escaped characters or errors. +/// Values are returned through invoking of the provided callback. +pub fn unescape_str(literal_text: &str, callback: &mut F) +where + F: FnMut(Range, Result), +{ + unescape_str_or_byte_str(literal_text, Mode::Str, callback) +} + +/// Takes a contents of a byte string literal (without quotes) and produces a +/// sequence of bytes or errors. +/// Values are returned through invoking of the provided callback. pub fn unescape_byte_str(literal_text: &str, callback: &mut F) where F: FnMut(Range, Result), @@ -71,8 +97,9 @@ where }) } -/// Takes a contents of a string literal (without quotes) and produces a +/// Takes a contents of a raw string literal (without quotes) and produces a /// sequence of characters or errors. +/// Values are returned through invoking of the provided callback. /// NOTE: Raw strings do not perform any explicit character escaping, here we /// only translate CRLF to LF and produce errors on bare CR. pub fn unescape_raw_str(literal_text: &str, callback: &mut F) @@ -82,8 +109,9 @@ where unescape_raw_str_or_byte_str(literal_text, Mode::Str, callback) } -/// Takes a contents of a string literal (without quotes) and produces a -/// sequence of characters or errors. +/// Takes a contents of a raw byte string literal (without quotes) and produces a +/// sequence of bytes or errors. +/// Values are returned through invoking of the provided callback. /// NOTE: Raw strings do not perform any explicit character escaping, here we /// only translate CRLF to LF and produce errors on bare CR. pub fn unescape_raw_byte_str(literal_text: &str, callback: &mut F) @@ -95,6 +123,7 @@ where }) } +/// What kind of literal do we parse. #[derive(Debug, Clone, Copy)] pub enum Mode { Char, @@ -126,17 +155,16 @@ impl Mode { fn scan_escape(first_char: char, chars: &mut Chars<'_>, mode: Mode) -> Result { if first_char != '\\' { + // Previous character was not a slash, and we don't expect it to be + // an escape-only character. 
return match first_char { '\t' | '\n' => Err(EscapeError::EscapeOnlyChar), - '\r' => Err(if chars.clone().next() == Some('\n') { - EscapeError::EscapeOnlyChar - } else { - EscapeError::BareCarriageReturn - }), + '\r' => Err(EscapeError::BareCarriageReturn), '\'' if mode.in_single_quotes() => Err(EscapeError::EscapeOnlyChar), '"' if mode.in_double_quotes() => Err(EscapeError::EscapeOnlyChar), _ => { if mode.is_bytes() && !first_char.is_ascii() { + // Byte literal can't be a non-ascii character. return Err(EscapeError::NonAsciiCharInByte); } Ok(first_char) @@ -144,6 +172,8 @@ fn scan_escape(first_char: char, chars: &mut Chars<'_>, mode: Mode) -> Result, mode: Mode) -> Result '\0', 'x' => { + // Parse hexadecimal character code. + let hi = chars.next().ok_or(EscapeError::TooShortHexEscape)?; let hi = hi.to_digit(16).ok_or(EscapeError::InvalidCharInHexEscape)?; @@ -164,6 +196,7 @@ fn scan_escape(first_char: char, chars: &mut Chars<'_>, mode: Mode) -> Result, mode: Mode) -> Result { + // We've parsed '\u', now we have to parse '{..}'. + if chars.next() != Some('{') { return Err(EscapeError::NoBraceInUnicodeEscape); } + // First characrer must be a hexadecimal digit. let mut n_digits = 1; let mut value: u32 = match chars.next().ok_or(EscapeError::UnclosedUnicodeEscape)? { '_' => return Err(EscapeError::LeadingUnderscoreUnicodeEscape), @@ -184,6 +220,8 @@ fn scan_escape(first_char: char, chars: &mut Chars<'_>, mode: Mode) -> Result c.to_digit(16).ok_or(EscapeError::InvalidCharInUnicodeEscape)?, }; + // First character is valid, now parse the rest of the number + // and closing brace. loop { match chars.next() { None => return Err(EscapeError::UnclosedUnicodeEscape), @@ -192,6 +230,9 @@ fn scan_escape(first_char: char, chars: &mut Chars<'_>, mode: Mode) -> Result 6 { return Err(EscapeError::OverlongUnicodeEscape); } + + // Incorrect syntax has higher priority for error reporting + // than unallowed value for a literal. if mode.is_bytes() { return Err(EscapeError::UnicodeEscapeInByte); } @@ -208,6 +249,7 @@ fn scan_escape(first_char: char, chars: &mut Chars<'_>, mode: Mode) -> Result 6 { + // Stop updating value since we're sure that it's is incorrect already. continue; } let digit = digit as u32; @@ -244,27 +286,19 @@ where let unescaped_char = match first_char { '\\' => { - let (second_char, third_char) = { - let mut chars = chars.clone(); - (chars.next(), chars.next()) - }; - match (second_char, third_char) { - (Some('\n'), _) | (Some('\r'), Some('\n')) => { + let second_char = chars.clone().next(); + match second_char { + Some('\n') => { + // Rust language specification requires us to skip whitespaces + // if unescaped '\' character is followed by '\n'. + // For details see [Rust language reference] + // (https://doc.rust-lang.org/reference/tokens.html#string-literals). 
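
Editorial note, not part of the patch (the escaped-newline handling continues right below): the `unescape_*` functions documented earlier report each unescaped character, or an `EscapeError`, to a callback together with the byte range it came from. The sketch below imitates that callback shape with a heavily reduced escape set; `unescape_simple` and its error enum are made up for illustration:

```rust
use std::ops::Range;

/// Simplified stand-in for `EscapeError`; the real enum is much richer.
#[derive(Debug, PartialEq)]
enum EscapeError {
    LoneSlash,
    InvalidEscape,
}

/// Callback-style unescaping in the spirit of `unescape_str`: every
/// produced char (or error) is reported together with the byte range of
/// the source text it came from.
fn unescape_simple<F>(text: &str, callback: &mut F)
where
    F: FnMut(Range<usize>, Result<char, EscapeError>),
{
    let mut chars = text.char_indices().peekable();
    while let Some((start, c)) = chars.next() {
        let result = if c == '\\' {
            match chars.next() {
                None => Err(EscapeError::LoneSlash),
                Some((_, 'n')) => Ok('\n'),
                Some((_, 't')) => Ok('\t'),
                Some((_, '\\')) => Ok('\\'),
                Some((_, '\'')) => Ok('\''),
                Some((_, '"')) => Ok('"'),
                Some(_) => Err(EscapeError::InvalidEscape),
            }
        } else {
            Ok(c)
        };
        let end = chars.peek().map(|&(i, _)| i).unwrap_or(text.len());
        callback(start..end, result);
    }
}

fn main() {
    let mut out = String::new();
    unescape_simple(r"a\tb\q", &mut |range, res| match res {
        Ok(c) => out.push(c),
        Err(e) => eprintln!("error {:?} at {:?}", e, range),
    });
    assert_eq!(out, "a\tb");
}
```
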
skip_ascii_whitespace(&mut chars); continue; } _ => scan_escape(first_char, &mut chars, mode), } } - '\r' => { - let second_char = chars.clone().next(); - if second_char == Some('\n') { - chars.next(); - Ok('\n') - } else { - scan_escape(first_char, &mut chars, mode) - } - } '\n' => Ok('\n'), '\t' => Ok('\t'), _ => scan_escape(first_char, &mut chars, mode), @@ -298,15 +332,11 @@ where while let Some(curr) = chars.next() { let start = initial_len - chars.as_str().len() - curr.len_utf8(); - let result = match (curr, chars.clone().next()) { - ('\r', Some('\n')) => { - chars.next(); - Ok('\n') - }, - ('\r', _) => Err(EscapeError::BareCarriageReturnInRawString), - (c, _) if mode.is_bytes() && !c.is_ascii() => + let result = match curr { + '\r' => Err(EscapeError::BareCarriageReturnInRawString), + c if mode.is_bytes() && !c.is_ascii() => Err(EscapeError::NonAsciiCharInByteString), - (c, _) => Ok(c), + c => Ok(c), }; let end = initial_len - chars.as_str().len(); diff --git a/src/librustc_lexer/src/unescape/tests.rs b/src/librustc_lexer/src/unescape/tests.rs index 496527eb26..e7b1ff6479 100644 --- a/src/librustc_lexer/src/unescape/tests.rs +++ b/src/librustc_lexer/src/unescape/tests.rs @@ -11,7 +11,6 @@ fn test_unescape_char_bad() { check(r"\", EscapeError::LoneSlash); check("\n", EscapeError::EscapeOnlyChar); - check("\r\n", EscapeError::EscapeOnlyChar); check("\t", EscapeError::EscapeOnlyChar); check("'", EscapeError::EscapeOnlyChar); check("\r", EscapeError::BareCarriageReturn); @@ -31,6 +30,7 @@ fn test_unescape_char_bad() { check(r"\v", EscapeError::InvalidEscape); check(r"\💩", EscapeError::InvalidEscape); check(r"\●", EscapeError::InvalidEscape); + check("\\\r", EscapeError::InvalidEscape); check(r"\x", EscapeError::TooShortHexEscape); check(r"\x0", EscapeError::TooShortHexEscape); @@ -116,10 +116,9 @@ fn test_unescape_str_good() { check("foo", "foo"); check("", ""); - check(" \t\n\r\n", " \t\n\n"); + check(" \t\n", " \t\n"); check("hello \\\n world", "hello world"); - check("hello \\\r\n world", "hello world"); check("thread's", "thread's") } @@ -134,7 +133,6 @@ fn test_unescape_byte_bad() { check(r"\", EscapeError::LoneSlash); check("\n", EscapeError::EscapeOnlyChar); - check("\r\n", EscapeError::EscapeOnlyChar); check("\t", EscapeError::EscapeOnlyChar); check("'", EscapeError::EscapeOnlyChar); check("\r", EscapeError::BareCarriageReturn); @@ -238,10 +236,9 @@ fn test_unescape_byte_str_good() { check("foo", b"foo"); check("", b""); - check(" \t\n\r\n", b" \t\n\n"); + check(" \t\n", b" \t\n"); check("hello \\\n world", b"hello world"); - check("hello \\\r\n world", b"hello world"); check("thread's", b"thread's") } @@ -253,7 +250,6 @@ fn test_unescape_raw_str() { assert_eq!(unescaped, expected); } - check("\r\n", &[(0..2, Ok('\n'))]); check("\r", &[(0..1, Err(EscapeError::BareCarriageReturnInRawString))]); check("\rx", &[(0..1, Err(EscapeError::BareCarriageReturnInRawString)), (1..2, Ok('x'))]); } @@ -266,7 +262,6 @@ fn test_unescape_raw_byte_str() { assert_eq!(unescaped, expected); } - check("\r\n", &[(0..2, Ok(byte_from_char('\n')))]); check("\r", &[(0..1, Err(EscapeError::BareCarriageReturnInRawString))]); check("🦀", &[(0..4, Err(EscapeError::NonAsciiCharInByteString))]); check( diff --git a/src/librustc_lint/Cargo.toml b/src/librustc_lint/Cargo.toml index 041d0aaead..a61a314d54 100644 --- a/src/librustc_lint/Cargo.toml +++ b/src/librustc_lint/Cargo.toml @@ -15,3 +15,4 @@ rustc_target = { path = "../librustc_target" } syntax = { path = "../libsyntax" } syntax_pos = { path = 
"../libsyntax_pos" } rustc_data_structures = { path = "../librustc_data_structures" } +rustc_index = { path = "../librustc_index" } diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs index cf19a9eb14..e3c3966c2f 100644 --- a/src/librustc_lint/builtin.rs +++ b/src/librustc_lint/builtin.rs @@ -27,6 +27,7 @@ use rustc::hir::def::{Res, DefKind}; use rustc::hir::def_id::{DefId, LOCAL_CRATE}; use rustc::ty::{self, Ty, TyCtxt, layout::VariantIdx}; use rustc::{lint, util}; +use rustc::lint::FutureIncompatibleInfo; use hir::Node; use util::nodemap::HirIdSet; use lint::{LateContext, LintContext, LintArray}; @@ -45,7 +46,7 @@ use syntax::feature_gate::{Stability, deprecated_attributes}; use syntax_pos::{BytePos, Span}; use syntax::symbol::{Symbol, kw, sym}; use syntax::errors::{Applicability, DiagnosticBuilder}; -use syntax::print::pprust::expr_to_string; +use syntax::print::pprust::{self, expr_to_string}; use syntax::visit::FnKind; use rustc::hir::{self, GenericParamKind, PatKind}; @@ -67,7 +68,7 @@ declare_lint_pass!(WhileTrue => [WHILE_TRUE]); /// Traverse through any amount of parenthesis and return the first non-parens expression. fn pierce_parens(mut expr: &ast::Expr) -> &ast::Expr { - while let ast::ExprKind::Paren(sub) = &expr.node { + while let ast::ExprKind::Paren(sub) = &expr.kind { expr = sub; } expr @@ -75,9 +76,9 @@ fn pierce_parens(mut expr: &ast::Expr) -> &ast::Expr { impl EarlyLintPass for WhileTrue { fn check_expr(&mut self, cx: &EarlyContext<'_>, e: &ast::Expr) { - if let ast::ExprKind::While(cond, ..) = &e.node { - if let ast::ExprKind::Lit(ref lit) = pierce_parens(cond).node { - if let ast::LitKind::Bool(true) = lit.node { + if let ast::ExprKind::While(cond, ..) = &e.kind { + if let ast::ExprKind::Lit(ref lit) = pierce_parens(cond).kind { + if let ast::LitKind::Bool(true) = lit.kind { if !lit.span.from_expansion() { let msg = "denote infinite loops with `loop { ... }`"; let condition_span = cx.sess.source_map().def_span(e.span); @@ -117,7 +118,7 @@ impl BoxPointers { impl<'a, 'tcx> LateLintPass<'a, 'tcx> for BoxPointers { fn check_item(&mut self, cx: &LateContext<'_, '_>, it: &hir::Item) { - match it.node { + match it.kind { hir::ItemKind::Fn(..) | hir::ItemKind::TyAlias(..) | hir::ItemKind::Enum(..) 
| @@ -130,7 +131,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for BoxPointers { } // If it's a struct, we also have to check the fields' types - match it.node { + match it.kind { hir::ItemKind::Struct(ref struct_def, _) | hir::ItemKind::Union(ref struct_def, _) => { for struct_field in struct_def.fields() { @@ -159,7 +160,7 @@ declare_lint_pass!(NonShorthandFieldPatterns => [NON_SHORTHAND_FIELD_PATTERNS]); impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NonShorthandFieldPatterns { fn check_pat(&mut self, cx: &LateContext<'_, '_>, pat: &hir::Pat) { - if let PatKind::Struct(ref qpath, ref field_pats, _) = pat.node { + if let PatKind::Struct(ref qpath, ref field_pats, _) = pat.kind { let variant = cx.tables.pat_ty(pat).ty_adt_def() .expect("struct pattern type is not an ADT") .variant_of_res(cx.tables.qpath_res(qpath, pat.hir_id)); @@ -173,7 +174,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NonShorthandFieldPatterns { // (Issue #49588) continue; } - if let PatKind::Binding(_, _, ident, None) = fieldpat.pat.node { + if let PatKind::Binding(_, _, ident, None) = fieldpat.pat.kind { if cx.tcx.find_field_index(ident, &variant) == Some(cx.tcx.field_index(fieldpat.hir_id, cx.tables)) { let mut err = cx.struct_span_lint(NON_SHORTHAND_FIELD_PATTERNS, @@ -224,7 +225,7 @@ impl EarlyLintPass for UnsafeCode { } fn check_expr(&mut self, cx: &EarlyContext<'_>, e: &ast::Expr) { - if let ast::ExprKind::Block(ref blk, _) = e.node { + if let ast::ExprKind::Block(ref blk, _) = e.kind { // Don't warn about generated blocks; that'll just pollute the output. if blk.rules == ast::BlockCheckMode::Unsafe(ast::UserProvided) { self.report_unsafe(cx, blk.span, "usage of an `unsafe` block"); @@ -233,7 +234,7 @@ impl EarlyLintPass for UnsafeCode { } fn check_item(&mut self, cx: &EarlyContext<'_>, it: &ast::Item) { - match it.node { + match it.kind { ast::ItemKind::Trait(_, ast::Unsafety::Unsafe, ..) => { self.report_unsafe(cx, it.span, "declaration of an `unsafe` trait") } @@ -268,7 +269,7 @@ impl EarlyLintPass for UnsafeCode { } fn check_trait_item(&mut self, cx: &EarlyContext<'_>, item: &ast::TraitItem) { - if let ast::TraitItemKind::Method(ref sig, None) = item.node { + if let ast::TraitItemKind::Method(ref sig, None) = item.kind { if sig.header.unsafety == ast::Unsafety::Unsafe { self.report_unsafe(cx, item.span, "declaration of an `unsafe` method") } @@ -280,7 +281,7 @@ declare_lint! { pub MISSING_DOCS, Allow, "detects missing documentation for public members", - report_in_external_macro: true + report_in_external_macro } pub struct MissingDoc { @@ -391,7 +392,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for MissingDoc { } fn check_item(&mut self, cx: &LateContext<'_, '_>, it: &hir::Item) { - let desc = match it.node { + let desc = match it.kind { hir::ItemKind::Fn(..) => "a function", hir::ItemKind::Mod(..) => "a module", hir::ItemKind::Enum(..) => "an enum", @@ -440,7 +441,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for MissingDoc { return; } - let desc = match trait_item.node { + let desc = match trait_item.kind { hir::TraitItemKind::Const(..) => "an associated constant", hir::TraitItemKind::Method(..) => "a trait method", hir::TraitItemKind::Type(..) => "an associated type", @@ -459,7 +460,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for MissingDoc { return; } - let desc = match impl_item.node { + let desc = match impl_item.kind { hir::ImplItemKind::Const(..) => "an associated constant", hir::ImplItemKind::Method(..) 
=> "a method", hir::ImplItemKind::TyAlias(_) => "an associated type", @@ -504,7 +505,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for MissingCopyImplementations { if !cx.access_levels.is_reachable(item.hir_id) { return; } - let (def, ty) = match item.node { + let (def, ty) = match item.kind { hir::ItemKind::Struct(_, ref ast_generics) => { if !ast_generics.params.is_empty() { return; @@ -563,7 +564,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for MissingDebugImplementations { return; } - match item.node { + match item.kind { hir::ItemKind::Struct(..) | hir::ItemKind::Union(..) | hir::ItemKind::Enum(..) => {} @@ -601,7 +602,11 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for MissingDebugImplementations { declare_lint! { pub ANONYMOUS_PARAMETERS, Allow, - "detects anonymous parameters" + "detects anonymous parameters", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #41686 ", + edition: Some(Edition::Edition2018), + }; } declare_lint_pass!( @@ -611,10 +616,10 @@ declare_lint_pass!( impl EarlyLintPass for AnonymousParameters { fn check_trait_item(&mut self, cx: &EarlyContext<'_>, it: &ast::TraitItem) { - match it.node { + match it.kind { ast::TraitItemKind::Method(ref sig, _) => { for arg in sig.decl.inputs.iter() { - match arg.pat.node { + match arg.pat.kind { ast::PatKind::Ident(_, ident, None) => { if ident.name == kw::Invalid { let ty_snip = cx @@ -701,7 +706,8 @@ impl EarlyLintPass for DeprecatedAttr { } } if attr.check_name(sym::no_start) || attr.check_name(sym::crate_id) { - let msg = format!("use of deprecated attribute `{}`: no longer used.", attr.path); + let path_str = pprust::path_to_string(&attr.path); + let msg = format!("use of deprecated attribute `{}`: no longer used.", path_str); lint_deprecated_attr(cx, attr, &msg, None); } } @@ -766,13 +772,13 @@ impl UnusedDocComment { impl EarlyLintPass for UnusedDocComment { fn check_item(&mut self, cx: &EarlyContext<'_>, item: &ast::Item) { - if let ast::ItemKind::Mac(..) = item.node { + if let ast::ItemKind::Mac(..) = item.kind { self.warn_if_doc(cx, item.span, "macro expansions", true, &item.attrs); } } fn check_stmt(&mut self, cx: &EarlyContext<'_>, stmt: &ast::Stmt) { - let (kind, is_macro_expansion) = match stmt.node { + let (kind, is_macro_expansion) = match stmt.kind { ast::StmtKind::Local(..) => ("statements", false), ast::StmtKind::Item(..) => ("inner items", false), ast::StmtKind::Mac(..) => ("macro expansions", true), @@ -781,7 +787,7 @@ impl EarlyLintPass for UnusedDocComment { ast::StmtKind::Expr(..) => return, }; - self.warn_if_doc(cx, stmt.span, kind, is_macro_expansion, stmt.node.attrs()); + self.warn_if_doc(cx, stmt.span, kind, is_macro_expansion, stmt.kind.attrs()); } fn check_arm(&mut self, cx: &EarlyContext<'_>, arm: &ast::Arm) { @@ -809,7 +815,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for PluginAsLibrary { return; } - match it.node { + match it.kind { hir::ItemKind::ExternCrate(..) 
=> (), _ => return, }; @@ -849,7 +855,7 @@ declare_lint_pass!(InvalidNoMangleItems => [NO_MANGLE_CONST_ITEMS, NO_MANGLE_GEN impl<'a, 'tcx> LateLintPass<'a, 'tcx> for InvalidNoMangleItems { fn check_item(&mut self, cx: &LateContext<'_, '_>, it: &hir::Item) { - match it.node { + match it.kind { hir::ItemKind::Fn(.., ref generics, _) => { if let Some(no_mangle_attr) = attr::find_by_name(&it.attrs, sym::no_mangle) { for param in &generics.params { @@ -918,7 +924,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for MutableTransmutes { let msg = "mutating transmuted &mut T from &T may cause undefined behavior, \ consider instead using an UnsafeCell"; - match get_transmute_from_to(cx, expr).map(|(ty1, ty2)| (&ty1.sty, &ty2.sty)) { + match get_transmute_from_to(cx, expr).map(|(ty1, ty2)| (&ty1.kind, &ty2.kind)) { Some((&ty::Ref(_, _, from_mt), &ty::Ref(_, _, to_mt))) => { if to_mt == hir::Mutability::MutMutable && from_mt == hir::Mutability::MutImmutable { @@ -932,7 +938,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for MutableTransmutes { (cx: &LateContext<'a, 'tcx>, expr: &hir::Expr) -> Option<(Ty<'tcx>, Ty<'tcx>)> { - let def = if let hir::ExprKind::Path(ref qpath) = expr.node { + let def = if let hir::ExprKind::Path(ref qpath) = expr.kind { cx.tables.qpath_res(qpath, expr.hir_id) } else { return None; @@ -979,35 +985,6 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnstableFeatures { } } -declare_lint! { - UNIONS_WITH_DROP_FIELDS, - Warn, - "use of unions that contain fields with possibly non-trivial drop code" -} - -declare_lint_pass!( - /// Lint for unions that contain fields with possibly non-trivial destructors. - UnionsWithDropFields => [UNIONS_WITH_DROP_FIELDS] -); - -impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnionsWithDropFields { - fn check_item(&mut self, ctx: &LateContext<'_, '_>, item: &hir::Item) { - if let hir::ItemKind::Union(ref vdata, _) = item.node { - for field in vdata.fields() { - let field_ty = ctx.tcx.type_of( - ctx.tcx.hir().local_def_id(field.hir_id)); - if field_ty.needs_drop(ctx.tcx, ctx.param_env) { - ctx.span_lint(UNIONS_WITH_DROP_FIELDS, - field.span, - "union contains a field with possibly non-trivial drop code, \ - drop code of union fields is ignored when dropping the union"); - return; - } - } - } - } -} - declare_lint! { pub UNREACHABLE_PUB, Allow, @@ -1090,7 +1067,7 @@ impl TypeAliasBounds { match *qpath { hir::QPath::TypeRelative(ref ty, _) => { // If this is a type variable, we found a `T::Assoc`. 
- match ty.node { + match ty.kind { hir::TyKind::Path(hir::QPath::Resolved(None, ref path)) => { match path.res { Res::Def(DefKind::TyParam, _) => true, @@ -1137,7 +1114,7 @@ impl TypeAliasBounds { impl<'a, 'tcx> LateLintPass<'a, 'tcx> for TypeAliasBounds { fn check_item(&mut self, cx: &LateContext<'_, '_>, item: &hir::Item) { - let (ty, type_alias_generics) = match item.node { + let (ty, type_alias_generics) = match item.kind { hir::ItemKind::TyAlias(ref ty, ref generics) => (&*ty, generics), _ => return, }; @@ -1148,8 +1125,12 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for TypeAliasBounds { .map(|pred| pred.span()).collect(); let mut err = cx.struct_span_lint(TYPE_ALIAS_BOUNDS, spans, "where clauses are not enforced in type aliases"); - err.help("the clause will not be checked when the type alias is used, \ - and should be removed"); + err.span_suggestion( + type_alias_generics.where_clause.span_for_predicates_or_empty_place(), + "the clause will not be checked when the type alias is used, and should be removed", + String::new(), + Applicability::MachineApplicable, + ); if !suggested_changing_assoc_types { TypeAliasBounds::suggest_changing_assoc_types(ty, &mut err); suggested_changing_assoc_types = true; @@ -1159,14 +1140,19 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for TypeAliasBounds { // The parameters must not have bounds for param in type_alias_generics.params.iter() { let spans: Vec<_> = param.bounds.iter().map(|b| b.span()).collect(); + let suggestion = spans.iter().map(|sp| { + let start = param.span.between(*sp); // Include the `:` in `T: Bound`. + (start.to(*sp), String::new()) + }).collect(); if !spans.is_empty() { let mut err = cx.struct_span_lint( TYPE_ALIAS_BOUNDS, spans, "bounds on generic parameters are not enforced in type aliases", ); - err.help("the bound will not be checked when the type alias is used, \ - and should be removed"); + let msg = "the bound will not be checked when the type alias is used, \ + and should be removed"; + err.multipart_suggestion(&msg, suggestion, Applicability::MachineApplicable); if !suggested_changing_assoc_types { TypeAliasBounds::suggest_changing_assoc_types(ty, &mut err); suggested_changing_assoc_types = true; @@ -1204,7 +1190,7 @@ fn check_const(cx: &LateContext<'_, '_>, body_id: hir::BodyId) { impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedBrokenConst { fn check_item(&mut self, cx: &LateContext<'_, '_>, it: &hir::Item) { - match it.node { + match it.kind { hir::ItemKind::Const(_, body_id) => { check_const(cx, body_id); }, @@ -1240,7 +1226,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for TrivialConstraints { if cx.tcx.features().trivial_bounds { let def_id = cx.tcx.hir().local_def_id(item.hir_id); let predicates = cx.tcx.predicates_of(def_id); - for &(predicate, span) in &predicates.predicates { + for &(predicate, span) in predicates.predicates { let predicate_kind_name = match predicate { Trait(..) => "Trait", TypeOutlives(..) | @@ -1287,7 +1273,6 @@ declare_lint_pass!( NO_MANGLE_GENERIC_ITEMS, MUTABLE_TRANSMUTES, UNSTABLE_FEATURES, - UNIONS_WITH_DROP_FIELDS, UNREACHABLE_PUB, TYPE_ALIAS_BOUNDS, TRIVIAL_BOUNDS @@ -1321,7 +1306,7 @@ impl EarlyLintPass for EllipsisInclusiveRangePatterns { /// If `pat` is a `...` pattern, return the start and end of the range, as well as the span /// corresponding to the ellipsis. fn matches_ellipsis_pat(pat: &ast::Pat) -> Option<(&P, &P, Span)> { - match &pat.node { + match &pat.kind { PatKind::Range(a, b, Spanned { span, node: RangeEnd::Included(DotDotDot), .. 
}) => { Some((a, b, *span)) } @@ -1329,7 +1314,7 @@ impl EarlyLintPass for EllipsisInclusiveRangePatterns { } } - let (parenthesise, endpoints) = match &pat.node { + let (parenthesise, endpoints) = match &pat.kind { PatKind::Ref(subpat, _) => (true, matches_ellipsis_pat(&subpat)), _ => (false, matches_ellipsis_pat(pat)), }; @@ -1373,7 +1358,7 @@ declare_lint! { UNNAMEABLE_TEST_ITEMS, Warn, "detects an item that cannot be named being marked as `#[test_case]`", - report_in_external_macro: true + report_in_external_macro } pub struct UnnameableTestItems { @@ -1395,7 +1380,7 @@ impl UnnameableTestItems { impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnnameableTestItems { fn check_item(&mut self, cx: &LateContext<'_, '_>, it: &hir::Item) { if self.items_nameable { - if let hir::ItemKind::Mod(..) = it.node {} + if let hir::ItemKind::Mod(..) = it.kind {} else { self.items_nameable = false; self.boundary = it.hir_id; @@ -1422,7 +1407,11 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnnameableTestItems { declare_lint! { pub KEYWORD_IDENTS, Allow, - "detects edition keywords being used as an identifier" + "detects edition keywords being used as an identifier", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #49716 ", + edition: Some(Edition::Edition2018), + }; } declare_lint_pass!( @@ -1517,10 +1506,10 @@ declare_lint_pass!(ExplicitOutlivesRequirements => [EXPLICIT_OUTLIVES_REQUIREMEN impl ExplicitOutlivesRequirements { fn lifetimes_outliving_lifetime<'tcx>( - inferred_outlives: &'tcx [ty::Predicate<'tcx>], + inferred_outlives: &'tcx [(ty::Predicate<'tcx>, Span)], index: u32, ) -> Vec> { - inferred_outlives.iter().filter_map(|pred| { + inferred_outlives.iter().filter_map(|(pred, _)| { match pred { ty::Predicate::RegionOutlives(outlives) => { let outlives = outlives.skip_binder(); @@ -1537,10 +1526,10 @@ impl ExplicitOutlivesRequirements { } fn lifetimes_outliving_type<'tcx>( - inferred_outlives: &'tcx [ty::Predicate<'tcx>], + inferred_outlives: &'tcx [(ty::Predicate<'tcx>, Span)], index: u32, ) -> Vec> { - inferred_outlives.iter().filter_map(|pred| { + inferred_outlives.iter().filter_map(|(pred, _)| { match pred { ty::Predicate::TypeOutlives(outlives) => { let outlives = outlives.skip_binder(); @@ -1559,7 +1548,7 @@ impl ExplicitOutlivesRequirements { &self, param: &'tcx hir::GenericParam, tcx: TyCtxt<'tcx>, - inferred_outlives: &'tcx [ty::Predicate<'tcx>], + inferred_outlives: &'tcx [(ty::Predicate<'tcx>, Span)], ty_generics: &'tcx ty::Generics, ) -> Vec> { let index = ty_generics.param_def_id_to_index[ @@ -1684,7 +1673,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for ExplicitOutlivesRequirements { let def_id = cx.tcx.hir().local_def_id(item.hir_id); if let hir::ItemKind::Struct(_, ref hir_generics) | hir::ItemKind::Enum(_, ref hir_generics) - | hir::ItemKind::Union(_, ref hir_generics) = item.node + | hir::ItemKind::Union(_, ref hir_generics) = item.kind { let inferred_outlives = cx.tcx.inferred_outlives_of(def_id); if inferred_outlives.is_empty() { @@ -1750,7 +1739,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for ExplicitOutlivesRequirements { hir::WherePredicate::BoundPredicate(predicate) => { // FIXME we can also infer bounds on associated types, // and should check for them here. 
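// A minimal sketch of the lint-declaration surface used in the hunks above
// (illustrative; `MY_LINT` and the issue number are placeholders, not from the
// patch): future-incompatibility data now lives inline in `declare_lint!` via
// `@future_incompatible`, replacing entries in the central
// `register_future_incompatible` list removed further down, and boolean flags such
// as `report_in_external_macro` are written bare rather than as
// `report_in_external_macro: true`:
//
//     declare_lint! {
//         pub MY_LINT,
//         Allow,
//         "short description of what the lint detects",
//         @future_incompatible = FutureIncompatibleInfo {
//             reference: "issue #NNNNN <link to the tracking issue>",
//             edition: Some(Edition::Edition2018),
//         };
//     }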
- match predicate.bounded_ty.node { + match predicate.bounded_ty.kind { hir::TyKind::Path(hir::QPath::Resolved( None, ref path, @@ -1812,7 +1801,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for ExplicitOutlivesRequirements { // generics, except for tuple struct, which have the `where` // after the fields of the struct. let full_where_span = if let hir::ItemKind::Struct(hir::VariantData::Tuple(..), _) - = item.node + = item.kind { where_span } else { @@ -1900,7 +1889,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for InvalidValue { fn is_zero(expr: &hir::Expr) -> bool { use hir::ExprKind::*; use syntax::ast::LitKind::*; - match &expr.node { + match &expr.kind { Lit(lit) => if let Int(i, _) = lit.node { i == 0 @@ -1923,8 +1912,8 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for InvalidValue { const TRANSMUTE_PATH: &[Symbol] = &[sym::core, sym::intrinsics, kw::Invalid, sym::transmute]; - if let hir::ExprKind::Call(ref path_expr, ref args) = expr.node { - if let hir::ExprKind::Path(ref qpath) = path_expr.node { + if let hir::ExprKind::Call(ref path_expr, ref args) = expr.kind { + if let hir::ExprKind::Path(ref qpath) = path_expr.kind { let def_id = cx.tables.qpath_res(qpath, path_expr.hir_id).opt_def_id()?; if cx.match_def_path(def_id, ZEROED_PATH) { @@ -1954,7 +1943,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for InvalidValue { init: InitKind, ) -> Option { use rustc::ty::TyKind::*; - match ty.sty { + match ty.kind { // Primitive types that don't like 0 as a value. Ref(..) => Some((format!("References must be non-null"), None)), Adt(..) if ty.is_box() => Some((format!("`Box` must be non-null"), None)), diff --git a/src/librustc_lint/error_codes.rs b/src/librustc_lint/error_codes.rs index ea2e1d9ecc..2edc8fadf4 100644 --- a/src/librustc_lint/error_codes.rs +++ b/src/librustc_lint/error_codes.rs @@ -1,4 +1,4 @@ syntax::register_diagnostics! { ; - E0721, // `await` keyword +// E0721, // `await` keyword } diff --git a/src/librustc_lint/lib.rs b/src/librustc_lint/lib.rs index 0e054013cd..b1beef04c5 100644 --- a/src/librustc_lint/lib.rs +++ b/src/librustc_lint/lib.rs @@ -33,27 +33,21 @@ use rustc::lint; use rustc::lint::{EarlyContext, LateContext, LateLintPass, EarlyLintPass, LintPass, LintArray}; use rustc::lint::builtin::{ BARE_TRAIT_OBJECTS, - ABSOLUTE_PATHS_NOT_STARTING_WITH_CRATE, ELIDED_LIFETIMES_IN_PATHS, EXPLICIT_OUTLIVES_REQUIREMENTS, INTRA_DOC_LINK_RESOLUTION_FAILURE, MISSING_DOC_CODE_EXAMPLES, PRIVATE_DOC_TESTS, - parser::ILL_FORMED_ATTRIBUTE_INPUT, }; -use rustc::session; use rustc::hir; use rustc::hir::def_id::DefId; use rustc::ty::query::Providers; use rustc::ty::TyCtxt; use syntax::ast; -use syntax::edition::Edition; use syntax_pos::Span; -use session::Session; use lint::LintId; -use lint::FutureIncompatibleInfo; use redundant_semicolon::*; use nonstandard_style::*; @@ -164,9 +158,6 @@ macro_rules! 
late_lint_mod_passes { // Depends on referenced function signatures in expressions MutableTransmutes: MutableTransmutes, - // Depends on types of fields, checks if they implement Drop - UnionsWithDropFields: UnionsWithDropFields, - TypeAliasBounds: TypeAliasBounds, TrivialConstraints: TrivialConstraints, @@ -195,59 +186,60 @@ late_lint_passes!(declare_combined_late_pass, [pub BuiltinCombinedLateLintPass]) late_lint_mod_passes!(declare_combined_late_pass, [BuiltinCombinedModuleLateLintPass]); +pub fn new_lint_store(no_interleave_lints: bool, internal_lints: bool) -> lint::LintStore { + let mut lint_store = lint::LintStore::new(); + + register_builtins(&mut lint_store, no_interleave_lints); + if internal_lints { + register_internals(&mut lint_store); + } + + lint_store +} + /// Tell the `LintStore` about all the built-in lints (the ones /// defined in this crate and the ones defined in /// `rustc::lint::builtin`). -pub fn register_builtins(store: &mut lint::LintStore, sess: Option<&Session>) { +fn register_builtins(store: &mut lint::LintStore, no_interleave_lints: bool) { macro_rules! add_lint_group { - ($sess:ident, $name:expr, $($lint:ident),*) => ( - store.register_group($sess, false, $name, None, vec![$(LintId::of($lint)),*]); + ($name:expr, $($lint:ident),*) => ( + store.register_group(false, $name, None, vec![$(LintId::of($lint)),*]); ) } macro_rules! register_pass { - ($method:ident, $constructor:expr, [$($args:expr),*]) => ( - store.$method(sess, false, false, $($args,)* box $constructor); + ($method:ident, $ty:ident, $constructor:expr) => ( + store.register_lints(&$ty::get_lints()); + store.$method(|| box $constructor); ) } macro_rules! register_passes { - ([$method:ident, $args:tt], [$($passes:ident: $constructor:expr,)*]) => ( + ($method:ident, [$($passes:ident: $constructor:expr,)*]) => ( $( - register_pass!($method, $constructor, $args); + register_pass!($method, $passes, $constructor); )* ) } - if sess.map(|sess| sess.opts.debugging_opts.no_interleave_lints).unwrap_or(false) { - pre_expansion_lint_passes!(register_passes, [register_pre_expansion_pass, []]); - early_lint_passes!(register_passes, [register_early_pass, []]); - late_lint_passes!(register_passes, [register_late_pass, [false]]); - late_lint_mod_passes!(register_passes, [register_late_pass, [true]]); + if no_interleave_lints { + pre_expansion_lint_passes!(register_passes, register_pre_expansion_pass); + early_lint_passes!(register_passes, register_early_pass); + late_lint_passes!(register_passes, register_late_pass); + late_lint_mod_passes!(register_passes, register_late_mod_pass); } else { - store.register_pre_expansion_pass( - sess, - false, - true, - box BuiltinCombinedPreExpansionLintPass::new() - ); - store.register_early_pass(sess, false, true, box BuiltinCombinedEarlyLintPass::new()); - store.register_late_pass( - sess, false, true, true, box BuiltinCombinedModuleLateLintPass::new() - ); - store.register_late_pass( - sess, false, true, false, box BuiltinCombinedLateLintPass::new() - ); + store.register_lints(&BuiltinCombinedPreExpansionLintPass::get_lints()); + store.register_lints(&BuiltinCombinedEarlyLintPass::get_lints()); + store.register_lints(&BuiltinCombinedModuleLateLintPass::get_lints()); + store.register_lints(&BuiltinCombinedLateLintPass::get_lints()); } - add_lint_group!(sess, - "nonstandard_style", + add_lint_group!("nonstandard_style", NON_CAMEL_CASE_TYPES, NON_SNAKE_CASE, NON_UPPER_CASE_GLOBALS); - add_lint_group!(sess, - "unused", + add_lint_group!("unused", UNUSED_IMPORTS, UNUSED_VARIABLES, 
UNUSED_ASSIGNMENTS, @@ -255,6 +247,7 @@ pub fn register_builtins(store: &mut lint::LintStore, sess: Option<&Session>) { UNUSED_MUT, UNREACHABLE_CODE, UNREACHABLE_PATTERNS, + OVERLAPPING_PATTERNS, UNUSED_MUST_USE, UNUSED_UNSAFE, PATH_STATEMENTS, @@ -267,8 +260,7 @@ pub fn register_builtins(store: &mut lint::LintStore, sess: Option<&Session>) { UNUSED_LABELS, UNUSED_PARENS); - add_lint_group!(sess, - "rust_2018_idioms", + add_lint_group!("rust_2018_idioms", BARE_TRAIT_OBJECTS, UNUSED_EXTERN_CRATES, ELLIPSIS_INCLUSIVE_RANGE_PATTERNS, @@ -284,165 +276,11 @@ pub fn register_builtins(store: &mut lint::LintStore, sess: Option<&Session>) { // MACRO_USE_EXTERN_CRATE, ); - add_lint_group!(sess, - "rustdoc", + add_lint_group!("rustdoc", INTRA_DOC_LINK_RESOLUTION_FAILURE, MISSING_DOC_CODE_EXAMPLES, PRIVATE_DOC_TESTS); - // Guidelines for creating a future incompatibility lint: - // - // - Create a lint defaulting to warn as normal, with ideally the same error - // message you would normally give - // - Add a suitable reference, typically an RFC or tracking issue. Go ahead - // and include the full URL, sort items in ascending order of issue numbers. - // - Later, change lint to error - // - Eventually, remove lint - store.register_future_incompatible(sess, vec![ - FutureIncompatibleInfo { - id: LintId::of(PRIVATE_IN_PUBLIC), - reference: "issue #34537 ", - edition: None, - }, - FutureIncompatibleInfo { - id: LintId::of(PUB_USE_OF_PRIVATE_EXTERN_CRATE), - reference: "issue #34537 ", - edition: None, - }, - FutureIncompatibleInfo { - id: LintId::of(PATTERNS_IN_FNS_WITHOUT_BODY), - reference: "issue #35203 ", - edition: None, - }, - FutureIncompatibleInfo { - id: LintId::of(DUPLICATE_MACRO_EXPORTS), - reference: "issue #35896 ", - edition: Some(Edition::Edition2018), - }, - FutureIncompatibleInfo { - id: LintId::of(KEYWORD_IDENTS), - reference: "issue #49716 ", - edition: Some(Edition::Edition2018), - }, - FutureIncompatibleInfo { - id: LintId::of(SAFE_EXTERN_STATICS), - reference: "issue #36247 ", - edition: None, - }, - FutureIncompatibleInfo { - id: LintId::of(INVALID_TYPE_PARAM_DEFAULT), - reference: "issue #36887 ", - edition: None, - }, - FutureIncompatibleInfo { - id: LintId::of(LEGACY_DIRECTORY_OWNERSHIP), - reference: "issue #37872 ", - edition: None, - }, - FutureIncompatibleInfo { - id: LintId::of(LEGACY_CONSTRUCTOR_VISIBILITY), - reference: "issue #39207 ", - edition: None, - }, - FutureIncompatibleInfo { - id: LintId::of(MISSING_FRAGMENT_SPECIFIER), - reference: "issue #40107 ", - edition: None, - }, - FutureIncompatibleInfo { - id: LintId::of(ILLEGAL_FLOATING_POINT_LITERAL_PATTERN), - reference: "issue #41620 ", - edition: None, - }, - FutureIncompatibleInfo { - id: LintId::of(ANONYMOUS_PARAMETERS), - reference: "issue #41686 ", - edition: Some(Edition::Edition2018), - }, - FutureIncompatibleInfo { - id: LintId::of(PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES), - reference: "issue #42238 ", - edition: None, - }, - FutureIncompatibleInfo { - id: LintId::of(LATE_BOUND_LIFETIME_ARGUMENTS), - reference: "issue #42868 ", - edition: None, - }, - FutureIncompatibleInfo { - id: LintId::of(SAFE_PACKED_BORROWS), - reference: "issue #46043 ", - edition: None, - }, - FutureIncompatibleInfo { - id: LintId::of(ORDER_DEPENDENT_TRAIT_OBJECTS), - reference: "issue #56484 ", - edition: None, - }, - FutureIncompatibleInfo { - id: LintId::of(TYVAR_BEHIND_RAW_POINTER), - reference: "issue #46906 ", - edition: Some(Edition::Edition2018), - }, - FutureIncompatibleInfo { - id: LintId::of(UNSTABLE_NAME_COLLISIONS), - 
reference: "issue #48919 ", - edition: None, - // Note: this item represents future incompatibility of all unstable functions in the - // standard library, and thus should never be removed or changed to an error. - }, - FutureIncompatibleInfo { - id: LintId::of(ABSOLUTE_PATHS_NOT_STARTING_WITH_CRATE), - reference: "issue #53130 ", - edition: Some(Edition::Edition2018), - }, - FutureIncompatibleInfo { - id: LintId::of(WHERE_CLAUSES_OBJECT_SAFETY), - reference: "issue #51443 ", - edition: None, - }, - FutureIncompatibleInfo { - id: LintId::of(PROC_MACRO_DERIVE_RESOLUTION_FALLBACK), - reference: "issue #50504 ", - edition: None, - }, - FutureIncompatibleInfo { - id: LintId::of(MACRO_EXPANDED_MACRO_EXPORTS_ACCESSED_BY_ABSOLUTE_PATHS), - reference: "issue #52234 ", - edition: None, - }, - FutureIncompatibleInfo { - id: LintId::of(ILL_FORMED_ATTRIBUTE_INPUT), - reference: "issue #57571 ", - edition: None, - }, - FutureIncompatibleInfo { - id: LintId::of(AMBIGUOUS_ASSOCIATED_ITEMS), - reference: "issue #57644 ", - edition: None, - }, - FutureIncompatibleInfo { - id: LintId::of(NESTED_IMPL_TRAIT), - reference: "issue #59014 ", - edition: None, - }, - FutureIncompatibleInfo { - id: LintId::of(MUTABLE_BORROW_RESERVATION_CONFLICT), - reference: "issue #59159 ", - edition: None, - }, - FutureIncompatibleInfo { - id: LintId::of(INDIRECT_STRUCTURAL_MATCH), - reference: "issue #62411 ", - edition: None, - }, - FutureIncompatibleInfo { - id: LintId::of(SOFT_UNSTABLE), - reference: "issue #64266 ", - edition: None, - }, - ]); - // Register renamed and removed lints. store.register_renamed("single_use_lifetime", "single_use_lifetimes"); store.register_renamed("elided_lifetime_in_path", "elided_lifetimes_in_paths"); @@ -498,12 +336,14 @@ pub fn register_builtins(store: &mut lint::LintStore, sess: Option<&Session>) { "converted into hard error, see https://github.com/rust-lang/rust/issues/46205"); } -pub fn register_internals(store: &mut lint::LintStore, sess: Option<&Session>) { - store.register_early_pass(sess, false, false, box DefaultHashTypes::new()); - store.register_early_pass(sess, false, false, box LintPassImpl); - store.register_late_pass(sess, false, false, false, box TyTyKind); +fn register_internals(store: &mut lint::LintStore) { + store.register_lints(&DefaultHashTypes::get_lints()); + store.register_early_pass(|| box DefaultHashTypes::new()); + store.register_lints(&LintPassImpl::get_lints()); + store.register_early_pass(|| box LintPassImpl); + store.register_lints(&TyTyKind::get_lints()); + store.register_late_pass(|| box TyTyKind); store.register_group( - sess, false, "rustc::internal", None, diff --git a/src/librustc_lint/nonstandard_style.rs b/src/librustc_lint/nonstandard_style.rs index bb6119d0ff..dceb79fd30 100644 --- a/src/librustc_lint/nonstandard_style.rs +++ b/src/librustc_lint/nonstandard_style.rs @@ -136,7 +136,7 @@ impl EarlyLintPass for NonCamelCaseTypes { return; } - match it.node { + match it.kind { ast::ItemKind::TyAlias(..) | ast::ItemKind::Enum(..) | ast::ItemKind::Struct(..) | @@ -258,7 +258,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NonSnakeCase { .and_then(|attr| attr.meta()) .and_then(|meta| { meta.name_value_literal().and_then(|lit| { - if let ast::LitKind::Str(name, ..) = lit.node { + if let ast::LitKind::Str(name, ..) = lit.kind { // Discard the double quotes surrounding the literal. 
let sp = cx.sess().source_map().span_to_snippet(lit.span) .ok() @@ -326,13 +326,13 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NonSnakeCase { } fn check_item(&mut self, cx: &LateContext<'_, '_>, it: &hir::Item) { - if let hir::ItemKind::Mod(_) = it.node { + if let hir::ItemKind::Mod(_) = it.kind { self.check_snake_case(cx, "module", &it.ident); } } fn check_trait_item(&mut self, cx: &LateContext<'_, '_>, item: &hir::TraitItem) { - if let hir::TraitItemKind::Method(_, hir::TraitMethod::Required(pnames)) = &item.node { + if let hir::TraitItemKind::Method(_, hir::TraitMethod::Required(pnames)) = &item.kind { self.check_snake_case(cx, "trait method", &item.ident); for param_name in pnames { self.check_snake_case(cx, "variable", param_name); @@ -341,7 +341,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NonSnakeCase { } fn check_pat(&mut self, cx: &LateContext<'_, '_>, p: &hir::Pat) { - if let &PatKind::Binding(_, _, ident, _) = &p.node { + if let &PatKind::Binding(_, _, ident, _) = &p.kind { self.check_snake_case(cx, "variable", &ident); } } @@ -387,7 +387,7 @@ impl NonUpperCaseGlobals { impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NonUpperCaseGlobals { fn check_item(&mut self, cx: &LateContext<'_, '_>, it: &hir::Item) { - match it.node { + match it.kind { hir::ItemKind::Static(..) if !attr::contains_name(&it.attrs, sym::no_mangle) => { NonUpperCaseGlobals::check_upper_case(cx, "static variable", &it.ident); } @@ -399,20 +399,20 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NonUpperCaseGlobals { } fn check_trait_item(&mut self, cx: &LateContext<'_, '_>, ti: &hir::TraitItem) { - if let hir::TraitItemKind::Const(..) = ti.node { + if let hir::TraitItemKind::Const(..) = ti.kind { NonUpperCaseGlobals::check_upper_case(cx, "associated constant", &ti.ident); } } fn check_impl_item(&mut self, cx: &LateContext<'_, '_>, ii: &hir::ImplItem) { - if let hir::ImplItemKind::Const(..) = ii.node { + if let hir::ImplItemKind::Const(..) 
= ii.kind { NonUpperCaseGlobals::check_upper_case(cx, "associated constant", &ii.ident); } } fn check_pat(&mut self, cx: &LateContext<'_, '_>, p: &hir::Pat) { // Lint for constants that look like binding identifiers (#7526) - if let PatKind::Path(hir::QPath::Resolved(None, ref path)) = p.node { + if let PatKind::Path(hir::QPath::Resolved(None, ref path)) = p.kind { if let Res::Def(DefKind::Const, _) = path.res { if path.segments.len() == 1 { NonUpperCaseGlobals::check_upper_case( diff --git a/src/librustc_lint/redundant_semicolon.rs b/src/librustc_lint/redundant_semicolon.rs index 7c9df3578b..0adf1eeb41 100644 --- a/src/librustc_lint/redundant_semicolon.rs +++ b/src/librustc_lint/redundant_semicolon.rs @@ -12,8 +12,8 @@ declare_lint_pass!(RedundantSemicolon => [REDUNDANT_SEMICOLON]); impl EarlyLintPass for RedundantSemicolon { fn check_stmt(&mut self, cx: &EarlyContext<'_>, stmt: &Stmt) { - if let StmtKind::Semi(expr) = &stmt.node { - if let ExprKind::Tup(ref v) = &expr.node { + if let StmtKind::Semi(expr) = &stmt.kind { + if let ExprKind::Tup(ref v) = &expr.kind { if v.is_empty() { // Strings of excess semicolons are encoded as empty tuple expressions // during the parsing stage, so we check for empty tuple expressions diff --git a/src/librustc_lint/types.rs b/src/librustc_lint/types.rs index 40261f6d13..aa6dfa50dd 100644 --- a/src/librustc_lint/types.rs +++ b/src/librustc_lint/types.rs @@ -7,7 +7,7 @@ use rustc::ty::subst::SubstsRef; use rustc::ty::{self, AdtKind, ParamEnv, Ty, TyCtxt}; use rustc::ty::layout::{self, IntegerExt, LayoutOf, VariantIdx, SizeSkeleton}; use rustc::{lint, util}; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::vec::Idx; use util::nodemap::FxHashSet; use lint::{LateContext, LintContext, LintArray}; use lint::{LintPass, LateLintPass}; @@ -72,7 +72,7 @@ fn lint_overflowing_range_endpoint<'a, 'tcx>( ) -> bool { // We only want to handle exclusive (`..`) ranges, // which are represented as `ExprKind::Struct`. - if let ExprKind::Struct(_, eps, _) = &parent_expr.node { + if let ExprKind::Struct(_, eps, _) = &parent_expr.kind { if eps.len() != 2 { return false; } @@ -227,7 +227,7 @@ fn get_type_suggestion(t: Ty<'_>, val: u128, negative: bool) -> Option { } } } - match t.sty { + match t.kind { ty::Int(i) => find_fit!(i, val, negative, I8 => [U8] => [I16, I32, I64, I128], I16 => [U16] => [I32, I64, I128], @@ -279,7 +279,7 @@ fn lint_int_literal<'a, 'tcx>( let par_id = cx.tcx.hir().get_parent_node(e.hir_id); if let Node::Expr(par_e) = cx.tcx.hir().get(par_id) { - if let hir::ExprKind::Struct(..) = par_e.node { + if let hir::ExprKind::Struct(..) = par_e.kind { if is_range_literal(cx.sess(), par_e) && lint_overflowing_range_endpoint(cx, lit, v, max, e, par_e, t) { @@ -318,9 +318,9 @@ fn lint_uint_literal<'a, 'tcx>( if lit_val < min || lit_val > max { let parent_id = cx.tcx.hir().get_parent_node(e.hir_id); if let Node::Expr(par_e) = cx.tcx.hir().get(parent_id) { - match par_e.node { + match par_e.kind { hir::ExprKind::Cast(..) 
=> { - if let ty::Char = cx.tables.expr_ty(par_e).sty { + if let ty::Char = cx.tables.expr_ty(par_e).kind { let mut err = cx.struct_span_lint( OVERFLOWING_LITERALS, par_e.span, @@ -364,7 +364,7 @@ fn lint_literal<'a, 'tcx>( e: &'tcx hir::Expr, lit: &hir::Lit, ) { - match cx.tables.node_type(e.hir_id).sty { + match cx.tables.node_type(e.hir_id).kind { ty::Int(t) => { match lit.node { ast::LitKind::Int(v, ast::LitIntType::Signed(_)) | @@ -400,7 +400,7 @@ fn lint_literal<'a, 'tcx>( impl<'a, 'tcx> LateLintPass<'a, 'tcx> for TypeLimits { fn check_expr(&mut self, cx: &LateContext<'a, 'tcx>, e: &'tcx hir::Expr) { - match e.node { + match e.kind { hir::ExprKind::Unary(hir::UnNeg, ref expr) => { // propagate negation, if the negation itself isn't negated if self.negated_expr_id != e.hir_id { @@ -445,7 +445,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for TypeLimits { l: &hir::Expr, r: &hir::Expr) -> bool { - let (lit, expr, swap) = match (&l.node, &r.node) { + let (lit, expr, swap) = match (&l.kind, &r.kind) { (&hir::ExprKind::Lit(_), _) => (l, r, true), (_, &hir::ExprKind::Lit(_)) => (r, l, false), _ => return true, @@ -453,10 +453,10 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for TypeLimits { // Normalize the binop so that the literal is always on the RHS in // the comparison let norm_binop = if swap { rev_binop(binop) } else { binop }; - match cx.tables.node_type(expr.hir_id).sty { + match cx.tables.node_type(expr.hir_id).kind { ty::Int(int_ty) => { let (min, max) = int_ty_range(int_ty); - let lit_val: i128 = match lit.node { + let lit_val: i128 = match lit.kind { hir::ExprKind::Lit(ref li) => { match li.node { ast::LitKind::Int(v, ast::LitIntType::Signed(_)) | @@ -470,7 +470,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for TypeLimits { } ty::Uint(uint_ty) => { let (min, max) :(u128, u128) = uint_ty_range(uint_ty); - let lit_val: u128 = match lit.node { + let lit_val: u128 = match lit.kind { hir::ExprKind::Lit(ref li) => { match li.node { ast::LitKind::Int(v, _) => v, @@ -526,7 +526,7 @@ fn is_zst<'tcx>(tcx: TyCtxt<'tcx>, did: DefId, ty: Ty<'tcx>) -> bool { } fn ty_is_known_nonnull<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool { - match ty.sty { + match ty.kind { ty::FnPtr(_) => true, ty::Ref(..) => true, ty::Adt(field_def, substs) if field_def.repr.transparent() && !field_def.is_union() => { @@ -615,7 +615,7 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { return FfiSafe; } - match ty.sty { + match ty.kind { ty::Adt(def, substs) => { if def.is_phantom_data() { return FfiPhantom(ty); @@ -631,6 +631,16 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { }; } + let is_non_exhaustive = + def.non_enum_variant().is_field_list_non_exhaustive(); + if is_non_exhaustive && !def.did.is_local() { + return FfiUnsafe { + ty, + reason: "this struct is non-exhaustive", + help: None, + }; + } + if def.non_enum_variant().fields.is_empty() { return FfiUnsafe { ty, @@ -730,8 +740,25 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { } } + if def.is_variant_list_non_exhaustive() && !def.did.is_local() { + return FfiUnsafe { + ty, + reason: "this enum is non-exhaustive", + help: None, + }; + } + // Check the contained variants. 
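// Rough illustration of what the non-exhaustive checks added above reject (an
// assumed example, not taken from the patch): a `#[repr(C)]` type marked
// `#[non_exhaustive]` in another crate is now treated as FFI-unsafe, since the
// defining crate is free to add fields or variants later. For a dependency `dep`:
//
//     // in `dep`
//     #[repr(C)]
//     #[non_exhaustive]
//     pub enum Status { Ok, Failed }
//
//     // in the crate being linted
//     extern "C" {
//         fn poll_status() -> dep::Status; // flagged: "this enum is non-exhaustive"
//     }
//
// The check only fires for non-local definitions (`!def.did.is_local()`), so a
// crate may still pass its own non-exhaustive types across its own FFI boundary.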
for variant in &def.variants { + let is_non_exhaustive = variant.is_field_list_non_exhaustive(); + if is_non_exhaustive && !variant.def_id.is_local() { + return FfiUnsafe { + ty, + reason: "this enum has non-exhaustive variants", + help: None, + }; + } + for field in &variant.fields { let field_ty = cx.normalize_erasing_regions( ParamEnv::reveal_all(), @@ -876,7 +903,7 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { diag.help(help); } diag.note(note); - if let ty::Adt(def, _) = ty.sty { + if let ty::Adt(def, _) = ty.kind { if let Some(sp) = self.cx.tcx.hir().span_if_local(def.did) { diag.span_note(sp, "type defined here"); } @@ -893,7 +920,7 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { impl<'tcx> ty::fold::TypeVisitor<'tcx> for ProhibitOpaqueTypes<'tcx> { fn visit_ty(&mut self, ty: Ty<'tcx>) -> bool { - if let ty::Opaque(..) = ty.sty { + if let ty::Opaque(..) = ty.kind { self.ty = Some(ty); true } else { @@ -944,15 +971,8 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { let def_id = self.cx.tcx.hir().local_def_id(id); let sig = self.cx.tcx.fn_sig(def_id); let sig = self.cx.tcx.erase_late_bound_regions(&sig); - let inputs = if sig.c_variadic { - // Don't include the spoofed `VaListImpl` in the functions list - // of inputs. - &sig.inputs()[..sig.inputs().len() - 1] - } else { - &sig.inputs()[..] - }; - for (input_ty, input_hir) in inputs.iter().zip(&decl.inputs) { + for (input_ty, input_hir) in sig.inputs().iter().zip(&decl.inputs) { self.check_type_for_ffi_and_report_errors(input_hir.span, input_ty); } @@ -978,7 +998,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for ImproperCTypes { if let Abi::Rust | Abi::RustCall | Abi::RustIntrinsic | Abi::PlatformIntrinsic = abi { // Don't worry about types in internal ABIs. } else { - match it.node { + match it.kind { hir::ForeignItemKind::Fn(ref decl, _, _) => { vis.check_foreign_fn(it.hir_id, decl); } @@ -995,7 +1015,7 @@ declare_lint_pass!(VariantSizeDifferences => [VARIANT_SIZE_DIFFERENCES]); impl<'a, 'tcx> LateLintPass<'a, 'tcx> for VariantSizeDifferences { fn check_item(&mut self, cx: &LateContext<'_, '_>, it: &hir::Item) { - if let hir::ItemKind::Enum(ref enum_definition, _) = it.node { + if let hir::ItemKind::Enum(ref enum_definition, _) = it.kind { let item_def_id = cx.tcx.hir().local_def_id(it.hir_id); let t = cx.tcx.type_of(item_def_id); let ty = cx.tcx.erase_regions(&t); diff --git a/src/librustc_lint/unused.rs b/src/librustc_lint/unused.rs index 2d4af2f606..5b29cff9da 100644 --- a/src/librustc_lint/unused.rs +++ b/src/librustc_lint/unused.rs @@ -1,6 +1,7 @@ use rustc::hir::def::{Res, DefKind}; use rustc::hir::def_id::DefId; use rustc::lint; +use rustc::lint::builtin::UNUSED_ATTRIBUTES; use rustc::ty::{self, Ty}; use rustc::ty::adjustment; use rustc_data_structures::fx::FxHashMap; @@ -25,7 +26,7 @@ declare_lint! { pub UNUSED_MUST_USE, Warn, "unused result of a type flagged as `#[must_use]`", - report_in_external_macro: true + report_in_external_macro } declare_lint! { @@ -38,12 +39,12 @@ declare_lint_pass!(UnusedResults => [UNUSED_MUST_USE, UNUSED_RESULTS]); impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedResults { fn check_stmt(&mut self, cx: &LateContext<'_, '_>, s: &hir::Stmt) { - let expr = match s.node { + let expr = match s.kind { hir::StmtKind::Semi(ref expr) => &**expr, _ => return, }; - if let hir::ExprKind::Ret(..) = expr.node { + if let hir::ExprKind::Ret(..) 
= expr.kind { return; } @@ -52,9 +53,9 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedResults { let mut fn_warned = false; let mut op_warned = false; - let maybe_def_id = match expr.node { + let maybe_def_id = match expr.kind { hir::ExprKind::Call(ref callee, _) => { - match callee.node { + match callee.kind { hir::ExprKind::Path(ref qpath) => { match cx.tables.qpath_res(qpath, callee.hir_id) { Res::Def(DefKind::Fn, def_id) @@ -80,7 +81,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedResults { return; } - let must_use_op = match expr.node { + let must_use_op = match expr.kind { // Hardcoding operators here seemed more expedient than the // refactoring that would be needed to look up the `#[must_use]` // attribute which does exist on the comparison trait methods @@ -145,7 +146,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedResults { let plural_suffix = pluralise!(plural_len); - match ty.sty { + match ty.kind { ty::Adt(..) if ty.is_box() => { let boxed_ty = ty.boxed_ty(); let descr_pre = &format!("{}boxed ", descr_pre); @@ -156,7 +157,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedResults { } ty::Opaque(def, _) => { let mut has_emitted = false; - for (predicate, _) in &cx.tcx.predicates_of(def).predicates { + for (predicate, _) in cx.tcx.predicates_of(def).predicates { if let ty::Predicate::Trait(ref poly_trait_predicate) = predicate { let trait_ref = poly_trait_predicate.skip_binder().trait_ref; let def_id = trait_ref.def_id; @@ -193,7 +194,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedResults { } ty::Tuple(ref tys) => { let mut has_emitted = false; - let spans = if let hir::ExprKind::Tup(comps) = &expr.node { + let spans = if let hir::ExprKind::Tup(comps) = &expr.kind { debug_assert_eq!(comps.len(), tys.len()); comps.iter().map(|e| e.span).collect() } else { @@ -269,20 +270,14 @@ declare_lint_pass!(PathStatements => [PATH_STATEMENTS]); impl<'a, 'tcx> LateLintPass<'a, 'tcx> for PathStatements { fn check_stmt(&mut self, cx: &LateContext<'_, '_>, s: &hir::Stmt) { - if let hir::StmtKind::Semi(ref expr) = s.node { - if let hir::ExprKind::Path(_) = expr.node { + if let hir::StmtKind::Semi(ref expr) = s.kind { + if let hir::ExprKind::Path(_) = expr.kind { cx.span_lint(PATH_STATEMENTS, s.span, "path statement with no effect"); } } } } -declare_lint! { - pub UNUSED_ATTRIBUTES, - Warn, - "detects attributes that were not used by the compiler" -} - #[derive(Copy, Clone)] pub struct UnusedAttributes { builtin_attributes: &'static FxHashMap, @@ -363,7 +358,7 @@ declare_lint_pass!(UnusedParens => [UNUSED_PARENS]); impl UnusedParens { fn is_expr_parens_necessary(inner: &ast::Expr, followed_by_block: bool) -> bool { - followed_by_block && match inner.node { + followed_by_block && match inner.kind { ast::ExprKind::Ret(_) | ast::ExprKind::Break(..) 
=> true, _ => parser::contains_exterior_struct_lit(&inner), } @@ -376,10 +371,12 @@ impl UnusedParens { followed_by_block: bool, left_pos: Option, right_pos: Option) { - match value.node { + match value.kind { ast::ExprKind::Paren(ref inner) => { if !Self::is_expr_parens_necessary(inner, followed_by_block) && - value.attrs.is_empty() { + value.attrs.is_empty() && + !value.span.from_expansion() + { let expr_text = if let Ok(snippet) = cx.sess().source_map() .span_to_snippet(value.span) { snippet @@ -416,8 +413,8 @@ impl UnusedParens { ) { use ast::{PatKind, BindingMode::ByValue, Mutability::Mutable}; - if let PatKind::Paren(inner) = &value.node { - match inner.node { + if let PatKind::Paren(inner) = &value.kind { + match inner.kind { // The lint visitor will visit each subpattern of `p`. We do not want to lint // any range pattern no matter where it occurs in the pattern. For something like // `&(a..=b)`, there is a recursive `check_pat` on `a` and `b`, but we will assume @@ -501,7 +498,7 @@ impl UnusedParens { impl EarlyLintPass for UnusedParens { fn check_expr(&mut self, cx: &EarlyContext<'_>, e: &ast::Expr) { use syntax::ast::ExprKind::*; - let (value, msg, followed_by_block, left_pos, right_pos) = match e.node { + let (value, msg, followed_by_block, left_pos, right_pos) = match e.kind { Let(ref pat, ..) => { self.check_unused_parens_pat(cx, pat, false, false); return; @@ -566,7 +563,7 @@ impl EarlyLintPass for UnusedParens { fn check_pat(&mut self, cx: &EarlyContext<'_>, p: &ast::Pat) { use ast::{PatKind::*, Mutability}; - match &p.node { + match &p.kind { // Do not lint on `(..)` as that will result in the other arms being useless. Paren(_) // The other cases do not contain sub-patterns. @@ -587,7 +584,7 @@ impl EarlyLintPass for UnusedParens { } fn check_stmt(&mut self, cx: &EarlyContext<'_>, s: &ast::Stmt) { - if let ast::StmtKind::Local(ref local) = s.node { + if let ast::StmtKind::Local(ref local) = s.kind { self.check_unused_parens_pat(cx, &local.pat, false, false); if let Some(ref value) = local.init { @@ -603,6 +600,25 @@ impl EarlyLintPass for UnusedParens { fn check_arm(&mut self, cx: &EarlyContext<'_>, arm: &ast::Arm) { self.check_unused_parens_pat(cx, &arm.pat, false, false); } + + fn check_ty(&mut self, cx: &EarlyContext<'_>, ty: &ast::Ty) { + if let &ast::TyKind::Paren(ref r) = &ty.kind { + match &r.kind { + &ast::TyKind::TraitObject(..) => {} + &ast::TyKind::ImplTrait(_, ref bounds) if bounds.len() > 1 => {} + _ => { + let pattern_text = if let Ok(snippet) = cx.sess().source_map() + .span_to_snippet(ty.span) { + snippet + } else { + pprust::ty_to_string(ty) + }; + + Self::remove_outer_parens(cx, ty.span, &pattern_text, "type", (false, false)); + } + } + } + } } declare_lint! 
{ @@ -647,7 +663,7 @@ impl UnusedImportBraces { impl EarlyLintPass for UnusedImportBraces { fn check_item(&mut self, cx: &EarlyContext<'_>, item: &ast::Item) { - if let ast::ItemKind::Use(ref use_tree) = item.node { + if let ast::ItemKind::Use(ref use_tree) = item.kind { self.check_use_tree(cx, use_tree, item); } } @@ -663,7 +679,7 @@ declare_lint_pass!(UnusedAllocation => [UNUSED_ALLOCATION]); impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedAllocation { fn check_expr(&mut self, cx: &LateContext<'_, '_>, e: &hir::Expr) { - match e.node { + match e.kind { hir::ExprKind::Box(_) => {} _ => return, } diff --git a/src/librustc_llvm/build.rs b/src/librustc_llvm/build.rs index 62a3757757..c5d5f066f4 100644 --- a/src/librustc_llvm/build.rs +++ b/src/librustc_llvm/build.rs @@ -279,7 +279,11 @@ fn main() { let path = PathBuf::from(s); println!("cargo:rustc-link-search=native={}", path.parent().unwrap().display()); - println!("cargo:rustc-link-lib=static={}", stdcppname); + if target.contains("windows") { + println!("cargo:rustc-link-lib=static-nobundle={}", stdcppname); + } else { + println!("cargo:rustc-link-lib=static={}", stdcppname); + } } else if cxxflags.contains("stdlib=libc++") { println!("cargo:rustc-link-lib=c++"); } else { diff --git a/src/librustc_macros/Cargo.toml b/src/librustc_macros/Cargo.toml index f989ebc6df..c28fcb1a39 100644 --- a/src/librustc_macros/Cargo.toml +++ b/src/librustc_macros/Cargo.toml @@ -8,8 +8,8 @@ edition = "2018" proc-macro = true [dependencies] -synstructure = "0.10.2" -syn = { version = "0.15.22", features = ["full"] } -proc-macro2 = "0.4.24" -quote = "0.6.10" +synstructure = "0.12.1" +syn = { version = "1", features = ["full"] } +proc-macro2 = "1" +quote = "1" itertools = "0.8" diff --git a/src/librustc_macros/src/hash_stable.rs b/src/librustc_macros/src/hash_stable.rs index 6d7590c7d1..735cfb11b3 100644 --- a/src/librustc_macros/src/hash_stable.rs +++ b/src/librustc_macros/src/hash_stable.rs @@ -15,22 +15,22 @@ fn parse_attributes(field: &syn::Field) -> Attributes { }; for attr in &field.attrs { if let Ok(meta) = attr.parse_meta() { - if &meta.name().to_string() != "stable_hasher" { + if !meta.path().is_ident("stable_hasher") { continue; } let mut any_attr = false; if let Meta::List(list) = meta { for nested in list.nested.iter() { if let NestedMeta::Meta(meta) = nested { - if &meta.name().to_string() == "ignore" { + if meta.path().is_ident("ignore") { attrs.ignore = true; any_attr = true; } - if &meta.name().to_string() == "project" { + if meta.path().is_ident("project") { if let Meta::List(list) = meta { if let Some(nested) = list.nested.iter().next() { if let NestedMeta::Meta(meta) = nested { - attrs.project = Some(meta.name()); + attrs.project = meta.path().get_ident().cloned(); any_attr = true; } } @@ -76,10 +76,10 @@ pub fn hash_stable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::To s.bound_impl(quote!(::rustc_data_structures::stable_hasher::HashStable <::rustc::ich::StableHashingContext<'__ctx>>), quote!{ - fn hash_stable<__W: ::rustc_data_structures::stable_hasher::StableHasherResult>( + fn hash_stable( &self, __hcx: &mut ::rustc::ich::StableHashingContext<'__ctx>, - __hasher: &mut ::rustc_data_structures::stable_hasher::StableHasher<__W>) { + __hasher: &mut ::rustc_data_structures::stable_hasher::StableHasher) { #discriminant match *self { #body } } diff --git a/src/librustc_macros/src/lib.rs b/src/librustc_macros/src/lib.rs index 3d3a020ef0..0540c95d3d 100644 --- a/src/librustc_macros/src/lib.rs +++ b/src/librustc_macros/src/lib.rs @@ 
-1,4 +1,3 @@ -#![feature(proc_macro_hygiene)] #![allow(rustc::default_hash_types)] #![recursion_limit="128"] diff --git a/src/librustc_macros/src/query.rs b/src/librustc_macros/src/query.rs index a8df7e197a..139e1b554c 100644 --- a/src/librustc_macros/src/query.rs +++ b/src/librustc_macros/src/query.rs @@ -442,8 +442,8 @@ pub fn rustc_queries(input: TokenStream) -> TokenStream { .map(|c| c.is_green()) .unwrap_or(false)); - let key = RecoverKey::recover(tcx.global_tcx(), self).unwrap(); - if queries::#name::cache_on_disk(tcx.global_tcx(), key, None) { + let key = RecoverKey::recover(tcx, self).unwrap(); + if queries::#name::cache_on_disk(tcx, key, None) { let _ = tcx.#name(key); } } @@ -495,7 +495,11 @@ pub fn rustc_queries(input: TokenStream) -> TokenStream { dep_node_force_stream.extend(quote! { DepKind::#name => { if let Some(key) = RecoverKey::recover($tcx, $dep_node) { - force_ex!($tcx, #name, key); + $tcx.force_query::>( + key, + DUMMY_SP, + *$dep_node + ); } else { return false; } diff --git a/src/librustc_metadata/Cargo.toml b/src/librustc_metadata/Cargo.toml index 5ff60a9267..18192e35f8 100644 --- a/src/librustc_metadata/Cargo.toml +++ b/src/librustc_metadata/Cargo.toml @@ -18,7 +18,9 @@ rustc = { path = "../librustc" } rustc_data_structures = { path = "../librustc_data_structures" } errors = { path = "../librustc_errors", package = "rustc_errors" } rustc_target = { path = "../librustc_target" } +rustc_index = { path = "../librustc_index" } rustc_serialize = { path = "../libserialize", package = "serialize" } stable_deref_trait = "1.0.0" syntax = { path = "../libsyntax" } +syntax_expand = { path = "../libsyntax_expand" } syntax_pos = { path = "../libsyntax_pos" } diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs index af41b6a4c8..07c49d9179 100644 --- a/src/librustc_metadata/creader.rs +++ b/src/librustc_metadata/creader.rs @@ -1,47 +1,49 @@ //! 
Validates all used crates and extern libraries and loads their metadata -use crate::cstore::{self, CStore, CrateSource, MetadataBlob}; +use crate::cstore::{self, CStore, MetadataBlob}; use crate::locator::{self, CratePaths}; -use crate::schema::{CrateRoot}; -use rustc_data_structures::sync::{Lrc, RwLock, Lock}; +use crate::schema::{CrateRoot, CrateDep}; +use rustc_data_structures::sync::{Lock, Once, AtomicCell}; use rustc::hir::def_id::CrateNum; use rustc_data_structures::svh::Svh; +use rustc::dep_graph::DepNodeIndex; use rustc::middle::cstore::DepKind; use rustc::mir::interpret::AllocDecodingState; use rustc::session::{Session, CrateDisambiguator}; use rustc::session::config::{Sanitizer, self}; use rustc_target::spec::{PanicStrategy, TargetTriple}; use rustc::session::search_paths::PathKind; -use rustc::middle::cstore::{ExternCrate, ExternCrateSource}; +use rustc::middle::cstore::{CrateSource, ExternCrate, ExternCrateSource, MetadataLoaderDyn}; use rustc::util::common::record_time; use rustc::util::nodemap::FxHashSet; use rustc::hir::map::Definitions; +use rustc::hir::def_id::LOCAL_CRATE; -use std::ops::Deref; -use std::path::PathBuf; +use std::path::Path; use std::{cmp, fs}; use syntax::ast; use syntax::attr; -use syntax::ext::allocator::{global_allocator_spans, AllocatorKind}; +use syntax::expand::allocator::{global_allocator_spans, AllocatorKind}; use syntax::symbol::{Symbol, sym}; -use syntax::{span_err, span_fatal}; +use syntax::span_fatal; use syntax_pos::{Span, DUMMY_SP}; use log::{debug, info, log_enabled}; use proc_macro::bridge::client::ProcMacro; -pub struct Library { - pub dylib: Option<(PathBuf, PathKind)>, - pub rlib: Option<(PathBuf, PathKind)>, - pub rmeta: Option<(PathBuf, PathKind)>, +crate struct Library { + pub source: CrateSource, pub metadata: MetadataBlob, } pub struct CrateLoader<'a> { - pub sess: &'a Session, - cstore: &'a CStore, + // Immutable configuration. + sess: &'a Session, + metadata_loader: &'a MetadataLoaderDyn, local_crate_name: Symbol, + // Mutable output. + cstore: CStore, } fn dump_crates(cstore: &CStore) { @@ -58,29 +60,6 @@ fn dump_crates(cstore: &CStore) { }); } -// Extra info about a crate loaded for plugins or exported macros. 
-struct ExtensionCrate { - metadata: PMDSource, - dylib: Option, - target_only: bool, -} - -enum PMDSource { - Registered(Lrc), - Owned(Library), -} - -impl Deref for PMDSource { - type Target = MetadataBlob; - - fn deref(&self) -> &MetadataBlob { - match *self { - PMDSource::Registered(ref cmd) => &cmd.blob, - PMDSource::Owned(ref lib) => &lib.metadata - } - } -} - enum LoadResult { Previous(CrateNum), Loaded(Library), @@ -99,19 +78,32 @@ impl<'a> LoadError<'a> { } impl<'a> CrateLoader<'a> { - pub fn new(sess: &'a Session, cstore: &'a CStore, local_crate_name: &str) -> Self { + pub fn new( + sess: &'a Session, + metadata_loader: &'a MetadataLoaderDyn, + local_crate_name: &str, + ) -> Self { CrateLoader { sess, - cstore, + metadata_loader, local_crate_name: Symbol::intern(local_crate_name), + cstore: Default::default(), } } + pub fn cstore(&self) -> &CStore { + &self.cstore + } + + pub fn into_cstore(self) -> CStore { + self.cstore + } + fn existing_match(&self, name: Symbol, hash: Option<&Svh>, kind: PathKind) -> Option { let mut ret = None; self.cstore.iter_crate_data(|cnum, data| { - if data.name != name { return } + if data.root.name != name { return } match hash { Some(hash) if *hash == data.root.hash => { ret = Some(cnum); return } @@ -189,54 +181,51 @@ impl<'a> CrateLoader<'a> { fn register_crate( &mut self, host_lib: Option, - root: &Option, - ident: Symbol, + root: Option<&CratePaths>, span: Span, lib: Library, dep_kind: DepKind, name: Symbol - ) -> (CrateNum, Lrc) { - let crate_root = lib.metadata.get_root(); + ) -> CrateNum { + let _prof_timer = self.sess.prof.generic_activity("metadata_register_crate"); + + let Library { source, metadata } = lib; + let crate_root = metadata.get_root(); + let host_hash = host_lib.as_ref().map(|lib| lib.metadata.get_root().hash); self.verify_no_symbol_conflicts(span, &crate_root); let private_dep = self.sess.opts.externs.get(&name.as_str()) .map(|e| e.is_private_dep) .unwrap_or(false); - info!("register crate `extern crate {} as {}` (private_dep = {})", - crate_root.name, ident, private_dep); - + info!("register crate `{}` (private_dep = {})", crate_root.name, private_dep); // Claim this crate number and cache it let cnum = self.cstore.alloc_new_crate_num(); - // Stash paths for top-most crate locally if necessary. - let crate_paths = if root.is_none() { - Some(CratePaths { - ident: ident.to_string(), - dylib: lib.dylib.clone().map(|p| p.0), - rlib: lib.rlib.clone().map(|p| p.0), - rmeta: lib.rmeta.clone().map(|p| p.0), - }) - } else { - None - }; // Maintain a reference to the top most crate. - let root = if root.is_some() { root } else { &crate_paths }; + // Stash paths for top-most crate locally if necessary. 
+ let crate_paths; + let root = if let Some(root) = root { + root + } else { + crate_paths = CratePaths { name: crate_root.name, source: source.clone() }; + &crate_paths + }; - let Library { dylib, rlib, rmeta, metadata } = lib; let cnum_map = self.resolve_crate_deps(root, &crate_root, &metadata, cnum, span, dep_kind); let dependencies: Vec = cnum_map.iter().cloned().collect(); let raw_proc_macros = crate_root.proc_macro_data.map(|_| { - if self.sess.opts.debugging_opts.dual_proc_macros { - let host_lib = host_lib.as_ref().unwrap(); - self.dlsym_proc_macros(host_lib.dylib.as_ref().map(|p| p.0.clone()), - &host_lib.metadata.get_root(), span) - } else { - self.dlsym_proc_macros(dylib.clone().map(|p| p.0), &crate_root, span) - } + let temp_root; + let (dlsym_source, dlsym_root) = match &host_lib { + Some(host_lib) => + (&host_lib.source, { temp_root = host_lib.metadata.get_root(); &temp_root }), + None => (&source, &crate_root), + }; + let dlsym_dylib = dlsym_source.dylib.as_ref().expect("no dylib for a proc-macro crate"); + self.dlsym_proc_macros(&dlsym_dylib.0, dlsym_root.disambiguator, span) }); let interpret_alloc_index: Vec = crate_root.interpret_alloc_index @@ -252,38 +241,30 @@ impl<'a> CrateLoader<'a> { crate_root.def_path_table.decode((&metadata, self.sess)) }); - let cmeta = cstore::CrateMetadata { - name: crate_root.name, - imported_name: ident, + self.cstore.set_crate_data(cnum, cstore::CrateMetadata { extern_crate: Lock::new(None), - def_path_table: Lrc::new(def_path_table), + def_path_table, trait_impls, root: crate_root, + host_hash, blob: metadata, cnum_map, cnum, dependencies: Lock::new(dependencies), - source_map_import_info: RwLock::new(vec![]), + source_map_import_info: Once::new(), alloc_decoding_state: AllocDecodingState::new(interpret_alloc_index), dep_kind: Lock::new(dep_kind), - source: cstore::CrateSource { - dylib, - rlib, - rmeta, - }, + source, private_dep, - span, - host_lib, - raw_proc_macros - }; + raw_proc_macros, + dep_node_index: AtomicCell::new(DepNodeIndex::INVALID), + }); - let cmeta = Lrc::new(cmeta); - self.cstore.set_crate_data(cnum, cmeta.clone()); - (cnum, cmeta) + cnum } fn load_proc_macro<'b>( - &mut self, + &self, locate_ctxt: &mut locator::Context<'b>, path_kind: PathKind, ) -> Option<(LoadResult, Option)> @@ -304,9 +285,7 @@ impl<'a> CrateLoader<'a> { LoadResult::Previous(cnum) => return Some((LoadResult::Previous(cnum), None)), LoadResult::Loaded(library) => Some(LoadResult::Loaded(library)) }; - // Don't look for a matching hash when looking for the host crate. 
- // It won't be the same as the target crate hash - locate_ctxt.hash = None; + locate_ctxt.hash = locate_ctxt.host_hash; // Use the locate_ctxt when looking for the host proc macro crate, as that is required // so we want it to affect the error message (locate_ctxt, result) @@ -339,16 +318,32 @@ impl<'a> CrateLoader<'a> { fn resolve_crate<'b>( &'b mut self, - root: &'b Option, - ident: Symbol, name: Symbol, - hash: Option<&'b Svh>, - extra_filename: Option<&'b str>, span: Span, - path_kind: PathKind, + dep_kind: DepKind, + dep: Option<(&'b CratePaths, &'b CrateDep)>, + ) -> CrateNum { + self.maybe_resolve_crate(name, span, dep_kind, dep).unwrap_or_else(|err| err.report()) + } + + fn maybe_resolve_crate<'b>( + &'b mut self, + name: Symbol, + span: Span, mut dep_kind: DepKind, - ) -> Result<(CrateNum, Lrc), LoadError<'b>> { - info!("resolving crate `extern crate {} as {}`", name, ident); + dep: Option<(&'b CratePaths, &'b CrateDep)>, + ) -> Result> { + info!("resolving crate `{}`", name); + let (root, hash, host_hash, extra_filename, path_kind) = match dep { + Some((root, dep)) => ( + Some(root), + Some(&dep.hash), + dep.host_hash.as_ref(), + Some(&dep.extra_filename[..]), + PathKind::Dependency + ), + None => (None, None, None, None, PathKind::Crate), + }; let result = if let Some(cnum) = self.existing_match(name, hash, path_kind) { (LoadResult::Previous(cnum), None) } else { @@ -356,9 +351,9 @@ impl<'a> CrateLoader<'a> { let mut locate_ctxt = locator::Context { sess: self.sess, span, - ident, crate_name: name, hash, + host_hash, extra_filename, filesearch: self.sess.target_filesearch(path_kind), target: &self.sess.target.target, @@ -371,7 +366,7 @@ impl<'a> CrateLoader<'a> { rejected_via_filename: vec![], should_match_name: true, is_proc_macro: Some(false), - metadata_loader: &*self.cstore.metadata_loader, + metadata_loader: self.metadata_loader, }; self.load(&mut locate_ctxt).map(|r| (r, None)).or_else(|| { @@ -389,16 +384,16 @@ impl<'a> CrateLoader<'a> { data.dep_kind.with_lock(|data_dep_kind| { *data_dep_kind = cmp::max(*data_dep_kind, dep_kind); }); - Ok((cnum, data)) + Ok(cnum) } (LoadResult::Loaded(library), host_library) => { - Ok(self.register_crate(host_library, root, ident, span, library, dep_kind, name)) + Ok(self.register_crate(host_library, root, span, library, dep_kind, name)) } _ => panic!() } } - fn load(&mut self, locate_ctxt: &mut locator::Context<'_>) -> Option { + fn load(&self, locate_ctxt: &mut locator::Context<'_>) -> Option { let library = locate_ctxt.maybe_load_library_crate()?; // In the case that we're loading a crate, but not matching @@ -425,12 +420,12 @@ impl<'a> CrateLoader<'a> { } } - fn update_extern_crate(&mut self, + fn update_extern_crate(&self, cnum: CrateNum, mut extern_crate: ExternCrate, visited: &mut FxHashSet<(CrateNum, bool)>) { - if !visited.insert((cnum, extern_crate.direct)) { return } + if !visited.insert((cnum, extern_crate.is_direct())) { return } let cmeta = self.cstore.get_crate_data(cnum); let mut old_extern_crate = cmeta.extern_crate.borrow_mut(); @@ -441,14 +436,14 @@ impl<'a> CrateLoader<'a> { // - shorter paths to longer (tuple.2). 
let new_rank = ( true, - extern_crate.direct, + extern_crate.is_direct(), cmp::Reverse(extern_crate.path_len), ); let old_rank = match *old_extern_crate { None => (false, false, cmp::Reverse(usize::max_value())), Some(ref c) => ( true, - c.direct, + c.is_direct(), cmp::Reverse(c.path_len), ), }; @@ -460,7 +455,7 @@ impl<'a> CrateLoader<'a> { drop(old_extern_crate); // Propagate the extern crate info to dependencies. - extern_crate.direct = false; + extern_crate.dependency_of = cnum; for &dep_cnum in cmeta.dependencies.borrow().iter() { self.update_extern_crate(dep_cnum, extern_crate, visited); } @@ -468,7 +463,7 @@ impl<'a> CrateLoader<'a> { // Go through the crate metadata and load any crates that it references fn resolve_crate_deps(&mut self, - root: &Option, + root: &CratePaths, crate_root: &CrateRoot<'_>, metadata: &MetadataBlob, krate: CrateNum, @@ -483,9 +478,7 @@ impl<'a> CrateLoader<'a> { // The map from crate numbers in the crate we're resolving to local crate numbers. // We map 0 and all other holes in the map to our parent crate. The "additional" // self-dependencies should be harmless. - std::iter::once(krate).chain(crate_root.crate_deps - .decode(metadata) - .map(|dep| { + std::iter::once(krate).chain(crate_root.crate_deps.decode(metadata).map(|dep| { info!("resolving dep crate {} hash: `{}` extra filename: `{}`", dep.name, dep.hash, dep.extra_filename); if dep.kind == DepKind::UnexportedMacrosOnly { @@ -495,91 +488,18 @@ impl<'a> CrateLoader<'a> { DepKind::MacrosOnly => DepKind::MacrosOnly, _ => dep.kind, }; - let (local_cnum, ..) = self.resolve_crate( - root, dep.name, dep.name, Some(&dep.hash), Some(&dep.extra_filename), span, - PathKind::Dependency, dep_kind, - ).unwrap_or_else(|err| err.report()); - local_cnum + self.resolve_crate(dep.name, span, dep_kind, Some((root, &dep))) })).collect() } - fn read_extension_crate(&mut self, span: Span, orig_name: Symbol, rename: Symbol) - -> ExtensionCrate { - info!("read extension crate `extern crate {} as {}`", orig_name, rename); - let target_triple = self.sess.opts.target_triple.clone(); - let host_triple = TargetTriple::from_triple(config::host_triple()); - let is_cross = target_triple != host_triple; - let mut target_only = false; - let mut locate_ctxt = locator::Context { - sess: self.sess, - span, - ident: orig_name, - crate_name: rename, - hash: None, - extra_filename: None, - filesearch: self.sess.host_filesearch(PathKind::Crate), - target: &self.sess.host, - triple: host_triple, - root: &None, - rejected_via_hash: vec![], - rejected_via_triple: vec![], - rejected_via_kind: vec![], - rejected_via_version: vec![], - rejected_via_filename: vec![], - should_match_name: true, - is_proc_macro: None, - metadata_loader: &*self.cstore.metadata_loader, - }; - let library = self.load(&mut locate_ctxt).or_else(|| { - if !is_cross { - return None - } - // Try loading from target crates. 
This will abort later if we - // try to load a plugin registrar function, - target_only = true; - - locate_ctxt.target = &self.sess.target.target; - locate_ctxt.triple = target_triple; - locate_ctxt.filesearch = self.sess.target_filesearch(PathKind::Crate); - - self.load(&mut locate_ctxt) - }); - let library = match library { - Some(l) => l, - None => locate_ctxt.report_errs(), - }; - - let (dylib, metadata) = match library { - LoadResult::Previous(cnum) => { - let data = self.cstore.get_crate_data(cnum); - (data.source.dylib.clone(), PMDSource::Registered(data)) - } - LoadResult::Loaded(library) => { - let dylib = library.dylib.clone(); - let metadata = PMDSource::Owned(library); - (dylib, metadata) - } - }; - - ExtensionCrate { - metadata, - dylib: dylib.map(|p| p.0), - target_only, - } - } - fn dlsym_proc_macros(&self, - dylib: Option, - root: &CrateRoot<'_>, + path: &Path, + disambiguator: CrateDisambiguator, span: Span ) -> &'static [ProcMacro] { use std::env; use crate::dynamic_lib::DynamicLibrary; - let path = match dylib { - Some(dylib) => dylib, - None => span_bug!(span, "proc-macro crate not dylib"), - }; // Make sure the path contains a / or the linker will search for it. let path = env::current_dir().unwrap().join(path); let lib = match DynamicLibrary::open(Some(&path)) { @@ -587,7 +507,7 @@ impl<'a> CrateLoader<'a> { Err(err) => self.sess.span_fatal(span, &err), }; - let sym = self.sess.generate_proc_macro_decls_symbol(root.disambiguator); + let sym = self.sess.generate_proc_macro_decls_symbol(disambiguator); let decls = unsafe { let sym = match lib.symbol(&sym) { Ok(f) => f, @@ -603,41 +523,6 @@ impl<'a> CrateLoader<'a> { decls } - /// Look for a plugin registrar. Returns library path, crate - /// SVH and DefIndex of the registrar function. - pub fn find_plugin_registrar(&mut self, - span: Span, - name: Symbol) - -> Option<(PathBuf, CrateDisambiguator)> { - let ekrate = self.read_extension_crate(span, name, name); - - if ekrate.target_only { - // Need to abort before syntax expansion. - let message = format!("plugin `{}` is not available for triple `{}` \ - (only found {})", - name, - config::host_triple(), - self.sess.opts.target_triple); - span_fatal!(self.sess, span, E0456, "{}", &message); - } - - let root = ekrate.metadata.get_root(); - match ekrate.dylib.as_ref() { - Some(dylib) => { - Some((dylib.to_path_buf(), root.disambiguator)) - } - None => { - span_err!(self.sess, span, E0457, - "plugin `{}` only found in rlib format, but must be available \ - in dylib format", - name); - // No need to abort because the loading code will just ignore this - // empty dylib. - None - } - } - } - fn inject_panic_runtime(&mut self, krate: &ast::Crate) { // If we're only compiling an rlib, then there's no need to select a // panic runtime, so we just skip this section entirely. @@ -700,10 +585,8 @@ impl<'a> CrateLoader<'a> { }; info!("panic runtime not found -- loading {}", name); - let dep_kind = DepKind::Implicit; - let (cnum, data) = - self.resolve_crate(&None, name, name, None, None, DUMMY_SP, PathKind::Crate, dep_kind) - .unwrap_or_else(|err| err.report()); + let cnum = self.resolve_crate(name, DUMMY_SP, DepKind::Implicit, None); + let data = self.cstore.get_crate_data(cnum); // Sanity check the loaded crate to ensure it is indeed a panic runtime // and the panic strategy is indeed what we thought it was. 
@@ -754,10 +637,10 @@ impl<'a> CrateLoader<'a> { if !self.sess.crate_types.borrow().iter().all(|ct| { match *ct { // Link the runtime - config::CrateType::Staticlib | config::CrateType::Executable => true, // This crate will be compiled with the required // instrumentation pass + config::CrateType::Staticlib | config::CrateType::Rlib | config::CrateType::Dylib | config::CrateType::Cdylib => @@ -793,26 +676,22 @@ impl<'a> CrateLoader<'a> { let mut uses_std = false; self.cstore.iter_crate_data(|_, data| { - if data.name == sym::std { + if data.root.name == sym::std { uses_std = true; } }); if uses_std { - let name = match *sanitizer { + let name = Symbol::intern(match sanitizer { Sanitizer::Address => "rustc_asan", Sanitizer::Leak => "rustc_lsan", Sanitizer::Memory => "rustc_msan", Sanitizer::Thread => "rustc_tsan", - }; + }); info!("loading sanitizer: {}", name); - let symbol = Symbol::intern(name); - let dep_kind = DepKind::Explicit; - let (_, data) = - self.resolve_crate(&None, symbol, symbol, None, None, DUMMY_SP, - PathKind::Crate, dep_kind) - .unwrap_or_else(|err| err.report()); + let cnum = self.resolve_crate(name, DUMMY_SP, DepKind::Explicit, None); + let data = self.cstore.get_crate_data(cnum); // Sanity check the loaded crate to ensure it is indeed a sanitizer runtime if !data.root.sanitizer_runtime { @@ -831,12 +710,9 @@ impl<'a> CrateLoader<'a> { { info!("loading profiler"); - let symbol = Symbol::intern("profiler_builtins"); - let dep_kind = DepKind::Implicit; - let (_, data) = - self.resolve_crate(&None, symbol, symbol, None, None, DUMMY_SP, - PathKind::Crate, dep_kind) - .unwrap_or_else(|err| err.report()); + let name = Symbol::intern("profiler_builtins"); + let cnum = self.resolve_crate(name, DUMMY_SP, DepKind::Implicit, None); + let data = self.cstore.get_crate_data(cnum); // Sanity check the loaded crate to ensure it is indeed a profiler runtime if !data.root.profiler_runtime { @@ -846,7 +722,7 @@ impl<'a> CrateLoader<'a> { } } - fn inject_allocator_crate(&mut self, krate: &ast::Crate) { + fn inject_allocator_crate(&self, krate: &ast::Crate) { let has_global_allocator = match &*global_allocator_spans(krate) { [span1, span2, ..] => { self.sess.struct_span_err(*span2, "cannot define multiple global allocators") @@ -982,9 +858,7 @@ impl<'a> CrateLoader<'a> { data.dependencies.borrow_mut().push(krate); }); } -} -impl<'a> CrateLoader<'a> { pub fn postprocess(&mut self, krate: &ast::Crate) { self.inject_sanitizer_runtime(); self.inject_profiler_runtime(); @@ -997,13 +871,15 @@ impl<'a> CrateLoader<'a> { } pub fn process_extern_crate( - &mut self, item: &ast::Item, definitions: &Definitions, + &mut self, + item: &ast::Item, + definitions: &Definitions, ) -> CrateNum { - match item.node { + match item.kind { ast::ItemKind::ExternCrate(orig_name) => { debug!("resolving extern crate stmt. ident: {} orig_name: {:?}", item.ident, orig_name); - let orig_name = match orig_name { + let name = match orig_name { Some(orig_name) => { crate::validate_crate_name(Some(self.sess), &orig_name.as_str(), Some(item.span)); @@ -1017,10 +893,7 @@ impl<'a> CrateLoader<'a> { DepKind::Explicit }; - let (cnum, ..) 
= self.resolve_crate( - &None, item.ident.name, orig_name, None, None, - item.span, PathKind::Crate, dep_kind, - ).unwrap_or_else(|err| err.report()); + let cnum = self.resolve_crate(name, item.span, dep_kind, None); let def_id = definitions.opt_local_def_id(item.id).unwrap(); let path_len = definitions.def_path(def_id.index).data.len(); @@ -1030,25 +903,18 @@ impl<'a> CrateLoader<'a> { src: ExternCrateSource::Extern(def_id), span: item.span, path_len, - direct: true, + dependency_of: LOCAL_CRATE, }, &mut FxHashSet::default(), ); - self.cstore.add_extern_mod_stmt_cnum(item.id, cnum); cnum } _ => bug!(), } } - pub fn process_path_extern( - &mut self, - name: Symbol, - span: Span, - ) -> CrateNum { - let cnum = self.resolve_crate( - &None, name, name, None, None, span, PathKind::Crate, DepKind::Explicit - ).unwrap_or_else(|err| err.report()).0; + pub fn process_path_extern(&mut self, name: Symbol, span: Span) -> CrateNum { + let cnum = self.resolve_crate(name, span, DepKind::Explicit, None); self.update_extern_crate( cnum, @@ -1057,7 +923,7 @@ impl<'a> CrateLoader<'a> { span, // to have the least priority in `update_extern_crate` path_len: usize::max_value(), - direct: true, + dependency_of: LOCAL_CRATE, }, &mut FxHashSet::default(), ); @@ -1065,14 +931,8 @@ impl<'a> CrateLoader<'a> { cnum } - pub fn maybe_process_path_extern( - &mut self, - name: Symbol, - span: Span, - ) -> Option { - let cnum = self.resolve_crate( - &None, name, name, None, None, span, PathKind::Crate, DepKind::Explicit - ).ok()?.0; + pub fn maybe_process_path_extern(&mut self, name: Symbol, span: Span) -> Option { + let cnum = self.maybe_resolve_crate(name, span, DepKind::Explicit, None).ok()?; self.update_extern_crate( cnum, @@ -1081,7 +941,7 @@ impl<'a> CrateLoader<'a> { span, // to have the least priority in `update_extern_crate` path_len: usize::max_value(), - direct: true, + dependency_of: LOCAL_CRATE, }, &mut FxHashSet::default(), ); diff --git a/src/librustc_metadata/cstore.rs b/src/librustc_metadata/cstore.rs index 5bf4067431..b7596d2018 100644 --- a/src/librustc_metadata/cstore.rs +++ b/src/librustc_metadata/cstore.rs @@ -2,22 +2,20 @@ // crates and libraries use crate::schema; +use rustc::dep_graph::DepNodeIndex; use rustc::hir::def_id::{CrateNum, DefIndex}; use rustc::hir::map::definitions::DefPathTable; -use rustc::middle::cstore::{DepKind, ExternCrate, MetadataLoader}; +use rustc::middle::cstore::{CrateSource, DepKind, ExternCrate}; use rustc::mir::interpret::AllocDecodingState; -use rustc_data_structures::indexed_vec::IndexVec; -use rustc::util::nodemap::{FxHashMap, NodeMap}; - -use rustc_data_structures::sync::{Lrc, RwLock, Lock}; +use rustc_index::vec::IndexVec; +use rustc::util::nodemap::FxHashMap; +use rustc_data_structures::sync::{Lrc, Lock, MetadataRef, Once, AtomicCell}; +use rustc_data_structures::svh::Svh; use syntax::ast; -use syntax::ext::base::SyntaxExtension; -use syntax::symbol::Symbol; +use syntax::edition::Edition; +use syntax_expand::base::SyntaxExtension; use syntax_pos; - -pub use rustc::middle::cstore::{NativeLibrary, NativeLibraryKind, LinkagePreference}; -pub use rustc::middle::cstore::NativeLibraryKind::*; -pub use rustc::middle::cstore::{CrateSource, LibSource, ForeignModule}; +use proc_macro::bridge::client::ProcMacro; pub use crate::cstore_impl::{provide, provide_extern}; @@ -25,18 +23,13 @@ pub use crate::cstore_impl::{provide, provide_extern}; // local crate numbers (as generated during this session). 
Each external // crate may refer to types in other external crates, and each has their // own crate numbers. -pub type CrateNumMap = IndexVec; +crate type CrateNumMap = IndexVec; -pub use rustc_data_structures::sync::MetadataRef; -use crate::creader::Library; -use syntax_pos::Span; -use proc_macro::bridge::client::ProcMacro; - -pub struct MetadataBlob(pub MetadataRef); +crate struct MetadataBlob(pub MetadataRef); /// Holds information about a syntax_pos::SourceFile imported from another crate. /// See `imported_source_files()` for more information. -pub struct ImportedSourceFile { +crate struct ImportedSourceFile { /// This SourceFile's byte-offset within the source_map of its original crate pub original_start_pos: syntax_pos::BytePos, /// The end of this SourceFile within the source_map of its original crate @@ -45,119 +38,122 @@ pub struct ImportedSourceFile { pub translated_source_file: Lrc, } -pub struct CrateMetadata { - /// Original name of the crate. - pub name: Symbol, +crate struct CrateMetadata { + /// The primary crate data - binary metadata blob. + crate blob: MetadataBlob, - /// Name of the crate as imported. I.e., if imported with - /// `extern crate foo as bar;` this will be `bar`. - pub imported_name: Symbol, - - /// Information about the extern crate that caused this crate to - /// be loaded. If this is `None`, then the crate was injected - /// (e.g., by the allocator) - pub extern_crate: Lock>, - - pub blob: MetadataBlob, - pub cnum_map: CrateNumMap, - pub cnum: CrateNum, - pub dependencies: Lock>, - pub source_map_import_info: RwLock>, - - /// Used for decoding interpret::AllocIds in a cached & thread-safe manner. - pub alloc_decoding_state: AllocDecodingState, - - // NOTE(eddyb) we pass `'static` to a `'tcx` parameter because this - // lifetime is only used behind `Lazy`, and therefore acts like an - // universal (`for<'tcx>`), that is paired up with whichever `TyCtxt` - // is being used to decode those values. - pub root: schema::CrateRoot<'static>, + // --- Some data pre-decoded from the metadata blob, usually for performance --- + /// Properties of the whole crate. + /// NOTE(eddyb) we pass `'static` to a `'tcx` parameter because this + /// lifetime is only used behind `Lazy`, and therefore acts like an + /// universal (`for<'tcx>`), that is paired up with whichever `TyCtxt` + /// is being used to decode those values. + crate root: schema::CrateRoot<'static>, /// For each definition in this crate, we encode a key. When the /// crate is loaded, we read all the keys and put them in this /// hashmap, which gives the reverse mapping. This allows us to /// quickly retrace a `DefPath`, which is needed for incremental /// compilation support. - pub def_path_table: Lrc, + crate def_path_table: DefPathTable, + /// Trait impl data. + /// FIXME: Used only from queries and can use query cache, + /// so pre-decoding can probably be avoided. + crate trait_impls: FxHashMap<(u32, DefIndex), schema::Lazy<[DefIndex]>>, + /// Proc macro descriptions for this crate, if it's a proc macro crate. + crate raw_proc_macros: Option<&'static [ProcMacro]>, + /// Source maps for code from the crate. + crate source_map_import_info: Once>, + /// Used for decoding interpret::AllocIds in a cached & thread-safe manner. + crate alloc_decoding_state: AllocDecodingState, + /// The `DepNodeIndex` of the `DepNode` representing this upstream crate. + /// It is initialized on the first access in `get_crate_dep_node_index()`. + /// Do not access the value directly, as it might not have been initialized yet. 
+ /// The field must always be initialized to `DepNodeIndex::INVALID`.
+ crate dep_node_index: AtomicCell,
- pub trait_impls: FxHashMap<(u32, DefIndex), schema::Lazy<[DefIndex]>>,
-
- pub dep_kind: Lock,
- pub source: CrateSource,
+ // --- Other significant crate properties ---
+ /// ID of this crate, from the current compilation session's point of view.
+ crate cnum: CrateNum,
+ /// Maps crate IDs as they were seen from this crate's compilation session into
+ /// IDs as they are seen from the current compilation session.
+ crate cnum_map: CrateNumMap,
+ /// Same ID set as `cnum_map` plus maybe some injected crates like panic runtime.
+ crate dependencies: Lock>,
+ /// How to link (or not link) this crate to the currently compiled crate.
+ crate dep_kind: Lock,
+ /// Filesystem location of this crate.
+ crate source: CrateSource,
/// Whether or not this crate should be considered a private dependency
/// for purposes of the 'exported_private_dependencies' lint
- pub private_dep: bool,
+ crate private_dep: bool,
+ /// The hash for the host proc macro. Used to support `-Z dual-proc-macro`.
+ crate host_hash: Option,
- pub host_lib: Option,
- pub span: Span,
+ // --- Data used only for improving diagnostics ---
- pub raw_proc_macros: Option<&'static [ProcMacro]>,
+ /// Information about the `extern crate` item or path that caused this crate to be loaded.
+ /// If this is `None`, then the crate was injected (e.g., by the allocator).
+ crate extern_crate: Lock>,
}
+#[derive(Clone)]
pub struct CStore {
- metas: RwLock>>>,
- /// Map from NodeId's of local extern crate statements to crate numbers
- extern_mod_crate_map: Lock>,
- pub metadata_loader: Box,
+ metas: IndexVec>>,
}
pub enum LoadedMacro {
- MacroDef(ast::Item),
+ MacroDef(ast::Item, Edition),
ProcMacro(SyntaxExtension),
}
-impl CStore {
- pub fn new(metadata_loader: Box) -> CStore {
+impl Default for CStore {
+ fn default() -> Self {
CStore {
// We add an empty entry for LOCAL_CRATE (which maps to zero) in
// order to make array indices in `metas` match with the
// corresponding `CrateNum`. This first entry will always remain
// `None`.
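As the comment above explains, `CStore` reserves slot zero of `metas` for `LOCAL_CRATE` so a crate number can be used directly as an index; the rest of the hunk, continued below, allocates a new number by pushing an empty slot and fills it in later with `set_crate_data`. A rough sketch of that scheme using plain standard-library types in place of `IndexVec`, `Lrc`, and `CrateMetadata` (all names here are illustrative, not the compiler's API):

```rust
use std::rc::Rc;

/// Illustrative stand-in for the real crate metadata.
#[derive(Debug)]
struct Metadata {
    name: String,
}

/// Index-addressed store: slot 0 is reserved for the local crate and
/// stays `None`, so a crate number doubles as a vector index.
struct Store {
    metas: Vec<Option<Rc<Metadata>>>,
}

impl Default for Store {
    fn default() -> Self {
        Store { metas: vec![None] } // reserved entry for the local crate
    }
}

impl Store {
    /// Reserve the next crate number; the data is attached later.
    fn alloc_new_crate_num(&mut self) -> usize {
        self.metas.push(None);
        self.metas.len() - 1
    }

    fn set_crate_data(&mut self, cnum: usize, data: Metadata) {
        assert!(self.metas[cnum].is_none(), "overwriting crate metadata entry");
        self.metas[cnum] = Some(Rc::new(data));
    }

    fn get_crate_data(&self, cnum: usize) -> &Metadata {
        self.metas[cnum]
            .as_deref()
            .unwrap_or_else(|| panic!("failed to get crate data for {}", cnum))
    }
}

fn main() {
    let mut store = Store::default();
    let cnum = store.alloc_new_crate_num(); // 1, not 0
    store.set_crate_data(cnum, Metadata { name: "serde".into() });
    println!("{:?}", store.get_crate_data(cnum));
}
```

Keeping the dummy first slot is what lets a crate number (here just `usize`) index `metas` without any offset arithmetic.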
- metas: RwLock::new(IndexVec::from_elem_n(None, 1)), - extern_mod_crate_map: Default::default(), - metadata_loader, + metas: IndexVec::from_elem_n(None, 1), } } +} - pub(super) fn alloc_new_crate_num(&self) -> CrateNum { - let mut metas = self.metas.borrow_mut(); - let cnum = CrateNum::new(metas.len()); - metas.push(None); - cnum +impl CStore { + crate fn alloc_new_crate_num(&mut self) -> CrateNum { + self.metas.push(None); + CrateNum::new(self.metas.len() - 1) } - pub(super) fn get_crate_data(&self, cnum: CrateNum) -> Lrc { - self.metas.borrow()[cnum].clone() + crate fn get_crate_data(&self, cnum: CrateNum) -> &CrateMetadata { + self.metas[cnum].as_ref() .unwrap_or_else(|| panic!("Failed to get crate data for {:?}", cnum)) } - pub(super) fn set_crate_data(&self, cnum: CrateNum, data: Lrc) { - let mut metas = self.metas.borrow_mut(); - assert!(metas[cnum].is_none(), "Overwriting crate metadata entry"); - metas[cnum] = Some(data); + crate fn set_crate_data(&mut self, cnum: CrateNum, data: CrateMetadata) { + assert!(self.metas[cnum].is_none(), "Overwriting crate metadata entry"); + self.metas[cnum] = Some(Lrc::new(data)); } - pub(super) fn iter_crate_data(&self, mut i: I) - where I: FnMut(CrateNum, &Lrc) + crate fn iter_crate_data(&self, mut i: I) + where I: FnMut(CrateNum, &CrateMetadata) { - for (k, v) in self.metas.borrow().iter_enumerated() { + for (k, v) in self.metas.iter_enumerated() { if let &Some(ref v) = v { i(k, v); } } } - pub(super) fn crate_dependencies_in_rpo(&self, krate: CrateNum) -> Vec { + crate fn crate_dependencies_in_rpo(&self, krate: CrateNum) -> Vec { let mut ordering = Vec::new(); self.push_dependencies_in_postorder(&mut ordering, krate); ordering.reverse(); ordering } - pub(super) fn push_dependencies_in_postorder(&self, - ordering: &mut Vec, - krate: CrateNum) { + crate fn push_dependencies_in_postorder(&self, ordering: &mut Vec, krate: CrateNum) { if ordering.contains(&krate) { return; } @@ -172,21 +168,13 @@ impl CStore { ordering.push(krate); } - pub(super) fn do_postorder_cnums_untracked(&self) -> Vec { + crate fn do_postorder_cnums_untracked(&self) -> Vec { let mut ordering = Vec::new(); - for (num, v) in self.metas.borrow().iter_enumerated() { + for (num, v) in self.metas.iter_enumerated() { if let &Some(_) = v { self.push_dependencies_in_postorder(&mut ordering, num); } } return ordering } - - pub(super) fn add_extern_mod_stmt_cnum(&self, emod_id: ast::NodeId, cnum: CrateNum) { - self.extern_mod_crate_map.borrow_mut().insert(emod_id, cnum); - } - - pub(super) fn do_extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option { - self.extern_mod_crate_map.borrow().get(&emod_id).cloned() - } } diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs index 55cf3965aa..c5f830c0d0 100644 --- a/src/librustc_metadata/cstore_impl.rs +++ b/src/librustc_metadata/cstore_impl.rs @@ -6,8 +6,7 @@ use crate::foreign_modules; use crate::schema; use rustc::ty::query::QueryConfig; -use rustc::middle::cstore::{CrateStore, DepKind, - EncodedMetadata, NativeLibraryKind}; +use rustc::middle::cstore::{CrateSource, CrateStore, DepKind, EncodedMetadata, NativeLibraryKind}; use rustc::middle::exported_symbols::ExportedSymbol; use rustc::middle::stability::DeprecationEntry; use rustc::hir::def; @@ -29,12 +28,12 @@ use std::sync::Arc; use syntax::ast; use syntax::attr; use syntax::source_map; -use syntax::edition::Edition; use syntax::parse::source_file_to_stream; use syntax::parse::parser::emit_unclosed_delims; +use syntax::source_map::Spanned; use 
syntax::symbol::Symbol; use syntax_pos::{Span, FileName}; -use rustc_data_structures::bit_set::BitSet; +use rustc_index::bit_set::BitSet; macro_rules! provide { (<$lt:tt> $tcx:ident, $def_id:ident, $other:ident, $cdata:ident, @@ -46,23 +45,22 @@ macro_rules! provide { $tcx: TyCtxt<$lt>, def_id_arg: T, ) -> as QueryConfig<$lt>>::Value { + let _prof_timer = + $tcx.prof.generic_activity("metadata_decode_entry"); + #[allow(unused_variables)] let ($def_id, $other) = def_id_arg.into_args(); assert!(!$def_id.is_local()); - let def_path_hash = $tcx.def_path_hash(DefId { - krate: $def_id.krate, - index: CRATE_DEF_INDEX - }); - let dep_node = def_path_hash - .to_dep_node(rustc::dep_graph::DepKind::CrateMetadata); - // The DepNodeIndex of the DepNode::CrateMetadata should be - // cached somewhere, so that we can use read_index(). - $tcx.dep_graph.read(dep_node); - - let $cdata = $tcx.crate_data_as_rc_any($def_id.krate); + let $cdata = $tcx.crate_data_as_any($def_id.krate); let $cdata = $cdata.downcast_ref::() .expect("CrateStore created data is not a CrateMetadata"); + + if $tcx.dep_graph.is_fully_enabled() { + let crate_dep_node_index = $cdata.get_crate_dep_node_index($tcx); + $tcx.dep_graph.read_index(crate_dep_node_index); + } + $compute })* @@ -97,11 +95,9 @@ provide! { <'tcx> tcx, def_id, other, cdata, generics_of => { tcx.arena.alloc(cdata.get_generics(def_id.index, tcx.sess)) } - predicates_of => { tcx.arena.alloc(cdata.get_predicates(def_id.index, tcx)) } - predicates_defined_on => { - tcx.arena.alloc(cdata.get_predicates_defined_on(def_id.index, tcx)) - } - super_predicates_of => { tcx.arena.alloc(cdata.get_super_predicates(def_id.index, tcx)) } + predicates_of => { cdata.get_predicates(def_id.index, tcx) } + predicates_defined_on => { cdata.get_predicates_defined_on(def_id.index, tcx) } + super_predicates_of => { cdata.get_super_predicates(def_id.index, tcx) } trait_def => { tcx.arena.alloc(cdata.get_trait_def(def_id.index, tcx.sess)) } @@ -153,9 +149,6 @@ provide! { <'tcx> tcx, def_id, other, cdata, rendered_const => { cdata.get_rendered_const(def_id.index) } impl_parent => { cdata.get_parent_impl(def_id.index) } trait_of_item => { cdata.get_trait_of_item(def_id.index) } - const_is_rvalue_promotable_to_static => { - cdata.const_is_rvalue_promotable_to_static(def_id.index) - } is_mir_available => { cdata.is_item_mir_available(def_id.index) } dylib_dependency_formats => { cdata.get_dylib_dependency_formats(tcx) } @@ -219,7 +212,7 @@ provide! { <'tcx> tcx, def_id, other, cdata, let r = *cdata.dep_kind.lock(); r } - crate_name => { cdata.name } + crate_name => { cdata.root.name } item_children => { let mut result = SmallVec::<[_; 8]>::new(); cdata.each_child_of_item(def_id.index, |child| result.push(child), tcx.sess); @@ -232,7 +225,7 @@ provide! { <'tcx> tcx, def_id, other, cdata, missing_extern_crate_item => { let r = match *cdata.extern_crate.borrow() { - Some(extern_crate) if !extern_crate.direct => true, + Some(extern_crate) if !extern_crate.is_direct() => true, _ => false, }; r @@ -240,7 +233,15 @@ provide! { <'tcx> tcx, def_id, other, cdata, used_crate_source => { Lrc::new(cdata.source.clone()) } - exported_symbols => { Arc::new(cdata.exported_symbols(tcx)) } + exported_symbols => { + let syms = cdata.exported_symbols(tcx); + + // FIXME rust-lang/rust#64319, rust-lang/rust#64872: We want + // to block export of generics from dylibs, but we must fix + // rust-lang/rust#65890 before we can do that robustly. 
+ + Arc::new(syms) + } } pub fn provide(providers: &mut Providers<'_>) { @@ -249,7 +250,11 @@ pub fn provide(providers: &mut Providers<'_>) { // resolve! Does this work? Unsure! That's what the issue is about *providers = Providers { is_dllimport_foreign_item: |tcx, id| { - tcx.native_library_kind(id) == Some(NativeLibraryKind::NativeUnknown) + match tcx.native_library_kind(id) { + Some(NativeLibraryKind::NativeUnknown) | + Some(NativeLibraryKind::NativeRawDylib) => true, + _ => false, + } }, is_statically_included_foreign_item: |tcx, id| { match tcx.native_library_kind(id) { @@ -371,6 +376,11 @@ pub fn provide(providers: &mut Providers<'_>) { tcx.arena.alloc(visible_parent_map) }, + dependency_formats: |tcx, cnum| { + assert_eq!(cnum, LOCAL_CRATE); + Lrc::new(crate::dependency_format::calculate(tcx)) + }, + ..*providers }; } @@ -384,26 +394,8 @@ impl cstore::CStore { } } - pub fn dep_kind_untracked(&self, cnum: CrateNum) -> DepKind { - let data = self.get_crate_data(cnum); - let r = *data.dep_kind.lock(); - r - } - - pub fn crate_edition_untracked(&self, cnum: CrateNum) -> Edition { - self.get_crate_data(cnum).root.edition - } - - pub fn struct_field_names_untracked(&self, def: DefId) -> Vec { - self.get_crate_data(def.krate).get_struct_field_names(def.index) - } - - pub fn ctor_kind_untracked(&self, def: DefId) -> def::CtorKind { - self.get_crate_data(def.krate).get_ctor_kind(def.index) - } - - pub fn item_attrs_untracked(&self, def: DefId, sess: &Session) -> Lrc<[ast::Attribute]> { - self.get_crate_data(def.krate).get_item_attrs(def.index, sess) + pub fn struct_field_names_untracked(&self, def: DefId, sess: &Session) -> Vec> { + self.get_crate_data(def.krate).get_struct_field_names(def.index, sess) } pub fn item_children_untracked( @@ -418,20 +410,21 @@ impl cstore::CStore { } pub fn load_macro_untracked(&self, id: DefId, sess: &Session) -> LoadedMacro { + let _prof_timer = sess.prof.generic_activity("metadata_load_macro"); + let data = self.get_crate_data(id.krate); if data.is_proc_macro_crate() { return LoadedMacro::ProcMacro(data.load_proc_macro(id.index, sess)); } let def = data.get_macro(id.index); - let macro_full_name = data.def_path(id.index) - .to_string_friendly(|_| data.imported_name); + let macro_full_name = data.def_path(id.index).to_string_friendly(|_| data.root.name); let source_name = FileName::Macros(macro_full_name); let source_file = sess.parse_sess.source_map().new_source_file(source_name, def.body); let local_span = Span::with_root_ctxt(source_file.start_pos, source_file.end_pos); let (body, mut errors) = source_file_to_stream(&sess.parse_sess, source_file, None); - emit_unclosed_delims(&mut errors, &sess.diagnostic()); + emit_unclosed_delims(&mut errors, &sess.parse_sess); // Mark the attrs as used let attrs = data.get_item_attrs(id.index, sess); @@ -446,27 +439,31 @@ impl cstore::CStore { LoadedMacro::MacroDef(ast::Item { // FIXME: cross-crate hygiene - ident: ast::Ident::with_dummy_span(name.as_symbol()), + ident: ast::Ident::with_dummy_span(name), id: ast::DUMMY_NODE_ID, span: local_span, attrs: attrs.iter().cloned().collect(), - node: ast::ItemKind::MacroDef(ast::MacroDef { + kind: ast::ItemKind::MacroDef(ast::MacroDef { tokens: body.into(), legacy: def.legacy, }), vis: source_map::respan(local_span.shrink_to_lo(), ast::VisibilityKind::Inherited), tokens: None, - }) + }, data.root.edition) } pub fn associated_item_cloned_untracked(&self, def: DefId) -> ty::AssocItem { self.get_crate_data(def.krate).get_associated_item(def.index) } + + pub fn 
crate_source_untracked(&self, cnum: CrateNum) -> CrateSource { + self.get_crate_data(cnum).source.clone() + } } impl CrateStore for cstore::CStore { - fn crate_data_as_rc_any(&self, krate: CrateNum) -> Lrc { - self.get_crate_data(krate) + fn crate_data_as_any(&self, cnum: CrateNum) -> &dyn Any { + self.get_crate_data(cnum) } fn item_generics_cloned_untracked(&self, def: DefId, sess: &Session) -> ty::Generics { @@ -475,7 +472,7 @@ impl CrateStore for cstore::CStore { fn crate_name_untracked(&self, cnum: CrateNum) -> Symbol { - self.get_crate_data(cnum).name + self.get_crate_data(cnum).root.name } fn crate_is_private_dep_untracked(&self, cnum: CrateNum) -> bool { @@ -492,24 +489,18 @@ impl CrateStore for cstore::CStore { self.get_crate_data(cnum).root.hash } + fn crate_host_hash_untracked(&self, cnum: CrateNum) -> Option { + self.get_crate_data(cnum).host_hash + } + /// Returns the `DefKey` for a given `DefId`. This indicates the /// parent `DefId` as well as some idea of what kind of data the /// `DefId` refers to. fn def_key(&self, def: DefId) -> DefKey { - // Note: loading the def-key (or def-path) for a def-id is not - // a *read* of its metadata. This is because the def-id is - // really just an interned shorthand for a def-path, which is the - // canonical name for an item. - // - // self.dep_graph.read(DepNode::MetaData(def)); self.get_crate_data(def.krate).def_key(def.index) } fn def_path(&self, def: DefId) -> DefPath { - // See `Note` above in `def_key()` for why this read is - // commented out: - // - // self.dep_graph.read(DepNode::MetaData(def)); self.get_crate_data(def.krate).def_path(def.index) } @@ -517,8 +508,8 @@ impl CrateStore for cstore::CStore { self.get_crate_data(def.krate).def_path_hash(def.index) } - fn def_path_table(&self, cnum: CrateNum) -> Lrc { - self.get_crate_data(cnum).def_path_table.clone() + fn def_path_table(&self, cnum: CrateNum) -> &DefPathTable { + &self.get_crate_data(cnum).def_path_table } fn crates_untracked(&self) -> Vec @@ -528,11 +519,6 @@ impl CrateStore for cstore::CStore { result } - fn extern_mod_stmt_cnum_untracked(&self, emod_id: ast::NodeId) -> Option - { - self.do_extern_mod_stmt_cnum(emod_id) - } - fn postorder_cnums_untracked(&self) -> Vec { self.do_postorder_cnums_untracked() } diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 247748eb3e..c5954e1ea1 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -1,18 +1,20 @@ // Decoding metadata from a single crate's metadata -use crate::cstore::{self, CrateMetadata, MetadataBlob, NativeLibrary, ForeignModule}; +use crate::cstore::{self, CrateMetadata, MetadataBlob}; use crate::schema::*; +use crate::table::{FixedSizeEncoding, PerDefTable}; -use rustc_data_structures::indexed_vec::IndexVec; -use rustc_data_structures::sync::{Lrc, ReadGuard}; +use rustc_index::vec::IndexVec; +use rustc_data_structures::sync::Lrc; use rustc::hir::map::{DefKey, DefPath, DefPathData, DefPathHash}; use rustc::hir; -use rustc::middle::cstore::LinkagePreference; +use rustc::middle::cstore::{LinkagePreference, NativeLibrary, ForeignModule}; use rustc::middle::exported_symbols::{ExportedSymbol, SymbolExportLevel}; use rustc::hir::def::{self, Res, DefKind, CtorOf, CtorKind}; use rustc::hir::def_id::{CrateNum, DefId, DefIndex, LocalDefId, CRATE_DEF_INDEX, LOCAL_CRATE}; use rustc_data_structures::fingerprint::Fingerprint; use rustc_data_structures::fx::FxHashMap; +use rustc::dep_graph::{DepNodeIndex, DepKind}; use rustc::middle::lang_items; use 
rustc::mir::{self, interpret}; use rustc::mir::interpret::AllocDecodingSession; @@ -24,20 +26,21 @@ use rustc::util::captures::Captures; use std::io; use std::mem; +use std::num::NonZeroUsize; use std::u32; -use rustc_serialize::{Decodable, Decoder, SpecializedDecoder, opaque}; +use rustc_serialize::{Decodable, Decoder, Encodable, SpecializedDecoder, opaque}; use syntax::attr; use syntax::ast::{self, Ident}; -use syntax::source_map; -use syntax::symbol::{Symbol, sym}; -use syntax::ext::base::{MacroKind, SyntaxExtensionKind, SyntaxExtension}; -use syntax_pos::{self, Span, BytePos, Pos, DUMMY_SP, symbol::{InternedString}}; +use syntax::source_map::{self, respan, Spanned}; +use syntax_expand::base::{SyntaxExtensionKind, SyntaxExtension}; +use syntax_expand::proc_macro::{AttrProcMacro, ProcMacroDerive, BangProcMacro}; +use syntax_pos::{self, Span, BytePos, Pos, DUMMY_SP, hygiene::MacroKind}; +use syntax_pos::symbol::{Symbol, sym}; use log::debug; use proc_macro::bridge::client::ProcMacro; -use syntax::ext::proc_macro::{AttrProcMacro, ProcMacroDerive, BangProcMacro}; -pub struct DecodeContext<'a, 'tcx> { +crate struct DecodeContext<'a, 'tcx> { opaque: opaque::Decoder<'a>, cdata: Option<&'a CrateMetadata>, sess: Option<&'tcx Session>, @@ -53,7 +56,7 @@ pub struct DecodeContext<'a, 'tcx> { } /// Abstract over the various ways one can create metadata decoders. -pub trait Metadata<'a, 'tcx>: Copy { +crate trait Metadata<'a, 'tcx>: Copy { fn raw_bytes(self) -> &'a [u8]; fn cdata(self) -> Option<&'a CrateMetadata> { None } fn sess(self) -> Option<&'tcx Session> { None } @@ -128,31 +131,31 @@ impl<'a, 'tcx> Metadata<'a, 'tcx> for (&'a CrateMetadata, TyCtxt<'tcx>) { } } -impl<'a, 'tcx, T: Decodable> Lazy { - pub fn decode>(self, meta: M) -> T { - let mut dcx = meta.decoder(self.position); +impl<'a, 'tcx, T: Encodable + Decodable> Lazy { + crate fn decode>(self, metadata: M) -> T { + let mut dcx = metadata.decoder(self.position.get()); dcx.lazy_state = LazyState::NodeStart(self.position); T::decode(&mut dcx).unwrap() } } -impl<'a: 'x, 'tcx: 'x, 'x, T: Decodable> Lazy<[T]> { - pub fn decode>( +impl<'a: 'x, 'tcx: 'x, 'x, T: Encodable + Decodable> Lazy<[T]> { + crate fn decode>( self, - meta: M, + metadata: M, ) -> impl ExactSizeIterator + Captures<'a> + Captures<'tcx> + 'x { - let mut dcx = meta.decoder(self.position); + let mut dcx = metadata.decoder(self.position.get()); dcx.lazy_state = LazyState::NodeStart(self.position); (0..self.meta).map(move |_| T::decode(&mut dcx).unwrap()) } } impl<'a, 'tcx> DecodeContext<'a, 'tcx> { - pub fn tcx(&self) -> TyCtxt<'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { self.tcx.expect("missing TyCtxt in DecodeContext") } - pub fn cdata(&self) -> &'a CrateMetadata { + fn cdata(&self) -> &'a CrateMetadata { self.cdata.expect("missing CrateMetadata in DecodeContext") } @@ -165,13 +168,14 @@ impl<'a, 'tcx> DecodeContext<'a, 'tcx> { let position = match self.lazy_state { LazyState::NoNode => bug!("read_lazy_with_meta: outside of a metadata node"), LazyState::NodeStart(start) => { + let start = start.get(); assert!(distance + min_size <= start); start - distance - min_size } - LazyState::Previous(last_min_end) => last_min_end + distance, + LazyState::Previous(last_min_end) => last_min_end.get() + distance, }; - self.lazy_state = LazyState::Previous(position + min_size); - Ok(Lazy::from_position_and_meta(position, meta)) + self.lazy_state = LazyState::Previous(NonZeroUsize::new(position + min_size).unwrap()); + Ok(Lazy::from_position_and_meta(NonZeroUsize::new(position).unwrap(), 
meta)) } } @@ -234,13 +238,13 @@ impl<'a, 'tcx> TyDecoder<'tcx> for DecodeContext<'a, 'tcx> { } } -impl<'a, 'tcx, T> SpecializedDecoder> for DecodeContext<'a, 'tcx> { +impl<'a, 'tcx, T: Encodable> SpecializedDecoder> for DecodeContext<'a, 'tcx> { fn specialized_decode(&mut self) -> Result, Self::Error> { self.read_lazy_with_meta(()) } } -impl<'a, 'tcx, T> SpecializedDecoder> for DecodeContext<'a, 'tcx> { +impl<'a, 'tcx, T: Encodable> SpecializedDecoder> for DecodeContext<'a, 'tcx> { fn specialized_decode(&mut self) -> Result, Self::Error> { let len = self.read_usize()?; if len == 0 { @@ -251,6 +255,14 @@ impl<'a, 'tcx, T> SpecializedDecoder> for DecodeContext<'a, 'tcx> { } } +impl<'a, 'tcx, T> SpecializedDecoder>> for DecodeContext<'a, 'tcx> + where Option: FixedSizeEncoding, +{ + fn specialized_decode(&mut self) -> Result>, Self::Error> { + let len = self.read_usize()?; + self.read_lazy_with_meta(len) + } +} impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { #[inline] @@ -378,24 +390,28 @@ for DecodeContext<'a, 'tcx> { implement_ty_decoder!( DecodeContext<'a, 'tcx> ); impl<'tcx> MetadataBlob { - pub fn is_compatible(&self) -> bool { + crate fn is_compatible(&self) -> bool { self.raw_bytes().starts_with(METADATA_HEADER) } - pub fn get_rustc_version(&self) -> String { - Lazy::::from_position(METADATA_HEADER.len() + 4).decode(self) + crate fn get_rustc_version(&self) -> String { + Lazy::::from_position( + NonZeroUsize::new(METADATA_HEADER.len() + 4).unwrap(), + ).decode(self) } - pub fn get_root(&self) -> CrateRoot<'tcx> { + crate fn get_root(&self) -> CrateRoot<'tcx> { let slice = self.raw_bytes(); let offset = METADATA_HEADER.len(); let pos = (((slice[offset + 0] as u32) << 24) | ((slice[offset + 1] as u32) << 16) | ((slice[offset + 2] as u32) << 8) | ((slice[offset + 3] as u32) << 0)) as usize; - Lazy::>::from_position(pos).decode(self) + Lazy::>::from_position( + NonZeroUsize::new(pos).unwrap(), + ).decode(self) } - pub fn list_crate_metadata(&self, + crate fn list_crate_metadata(&self, out: &mut dyn io::Write) -> io::Result<()> { write!(out, "=External Dependencies=\n")?; let root = self.get_root(); @@ -432,7 +448,7 @@ impl<'tcx> EntryKind<'tcx> { EntryKind::Mod(_) => DefKind::Mod, EntryKind::Variant(_) => DefKind::Variant, EntryKind::Trait(_) => DefKind::Trait, - EntryKind::TraitAlias(_) => DefKind::TraitAlias, + EntryKind::TraitAlias => DefKind::TraitAlias, EntryKind::Enum(..) 
=> DefKind::Enum, EntryKind::MacroDef(_) => DefKind::Macro(MacroKind::Bang), EntryKind::ForeignType => DefKind::ForeignTy, @@ -442,13 +458,13 @@ impl<'tcx> EntryKind<'tcx> { EntryKind::Impl(_) | EntryKind::Field | EntryKind::Generator(_) | - EntryKind::Closure(_) => return None, + EntryKind::Closure => return None, }) } } impl<'a, 'tcx> CrateMetadata { - pub fn is_proc_macro_crate(&self) -> bool { + crate fn is_proc_macro_crate(&self) -> bool { self.root.proc_macro_decls_static.is_some() } @@ -457,27 +473,20 @@ impl<'a, 'tcx> CrateMetadata { self.root.proc_macro_data.unwrap().decode(self).find(|x| *x == id).is_some() } - fn entry_unless_proc_macro(&self, id: DefIndex) -> Option> { - match self.is_proc_macro(id) { - true => None, - false => Some(self.entry(id)), - } + fn maybe_kind(&self, item_id: DefIndex) -> Option> { + self.root.per_def.kind.get(self, item_id).map(|k| k.decode(self)) } - fn maybe_entry(&self, item_id: DefIndex) -> Option>> { - self.root.entries_index.lookup(self.blob.raw_bytes(), item_id) - } - - fn entry(&self, item_id: DefIndex) -> Entry<'tcx> { - match self.maybe_entry(item_id) { - None => { - bug!("entry: id not found: {:?} in crate {:?} with number {}", - item_id, - self.name, - self.cnum) - } - Some(d) => d.decode(self), - } + fn kind(&self, item_id: DefIndex) -> EntryKind<'tcx> { + assert!(!self.is_proc_macro(item_id)); + self.maybe_kind(item_id).unwrap_or_else(|| { + bug!( + "CrateMetadata::kind({:?}): id not found, in crate {:?} with number {}", + item_id, + self.root.name, + self.cnum, + ) + }) } fn local_def_id(&self, index: DefIndex) -> DefId { @@ -498,22 +507,21 @@ impl<'a, 'tcx> CrateMetadata { &self.raw_proc_macros.unwrap()[pos] } - pub fn item_name(&self, item_index: DefIndex) -> Symbol { + crate fn item_name(&self, item_index: DefIndex) -> Symbol { if !self.is_proc_macro(item_index) { self.def_key(item_index) .disambiguated_data .data .get_opt_name() .expect("no name in item_name") - .as_symbol() } else { Symbol::intern(self.raw_proc_macro(item_index).name()) } } - pub fn def_kind(&self, index: DefIndex) -> Option { + crate fn def_kind(&self, index: DefIndex) -> Option { if !self.is_proc_macro(index) { - self.entry(index).kind.def_kind() + self.kind(index).def_kind() } else { Some(DefKind::Macro( macro_kind(self.raw_proc_macro(index)) @@ -521,8 +529,8 @@ impl<'a, 'tcx> CrateMetadata { } } - pub fn get_span(&self, index: DefIndex, sess: &Session) -> Span { - self.entry(index).span.decode((self, sess)) + crate fn get_span(&self, index: DefIndex, sess: &Session) -> Span { + self.root.per_def.span.get(self, index).unwrap().decode((self, sess)) } crate fn load_proc_macro(&self, id: DefIndex, sess: &Session) -> SyntaxExtension { @@ -543,25 +551,20 @@ impl<'a, 'tcx> CrateMetadata { name, SyntaxExtensionKind::Bang(Box::new(BangProcMacro { client })), Vec::new() ) }; - let edition = if sess.opts.debugging_opts.dual_proc_macros { - self.host_lib.as_ref().unwrap().metadata.get_root().edition - } else { - self.root.edition - }; SyntaxExtension::new( &sess.parse_sess, kind, self.get_span(id, sess), helper_attrs, - edition, + self.root.edition, Symbol::intern(name), - &self.get_attributes(&self.entry(id), sess), + &self.get_item_attrs(id, sess), ) } - pub fn get_trait_def(&self, item_id: DefIndex, sess: &Session) -> ty::TraitDef { - match self.entry(item_id).kind { + crate fn get_trait_def(&self, item_id: DefIndex, sess: &Session) -> ty::TraitDef { + match self.kind(item_id) { EntryKind::Trait(data) => { let data = data.decode((self, sess)); 
ty::TraitDef::new(self.local_def_id(item_id), @@ -571,7 +574,7 @@ impl<'a, 'tcx> CrateMetadata { data.is_marker, self.def_path_table.def_path_hash(item_id)) }, - EntryKind::TraitAlias(_) => { + EntryKind::TraitAlias => { ty::TraitDef::new(self.local_def_id(item_id), hir::Unsafety::Normal, false, @@ -586,18 +589,24 @@ impl<'a, 'tcx> CrateMetadata { fn get_variant( &self, tcx: TyCtxt<'tcx>, - item: &Entry<'_>, + kind: &EntryKind<'_>, index: DefIndex, parent_did: DefId, - adt_kind: ty::AdtKind, ) -> ty::VariantDef { - let data = match item.kind { + let data = match kind { EntryKind::Variant(data) | EntryKind::Struct(data, _) | EntryKind::Union(data, _) => data.decode(self), _ => bug!(), }; + let adt_kind = match kind { + EntryKind::Variant(_) => ty::AdtKind::Enum, + EntryKind::Struct(..) => ty::AdtKind::Struct, + EntryKind::Union(..) => ty::AdtKind::Union, + _ => bug!(), + }; + let variant_did = if adt_kind == ty::AdtKind::Enum { Some(self.local_def_id(index)) } else { @@ -611,14 +620,12 @@ impl<'a, 'tcx> CrateMetadata { variant_did, ctor_did, data.discr, - item.children.decode(self).map(|index| { - let f = self.entry(index); - ty::FieldDef { + self.root.per_def.children.get(self, index).unwrap_or(Lazy::empty()) + .decode(self).map(|index| ty::FieldDef { did: self.local_def_id(index), ident: Ident::with_dummy_span(self.item_name(index)), - vis: f.visibility.decode(self) - } - }).collect(), + vis: self.get_visibility(index), + }).collect(), data.ctor_kind, adt_kind, parent_did, @@ -626,122 +633,115 @@ impl<'a, 'tcx> CrateMetadata { ) } - pub fn get_adt_def(&self, item_id: DefIndex, tcx: TyCtxt<'tcx>) -> &'tcx ty::AdtDef { - let item = self.entry(item_id); + crate fn get_adt_def(&self, item_id: DefIndex, tcx: TyCtxt<'tcx>) -> &'tcx ty::AdtDef { + let kind = self.kind(item_id); let did = self.local_def_id(item_id); - let (kind, repr) = match item.kind { + let (adt_kind, repr) = match kind { EntryKind::Enum(repr) => (ty::AdtKind::Enum, repr), EntryKind::Struct(_, repr) => (ty::AdtKind::Struct, repr), EntryKind::Union(_, repr) => (ty::AdtKind::Union, repr), _ => bug!("get_adt_def called on a non-ADT {:?}", did), }; - let variants = if let ty::AdtKind::Enum = kind { - item.children + let variants = if let ty::AdtKind::Enum = adt_kind { + self.root.per_def.children.get(self, item_id).unwrap_or(Lazy::empty()) .decode(self) .map(|index| { - self.get_variant(tcx, &self.entry(index), index, did, kind) + self.get_variant(tcx, &self.kind(index), index, did) }) .collect() } else { - std::iter::once(self.get_variant(tcx, &item, item_id, did, kind)).collect() + std::iter::once(self.get_variant(tcx, &kind, item_id, did)).collect() }; - tcx.alloc_adt_def(did, kind, variants, repr) + tcx.alloc_adt_def(did, adt_kind, variants, repr) } - pub fn get_predicates( + crate fn get_predicates( &self, item_id: DefIndex, tcx: TyCtxt<'tcx>, ) -> ty::GenericPredicates<'tcx> { - self.entry(item_id).predicates.unwrap().decode((self, tcx)) -} + self.root.per_def.predicates.get(self, item_id).unwrap().decode((self, tcx)) + } - pub fn get_predicates_defined_on( + crate fn get_predicates_defined_on( &self, item_id: DefIndex, tcx: TyCtxt<'tcx>, ) -> ty::GenericPredicates<'tcx> { - self.entry(item_id).predicates_defined_on.unwrap().decode((self, tcx)) + self.root.per_def.predicates_defined_on.get(self, item_id).unwrap().decode((self, tcx)) } - pub fn get_super_predicates( + crate fn get_super_predicates( &self, item_id: DefIndex, tcx: TyCtxt<'tcx>, ) -> ty::GenericPredicates<'tcx> { - let super_predicates = match 
self.entry(item_id).kind { - EntryKind::Trait(data) => data.decode(self).super_predicates, - EntryKind::TraitAlias(data) => data.decode(self).super_predicates, - _ => bug!("def-index does not refer to trait or trait alias"), - }; - - super_predicates.decode((self, tcx)) + self.root.per_def.super_predicates.get(self, item_id).unwrap().decode((self, tcx)) } - pub fn get_generics(&self, - item_id: DefIndex, - sess: &Session) - -> ty::Generics { - self.entry(item_id).generics.unwrap().decode((self, sess)) + crate fn get_generics(&self, item_id: DefIndex, sess: &Session) -> ty::Generics { + self.root.per_def.generics.get(self, item_id).unwrap().decode((self, sess)) } - pub fn get_type(&self, id: DefIndex, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { - self.entry(id).ty.unwrap().decode((self, tcx)) + crate fn get_type(&self, id: DefIndex, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { + self.root.per_def.ty.get(self, id).unwrap().decode((self, tcx)) } - pub fn get_stability(&self, id: DefIndex) -> Option { + crate fn get_stability(&self, id: DefIndex) -> Option { match self.is_proc_macro(id) { true => self.root.proc_macro_stability.clone(), - false => self.entry(id).stability.map(|stab| stab.decode(self)), + false => self.root.per_def.stability.get(self, id).map(|stab| stab.decode(self)), } } - pub fn get_deprecation(&self, id: DefIndex) -> Option { - self.entry_unless_proc_macro(id) - .and_then(|entry| entry.deprecation.map(|depr| depr.decode(self))) + crate fn get_deprecation(&self, id: DefIndex) -> Option { + self.root.per_def.deprecation.get(self, id) + .filter(|_| !self.is_proc_macro(id)) + .map(|depr| depr.decode(self)) } - pub fn get_visibility(&self, id: DefIndex) -> ty::Visibility { + crate fn get_visibility(&self, id: DefIndex) -> ty::Visibility { match self.is_proc_macro(id) { true => ty::Visibility::Public, - false => self.entry(id).visibility.decode(self), + false => self.root.per_def.visibility.get(self, id).unwrap().decode(self), } } - fn get_impl_data(&self, id: DefIndex) -> ImplData<'tcx> { - match self.entry(id).kind { + fn get_impl_data(&self, id: DefIndex) -> ImplData { + match self.kind(id) { EntryKind::Impl(data) => data.decode(self), _ => bug!(), } } - pub fn get_parent_impl(&self, id: DefIndex) -> Option { + crate fn get_parent_impl(&self, id: DefIndex) -> Option { self.get_impl_data(id).parent_impl } - pub fn get_impl_polarity(&self, id: DefIndex) -> hir::ImplPolarity { + crate fn get_impl_polarity(&self, id: DefIndex) -> ty::ImplPolarity { self.get_impl_data(id).polarity } - pub fn get_impl_defaultness(&self, id: DefIndex) -> hir::Defaultness { + crate fn get_impl_defaultness(&self, id: DefIndex) -> hir::Defaultness { self.get_impl_data(id).defaultness } - pub fn get_coerce_unsized_info(&self, - id: DefIndex) - -> Option { + crate fn get_coerce_unsized_info( + &self, + id: DefIndex, + ) -> Option { self.get_impl_data(id).coerce_unsized_info } - pub fn get_impl_trait(&self, id: DefIndex, tcx: TyCtxt<'tcx>) -> Option> { - self.get_impl_data(id).trait_ref.map(|tr| tr.decode((self, tcx))) + crate fn get_impl_trait(&self, id: DefIndex, tcx: TyCtxt<'tcx>) -> Option> { + self.root.per_def.impl_trait_ref.get(self, id).map(|tr| tr.decode((self, tcx))) } /// Iterates over all the stability attributes in the given crate. - pub fn get_lib_features(&self, tcx: TyCtxt<'tcx>) -> &'tcx [(ast::Name, Option)] { + crate fn get_lib_features(&self, tcx: TyCtxt<'tcx>) -> &'tcx [(ast::Name, Option)] { // FIXME: For a proc macro crate, not sure whether we should return the "host" // features or an empty Vec. 
Both don't cause ICEs. tcx.arena.alloc_from_iter(self.root @@ -750,7 +750,7 @@ impl<'a, 'tcx> CrateMetadata { } /// Iterates over the language items in the given crate. - pub fn get_lang_items(&self, tcx: TyCtxt<'tcx>) -> &'tcx [(DefId, usize)] { + crate fn get_lang_items(&self, tcx: TyCtxt<'tcx>) -> &'tcx [(DefId, usize)] { if self.is_proc_macro_crate() { // Proc macro crates do not export any lang-items to the target. &[] @@ -763,7 +763,7 @@ impl<'a, 'tcx> CrateMetadata { } /// Iterates over the diagnostic items in the given crate. - pub fn get_diagnostic_items( + crate fn get_diagnostic_items( &self, tcx: TyCtxt<'tcx>, ) -> &'tcx FxHashMap { @@ -780,7 +780,7 @@ impl<'a, 'tcx> CrateMetadata { } /// Iterates over each child of the given item. - pub fn each_child_of_item(&self, id: DefIndex, mut callback: F, sess: &Session) + crate fn each_child_of_item(&self, id: DefIndex, mut callback: F, sess: &Session) where F: FnMut(def::Export) { if let Some(proc_macros_ids) = self.root.proc_macro_data.map(|d| d.decode(self)) { @@ -807,38 +807,42 @@ impl<'a, 'tcx> CrateMetadata { } // Find the item. - let item = match self.maybe_entry(id) { + let kind = match self.maybe_kind(id) { None => return, - Some(item) => item.decode((self, sess)), + Some(kind) => kind, }; // Iterate over all children. let macros_only = self.dep_kind.lock().macros_only(); - for child_index in item.children.decode((self, sess)) { + let children = self.root.per_def.children.get(self, id).unwrap_or(Lazy::empty()); + for child_index in children.decode((self, sess)) { if macros_only { continue } // Get the item. - if let Some(child) = self.maybe_entry(child_index) { - let child = child.decode((self, sess)); - match child.kind { + if let Some(child_kind) = self.maybe_kind(child_index) { + match child_kind { EntryKind::MacroDef(..) => {} _ if macros_only => continue, _ => {} } // Hand off the item to the callback. - match child.kind { + match child_kind { // FIXME(eddyb) Don't encode these in children. 
EntryKind::ForeignMod => { - for child_index in child.children.decode((self, sess)) { + let child_children = + self.root.per_def.children.get(self, child_index) + .unwrap_or(Lazy::empty()); + for child_index in child_children.decode((self, sess)) { if let Some(kind) = self.def_kind(child_index) { callback(def::Export { res: Res::Def(kind, self.local_def_id(child_index)), ident: Ident::with_dummy_span(self.item_name(child_index)), vis: self.get_visibility(child_index), - span: self.entry(child_index).span.decode((self, sess)), + span: self.root.per_def.span.get(self, child_index).unwrap() + .decode((self, sess)), }); } } @@ -850,10 +854,10 @@ impl<'a, 'tcx> CrateMetadata { } let def_key = self.def_key(child_index); - let span = child.span.decode((self, sess)); + let span = self.get_span(child_index, sess); if let (Some(kind), Some(name)) = (self.def_kind(child_index), def_key.disambiguated_data.data.get_opt_name()) { - let ident = Ident::from_interned_str(name); + let ident = Ident::with_dummy_span(name); let vis = self.get_visibility(child_index); let def_id = self.local_def_id(child_index); let res = Res::Def(kind, def_id); @@ -903,7 +907,7 @@ impl<'a, 'tcx> CrateMetadata { } } - if let EntryKind::Mod(data) = item.kind { + if let EntryKind::Mod(data) = kind { for exp in data.decode((self, sess)).reexports.decode((self, sess)) { match exp.res { Res::Def(DefKind::Macro(..), _) => {} @@ -915,41 +919,35 @@ impl<'a, 'tcx> CrateMetadata { } } - pub fn const_is_rvalue_promotable_to_static(&self, id: DefIndex) -> bool { - match self.entry(id).kind { - EntryKind::AssocConst(_, data, _) | - EntryKind::Const(data, _) => data.ast_promotable, - _ => bug!(), - } - } - - pub fn is_item_mir_available(&self, id: DefIndex) -> bool { + crate fn is_item_mir_available(&self, id: DefIndex) -> bool { !self.is_proc_macro(id) && - self.maybe_entry(id).and_then(|item| item.decode(self).mir).is_some() + self.root.per_def.mir.get(self, id).is_some() } - pub fn get_optimized_mir(&self, tcx: TyCtxt<'tcx>, id: DefIndex) -> Body<'tcx> { - self.entry_unless_proc_macro(id) - .and_then(|entry| entry.mir.map(|mir| mir.decode((self, tcx)))) + crate fn get_optimized_mir(&self, tcx: TyCtxt<'tcx>, id: DefIndex) -> Body<'tcx> { + self.root.per_def.mir.get(self, id) + .filter(|_| !self.is_proc_macro(id)) .unwrap_or_else(|| { - bug!("get_optimized_mir: missing MIR for `{:?}", self.local_def_id(id)) + bug!("get_optimized_mir: missing MIR for `{:?}`", self.local_def_id(id)) }) + .decode((self, tcx)) } - pub fn get_promoted_mir( + crate fn get_promoted_mir( &self, tcx: TyCtxt<'tcx>, id: DefIndex, ) -> IndexVec> { - self.entry_unless_proc_macro(id) - .and_then(|entry| entry.promoted_mir.map(|promoted| promoted.decode((self, tcx)))) + self.root.per_def.promoted_mir.get(self, id) + .filter(|_| !self.is_proc_macro(id)) .unwrap_or_else(|| { bug!("get_promoted_mir: missing MIR for `{:?}`", self.local_def_id(id)) }) + .decode((self, tcx)) } - pub fn mir_const_qualif(&self, id: DefIndex) -> u8 { - match self.entry(id).kind { + crate fn mir_const_qualif(&self, id: DefIndex) -> u8 { + match self.kind(id) { EntryKind::Const(qualif, _) | EntryKind::AssocConst(AssocContainer::ImplDefault, qualif, _) | EntryKind::AssocConst(AssocContainer::ImplFinal, qualif, _) => { @@ -959,13 +957,12 @@ impl<'a, 'tcx> CrateMetadata { } } - pub fn get_associated_item(&self, id: DefIndex) -> ty::AssocItem { - let item = self.entry(id); + crate fn get_associated_item(&self, id: DefIndex) -> ty::AssocItem { let def_key = self.def_key(id); let parent = 
self.local_def_id(def_key.parent.unwrap()); let name = def_key.disambiguated_data.data.get_opt_name().unwrap(); - let (kind, container, has_self) = match item.kind { + let (kind, container, has_self) = match self.kind(id) { EntryKind::AssocConst(container, _, _) => { (ty::AssocKind::Const, container, false) } @@ -983,9 +980,9 @@ impl<'a, 'tcx> CrateMetadata { }; ty::AssocItem { - ident: Ident::from_interned_str(name), + ident: Ident::with_dummy_span(name), kind, - vis: item.visibility.decode(self), + vis: self.get_visibility(id), defaultness: container.defaultness(), def_id: self.local_def_id(id), container: container.with_def_id(parent), @@ -993,12 +990,13 @@ impl<'a, 'tcx> CrateMetadata { } } - pub fn get_item_variances(&self, id: DefIndex) -> Vec { - self.entry(id).variances.decode(self).collect() + crate fn get_item_variances(&self, id: DefIndex) -> Vec { + self.root.per_def.variances.get(self, id).unwrap_or(Lazy::empty()) + .decode(self).collect() } - pub fn get_ctor_kind(&self, node_id: DefIndex) -> CtorKind { - match self.entry(node_id).kind { + crate fn get_ctor_kind(&self, node_id: DefIndex) -> CtorKind { + match self.kind(node_id) { EntryKind::Struct(data, _) | EntryKind::Union(data, _) | EntryKind::Variant(data) => data.decode(self).ctor_kind, @@ -1006,8 +1004,8 @@ impl<'a, 'tcx> CrateMetadata { } } - pub fn get_ctor_def_id(&self, node_id: DefIndex) -> Option { - match self.entry(node_id).kind { + crate fn get_ctor_def_id(&self, node_id: DefIndex) -> Option { + match self.kind(node_id) { EntryKind::Struct(data, _) => { data.decode(self).ctor.map(|index| self.local_def_id(index)) } @@ -1018,8 +1016,7 @@ impl<'a, 'tcx> CrateMetadata { } } - - pub fn get_item_attrs(&self, node_id: DefIndex, sess: &Session) -> Lrc<[ast::Attribute]> { + crate fn get_item_attrs(&self, node_id: DefIndex, sess: &Session) -> Lrc<[ast::Attribute]> { // The attributes for a tuple struct/variant are attached to the definition, not the ctor; // we assume that someone passing in a tuple struct ctor is actually wanting to // look at the definition @@ -1030,22 +1027,22 @@ impl<'a, 'tcx> CrateMetadata { node_id }; - let item = self.entry(item_id); - Lrc::from(self.get_attributes(&item, sess)) + Lrc::from(self.root.per_def.attributes.get(self, item_id).unwrap_or(Lazy::empty()) + .decode((self, sess)) + .collect::>()) } - pub fn get_struct_field_names(&self, id: DefIndex) -> Vec { - self.entry(id) - .children + crate fn get_struct_field_names( + &self, + id: DefIndex, + sess: &Session, + ) -> Vec> { + self.root.per_def.children.get(self, id).unwrap_or(Lazy::empty()) .decode(self) - .map(|index| self.item_name(index)) + .map(|index| respan(self.get_span(index, sess), self.item_name(index))) .collect() } - fn get_attributes(&self, item: &Entry<'tcx>, sess: &Session) -> Vec { - item.attributes.decode((self, sess)).collect() - } - // Translate a DefId from the current compilation environment to a DefId // for an external crate. 
fn reverse_translate_def_id(&self, did: DefId) -> Option { @@ -1061,18 +1058,19 @@ impl<'a, 'tcx> CrateMetadata { None } - pub fn get_inherent_implementations_for_type( + crate fn get_inherent_implementations_for_type( &self, tcx: TyCtxt<'tcx>, id: DefIndex, ) -> &'tcx [DefId] { - tcx.arena.alloc_from_iter(self.entry(id) - .inherent_impls - .decode(self) - .map(|index| self.local_def_id(index))) + tcx.arena.alloc_from_iter( + self.root.per_def.inherent_impls.get(self, id).unwrap_or(Lazy::empty()) + .decode(self) + .map(|index| self.local_def_id(index)) + ) } - pub fn get_implementations_for_trait( + crate fn get_implementations_for_trait( &self, tcx: TyCtxt<'tcx>, filter: Option, @@ -1103,7 +1101,7 @@ impl<'a, 'tcx> CrateMetadata { } } - pub fn get_trait_of_item(&self, id: DefIndex) -> Option { + crate fn get_trait_of_item(&self, id: DefIndex) -> Option { let def_key = self.def_key(id); match def_key.disambiguated_data.data { DefPathData::TypeNs(..) | DefPathData::ValueNs(..) => (), @@ -1111,16 +1109,16 @@ impl<'a, 'tcx> CrateMetadata { _ => return None, } def_key.parent.and_then(|parent_index| { - match self.entry(parent_index).kind { + match self.kind(parent_index) { EntryKind::Trait(_) | - EntryKind::TraitAlias(_) => Some(self.local_def_id(parent_index)), + EntryKind::TraitAlias => Some(self.local_def_id(parent_index)), _ => None, } }) } - pub fn get_native_libraries(&self, sess: &Session) -> Vec { + crate fn get_native_libraries(&self, sess: &Session) -> Vec { if self.is_proc_macro_crate() { // Proc macro crates do not have any *target* native libraries. vec![] @@ -1129,7 +1127,7 @@ impl<'a, 'tcx> CrateMetadata { } } - pub fn get_foreign_modules(&self, tcx: TyCtxt<'tcx>) -> &'tcx [ForeignModule] { + crate fn get_foreign_modules(&self, tcx: TyCtxt<'tcx>) -> &'tcx [ForeignModule] { if self.is_proc_macro_crate() { // Proc macro crates do not have any *target* foreign modules. &[] @@ -1138,7 +1136,7 @@ impl<'a, 'tcx> CrateMetadata { } } - pub fn get_dylib_dependency_formats( + crate fn get_dylib_dependency_formats( &self, tcx: TyCtxt<'tcx>, ) -> &'tcx [(CrateNum, LinkagePreference)] { @@ -1152,7 +1150,7 @@ impl<'a, 'tcx> CrateMetadata { })) } - pub fn get_missing_lang_items(&self, tcx: TyCtxt<'tcx>) -> &'tcx [lang_items::LangItem] { + crate fn get_missing_lang_items(&self, tcx: TyCtxt<'tcx>) -> &'tcx [lang_items::LangItem] { if self.is_proc_macro_crate() { // Proc macro crates do not depend on any target weak lang-items. 
&[] @@ -1163,8 +1161,8 @@ impl<'a, 'tcx> CrateMetadata { } } - pub fn get_fn_param_names(&self, id: DefIndex) -> Vec { - let param_names = match self.entry(id).kind { + crate fn get_fn_param_names(&self, id: DefIndex) -> Vec { + let param_names = match self.kind(id) { EntryKind::Fn(data) | EntryKind::ForeignFn(data) => data.decode(self).param_names, EntryKind::Method(data) => data.decode(self).fn_data.param_names, @@ -1173,7 +1171,7 @@ impl<'a, 'tcx> CrateMetadata { param_names.decode(self).collect() } - pub fn exported_symbols( + crate fn exported_symbols( &self, tcx: TyCtxt<'tcx>, ) -> Vec<(ExportedSymbol<'tcx>, SymbolExportLevel)> { @@ -1186,24 +1184,23 @@ impl<'a, 'tcx> CrateMetadata { } } - pub fn get_rendered_const(&self, id: DefIndex) -> String { - match self.entry(id).kind { + crate fn get_rendered_const(&self, id: DefIndex) -> String { + match self.kind(id) { EntryKind::Const(_, data) | EntryKind::AssocConst(_, _, data) => data.decode(self).0, _ => bug!(), } } - pub fn get_macro(&self, id: DefIndex) -> MacroDef { - let entry = self.entry(id); - match entry.kind { + crate fn get_macro(&self, id: DefIndex) -> MacroDef { + match self.kind(id) { EntryKind::MacroDef(macro_def) => macro_def.decode(self), _ => bug!(), } } crate fn is_const_fn_raw(&self, id: DefIndex) -> bool { - let constness = match self.entry(id).kind { + let constness = match self.kind(id) { EntryKind::Method(data) => data.decode(self).fn_data.constness, EntryKind::Fn(data) => data.decode(self).constness, EntryKind::Variant(..) | EntryKind::Struct(..) => hir::Constness::Const, @@ -1212,17 +1209,17 @@ impl<'a, 'tcx> CrateMetadata { constness == hir::Constness::Const } - pub fn asyncness(&self, id: DefIndex) -> hir::IsAsync { - match self.entry(id).kind { + crate fn asyncness(&self, id: DefIndex) -> hir::IsAsync { + match self.kind(id) { EntryKind::Fn(data) => data.decode(self).asyncness, EntryKind::Method(data) => data.decode(self).fn_data.asyncness, EntryKind::ForeignFn(data) => data.decode(self).asyncness, - _ => bug!("asyncness: expect functions entry."), + _ => bug!("asyncness: expected function kind"), } } - pub fn is_foreign_item(&self, id: DefIndex) -> bool { - match self.entry(id).kind { + crate fn is_foreign_item(&self, id: DefIndex) -> bool { + match self.kind(id) { EntryKind::ForeignImmStatic | EntryKind::ForeignMutStatic | EntryKind::ForeignFn(_) => true, @@ -1231,7 +1228,7 @@ impl<'a, 'tcx> CrateMetadata { } crate fn static_mutability(&self, id: DefIndex) -> Option { - match self.entry(id).kind { + match self.kind(id) { EntryKind::ImmStatic | EntryKind::ForeignImmStatic => Some(hir::MutImmutable), EntryKind::MutStatic | @@ -1240,37 +1237,28 @@ impl<'a, 'tcx> CrateMetadata { } } - pub fn fn_sig(&self, id: DefIndex, tcx: TyCtxt<'tcx>) -> ty::PolyFnSig<'tcx> { - let sig = match self.entry(id).kind { - EntryKind::Fn(data) | - EntryKind::ForeignFn(data) => data.decode(self).sig, - EntryKind::Method(data) => data.decode(self).fn_data.sig, - EntryKind::Variant(data) | - EntryKind::Struct(data, _) => data.decode(self).ctor_sig.unwrap(), - EntryKind::Closure(data) => data.decode(self).sig, - _ => bug!(), - }; - sig.decode((self, tcx)) + crate fn fn_sig(&self, id: DefIndex, tcx: TyCtxt<'tcx>) -> ty::PolyFnSig<'tcx> { + self.root.per_def.fn_sig.get(self, id).unwrap().decode((self, tcx)) } #[inline] - pub fn def_key(&self, index: DefIndex) -> DefKey { + crate fn def_key(&self, index: DefIndex) -> DefKey { let mut key = self.def_path_table.def_key(index); if self.is_proc_macro(index) { let name = 
self.raw_proc_macro(index).name(); - key.disambiguated_data.data = DefPathData::MacroNs(InternedString::intern(name)); + key.disambiguated_data.data = DefPathData::MacroNs(Symbol::intern(name)); } key } // Returns the path leading to the thing with this `id`. - pub fn def_path(&self, id: DefIndex) -> DefPath { + crate fn def_path(&self, id: DefIndex) -> DefPath { debug!("def_path(cnum={:?}, id={:?})", self.cnum, id); DefPath::make(self.cnum, id, |parent| self.def_key(parent)) } #[inline] - pub fn def_path_hash(&self, index: DefIndex) -> DefPathHash { + crate fn def_path_hash(&self, index: DefIndex) -> DefPathHash { self.def_path_table.def_path_hash(index) } @@ -1299,84 +1287,95 @@ impl<'a, 'tcx> CrateMetadata { /// /// Proc macro crates don't currently export spans, so this function does not have /// to work for them. - pub fn imported_source_files(&'a self, - local_source_map: &source_map::SourceMap) - -> ReadGuard<'a, Vec> { - { - let source_files = self.source_map_import_info.borrow(); - if !source_files.is_empty() { - return source_files; - } + fn imported_source_files( + &'a self, + local_source_map: &source_map::SourceMap, + ) -> &[cstore::ImportedSourceFile] { + self.source_map_import_info.init_locking(|| { + let external_source_map = self.root.source_map.decode(self); + + external_source_map.map(|source_file_to_import| { + // We can't reuse an existing SourceFile, so allocate a new one + // containing the information we need. + let syntax_pos::SourceFile { name, + name_was_remapped, + src_hash, + start_pos, + end_pos, + mut lines, + mut multibyte_chars, + mut non_narrow_chars, + mut normalized_pos, + name_hash, + .. } = source_file_to_import; + + let source_length = (end_pos - start_pos).to_usize(); + + // Translate line-start positions and multibyte character + // position into frame of reference local to file. + // `SourceMap::new_imported_source_file()` will then translate those + // coordinates to their new global frame of reference when the + // offset of the SourceFile is known. + for pos in &mut lines { + *pos = *pos - start_pos; + } + for mbc in &mut multibyte_chars { + mbc.pos = mbc.pos - start_pos; + } + for swc in &mut non_narrow_chars { + *swc = *swc - start_pos; + } + for np in &mut normalized_pos { + np.pos = np.pos - start_pos; + } + + let local_version = local_source_map.new_imported_source_file(name, + name_was_remapped, + self.cnum.as_u32(), + src_hash, + name_hash, + source_length, + lines, + multibyte_chars, + non_narrow_chars, + normalized_pos); + debug!("CrateMetaData::imported_source_files alloc \ + source_file {:?} original (start_pos {:?} end_pos {:?}) \ + translated (start_pos {:?} end_pos {:?})", + local_version.name, start_pos, end_pos, + local_version.start_pos, local_version.end_pos); + + cstore::ImportedSourceFile { + original_start_pos: start_pos, + original_end_pos: end_pos, + translated_source_file: local_version, + } + }).collect() + }) + } + + /// Get the `DepNodeIndex` corresponding this crate. The result of this + /// method is cached in the `dep_node_index` field. + pub(super) fn get_crate_dep_node_index(&self, tcx: TyCtxt<'tcx>) -> DepNodeIndex { + let mut dep_node_index = self.dep_node_index.load(); + + if unlikely!(dep_node_index == DepNodeIndex::INVALID) { + // We have not cached the DepNodeIndex for this upstream crate yet, + // so use the dep-graph to find it out and cache it. + // Note that multiple threads can enter this block concurrently. 
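The caching in `get_crate_dep_node_index` relies on the store being idempotent rather than on a lock. Below is a minimal standalone sketch of that idiom, not part of this patch: it uses a plain `AtomicUsize` with a sentinel in place of rustc's atomic cell and `DepNodeIndex`, and every name in it is illustrative only.

use std::sync::atomic::{AtomicUsize, Ordering};

const UNCACHED: usize = usize::MAX; // stand-in for DepNodeIndex::INVALID

struct OnceCache {
    slot: AtomicUsize,
}

impl OnceCache {
    fn new() -> Self {
        OnceCache { slot: AtomicUsize::new(UNCACHED) }
    }

    fn get_or_compute(&self, compute: impl FnOnce() -> usize) -> usize {
        let mut value = self.slot.load(Ordering::Relaxed);
        if value == UNCACHED {
            // Several threads may reach this point at once; each computes the
            // same value, so the racing stores are indistinguishable from a
            // single one.
            value = compute();
            assert!(value != UNCACHED);
            self.slot.store(value, Ordering::Relaxed);
        }
        value
    }
}

A caller would write something like `cache.get_or_compute(|| expensive_lookup())` (the closure name is hypothetical); the first call pays for the computation and later calls only read the slot.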
+ // That is fine because the DepNodeIndex remains constant + // throughout the whole compilation session, and multiple stores + // would always write the same value. + + let def_path_hash = self.def_path_hash(CRATE_DEF_INDEX); + let dep_node = def_path_hash.to_dep_node(DepKind::CrateMetadata); + + dep_node_index = tcx.dep_graph.dep_node_index_of(&dep_node); + assert!(dep_node_index != DepNodeIndex::INVALID); + self.dep_node_index.store(dep_node_index); } - // Lock the source_map_import_info to ensure this only happens once - let mut source_map_import_info = self.source_map_import_info.borrow_mut(); - - if !source_map_import_info.is_empty() { - drop(source_map_import_info); - return self.source_map_import_info.borrow(); - } - - let external_source_map = self.root.source_map.decode(self); - - let imported_source_files = external_source_map.map(|source_file_to_import| { - // We can't reuse an existing SourceFile, so allocate a new one - // containing the information we need. - let syntax_pos::SourceFile { name, - name_was_remapped, - src_hash, - start_pos, - end_pos, - mut lines, - mut multibyte_chars, - mut non_narrow_chars, - name_hash, - .. } = source_file_to_import; - - let source_length = (end_pos - start_pos).to_usize(); - - // Translate line-start positions and multibyte character - // position into frame of reference local to file. - // `SourceMap::new_imported_source_file()` will then translate those - // coordinates to their new global frame of reference when the - // offset of the SourceFile is known. - for pos in &mut lines { - *pos = *pos - start_pos; - } - for mbc in &mut multibyte_chars { - mbc.pos = mbc.pos - start_pos; - } - for swc in &mut non_narrow_chars { - *swc = *swc - start_pos; - } - - let local_version = local_source_map.new_imported_source_file(name, - name_was_remapped, - self.cnum.as_u32(), - src_hash, - name_hash, - source_length, - lines, - multibyte_chars, - non_narrow_chars); - debug!("CrateMetaData::imported_source_files alloc \ - source_file {:?} original (start_pos {:?} end_pos {:?}) \ - translated (start_pos {:?} end_pos {:?})", - local_version.name, start_pos, end_pos, - local_version.start_pos, local_version.end_pos); - - cstore::ImportedSourceFile { - original_start_pos: start_pos, - original_end_pos: end_pos, - translated_source_file: local_version, - } - }).collect(); - - *source_map_import_info = imported_source_files; - drop(source_map_import_info); - - // This shouldn't borrow twice, but there is no way to downgrade RefMut to Ref. - self.source_map_import_info.borrow() + dep_node_index } } diff --git a/src/librustc_metadata/dependency_format.rs b/src/librustc_metadata/dependency_format.rs new file mode 100644 index 0000000000..7f76a9730e --- /dev/null +++ b/src/librustc_metadata/dependency_format.rs @@ -0,0 +1,370 @@ +//! Resolution of mixing rlibs and dylibs +//! +//! When producing a final artifact, such as a dynamic library, the compiler has +//! a choice between linking an rlib or linking a dylib of all upstream +//! dependencies. The linking phase must guarantee, however, that a library only +//! show up once in the object file. For example, it is illegal for library A to +//! be statically linked to B and C in separate dylibs, and then link B and C +//! into a crate D (because library A appears twice). +//! +//! The job of this module is to calculate what format each upstream crate +//! should be used when linking each output type requested in this session. This +//! generally follows this set of rules: +//! +//! 1. 
Each library must appear exactly once in the output. +//! 2. Each rlib contains only one library (it's just an object file) +//! 3. Each dylib can contain more than one library (due to static linking), +//! and can also bring in many dynamic dependencies. +//! +//! With these constraints in mind, it's generally a very difficult problem to +//! find a solution that's not "all rlibs" or "all dylibs". I have suspicions +//! that NP-ness may come into the picture here... +//! +//! The current selection algorithm below looks mostly similar to: +//! +//! 1. If static linking is required, then require all upstream dependencies +//! to be available as rlibs. If not, generate an error. +//! 2. If static linking is requested (generating an executable), then +//! attempt to use all upstream dependencies as rlibs. If any are not +//! found, bail out and continue to step 3. +//! 3. Static linking has failed, at least one library must be dynamically +//! linked. Apply a heuristic by greedily maximizing the number of +//! dynamically linked libraries. +//! 4. Each upstream dependency available as a dynamic library is +//! registered. The dependencies all propagate, adding to a map. It is +//! possible for a dylib to add a static library as a dependency, but it +//! is illegal for two dylibs to add the same static library as a +//! dependency. The same dylib can be added twice. Additionally, it is +//! illegal to add a static dependency when it was previously found as a +//! dylib (and vice versa) +//! 5. After all dynamic dependencies have been traversed, re-traverse the +//! remaining dependencies and add them statically (if they haven't been +//! added already). +//! +//! While not perfect, this algorithm should help support use-cases such as leaf +//! dependencies being static while the larger tree of inner dependencies are +//! all dynamic. This isn't currently very well battle tested, so it will likely +//! fall short in some use cases. +//! +//! Currently, there is no way to specify the preference of linkage with a +//! particular library (other than a global dynamic/static switch). +//! Additionally, the algorithm is geared towards finding *any* solution rather +//! than finding a number of solutions (there are normally quite a few). + +use rustc::hir::def_id::CrateNum; +use rustc::middle::cstore::LinkagePreference::{self, RequireStatic, RequireDynamic}; +use rustc::middle::cstore::{self, DepKind}; +use rustc::middle::dependency_format::{DependencyList, Dependencies, Linkage}; +use rustc::session::config; +use rustc::ty::TyCtxt; +use rustc::util::nodemap::FxHashMap; +use rustc_target::spec::PanicStrategy; + +crate fn calculate(tcx: TyCtxt<'_>) -> Dependencies { + tcx.sess.crate_types.borrow().iter().map(|&ty| { + let linkage = calculate_type(tcx, ty); + verify_ok(tcx, &linkage); + (ty, linkage) + }).collect::>() +} + +fn calculate_type(tcx: TyCtxt<'_>, ty: config::CrateType) -> DependencyList { + let sess = &tcx.sess; + + if !sess.opts.output_types.should_codegen() { + return Vec::new(); + } + + let preferred_linkage = match ty { + // cdylibs must have all static dependencies. + config::CrateType::Cdylib => Linkage::Static, + + // Generating a dylib without `-C prefer-dynamic` means that we're going + // to try to eagerly statically link all dependencies. This is normally + // done for end-product dylibs, not intermediate products. 
+ config::CrateType::Dylib if !sess.opts.cg.prefer_dynamic => Linkage::Static, + config::CrateType::Dylib => Linkage::Dynamic, + + // If the global prefer_dynamic switch is turned off, or the final + // executable will be statically linked, prefer static crate linkage. + config::CrateType::Executable if !sess.opts.cg.prefer_dynamic || + sess.crt_static() => Linkage::Static, + config::CrateType::Executable => Linkage::Dynamic, + + // proc-macro crates are mostly cdylibs, but we also need metadata. + config::CrateType::ProcMacro => Linkage::Static, + + // No linkage happens with rlibs, we just needed the metadata (which we + // got long ago), so don't bother with anything. + config::CrateType::Rlib => Linkage::NotLinked, + + // staticlibs must have all static dependencies. + config::CrateType::Staticlib => Linkage::Static, + }; + + if preferred_linkage == Linkage::NotLinked { + // If the crate is not linked, there are no link-time dependencies. + return Vec::new(); + } + + if preferred_linkage == Linkage::Static { + // Attempt static linkage first. For dylibs and executables, we may be + // able to retry below with dynamic linkage. + if let Some(v) = attempt_static(tcx) { + return v; + } + + // Staticlibs, cdylibs, and static executables must have all static + // dependencies. If any are not found, generate some nice pretty errors. + if ty == config::CrateType::Cdylib || ty == config::CrateType::Staticlib || + (ty == config::CrateType::Executable && sess.crt_static() && + !sess.target.target.options.crt_static_allows_dylibs) { + for &cnum in tcx.crates().iter() { + if tcx.dep_kind(cnum).macros_only() { continue } + let src = tcx.used_crate_source(cnum); + if src.rlib.is_some() { continue } + sess.err(&format!("crate `{}` required to be available in rlib format, \ + but was not found in this form", + tcx.crate_name(cnum))); + } + return Vec::new(); + } + } + + let mut formats = FxHashMap::default(); + + // Sweep all crates for found dylibs. Add all dylibs, as well as their + // dependencies, ensuring there are no conflicts. The only valid case for a + // dependency to be relied upon twice is for both cases to rely on a dylib. + for &cnum in tcx.crates().iter() { + if tcx.dep_kind(cnum).macros_only() { continue } + let name = tcx.crate_name(cnum); + let src = tcx.used_crate_source(cnum); + if src.dylib.is_some() { + log::info!("adding dylib: {}", name); + add_library(tcx, cnum, RequireDynamic, &mut formats); + let deps = tcx.dylib_dependency_formats(cnum); + for &(depnum, style) in deps.iter() { + log::info!("adding {:?}: {}", style, tcx.crate_name(depnum)); + add_library(tcx, depnum, style, &mut formats); + } + } + } + + // Collect what we've got so far in the return vector. + let last_crate = tcx.crates().len(); + let mut ret = (1..last_crate+1).map(|cnum| { + match formats.get(&CrateNum::new(cnum)) { + Some(&RequireDynamic) => Linkage::Dynamic, + Some(&RequireStatic) => Linkage::IncludedFromDylib, + None => Linkage::NotLinked, + } + }).collect::>(); + + // Run through the dependency list again, and add any missing libraries as + // static libraries. + // + // If the crate hasn't been included yet and it's not actually required + // (e.g., it's an allocator) then we skip it here as well. 
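The dylib sweep registers every dynamic library and its dependencies through `add_library`, which rejects any plan that would pull the same crate into the output twice. A reduced sketch of that rule follows, using a plain `HashMap` and illustrative names only; it is not the patch's code, just the invariant it enforces.

use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq)]
enum Preference {
    Static,
    Dynamic,
}

fn request(
    formats: &mut HashMap<u32, Preference>,
    crate_num: u32,
    preference: Preference,
) -> Result<(), String> {
    if let Some(&previous) = formats.get(&crate_num) {
        // A repeated request is only harmless when both copies are dynamic;
        // anything else would link the same library into the output twice.
        if previous != preference || preference == Preference::Static {
            return Err(format!("crate {} would be linked more than once", crate_num));
        }
    } else {
        formats.insert(crate_num, preference);
    }
    Ok(())
}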
+ for &cnum in tcx.crates().iter() { + let src = tcx.used_crate_source(cnum); + if src.dylib.is_none() && + !formats.contains_key(&cnum) && + tcx.dep_kind(cnum) == DepKind::Explicit { + assert!(src.rlib.is_some() || src.rmeta.is_some()); + log::info!("adding staticlib: {}", tcx.crate_name(cnum)); + add_library(tcx, cnum, RequireStatic, &mut formats); + ret[cnum.as_usize() - 1] = Linkage::Static; + } + } + + // We've gotten this far because we're emitting some form of a final + // artifact which means that we may need to inject dependencies of some + // form. + // + // Things like allocators and panic runtimes may not have been activated + // quite yet, so do so here. + activate_injected_dep(*sess.injected_panic_runtime.get(), &mut ret, + &|cnum| tcx.is_panic_runtime(cnum)); + + // When dylib B links to dylib A, then when using B we must also link to A. + // It could be the case, however, that the rlib for A is present (hence we + // found metadata), but the dylib for A has since been removed. + // + // For situations like this, we perform one last pass over the dependencies, + // making sure that everything is available in the requested format. + for (cnum, kind) in ret.iter().enumerate() { + let cnum = CrateNum::new(cnum + 1); + let src = tcx.used_crate_source(cnum); + match *kind { + Linkage::NotLinked | + Linkage::IncludedFromDylib => {} + Linkage::Static if src.rlib.is_some() => continue, + Linkage::Dynamic if src.dylib.is_some() => continue, + kind => { + let kind = match kind { + Linkage::Static => "rlib", + _ => "dylib", + }; + sess.err(&format!("crate `{}` required to be available in {} format, \ + but was not found in this form", + tcx.crate_name(cnum), kind)); + } + } + } + + ret +} + +fn add_library( + tcx: TyCtxt<'_>, + cnum: CrateNum, + link: LinkagePreference, + m: &mut FxHashMap, +) { + match m.get(&cnum) { + Some(&link2) => { + // If the linkages differ, then we'd have two copies of the library + // if we continued linking. If the linkages are both static, then we + // would also have two copies of the library (static from two + // different locations). + // + // This error is probably a little obscure, but I imagine that it + // can be refined over time. + if link2 != link || link == RequireStatic { + tcx.sess.struct_err(&format!("cannot satisfy dependencies so `{}` only \ + shows up once", tcx.crate_name(cnum))) + .help("having upstream crates all available in one format \ + will likely make this go away") + .emit(); + } + } + None => { m.insert(cnum, link); } + } +} + +fn attempt_static(tcx: TyCtxt<'_>) -> Option { + let sess = &tcx.sess; + let crates = cstore::used_crates(tcx, RequireStatic); + if !crates.iter().by_ref().all(|&(_, ref p)| p.is_some()) { + return None + } + + // All crates are available in an rlib format, so we're just going to link + // everything in explicitly so long as it's actually required. + let last_crate = tcx.crates().len(); + let mut ret = (1..last_crate+1).map(|cnum| { + if tcx.dep_kind(CrateNum::new(cnum)) == DepKind::Explicit { + Linkage::Static + } else { + Linkage::NotLinked + } + }).collect::>(); + + // Our allocator/panic runtime may not have been linked above if it wasn't + // explicitly linked, which is the case for any injected dependency. Handle + // that here and activate them. + activate_injected_dep(*sess.injected_panic_runtime.get(), &mut ret, + &|cnum| tcx.is_panic_runtime(cnum)); + + Some(ret) +} + +// Given a list of how to link upstream dependencies so far, ensure that an +// injected dependency is activated. 
This will not do anything if one was +// transitively included already (e.g., via a dylib or explicitly so). +// +// If an injected dependency was not found then we're guaranteed the +// metadata::creader module has injected that dependency (not listed as +// a required dependency) in one of the session's field. If this field is not +// set then this compilation doesn't actually need the dependency and we can +// also skip this step entirely. +fn activate_injected_dep(injected: Option, + list: &mut DependencyList, + replaces_injected: &dyn Fn(CrateNum) -> bool) { + for (i, slot) in list.iter().enumerate() { + let cnum = CrateNum::new(i + 1); + if !replaces_injected(cnum) { + continue + } + if *slot != Linkage::NotLinked { + return + } + } + if let Some(injected) = injected { + let idx = injected.as_usize() - 1; + assert_eq!(list[idx], Linkage::NotLinked); + list[idx] = Linkage::Static; + } +} + +// After the linkage for a crate has been determined we need to verify that +// there's only going to be one allocator in the output. +fn verify_ok(tcx: TyCtxt<'_>, list: &[Linkage]) { + let sess = &tcx.sess; + if list.len() == 0 { + return + } + let mut panic_runtime = None; + for (i, linkage) in list.iter().enumerate() { + if let Linkage::NotLinked = *linkage { + continue + } + let cnum = CrateNum::new(i + 1); + + if tcx.is_panic_runtime(cnum) { + if let Some((prev, _)) = panic_runtime { + let prev_name = tcx.crate_name(prev); + let cur_name = tcx.crate_name(cnum); + sess.err(&format!("cannot link together two \ + panic runtimes: {} and {}", + prev_name, cur_name)); + } + panic_runtime = Some((cnum, tcx.panic_strategy(cnum))); + } + } + + // If we found a panic runtime, then we know by this point that it's the + // only one, but we perform validation here that all the panic strategy + // compilation modes for the whole DAG are valid. + if let Some((cnum, found_strategy)) = panic_runtime { + let desired_strategy = sess.panic_strategy(); + + // First up, validate that our selected panic runtime is indeed exactly + // our same strategy. + if found_strategy != desired_strategy { + sess.err(&format!("the linked panic runtime `{}` is \ + not compiled with this crate's \ + panic strategy `{}`", + tcx.crate_name(cnum), + desired_strategy.desc())); + } + + // Next up, verify that all other crates are compatible with this panic + // strategy. If the dep isn't linked, we ignore it, and if our strategy + // is abort then it's compatible with everything. Otherwise all crates' + // panic strategy must match our own. 
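The loop that follows applies the compatibility rule spelled out in the comment above. As a standalone predicate it looks roughly like the sketch below; `PanicStrategy` here is a two-variant stand-in for the real `rustc_target` type, and the function name is invented for illustration.

#[derive(Clone, Copy, PartialEq, Eq)]
enum PanicStrategy {
    Unwind,
    Abort,
}

fn panic_strategies_compatible(
    desired: PanicStrategy,
    found: PanicStrategy,
    crate_is_compiler_builtins: bool,
) -> bool {
    // `-C panic=abort` accepts crates built with either strategy, and
    // `compiler_builtins` is always exempt; otherwise the strategies must match.
    desired == PanicStrategy::Abort || crate_is_compiler_builtins || desired == found
}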
+ for (i, linkage) in list.iter().enumerate() { + if let Linkage::NotLinked = *linkage { + continue + } + if desired_strategy == PanicStrategy::Abort { + continue + } + let cnum = CrateNum::new(i + 1); + let found_strategy = tcx.panic_strategy(cnum); + let is_compiler_builtins = tcx.is_compiler_builtins(cnum); + if is_compiler_builtins || desired_strategy == found_strategy { + continue + } + + sess.err(&format!("the crate `{}` is compiled with the \ + panic strategy `{}` which is \ + incompatible with this crate's \ + strategy of `{}`", + tcx.crate_name(cnum), + found_strategy.desc(), + desired_strategy.desc())); + } + } +} diff --git a/src/librustc_metadata/dynamic_lib.rs b/src/librustc_metadata/dynamic_lib.rs index 4c279361ff..3871eb89f7 100644 --- a/src/librustc_metadata/dynamic_lib.rs +++ b/src/librustc_metadata/dynamic_lib.rs @@ -32,30 +32,6 @@ impl DynamicLibrary { } } - /// Loads a dynamic library into the global namespace (RTLD_GLOBAL on Unix) - /// and do it now (don't use RTLD_LAZY on Unix). - pub fn open_global_now(filename: &Path) -> Result { - let maybe_library = dl::open_global_now(filename.as_os_str()); - match maybe_library { - Err(err) => Err(err), - Ok(handle) => Ok(DynamicLibrary { handle }) - } - } - - /// Returns the environment variable for this process's dynamic library - /// search path - pub fn envvar() -> &'static str { - if cfg!(windows) { - "PATH" - } else if cfg!(target_os = "macos") { - "DYLD_LIBRARY_PATH" - } else if cfg!(target_os = "haiku") { - "LIBRARY_PATH" - } else { - "LD_LIBRARY_PATH" - } - } - /// Accesses the value at the symbol of the dynamic library. pub unsafe fn symbol(&self, symbol: &str) -> Result<*mut T, String> { // This function should have a lifetime constraint of 'a on @@ -83,7 +59,7 @@ mod dl { use std::ptr; use std::str; - pub fn open(filename: Option<&OsStr>) -> Result<*mut u8, String> { + pub(super) fn open(filename: Option<&OsStr>) -> Result<*mut u8, String> { check_for_errors_in(|| { unsafe { match filename { @@ -94,13 +70,6 @@ mod dl { }) } - pub fn open_global_now(filename: &OsStr) -> Result<*mut u8, String> { - check_for_errors_in(|| unsafe { - let s = CString::new(filename.as_bytes()).unwrap(); - libc::dlopen(s.as_ptr(), libc::RTLD_GLOBAL | libc::RTLD_NOW) as *mut u8 - }) - } - unsafe fn open_external(filename: &OsStr) -> *mut u8 { let s = CString::new(filename.as_bytes()).unwrap(); libc::dlopen(s.as_ptr(), libc::RTLD_LAZY) as *mut u8 @@ -110,8 +79,8 @@ mod dl { libc::dlopen(ptr::null(), libc::RTLD_LAZY) as *mut u8 } - pub fn check_for_errors_in(f: F) -> Result where - F: FnOnce() -> T, + fn check_for_errors_in(f: F) -> Result + where F: FnOnce() -> T, { use std::sync::{Mutex, Once}; static INIT: Once = Once::new(); @@ -139,14 +108,15 @@ mod dl { } } - pub unsafe fn symbol(handle: *mut u8, - symbol: *const libc::c_char) - -> Result<*mut u8, String> { + pub(super) unsafe fn symbol( + handle: *mut u8, + symbol: *const libc::c_char, + ) -> Result<*mut u8, String> { check_for_errors_in(|| { libc::dlsym(handle as *mut libc::c_void, symbol) as *mut u8 }) } - pub unsafe fn close(handle: *mut u8) { + pub(super) unsafe fn close(handle: *mut u8) { libc::dlclose(handle as *mut libc::c_void); () } } @@ -178,11 +148,7 @@ mod dl { fn FreeLibrary(handle: HMODULE) -> BOOL; } - pub fn open_global_now(filename: &OsStr) -> Result<*mut u8, String> { - open(Some(filename)) - } - - pub fn open(filename: Option<&OsStr>) -> Result<*mut u8, String> { + pub(super) fn open(filename: Option<&OsStr>) -> Result<*mut u8, String> { // disable "dll load failed" 
error dialog. let prev_error_mode = unsafe { // SEM_FAILCRITICALERRORS 0x01 @@ -225,14 +191,15 @@ mod dl { result } - pub unsafe fn symbol(handle: *mut u8, - symbol: *const c_char) - -> Result<*mut u8, String> { + pub(super) unsafe fn symbol( + handle: *mut u8, + symbol: *const c_char, + ) -> Result<*mut u8, String> { let ptr = GetProcAddress(handle as HMODULE, symbol) as *mut u8; ptr_result(ptr) } - pub unsafe fn close(handle: *mut u8) { + pub(super) unsafe fn close(handle: *mut u8) { FreeLibrary(handle as HMODULE); } diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index 1e7c0829a2..f2b0cfa530 100644 --- a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -1,21 +1,21 @@ -use crate::index::Index; use crate::schema::*; +use crate::table::{FixedSizeEncoding, PerDefTable}; use rustc::middle::cstore::{LinkagePreference, NativeLibrary, EncodedMetadata, ForeignModule}; use rustc::hir::def::CtorKind; use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefIndex, DefId, LocalDefId, LOCAL_CRATE}; -use rustc::hir::GenericParamKind; +use rustc::hir::{GenericParamKind, AnonConst}; use rustc::hir::map::definitions::DefPathTable; use rustc_data_structures::fingerprint::Fingerprint; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc_index::vec::IndexVec; use rustc::middle::dependency_format::Linkage; use rustc::middle::exported_symbols::{ExportedSymbol, SymbolExportLevel, metadata_symbol_name}; use rustc::middle::lang_items; use rustc::mir::{self, interpret}; use rustc::traits::specialization_graph; -use rustc::ty::{self, Ty, TyCtxt, ReprOptions, SymbolName}; +use rustc::ty::{self, Ty, TyCtxt, SymbolName}; use rustc::ty::codec::{self as ty_codec, TyEncoder}; use rustc::ty::layout::VariantIdx; @@ -23,15 +23,16 @@ use rustc::session::config::{self, CrateType}; use rustc::util::nodemap::FxHashMap; use rustc_data_structures::stable_hasher::StableHasher; +use rustc_data_structures::sync::Lrc; use rustc_serialize::{Encodable, Encoder, SpecializedEncoder, opaque}; use std::hash::Hash; +use std::num::NonZeroUsize; use std::path::Path; -use rustc_data_structures::sync::Lrc; use std::u32; use syntax::ast; use syntax::attr; -use syntax::ext::proc_macro::is_proc_macro_attr; +use syntax::expand::is_proc_macro_attr; use syntax::source_map::Spanned; use syntax::symbol::{kw, sym, Ident, Symbol}; use syntax_pos::{self, FileName, SourceFile, Span}; @@ -42,11 +43,11 @@ use rustc::hir::itemlikevisit::ItemLikeVisitor; use rustc::hir::intravisit::{Visitor, NestedVisitorMap}; use rustc::hir::intravisit; -pub struct EncodeContext<'tcx> { +struct EncodeContext<'tcx> { opaque: opaque::Encoder, - pub tcx: TyCtxt<'tcx>, + tcx: TyCtxt<'tcx>, - entries_index: Index<'tcx>, + per_def: PerDefTables<'tcx>, lazy_state: LazyState, type_shorthands: FxHashMap, usize>, @@ -59,6 +60,30 @@ pub struct EncodeContext<'tcx> { source_file_cache: Lrc, } +#[derive(Default)] +struct PerDefTables<'tcx> { + kind: PerDefTable>>, + visibility: PerDefTable>, + span: PerDefTable>, + attributes: PerDefTable>, + children: PerDefTable>, + stability: PerDefTable>, + deprecation: PerDefTable>, + + ty: PerDefTable>>, + fn_sig: PerDefTable>>, + impl_trait_ref: PerDefTable>>, + inherent_impls: PerDefTable>, + variances: PerDefTable>, + generics: PerDefTable>, + predicates: PerDefTable>>, + predicates_defined_on: PerDefTable>>, + super_predicates: PerDefTable>>, + + mir: PerDefTable>>, + promoted_mir: PerDefTable>>>, +} + macro_rules! 
encoder_methods { ($($name:ident($ty:ty);)*) => { $(fn $name(&mut self, value: $ty) -> Result<(), Self::Error> { @@ -97,13 +122,13 @@ impl<'tcx> Encoder for EncodeContext<'tcx> { } } -impl<'tcx, T> SpecializedEncoder> for EncodeContext<'tcx> { +impl<'tcx, T: Encodable> SpecializedEncoder> for EncodeContext<'tcx> { fn specialized_encode(&mut self, lazy: &Lazy) -> Result<(), Self::Error> { self.emit_lazy_distance(*lazy) } } -impl<'tcx, T> SpecializedEncoder> for EncodeContext<'tcx> { +impl<'tcx, T: Encodable> SpecializedEncoder> for EncodeContext<'tcx> { fn specialized_encode(&mut self, lazy: &Lazy<[T]>) -> Result<(), Self::Error> { self.emit_usize(lazy.meta)?; if lazy.meta == 0 { @@ -113,6 +138,15 @@ impl<'tcx, T> SpecializedEncoder> for EncodeContext<'tcx> { } } +impl<'tcx, T> SpecializedEncoder>> for EncodeContext<'tcx> + where Option: FixedSizeEncoding, +{ + fn specialized_encode(&mut self, lazy: &Lazy>) -> Result<(), Self::Error> { + self.emit_usize(lazy.meta)?; + self.emit_lazy_distance(*lazy) + } +} + impl<'tcx> SpecializedEncoder for EncodeContext<'tcx> { #[inline] fn specialized_encode(&mut self, cnum: &CrateNum) -> Result<(), Self::Error> { @@ -163,6 +197,13 @@ impl<'tcx> SpecializedEncoder for EncodeContext<'tcx> { return TAG_INVALID_SPAN.encode(self) } + // HACK(eddyb) there's no way to indicate which crate a Span is coming + // from right now, so decoding would fail to find the SourceFile if + // it's not local to the crate the Span is found in. + if self.source_file_cache.is_imported() { + return TAG_INVALID_SPAN.encode(self) + } + TAG_VALID_SPAN.encode(self)?; span.lo.encode(self)?; @@ -212,11 +253,11 @@ impl<'tcx> SpecializedEncoder for EncodeContext<'tcx> { } } -impl<'tcx> SpecializedEncoder> for EncodeContext<'tcx> { +impl<'tcx> SpecializedEncoder<&'tcx [(ty::Predicate<'tcx>, Span)]> for EncodeContext<'tcx> { fn specialized_encode(&mut self, - predicates: &ty::GenericPredicates<'tcx>) + predicates: &&'tcx [(ty::Predicate<'tcx>, Span)]) -> Result<(), Self::Error> { - ty_codec::encode_predicates(self, predicates, |ecx| &mut ecx.predicate_shorthands) + ty_codec::encode_spanned_predicates(self, predicates, |ecx| &mut ecx.predicate_shorthands) } } @@ -257,7 +298,7 @@ impl EncodeContentsForLazy for T { } } -impl EncodeContentsForLazy<[T]> for I +impl EncodeContentsForLazy<[T]> for I where I: IntoIterator, I::Item: EncodeContentsForLazy, { @@ -266,15 +307,28 @@ impl EncodeContentsForLazy<[T]> for I } } +// Shorthand for `$self.$tables.$table.set($key, $self.lazy($value))`, which would +// normally need extra variables to avoid errors about multiple mutable borrows. +macro_rules! 
record { + ($self:ident.$tables:ident.$table:ident[$key:expr] <- $value:expr) => {{ + { + let value = $value; + let lazy = $self.lazy(value); + $self.$tables.$table.set($key, lazy); + } + }} +} + impl<'tcx> EncodeContext<'tcx> { fn emit_lazy_distance( &mut self, lazy: Lazy, ) -> Result<(), ::Error> { - let min_end = lazy.position + T::min_size(lazy.meta); + let min_end = lazy.position.get() + T::min_size(lazy.meta); let distance = match self.lazy_state { LazyState::NoNode => bug!("emit_lazy_distance: outside of a metadata node"), LazyState::NodeStart(start) => { + let start = start.get(); assert!(min_end <= start); start - min_end } @@ -284,10 +338,10 @@ impl<'tcx> EncodeContext<'tcx> { "make sure that the calls to `lazy*` \ are in the same order as the metadata fields", ); - lazy.position - last_min_end + lazy.position.get() - last_min_end.get() } }; - self.lazy_state = LazyState::Previous(min_end); + self.lazy_state = LazyState::Previous(NonZeroUsize::new(min_end).unwrap()); self.emit_usize(distance) } @@ -295,42 +349,22 @@ impl<'tcx> EncodeContext<'tcx> { &mut self, value: impl EncodeContentsForLazy, ) -> Lazy { - let pos = self.position(); + let pos = NonZeroUsize::new(self.position()).unwrap(); assert_eq!(self.lazy_state, LazyState::NoNode); self.lazy_state = LazyState::NodeStart(pos); let meta = value.encode_contents_for_lazy(self); self.lazy_state = LazyState::NoNode; - assert!(pos + ::min_size(meta) <= self.position()); + assert!(pos.get() + ::min_size(meta) <= self.position()); Lazy::from_position_and_meta(pos, meta) } - /// Emit the data for a `DefId` to the metadata. The function to - /// emit the data is `op`, and it will be given `data` as - /// arguments. This `record` function will call `op` to generate - /// the `Entry` (which may point to other encoded information) - /// and will then record the `Lazy` for use in the index. - // FIXME(eddyb) remove this. - pub fn record(&mut self, - id: DefId, - op: impl FnOnce(&mut Self, DATA) -> Entry<'tcx>, - data: DATA) - { - assert!(id.is_local()); - - let entry = op(self, data); - let entry = self.lazy(entry); - self.entries_index.record(id, entry); - } - fn encode_info_for_items(&mut self) { let krate = self.tcx.hir().krate(); let vis = Spanned { span: syntax_pos::DUMMY_SP, node: hir::VisibilityKind::Public }; - self.record(DefId::local(CRATE_DEF_INDEX), - EncodeContext::encode_info_for_mod, - (hir::CRATE_HIR_ID, &krate.module, &krate.attrs, &vis)); + self.encode_info_for_mod(hir::CRATE_HIR_ID, &krate.module, &krate.attrs, &vis); krate.visit_all_item_likes(&mut self.as_deep_visitor()); for macro_def in &krate.exported_macros { self.visit_macro_def(macro_def); @@ -352,6 +386,7 @@ impl<'tcx> EncodeContext<'tcx> { .filter(|source_file| { // No need to re-export imported source_files, as any downstream // crate will import them from their original source. + // FIXME(eddyb) the `Span` encoding should take that into account. 
!source_file.is_imported() }) .map(|source_file| { @@ -368,9 +403,9 @@ impl<'tcx> EncodeContext<'tcx> { let mut adapted = (**source_file).clone(); adapted.name = Path::new(&working_dir).join(name).into(); adapted.name_hash = { - let mut hasher: StableHasher = StableHasher::new(); + let mut hasher: StableHasher = StableHasher::new(); adapted.name.hash(&mut hasher); - hasher.finish() + hasher.finish::() }; Lrc::new(adapted) }, @@ -474,8 +509,29 @@ impl<'tcx> EncodeContext<'tcx> { i = self.position(); - let entries_index = self.entries_index.write_index(&mut self.opaque); - let entries_index_bytes = self.position() - i; + let per_def = LazyPerDefTables { + kind: self.per_def.kind.encode(&mut self.opaque), + visibility: self.per_def.visibility.encode(&mut self.opaque), + span: self.per_def.span.encode(&mut self.opaque), + attributes: self.per_def.attributes.encode(&mut self.opaque), + children: self.per_def.children.encode(&mut self.opaque), + stability: self.per_def.stability.encode(&mut self.opaque), + deprecation: self.per_def.deprecation.encode(&mut self.opaque), + + ty: self.per_def.ty.encode(&mut self.opaque), + fn_sig: self.per_def.fn_sig.encode(&mut self.opaque), + impl_trait_ref: self.per_def.impl_trait_ref.encode(&mut self.opaque), + inherent_impls: self.per_def.inherent_impls.encode(&mut self.opaque), + variances: self.per_def.variances.encode(&mut self.opaque), + generics: self.per_def.generics.encode(&mut self.opaque), + predicates: self.per_def.predicates.encode(&mut self.opaque), + predicates_defined_on: self.per_def.predicates_defined_on.encode(&mut self.opaque), + super_predicates: self.per_def.super_predicates.encode(&mut self.opaque), + + mir: self.per_def.mir.encode(&mut self.opaque), + promoted_mir: self.per_def.promoted_mir.encode(&mut self.opaque), + }; + let per_def_bytes = self.position() - i; // Encode the proc macro data i = self.position(); @@ -534,7 +590,7 @@ impl<'tcx> EncodeContext<'tcx> { impls, exported_symbols, interpret_alloc_index, - entries_index, + per_def, }); let total_bytes = self.position(); @@ -559,7 +615,7 @@ impl<'tcx> EncodeContext<'tcx> { println!(" def-path table bytes: {}", def_path_table_bytes); println!(" proc-macro-data-bytes: {}", proc_macro_data_bytes); println!(" item bytes: {}", item_bytes); - println!(" entries index bytes: {}", entries_index_bytes); + println!(" per-def table bytes: {}", per_def_bytes); println!(" zero bytes: {}", zero_bytes); println!(" total bytes: {}", total_bytes); } @@ -569,23 +625,21 @@ impl<'tcx> EncodeContext<'tcx> { } impl EncodeContext<'tcx> { - fn encode_variances_of(&mut self, def_id: DefId) -> Lazy<[ty::Variance]> { + fn encode_variances_of(&mut self, def_id: DefId) { debug!("EncodeContext::encode_variances_of({:?})", def_id); - let tcx = self.tcx; - self.lazy(&tcx.variances_of(def_id)[..]) + record!(self.per_def.variances[def_id] <- &self.tcx.variances_of(def_id)[..]); } - fn encode_item_type(&mut self, def_id: DefId) -> Lazy> { - let tcx = self.tcx; - let ty = tcx.type_of(def_id); - debug!("EncodeContext::encode_item_type({:?}) => {:?}", def_id, ty); - self.lazy(ty) + fn encode_item_type(&mut self, def_id: DefId) { + debug!("EncodeContext::encode_item_type({:?})", def_id); + record!(self.per_def.ty[def_id] <- self.tcx.type_of(def_id)); } fn encode_enum_variant_info( &mut self, - (enum_did, index): (DefId, VariantIdx), - ) -> Entry<'tcx> { + enum_did: DefId, + index: VariantIdx, + ) { let tcx = self.tcx; let def = tcx.adt_def(enum_did); let variant = &def.variants[index]; @@ -595,65 +649,54 @@ impl 
EncodeContext<'tcx> { let data = VariantData { ctor_kind: variant.ctor_kind, discr: variant.discr, - // FIXME(eddyb) deduplicate these with `encode_enum_variant_ctor`. ctor: variant.ctor_def_id.map(|did| did.index), - ctor_sig: if variant.ctor_kind == CtorKind::Fn { - variant.ctor_def_id.map(|ctor_def_id| self.lazy(&tcx.fn_sig(ctor_def_id))) - } else { - None - }, }; let enum_id = tcx.hir().as_local_hir_id(enum_did).unwrap(); let enum_vis = &tcx.hir().expect_item(enum_id).vis; - Entry { - kind: EntryKind::Variant(self.lazy(data)), - visibility: self.lazy(ty::Visibility::from_hir(enum_vis, enum_id, tcx)), - span: self.lazy(tcx.def_span(def_id)), - attributes: self.encode_attributes(&tcx.get_attrs(def_id)), - children: self.lazy(variant.fields.iter().map(|f| { - assert!(f.did.is_local()); - f.did.index - })), - stability: self.encode_stability(def_id), - deprecation: self.encode_deprecation(def_id), - - ty: Some(self.encode_item_type(def_id)), - inherent_impls: Lazy::empty(), - variances: if variant.ctor_kind == CtorKind::Fn { - self.encode_variances_of(def_id) - } else { - Lazy::empty() - }, - generics: Some(self.encode_generics(def_id)), - predicates: Some(self.encode_predicates(def_id)), - predicates_defined_on: None, - - mir: self.encode_optimized_mir(def_id), - promoted_mir: self.encode_promoted_mir(def_id), + record!(self.per_def.kind[def_id] <- EntryKind::Variant(self.lazy(data))); + record!(self.per_def.visibility[def_id] <- + ty::Visibility::from_hir(enum_vis, enum_id, self.tcx)); + record!(self.per_def.span[def_id] <- self.tcx.def_span(def_id)); + record!(self.per_def.attributes[def_id] <- &self.tcx.get_attrs(def_id)[..]); + record!(self.per_def.children[def_id] <- variant.fields.iter().map(|f| { + assert!(f.did.is_local()); + f.did.index + })); + self.encode_stability(def_id); + self.encode_deprecation(def_id); + self.encode_item_type(def_id); + if variant.ctor_kind == CtorKind::Fn { + // FIXME(eddyb) encode signature only in `encode_enum_variant_ctor`. + if let Some(ctor_def_id) = variant.ctor_def_id { + record!(self.per_def.fn_sig[def_id] <- tcx.fn_sig(ctor_def_id)); + } + // FIXME(eddyb) is this ever used? + self.encode_variances_of(def_id); } + self.encode_generics(def_id); + self.encode_predicates(def_id); + self.encode_optimized_mir(def_id); + self.encode_promoted_mir(def_id); } fn encode_enum_variant_ctor( &mut self, - (enum_did, index): (DefId, VariantIdx), - ) -> Entry<'tcx> { + enum_did: DefId, + index: VariantIdx, + ) { let tcx = self.tcx; let def = tcx.adt_def(enum_did); let variant = &def.variants[index]; let def_id = variant.ctor_def_id.unwrap(); debug!("EncodeContext::encode_enum_variant_ctor({:?})", def_id); + // FIXME(eddyb) encode only the `CtorKind` for constructors. 
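The `record!` calls in these encoder methods write each property of a definition into its own per-def table rather than assembling one monolithic `Entry` per def. A minimal illustration of that column-oriented layout is sketched below, with plain `HashMap`s standing in for the real lazily-encoded tables; none of these names are rustc API.

use std::collections::HashMap;

// One sparse "column" per property, keyed by the definition's index, so a
// reader can decode just the property it needs instead of a whole record.
#[derive(Default)]
struct SketchPerDefTables {
    kind: HashMap<u32, String>,
    span: HashMap<u32, (u32, u32)>,
    ty: HashMap<u32, String>,
}

impl SketchPerDefTables {
    fn record_kind(&mut self, def_index: u32, kind: &str) {
        self.kind.insert(def_index, kind.to_string());
    }

    fn get_kind(&self, def_index: u32) -> Option<&str> {
        // Absent entries are simply missing from the column; callers supply a
        // default (the real decoder uses `Lazy::empty()` for list-like columns).
        self.kind.get(&def_index).map(|s| s.as_str())
    }
}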
let data = VariantData { ctor_kind: variant.ctor_kind, discr: variant.discr, ctor: Some(def_id.index), - ctor_sig: if variant.ctor_kind == CtorKind::Fn { - Some(self.lazy(tcx.fn_sig(def_id))) - } else { - None - } }; // Variant constructors have the same visibility as the parent enums, unless marked as @@ -665,35 +708,29 @@ impl EncodeContext<'tcx> { ctor_vis = ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX)); } - Entry { - kind: EntryKind::Variant(self.lazy(data)), - visibility: self.lazy(ctor_vis), - span: self.lazy(tcx.def_span(def_id)), - attributes: Lazy::empty(), - children: Lazy::empty(), - stability: self.encode_stability(def_id), - deprecation: self.encode_deprecation(def_id), - - ty: Some(self.encode_item_type(def_id)), - inherent_impls: Lazy::empty(), - variances: if variant.ctor_kind == CtorKind::Fn { - self.encode_variances_of(def_id) - } else { - Lazy::empty() - }, - generics: Some(self.encode_generics(def_id)), - predicates: Some(self.encode_predicates(def_id)), - predicates_defined_on: None, - - mir: self.encode_optimized_mir(def_id), - promoted_mir: self.encode_promoted_mir(def_id), + record!(self.per_def.kind[def_id] <- EntryKind::Variant(self.lazy(data))); + record!(self.per_def.visibility[def_id] <- ctor_vis); + record!(self.per_def.span[def_id] <- self.tcx.def_span(def_id)); + self.encode_stability(def_id); + self.encode_deprecation(def_id); + self.encode_item_type(def_id); + if variant.ctor_kind == CtorKind::Fn { + record!(self.per_def.fn_sig[def_id] <- tcx.fn_sig(def_id)); + self.encode_variances_of(def_id); } + self.encode_generics(def_id); + self.encode_predicates(def_id); + self.encode_optimized_mir(def_id); + self.encode_promoted_mir(def_id); } fn encode_info_for_mod( &mut self, - (id, md, attrs, vis): (hir::HirId, &hir::Mod, &[ast::Attribute], &hir::Visibility), - ) -> Entry<'tcx> { + id: hir::HirId, + md: &hir::Mod, + attrs: &[ast::Attribute], + vis: &hir::Visibility, + ) { let tcx = self.tcx; let def_id = tcx.hir().local_def_id(id); debug!("EncodeContext::encode_info_for_mod({:?})", def_id); @@ -705,33 +742,23 @@ impl EncodeContext<'tcx> { }, }; - Entry { - kind: EntryKind::Mod(self.lazy(data)), - visibility: self.lazy(ty::Visibility::from_hir(vis, id, tcx)), - span: self.lazy(tcx.def_span(def_id)), - attributes: self.encode_attributes(attrs), - children: self.lazy(md.item_ids.iter().map(|item_id| { - tcx.hir().local_def_id(item_id.id).index - })), - stability: self.encode_stability(def_id), - deprecation: self.encode_deprecation(def_id), - - ty: None, - inherent_impls: Lazy::empty(), - variances: Lazy::empty(), - generics: None, - predicates: None, - predicates_defined_on: None, - - mir: None, - promoted_mir: None, - } + record!(self.per_def.kind[def_id] <- EntryKind::Mod(self.lazy(data))); + record!(self.per_def.visibility[def_id] <- ty::Visibility::from_hir(vis, id, self.tcx)); + record!(self.per_def.span[def_id] <- self.tcx.def_span(def_id)); + record!(self.per_def.attributes[def_id] <- attrs); + record!(self.per_def.children[def_id] <- md.item_ids.iter().map(|item_id| { + tcx.hir().local_def_id(item_id.id).index + })); + self.encode_stability(def_id); + self.encode_deprecation(def_id); } fn encode_field( &mut self, - (adt_def_id, variant_index, field_index): (DefId, VariantIdx, usize), - ) -> Entry<'tcx> { + adt_def_id: DefId, + variant_index: VariantIdx, + field_index: usize, + ) { let tcx = self.tcx; let variant = &tcx.adt_def(adt_def_id).variants[variant_index]; let field = &variant.fields[field_index]; @@ -742,28 +769,18 @@ impl 
EncodeContext<'tcx> { let variant_id = tcx.hir().as_local_hir_id(variant.def_id).unwrap(); let variant_data = tcx.hir().expect_variant_data(variant_id); - Entry { - kind: EntryKind::Field, - visibility: self.lazy(field.vis), - span: self.lazy(tcx.def_span(def_id)), - attributes: self.encode_attributes(&variant_data.fields()[field_index].attrs), - children: Lazy::empty(), - stability: self.encode_stability(def_id), - deprecation: self.encode_deprecation(def_id), - - ty: Some(self.encode_item_type(def_id)), - inherent_impls: Lazy::empty(), - variances: Lazy::empty(), - generics: Some(self.encode_generics(def_id)), - predicates: Some(self.encode_predicates(def_id)), - predicates_defined_on: None, - - mir: None, - promoted_mir: None, - } + record!(self.per_def.kind[def_id] <- EntryKind::Field); + record!(self.per_def.visibility[def_id] <- field.vis); + record!(self.per_def.span[def_id] <- self.tcx.def_span(def_id)); + record!(self.per_def.attributes[def_id] <- &variant_data.fields()[field_index].attrs); + self.encode_stability(def_id); + self.encode_deprecation(def_id); + self.encode_item_type(def_id); + self.encode_generics(def_id); + self.encode_predicates(def_id); } - fn encode_struct_ctor(&mut self, (adt_def_id, def_id): (DefId, DefId)) -> Entry<'tcx> { + fn encode_struct_ctor(&mut self, adt_def_id: DefId, def_id: DefId) { debug!("EncodeContext::encode_struct_ctor({:?})", def_id); let tcx = self.tcx; let adt_def = tcx.adt_def(adt_def_id); @@ -773,11 +790,6 @@ impl EncodeContext<'tcx> { ctor_kind: variant.ctor_kind, discr: variant.discr, ctor: Some(def_id.index), - ctor_sig: if variant.ctor_kind == CtorKind::Fn { - Some(self.lazy(tcx.fn_sig(def_id))) - } else { - None - } }; let struct_id = tcx.hir().as_local_hir_id(adt_def_id).unwrap(); @@ -797,52 +809,44 @@ impl EncodeContext<'tcx> { ctor_vis = ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX)); } - let repr_options = get_repr_options(tcx, adt_def_id); - - Entry { - kind: EntryKind::Struct(self.lazy(data), repr_options), - visibility: self.lazy(ctor_vis), - span: self.lazy(tcx.def_span(def_id)), - attributes: Lazy::empty(), - children: Lazy::empty(), - stability: self.encode_stability(def_id), - deprecation: self.encode_deprecation(def_id), - - ty: Some(self.encode_item_type(def_id)), - inherent_impls: Lazy::empty(), - variances: if variant.ctor_kind == CtorKind::Fn { - self.encode_variances_of(def_id) - } else { - Lazy::empty() - }, - generics: Some(self.encode_generics(def_id)), - predicates: Some(self.encode_predicates(def_id)), - predicates_defined_on: None, - - mir: self.encode_optimized_mir(def_id), - promoted_mir: self.encode_promoted_mir(def_id), + record!(self.per_def.kind[def_id] <- EntryKind::Struct(self.lazy(data), adt_def.repr)); + record!(self.per_def.visibility[def_id] <- ctor_vis); + record!(self.per_def.span[def_id] <- self.tcx.def_span(def_id)); + self.encode_stability(def_id); + self.encode_deprecation(def_id); + self.encode_item_type(def_id); + if variant.ctor_kind == CtorKind::Fn { + record!(self.per_def.fn_sig[def_id] <- tcx.fn_sig(def_id)); + self.encode_variances_of(def_id); } + self.encode_generics(def_id); + self.encode_predicates(def_id); + self.encode_optimized_mir(def_id); + self.encode_promoted_mir(def_id); } - fn encode_generics(&mut self, def_id: DefId) -> Lazy { + fn encode_generics(&mut self, def_id: DefId) { debug!("EncodeContext::encode_generics({:?})", def_id); - let tcx = self.tcx; - self.lazy(tcx.generics_of(def_id)) + record!(self.per_def.generics[def_id] <- self.tcx.generics_of(def_id)); } - 
fn encode_predicates(&mut self, def_id: DefId) -> Lazy> { + fn encode_predicates(&mut self, def_id: DefId) { debug!("EncodeContext::encode_predicates({:?})", def_id); - let tcx = self.tcx; - self.lazy(&*tcx.predicates_of(def_id)) + record!(self.per_def.predicates[def_id] <- self.tcx.predicates_of(def_id)); } - fn encode_predicates_defined_on(&mut self, def_id: DefId) -> Lazy> { + fn encode_predicates_defined_on(&mut self, def_id: DefId) { debug!("EncodeContext::encode_predicates_defined_on({:?})", def_id); - let tcx = self.tcx; - self.lazy(&*tcx.predicates_defined_on(def_id)) + record!(self.per_def.predicates_defined_on[def_id] <- + self.tcx.predicates_defined_on(def_id)) } - fn encode_info_for_trait_item(&mut self, def_id: DefId) -> Entry<'tcx> { + fn encode_super_predicates(&mut self, def_id: DefId) { + debug!("EncodeContext::encode_super_predicates({:?})", def_id); + record!(self.per_def.super_predicates[def_id] <- self.tcx.super_predicates_of(def_id)); + } + + fn encode_info_for_trait_item(&mut self, def_id: DefId) { debug!("EncodeContext::encode_info_for_trait_item({:?})", def_id); let tcx = self.tcx; @@ -859,23 +863,16 @@ impl EncodeContext<'tcx> { span_bug!(ast_item.span, "traits cannot have final items"), }; - let kind = match trait_item.kind { + record!(self.per_def.kind[def_id] <- match trait_item.kind { ty::AssocKind::Const => { - let const_qualif = - if let hir::TraitItemKind::Const(_, Some(body)) = ast_item.node { - self.const_qualif(0, body) - } else { - ConstQualif { mir: 0, ast_promotable: false } - }; - let rendered = hir::print::to_string(self.tcx.hir(), |s| s.print_trait_item(ast_item)); let rendered_const = self.lazy(RenderedConst(rendered)); - EntryKind::AssocConst(container, const_qualif, rendered_const) + EntryKind::AssocConst(container, ConstQualif { mir: 0 }, rendered_const) } ty::AssocKind::Method => { - let fn_data = if let hir::TraitItemKind::Method(method_sig, m) = &ast_item.node { + let fn_data = if let hir::TraitItemKind::Method(m_sig, m) = &ast_item.kind { let param_names = match *m { hir::TraitMethod::Required(ref names) => { self.encode_fn_param_names(names) @@ -885,10 +882,9 @@ impl EncodeContext<'tcx> { } }; FnData { - asyncness: method_sig.header.asyncness, + asyncness: m_sig.header.asyncness, constness: hir::Constness::NotConst, param_names, - sig: self.lazy(&tcx.fn_sig(def_id)), } } else { bug!() @@ -901,44 +897,32 @@ impl EncodeContext<'tcx> { } ty::AssocKind::Type => EntryKind::AssocType(container), ty::AssocKind::OpaqueTy => span_bug!(ast_item.span, "opaque type in trait"), - }; - - Entry { - kind, - visibility: self.lazy(trait_item.vis), - span: self.lazy(ast_item.span), - attributes: self.encode_attributes(&ast_item.attrs), - children: Lazy::empty(), - stability: self.encode_stability(def_id), - deprecation: self.encode_deprecation(def_id), - - ty: match trait_item.kind { - ty::AssocKind::Const | - ty::AssocKind::Method => { - Some(self.encode_item_type(def_id)) + }); + record!(self.per_def.visibility[def_id] <- trait_item.vis); + record!(self.per_def.span[def_id] <- ast_item.span); + record!(self.per_def.attributes[def_id] <- &ast_item.attrs); + self.encode_stability(def_id); + self.encode_deprecation(def_id); + match trait_item.kind { + ty::AssocKind::Const | + ty::AssocKind::Method => { + self.encode_item_type(def_id); + } + ty::AssocKind::Type => { + if trait_item.defaultness.has_value() { + self.encode_item_type(def_id); } - ty::AssocKind::Type => { - if trait_item.defaultness.has_value() { - Some(self.encode_item_type(def_id)) - } else { - 
None - } - } - ty::AssocKind::OpaqueTy => unreachable!(), - }, - inherent_impls: Lazy::empty(), - variances: if trait_item.kind == ty::AssocKind::Method { - self.encode_variances_of(def_id) - } else { - Lazy::empty() - }, - generics: Some(self.encode_generics(def_id)), - predicates: Some(self.encode_predicates(def_id)), - predicates_defined_on: None, - - mir: self.encode_optimized_mir(def_id), - promoted_mir: self.encode_promoted_mir(def_id), + } + ty::AssocKind::OpaqueTy => unreachable!(), } + if trait_item.kind == ty::AssocKind::Method { + record!(self.per_def.fn_sig[def_id] <- tcx.fn_sig(def_id)); + self.encode_variances_of(def_id); + } + self.encode_generics(def_id); + self.encode_predicates(def_id); + self.encode_optimized_mir(def_id); + self.encode_promoted_mir(def_id); } fn metadata_output_only(&self) -> bool { @@ -946,14 +930,7 @@ impl EncodeContext<'tcx> { !self.tcx.sess.opts.output_types.should_codegen() } - fn const_qualif(&self, mir: u8, body_id: hir::BodyId) -> ConstQualif { - let body_owner_def_id = self.tcx.hir().body_owner_def_id(body_id); - let ast_promotable = self.tcx.const_is_rvalue_promotable_to_static(body_owner_def_id); - - ConstQualif { mir, ast_promotable } - } - - fn encode_info_for_impl_item(&mut self, def_id: DefId) -> Entry<'tcx> { + fn encode_info_for_impl_item(&mut self, def_id: DefId) { debug!("EncodeContext::encode_info_for_impl_item({:?})", def_id); let tcx = self.tcx; @@ -968,25 +945,24 @@ impl EncodeContext<'tcx> { span_bug!(ast_item.span, "impl items always have values (currently)"), }; - let kind = match impl_item.kind { + record!(self.per_def.kind[def_id] <- match impl_item.kind { ty::AssocKind::Const => { - if let hir::ImplItemKind::Const(_, body_id) = ast_item.node { + if let hir::ImplItemKind::Const(_, body_id) = ast_item.kind { let mir = self.tcx.at(ast_item.span).mir_const_qualif(def_id).0; EntryKind::AssocConst(container, - self.const_qualif(mir, body_id), + ConstQualif { mir }, self.encode_rendered_const_for_body(body_id)) } else { bug!() } } ty::AssocKind::Method => { - let fn_data = if let hir::ImplItemKind::Method(ref sig, body) = ast_item.node { + let fn_data = if let hir::ImplItemKind::Method(ref sig, body) = ast_item.kind { FnData { asyncness: sig.header.asyncness, constness: sig.header.constness, param_names: self.encode_fn_param_names_for_body(body), - sig: self.lazy(&tcx.fn_sig(def_id)), } } else { bug!() @@ -999,46 +975,36 @@ impl EncodeContext<'tcx> { } ty::AssocKind::OpaqueTy => EntryKind::AssocOpaqueTy(container), ty::AssocKind::Type => EntryKind::AssocType(container) - }; - - let mir = - match ast_item.node { - hir::ImplItemKind::Const(..) => true, - hir::ImplItemKind::Method(ref sig, _) => { - let generics = self.tcx.generics_of(def_id); - let needs_inline = (generics.requires_monomorphization(self.tcx) || - tcx.codegen_fn_attrs(def_id).requests_inline()) && - !self.metadata_output_only(); - let is_const_fn = sig.header.constness == hir::Constness::Const; - let always_encode_mir = self.tcx.sess.opts.debugging_opts.always_encode_mir; - needs_inline || is_const_fn || always_encode_mir - }, - hir::ImplItemKind::OpaqueTy(..) | - hir::ImplItemKind::TyAlias(..) 
=> false, - }; - - Entry { - kind, - visibility: self.lazy(impl_item.vis), - span: self.lazy(ast_item.span), - attributes: self.encode_attributes(&ast_item.attrs), - children: Lazy::empty(), - stability: self.encode_stability(def_id), - deprecation: self.encode_deprecation(def_id), - - ty: Some(self.encode_item_type(def_id)), - inherent_impls: Lazy::empty(), - variances: if impl_item.kind == ty::AssocKind::Method { - self.encode_variances_of(def_id) - } else { - Lazy::empty() + }); + record!(self.per_def.visibility[def_id] <- impl_item.vis); + record!(self.per_def.span[def_id] <- ast_item.span); + record!(self.per_def.attributes[def_id] <- &ast_item.attrs); + self.encode_stability(def_id); + self.encode_deprecation(def_id); + self.encode_item_type(def_id); + if impl_item.kind == ty::AssocKind::Method { + record!(self.per_def.fn_sig[def_id] <- tcx.fn_sig(def_id)); + self.encode_variances_of(def_id); + } + self.encode_generics(def_id); + self.encode_predicates(def_id); + let mir = match ast_item.kind { + hir::ImplItemKind::Const(..) => true, + hir::ImplItemKind::Method(ref sig, _) => { + let generics = self.tcx.generics_of(def_id); + let needs_inline = (generics.requires_monomorphization(self.tcx) || + tcx.codegen_fn_attrs(def_id).requests_inline()) && + !self.metadata_output_only(); + let is_const_fn = sig.header.constness == hir::Constness::Const; + let always_encode_mir = self.tcx.sess.opts.debugging_opts.always_encode_mir; + needs_inline || is_const_fn || always_encode_mir }, - generics: Some(self.encode_generics(def_id)), - predicates: Some(self.encode_predicates(def_id)), - predicates_defined_on: None, - - mir: if mir { self.encode_optimized_mir(def_id) } else { None }, - promoted_mir: if mir { self.encode_promoted_mir(def_id) } else { None }, + hir::ImplItemKind::OpaqueTy(..) | + hir::ImplItemKind::TyAlias(..) => false, + }; + if mir { + self.encode_optimized_mir(def_id); + self.encode_promoted_mir(def_id); } } @@ -1047,7 +1013,7 @@ impl EncodeContext<'tcx> { self.tcx.dep_graph.with_ignore(|| { let body = self.tcx.hir().body(body_id); self.lazy(body.params.iter().map(|arg| { - match arg.pat.node { + match arg.pat.kind { PatKind::Binding(_, _, ident, _) => ident.name, _ => kw::Invalid, } @@ -1059,51 +1025,44 @@ impl EncodeContext<'tcx> { self.lazy(param_names.iter().map(|ident| ident.name)) } - fn encode_optimized_mir(&mut self, def_id: DefId) -> Option>> { + fn encode_optimized_mir(&mut self, def_id: DefId) { debug!("EntryBuilder::encode_mir({:?})", def_id); if self.tcx.mir_keys(LOCAL_CRATE).contains(&def_id) { - let mir = self.tcx.optimized_mir(def_id); - Some(self.lazy(mir)) - } else { - None + record!(self.per_def.mir[def_id] <- self.tcx.optimized_mir(def_id)); } } - fn encode_promoted_mir( - &mut self, - def_id: DefId, - ) -> Option>>> { + fn encode_promoted_mir(&mut self, def_id: DefId) { debug!("EncodeContext::encode_promoted_mir({:?})", def_id); if self.tcx.mir_keys(LOCAL_CRATE).contains(&def_id) { - let promoted = self.tcx.promoted_mir(def_id); - Some(self.lazy(promoted)) - } else { - None + record!(self.per_def.promoted_mir[def_id] <- self.tcx.promoted_mir(def_id)); } } // Encodes the inherent implementations of a structure, enumeration, or trait. 
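The `mir` flag computed above decides whether a body's optimized MIR is serialized into the crate metadata. The same decision, reduced to a plain predicate over booleans, is sketched below; this is illustrative only and not a rustc function.

fn should_encode_mir(
    requires_monomorphization: bool,
    requests_inline: bool,
    metadata_output_only: bool,
    is_const_fn: bool,
    always_encode_mir: bool,
) -> bool {
    // Generic or #[inline] functions need their MIR downstream, unless we are
    // only emitting metadata; const fns and -Zalways-encode-mir always qualify.
    let needs_inline = (requires_monomorphization || requests_inline) && !metadata_output_only;
    needs_inline || is_const_fn || always_encode_mir
}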
- fn encode_inherent_implementations(&mut self, def_id: DefId) -> Lazy<[DefIndex]> { + fn encode_inherent_implementations(&mut self, def_id: DefId) { debug!("EncodeContext::encode_inherent_implementations({:?})", def_id); let implementations = self.tcx.inherent_impls(def_id); - if implementations.is_empty() { - Lazy::empty() - } else { - self.lazy(implementations.iter().map(|&def_id| { + if !implementations.is_empty() { + record!(self.per_def.inherent_impls[def_id] <- implementations.iter().map(|&def_id| { assert!(def_id.is_local()); def_id.index - })) + })); } } - fn encode_stability(&mut self, def_id: DefId) -> Option> { + fn encode_stability(&mut self, def_id: DefId) { debug!("EncodeContext::encode_stability({:?})", def_id); - self.tcx.lookup_stability(def_id).map(|stab| self.lazy(stab)) + if let Some(stab) = self.tcx.lookup_stability(def_id) { + record!(self.per_def.stability[def_id] <- stab) + } } - fn encode_deprecation(&mut self, def_id: DefId) -> Option> { + fn encode_deprecation(&mut self, def_id: DefId) { debug!("EncodeContext::encode_deprecation({:?})", def_id); - self.tcx.lookup_deprecation(def_id).map(|depr| self.lazy(depr)) + if let Some(depr) = self.tcx.lookup_deprecation(def_id) { + record!(self.per_def.deprecation[def_id] <- depr); + } } fn encode_rendered_const_for_body(&mut self, body_id: hir::BodyId) -> Lazy { @@ -1113,18 +1072,18 @@ impl EncodeContext<'tcx> { self.lazy(rendered_const) } - fn encode_info_for_item(&mut self, (def_id, item): (DefId, &'tcx hir::Item)) -> Entry<'tcx> { + fn encode_info_for_item(&mut self, def_id: DefId, item: &'tcx hir::Item) { let tcx = self.tcx; debug!("EncodeContext::encode_info_for_item({:?})", def_id); - let kind = match item.node { + record!(self.per_def.kind[def_id] <- match item.kind { hir::ItemKind::Static(_, hir::MutMutable, _) => EntryKind::MutStatic, hir::ItemKind::Static(_, hir::MutImmutable, _) => EntryKind::ImmStatic, hir::ItemKind::Const(_, body_id) => { - let mir = tcx.at(item.span).mir_const_qualif(def_id).0; + let mir = self.tcx.at(item.span).mir_const_qualif(def_id).0; EntryKind::Const( - self.const_qualif(mir, body_id), + ConstQualif { mir }, self.encode_rendered_const_for_body(body_id) ) } @@ -1133,53 +1092,51 @@ impl EncodeContext<'tcx> { asyncness: header.asyncness, constness: header.constness, param_names: self.encode_fn_param_names_for_body(body), - sig: self.lazy(tcx.fn_sig(def_id)), }; EntryKind::Fn(self.lazy(data)) } hir::ItemKind::Mod(ref m) => { - return self.encode_info_for_mod((item.hir_id, m, &item.attrs, &item.vis)); + return self.encode_info_for_mod(item.hir_id, m, &item.attrs, &item.vis); } hir::ItemKind::ForeignMod(_) => EntryKind::ForeignMod, hir::ItemKind::GlobalAsm(..) => EntryKind::GlobalAsm, hir::ItemKind::TyAlias(..) => EntryKind::Type, hir::ItemKind::OpaqueTy(..) => EntryKind::OpaqueTy, - hir::ItemKind::Enum(..) => EntryKind::Enum(get_repr_options(tcx, def_id)), + hir::ItemKind::Enum(..) 
=> EntryKind::Enum(self.tcx.adt_def(def_id).repr), hir::ItemKind::Struct(ref struct_def, _) => { - let variant = tcx.adt_def(def_id).non_enum_variant(); + let adt_def = self.tcx.adt_def(def_id); + let variant = adt_def.non_enum_variant(); // Encode def_ids for each field and method // for methods, write all the stuff get_trait_method // needs to know - let ctor = struct_def.ctor_hir_id() - .map(|ctor_hir_id| tcx.hir().local_def_id(ctor_hir_id).index); - - let repr_options = get_repr_options(tcx, def_id); + let ctor = struct_def.ctor_hir_id().map(|ctor_hir_id| { + self.tcx.hir().local_def_id(ctor_hir_id).index + }); EntryKind::Struct(self.lazy(VariantData { ctor_kind: variant.ctor_kind, discr: variant.discr, ctor, - ctor_sig: None, - }), repr_options) + }), adt_def.repr) } hir::ItemKind::Union(..) => { - let variant = tcx.adt_def(def_id).non_enum_variant(); - let repr_options = get_repr_options(tcx, def_id); + let adt_def = self.tcx.adt_def(def_id); + let variant = adt_def.non_enum_variant(); EntryKind::Union(self.lazy(VariantData { ctor_kind: variant.ctor_kind, discr: variant.discr, ctor: None, - ctor_sig: None, - }), repr_options) + }), adt_def.repr) } - hir::ItemKind::Impl(_, polarity, defaultness, ..) => { - let trait_ref = tcx.impl_trait_ref(def_id); + hir::ItemKind::Impl(_, _, defaultness, ..) => { + let trait_ref = self.tcx.impl_trait_ref(def_id); + let polarity = self.tcx.impl_polarity(def_id); let parent = if let Some(trait_ref) = trait_ref { - let trait_def = tcx.trait_def(trait_ref.def_id); - trait_def.ancestors(tcx, def_id).nth(1).and_then(|node| { + let trait_def = self.tcx.trait_def(trait_ref.def_id); + trait_def.ancestors(self.tcx, def_id).nth(1).and_then(|node| { match node { specialization_graph::Node::Impl(parent) => Some(parent), _ => None, @@ -1193,8 +1150,8 @@ impl EncodeContext<'tcx> { // "unsized info", else just store None let coerce_unsized_info = trait_ref.and_then(|t| { - if Some(t.def_id) == tcx.lang_items().coerce_unsized_trait() { - Some(tcx.at(item.span).coerce_unsized_info(def_id)) + if Some(t.def_id) == self.tcx.lang_items().coerce_unsized_trait() { + Some(self.tcx.at(item.span).coerce_unsized_info(def_id)) } else { None } @@ -1205,35 +1162,130 @@ impl EncodeContext<'tcx> { defaultness, parent_impl: parent, coerce_unsized_info, - trait_ref: trait_ref.map(|trait_ref| self.lazy(trait_ref)), }; EntryKind::Impl(self.lazy(data)) } hir::ItemKind::Trait(..) => { - let trait_def = tcx.trait_def(def_id); + let trait_def = self.tcx.trait_def(def_id); let data = TraitData { unsafety: trait_def.unsafety, paren_sugar: trait_def.paren_sugar, - has_auto_impl: tcx.trait_is_auto(def_id), + has_auto_impl: self.tcx.trait_is_auto(def_id), is_marker: trait_def.is_marker, - super_predicates: self.lazy(&*tcx.super_predicates_of(def_id)), }; EntryKind::Trait(self.lazy(data)) } - hir::ItemKind::TraitAlias(..) => { - let data = TraitAliasData { - super_predicates: self.lazy(&*tcx.super_predicates_of(def_id)), - }; - - EntryKind::TraitAlias(self.lazy(data)) - } + hir::ItemKind::TraitAlias(..) => EntryKind::TraitAlias, hir::ItemKind::ExternCrate(_) | hir::ItemKind::Use(..) => bug!("cannot encode info for item {:?}", item), - }; + }); + record!(self.per_def.visibility[def_id] <- + ty::Visibility::from_hir(&item.vis, item.hir_id, tcx)); + record!(self.per_def.span[def_id] <- item.span); + record!(self.per_def.attributes[def_id] <- &item.attrs); + // FIXME(eddyb) there should be a nicer way to do this. 
+ match item.kind { + hir::ItemKind::ForeignMod(ref fm) => record!(self.per_def.children[def_id] <- + fm.items + .iter() + .map(|foreign_item| tcx.hir().local_def_id( + foreign_item.hir_id).index) + ), + hir::ItemKind::Enum(..) => record!(self.per_def.children[def_id] <- + self.tcx.adt_def(def_id).variants.iter().map(|v| { + assert!(v.def_id.is_local()); + v.def_id.index + }) + ), + hir::ItemKind::Struct(..) | + hir::ItemKind::Union(..) => record!(self.per_def.children[def_id] <- + self.tcx.adt_def(def_id).non_enum_variant().fields.iter().map(|f| { + assert!(f.did.is_local()); + f.did.index + }) + ), + hir::ItemKind::Impl(..) | + hir::ItemKind::Trait(..) => { + let associated_item_def_ids = self.tcx.associated_item_def_ids(def_id); + record!(self.per_def.children[def_id] <- + associated_item_def_ids.iter().map(|&def_id| { + assert!(def_id.is_local()); + def_id.index + }) + ); + } + _ => {} + } + self.encode_stability(def_id); + self.encode_deprecation(def_id); + match item.kind { + hir::ItemKind::Static(..) | + hir::ItemKind::Const(..) | + hir::ItemKind::Fn(..) | + hir::ItemKind::TyAlias(..) | + hir::ItemKind::OpaqueTy(..) | + hir::ItemKind::Enum(..) | + hir::ItemKind::Struct(..) | + hir::ItemKind::Union(..) | + hir::ItemKind::Impl(..) => self.encode_item_type(def_id), + _ => {} + } + if let hir::ItemKind::Fn(..) = item.kind { + record!(self.per_def.fn_sig[def_id] <- tcx.fn_sig(def_id)); + } + if let hir::ItemKind::Impl(..) = item.kind { + if let Some(trait_ref) = self.tcx.impl_trait_ref(def_id) { + record!(self.per_def.impl_trait_ref[def_id] <- trait_ref); + } + } + self.encode_inherent_implementations(def_id); + match item.kind { + hir::ItemKind::Enum(..) | + hir::ItemKind::Struct(..) | + hir::ItemKind::Union(..) | + hir::ItemKind::Fn(..) => self.encode_variances_of(def_id), + _ => {} + } + match item.kind { + hir::ItemKind::Static(..) | + hir::ItemKind::Const(..) | + hir::ItemKind::Fn(..) | + hir::ItemKind::TyAlias(..) | + hir::ItemKind::Enum(..) | + hir::ItemKind::Struct(..) | + hir::ItemKind::Union(..) | + hir::ItemKind::Impl(..) | + hir::ItemKind::OpaqueTy(..) | + hir::ItemKind::Trait(..) | + hir::ItemKind::TraitAlias(..) => { + self.encode_generics(def_id); + self.encode_predicates(def_id); + } + _ => {} + } + // The only time that `predicates_defined_on` is used (on + // an external item) is for traits, during chalk lowering, + // so only encode it in that case as an efficiency + // hack. (No reason not to expand it in the future if + // necessary.) + match item.kind { + hir::ItemKind::Trait(..) | + hir::ItemKind::TraitAlias(..) => { + self.encode_predicates_defined_on(def_id); + } + _ => {} // not *wrong* for other kinds of items, but not needed + } + match item.kind { + hir::ItemKind::Trait(..) | + hir::ItemKind::TraitAlias(..) => { + self.encode_super_predicates(def_id); + } + _ => {} + } - let mir = match item.node { + let mir = match item.kind { hir::ItemKind::Static(..) | hir::ItemKind::Const(..) => true, hir::ItemKind::Fn(_, header, ..) => { let generics = tcx.generics_of(def_id); @@ -1246,188 +1298,50 @@ impl EncodeContext<'tcx> { } _ => false, }; - - Entry { - kind, - visibility: self.lazy(ty::Visibility::from_hir(&item.vis, item.hir_id, tcx)), - span: self.lazy(item.span), - attributes: self.encode_attributes(&item.attrs), - children: match item.node { - hir::ItemKind::ForeignMod(ref fm) => { - self.lazy(fm.items - .iter() - .map(|foreign_item| tcx.hir().local_def_id( - foreign_item.hir_id).index)) - } - hir::ItemKind::Enum(..) 
=> { - let def = self.tcx.adt_def(def_id); - self.lazy(def.variants.iter().map(|v| { - assert!(v.def_id.is_local()); - v.def_id.index - })) - } - hir::ItemKind::Struct(..) | - hir::ItemKind::Union(..) => { - let def = self.tcx.adt_def(def_id); - self.lazy(def.non_enum_variant().fields.iter().map(|f| { - assert!(f.did.is_local()); - f.did.index - })) - } - hir::ItemKind::Impl(..) | - hir::ItemKind::Trait(..) => { - self.lazy(tcx.associated_item_def_ids(def_id).iter().map(|&def_id| { - assert!(def_id.is_local()); - def_id.index - })) - } - _ => Lazy::empty(), - }, - stability: self.encode_stability(def_id), - deprecation: self.encode_deprecation(def_id), - - ty: match item.node { - hir::ItemKind::Static(..) | - hir::ItemKind::Const(..) | - hir::ItemKind::Fn(..) | - hir::ItemKind::TyAlias(..) | - hir::ItemKind::OpaqueTy(..) | - hir::ItemKind::Enum(..) | - hir::ItemKind::Struct(..) | - hir::ItemKind::Union(..) | - hir::ItemKind::Impl(..) => Some(self.encode_item_type(def_id)), - _ => None, - }, - inherent_impls: self.encode_inherent_implementations(def_id), - variances: match item.node { - hir::ItemKind::Enum(..) | - hir::ItemKind::Struct(..) | - hir::ItemKind::Union(..) | - hir::ItemKind::Fn(..) => self.encode_variances_of(def_id), - _ => Lazy::empty(), - }, - generics: match item.node { - hir::ItemKind::Static(..) | - hir::ItemKind::Const(..) | - hir::ItemKind::Fn(..) | - hir::ItemKind::TyAlias(..) | - hir::ItemKind::Enum(..) | - hir::ItemKind::Struct(..) | - hir::ItemKind::Union(..) | - hir::ItemKind::Impl(..) | - hir::ItemKind::OpaqueTy(..) | - hir::ItemKind::Trait(..) => Some(self.encode_generics(def_id)), - hir::ItemKind::TraitAlias(..) => Some(self.encode_generics(def_id)), - _ => None, - }, - predicates: match item.node { - hir::ItemKind::Static(..) | - hir::ItemKind::Const(..) | - hir::ItemKind::Fn(..) | - hir::ItemKind::TyAlias(..) | - hir::ItemKind::Enum(..) | - hir::ItemKind::Struct(..) | - hir::ItemKind::Union(..) | - hir::ItemKind::Impl(..) | - hir::ItemKind::OpaqueTy(..) | - hir::ItemKind::Trait(..) | - hir::ItemKind::TraitAlias(..) => Some(self.encode_predicates(def_id)), - _ => None, - }, - - // The only time that `predicates_defined_on` is used (on - // an external item) is for traits, during chalk lowering, - // so only encode it in that case as an efficiency - // hack. (No reason not to expand it in the future if - // necessary.) - predicates_defined_on: match item.node { - hir::ItemKind::Trait(..) | - hir::ItemKind::TraitAlias(..) 
=> Some(self.encode_predicates_defined_on(def_id)),
-                _ => None, // not *wrong* for other kinds of items, but not needed
-            },
-
-            mir: if mir { self.encode_optimized_mir(def_id) } else { None },
-            promoted_mir: if mir { self.encode_promoted_mir(def_id) } else { None },
+        if mir {
+            self.encode_optimized_mir(def_id);
+            self.encode_promoted_mir(def_id);
         }
     }
 
     /// Serialize the text of exported macros
-    fn encode_info_for_macro_def(&mut self, macro_def: &hir::MacroDef) -> Entry<'tcx> {
+    fn encode_info_for_macro_def(&mut self, macro_def: &hir::MacroDef) {
         use syntax::print::pprust;
 
         let def_id = self.tcx.hir().local_def_id(macro_def.hir_id);
-        Entry {
-            kind: EntryKind::MacroDef(self.lazy(MacroDef {
-                body: pprust::tts_to_string(macro_def.body.clone()),
-                legacy: macro_def.legacy,
-            })),
-            visibility: self.lazy(ty::Visibility::Public),
-            span: self.lazy(macro_def.span),
-            attributes: self.encode_attributes(&macro_def.attrs),
-            stability: self.encode_stability(def_id),
-            deprecation: self.encode_deprecation(def_id),
-
-            children: Lazy::empty(),
-            ty: None,
-            inherent_impls: Lazy::empty(),
-            variances: Lazy::empty(),
-            generics: None,
-            predicates: None,
-            predicates_defined_on: None,
-            mir: None,
-            promoted_mir: None,
-        }
+        record!(self.per_def.kind[def_id] <- EntryKind::MacroDef(self.lazy(MacroDef {
+            body: pprust::tts_to_string(macro_def.body.clone()),
+            legacy: macro_def.legacy,
+        })));
+        record!(self.per_def.visibility[def_id] <- ty::Visibility::Public);
+        record!(self.per_def.span[def_id] <- macro_def.span);
+        record!(self.per_def.attributes[def_id] <- &macro_def.attrs);
+        self.encode_stability(def_id);
+        self.encode_deprecation(def_id);
     }
 
     fn encode_info_for_generic_param(
         &mut self,
         def_id: DefId,
-        entry_kind: EntryKind<'tcx>,
+        kind: EntryKind<'tcx>,
         encode_type: bool,
-    ) -> Entry<'tcx> {
-        let tcx = self.tcx;
-        Entry {
-            kind: entry_kind,
-            visibility: self.lazy(ty::Visibility::Public),
-            span: self.lazy(tcx.def_span(def_id)),
-            attributes: Lazy::empty(),
-            children: Lazy::empty(),
-            stability: None,
-            deprecation: None,
-            ty: if encode_type { Some(self.encode_item_type(def_id)) } else { None },
-            inherent_impls: Lazy::empty(),
-            variances: Lazy::empty(),
-            generics: None,
-            predicates: None,
-            predicates_defined_on: None,
-
-            mir: None,
-            promoted_mir: None,
+    ) {
+        record!(self.per_def.kind[def_id] <- kind);
+        record!(self.per_def.visibility[def_id] <- ty::Visibility::Public);
+        record!(self.per_def.span[def_id] <- self.tcx.def_span(def_id));
+        if encode_type {
+            self.encode_item_type(def_id);
         }
     }
 
-    fn encode_info_for_ty_param(
-        &mut self,
-        (def_id, encode_type): (DefId, bool),
-    ) -> Entry<'tcx> {
-        debug!("EncodeContext::encode_info_for_ty_param({:?})", def_id);
-        self.encode_info_for_generic_param(def_id, EntryKind::TypeParam, encode_type)
-    }
-
-    fn encode_info_for_const_param(
-        &mut self,
-        def_id: DefId,
-    ) -> Entry<'tcx> {
-        debug!("EncodeContext::encode_info_for_const_param({:?})", def_id);
-        self.encode_info_for_generic_param(def_id, EntryKind::ConstParam, true)
-    }
-
-    fn encode_info_for_closure(&mut self, def_id: DefId) -> Entry<'tcx> {
+    fn encode_info_for_closure(&mut self, def_id: DefId) {
         debug!("EncodeContext::encode_info_for_closure({:?})", def_id);
-        let tcx = self.tcx;
-        let tables = self.tcx.typeck_tables_of(def_id);
+        // NOTE(eddyb) `tcx.type_of(def_id)` isn't used because it's fully generic,
+        // including on the signature, which is inferred in `typeck_tables_of`.
let hir_id = self.tcx.hir().as_local_hir_id(def_id).unwrap(); - let kind = match tables.node_type(hir_id).sty { + let ty = self.tcx.typeck_tables_of(def_id).node_type(hir_id); + + record!(self.per_def.kind[def_id] <- match ty.kind { ty::Generator(def_id, ..) => { let layout = self.tcx.generator_layout(def_id); let data = GeneratorData { @@ -1436,67 +1350,37 @@ impl EncodeContext<'tcx> { EntryKind::Generator(self.lazy(data)) } - ty::Closure(def_id, substs) => { - let sig = substs.closure_sig(def_id, self.tcx); - let data = ClosureData { sig: self.lazy(sig) }; - EntryKind::Closure(self.lazy(data)) - } + ty::Closure(..) => EntryKind::Closure, - _ => bug!("closure that is neither generator nor closure") - }; - - Entry { - kind, - visibility: self.lazy(ty::Visibility::Public), - span: self.lazy(tcx.def_span(def_id)), - attributes: self.encode_attributes(&tcx.get_attrs(def_id)), - children: Lazy::empty(), - stability: None, - deprecation: None, - - ty: Some(self.encode_item_type(def_id)), - inherent_impls: Lazy::empty(), - variances: Lazy::empty(), - generics: Some(self.encode_generics(def_id)), - predicates: None, - predicates_defined_on: None, - - mir: self.encode_optimized_mir(def_id), - promoted_mir: self.encode_promoted_mir(def_id), + _ => bug!("closure that is neither generator nor closure"), + }); + record!(self.per_def.visibility[def_id] <- ty::Visibility::Public); + record!(self.per_def.span[def_id] <- self.tcx.def_span(def_id)); + record!(self.per_def.attributes[def_id] <- &self.tcx.get_attrs(def_id)[..]); + self.encode_item_type(def_id); + if let ty::Closure(def_id, substs) = ty.kind { + record!(self.per_def.fn_sig[def_id] <- substs.as_closure().sig(def_id, self.tcx)); } + self.encode_generics(def_id); + self.encode_optimized_mir(def_id); + self.encode_promoted_mir(def_id); } - fn encode_info_for_anon_const(&mut self, def_id: DefId) -> Entry<'tcx> { + fn encode_info_for_anon_const(&mut self, def_id: DefId) { debug!("EncodeContext::encode_info_for_anon_const({:?})", def_id); - let tcx = self.tcx; - let id = tcx.hir().as_local_hir_id(def_id).unwrap(); - let body_id = tcx.hir().body_owned_by(id); + let id = self.tcx.hir().as_local_hir_id(def_id).unwrap(); + let body_id = self.tcx.hir().body_owned_by(id); let const_data = self.encode_rendered_const_for_body(body_id); - let mir = tcx.mir_const_qualif(def_id).0; + let mir = self.tcx.mir_const_qualif(def_id).0; - Entry { - kind: EntryKind::Const(self.const_qualif(mir, body_id), const_data), - visibility: self.lazy(ty::Visibility::Public), - span: self.lazy(tcx.def_span(def_id)), - attributes: Lazy::empty(), - children: Lazy::empty(), - stability: None, - deprecation: None, - - ty: Some(self.encode_item_type(def_id)), - inherent_impls: Lazy::empty(), - variances: Lazy::empty(), - generics: Some(self.encode_generics(def_id)), - predicates: Some(self.encode_predicates(def_id)), - predicates_defined_on: None, - - mir: self.encode_optimized_mir(def_id), - promoted_mir: self.encode_promoted_mir(def_id), - } - } - - fn encode_attributes(&mut self, attrs: &[ast::Attribute]) -> Lazy<[ast::Attribute]> { - self.lazy(attrs) + record!(self.per_def.kind[def_id] <- EntryKind::Const(ConstQualif { mir }, const_data)); + record!(self.per_def.visibility[def_id] <- ty::Visibility::Public); + record!(self.per_def.span[def_id] <- self.tcx.def_span(def_id)); + self.encode_item_type(def_id); + self.encode_generics(def_id); + self.encode_predicates(def_id); + self.encode_optimized_mir(def_id); + self.encode_promoted_mir(def_id); } fn encode_native_libraries(&mut 
self) -> Lazy<[NativeLibrary]> { @@ -1534,6 +1418,7 @@ impl EncodeContext<'tcx> { let dep = CrateDep { name: self.tcx.original_crate_name(cnum), hash: self.tcx.crate_hash(cnum), + host_hash: self.tcx.crate_host_hash(cnum), kind: self.tcx.dep_kind(cnum), extra_filename: self.tcx.extra_filename(cnum), }; @@ -1652,69 +1537,63 @@ impl EncodeContext<'tcx> { } fn encode_dylib_dependency_formats(&mut self) -> Lazy<[Option]> { - match self.tcx.sess.dependency_formats.borrow().get(&config::CrateType::Dylib) { - Some(arr) => { - self.lazy(arr.iter().map(|slot| { - match *slot { - Linkage::NotLinked | - Linkage::IncludedFromDylib => None, - - Linkage::Dynamic => Some(LinkagePreference::RequireDynamic), - Linkage::Static => Some(LinkagePreference::RequireStatic), - } - })) + let formats = self.tcx.dependency_formats(LOCAL_CRATE); + for (ty, arr) in formats.iter() { + if *ty != config::CrateType::Dylib { + continue; } - None => Lazy::empty(), + return self.lazy(arr.iter().map(|slot| { + match *slot { + Linkage::NotLinked | + Linkage::IncludedFromDylib => None, + + Linkage::Dynamic => Some(LinkagePreference::RequireDynamic), + Linkage::Static => Some(LinkagePreference::RequireStatic), + } + })); } + Lazy::empty() } - fn encode_info_for_foreign_item(&mut self, - (def_id, nitem): (DefId, &hir::ForeignItem)) - -> Entry<'tcx> { + fn encode_info_for_foreign_item( + &mut self, + def_id: DefId, + nitem: &hir::ForeignItem, + ) { let tcx = self.tcx; debug!("EncodeContext::encode_info_for_foreign_item({:?})", def_id); - let kind = match nitem.node { + record!(self.per_def.kind[def_id] <- match nitem.kind { hir::ForeignItemKind::Fn(_, ref names, _) => { let data = FnData { asyncness: hir::IsAsync::NotAsync, constness: hir::Constness::NotConst, param_names: self.encode_fn_param_names(names), - sig: self.lazy(tcx.fn_sig(def_id)), }; EntryKind::ForeignFn(self.lazy(data)) } hir::ForeignItemKind::Static(_, hir::MutMutable) => EntryKind::ForeignMutStatic, hir::ForeignItemKind::Static(_, hir::MutImmutable) => EntryKind::ForeignImmStatic, hir::ForeignItemKind::Type => EntryKind::ForeignType, - }; - - Entry { - kind, - visibility: self.lazy(ty::Visibility::from_hir(&nitem.vis, nitem.hir_id, tcx)), - span: self.lazy(nitem.span), - attributes: self.encode_attributes(&nitem.attrs), - children: Lazy::empty(), - stability: self.encode_stability(def_id), - deprecation: self.encode_deprecation(def_id), - - ty: Some(self.encode_item_type(def_id)), - inherent_impls: Lazy::empty(), - variances: match nitem.node { - hir::ForeignItemKind::Fn(..) => self.encode_variances_of(def_id), - _ => Lazy::empty(), - }, - generics: Some(self.encode_generics(def_id)), - predicates: Some(self.encode_predicates(def_id)), - predicates_defined_on: None, - - mir: None, - promoted_mir: None, + }); + record!(self.per_def.visibility[def_id] <- + ty::Visibility::from_hir(&nitem.vis, nitem.hir_id, self.tcx)); + record!(self.per_def.span[def_id] <- nitem.span); + record!(self.per_def.attributes[def_id] <- &nitem.attrs); + self.encode_stability(def_id); + self.encode_deprecation(def_id); + self.encode_item_type(def_id); + if let hir::ForeignItemKind::Fn(..) = nitem.kind { + record!(self.per_def.fn_sig[def_id] <- tcx.fn_sig(def_id)); + self.encode_variances_of(def_id); } + self.encode_generics(def_id); + self.encode_predicates(def_id); } } +// FIXME(eddyb) make metadata encoding walk over all definitions, instead of HIR. 
impl Visitor<'tcx> for EncodeContext<'tcx> { fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> { NestedVisitorMap::OnlyBodies(&self.tcx.hir()) @@ -1723,45 +1602,32 @@ impl Visitor<'tcx> for EncodeContext<'tcx> { intravisit::walk_expr(self, ex); self.encode_info_for_expr(ex); } + fn visit_anon_const(&mut self, c: &'tcx AnonConst) { + intravisit::walk_anon_const(self, c); + let def_id = self.tcx.hir().local_def_id(c.hir_id); + self.encode_info_for_anon_const(def_id); + } fn visit_item(&mut self, item: &'tcx hir::Item) { intravisit::walk_item(self, item); let def_id = self.tcx.hir().local_def_id(item.hir_id); - match item.node { + match item.kind { hir::ItemKind::ExternCrate(_) | hir::ItemKind::Use(..) => {} // ignore these - _ => self.record(def_id, EncodeContext::encode_info_for_item, (def_id, item)), + _ => self.encode_info_for_item(def_id, item), } self.encode_addl_info_for_item(item); } fn visit_foreign_item(&mut self, ni: &'tcx hir::ForeignItem) { intravisit::walk_foreign_item(self, ni); let def_id = self.tcx.hir().local_def_id(ni.hir_id); - self.record(def_id, - EncodeContext::encode_info_for_foreign_item, - (def_id, ni)); - } - fn visit_variant(&mut self, - v: &'tcx hir::Variant, - g: &'tcx hir::Generics, - id: hir::HirId) { - intravisit::walk_variant(self, v, g, id); - - if let Some(ref discr) = v.disr_expr { - let def_id = self.tcx.hir().local_def_id(discr.hir_id); - self.record(def_id, EncodeContext::encode_info_for_anon_const, def_id); - } + self.encode_info_for_foreign_item(def_id, ni); } fn visit_generics(&mut self, generics: &'tcx hir::Generics) { intravisit::walk_generics(self, generics); self.encode_info_for_generics(generics); } - fn visit_ty(&mut self, ty: &'tcx hir::Ty) { - intravisit::walk_ty(self, ty); - self.encode_info_for_ty(ty); - } fn visit_macro_def(&mut self, macro_def: &'tcx hir::MacroDef) { - let def_id = self.tcx.hir().local_def_id(macro_def.hir_id); - self.record(def_id, EncodeContext::encode_info_for_macro_def, macro_def); + self.encode_info_for_macro_def(macro_def); } } @@ -1769,10 +1635,10 @@ impl EncodeContext<'tcx> { fn encode_fields(&mut self, adt_def_id: DefId) { let def = self.tcx.adt_def(adt_def_id); for (variant_index, variant) in def.variants.iter_enumerated() { - for (field_index, field) in variant.fields.iter().enumerate() { - self.record(field.did, - EncodeContext::encode_field, - (adt_def_id, variant_index, field_index)); + for (field_index, _field) in variant.fields.iter().enumerate() { + // FIXME(eddyb) `adt_def_id` is leftover from incremental isolation, + // pass `def`, `variant` or `field` instead. + self.encode_field(adt_def_id, variant_index, field_index); } } } @@ -1783,34 +1649,24 @@ impl EncodeContext<'tcx> { match param.kind { GenericParamKind::Lifetime { .. } => continue, GenericParamKind::Type { ref default, .. } => { - self.record( + self.encode_info_for_generic_param( def_id, - EncodeContext::encode_info_for_ty_param, - (def_id, default.is_some()), + EntryKind::TypeParam, + default.is_some(), ); } GenericParamKind::Const { .. 
} => { - self.record(def_id, EncodeContext::encode_info_for_const_param, def_id); + self.encode_info_for_generic_param(def_id, EntryKind::ConstParam, true); } } } } - fn encode_info_for_ty(&mut self, ty: &hir::Ty) { - match ty.node { - hir::TyKind::Array(_, ref length) => { - let def_id = self.tcx.hir().local_def_id(length.hir_id); - self.record(def_id, EncodeContext::encode_info_for_anon_const, def_id); - } - _ => {} - } - } - fn encode_info_for_expr(&mut self, expr: &hir::Expr) { - match expr.node { + match expr.kind { hir::ExprKind::Closure(..) => { let def_id = self.tcx.hir().local_def_id(expr.hir_id); - self.record(def_id, EncodeContext::encode_info_for_closure, def_id); + self.encode_info_for_closure(def_id); } _ => {} } @@ -1822,7 +1678,7 @@ impl EncodeContext<'tcx> { /// normally in the visitor walk. fn encode_addl_info_for_item(&mut self, item: &hir::Item) { let def_id = self.tcx.hir().local_def_id(item.hir_id); - match item.node { + match item.kind { hir::ItemKind::Static(..) | hir::ItemKind::Const(..) | hir::ItemKind::Fn(..) | @@ -1841,14 +1697,14 @@ impl EncodeContext<'tcx> { let def = self.tcx.adt_def(def_id); for (i, variant) in def.variants.iter_enumerated() { - self.record(variant.def_id, - EncodeContext::encode_enum_variant_info, - (def_id, i)); + // FIXME(eddyb) `def_id` is leftover from incremental isolation, + // pass `def` or `variant` instead. + self.encode_enum_variant_info(def_id, i); - if let Some(ctor_def_id) = variant.ctor_def_id { - self.record(ctor_def_id, - EncodeContext::encode_enum_variant_ctor, - (def_id, i)); + // FIXME(eddyb) `def_id` is leftover from incremental isolation, + // pass `def`, `variant` or `ctor_def_id` instead. + if let Some(_ctor_def_id) = variant.ctor_def_id { + self.encode_enum_variant_ctor(def_id, i); } } } @@ -1858,9 +1714,7 @@ impl EncodeContext<'tcx> { // If the struct has a constructor, encode it. if let Some(ctor_hir_id) = struct_def.ctor_hir_id() { let ctor_def_id = self.tcx.hir().local_def_id(ctor_hir_id); - self.record(ctor_def_id, - EncodeContext::encode_struct_ctor, - (def_id, ctor_def_id)); + self.encode_struct_ctor(def_id, ctor_def_id); } } hir::ItemKind::Union(..) => { @@ -1868,16 +1722,12 @@ impl EncodeContext<'tcx> { } hir::ItemKind::Impl(..) => { for &trait_item_def_id in self.tcx.associated_item_def_ids(def_id).iter() { - self.record(trait_item_def_id, - EncodeContext::encode_info_for_impl_item, - trait_item_def_id); + self.encode_info_for_impl_item(trait_item_def_id); } } hir::ItemKind::Trait(..) => { for &item_def_id in self.tcx.associated_item_def_ids(def_id).iter() { - self.record(item_def_id, - EncodeContext::encode_info_for_trait_item, - item_def_id); + self.encode_info_for_trait_item(item_def_id); } } } @@ -1891,7 +1741,7 @@ struct ImplVisitor<'tcx> { impl<'tcx, 'v> ItemLikeVisitor<'v> for ImplVisitor<'tcx> { fn visit_item(&mut self, item: &hir::Item) { - if let hir::ItemKind::Impl(..) = item.node { + if let hir::ItemKind::Impl(..) = item.kind { let impl_id = self.tcx.hir().local_def_id(item.hir_id); if let Some(trait_ref) = self.tcx.impl_trait_ref(impl_id) { self.impls @@ -1932,7 +1782,7 @@ impl<'tcx, 'v> ItemLikeVisitor<'v> for ImplVisitor<'tcx> { // will allow us to slice the metadata to the precise length that we just // generated regardless of trailing bytes that end up in it. 
-pub fn encode_metadata(tcx: TyCtxt<'_>) -> EncodedMetadata { +crate fn encode_metadata(tcx: TyCtxt<'_>) -> EncodedMetadata { let mut encoder = opaque::Encoder::new(vec![]); encoder.emit_raw_bytes(METADATA_HEADER); @@ -1945,7 +1795,7 @@ pub fn encode_metadata(tcx: TyCtxt<'_>) -> EncodedMetadata { let mut ecx = EncodeContext { opaque: encoder, tcx, - entries_index: Index::new(tcx.hir().definitions().def_index_count()), + per_def: Default::default(), lazy_state: LazyState::NoNode, type_shorthands: Default::default(), predicate_shorthands: Default::default(), @@ -1965,7 +1815,7 @@ pub fn encode_metadata(tcx: TyCtxt<'_>) -> EncodedMetadata { // Encode the root position. let header = METADATA_HEADER.len(); - let pos = root.position; + let pos = root.position.get(); result[header + 0] = (pos >> 24) as u8; result[header + 1] = (pos >> 16) as u8; result[header + 2] = (pos >> 8) as u8; @@ -1973,11 +1823,3 @@ pub fn encode_metadata(tcx: TyCtxt<'_>) -> EncodedMetadata { EncodedMetadata { raw_data: result } } - -pub fn get_repr_options(tcx: TyCtxt<'_>, did: DefId) -> ReprOptions { - let ty = tcx.type_of(did); - match ty.sty { - ty::Adt(ref def, _) => return def.repr, - _ => bug!("{} is not an ADT", ty), - } -} diff --git a/src/librustc_metadata/foreign_modules.rs b/src/librustc_metadata/foreign_modules.rs index b2e40282d9..fa1402584e 100644 --- a/src/librustc_metadata/foreign_modules.rs +++ b/src/librustc_metadata/foreign_modules.rs @@ -3,7 +3,7 @@ use rustc::hir; use rustc::middle::cstore::ForeignModule; use rustc::ty::TyCtxt; -pub fn collect(tcx: TyCtxt<'_>) -> Vec { +crate fn collect(tcx: TyCtxt<'_>) -> Vec { let mut collector = Collector { tcx, modules: Vec::new(), @@ -19,7 +19,7 @@ struct Collector<'tcx> { impl ItemLikeVisitor<'tcx> for Collector<'tcx> { fn visit_item(&mut self, it: &'tcx hir::Item) { - let fm = match it.node { + let fm = match it.kind { hir::ItemKind::ForeignMod(ref fm) => fm, _ => return, }; diff --git a/src/librustc_metadata/index.rs b/src/librustc_metadata/index.rs deleted file mode 100644 index 6f248f22cf..0000000000 --- a/src/librustc_metadata/index.rs +++ /dev/null @@ -1,141 +0,0 @@ -use crate::schema::*; - -use rustc::hir::def_id::{DefId, DefIndex}; -use rustc_serialize::opaque::Encoder; -use std::marker::PhantomData; -use std::u32; -use log::debug; - -/// Helper trait, for encoding to, and decoding from, a fixed number of bytes. -pub trait FixedSizeEncoding { - const BYTE_LEN: usize; - - // FIXME(eddyb) convert to and from `[u8; Self::BYTE_LEN]` instead, - // once that starts being allowed by the compiler (i.e. lazy normalization). - fn from_bytes(b: &[u8]) -> Self; - fn write_to_bytes(self, b: &mut [u8]); - - // FIXME(eddyb) make these generic functions, or at least defaults here. - // (same problem as above, needs `[u8; Self::BYTE_LEN]`) - // For now, a macro (`fixed_size_encoding_byte_len_and_defaults`) is used. - fn read_from_bytes_at(b: &[u8], i: usize) -> Self; - fn write_to_bytes_at(self, b: &mut [u8], i: usize); -} - -// HACK(eddyb) this shouldn't be needed (see comments on the methods above). -macro_rules! fixed_size_encoding_byte_len_and_defaults { - ($byte_len:expr) => { - const BYTE_LEN: usize = $byte_len; - fn read_from_bytes_at(b: &[u8], i: usize) -> Self { - const BYTE_LEN: usize = $byte_len; - // HACK(eddyb) ideally this would be done with fully safe code, - // but slicing `[u8]` with `i * N..` is optimized worse, due to the - // possibility of `i * N` overflowing, than indexing `[[u8; N]]`. 
- let b = unsafe { - std::slice::from_raw_parts( - b.as_ptr() as *const [u8; BYTE_LEN], - b.len() / BYTE_LEN, - ) - }; - Self::from_bytes(&b[i]) - } - fn write_to_bytes_at(self, b: &mut [u8], i: usize) { - const BYTE_LEN: usize = $byte_len; - // HACK(eddyb) ideally this would be done with fully safe code, - // see similar comment in `read_from_bytes_at` for why it can't yet. - let b = unsafe { - std::slice::from_raw_parts_mut( - b.as_mut_ptr() as *mut [u8; BYTE_LEN], - b.len() / BYTE_LEN, - ) - }; - self.write_to_bytes(&mut b[i]); - } - } -} - -impl FixedSizeEncoding for u32 { - fixed_size_encoding_byte_len_and_defaults!(4); - - fn from_bytes(b: &[u8]) -> Self { - let mut bytes = [0; Self::BYTE_LEN]; - bytes.copy_from_slice(&b[..Self::BYTE_LEN]); - Self::from_le_bytes(bytes) - } - - fn write_to_bytes(self, b: &mut [u8]) { - b[..Self::BYTE_LEN].copy_from_slice(&self.to_le_bytes()); - } -} - -/// While we are generating the metadata, we also track the position -/// of each DefIndex. It is not required that all definitions appear -/// in the metadata, nor that they are serialized in order, and -/// therefore we first allocate the vector here and fill it with -/// `u32::MAX`. Whenever an index is visited, we fill in the -/// appropriate spot by calling `record_position`. We should never -/// visit the same index twice. -pub struct Index<'tcx> { - positions: Vec, - _marker: PhantomData<&'tcx ()>, -} - -impl Index<'tcx> { - pub fn new(max_index: usize) -> Self { - Index { - positions: vec![0xff; max_index * 4], - _marker: PhantomData, - } - } - - pub fn record(&mut self, def_id: DefId, entry: Lazy>) { - assert!(def_id.is_local()); - self.record_index(def_id.index, entry); - } - - pub fn record_index(&mut self, item: DefIndex, entry: Lazy>) { - assert!(entry.position < (u32::MAX as usize)); - let position = entry.position as u32; - let array_index = item.index(); - - let positions = &mut self.positions; - assert!(u32::read_from_bytes_at(positions, array_index) == u32::MAX, - "recorded position for item {:?} twice, first at {:?} and now at {:?}", - item, - u32::read_from_bytes_at(positions, array_index), - position); - - position.write_to_bytes_at(positions, array_index) - } - - pub fn write_index(&self, buf: &mut Encoder) -> Lazy<[Self]> { - let pos = buf.position(); - - // First we write the length of the lower range ... - buf.emit_raw_bytes(&(self.positions.len() as u32 / 4).to_le_bytes()); - // ... then the values. - buf.emit_raw_bytes(&self.positions); - Lazy::from_position_and_meta(pos as usize, self.positions.len() / 4 + 1) - } -} - -impl Lazy<[Index<'tcx>]> { - /// Given the metadata, extract out the offset of a particular - /// DefIndex (if any). 
- #[inline(never)] - pub fn lookup(&self, bytes: &[u8], def_index: DefIndex) -> Option>> { - let bytes = &bytes[self.position..]; - debug!("Index::lookup: index={:?} len={:?}", - def_index, - self.meta); - - let position = u32::read_from_bytes_at(bytes, 1 + def_index.index()); - if position == u32::MAX { - debug!("Index::lookup: position=u32::MAX"); - None - } else { - debug!("Index::lookup: position={:?}", position); - Some(Lazy::from_position(position as usize)) - } - } -} diff --git a/src/librustc_metadata/lib.rs b/src/librustc_metadata/lib.rs index e6104e629e..291ee23ff7 100644 --- a/src/librustc_metadata/lib.rs +++ b/src/librustc_metadata/lib.rs @@ -1,6 +1,7 @@ #![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![feature(box_patterns)] +#![feature(core_intrinsics)] #![feature(crate_visibility_modifier)] #![feature(drain_filter)] #![feature(in_band_lifetimes)] @@ -11,6 +12,7 @@ #![feature(rustc_private)] #![feature(slice_patterns)] #![feature(specialization)] +#![feature(stmt_expr_attributes)] #![recursion_limit="256"] @@ -24,14 +26,15 @@ extern crate rustc_data_structures; pub mod error_codes; -mod index; mod encoder; mod decoder; +mod dependency_format; mod cstore_impl; -mod schema; -mod native_libs; -mod link_args; mod foreign_modules; +mod link_args; +mod native_libs; +mod schema; +mod table; pub mod creader; pub mod cstore; diff --git a/src/librustc_metadata/link_args.rs b/src/librustc_metadata/link_args.rs index 728fd004fc..4291f3a4ae 100644 --- a/src/librustc_metadata/link_args.rs +++ b/src/librustc_metadata/link_args.rs @@ -4,7 +4,7 @@ use rustc::ty::TyCtxt; use rustc_target::spec::abi::Abi; use syntax::symbol::sym; -pub fn collect(tcx: TyCtxt<'_>) -> Vec { +crate fn collect(tcx: TyCtxt<'_>) -> Vec { let mut collector = Collector { args: Vec::new(), }; @@ -27,7 +27,7 @@ struct Collector { impl<'tcx> ItemLikeVisitor<'tcx> for Collector { fn visit_item(&mut self, it: &'tcx hir::Item) { - let fm = match it.node { + let fm = match it.kind { hir::ItemKind::ForeignMod(ref fm) => fm, _ => return, }; diff --git a/src/librustc_metadata/locator.rs b/src/librustc_metadata/locator.rs index ceba7cf0fe..4a263250f9 100644 --- a/src/librustc_metadata/locator.rs +++ b/src/librustc_metadata/locator.rs @@ -212,19 +212,21 @@ //! no means all of the necessary details. Take a look at the rest of //! metadata::locator or metadata::creader for all the juicy details! 
-use crate::cstore::{MetadataRef, MetadataBlob}; +use crate::cstore::MetadataBlob; use crate::creader::Library; use crate::schema::{METADATA_HEADER, rustc_version}; use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::svh::Svh; -use rustc::middle::cstore::MetadataLoader; -use rustc::session::{config, Session}; +use rustc_data_structures::sync::MetadataRef; +use rustc::middle::cstore::{CrateSource, MetadataLoader}; +use rustc::session::{config, Session, CrateDisambiguator}; use rustc::session::filesearch::{FileSearch, FileMatches, FileDoesntMatch}; use rustc::session::search_paths::PathKind; use rustc::util::nodemap::FxHashMap; use errors::DiagnosticBuilder; +use syntax::{span_err, span_fatal}; use syntax::symbol::{Symbol, sym}; use syntax::struct_span_err; use syntax_pos::Span; @@ -245,24 +247,24 @@ use rustc_data_structures::owning_ref::OwningRef; use log::{debug, info, warn}; #[derive(Clone)] -pub struct CrateMismatch { +crate struct CrateMismatch { path: PathBuf, got: String, } #[derive(Clone)] -pub struct Context<'a> { +crate struct Context<'a> { pub sess: &'a Session, pub span: Span, - pub ident: Symbol, pub crate_name: Symbol, pub hash: Option<&'a Svh>, + pub host_hash: Option<&'a Svh>, pub extra_filename: Option<&'a str>, // points to either self.sess.target.target or self.sess.host, must match triple pub target: &'a Target, pub triple: TargetTriple, pub filesearch: FileSearch<'a>, - pub root: &'a Option, + pub root: Option<&'a CratePaths>, pub rejected_via_hash: Vec, pub rejected_via_triple: Vec, pub rejected_via_kind: Vec, @@ -273,11 +275,9 @@ pub struct Context<'a> { pub metadata_loader: &'a dyn MetadataLoader, } -pub struct CratePaths { - pub ident: String, - pub dylib: Option, - pub rlib: Option, - pub rmeta: Option, +crate struct CratePaths { + pub name: Symbol, + pub source: CrateSource, } #[derive(Copy, Clone, PartialEq)] @@ -297,14 +297,8 @@ impl fmt::Display for CrateFlavor { } } -impl CratePaths { - fn paths(&self) -> Vec { - self.dylib.iter().chain(self.rlib.iter()).chain(self.rmeta.iter()).cloned().collect() - } -} - impl<'a> Context<'a> { - pub fn reset(&mut self) { + crate fn reset(&mut self) { self.rejected_via_hash.clear(); self.rejected_via_triple.clear(); self.rejected_via_kind.clear(); @@ -312,7 +306,7 @@ impl<'a> Context<'a> { self.rejected_via_filename.clear(); } - pub fn maybe_load_library_crate(&mut self) -> Option { + crate fn maybe_load_library_crate(&mut self) -> Option { let mut seen_paths = FxHashSet::default(); match self.extra_filename { Some(s) => self.find_library_crate(s, &mut seen_paths) @@ -321,10 +315,10 @@ impl<'a> Context<'a> { } } - pub fn report_errs(self) -> ! { + crate fn report_errs(self) -> ! { let add = match self.root { - &None => String::new(), - &Some(ref r) => format!(" which `{}` depends on", r.ident), + None => String::new(), + Some(r) => format!(" which `{}` depends on", r.name), }; let mut msg = "the following crate versions were found:".to_string(); let mut err = if !self.rejected_via_hash.is_empty() { @@ -332,18 +326,18 @@ impl<'a> Context<'a> { self.span, E0460, "found possibly newer version of crate `{}`{}", - self.ident, + self.crate_name, add); err.note("perhaps that crate needs to be recompiled?"); let mismatches = self.rejected_via_hash.iter(); for &CrateMismatch { ref path, .. 
} in mismatches { - msg.push_str(&format!("\ncrate `{}`: {}", self.ident, path.display())); + msg.push_str(&format!("\ncrate `{}`: {}", self.crate_name, path.display())); } match self.root { - &None => {} - &Some(ref r) => { - for path in r.paths().iter() { - msg.push_str(&format!("\ncrate `{}`: {}", r.ident, path.display())); + None => {} + Some(r) => { + for path in r.source.paths() { + msg.push_str(&format!("\ncrate `{}`: {}", r.name, path.display())); } } } @@ -355,13 +349,13 @@ impl<'a> Context<'a> { E0461, "couldn't find crate `{}` \ with expected target triple {}{}", - self.ident, + self.crate_name, self.triple, add); let mismatches = self.rejected_via_triple.iter(); for &CrateMismatch { ref path, ref got } in mismatches { msg.push_str(&format!("\ncrate `{}`, target triple {}: {}", - self.ident, + self.crate_name, got, path.display())); } @@ -372,12 +366,12 @@ impl<'a> Context<'a> { self.span, E0462, "found staticlib `{}` instead of rlib or dylib{}", - self.ident, + self.crate_name, add); err.help("please recompile that crate using --crate-type lib"); let mismatches = self.rejected_via_kind.iter(); for &CrateMismatch { ref path, .. } in mismatches { - msg.push_str(&format!("\ncrate `{}`: {}", self.ident, path.display())); + msg.push_str(&format!("\ncrate `{}`: {}", self.crate_name, path.display())); } err.note(&msg); err @@ -387,14 +381,14 @@ impl<'a> Context<'a> { E0514, "found crate `{}` compiled by an incompatible version \ of rustc{}", - self.ident, + self.crate_name, add); err.help(&format!("please recompile that crate using this compiler ({})", rustc_version())); let mismatches = self.rejected_via_version.iter(); for &CrateMismatch { ref path, ref got } in mismatches { msg.push_str(&format!("\ncrate `{}` compiled by {}: {}", - self.ident, + self.crate_name, got, path.display())); } @@ -405,10 +399,10 @@ impl<'a> Context<'a> { self.span, E0463, "can't find crate for `{}`{}", - self.ident, + self.crate_name, add); - if (self.ident == sym::std || self.ident == sym::core) + if (self.crate_name == sym::std || self.crate_name == sym::core) && self.triple != TargetTriple::from_triple(config::host_triple()) { err.note(&format!("the `{}` target may not be installed", self.triple)); } @@ -535,18 +529,8 @@ impl<'a> Context<'a> { // search is being performed for. 
let mut libraries = FxHashMap::default(); for (_hash, (rlibs, rmetas, dylibs)) in candidates { - let mut slot = None; - let rlib = self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot); - let rmeta = self.extract_one(rmetas, CrateFlavor::Rmeta, &mut slot); - let dylib = self.extract_one(dylibs, CrateFlavor::Dylib, &mut slot); - if let Some((h, m)) = slot { - libraries.insert(h, - Library { - dylib, - rlib, - rmeta, - metadata: m, - }); + if let Some((svh, lib)) = self.extract_lib(rlibs, rmetas, dylibs) { + libraries.insert(svh, lib); } } @@ -564,7 +548,7 @@ impl<'a> Context<'a> { self.crate_name); let candidates = libraries.iter().filter_map(|(_, lib)| { let crate_name = &lib.metadata.get_root().name.as_str(); - match &(&lib.dylib, &lib.rlib) { + match &(&lib.source.dylib, &lib.source.rlib) { &(&Some((ref pd, _)), &Some((ref pr, _))) => { Some(format!("\ncrate `{}`: {}\n{:>padding$}", crate_name, @@ -585,6 +569,21 @@ impl<'a> Context<'a> { } } + fn extract_lib( + &mut self, + rlibs: FxHashMap, + rmetas: FxHashMap, + dylibs: FxHashMap, + ) -> Option<(Svh, Library)> { + let mut slot = None; + let source = CrateSource { + rlib: self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot), + rmeta: self.extract_one(rmetas, CrateFlavor::Rmeta, &mut slot), + dylib: self.extract_one(dylibs, CrateFlavor::Dylib, &mut slot), + }; + slot.map(|(svh, metadata)| (svh, Library { source, metadata })) + } + // Attempts to extract *one* library from the set `m`. If the set has no // elements, `None` is returned. If the set has more than one element, then // the errors and notes are emitted about the set of libraries. @@ -829,23 +828,8 @@ impl<'a> Context<'a> { } }; - // Extract the rlib/dylib pair. - let mut slot = None; - let rlib = self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot); - let rmeta = self.extract_one(rmetas, CrateFlavor::Rmeta, &mut slot); - let dylib = self.extract_one(dylibs, CrateFlavor::Dylib, &mut slot); - - if rlib.is_none() && rmeta.is_none() && dylib.is_none() { - return None; - } - slot.map(|(_, metadata)| - Library { - dylib, - rlib, - rmeta, - metadata, - } - ) + // Extract the dylib/rlib/rmeta triple. + self.extract_lib(rlibs, rmetas, dylibs).map(|(_, lib)| lib) } } @@ -929,10 +913,88 @@ fn get_metadata_section_imp(target: &Target, } } +/// Look for a plugin registrar. Returns its library path and crate disambiguator. +pub fn find_plugin_registrar( + sess: &Session, + metadata_loader: &dyn MetadataLoader, + span: Span, + name: Symbol, +) -> Option<(PathBuf, CrateDisambiguator)> { + info!("find plugin registrar `{}`", name); + let target_triple = sess.opts.target_triple.clone(); + let host_triple = TargetTriple::from_triple(config::host_triple()); + let is_cross = target_triple != host_triple; + let mut target_only = false; + let mut locate_ctxt = Context { + sess, + span, + crate_name: name, + hash: None, + host_hash: None, + extra_filename: None, + filesearch: sess.host_filesearch(PathKind::Crate), + target: &sess.host, + triple: host_triple, + root: None, + rejected_via_hash: vec![], + rejected_via_triple: vec![], + rejected_via_kind: vec![], + rejected_via_version: vec![], + rejected_via_filename: vec![], + should_match_name: true, + is_proc_macro: None, + metadata_loader, + }; + + let library = locate_ctxt.maybe_load_library_crate().or_else(|| { + if !is_cross { + return None + } + // Try loading from target crates. 
This will abort later if we + // try to load a plugin registrar function, + target_only = true; + + locate_ctxt.target = &sess.target.target; + locate_ctxt.triple = target_triple; + locate_ctxt.filesearch = sess.target_filesearch(PathKind::Crate); + + locate_ctxt.maybe_load_library_crate() + }); + let library = match library { + Some(l) => l, + None => locate_ctxt.report_errs(), + }; + + if target_only { + // Need to abort before syntax expansion. + let message = format!("plugin `{}` is not available for triple `{}` \ + (only found {})", + name, + config::host_triple(), + sess.opts.target_triple); + span_fatal!(sess, span, E0456, "{}", &message); + } + + match library.source.dylib { + Some(dylib) => { + Some((dylib.0, library.metadata.get_root().disambiguator)) + } + None => { + span_err!(sess, span, E0457, + "plugin `{}` only found in rlib format, but must be available \ + in dylib format", + name); + // No need to abort because the loading code will just ignore this + // empty dylib. + None + } + } +} + /// A diagnostic function for dumping crate metadata to an output stream. pub fn list_file_metadata(target: &Target, path: &Path, - loader: &dyn MetadataLoader, + metadata_loader: &dyn MetadataLoader, out: &mut dyn io::Write) -> io::Result<()> { let filename = path.file_name().unwrap().to_str().unwrap(); @@ -943,7 +1005,7 @@ pub fn list_file_metadata(target: &Target, } else { CrateFlavor::Dylib }; - match get_metadata_section(target, flavor, path, loader) { + match get_metadata_section(target, flavor, path, metadata_loader) { Ok(metadata) => metadata.list_crate_metadata(out), Err(msg) => write!(out, "{}\n", msg), } diff --git a/src/librustc_metadata/native_libs.rs b/src/librustc_metadata/native_libs.rs index ada1a8c615..a58db6a903 100644 --- a/src/librustc_metadata/native_libs.rs +++ b/src/librustc_metadata/native_libs.rs @@ -11,7 +11,7 @@ use syntax::feature_gate::{self, GateIssue}; use syntax::symbol::{kw, sym, Symbol}; use syntax::{span_err, struct_span_err}; -pub fn collect(tcx: TyCtxt<'_>) -> Vec { +crate fn collect(tcx: TyCtxt<'_>) -> Vec { let mut collector = Collector { tcx, libs: Vec::new(), @@ -21,7 +21,7 @@ pub fn collect(tcx: TyCtxt<'_>) -> Vec { return collector.libs; } -pub fn relevant_lib(sess: &Session, lib: &NativeLibrary) -> bool { +crate fn relevant_lib(sess: &Session, lib: &NativeLibrary) -> bool { match lib.cfg { Some(ref cfg) => attr::cfg_matches(cfg, &sess.parse_sess, None), None => true, @@ -35,7 +35,7 @@ struct Collector<'tcx> { impl ItemLikeVisitor<'tcx> for Collector<'tcx> { fn visit_item(&mut self, it: &'tcx hir::Item) { - let fm = match it.node { + let fm = match it.kind { hir::ItemKind::ForeignMod(ref fm) => fm, _ => return, }; @@ -73,6 +73,7 @@ impl ItemLikeVisitor<'tcx> for Collector<'tcx> { "static-nobundle" => cstore::NativeStaticNobundle, "dylib" => cstore::NativeUnknown, "framework" => cstore::NativeFramework, + "raw-dylib" => cstore::NativeRawDylib, k => { struct_span_err!(self.tcx.sess, item.span(), E0458, "unknown kind: `{}`", k) @@ -169,6 +170,14 @@ impl Collector<'tcx> { GateIssue::Language, "kind=\"static-nobundle\" is unstable"); } + if lib.kind == cstore::NativeRawDylib && + !self.tcx.features().raw_dylib { + feature_gate::emit_feature_err(&self.tcx.sess.parse_sess, + sym::raw_dylib, + span.unwrap_or_else(|| syntax_pos::DUMMY_SP), + GateIssue::Language, + "kind=\"raw-dylib\" is unstable"); + } self.libs.push(lib); } @@ -189,12 +198,10 @@ impl Collector<'tcx> { self.tcx.sess.err(&format!("renaming of the library `{}` was specified, \ however this 
crate contains no `#[link(...)]` \ attributes referencing this library.", name)); - } else if renames.contains(name) { + } else if !renames.insert(name) { self.tcx.sess.err(&format!("multiple renamings were \ specified for library `{}` .", name)); - } else { - renames.insert(name); } } } diff --git a/src/librustc_metadata/schema.rs b/src/librustc_metadata/schema.rs index 92534ab056..8bece25111 100644 --- a/src/librustc_metadata/schema.rs +++ b/src/librustc_metadata/schema.rs @@ -1,4 +1,4 @@ -use crate::index; +use crate::table::PerDefTable; use rustc::hir; use rustc::hir::def::{self, CtorKind}; @@ -11,17 +11,19 @@ use rustc::session::CrateDisambiguator; use rustc::session::config::SymbolManglingVersion; use rustc::ty::{self, Ty, ReprOptions}; use rustc_target::spec::{PanicStrategy, TargetTriple}; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc_index::vec::IndexVec; use rustc_data_structures::svh::Svh; +use rustc_serialize::Encodable; use syntax::{ast, attr}; use syntax::edition::Edition; use syntax::symbol::Symbol; use syntax_pos::{self, Span}; use std::marker::PhantomData; +use std::num::NonZeroUsize; -pub fn rustc_version() -> String { +crate fn rustc_version() -> String { format!("rustc {}", option_env!("CFG_VERSION").unwrap_or("unknown version")) } @@ -29,7 +31,7 @@ pub fn rustc_version() -> String { /// Metadata encoding version. /// N.B., increment this if you change the format of metadata such that /// the rustc version can't be found to compare with `rustc_version()`. -pub const METADATA_VERSION: u8 = 4; +const METADATA_VERSION: u8 = 4; /// Metadata header which includes `METADATA_VERSION`. /// To get older versions of rustc to ignore this metadata, @@ -39,12 +41,12 @@ pub const METADATA_VERSION: u8 = 4; /// This header is followed by the position of the `CrateRoot`, /// which is encoded as a 32-bit big-endian unsigned integer, /// and further followed by the rustc version string. -pub const METADATA_HEADER: &[u8; 12] = +crate const METADATA_HEADER: &[u8; 12] = &[0, 0, 0, 0, b'r', b'u', b's', b't', 0, 0, 0, METADATA_VERSION]; /// Additional metadata for a `Lazy` where `T` may not be `Sized`, /// e.g. for `Lazy<[T]>`, this is the length (count of `T` values). -pub trait LazyMeta { +crate trait LazyMeta { type Meta: Copy + 'static; /// Returns the minimum encoded size. @@ -52,7 +54,7 @@ pub trait LazyMeta { fn min_size(meta: Self::Meta) -> usize; } -impl LazyMeta for T { +impl LazyMeta for T { type Meta = (); fn min_size(_: ()) -> usize { @@ -61,7 +63,7 @@ impl LazyMeta for T { } } -impl LazyMeta for [T] { +impl LazyMeta for [T] { type Meta = usize; fn min_size(len: usize) -> usize { @@ -98,17 +100,17 @@ impl LazyMeta for [T] { #[must_use] // FIXME(#59875) the `Meta` parameter only exists to dodge // invariance wrt `T` (coming from the `meta: T::Meta` field). 
-pub struct Lazy::Meta> +crate struct Lazy::Meta> where T: ?Sized + LazyMeta, Meta: 'static + Copy, { - pub position: usize, + pub position: NonZeroUsize, pub meta: Meta, _marker: PhantomData, } impl Lazy { - pub fn from_position_and_meta(position: usize, meta: T::Meta) -> Lazy { + crate fn from_position_and_meta(position: NonZeroUsize, meta: T::Meta) -> Lazy { Lazy { position, meta, @@ -117,15 +119,15 @@ impl Lazy { } } -impl Lazy { - pub fn from_position(position: usize) -> Lazy { +impl Lazy { + crate fn from_position(position: NonZeroUsize) -> Lazy { Lazy::from_position_and_meta(position, ()) } } -impl Lazy<[T]> { - pub fn empty() -> Lazy<[T]> { - Lazy::from_position_and_meta(0, 0) +impl Lazy<[T]> { + crate fn empty() -> Lazy<[T]> { + Lazy::from_position_and_meta(NonZeroUsize::new(1).unwrap(), 0) } } @@ -141,22 +143,32 @@ impl rustc_serialize::UseSpecializedDecodable for Lazy /// Encoding / decoding state for `Lazy`. #[derive(Copy, Clone, PartialEq, Eq, Debug)] -pub enum LazyState { +crate enum LazyState { /// Outside of a metadata node. NoNode, /// Inside a metadata node, and before any `Lazy`. /// The position is that of the node itself. - NodeStart(usize), + NodeStart(NonZeroUsize), /// Inside a metadata node, with a previous `Lazy`. /// The position is a conservative estimate of where that /// previous `Lazy` would end (see their comments). - Previous(usize), + Previous(NonZeroUsize), +} + +// FIXME(#59875) `Lazy!(T)` replaces `Lazy`, passing the `Meta` parameter +// manually, instead of relying on the default, to get the correct variance. +// Only needed when `T` itself contains a parameter (e.g. `'tcx`). +macro_rules! Lazy { + (Table<$T:ty>) => {Lazy, usize>}; + (PerDefTable<$T:ty>) => {Lazy, usize>}; + ([$T:ty]) => {Lazy<[$T], usize>}; + ($T:ty) => {Lazy<$T, ()>}; } #[derive(RustcEncodable, RustcDecodable)] -pub struct CrateRoot<'tcx> { +crate struct CrateRoot<'tcx> { pub name: Symbol, pub triple: TargetTriple, pub extra_filename: String, @@ -182,10 +194,10 @@ pub struct CrateRoot<'tcx> { pub source_map: Lazy<[syntax_pos::SourceFile]>, pub def_path_table: Lazy, pub impls: Lazy<[TraitImpls]>, - pub exported_symbols: Lazy<[(ExportedSymbol<'tcx>, SymbolExportLevel)]>, + pub exported_symbols: Lazy!([(ExportedSymbol<'tcx>, SymbolExportLevel)]), pub interpret_alloc_index: Lazy<[u32]>, - pub entries_index: Lazy<[index::Index<'tcx>]>, + pub per_def: LazyPerDefTables<'tcx>, /// The DefIndex's of any proc macros delcared by /// this crate @@ -202,42 +214,46 @@ pub struct CrateRoot<'tcx> { } #[derive(RustcEncodable, RustcDecodable)] -pub struct CrateDep { +crate struct CrateDep { pub name: ast::Name, pub hash: Svh, + pub host_hash: Option, pub kind: DepKind, pub extra_filename: String, } #[derive(RustcEncodable, RustcDecodable)] -pub struct TraitImpls { +crate struct TraitImpls { pub trait_id: (u32, DefIndex), pub impls: Lazy<[DefIndex]>, } #[derive(RustcEncodable, RustcDecodable)] -pub struct Entry<'tcx> { - pub kind: EntryKind<'tcx>, - pub visibility: Lazy, - pub span: Lazy, - pub attributes: Lazy<[ast::Attribute]>, - pub children: Lazy<[DefIndex]>, - pub stability: Option>, - pub deprecation: Option>, +crate struct LazyPerDefTables<'tcx> { + pub kind: Lazy!(PerDefTable)>), + pub visibility: Lazy!(PerDefTable>), + pub span: Lazy!(PerDefTable>), + pub attributes: Lazy!(PerDefTable>), + pub children: Lazy!(PerDefTable>), + pub stability: Lazy!(PerDefTable>), + pub deprecation: Lazy!(PerDefTable>), - pub ty: Option>>, - pub inherent_impls: Lazy<[DefIndex]>, - pub variances: 
Lazy<[ty::Variance]>, - pub generics: Option>, - pub predicates: Option>>, - pub predicates_defined_on: Option>>, + pub ty: Lazy!(PerDefTable)>), + pub fn_sig: Lazy!(PerDefTable)>), + pub impl_trait_ref: Lazy!(PerDefTable)>), + pub inherent_impls: Lazy!(PerDefTable>), + pub variances: Lazy!(PerDefTable>), + pub generics: Lazy!(PerDefTable>), + pub predicates: Lazy!(PerDefTable)>), + pub predicates_defined_on: Lazy!(PerDefTable)>), + pub super_predicates: Lazy!(PerDefTable)>), - pub mir: Option>>, - pub promoted_mir: Option>>>, + pub mir: Lazy!(PerDefTable)>), + pub promoted_mir: Lazy!(PerDefTable>)>), } #[derive(Copy, Clone, RustcEncodable, RustcDecodable)] -pub enum EntryKind<'tcx> { +crate enum EntryKind<'tcx> { Const(ConstQualif, Lazy), ImmStatic, MutStatic, @@ -252,89 +268,78 @@ pub enum EntryKind<'tcx> { OpaqueTy, Enum(ReprOptions), Field, - Variant(Lazy>), - Struct(Lazy>, ReprOptions), - Union(Lazy>, ReprOptions), - Fn(Lazy>), - ForeignFn(Lazy>), + Variant(Lazy), + Struct(Lazy, ReprOptions), + Union(Lazy, ReprOptions), + Fn(Lazy), + ForeignFn(Lazy), Mod(Lazy), MacroDef(Lazy), - Closure(Lazy>), - Generator(Lazy>), - Trait(Lazy>), - Impl(Lazy>), - Method(Lazy>), + Closure, + Generator(Lazy!(GeneratorData<'tcx>)), + Trait(Lazy), + Impl(Lazy), + Method(Lazy), AssocType(AssocContainer), AssocOpaqueTy(AssocContainer), AssocConst(AssocContainer, ConstQualif, Lazy), - TraitAlias(Lazy>), + TraitAlias, } /// Additional data for EntryKind::Const and EntryKind::AssocConst #[derive(Clone, Copy, RustcEncodable, RustcDecodable)] -pub struct ConstQualif { +crate struct ConstQualif { pub mir: u8, - pub ast_promotable: bool, } /// Contains a constant which has been rendered to a String. /// Used by rustdoc. #[derive(RustcEncodable, RustcDecodable)] -pub struct RenderedConst(pub String); +crate struct RenderedConst(pub String); #[derive(RustcEncodable, RustcDecodable)] -pub struct ModData { +crate struct ModData { pub reexports: Lazy<[def::Export]>, } #[derive(RustcEncodable, RustcDecodable)] -pub struct MacroDef { +crate struct MacroDef { pub body: String, pub legacy: bool, } #[derive(RustcEncodable, RustcDecodable)] -pub struct FnData<'tcx> { +crate struct FnData { pub asyncness: hir::IsAsync, pub constness: hir::Constness, pub param_names: Lazy<[ast::Name]>, - pub sig: Lazy>, } #[derive(RustcEncodable, RustcDecodable)] -pub struct VariantData<'tcx> { +crate struct VariantData { pub ctor_kind: CtorKind, pub discr: ty::VariantDiscr, /// If this is unit or tuple-variant/struct, then this is the index of the ctor id. pub ctor: Option, - /// If this is a tuple struct or variant - /// ctor, this is its "function" signature. - pub ctor_sig: Option>>, } #[derive(RustcEncodable, RustcDecodable)] -pub struct TraitData<'tcx> { +crate struct TraitData { pub unsafety: hir::Unsafety, pub paren_sugar: bool, pub has_auto_impl: bool, pub is_marker: bool, - pub super_predicates: Lazy>, } #[derive(RustcEncodable, RustcDecodable)] -pub struct TraitAliasData<'tcx> { - pub super_predicates: Lazy>, -} - -#[derive(RustcEncodable, RustcDecodable)] -pub struct ImplData<'tcx> { - pub polarity: hir::ImplPolarity, +crate struct ImplData { + pub polarity: ty::ImplPolarity, pub defaultness: hir::Defaultness, pub parent_impl: Option, /// This is `Some` only for impls of `CoerceUnsized`. + // FIXME(eddyb) perhaps compute this on the fly if cheap enough? 
pub coerce_unsized_info: Option, - pub trait_ref: Option>>, } @@ -342,7 +347,7 @@ pub struct ImplData<'tcx> { /// is a trait or an impl and whether, in a trait, it has /// a default, or an in impl, whether it's marked "default". #[derive(Copy, Clone, RustcEncodable, RustcDecodable)] -pub enum AssocContainer { +crate enum AssocContainer { TraitRequired, TraitWithDefault, ImplDefault, @@ -350,7 +355,7 @@ pub enum AssocContainer { } impl AssocContainer { - pub fn with_def_id(&self, def_id: DefId) -> ty::AssocItemContainer { + crate fn with_def_id(&self, def_id: DefId) -> ty::AssocItemContainer { match *self { AssocContainer::TraitRequired | AssocContainer::TraitWithDefault => ty::TraitContainer(def_id), @@ -360,7 +365,7 @@ impl AssocContainer { } } - pub fn defaultness(&self) -> hir::Defaultness { + crate fn defaultness(&self) -> hir::Defaultness { match *self { AssocContainer::TraitRequired => hir::Defaultness::Default { has_value: false, @@ -377,22 +382,17 @@ impl AssocContainer { } #[derive(RustcEncodable, RustcDecodable)] -pub struct MethodData<'tcx> { - pub fn_data: FnData<'tcx>, +crate struct MethodData { + pub fn_data: FnData, pub container: AssocContainer, pub has_self: bool, } #[derive(RustcEncodable, RustcDecodable)] -pub struct ClosureData<'tcx> { - pub sig: Lazy>, -} - -#[derive(RustcEncodable, RustcDecodable)] -pub struct GeneratorData<'tcx> { +crate struct GeneratorData<'tcx> { pub layout: mir::GeneratorLayout<'tcx>, } // Tags used for encoding Spans: -pub const TAG_VALID_SPAN: u8 = 0; -pub const TAG_INVALID_SPAN: u8 = 1; +crate const TAG_VALID_SPAN: u8 = 0; +crate const TAG_INVALID_SPAN: u8 = 1; diff --git a/src/librustc_metadata/table.rs b/src/librustc_metadata/table.rs new file mode 100644 index 0000000000..e164c28c95 --- /dev/null +++ b/src/librustc_metadata/table.rs @@ -0,0 +1,239 @@ +use crate::decoder::Metadata; +use crate::schema::*; + +use rustc::hir::def_id::{DefId, DefIndex}; +use rustc_serialize::{Encodable, opaque::Encoder}; +use std::convert::TryInto; +use std::marker::PhantomData; +use std::num::NonZeroUsize; +use log::debug; + +/// Helper trait, for encoding to, and decoding from, a fixed number of bytes. +/// Used mainly for Lazy positions and lengths. +/// Unchecked invariant: `Self::default()` should encode as `[0; BYTE_LEN]`, +/// but this has no impact on safety. +crate trait FixedSizeEncoding: Default { + const BYTE_LEN: usize; + + // FIXME(eddyb) convert to and from `[u8; Self::BYTE_LEN]` instead, + // once that starts being allowed by the compiler (i.e. lazy normalization). + fn from_bytes(b: &[u8]) -> Self; + fn write_to_bytes(self, b: &mut [u8]); + + // FIXME(eddyb) make these generic functions, or at least defaults here. + // (same problem as above, needs `[u8; Self::BYTE_LEN]`) + // For now, a macro (`fixed_size_encoding_byte_len_and_defaults`) is used. + + /// Read a `Self` value (encoded as `Self::BYTE_LEN` bytes), + /// from `&b[i * Self::BYTE_LEN..]`, returning `None` if `i` + /// is not in bounds, or `Some(Self::from_bytes(...))` otherwise. + fn maybe_read_from_bytes_at(b: &[u8], i: usize) -> Option; + /// Write a `Self` value (encoded as `Self::BYTE_LEN` bytes), + /// at `&mut b[i * Self::BYTE_LEN..]`, using `Self::write_to_bytes`. + fn write_to_bytes_at(self, b: &mut [u8], i: usize); +} + +// HACK(eddyb) this shouldn't be needed (see comments on the methods above). +macro_rules! 
fixed_size_encoding_byte_len_and_defaults { + ($byte_len:expr) => { + const BYTE_LEN: usize = $byte_len; + fn maybe_read_from_bytes_at(b: &[u8], i: usize) -> Option { + const BYTE_LEN: usize = $byte_len; + // HACK(eddyb) ideally this would be done with fully safe code, + // but slicing `[u8]` with `i * N..` is optimized worse, due to the + // possibility of `i * N` overflowing, than indexing `[[u8; N]]`. + let b = unsafe { + std::slice::from_raw_parts( + b.as_ptr() as *const [u8; BYTE_LEN], + b.len() / BYTE_LEN, + ) + }; + b.get(i).map(|b| FixedSizeEncoding::from_bytes(b)) + } + fn write_to_bytes_at(self, b: &mut [u8], i: usize) { + const BYTE_LEN: usize = $byte_len; + // HACK(eddyb) ideally this would be done with fully safe code, + // see similar comment in `read_from_bytes_at` for why it can't yet. + let b = unsafe { + std::slice::from_raw_parts_mut( + b.as_mut_ptr() as *mut [u8; BYTE_LEN], + b.len() / BYTE_LEN, + ) + }; + self.write_to_bytes(&mut b[i]); + } + } +} + +impl FixedSizeEncoding for u32 { + fixed_size_encoding_byte_len_and_defaults!(4); + + fn from_bytes(b: &[u8]) -> Self { + let mut bytes = [0; Self::BYTE_LEN]; + bytes.copy_from_slice(&b[..Self::BYTE_LEN]); + Self::from_le_bytes(bytes) + } + + fn write_to_bytes(self, b: &mut [u8]) { + b[..Self::BYTE_LEN].copy_from_slice(&self.to_le_bytes()); + } +} + +// NOTE(eddyb) there could be an impl for `usize`, which would enable a more +// generic `Lazy` impl, but in the general case we might not need / want to +// fit every `usize` in `u32`. +impl FixedSizeEncoding for Option> { + fixed_size_encoding_byte_len_and_defaults!(u32::BYTE_LEN); + + fn from_bytes(b: &[u8]) -> Self { + Some(Lazy::from_position(NonZeroUsize::new(u32::from_bytes(b) as usize)?)) + } + + fn write_to_bytes(self, b: &mut [u8]) { + let position = self.map_or(0, |lazy| lazy.position.get()); + let position: u32 = position.try_into().unwrap(); + + position.write_to_bytes(b) + } +} + +impl FixedSizeEncoding for Option> { + fixed_size_encoding_byte_len_and_defaults!(u32::BYTE_LEN * 2); + + fn from_bytes(b: &[u8]) -> Self { + Some(Lazy::from_position_and_meta( + >>::from_bytes(b)?.position, + u32::from_bytes(&b[u32::BYTE_LEN..]) as usize, + )) + } + + fn write_to_bytes(self, b: &mut [u8]) { + self.map(|lazy| Lazy::::from_position(lazy.position)) + .write_to_bytes(b); + + let len = self.map_or(0, |lazy| lazy.meta); + let len: u32 = len.try_into().unwrap(); + + len.write_to_bytes(&mut b[u32::BYTE_LEN..]); + } +} + +/// Random-access table (i.e. offering constant-time `get`/`set`), similar to +/// `Vec>`, but without requiring encoding or decoding all the values +/// eagerly and in-order. +/// A total of `(max_idx + 1) * as FixedSizeEncoding>::BYTE_LEN` bytes +/// are used for a table, where `max_idx` is the largest index passed to `set`. +// FIXME(eddyb) replace `Vec` with `[_]` here, such that `Box>` would be used +// when building it, and `Lazy>` or `&Table` when reading it. +// (not sure if that is possible given that the `Vec` is being resized now) +crate struct Table where Option: FixedSizeEncoding { + // FIXME(eddyb) store `[u8; >::BYTE_LEN]` instead of `u8` in `Vec`, + // once that starts being allowed by the compiler (i.e. lazy normalization). 
+ bytes: Vec, + _marker: PhantomData, +} + +impl Default for Table where Option: FixedSizeEncoding { + fn default() -> Self { + Table { + bytes: vec![], + _marker: PhantomData, + } + } +} + +impl Table where Option: FixedSizeEncoding { + crate fn set(&mut self, i: usize, value: T) { + // FIXME(eddyb) investigate more compact encodings for sparse tables. + // On the PR @michaelwoerister mentioned: + // > Space requirements could perhaps be optimized by using the HAMT `popcnt` + // > trick (i.e. divide things into buckets of 32 or 64 items and then + // > store bit-masks of which item in each bucket is actually serialized). + let needed = (i + 1) * >::BYTE_LEN; + if self.bytes.len() < needed { + self.bytes.resize(needed, 0); + } + + Some(value).write_to_bytes_at(&mut self.bytes, i); + } + + crate fn encode(&self, buf: &mut Encoder) -> Lazy { + let pos = buf.position(); + buf.emit_raw_bytes(&self.bytes); + Lazy::from_position_and_meta( + NonZeroUsize::new(pos as usize).unwrap(), + self.bytes.len(), + ) + } +} + +impl LazyMeta for Table where Option: FixedSizeEncoding { + type Meta = usize; + + fn min_size(len: usize) -> usize { + len + } +} + +impl Lazy> where Option: FixedSizeEncoding { + /// Given the metadata, extract out the value at a particular index (if any). + #[inline(never)] + crate fn get<'a, 'tcx, M: Metadata<'a, 'tcx>>( + &self, + metadata: M, + i: usize, + ) -> Option { + debug!("Table::lookup: index={:?} len={:?}", i, self.meta); + + let start = self.position.get(); + let bytes = &metadata.raw_bytes()[start..start + self.meta]; + >::maybe_read_from_bytes_at(bytes, i)? + } +} + +/// Like a `Table` but using `DefIndex` instead of `usize` as keys. +// FIXME(eddyb) replace by making `Table` behave like `IndexVec`, +// and by using `newtype_index!` to define `DefIndex`. +crate struct PerDefTable(Table) where Option: FixedSizeEncoding; + +impl Default for PerDefTable where Option: FixedSizeEncoding { + fn default() -> Self { + PerDefTable(Table::default()) + } +} + +impl PerDefTable where Option: FixedSizeEncoding { + crate fn set(&mut self, def_id: DefId, value: T) { + assert!(def_id.is_local()); + self.0.set(def_id.index.index(), value); + } + + crate fn encode(&self, buf: &mut Encoder) -> Lazy { + let lazy = self.0.encode(buf); + Lazy::from_position_and_meta(lazy.position, lazy.meta) + } +} + +impl LazyMeta for PerDefTable where Option: FixedSizeEncoding { + type Meta = as LazyMeta>::Meta; + + fn min_size(meta: Self::Meta) -> usize { + Table::::min_size(meta) + } +} + +impl Lazy> where Option: FixedSizeEncoding { + fn as_table(&self) -> Lazy> { + Lazy::from_position_and_meta(self.position, self.meta) + } + + /// Given the metadata, extract out the value at a particular DefIndex (if any). 
+ #[inline(never)] + crate fn get<'a, 'tcx, M: Metadata<'a, 'tcx>>( + &self, + metadata: M, + def_index: DefIndex, + ) -> Option { + self.as_table().get(metadata, def_index.index()) + } +} diff --git a/src/librustc_mir/Cargo.toml b/src/librustc_mir/Cargo.toml index 0691390bea..f0cdcf2136 100644 --- a/src/librustc_mir/Cargo.toml +++ b/src/librustc_mir/Cargo.toml @@ -19,11 +19,11 @@ polonius-engine = "0.10.0" rustc = { path = "../librustc" } rustc_target = { path = "../librustc_target" } rustc_data_structures = { path = "../librustc_data_structures" } +rustc_index = { path = "../librustc_index" } rustc_errors = { path = "../librustc_errors" } rustc_lexer = { path = "../librustc_lexer" } rustc_serialize = { path = "../libserialize", package = "serialize" } syntax = { path = "../libsyntax" } syntax_pos = { path = "../libsyntax_pos" } -byteorder = { version = "1.3" } rustc_apfloat = { path = "../librustc_apfloat" } smallvec = { version = "0.6.7", features = ["union", "may_dangle"] } diff --git a/src/librustc_mir/borrow_check/borrow_set.rs b/src/librustc_mir/borrow_check/borrow_set.rs index db19cbc317..98641031c1 100644 --- a/src/librustc_mir/borrow_check/borrow_set.rs +++ b/src/librustc_mir/borrow_check/borrow_set.rs @@ -8,8 +8,8 @@ use rustc::mir::visit::{PlaceContext, Visitor, NonUseContext, MutatingUseContext use rustc::mir::{self, Location, Body, Local}; use rustc::ty::{RegionVid, TyCtxt}; use rustc::util::nodemap::{FxHashMap, FxHashSet}; -use rustc_data_structures::indexed_vec::IndexVec; -use rustc_data_structures::bit_set::BitSet; +use rustc_index::vec::IndexVec; +use rustc_index::bit_set::BitSet; use std::fmt; use std::ops::Index; @@ -315,10 +315,7 @@ impl<'a, 'tcx> GatherBorrows<'a, 'tcx> { // TEMP = &foo // // so extract `temp`. - let temp = if let &mir::Place { - base: mir::PlaceBase::Local(temp), - projection: box [], - } = assigned_place { + let temp = if let Some(temp) = assigned_place.as_local() { temp } else { span_bug!( diff --git a/src/librustc_mir/borrow_check/conflict_errors.rs b/src/librustc_mir/borrow_check/conflict_errors.rs index 599a0ad0d0..36db68a337 100644 --- a/src/librustc_mir/borrow_check/conflict_errors.rs +++ b/src/librustc_mir/borrow_check/conflict_errors.rs @@ -1,5 +1,6 @@ use rustc::hir; use rustc::hir::def_id::DefId; +use rustc::hir::{AsyncGeneratorKind, GeneratorKind}; use rustc::mir::{ self, AggregateKind, BindingForm, BorrowKind, ClearCrossCrate, ConstraintCategory, Local, LocalDecl, LocalKind, Location, Operand, Place, PlaceBase, PlaceRef, ProjectionElem, Rvalue, @@ -7,7 +8,7 @@ use rustc::mir::{ }; use rustc::ty::{self, Ty}; use rustc_data_structures::fx::FxHashSet; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::vec::Idx; use rustc_errors::{Applicability, DiagnosticBuilder}; use syntax_pos::Span; use syntax::source_map::DesugaringKind; @@ -77,7 +78,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { .last() .unwrap(); - if self.uninitialized_error_reported.contains(&root_place) { + if !self.uninitialized_error_reported.insert(root_place) { debug!( "report_use_of_moved_or_uninitialized place: error about {:?} suppressed", root_place @@ -85,8 +86,6 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { return; } - self.uninitialized_error_reported.insert(root_place); - let item_msg = match self.describe_place_with_options(used_place, IncludingDowncast(true)) { Some(name) => format!("`{}`", name), @@ -105,9 +104,6 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { format!("{} occurs due to use{}", desired_action.as_noun(), use_spans.describe()), ); - // 
This error should not be downgraded to a warning, - // even in migrate mode. - self.disable_error_downgrading(); err.buffer(&mut self.errors_buffer); } else { if let Some((reported_place, _)) = self.move_error_reported.get(&move_out_indices) { @@ -211,7 +207,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { let ty = Place::ty_from(used_place.base, used_place.projection, self.body, self.infcx.tcx) .ty; - let needs_note = match ty.sty { + let needs_note = match ty.kind { ty::Closure(id, _) => { let tables = self.infcx.tcx.typeck_tables_of(id); let hir_id = self.infcx.tcx.hir().as_local_hir_id(id).unwrap(); @@ -232,7 +228,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { Some(ref name) => format!("`{}`", name), None => "value".to_owned(), }; - if let ty::Param(param_ty) = ty.sty { + if let ty::Param(param_ty) = ty.kind { let tcx = self.infcx.tcx; let generics = tcx.generics_of(self.mir_def_id); let def_id = generics.type_param(¶m_ty, tcx).def_id; @@ -243,11 +239,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { ); } } - let span = if let Place { - base: PlaceBase::Local(local), - projection: box [], - } = place { - let decl = &self.body.local_decls[*local]; + let span = if let Some(local) = place.as_local() { + let decl = &self.body.local_decls[local]; Some(decl.source_info.span) } else { None @@ -615,7 +608,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { projection, } = first_borrowed_place; - let mut cursor = &**projection; + let mut cursor = projection.as_ref(); while let [proj_base @ .., elem] = cursor { cursor = proj_base; @@ -639,7 +632,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { projection, } = second_borrowed_place; - let mut cursor = &**projection; + let mut cursor = projection.as_ref(); while let [proj_base @ .., elem] = cursor { cursor = proj_base; @@ -714,10 +707,12 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { _ => drop_span, }; + let root_place_projection = self.infcx.tcx.intern_place_elems(root_place.projection); + if self.access_place_error_reported .contains(&(Place { base: root_place.base.clone(), - projection: root_place.projection.to_vec().into_boxed_slice(), + projection: root_place_projection, }, borrow_span)) { debug!( @@ -730,7 +725,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { self.access_place_error_reported .insert((Place { base: root_place.base.clone(), - projection: root_place.projection.to_vec().into_boxed_slice(), + projection: root_place_projection, }, borrow_span)); if let StorageDeadOrDrop::Destructor(dropped_ty) = @@ -753,6 +748,11 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { let kind_place = kind.filter(|_| place_desc.is_some()).map(|k| (k, place_span.0)); let explanation = self.explain_why_borrow_contains_point(location, &borrow, kind_place); + debug!( + "report_borrowed_value_does_not_live_long_enough(place_desc: {:?}, explanation: {:?})", + place_desc, + explanation + ); let err = match (place_desc, explanation) { (Some(_), _) if self.is_place_thread_local(root_place) => { self.report_thread_local_value_does_not_live_long_enough(drop_span, borrow_span) @@ -786,7 +786,25 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { .. }, ) if borrow_spans.for_closure() => self.report_escaping_closure_capture( - borrow_spans.args_or_use(), + borrow_spans, + borrow_span, + region_name, + category, + span, + &format!("`{}`", name), + ), + ( + Some(ref name), + BorrowExplanation::MustBeValidFor { + category: category @ ConstraintCategory::OpaqueType, + from_closure: false, + ref region_name, + span, + .. 
+ }, + + ) if borrow_spans.for_generator() => self.report_escaping_closure_capture( + borrow_spans, borrow_span, region_name, category, @@ -1105,26 +1123,22 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { }; let (place_desc, note) = if let Some(place_desc) = opt_place_desc { - let local_kind = match borrow.borrowed_place { - Place { - base: PlaceBase::Local(local), - projection: box [], - } => { - match self.body.local_kind(local) { - LocalKind::ReturnPointer - | LocalKind::Temp => bug!("temporary or return pointer with a name"), - LocalKind::Var => "local variable ", - LocalKind::Arg - if !self.upvars.is_empty() - && local == Local::new(1) => { - "variable captured by `move` " - } - LocalKind::Arg => { - "function parameter " - } + let local_kind = if let Some(local) = borrow.borrowed_place.as_local() { + match self.body.local_kind(local) { + LocalKind::ReturnPointer + | LocalKind::Temp => bug!("temporary or return pointer with a name"), + LocalKind::Var => "local variable ", + LocalKind::Arg + if !self.upvars.is_empty() + && local == Local::new(1) => { + "variable captured by `move` " + } + LocalKind::Arg => { + "function parameter " } } - _ => "local data ", + } else { + "local data " }; ( format!("{}`{}`", local_kind, place_desc), @@ -1175,7 +1189,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { fn report_escaping_closure_capture( &mut self, - args_span: Span, + use_span: UseSpans, var_span: Span, fr_name: &RegionName, category: ConstraintCategory, @@ -1183,7 +1197,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { captured_var: &str, ) -> DiagnosticBuilder<'cx> { let tcx = self.infcx.tcx; - + let args_span = use_span.args_or_use(); let mut err = self.cannot_capture_in_long_lived_closure( args_span, captured_var, @@ -1203,12 +1217,25 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { }, Err(_) => "move || ".to_string() }; - + let kind = match use_span.generator_kind() { + Some(generator_kind) => match generator_kind { + GeneratorKind::Async(async_kind) => match async_kind { + AsyncGeneratorKind::Block => "async block", + AsyncGeneratorKind::Closure => "async closure", + _ => bug!("async block/closure expected, but async function found."), + }, + GeneratorKind::Gen => "generator", + } + None => "closure", + }; err.span_suggestion( args_span, - &format!("to force the closure to take ownership of {} (and any \ - other referenced variables), use the `move` keyword", - captured_var), + &format!( + "to force the {} to take ownership of {} (and any \ + other referenced variables), use the `move` keyword", + kind, + captured_var + ), suggestion, Applicability::MachineApplicable, ); @@ -1217,6 +1244,9 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { ConstraintCategory::Return => { err.span_note(constraint_span, "closure is returned here"); } + ConstraintCategory::OpaqueType => { + err.span_note(constraint_span, "generator is returned here"); + } ConstraintCategory::CallArgument => { fr_name.highlight_region_name(&mut err); err.span_note( @@ -1243,7 +1273,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { let escapes_from = if tcx.is_closure(self.mir_def_id) { let tables = tcx.typeck_tables_of(self.mir_def_id); let mir_hir_id = tcx.hir().def_index_to_hir_id(self.mir_def_id.index); - match tables.node_type(mir_hir_id).sty { + match tables.node_type(mir_hir_id).kind { ty::Closure(..) => "closure", ty::Generator(..) 
=> "generator", _ => bug!("Closure body doesn't have a closure or generator type"), @@ -1445,10 +1475,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { assigned_span: Span, err_place: &Place<'tcx>, ) { - let (from_arg, local_decl) = if let Place { - base: PlaceBase::Local(local), - projection: box [], - } = *err_place { + let (from_arg, local_decl) = if let Some(local) = err_place.as_local() { if let LocalKind::Arg = self.body.local_kind(local) { (true, Some(&self.body.local_decls[local])) } else { @@ -1543,7 +1570,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { }, ProjectionElem::Field(..) | ProjectionElem::Downcast(..) => { let base_ty = Place::ty_from(&place.base, proj_base, self.body, tcx).ty; - match base_ty.sty { + match base_ty.kind { ty::Adt(def, _) if def.has_dtor(tcx) => { // Report the outermost adt with a destructor match base_access { @@ -1579,7 +1606,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { None } else { let ty = self.infcx.tcx.type_of(self.mir_def_id); - match ty.sty { + match ty.kind { ty::FnDef(_, _) | ty::FnPtr(_) => self.annotate_fn_sig( self.mir_def_id, self.infcx.tcx.fn_sig(self.mir_def_id), @@ -1608,11 +1635,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { reservation ); // Check that the initial assignment of the reserve location is into a temporary. - let mut target = *match reservation { - Place { - base: PlaceBase::Local(local), - projection: box [], - } if self.body.local_kind(*local) == LocalKind::Temp => local, + let mut target = match reservation.as_local() { + Some(local) if self.body.local_kind(local) == LocalKind::Temp => local, _ => return None, }; @@ -1624,127 +1648,122 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { "annotate_argument_and_return_for_borrow: target={:?} stmt={:?}", target, stmt ); - if let StatementKind::Assign( - box( - Place { - base: PlaceBase::Local(assigned_to), - projection: box [], - }, - rvalue - ) - ) = &stmt.kind { - debug!( - "annotate_argument_and_return_for_borrow: assigned_to={:?} \ - rvalue={:?}", - assigned_to, rvalue - ); - // Check if our `target` was captured by a closure. - if let Rvalue::Aggregate( - box AggregateKind::Closure(def_id, substs), - operands, - ) = rvalue - { - for operand in operands { - let assigned_from = match operand { + if let StatementKind::Assign(box(place, rvalue)) = &stmt.kind { + if let Some(assigned_to) = place.as_local() { + debug!( + "annotate_argument_and_return_for_borrow: assigned_to={:?} \ + rvalue={:?}", + assigned_to, rvalue + ); + // Check if our `target` was captured by a closure. + if let Rvalue::Aggregate( + box AggregateKind::Closure(def_id, substs), + operands, + ) = rvalue + { + for operand in operands { + let assigned_from = match operand { + Operand::Copy(assigned_from) | Operand::Move(assigned_from) => { + assigned_from + } + _ => continue, + }; + debug!( + "annotate_argument_and_return_for_borrow: assigned_from={:?}", + assigned_from + ); + + // Find the local from the operand. + let assigned_from_local = match assigned_from.local_or_deref_local() + { + Some(local) => local, + None => continue, + }; + + if assigned_from_local != target { + continue; + } + + // If a closure captured our `target` and then assigned + // into a place then we should annotate the closure in + // case it ends up being assigned into the return place. 
+ annotated_closure = self.annotate_fn_sig( + *def_id, + self.infcx.closure_sig(*def_id, *substs), + ); + debug!( + "annotate_argument_and_return_for_borrow: \ + annotated_closure={:?} assigned_from_local={:?} \ + assigned_to={:?}", + annotated_closure, assigned_from_local, assigned_to + ); + + if assigned_to == mir::RETURN_PLACE { + // If it was assigned directly into the return place, then + // return now. + return annotated_closure; + } else { + // Otherwise, update the target. + target = assigned_to; + } + } + + // If none of our closure's operands matched, then skip to the next + // statement. + continue; + } + + // Otherwise, look at other types of assignment. + let assigned_from = match rvalue { + Rvalue::Ref(_, _, assigned_from) => assigned_from, + Rvalue::Use(operand) => match operand { Operand::Copy(assigned_from) | Operand::Move(assigned_from) => { assigned_from } _ => continue, - }; - debug!( - "annotate_argument_and_return_for_borrow: assigned_from={:?}", - assigned_from - ); + }, + _ => continue, + }; + debug!( + "annotate_argument_and_return_for_borrow: \ + assigned_from={:?}", + assigned_from, + ); - // Find the local from the operand. - let assigned_from_local = match assigned_from.local_or_deref_local() { - Some(local) => local, - None => continue, - }; + // Find the local from the rvalue. + let assigned_from_local = match assigned_from.local_or_deref_local() { + Some(local) => local, + None => continue, + }; + debug!( + "annotate_argument_and_return_for_borrow: \ + assigned_from_local={:?}", + assigned_from_local, + ); - if assigned_from_local != target { - continue; - } - - // If a closure captured our `target` and then assigned - // into a place then we should annotate the closure in - // case it ends up being assigned into the return place. - annotated_closure = self.annotate_fn_sig( - *def_id, - self.infcx.closure_sig(*def_id, *substs), - ); - debug!( - "annotate_argument_and_return_for_borrow: \ - annotated_closure={:?} assigned_from_local={:?} \ - assigned_to={:?}", - annotated_closure, assigned_from_local, assigned_to - ); - - if *assigned_to == mir::RETURN_PLACE { - // If it was assigned directly into the return place, then - // return now. - return annotated_closure; - } else { - // Otherwise, update the target. - target = *assigned_to; - } + // Check if our local matches the target - if so, we've assigned our + // borrow to a new place. + if assigned_from_local != target { + continue; } - // If none of our closure's operands matched, then skip to the next - // statement. - continue; + // If we assigned our `target` into a new place, then we should + // check if it was the return place. + debug!( + "annotate_argument_and_return_for_borrow: \ + assigned_from_local={:?} assigned_to={:?}", + assigned_from_local, assigned_to + ); + if assigned_to == mir::RETURN_PLACE { + // If it was then return the annotated closure if there was one, + // else, annotate this function. + return annotated_closure.or_else(fallback); + } + + // If we didn't assign into the return place, then we just update + // the target. + target = assigned_to; } - - // Otherwise, look at other types of assignment. 
- let assigned_from = match rvalue { - Rvalue::Ref(_, _, assigned_from) => assigned_from, - Rvalue::Use(operand) => match operand { - Operand::Copy(assigned_from) | Operand::Move(assigned_from) => { - assigned_from - } - _ => continue, - }, - _ => continue, - }; - debug!( - "annotate_argument_and_return_for_borrow: \ - assigned_from={:?}", - assigned_from, - ); - - // Find the local from the rvalue. - let assigned_from_local = match assigned_from.local_or_deref_local() { - Some(local) => local, - None => continue, - }; - debug!( - "annotate_argument_and_return_for_borrow: \ - assigned_from_local={:?}", - assigned_from_local, - ); - - // Check if our local matches the target - if so, we've assigned our - // borrow to a new place. - if assigned_from_local != target { - continue; - } - - // If we assigned our `target` into a new place, then we should - // check if it was the return place. - debug!( - "annotate_argument_and_return_for_borrow: \ - assigned_from_local={:?} assigned_to={:?}", - assigned_from_local, assigned_to - ); - if *assigned_to == mir::RETURN_PLACE { - // If it was then return the annotated closure if there was one, - // else, annotate this function. - return annotated_closure.or_else(fallback); - } - - // If we didn't assign into the return place, then we just update - // the target. - target = *assigned_to; } } @@ -1755,38 +1774,37 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { target, terminator ); if let TerminatorKind::Call { - destination: Some((Place { - base: PlaceBase::Local(assigned_to), - projection: box [], - }, _)), + destination: Some((place, _)), args, .. } = &terminator.kind { - debug!( - "annotate_argument_and_return_for_borrow: assigned_to={:?} args={:?}", - assigned_to, args - ); - for operand in args { - let assigned_from = match operand { - Operand::Copy(assigned_from) | Operand::Move(assigned_from) => { - assigned_from - } - _ => continue, - }; + if let Some(assigned_to) = place.as_local() { debug!( - "annotate_argument_and_return_for_borrow: assigned_from={:?}", - assigned_from, + "annotate_argument_and_return_for_borrow: assigned_to={:?} args={:?}", + assigned_to, args ); - - if let Some(assigned_from_local) = assigned_from.local_or_deref_local() { + for operand in args { + let assigned_from = match operand { + Operand::Copy(assigned_from) | Operand::Move(assigned_from) => { + assigned_from + } + _ => continue, + }; debug!( - "annotate_argument_and_return_for_borrow: assigned_from_local={:?}", - assigned_from_local, + "annotate_argument_and_return_for_borrow: assigned_from={:?}", + assigned_from, ); - if *assigned_to == mir::RETURN_PLACE && assigned_from_local == target { - return annotated_closure.or_else(fallback); + if let Some(assigned_from_local) = assigned_from.local_or_deref_local() { + debug!( + "annotate_argument_and_return_for_borrow: assigned_from_local={:?}", + assigned_from_local, + ); + + if assigned_to == mir::RETURN_PLACE && assigned_from_local == target { + return annotated_closure.or_else(fallback); + } } } } @@ -1834,18 +1852,18 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { // 3. The return type is not a reference. In this case, we don't highlight // anything. let return_ty = sig.output(); - match return_ty.skip_binder().sty { + match return_ty.skip_binder().kind { ty::Ref(return_region, _, _) if return_region.has_name() && !is_closure => { // This is case 1 from above, return type is a named reference so we need to // search for relevant arguments. 
let mut arguments = Vec::new(); for (index, argument) in sig.inputs().skip_binder().iter().enumerate() { - if let ty::Ref(argument_region, _, _) = argument.sty { + if let ty::Ref(argument_region, _, _) = argument.kind { if argument_region == return_region { // Need to use the `rustc::ty` types to compare against the // `return_region`. Then use the `rustc::hir` type to get only // the lifetime span. - if let hir::TyKind::Rptr(lifetime, _) = &fn_decl.inputs[index].node { + if let hir::TyKind::Rptr(lifetime, _) = &fn_decl.inputs[index].kind { // With access to the lifetime, we can get // the span of it. arguments.push((*argument, lifetime.span)); @@ -1866,7 +1884,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { let return_ty = *sig.output().skip_binder(); let mut return_span = fn_decl.output.span(); if let hir::FunctionRetTy::Return(ty) = &fn_decl.output { - if let hir::TyKind::Rptr(lifetime, _) = ty.node { + if let hir::TyKind::Rptr(lifetime, _) = ty.kind { return_span = lifetime.span; } } @@ -1886,9 +1904,9 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { // Closure arguments are wrapped in a tuple, so we need to get the first // from that. - if let ty::Tuple(elems) = argument_ty.sty { + if let ty::Tuple(elems) = argument_ty.kind { let argument_ty = elems.first()?.expect_ty(); - if let ty::Ref(_, _, _) = argument_ty.sty { + if let ty::Ref(_, _, _) = argument_ty.kind { return Some(AnnotatedBorrowFnSignature::Closure { argument_ty, argument_span, @@ -1908,7 +1926,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { let return_ty = *sig.output().skip_binder(); // We expect the first argument to be a reference. - match argument_ty.sty { + match argument_ty.kind { ty::Ref(_, _, _) => {} _ => return None, } diff --git a/src/librustc_mir/borrow_check/error_reporting.rs b/src/librustc_mir/borrow_check/error_reporting.rs index 5bccd2835c..4036e9db33 100644 --- a/src/librustc_mir/borrow_check/error_reporting.rs +++ b/src/librustc_mir/borrow_check/error_reporting.rs @@ -1,6 +1,7 @@ use rustc::hir; use rustc::hir::def::Namespace; use rustc::hir::def_id::DefId; +use rustc::hir::GeneratorKind; use rustc::mir::{ AggregateKind, Constant, Field, Local, LocalKind, Location, Operand, Place, PlaceBase, PlaceRef, ProjectionElem, Rvalue, Statement, StatementKind, @@ -14,7 +15,7 @@ use syntax_pos::Span; use syntax::symbol::sym; use super::borrow_set::BorrowData; -use super::{MirBorrowckCtxt}; +use super::MirBorrowckCtxt; use crate::dataflow::move_paths::{InitLocation, LookupResult}; pub(super) struct IncludingDowncast(pub(super) bool); @@ -58,7 +59,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { if let TerminatorKind::Call { func: Operand::Constant(box Constant { literal: ty::Const { - ty: &ty::TyS { sty: ty::FnDef(id, _), .. }, + ty: &ty::TyS { kind: ty::FnDef(id, _), .. }, .. }, .. @@ -76,7 +77,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { }; debug!("add_moved_or_invoked_closure_note: closure={:?}", closure); - if let ty::Closure(did, _) = self.body.local_decls[closure].ty.sty { + if let ty::Closure(did, _) = self.body.local_decls[closure].ty.kind { let hir_id = self.infcx.tcx.hir().as_local_hir_id(did).unwrap(); if let Some((span, name)) = self.infcx.tcx.typeck_tables_of(did) @@ -99,7 +100,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { // Check if we are just moving a closure after it has been invoked. 
if let Some(target) = target { - if let ty::Closure(did, _) = self.body.local_decls[target].ty.sty { + if let ty::Closure(did, _) = self.body.local_decls[target].ty.kind { let hir_id = self.infcx.tcx.hir().as_local_hir_id(did).unwrap(); if let Some((span, name)) = self.infcx.tcx.typeck_tables_of(did) @@ -400,7 +401,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { // If the type is a box, the field is described from the boxed type self.describe_field_from_ty(&ty.boxed_ty(), field, variant_index) } else { - match ty.sty { + match ty.kind { ty::Adt(def, _) => { let variant = if let Some(idx) = variant_index { assert!(def.is_enum()); @@ -558,7 +559,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { // We need to add synthesized lifetimes where appropriate. We do // this by hooking into the pretty printer and telling it to label the // lifetimes without names with the value `'0`. - match ty.sty { + match ty.kind { ty::Ref(ty::RegionKind::ReLateBound(_, br), _, _) | ty::Ref( ty::RegionKind::RePlaceholder(ty::PlaceholderRegion { name: br, .. }), @@ -578,7 +579,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { let mut s = String::new(); let mut printer = ty::print::FmtPrinter::new(self.infcx.tcx, &mut s, Namespace::TypeNS); - let region = match ty.sty { + let region = match ty.kind { ty::Ref(region, _, _) => { match region { ty::RegionKind::ReLateBound(_, br) @@ -604,7 +605,7 @@ pub(super) enum UseSpans { // The access is caused by capturing a variable for a closure. ClosureUse { // This is true if the captured variable was from a generator. - is_generator: bool, + generator_kind: Option, // The span of the args of the closure, including the `move` keyword if // it's present. args_span: Span, @@ -631,6 +632,13 @@ impl UseSpans { } } + pub(super) fn generator_kind(self) -> Option { + match self { + UseSpans::ClosureUse { generator_kind, .. } => generator_kind, + _ => None, + } + } + // Add a span label to the arguments of the closure, if it exists. pub(super) fn args_span_label( self, @@ -656,7 +664,7 @@ impl UseSpans { /// Returns `false` if this place is not used in a closure. pub(super) fn for_closure(&self) -> bool { match *self { - UseSpans::ClosureUse { is_generator, .. } => !is_generator, + UseSpans::ClosureUse { generator_kind, .. } => generator_kind.is_none(), _ => false, } } @@ -664,7 +672,7 @@ impl UseSpans { /// Returns `false` if this place is not used in a generator. pub(super) fn for_generator(&self) -> bool { match *self { - UseSpans::ClosureUse { is_generator, .. } => is_generator, + UseSpans::ClosureUse { generator_kind, .. } => generator_kind.is_some(), _ => false, } } @@ -672,7 +680,7 @@ impl UseSpans { /// Describe the span associated with a use of a place. pub(super) fn describe(&self) -> String { match *self { - UseSpans::ClosureUse { is_generator, .. } => if is_generator { + UseSpans::ClosureUse { generator_kind, .. 
} => if generator_kind.is_some() { " in generator".to_string() } else { " in closure".to_string() @@ -754,7 +762,7 @@ impl BorrowedContentSource<'tcx> { } fn from_call(func: Ty<'tcx>, tcx: TyCtxt<'tcx>) -> Option { - match func.sty { + match func.kind { ty::FnDef(def_id, substs) => { let trait_id = tcx.trait_of_item(def_id)?; @@ -794,19 +802,20 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { if let StatementKind::Assign( box(_, Rvalue::Aggregate(ref kind, ref places)) ) = stmt.kind { - let (def_id, is_generator) = match kind { - box AggregateKind::Closure(def_id, _) => (def_id, false), - box AggregateKind::Generator(def_id, _, _) => (def_id, true), + let def_id = match kind { + box AggregateKind::Closure(def_id, _) + | box AggregateKind::Generator(def_id, _, _) => def_id, _ => return OtherUse(stmt.source_info.span), }; debug!( - "move_spans: def_id={:?} is_generator={:?} places={:?}", - def_id, is_generator, places + "move_spans: def_id={:?} places={:?}", + def_id, places ); - if let Some((args_span, var_span)) = self.closure_span(*def_id, moved_place, places) { + if let Some((args_span, generator_kind, var_span)) + = self.closure_span(*def_id, moved_place, places) { return ClosureUse { - is_generator, + generator_kind, args_span, var_span, }; @@ -829,12 +838,15 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { .get(location.statement_index) { Some(&Statement { - kind: StatementKind::Assign(box(Place { - base: PlaceBase::Local(local), - projection: box [], - }, _)), + kind: StatementKind::Assign(box(ref place, _)), .. - }) => local, + }) => { + if let Some(local) = place.as_local() { + local + } else { + return OtherUse(use_span); + } + } _ => return OtherUse(use_span), }; @@ -857,11 +869,11 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { "borrow_spans: def_id={:?} is_generator={:?} places={:?}", def_id, is_generator, places ); - if let Some((args_span, var_span)) = self.closure_span( + if let Some((args_span, generator_kind, var_span)) = self.closure_span( *def_id, Place::from(target).as_ref(), places ) { return ClosureUse { - is_generator, + generator_kind, args_span, var_span, }; @@ -884,23 +896,25 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { def_id: DefId, target_place: PlaceRef<'cx, 'tcx>, places: &Vec>, - ) -> Option<(Span, Span)> { + ) -> Option<(Span, Option, Span)> { debug!( "closure_span: def_id={:?} target_place={:?} places={:?}", def_id, target_place, places ); let hir_id = self.infcx.tcx.hir().as_local_hir_id(def_id)?; - let expr = &self.infcx.tcx.hir().expect_expr(hir_id).node; + let expr = &self.infcx.tcx.hir().expect_expr(hir_id).kind; debug!("closure_span: hir_id={:?} expr={:?}", hir_id, expr); if let hir::ExprKind::Closure( - .., args_span, _ + .., body_id, args_span, _ ) = expr { for (upvar, place) in self.infcx.tcx.upvars(def_id)?.values().zip(places) { match place { Operand::Copy(place) | Operand::Move(place) if target_place == place.as_ref() => { debug!("closure_span: found captured local {:?}", place); - return Some((*args_span, upvar.span)); + let body = self.infcx.tcx.hir().body(*body_id); + let generator_kind = body.generator_kind(); + return Some((*args_span, generator_kind, upvar.span)); }, _ => {} } diff --git a/src/librustc_mir/borrow_check/flows.rs b/src/librustc_mir/borrow_check/flows.rs index 1f17ab69f6..ce5d2a14bd 100644 --- a/src/librustc_mir/borrow_check/flows.rs +++ b/src/librustc_mir/borrow_check/flows.rs @@ -5,7 +5,7 @@ use rustc::mir::{BasicBlock, Local, Location}; use rustc::ty::RegionVid; -use rustc_data_structures::bit_set::BitIter; +use 
rustc_index::bit_set::BitIter; use crate::borrow_check::location::LocationIndex; diff --git a/src/librustc_mir/borrow_check/location.rs b/src/librustc_mir/borrow_check/location.rs index cc44dc3f5d..9e94317b87 100644 --- a/src/librustc_mir/borrow_check/location.rs +++ b/src/librustc_mir/borrow_check/location.rs @@ -1,5 +1,5 @@ use rustc::mir::{BasicBlock, Location, Body}; -use rustc_data_structures::indexed_vec::{Idx, IndexVec}; +use rustc_index::vec::{Idx, IndexVec}; /// Maps between a MIR Location, which identifies a particular /// statement within a basic block, to a "rich location", which @@ -17,7 +17,7 @@ crate struct LocationTable { statements_before_block: IndexVec, } -newtype_index! { +rustc_index::newtype_index! { pub struct LocationIndex { DEBUG_FORMAT = "LocationIndex({})" } diff --git a/src/librustc_mir/borrow_check/mod.rs b/src/librustc_mir/borrow_check/mod.rs index 32c6dd67a4..c3369e8721 100644 --- a/src/librustc_mir/borrow_check/mod.rs +++ b/src/librustc_mir/borrow_check/mod.rs @@ -7,7 +7,6 @@ use rustc::hir::def_id::DefId; use rustc::infer::InferCtxt; use rustc::lint::builtin::UNUSED_MUT; use rustc::lint::builtin::{MUTABLE_BORROW_RESERVATION_CONFLICT}; -use rustc::middle::borrowck::SignalledError; use rustc::mir::{AggregateKind, BasicBlock, BorrowCheckResult, BorrowKind}; use rustc::mir::{ ClearCrossCrate, Local, Location, Body, Mutability, Operand, Place, PlaceBase, PlaceElem, @@ -18,11 +17,11 @@ use rustc::mir::{Terminator, TerminatorKind}; use rustc::ty::query::Providers; use rustc::ty::{self, TyCtxt}; -use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder, Level}; -use rustc_data_structures::bit_set::BitSet; +use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder}; +use rustc_index::bit_set::BitSet; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_data_structures::graph::dominators::Dominators; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc_index::vec::IndexVec; use smallvec::SmallVec; use std::collections::BTreeMap; @@ -236,7 +235,7 @@ fn do_mir_borrowck<'a, 'tcx>( let movable_generator = match tcx.hir().get(id) { Node::Expr(&hir::Expr { - node: hir::ExprKind::Closure(.., Some(hir::GeneratorMovability::Static)), + kind: hir::ExprKind::Closure(.., Some(hir::GeneratorMovability::Static)), .. }) => false, _ => true, @@ -259,10 +258,6 @@ fn do_mir_borrowck<'a, 'tcx>( move_error_reported: BTreeMap::new(), uninitialized_error_reported: Default::default(), errors_buffer, - // Only downgrade errors on Rust 2015 and refuse to do so on Rust 2018. - // FIXME(Centril): In Rust 1.40.0, refuse doing so on 2015 as well and - // proceed to throwing out the migration infrastructure. - disable_error_downgrading: body.span.rust_2018(), nonlexical_regioncx: regioncx, used_mut: Default::default(), used_mut_upvars: SmallVec::new(), @@ -374,33 +369,6 @@ fn do_mir_borrowck<'a, 'tcx>( if !mbcx.errors_buffer.is_empty() { mbcx.errors_buffer.sort_by_key(|diag| diag.span.primary_span()); - if !mbcx.disable_error_downgrading && tcx.migrate_borrowck() { - // When borrowck=migrate, check if AST-borrowck would - // error on the given code. - - // rust-lang/rust#55492, rust-lang/rust#58776 check the base def id - // for errors. AST borrowck is responsible for aggregating - // `signalled_any_error` from all of the nested closures here. 
- let base_def_id = tcx.closure_base_def_id(def_id); - - match tcx.borrowck(base_def_id).signalled_any_error { - SignalledError::NoErrorsSeen => { - // if AST-borrowck signalled no errors, then - // downgrade all the buffered MIR-borrowck errors - // to warnings. - - for err in mbcx.errors_buffer.iter_mut() { - downgrade_if_error(err); - } - } - SignalledError::SawSomeError => { - // if AST-borrowck signalled a (cancelled) error, - // then we will just emit the buffered - // MIR-borrowck errors as normal. - } - } - } - for diag in mbcx.errors_buffer.drain(..) { mbcx.infcx.tcx.sess.diagnostic().emit_diagnostic(&diag); } @@ -416,21 +384,6 @@ fn do_mir_borrowck<'a, 'tcx>( result } -fn downgrade_if_error(diag: &mut Diagnostic) { - if diag.is_error() { - diag.level = Level::Warning; - diag.warn( - "this error has been downgraded to a warning for backwards \ - compatibility with previous releases", - ).warn( - "this represents potential undefined behavior in your code and \ - this warning will become a hard error in the future", - ).note( - "for more information, try `rustc --explain E0729`" - ); - } -} - crate struct MirBorrowckCtxt<'cx, 'tcx> { crate infcx: &'cx InferCtxt<'cx, 'tcx>, body: &'cx Body<'tcx>, @@ -491,9 +444,6 @@ crate struct MirBorrowckCtxt<'cx, 'tcx> { uninitialized_error_reported: FxHashSet>, /// Errors to be reported buffer errors_buffer: Vec, - /// If there are no errors reported by the HIR borrow checker, we downgrade - /// all NLL errors to warnings. Setting this flag disables downgrading. - disable_error_downgrading: bool, /// This field keeps track of all the local variables that are declared mut and are mutated. /// Used for the warning issued by an unused mutable local variable. used_mut: FxHashSet, @@ -671,7 +621,7 @@ impl<'cx, 'tcx> DataflowResultsConsumer<'cx, 'tcx> for MirBorrowckCtxt<'cx, 'tcx target: _, unwind: _, } => { - let gcx = self.infcx.tcx.global_tcx(); + let tcx = self.infcx.tcx; // Compute the type with accurate region information. let drop_place_ty = drop_place.ty(self.body, self.infcx.tcx); @@ -679,10 +629,10 @@ impl<'cx, 'tcx> DataflowResultsConsumer<'cx, 'tcx> for MirBorrowckCtxt<'cx, 'tcx // Erase the regions. let drop_place_ty = self.infcx.tcx.erase_regions(&drop_place_ty).ty; - // "Lift" into the gcx -- once regions are erased, this type should be in the + // "Lift" into the tcx -- once regions are erased, this type should be in the // global arenas; this "lift" operation basically just asserts that is true, but // that is useful later. - gcx.lift_to_global(&drop_place_ty).unwrap(); + tcx.lift(&drop_place_ty).unwrap(); debug!("visit_terminator_drop \ loc: {:?} term: {:?} drop_place: {:?} drop_place_ty: {:?} span: {:?}", @@ -934,12 +884,6 @@ impl InitializationRequiringAction { } impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { - /// If there are no errors reported by the HIR borrow checker, we downgrade - /// all NLL errors to warnings. Calling this disables downgrading. - crate fn disable_error_downgrading(&mut self) { - self.disable_error_downgrading = true; - } - /// Checks an access to the given place to see if it is allowed. 
Examines the set of borrows /// that are in scope, as well as which paths have been initialized, to ensure that (a) the /// place is initialized and (b) it is not borrowed in some way that would prevent this @@ -1189,15 +1133,12 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { // Special case: you can assign a immutable local variable // (e.g., `x = ...`) so long as it has never been initialized // before (at this point in the flow). - if let Place { - base: PlaceBase::Local(local), - projection: box [], - } = place_span.0 { - if let Mutability::Not = self.body.local_decls[*local].mutability { + if let Some(local) = place_span.0.as_local() { + if let Mutability::Not = self.body.local_decls[local].mutability { // check for reassignments to immutable local variables self.check_if_reassignment_to_immutable_state( location, - *local, + local, place_span, flow_state, ); @@ -1344,59 +1285,57 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { // captures of a closure are copied/moved directly // when generating MIR. match *operand { - Operand::Move(Place { - base: PlaceBase::Local(local), - projection: box [], - }) | - Operand::Copy(Place { - base: PlaceBase::Local(local), - projection: box [], - }) if self.body.local_decls[local].is_user_variable.is_none() => { - if self.body.local_decls[local].ty.is_mutable_ptr() { - // The variable will be marked as mutable by the borrow. - return; + Operand::Move(ref place) | Operand::Copy(ref place) => { + match place.as_local() { + Some(local) if self.body.local_decls[local].is_user_variable.is_none() => { + if self.body.local_decls[local].ty.is_mutable_ptr() { + // The variable will be marked as mutable by the borrow. + return; + } + // This is an edge case where we have a `move` closure + // inside a non-move closure, and the inner closure + // contains a mutation: + // + // let mut i = 0; + // || { move || { i += 1; }; }; + // + // In this case our usual strategy of assuming that the + // variable will be captured by mutable reference is + // wrong, since `i` can be copied into the inner + // closure from a shared reference. + // + // As such we have to search for the local that this + // capture comes from and mark it as being used as mut. + + let temp_mpi = self.move_data.rev_lookup.find_local(local); + let init = if let [init_index] = *self.move_data.init_path_map[temp_mpi] { + &self.move_data.inits[init_index] + } else { + bug!("temporary should be initialized exactly once") + }; + + let loc = match init.location { + InitLocation::Statement(stmt) => stmt, + _ => bug!("temporary initialized in arguments"), + }; + + let bbd = &self.body[loc.block]; + let stmt = &bbd.statements[loc.statement_index]; + debug!("temporary assigned in: stmt={:?}", stmt); + + if let StatementKind::Assign(box (_, Rvalue::Ref(_, _, ref source))) = + stmt.kind + { + propagate_closure_used_mut_place(self, source); + } else { + bug!( + "closures should only capture user variables \ + or references to user variables" + ); + } + } + _ => propagate_closure_used_mut_place(self, place), } - // This is an edge case where we have a `move` closure - // inside a non-move closure, and the inner closure - // contains a mutation: - // - // let mut i = 0; - // || { move || { i += 1; }; }; - // - // In this case our usual strategy of assuming that the - // variable will be captured by mutable reference is - // wrong, since `i` can be copied into the inner - // closure from a shared reference. 
- // - // As such we have to search for the local that this - // capture comes from and mark it as being used as mut. - - let temp_mpi = self.move_data.rev_lookup.find_local(local); - let init = if let [init_index] = *self.move_data.init_path_map[temp_mpi] { - &self.move_data.inits[init_index] - } else { - bug!("temporary should be initialized exactly once") - }; - - let loc = match init.location { - InitLocation::Statement(stmt) => stmt, - _ => bug!("temporary initialized in arguments"), - }; - - let bbd = &self.body[loc.block]; - let stmt = &bbd.statements[loc.statement_index]; - debug!("temporary assigned in: stmt={:?}", stmt); - - if let StatementKind::Assign(box(_, Rvalue::Ref(_, _, ref source))) = stmt.kind { - propagate_closure_used_mut_place(self, source); - } else { - bug!("closures should only capture user variables \ - or references to user variables"); - } - } - Operand::Move(ref place) - | Operand::Copy(ref place) => { - propagate_closure_used_mut_place(self, place); } Operand::Constant(..) => {} } @@ -1758,7 +1697,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { debug!("check_if_assigned_path_is_moved place: {:?}", place); // None case => assigning to `x` does not require `x` be initialized. - let mut cursor = &*place.projection; + let mut cursor = &*place.projection.as_ref(); while let [proj_base @ .., elem] = cursor { cursor = proj_base; @@ -1796,7 +1735,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { // be already initialized let tcx = self.infcx.tcx; let base_ty = Place::ty_from(&place.base, proj_base, self.body, tcx).ty; - match base_ty.sty { + match base_ty.kind { ty::Adt(def, _) if def.has_dtor(tcx) => { self.check_if_path_or_subpath_is_moved( location, InitializationRequiringAction::Assignment, @@ -1902,7 +1841,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { // of the union - we should error in that case. let tcx = this.infcx.tcx; if let ty::Adt(def, _) = - Place::ty_from(base.base, base.projection, this.body, tcx).ty.sty + Place::ty_from(base.base, base.projection, this.body, tcx).ty.kind { if def.is_union() { if this.move_data.path_map[mpi].iter().any(|moi| { @@ -1988,48 +1927,28 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { } } - Reservation(wk @ WriteKind::Move) - | Write(wk @ WriteKind::Move) - | Reservation(wk @ WriteKind::StorageDeadOrDrop) - | Reservation(wk @ WriteKind::MutableBorrow(BorrowKind::Shared)) - | Reservation(wk @ WriteKind::MutableBorrow(BorrowKind::Shallow)) - | Write(wk @ WriteKind::StorageDeadOrDrop) - | Write(wk @ WriteKind::MutableBorrow(BorrowKind::Shared)) - | Write(wk @ WriteKind::MutableBorrow(BorrowKind::Shallow)) => { - if let (Err(place_err), true) = ( + Reservation(WriteKind::Move) + | Write(WriteKind::Move) + | Reservation(WriteKind::StorageDeadOrDrop) + | Reservation(WriteKind::MutableBorrow(BorrowKind::Shared)) + | Reservation(WriteKind::MutableBorrow(BorrowKind::Shallow)) + | Write(WriteKind::StorageDeadOrDrop) + | Write(WriteKind::MutableBorrow(BorrowKind::Shared)) + | Write(WriteKind::MutableBorrow(BorrowKind::Shallow)) => { + if let (Err(_), true) = ( self.is_mutable(place.as_ref(), is_local_mutation_allowed), self.errors_buffer.is_empty() ) { - if self.infcx.tcx.migrate_borrowck() { - // rust-lang/rust#46908: In pure NLL mode this - // code path should be unreachable (and thus - // we signal an ICE in the else branch - // here). 
But we can legitimately get here - // under borrowck=migrate mode, so instead of - // ICE'ing we instead report a legitimate - // error (which will then be downgraded to a - // warning by the migrate machinery). - error_access = match wk { - WriteKind::MutableBorrow(_) => AccessKind::MutableBorrow, - WriteKind::Move => AccessKind::Move, - WriteKind::StorageDeadOrDrop | - WriteKind::Mutate => AccessKind::Mutate, - }; - self.report_mutability_error( - place, - span, - place_err, - error_access, - location, - ); - } else { - span_bug!( - span, - "Accessing `{:?}` with the kind `{:?}` shouldn't be possible", - place, - kind, - ); - } + // rust-lang/rust#46908: In pure NLL mode this code path should be + // unreachable, but we use `delay_span_bug` because we can hit this when + // dereferencing a non-Copy raw pointer *and* have `-Ztreat-err-as-bug` + // enabled. We don't want to ICE for that case, as other errors will have + // been emitted (#52262). + self.infcx.tcx.sess.delay_span_bug(span, &format!( + "Accessing `{:?}` with the kind `{:?}` shouldn't be possible", + place, + kind, + )); } return false; } @@ -2195,7 +2114,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { Place::ty_from(place.base, proj_base, self.body, self.infcx.tcx).ty; // Check the kind of deref to decide - match base_ty.sty { + match base_ty.kind { ty::Ref(_, _, mutbl) => { match mutbl { // Shared borrowed data is never mutable diff --git a/src/librustc_mir/borrow_check/move_errors.rs b/src/librustc_mir/borrow_check/move_errors.rs index aa732b0092..d9e958d945 100644 --- a/src/librustc_mir/borrow_check/move_errors.rs +++ b/src/librustc_mir/borrow_check/move_errors.rs @@ -89,45 +89,41 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { // If that ever stops being the case, then the ever initialized // flow could be used. if let Some(StatementKind::Assign( - box( - Place { - base: PlaceBase::Local(local), - projection: box [], - }, - Rvalue::Use(Operand::Move(move_from)) - ) + box(place, Rvalue::Use(Operand::Move(move_from))) )) = self.body.basic_blocks()[location.block] .statements .get(location.statement_index) .map(|stmt| &stmt.kind) { - let local_decl = &self.body.local_decls[*local]; - // opt_match_place is the - // match_span is the span of the expression being matched on - // match *x.y { ... } match_place is Some(*x.y) - // ^^^^ match_span is the span of *x.y - // - // opt_match_place is None for let [mut] x = ... statements, - // whether or not the right-hand side is a place expression - if let Some(ClearCrossCrate::Set(BindingForm::Var(VarBindingForm { - opt_match_place: Some((ref opt_match_place, match_span)), - binding_mode: _, - opt_ty_info: _, - pat_span: _, - }))) = local_decl.is_user_variable - { - let stmt_source_info = self.body.source_info(location); - self.append_binding_error( - grouped_errors, - kind, - original_path, - move_from, - *local, - opt_match_place, - match_span, - stmt_source_info.span, - ); - return; + if let Some(local) = place.as_local() { + let local_decl = &self.body.local_decls[local]; + // opt_match_place is the + // match_span is the span of the expression being matched on + // match *x.y { ... } match_place is Some(*x.y) + // ^^^^ match_span is the span of *x.y + // + // opt_match_place is None for let [mut] x = ... 
statements, + // whether or not the right-hand side is a place expression + if let Some(ClearCrossCrate::Set(BindingForm::Var(VarBindingForm { + opt_match_place: Some((ref opt_match_place, match_span)), + binding_mode: _, + opt_ty_info: _, + pat_span: _, + }))) = local_decl.is_user_variable + { + let stmt_source_info = self.body.source_info(location); + self.append_binding_error( + grouped_errors, + kind, + original_path, + move_from, + local, + opt_match_place, + match_span, + stmt_source_info.span, + ); + return; + } } } @@ -307,11 +303,11 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { let upvar_field = self.prefixes(move_place.as_ref(), PrefixSet::All) .find_map(|p| self.is_upvar_field_projection(p)); - let deref_base = match &deref_target_place.projection { - box [proj_base @ .., ProjectionElem::Deref] => { + let deref_base = match deref_target_place.projection.as_ref() { + &[ref proj_base @ .., ProjectionElem::Deref] => { PlaceRef { base: &deref_target_place.base, - projection: proj_base, + projection: &proj_base, } } _ => bug!("deref_target_place is not a deref projection"), @@ -335,13 +331,14 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { } debug!("report: ty={:?}", ty); - let mut err = match ty.sty { + let mut err = match ty.kind { ty::Array(..) | ty::Slice(..) => self.cannot_move_out_of_interior_noncopy(span, ty, None), ty::Closure(def_id, closure_substs) if def_id == self.mir_def_id && upvar_field.is_some() => { - let closure_kind_ty = closure_substs.closure_kind_ty(def_id, self.infcx.tcx); + let closure_kind_ty = closure_substs + .as_closure().kind_ty(def_id, self.infcx.tcx); let closure_kind = closure_kind_ty.to_opt_closure_kind(); let capture_description = match closure_kind { Some(ty::ClosureKind::Fn) => { diff --git a/src/librustc_mir/borrow_check/mutability_errors.rs b/src/librustc_mir/borrow_check/mutability_errors.rs index 14b76d97b3..68b33331a1 100644 --- a/src/librustc_mir/borrow_check/mutability_errors.rs +++ b/src/librustc_mir/borrow_check/mutability_errors.rs @@ -5,7 +5,7 @@ use rustc::mir::{ Mutability, Place, PlaceRef, PlaceBase, ProjectionElem, Static, StaticKind }; use rustc::ty::{self, Ty, TyCtxt}; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::vec::Idx; use syntax_pos::Span; use syntax_pos::symbol::kw; @@ -18,7 +18,6 @@ use rustc_errors::Applicability; pub(super) enum AccessKind { MutableBorrow, Mutate, - Move, } impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { @@ -50,10 +49,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { projection: [], } => { item_msg = format!("`{}`", access_place_desc.unwrap()); - if let Place { - base: PlaceBase::Local(_), - projection: box [], - } = access_place { + if access_place.as_local().is_some() { reason = ", as it is not declared as mutable".to_string(); } else { let name = self.body.local_decls[*local] @@ -124,7 +120,6 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { if let Some(desc) = access_place_desc { item_msg = format!("`{}`", desc); reason = match error_access { - AccessKind::Move | AccessKind::Mutate => format!(" which is behind {}", pointer_type), AccessKind::MutableBorrow => { format!(", as it is behind {}", pointer_type) @@ -155,10 +150,10 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { }), projection: [], } => { - if let Place { - base: PlaceBase::Static(_), - projection: box [], - } = access_place { + if let PlaceRef { + base: &PlaceBase::Static(_), + projection: &[], + } = access_place.as_ref() { item_msg = format!("immutable static item `{}`", access_place_desc.unwrap()); reason = String::new(); } else { @@ 
-194,12 +189,6 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { let acted_on; let span = match error_access { - AccessKind::Move => { - err = self.cannot_move_out_of(span, &(item_msg + &reason)); - err.span_label(span, "cannot move"); - err.buffer(&mut self.errors_buffer); - return; - } AccessKind::Mutate => { err = self.cannot_assign(span, &(item_msg + &reason)); act = "assign"; @@ -283,7 +272,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { // for a `self: &mut Self` to suggest removing the `&mut`. if let ty::Ref( _, _, hir::Mutability::MutMutable - ) = local_decl.ty.sty { + ) = local_decl.ty.kind { true } else { false @@ -338,7 +327,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { _, upvar_ident, _, - ) = pat.node + ) = pat.kind { err.span_suggestion( upvar_ident.span, @@ -630,8 +619,8 @@ fn annotate_struct_field( field: &mir::Field, ) -> Option<(Span, String)> { // Expect our local to be a reference to a struct of some kind. - if let ty::Ref(_, ty, _) = ty.sty { - if let ty::Adt(def, _) = ty.sty { + if let ty::Ref(_, ty, _) = ty.kind { + if let ty::Adt(def, _) = ty.kind { let field = def.all_fields().nth(field.index())?; // Use the HIR types to construct the diagnostic message. let hir_id = tcx.hir().as_local_hir_id(field.did)?; @@ -642,7 +631,7 @@ fn annotate_struct_field( if let hir::TyKind::Rptr(lifetime, hir::MutTy { mutbl: hir::Mutability::MutImmutable, ref ty - }) = field.ty.node { + }) = field.ty.kind { // Get the snippets in two parts - the named lifetime (if there is one) and // type being referenced, that way we can reconstruct the snippet without loss // of detail. diff --git a/src/librustc_mir/borrow_check/nll/constraint_generation.rs b/src/librustc_mir/borrow_check/nll/constraint_generation.rs index 1e5f613aed..cae303039a 100644 --- a/src/librustc_mir/borrow_check/nll/constraint_generation.rs +++ b/src/librustc_mir/borrow_check/nll/constraint_generation.rs @@ -8,11 +8,11 @@ use rustc::infer::InferCtxt; use rustc::mir::visit::TyContext; use rustc::mir::visit::Visitor; use rustc::mir::{ - BasicBlock, BasicBlockData, Body, Local, Location, Place, PlaceBase, ProjectionElem, Rvalue, - SourceInfo, Statement, StatementKind, Terminator, TerminatorKind, UserTypeProjection, + BasicBlock, BasicBlockData, Body, Local, Location, Place, PlaceBase, PlaceRef, ProjectionElem, + Rvalue, SourceInfo, Statement, StatementKind, Terminator, TerminatorKind, UserTypeProjection, }; use rustc::ty::fold::TypeFoldable; -use rustc::ty::{self, ClosureSubsts, GeneratorSubsts, RegionVid, Ty}; +use rustc::ty::{self, RegionVid, Ty}; use rustc::ty::subst::SubstsRef; pub(super) fn generate_constraints<'cx, 'tcx>( @@ -91,20 +91,6 @@ impl<'cg, 'cx, 'tcx> Visitor<'tcx> for ConstraintGeneration<'cg, 'cx, 'tcx> { self.super_ty(ty); } - /// We sometimes have `generator_substs` within an rvalue, or within a - /// call. Make them live at the location where they appear. - fn visit_generator_substs(&mut self, substs: &GeneratorSubsts<'tcx>, location: Location) { - self.add_regular_live_constraint(*substs, location); - self.super_generator_substs(substs); - } - - /// We sometimes have `closure_substs` within an rvalue, or within a - /// call. Make them live at the location where they appear. 
- fn visit_closure_substs(&mut self, substs: &ClosureSubsts<'tcx>, location: Location) { - self.add_regular_live_constraint(*substs, location); - self.super_closure_substs(substs); - } - fn visit_statement( &mut self, statement: &Statement<'tcx>, @@ -225,14 +211,14 @@ impl<'cx, 'cg, 'tcx> ConstraintGeneration<'cx, 'cg, 'tcx> { // - if it's a deeper projection, we have to filter which // of the borrows are killed: the ones whose `borrowed_place` // conflicts with the `place`. - match place { - Place { - base: PlaceBase::Local(local), - projection: box [], + match place.as_ref() { + PlaceRef { + base: &PlaceBase::Local(local), + projection: &[], } | - Place { - base: PlaceBase::Local(local), - projection: box [ProjectionElem::Deref], + PlaceRef { + base: &PlaceBase::Local(local), + projection: &[ProjectionElem::Deref], } => { debug!( "Recording `killed` facts for borrows of local={:?} at location={:?}", @@ -243,21 +229,21 @@ impl<'cx, 'cg, 'tcx> ConstraintGeneration<'cx, 'cg, 'tcx> { all_facts, self.borrow_set, self.location_table, - local, + &local, location, ); } - Place { - base: PlaceBase::Static(_), + PlaceRef { + base: &PlaceBase::Static(_), .. } => { // Ignore kills of static or static mut variables. } - Place { - base: PlaceBase::Local(local), - projection: box [.., _], + PlaceRef { + base: &PlaceBase::Local(local), + projection: &[.., _], } => { // Kill conflicting borrows of the innermost local. debug!( @@ -266,7 +252,7 @@ impl<'cx, 'cg, 'tcx> ConstraintGeneration<'cx, 'cg, 'tcx> { local, location ); - if let Some(borrow_indices) = self.borrow_set.local_map.get(local) { + if let Some(borrow_indices) = self.borrow_set.local_map.get(&local) { for &borrow_index in borrow_indices { let places_conflict = places_conflict::places_conflict( self.infcx.tcx, diff --git a/src/librustc_mir/borrow_check/nll/constraints/graph.rs b/src/librustc_mir/borrow_check/nll/constraints/graph.rs index b5630251e5..b6a9a7ee65 100644 --- a/src/librustc_mir/borrow_check/nll/constraints/graph.rs +++ b/src/librustc_mir/borrow_check/nll/constraints/graph.rs @@ -4,7 +4,7 @@ use crate::borrow_check::nll::constraints::{OutlivesConstraintSet, OutlivesConst use rustc::mir::ConstraintCategory; use rustc::ty::RegionVid; use rustc_data_structures::graph; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc_index::vec::IndexVec; use syntax_pos::DUMMY_SP; /// The construct graph organizes the constraints by their end-points. diff --git a/src/librustc_mir/borrow_check/nll/constraints/mod.rs b/src/librustc_mir/borrow_check/nll/constraints/mod.rs index 6121ed0cf0..8a242b7ee2 100644 --- a/src/librustc_mir/borrow_check/nll/constraints/mod.rs +++ b/src/librustc_mir/borrow_check/nll/constraints/mod.rs @@ -2,7 +2,7 @@ use crate::borrow_check::nll::type_check::Locations; use rustc::mir::ConstraintCategory; use rustc::ty::RegionVid; use rustc_data_structures::graph::scc::Sccs; -use rustc_data_structures::indexed_vec::{Idx, IndexVec}; +use rustc_index::vec::{Idx, IndexVec}; use std::fmt; use std::ops::Index; @@ -71,7 +71,7 @@ impl Index for OutlivesConstraintSet { } } -#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] pub struct OutlivesConstraint { // NB. The ordering here is not significant for correctness, but // it is for convenience. Before we dump the constraints in the @@ -100,13 +100,13 @@ impl fmt::Debug for OutlivesConstraint { } } -newtype_index! { +rustc_index::newtype_index! 
{ pub struct OutlivesConstraintIndex { DEBUG_FORMAT = "OutlivesConstraintIndex({})" } } -newtype_index! { +rustc_index::newtype_index! { pub struct ConstraintSccIndex { DEBUG_FORMAT = "ConstraintSccIndex({})" } diff --git a/src/librustc_mir/borrow_check/nll/explain_borrow/mod.rs b/src/librustc_mir/borrow_check/nll/explain_borrow/mod.rs index 3ba8d7044b..26bead3047 100644 --- a/src/librustc_mir/borrow_check/nll/explain_borrow/mod.rs +++ b/src/librustc_mir/borrow_check/nll/explain_borrow/mod.rs @@ -6,8 +6,8 @@ use crate::borrow_check::nll::region_infer::{Cause, RegionName}; use crate::borrow_check::nll::ConstraintDescription; use crate::borrow_check::{MirBorrowckCtxt, WriteKind}; use rustc::mir::{ - CastKind, ConstraintCategory, FakeReadCause, Local, Location, Body, Operand, Place, PlaceBase, - Rvalue, Statement, StatementKind, TerminatorKind, + CastKind, ConstraintCategory, FakeReadCause, Local, Location, Body, Operand, Place, Rvalue, + Statement, StatementKind, TerminatorKind, }; use rustc::ty::{self, TyCtxt}; use rustc::ty::adjustment::{PointerCast}; @@ -17,6 +17,7 @@ use syntax_pos::Span; mod find_use; +#[derive(Debug)] pub(in crate::borrow_check) enum BorrowExplanation { UsedLater(LaterUseKind, Span), UsedLaterInLoop(LaterUseKind, Span), @@ -35,7 +36,7 @@ pub(in crate::borrow_check) enum BorrowExplanation { Unexplained, } -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Debug)] pub(in crate::borrow_check) enum LaterUseKind { TraitCapture, ClosureCapture, @@ -95,7 +96,7 @@ impl BorrowExplanation { should_note_order, } => { let local_decl = &body.local_decls[dropped_local]; - let (dtor_desc, type_desc) = match local_decl.ty.sty { + let (dtor_desc, type_desc) = match local_decl.ty.kind { // If type is an ADT that implements Drop, then // simplify output by reporting just the ADT name. ty::Adt(adt, _substs) if adt.has_dtor(tcx) && !adt.is_box() => ( @@ -272,12 +273,9 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { let mut should_note_order = false; if body.local_decls[local].name.is_some() { if let Some((WriteKind::StorageDeadOrDrop, place)) = kind_place { - if let Place { - base: PlaceBase::Local(borrowed_local), - projection: box [], - } = place { - if body.local_decls[*borrowed_local].name.is_some() - && local != *borrowed_local + if let Some(borrowed_local) = place.as_local() { + if body.local_decls[borrowed_local].name.is_some() + && local != borrowed_local { should_note_order = true; } @@ -493,22 +491,19 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { // Just point to the function, to reduce the chance of overlapping spans. let function_span = match func { Operand::Constant(c) => c.span, - Operand::Copy(Place { - base: PlaceBase::Local(l), - projection: box [], - }) | - Operand::Move(Place { - base: PlaceBase::Local(l), - projection: box [], - }) => { - let local_decl = &self.body.local_decls[*l]; - if local_decl.name.is_none() { - local_decl.source_info.span + Operand::Copy(place) | + Operand::Move(place) => { + if let Some(l) = place.as_local() { + let local_decl = &self.body.local_decls[l]; + if local_decl.name.is_none() { + local_decl.source_info.span + } else { + span + } } else { span } } - _ => span, }; return (LaterUseKind::Call, function_span); } else { @@ -541,14 +536,14 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { // it which simplifies the termination logic. 
let mut queue = vec![location]; let mut target = if let Some(&Statement { - kind: StatementKind::Assign(box(Place { - base: PlaceBase::Local(local), - projection: box [], - }, _)), + kind: StatementKind::Assign(box(ref place, _)), .. - }) = stmt - { - local + }) = stmt { + if let Some(local) = place.as_local() { + local + } else { + return false; + } } else { return false; }; @@ -581,17 +576,13 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { // If we see a use, we should check whether it is our data, and if so // update the place that we're looking for to that new place. Rvalue::Use(operand) => match operand { - Operand::Copy(Place { - base: PlaceBase::Local(from), - projection: box [], - }) - | Operand::Move(Place { - base: PlaceBase::Local(from), - projection: box [], - }) - if *from == target => - { - target = into; + Operand::Copy(place) + | Operand::Move(place) => { + if let Some(from) = place.as_local() { + if from == target { + target = into; + } + } } _ => {} }, @@ -600,28 +591,25 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { Rvalue::Cast( CastKind::Pointer(PointerCast::Unsize), operand, ty ) => match operand { - Operand::Copy(Place { - base: PlaceBase::Local(from), - projection: box [], - }) - | Operand::Move(Place { - base: PlaceBase::Local(from), - projection: box [], - }) - if *from == target => - { - debug!("was_captured_by_trait_object: ty={:?}", ty); - // Check the type for a trait object. - return match ty.sty { - // `&dyn Trait` - ty::Ref(_, ty, _) if ty.is_trait() => true, - // `Box` - _ if ty.is_box() && ty.boxed_ty().is_trait() => true, - // `dyn Trait` - _ if ty.is_trait() => true, - // Anything else. - _ => false, - }; + Operand::Copy(place) + | Operand::Move(place) => { + if let Some(from) = place.as_local() { + if from == target { + debug!("was_captured_by_trait_object: ty={:?}", ty); + // Check the type for a trait object. + return match ty.kind { + // `&dyn Trait` + ty::Ref(_, ty, _) if ty.is_trait() => true, + // `Box` + _ if ty.is_box() && ty.boxed_ty().is_trait() => true, + // `dyn Trait` + _ if ty.is_trait() => true, + // Anything else. + _ => false, + }; + } + } + return false; } _ => return false, }, @@ -637,34 +625,33 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { debug!("was_captured_by_trait_object: terminator={:?}", terminator); if let TerminatorKind::Call { - destination: Some((Place { - base: PlaceBase::Local(dest), - projection: box [], - }, block)), + destination: Some((place, block)), args, .. - } = &terminator.kind - { - debug!( - "was_captured_by_trait_object: target={:?} dest={:?} args={:?}", - target, dest, args - ); - // Check if one of the arguments to this function is the target place. - let found_target = args.iter().any(|arg| { - if let Operand::Move(Place { - base: PlaceBase::Local(potential), - projection: box [], - }) = arg { - *potential == target - } else { - false - } - }); + } = &terminator.kind { + if let Some(dest) = place.as_local() { + debug!( + "was_captured_by_trait_object: target={:?} dest={:?} args={:?}", + target, dest, args + ); + // Check if one of the arguments to this function is the target place. + let found_target = args.iter().any(|arg| { + if let Operand::Move(place) = arg { + if let Some(potential) = place.as_local() { + potential == target + } else { + false + } + } else { + false + } + }); - // If it is, follow this to the next block and update the target. - if found_target { - target = *dest; - queue.push(block.start_location()); + // If it is, follow this to the next block and update the target. 
+ if found_target { + target = dest; + queue.push(block.start_location()); + } } } } diff --git a/src/librustc_mir/borrow_check/nll/facts.rs b/src/librustc_mir/borrow_check/nll/facts.rs index f0beb4d3ae..13e5769c5b 100644 --- a/src/librustc_mir/borrow_check/nll/facts.rs +++ b/src/librustc_mir/borrow_check/nll/facts.rs @@ -4,7 +4,7 @@ use polonius_engine::AllFacts as PoloniusAllFacts; use polonius_engine::Atom; use rustc::mir::Local; use rustc::ty::{RegionVid, TyCtxt}; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::vec::Idx; use std::error::Error; use std::fmt::Debug; use std::fs::{self, File}; diff --git a/src/librustc_mir/borrow_check/nll/member_constraints.rs b/src/librustc_mir/borrow_check/nll/member_constraints.rs index b5e2e111f3..75213d3098 100644 --- a/src/librustc_mir/borrow_check/nll/member_constraints.rs +++ b/src/librustc_mir/borrow_check/nll/member_constraints.rs @@ -2,7 +2,7 @@ use crate::rustc::ty::{self, Ty}; use rustc::hir::def_id::DefId; use rustc::infer::region_constraints::MemberConstraint; use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::indexed_vec::{Idx, IndexVec}; +use rustc_index::vec::{Idx, IndexVec}; use std::hash::Hash; use std::ops::Index; use syntax_pos::Span; @@ -11,7 +11,7 @@ use syntax_pos::Span; /// indexed by the region `R0`. crate struct MemberConstraintSet<'tcx, R> where - R: Copy + Hash + Eq, + R: Copy + Eq, { /// Stores the first "member" constraint for a given `R0`. This is an /// index into the `constraints` vector below. @@ -51,7 +51,7 @@ crate struct NllMemberConstraint<'tcx> { end_index: usize, } -newtype_index! { +rustc_index::newtype_index! { crate struct NllMemberConstraintIndex { DEBUG_FORMAT = "MemberConstraintIndex({})" } @@ -191,7 +191,7 @@ where impl<'tcx, R> Index for MemberConstraintSet<'tcx, R> where - R: Copy + Hash + Eq, + R: Copy + Eq, { type Output = NllMemberConstraint<'tcx>; diff --git a/src/librustc_mir/borrow_check/nll/mod.rs b/src/librustc_mir/borrow_check/nll/mod.rs index 1ff3228afa..b2e5751b90 100644 --- a/src/librustc_mir/borrow_check/nll/mod.rs +++ b/src/librustc_mir/borrow_check/nll/mod.rs @@ -14,7 +14,7 @@ use rustc::infer::InferCtxt; use rustc::mir::{ClosureOutlivesSubject, ClosureRegionRequirements, Local, Location, Body, LocalKind, BasicBlock, Promoted}; use rustc::ty::{self, RegionKind, RegionVid}; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc_index::vec::IndexVec; use rustc_errors::Diagnostic; use std::fmt::Debug; use std::env; diff --git a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/mod.rs b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/mod.rs index 26a89b4e7a..7362ae9c63 100644 --- a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/mod.rs +++ b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/mod.rs @@ -1,5 +1,4 @@ use crate::borrow_check::nll::constraints::OutlivesConstraint; -use crate::borrow_check::nll::region_infer::AppliedMemberConstraint; use crate::borrow_check::nll::region_infer::RegionInferenceContext; use crate::borrow_check::nll::type_check::Locations; use crate::borrow_check::nll::universal_regions::DefiningTy; @@ -12,7 +11,7 @@ use rustc::infer::InferCtxt; use rustc::infer::NLLRegionVariableOrigin; use rustc::mir::{ConstraintCategory, Location, Body}; use rustc::ty::{self, RegionVid}; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc_index::vec::IndexVec; use rustc_errors::DiagnosticBuilder; use std::collections::VecDeque; use syntax::errors::Applicability; @@ -98,9 +97,11 
@@ impl<'tcx> RegionInferenceContext<'tcx> { &self, body: &Body<'tcx>, from_region: RegionVid, + from_region_origin: NLLRegionVariableOrigin, target_test: impl Fn(RegionVid) -> bool, ) -> (ConstraintCategory, bool, Span) { - debug!("best_blame_constraint(from_region={:?})", from_region); + debug!("best_blame_constraint(from_region={:?}, from_region_origin={:?})", + from_region, from_region_origin); // Find all paths let (path, target_region) = @@ -153,19 +154,85 @@ impl<'tcx> RegionInferenceContext<'tcx> { // we still want to screen for an "interesting" point to // highlight (e.g., a call site or something). let target_scc = self.constraint_sccs.scc(target_region); - let best_choice = (0..path.len()).rev().find(|&i| { - let constraint = path[i]; + let mut range = 0..path.len(); + + // As noted above, when reporting an error, there is typically a chain of constraints + // leading from some "source" region which must outlive some "target" region. + // In most cases, we prefer to "blame" the constraints closer to the target -- + // but there is one exception. When constraints arise from higher-ranked subtyping, + // we generally prefer to blame the source value, + // as the "target" in this case tends to be some type annotation that the user gave. + // Therefore, if we find that the region origin is some instantiation + // of a higher-ranked region, we start our search from the "source" point + // rather than the "target", and we also tweak a few other things. + // + // An example might be this bit of Rust code: + // + // ```rust + // let x: fn(&'static ()) = |_| {}; + // let y: for<'a> fn(&'a ()) = x; + // ``` + // + // In MIR, this will be converted into a combination of assignments and type ascriptions. + // In particular, the 'static is imposed through a type ascription: + // + // ```rust + // x = ...; + // AscribeUserType(x, fn(&'static ()) + // y = x; + // ``` + // + // We wind up ultimately with constraints like + // + // ```rust + // !a: 'temp1 // from the `y = x` statement + // 'temp1: 'temp2 + // 'temp2: 'static // from the AscribeUserType + // ``` + // + // and here we prefer to blame the source (the y = x statement). 
+ let blame_source = match from_region_origin { + NLLRegionVariableOrigin::FreeRegion + | NLLRegionVariableOrigin::Existential { from_forall: false } => { + true + } + NLLRegionVariableOrigin::Placeholder(_) + | NLLRegionVariableOrigin::Existential { from_forall: true } => { + false + } + }; + + let find_region = |i: &usize| { + let constraint = path[*i]; let constraint_sup_scc = self.constraint_sccs.scc(constraint.sup); - match categorized_path[i].0 { - ConstraintCategory::OpaqueType | ConstraintCategory::Boring | - ConstraintCategory::BoringNoLocation | ConstraintCategory::Internal => false, - ConstraintCategory::TypeAnnotation | ConstraintCategory::Return | - ConstraintCategory::Yield => true, - _ => constraint_sup_scc != target_scc, + if blame_source { + match categorized_path[*i].0 { + ConstraintCategory::OpaqueType | ConstraintCategory::Boring | + ConstraintCategory::BoringNoLocation | ConstraintCategory::Internal => false, + ConstraintCategory::TypeAnnotation | ConstraintCategory::Return | + ConstraintCategory::Yield => true, + _ => constraint_sup_scc != target_scc, + } + } else { + match categorized_path[*i].0 { + ConstraintCategory::OpaqueType | ConstraintCategory::Boring | + ConstraintCategory::BoringNoLocation | ConstraintCategory::Internal => false, + _ => true + } } - }); + }; + + let best_choice = if blame_source { + range.rev().find(find_region) + } else { + range.find(find_region) + }; + + debug!("best_blame_constraint: best_choice={:?} blame_source={}", + best_choice, blame_source); + if let Some(i) = best_choice { if let Some(next) = categorized_path.get(i + 1) { if categorized_path[i].0 == ConstraintCategory::Return @@ -253,29 +320,33 @@ impl<'tcx> RegionInferenceContext<'tcx> { let outgoing_edges_from_graph = self.constraint_graph .outgoing_edges(r, &self.constraints, fr_static); - - // But member constraints can also give rise to `'r: 'x` - // edges that were not part of the graph initially, so - // watch out for those. - let outgoing_edges_from_picks = self.applied_member_constraints(r) - .iter() - .map(|&AppliedMemberConstraint { min_choice, member_constraint_index, .. }| { - let p_c = &self.member_constraints[member_constraint_index]; - OutlivesConstraint { - sup: r, - sub: min_choice, - locations: Locations::All(p_c.definition_span), - category: ConstraintCategory::OpaqueType, - } - }); - - for constraint in outgoing_edges_from_graph.chain(outgoing_edges_from_picks) { + // Always inline this closure because it can be hot. + let mut handle_constraint = #[inline(always)] |constraint: OutlivesConstraint| { debug_assert_eq!(constraint.sup, r); let sub_region = constraint.sub; if let Trace::NotVisited = context[sub_region] { context[sub_region] = Trace::FromOutlivesConstraint(constraint); deque.push_back(sub_region); } + }; + + // This loop can be hot. + for constraint in outgoing_edges_from_graph { + handle_constraint(constraint); + } + + // Member constraints can also give rise to `'r: 'x` edges that + // were not part of the graph initially, so watch out for those. + // (But they are extremely rare; this loop is very cold.) 
+ for constraint in self.applied_member_constraints(r) { + let p_c = &self.member_constraints[constraint.member_constraint_index]; + let constraint = OutlivesConstraint { + sup: r, + sub: constraint.min_choice, + locations: Locations::All(p_c.definition_span), + category: ConstraintCategory::OpaqueType, + }; + handle_constraint(constraint); } } @@ -297,12 +368,13 @@ impl<'tcx> RegionInferenceContext<'tcx> { infcx: &'a InferCtxt<'a, 'tcx>, mir_def_id: DefId, fr: RegionVid, + fr_origin: NLLRegionVariableOrigin, outlived_fr: RegionVid, renctx: &mut RegionErrorNamingCtx, ) -> DiagnosticBuilder<'a> { debug!("report_error(fr={:?}, outlived_fr={:?})", fr, outlived_fr); - let (category, _, span) = self.best_blame_constraint(body, fr, |r| { + let (category, _, span) = self.best_blame_constraint(body, fr, fr_origin, |r| { self.provides_universal_region(r, fr, outlived_fr) }); @@ -627,7 +699,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { (self.to_error_region(fr), self.to_error_region(outlived_fr)) { if let Some(ty::TyS { - sty: ty::Opaque(did, substs), + kind: ty::Opaque(did, substs), .. }) = infcx .tcx @@ -709,6 +781,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { let (category, from_closure, span) = self.best_blame_constraint( body, borrow_region, + NLLRegionVariableOrigin::FreeRegion, |r| self.provides_universal_region(r, borrow_region, outlived_region) ); @@ -768,11 +841,13 @@ impl<'tcx> RegionInferenceContext<'tcx> { &self, body: &Body<'tcx>, fr1: RegionVid, + fr1_origin: NLLRegionVariableOrigin, fr2: RegionVid, ) -> (ConstraintCategory, Span) { let (category, _, span) = self.best_blame_constraint( body, fr1, + fr1_origin, |r| self.provides_universal_region(r, fr1, fr2), ); (category, span) @@ -800,7 +875,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { if let Some(ty::ReFree(free_region)) = self.to_error_region(fr) { if let ty::BoundRegion::BrEnv = free_region.bound_region { if let DefiningTy::Closure(def_id, substs) = self.universal_regions.defining_ty { - let closure_kind_ty = substs.closure_kind_ty(def_id, infcx.tcx); + let closure_kind_ty = substs.as_closure().kind_ty(def_id, infcx.tcx); return Some(ty::ClosureKind::FnMut) == closure_kind_ty.to_opt_closure_kind(); } } @@ -825,7 +900,9 @@ impl<'tcx> RegionInferenceContext<'tcx> { universe1.cannot_name(placeholder.universe) } - NLLRegionVariableOrigin::FreeRegion | NLLRegionVariableOrigin::Existential => false, + NLLRegionVariableOrigin::FreeRegion | NLLRegionVariableOrigin::Existential { .. } => { + false + } } } } diff --git a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs index 6fa9426910..d6e8494029 100644 --- a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs +++ b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs @@ -12,20 +12,20 @@ use rustc::hir::def::{Res, DefKind}; use rustc::hir::def_id::DefId; use rustc::infer::InferCtxt; use rustc::mir::Body; -use rustc::ty::subst::{SubstsRef, UnpackedKind}; +use rustc::ty::subst::{SubstsRef, GenericArgKind}; use rustc::ty::{self, RegionKind, RegionVid, Ty, TyCtxt}; use rustc::ty::print::RegionHighlightMode; use rustc_errors::DiagnosticBuilder; use syntax::symbol::kw; use rustc_data_structures::fx::FxHashMap; -use syntax_pos::{Span, symbol::InternedString}; +use syntax_pos::{Span, symbol::Symbol}; /// A name for a particular region used in emitting diagnostics. 
This name could be a generated /// name like `'1`, a name used by the user like `'a`, or a name like `'static`. #[derive(Debug, Clone)] crate struct RegionName { /// The name of the region (interned). - crate name: InternedString, + crate name: Symbol, /// Where the region comes from. crate source: RegionNameSource, } @@ -109,7 +109,7 @@ impl RegionName { } #[allow(dead_code)] - crate fn name(&self) -> InternedString { + crate fn name(&self) -> Symbol { self.name } @@ -273,7 +273,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { } ty::ReStatic => Some(RegionName { - name: kw::StaticLifetime.as_interned_str(), + name: kw::StaticLifetime, source: RegionNameSource::Static }), @@ -292,7 +292,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { if let DefiningTy::Closure(def_id, substs) = def_ty { let args_span = if let hir::ExprKind::Closure(_, _, _, span, _) = - tcx.hir().expect_expr(mir_hir_id).node + tcx.hir().expect_expr(mir_hir_id).kind { span } else { @@ -300,7 +300,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { }; let region_name = self.synthesize_region_name(renctx); - let closure_kind_ty = substs.closure_kind_ty(def_id, tcx); + let closure_kind_ty = substs.as_closure().kind_ty(def_id, tcx); let note = match closure_kind_ty.to_opt_closure_kind() { Some(ty::ClosureKind::Fn) => { "closure implements `Fn`, so references to captured variables \ @@ -360,7 +360,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { &self, tcx: TyCtxt<'tcx>, error_region: &RegionKind, - name: InternedString, + name: Symbol, ) -> Span { let scope = error_region.free_region_binding_scope(tcx); let node = tcx.hir().as_local_hir_id(scope).unwrap_or(hir::DUMMY_HIR_ID); @@ -399,7 +399,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { self.universal_regions.unnormalized_input_tys[implicit_inputs + argument_index]; if let Some(region_name) = self.give_name_if_we_can_match_hir_ty_from_argument( infcx, - body, mir_def_id, fr, arg_ty, @@ -415,7 +414,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { fn give_name_if_we_can_match_hir_ty_from_argument( &self, infcx: &InferCtxt<'_, 'tcx>, - body: &Body<'tcx>, mir_def_id: DefId, needle_fr: RegionVid, argument_ty: Ty<'tcx>, @@ -424,18 +422,14 @@ impl<'tcx> RegionInferenceContext<'tcx> { ) -> Option { let mir_hir_id = infcx.tcx.hir().as_local_hir_id(mir_def_id)?; let fn_decl = infcx.tcx.hir().fn_decl_by_hir_id(mir_hir_id)?; - let argument_hir_ty: &hir::Ty = &fn_decl.inputs[argument_index]; - match argument_hir_ty.node { + let argument_hir_ty: &hir::Ty = fn_decl.inputs.get(argument_index)?; + match argument_hir_ty.kind { // This indicates a variable with no type annotation, like // `|x|`... in that case, we can't highlight the type but // must highlight the variable. - hir::TyKind::Infer => self.give_name_if_we_cannot_match_hir_ty( - infcx, - body, - needle_fr, - argument_ty, - renctx, - ), + // NOTE(eddyb) this is handled in/by the sole caller + // (`give_name_if_anonymous_region_appears_in_arguments`). 
+ hir::TyKind::Infer => None, _ => self.give_name_if_we_can_match_hir_ty( infcx.tcx, @@ -527,7 +521,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { &mut vec![(argument_ty, argument_hir_ty)]; while let Some((ty, hir_ty)) = search_stack.pop() { - match (&ty.sty, &hir_ty.node) { + match (&ty.kind, &hir_ty.kind) { // Check if the `argument_ty` is `&'X ..` where `'X` // is the region we are looking for -- if so, and we have a `&T` // on the RHS, then we want to highlight the `&` like so: @@ -667,24 +661,24 @@ impl<'tcx> RegionInferenceContext<'tcx> { ) -> Option<&'hir hir::Lifetime> { for (kind, hir_arg) in substs.iter().zip(&args.args) { match (kind.unpack(), hir_arg) { - (UnpackedKind::Lifetime(r), hir::GenericArg::Lifetime(lt)) => { + (GenericArgKind::Lifetime(r), hir::GenericArg::Lifetime(lt)) => { if r.to_region_vid() == needle_fr { return Some(lt); } } - (UnpackedKind::Type(ty), hir::GenericArg::Type(hir_ty)) => { + (GenericArgKind::Type(ty), hir::GenericArg::Type(hir_ty)) => { search_stack.push((ty, hir_ty)); } - (UnpackedKind::Const(_ct), hir::GenericArg::Const(_hir_ct)) => { + (GenericArgKind::Const(_ct), hir::GenericArg::Const(_hir_ct)) => { // Lifetimes cannot be found in consts, so we don't need // to search anything here. } - (UnpackedKind::Lifetime(_), _) - | (UnpackedKind::Type(_), _) - | (UnpackedKind::Const(_), _) => { + (GenericArgKind::Lifetime(_), _) + | (GenericArgKind::Type(_), _) + | (GenericArgKind::Const(_), _) => { // I *think* that HIR lowering should ensure this // doesn't happen, even in erroneous // programs. Else we should use delay-span-bug. @@ -758,7 +752,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { let (return_span, mir_description) = match tcx.hir().get(mir_hir_id) { hir::Node::Expr(hir::Expr { - node: hir::ExprKind::Closure(_, return_ty, _, span, gen_move), + kind: hir::ExprKind::Closure(_, return_ty, _, span, gen_move), .. }) => ( match return_ty.output { @@ -772,7 +766,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { }, ), hir::Node::ImplItem(hir::ImplItem { - node: hir::ImplItemKind::Method(method_sig, _), + kind: hir::ImplItemKind::Method(method_sig, _), .. }) => (method_sig.decl.output.span(), ""), _ => (body.span, ""), @@ -821,7 +815,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { let yield_span = match tcx.hir().get(mir_hir_id) { hir::Node::Expr(hir::Expr { - node: hir::ExprKind::Closure(_, _, _, span, _), + kind: hir::ExprKind::Closure(_, _, _, span, _), .. }) => ( tcx.sess.source_map().end_point(*span) @@ -843,10 +837,10 @@ impl<'tcx> RegionInferenceContext<'tcx> { } /// Creates a synthetic region named `'1`, incrementing the counter. 
- fn synthesize_region_name(&self, renctx: &mut RegionErrorNamingCtx) -> InternedString { + fn synthesize_region_name(&self, renctx: &mut RegionErrorNamingCtx) -> Symbol { let c = renctx.counter; renctx.counter += 1; - InternedString::intern(&format!("'{:?}", c)) + Symbol::intern(&format!("'{:?}", c)) } } diff --git a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/var_name.rs b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/var_name.rs index 750a1324fa..7f0e97c9ae 100644 --- a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/var_name.rs +++ b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/var_name.rs @@ -3,7 +3,7 @@ use crate::borrow_check::nll::ToRegionVid; use crate::borrow_check::Upvar; use rustc::mir::{Local, Body}; use rustc::ty::{RegionVid, TyCtxt}; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::vec::Idx; use syntax::source_map::Span; use syntax_pos::symbol::Symbol; diff --git a/src/librustc_mir/borrow_check/nll/region_infer/mod.rs b/src/librustc_mir/borrow_check/nll/region_infer/mod.rs index 78e7943598..dbb810db55 100644 --- a/src/librustc_mir/borrow_check/nll/region_infer/mod.rs +++ b/src/librustc_mir/borrow_check/nll/region_infer/mod.rs @@ -27,12 +27,12 @@ use rustc::mir::{ use rustc::ty::{self, subst::SubstsRef, RegionVid, Ty, TyCtxt, TypeFoldable}; use rustc::util::common::ErrorReported; use rustc_data_structures::binary_search_util; -use rustc_data_structures::bit_set::BitSet; +use rustc_index::bit_set::BitSet; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_data_structures::graph::WithSuccessors; use rustc_data_structures::graph::scc::Sccs; use rustc_data_structures::graph::vec_graph::VecGraph; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc_index::vec::IndexVec; use rustc_errors::{Diagnostic, DiagnosticBuilder}; use syntax_pos::Span; @@ -406,7 +406,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { } } - NLLRegionVariableOrigin::Existential => { + NLLRegionVariableOrigin::Existential { .. } => { // For existential, regions, nothing to do. } } @@ -1348,7 +1348,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { self.check_bound_universal_region(infcx, body, mir_def_id, fr, placeholder); } - NLLRegionVariableOrigin::Existential => { + NLLRegionVariableOrigin::Existential { .. } => { // nothing to check here } } @@ -1461,7 +1461,8 @@ impl<'tcx> RegionInferenceContext<'tcx> { debug!("check_universal_region: fr_minus={:?}", fr_minus); let blame_span_category = - self.find_outlives_blame_span(body, longer_fr, shorter_fr); + self.find_outlives_blame_span(body, longer_fr, + NLLRegionVariableOrigin::FreeRegion,shorter_fr); // Grow `shorter_fr` until we find some non-local regions. (We // always will.) We'll call them `shorter_fr+` -- they're ever @@ -1494,6 +1495,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { infcx, mir_def_id, longer_fr, + NLLRegionVariableOrigin::FreeRegion, shorter_fr, region_naming, ); @@ -1547,7 +1549,9 @@ impl<'tcx> RegionInferenceContext<'tcx> { }; // Find the code to blame for the fact that `longer_fr` outlives `error_fr`. - let (_, span) = self.find_outlives_blame_span(body, longer_fr, error_region); + let (_, span) = self.find_outlives_blame_span( + body, longer_fr, NLLRegionVariableOrigin::Placeholder(placeholder), error_region + ); // Obviously, this error message is far from satisfactory. 
// At present, though, it only appears in unit tests -- @@ -1608,7 +1612,7 @@ impl<'tcx> RegionDefinition<'tcx> { let origin = match rv_origin { RegionVariableOrigin::NLL(origin) => origin, - _ => NLLRegionVariableOrigin::Existential, + _ => NLLRegionVariableOrigin::Existential { from_forall: false }, }; Self { origin, universe, external_name: None } diff --git a/src/librustc_mir/borrow_check/nll/region_infer/values.rs b/src/librustc_mir/borrow_check/nll/region_infer/values.rs index 6f9f570793..7a86536573 100644 --- a/src/librustc_mir/borrow_check/nll/region_infer/values.rs +++ b/src/librustc_mir/borrow_check/nll/region_infer/values.rs @@ -1,9 +1,9 @@ use rustc::mir::{BasicBlock, Location, Body}; use rustc::ty::{self, RegionVid}; -use rustc_data_structures::bit_set::{HybridBitSet, SparseBitMatrix}; +use rustc_index::bit_set::{HybridBitSet, SparseBitMatrix}; use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::indexed_vec::Idx; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc_index::vec::Idx; +use rustc_index::vec::IndexVec; use std::fmt::Debug; use std::rc::Rc; @@ -116,20 +116,20 @@ impl RegionValueElements { } } -newtype_index! { +rustc_index::newtype_index! { /// A single integer representing a `Location` in the MIR control-flow /// graph. Constructed efficiently from `RegionValueElements`. pub struct PointIndex { DEBUG_FORMAT = "PointIndex({})" } } -newtype_index! { +rustc_index::newtype_index! { /// A single integer representing a `ty::Placeholder`. pub struct PlaceholderIndex { DEBUG_FORMAT = "PlaceholderIndex({})" } } /// An individual element in a region value -- the value of a /// particular region variable consists of a set of these elements. -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +#[derive(Debug)] crate enum RegionElement { /// A point in the control-flow graph. Location(Location), diff --git a/src/librustc_mir/borrow_check/nll/renumber.rs b/src/librustc_mir/borrow_check/nll/renumber.rs index c479c38f30..d949c7e01a 100644 --- a/src/librustc_mir/borrow_check/nll/renumber.rs +++ b/src/librustc_mir/borrow_check/nll/renumber.rs @@ -1,9 +1,9 @@ use rustc::ty::subst::SubstsRef; -use rustc::ty::{self, ClosureSubsts, GeneratorSubsts, Ty, TypeFoldable}; -use rustc::mir::{Location, Body, Promoted}; +use rustc::ty::{self, Ty, TyCtxt, TypeFoldable}; +use rustc::mir::{Body, Location, PlaceElem, Promoted}; use rustc::mir::visit::{MutVisitor, TyContext}; use rustc::infer::{InferCtxt, NLLRegionVariableOrigin}; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc_index::vec::IndexVec; /// Replaces all free regions appearing in the MIR with fresh /// inference variables, returning the number of variables created. 
@@ -35,7 +35,7 @@ where infcx .tcx .fold_regions(value, &mut false, |_region, _depth| { - let origin = NLLRegionVariableOrigin::Existential; + let origin = NLLRegionVariableOrigin::Existential { from_forall: false }; infcx.next_nll_region_var(origin) }) } @@ -54,6 +54,10 @@ impl<'a, 'tcx> NLLVisitor<'a, 'tcx> { } impl<'a, 'tcx> MutVisitor<'tcx> for NLLVisitor<'a, 'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.infcx.tcx + } + fn visit_ty(&mut self, ty: &mut Ty<'tcx>, ty_context: TyContext) { debug!("visit_ty(ty={:?}, ty_context={:?})", ty, ty_context); @@ -62,6 +66,21 @@ impl<'a, 'tcx> MutVisitor<'tcx> for NLLVisitor<'a, 'tcx> { debug!("visit_ty: ty={:?}", ty); } + fn process_projection_elem( + &mut self, + elem: &PlaceElem<'tcx>, + ) -> Option> { + if let PlaceElem::Field(field, ty) = elem { + let new_ty = self.renumber_regions(ty); + + if new_ty != *ty { + return Some(PlaceElem::Field(*field, new_ty)); + } + } + + None + } + fn visit_substs(&mut self, substs: &mut SubstsRef<'tcx>, location: Location) { debug!("visit_substs(substs={:?}, location={:?})", substs, location); @@ -82,30 +101,4 @@ impl<'a, 'tcx> MutVisitor<'tcx> for NLLVisitor<'a, 'tcx> { fn visit_const(&mut self, constant: &mut &'tcx ty::Const<'tcx>, _location: Location) { *constant = self.renumber_regions(&*constant); } - - fn visit_generator_substs(&mut self, - substs: &mut GeneratorSubsts<'tcx>, - location: Location) { - debug!( - "visit_generator_substs(substs={:?}, location={:?})", - substs, - location, - ); - - *substs = self.renumber_regions(substs); - - debug!("visit_generator_substs: substs={:?}", substs); - } - - fn visit_closure_substs(&mut self, substs: &mut ClosureSubsts<'tcx>, location: Location) { - debug!( - "visit_closure_substs(substs={:?}, location={:?})", - substs, - location - ); - - *substs = self.renumber_regions(substs); - - debug!("visit_closure_substs: substs={:?}", substs); - } } diff --git a/src/librustc_mir/borrow_check/nll/type_check/constraint_conversion.rs b/src/librustc_mir/borrow_check/nll/type_check/constraint_conversion.rs index 8de014522d..34ac96beb5 100644 --- a/src/librustc_mir/borrow_check/nll/type_check/constraint_conversion.rs +++ b/src/librustc_mir/borrow_check/nll/type_check/constraint_conversion.rs @@ -10,7 +10,7 @@ use rustc::infer::outlives::obligations::{TypeOutlives, TypeOutlivesDelegate}; use rustc::infer::region_constraints::{GenericKind, VerifyBound}; use rustc::infer::{self, InferCtxt, SubregionOrigin}; use rustc::mir::ConstraintCategory; -use rustc::ty::subst::UnpackedKind; +use rustc::ty::subst::GenericArgKind; use rustc::ty::{self, TyCtxt}; use syntax_pos::DUMMY_SP; @@ -101,13 +101,13 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { }); match k1.unpack() { - UnpackedKind::Lifetime(r1) => { + GenericArgKind::Lifetime(r1) => { let r1_vid = self.to_region_vid(r1); let r2_vid = self.to_region_vid(r2); self.add_outlives(r1_vid, r2_vid); } - UnpackedKind::Type(t1) => { + GenericArgKind::Type(t1) => { // we don't actually use this for anything, but // the `TypeOutlives` code needs an origin. let origin = infer::RelateParamBound(DUMMY_SP, t1); @@ -121,7 +121,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { ).type_must_outlive(origin, t1, r2); } - UnpackedKind::Const(_) => { + GenericArgKind::Const(_) => { // Consts cannot outlive one another, so we // don't need to handle any relations here. 
} @@ -178,6 +178,9 @@ impl<'a, 'b, 'tcx> TypeOutlivesDelegate<'tcx> for &'a mut ConstraintConversion<' a: ty::Region<'tcx>, b: ty::Region<'tcx>, ) { + if let ty::ReEmpty = a { + return; + } let b = self.to_region_vid(b); let a = self.to_region_vid(a); self.add_outlives(b, a); @@ -190,6 +193,9 @@ impl<'a, 'b, 'tcx> TypeOutlivesDelegate<'tcx> for &'a mut ConstraintConversion<' a: ty::Region<'tcx>, bound: VerifyBound<'tcx>, ) { + if let ty::ReEmpty = a { + return; + } let type_test = self.verify_to_type_test(kind, a, bound); self.add_type_test(type_test); } diff --git a/src/librustc_mir/borrow_check/nll/type_check/input_output.rs b/src/librustc_mir/borrow_check/nll/type_check/input_output.rs index 99661b1f73..d74dd0fc0f 100644 --- a/src/librustc_mir/borrow_check/nll/type_check/input_output.rs +++ b/src/librustc_mir/borrow_check/nll/type_check/input_output.rs @@ -12,7 +12,7 @@ use rustc::infer::LateBoundRegionConversionTime; use rustc::mir::*; use rustc::ty::Ty; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::vec::Idx; use syntax_pos::Span; use super::{Locations, TypeChecker}; diff --git a/src/librustc_mir/borrow_check/nll/type_check/liveness/local_use_map.rs b/src/librustc_mir/borrow_check/nll/type_check/liveness/local_use_map.rs index 049d83bb22..7dee00b3ec 100644 --- a/src/librustc_mir/borrow_check/nll/type_check/liveness/local_use_map.rs +++ b/src/librustc_mir/borrow_check/nll/type_check/liveness/local_use_map.rs @@ -2,7 +2,7 @@ use crate::borrow_check::nll::region_infer::values::{PointIndex, RegionValueElem use crate::util::liveness::{categorize, DefUse}; use rustc::mir::visit::{PlaceContext, Visitor}; use rustc::mir::{Body, Local, Location}; -use rustc_data_structures::indexed_vec::{Idx, IndexVec}; +use rustc_index::vec::{Idx, IndexVec}; use rustc_data_structures::vec_linked_list as vll; /// A map that cross references each local with the locations where it @@ -44,7 +44,7 @@ struct Appearance { next: Option, } -newtype_index! { +rustc_index::newtype_index! { pub struct AppearanceIndex { .. } } @@ -70,6 +70,10 @@ impl LocalUseMap { appearances: IndexVec::new(), }; + if live_locals.is_empty() { + return local_use_map; + } + let mut locals_with_use_data: IndexVec = IndexVec::from_elem_n(false, body.local_decls.len()); live_locals.iter().for_each(|&local| locals_with_use_data[local] = true); diff --git a/src/librustc_mir/borrow_check/nll/type_check/liveness/mod.rs b/src/librustc_mir/borrow_check/nll/type_check/liveness/mod.rs index 3f2ec1ba97..a01b528833 100644 --- a/src/librustc_mir/borrow_check/nll/type_check/liveness/mod.rs +++ b/src/librustc_mir/borrow_check/nll/type_check/liveness/mod.rs @@ -36,31 +36,39 @@ pub(super) fn generate<'tcx>( ) { debug!("liveness::generate"); - let live_locals: Vec = if AllFacts::enabled(typeck.tcx()) { - // If "dump facts from NLL analysis" was requested perform - // the liveness analysis for all `Local`s. This case opens - // the possibility of the variables being analyzed in `trace` - // to be *any* `Local`, not just the "live" ones, so we can't - // make any assumptions past this point as to the characteristics - // of the `live_locals`. - // FIXME: Review "live" terminology past this point, we should - // not be naming the `Local`s as live. 
- body.local_decls.indices().collect() + let free_regions = regions_that_outlive_free_regions( + typeck.infcx.num_region_vars(), + &typeck.borrowck_context.universal_regions, + &typeck.borrowck_context.constraints.outlives_constraints, + ); + let live_locals = compute_live_locals(typeck.tcx(), &free_regions, body); + let facts_enabled = AllFacts::enabled(typeck.tcx()); + + + let polonius_drop_used = if facts_enabled { + let mut drop_used = Vec::new(); + polonius::populate_access_facts( + typeck, + body, + location_table, + move_data, + &mut drop_used, + ); + Some(drop_used) } else { - let free_regions = { - regions_that_outlive_free_regions( - typeck.infcx.num_region_vars(), - &typeck.borrowck_context.universal_regions, - &typeck.borrowck_context.constraints.outlives_constraints, - ) - }; - compute_live_locals(typeck.tcx(), &free_regions, body) + None }; - if !live_locals.is_empty() { - trace::trace(typeck, body, elements, flow_inits, move_data, live_locals); - - polonius::populate_access_facts(typeck, body, location_table, move_data); + if !live_locals.is_empty() || facts_enabled { + trace::trace( + typeck, + body, + elements, + flow_inits, + move_data, + live_locals, + polonius_drop_used, + ); } } diff --git a/src/librustc_mir/borrow_check/nll/type_check/liveness/polonius.rs b/src/librustc_mir/borrow_check/nll/type_check/liveness/polonius.rs index d61464b3f3..526ad7fb90 100644 --- a/src/librustc_mir/borrow_check/nll/type_check/liveness/polonius.rs +++ b/src/librustc_mir/borrow_check/nll/type_check/liveness/polonius.rs @@ -4,7 +4,7 @@ use crate::dataflow::move_paths::{LookupResult, MoveData}; use crate::util::liveness::{categorize, DefUse}; use rustc::mir::visit::{MutatingUseContext, PlaceContext, Visitor}; use rustc::mir::{Body, Local, Location, Place}; -use rustc::ty::subst::Kind; +use rustc::ty::subst::GenericArg; use rustc::ty::Ty; use super::TypeChecker; @@ -16,7 +16,7 @@ struct UseFactsExtractor<'me> { var_defined: &'me mut VarPointRelations, var_used: &'me mut VarPointRelations, location_table: &'me LocationTable, - var_drop_used: &'me mut VarPointRelations, + var_drop_used: &'me mut Vec<(Local, Location)>, move_data: &'me MoveData<'me>, path_accessed_at: &'me mut MovePathPointRelations, } @@ -39,7 +39,7 @@ impl UseFactsExtractor<'_> { fn insert_drop_use(&mut self, local: Local, location: Location) { debug!("LivenessFactsExtractor::insert_drop_use()"); - self.var_drop_used.push((local, self.location_to_index(location))); + self.var_drop_used.push((local, location)); } fn insert_path_access(&mut self, path: MovePathIndex, location: Location) { @@ -100,6 +100,7 @@ pub(super) fn populate_access_facts( body: &Body<'tcx>, location_table: &LocationTable, move_data: &MoveData<'_>, + drop_used: &mut Vec<(Local, Location)>, ) { debug!("populate_var_liveness_facts()"); @@ -107,12 +108,16 @@ pub(super) fn populate_access_facts( UseFactsExtractor { var_defined: &mut facts.var_defined, var_used: &mut facts.var_used, - var_drop_used: &mut facts.var_drop_used, + var_drop_used: drop_used, path_accessed_at: &mut facts.path_accessed_at, location_table, move_data, } .visit_body(body); + + facts.var_drop_used.extend(drop_used.iter().map(|&(local, location)| { + (local, location_table.mid_index(location)) + })); } for (local, local_decl) in body.local_decls.iter_enumerated() { @@ -125,7 +130,7 @@ pub(super) fn populate_access_facts( pub(super) fn add_var_drops_regions( typeck: &mut TypeChecker<'_, 'tcx>, local: Local, - kind: &Kind<'tcx>, + kind: &GenericArg<'tcx>, ) { 
debug!("add_var_drops_region(local={:?}, kind={:?}", local, kind); let tcx = typeck.tcx(); diff --git a/src/librustc_mir/borrow_check/nll/type_check/liveness/trace.rs b/src/librustc_mir/borrow_check/nll/type_check/liveness/trace.rs index 9b55881cb1..eacc4d084d 100644 --- a/src/librustc_mir/borrow_check/nll/type_check/liveness/trace.rs +++ b/src/librustc_mir/borrow_check/nll/type_check/liveness/trace.rs @@ -12,8 +12,8 @@ use rustc::traits::query::dropck_outlives::DropckOutlivesResult; use rustc::traits::query::type_op::outlives::DropckOutlives; use rustc::traits::query::type_op::TypeOp; use rustc::ty::{Ty, TypeFoldable}; -use rustc_data_structures::bit_set::HybridBitSet; -use rustc_data_structures::fx::FxHashMap; +use rustc_index::bit_set::HybridBitSet; +use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use std::rc::Rc; /// This is the heart of the liveness computation. For each variable X @@ -37,6 +37,7 @@ pub(super) fn trace( flow_inits: &mut FlowAtLocation<'tcx, MaybeInitializedPlaces<'_, 'tcx>>, move_data: &MoveData<'tcx>, live_locals: Vec, + polonius_drop_used: Option>, ) { debug!("trace()"); @@ -52,7 +53,13 @@ pub(super) fn trace( drop_data: FxHashMap::default(), }; - LivenessResults::new(cx).compute_for_all_locals(live_locals); + let mut results = LivenessResults::new(cx); + + if let Some(drop_used) = polonius_drop_used { + results.add_extra_drop_facts(drop_used, live_locals.iter().copied().collect()) + } + + results.compute_for_all_locals(live_locals); } /// Contextual state for the type-liveness generator. @@ -145,6 +152,32 @@ impl LivenessResults<'me, 'typeck, 'flow, 'tcx> { } } + /// Add extra drop facts needed for Polonius. + /// + /// Add facts for all locals with free regions, since regions may outlive + /// the function body only at certain nodes in the CFG. + fn add_extra_drop_facts( + &mut self, + drop_used: Vec<(Local, Location)>, + live_locals: FxHashSet, + ) { + let locations = HybridBitSet::new_empty(self.cx.elements.num_points()); + + for (local, location) in drop_used { + if !live_locals.contains(&local) { + let local_ty = self.cx.body.local_decls[local].ty; + if local_ty.has_free_regions() { + self.cx.add_drop_live_facts_for( + local, + local_ty, + &[location], + &locations, + ); + } + } + } + } + /// Clear the value of fields that are "per local variable". 
fn reset_local_state(&mut self) { self.defs.clear(); diff --git a/src/librustc_mir/borrow_check/nll/type_check/mod.rs b/src/librustc_mir/borrow_check/nll/type_check/mod.rs index 6a764b19c4..9f2f174553 100644 --- a/src/librustc_mir/borrow_check/nll/type_check/mod.rs +++ b/src/librustc_mir/borrow_check/nll/type_check/mod.rs @@ -16,6 +16,7 @@ use crate::borrow_check::nll::type_check::free_region_relations::{ }; use crate::borrow_check::nll::universal_regions::{DefiningTy, UniversalRegions}; use crate::borrow_check::nll::ToRegionVid; +use crate::transform::promote_consts::should_suggest_const_in_array_repeat_expressions_attribute; use crate::dataflow::move_paths::MoveData; use crate::dataflow::FlowAtLocation; use crate::dataflow::MaybeInitializedPlaces; @@ -36,14 +37,14 @@ use rustc::traits::query::{Fallible, NoSolution}; use rustc::traits::{self, ObligationCause, PredicateObligations}; use rustc::ty::adjustment::{PointerCast}; use rustc::ty::fold::TypeFoldable; -use rustc::ty::subst::{Subst, SubstsRef, UnpackedKind, UserSubsts}; +use rustc::ty::subst::{Subst, SubstsRef, GenericArgKind, UserSubsts}; use rustc::ty::{ self, RegionVid, ToPolyTraitRef, Ty, TyCtxt, UserType, CanonicalUserTypeAnnotation, CanonicalUserTypeAnnotations, UserTypeAnnotationIndex, }; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; -use rustc_data_structures::indexed_vec::{IndexVec, Idx}; +use rustc_index::vec::{IndexVec, Idx}; use rustc::ty::layout::VariantIdx; use std::rc::Rc; use std::{fmt, iter, mem}; @@ -276,7 +277,17 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { fn visit_constant(&mut self, constant: &Constant<'tcx>, location: Location) { self.super_constant(constant, location); - self.sanitize_type(constant, constant.literal.ty); + let ty = self.sanitize_type(constant, constant.literal.ty); + + self.cx.infcx.tcx.for_each_free_region(&ty, |live_region| { + let live_region_vid = + self.cx.borrowck_context.universal_regions.to_region_vid(live_region); + self.cx + .borrowck_context + .constraints + .liveness_constraints + .add_element(live_region_vid, location); + }); if let Some(annotation_index) = constant.user_ty { if let Err(terr) = self.cx.relate_type_and_user_type( @@ -314,7 +325,7 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { ); } } - if let ty::FnDef(def_id, substs) = constant.literal.ty.sty { + if let ty::FnDef(def_id, substs) = constant.literal.ty.kind { let tcx = self.tcx(); let instantiated_predicates = tcx @@ -342,7 +353,7 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { let ty = if !local_decl.is_nonref_binding() { // If we have a binding of the form `let ref x: T = ..` then remove the outermost // reference so we can check the type annotation for the remaining type. 
- if let ty::Ref(_, rty, _) = local_decl.ty.sty { + if let ty::Ref(_, rty, _) = local_decl.ty.kind { rty } else { bug!("{:?} with ref binding has wrong type {}", local, local_decl.ty); @@ -424,15 +435,15 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { let mut place_ty = match &place.base { PlaceBase::Local(index) => PlaceTy::from_ty(self.body.local_decls[*index].ty), - PlaceBase::Static(box Static { kind, ty: sty, def_id }) => { - let sty = self.sanitize_type(place, sty); + PlaceBase::Static(box Static { kind, ty, def_id }) => { + let san_ty = self.sanitize_type(place, ty); let check_err = |verifier: &mut TypeVerifier<'a, 'b, 'tcx>, place: &Place<'tcx>, ty, - sty| { + san_ty| { if let Err(terr) = verifier.cx.eq_types( - sty, + san_ty, ty, location.to_locations(), ConstraintCategory::Boring, @@ -442,7 +453,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { place, "bad promoted type ({:?}: {:?}): {:?}", ty, - sty, + san_ty, terr ); }; @@ -454,29 +465,29 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { self.sanitize_promoted(promoted_body, location); let promoted_ty = promoted_body.return_ty(); - check_err(self, place, promoted_ty, sty); + check_err(self, place, promoted_ty, san_ty); } } StaticKind::Static => { let ty = self.tcx().type_of(*def_id); let ty = self.cx.normalize(ty, location); - check_err(self, place, ty, sty); + check_err(self, place, ty, san_ty); } } - PlaceTy::from_ty(sty) + PlaceTy::from_ty(san_ty) } }; if place.projection.is_empty() { if let PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy) = context { - let is_promoted = match place { - Place { - base: PlaceBase::Static(box Static { + let is_promoted = match place.as_ref() { + PlaceRef { + base: &PlaceBase::Static(box Static { kind: StaticKind::Promoted(..), .. }), - projection: box [], + projection: &[], } => true, _ => false, }; @@ -528,25 +539,37 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { let parent_body = mem::replace(&mut self.body, promoted_body); + // Use new sets of constraints and closure bounds so that we can + // modify their locations. let all_facts = &mut None; let mut constraints = Default::default(); let mut closure_bounds = Default::default(); + let mut liveness_constraints = LivenessValues::new( + Rc::new(RegionValueElements::new(promoted_body)), + ); // Don't try to add borrow_region facts for the promoted MIR - mem::swap(self.cx.borrowck_context.all_facts, all_facts); - // Use a new sets of constraints and closure bounds so that we can - // modify their locations. - mem::swap( - &mut self.cx.borrowck_context.constraints.outlives_constraints, - &mut constraints - ); - mem::swap( - &mut self.cx.borrowck_context.constraints.closure_bounds_mapping, - &mut closure_bounds - ); + let mut swap_constraints = |this: &mut Self| { + mem::swap(this.cx.borrowck_context.all_facts, all_facts); + mem::swap( + &mut this.cx.borrowck_context.constraints.outlives_constraints, + &mut constraints + ); + mem::swap( + &mut this.cx.borrowck_context.constraints.closure_bounds_mapping, + &mut closure_bounds + ); + mem::swap( + &mut this.cx.borrowck_context.constraints.liveness_constraints, + &mut liveness_constraints + ); + }; + + swap_constraints(self); self.visit_body(promoted_body); + if !self.errors_reported { // if verifier failed, don't do further checks to avoid ICEs self.cx.typeck_mir(promoted_body); @@ -554,23 +577,15 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { self.body = parent_body; // Merge the outlives constraints back in, at the given location. 
- mem::swap(self.cx.borrowck_context.all_facts, all_facts); - mem::swap( - &mut self.cx.borrowck_context.constraints.outlives_constraints, - &mut constraints - ); - mem::swap( - &mut self.cx.borrowck_context.constraints.closure_bounds_mapping, - &mut closure_bounds - ); + swap_constraints(self); let locations = location.to_locations(); for constraint in constraints.outlives().iter() { let mut constraint = *constraint; constraint.locations = locations; if let ConstraintCategory::Return - | ConstraintCategory::UseAsConst - | ConstraintCategory::UseAsStatic = constraint.category + | ConstraintCategory::UseAsConst + | ConstraintCategory::UseAsStatic = constraint.category { // "Returning" from a promoted is an assigment to a // temporary from the user's point of view. @@ -578,6 +593,10 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { } self.cx.borrowck_context.constraints.outlives_constraints.push(constraint) } + for live_region in liveness_constraints.rows() { + self.cx.borrowck_context.constraints.liveness_constraints + .add_element(live_region, location); + } if !closure_bounds.is_empty() { let combined_bounds_mapping = closure_bounds @@ -637,7 +656,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { ) } ProjectionElem::Subslice { from, to } => PlaceTy::from_ty( - match base_ty.sty { + match base_ty.kind { ty::Array(inner, size) => { let size = size.eval_usize(tcx, self.cx.param_env); let min_size = (from as u64) + (to as u64); @@ -656,7 +675,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { _ => span_mirbug_and_err!(self, place, "slice of non-array {:?}", base_ty), }, ), - ProjectionElem::Downcast(maybe_name, index) => match base_ty.sty { + ProjectionElem::Downcast(maybe_name, index) => match base_ty.kind { ty::Adt(adt_def, _substs) if adt_def.is_enum() => { if index.as_usize() >= adt_def.variants.len() { PlaceTy::from_ty( @@ -738,16 +757,16 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { let tcx = self.tcx(); let (variant, substs) = match base_ty { - PlaceTy { ty, variant_index: Some(variant_index) } => match ty.sty { + PlaceTy { ty, variant_index: Some(variant_index) } => match ty.kind { ty::Adt(adt_def, substs) => (&adt_def.variants[variant_index], substs), ty::Generator(def_id, substs, _) => { - let mut variants = substs.state_tys(def_id, tcx); + let mut variants = substs.as_generator().state_tys(def_id, tcx); let mut variant = match variants.nth(variant_index.into()) { Some(v) => v, None => { bug!("variant_index of generator out of range: {:?}/{:?}", variant_index, - substs.state_tys(def_id, tcx).count()) + substs.as_generator().state_tys(def_id, tcx).count()) } }; return match variant.nth(field.index()) { @@ -759,24 +778,24 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { } _ => bug!("can't have downcast of non-adt non-generator type"), } - PlaceTy { ty, variant_index: None } => match ty.sty { + PlaceTy { ty, variant_index: None } => match ty.kind { ty::Adt(adt_def, substs) if !adt_def.is_enum() => (&adt_def.variants[VariantIdx::new(0)], substs), ty::Closure(def_id, substs) => { - return match substs.upvar_tys(def_id, tcx).nth(field.index()) { + return match substs.as_closure().upvar_tys(def_id, tcx).nth(field.index()) { Some(ty) => Ok(ty), None => Err(FieldAccessError::OutOfRange { - field_count: substs.upvar_tys(def_id, tcx).count(), + field_count: substs.as_closure().upvar_tys(def_id, tcx).count(), }), } } ty::Generator(def_id, substs, _) => { // Only prefix fields (upvars and current state) are // accessible without a variant index. 
- return match substs.prefix_tys(def_id, tcx).nth(field.index()) { + return match substs.as_generator().prefix_tys(def_id, tcx).nth(field.index()) { Some(ty) => Ok(ty), None => Err(FieldAccessError::OutOfRange { - field_count: substs.prefix_tys(def_id, tcx).count(), + field_count: substs.as_generator().prefix_tys(def_id, tcx).count(), }), } } @@ -1142,7 +1161,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { category: ConstraintCategory, ) -> Fallible<()> { if let Err(terr) = self.sub_types(sub, sup, locations, category) { - if let ty::Opaque(..) = sup.sty { + if let ty::Opaque(..) = sup.kind { // When you have `let x: impl Foo = ...` in a closure, // the resulting inferend values are stored with the // def-id of the base function. @@ -1348,11 +1367,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { // they are not caused by the user, but rather artifacts // of lowering. Assignments to other sorts of places *are* interesting // though. - let category = match *place { - Place { - base: PlaceBase::Local(RETURN_PLACE), - projection: box [], - } => if let BorrowCheckContext { + let category = match place.as_local() { + Some(RETURN_PLACE) => if let BorrowCheckContext { universal_regions: UniversalRegions { defining_ty: DefiningTy::Const(def_id, _), @@ -1368,17 +1384,16 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { } else { ConstraintCategory::Return }, - Place { - base: PlaceBase::Local(l), - projection: box [], - } if !body.local_decls[l].is_user_variable.is_some() => { + Some(l) if !body.local_decls[l].is_user_variable.is_some() => { ConstraintCategory::Boring } _ => ConstraintCategory::Assignment, }; let place_ty = place.ty(body, tcx).ty; + let place_ty = self.normalize(place_ty, location); let rv_ty = rv.ty(body, tcx); + let rv_ty = self.normalize(rv_ty, location); if let Err(terr) = self.sub_types_or_anon(rv_ty, place_ty, location.to_locations(), category) { @@ -1430,7 +1445,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { variant_index, } => { let place_type = place.ty(body, tcx).ty; - let adt = match place_type.sty { + let adt = match place_type.kind { ty::Adt(adt, _) if adt.is_enum() => adt, _ => { span_bug!( @@ -1559,7 +1574,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { } => { let func_ty = func.ty(body, tcx); debug!("check_terminator: call, func_ty={:?}", func_ty); - let sig = match func_ty.sty { + let sig = match func_ty.kind { ty::FnDef(..) | ty::FnPtr(_) => func_ty.fn_sig(tcx), _ => { span_mirbug!(self, term, "call to non-function {:?}", func_ty); @@ -1654,11 +1669,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { match *destination { Some((ref dest, _target_block)) => { let dest_ty = dest.ty(body, tcx).ty; - let category = match *dest { - Place { - base: PlaceBase::Local(RETURN_PLACE), - projection: box [], - } => { + let dest_ty = self.normalize(dest_ty, term_location); + let category = match dest.as_local() { + Some(RETURN_PLACE) => { if let BorrowCheckContext { universal_regions: UniversalRegions { @@ -1677,10 +1690,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { ConstraintCategory::Return } } - Place { - base: PlaceBase::Local(l), - projection: box [], - } if !body.local_decls[l].is_user_variable.is_some() => { + Some(l) if !body.local_decls[l].is_user_variable.is_some() => { ConstraintCategory::Boring } _ => ConstraintCategory::Assignment, @@ -1726,17 +1736,10 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { from_hir_call: bool, ) { debug!("check_call_inputs({:?}, {:?})", sig, args); - // Do not count the `VaListImpl` argument as a "true" argument to - // a C-variadic function. 
- let inputs = if sig.c_variadic { - &sig.inputs()[..sig.inputs().len() - 1] - } else { - &sig.inputs()[..] - }; - if args.len() < inputs.len() || (args.len() > inputs.len() && !sig.c_variadic) { + if args.len() < sig.inputs().len() || (args.len() > sig.inputs().len() && !sig.c_variadic) { span_mirbug!(self, term, "call to {:?} with wrong # of args", sig); } - for (n, (fn_arg, op_arg)) in inputs.iter().zip(args).enumerate() { + for (n, (fn_arg, op_arg)) in sig.inputs().iter().zip(args).enumerate() { let op_arg_ty = op_arg.ty(body, self.tcx()); let category = if from_hir_call { ConstraintCategory::CallArgument @@ -1894,9 +1897,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { // Erase the regions from `ty` to get a global type. The // `Sized` bound in no way depends on precise regions, so this // shouldn't affect `is_sized`. - let gcx = tcx.global_tcx(); let erased_ty = tcx.erase_regions(&ty); - if !erased_ty.is_sized(gcx.at(span), self.param_env) { + if !erased_ty.is_sized(tcx.at(span), self.param_env) { // in current MIR construction, all non-control-flow rvalue // expressions evaluate through `as_temp` or `into` a return // slot or local, so to find all unsized rvalues it is enough @@ -1942,10 +1944,10 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { } } AggregateKind::Closure(def_id, substs) => { - match substs.upvar_tys(def_id, tcx).nth(field_index) { + match substs.as_closure().upvar_tys(def_id, tcx).nth(field_index) { Some(ty) => Ok(ty), None => Err(FieldAccessError::OutOfRange { - field_count: substs.upvar_tys(def_id, tcx).count(), + field_count: substs.as_closure().upvar_tys(def_id, tcx).count(), }), } } @@ -1953,10 +1955,10 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { // It doesn't make sense to look at a field beyond the prefix; // these require a variant index, and are not initialized in // aggregate rvalues. - match substs.prefix_tys(def_id, tcx).nth(field_index) { + match substs.as_generator().prefix_tys(def_id, tcx).nth(field_index) { Some(ty) => Ok(ty), None => Err(FieldAccessError::OutOfRange { - field_count: substs.prefix_tys(def_id, tcx).count(), + field_count: substs.as_generator().prefix_tys(def_id, tcx).count(), }), } } @@ -1982,12 +1984,19 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { let span = body.source_info(location).span; let ty = operand.ty(body, tcx); if !self.infcx.type_is_copy_modulo_regions(self.param_env, ty, span) { + // To determine if `const_in_array_repeat_expression` feature gate should + // be mentioned, need to check if the rvalue is promotable. 
+ let should_suggest = + should_suggest_const_in_array_repeat_expressions_attribute( + tcx, self.mir_def_id, body, operand); + debug!("check_rvalue: should_suggest={:?}", should_suggest); + self.infcx.report_selection_error( &traits::Obligation::new( ObligationCause::new( span, self.tcx().hir().def_index_to_hir_id(self.mir_def_id.index), - traits::ObligationCauseCode::RepeatVec, + traits::ObligationCauseCode::RepeatVec(should_suggest), ), self.param_env, ty::Predicate::Trait(ty::Binder::bind(ty::TraitPredicate { @@ -2056,9 +2065,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { } CastKind::Pointer(PointerCast::ClosureFnPointer(unsafety)) => { - let sig = match op.ty(body, tcx).sty { + let sig = match op.ty(body, tcx).kind { ty::Closure(def_id, substs) => { - substs.closure_sig_ty(def_id, tcx).fn_sig(tcx) + substs.as_closure().sig_ty(def_id, tcx).fn_sig(tcx) } _ => bug!(), }; @@ -2125,7 +2134,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { } CastKind::Pointer(PointerCast::MutToConstPointer) => { - let ty_from = match op.ty(body, tcx).sty { + let ty_from = match op.ty(body, tcx).kind { ty::RawPtr(ty::TypeAndMut { ty: ty_from, mutbl: hir::MutMutable, @@ -2140,7 +2149,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { return; } }; - let ty_to = match ty.sty { + let ty_to = match ty.kind { ty::RawPtr(ty::TypeAndMut { ty: ty_to, mutbl: hir::MutImmutable, @@ -2173,11 +2182,11 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { } CastKind::Misc => { - if let ty::Ref(_, mut ty_from, _) = op.ty(body, tcx).sty { + if let ty::Ref(_, mut ty_from, _) = op.ty(body, tcx).kind { let (mut ty_to, mutability) = if let ty::RawPtr(ty::TypeAndMut { ty: ty_to, mutbl, - }) = ty.sty { + }) = ty.kind { (ty_to, mutbl) } else { span_mirbug!( @@ -2192,9 +2201,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { // Handle the direct cast from `&[T; N]` to `*const T` by unwrapping // any array we find. - while let ty::Array(ty_elem_from, _) = ty_from.sty { + while let ty::Array(ty_elem_from, _) = ty_from.kind { ty_from = ty_elem_from; - if let ty::Array(ty_elem_to, _) = ty_to.sty { + if let ty::Array(ty_elem_to, _) = ty_to.kind { ty_to = ty_elem_to; } else { break; @@ -2250,7 +2259,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { | Rvalue::BinaryOp(BinOp::Gt, left, right) | Rvalue::BinaryOp(BinOp::Ge, left, right) => { let ty_left = left.ty(body, tcx); - if let ty::RawPtr(_) | ty::FnPtr(_) = ty_left.sty { + if let ty::RawPtr(_) | ty::FnPtr(_) = ty_left.kind { let ty_right = right.ty(body, tcx); let common_ty = self.infcx.next_ty_var( TypeVariableOrigin { @@ -2419,7 +2428,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { location, borrow_region, borrowed_place ); - let mut cursor = &*borrowed_place.projection; + let mut cursor = borrowed_place.projection.as_ref(); while let [proj_base @ .., elem] = cursor { cursor = proj_base; @@ -2431,7 +2440,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { let base_ty = Place::ty_from(&borrowed_place.base, proj_base, body, tcx).ty; debug!("add_reborrow_constraint - base_ty = {:?}", base_ty); - match base_ty.sty { + match base_ty.kind { ty::Ref(ref_region, _, mutbl) => { constraints.outlives_constraints.push(OutlivesConstraint { sup: ref_region.to_region_vid(), @@ -2530,8 +2539,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { // desugaring. A closure gets desugared to a struct, and // these extra requirements are basically like where // clauses on the struct. 
- AggregateKind::Closure(def_id, ty::ClosureSubsts { substs }) - | AggregateKind::Generator(def_id, ty::GeneratorSubsts { substs }, _) => { + AggregateKind::Closure(def_id, substs) + | AggregateKind::Generator(def_id, substs, _) => { self.prove_closure_bounds(tcx, *def_id, substs, location) } @@ -2575,7 +2584,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { }); match k1.unpack() { - UnpackedKind::Lifetime(r1) => { + GenericArgKind::Lifetime(r1) => { // constraint is r1: r2 let r1_vid = self.borrowck_context.universal_regions.to_region_vid(r1); let r2_vid = self.borrowck_context.universal_regions.to_region_vid(r2); @@ -2589,7 +2598,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { ), )) } - UnpackedKind::Type(_) | UnpackedKind::Const(_) => None, + GenericArgKind::Type(_) | GenericArgKind::Const(_) => None, } }) .collect(); diff --git a/src/librustc_mir/borrow_check/nll/type_check/relate_tys.rs b/src/librustc_mir/borrow_check/nll/type_check/relate_tys.rs index 2549aa4fbf..80bf047812 100644 --- a/src/librustc_mir/borrow_check/nll/type_check/relate_tys.rs +++ b/src/librustc_mir/borrow_check/nll/type_check/relate_tys.rs @@ -66,9 +66,9 @@ impl TypeRelatingDelegate<'tcx> for NllTypeRelatingDelegate<'_, '_, 'tcx> { self.infcx.create_next_universe() } - fn next_existential_region_var(&mut self) -> ty::Region<'tcx> { + fn next_existential_region_var(&mut self, from_forall: bool) -> ty::Region<'tcx> { if let Some(_) = &mut self.borrowck_context { - let origin = NLLRegionVariableOrigin::Existential; + let origin = NLLRegionVariableOrigin::Existential { from_forall }; self.infcx.next_nll_region_var(origin) } else { self.infcx.tcx.lifetimes.re_erased @@ -88,7 +88,9 @@ impl TypeRelatingDelegate<'tcx> for NllTypeRelatingDelegate<'_, '_, 'tcx> { fn generalize_existential(&mut self, universe: ty::UniverseIndex) -> ty::Region<'tcx> { self.infcx - .next_nll_region_var_in_universe(NLLRegionVariableOrigin::Existential, universe) + .next_nll_region_var_in_universe(NLLRegionVariableOrigin::Existential { + from_forall: false + }, universe) } fn push_outlives(&mut self, sup: ty::Region<'tcx>, sub: ty::Region<'tcx>) { diff --git a/src/librustc_mir/borrow_check/nll/universal_regions.rs b/src/librustc_mir/borrow_check/nll/universal_regions.rs index 3e090aed52..b876fd83ec 100644 --- a/src/librustc_mir/borrow_check/nll/universal_regions.rs +++ b/src/librustc_mir/borrow_check/nll/universal_regions.rs @@ -16,11 +16,12 @@ use either::Either; use rustc::hir::def_id::DefId; use rustc::hir::{self, BodyOwnerKind, HirId}; use rustc::infer::{InferCtxt, NLLRegionVariableOrigin}; +use rustc::middle::lang_items; use rustc::ty::fold::TypeFoldable; -use rustc::ty::subst::{InternalSubsts, SubstsRef}; -use rustc::ty::{self, ClosureSubsts, GeneratorSubsts, RegionVid, Ty, TyCtxt}; +use rustc::ty::subst::{InternalSubsts, SubstsRef, Subst}; +use rustc::ty::{self, RegionVid, Ty, TyCtxt}; use rustc::util::nodemap::FxHashMap; -use rustc_data_structures::indexed_vec::{Idx, IndexVec}; +use rustc_index::vec::{Idx, IndexVec}; use rustc_errors::DiagnosticBuilder; use std::iter; @@ -84,12 +85,12 @@ pub struct UniversalRegions<'tcx> { pub enum DefiningTy<'tcx> { /// The MIR is a closure. The signature is found via /// `ClosureSubsts::closure_sig_ty`. - Closure(DefId, ty::ClosureSubsts<'tcx>), + Closure(DefId, SubstsRef<'tcx>), /// The MIR is a generator. The signature is that generators take /// no parameters and return the result of /// `ClosureSubsts::generator_return_ty`. 
- Generator(DefId, ty::GeneratorSubsts<'tcx>, hir::GeneratorMovability), + Generator(DefId, SubstsRef<'tcx>, hir::GeneratorMovability), /// The MIR is a fn item with the given `DefId` and substs. The signature /// of the function can be bound then with the `fn_sig` query. @@ -108,9 +109,11 @@ impl<'tcx> DefiningTy<'tcx> { /// match up with the upvar order in the HIR, typesystem, and MIR. pub fn upvar_tys(self, tcx: TyCtxt<'tcx>) -> impl Iterator> + 'tcx { match self { - DefiningTy::Closure(def_id, substs) => Either::Left(substs.upvar_tys(def_id, tcx)), + DefiningTy::Closure(def_id, substs) => Either::Left( + substs.as_closure().upvar_tys(def_id, tcx) + ), DefiningTy::Generator(def_id, substs, _) => { - Either::Right(Either::Left(substs.upvar_tys(def_id, tcx))) + Either::Right(Either::Left(substs.as_generator().upvar_tys(def_id, tcx))) } DefiningTy::FnDef(..) | DefiningTy::Const(..) => { Either::Right(Either::Right(iter::empty())) @@ -143,7 +146,7 @@ struct UniversalRegionIndices<'tcx> { indices: FxHashMap, RegionVid>, } -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +#[derive(Debug, PartialEq)] pub enum RegionClassification { /// A **global** region is one that can be named from /// anywhere. There is only one, `'static`. @@ -311,7 +314,7 @@ impl<'tcx> UniversalRegions<'tcx> { err.note(&format!( "defining type: {:?} with closure substs {:#?}", def_id, - &substs.substs[..] + &substs[..] )); // FIXME: It'd be nice to print the late-bound regions @@ -331,7 +334,7 @@ impl<'tcx> UniversalRegions<'tcx> { err.note(&format!( "defining type: {:?} with generator substs {:#?}", def_id, - &substs.substs[..] + &substs[..] )); // FIXME: As above, we'd like to print out the region @@ -425,12 +428,33 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> { .replace_late_bound_regions_with_nll_infer_vars(self.mir_def_id, &mut indices); } + let (unnormalized_output_ty, mut unnormalized_input_tys) = + inputs_and_output.split_last().unwrap(); + + // C-variadic fns also have a `VaList` input that's not listed in the signature + // (as it's created inside the body itself, not passed in from outside). 
+ if let DefiningTy::FnDef(def_id, _) = defining_ty { + if self.infcx.tcx.fn_sig(def_id).c_variadic() { + let va_list_did = self.infcx.tcx.require_lang_item( + lang_items::VaListTypeLangItem, + Some(self.infcx.tcx.def_span(self.mir_def_id),), + ); + let region = self.infcx.tcx.mk_region(ty::ReVar( + self.infcx.next_nll_region_var(FR).to_region_vid(), + )); + let va_list_ty = self.infcx.tcx.type_of(va_list_did) + .subst(self.infcx.tcx, &[region.into()]); + + unnormalized_input_tys = self.infcx.tcx.mk_type_list( + unnormalized_input_tys.iter().copied() + .chain(iter::once(va_list_ty)), + ); + } + } + let fr_fn_body = self.infcx.next_nll_region_var(FR).to_region_vid(); let num_universals = self.infcx.num_region_vars(); - let (unnormalized_output_ty, unnormalized_input_tys) = - inputs_and_output.split_last().unwrap(); - debug!( "build: global regions = {}..{}", FIRST_GLOBAL_INDEX, first_extern_index @@ -446,7 +470,7 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> { let yield_ty = match defining_ty { DefiningTy::Generator(def_id, substs, _) => { - Some(substs.yield_ty(def_id, self.infcx.tcx)) + Some(substs.as_generator().yield_ty(def_id, self.infcx.tcx)) } _ => None, }; @@ -486,7 +510,7 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> { let defining_ty = self.infcx .replace_free_regions_with_nll_infer_vars(FR, &defining_ty); - match defining_ty.sty { + match defining_ty.kind { ty::Closure(def_id, substs) => DefiningTy::Closure(def_id, substs), ty::Generator(def_id, substs, movability) => { DefiningTy::Generator(def_id, substs, movability) @@ -521,12 +545,11 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> { defining_ty: DefiningTy<'tcx>, ) -> UniversalRegionIndices<'tcx> { let tcx = self.infcx.tcx; - let gcx = tcx.global_tcx(); let closure_base_def_id = tcx.closure_base_def_id(self.mir_def_id); - let identity_substs = InternalSubsts::identity_for_item(gcx, closure_base_def_id); + let identity_substs = InternalSubsts::identity_for_item(tcx, closure_base_def_id); let fr_substs = match defining_ty { - DefiningTy::Closure(_, ClosureSubsts { ref substs }) - | DefiningTy::Generator(_, GeneratorSubsts { ref substs }, _) => { + DefiningTy::Closure(_, ref substs) + | DefiningTy::Generator(_, ref substs, _) => { // In the case of closures, we rely on the fact that // the first N elements in the ClosureSubsts are // inherited from the `closure_base_def_id`. @@ -542,7 +565,7 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> { DefiningTy::FnDef(_, substs) | DefiningTy::Const(_, substs) => substs, }; - let global_mapping = iter::once((gcx.lifetimes.re_static, fr_static)); + let global_mapping = iter::once((tcx.lifetimes.re_static, fr_static)); let subst_mapping = identity_substs .regions() .zip(fr_substs.regions().map(|r| r.to_region_vid())); @@ -561,7 +584,7 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> { match defining_ty { DefiningTy::Closure(def_id, substs) => { assert_eq!(self.mir_def_id, def_id); - let closure_sig = substs.closure_sig_ty(def_id, tcx).fn_sig(tcx); + let closure_sig = substs.as_closure().sig_ty(def_id, tcx).fn_sig(tcx); let inputs_and_output = closure_sig.inputs_and_output(); let closure_ty = tcx.closure_env_ty(def_id, substs).unwrap(); ty::Binder::fuse( @@ -573,7 +596,7 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> { // flattens this tuple. 
let (&output, tuplized_inputs) = inputs_and_output.split_last().unwrap(); assert_eq!(tuplized_inputs.len(), 1, "multiple closure inputs"); - let inputs = match tuplized_inputs[0].sty { + let inputs = match tuplized_inputs[0].kind { ty::Tuple(inputs) => inputs, _ => bug!("closure inputs not a tuple: {:?}", tuplized_inputs[0]), }; @@ -589,7 +612,7 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> { DefiningTy::Generator(def_id, substs, movability) => { assert_eq!(self.mir_def_id, def_id); - let output = substs.return_ty(def_id, tcx); + let output = substs.as_generator().return_ty(def_id, tcx); let generator_ty = tcx.mk_generator(def_id, substs, movability); let inputs_and_output = self.infcx.tcx.intern_type_list(&[generator_ty, output]); ty::Binder::dummy(inputs_and_output) @@ -767,7 +790,7 @@ fn for_each_late_bound_region_defined_on<'tcx>( owner: fn_def_id.index, local_id: *late_bound, }; - let name = tcx.hir().name(hir_id).as_interned_str(); + let name = tcx.hir().name(hir_id); let region_def_id = tcx.hir().local_def_id(hir_id); let liberated_region = tcx.mk_region(ty::ReFree(ty::FreeRegion { scope: fn_def_id, diff --git a/src/librustc_mir/borrow_check/place_ext.rs b/src/librustc_mir/borrow_check/place_ext.rs index 411fa5b596..f437c71729 100644 --- a/src/librustc_mir/borrow_check/place_ext.rs +++ b/src/librustc_mir/borrow_check/place_ext.rs @@ -57,7 +57,7 @@ impl<'tcx> PlaceExt<'tcx> for Place<'tcx> { if *elem == ProjectionElem::Deref { let ty = Place::ty_from(&self.base, proj_base, body, tcx).ty; - if let ty::RawPtr(..) | ty::Ref(_, _, hir::MutImmutable) = ty.sty { + if let ty::RawPtr(..) | ty::Ref(_, _, hir::MutImmutable) = ty.kind { // For both derefs of raw pointers and `&T` // references, the original path is `Copy` and // therefore not significant. In particular, diff --git a/src/librustc_mir/borrow_check/places_conflict.rs b/src/librustc_mir/borrow_check/places_conflict.rs index dafa0b6631..264e4807af 100644 --- a/src/librustc_mir/borrow_check/places_conflict.rs +++ b/src/librustc_mir/borrow_check/places_conflict.rs @@ -64,14 +64,8 @@ pub(super) fn borrow_conflicts_with_place<'tcx>( // This Local/Local case is handled by the more general code below, but // it's so common that it's a speed win to check for it first. - if let Place { - base: PlaceBase::Local(l1), - projection: box [], - } = borrow_place { - if let PlaceRef { - base: PlaceBase::Local(l2), - projection: [], - } = access_place { + if let Some(l1) = borrow_place.as_local() { + if let Some(l2) = access_place.as_local() { return l1 == l2; } } @@ -231,7 +225,7 @@ fn place_components_conflict<'tcx>( let proj_base = &borrow_place.projection[..access_place.projection.len() + i]; let base_ty = Place::ty_from(borrow_base, proj_base, body, tcx).ty; - match (elem, &base_ty.sty, access) { + match (elem, &base_ty.kind, access) { (_, _, Shallow(Some(ArtificialField::ArrayLength))) | (_, _, Shallow(Some(ArtificialField::ShallowBorrow))) => { // The array length is like additional fields on the @@ -349,7 +343,7 @@ fn place_base_conflict<'tcx>( }, (StaticKind::Promoted(promoted_1, _), StaticKind::Promoted(promoted_2, _)) => { if promoted_1 == promoted_2 { - if let ty::Array(_, len) = s1.ty.sty { + if let ty::Array(_, len) = s1.ty.kind { if let Some(0) = len.try_eval_usize(tcx, param_env) { // Ignore conflicts with promoted [T; 0]. 
debug!("place_element_conflict: IGNORE-LEN-0-PROMOTED"); @@ -404,7 +398,7 @@ fn place_projection_conflict<'tcx>( Overlap::EqualOrDisjoint } else { let ty = Place::ty_from(pi1_base, pi1_proj_base, body, tcx).ty; - match ty.sty { + match ty.kind { ty::Adt(def, _) if def.is_union() => { // Different fields of a union, we are basically stuck. debug!("place_element_conflict: STUCK-UNION"); diff --git a/src/librustc_mir/borrow_check/prefixes.rs b/src/librustc_mir/borrow_check/prefixes.rs index 0a268ec134..1be1fcef61 100644 --- a/src/librustc_mir/borrow_check/prefixes.rs +++ b/src/librustc_mir/borrow_check/prefixes.rs @@ -29,7 +29,7 @@ pub(super) struct Prefixes<'cx, 'tcx> { body: &'cx Body<'tcx>, tcx: TyCtxt<'tcx>, kind: PrefixSet, - next: Option<(PlaceRef<'cx, 'tcx>)>, + next: Option>, } #[derive(Copy, Clone, PartialEq, Eq, Debug)] @@ -144,7 +144,7 @@ impl<'cx, 'tcx> Iterator for Prefixes<'cx, 'tcx> { // reference. let ty = Place::ty_from(cursor.base, proj_base, self.body, self.tcx).ty; - match ty.sty { + match ty.kind { ty::RawPtr(_) | ty::Ref( _, /*rgn*/ diff --git a/src/librustc_mir/borrow_check/used_muts.rs b/src/librustc_mir/borrow_check/used_muts.rs index 695080dfe2..95471afb78 100644 --- a/src/librustc_mir/borrow_check/used_muts.rs +++ b/src/librustc_mir/borrow_check/used_muts.rs @@ -1,7 +1,5 @@ use rustc::mir::visit::{PlaceContext, Visitor}; -use rustc::mir::{ - Local, Location, Place, PlaceBase, Statement, StatementKind, TerminatorKind -}; +use rustc::mir::{Local, Location, Place, PlaceBase, Statement, StatementKind, TerminatorKind}; use rustc_data_structures::fx::FxHashSet; @@ -118,10 +116,7 @@ impl<'visit, 'cx, 'tcx> Visitor<'tcx> for GatherUsedMutsVisitor<'visit, 'cx, 'tc "assignment of {:?} to {:?}, adding {:?} to used mutable set", path.place, local, path.place ); - if let Place { - base: PlaceBase::Local(user_local), - projection: box [], - } = path.place { + if let Some(user_local) = path.place.as_local() { self.mbcx.used_mut.insert(user_local); } } diff --git a/src/librustc_mir/build/block.rs b/src/librustc_mir/build/block.rs index 7ea08b15b4..7353ca9285 100644 --- a/src/librustc_mir/build/block.rs +++ b/src/librustc_mir/build/block.rs @@ -98,7 +98,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { initializer, lint_level } => { - let ignores_expr_result = if let PatternKind::Wild = *pattern.kind { + let ignores_expr_result = if let PatKind::Wild = *pattern.kind { true } else { false diff --git a/src/librustc_mir/build/expr/as_place.rs b/src/librustc_mir/build/expr/as_place.rs index 09b33c6654..8d2bef39be 100644 --- a/src/librustc_mir/build/expr/as_place.rs +++ b/src/librustc_mir/build/expr/as_place.rs @@ -6,13 +6,79 @@ use crate::build::{BlockAnd, BlockAndExtension, Builder}; use crate::hair::*; use rustc::mir::interpret::{PanicInfo::BoundsCheck}; use rustc::mir::*; -use rustc::ty::{CanonicalUserTypeAnnotation, Variance}; +use rustc::ty::{CanonicalUserTypeAnnotation, Ty, TyCtxt, Variance}; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::vec::Idx; + +/// `PlaceBuilder` is used to create places during MIR construction. It allows you to "build up" a +/// place by pushing more and more projections onto the end, and then convert the final set into a +/// place using the `into_place` method. +/// +/// This is used internally when building a place for an expression like `a.b.c`. The fields `b` +/// and `c` can be progressively pushed onto the place builder that is created when converting `a`. 
+#[derive(Clone)] +struct PlaceBuilder<'tcx> { + base: PlaceBase<'tcx>, + projection: Vec>, +} + +impl PlaceBuilder<'tcx> { + fn into_place(self, tcx: TyCtxt<'tcx>) -> Place<'tcx> { + Place { + base: self.base, + projection: tcx.intern_place_elems(&self.projection), + } + } + + fn field(self, f: Field, ty: Ty<'tcx>) -> Self { + self.project(PlaceElem::Field(f, ty)) + } + + fn deref(self) -> Self { + self.project(PlaceElem::Deref) + } + + fn index(self, index: Local) -> Self { + self.project(PlaceElem::Index(index)) + } + + fn project(mut self, elem: PlaceElem<'tcx>) -> Self { + self.projection.push(elem); + self + } +} + +impl From for PlaceBuilder<'tcx> { + fn from(local: Local) -> Self { + Self { + base: local.into(), + projection: Vec::new(), + } + } +} + +impl From> for PlaceBuilder<'tcx> { + fn from(base: PlaceBase<'tcx>) -> Self { + Self { + base, + projection: Vec::new(), + } + } +} impl<'a, 'tcx> Builder<'a, 'tcx> { /// Compile `expr`, yielding a place that we can move from etc. - pub fn as_place(&mut self, block: BasicBlock, expr: M) -> BlockAnd> + pub fn as_place(&mut self, mut block: BasicBlock, expr: M) -> BlockAnd> + where + M: Mirror<'tcx, Output = Expr<'tcx>>, + { + let place_builder = unpack!(block = self.as_place_builder(block, expr)); + block.and(place_builder.into_place(self.hir.tcx())) + } + + /// This is used when constructing a compound `Place`, so that we can avoid creating + /// intermediate `Place` values until we know the full set of projections. + fn as_place_builder(&mut self, block: BasicBlock, expr: M) -> BlockAnd> where M: Mirror<'tcx, Output = Expr<'tcx>>, { @@ -25,7 +91,25 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// place. The place itself may or may not be mutable: /// * If this expr is a place expr like a.b, then we will return that place. /// * Otherwise, a temporary is created: in that event, it will be an immutable temporary. - pub fn as_read_only_place(&mut self, block: BasicBlock, expr: M) -> BlockAnd> + pub fn as_read_only_place(&mut self, mut block: BasicBlock, expr: M) -> BlockAnd> + where + M: Mirror<'tcx, Output = Expr<'tcx>>, + { + let place_builder = unpack!(block = self.as_read_only_place_builder(block, expr)); + block.and(place_builder.into_place(self.hir.tcx())) + } + + /// This is used when constructing a compound `Place`, so that we can avoid creating + /// intermediate `Place` values until we know the full set of projections. + /// Mutability note: The caller of this method promises only to read from the resulting + /// place. The place itself may or may not be mutable: + /// * If this expr is a place expr like a.b, then we will return that place. + /// * Otherwise, a temporary is created: in that event, it will be an immutable temporary. 
+ fn as_read_only_place_builder( + &mut self, + block: BasicBlock, + expr: M, + ) -> BlockAnd> where M: Mirror<'tcx, Output = Expr<'tcx>>, { @@ -38,7 +122,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { mut block: BasicBlock, expr: Expr<'tcx>, mutability: Mutability, - ) -> BlockAnd> { + ) -> BlockAnd> { debug!( "expr_as_place(block={:?}, expr={:?}, mutability={:?})", block, expr, mutability @@ -54,25 +138,23 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { value, } => this.in_scope((region_scope, source_info), lint_level, |this| { if mutability == Mutability::Not { - this.as_read_only_place(block, value) + this.as_read_only_place_builder(block, value) } else { - this.as_place(block, value) + this.as_place_builder(block, value) } }), ExprKind::Field { lhs, name } => { - let place = unpack!(block = this.as_place(block, lhs)); - let place = place.field(name, expr.ty); - block.and(place) + let place_builder = unpack!(block = this.as_place_builder(block, lhs)); + block.and(place_builder.field(name, expr.ty)) } ExprKind::Deref { arg } => { - let place = unpack!(block = this.as_place(block, arg)); - let place = place.deref(); - block.and(place) + let place_builder = unpack!(block = this.as_place_builder(block, arg)); + block.and(place_builder.deref()) } ExprKind::Index { lhs, index } => { let (usize_ty, bool_ty) = (this.hir.usize_ty(), this.hir.bool_ty()); - let slice = unpack!(block = this.as_place(block, lhs)); + let place_builder = unpack!(block = this.as_place_builder(block, lhs)); // Making this a *fresh* temporary also means we do not have to worry about // the index changing later: Nothing will ever change this temporary. // The "retagging" transformation (for Stacked Borrows) relies on this. @@ -83,6 +165,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { Mutability::Not, )); + let slice = place_builder.clone().into_place(this.hir.tcx()); // bounds check: let (len, lt) = ( this.temp(usize_ty.clone(), expr_span), @@ -92,7 +175,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { block, source_info, // len = len(slice) &len, - Rvalue::Len(slice.clone()), + Rvalue::Len(slice), ); this.cfg.push_assign( block, @@ -110,30 +193,29 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { index: Operand::Copy(Place::from(idx)), }; let success = this.assert(block, Operand::Move(lt), true, msg, expr_span); - success.and(slice.index(idx)) + success.and(place_builder.index(idx)) } - ExprKind::SelfRef => block.and(Place::from(Local::new(1))), + ExprKind::SelfRef => block.and(PlaceBuilder::from(Local::new(1))), ExprKind::VarRef { id } => { - let place = if this.is_bound_var_in_guard(id) { + let place_builder = if this.is_bound_var_in_guard(id) { let index = this.var_local_id(id, RefWithinGuard); - Place::from(index).deref() + PlaceBuilder::from(index).deref() } else { let index = this.var_local_id(id, OutsideGuard); - Place::from(index) + PlaceBuilder::from(index) }; - block.and(place) + block.and(place_builder) } - ExprKind::StaticRef { id } => block.and(Place { - base: PlaceBase::Static(Box::new(Static { + ExprKind::StaticRef { id } => block.and(PlaceBuilder::from( + PlaceBase::Static(Box::new(Static { ty: expr.ty, kind: StaticKind::Static, def_id: id, - })), - projection: box [], - }), + })) + )), ExprKind::PlaceTypeAscription { source, user_ty } => { - let place = unpack!(block = this.as_place(block, source)); + let place_builder = unpack!(block = this.as_place_builder(block, source)); if let Some(user_ty) = user_ty { let annotation_index = this.canonical_user_type_annotations.push( CanonicalUserTypeAnnotation { @@ -142,13 +224,15 @@ impl<'a, 'tcx> 
Builder<'a, 'tcx> { inferred_ty: expr.ty, } ); + + let place = place_builder.clone().into_place(this.hir.tcx()); this.cfg.push( block, Statement { source_info, kind: StatementKind::AscribeUserType( box( - place.clone(), + place, UserTypeProjection { base: annotation_index, projs: vec![], } ), Variance::Invariant, @@ -156,7 +240,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }, ); } - block.and(place) + block.and(place_builder) } ExprKind::ValueTypeAscription { source, user_ty } => { let source = this.hir.mirror(source); @@ -185,7 +269,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }, ); } - block.and(Place::from(temp)) + block.and(PlaceBuilder::from(temp)) } ExprKind::Array { .. } @@ -221,7 +305,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }); let temp = unpack!(block = this.as_temp(block, expr.temp_lifetime, expr, mutability)); - block.and(Place::from(temp)) + block.and(PlaceBuilder::from(temp)) } } } diff --git a/src/librustc_mir/build/expr/as_rvalue.rs b/src/librustc_mir/build/expr/as_rvalue.rs index 7dfe98cbeb..4f1ac8e51d 100644 --- a/src/librustc_mir/build/expr/as_rvalue.rs +++ b/src/librustc_mir/build/expr/as_rvalue.rs @@ -1,7 +1,7 @@ //! See docs in `build/expr/mod.rs`. use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::vec::Idx; use crate::build::expr::category::{Category, RvalueFunc}; use crate::build::{BlockAnd, BlockAndExtension, Builder}; @@ -128,7 +128,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { expr_span, scope, result, - expr.ty, ); } @@ -140,7 +139,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // initialize the box contents: unpack!( block = this.into( - &Place::from(result).deref(), + &this.hir.tcx().mk_place_deref(Place::from(result)), block, value ) ); @@ -297,8 +296,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { .zip(field_types.into_iter()) .map(|(n, ty)| match fields_map.get(&n) { Some(v) => v.clone(), - None => this.consume_by_copy_or_move(base.clone().field(n, ty)), - }).collect() + None => this.consume_by_copy_or_move(this.hir.tcx().mk_place_field( + base.clone(), + n, + ty, + )), + }) + .collect() } else { field_names .iter() @@ -398,8 +402,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let val_fld = Field::new(0); let of_fld = Field::new(1); - let val = result_value.clone().field(val_fld, ty); - let of = result_value.field(of_fld, bool_ty); + let tcx = self.hir.tcx(); + let val = tcx.mk_place_field(result_value.clone(), val_fld, ty); + let of = tcx.mk_place_field(result_value, of_fld, bool_ty); let err = PanicInfo::Overflow(op); @@ -497,14 +502,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let arg_place = unpack!(block = this.as_place(block, arg)); - let mutability = match arg_place { - Place { - base: PlaceBase::Local(local), - projection: box [], + let mutability = match arg_place.as_ref() { + PlaceRef { + base: &PlaceBase::Local(local), + projection: &[], } => this.local_decls[local].mutability, - Place { - base: PlaceBase::Local(local), - projection: box [ProjectionElem::Deref], + PlaceRef { + base: &PlaceBase::Local(local), + projection: &[ProjectionElem::Deref], } => { debug_assert!( this.local_decls[local].is_ref_for_guard(), @@ -512,13 +517,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { ); this.local_decls[local].mutability } - Place { + PlaceRef { ref base, - projection: box [ref proj_base @ .., ProjectionElem::Field(upvar_index, _)], + projection: &[ref proj_base @ .., ProjectionElem::Field(upvar_index, _)], } - | Place { + | PlaceRef { ref base, - projection: box [ + projection: &[ ref proj_base @ .., ProjectionElem::Field(upvar_index, _), 
ProjectionElem::Deref @@ -569,7 +574,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { upvar_span, temp_lifetime, temp, - upvar_ty, ); } diff --git a/src/librustc_mir/build/expr/as_temp.rs b/src/librustc_mir/build/expr/as_temp.rs index dbcc330eca..18332ed68f 100644 --- a/src/librustc_mir/build/expr/as_temp.rs +++ b/src/librustc_mir/build/expr/as_temp.rs @@ -103,7 +103,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { expr_span, temp_lifetime, temp, - expr_ty, DropKind::Storage, ); } @@ -117,7 +116,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { expr_span, temp_lifetime, temp, - expr_ty, DropKind::Value, ); } diff --git a/src/librustc_mir/build/expr/into.rs b/src/librustc_mir/build/expr/into.rs index 30d53502b1..e7388b9205 100644 --- a/src/librustc_mir/build/expr/into.rs +++ b/src/librustc_mir/build/expr/into.rs @@ -196,7 +196,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { exit_block.unit() } ExprKind::Call { ty, fun, args, from_hir_call } => { - let intrinsic = match ty.sty { + let intrinsic = match ty.kind { ty::FnDef(def_id, _) => { let f = ty.fn_sig(this.hir.tcx()); if f.abi() == Abi::RustIntrinsic || f.abi() == Abi::PlatformIntrinsic { @@ -235,7 +235,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }); let ptr_temp = Place::from(ptr_temp); let block = unpack!(this.into(&ptr_temp, block, ptr)); - this.into(&ptr_temp.deref(), block, val) + this.into(&this.hir.tcx().mk_place_deref(ptr_temp), block, val) } else { let args: Vec<_> = args .into_iter() diff --git a/src/librustc_mir/build/expr/stmt.rs b/src/librustc_mir/build/expr/stmt.rs index cf3d8778da..0cd32acdb6 100644 --- a/src/librustc_mir/build/expr/stmt.rs +++ b/src/librustc_mir/build/expr/stmt.rs @@ -159,7 +159,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { if let ExprKind::Block { body } = expr.kind { if let Some(tail_expr) = &body.expr { let mut expr = tail_expr; - while let rustc::hir::ExprKind::Block(subblock, _label) = &expr.node { + while let rustc::hir::ExprKind::Block(subblock, _label) = &expr.kind { if let Some(subtail_expr) = &subblock.expr { expr = subtail_expr } else { diff --git a/src/librustc_mir/build/matches/mod.rs b/src/librustc_mir/build/matches/mod.rs index 2b0237c7c0..667b37bbd8 100644 --- a/src/librustc_mir/build/matches/mod.rs +++ b/src/librustc_mir/build/matches/mod.rs @@ -15,7 +15,7 @@ use rustc::mir::*; use rustc::middle::region; use rustc::ty::{self, CanonicalUserTypeAnnotation, Ty}; use rustc::ty::layout::VariantIdx; -use rustc_data_structures::bit_set::BitSet; +use rustc_index::bit_set::BitSet; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use syntax::ast::Name; use syntax_pos::Span; @@ -142,7 +142,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // Step 2. Create the otherwise and prebinding blocks. 
// create binding start block for link them by false edges - let candidate_count = arms.iter().map(|c| c.patterns.len()).sum::(); + let candidate_count = arms.iter().map(|c| c.top_pats_hack().len()).sum::(); let pre_binding_blocks: Vec<_> = (0..candidate_count) .map(|_| self.cfg.start_new_block()) .collect(); @@ -159,7 +159,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { .map(|arm| { let arm_has_guard = arm.guard.is_some(); match_has_guard |= arm_has_guard; - let arm_candidates: Vec<_> = arm.patterns + let arm_candidates: Vec<_> = arm.top_pats_hack() .iter() .zip(candidate_pre_binding_blocks.by_ref()) .map( @@ -238,7 +238,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let scope = this.declare_bindings( None, arm.span, - &arm.patterns[0], + &arm.top_pats_hack()[0], ArmHasGuard(arm.guard.is_some()), Some((Some(&scrutinee_place), scrutinee_span)), ); @@ -298,12 +298,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { pub(super) fn expr_into_pattern( &mut self, mut block: BasicBlock, - irrefutable_pat: Pattern<'tcx>, + irrefutable_pat: Pat<'tcx>, initializer: ExprRef<'tcx>, ) -> BlockAnd<()> { match *irrefutable_pat.kind { // Optimize the case of `let x = ...` to write directly into `x` - PatternKind::Binding { + PatKind::Binding { mode: BindingMode::ByValue, var, subpattern: None, @@ -336,9 +336,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // test works with uninitialized values in a rather // dubious way, so it may be that the test is kind of // broken. - PatternKind::AscribeUserType { - subpattern: Pattern { - kind: box PatternKind::Binding { + PatKind::AscribeUserType { + subpattern: Pat { + kind: box PatKind::Binding { mode: BindingMode::ByValue, var, subpattern: None, @@ -414,7 +414,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { pub fn place_into_pattern( &mut self, block: BasicBlock, - irrefutable_pat: Pattern<'tcx>, + irrefutable_pat: Pat<'tcx>, initializer: &Place<'tcx>, set_match_place: bool, ) -> BlockAnd<()> { @@ -486,7 +486,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { &mut self, mut visibility_scope: Option, scope_span: Span, - pattern: &Pattern<'tcx>, + pattern: &Pat<'tcx>, has_guard: ArmHasGuard, opt_match_place: Option<(Option<&Place<'tcx>>, Span)>, ) -> Option { @@ -535,28 +535,25 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { kind: StatementKind::StorageLive(local_id), }, ); - let var_ty = self.local_decls[local_id].ty; let region_scope = self.hir.region_scope_tree.var_scope(var.local_id); - self.schedule_drop(span, region_scope, local_id, var_ty, DropKind::Storage); + self.schedule_drop(span, region_scope, local_id, DropKind::Storage); Place::from(local_id) } pub fn schedule_drop_for_binding(&mut self, var: HirId, span: Span, for_guard: ForGuard) { let local_id = self.var_local_id(var, for_guard); - let var_ty = self.local_decls[local_id].ty; let region_scope = self.hir.region_scope_tree.var_scope(var.local_id); self.schedule_drop( span, region_scope, local_id, - var_ty, DropKind::Value, ); } pub(super) fn visit_bindings( &mut self, - pattern: &Pattern<'tcx>, + pattern: &Pat<'tcx>, pattern_user_ty: UserTypeProjections, f: &mut impl FnMut( &mut Self, @@ -571,7 +568,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { ) { debug!("visit_bindings: pattern={:?} pattern_user_ty={:?}", pattern, pattern_user_ty); match *pattern.kind { - PatternKind::Binding { + PatKind::Binding { mutability, name, mode, @@ -586,12 +583,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } - PatternKind::Array { + PatKind::Array { ref prefix, ref slice, ref suffix, } - | PatternKind::Slice { + | PatKind::Slice { ref prefix, ref slice, ref suffix, @@ -609,13 +606,13 @@ 
impl<'a, 'tcx> Builder<'a, 'tcx> { } } - PatternKind::Constant { .. } | PatternKind::Range { .. } | PatternKind::Wild => {} + PatKind::Constant { .. } | PatKind::Range { .. } | PatKind::Wild => {} - PatternKind::Deref { ref subpattern } => { + PatKind::Deref { ref subpattern } => { self.visit_bindings(subpattern, pattern_user_ty.deref(), f); } - PatternKind::AscribeUserType { + PatKind::AscribeUserType { ref subpattern, ascription: hair::pattern::Ascription { ref user_ty, @@ -644,7 +641,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { self.visit_bindings(subpattern, subpattern_user_ty, f) } - PatternKind::Leaf { ref subpatterns } => { + PatKind::Leaf { ref subpatterns } => { for subpattern in subpatterns { let subpattern_user_ty = pattern_user_ty.clone().leaf(subpattern.field); debug!("visit_bindings: subpattern_user_ty={:?}", subpattern_user_ty); @@ -652,14 +649,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } - PatternKind::Variant { adt_def, substs: _, variant_index, ref subpatterns } => { + PatKind::Variant { adt_def, substs: _, variant_index, ref subpatterns } => { for subpattern in subpatterns { let subpattern_user_ty = pattern_user_ty.clone().variant( adt_def, variant_index, subpattern.field); self.visit_bindings(&subpattern.pattern, subpattern_user_ty, f); } } - PatternKind::Or { ref pats } => { + PatKind::Or { ref pats } => { for pat in pats { self.visit_bindings(&pat, pattern_user_ty.clone(), f); } @@ -708,7 +705,7 @@ struct Binding<'tcx> { struct Ascription<'tcx> { span: Span, source: Place<'tcx>, - user_ty: PatternTypeProjection<'tcx>, + user_ty: PatTyProj<'tcx>, variance: ty::Variance, } @@ -718,7 +715,7 @@ pub struct MatchPair<'pat, 'tcx> { place: Place<'tcx>, // ... must match this pattern. - pattern: &'pat Pattern<'tcx>, + pattern: &'pat Pat<'tcx>, } #[derive(Clone, Debug, PartialEq)] @@ -760,7 +757,7 @@ enum TestKind<'tcx> { }, /// Test whether the value falls within an inclusive or exclusive range - Range(PatternRange<'tcx>), + Range(PatRange<'tcx>), /// Test length of the slice is equal to len Len { @@ -951,7 +948,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { fake_borrows.insert(Place { base: source.base.clone(), - projection: proj_base.to_vec().into_boxed_slice(), + projection: self.hir.tcx().intern_place_elems(proj_base), }); } } @@ -1296,7 +1293,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // Insert a Shallow borrow of the prefixes of any fake borrows. for place in fake_borrows { - let mut cursor = &*place.projection; + let mut cursor = place.projection.as_ref(); while let [proj_base @ .., elem] = cursor { cursor = proj_base; @@ -1339,7 +1336,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } /////////////////////////////////////////////////////////////////////////// -// Pattern binding - used for `let` and function parameters as well. +// Pat binding - used for `let` and function parameters as well. 
impl<'a, 'tcx> Builder<'a, 'tcx> { /// Initializes each of the bindings from the candidate by @@ -1491,7 +1488,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { BorrowKind::Shallow, Place { base: place.base.clone(), - projection: place.projection.to_vec().into_boxed_slice(), + projection: tcx.intern_place_elems(place.projection), }, ); self.cfg.push_assign( diff --git a/src/librustc_mir/build/matches/simplify.rs b/src/librustc_mir/build/matches/simplify.rs index 8d049b5398..3e71b87180 100644 --- a/src/librustc_mir/build/matches/simplify.rs +++ b/src/librustc_mir/build/matches/simplify.rs @@ -57,7 +57,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { -> Result<(), MatchPair<'pat, 'tcx>> { let tcx = self.hir.tcx(); match *match_pair.pattern.kind { - PatternKind::AscribeUserType { + PatKind::AscribeUserType { ref subpattern, ascription: hair::pattern::Ascription { variance, @@ -79,12 +79,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { Ok(()) } - PatternKind::Wild => { + PatKind::Wild => { // nothing left to do Ok(()) } - PatternKind::Binding { name, mutability, mode, var, ty, ref subpattern } => { + PatKind::Binding { name, mutability, mode, var, ty, ref subpattern } => { candidate.bindings.push(Binding { name, mutability, @@ -103,13 +103,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { Ok(()) } - PatternKind::Constant { .. } => { + PatKind::Constant { .. } => { // FIXME normalize patterns when possible Err(match_pair) } - PatternKind::Range(PatternRange { lo, hi, end }) => { - let (range, bias) = match lo.ty.sty { + PatKind::Range(PatRange { lo, hi, end }) => { + let (range, bias) = match lo.ty.kind { ty::Char => { (Some(('\u{0000}' as u128, '\u{10FFFF}' as u128, Size::from_bits(32))), 0) } @@ -144,7 +144,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { Err(match_pair) } - PatternKind::Slice { ref prefix, ref slice, ref suffix } => { + PatKind::Slice { ref prefix, ref slice, ref suffix } => { if prefix.is_empty() && slice.is_some() && suffix.is_empty() { // irrefutable self.prefix_slice_suffix(&mut candidate.match_pairs, @@ -158,15 +158,15 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } - PatternKind::Variant { adt_def, substs, variant_index, ref subpatterns } => { + PatKind::Variant { adt_def, substs, variant_index, ref subpatterns } => { let irrefutable = adt_def.variants.iter_enumerated().all(|(i, v)| { i == variant_index || { self.hir.tcx().features().exhaustive_patterns && !v.uninhabited_from(self.hir.tcx(), substs, adt_def.adt_kind()).is_empty() } - }); + }) && (adt_def.did.is_local() || !adt_def.is_variant_list_non_exhaustive()); if irrefutable { - let place = match_pair.place.downcast(adt_def, variant_index); + let place = tcx.mk_place_downcast(match_pair.place, adt_def, variant_index); candidate.match_pairs.extend(self.field_match_pairs(place, subpatterns)); Ok(()) } else { @@ -174,7 +174,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } - PatternKind::Array { ref prefix, ref slice, ref suffix } => { + PatKind::Array { ref prefix, ref slice, ref suffix } => { self.prefix_slice_suffix(&mut candidate.match_pairs, &match_pair.place, prefix, @@ -183,20 +183,20 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { Ok(()) } - PatternKind::Leaf { ref subpatterns } => { + PatKind::Leaf { ref subpatterns } => { // tuple struct, match subpats (if any) candidate.match_pairs .extend(self.field_match_pairs(match_pair.place, subpatterns)); Ok(()) } - PatternKind::Deref { ref subpattern } => { - let place = match_pair.place.deref(); + PatKind::Deref { ref subpattern } => { + let place = tcx.mk_place_deref(match_pair.place); 
candidate.match_pairs.push(MatchPair::new(place, subpattern)); Ok(()) } - PatternKind::Or { .. } => { + PatKind::Or { .. } => { Err(match_pair) } } diff --git a/src/librustc_mir/build/matches/test.rs b/src/librustc_mir/build/matches/test.rs index d5890d00ea..5c2f72c0a0 100644 --- a/src/librustc_mir/build/matches/test.rs +++ b/src/librustc_mir/build/matches/test.rs @@ -9,9 +9,9 @@ use crate::build::Builder; use crate::build::matches::{Candidate, MatchPair, Test, TestKind}; use crate::hair::*; use crate::hair::pattern::compare_const_vals; -use rustc_data_structures::bit_set::BitSet; +use rustc_index::bit_set::BitSet; use rustc_data_structures::fx::FxHashMap; -use rustc::ty::{self, Ty, adjustment::{PointerCast}}; +use rustc::ty::{self, Ty, adjustment::PointerCast}; use rustc::ty::util::IntTypeExt; use rustc::ty::layout::VariantIdx; use rustc::mir::*; @@ -26,7 +26,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// It is a bug to call this with a simplifiable pattern. pub fn test<'pat>(&mut self, match_pair: &MatchPair<'pat, 'tcx>) -> Test<'tcx> { match *match_pair.pattern.kind { - PatternKind::Variant { ref adt_def, substs: _, variant_index: _, subpatterns: _ } => { + PatKind::Variant { ref adt_def, substs: _, variant_index: _, subpatterns: _ } => { Test { span: match_pair.pattern.span, kind: TestKind::Switch { @@ -36,7 +36,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } - PatternKind::Constant { .. } if is_switch_ty(match_pair.pattern.ty) => { + PatKind::Constant { .. } if is_switch_ty(match_pair.pattern.ty) => { // For integers, we use a `SwitchInt` match, which allows // us to handle more cases. Test { @@ -52,7 +52,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } - PatternKind::Constant { value } => { + PatKind::Constant { value } => { Test { span: match_pair.pattern.span, kind: TestKind::Eq { @@ -62,7 +62,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } - PatternKind::Range(range) => { + PatKind::Range(range) => { assert_eq!(range.lo.ty, match_pair.pattern.ty); assert_eq!(range.hi.ty, match_pair.pattern.ty); Test { @@ -71,7 +71,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } - PatternKind::Slice { ref prefix, ref slice, ref suffix } => { + PatKind::Slice { ref prefix, ref slice, ref suffix } => { let len = prefix.len() + suffix.len(); let op = if slice.is_some() { BinOp::Ge @@ -84,13 +84,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } - PatternKind::AscribeUserType { .. } | - PatternKind::Array { .. } | - PatternKind::Wild | - PatternKind::Or { .. } | - PatternKind::Binding { .. } | - PatternKind::Leaf { .. } | - PatternKind::Deref { .. } => { + PatKind::AscribeUserType { .. } | + PatKind::Array { .. } | + PatKind::Wild | + PatKind::Or { .. } | + PatKind::Binding { .. } | + PatKind::Leaf { .. } | + PatKind::Deref { .. } => { self.error_simplifyable(match_pair) } } @@ -110,7 +110,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }; match *match_pair.pattern.kind { - PatternKind::Constant { value } => { + PatKind::Constant { value } => { indices.entry(value) .or_insert_with(|| { options.push(value.eval_bits( @@ -120,22 +120,22 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }); true } - PatternKind::Variant { .. } => { + PatKind::Variant { .. } => { panic!("you should have called add_variants_to_switch instead!"); } - PatternKind::Range(range) => { + PatKind::Range(range) => { // Check that none of the switch values are in the range. self.values_not_contained_in_range(range, indices) .unwrap_or(false) } - PatternKind::Slice { .. } | - PatternKind::Array { .. } | - PatternKind::Wild | - PatternKind::Or { .. } | - PatternKind::Binding { .. 
} | - PatternKind::AscribeUserType { .. } | - PatternKind::Leaf { .. } | - PatternKind::Deref { .. } => { + PatKind::Slice { .. } | + PatKind::Array { .. } | + PatKind::Wild | + PatKind::Or { .. } | + PatKind::Binding { .. } | + PatKind::AscribeUserType { .. } | + PatKind::Leaf { .. } | + PatKind::Deref { .. } => { // don't know how to add these patterns to a switch false } @@ -154,7 +154,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }; match *match_pair.pattern.kind { - PatternKind::Variant { adt_def: _ , variant_index, .. } => { + PatKind::Variant { adt_def: _ , variant_index, .. } => { // We have a pattern testing for variant `variant_index` // set the corresponding index to true variants.insert(variant_index); @@ -229,7 +229,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { TestKind::SwitchInt { switch_ty, ref options, indices: _ } => { let target_blocks = make_target_blocks(self); - let terminator = if switch_ty.sty == ty::Bool { + let terminator = if switch_ty.kind == ty::Bool { assert!(options.len() > 0 && options.len() <= 2); if let [first_bb, second_bb] = *target_blocks { let (true_bb, false_bb) = match options[0] { @@ -283,7 +283,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } - TestKind::Range(PatternRange { ref lo, ref hi, ref end }) => { + TestKind::Range(PatRange { ref lo, ref hi, ref end }) => { let lower_bound_success = self.cfg.start_new_block(); let target_blocks = make_target_blocks(self); @@ -400,8 +400,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // We want to do this even when the scrutinee is a reference to an // array, so we can call `<[u8]>::eq` rather than having to find an // `<[u8; N]>::eq`. - let unsize = |ty: Ty<'tcx>| match ty.sty { - ty::Ref(region, rty, _) => match rty.sty { + let unsize = |ty: Ty<'tcx>| match ty.kind { + ty::Ref(region, rty, _) => match rty.kind { ty::Array(inner_ty, n) => Some((region, inner_ty, n)), _ => None, }, @@ -438,7 +438,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }, } - let deref_ty = match ty.sty { + let deref_ty = match ty.kind { ty::Ref(_, deref_ty, _) => deref_ty, _ => bug!("non_scalar_compare called on non-reference type: {}", ty), }; @@ -533,7 +533,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // If we are performing a variant switch, then this // informs variant patterns, but nothing else. (&TestKind::Switch { adt_def: tested_adt_def, .. }, - &PatternKind::Variant { adt_def, variant_index, ref subpatterns, .. }) => { + &PatKind::Variant { adt_def, variant_index, ref subpatterns, .. }) => { assert_eq!(adt_def, tested_adt_def); self.candidate_after_variant_switch(match_pair_index, adt_def, @@ -548,10 +548,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // If we are performing a switch over integers, then this informs integer // equality, but nothing else. // - // FIXME(#29623) we could use PatternKind::Range to rule + // FIXME(#29623) we could use PatKind::Range to rule // things out here, in some cases. (&TestKind::SwitchInt { switch_ty: _, options: _, ref indices }, - &PatternKind::Constant { ref value }) + &PatKind::Constant { ref value }) if is_switch_ty(match_pair.pattern.ty) => { let index = indices[value]; self.candidate_without_match_pair(match_pair_index, candidate); @@ -559,7 +559,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } (&TestKind::SwitchInt { switch_ty: _, ref options, ref indices }, - &PatternKind::Range(range)) => { + &PatKind::Range(range)) => { let not_contained = self .values_not_contained_in_range(range, indices) .unwrap_or(false); @@ -577,7 +577,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { (&TestKind::SwitchInt { .. 
}, _) => None, (&TestKind::Len { len: test_len, op: BinOp::Eq }, - &PatternKind::Slice { ref prefix, ref slice, ref suffix }) => { + &PatKind::Slice { ref prefix, ref slice, ref suffix }) => { let pat_len = (prefix.len() + suffix.len()) as u64; match (test_len.cmp(&pat_len), slice) { (Ordering::Equal, &None) => { @@ -610,7 +610,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } (&TestKind::Len { len: test_len, op: BinOp::Ge }, - &PatternKind::Slice { ref prefix, ref slice, ref suffix }) => { + &PatKind::Slice { ref prefix, ref slice, ref suffix }) => { // the test is `$actual_len >= test_len` let pat_len = (prefix.len() + suffix.len()) as u64; match (test_len.cmp(&pat_len), slice) { @@ -644,7 +644,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } (&TestKind::Range(test), - &PatternKind::Range(pat)) => { + &PatKind::Range(pat)) => { if test == pat { self.candidate_without_match_pair( match_pair_index, @@ -683,7 +683,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } - (&TestKind::Range(range), &PatternKind::Constant { value }) => { + (&TestKind::Range(range), &PatKind::Constant { value }) => { if self.const_range_contains(range, value) == Some(false) { // `value` is not contained in the testing range, // so `value` can be matched only if this test fails. @@ -722,9 +722,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { fn candidate_after_slice_test<'pat>(&mut self, match_pair_index: usize, candidate: &mut Candidate<'pat, 'tcx>, - prefix: &'pat [Pattern<'tcx>], - opt_slice: Option<&'pat Pattern<'tcx>>, - suffix: &'pat [Pattern<'tcx>]) { + prefix: &'pat [Pat<'tcx>], + opt_slice: Option<&'pat Pat<'tcx>>, + suffix: &'pat [Pat<'tcx>]) { let removed_place = candidate.match_pairs.remove(match_pair_index).place; self.prefix_slice_suffix( &mut candidate.match_pairs, @@ -739,26 +739,25 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { match_pair_index: usize, adt_def: &'tcx ty::AdtDef, variant_index: VariantIdx, - subpatterns: &'pat [FieldPattern<'tcx>], + subpatterns: &'pat [FieldPat<'tcx>], candidate: &mut Candidate<'pat, 'tcx>, ) { let match_pair = candidate.match_pairs.remove(match_pair_index); + let tcx = self.hir.tcx(); // So, if we have a match-pattern like `x @ Enum::Variant(P1, P2)`, // we want to create a set of derived match-patterns like // `(x as Variant).0 @ P1` and `(x as Variant).1 @ P1`. 
let elem = ProjectionElem::Downcast( Some(adt_def.variants[variant_index].ident.name), variant_index); - let downcast_place = match_pair.place.elem(elem); // `(x as Variant)` - let consequent_match_pairs = - subpatterns.iter() - .map(|subpattern| { - // e.g., `(x as Variant).0` - let place = downcast_place.clone().field(subpattern.field, - subpattern.pattern.ty); - // e.g., `(x as Variant).0 @ P1` - MatchPair::new(place, &subpattern.pattern) - }); + let downcast_place = tcx.mk_place_elem(match_pair.place, elem); // `(x as Variant)` + let consequent_match_pairs = subpatterns.iter().map(|subpattern| { + // e.g., `(x as Variant).0` + let place = + tcx.mk_place_field(downcast_place.clone(), subpattern.field, subpattern.pattern.ty); + // e.g., `(x as Variant).0 @ P1` + MatchPair::new(place, &subpattern.pattern) + }); candidate.match_pairs.extend(consequent_match_pairs); } @@ -771,7 +770,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { fn const_range_contains( &self, - range: PatternRange<'tcx>, + range: PatRange<'tcx>, value: &'tcx ty::Const<'tcx>, ) -> Option { use std::cmp::Ordering::*; @@ -790,7 +789,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { fn values_not_contained_in_range( &self, - range: PatternRange<'tcx>, + range: PatRange<'tcx>, indices: &FxHashMap<&'tcx ty::Const<'tcx>, usize>, ) -> Option { for &val in indices.keys() { diff --git a/src/librustc_mir/build/matches/util.rs b/src/librustc_mir/build/matches/util.rs index 011b3a8688..917535f31d 100644 --- a/src/librustc_mir/build/matches/util.rs +++ b/src/librustc_mir/build/matches/util.rs @@ -6,27 +6,33 @@ use std::u32; use std::convert::TryInto; impl<'a, 'tcx> Builder<'a, 'tcx> { - pub fn field_match_pairs<'pat>(&mut self, - place: Place<'tcx>, - subpatterns: &'pat [FieldPattern<'tcx>]) - -> Vec> { - subpatterns.iter() - .map(|fieldpat| { - let place = place.clone().field(fieldpat.field, - fieldpat.pattern.ty); - MatchPair::new(place, &fieldpat.pattern) - }) - .collect() + pub fn field_match_pairs<'pat>( + &mut self, + place: Place<'tcx>, + subpatterns: &'pat [FieldPat<'tcx>], + ) -> Vec> { + subpatterns + .iter() + .map(|fieldpat| { + let place = self.hir.tcx().mk_place_field( + place.clone(), + fieldpat.field, + fieldpat.pattern.ty, + ); + MatchPair::new(place, &fieldpat.pattern) + }) + .collect() } pub fn prefix_slice_suffix<'pat>(&mut self, match_pairs: &mut Vec>, place: &Place<'tcx>, - prefix: &'pat [Pattern<'tcx>], - opt_slice: Option<&'pat Pattern<'tcx>>, - suffix: &'pat [Pattern<'tcx>]) { + prefix: &'pat [Pat<'tcx>], + opt_slice: Option<&'pat Pat<'tcx>>, + suffix: &'pat [Pat<'tcx>]) { let min_length = prefix.len() + suffix.len(); let min_length = min_length.try_into().unwrap(); + let tcx = self.hir.tcx(); match_pairs.extend( prefix.iter() @@ -37,13 +43,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { min_length, from_end: false, }; - let place = place.clone().elem(elem); + let place = tcx.mk_place_elem(place.clone(), elem); MatchPair::new(place, subpattern) }) ); if let Some(subslice_pat) = opt_slice { - let subslice = place.clone().elem(ProjectionElem::Subslice { + let subslice = tcx.mk_place_elem(place.clone(),ProjectionElem::Subslice { from: prefix.len() as u32, to: suffix.len() as u32 }); @@ -60,7 +66,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { min_length, from_end: true, }; - let place = place.clone().elem(elem); + let place = tcx.mk_place_elem(place.clone(), elem); MatchPair::new(place, subpattern) }) ); @@ -101,7 +107,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } impl<'pat, 'tcx> MatchPair<'pat, 'tcx> { - pub fn new(place: Place<'tcx>, pattern: 
&'pat Pattern<'tcx>) -> MatchPair<'pat, 'tcx> { + pub fn new(place: Place<'tcx>, pattern: &'pat Pat<'tcx>) -> MatchPair<'pat, 'tcx> { MatchPair { place, pattern, diff --git a/src/librustc_mir/build/mod.rs b/src/librustc_mir/build/mod.rs index 647d7515fe..ffb70180bb 100644 --- a/src/librustc_mir/build/mod.rs +++ b/src/librustc_mir/build/mod.rs @@ -1,18 +1,20 @@ use crate::build; use crate::build::scope::DropKind; use crate::hair::cx::Cx; -use crate::hair::{LintLevel, BindingMode, PatternKind}; +use crate::hair::{LintLevel, BindingMode, PatKind}; use crate::transform::MirSource; use crate::util as mir_util; use rustc::hir; use rustc::hir::Node; use rustc::hir::def_id::DefId; +use rustc::middle::lang_items; use rustc::middle::region; use rustc::mir::*; use rustc::ty::{self, Ty, TyCtxt}; +use rustc::ty::subst::Subst; use rustc::util::nodemap::HirIdMap; use rustc_target::spec::PanicStrategy; -use rustc_data_structures::indexed_vec::{IndexVec, Idx}; +use rustc_index::vec::{IndexVec, Idx}; use std::u32; use rustc_target::spec::abi::Abi; use syntax::attr::{self, UnwindAttr}; @@ -27,17 +29,17 @@ pub fn mir_build(tcx: TyCtxt<'_>, def_id: DefId) -> Body<'_> { // Figure out what primary body this item has. let (body_id, return_ty_span) = match tcx.hir().get(id) { - Node::Expr(hir::Expr { node: hir::ExprKind::Closure(_, decl, body_id, _, _), .. }) - | Node::Item(hir::Item { node: hir::ItemKind::Fn(decl, _, _, body_id), .. }) + Node::Expr(hir::Expr { kind: hir::ExprKind::Closure(_, decl, body_id, _, _), .. }) + | Node::Item(hir::Item { kind: hir::ItemKind::Fn(decl, _, _, body_id), .. }) | Node::ImplItem( hir::ImplItem { - node: hir::ImplItemKind::Method(hir::MethodSig { decl, .. }, body_id), + kind: hir::ImplItemKind::Method(hir::MethodSig { decl, .. }, body_id), .. } ) | Node::TraitItem( hir::TraitItem { - node: hir::TraitItemKind::Method( + kind: hir::TraitItemKind::Method( hir::MethodSig { decl, .. }, hir::TraitMethod::Provided(body_id), ), @@ -46,11 +48,11 @@ pub fn mir_build(tcx: TyCtxt<'_>, def_id: DefId) -> Body<'_> { ) => { (*body_id, decl.output.span()) } - Node::Item(hir::Item { node: hir::ItemKind::Static(ty, _, body_id), .. }) - | Node::Item(hir::Item { node: hir::ItemKind::Const(ty, body_id), .. }) - | Node::ImplItem(hir::ImplItem { node: hir::ImplItemKind::Const(ty, body_id), .. }) + Node::Item(hir::Item { kind: hir::ItemKind::Static(ty, _, body_id), .. }) + | Node::Item(hir::Item { kind: hir::ItemKind::Const(ty, body_id), .. }) + | Node::ImplItem(hir::ImplItem { kind: hir::ImplItemKind::Const(ty, body_id), .. }) | Node::TraitItem( - hir::TraitItem { node: hir::TraitItemKind::Const(ty, Some(body_id)), .. } + hir::TraitItem { kind: hir::TraitItemKind::Const(ty, Some(body_id)), .. } ) => { (*body_id, ty.span) } @@ -73,7 +75,7 @@ pub fn mir_build(tcx: TyCtxt<'_>, def_id: DefId) -> Body<'_> { let ty = tcx.type_of(fn_def_id); let mut abi = fn_sig.abi; - let implicit_argument = match ty.sty { + let implicit_argument = match ty.kind { ty::Closure(..) => { // HACK(eddyb) Avoid having RustCall on closures, // as it adds unnecessary (and wrong) auto-tupling. 
@@ -102,9 +104,7 @@ pub fn mir_build(tcx: TyCtxt<'_>, def_id: DefId) -> Body<'_> { let opt_ty_info; let self_arg; if let Some(ref fn_decl) = tcx.hir().fn_decl_by_hir_id(owner_id) { - let ty_hir_id = fn_decl.inputs[index].hir_id; - let ty_span = tcx.hir().span(ty_hir_id); - opt_ty_info = Some(ty_span); + opt_ty_info = fn_decl.inputs.get(index).map(|ty| ty.span); self_arg = if index == 0 && fn_decl.implicit_self.has_implicit_self() { match fn_decl.implicit_self { hir::ImplicitSelfKind::Imm => Some(ImplicitSelfKind::Imm), @@ -121,15 +121,32 @@ pub fn mir_build(tcx: TyCtxt<'_>, def_id: DefId) -> Body<'_> { self_arg = None; } - ArgInfo(fn_sig.inputs()[index], opt_ty_info, Some(&arg), self_arg) + // C-variadic fns also have a `VaList` input that's not listed in `fn_sig` + // (as it's created inside the body itself, not passed in from outside). + let ty = if fn_sig.c_variadic && index == fn_sig.inputs().len() { + let va_list_did = tcx.require_lang_item( + lang_items::VaListTypeLangItem, + Some(arg.span), + ); + let region = tcx.mk_region(ty::ReScope(region::Scope { + id: body.value.hir_id.local_id, + data: region::ScopeData::CallSite + })); + + tcx.type_of(va_list_did).subst(tcx, &[region.into()]) + } else { + fn_sig.inputs()[index] + }; + + ArgInfo(ty, opt_ty_info, Some(&arg), self_arg) }); let arguments = implicit_argument.into_iter().chain(explicit_arguments); let (yield_ty, return_ty) = if body.generator_kind.is_some() { - let gen_sig = match ty.sty { + let gen_sig = match ty.kind { ty::Generator(gen_def_id, gen_substs, ..) => - gen_substs.sig(gen_def_id, tcx), + gen_substs.as_generator().sig(gen_def_id, tcx), _ => span_bug!(tcx.hir().span(id), "generator w/o generator type: {:?}", ty), @@ -178,7 +195,7 @@ fn liberated_closure_env_ty( ) -> Ty<'_> { let closure_ty = tcx.body_tables(body_id).node_type(closure_expr_id); - let (closure_def_id, closure_substs) = match closure_ty.sty { + let (closure_def_id, closure_substs) = match closure_ty.kind { ty::Closure(closure_def_id, closure_substs) => (closure_def_id, closure_substs), _ => bug!("closure expr does not have closure type: {:?}", closure_ty) }; @@ -438,7 +455,7 @@ struct CFG<'tcx> { basic_blocks: IndexVec>, } -newtype_index! { +rustc_index::newtype_index! { pub struct ScopeId { .. } } @@ -485,24 +502,21 @@ macro_rules! unpack { }; } -fn should_abort_on_panic(tcx: TyCtxt<'_>, fn_def_id: DefId, abi: Abi) -> bool { - // Not callable from C, so we can safely unwind through these - if abi == Abi::Rust || abi == Abi::RustCall { return false; } - - // Validate `#[unwind]` syntax regardless of platform-specific panic strategy +fn should_abort_on_panic(tcx: TyCtxt<'_>, fn_def_id: DefId, _abi: Abi) -> bool { + // Validate `#[unwind]` syntax regardless of platform-specific panic strategy. let attrs = &tcx.get_attrs(fn_def_id); let unwind_attr = attr::find_unwind_attr(Some(tcx.sess.diagnostic()), attrs); - // We never unwind, so it's not relevant to stop an unwind + // We never unwind, so it's not relevant to stop an unwind. if tcx.sess.panic_strategy() != PanicStrategy::Unwind { return false; } - // We cannot add landing pads, so don't add one + // We cannot add landing pads, so don't add one. if tcx.sess.no_landing_pads() { return false; } // This is a special case: some functions have a C abi but are meant to // unwind anyway. Don't stop them. 
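For readers unfamiliar with the `#[unwind]` attribute that `find_unwind_attr` parses above (and that the `match unwind_attr` just below consumes), here is a minimal, hedged illustration. It is not part of this patch; it assumes the nightly-only `unwind_attributes` feature gate of this era:

```rust
#![feature(unwind_attributes)]

// `extern "C"` functions normally must not unwind across the FFI boundary.
// `#[unwind(allowed)]` opts a function back in to unwinding, while
// `#[unwind(aborts)]` forces an abort if a panic would otherwise escape it.
#[unwind(allowed)]
extern "C" fn may_unwind_into_c() {
    panic!("allowed to propagate across the C ABI");
}

#[unwind(aborts)]
extern "C" fn never_unwinds() {
    panic!("this aborts the process instead of unwinding");
}

fn main() {}
```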
match unwind_attr { - None => false, // FIXME(#58794) + None => false, // FIXME(#58794); should be `!(abi == Abi::Rust || abi == Abi::RustCall)` Some(UnwindAttr::Allowed) => false, Some(UnwindAttr::Aborts) => true, } @@ -559,7 +573,7 @@ where }; let mut mutability = Mutability::Not; if let Some(Node::Binding(pat)) = tcx_hir.find(var_hir_id) { - if let hir::PatKind::Binding(_, _, ident, _) = pat.node { + if let hir::PatKind::Binding(_, _, ident, _) = pat.kind { debuginfo.debug_name = ident.name; if let Some(&bm) = hir.tables.pat_binding_modes().get(pat.hir_id) { if bm == ty::BindByValue(hir::MutMutable) { @@ -812,12 +826,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // Function arguments always get the first Local indices after the return place let local = Local::new(index + 1); let place = Place::from(local); - let &ArgInfo(ty, opt_ty_info, arg_opt, ref self_binding) = arg_info; + let &ArgInfo(_, opt_ty_info, arg_opt, ref self_binding) = arg_info; // Make sure we drop (parts of) the argument even when not matched on. self.schedule_drop( arg_opt.as_ref().map_or(ast_body.span, |arg| arg.pat.span), - argument_scope, local, ty, DropKind::Value, + argument_scope, local, DropKind::Value, ); if let Some(arg) = arg_opt { @@ -827,7 +841,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { self.set_correct_source_scope_for_arg(arg.hir_id, original_source_scope, span); match *pattern.kind { // Don't introduce extra copies for simple bindings - PatternKind::Binding { + PatKind::Binding { mutability, var, mode: BindingMode::ByValue, diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs index a26ec72584..1b3d8641f2 100644 --- a/src/librustc_mir/build/scope.rs +++ b/src/librustc_mir/build/scope.rs @@ -85,7 +85,6 @@ should go to. use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder, CFG}; use crate::hair::{Expr, ExprRef, LintLevel}; use rustc::middle::region; -use rustc::ty::Ty; use rustc::hir; use rustc::mir::*; use syntax_pos::{DUMMY_SP, Span}; @@ -173,11 +172,11 @@ struct BreakableScope<'tcx> { region_scope: region::Scope, /// Where the body of the loop begins. 
`None` if block continue_block: Option, - /// Block to branch into when the loop or block terminates (either by being `break`-en out - /// from, or by having its condition to become false) + /// Block to branch into when the loop or block terminates (either by being + /// `break`-en out from, or by having its condition to become false) break_block: BasicBlock, - /// The destination of the loop/block expression itself (i.e., where to put the result of a - /// `break` expression) + /// The destination of the loop/block expression itself (i.e., where to put + /// the result of a `break` expression) break_destination: Place<'tcx>, } @@ -728,10 +727,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { span: Span, region_scope: region::Scope, local: Local, - place_ty: Ty<'tcx>, ) { - self.schedule_drop(span, region_scope, local, place_ty, DropKind::Storage); - self.schedule_drop(span, region_scope, local, place_ty, DropKind::Value); + self.schedule_drop(span, region_scope, local, DropKind::Storage); + self.schedule_drop(span, region_scope, local, DropKind::Value); } /// Indicates that `place` should be dropped on exit from @@ -744,12 +742,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { span: Span, region_scope: region::Scope, local: Local, - place_ty: Ty<'tcx>, drop_kind: DropKind, ) { - let needs_drop = self.hir.needs_drop(place_ty); - match drop_kind { - DropKind::Value => if !needs_drop { return }, + let needs_drop = match drop_kind { + DropKind::Value => { + if !self.hir.needs_drop(self.local_decls[local].ty) { return } + true + }, DropKind::Storage => { if local.index() <= self.arg_count { span_bug!( @@ -758,8 +757,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { self.arg_count, ) } + false } - } + }; for scope in self.scopes.iter_mut() { let this_scope = scope.region_scope == region_scope; @@ -926,46 +926,43 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // If constants and statics, we don't generate StorageLive for this // temporary, so don't try to generate StorageDead for it either. _ if self.local_scope().is_none() => (), - Operand::Copy(Place { - base: PlaceBase::Local(cond_temp), - projection: box [], - }) - | Operand::Move(Place { - base: PlaceBase::Local(cond_temp), - projection: box [], - }) => { - // Manually drop the condition on both branches. - let top_scope = self.scopes.scopes.last_mut().unwrap(); - let top_drop_data = top_scope.drops.pop().unwrap(); + Operand::Copy(place) + | Operand::Move(place) => { + if let Some(cond_temp) = place.as_local() { + // Manually drop the condition on both branches. + let top_scope = self.scopes.scopes.last_mut().unwrap(); + let top_drop_data = top_scope.drops.pop().unwrap(); - match top_drop_data.kind { - DropKind::Value { .. } => { - bug!("Drop scheduled on top of condition variable") - } - DropKind::Storage => { - let source_info = top_scope.source_info(top_drop_data.span); - let local = top_drop_data.local; - assert_eq!(local, cond_temp, "Drop scheduled on top of condition"); - self.cfg.push( - true_block, - Statement { - source_info, - kind: StatementKind::StorageDead(local) - }, - ); - self.cfg.push( - false_block, - Statement { - source_info, - kind: StatementKind::StorageDead(local) - }, - ); + match top_drop_data.kind { + DropKind::Value { .. 
} => { + bug!("Drop scheduled on top of condition variable") + } + DropKind::Storage => { + let source_info = top_scope.source_info(top_drop_data.span); + let local = top_drop_data.local; + assert_eq!(local, cond_temp, "Drop scheduled on top of condition"); + self.cfg.push( + true_block, + Statement { + source_info, + kind: StatementKind::StorageDead(local) + }, + ); + self.cfg.push( + false_block, + Statement { + source_info, + kind: StatementKind::StorageDead(local) + }, + ); + } } + + top_scope.invalidate_cache(true, self.is_generator, true); + } else { + bug!("Expected as_local_operand to produce a temporary"); } - - top_scope.invalidate_cache(true, self.is_generator, true); } - _ => bug!("Expected as_local_operand to produce a temporary"), } (true_block, false_block) diff --git a/src/librustc_mir/const_eval.rs b/src/librustc_mir/const_eval.rs index 435159827e..89bdf7391c 100644 --- a/src/librustc_mir/const_eval.rs +++ b/src/librustc_mir/const_eval.rs @@ -9,6 +9,7 @@ use std::convert::TryInto; use rustc::hir::def::DefKind; use rustc::hir::def_id::DefId; +use rustc::middle::lang_items::PanicLocationLangItem; use rustc::mir::interpret::{ConstEvalErr, ErrorHandled, ScalarMaybeUndef}; use rustc::mir; use rustc::ty::{self, Ty, TyCtxt, subst::Subst}; @@ -17,11 +18,11 @@ use rustc::traits::Reveal; use rustc_data_structures::fx::FxHashMap; use crate::interpret::eval_nullary_intrinsic; -use syntax::source_map::{Span, DUMMY_SP}; +use syntax::{source_map::{Span, DUMMY_SP}, symbol::Symbol}; use crate::interpret::{self, PlaceTy, MPlaceTy, OpTy, ImmTy, Immediate, Scalar, Pointer, - RawConst, ConstValue, + RawConst, ConstValue, Machine, InterpResult, InterpErrorInfo, GlobalId, InterpCx, StackPopCleanup, Allocation, AllocId, MemoryKind, Memory, snapshot, RefTracking, intern_const_alloc_recursive, @@ -41,7 +42,7 @@ const DETECTOR_SNAPSHOT_PERIOD: isize = 256; /// that inform us about the generic bounds of the constant. E.g., using an associated constant /// of a function's generic parameter will require knowledge about the bounds on the generic /// parameter. These bounds are passed to `mk_eval_cx` via the `ParamEnv` argument. -pub(crate) fn mk_eval_cx<'mir, 'tcx>( +fn mk_eval_cx<'mir, 'tcx>( tcx: TyCtxt<'tcx>, span: Span, param_env: ty::ParamEnv<'tcx>, @@ -63,8 +64,8 @@ fn op_to_const<'tcx>( // `Undef` situation. let try_as_immediate = match op.layout.abi { layout::Abi::Scalar(..) => true, - layout::Abi::ScalarPair(..) => match op.layout.ty.sty { - ty::Ref(_, inner, _) => match inner.sty { + layout::Abi::ScalarPair(..) => match op.layout.ty.kind { + ty::Ref(_, inner, _) => match inner.kind { ty::Slice(elem) => elem == ecx.tcx.types.u8, ty::Str => true, _ => false, @@ -158,18 +159,14 @@ fn eval_body_using_ecx<'mir, 'tcx>( ecx.run()?; // Intern the result - intern_const_alloc_recursive( - ecx, - cid.instance.def_id(), - ret, - )?; + intern_const_alloc_recursive(ecx, tcx.static_mutability(cid.instance.def_id()), ret)?; debug!("eval_body_using_ecx done: {:?}", *ret); Ok(ret) } #[derive(Clone, Debug)] -enum ConstEvalError { +pub enum ConstEvalError { NeedsRfc(String), } @@ -374,11 +371,12 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, fn call_intrinsic( ecx: &mut InterpCx<'mir, 'tcx, Self>, + span: Span, instance: ty::Instance<'tcx>, args: &[OpTy<'tcx>], dest: PlaceTy<'tcx>, ) -> InterpResult<'tcx> { - if ecx.emulate_intrinsic(instance, args, dest)? { + if ecx.emulate_intrinsic(span, instance, args, dest)? 
{ return Ok(()); } // An intrinsic that we do not support @@ -505,6 +503,28 @@ pub fn const_field<'tcx>( op_to_const(&ecx, field) } +pub fn const_caller_location<'tcx>( + tcx: TyCtxt<'tcx>, + (file, line, col): (Symbol, u32, u32), +) -> &'tcx ty::Const<'tcx> { + trace!("const_caller_location: {}:{}:{}", file, line, col); + let mut ecx = mk_eval_cx(tcx, DUMMY_SP, ty::ParamEnv::reveal_all()); + + let loc_ty = tcx.mk_imm_ref( + tcx.lifetimes.re_static, + tcx.type_of(tcx.require_lang_item(PanicLocationLangItem, None)) + .subst(tcx, tcx.mk_substs([tcx.lifetimes.re_static.into()].iter())), + ); + let loc_place = ecx.alloc_caller_location(file, line, col).unwrap(); + intern_const_alloc_recursive(&mut ecx, None, loc_place).unwrap(); + let loc_const = ty::Const { + ty: loc_ty, + val: ConstValue::Scalar(loc_place.ptr.into()), + }; + + tcx.mk_const(loc_const) +} + // this function uses `unwrap` copiously, because an already validated constant must have valid // fields and can thus never fail outside of compiler bugs pub fn const_variant_index<'tcx>( @@ -521,8 +541,8 @@ pub fn const_variant_index<'tcx>( /// Turn an interpreter error into something to report to the user. /// As a side-effect, if RUSTC_CTFE_BACKTRACE is set, this prints the backtrace. /// Should be called only if the error is actually going to to be reported! -pub fn error_to_const_error<'mir, 'tcx>( - ecx: &InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>, +pub fn error_to_const_error<'mir, 'tcx, M: Machine<'mir, 'tcx>>( + ecx: &InterpCx<'mir, 'tcx, M>, mut error: InterpErrorInfo<'tcx>, ) -> ConstEvalErr<'tcx> { error.print_backtrace(); @@ -608,7 +628,7 @@ pub fn const_eval_provider<'tcx>( // Catch such calls and evaluate them instead of trying to load a constant's MIR. if let ty::InstanceDef::Intrinsic(def_id) = key.value.instance.def { let ty = key.value.instance.ty(tcx); - let substs = match ty.sty { + let substs = match ty.kind { ty::FnDef(_, substs) => substs, _ => bug!("intrinsic with type {:?}", ty), }; diff --git a/src/librustc_mir/dataflow/at_location.rs b/src/librustc_mir/dataflow/at_location.rs index f0014602e2..e0ca105352 100644 --- a/src/librustc_mir/dataflow/at_location.rs +++ b/src/librustc_mir/dataflow/at_location.rs @@ -2,7 +2,7 @@ //! locations. use rustc::mir::{BasicBlock, Location}; -use rustc_data_structures::bit_set::{BitIter, BitSet, HybridBitSet}; +use rustc_index::bit_set::{BitIter, BitSet, HybridBitSet}; use crate::dataflow::{BitDenotation, DataflowResults, GenKillSet}; use crate::dataflow::move_paths::{HasMoveData, MovePathIndex}; diff --git a/src/librustc_mir/dataflow/drop_flag_effects.rs b/src/librustc_mir/dataflow/drop_flag_effects.rs index 444cc008ae..0fe58c07b1 100644 --- a/src/librustc_mir/dataflow/drop_flag_effects.rs +++ b/src/librustc_mir/dataflow/drop_flag_effects.rs @@ -50,7 +50,7 @@ fn place_contents_drop_state_cannot_differ<'tcx>( place: &mir::Place<'tcx>, ) -> bool { let ty = place.ty(body, tcx).ty; - match ty.sty { + match ty.kind { ty::Array(..) 
=> { debug!("place_contents_drop_state_cannot_differ place: {:?} ty: {:?} => false", place, ty); @@ -148,9 +148,8 @@ pub(crate) fn on_all_drop_children_bits<'tcx, F>( let ty = place.ty(body, tcx).ty; debug!("on_all_drop_children_bits({:?}, {:?} : {:?})", path, place, ty); - let gcx = tcx.global_tcx(); let erased_ty = tcx.erase_regions(&ty); - if erased_ty.needs_drop(gcx, ctxt.param_env) { + if erased_ty.needs_drop(tcx, ctxt.param_env) { each_child(child); } else { debug!("on_all_drop_children_bits - skipping") diff --git a/src/librustc_mir/dataflow/generic.rs b/src/librustc_mir/dataflow/generic.rs index 886044c069..dd6238b80d 100644 --- a/src/librustc_mir/dataflow/generic.rs +++ b/src/librustc_mir/dataflow/generic.rs @@ -16,16 +16,24 @@ //! [gk]: https://en.wikipedia.org/wiki/Data-flow_analysis#Bit_vector_problems //! [#64566]: https://github.com/rust-lang/rust/pull/64566 +use std::borrow::Borrow; use std::cmp::Ordering; -use std::ops; +use std::ffi::OsString; +use std::path::{Path, PathBuf}; +use std::{fs, io, ops}; +use rustc::hir::def_id::DefId; use rustc::mir::{self, traversal, BasicBlock, Location}; -use rustc_data_structures::bit_set::BitSet; -use rustc_data_structures::indexed_vec::{Idx, IndexVec}; +use rustc::ty::{self, TyCtxt}; use rustc_data_structures::work_queue::WorkQueue; +use rustc_index::bit_set::BitSet; +use rustc_index::vec::{Idx, IndexVec}; +use syntax::symbol::sym; use crate::dataflow::BottomValue; +mod graphviz; + /// A specific kind of dataflow analysis. /// /// To run a dataflow analysis, one must set the initial state of the `START_BLOCK` via @@ -62,6 +70,13 @@ pub trait Analysis<'tcx>: BottomValue { /// and try to keep it short. const NAME: &'static str; + /// How each element of your dataflow state will be displayed during debugging. + /// + /// By default, this is the `fmt::Debug` representation of `Self::Idx`. + fn pretty_print_idx(&self, w: &mut impl io::Write, idx: Self::Idx) -> io::Result<()> { + write!(w, "{:?}", idx) + } + /// The size of each bitvector allocated for each block. fn bits_per_block(&self, body: &mir::Body<'tcx>) -> usize; @@ -77,7 +92,7 @@ pub trait Analysis<'tcx>: BottomValue { location: Location, ); - /// Updates the current dataflow state with the effect of evaluating a statement. + /// Updates the current dataflow state with the effect of evaluating a terminator. /// /// Note that the effect of a successful return from a `Call` terminator should **not** be /// acounted for in this function. That should go in `apply_call_return_effect`. For example, @@ -180,17 +195,20 @@ impl CursorPosition { } } +type ResultsRefCursor<'a, 'mir, 'tcx, A> = + ResultsCursor<'mir, 'tcx, A, &'a Results<'tcx, A>>; + /// Inspect the results of dataflow analysis. /// /// This cursor has linear performance when visiting statements in a block in order. Visiting /// statements within a block in reverse order is `O(n^2)`, where `n` is the number of statements /// in that block. -pub struct ResultsCursor<'mir, 'tcx, A> +pub struct ResultsCursor<'mir, 'tcx, A, R = Results<'tcx, A>> where A: Analysis<'tcx>, { body: &'mir mir::Body<'tcx>, - results: Results<'tcx, A>, + results: R, state: BitSet, pos: CursorPosition, @@ -202,24 +220,29 @@ where is_call_return_effect_applied: bool, } -impl<'mir, 'tcx, A> ResultsCursor<'mir, 'tcx, A> +impl<'mir, 'tcx, A, R> ResultsCursor<'mir, 'tcx, A, R> where A: Analysis<'tcx>, + R: Borrow>, { /// Returns a new cursor for `results` that points to the start of the `START_BLOCK`. 
- pub fn new(body: &'mir mir::Body<'tcx>, results: Results<'tcx, A>) -> Self { + pub fn new(body: &'mir mir::Body<'tcx>, results: R) -> Self { ResultsCursor { body, pos: CursorPosition::AtBlockStart(mir::START_BLOCK), is_call_return_effect_applied: false, - state: results.entry_sets[mir::START_BLOCK].clone(), + state: results.borrow().entry_sets[mir::START_BLOCK].clone(), results, } } + pub fn analysis(&self) -> &A { + &self.results.borrow().analysis + } + /// Resets the cursor to the start of the given `block`. pub fn seek_to_block_start(&mut self, block: BasicBlock) { - self.state.overwrite(&self.results.entry_sets[block]); + self.state.overwrite(&self.results.borrow().entry_sets[block]); self.pos = CursorPosition::AtBlockStart(block); self.is_call_return_effect_applied = false; } @@ -275,7 +298,7 @@ where } = &term.kind { if !self.is_call_return_effect_applied { self.is_call_return_effect_applied = true; - self.results.analysis.apply_call_return_effect( + self.results.borrow().analysis.apply_call_return_effect( &mut self.state, target.block, func, @@ -316,7 +339,7 @@ where }; let block_data = &self.body.basic_blocks()[target_block]; - self.results.analysis.apply_partial_block_effect( + self.results.borrow().analysis.apply_partial_block_effect( &mut self.state, target_block, block_data, @@ -349,7 +372,9 @@ where { analysis: A, bits_per_block: usize, + tcx: TyCtxt<'tcx>, body: &'a mir::Body<'tcx>, + def_id: DefId, dead_unwinds: &'a BitSet, entry_sets: IndexVec>, } @@ -359,7 +384,9 @@ where A: Analysis<'tcx>, { pub fn new( + tcx: TyCtxt<'tcx>, body: &'a mir::Body<'tcx>, + def_id: DefId, dead_unwinds: &'a BitSet, analysis: A, ) -> Self { @@ -377,7 +404,9 @@ where Engine { analysis, bits_per_block, + tcx, body, + def_id, dead_unwinds, entry_sets, } @@ -413,10 +442,26 @@ where ); } - Results { - analysis: self.analysis, - entry_sets: self.entry_sets, + let Engine { + tcx, + body, + def_id, + analysis, + entry_sets, + .. + } = self; + + let results = Results { analysis, entry_sets }; + + let attrs = tcx.get_attrs(def_id); + if let Some(path) = get_dataflow_graphviz_output_path(tcx, attrs, A::NAME) { + let result = write_dataflow_graphviz_results(body, def_id, &path, &results); + if let Err(e) = result { + warn!("Failed to write dataflow results to {}: {}", path.display(), e); + } } + + results } fn propagate_bits_into_graph_successors_of( @@ -510,3 +555,59 @@ where } } } + +/// Looks for attributes like `#[rustc_mir(borrowck_graphviz_postflow="./path/to/suffix.dot")]` and +/// extracts the path with the given analysis name prepended to the suffix. +/// +/// Returns `None` if no such attribute exists. 
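As a side note on the attribute handled here: the doc comment above says the analysis name is prepended to the suffix of the path given in `#[rustc_mir(borrowck_graphviz_postflow = "...")]`. The following standalone sketch (not part of the patch; the analysis name `maybe_init` is illustrative) mirrors the `PathBuf` manipulation used by the helper defined just below, so the transformation is easy to see:

```rust
use std::ffi::OsString;
use std::path::PathBuf;

// Change "path/suffix.dot" into "path/<analysis>_suffix.dot".
fn prepend_analysis_name(path_and_suffix: &str, analysis: &str) -> PathBuf {
    let mut ret = PathBuf::from(path_and_suffix);
    let suffix = ret.file_name().expect("path must have a file name").to_owned();

    let mut file_name: OsString = analysis.into();
    file_name.push("_");
    file_name.push(&suffix);
    ret.set_file_name(file_name);
    ret
}

fn main() {
    let out = prepend_analysis_name("./dataflow/suffix.dot", "maybe_init");
    assert_eq!(out, PathBuf::from("./dataflow/maybe_init_suffix.dot"));
}
```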
+fn get_dataflow_graphviz_output_path( + tcx: TyCtxt<'tcx>, + attrs: ty::Attributes<'tcx>, + analysis: &str, +) -> Option { + let mut rustc_mir_attrs = attrs + .into_iter() + .filter(|attr| attr.check_name(sym::rustc_mir)) + .flat_map(|attr| attr.meta_item_list().into_iter().flat_map(|v| v.into_iter())); + + let borrowck_graphviz_postflow = rustc_mir_attrs + .find(|attr| attr.check_name(sym::borrowck_graphviz_postflow))?; + + let path_and_suffix = match borrowck_graphviz_postflow.value_str() { + Some(p) => p, + None => { + tcx.sess.span_err( + borrowck_graphviz_postflow.span(), + "borrowck_graphviz_postflow requires a path", + ); + + return None; + } + }; + + // Change "path/suffix.dot" to "path/analysis_name_suffix.dot" + let mut ret = PathBuf::from(path_and_suffix.to_string()); + let suffix = ret.file_name().unwrap(); + + let mut file_name: OsString = analysis.into(); + file_name.push("_"); + file_name.push(suffix); + ret.set_file_name(file_name); + + Some(ret) +} + +fn write_dataflow_graphviz_results>( + body: &mir::Body<'tcx>, + def_id: DefId, + path: &Path, + results: &Results<'tcx, A> +) -> io::Result<()> { + debug!("printing dataflow results for {:?} to {}", def_id, path.display()); + + let mut buf = Vec::new(); + let graphviz = graphviz::Formatter::new(body, def_id, results); + + dot::render(&graphviz, &mut buf)?; + fs::write(path, buf) +} diff --git a/src/librustc_mir/dataflow/generic/graphviz.rs b/src/librustc_mir/dataflow/generic/graphviz.rs new file mode 100644 index 0000000000..47ace8f33e --- /dev/null +++ b/src/librustc_mir/dataflow/generic/graphviz.rs @@ -0,0 +1,413 @@ +use std::cell::RefCell; +use std::io::{self, Write}; +use std::{ops, str}; + +use rustc::hir::def_id::DefId; +use rustc::mir::{self, BasicBlock, Body, Location}; +use rustc_index::bit_set::{BitSet, HybridBitSet}; +use rustc_index::vec::Idx; + +use crate::util::graphviz_safe_def_name; +use super::{Analysis, Results, ResultsRefCursor}; + +pub struct Formatter<'a, 'tcx, A> +where + A: Analysis<'tcx>, +{ + body: &'a Body<'tcx>, + def_id: DefId, + + // This must be behind a `RefCell` because `dot::Labeller` takes `&self`. + block_formatter: RefCell>, +} + +impl Formatter<'a, 'tcx, A> +where + A: Analysis<'tcx>, +{ + pub fn new( + body: &'a Body<'tcx>, + def_id: DefId, + results: &'a Results<'tcx, A>, + ) -> Self { + let block_formatter = BlockFormatter { + bg: Background::Light, + prev_state: BitSet::new_empty(results.analysis.bits_per_block(body)), + results: ResultsRefCursor::new(body, results), + }; + + Formatter { + body, + def_id, + block_formatter: RefCell::new(block_formatter), + } + } +} + +/// A pair of a basic block and an index into that basic blocks `successors`. 
+#[derive(Copy, Clone, PartialEq, Eq, Debug)] +pub struct CfgEdge { + source: BasicBlock, + index: usize, +} + +fn outgoing_edges(body: &Body<'_>, bb: BasicBlock) -> Vec { + body[bb] + .terminator() + .successors() + .enumerate() + .map(|(index, _)| CfgEdge { source: bb, index }) + .collect() +} + +impl dot::Labeller<'_> for Formatter<'a, 'tcx, A> +where + A: Analysis<'tcx>, +{ + type Node = BasicBlock; + type Edge = CfgEdge; + + fn graph_id(&self) -> dot::Id<'_> { + let name = graphviz_safe_def_name(self.def_id); + dot::Id::new(format!("graph_for_def_id_{}", name)).unwrap() + } + + fn node_id(&self, n: &Self::Node) -> dot::Id<'_> { + dot::Id::new(format!("bb_{}", n.index())).unwrap() + } + + fn node_label(&self, block: &Self::Node) -> dot::LabelText<'_> { + let mut label = Vec::new(); + self.block_formatter + .borrow_mut() + .write_node_label(&mut label, self.body, *block) + .unwrap(); + dot::LabelText::html(String::from_utf8(label).unwrap()) + } + + fn node_shape(&self, _n: &Self::Node) -> Option> { + Some(dot::LabelText::label("none")) + } + + fn edge_label(&self, e: &Self::Edge) -> dot::LabelText<'_> { + let label = &self.body + [e.source] + .terminator() + .kind + .fmt_successor_labels() + [e.index]; + dot::LabelText::label(label.clone()) + } +} + +impl dot::GraphWalk<'a> for Formatter<'a, 'tcx, A> +where + A: Analysis<'tcx>, +{ + type Node = BasicBlock; + type Edge = CfgEdge; + + fn nodes(&self) -> dot::Nodes<'_, Self::Node> { + self.body + .basic_blocks() + .indices() + .collect::>() + .into() + } + + fn edges(&self) -> dot::Edges<'_, Self::Edge> { + self.body + .basic_blocks() + .indices() + .flat_map(|bb| outgoing_edges(self.body, bb)) + .collect::>() + .into() + } + + fn source(&self, edge: &Self::Edge) -> Self::Node { + edge.source + } + + fn target(&self, edge: &Self::Edge) -> Self::Node { + self.body + [edge.source] + .terminator() + .successors() + .nth(edge.index) + .copied() + .unwrap() + } +} + +struct BlockFormatter<'a, 'tcx, A> +where + A: Analysis<'tcx>, +{ + prev_state: BitSet, + results: ResultsRefCursor<'a, 'a, 'tcx, A>, + bg: Background, +} + +impl BlockFormatter<'a, 'tcx, A> +where + A: Analysis<'tcx>, +{ + fn toggle_background(&mut self) -> Background { + let bg = self.bg; + self.bg = !bg; + bg + } + + fn write_node_label( + &mut self, + w: &mut impl io::Write, + body: &'a Body<'tcx>, + block: BasicBlock, + ) -> io::Result<()> { + // Sample output: + // +-+-----------------------------------------------+ + // A | bb4 | + // +-+----------------------------------+------------+ + // B | MIR | STATE | + // +-+----------------------------------+------------+ + // C | | (on entry) | {_0,_2,_3} | + // +-+----------------------------------+------------+ + // D |0| StorageLive(_7) | | + // +-+----------------------------------+------------+ + // |1| StorageLive(_8) | | + // +-+----------------------------------+------------+ + // |2| _8 = &mut _1 | +_8 | + // +-+----------------------------------+------------+ + // E |T| _4 = const Foo::twiddle(move _2) | -_2 | + // +-+----------------------------------+------------+ + // F | | (on unwind) | {_0,_3,_8} | + // +-+----------------------------------+------------+ + // | | (on successful return) | +_4 | + // +-+----------------------------------+------------+ + + write!( + w, + r#""#, + )?; + + // A: Block info + write!( + w, + r#" + + "#, + num_headers = 3, + block_id = block.index(), + )?; + + // B: Column headings + write!( + w, + r#" + + + "#, + fmt = r##"bgcolor="#a0a0a0" sides="tl""##, + )?; + + // C: Entry state + 
self.bg = Background::Light; + self.results.seek_to_block_start(block); + self.write_row_with_curr_state(w, "", "(on entry)")?; + self.prev_state.overwrite(self.results.get()); + + // D: Statement transfer functions + for (i, statement) in body[block].statements.iter().enumerate() { + let location = Location { block, statement_index: i }; + + let mir_col = format!("{:?}", statement); + let i_col = i.to_string(); + + self.results.seek_after(location); + self.write_row_with_curr_diff(w, &i_col, &mir_col)?; + self.prev_state.overwrite(self.results.get()); + } + + // E: Terminator transfer function + let terminator = body[block].terminator(); + let location = body.terminator_loc(block); + + let mut mir_col = String::new(); + terminator.kind.fmt_head(&mut mir_col).unwrap(); + + self.results.seek_after(location); + self.write_row_with_curr_diff(w, "T", &mir_col)?; + self.prev_state.overwrite(self.results.get()); + + // F: Exit state + if let mir::TerminatorKind::Call { destination: Some(_), .. } = &terminator.kind { + self.write_row_with_curr_state(w, "", "(on unwind)")?; + + self.results.seek_after_assume_call_returns(location); + self.write_row_with_curr_diff(w, "", "(on successful return)")?; + } else { + self.write_row_with_curr_state(w, "", "(on exit)")?; + } + + write!(w, "
</table>
") + } + + fn write_row_with_curr_state( + &mut self, + w: &mut impl io::Write, + i: &str, + mir: &str, + ) -> io::Result<()> { + let bg = self.toggle_background(); + + let mut out = Vec::new(); + write!(&mut out, "{{")?; + pretty_print_state_elems(&mut out, self.results.analysis(), self.results.get().iter())?; + write!(&mut out, "}}")?; + + write!( + w, + r#" + {i} + {mir} + {state} + "#, + fmt = &["sides=\"tl\"", bg.attr()].join(" "), + i = i, + mir = dot::escape_html(mir), + state = dot::escape_html(str::from_utf8(&out).unwrap()), + ) + } + + fn write_row_with_curr_diff( + &mut self, + w: &mut impl io::Write, + i: &str, + mir: &str, + ) -> io::Result<()> { + let bg = self.toggle_background(); + let analysis = self.results.analysis(); + + let diff = BitSetDiff::compute(&self.prev_state, self.results.get()); + + let mut set = Vec::new(); + pretty_print_state_elems(&mut set, analysis, diff.set.iter())?; + + let mut clear = Vec::new(); + pretty_print_state_elems(&mut clear, analysis, diff.clear.iter())?; + + write!( + w, + r#" + {i} + {mir} + "#, + i = i, + fmt = &["sides=\"tl\"", bg.attr()].join(" "), + mir = dot::escape_html(mir), + )?; + + if !set.is_empty() { + write!( + w, + r#"+{}"#, + dot::escape_html(str::from_utf8(&set).unwrap()), + )?; + } + + if !set.is_empty() && !clear.is_empty() { + write!(w, " ")?; + } + + if !clear.is_empty() { + write!( + w, + r#"-{}"#, + dot::escape_html(str::from_utf8(&clear).unwrap()), + )?; + } + + write!(w, "") + } +} + +/// The operations required to transform one `BitSet` into another. +struct BitSetDiff { + set: HybridBitSet, + clear: HybridBitSet, +} + +impl BitSetDiff { + fn compute(from: &BitSet, to: &BitSet) -> Self { + assert_eq!(from.domain_size(), to.domain_size()); + let len = from.domain_size(); + + let mut set = HybridBitSet::new_empty(len); + let mut clear = HybridBitSet::new_empty(len); + + // FIXME: This could be made faster if `BitSet::xor` were implemented. + for i in (0..len).map(|i| T::new(i)) { + match (from.contains(i), to.contains(i)) { + (false, true) => set.insert(i), + (true, false) => clear.insert(i), + _ => continue, + }; + } + + BitSetDiff { + set, + clear, + } + } +} + +/// Formats each `elem` using the pretty printer provided by `analysis` into a comma-separated +/// list. +fn pretty_print_state_elems
( + w: &mut impl io::Write, + analysis: &A, + elems: impl Iterator, +) -> io::Result<()> +where + A: Analysis<'tcx>, +{ + let mut first = true; + for idx in elems { + if first { + first = false; + } else { + write!(w, ",")?; + } + + analysis.pretty_print_idx(w, idx)?; + } + + Ok(()) +} + +/// The background color used for zebra-striping the table. +#[derive(Clone, Copy)] +enum Background { + Light, + Dark, +} + +impl Background { + fn attr(self) -> &'static str { + match self { + Self::Dark => "bgcolor=\"#f0f0f0\"", + Self::Light => "", + } + } +} + +impl ops::Not for Background { + type Output = Self; + + fn not(self) -> Self { + match self { + Self::Light => Self::Dark, + Self::Dark => Self::Light, + } + } +} diff --git a/src/librustc_mir/dataflow/impls/borrows.rs b/src/librustc_mir/dataflow/impls/borrows.rs index a86fcb30f4..5e64144df2 100644 --- a/src/librustc_mir/dataflow/impls/borrows.rs +++ b/src/librustc_mir/dataflow/impls/borrows.rs @@ -5,9 +5,9 @@ use rustc::mir::{self, Location, Place, PlaceBase, Body}; use rustc::ty::{self, TyCtxt}; use rustc::ty::RegionVid; -use rustc_data_structures::bit_set::BitSet; +use rustc_index::bit_set::BitSet; use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::indexed_vec::{Idx, IndexVec}; +use rustc_index::vec::{Idx, IndexVec}; use crate::dataflow::{BitDenotation, BottomValue, GenKillSet}; use crate::borrow_check::nll::region_infer::RegionInferenceContext; @@ -16,7 +16,7 @@ use crate::borrow_check::places_conflict; use std::rc::Rc; -newtype_index! { +rustc_index::newtype_index! { pub struct BorrowIndex { DEBUG_FORMAT = "bw{}" } diff --git a/src/librustc_mir/dataflow/impls/indirect_mutation.rs b/src/librustc_mir/dataflow/impls/indirect_mutation.rs new file mode 100644 index 0000000000..bc09e32717 --- /dev/null +++ b/src/librustc_mir/dataflow/impls/indirect_mutation.rs @@ -0,0 +1,155 @@ +use rustc::mir::visit::Visitor; +use rustc::mir::{self, Local, Location}; +use rustc::ty::{self, TyCtxt}; +use rustc_index::bit_set::BitSet; +use syntax_pos::DUMMY_SP; + +use crate::dataflow::{self, GenKillSet}; + +/// Whether a borrow to a `Local` has been created that could allow that `Local` to be mutated +/// indirectly. This could either be a mutable reference (`&mut`) or a shared borrow if the type of +/// that `Local` allows interior mutability. Operations that can mutate local's indirectly include: +/// assignments through a pointer (`*p = 42`), function calls, drop terminators and inline assembly. +/// +/// If this returns false for a `Local` at a given statement (or terminator), that `Local` could +/// not possibly have been mutated indirectly prior to that statement. 
+#[derive(Copy, Clone)] +pub struct IndirectlyMutableLocals<'mir, 'tcx> { + body: &'mir mir::Body<'tcx>, + tcx: TyCtxt<'tcx>, + param_env: ty::ParamEnv<'tcx>, +} + +impl<'mir, 'tcx> IndirectlyMutableLocals<'mir, 'tcx> { + pub fn new( + tcx: TyCtxt<'tcx>, + body: &'mir mir::Body<'tcx>, + param_env: ty::ParamEnv<'tcx>, + ) -> Self { + IndirectlyMutableLocals { body, tcx, param_env } + } + + fn transfer_function<'a>( + &self, + trans: &'a mut GenKillSet, + ) -> TransferFunction<'a, 'mir, 'tcx> { + TransferFunction { + body: self.body, + tcx: self.tcx, + param_env: self.param_env, + trans + } + } +} + +impl<'mir, 'tcx> dataflow::BitDenotation<'tcx> for IndirectlyMutableLocals<'mir, 'tcx> { + type Idx = Local; + + fn name() -> &'static str { "mut_borrowed_locals" } + + fn bits_per_block(&self) -> usize { + self.body.local_decls.len() + } + + fn start_block_effect(&self, _entry_set: &mut BitSet) { + // Nothing is borrowed on function entry + } + + fn statement_effect( + &self, + trans: &mut GenKillSet, + loc: Location, + ) { + let stmt = &self.body[loc.block].statements[loc.statement_index]; + self.transfer_function(trans).visit_statement(stmt, loc); + } + + fn terminator_effect( + &self, + trans: &mut GenKillSet, + loc: Location, + ) { + let terminator = self.body[loc.block].terminator(); + self.transfer_function(trans).visit_terminator(terminator, loc); + } + + fn propagate_call_return( + &self, + _in_out: &mut BitSet, + _call_bb: mir::BasicBlock, + _dest_bb: mir::BasicBlock, + _dest_place: &mir::Place<'tcx>, + ) { + // Nothing to do when a call returns successfully + } +} + +impl<'mir, 'tcx> dataflow::BottomValue for IndirectlyMutableLocals<'mir, 'tcx> { + // bottom = unborrowed + const BOTTOM_VALUE: bool = false; +} + +/// A `Visitor` that defines the transfer function for `IndirectlyMutableLocals`. +struct TransferFunction<'a, 'mir, 'tcx> { + trans: &'a mut GenKillSet, + body: &'mir mir::Body<'tcx>, + tcx: TyCtxt<'tcx>, + param_env: ty::ParamEnv<'tcx>, +} + +impl<'tcx> TransferFunction<'_, '_, 'tcx> { + /// Returns `true` if this borrow would allow mutation of the `borrowed_place`. + fn borrow_allows_mutation( + &self, + kind: mir::BorrowKind, + borrowed_place: &mir::Place<'tcx>, + ) -> bool { + match kind { + mir::BorrowKind::Mut { .. } => true, + + | mir::BorrowKind::Shared + | mir::BorrowKind::Shallow + | mir::BorrowKind::Unique + => !borrowed_place + .ty(self.body, self.tcx) + .ty + .is_freeze(self.tcx, self.param_env, DUMMY_SP), + } + } +} + +impl<'tcx> Visitor<'tcx> for TransferFunction<'_, '_, 'tcx> { + fn visit_rvalue( + &mut self, + rvalue: &mir::Rvalue<'tcx>, + location: Location, + ) { + if let mir::Rvalue::Ref(_, kind, ref borrowed_place) = *rvalue { + if self.borrow_allows_mutation(kind, borrowed_place) { + match borrowed_place.base { + mir::PlaceBase::Local(borrowed_local) if !borrowed_place.is_indirect() + => self.trans.gen(borrowed_local), + + _ => (), + } + } + } + + self.super_rvalue(rvalue, location); + } + + + fn visit_terminator(&mut self, terminator: &mir::Terminator<'tcx>, location: Location) { + // This method purposely does nothing except call `super_terminator`. It exists solely to + // document the subtleties around drop terminators. + + self.super_terminator(terminator, location); + + if let mir::TerminatorKind::Drop { location: _, .. } + | mir::TerminatorKind::DropAndReplace { location: _, .. 
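The doc comment above distinguishes direct mutation from mutation through a borrow, which is what the `IndirectlyMutableLocals` analysis defined next tracks. A minimal standalone example of the two borrow cases it describes (not from the patch) is:

```rust
use std::cell::Cell;

fn main() {
    // A mutable borrow clearly allows `x` to be mutated through `p`.
    let mut x = 0;
    let p = &mut x;
    *p = 42;
    assert_eq!(x, 42);

    // A *shared* borrow can also permit mutation when the type has interior
    // mutability, which is why shared borrows of non-`Freeze` types are
    // treated as potentially mutating by this analysis.
    let c = Cell::new(0);
    let r = &c;
    r.set(42);
    assert_eq!(c.get(), 42);
}
```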
} = &terminator.kind + { + // Although drop terminators mutably borrow the location being dropped, that borrow + // cannot live beyond the drop terminator because the dropped location is invalidated. + } + } +} diff --git a/src/librustc_mir/dataflow/impls/mod.rs b/src/librustc_mir/dataflow/impls/mod.rs index 69bbe08792..6f860d00a2 100644 --- a/src/librustc_mir/dataflow/impls/mod.rs +++ b/src/librustc_mir/dataflow/impls/mod.rs @@ -4,8 +4,8 @@ use rustc::ty::TyCtxt; use rustc::mir::{self, Body, Location}; -use rustc_data_structures::bit_set::BitSet; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::bit_set::BitSet; +use rustc_index::vec::Idx; use super::MoveDataParamEnv; @@ -18,13 +18,13 @@ use super::drop_flag_effects_for_function_entry; use super::drop_flag_effects_for_location; use super::on_lookup_result_bits; +mod borrowed_locals; +mod indirect_mutation; mod storage_liveness; -pub use self::storage_liveness::*; - -mod borrowed_locals; - pub use self::borrowed_locals::*; +pub use self::indirect_mutation::IndirectlyMutableLocals; +pub use self::storage_liveness::*; pub(super) mod borrows; diff --git a/src/librustc_mir/dataflow/impls/storage_liveness.rs b/src/librustc_mir/dataflow/impls/storage_liveness.rs index 0f66b13fdc..1b81032bfe 100644 --- a/src/librustc_mir/dataflow/impls/storage_liveness.rs +++ b/src/librustc_mir/dataflow/impls/storage_liveness.rs @@ -109,15 +109,13 @@ impl<'mir, 'tcx> BitDenotation<'tcx> for RequiresStorage<'mir, 'tcx> { assert_eq!(1, self.body.arg_count); } - fn statement_effect(&self, - sets: &mut GenKillSet, - loc: Location) { - self.check_for_move(sets, loc); + fn before_statement_effect(&self, sets: &mut GenKillSet, loc: Location) { + // If we borrow or assign to a place then it needs storage for that + // statement. self.check_for_borrow(sets, loc); let stmt = &self.body[loc.block].statements[loc.statement_index]; match stmt.kind { - StatementKind::StorageLive(l) => sets.gen(l), StatementKind::StorageDead(l) => sets.kill(l), StatementKind::Assign(box(ref place, _)) | StatementKind::SetDiscriminant { box ref place, .. } => { @@ -136,11 +134,37 @@ impl<'mir, 'tcx> BitDenotation<'tcx> for RequiresStorage<'mir, 'tcx> { } } - fn terminator_effect(&self, - sets: &mut GenKillSet, - loc: Location) { + fn statement_effect(&self, sets: &mut GenKillSet, loc: Location) { + // If we move from a place then only stops needing storage *after* + // that statement. self.check_for_move(sets, loc); + } + + fn before_terminator_effect(&self, sets: &mut GenKillSet, loc: Location) { self.check_for_borrow(sets, loc); + + if let TerminatorKind::Call { + destination: Some((Place { base: PlaceBase::Local(local), .. }, _)), + .. + } = self.body[loc.block].terminator().kind { + sets.gen(local); + } + } + + fn terminator_effect(&self, sets: &mut GenKillSet, loc: Location) { + // For call terminators the destination requires storage for the call + // and after the call returns successfully, but not after a panic. + // Since `propagate_call_unwind` doesn't exist, we have to kill the + // destination here, and then gen it again in `propagate_call_return`. + if let TerminatorKind::Call { + destination: Some((ref place, _)), + .. 
+ } = self.body[loc.block].terminator().kind { + if let Some(local) = place.as_local() { + sets.kill(local); + } + } + self.check_for_move(sets, loc); } fn propagate_call_return( diff --git a/src/librustc_mir/dataflow/mod.rs b/src/librustc_mir/dataflow/mod.rs index 5ab4e25b68..ad0f75d772 100644 --- a/src/librustc_mir/dataflow/mod.rs +++ b/src/librustc_mir/dataflow/mod.rs @@ -1,8 +1,9 @@ use syntax::ast::{self, MetaItem}; +use syntax::print::pprust; use syntax::symbol::{Symbol, sym}; -use rustc_data_structures::bit_set::{BitSet, HybridBitSet}; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::bit_set::{BitSet, HybridBitSet}; +use rustc_index::vec::Idx; use rustc_data_structures::work_queue::WorkQueue; use rustc::hir::def_id::DefId; @@ -23,6 +24,7 @@ pub use self::impls::DefinitelyInitializedPlaces; pub use self::impls::EverInitializedPlaces; pub use self::impls::borrows::Borrows; pub use self::impls::HaveBeenBorrowedLocals; +pub use self::impls::IndirectlyMutableLocals; pub use self::at_location::{FlowAtLocation, FlowsAtLocation}; pub(crate) use self::drop_flag_effects::*; @@ -158,9 +160,8 @@ where if let Some(s) = item.value_str() { return Some(s.to_string()) } else { - sess.span_err( - item.span, - &format!("{} attribute requires a path", item.path)); + let path = pprust::path_to_string(&item.path); + sess.span_err(item.span, &format!("{} attribute requires a path", path)); return None; } } diff --git a/src/librustc_mir/dataflow/move_paths/builder.rs b/src/librustc_mir/dataflow/move_paths/builder.rs index 698c501662..52016d4c93 100644 --- a/src/librustc_mir/dataflow/move_paths/builder.rs +++ b/src/librustc_mir/dataflow/move_paths/builder.rs @@ -1,7 +1,7 @@ use rustc::mir::tcx::RvalueInitializationState; use rustc::mir::*; use rustc::ty::{self, TyCtxt}; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc_index::vec::IndexVec; use smallvec::{smallvec, SmallVec}; use std::collections::hash_map::Entry; @@ -106,7 +106,7 @@ impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> { let body = self.builder.body; let tcx = self.builder.tcx; let place_ty = Place::ty_from(&place.base, proj_base, body, tcx).ty; - match place_ty.sty { + match place_ty.kind { ty::Ref(..) | ty::RawPtr(..) => { let proj = &place.projection[..i+1]; return Err(MoveError::cannot_move_out_of( @@ -114,7 +114,7 @@ impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> { BorrowedContent { target_place: Place { base: place.base.clone(), - projection: proj.to_vec().into_boxed_slice(), + projection: tcx.intern_place_elems(proj), }, }, )); @@ -172,7 +172,7 @@ impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> { Some(base), Place { base: place.base.clone(), - projection: proj.to_vec().into_boxed_slice(), + projection: tcx.intern_place_elems(proj), }, ); ent.insert(path); @@ -274,7 +274,7 @@ impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> { // Box starts out uninitialized - need to create a separate // move-path for the interior so it will be separate from // the exterior. - self.create_move_path(&place.clone().deref()); + self.create_move_path(&self.builder.tcx.mk_place_deref(place.clone())); self.gather_init(place.as_ref(), InitKind::Shallow); } else { self.gather_init(place.as_ref(), InitKind::Deep); @@ -438,7 +438,7 @@ impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> { // of the union so it is marked as initialized again. 
if let [proj_base @ .., ProjectionElem::Field(_, _)] = place.projection { if let ty::Adt(def, _) = - Place::ty_from(place.base, proj_base, self.builder.body, self.builder.tcx).ty.sty + Place::ty_from(place.base, proj_base, self.builder.body, self.builder.tcx).ty.kind { if def.is_union() { place = PlaceRef { base: place.base, projection: proj_base } diff --git a/src/librustc_mir/dataflow/move_paths/mod.rs b/src/librustc_mir/dataflow/move_paths/mod.rs index 156c19c636..b599f47994 100644 --- a/src/librustc_mir/dataflow/move_paths/mod.rs +++ b/src/librustc_mir/dataflow/move_paths/mod.rs @@ -2,7 +2,7 @@ use core::slice::Iter; use rustc::mir::*; use rustc::ty::{Ty, TyCtxt}; use rustc::util::nodemap::FxHashMap; -use rustc_data_structures::indexed_vec::{Enumerated, Idx, IndexVec}; +use rustc_index::vec::{Enumerated, Idx, IndexVec}; use smallvec::SmallVec; use syntax_pos::Span; @@ -13,19 +13,19 @@ use self::abs_domain::{AbstractElem, Lift}; mod abs_domain; -newtype_index! { +rustc_index::newtype_index! { pub struct MovePathIndex { DEBUG_FORMAT = "mp{}" } } -newtype_index! { +rustc_index::newtype_index! { pub struct MoveOutIndex { DEBUG_FORMAT = "mo{}" } } -newtype_index! { +rustc_index::newtype_index! { pub struct InitIndex { DEBUG_FORMAT = "in{}" } @@ -327,7 +327,7 @@ impl<'tcx> MoveData<'tcx> { pub fn base_local(&self, mut mpi: MovePathIndex) -> Option { loop { let path = &self.move_paths[mpi]; - if let Place { base: PlaceBase::Local(l), projection: box [] } = path.place { + if let Some(l) = path.place.as_local() { return Some(l); } if let Some(parent) = path.parent { diff --git a/src/librustc_mir/error_codes.rs b/src/librustc_mir/error_codes.rs index ba299e9463..c119ca536f 100644 --- a/src/librustc_mir/error_codes.rs +++ b/src/librustc_mir/error_codes.rs @@ -64,7 +64,9 @@ E0004: r##" This error indicates that the compiler cannot guarantee a matching pattern for one or more possible inputs to a match expression. Guaranteed matches are required in order to assign values to match expressions, or alternatively, -determine the flow of execution. Erroneous code example: +determine the flow of execution. + +Erroneous code example: ```compile_fail,E0004 enum Terminator { @@ -109,7 +111,9 @@ match x { E0005: r##" Patterns used to bind names must be irrefutable, that is, they must guarantee -that a name will be extracted in all cases. Erroneous code example: +that a name will be extracted in all cases. + +Erroneous code example: ```compile_fail,E0005 let x = Some(1); @@ -145,6 +149,8 @@ like the following is invalid as it requires the entire `Option` to be moved into a variable called `op_string` while simultaneously requiring the inner `String` to be moved into a variable called `s`. +Erroneous code example: + ```compile_fail,E0007 let x = Some("s".to_string()); @@ -208,15 +214,130 @@ match x { ``` "##, +E0010: r##" +The value of statics and constants must be known at compile time, and they live +for the entire lifetime of a program. Creating a boxed value allocates memory on +the heap at runtime, and therefore cannot be done at compile time. + +Erroneous code example: + +```compile_fail,E0010 +#![feature(box_syntax)] + +const CON : Box = box 0; +``` +"##, + +E0013: r##" +Static and const variables can refer to other const variables. But a const +variable cannot refer to a static variable. + +Erroneous code example: + +```compile_fail,E0013 +static X: i32 = 42; +const Y: i32 = X; +``` + +In this example, `Y` cannot refer to `X` here. 
To fix this, the value can be +extracted as a const and then used: + +``` +const A: i32 = 42; +static X: i32 = A; +const Y: i32 = A; +``` +"##, + +// FIXME(#57563) Change the language here when const fn stabilizes +E0015: r##" +The only functions that can be called in static or constant expressions are +`const` functions, and struct/enum constructors. `const` functions are only +available on a nightly compiler. Rust currently does not support more general +compile-time function execution. + +``` +const FOO: Option = Some(1); // enum constructor +struct Bar {x: u8} +const BAR: Bar = Bar {x: 1}; // struct constructor +``` + +See [RFC 911] for more details on the design of `const fn`s. + +[RFC 911]: https://github.com/rust-lang/rfcs/blob/master/text/0911-const-fn.md +"##, + +E0017: r##" +References in statics and constants may only refer to immutable values. + +Erroneous code example: + +```compile_fail,E0017 +static X: i32 = 1; +const C: i32 = 2; + +// these three are not allowed: +const CR: &mut i32 = &mut C; +static STATIC_REF: &'static mut i32 = &mut X; +static CONST_REF: &'static mut i32 = &mut C; +``` + +Statics are shared everywhere, and if they refer to mutable data one might +violate memory safety since holding multiple mutable references to shared data +is not allowed. + +If you really want global mutable state, try using `static mut` or a global +`UnsafeCell`. +"##, + +E0019: r##" +A function call isn't allowed in the const's initialization expression +because the expression's value must be known at compile-time. + +Erroneous code example: + +```compile_fail,E0019 +#![feature(box_syntax)] + +fn main() { + struct MyOwned; + + static STATIC11: Box = box MyOwned; // error! +} +``` + +Remember: you can't use a function call inside a const's initialization +expression! However, you can totally use it anywhere else: + +``` +enum Test { + V1 +} + +impl Test { + fn func(&self) -> i32 { + 12 + } +} + +fn main() { + const FOO: Test = Test::V1; + + FOO.func(); // here is good + let x = FOO.func(); // or even here! +} +``` +"##, + E0030: r##" When matching against a range, the compiler verifies that the range is -non-empty. Range patterns include both end-points, so this is equivalent to +non-empty. Range patterns include both end-points, so this is equivalent to requiring the start of the range to be less than or equal to the end of the range. -For example: +Erroneous code example: -```compile_fail +```compile_fail,E0030 match 5u32 { // This range is ok, albeit pointless. 1 ..= 1 => {} @@ -226,7 +347,61 @@ match 5u32 { ``` "##, +E0133: r##" +Unsafe code was used outside of an unsafe function or block. + +Erroneous code example: + +```compile_fail,E0133 +unsafe fn f() { return; } // This is the unsafe code + +fn main() { + f(); // error: call to unsafe function requires unsafe function or block +} +``` + +Using unsafe functionality is potentially dangerous and disallowed by safety +checks. Examples: + +* Dereferencing raw pointers +* Calling functions via FFI +* Calling functions marked unsafe + +These safety checks can be relaxed for a section of the code by wrapping the +unsafe instructions with an `unsafe` block. For instance: + +``` +unsafe fn f() { return; } + +fn main() { + unsafe { f(); } // ok! +} +``` + +See also https://doc.rust-lang.org/book/ch19-01-unsafe-rust.html +"##, + E0158: r##" +An associated const has been referenced in a pattern. 
+ +Erroneous code example: + +```compile_fail,E0158 +enum EFoo { A, B, C, D } + +trait Foo { + const X: EFoo; +} + +fn test(arg: EFoo) { + match arg { + A::X => { // error! + println!("A::X"); + } + } +} +``` + `const` and `static` mean different things. A `const` is a compile-time constant, an alias for a literal value. This property means you can match it directly within a pattern. @@ -247,6 +422,39 @@ match Some(42) { ``` "##, +E0161: r##" +A value was moved. However, its size was not known at compile time, and only +values of a known size can be moved. + +Erroneous code example: + +```compile_fail,E0161 +#![feature(box_syntax)] + +fn main() { + let array: &[isize] = &[1, 2, 3]; + let _x: Box<[isize]> = box *array; + // error: cannot move a value of type [isize]: the size of [isize] cannot + // be statically determined +} +``` + +In Rust, you can only move a value when its size is known at compile time. + +To work around this restriction, consider "hiding" the value behind a reference: +either `&x` or `&mut x`. Since a reference has a fixed size, this lets you move +it around as usual. Example: + +``` +#![feature(box_syntax)] + +fn main() { + let array: &[isize] = &[1, 2, 3]; + let _x: Box<&[isize]> = box array; // ok! +} +``` +"##, + E0162: r##" #### Note: this error code is no longer emitted by the compiler. @@ -468,158 +676,6 @@ The `op_string_ref` binding has type `&Option<&String>` in both cases. See also https://github.com/rust-lang/rust/issues/14587 "##, -E0010: r##" -The value of statics and constants must be known at compile time, and they live -for the entire lifetime of a program. Creating a boxed value allocates memory on -the heap at runtime, and therefore cannot be done at compile time. Erroneous -code example: - -```compile_fail,E0010 -#![feature(box_syntax)] - -const CON : Box = box 0; -``` -"##, - -E0013: r##" -Static and const variables can refer to other const variables. But a const -variable cannot refer to a static variable. For example, `Y` cannot refer to -`X` here: - -```compile_fail,E0013 -static X: i32 = 42; -const Y: i32 = X; -``` - -To fix this, the value can be extracted as a const and then used: - -``` -const A: i32 = 42; -static X: i32 = A; -const Y: i32 = A; -``` -"##, - -// FIXME(#57563) Change the language here when const fn stabilizes -E0015: r##" -The only functions that can be called in static or constant expressions are -`const` functions, and struct/enum constructors. `const` functions are only -available on a nightly compiler. Rust currently does not support more general -compile-time function execution. - -``` -const FOO: Option = Some(1); // enum constructor -struct Bar {x: u8} -const BAR: Bar = Bar {x: 1}; // struct constructor -``` - -See [RFC 911] for more details on the design of `const fn`s. - -[RFC 911]: https://github.com/rust-lang/rfcs/blob/master/text/0911-const-fn.md -"##, - -E0017: r##" -References in statics and constants may only refer to immutable values. -Erroneous code example: - -```compile_fail,E0017 -static X: i32 = 1; -const C: i32 = 2; - -// these three are not allowed: -const CR: &mut i32 = &mut C; -static STATIC_REF: &'static mut i32 = &mut X; -static CONST_REF: &'static mut i32 = &mut C; -``` - -Statics are shared everywhere, and if they refer to mutable data one might -violate memory safety since holding multiple mutable references to shared data -is not allowed. - -If you really want global mutable state, try using `static mut` or a global -`UnsafeCell`. 
-"##, - -E0019: r##" -A function call isn't allowed in the const's initialization expression -because the expression's value must be known at compile-time. Erroneous code -example: - -```compile_fail -enum Test { - V1 -} - -impl Test { - fn test(&self) -> i32 { - 12 - } -} - -fn main() { - const FOO: Test = Test::V1; - - const A: i32 = FOO.test(); // You can't call Test::func() here! -} -``` - -Remember: you can't use a function call inside a const's initialization -expression! However, you can totally use it anywhere else: - -``` -enum Test { - V1 -} - -impl Test { - fn func(&self) -> i32 { - 12 - } -} - -fn main() { - const FOO: Test = Test::V1; - - FOO.func(); // here is good - let x = FOO.func(); // or even here! -} -``` -"##, - -E0133: r##" -Unsafe code was used outside of an unsafe function or block. - -Erroneous code example: - -```compile_fail,E0133 -unsafe fn f() { return; } // This is the unsafe code - -fn main() { - f(); // error: call to unsafe function requires unsafe function or block -} -``` - -Using unsafe functionality is potentially dangerous and disallowed by safety -checks. Examples: - -* Dereferencing raw pointers -* Calling functions via FFI -* Calling functions marked unsafe - -These safety checks can be relaxed for a section of the code by wrapping the -unsafe instructions with an `unsafe` block. For instance: - -``` -unsafe fn f() { return; } - -fn main() { - unsafe { f(); } // ok! -} -``` - -See also https://doc.rust-lang.org/book/ch19-01-unsafe-rust.html -"##, - E0373: r##" This error occurs when an attempt is made to use data captured by a closure, when that data may no longer exist. It's most commonly seen when attempting to @@ -672,7 +728,9 @@ about safety. "##, E0381: r##" -It is not allowed to use or capture an uninitialized variable. For example: +It is not allowed to use or capture an uninitialized variable. + +Erroneous code example: ```compile_fail,E0381 fn main() { @@ -694,7 +752,9 @@ fn main() { E0382: r##" This error occurs when an attempt is made to use a variable after its contents -have been moved elsewhere. For example: +have been moved elsewhere. + +Erroneous code example: ```compile_fail,E0382 struct MyStruct { s: u32 } @@ -842,7 +902,8 @@ x = Foo { a: 2 }; E0384: r##" This error occurs when an attempt is made to reassign an immutable variable. -For example: + +Erroneous code example: ```compile_fail,E0384 fn main() { @@ -862,13 +923,15 @@ fn main() { ``` "##, -/*E0386: r##" +E0386: r##" +#### Note: this error code is no longer emitted by the compiler. + This error occurs when an attempt is made to mutate the target of a mutable reference stored inside an immutable container. For example, this can happen when storing a `&mut` inside an immutable `Box`: -```compile_fail,E0386 +``` let mut x: i64 = 1; let y: Box<_> = Box::new(&mut x); **y = 2; // error, cannot assign to data in an immutable container @@ -892,13 +955,15 @@ let x: i64 = 1; let y: Box> = Box::new(Cell::new(x)); y.set(2); ``` -"##,*/ +"##, E0387: r##" #### Note: this error code is no longer emitted by the compiler. This error occurs when an attempt is made to mutate or mutably reference data -that a closure has captured immutably. Examples of this error are shown below: +that a closure has captured immutably. + +Erroneous code example: ```compile_fail // Accepts a function or a closure that captures its environment immutably. @@ -953,7 +1018,7 @@ https://doc.rust-lang.org/std/cell/ "##, E0388: r##" -E0388 was removed and is no longer issued. 
+#### Note: this error code is no longer emitted by the compiler. "##, E0389: r##" @@ -963,7 +1028,7 @@ An attempt was made to mutate data using a non-mutable reference. This commonly occurs when attempting to assign to a non-mutable reference of a mutable reference (`&(&mut T)`). -Example of erroneous code: +Erroneous code example: ```compile_fail struct FancyNum { @@ -1022,43 +1087,11 @@ fn main() { ``` "##, -E0161: r##" -A value was moved. However, its size was not known at compile time, and only -values of a known size can be moved. +E0492: r##" +A borrow of a constant containing interior mutability was attempted. Erroneous code example: -```compile_fail -#![feature(box_syntax)] - -fn main() { - let array: &[isize] = &[1, 2, 3]; - let _x: Box<[isize]> = box *array; - // error: cannot move a value of type [isize]: the size of [isize] cannot - // be statically determined -} -``` - -In Rust, you can only move a value when its size is known at compile time. - -To work around this restriction, consider "hiding" the value behind a reference: -either `&x` or `&mut x`. Since a reference has a fixed size, this lets you move -it around as usual. Example: - -``` -#![feature(box_syntax)] - -fn main() { - let array: &[isize] = &[1, 2, 3]; - let _x: Box<&[isize]> = box array; // ok! -} -``` -"##, - -E0492: r##" -A borrow of a constant containing interior mutability was attempted. Erroneous -code example: - ```compile_fail,E0492 use std::sync::atomic::AtomicUsize; @@ -1128,8 +1161,55 @@ Remember this solution is unsafe! You will have to ensure that accesses to the cell are synchronized. "##, +E0493: r##" +A type with a `Drop` implementation was destructured when trying to initialize +a static item. + +Erroneous code example: + +```compile_fail,E0493 +enum DropType { + A, +} + +impl Drop for DropType { + fn drop(&mut self) {} +} + +struct Foo { + field1: DropType, +} + +static FOO: Foo = Foo { ..Foo { field1: DropType::A } }; // error! +``` + +The problem here is that if the given type or one of its fields implements the +`Drop` trait, this `Drop` implementation cannot be called during the static +type initialization which might cause a memory leak. To prevent this issue, +you need to instantiate all the static type's fields by hand. + +``` +enum DropType { + A, +} + +impl Drop for DropType { + fn drop(&mut self) {} +} + +struct Foo { + field1: DropType, +} + +static FOO: Foo = Foo { field1: DropType::A }; // We initialize all fields + // by hand. +``` +"##, + E0499: r##" -A variable was borrowed as mutable more than once. Erroneous code example: +A variable was borrowed as mutable more than once. + +Erroneous code example: ```compile_fail,E0499 let mut i = 0; @@ -1160,7 +1240,9 @@ a; "##, E0500: r##" -A borrowed variable was used by a closure. Example of erroneous code: +A borrowed variable was used by a closure. + +Erroneous code example: ```compile_fail,E0500 fn you_know_nothing(jon_snow: &mut i32) { @@ -1211,7 +1293,7 @@ situation, the closure is borrowing the variable. Take a look at http://rustbyexample.com/fn/closures/capture.html for more information about capturing. -Example of erroneous code: +Erroneous code example: ```compile_fail,E0501 fn inside_closure(x: &mut i32) { @@ -1284,7 +1366,7 @@ E0502: r##" This error indicates that you are trying to borrow a variable as mutable when it has already been borrowed as immutable. 
-Example of erroneous code: +Erroneous code example: ```compile_fail,E0502 fn bar(x: &mut i32) {} @@ -1315,7 +1397,7 @@ https://doc.rust-lang.org/book/ch04-02-references-and-borrowing.html. E0503: r##" A value was used after it was mutably borrowed. -Example of erroneous code: +Erroneous code example: ```compile_fail,E0503 fn main() { @@ -1373,7 +1455,7 @@ E0504: r##" This error occurs when an attempt is made to move a borrowed variable into a closure. -Example of erroneous code: +Erroneous code example: ```compile_fail struct FancyNum { @@ -1564,7 +1646,7 @@ http://doc.rust-lang.org/book/ch04-02-references-and-borrowing.html E0506: r##" This error occurs when an attempt is made to assign to a borrowed value. -Example of erroneous code: +Erroneous code example: ```compile_fail,E0506 struct FancyNum { @@ -1782,7 +1864,7 @@ http://doc.rust-lang.org/book/ch04-02-references-and-borrowing.html E0508: r##" A value was moved out of a non-copy fixed-size array. -Example of erroneous code: +Erroneous code example: ```compile_fail,E0508 struct NonCopy; @@ -1827,7 +1909,7 @@ E0509: r##" This error occurs when an attempt is made to move out of a value whose type implements the `Drop` trait. -Example of erroneous code: +Erroneous code example: ```compile_fail,E0509 struct FancyNum { @@ -1937,30 +2019,14 @@ Here executing `x = None` would modify the value being matched and require us to go "back in time" to the `None` arm. "##, -E0579: r##" -When matching against an exclusive range, the compiler verifies that the range -is non-empty. Exclusive range patterns include the start point but not the end -point, so this is equivalent to requiring the start of the range to be less -than the end of the range. - -For example: - -```compile_fail -match 5u32 { - // This range is ok, albeit pointless. - 1 .. 2 => {} - // This range is empty, and the compiler can tell. - 5 .. 5 => {} -} -``` -"##, - E0515: r##" Cannot return value that references local variable Local variables, function parameters and temporaries are all dropped before the end of the function body. So a reference to them cannot be returned. +Erroneous code example: + ```compile_fail,E0515 fn get_dangling_reference() -> &'static i32 { let x = 0; @@ -1993,6 +2059,91 @@ fn get_owned_iterator() -> IntoIter { ``` "##, +E0524: r##" +A variable which requires unique access is being used in more than one closure +at the same time. + +Erroneous code example: + +```compile_fail,E0524 +fn set(x: &mut isize) { + *x += 4; +} + +fn dragoooon(x: &mut isize) { + let mut c1 = || set(x); + let mut c2 = || set(x); // error! + + c2(); + c1(); +} +``` + +To solve this issue, multiple solutions are available. First, is it required +for this variable to be used in more than one closure at a time? If it is the +case, use reference counted types such as `Rc` (or `Arc` if it runs +concurrently): + +``` +use std::rc::Rc; +use std::cell::RefCell; + +fn set(x: &mut isize) { + *x += 4; +} + +fn dragoooon(x: &mut isize) { + let x = Rc::new(RefCell::new(x)); + let y = Rc::clone(&x); + let mut c1 = || { let mut x2 = x.borrow_mut(); set(&mut x2); }; + let mut c2 = || { let mut x2 = y.borrow_mut(); set(&mut x2); }; // ok! + + c2(); + c1(); +} +``` + +If not, just run closures one at a time: + +``` +fn set(x: &mut isize) { + *x += 4; +} + +fn dragoooon(x: &mut isize) { + { // This block isn't necessary since non-lexical lifetimes, it's just to + // make it more clear. + let mut c1 = || set(&mut *x); + c1(); + } // `c1` has been dropped here so we're free to use `x` again! 
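+    // `x` is no longer borrowed at this point, so the second closure is free
+    // to take the unique borrow in turn.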
+ let mut c2 = || set(&mut *x); + c2(); +} +``` +"##, + +E0579: r##" +When matching against an exclusive range, the compiler verifies that the range +is non-empty. Exclusive range patterns include the start point but not the end +point, so this is equivalent to requiring the start of the range to be less +than the end of the range. + +Erroneous code example: + +```compile_fail,E0579 +#![feature(exclusive_range_pattern)] + +fn main() { + match 5u32 { + // This range is ok, albeit pointless. + 1 .. 2 => {} + // This range is empty, and the compiler can tell. + 5 .. 5 => {} // error! + } +} +``` +"##, + E0595: r##" #### Note: this error code is no longer emitted by the compiler. @@ -2016,7 +2167,7 @@ let mut c = || { x += 1 }; E0596: r##" This error occurs because you tried to mutably borrow a non-mutable variable. -Example of erroneous code: +Erroneous code example: ```compile_fail,E0596 let x = 1; @@ -2035,7 +2186,7 @@ let y = &mut x; // ok! E0597: r##" This error occurs because a value was dropped while it was still borrowed -Example of erroneous code: +Erroneous code example: ```compile_fail,E0597 struct Foo<'a> { @@ -2072,6 +2223,8 @@ E0626: r##" This error occurs because a borrow in a generator persists across a yield point. +Erroneous code example: + ```compile_fail,E0626 # #![feature(generators, generator_trait, pin)] # use std::ops::Generator; @@ -2163,7 +2316,7 @@ E0712: r##" This error occurs because a borrow of a thread-local variable was made inside a function which outlived the lifetime of the function. -Example of erroneous code: +Erroneous code example: ```compile_fail,E0712 #![feature(thread_local)] @@ -2185,7 +2338,7 @@ E0713: r##" This error occurs when an attempt is made to borrow state past the end of the lifetime of a type that implements the `Drop` trait. -Example of erroneous code: +Erroneous code example: ```compile_fail,E0713 #![feature(nll)] @@ -2391,10 +2544,8 @@ There are some known bugs that trigger this message. // E0299, // mismatched types between arms // E0471, // constant evaluation error (in pattern) // E0385, // {} in an aliasable location - E0493, // destructors cannot be evaluated at compile-time E0521, // borrowed data escapes outside of closure - E0524, // two closures require unique access to `..` at the same time - E0526, // shuffle indices are not constant +// E0526, // shuffle indices are not constant E0594, // cannot assign to {} // E0598, // lifetime of {} is too short to guarantee its contents can be... E0625, // thread-local statics cannot be accessed at compile-time diff --git a/src/librustc_mir/hair/constant.rs b/src/librustc_mir/hair/constant.rs index bc01e3ee95..956716f8ce 100644 --- a/src/librustc_mir/hair/constant.rs +++ b/src/librustc_mir/hair/constant.rs @@ -49,7 +49,7 @@ crate fn lit_to_const<'tcx>( parse_float(n, fty, neg).map_err(|_| LitToConstError::UnparseableFloat)? 
} LitKind::FloatUnsuffixed(n) => { - let fty = match ty.sty { + let fty = match ty.kind { ty::Float(fty) => fty, _ => bug!() }; diff --git a/src/librustc_mir/hair/cx/block.rs b/src/librustc_mir/hair/cx/block.rs index 9a73842d2f..e9777dab26 100644 --- a/src/librustc_mir/hair/cx/block.rs +++ b/src/librustc_mir/hair/cx/block.rs @@ -5,7 +5,7 @@ use rustc::middle::region; use rustc::hir; use rustc::ty; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::vec::Idx; impl<'tcx> Mirror<'tcx> for &'tcx hir::Block { type Output = Block<'tcx>; @@ -49,7 +49,7 @@ fn mirror_stmts<'a, 'tcx>( for (index, stmt) in stmts.iter().enumerate() { let hir_id = stmt.hir_id; let opt_dxn_ext = cx.region_scope_tree.opt_destruction_scope(hir_id.local_id); - match stmt.node { + match stmt.kind { hir::StmtKind::Expr(ref expr) | hir::StmtKind::Semi(ref expr) => { result.push(StmtRef::Mirror(Box::new(Stmt { @@ -78,12 +78,12 @@ fn mirror_stmts<'a, 'tcx>( if let Some(ty) = &local.ty { if let Some(&user_ty) = cx.tables.user_provided_types().get(ty.hir_id) { debug!("mirror_stmts: user_ty={:?}", user_ty); - pattern = Pattern { + pattern = Pat { ty: pattern.ty, span: pattern.span, - kind: Box::new(PatternKind::AscribeUserType { + kind: Box::new(PatKind::AscribeUserType { ascription: hair::pattern::Ascription { - user_ty: PatternTypeProjection::from_user_type(user_ty), + user_ty: PatTyProj::from_user_type(user_ty), user_ty_span: ty.span, variance: ty::Variance::Covariant, }, diff --git a/src/librustc_mir/hair/cx/expr.rs b/src/librustc_mir/hair/cx/expr.rs index a33d7207ed..7bb96661bb 100644 --- a/src/librustc_mir/hair/cx/expr.rs +++ b/src/librustc_mir/hair/cx/expr.rs @@ -3,7 +3,7 @@ use crate::hair::cx::Cx; use crate::hair::cx::block; use crate::hair::cx::to_ref::ToRef; use crate::hair::util::UserAnnotatedTyHelpers; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::vec::Idx; use rustc::hir::def::{CtorOf, Res, DefKind, CtorKind}; use rustc::mir::interpret::{GlobalId, ErrorHandled, ConstValue}; use rustc::ty::{self, AdtKind, Ty}; @@ -204,7 +204,7 @@ fn make_mirror_unadjusted<'a, 'tcx>( let expr_ty = cx.tables().expr_ty(expr); let temp_lifetime = cx.region_scope_tree.temporary_scope(expr.hir_id.local_id); - let kind = match expr.node { + let kind = match expr.kind { // Here comes the interesting stuff: hir::ExprKind::MethodCall(_, method_span, ref args) => { // Rewrite a.b(c) into UFCS form like Trait::b(a, c) @@ -247,7 +247,7 @@ fn make_mirror_unadjusted<'a, 'tcx>( } } else { let adt_data = if let hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) = - fun.node + fun.kind { // Tuple-like ADTs are represented as ExprKind::Call. We convert them here. expr_ty.ty_adt_def().and_then(|adt_def| { @@ -427,7 +427,7 @@ fn make_mirror_unadjusted<'a, 'tcx>( if cx.tables().is_method_call(expr) { overloaded_operator(cx, expr, vec![arg.to_ref()]) } else { - if let hir::ExprKind::Lit(ref lit) = arg.node { + if let hir::ExprKind::Lit(ref lit) = arg.kind { ExprKind::Literal { literal: cx.const_eval_literal(&lit.node, expr_ty, lit.span, true), user_ty: None, @@ -442,7 +442,7 @@ fn make_mirror_unadjusted<'a, 'tcx>( } hir::ExprKind::Struct(ref qpath, ref fields, ref base) => { - match expr_ty.sty { + match expr_ty.kind { ty::Adt(adt, substs) => { match adt.adt_kind() { AdtKind::Struct | AdtKind::Union => { @@ -505,8 +505,9 @@ fn make_mirror_unadjusted<'a, 'tcx>( hir::ExprKind::Closure(..) 
=> { let closure_ty = cx.tables().expr_ty(expr); - let (def_id, substs, movability) = match closure_ty.sty { - ty::Closure(def_id, substs) => (def_id, UpvarSubsts::Closure(substs), None), + let (def_id, substs, movability) = match closure_ty.kind { + ty::Closure(def_id, substs) => (def_id, + UpvarSubsts::Closure(substs), None), ty::Generator(def_id, substs, movability) => { (def_id, UpvarSubsts::Generator(substs), Some(movability)) } @@ -543,9 +544,9 @@ fn make_mirror_unadjusted<'a, 'tcx>( // Now comes the rote stuff: hir::ExprKind::Repeat(ref v, ref count) => { let def_id = cx.tcx.hir().local_def_id(count.hir_id); - let substs = InternalSubsts::identity_for_item(cx.tcx.global_tcx(), def_id); + let substs = InternalSubsts::identity_for_item(cx.tcx, def_id); let instance = ty::Instance::resolve( - cx.tcx.global_tcx(), + cx.tcx, cx.param_env, def_id, substs, @@ -639,7 +640,7 @@ fn make_mirror_unadjusted<'a, 'tcx>( // } // The correct solution would be to add symbolic computations to miri, // so we wouldn't have to compute and store the actual value - let var = if let hir::ExprKind::Path(ref qpath) = source.node { + let var = if let hir::ExprKind::Path(ref qpath) = source.kind { let res = cx.tables().qpath_res(qpath, source.hir_id); cx .tables() @@ -860,9 +861,9 @@ impl ToBorrowKind for hir::Mutability { } } -fn convert_arm<'a, 'tcx>(cx: &mut Cx<'a, 'tcx>, arm: &'tcx hir::Arm) -> Arm<'tcx> { +fn convert_arm<'tcx>(cx: &mut Cx<'_, 'tcx>, arm: &'tcx hir::Arm) -> Arm<'tcx> { Arm { - patterns: arm.pats.iter().map(|p| cx.pattern_from_hir(p)).collect(), + pattern: cx.pattern_from_hir(&arm.pat), guard: match arm.guard { Some(hir::Guard::If(ref e)) => Some(Guard::If(e.to_ref())), _ => None, @@ -907,7 +908,7 @@ fn convert_path_expr<'a, 'tcx>( let generics = cx.tcx.generics_of(item_def_id); let local_def_id = cx.tcx.hir().local_def_id(hir_id); let index = generics.param_def_id_to_index[&local_def_id]; - let name = cx.tcx.hir().name(hir_id).as_interned_str(); + let name = cx.tcx.hir().name(hir_id); let val = ConstValue::Param(ty::ParamConst::new(index, name)); ExprKind::Literal { literal: cx.tcx.mk_const( @@ -938,7 +939,7 @@ fn convert_path_expr<'a, 'tcx>( let user_provided_type = user_provided_types.get(expr.hir_id).map(|u_ty| *u_ty); debug!("convert_path_expr: user_provided_type={:?}", user_provided_type); let ty = cx.tables().node_type(expr.hir_id); - match ty.sty { + match ty.kind { // A unit struct/variant which is used as a value. // We return a completely different ExprKind here to account for this special case. ty::Adt(adt_def, substs) => { @@ -1001,7 +1002,7 @@ fn convert_var( }); let region = cx.tcx.mk_region(region); - let self_expr = if let ty::Closure(_, closure_substs) = closure_ty.sty { + let self_expr = if let ty::Closure(_, closure_substs) = closure_ty.kind { match cx.infcx.closure_kind(closure_def_id, closure_substs).unwrap() { ty::ClosureKind::Fn => { let ref_closure_ty = cx.tcx.mk_ref(region, @@ -1011,7 +1012,7 @@ fn convert_var( }); Expr { ty: closure_ty, - temp_lifetime: temp_lifetime, + temp_lifetime, span: expr.span, kind: ExprKind::Deref { arg: Expr { @@ -1147,7 +1148,7 @@ fn overloaded_place<'a, 'tcx>( // Reconstruct the output assuming it's a reference with the // same region and mutability as the receiver. This holds for // `Deref(Mut)::Deref(_mut)` and `Index(Mut)::index(_mut)`. 
- let (region, mutbl) = match recv_ty.sty { + let (region, mutbl) = match recv_ty.kind { ty::Ref(region, _, mutbl) => (region, mutbl), _ => span_bug!(expr.span, "overloaded_place: receiver is not a reference"), }; diff --git a/src/librustc_mir/hair/cx/mod.rs b/src/librustc_mir/hair/cx/mod.rs index 740dc2011c..e120b496d3 100644 --- a/src/librustc_mir/hair/cx/mod.rs +++ b/src/librustc_mir/hair/cx/mod.rs @@ -5,14 +5,14 @@ use crate::hair::*; use crate::hair::util::UserAnnotatedTyHelpers; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::vec::Idx; use rustc::hir::def_id::DefId; use rustc::hir::Node; use rustc::middle::region; use rustc::infer::InferCtxt; use rustc::ty::subst::Subst; use rustc::ty::{self, Ty, TyCtxt}; -use rustc::ty::subst::{Kind, InternalSubsts}; +use rustc::ty::subst::{GenericArg, InternalSubsts}; use rustc::ty::layout::VariantIdx; use syntax::ast; use syntax::attr; @@ -83,7 +83,7 @@ impl<'a, 'tcx> Cx<'a, 'tcx> { infcx, root_lint_level: src_id, param_env: tcx.param_env(src_def_id), - identity_substs: InternalSubsts::identity_for_item(tcx.global_tcx(), src_def_id), + identity_substs: InternalSubsts::identity_for_item(tcx, src_def_id), region_scope_tree: tcx.region_scope_tree(src_def_id), tables, constness, @@ -153,23 +153,19 @@ impl<'a, 'tcx> Cx<'a, 'tcx> { } } - pub fn pattern_from_hir(&mut self, p: &hir::Pat) -> Pattern<'tcx> { - let tcx = self.tcx.global_tcx(); - let p = match tcx.hir().get(p.hir_id) { + pub fn pattern_from_hir(&mut self, p: &hir::Pat) -> Pat<'tcx> { + let p = match self.tcx.hir().get(p.hir_id) { Node::Pat(p) | Node::Binding(p) => p, node => bug!("pattern became {:?}", node) }; - Pattern::from_hir(tcx, - self.param_env.and(self.identity_substs), - self.tables(), - p) + Pat::from_hir(self.tcx, self.param_env.and(self.identity_substs), self.tables(), p) } pub fn trait_method(&mut self, trait_def_id: DefId, method_name: Symbol, self_ty: Ty<'tcx>, - params: &[Kind<'tcx>]) + params: &[GenericArg<'tcx>]) -> &'tcx ty::Const<'tcx> { let substs = self.tcx.mk_substs_trait(self_ty, params); for item in self.tcx.associated_items(trait_def_id) { @@ -190,7 +186,7 @@ impl<'a, 'tcx> Cx<'a, 'tcx> { } pub fn needs_drop(&mut self, ty: Ty<'tcx>) -> bool { - ty.needs_drop(self.tcx.global_tcx(), self.param_env) + ty.needs_drop(self.tcx, self.param_env) } pub fn tcx(&self) -> TyCtxt<'tcx> { diff --git a/src/librustc_mir/hair/mod.rs b/src/librustc_mir/hair/mod.rs index 0638cb462f..a76377d24b 100644 --- a/src/librustc_mir/hair/mod.rs +++ b/src/librustc_mir/hair/mod.rs @@ -20,8 +20,8 @@ pub mod cx; mod constant; pub mod pattern; -pub use self::pattern::{BindingMode, Pattern, PatternKind, PatternRange, FieldPattern}; -pub(crate) use self::pattern::PatternTypeProjection; +pub use self::pattern::{BindingMode, Pat, PatKind, PatRange, FieldPat}; +pub(crate) use self::pattern::PatTyProj; mod util; @@ -83,7 +83,7 @@ pub enum StmtKind<'tcx> { /// `let = ...` /// /// if a type is included, it is added as an ascription pattern - pattern: Pattern<'tcx>, + pattern: Pat<'tcx>, /// let pat: ty = ... initializer: Option>, @@ -293,7 +293,7 @@ pub struct FruInfo<'tcx> { #[derive(Clone, Debug)] pub struct Arm<'tcx> { - pub patterns: Vec>, + pub pattern: Pat<'tcx>, pub guard: Option>, pub body: ExprRef<'tcx>, pub lint_level: LintLevel, @@ -301,6 +301,17 @@ pub struct Arm<'tcx> { pub span: Span, } +impl Arm<'tcx> { + // HACK(or_patterns; Centril | dlrobertson): Remove this and + // correctly handle each case in which this method is used. 
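+    //
+    // Returns the top-level pattern(s) of the arm: the alternatives of an
+    // or-pattern, or a one-element slice holding the arm's single pattern.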
+ pub fn top_pats_hack(&self) -> &[Pat<'tcx>] { + match &*self.pattern.kind { + PatKind::Or { pats } => pats, + _ => std::slice::from_ref(&self.pattern), + } + } +} + #[derive(Clone, Debug)] pub enum Guard<'tcx> { If(ExprRef<'tcx>), diff --git a/src/librustc_mir/hair/pattern/_match.rs b/src/librustc_mir/hair/pattern/_match.rs index a6d955f336..8989d2e15d 100644 --- a/src/librustc_mir/hair/pattern/_match.rs +++ b/src/librustc_mir/hair/pattern/_match.rs @@ -1,3 +1,6 @@ +/// Note: most tests relevant to this file can be found (at the time of writing) +/// in src/tests/ui/pattern/usefulness. +/// /// This file includes the logic for exhaustiveness and usefulness checking for /// pattern-matching. Specifically, given a list of patterns for a type, we can /// tell whether: @@ -11,20 +14,24 @@ /// (without being so rigorous). /// /// The core of the algorithm revolves about a "usefulness" check. In particular, we -/// are trying to compute a predicate `U(P, p_{m + 1})` where `P` is a list of patterns -/// of length `m` for a compound (product) type with `n` components (we refer to this as -/// a matrix). `U(P, p_{m + 1})` represents whether, given an existing list of patterns -/// `p_1 ..= p_m`, adding a new pattern will be "useful" (that is, cover previously- +/// are trying to compute a predicate `U(P, p)` where `P` is a list of patterns (we refer to this as +/// a matrix). `U(P, p)` represents whether, given an existing list of patterns +/// `P_1 ..= P_m`, adding a new pattern `p` will be "useful" (that is, cover previously- /// uncovered values of the type). /// /// If we have this predicate, then we can easily compute both exhaustiveness of an /// entire set of patterns and the individual usefulness of each one. /// (a) the set of patterns is exhaustive iff `U(P, _)` is false (i.e., adding a wildcard /// match doesn't increase the number of values we're matching) -/// (b) a pattern `p_i` is not useful if `U(P[0..=(i-1), p_i)` is false (i.e., adding a +/// (b) a pattern `P_i` is not useful if `U(P[0..=(i-1), P_i)` is false (i.e., adding a /// pattern to those that have come before it doesn't increase the number of values /// we're matching). /// +/// During the course of the algorithm, the rows of the matrix won't just be individual patterns, +/// but rather partially-deconstructed patterns in the form of a list of patterns. The paper +/// calls those pattern-vectors, and we will call them pattern-stacks. The same holds for the +/// new pattern `p`. +/// /// For example, say we have the following: /// ``` /// // x: (Option, Result<()>) @@ -34,93 +41,155 @@ /// (None, Err(_)) => {} /// } /// ``` -/// Here, the matrix `P` is 3 x 2 (rows x columns). +/// Here, the matrix `P` starts as: /// [ -/// [Some(true), _], -/// [None, Err(())], -/// [None, Err(_)], +/// [(Some(true), _)], +/// [(None, Err(()))], +/// [(None, Err(_))], /// ] /// We can tell it's not exhaustive, because `U(P, _)` is true (we're not covering -/// `[Some(false), _]`, for instance). In addition, row 3 is not useful, because +/// `[(Some(false), _)]`, for instance). In addition, row 3 is not useful, because /// all the values it covers are already covered by row 2. /// -/// To compute `U`, we must have two other concepts. -/// 1. `S(c, P)` is a "specialized matrix", where `c` is a constructor (like `Some` or -/// `None`). 
You can think of it as filtering `P` to just the rows whose *first* pattern -/// can cover `c` (and expanding OR-patterns into distinct patterns), and then expanding -/// the constructor into all of its components. -/// The specialization of a row vector is computed by `specialize`. +/// A list of patterns can be thought of as a stack, because we are mainly interested in the top of +/// the stack at any given point, and we can pop or apply constructors to get new pattern-stacks. +/// To match the paper, the top of the stack is at the beginning / on the left. /// -/// It is computed as follows. For each row `p_i` of P, we have four cases: -/// 1.1. `p_(i,1) = c(r_1, .., r_a)`. Then `S(c, P)` has a corresponding row: -/// r_1, .., r_a, p_(i,2), .., p_(i,n) -/// 1.2. `p_(i,1) = c'(r_1, .., r_a')` where `c ≠ c'`. Then `S(c, P)` has no -/// corresponding row. -/// 1.3. `p_(i,1) = _`. Then `S(c, P)` has a corresponding row: -/// _, .., _, p_(i,2), .., p_(i,n) -/// 1.4. `p_(i,1) = r_1 | r_2`. Then `S(c, P)` has corresponding rows inlined from: -/// S(c, (r_1, p_(i,2), .., p_(i,n))) -/// S(c, (r_2, p_(i,2), .., p_(i,n))) +/// There are two important operations on pattern-stacks necessary to understand the algorithm: +/// 1. We can pop a given constructor off the top of a stack. This operation is called +/// `specialize`, and is denoted `S(c, p)` where `c` is a constructor (like `Some` or +/// `None`) and `p` a pattern-stack. +/// If the pattern on top of the stack can cover `c`, this removes the constructor and +/// pushes its arguments onto the stack. It also expands OR-patterns into distinct patterns. +/// Otherwise the pattern-stack is discarded. +/// This essentially filters those pattern-stacks whose top covers the constructor `c` and +/// discards the others. /// -/// 2. `D(P)` is a "default matrix". This is used when we know there are missing -/// constructor cases, but there might be existing wildcard patterns, so to check the -/// usefulness of the matrix, we have to check all its *other* components. -/// The default matrix is computed inline in `is_useful`. +/// For example, the first pattern above initially gives a stack `[(Some(true), _)]`. If we +/// pop the tuple constructor, we are left with `[Some(true), _]`, and if we then pop the +/// `Some` constructor we get `[true, _]`. If we had popped `None` instead, we would get +/// nothing back. /// -/// It is computed as follows. For each row `p_i` of P, we have three cases: -/// 1.1. `p_(i,1) = c(r_1, .., r_a)`. Then `D(P)` has no corresponding row. -/// 1.2. `p_(i,1) = _`. Then `D(P)` has a corresponding row: -/// p_(i,2), .., p_(i,n) -/// 1.3. `p_(i,1) = r_1 | r_2`. Then `D(P)` has corresponding rows inlined from: -/// D((r_1, p_(i,2), .., p_(i,n))) -/// D((r_2, p_(i,2), .., p_(i,n))) +/// This returns zero or more new pattern-stacks, as follows. We look at the pattern `p_1` +/// on top of the stack, and we have four cases: +/// 1.1. `p_1 = c(r_1, .., r_a)`, i.e. the top of the stack has constructor `c`. We +/// push onto the stack the arguments of this constructor, and return the result: +/// r_1, .., r_a, p_2, .., p_n +/// 1.2. `p_1 = c'(r_1, .., r_a')` where `c ≠ c'`. We discard the current stack and +/// return nothing. +/// 1.3. `p_1 = _`. We push onto the stack as many wildcards as the constructor `c` has +/// arguments (its arity), and return the resulting stack: +/// _, .., _, p_2, .., p_n +/// 1.4. `p_1 = r_1 | r_2`. 
We expand the OR-pattern and then recurse on each resulting +/// stack: +/// S(c, (r_1, p_2, .., p_n)) +/// S(c, (r_2, p_2, .., p_n)) +/// +/// 2. We can pop a wildcard off the top of the stack. This is called `D(p)`, where `p` is +/// a pattern-stack. +/// This is used when we know there are missing constructor cases, but there might be +/// existing wildcard patterns, so to check the usefulness of the matrix, we have to check +/// all its *other* components. +/// +/// It is computed as follows. We look at the pattern `p_1` on top of the stack, +/// and we have three cases: +/// 1.1. `p_1 = c(r_1, .., r_a)`. We discard the current stack and return nothing. +/// 1.2. `p_1 = _`. We return the rest of the stack: +/// p_2, .., p_n +/// 1.3. `p_1 = r_1 | r_2`. We expand the OR-pattern and then recurse on each resulting +/// stack. +/// D((r_1, p_2, .., p_n)) +/// D((r_2, p_2, .., p_n)) +/// +/// Note that the OR-patterns are not always used directly in Rust, but are used to derive the +/// exhaustive integer matching rules, so they're written here for posterity. +/// +/// Both those operations extend straightforwardly to a list or pattern-stacks, i.e. a matrix, by +/// working row-by-row. Popping a constructor ends up keeping only the matrix rows that start with +/// the given constructor, and popping a wildcard keeps those rows that start with a wildcard. /// -/// Note that the OR-patterns are not always used directly in Rust, but are used to derive -/// the exhaustive integer matching rules, so they're written here for posterity. /// /// The algorithm for computing `U` /// ------------------------------- /// The algorithm is inductive (on the number of columns: i.e., components of tuple patterns). /// That means we're going to check the components from left-to-right, so the algorithm -/// operates principally on the first component of the matrix and new pattern `p_{m + 1}`. +/// operates principally on the first component of the matrix and new pattern-stack `p`. /// This algorithm is realised in the `is_useful` function. /// /// Base case. (`n = 0`, i.e., an empty tuple pattern) /// - If `P` already contains an empty pattern (i.e., if the number of patterns `m > 0`), -/// then `U(P, p_{m + 1})` is false. -/// - Otherwise, `P` must be empty, so `U(P, p_{m + 1})` is true. +/// then `U(P, p)` is false. +/// - Otherwise, `P` must be empty, so `U(P, p)` is true. /// /// Inductive step. (`n > 0`, i.e., whether there's at least one column /// [which may then be expanded into further columns later]) -/// We're going to match on the new pattern, `p_{m + 1}`. -/// - If `p_{m + 1} == c(r_1, .., r_a)`, then we have a constructor pattern. -/// Thus, the usefulness of `p_{m + 1}` can be reduced to whether it is useful when -/// we ignore all the patterns in `P` that involve other constructors. This is where -/// `S(c, P)` comes in: -/// `U(P, p_{m + 1}) := U(S(c, P), S(c, p_{m + 1}))` +/// We're going to match on the top of the new pattern-stack, `p_1`. +/// - If `p_1 == c(r_1, .., r_a)`, i.e. we have a constructor pattern. +/// Then, the usefulness of `p_1` can be reduced to whether it is useful when +/// we ignore all the patterns in the first column of `P` that involve other constructors. +/// This is where `S(c, P)` comes in: +/// `U(P, p) := U(S(c, P), S(c, p))` /// This special case is handled in `is_useful_specialized`. 
-/// - If `p_{m + 1} == _`, then we have two more cases: -/// + All the constructors of the first component of the type exist within -/// all the rows (after having expanded OR-patterns). In this case: -/// `U(P, p_{m + 1}) := ∨(k ϵ constructors) U(S(k, P), S(k, p_{m + 1}))` -/// I.e., the pattern `p_{m + 1}` is only useful when all the constructors are -/// present *if* its later components are useful for the respective constructors -/// covered by `p_{m + 1}` (usually a single constructor, but all in the case of `_`). -/// + Some constructors are not present in the existing rows (after having expanded -/// OR-patterns). However, there might be wildcard patterns (`_`) present. Thus, we -/// are only really concerned with the other patterns leading with wildcards. This is -/// where `D` comes in: -/// `U(P, p_{m + 1}) := U(D(P), p_({m + 1},2), .., p_({m + 1},n))` -/// - If `p_{m + 1} == r_1 | r_2`, then the usefulness depends on each separately: -/// `U(P, p_{m + 1}) := U(P, (r_1, p_({m + 1},2), .., p_({m + 1},n))) -/// || U(P, (r_2, p_({m + 1},2), .., p_({m + 1},n)))` +/// +/// For example, if `P` is: +/// [ +/// [Some(true), _], +/// [None, 0], +/// ] +/// and `p` is [Some(false), 0], then we don't care about row 2 since we know `p` only +/// matches values that row 2 doesn't. For row 1 however, we need to dig into the +/// arguments of `Some` to know whether some new value is covered. So we compute +/// `U([[true, _]], [false, 0])`. +/// +/// - If `p_1 == _`, then we look at the list of constructors that appear in the first +/// component of the rows of `P`: +/// + If there are some constructors that aren't present, then we might think that the +/// wildcard `_` is useful, since it covers those constructors that weren't covered +/// before. +/// That's almost correct, but only works if there were no wildcards in those first +/// components. So we need to check that `p` is useful with respect to the rows that +/// start with a wildcard, if there are any. This is where `D` comes in: +/// `U(P, p) := U(D(P), D(p))` +/// +/// For example, if `P` is: +/// [ +/// [_, true, _], +/// [None, false, 1], +/// ] +/// and `p` is [_, false, _], the `Some` constructor doesn't appear in `P`. So if we +/// only had row 2, we'd know that `p` is useful. However row 1 starts with a +/// wildcard, so we need to check whether `U([[true, _]], [false, 1])`. +/// +/// + Otherwise, all possible constructors (for the relevant type) are present. In this +/// case we must check whether the wildcard pattern covers any unmatched value. For +/// that, we can think of the `_` pattern as a big OR-pattern that covers all +/// possible constructors. For `Option`, that would mean `_ = None | Some(_)` for +/// example. The wildcard pattern is useful in this case if it is useful when +/// specialized to one of the possible constructors. So we compute: +/// `U(P, p) := ∃(k ϵ constructors) U(S(k, P), S(k, p))` +/// +/// For example, if `P` is: +/// [ +/// [Some(true), _], +/// [None, false], +/// ] +/// and `p` is [_, false], both `None` and `Some` constructors appear in the first +/// components of `P`. We will therefore try popping both constructors in turn: we +/// compute U([[true, _]], [_, false]) for the `Some` constructor, and U([[false]], +/// [false]) for the `None` constructor. The first case returns true, so we know that +/// `p` is useful for `P`. Indeed, it matches `[Some(false), _]` that wasn't matched +/// before. 
+/// +/// - If `p_1 == r_1 | r_2`, then the usefulness depends on each `r_i` separately: +/// `U(P, p) := U(P, (r_1, p_2, .., p_n)) +/// || U(P, (r_2, p_2, .., p_n))` /// /// Modifications to the algorithm /// ------------------------------ /// The algorithm in the paper doesn't cover some of the special cases that arise in Rust, for /// example uninhabited types and variable-length slice patterns. These are drawn attention to -/// throughout the code below. I'll make a quick note here about how exhaustive integer matching -/// is accounted for, though. +/// throughout the code below. I'll make a quick note here about how exhaustive integer matching is +/// accounted for, though. /// /// Exhaustive integer matching /// --------------------------- @@ -150,29 +219,30 @@ /// invalid, because we want a disjunction over every *integer* in each range, not just a /// disjunction over every range. This is a bit more tricky to deal with: essentially we need /// to form equivalence classes of subranges of the constructor range for which the behaviour -/// of the matrix `P` and new pattern `p_{m + 1}` are the same. This is described in more +/// of the matrix `P` and new pattern `p` are the same. This is described in more /// detail in `split_grouped_constructors`. /// + If some constructors are missing from the matrix, it turns out we don't need to do /// anything special (because we know none of the integers are actually wildcards: i.e., we /// can't span wildcards using ranges). - use self::Constructor::*; use self::Usefulness::*; use self::WitnessPreference::*; use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::vec::Idx; -use super::{FieldPattern, Pattern, PatternKind, PatternRange}; -use super::{PatternFoldable, PatternFolder, compare_const_vals}; +use super::{compare_const_vals, PatternFoldable, PatternFolder}; +use super::{FieldPat, Pat, PatKind, PatRange}; use rustc::hir::def_id::DefId; -use rustc::hir::RangeEnd; -use rustc::ty::{self, Ty, TyCtxt, TypeFoldable, Const}; -use rustc::ty::layout::{Integer, IntegerExt, VariantIdx, Size}; +use rustc::hir::{HirId, RangeEnd}; +use rustc::ty::layout::{Integer, IntegerExt, Size, VariantIdx}; +use rustc::ty::{self, Const, Ty, TyCtxt, TypeFoldable}; +use rustc::lint; +use rustc::mir::interpret::{truncate, AllocId, ConstValue, Pointer, Scalar}; use rustc::mir::Field; -use rustc::mir::interpret::{ConstValue, Scalar, truncate, AllocId, Pointer}; +use rustc::util::captures::Captures; use rustc::util::common::ErrorReported; use syntax::attr::{SignedInt, UnsignedInt}; @@ -180,18 +250,16 @@ use syntax_pos::{Span, DUMMY_SP}; use arena::TypedArena; -use smallvec::{SmallVec, smallvec}; -use std::cmp::{self, Ordering, min, max}; +use smallvec::{smallvec, SmallVec}; +use std::cmp::{self, max, min, Ordering}; +use std::convert::TryInto; use std::fmt; use std::iter::{FromIterator, IntoIterator}; use std::ops::RangeInclusive; use std::u128; -use std::convert::TryInto; -pub fn expand_pattern<'a, 'tcx>(cx: &MatchCheckCtxt<'a, 'tcx>, pat: Pattern<'tcx>) - -> &'a Pattern<'tcx> -{ - cx.pattern_arena.alloc(LiteralExpander { tcx: cx.tcx }.fold_pattern(&pat)) +pub fn expand_pattern<'a, 'tcx>(cx: &MatchCheckCtxt<'a, 'tcx>, pat: Pat<'tcx>) -> Pat<'tcx> { + LiteralExpander { tcx: cx.tcx }.fold_pattern(&pat) } struct LiteralExpander<'tcx> { @@ -213,15 +281,12 @@ impl LiteralExpander<'tcx> { crty: Ty<'tcx>, ) -> ConstValue<'tcx> { debug!("fold_const_value_deref {:?} {:?} {:?}", val, rty, crty); - match (val, &crty.sty, 
&rty.sty) { + match (val, &crty.kind, &rty.kind) { // the easy case, deref a reference (ConstValue::Scalar(Scalar::Ptr(p)), x, y) if x == y => { let alloc = self.tcx.alloc_map.lock().unwrap_memory(p.alloc_id); - ConstValue::ByRef { - alloc, - offset: p.offset, - } - }, + ConstValue::ByRef { alloc, offset: p.offset } + } // unsize array to slice if pattern is array but match value or other patterns are slice (ConstValue::Scalar(Scalar::Ptr(p)), ty::Array(t, n), ty::Slice(u)) => { assert_eq!(t, u); @@ -230,12 +295,11 @@ impl LiteralExpander<'tcx> { start: p.offset.bytes().try_into().unwrap(), end: n.eval_usize(self.tcx, ty::ParamEnv::empty()).try_into().unwrap(), } - }, + } // fat pointers stay the same - | (ConstValue::Slice { .. }, _, _) + (ConstValue::Slice { .. }, _, _) | (_, ty::Slice(_), ty::Slice(_)) - | (_, ty::Str, ty::Str) - => val, + | (_, ty::Str, ty::Str) => val, // FIXME(oli-obk): this is reachable for `const FOO: &&&u32 = &&&42;` being used _ => bug!("cannot deref {:#?}, {} -> {}", val, crty, rty), } @@ -243,89 +307,189 @@ impl LiteralExpander<'tcx> { } impl PatternFolder<'tcx> for LiteralExpander<'tcx> { - fn fold_pattern(&mut self, pat: &Pattern<'tcx>) -> Pattern<'tcx> { - debug!("fold_pattern {:?} {:?} {:?}", pat, pat.ty.sty, pat.kind); - match (&pat.ty.sty, &*pat.kind) { + fn fold_pattern(&mut self, pat: &Pat<'tcx>) -> Pat<'tcx> { + debug!("fold_pattern {:?} {:?} {:?}", pat, pat.ty.kind, pat.kind); + match (&pat.ty.kind, &*pat.kind) { ( &ty::Ref(_, rty, _), - &PatternKind::Constant { value: Const { - val, - ty: ty::TyS { sty: ty::Ref(_, crty, _), .. }, - } }, - ) => { - Pattern { - ty: pat.ty, - span: pat.span, - kind: box PatternKind::Deref { - subpattern: Pattern { - ty: rty, - span: pat.span, - kind: box PatternKind::Constant { value: self.tcx.mk_const(Const { + &PatKind::Constant { + value: Const { val, ty: ty::TyS { kind: ty::Ref(_, crty, _), .. } }, + }, + ) => Pat { + ty: pat.ty, + span: pat.span, + kind: box PatKind::Deref { + subpattern: Pat { + ty: rty, + span: pat.span, + kind: box PatKind::Constant { + value: self.tcx.mk_const(Const { val: self.fold_const_value_deref(*val, rty, crty), ty: rty, - }) }, - } - } - } - } - (_, &PatternKind::Binding { subpattern: Some(ref s), .. }) => { - s.fold_with(self) - } - _ => pat.super_fold_with(self) + }), + }, + }, + }, + }, + (_, &PatKind::Binding { subpattern: Some(ref s), .. }) => s.fold_with(self), + _ => pat.super_fold_with(self), } } } -impl<'tcx> Pattern<'tcx> { +impl<'tcx> Pat<'tcx> { fn is_wildcard(&self) -> bool { match *self.kind { - PatternKind::Binding { subpattern: None, .. } | PatternKind::Wild => - true, - _ => false + PatKind::Binding { subpattern: None, .. } | PatKind::Wild => true, + _ => false, } } } -/// A 2D matrix. Nx1 matrices are very common, which is why `SmallVec[_; 2]` -/// works well for each row. -pub struct Matrix<'p, 'tcx>(Vec; 2]>>); +/// A row of a matrix. Rows of len 1 are very common, which is why `SmallVec[_; 2]` +/// works well. 
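+///
+/// The head of a row is its first pattern; the `specialize_*` methods pop it
+/// and, when the popped constructor matches, push that constructor's
+/// subpatterns in its place.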
+#[derive(Debug, Clone)] +pub struct PatStack<'p, 'tcx>(SmallVec<[&'p Pat<'tcx>; 2]>); + +impl<'p, 'tcx> PatStack<'p, 'tcx> { + pub fn from_pattern(pat: &'p Pat<'tcx>) -> Self { + PatStack(smallvec![pat]) + } + + fn from_vec(vec: SmallVec<[&'p Pat<'tcx>; 2]>) -> Self { + PatStack(vec) + } + + fn from_slice(s: &[&'p Pat<'tcx>]) -> Self { + PatStack(SmallVec::from_slice(s)) + } + + fn is_empty(&self) -> bool { + self.0.is_empty() + } + + fn len(&self) -> usize { + self.0.len() + } + + fn head(&self) -> &'p Pat<'tcx> { + self.0[0] + } + + fn to_tail(&self) -> Self { + PatStack::from_slice(&self.0[1..]) + } + + fn iter(&self) -> impl Iterator> { + self.0.iter().map(|p| *p) + } + + /// This computes `D(self)`. See top of the file for explanations. + fn specialize_wildcard(&self) -> Option { + if self.head().is_wildcard() { Some(self.to_tail()) } else { None } + } + + /// This computes `S(constructor, self)`. See top of the file for explanations. + fn specialize_constructor<'a, 'q>( + &self, + cx: &mut MatchCheckCtxt<'a, 'tcx>, + constructor: &Constructor<'tcx>, + ctor_wild_subpatterns: &[&'q Pat<'tcx>], + ) -> Option> + where + 'a: 'q, + 'p: 'q, + { + let new_heads = specialize_one_pattern(cx, self.head(), constructor, ctor_wild_subpatterns); + new_heads.map(|mut new_head| { + new_head.0.extend_from_slice(&self.0[1..]); + new_head + }) + } +} + +impl<'p, 'tcx> Default for PatStack<'p, 'tcx> { + fn default() -> Self { + PatStack(smallvec![]) + } +} + +impl<'p, 'tcx> FromIterator<&'p Pat<'tcx>> for PatStack<'p, 'tcx> { + fn from_iter(iter: T) -> Self + where + T: IntoIterator>, + { + PatStack(iter.into_iter().collect()) + } +} + +/// A 2D matrix. +pub struct Matrix<'p, 'tcx>(Vec>); impl<'p, 'tcx> Matrix<'p, 'tcx> { pub fn empty() -> Self { Matrix(vec![]) } - pub fn push(&mut self, row: SmallVec<[&'p Pattern<'tcx>; 2]>) { + pub fn push(&mut self, row: PatStack<'p, 'tcx>) { self.0.push(row) } + + /// Iterate over the first component of each row + fn heads<'a>(&'a self) -> impl Iterator> + Captures<'p> { + self.0.iter().map(|r| r.head()) + } + + /// This computes `D(self)`. See top of the file for explanations. + fn specialize_wildcard(&self) -> Self { + self.0.iter().filter_map(|r| r.specialize_wildcard()).collect() + } + + /// This computes `S(constructor, self)`. See top of the file for explanations. + fn specialize_constructor<'a, 'q>( + &self, + cx: &mut MatchCheckCtxt<'a, 'tcx>, + constructor: &Constructor<'tcx>, + ctor_wild_subpatterns: &[&'q Pat<'tcx>], + ) -> Matrix<'q, 'tcx> + where + 'a: 'q, + 'p: 'q, + { + Matrix( + self.0 + .iter() + .filter_map(|r| r.specialize_constructor(cx, constructor, ctor_wild_subpatterns)) + .collect(), + ) + } } /// Pretty-printer for matrices of patterns, example: -/// ++++++++++++++++++++++++++ -/// + _ + [] + -/// ++++++++++++++++++++++++++ -/// + true + [First] + -/// ++++++++++++++++++++++++++ -/// + true + [Second(true)] + -/// ++++++++++++++++++++++++++ -/// + false + [_] + -/// ++++++++++++++++++++++++++ -/// + _ + [_, _, ..tail] + -/// ++++++++++++++++++++++++++ +/// +++++++++++++++++++++++++++++ +/// + _ + [] + +/// +++++++++++++++++++++++++++++ +/// + true + [First] + +/// +++++++++++++++++++++++++++++ +/// + true + [Second(true)] + +/// +++++++++++++++++++++++++++++ +/// + false + [_] + +/// +++++++++++++++++++++++++++++ +/// + _ + [_, _, tail @ ..] 
+ +/// +++++++++++++++++++++++++++++ impl<'p, 'tcx> fmt::Debug for Matrix<'p, 'tcx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "\n")?; let &Matrix(ref m) = self; - let pretty_printed_matrix: Vec> = m.iter().map(|row| { - row.iter().map(|pat| format!("{:?}", pat)).collect() - }).collect(); + let pretty_printed_matrix: Vec> = + m.iter().map(|row| row.iter().map(|pat| format!("{:?}", pat)).collect()).collect(); let column_count = m.iter().map(|row| row.len()).max().unwrap_or(0); assert!(m.iter().all(|row| row.len() == column_count)); - let column_widths: Vec = (0..column_count).map(|col| { - pretty_printed_matrix.iter().map(|row| row[col].len()).max().unwrap_or(0) - }).collect(); + let column_widths: Vec = (0..column_count) + .map(|col| pretty_printed_matrix.iter().map(|row| row[col].len()).max().unwrap_or(0)) + .collect(); let total_width = column_widths.iter().cloned().sum::() + column_count * 3 + 1; let br = "+".repeat(total_width); @@ -344,9 +508,10 @@ impl<'p, 'tcx> fmt::Debug for Matrix<'p, 'tcx> { } } -impl<'p, 'tcx> FromIterator; 2]>> for Matrix<'p, 'tcx> { +impl<'p, 'tcx> FromIterator> for Matrix<'p, 'tcx> { fn from_iter(iter: T) -> Self - where T: IntoIterator; 2]>> + where + T: IntoIterator>, { Matrix(iter.into_iter().collect()) } @@ -362,8 +527,8 @@ pub struct MatchCheckCtxt<'a, 'tcx> { /// statement. pub module: DefId, param_env: ty::ParamEnv<'tcx>, - pub pattern_arena: &'a TypedArena>, - pub byte_array_map: FxHashMap<*const Pattern<'tcx>, Vec<&'a Pattern<'tcx>>>, + pub pattern_arena: &'a TypedArena>, + pub byte_array_map: FxHashMap<*const Pat<'tcx>, Vec<&'a Pat<'tcx>>>, } impl<'a, 'tcx> MatchCheckCtxt<'a, 'tcx> { @@ -395,32 +560,22 @@ impl<'a, 'tcx> MatchCheckCtxt<'a, 'tcx> { } } - fn is_non_exhaustive_variant<'p>(&self, pattern: &'p Pattern<'tcx>) -> bool { - match *pattern.kind { - PatternKind::Variant { adt_def, variant_index, .. } => { - let ref variant = adt_def.variants[variant_index]; - variant.is_field_list_non_exhaustive() - } - _ => false, - } - } - fn is_non_exhaustive_enum(&self, ty: Ty<'tcx>) -> bool { - match ty.sty { + match ty.kind { ty::Adt(adt_def, ..) => adt_def.is_variant_list_non_exhaustive(), _ => false, } } fn is_local(&self, ty: Ty<'tcx>) -> bool { - match ty.sty { + match ty.kind { ty::Adt(adt_def, ..) => adt_def.did.is_local(), _ => false, } } } -#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug)] enum Constructor<'tcx> { /// The constructor of all patterns that don't vary by constructor, /// e.g., struct patterns and fixed-length arrays. @@ -428,43 +583,294 @@ enum Constructor<'tcx> { /// Enum variants. Variant(DefId), /// Literal values. - ConstantValue(&'tcx ty::Const<'tcx>), + ConstantValue(&'tcx ty::Const<'tcx>, Span), /// Ranges of literal values (`2..=5` and `2..5`). - ConstantRange(u128, u128, Ty<'tcx>, RangeEnd), + ConstantRange(u128, u128, Ty<'tcx>, RangeEnd, Span), /// Array patterns of length n. Slice(u64), } +// Ignore spans when comparing, they don't carry semantic information as they are only for lints. 
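+//
+// A derived `PartialEq` would also compare the `Span` fields added above, so
+// equality is implemented by hand and skips them.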
+impl<'tcx> std::cmp::PartialEq for Constructor<'tcx> { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Constructor::Single, Constructor::Single) => true, + (Constructor::Variant(a), Constructor::Variant(b)) => a == b, + (Constructor::ConstantValue(a, _), Constructor::ConstantValue(b, _)) => a == b, + ( + Constructor::ConstantRange(a_start, a_end, a_ty, a_range_end, _), + Constructor::ConstantRange(b_start, b_end, b_ty, b_range_end, _), + ) => a_start == b_start && a_end == b_end && a_ty == b_ty && a_range_end == b_range_end, + (Constructor::Slice(a), Constructor::Slice(b)) => a == b, + _ => false, + } + } +} + impl<'tcx> Constructor<'tcx> { + fn is_slice(&self) -> bool { + match self { + Slice { .. } => true, + _ => false, + } + } + fn variant_index_for_adt<'a>( &self, cx: &MatchCheckCtxt<'a, 'tcx>, adt: &'tcx ty::AdtDef, ) -> VariantIdx { match self { - &Variant(id) => adt.variant_index_with_id(id), - &Single => { + Variant(id) => adt.variant_index_with_id(*id), + Single => { assert!(!adt.is_enum()); VariantIdx::new(0) } - &ConstantValue(c) => crate::const_eval::const_variant_index(cx.tcx, cx.param_env, c), - _ => bug!("bad constructor {:?} for adt {:?}", self, adt) + ConstantValue(c, _) => crate::const_eval::const_variant_index(cx.tcx, cx.param_env, c), + _ => bug!("bad constructor {:?} for adt {:?}", self, adt), } } + + fn display(&self, tcx: TyCtxt<'tcx>) -> String { + match self { + Constructor::ConstantValue(val, _) => format!("{}", val), + Constructor::ConstantRange(lo, hi, ty, range_end, _) => { + // Get the right sign on the output: + let ty = ty::ParamEnv::empty().and(*ty); + format!( + "{}{}{}", + ty::Const::from_bits(tcx, *lo, ty), + range_end, + ty::Const::from_bits(tcx, *hi, ty), + ) + } + Constructor::Slice(val) => format!("[{}]", val), + _ => bug!("bad constructor being displayed: `{:?}", self), + } + } + + // Returns the set of constructors covered by `self` but not by + // anything in `other_ctors`. + fn subtract_ctors( + &self, + tcx: TyCtxt<'tcx>, + param_env: ty::ParamEnv<'tcx>, + other_ctors: &Vec>, + ) -> Vec> { + let mut refined_ctors = vec![self.clone()]; + for other_ctor in other_ctors { + if other_ctor == self { + // If a constructor appears in a `match` arm, we can + // eliminate it straight away. + refined_ctors = vec![] + } else if let Some(interval) = IntRange::from_ctor(tcx, param_env, other_ctor) { + // Refine the required constructors for the type by subtracting + // the range defined by the current constructor pattern. + refined_ctors = interval.subtract_from(tcx, param_env, refined_ctors); + } + + // If the constructor patterns that have been considered so far + // already cover the entire range of values, then we know the + // constructor is not missing, and we can move on to the next one. + if refined_ctors.is_empty() { + break; + } + } + + // If a constructor has not been matched, then it is missing. + // We add `refined_ctors` instead of `self`, because then we can + // provide more detailed error information about precisely which + // ranges have been omitted. + refined_ctors + } + + /// This returns one wildcard pattern for each argument to this constructor. + fn wildcard_subpatterns<'a>( + &self, + cx: &MatchCheckCtxt<'a, 'tcx>, + ty: Ty<'tcx>, + ) -> impl Iterator> + DoubleEndedIterator { + constructor_sub_pattern_tys(cx, self, ty).into_iter().map(|ty| Pat { + ty, + span: DUMMY_SP, + kind: box PatKind::Wild, + }) + } + + /// This computes the arity of a constructor. 
The arity of a constructor + /// is how many subpattern patterns of that constructor should be expanded to. + /// + /// For instance, a tuple pattern `(_, 42, Some([]))` has the arity of 3. + /// A struct pattern's arity is the number of fields it contains, etc. + fn arity<'a>(&self, cx: &MatchCheckCtxt<'a, 'tcx>, ty: Ty<'tcx>) -> u64 { + debug!("Constructor::arity({:#?}, {:?})", self, ty); + match ty.kind { + ty::Tuple(ref fs) => fs.len() as u64, + ty::Slice(..) | ty::Array(..) => match *self { + Slice(length) => length, + ConstantValue(..) => 0, + _ => bug!("bad slice pattern {:?} {:?}", self, ty), + }, + ty::Ref(..) => 1, + ty::Adt(adt, _) => { + adt.variants[self.variant_index_for_adt(cx, adt)].fields.len() as u64 + } + _ => 0, + } + } + + /// Apply a constructor to a list of patterns, yielding a new pattern. `pats` + /// must have as many elements as this constructor's arity. + /// + /// Examples: + /// `self`: `Constructor::Single` + /// `ty`: `(u32, u32, u32)` + /// `pats`: `[10, 20, _]` + /// returns `(10, 20, _)` + /// + /// `self`: `Constructor::Variant(Option::Some)` + /// `ty`: `Option` + /// `pats`: `[false]` + /// returns `Some(false)` + fn apply<'a>( + &self, + cx: &MatchCheckCtxt<'a, 'tcx>, + ty: Ty<'tcx>, + pats: impl IntoIterator>, + ) -> Pat<'tcx> { + let mut subpatterns = pats.into_iter(); + let pat = match ty.kind { + ty::Adt(..) | ty::Tuple(..) => { + let subpatterns = subpatterns + .enumerate() + .map(|(i, p)| FieldPat { field: Field::new(i), pattern: p }) + .collect(); + + if let ty::Adt(adt, substs) = ty.kind { + if adt.is_enum() { + PatKind::Variant { + adt_def: adt, + substs, + variant_index: self.variant_index_for_adt(cx, adt), + subpatterns, + } + } else { + PatKind::Leaf { subpatterns } + } + } else { + PatKind::Leaf { subpatterns } + } + } + + ty::Ref(..) => PatKind::Deref { subpattern: subpatterns.nth(0).unwrap() }, + + ty::Slice(_) | ty::Array(..) => { + PatKind::Slice { prefix: subpatterns.collect(), slice: None, suffix: vec![] } + } + + _ => match *self { + ConstantValue(value, _) => PatKind::Constant { value }, + ConstantRange(lo, hi, ty, end, _) => PatKind::Range(PatRange { + lo: ty::Const::from_bits(cx.tcx, lo, ty::ParamEnv::empty().and(ty)), + hi: ty::Const::from_bits(cx.tcx, hi, ty::ParamEnv::empty().and(ty)), + end, + }), + _ => PatKind::Wild, + }, + }; + + Pat { ty, span: DUMMY_SP, kind: Box::new(pat) } + } + + /// Like `apply`, but where all the subpatterns are wildcards `_`. 
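The examples in the doc comment above can be played out with a tiny stand-alone model, using strings in place of `Pat<'tcx>` and a made-up `ToyCtor` type: a constructor plus exactly `arity` sub-patterns is folded back into a single pattern.

```rust
enum ToyCtor {
    Tuple(usize),          // arity = n
    Variant(&'static str), // e.g. "Some"; arity fixed to 1 here for simplicity
}

fn apply(ctor: &ToyCtor, subpatterns: Vec<String>) -> String {
    match ctor {
        ToyCtor::Tuple(n) => {
            // The caller must supply exactly `arity` sub-patterns.
            assert_eq!(subpatterns.len(), *n);
            format!("({})", subpatterns.join(", "))
        }
        ToyCtor::Variant(name) => {
            assert_eq!(subpatterns.len(), 1);
            format!("{}({})", name, subpatterns[0])
        }
    }
}

fn main() {
    let tuple = apply(&ToyCtor::Tuple(3), vec!["10".into(), "20".into(), "_".into()]);
    assert_eq!(tuple, "(10, 20, _)");
    let some = apply(&ToyCtor::Variant("Some"), vec!["false".into()]);
    assert_eq!(some, "Some(false)");
}
```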
+ fn apply_wildcards<'a>(&self, cx: &MatchCheckCtxt<'a, 'tcx>, ty: Ty<'tcx>) -> Pat<'tcx> { + let subpatterns = self.wildcard_subpatterns(cx, ty).rev(); + self.apply(cx, ty, subpatterns) + } } #[derive(Clone, Debug)] pub enum Usefulness<'tcx> { Useful, UsefulWithWitness(Vec>), - NotUseful + NotUseful, } impl<'tcx> Usefulness<'tcx> { + fn new_useful(preference: WitnessPreference) -> Self { + match preference { + ConstructWitness => UsefulWithWitness(vec![Witness(vec![])]), + LeaveOutWitness => Useful, + } + } + fn is_useful(&self) -> bool { match *self { NotUseful => false, - _ => true + _ => true, + } + } + + fn apply_constructor( + self, + cx: &MatchCheckCtxt<'_, 'tcx>, + ctor: &Constructor<'tcx>, + ty: Ty<'tcx>, + ) -> Self { + match self { + UsefulWithWitness(witnesses) => UsefulWithWitness( + witnesses + .into_iter() + .map(|witness| witness.apply_constructor(cx, &ctor, ty)) + .collect(), + ), + x => x, + } + } + + fn apply_wildcard(self, ty: Ty<'tcx>) -> Self { + match self { + UsefulWithWitness(witnesses) => { + let wild = Pat { ty, span: DUMMY_SP, kind: box PatKind::Wild }; + UsefulWithWitness( + witnesses + .into_iter() + .map(|mut witness| { + witness.0.push(wild.clone()); + witness + }) + .collect(), + ) + } + x => x, + } + } + + fn apply_missing_ctors( + self, + cx: &MatchCheckCtxt<'_, 'tcx>, + ty: Ty<'tcx>, + missing_ctors: &MissingConstructors<'tcx>, + ) -> Self { + match self { + UsefulWithWitness(witnesses) => { + let new_patterns: Vec<_> = + missing_ctors.iter().map(|ctor| ctor.apply_wildcards(cx, ty)).collect(); + // Add the new patterns to each witness + UsefulWithWitness( + witnesses + .into_iter() + .flat_map(|witness| { + new_patterns.iter().map(move |pat| { + let mut witness = witness.clone(); + witness.0.push(pat.clone()); + witness + }) + }) + .collect(), + ) + } + x => x, } } } @@ -472,13 +878,14 @@ impl<'tcx> Usefulness<'tcx> { #[derive(Copy, Clone, Debug)] pub enum WitnessPreference { ConstructWitness, - LeaveOutWitness + LeaveOutWitness, } #[derive(Copy, Clone, Debug)] -struct PatternContext<'tcx> { +struct PatCtxt<'tcx> { ty: Ty<'tcx>, max_slice_length: u64, + span: Span, } /// A witness of non-exhaustiveness for error reporting, represented @@ -514,32 +921,14 @@ struct PatternContext<'tcx> { /// /// The final `Pair(Some(_), true)` is then the resulting witness. #[derive(Clone, Debug)] -pub struct Witness<'tcx>(Vec>); +pub struct Witness<'tcx>(Vec>); impl<'tcx> Witness<'tcx> { - pub fn single_pattern(self) -> Pattern<'tcx> { + pub fn single_pattern(self) -> Pat<'tcx> { assert_eq!(self.0.len(), 1); self.0.into_iter().next().unwrap() } - fn push_wild_constructor<'a>( - mut self, - cx: &MatchCheckCtxt<'a, 'tcx>, - ctor: &Constructor<'tcx>, - ty: Ty<'tcx>) - -> Self - { - let sub_pattern_tys = constructor_sub_pattern_tys(cx, ctor, ty); - self.0.extend(sub_pattern_tys.into_iter().map(|ty| { - Pattern { - ty, - span: DUMMY_SP, - kind: box PatternKind::Wild, - } - })); - self.apply_constructor(cx, ctor, ty) - } - /// Constructs a partial witness for a pattern given a list of /// patterns expanded by the specialization step. 
/// @@ -555,73 +944,18 @@ impl<'tcx> Witness<'tcx> { /// pats: [(false, "foo"), 42] => X { a: (false, "foo"), b: 42 } fn apply_constructor<'a>( mut self, - cx: &MatchCheckCtxt<'a,'tcx>, + cx: &MatchCheckCtxt<'a, 'tcx>, ctor: &Constructor<'tcx>, - ty: Ty<'tcx>) - -> Self - { - let arity = constructor_arity(cx, ctor, ty); + ty: Ty<'tcx>, + ) -> Self { + let arity = ctor.arity(cx, ty); let pat = { let len = self.0.len() as u64; - let mut pats = self.0.drain((len - arity) as usize..).rev(); - - match ty.sty { - ty::Adt(..) | - ty::Tuple(..) => { - let pats = pats.enumerate().map(|(i, p)| { - FieldPattern { - field: Field::new(i), - pattern: p - } - }).collect(); - - if let ty::Adt(adt, substs) = ty.sty { - if adt.is_enum() { - PatternKind::Variant { - adt_def: adt, - substs, - variant_index: ctor.variant_index_for_adt(cx, adt), - subpatterns: pats - } - } else { - PatternKind::Leaf { subpatterns: pats } - } - } else { - PatternKind::Leaf { subpatterns: pats } - } - } - - ty::Ref(..) => { - PatternKind::Deref { subpattern: pats.nth(0).unwrap() } - } - - ty::Slice(_) | ty::Array(..) => { - PatternKind::Slice { - prefix: pats.collect(), - slice: None, - suffix: vec![] - } - } - - _ => { - match *ctor { - ConstantValue(value) => PatternKind::Constant { value }, - ConstantRange(lo, hi, ty, end) => PatternKind::Range(PatternRange { - lo: ty::Const::from_bits(cx.tcx, lo, ty::ParamEnv::empty().and(ty)), - hi: ty::Const::from_bits(cx.tcx, hi, ty::ParamEnv::empty().and(ty)), - end, - }), - _ => PatternKind::Wild, - } - } - } + let pats = self.0.drain((len - arity) as usize..).rev(); + ctor.apply(cx, ty, pats) }; - self.0.push(Pattern { - ty, - span: DUMMY_SP, - kind: Box::new(pat), - }); + self.0.push(pat); self } @@ -636,53 +970,53 @@ impl<'tcx> Witness<'tcx> { /// `Option`, we do not include `Some(_)` in the returned list of constructors. fn all_constructors<'a, 'tcx>( cx: &mut MatchCheckCtxt<'a, 'tcx>, - pcx: PatternContext<'tcx>, + pcx: PatCtxt<'tcx>, ) -> Vec> { debug!("all_constructors({:?})", pcx.ty); - let ctors = match pcx.ty.sty { - ty::Bool => { - [true, false].iter().map(|&b| { - ConstantValue(ty::Const::from_bool(cx.tcx, b)) - }).collect() - } + let ctors = match pcx.ty.kind { + ty::Bool => [true, false] + .iter() + .map(|&b| ConstantValue(ty::Const::from_bool(cx.tcx, b), pcx.span)) + .collect(), ty::Array(ref sub_ty, len) if len.try_eval_usize(cx.tcx, cx.param_env).is_some() => { let len = len.eval_usize(cx.tcx, cx.param_env); - if len != 0 && cx.is_uninhabited(sub_ty) { - vec![] - } else { - vec![Slice(len)] - } + if len != 0 && cx.is_uninhabited(sub_ty) { vec![] } else { vec![Slice(len)] } } // Treat arrays of a constant but unknown length like slices. 
- ty::Array(ref sub_ty, _) | - ty::Slice(ref sub_ty) => { + ty::Array(ref sub_ty, _) | ty::Slice(ref sub_ty) => { if cx.is_uninhabited(sub_ty) { vec![Slice(0)] } else { - (0..pcx.max_slice_length+1).map(|length| Slice(length)).collect() + (0..pcx.max_slice_length + 1).map(|length| Slice(length)).collect() } } - ty::Adt(def, substs) if def.is_enum() => { - def.variants.iter() - .filter(|v| { - !cx.tcx.features().exhaustive_patterns || - !v.uninhabited_from(cx.tcx, substs, def.adt_kind()).contains(cx.tcx, cx.module) - }) - .map(|v| Variant(v.def_id)) - .collect() - } + ty::Adt(def, substs) if def.is_enum() => def + .variants + .iter() + .filter(|v| { + !cx.tcx.features().exhaustive_patterns + || !v + .uninhabited_from(cx.tcx, substs, def.adt_kind()) + .contains(cx.tcx, cx.module) + }) + .map(|v| Variant(v.def_id)) + .collect(), ty::Char => { vec![ // The valid Unicode Scalar Value ranges. - ConstantRange('\u{0000}' as u128, - '\u{D7FF}' as u128, - cx.tcx.types.char, - RangeEnd::Included + ConstantRange( + '\u{0000}' as u128, + '\u{D7FF}' as u128, + cx.tcx.types.char, + RangeEnd::Included, + pcx.span, ), - ConstantRange('\u{E000}' as u128, - '\u{10FFFF}' as u128, - cx.tcx.types.char, - RangeEnd::Included + ConstantRange( + '\u{E000}' as u128, + '\u{10FFFF}' as u128, + cx.tcx.types.char, + RangeEnd::Included, + pcx.span, ), ] } @@ -690,12 +1024,12 @@ fn all_constructors<'a, 'tcx>( let bits = Integer::from_attr(&cx.tcx, SignedInt(ity)).size().bits() as u128; let min = 1u128 << (bits - 1); let max = min - 1; - vec![ConstantRange(min, max, pcx.ty, RangeEnd::Included)] + vec![ConstantRange(min, max, pcx.ty, RangeEnd::Included, pcx.span)] } ty::Uint(uty) => { let size = Integer::from_attr(&cx.tcx, UnsignedInt(uty)).size(); let max = truncate(u128::max_value(), size); - vec![ConstantRange(0, max, pcx.ty, RangeEnd::Included)] + vec![ConstantRange(0, max, pcx.ty, RangeEnd::Included, pcx.span)] } _ => { if cx.is_uninhabited(pcx.ty) { @@ -710,7 +1044,7 @@ fn all_constructors<'a, 'tcx>( fn max_slice_length<'p, 'a, 'tcx, I>(cx: &mut MatchCheckCtxt<'a, 'tcx>, patterns: I) -> u64 where - I: Iterator>, + I: Iterator>, 'tcx: 'p, { // The exhaustiveness-checking paper does not include any details on @@ -783,25 +1117,23 @@ where for row in patterns { match *row.kind { - PatternKind::Constant { value } => { + PatKind::Constant { value } => { // extract the length of an array/slice from a constant - match (value.val, &value.ty.sty) { - (_, ty::Array(_, n)) => max_fixed_len = cmp::max( - max_fixed_len, - n.eval_usize(cx.tcx, cx.param_env), - ), - (ConstValue::Slice{ start, end, .. }, ty::Slice(_)) => max_fixed_len = cmp::max( - max_fixed_len, - (end - start) as u64, - ), - _ => {}, + match (value.val, &value.ty.kind) { + (_, ty::Array(_, n)) => { + max_fixed_len = cmp::max(max_fixed_len, n.eval_usize(cx.tcx, cx.param_env)) + } + (ConstValue::Slice { start, end, .. 
}, ty::Slice(_)) => { + max_fixed_len = cmp::max(max_fixed_len, (end - start) as u64) + } + _ => {} } } - PatternKind::Slice { ref prefix, slice: None, ref suffix } => { + PatKind::Slice { ref prefix, slice: None, ref suffix } => { let fixed_len = prefix.len() as u64 + suffix.len() as u64; max_fixed_len = cmp::max(max_fixed_len, fixed_len); } - PatternKind::Slice { ref prefix, slice: Some(_), ref suffix } => { + PatKind::Slice { ref prefix, slice: Some(_), ref suffix } => { max_prefix_len = cmp::max(max_prefix_len, prefix.len() as u64); max_suffix_len = cmp::max(max_suffix_len, suffix.len() as u64); } @@ -822,13 +1154,90 @@ where /// /// `IntRange` is never used to encode an empty range or a "range" that wraps /// around the (offset) space: i.e., `range.lo <= range.hi`. -#[derive(Clone)] +#[derive(Clone, Debug)] struct IntRange<'tcx> { pub range: RangeInclusive, pub ty: Ty<'tcx>, + pub span: Span, } impl<'tcx> IntRange<'tcx> { + #[inline] + fn is_integral(ty: Ty<'_>) -> bool { + match ty.kind { + ty::Char | ty::Int(_) | ty::Uint(_) => true, + _ => false, + } + } + + #[inline] + fn integral_size_and_signed_bias(tcx: TyCtxt<'tcx>, ty: Ty<'_>) -> Option<(Size, u128)> { + match ty.kind { + ty::Char => Some((Size::from_bytes(4), 0)), + ty::Int(ity) => { + let size = Integer::from_attr(&tcx, SignedInt(ity)).size(); + Some((size, 1u128 << (size.bits() as u128 - 1))) + } + ty::Uint(uty) => Some((Integer::from_attr(&tcx, UnsignedInt(uty)).size(), 0)), + _ => None, + } + } + + #[inline] + fn from_const( + tcx: TyCtxt<'tcx>, + param_env: ty::ParamEnv<'tcx>, + value: &Const<'tcx>, + span: Span, + ) -> Option> { + if let Some((target_size, bias)) = Self::integral_size_and_signed_bias(tcx, value.ty) { + let ty = value.ty; + let val = if let ConstValue::Scalar(Scalar::Raw { data, size }) = value.val { + // For this specific pattern we can skip a lot of effort and go + // straight to the result, after doing a bit of checking. (We + // could remove this branch and just use the next branch, which + // is more general but much slower.) + Scalar::<()>::check_raw(data, size, target_size); + data + } else if let Some(val) = value.try_eval_bits(tcx, param_env, ty) { + // This is a more general form of the previous branch. + val + } else { + return None; + }; + let val = val ^ bias; + Some(IntRange { range: val..=val, ty, span }) + } else { + None + } + } + + #[inline] + fn from_range( + tcx: TyCtxt<'tcx>, + lo: u128, + hi: u128, + ty: Ty<'tcx>, + end: &RangeEnd, + span: Span, + ) -> Option> { + if Self::is_integral(ty) { + // Perform a shift if the underlying types are signed, + // which makes the interval arithmetic simpler. + let bias = IntRange::signed_bias(tcx, ty); + let (lo, hi) = (lo ^ bias, hi ^ bias); + // Make sure the interval is well-formed. + if lo > hi || lo == hi && *end == RangeEnd::Excluded { + None + } else { + let offset = (*end == RangeEnd::Excluded) as u128; + Some(IntRange { range: lo..=(hi - offset), ty, span }) + } + } else { + None + } + } + fn from_ctor( tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, @@ -836,37 +1245,9 @@ impl<'tcx> IntRange<'tcx> { ) -> Option> { // Floating-point ranges are permitted and we don't want // to consider them when constructing integer ranges. - fn is_integral(ty: Ty<'_>) -> bool { - match ty.sty { - ty::Char | ty::Int(_) | ty::Uint(_) => true, - _ => false, - } - } - match ctor { - ConstantRange(lo, hi, ty, end) if is_integral(ty) => { - // Perform a shift if the underlying types are signed, - // which makes the interval arithmetic simpler. 
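For readers unfamiliar with the bias trick used by `from_const` and `from_range`: a worked sketch with plain `i8`/`u128` arithmetic (nothing compiler-specific) of how XOR-ing the sign bit maps signed values onto unsigned ones in an order-preserving way, so the interval arithmetic can work uniformly on `u128`s.

```rust
fn encode_i8(x: i8) -> u128 {
    let bias: u128 = 1 << 7; // the sign bit of an 8-bit integer
    (x as u8 as u128) ^ bias
}

fn main() {
    // Order is preserved: -128 < -1 < 0 < 127 maps to 0 < 127 < 128 < 255.
    assert_eq!(encode_i8(-128), 0);
    assert_eq!(encode_i8(-1), 127);
    assert_eq!(encode_i8(0), 128);
    assert_eq!(encode_i8(127), 255);
}
```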
- let bias = IntRange::signed_bias(tcx, ty); - let (lo, hi) = (lo ^ bias, hi ^ bias); - // Make sure the interval is well-formed. - if lo > hi || lo == hi && *end == RangeEnd::Excluded { - None - } else { - let offset = (*end == RangeEnd::Excluded) as u128; - Some(IntRange { range: lo..=(hi - offset), ty }) - } - } - ConstantValue(val) if is_integral(val.ty) => { - let ty = val.ty; - if let Some(val) = val.try_eval_bits(tcx, param_env, ty) { - let bias = IntRange::signed_bias(tcx, ty); - let val = val ^ bias; - Some(IntRange { range: val..=val, ty }) - } else { - None - } - } + ConstantRange(lo, hi, ty, end, span) => Self::from_range(tcx, *lo, *hi, ty, end, *span), + ConstantValue(val, span) => Self::from_const(tcx, param_env, val, *span), _ => None, } } @@ -874,34 +1255,39 @@ impl<'tcx> IntRange<'tcx> { fn from_pat( tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, - mut pat: &Pattern<'tcx>, + mut pat: &Pat<'tcx>, ) -> Option> { - let range = loop { + loop { match pat.kind { - box PatternKind::Constant { value } => break ConstantValue(value), - box PatternKind::Range(PatternRange { lo, hi, end }) => break ConstantRange( - lo.eval_bits(tcx, param_env, lo.ty), - hi.eval_bits(tcx, param_env, hi.ty), - lo.ty, - end, - ), - box PatternKind::AscribeUserType { ref subpattern, .. } => { + box PatKind::Constant { value } => { + return Self::from_const(tcx, param_env, value, pat.span); + } + box PatKind::Range(PatRange { lo, hi, end }) => { + return Self::from_range( + tcx, + lo.eval_bits(tcx, param_env, lo.ty), + hi.eval_bits(tcx, param_env, hi.ty), + &lo.ty, + &end, + pat.span, + ); + } + box PatKind::AscribeUserType { ref subpattern, .. } => { pat = subpattern; - }, + } _ => return None, } - }; - Self::from_ctor(tcx, param_env, &range) + } } // The return value of `signed_bias` should be XORed with an endpoint to encode/decode it. fn signed_bias(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> u128 { - match ty.sty { + match ty.kind { ty::Int(ity) => { let bits = Integer::from_attr(&tcx, SignedInt(ity)).size().bits() as u128; 1u128 << (bits - 1) } - _ => 0 + _ => 0, } } @@ -910,14 +1296,15 @@ impl<'tcx> IntRange<'tcx> { tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, r: RangeInclusive, + span: Span, ) -> Constructor<'tcx> { let bias = IntRange::signed_bias(tcx, ty); let (lo, hi) = r.into_inner(); if lo == hi { let ty = ty::ParamEnv::empty().and(ty); - ConstantValue(ty::Const::from_bits(tcx, lo ^ bias, ty)) + ConstantValue(ty::Const::from_bits(tcx, lo ^ bias, ty), span) } else { - ConstantRange(lo ^ bias, hi ^ bias, ty, RangeEnd::Included) + ConstantRange(lo ^ bias, hi ^ bias, ty, RangeEnd::Included, span) } } @@ -929,28 +1316,43 @@ impl<'tcx> IntRange<'tcx> { param_env: ty::ParamEnv<'tcx>, ranges: Vec>, ) -> Vec> { - let ranges = ranges.into_iter().filter_map(|r| { - IntRange::from_ctor(tcx, param_env, &r).map(|i| i.range) - }); + let ranges = ranges + .into_iter() + .filter_map(|r| IntRange::from_ctor(tcx, param_env, &r).map(|i| i.range)); let mut remaining_ranges = vec![]; let ty = self.ty; let (lo, hi) = self.range.into_inner(); for subrange in ranges { let (subrange_lo, subrange_hi) = subrange.into_inner(); - if lo > subrange_hi || subrange_lo > hi { + if lo > subrange_hi || subrange_lo > hi { // The pattern doesn't intersect with the subrange at all, // so the subrange remains untouched. 
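A minimal sketch of the interval subtraction performed in `subtract_from`, using bare inclusive `(u128, u128)` ranges in place of `IntRange`/`Constructor`: subtracting a matched range from a required range leaves at most a lower and an upper remainder.

```rust
fn subtract(required: (u128, u128), matched: (u128, u128)) -> Vec<(u128, u128)> {
    let (lo, hi) = matched;        // the range covered by the pattern
    let (slo, shi) = required;     // the range still needing to be covered
    if lo > shi || slo > hi {
        // No intersection: the required range is untouched.
        return vec![required];
    }
    let mut rest = vec![];
    if lo > slo {
        rest.push((slo, lo - 1)); // a lower section survives
    }
    if hi < shi {
        rest.push((hi + 1, shi)); // an upper section survives
    }
    rest
}

fn main() {
    // `0..=255` with `10..=20` matched leaves `0..=9` and `21..=255`.
    assert_eq!(subtract((0, 255), (10, 20)), vec![(0, 9), (21, 255)]);
    // A fully covering match leaves nothing.
    assert!(subtract((10, 20), (0, 255)).is_empty());
}
```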
- remaining_ranges.push(Self::range_to_ctor(tcx, ty, subrange_lo..=subrange_hi)); + remaining_ranges.push(Self::range_to_ctor( + tcx, + ty, + subrange_lo..=subrange_hi, + self.span, + )); } else { if lo > subrange_lo { // The pattern intersects an upper section of the // subrange, so a lower section will remain. - remaining_ranges.push(Self::range_to_ctor(tcx, ty, subrange_lo..=(lo - 1))); + remaining_ranges.push(Self::range_to_ctor( + tcx, + ty, + subrange_lo..=(lo - 1), + self.span, + )); } if hi < subrange_hi { // The pattern intersects a lower section of the // subrange, so an upper section will remain. - remaining_ranges.push(Self::range_to_ctor(tcx, ty, (hi + 1)..=subrange_hi)); + remaining_ranges.push(Self::range_to_ctor( + tcx, + ty, + (hi + 1)..=subrange_hi, + self.span, + )); } } } @@ -962,86 +1364,74 @@ impl<'tcx> IntRange<'tcx> { let (lo, hi) = (*self.range.start(), *self.range.end()); let (other_lo, other_hi) = (*other.range.start(), *other.range.end()); if lo <= other_hi && other_lo <= hi { - Some(IntRange { range: max(lo, other_lo)..=min(hi, other_hi), ty }) + let span = other.span; + Some(IntRange { range: max(lo, other_lo)..=min(hi, other_hi), ty, span }) } else { None } } + + fn suspicious_intersection(&self, other: &Self) -> bool { + // `false` in the following cases: + // 1 ---- // 1 ---------- // 1 ---- // 1 ---- + // 2 ---------- // 2 ---- // 2 ---- // 2 ---- + // + // The following are currently `false`, but could be `true` in the future (#64007): + // 1 --------- // 1 --------- + // 2 ---------- // 2 ---------- + // + // `true` in the following cases: + // 1 ------- // 1 ------- + // 2 -------- // 2 ------- + let (lo, hi) = (*self.range.start(), *self.range.end()); + let (other_lo, other_hi) = (*other.range.start(), *other.range.end()); + (lo == other_hi || hi == other_lo) + } } -// A request for missing constructor data in terms of either: -// - whether or not there any missing constructors; or -// - the actual set of missing constructors. -#[derive(PartialEq)] -enum MissingCtorsInfo { - Emptiness, - Ctors, -} - -// Used by `compute_missing_ctors`. -#[derive(Debug, PartialEq)] -enum MissingCtors<'tcx> { - Empty, - NonEmpty, - - // Note that the Vec can be empty. - Ctors(Vec>), -} - -// When `info` is `MissingCtorsInfo::Ctors`, compute a set of constructors -// equivalent to `all_ctors \ used_ctors`. When `info` is -// `MissingCtorsInfo::Emptiness`, just determines if that set is empty or not. -// (The split logic gives a performance win, because we always need to know if -// the set is empty, but we rarely need the full set, and it can be expensive -// to compute the full set.) -fn compute_missing_ctors<'tcx>( - info: MissingCtorsInfo, +// A struct to compute a set of constructors equivalent to `all_ctors \ used_ctors`. +struct MissingConstructors<'tcx> { tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, - all_ctors: &Vec>, - used_ctors: &Vec>, -) -> MissingCtors<'tcx> { - let mut missing_ctors = vec![]; + all_ctors: Vec>, + used_ctors: Vec>, +} - for req_ctor in all_ctors { - let mut refined_ctors = vec![req_ctor.clone()]; - for used_ctor in used_ctors { - if used_ctor == req_ctor { - // If a constructor appears in a `match` arm, we can - // eliminate it straight away. - refined_ctors = vec![] - } else if let Some(interval) = IntRange::from_ctor(tcx, param_env, used_ctor) { - // Refine the required constructors for the type by subtracting - // the range defined by the current constructor pattern. 
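A hypothetical miniature of the `MissingConstructors` idea, with plain `u32` constructors and a simple `contains` check standing in for range subtraction: the set `all_ctors \ used_ctors` is only ever produced lazily, so the common emptiness check stays cheap and the full list is collected only when witnesses are needed.

```rust
struct Missing {
    all: Vec<u32>,
    used: Vec<u32>,
}

impl Missing {
    // Lazily iterate over `all \ used`.
    fn iter<'a>(&'a self) -> impl Iterator<Item = u32> + 'a {
        self.all.iter().copied().filter(move |c| !self.used.contains(c))
    }
    // Emptiness only asks the iterator for its first element.
    fn is_empty(&self) -> bool {
        self.iter().next().is_none()
    }
}

fn main() {
    let m = Missing { all: vec![1, 2, 3], used: vec![1, 3] };
    assert!(!m.is_empty());
    assert_eq!(m.iter().collect::<Vec<_>>(), vec![2]);
}
```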
- refined_ctors = interval.subtract_from(tcx, param_env, refined_ctors); - } - - // If the constructor patterns that have been considered so far - // already cover the entire range of values, then we the - // constructor is not missing, and we can move on to the next one. - if refined_ctors.is_empty() { - break; - } - } - // If a constructor has not been matched, then it is missing. - // We add `refined_ctors` instead of `req_ctor`, because then we can - // provide more detailed error information about precisely which - // ranges have been omitted. - if info == MissingCtorsInfo::Emptiness { - if !refined_ctors.is_empty() { - // The set is non-empty; return early. - return MissingCtors::NonEmpty; - } - } else { - missing_ctors.extend(refined_ctors); - } +impl<'tcx> MissingConstructors<'tcx> { + fn new( + tcx: TyCtxt<'tcx>, + param_env: ty::ParamEnv<'tcx>, + all_ctors: Vec>, + used_ctors: Vec>, + ) -> Self { + MissingConstructors { tcx, param_env, all_ctors, used_ctors } } - if info == MissingCtorsInfo::Emptiness { - // If we reached here, the set is empty. - MissingCtors::Empty - } else { - MissingCtors::Ctors(missing_ctors) + fn into_inner(self) -> (Vec>, Vec>) { + (self.all_ctors, self.used_ctors) + } + + fn is_empty(&self) -> bool { + self.iter().next().is_none() + } + /// Whether this contains all the constructors for the given type or only a + /// subset. + fn all_ctors_are_missing(&self) -> bool { + self.used_ctors.is_empty() + } + + /// Iterate over all_ctors \ used_ctors + fn iter<'a>(&'a self) -> impl Iterator> + Captures<'a> { + self.all_ctors.iter().flat_map(move |req_ctor| { + req_ctor.subtract_ctors(self.tcx, self.param_env, &self.used_ctors) + }) + } +} + +impl<'tcx> fmt::Debug for MissingConstructors<'tcx> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let ctors: Vec<_> = self.iter().collect(); + write!(f, "{:?}", ctors) } } @@ -1058,7 +1448,7 @@ fn compute_missing_ctors<'tcx>( /// inputs that will match `v` but not any of the sets in `m`. /// /// All the patterns at each column of the `matrix ++ v` matrix must -/// have the same type, except that wildcard (PatternKind::Wild) patterns +/// have the same type, except that wildcard (PatKind::Wild) patterns /// with type `TyErr` are also allowed, even if the "type of the column" /// is not `TyErr`. That is used to represent private fields, as using their /// real type would assert that they are inhabited. @@ -1070,8 +1460,9 @@ fn compute_missing_ctors<'tcx>( pub fn is_useful<'p, 'a, 'tcx>( cx: &mut MatchCheckCtxt<'a, 'tcx>, matrix: &Matrix<'p, 'tcx>, - v: &[&Pattern<'tcx>], - witness: WitnessPreference, + v: &PatStack<'_, 'tcx>, + witness_preference: WitnessPreference, + hir_id: HirId, ) -> Usefulness<'tcx> { let &Matrix(ref rows) = matrix; debug!("is_useful({:#?}, {:#?})", matrix, v); @@ -1083,18 +1474,20 @@ pub fn is_useful<'p, 'a, 'tcx>( // the type of the tuple we're checking is inhabited or not. 
if v.is_empty() { return if rows.is_empty() { - match witness { - ConstructWitness => UsefulWithWitness(vec![Witness(vec![])]), - LeaveOutWitness => Useful, - } + Usefulness::new_useful(witness_preference) } else { NotUseful - } + }; }; assert!(rows.iter().all(|r| r.len() == v.len())); - let pcx = PatternContext { + let (ty, span) = matrix + .heads() + .map(|r| (r.ty, r.span)) + .find(|(ty, _)| !ty.references_error()) + .unwrap_or((v.head().ty, v.head().span)); + let pcx = PatCtxt { // TyErr is used to represent the type of wildcard patterns matching // against inaccessible (private) fields of structs, so that we won't // be able to observe whether the types of the struct's fields are @@ -1114,38 +1507,42 @@ pub fn is_useful<'p, 'a, 'tcx>( // FIXME: this might lead to "unstable" behavior with macro hygiene // introducing uninhabited patterns for inaccessible fields. We // need to figure out how to model that. - ty: rows.iter().map(|r| r[0].ty).find(|ty| !ty.references_error()).unwrap_or(v[0].ty), - max_slice_length: max_slice_length(cx, rows.iter().map(|r| r[0]).chain(Some(v[0]))) + ty, + max_slice_length: max_slice_length(cx, matrix.heads().chain(Some(v.head()))), + span, }; - debug!("is_useful_expand_first_col: pcx={:#?}, expanding {:#?}", pcx, v[0]); + debug!("is_useful_expand_first_col: pcx={:#?}, expanding {:#?}", pcx, v.head()); - if let Some(constructors) = pat_constructors(cx, v[0], pcx) { - let is_declared_nonexhaustive = cx.is_non_exhaustive_variant(v[0]) && !cx.is_local(pcx.ty); - debug!("is_useful - expanding constructors: {:#?}, is_declared_nonexhaustive: {:?}", - constructors, is_declared_nonexhaustive); - - if is_declared_nonexhaustive { - Useful - } else { - split_grouped_constructors( - cx.tcx, cx.param_env, constructors, matrix, pcx.ty, - ).into_iter().map(|c| - is_useful_specialized(cx, matrix, v, c, pcx.ty, witness) - ).find(|result| result.is_useful()).unwrap_or(NotUseful) - } + if let Some(constructors) = pat_constructors(cx, v.head(), pcx) { + debug!("is_useful - expanding constructors: {:#?}", constructors); + split_grouped_constructors( + cx.tcx, + cx.param_env, + constructors, + matrix, + pcx.ty, + pcx.span, + Some(hir_id), + ) + .into_iter() + .map(|c| is_useful_specialized(cx, matrix, v, c, pcx.ty, witness_preference, hir_id)) + .find(|result| result.is_useful()) + .unwrap_or(NotUseful) } else { debug!("is_useful - expanding wildcard"); - let used_ctors: Vec> = rows.iter().flat_map(|row| { - pat_constructors(cx, row[0], pcx).unwrap_or(vec![]) - }).collect(); + let used_ctors: Vec> = + matrix.heads().flat_map(|p| pat_constructors(cx, p, pcx).unwrap_or(vec![])).collect(); debug!("used_ctors = {:#?}", used_ctors); // `all_ctors` are all the constructors for the given type, which // should all be represented (or caught with the wild pattern `_`). let all_ctors = all_constructors(cx, pcx); debug!("all_ctors = {:#?}", all_ctors); + let is_privately_empty = all_ctors.is_empty() && !cx.is_uninhabited(pcx.ty); + let is_declared_nonexhaustive = cx.is_non_exhaustive_enum(pcx.ty) && !cx.is_local(pcx.ty); + // `missing_ctors` is the set of constructors from the same type as the // first column of `matrix` that are matched only by wildcard patterns // from the first column. @@ -1166,115 +1563,98 @@ pub fn is_useful<'p, 'a, 'tcx>( // needed for that case. // Missing constructors are those that are not matched by any - // non-wildcard patterns in the current column. 
We always determine if - // the set is empty, but we only fully construct them on-demand, - // because they're rarely used and can be big. - let cheap_missing_ctors = compute_missing_ctors( - MissingCtorsInfo::Emptiness, cx.tcx, cx.param_env, &all_ctors, &used_ctors, - ); + // non-wildcard patterns in the current column. To determine if + // the set is empty, we can check that `.peek().is_none()`, so + // we only fully construct them on-demand, because they're rarely used and can be big. + let missing_ctors = MissingConstructors::new(cx.tcx, cx.param_env, all_ctors, used_ctors); - let is_privately_empty = all_ctors.is_empty() && !cx.is_uninhabited(pcx.ty); - let is_declared_nonexhaustive = cx.is_non_exhaustive_enum(pcx.ty) && !cx.is_local(pcx.ty); - debug!("cheap_missing_ctors={:#?} is_privately_empty={:#?} is_declared_nonexhaustive={:#?}", - cheap_missing_ctors, is_privately_empty, is_declared_nonexhaustive); + debug!( + "missing_ctors.empty()={:#?} is_privately_empty={:#?} is_declared_nonexhaustive={:#?}", + missing_ctors.is_empty(), + is_privately_empty, + is_declared_nonexhaustive + ); // For privately empty and non-exhaustive enums, we work as if there were an "extra" // `_` constructor for the type, so we can never match over all constructors. - let is_non_exhaustive = is_privately_empty || is_declared_nonexhaustive || - (pcx.ty.is_ptr_sized_integral() && !cx.tcx.features().precise_pointer_size_matching); + let is_non_exhaustive = is_privately_empty + || is_declared_nonexhaustive + || (pcx.ty.is_ptr_sized_integral() && !cx.tcx.features().precise_pointer_size_matching); - if cheap_missing_ctors == MissingCtors::Empty && !is_non_exhaustive { - split_grouped_constructors(cx.tcx, cx.param_env, all_ctors, matrix, pcx.ty) - .into_iter().map(|c| is_useful_specialized(cx, matrix, v, c, pcx.ty, witness)) - .find(|result| result.is_useful()) - .unwrap_or(NotUseful) + if missing_ctors.is_empty() && !is_non_exhaustive { + let (all_ctors, _) = missing_ctors.into_inner(); + split_grouped_constructors( + cx.tcx, + cx.param_env, + all_ctors, + matrix, + pcx.ty, + DUMMY_SP, + None, + ) + .into_iter() + .map(|c| is_useful_specialized(cx, matrix, v, c, pcx.ty, witness_preference, hir_id)) + .find(|result| result.is_useful()) + .unwrap_or(NotUseful) } else { - let matrix = rows.iter().filter_map(|r| { - if r[0].is_wildcard() { - Some(SmallVec::from_slice(&r[1..])) - } else { - None - } - }).collect(); - match is_useful(cx, &matrix, &v[1..], witness) { - UsefulWithWitness(pats) => { - let cx = &*cx; - // In this case, there's at least one "free" - // constructor that is only matched against by - // wildcard patterns. - // - // There are 2 ways we can report a witness here. - // Commonly, we can report all the "free" - // constructors as witnesses, e.g., if we have: - // - // ``` - // enum Direction { N, S, E, W } - // let Direction::N = ...; - // ``` - // - // we can report 3 witnesses: `S`, `E`, and `W`. 
- // - // However, there are 2 cases where we don't want - // to do this and instead report a single `_` witness: - // - // 1) If the user is matching against a non-exhaustive - // enum, there is no point in enumerating all possible - // variants, because the user can't actually match - // against them himself, e.g., in an example like: - // ``` - // let err: io::ErrorKind = ...; - // match err { - // io::ErrorKind::NotFound => {}, - // } - // ``` - // we don't want to show every possible IO error, - // but instead have `_` as the witness (this is - // actually *required* if the user specified *all* - // IO errors, but is probably what we want in every - // case). - // - // 2) If the user didn't actually specify a constructor - // in this arm, e.g., in - // ``` - // let x: (Direction, Direction, bool) = ...; - // let (_, _, false) = x; - // ``` - // we don't want to show all 16 possible witnesses - // `(, , true)` - we are - // satisfied with `(_, _, true)`. In this case, - // `used_ctors` is empty. - let new_witnesses = if is_non_exhaustive || used_ctors.is_empty() { - // All constructors are unused. Add wild patterns - // rather than each individual constructor. - pats.into_iter().map(|mut witness| { - witness.0.push(Pattern { - ty: pcx.ty, - span: DUMMY_SP, - kind: box PatternKind::Wild, - }); - witness - }).collect() - } else { - let expensive_missing_ctors = compute_missing_ctors( - MissingCtorsInfo::Ctors, cx.tcx, cx.param_env, &all_ctors, &used_ctors, - ); - if let MissingCtors::Ctors(missing_ctors) = expensive_missing_ctors { - pats.into_iter().flat_map(|witness| { - missing_ctors.iter().map(move |ctor| { - // Extends the witness with a "wild" version of this - // constructor, that matches everything that can be built with - // it. For example, if `ctor` is a `Constructor::Variant` for - // `Option::Some`, this pushes the witness for `Some(_)`. - witness.clone().push_wild_constructor(cx, ctor, pcx.ty) - }) - }).collect() - } else { - bug!("cheap missing ctors") - } - }; - UsefulWithWitness(new_witnesses) - } - result => result + let matrix = matrix.specialize_wildcard(); + let v = v.to_tail(); + let usefulness = is_useful(cx, &matrix, &v, witness_preference, hir_id); + + // In this case, there's at least one "free" + // constructor that is only matched against by + // wildcard patterns. + // + // There are 2 ways we can report a witness here. + // Commonly, we can report all the "free" + // constructors as witnesses, e.g., if we have: + // + // ``` + // enum Direction { N, S, E, W } + // let Direction::N = ...; + // ``` + // + // we can report 3 witnesses: `S`, `E`, and `W`. + // + // However, there are 2 cases where we don't want + // to do this and instead report a single `_` witness: + // + // 1) If the user is matching against a non-exhaustive + // enum, there is no point in enumerating all possible + // variants, because the user can't actually match + // against them themselves, e.g., in an example like: + // ``` + // let err: io::ErrorKind = ...; + // match err { + // io::ErrorKind::NotFound => {}, + // } + // ``` + // we don't want to show every possible IO error, + // but instead have `_` as the witness (this is + // actually *required* if the user specified *all* + // IO errors, but is probably what we want in every + // case). 
+ // + // 2) If the user didn't actually specify a constructor + // in this arm, e.g., in + // ``` + // let x: (Direction, Direction, bool) = ...; + // let (_, _, false) = x; + // ``` + // we don't want to show all 16 possible witnesses + // `(, , true)` - we are + // satisfied with `(_, _, true)`. In this case, + // `used_ctors` is empty. + if is_non_exhaustive || missing_ctors.all_ctors_are_missing() { + // All constructors are unused. Add a wild pattern + // rather than each individual constructor. + usefulness.apply_wildcard(pcx.ty) + } else { + // Construct for each missing constructor a "wild" version of this + // constructor, that matches everything that can be built with + // it. For example, if `ctor` is a `Constructor::Variant` for + // `Option::Some`, we get the pattern `Some(_)`. + usefulness.apply_missing_ctors(cx, pcx.ty, &missing_ctors) } } } @@ -1284,36 +1664,22 @@ pub fn is_useful<'p, 'a, 'tcx>( /// to the specialised version of both the pattern matrix `P` and the new pattern `q`. fn is_useful_specialized<'p, 'a, 'tcx>( cx: &mut MatchCheckCtxt<'a, 'tcx>, - &Matrix(ref m): &Matrix<'p, 'tcx>, - v: &[&Pattern<'tcx>], + matrix: &Matrix<'p, 'tcx>, + v: &PatStack<'_, 'tcx>, ctor: Constructor<'tcx>, lty: Ty<'tcx>, - witness: WitnessPreference, + witness_preference: WitnessPreference, + hir_id: HirId, ) -> Usefulness<'tcx> { debug!("is_useful_specialized({:#?}, {:#?}, {:?})", v, ctor, lty); - let sub_pat_tys = constructor_sub_pattern_tys(cx, &ctor, lty); - let wild_patterns_owned: Vec<_> = sub_pat_tys.iter().map(|ty| { - Pattern { - ty, - span: DUMMY_SP, - kind: box PatternKind::Wild, - } - }).collect(); - let wild_patterns: Vec<_> = wild_patterns_owned.iter().collect(); - let matrix = Matrix(m.iter().flat_map(|r| { - specialize(cx, &r, &ctor, &wild_patterns) - }).collect()); - match specialize(cx, v, &ctor, &wild_patterns) { - Some(v) => match is_useful(cx, &matrix, &v, witness) { - UsefulWithWitness(witnesses) => UsefulWithWitness( - witnesses.into_iter() - .map(|witness| witness.apply_constructor(cx, &ctor, lty)) - .collect() - ), - result => result - } - None => NotUseful - } + + let ctor_wild_subpatterns_owned: Vec<_> = ctor.wildcard_subpatterns(cx, lty).collect(); + let ctor_wild_subpatterns: Vec<_> = ctor_wild_subpatterns_owned.iter().collect(); + let matrix = matrix.specialize_constructor(cx, &ctor, &ctor_wild_subpatterns); + v.specialize_constructor(cx, &ctor, &ctor_wild_subpatterns) + .map(|v| is_useful(cx, &matrix, &v, witness_preference, hir_id)) + .map(|u| u.apply_constructor(cx, &ctor, lty)) + .unwrap_or(NotUseful) } /// Determines the constructors that the given pattern can be specialized to. @@ -1321,72 +1687,47 @@ fn is_useful_specialized<'p, 'a, 'tcx>( /// In most cases, there's only one constructor that a specific pattern /// represents, such as a specific enum variant or a specific literal value. /// Slice patterns, however, can match slices of different lengths. For instance, -/// `[a, b, ..tail]` can match a slice of length 2, 3, 4 and so on. +/// `[a, b, tail @ ..]` can match a slice of length 2, 3, 4 and so on. /// /// Returns `None` in case of a catch-all, which can't be specialized. -fn pat_constructors<'tcx>(cx: &mut MatchCheckCtxt<'_, 'tcx>, - pat: &Pattern<'tcx>, - pcx: PatternContext<'tcx>) - -> Option>> -{ +fn pat_constructors<'tcx>( + cx: &mut MatchCheckCtxt<'_, 'tcx>, + pat: &Pat<'tcx>, + pcx: PatCtxt<'tcx>, +) -> Option>> { match *pat.kind { - PatternKind::AscribeUserType { ref subpattern, .. 
} => - pat_constructors(cx, subpattern, pcx), - PatternKind::Binding { .. } | PatternKind::Wild => None, - PatternKind::Leaf { .. } | PatternKind::Deref { .. } => Some(vec![Single]), - PatternKind::Variant { adt_def, variant_index, .. } => { + PatKind::AscribeUserType { ref subpattern, .. } => pat_constructors(cx, subpattern, pcx), + PatKind::Binding { .. } | PatKind::Wild => None, + PatKind::Leaf { .. } | PatKind::Deref { .. } => Some(vec![Single]), + PatKind::Variant { adt_def, variant_index, .. } => { Some(vec![Variant(adt_def.variants[variant_index].def_id)]) } - PatternKind::Constant { value } => Some(vec![ConstantValue(value)]), - PatternKind::Range(PatternRange { lo, hi, end }) => - Some(vec![ConstantRange( - lo.eval_bits(cx.tcx, cx.param_env, lo.ty), - hi.eval_bits(cx.tcx, cx.param_env, hi.ty), - lo.ty, - end, - )]), - PatternKind::Array { .. } => match pcx.ty.sty { - ty::Array(_, length) => Some(vec![ - Slice(length.eval_usize(cx.tcx, cx.param_env)) - ]), - _ => span_bug!(pat.span, "bad ty {:?} for array pattern", pcx.ty) + PatKind::Constant { value } => Some(vec![ConstantValue(value, pat.span)]), + PatKind::Range(PatRange { lo, hi, end }) => Some(vec![ConstantRange( + lo.eval_bits(cx.tcx, cx.param_env, lo.ty), + hi.eval_bits(cx.tcx, cx.param_env, hi.ty), + lo.ty, + end, + pat.span, + )]), + PatKind::Array { .. } => match pcx.ty.kind { + ty::Array(_, length) => Some(vec![Slice(length.eval_usize(cx.tcx, cx.param_env))]), + _ => span_bug!(pat.span, "bad ty {:?} for array pattern", pcx.ty), }, - PatternKind::Slice { ref prefix, ref slice, ref suffix } => { + PatKind::Slice { ref prefix, ref slice, ref suffix } => { let pat_len = prefix.len() as u64 + suffix.len() as u64; if slice.is_some() { - Some((pat_len..pcx.max_slice_length+1).map(Slice).collect()) + Some((pat_len..pcx.max_slice_length + 1).map(Slice).collect()) } else { Some(vec![Slice(pat_len)]) } } - PatternKind::Or { .. } => { + PatKind::Or { .. } => { bug!("support for or-patterns has not been fully implemented yet."); } } } -/// This computes the arity of a constructor. The arity of a constructor -/// is how many subpattern patterns of that constructor should be expanded to. -/// -/// For instance, a tuple pattern `(_, 42, Some([]))` has the arity of 3. -/// A struct pattern's arity is the number of fields it contains, etc. -fn constructor_arity(cx: &MatchCheckCtxt<'a, 'tcx>, ctor: &Constructor<'tcx>, ty: Ty<'tcx>) -> u64 { - debug!("constructor_arity({:#?}, {:?})", ctor, ty); - match ty.sty { - ty::Tuple(ref fs) => fs.len() as u64, - ty::Slice(..) | ty::Array(..) => match *ctor { - Slice(length) => length, - ConstantValue(_) => 0, - _ => bug!("bad slice pattern {:?} {:?}", ctor, ty) - } - ty::Ref(..) => 1, - ty::Adt(adt, _) => { - adt.variants[ctor.variant_index_for_adt(cx, adt)].fields.len() as u64 - } - _ => 0 - } -} - /// This computes the types of the sub patterns that a constructor should be /// expanded to. /// @@ -1397,42 +1738,52 @@ fn constructor_sub_pattern_tys<'a, 'tcx>( ty: Ty<'tcx>, ) -> Vec> { debug!("constructor_sub_pattern_tys({:#?}, {:?})", ctor, ty); - match ty.sty { + match ty.kind { ty::Tuple(ref fs) => fs.into_iter().map(|t| t.expect_ty()).collect(), ty::Slice(ty) | ty::Array(ty, _) => match *ctor { Slice(length) => (0..length).map(|_| ty).collect(), - ConstantValue(_) => vec![], - _ => bug!("bad slice pattern {:?} {:?}", ctor, ty) - } + ConstantValue(..) 
=> vec![], + _ => bug!("bad slice pattern {:?} {:?}", ctor, ty), + }, ty::Ref(_, rty, _) => vec![rty], ty::Adt(adt, substs) => { if adt.is_box() { // Use T as the sub pattern type of Box. vec![substs.type_at(0)] } else { - adt.variants[ctor.variant_index_for_adt(cx, adt)].fields.iter().map(|field| { - let is_visible = adt.is_enum() - || field.vis.is_accessible_from(cx.module, cx.tcx); - if is_visible { - let ty = field.ty(cx.tcx, substs); - match ty.sty { - // If the field type returned is an array of an unknown - // size return an TyErr. - ty::Array(_, len) - if len.try_eval_usize(cx.tcx, cx.param_env).is_none() => - cx.tcx.types.err, - _ => ty, + let variant = &adt.variants[ctor.variant_index_for_adt(cx, adt)]; + let is_non_exhaustive = variant.is_field_list_non_exhaustive() && !cx.is_local(ty); + variant + .fields + .iter() + .map(|field| { + let is_visible = + adt.is_enum() || field.vis.is_accessible_from(cx.module, cx.tcx); + let is_uninhabited = cx.is_uninhabited(field.ty(cx.tcx, substs)); + match (is_visible, is_non_exhaustive, is_uninhabited) { + // Treat all uninhabited types in non-exhaustive variants as `TyErr`. + (_, true, true) => cx.tcx.types.err, + // Treat all non-visible fields as `TyErr`. They can't appear in any + // other pattern from this match (because they are private), so their + // type does not matter - but we don't want to know they are + // uninhabited. + (false, ..) => cx.tcx.types.err, + (true, ..) => { + let ty = field.ty(cx.tcx, substs); + match ty.kind { + // If the field type returned is an array of an unknown size + // return an TyErr. + ty::Array(_, len) + if len.try_eval_usize(cx.tcx, cx.param_env).is_none() => + { + cx.tcx.types.err + } + _ => ty, + } + } } - } else { - // Treat all non-visible fields as TyErr. They - // can't appear in any other pattern from - // this match (because they are private), - // so their type does not matter - but - // we don't want to know they are - // uninhabited. - cx.tcx.types.err - } - }).collect() + }) + .collect() } } _ => vec![], @@ -1446,28 +1797,31 @@ fn slice_pat_covered_by_const<'tcx>( tcx: TyCtxt<'tcx>, _span: Span, const_val: &'tcx ty::Const<'tcx>, - prefix: &[Pattern<'tcx>], - slice: &Option>, - suffix: &[Pattern<'tcx>], + prefix: &[Pat<'tcx>], + slice: &Option>, + suffix: &[Pat<'tcx>], param_env: ty::ParamEnv<'tcx>, ) -> Result { - let data: &[u8] = match (const_val.val, &const_val.ty.sty) { + let data: &[u8] = match (const_val.val, &const_val.ty.kind) { (ConstValue::ByRef { offset, alloc, .. 
}, ty::Array(t, n)) => { assert_eq!(*t, tcx.types.u8); let n = n.eval_usize(tcx, param_env); let ptr = Pointer::new(AllocId(0), offset); alloc.get_bytes(&tcx, ptr, Size::from_bytes(n)).unwrap() - }, + } (ConstValue::Slice { data, start, end }, ty::Slice(t)) => { assert_eq!(*t, tcx.types.u8); let ptr = Pointer::new(AllocId(0), Size::from_bytes(start as u64)); data.get_bytes(&tcx, ptr, Size::from_bytes((end - start) as u64)).unwrap() - }, + } // FIXME(oli-obk): create a way to extract fat pointers from ByRef (_, ty::Slice(_)) => return Ok(false), _ => bug!( "slice_pat_covered_by_const: {:#?}, {:#?}, {:#?}, {:#?}", - const_val, prefix, slice, suffix, + const_val, + prefix, + slice, + suffix, ), }; @@ -1476,12 +1830,13 @@ fn slice_pat_covered_by_const<'tcx>( return Ok(false); } - for (ch, pat) in - data[..prefix.len()].iter().zip(prefix).chain( - data[data.len()-suffix.len()..].iter().zip(suffix)) + for (ch, pat) in data[..prefix.len()] + .iter() + .zip(prefix) + .chain(data[data.len() - suffix.len()..].iter().zip(suffix)) { match pat.kind { - box PatternKind::Constant { value } => { + box PatKind::Constant { value } => { let b = value.eval_bits(tcx, param_env, pat.ty); assert_eq!(b as u8 as u128, b); if b as u8 != *ch { @@ -1499,11 +1854,11 @@ fn slice_pat_covered_by_const<'tcx>( // constructor is a range or constant with an integer type. fn should_treat_range_exhaustively(tcx: TyCtxt<'tcx>, ctor: &Constructor<'tcx>) -> bool { let ty = match ctor { - ConstantValue(value) => value.ty, - ConstantRange(_, _, ty, _) => ty, + ConstantValue(value, _) => value.ty, + ConstantRange(_, _, ty, _, _) => ty, _ => return false, }; - if let ty::Char | ty::Int(_) | ty::Uint(_) = ty.sty { + if let ty::Char | ty::Int(_) | ty::Uint(_) = ty.kind { !ty.is_ptr_sized_integral() || tcx.features().precise_pointer_size_matching } else { false @@ -1542,12 +1897,17 @@ fn should_treat_range_exhaustively(tcx: TyCtxt<'tcx>, ctor: &Constructor<'tcx>) /// boundaries for each interval range, sort them, then create constructors for each new interval /// between every pair of boundary points. (This essentially sums up to performing the intuitive /// merging operation depicted above.) +/// +/// `hir_id` is `None` when we're evaluating the wildcard pattern, do not lint for overlapping in +/// ranges that case. fn split_grouped_constructors<'p, 'tcx>( tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, ctors: Vec>, - &Matrix(ref m): &Matrix<'p, 'tcx>, + matrix: &Matrix<'p, 'tcx>, ty: Ty<'tcx>, + span: Span, + hir_id: Option, ) -> Vec> { let mut split_ctors = Vec::with_capacity(ctors.len()); @@ -1564,7 +1924,7 @@ fn split_grouped_constructors<'p, 'tcx>( /// Represents a border between 2 integers. Because the intervals spanning borders /// must be able to cover every integer, we need to be able to represent /// 2^128 + 1 such borders. - #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] + #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)] enum Border { JustBefore(u128), AfterMax, @@ -1581,35 +1941,59 @@ fn split_grouped_constructors<'p, 'tcx>( vec![from, to].into_iter() } + // Collect the span and range of all the intersecting ranges to lint on likely + // incorrect range patterns. (#63987) + let mut overlaps = vec![]; // `borders` is the set of borders between equivalence classes: each equivalence // class lies between 2 borders. 
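The boundary-splitting described above can be sketched with bare inclusive `(u128, u128)` ranges. This toy version ignores the `AfterMax` border that the real code needs for ranges ending at `u128::MAX`: gather every boundary of the query range and of the row ranges, sort them, and turn each adjacent pair into one sub-range over which all rows behave uniformly.

```rust
fn split(query: (u128, u128), rows: &[(u128, u128)]) -> Vec<(u128, u128)> {
    // Borders: the start of a range, and one-past its end.
    let mut borders: Vec<u128> = vec![query.0, query.1 + 1];
    for &(lo, hi) in rows {
        // Only the part of a row's range inside the query range matters.
        let lo = lo.max(query.0);
        let hi = hi.min(query.1);
        if lo <= hi {
            borders.push(lo);
            borders.push(hi + 1);
        }
    }
    borders.sort_unstable();
    borders.dedup();
    // Every adjacent pair of borders delimits one output interval.
    borders.windows(2).map(|w| (w[0], w[1] - 1)).collect()
}

fn main() {
    // Matching `0..=10` against rows `0..=3` and `2..=6` splits the wildcard's
    // range into `0..=1`, `2..=3`, `4..=6`, `7..=10`.
    assert_eq!(split((0, 10), &[(0, 3), (2, 6)]), vec![(0, 1), (2, 3), (4, 6), (7, 10)]);
}
```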
- let row_borders = m.iter() - .flat_map(|row| IntRange::from_pat(tcx, param_env, row[0])) - .flat_map(|range| ctor_range.intersection(&range)) + let row_borders = matrix + .0 + .iter() + .flat_map(|row| { + IntRange::from_pat(tcx, param_env, row.head()).map(|r| (r, row.len())) + }) + .flat_map(|(range, row_len)| { + let intersection = ctor_range.intersection(&range); + let should_lint = ctor_range.suspicious_intersection(&range); + if let (Some(range), 1, true) = (&intersection, row_len, should_lint) { + // FIXME: for now, only check for overlapping ranges on simple range + // patterns. Otherwise with the current logic the following is detected + // as overlapping: + // match (10u8, true) { + // (0 ..= 125, false) => {} + // (126 ..= 255, false) => {} + // (0 ..= 255, true) => {} + // } + overlaps.push(range.clone()); + } + intersection + }) .flat_map(|range| range_borders(range)); let ctor_borders = range_borders(ctor_range.clone()); let mut borders: Vec<_> = row_borders.chain(ctor_borders).collect(); borders.sort_unstable(); - // We're going to iterate through every pair of borders, making sure that each - // represents an interval of nonnegative length, and convert each such interval - // into a constructor. - for IntRange { range, .. } in borders.windows(2).filter_map(|window| { - match (window[0], window[1]) { + lint_overlapping_patterns(tcx, hir_id, ctor_range, ty, overlaps); + + // We're going to iterate through every adjacent pair of borders, making sure that + // each represents an interval of nonnegative length, and convert each such + // interval into a constructor. + for IntRange { range, .. } in + borders.windows(2).filter_map(|window| match (window[0], window[1]) { (Border::JustBefore(n), Border::JustBefore(m)) => { if n < m { - Some(IntRange { range: n..=(m - 1), ty }) + Some(IntRange { range: n..=(m - 1), ty, span }) } else { None } } (Border::JustBefore(n), Border::AfterMax) => { - Some(IntRange { range: n..=u128::MAX, ty }) + Some(IntRange { range: n..=u128::MAX, ty, span }) } (Border::AfterMax, _) => None, - } - }) { - split_ctors.push(IntRange::range_to_ctor(tcx, ty, range)); + }) + { + split_ctors.push(IntRange::range_to_ctor(tcx, ty, range, span)); } } // Any other constructor can be used unchanged. @@ -1620,33 +2004,32 @@ fn split_grouped_constructors<'p, 'tcx>( split_ctors } -/// Checks whether there exists any shared value in either `ctor` or `pat` by intersecting them. -fn constructor_intersects_pattern<'p, 'tcx>( +fn lint_overlapping_patterns( tcx: TyCtxt<'tcx>, - param_env: ty::ParamEnv<'tcx>, - ctor: &Constructor<'tcx>, - pat: &'p Pattern<'tcx>, -) -> Option; 2]>> { - if should_treat_range_exhaustively(tcx, ctor) { - match (IntRange::from_ctor(tcx, param_env, ctor), IntRange::from_pat(tcx, param_env, pat)) { - (Some(ctor), Some(pat)) => { - ctor.intersection(&pat).map(|_| { - let (pat_lo, pat_hi) = pat.range.into_inner(); - let (ctor_lo, ctor_hi) = ctor.range.into_inner(); - assert!(pat_lo <= ctor_lo && ctor_hi <= pat_hi); - smallvec![] - }) - } - _ => None, - } - } else { - // Fallback for non-ranges and ranges that involve floating-point numbers, which are not - // conveniently handled by `IntRange`. For these cases, the constructor may not be a range - // so intersection actually devolves into being covered by the pattern. 
- match constructor_covered_by_range(tcx, param_env, ctor, pat) { - Ok(true) => Some(smallvec![]), - Ok(false) | Err(ErrorReported) => None, + hir_id: Option, + ctor_range: IntRange<'tcx>, + ty: Ty<'tcx>, + overlaps: Vec>, +) { + if let (true, Some(hir_id)) = (!overlaps.is_empty(), hir_id) { + let mut err = tcx.struct_span_lint_hir( + lint::builtin::OVERLAPPING_PATTERNS, + hir_id, + ctor_range.span, + "multiple patterns covering the same range", + ); + err.span_label(ctor_range.span, "overlapping patterns"); + for int_range in overlaps { + // Use the real type for user display of the ranges: + err.span_label( + int_range.span, + &format!( + "this range overlaps on `{}`", + IntRange::range_to_ctor(tcx, ty, int_range.range, DUMMY_SP).display(tcx), + ), + ); } + err.emit(); } } @@ -1654,16 +2037,17 @@ fn constructor_covered_by_range<'tcx>( tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, ctor: &Constructor<'tcx>, - pat: &Pattern<'tcx>, + pat: &Pat<'tcx>, ) -> Result { let (from, to, end, ty) = match pat.kind { - box PatternKind::Constant { value } => (value, value, RangeEnd::Included, value.ty), - box PatternKind::Range(PatternRange { lo, hi, end }) => (lo, hi, end, lo.ty), + box PatKind::Constant { value } => (value, value, RangeEnd::Included, value.ty), + box PatKind::Range(PatRange { lo, hi, end }) => (lo, hi, end, lo.ty), _ => bug!("`constructor_covered_by_range` called with {:?}", pat), }; trace!("constructor_covered_by_range {:#?}, {:#?}, {:#?}, {}", ctor, from, to, ty); - let cmp_from = |c_from| compare_const_vals(tcx, c_from, from, param_env, ty) - .map(|res| res != Ordering::Less); + let cmp_from = |c_from| { + compare_const_vals(tcx, c_from, from, param_env, ty).map(|res| res != Ordering::Less) + }; let cmp_to = |c_to| compare_const_vals(tcx, c_to, to, param_env, ty); macro_rules! 
some_or_ok { ($e:expr) => { @@ -1674,228 +2058,233 @@ fn constructor_covered_by_range<'tcx>( }; } match *ctor { - ConstantValue(value) => { + ConstantValue(value, _) => { let to = some_or_ok!(cmp_to(value)); - let end = (to == Ordering::Less) || - (end == RangeEnd::Included && to == Ordering::Equal); + let end = + (to == Ordering::Less) || (end == RangeEnd::Included && to == Ordering::Equal); Ok(some_or_ok!(cmp_from(value)) && end) - }, - ConstantRange(from, to, ty, RangeEnd::Included) => { - let to = some_or_ok!(cmp_to(ty::Const::from_bits( - tcx, - to, - ty::ParamEnv::empty().and(ty), - ))); - let end = (to == Ordering::Less) || - (end == RangeEnd::Included && to == Ordering::Equal); + } + ConstantRange(from, to, ty, RangeEnd::Included, _) => { + let to = + some_or_ok!(cmp_to(ty::Const::from_bits(tcx, to, ty::ParamEnv::empty().and(ty),))); + let end = + (to == Ordering::Less) || (end == RangeEnd::Included && to == Ordering::Equal); Ok(some_or_ok!(cmp_from(ty::Const::from_bits( tcx, from, ty::ParamEnv::empty().and(ty), ))) && end) - }, - ConstantRange(from, to, ty, RangeEnd::Excluded) => { - let to = some_or_ok!(cmp_to(ty::Const::from_bits( - tcx, - to, - ty::ParamEnv::empty().and(ty) - ))); - let end = (to == Ordering::Less) || - (end == RangeEnd::Excluded && to == Ordering::Equal); + } + ConstantRange(from, to, ty, RangeEnd::Excluded, _) => { + let to = + some_or_ok!(cmp_to(ty::Const::from_bits(tcx, to, ty::ParamEnv::empty().and(ty)))); + let end = + (to == Ordering::Less) || (end == RangeEnd::Excluded && to == Ordering::Equal); Ok(some_or_ok!(cmp_from(ty::Const::from_bits( tcx, from, - ty::ParamEnv::empty().and(ty))) - ) && end) + ty::ParamEnv::empty().and(ty) + ))) && end) } Single => Ok(true), _ => bug!(), } } -fn patterns_for_variant<'p, 'tcx>( - subpatterns: &'p [FieldPattern<'tcx>], - wild_patterns: &[&'p Pattern<'tcx>]) - -> SmallVec<[&'p Pattern<'tcx>; 2]> -{ - let mut result = SmallVec::from_slice(wild_patterns); +fn patterns_for_variant<'p, 'a: 'p, 'tcx>( + cx: &mut MatchCheckCtxt<'a, 'tcx>, + subpatterns: &'p [FieldPat<'tcx>], + ctor_wild_subpatterns: &[&'p Pat<'tcx>], + is_non_exhaustive: bool, +) -> PatStack<'p, 'tcx> { + let mut result = SmallVec::from_slice(ctor_wild_subpatterns); for subpat in subpatterns { - result[subpat.field.index()] = &subpat.pattern; + if !is_non_exhaustive || !cx.is_uninhabited(subpat.pattern.ty) { + result[subpat.field.index()] = &subpat.pattern; + } } - debug!("patterns_for_variant({:#?}, {:#?}) = {:#?}", subpatterns, wild_patterns, result); - result + debug!( + "patterns_for_variant({:#?}, {:#?}) = {:#?}", + subpatterns, ctor_wild_subpatterns, result + ); + PatStack::from_vec(result) } -/// This is the main specialization step. It expands the first pattern in the given row +/// This is the main specialization step. It expands the pattern /// into `arity` patterns based on the constructor. For most patterns, the step is trivial, /// for instance tuple patterns are flattened and box patterns expand into their inner pattern. +/// Returns `None` if the pattern does not have the given constructor. /// -/// OTOH, slice patterns with a subslice pattern (..tail) can be expanded into multiple +/// OTOH, slice patterns with a subslice pattern (tail @ ..) can be expanded into multiple /// different patterns. /// Structure patterns with a partial wild pattern (Foo { a: 42, .. }) have their missing /// fields filled with wild patterns. 
-fn specialize<'p, 'a: 'p, 'tcx>( +fn specialize_one_pattern<'p, 'a: 'p, 'q: 'p, 'tcx>( cx: &mut MatchCheckCtxt<'a, 'tcx>, - r: &[&'p Pattern<'tcx>], + pat: &'q Pat<'tcx>, constructor: &Constructor<'tcx>, - wild_patterns: &[&'p Pattern<'tcx>], -) -> Option; 2]>> { - let pat = &r[0]; + ctor_wild_subpatterns: &[&'p Pat<'tcx>], +) -> Option> { + let result = match *pat.kind { + PatKind::AscribeUserType { ref subpattern, .. } => PatStack::from_pattern(subpattern) + .specialize_constructor(cx, constructor, ctor_wild_subpatterns), - let head = match *pat.kind { - PatternKind::AscribeUserType { ref subpattern, .. } => { - specialize(cx, ::std::slice::from_ref(&subpattern), constructor, wild_patterns) + PatKind::Binding { .. } | PatKind::Wild => { + Some(PatStack::from_slice(ctor_wild_subpatterns)) } - PatternKind::Binding { .. } | PatternKind::Wild => { - Some(SmallVec::from_slice(wild_patterns)) - } - - PatternKind::Variant { adt_def, variant_index, ref subpatterns, .. } => { + PatKind::Variant { adt_def, variant_index, ref subpatterns, .. } => { let ref variant = adt_def.variants[variant_index]; + let is_non_exhaustive = variant.is_field_list_non_exhaustive() && !cx.is_local(pat.ty); Some(Variant(variant.def_id)) .filter(|variant_constructor| variant_constructor == constructor) - .map(|_| patterns_for_variant(subpatterns, wild_patterns)) + .map(|_| { + patterns_for_variant(cx, subpatterns, ctor_wild_subpatterns, is_non_exhaustive) + }) } - PatternKind::Leaf { ref subpatterns } => { - Some(patterns_for_variant(subpatterns, wild_patterns)) + PatKind::Leaf { ref subpatterns } => { + Some(patterns_for_variant(cx, subpatterns, ctor_wild_subpatterns, false)) } - PatternKind::Deref { ref subpattern } => { - Some(smallvec![subpattern]) - } + PatKind::Deref { ref subpattern } => Some(PatStack::from_pattern(subpattern)), - PatternKind::Constant { value } => { - match *constructor { - Slice(..) => { - // we extract an `Option` for the pointer because slices of zero elements don't - // necessarily point to memory, they are usually just integers. The only time - // they should be pointing to memory is when they are subslices of nonzero - // slices - let (alloc, offset, n, ty) = match value.ty.sty { - ty::Array(t, n) => { - match value.val { - ConstValue::ByRef { offset, alloc, .. } => ( - alloc, - offset, - n.eval_usize(cx.tcx, cx.param_env), - t, - ), - _ => span_bug!( - pat.span, - "array pattern is {:?}", value, - ), - } - }, - ty::Slice(t) => { - match value.val { - ConstValue::Slice { data, start, end } => ( - data, - Size::from_bytes(start as u64), - (end - start) as u64, - t, - ), - ConstValue::ByRef { .. } => { - // FIXME(oli-obk): implement `deref` for `ConstValue` - return None; - }, - _ => span_bug!( - pat.span, - "slice pattern constant must be scalar pair but is {:?}", - value, - ), - } - }, + PatKind::Constant { value } if constructor.is_slice() => { + // We extract an `Option` for the pointer because slices of zero + // elements don't necessarily point to memory, they are usually + // just integers. The only time they should be pointing to memory + // is when they are subslices of nonzero slices. + let (alloc, offset, n, ty) = match value.ty.kind { + ty::Array(t, n) => match value.val { + ConstValue::ByRef { offset, alloc, .. 
} => { + (alloc, offset, n.eval_usize(cx.tcx, cx.param_env), t) + } + _ => span_bug!(pat.span, "array pattern is {:?}", value,), + }, + ty::Slice(t) => { + match value.val { + ConstValue::Slice { data, start, end } => { + (data, Size::from_bytes(start as u64), (end - start) as u64, t) + } + ConstValue::ByRef { .. } => { + // FIXME(oli-obk): implement `deref` for `ConstValue` + return None; + } _ => span_bug!( pat.span, - "unexpected const-val {:?} with ctor {:?}", + "slice pattern constant must be scalar pair but is {:?}", value, - constructor, ), - }; - if wild_patterns.len() as u64 == n { - // convert a constant slice/array pattern to a list of patterns. - let layout = cx.tcx.layout_of(cx.param_env.and(ty)).ok()?; - let ptr = Pointer::new(AllocId(0), offset); - (0..n).map(|i| { - let ptr = ptr.offset(layout.size * i, &cx.tcx).ok()?; - let scalar = alloc.read_scalar( - &cx.tcx, ptr, layout.size, - ).ok()?; - let scalar = scalar.not_undef().ok()?; - let value = ty::Const::from_scalar(cx.tcx, scalar, ty); - let pattern = Pattern { - ty, - span: pat.span, - kind: box PatternKind::Constant { value }, - }; - Some(&*cx.pattern_arena.alloc(pattern)) - }).collect() - } else { - None } } - _ => { - // If the constructor is a: - // Single value: add a row if the constructor equals the pattern. - // Range: add a row if the constructor contains the pattern. - constructor_intersects_pattern(cx.tcx, cx.param_env, constructor, pat) - } + _ => span_bug!( + pat.span, + "unexpected const-val {:?} with ctor {:?}", + value, + constructor, + ), + }; + if ctor_wild_subpatterns.len() as u64 == n { + // convert a constant slice/array pattern to a list of patterns. + let layout = cx.tcx.layout_of(cx.param_env.and(ty)).ok()?; + let ptr = Pointer::new(AllocId(0), offset); + (0..n) + .map(|i| { + let ptr = ptr.offset(layout.size * i, &cx.tcx).ok()?; + let scalar = alloc.read_scalar(&cx.tcx, ptr, layout.size).ok()?; + let scalar = scalar.not_undef().ok()?; + let value = ty::Const::from_scalar(cx.tcx, scalar, ty); + let pattern = + Pat { ty, span: pat.span, kind: box PatKind::Constant { value } }; + Some(&*cx.pattern_arena.alloc(pattern)) + }) + .collect() + } else { + None } } - PatternKind::Range { .. } => { + PatKind::Constant { .. } | PatKind::Range { .. } => { // If the constructor is a: - // Single value: add a row if the pattern contains the constructor. - // Range: add a row if the constructor intersects the pattern. - constructor_intersects_pattern(cx.tcx, cx.param_env, constructor, pat) - } - - PatternKind::Array { ref prefix, ref slice, ref suffix } | - PatternKind::Slice { ref prefix, ref slice, ref suffix } => { - match *constructor { - Slice(..) => { - let pat_len = prefix.len() + suffix.len(); - if let Some(slice_count) = wild_patterns.len().checked_sub(pat_len) { - if slice_count == 0 || slice.is_some() { - Some(prefix.iter().chain( - wild_patterns.iter().map(|p| *p) - .skip(prefix.len()) - .take(slice_count) - .chain(suffix.iter()) - ).collect()) - } else { - None - } - } else { - None - } + // - Single value: add a row if the pattern contains the constructor. + // - Range: add a row if the constructor intersects the pattern. 
+ if should_treat_range_exhaustively(cx.tcx, constructor) { + match ( + IntRange::from_ctor(cx.tcx, cx.param_env, constructor), + IntRange::from_pat(cx.tcx, cx.param_env, pat), + ) { + (Some(ctor), Some(pat)) => ctor.intersection(&pat).map(|_| { + let (pat_lo, pat_hi) = pat.range.into_inner(); + let (ctor_lo, ctor_hi) = ctor.range.into_inner(); + assert!(pat_lo <= ctor_lo && ctor_hi <= pat_hi); + PatStack::default() + }), + _ => None, } - ConstantValue(cv) => { - match slice_pat_covered_by_const( - cx.tcx, pat.span, cv, prefix, slice, suffix, cx.param_env, - ) { - Ok(true) => Some(smallvec![]), - Ok(false) => None, - Err(ErrorReported) => None - } + } else { + // Fallback for non-ranges and ranges that involve + // floating-point numbers, which are not conveniently handled + // by `IntRange`. For these cases, the constructor may not be a + // range so intersection actually devolves into being covered + // by the pattern. + match constructor_covered_by_range(cx.tcx, cx.param_env, constructor, pat) { + Ok(true) => Some(PatStack::default()), + Ok(false) | Err(ErrorReported) => None, } - _ => span_bug!(pat.span, - "unexpected ctor {:?} for slice pat", constructor) } } - PatternKind::Or { .. } => { + PatKind::Array { ref prefix, ref slice, ref suffix } + | PatKind::Slice { ref prefix, ref slice, ref suffix } => match *constructor { + Slice(..) => { + let pat_len = prefix.len() + suffix.len(); + if let Some(slice_count) = ctor_wild_subpatterns.len().checked_sub(pat_len) { + if slice_count == 0 || slice.is_some() { + Some( + prefix + .iter() + .chain( + ctor_wild_subpatterns + .iter() + .map(|p| *p) + .skip(prefix.len()) + .take(slice_count) + .chain(suffix.iter()), + ) + .collect(), + ) + } else { + None + } + } else { + None + } + } + ConstantValue(cv, _) => { + match slice_pat_covered_by_const( + cx.tcx, + pat.span, + cv, + prefix, + slice, + suffix, + cx.param_env, + ) { + Ok(true) => Some(PatStack::default()), + Ok(false) => None, + Err(ErrorReported) => None, + } + } + _ => span_bug!(pat.span, "unexpected ctor {:?} for slice pat", constructor), + }, + + PatKind::Or { .. 
} => { bug!("support for or-patterns has not been fully implemented yet."); } }; - debug!("specialize({:#?}, {:#?}) = {:#?}", r[0], wild_patterns, head); + debug!("specialize({:#?}, {:#?}) = {:#?}", pat, ctor_wild_subpatterns, result); - head.map(|mut head| { - head.extend_from_slice(&r[1 ..]); - head - }) + result } diff --git a/src/librustc_mir/hair/pattern/check_match.rs b/src/librustc_mir/hair/pattern/check_match.rs index 161c58a175..9d370554e8 100644 --- a/src/librustc_mir/hair/pattern/check_match.rs +++ b/src/librustc_mir/hair/pattern/check_match.rs @@ -1,32 +1,29 @@ -use super::_match::{MatchCheckCtxt, Matrix, expand_pattern, is_useful}; use super::_match::Usefulness::*; use super::_match::WitnessPreference::*; +use super::_match::{expand_pattern, is_useful, MatchCheckCtxt, Matrix, PatStack}; -use super::{Pattern, PatternContext, PatternError, PatternKind}; +use super::{PatCtxt, PatKind, PatternError}; -use rustc::middle::borrowck::SignalledError; -use rustc::session::Session; -use rustc::ty::{self, Ty, TyCtxt}; -use rustc::ty::subst::{InternalSubsts, SubstsRef}; use rustc::lint; +use rustc::session::Session; +use rustc::ty::subst::{InternalSubsts, SubstsRef}; +use rustc::ty::{self, Ty, TyCtxt}; use rustc_errors::{Applicability, DiagnosticBuilder}; use rustc::hir::def::*; use rustc::hir::def_id::DefId; -use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap}; -use rustc::hir::ptr::P; -use rustc::hir::{self, Pat, PatKind}; +use rustc::hir::intravisit::{self, NestedVisitorMap, Visitor}; +use rustc::hir::HirId; +use rustc::hir::{self, Pat}; -use smallvec::smallvec; use std::slice; -use syntax_pos::{Span, DUMMY_SP, MultiSpan}; +use syntax_pos::{MultiSpan, Span, DUMMY_SP}; -crate fn check_match(tcx: TyCtxt<'_>, def_id: DefId) -> SignalledError { - let body_id = if let Some(id) = tcx.hir().as_local_hir_id(def_id) { - tcx.hir().body_owned_by(id) - } else { - return SignalledError::NoErrorsSeen; +crate fn check_match(tcx: TyCtxt<'_>, def_id: DefId) { + let body_id = match tcx.hir().as_local_hir_id(def_id) { + None => return, + Some(id) => tcx.hir().body_owned_by(id), }; let mut visitor = MatchVisitor { @@ -34,10 +31,8 @@ crate fn check_match(tcx: TyCtxt<'_>, def_id: DefId) -> SignalledError { tables: tcx.body_tables(body_id), param_env: tcx.param_env(def_id), identity_substs: InternalSubsts::identity_for_item(tcx, def_id), - signalled_error: SignalledError::NoErrorsSeen, }; visitor.visit_body(tcx.hir().body(body_id)); - visitor.signalled_error } fn create_e0004(sess: &Session, sp: Span, error_message: String) -> DiagnosticBuilder<'_> { @@ -49,7 +44,6 @@ struct MatchVisitor<'a, 'tcx> { tables: &'a ty::TypeckTables<'tcx>, param_env: ty::ParamEnv<'tcx>, identity_substs: SubstsRef<'tcx>, - signalled_error: SignalledError, } impl<'tcx> Visitor<'tcx> for MatchVisitor<'_, 'tcx> { @@ -60,7 +54,7 @@ impl<'tcx> Visitor<'tcx> for MatchVisitor<'_, 'tcx> { fn visit_expr(&mut self, ex: &'tcx hir::Expr) { intravisit::walk_expr(self, ex); - if let hir::ExprKind::Match(ref scrut, ref arms, source) = ex.node { + if let hir::ExprKind::Match(ref scrut, ref arms, source) = ex.kind { self.check_match(scrut, arms, source); } } @@ -68,28 +62,29 @@ impl<'tcx> Visitor<'tcx> for MatchVisitor<'_, 'tcx> { fn visit_local(&mut self, loc: &'tcx hir::Local) { intravisit::walk_local(self, loc); - self.check_irrefutable(&loc.pat, match loc.source { - hir::LocalSource::Normal => "local binding", - hir::LocalSource::ForLoopDesugar => "`for` loop binding", - hir::LocalSource::AsyncFn => "async fn binding", - 
hir::LocalSource::AwaitDesugar => "`await` future binding", - }); + let (msg, sp) = match loc.source { + hir::LocalSource::Normal => ("local binding", Some(loc.span)), + hir::LocalSource::ForLoopDesugar => ("`for` loop binding", None), + hir::LocalSource::AsyncFn => ("async fn binding", None), + hir::LocalSource::AwaitDesugar => ("`await` future binding", None), + }; + self.check_irrefutable(&loc.pat, msg, sp); // Check legality of move bindings and `@` patterns. - self.check_patterns(false, slice::from_ref(&loc.pat)); + self.check_patterns(false, &loc.pat); } fn visit_body(&mut self, body: &'tcx hir::Body) { intravisit::walk_body(self, body); for param in &body.params { - self.check_irrefutable(¶m.pat, "function argument"); - self.check_patterns(false, slice::from_ref(¶m.pat)); + self.check_irrefutable(¶m.pat, "function argument", None); + self.check_patterns(false, ¶m.pat); } } } -impl PatternContext<'_, '_> { +impl PatCtxt<'_, '_> { fn report_inlining_errors(&self, pat_span: Span) { for error in &self.errors { match *error { @@ -104,13 +99,15 @@ impl PatternContext<'_, '_> { ::rustc::mir::interpret::struct_error( self.tcx.at(pat_span), "could not evaluate float literal (see issue #31407)", - ).emit(); + ) + .emit(); } PatternError::NonConstPath(span) => { ::rustc::mir::interpret::struct_error( self.tcx.at(span), "runtime values cannot be referenced in patterns", - ).emit(); + ) + .emit(); } } } @@ -122,56 +119,59 @@ impl PatternContext<'_, '_> { } impl<'tcx> MatchVisitor<'_, 'tcx> { - fn check_patterns(&mut self, has_guard: bool, pats: &[P]) { - check_legality_of_move_bindings(self, has_guard, pats); - for pat in pats { - check_legality_of_bindings_in_at_patterns(self, pat); - } + fn check_patterns(&mut self, has_guard: bool, pat: &Pat) { + check_legality_of_move_bindings(self, has_guard, pat); + check_legality_of_bindings_in_at_patterns(self, pat); } - fn check_match( - &mut self, - scrut: &hir::Expr, - arms: &'tcx [hir::Arm], - source: hir::MatchSource - ) { + fn check_match(&mut self, scrut: &hir::Expr, arms: &'tcx [hir::Arm], source: hir::MatchSource) { for arm in arms { // First, check legality of move bindings. - self.check_patterns(arm.guard.is_some(), &arm.pats); + self.check_patterns(arm.guard.is_some(), &arm.pat); - // Second, if there is a guard on each arm, make sure it isn't - // assigning or borrowing anything mutably. - if arm.guard.is_some() { - self.signalled_error = SignalledError::SawSomeError; - } - - // Third, perform some lints. - for pat in &arm.pats { - check_for_bindings_named_same_as_variants(self, pat); - } + // Second, perform some lints. 
+ check_for_bindings_named_same_as_variants(self, &arm.pat); } let module = self.tcx.hir().get_module_parent(scrut.hir_id); MatchCheckCtxt::create_and_enter(self.tcx, self.param_env, module, |ref mut cx| { let mut have_errors = false; - let inlined_arms : Vec<(Vec<_>, _)> = arms.iter().map(|arm| ( - arm.pats.iter().map(|pat| { - let mut patcx = PatternContext::new(self.tcx, - self.param_env.and(self.identity_substs), - self.tables); - patcx.include_lint_checks(); - let pattern = expand_pattern(cx, patcx.lower_pattern(&pat)); - if !patcx.errors.is_empty() { - patcx.report_inlining_errors(pat.span); - have_errors = true; - } - (pattern, &**pat) - }).collect(), - arm.guard.as_ref().map(|g| match g { - hir::Guard::If(ref e) => &**e, + let inlined_arms: Vec<(Vec<_>, _)> = arms + .iter() + .map(|arm| { + ( + // HACK(or_patterns; Centril | dlrobertson): Remove this and + // correctly handle exhaustiveness checking for nested or-patterns. + match &arm.pat.kind { + hir::PatKind::Or(pats) => pats, + _ => std::slice::from_ref(&arm.pat), + } + .iter() + .map(|pat| { + let mut patcx = PatCtxt::new( + self.tcx, + self.param_env.and(self.identity_substs), + self.tables, + ); + patcx.include_lint_checks(); + let pattern = cx + .pattern_arena + .alloc(expand_pattern(cx, patcx.lower_pattern(&pat))) + as &_; + if !patcx.errors.is_empty() { + patcx.report_inlining_errors(pat.span); + have_errors = true; + } + (pattern, &**pat) + }) + .collect(), + arm.guard.as_ref().map(|g| match g { + hir::Guard::If(ref e) => &**e, + }), + ) }) - )).collect(); + .collect(); // Bail out early if inlining failed. if have_errors { @@ -191,23 +191,22 @@ impl<'tcx> MatchVisitor<'_, 'tcx> { let scrutinee_is_uninhabited = if self.tcx.features().exhaustive_patterns { self.tcx.is_ty_uninhabited_from(module, pat_ty) } else { - match pat_ty.sty { + match pat_ty.kind { ty::Never => true, ty::Adt(def, _) => { def_span = self.tcx.hir().span_if_local(def.did); if def.variants.len() < 4 && !def.variants.is_empty() { // keep around to point at the definition of non-covered variants - missing_variants = def.variants.iter() - .map(|variant| variant.ident) - .collect(); + missing_variants = + def.variants.iter().map(|variant| variant.ident).collect(); } let is_non_exhaustive_and_non_local = def.is_variant_list_non_exhaustive() && !def.did.is_local(); !(is_non_exhaustive_and_non_local) && def.variants.is_empty() - }, - _ => false + } + _ => false, } }; if !scrutinee_is_uninhabited { @@ -215,18 +214,25 @@ impl<'tcx> MatchVisitor<'_, 'tcx> { let mut err = create_e0004( self.tcx.sess, scrut.span, - format!("non-exhaustive patterns: {}", match missing_variants.len() { - 0 => format!("type `{}` is non-empty", pat_ty), - 1 => format!( - "pattern `{}` of type `{}` is not handled", - missing_variants[0].name, - pat_ty, - ), - _ => format!("multiple patterns of type `{}` are not handled", pat_ty), - }), + format!( + "non-exhaustive patterns: {}", + match missing_variants.len() { + 0 => format!("type `{}` is non-empty", pat_ty), + 1 => format!( + "pattern `{}` of type `{}` is not handled", + missing_variants[0].name, pat_ty, + ), + _ => format!( + "multiple patterns of type `{}` are not handled", + pat_ty + ), + } + ), + ); + err.help( + "ensure that all possible cases are being handled, \ + possibly by adding wildcards or more match arms", ); - err.help("ensure that all possible cases are being handled, \ - possibly by adding wildcards or more match arms"); if let Some(sp) = def_span { err.span_label(sp, format!("`{}` defined here", pat_ty)); } @@ 
-244,74 +250,133 @@ impl<'tcx> MatchVisitor<'_, 'tcx> { .iter() .filter(|&&(_, guard)| guard.is_none()) .flat_map(|arm| &arm.0) - .map(|pat| smallvec![pat.0]) + .map(|pat| PatStack::from_pattern(pat.0)) .collect(); let scrut_ty = self.tables.node_type(scrut.hir_id); - check_exhaustive(cx, scrut_ty, scrut.span, &matrix); + check_exhaustive(cx, scrut_ty, scrut.span, &matrix, scrut.hir_id); }) } - fn check_irrefutable(&self, pat: &'tcx Pat, origin: &str) { + fn check_irrefutable(&self, pat: &'tcx Pat, origin: &str, sp: Option) { let module = self.tcx.hir().get_module_parent(pat.hir_id); MatchCheckCtxt::create_and_enter(self.tcx, self.param_env, module, |ref mut cx| { - let mut patcx = PatternContext::new(self.tcx, - self.param_env.and(self.identity_substs), - self.tables); + let mut patcx = + PatCtxt::new(self.tcx, self.param_env.and(self.identity_substs), self.tables); patcx.include_lint_checks(); let pattern = patcx.lower_pattern(pat); let pattern_ty = pattern.ty; - let pats: Matrix<'_, '_> = vec![smallvec![ - expand_pattern(cx, pattern) - ]].into_iter().collect(); + let pattern = expand_pattern(cx, pattern); + let pats: Matrix<'_, '_> = vec![PatStack::from_pattern(&pattern)].into_iter().collect(); - let witnesses = match check_not_useful(cx, pattern_ty, &pats) { + let witnesses = match check_not_useful(cx, pattern_ty, &pats, pat.hir_id) { Ok(_) => return, Err(err) => err, }; let joined_patterns = joined_uncovered_patterns(&witnesses); let mut err = struct_span_err!( - self.tcx.sess, pat.span, E0005, + self.tcx.sess, + pat.span, + E0005, "refutable pattern in {}: {} not covered", - origin, joined_patterns + origin, + joined_patterns ); - err.span_label(pat.span, match &pat.node { - PatKind::Path(hir::QPath::Resolved(None, path)) - if path.segments.len() == 1 && path.segments[0].args.is_none() => { - format!("interpreted as {} {} pattern, not new variable", - path.res.article(), path.res.descr()) + let suggest_if_let = match &pat.kind { + hir::PatKind::Path(hir::QPath::Resolved(None, path)) + if path.segments.len() == 1 && path.segments[0].args.is_none() => + { + const_not_var(&mut err, cx.tcx, pat, path); + false } - _ => pattern_not_convered_label(&witnesses, &joined_patterns), - }); + _ => { + err.span_label( + pat.span, + pattern_not_covered_label(&witnesses, &joined_patterns), + ); + true + } + }; + + if let (Some(span), true) = (sp, suggest_if_let) { + err.note( + "`let` bindings require an \"irrefutable pattern\", like a `struct` or \ + an `enum` with only one variant", + ); + if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) { + err.span_suggestion( + span, + "you might want to use `if let` to ignore the variant that isn't matched", + format!("if {} {{ /* */ }}", &snippet[..snippet.len() - 1]), + Applicability::HasPlaceholders, + ); + } + err.note( + "for more information, visit \ + https://doc.rust-lang.org/book/ch18-02-refutability.html", + ); + } + adt_defined_here(cx, &mut err, pattern_ty, &witnesses); err.emit(); }); } } +/// A path pattern was interpreted as a constant, not a new variable. +/// This caused an irrefutable match failure in e.g. `let`. 
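
For reference, the two situations diagnosed above can be reproduced with ordinary user code; the snippet below is illustrative only (the names `MAX` and `demo` are made up). The first case is a refutable pattern in a `let`, for which the `if let` rewrite is suggested; the second is a lone path pattern that resolves to a constant, which `const_not_var` explains is a comparison rather than a new binding.

const MAX: i32 = 10;

fn demo(opt: Option<i32>, n: i32) {
    // E0005: `Some(x)` does not cover `None`, so a plain `let` is refutable.
    //     let Some(x) = opt;
    // The suggested rewrite uses `if let` to ignore the uncovered variant:
    if let Some(x) = opt {
        println!("got {}", x);
    }

    // A path pattern that resolves to a constant is interpreted as a
    // comparison against `MAX`, not as a fresh variable named `MAX`;
    // using it as the pattern of a `let` would therefore be refutable
    // and trigger the `const_not_var` diagnostic above.
    match n {
        MAX => println!("exactly the maximum"),
        _ => println!("something else"),
    }
}

fn main() {
    demo(Some(3), 10);
}
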
+fn const_not_var(err: &mut DiagnosticBuilder<'_>, tcx: TyCtxt<'_>, pat: &Pat, path: &hir::Path) { + let descr = path.res.descr(); + err.span_label( + pat.span, + format!("interpreted as {} {} pattern, not a new variable", path.res.article(), descr,), + ); + + err.span_suggestion( + pat.span, + "introduce a variable instead", + format!("{}_var", path.segments[0].ident).to_lowercase(), + // Cannot use `MachineApplicable` as it's not really *always* correct + // because there may be such an identifier in scope or the user maybe + // really wanted to match against the constant. This is quite unlikely however. + Applicability::MaybeIncorrect, + ); + + if let Some(span) = tcx.hir().res_span(path.res) { + err.span_label(span, format!("{} defined here", descr)); + } +} + fn check_for_bindings_named_same_as_variants(cx: &MatchVisitor<'_, '_>, pat: &Pat) { pat.walk(|p| { - if let PatKind::Binding(_, _, ident, None) = p.node { + if let hir::PatKind::Binding(_, _, ident, None) = p.kind { if let Some(&bm) = cx.tables.pat_binding_modes().get(p.hir_id) { if bm != ty::BindByValue(hir::MutImmutable) { // Nothing to check. return true; } let pat_ty = cx.tables.pat_ty(p); - if let ty::Adt(edef, _) = pat_ty.sty { - if edef.is_enum() && edef.variants.iter().any(|variant| { - variant.ident == ident && variant.ctor_kind == CtorKind::Const - }) { + if let ty::Adt(edef, _) = pat_ty.kind { + if edef.is_enum() + && edef.variants.iter().any(|variant| { + variant.ident == ident && variant.ctor_kind == CtorKind::Const + }) + { let ty_path = cx.tcx.def_path_str(edef.did); - let mut err = struct_span_warn!(cx.tcx.sess, p.span, E0170, + let mut err = struct_span_warn!( + cx.tcx.sess, + p.span, + E0170, "pattern binding `{}` is named the same as one \ - of the variants of the type `{}`", - ident, ty_path); + of the variants of the type `{}`", + ident, + ty_path + ); err.span_suggestion( p.span, "to match on the variant, qualify the path", format!("{}::{}", ty_path, ident), - Applicability::MachineApplicable + Applicability::MachineApplicable, ); err.emit(); } @@ -326,34 +391,33 @@ fn check_for_bindings_named_same_as_variants(cx: &MatchVisitor<'_, '_>, pat: &Pa /// Checks for common cases of "catchall" patterns that may not be intended as such. fn pat_is_catchall(pat: &Pat) -> bool { - match pat.node { - PatKind::Binding(.., None) => true, - PatKind::Binding(.., Some(ref s)) => pat_is_catchall(s), - PatKind::Ref(ref s, _) => pat_is_catchall(s), - PatKind::Tuple(ref v, _) => v.iter().all(|p| { - pat_is_catchall(&p) - }), - _ => false + match pat.kind { + hir::PatKind::Binding(.., None) => true, + hir::PatKind::Binding(.., Some(ref s)) => pat_is_catchall(s), + hir::PatKind::Ref(ref s, _) => pat_is_catchall(s), + hir::PatKind::Tuple(ref v, _) => v.iter().all(|p| pat_is_catchall(&p)), + _ => false, } } // Check for unreachable patterns fn check_arms<'tcx>( cx: &mut MatchCheckCtxt<'_, 'tcx>, - arms: &[(Vec<(&Pattern<'tcx>, &hir::Pat)>, Option<&hir::Expr>)], + arms: &[(Vec<(&super::Pat<'tcx>, &hir::Pat)>, Option<&hir::Expr>)], source: hir::MatchSource, ) { let mut seen = Matrix::empty(); let mut catchall = None; for (arm_index, &(ref pats, guard)) in arms.iter().enumerate() { for &(pat, hir_pat) in pats { - let v = smallvec![pat]; + let v = PatStack::from_pattern(pat); - match is_useful(cx, &seen, &v, LeaveOutWitness) { + match is_useful(cx, &seen, &v, LeaveOutWitness, hir_pat.hir_id) { NotUseful => { match source { - hir::MatchSource::IfDesugar { .. 
} | - hir::MatchSource::WhileDesugar => bug!(), + hir::MatchSource::IfDesugar { .. } | hir::MatchSource::WhileDesugar => { + bug!() + } hir::MatchSource::IfLetDesugar { .. } => { cx.tcx.lint_hir( lint::builtin::IRREFUTABLE_LET_PATTERNS, @@ -370,9 +434,11 @@ fn check_arms<'tcx>( 0 => { cx.tcx.lint_hir( lint::builtin::UNREACHABLE_PATTERNS, - hir_pat.hir_id, pat.span, - "unreachable pattern"); - }, + hir_pat.hir_id, + pat.span, + "unreachable pattern", + ); + } // The arm with the wildcard pattern. 1 => { cx.tcx.lint_hir( @@ -381,13 +447,12 @@ fn check_arms<'tcx>( pat.span, "irrefutable while-let pattern", ); - }, + } _ => bug!(), } } - hir::MatchSource::ForLoopDesugar | - hir::MatchSource::Normal => { + hir::MatchSource::ForLoopDesugar | hir::MatchSource::Normal => { let mut err = cx.tcx.struct_span_lint_hir( lint::builtin::UNREACHABLE_PATTERNS, hir_pat.hir_id, @@ -404,12 +469,11 @@ fn check_arms<'tcx>( // Unreachable patterns in try and await expressions occur when one of // the arms are an uninhabited type. Which is OK. - hir::MatchSource::AwaitDesugar | - hir::MatchSource::TryDesugar => {} + hir::MatchSource::AwaitDesugar | hir::MatchSource::TryDesugar => {} } } Useful => (), - UsefulWithWitness(_) => bug!() + UsefulWithWitness(_) => bug!(), } if guard.is_none() { seen.push(v); @@ -425,9 +489,10 @@ fn check_not_useful( cx: &mut MatchCheckCtxt<'_, 'tcx>, ty: Ty<'tcx>, matrix: &Matrix<'_, 'tcx>, -) -> Result<(), Vec>> { - let wild_pattern = Pattern { ty, span: DUMMY_SP, kind: box PatternKind::Wild }; - match is_useful(cx, matrix, &[&wild_pattern], ConstructWitness) { + hir_id: HirId, +) -> Result<(), Vec>> { + let wild_pattern = super::Pat { ty, span: DUMMY_SP, kind: box PatKind::Wild }; + match is_useful(cx, matrix, &PatStack::from_pattern(&wild_pattern), ConstructWitness, hir_id) { NotUseful => Ok(()), // This is good, wildcard pattern isn't reachable. 
UsefulWithWitness(pats) => Err(if pats.is_empty() { vec![wild_pattern] @@ -443,27 +508,29 @@ fn check_exhaustive<'tcx>( scrut_ty: Ty<'tcx>, sp: Span, matrix: &Matrix<'_, 'tcx>, + hir_id: HirId, ) { - let witnesses = match check_not_useful(cx, scrut_ty, matrix) { + let witnesses = match check_not_useful(cx, scrut_ty, matrix, hir_id) { Ok(_) => return, Err(err) => err, }; let joined_patterns = joined_uncovered_patterns(&witnesses); let mut err = create_e0004( - cx.tcx.sess, sp, + cx.tcx.sess, + sp, format!("non-exhaustive patterns: {} not covered", joined_patterns), ); - err.span_label(sp, pattern_not_convered_label(&witnesses, &joined_patterns)); + err.span_label(sp, pattern_not_covered_label(&witnesses, &joined_patterns)); adt_defined_here(cx, &mut err, scrut_ty, &witnesses); err.help( "ensure that all possible cases are being handled, \ - possibly by adding wildcards or more match arms" + possibly by adding wildcards or more match arms", ) .emit(); } -fn joined_uncovered_patterns(witnesses: &[Pattern<'_>]) -> String { +fn joined_uncovered_patterns(witnesses: &[super::Pat<'_>]) -> String { const LIMIT: usize = 3; match witnesses { [] => bug!(), @@ -480,7 +547,7 @@ fn joined_uncovered_patterns(witnesses: &[Pattern<'_>]) -> String { } } -fn pattern_not_convered_label(witnesses: &[Pattern<'_>], joined_patterns: &str) -> String { +fn pattern_not_covered_label(witnesses: &[super::Pat<'_>], joined_patterns: &str) -> String { format!("pattern{} {} not covered", rustc_errors::pluralise!(witnesses.len()), joined_patterns) } @@ -489,10 +556,10 @@ fn adt_defined_here( cx: &MatchCheckCtxt<'_, '_>, err: &mut DiagnosticBuilder<'_>, ty: Ty<'_>, - witnesses: &[Pattern<'_>], + witnesses: &[super::Pat<'_>], ) { let ty = ty.peel_refs(); - if let ty::Adt(def, _) = ty.sty { + if let ty::Adt(def, _) = ty.kind { if let Some(sp) = cx.tcx.hir().span_if_local(def.did) { err.span_label(sp, format!("`{}` defined here", ty)); } @@ -505,13 +572,13 @@ fn adt_defined_here( } } -fn maybe_point_at_variant(ty: Ty<'_>, patterns: &[Pattern<'_>]) -> Vec { +fn maybe_point_at_variant(ty: Ty<'_>, patterns: &[super::Pat<'_>]) -> Vec { let mut covered = vec![]; - if let ty::Adt(def, _) = ty.sty { + if let ty::Adt(def, _) = ty.kind { // Don't point at variants that have already been covered due to other patterns to avoid // visual clutter. for pattern in patterns { - use PatternKind::{AscribeUserType, Deref, Variant, Or, Leaf}; + use PatKind::{AscribeUserType, Deref, Leaf, Or, Variant}; match &*pattern.kind { AscribeUserType { subpattern, .. } | Deref { subpattern } => { covered.extend(maybe_point_at_variant(ty, slice::from_ref(&subpattern))); @@ -523,13 +590,15 @@ fn maybe_point_at_variant(ty: Ty<'_>, patterns: &[Pattern<'_>]) -> Vec { } covered.push(sp); - let pats = subpatterns.iter() + let pats = subpatterns + .iter() .map(|field_pattern| field_pattern.pattern.clone()) .collect::>(); covered.extend(maybe_point_at_variant(ty, &pats)); } Leaf { subpatterns } => { - let pats = subpatterns.iter() + let pats = subpatterns + .iter() .map(|field_pattern| field_pattern.pattern.clone()) .collect::>(); covered.extend(maybe_point_at_variant(ty, &pats)); @@ -545,78 +614,60 @@ fn maybe_point_at_variant(ty: Ty<'_>, patterns: &[Pattern<'_>]) -> Vec { covered } -// Legality of move bindings checking -fn check_legality_of_move_bindings( - cx: &mut MatchVisitor<'_, '_>, - has_guard: bool, - pats: &[P], -) { +// Check the legality of legality of by-move bindings. 
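
The "legality of by-move bindings" checked below corresponds to two user-facing errors; the example is illustrative (the `demo` function is made up) and the offending arms are left commented out since they do not compile.

fn demo(opt: Option<(String, String)>) {
    match opt {
        // E0007: `whole @ Some(inner)` would move the value into `whole`
        // while also moving part of it into `inner`.
        //     whole @ Some(inner) => drop((whole, inner)),

        // E0009: `a` binds `String` by move while `ref b` binds by
        // reference inside the same pattern.
        //     Some((a, ref b)) => { let _ = (a, b); }

        Some((a, b)) => {
            let _ = (a, b);
        }
        None => {}
    }
}

fn main() {
    demo(Some(("left".to_string(), "right".to_string())));
}
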
+fn check_legality_of_move_bindings(cx: &mut MatchVisitor<'_, '_>, has_guard: bool, pat: &Pat) { let mut by_ref_span = None; - for pat in pats { - pat.each_binding(|_, hir_id, span, _path| { - if let Some(&bm) = cx.tables.pat_binding_modes().get(hir_id) { - if let ty::BindByReference(..) = bm { - by_ref_span = Some(span); + pat.each_binding(|_, hir_id, span, _| { + if let Some(&bm) = cx.tables.pat_binding_modes().get(hir_id) { + if let ty::BindByReference(..) = bm { + by_ref_span = Some(span); + } + } else { + cx.tcx.sess.delay_span_bug(pat.span, "missing binding mode"); + } + }); + + let span_vec = &mut Vec::new(); + let mut check_move = |p: &Pat, sub: Option<&Pat>| { + // Check legality of moving out of the enum. + // + // `x @ Foo(..)` is legal, but `x @ Foo(y)` isn't. + if sub.map_or(false, |p| p.contains_bindings()) { + struct_span_err!(cx.tcx.sess, p.span, E0007, "cannot bind by-move with sub-bindings") + .span_label(p.span, "binds an already bound by-move value by moving it") + .emit(); + } else if !has_guard && by_ref_span.is_some() { + span_vec.push(p.span); + } + }; + + pat.walk(|p| { + if let hir::PatKind::Binding(.., sub) = &p.kind { + if let Some(&bm) = cx.tables.pat_binding_modes().get(p.hir_id) { + if let ty::BindByValue(..) = bm { + let pat_ty = cx.tables.node_type(p.hir_id); + if !pat_ty.is_copy_modulo_regions(cx.tcx, cx.param_env, pat.span) { + check_move(p, sub.as_deref()); + } } } else { cx.tcx.sess.delay_span_bug(pat.span, "missing binding mode"); } - }) - } - let span_vec = &mut Vec::new(); - let check_move = | - cx: &mut MatchVisitor<'_, '_>, - p: &Pat, - sub: Option<&Pat>, - span_vec: &mut Vec, - | { - // check legality of moving out of the enum - - // x @ Foo(..) is legal, but x @ Foo(y) isn't. - if sub.map_or(false, |p| p.contains_bindings()) { - struct_span_err!(cx.tcx.sess, p.span, E0007, - "cannot bind by-move with sub-bindings") - .span_label(p.span, "binds an already bound by-move value by moving it") - .emit(); - } else if !has_guard { - if let Some(_by_ref_span) = by_ref_span { - span_vec.push(p.span); - } } - }; + true + }); - for pat in pats { - pat.walk(|p| { - if let PatKind::Binding(_, _, _, ref sub) = p.node { - if let Some(&bm) = cx.tables.pat_binding_modes().get(p.hir_id) { - match bm { - ty::BindByValue(..) => { - let pat_ty = cx.tables.node_type(p.hir_id); - if !pat_ty.is_copy_modulo_regions(cx.tcx, cx.param_env, pat.span) { - check_move(cx, p, sub.as_ref().map(|p| &**p), span_vec); - } - } - _ => {} - } - } else { - cx.tcx.sess.delay_span_bug(pat.span, "missing binding mode"); - } - } - true - }); - } - if !span_vec.is_empty(){ - let span = MultiSpan::from_spans(span_vec.clone()); + if !span_vec.is_empty() { let mut err = struct_span_err!( cx.tcx.sess, - span, + MultiSpan::from_spans(span_vec.clone()), E0009, "cannot bind by-move and by-ref in the same pattern", ); if let Some(by_ref_span) = by_ref_span { err.span_label(by_ref_span, "both by-ref and by-move used"); } - for span in span_vec.iter(){ + for span in span_vec.iter() { err.span_label(*span, "by-move pattern here"); } err.emit(); @@ -627,12 +678,12 @@ fn check_legality_of_move_bindings( /// because of the way rvalues are handled in the borrow check. (See issue /// #14587.) 
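
As a user-level illustration of the restriction this visitor enforces (the `demo` function is made up): at the time of this change, any binding introduced after an `@` was rejected with E0303, regardless of binding mode.

fn demo(opt: Option<i32>) {
    match opt {
        // E0303: `n` is a pattern binding introduced after the `@` in
        // `whole @ ...`, which this visitor rejects.
        //     whole @ Some(n) => println!("{:?} contains {}", whole, n),
        Some(n) => println!("got {}", n),
        None => {}
    }
}

fn main() {
    demo(Some(1));
}
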
fn check_legality_of_bindings_in_at_patterns(cx: &MatchVisitor<'_, '_>, pat: &Pat) { - AtBindingPatternVisitor { cx: cx, bindings_allowed: true }.visit_pat(pat); + AtBindingPatternVisitor { cx, bindings_allowed: true }.visit_pat(pat); } struct AtBindingPatternVisitor<'a, 'b, 'tcx> { cx: &'a MatchVisitor<'b, 'tcx>, - bindings_allowed: bool + bindings_allowed: bool, } impl<'v> Visitor<'v> for AtBindingPatternVisitor<'_, '_, '_> { @@ -641,13 +692,17 @@ impl<'v> Visitor<'v> for AtBindingPatternVisitor<'_, '_, '_> { } fn visit_pat(&mut self, pat: &Pat) { - match pat.node { - PatKind::Binding(.., ref subpat) => { + match pat.kind { + hir::PatKind::Binding(.., ref subpat) => { if !self.bindings_allowed { - struct_span_err!(self.cx.tcx.sess, pat.span, E0303, - "pattern bindings are not allowed after an `@`") - .span_label(pat.span, "not allowed after `@`") - .emit(); + struct_span_err!( + self.cx.tcx.sess, + pat.span, + E0303, + "pattern bindings are not allowed after an `@`" + ) + .span_label(pat.span, "not allowed after `@`") + .emit(); } if subpat.is_some() { diff --git a/src/librustc_mir/hair/pattern/const_to_pat.rs b/src/librustc_mir/hair/pattern/const_to_pat.rs new file mode 100644 index 0000000000..bfc539639d --- /dev/null +++ b/src/librustc_mir/hair/pattern/const_to_pat.rs @@ -0,0 +1,282 @@ +use crate::const_eval::const_variant_index; + +use rustc::hir; +use rustc::lint; +use rustc::mir::Field; +use rustc::infer::InferCtxt; +use rustc::traits::{ObligationCause, PredicateObligation}; +use rustc::ty::{self, Ty, TyCtxt}; + +use rustc_index::vec::Idx; + +use syntax_pos::Span; + + +use std::cell::Cell; + +use super::{FieldPat, Pat, PatCtxt, PatKind}; + +impl<'a, 'tcx> PatCtxt<'a, 'tcx> { + /// Converts an evaluated constant to a pattern (if possible). + /// This means aggregate values (like structs and enums) are converted + /// to a pattern that matches the value (as if you'd compared via structural equality). + pub(super) fn const_to_pat( + &self, + cv: &'tcx ty::Const<'tcx>, + id: hir::HirId, + span: Span, + ) -> Pat<'tcx> { + debug!("const_to_pat: cv={:#?} id={:?}", cv, id); + debug!("const_to_pat: cv.ty={:?} span={:?}", cv.ty, span); + + self.tcx.infer_ctxt().enter(|infcx| { + let mut convert = ConstToPat::new(self, id, span, infcx); + convert.to_pat(cv) + }) + } +} + +struct ConstToPat<'a, 'tcx> { + id: hir::HirId, + span: Span, + param_env: ty::ParamEnv<'tcx>, + + // This tracks if we signal some hard error for a given const value, so that + // we will not subsequently issue an irrelevant lint for the same const + // value. + saw_const_match_error: Cell, + + // inference context used for checking `T: Structural` bounds. 
+ infcx: InferCtxt<'a, 'tcx>, + + include_lint_checks: bool, +} + +impl<'a, 'tcx> ConstToPat<'a, 'tcx> { + fn new(pat_ctxt: &PatCtxt<'_, 'tcx>, + id: hir::HirId, + span: Span, + infcx: InferCtxt<'a, 'tcx>) -> Self { + ConstToPat { + id, span, infcx, + param_env: pat_ctxt.param_env, + include_lint_checks: pat_ctxt.include_lint_checks, + saw_const_match_error: Cell::new(false), + } + } + + fn tcx(&self) -> TyCtxt<'tcx> { self.infcx.tcx } + + fn search_for_structural_match_violation(&self, + ty: Ty<'tcx>) + -> Option> + { + ty::search_for_structural_match_violation(self.id, self.span, self.tcx(), ty) + } + + fn type_marked_structural(&self, ty: Ty<'tcx>) -> bool { + ty::type_marked_structural(self.id, self.span, &self.infcx, ty) + } + + fn to_pat(&mut self, cv: &'tcx ty::Const<'tcx>) -> Pat<'tcx> { + // This method is just a wrapper handling a validity check; the heavy lifting is + // performed by the recursive `recur` method, which is not meant to be + // invoked except by this method. + // + // once indirect_structural_match is a full fledged error, this + // level of indirection can be eliminated + + let inlined_const_as_pat = self.recur(cv); + + if self.include_lint_checks && !self.saw_const_match_error.get() { + // If we were able to successfully convert the const to some pat, + // double-check that all types in the const implement `Structural`. + + let structural = self.search_for_structural_match_violation(cv.ty); + debug!("search_for_structural_match_violation cv.ty: {:?} returned: {:?}", + cv.ty, structural); + if let Some(non_sm_ty) = structural { + let adt_def = match non_sm_ty { + ty::NonStructuralMatchTy::Adt(adt_def) => adt_def, + ty::NonStructuralMatchTy::Param => + bug!("use of constant whose type is a parameter inside a pattern"), + }; + let path = self.tcx().def_path_str(adt_def.did); + let msg = format!( + "to use a constant of type `{}` in a pattern, \ + `{}` must be annotated with `#[derive(PartialEq, Eq)]`", + path, + path, + ); + + // double-check there even *is* a semantic `PartialEq` to dispatch to. + // + // (If there isn't, then we can safely issue a hard + // error, because that's never worked, due to compiler + // using `PartialEq::eq` in this scenario in the past.) + // + // Note: To fix rust-lang/rust#65466, one could lift this check + // *before* any structural-match checking, and unconditionally error + // if `PartialEq` is not implemented. However, that breaks stable + // code at the moment, because types like `for <'a> fn(&'a ())` do + // not *yet* implement `PartialEq`. So for now we leave this here. + let ty_is_partial_eq: bool = { + let partial_eq_trait_id = self.tcx().lang_items().eq_trait().unwrap(); + let obligation: PredicateObligation<'_> = + self.tcx().predicate_for_trait_def( + self.param_env, + ObligationCause::misc(self.span, self.id), + partial_eq_trait_id, + 0, + cv.ty, + &[]); + // FIXME: should this call a `predicate_must_hold` variant instead? + self.infcx.predicate_may_hold(&obligation) + }; + + if !ty_is_partial_eq { + // span_fatal avoids ICE from resolution of non-existent method (rare case). + self.tcx().sess.span_fatal(self.span, &msg); + } else { + self.tcx().lint_hir(lint::builtin::INDIRECT_STRUCTURAL_MATCH, + self.id, + self.span, + &msg); + } + } + } + + inlined_const_as_pat + } + + // Recursive helper for `to_pat`; invoke that (instead of calling this directly). 
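
The structural-match requirement enforced by `to_pat` can be illustrated with ordinary user code (the types `Derived` and `Manual` below are made up): a constant used as a pattern must have a type annotated with `#[derive(PartialEq, Eq)]`; a hand-written `PartialEq` impl is not enough and is reported with the message built above (or linted as `indirect_structural_match` when reached through a reference).

#[derive(PartialEq, Eq)]
struct Derived(u32);
const DERIVED_ONE: Derived = Derived(1);

struct Manual(u32);
impl PartialEq for Manual {
    fn eq(&self, other: &Self) -> bool {
        self.0 == other.0
    }
}
impl Eq for Manual {}
const MANUAL_ONE: Manual = Manual(1);

fn demo(a: Derived, b: Manual) {
    // Fine: `Derived` derives `PartialEq`/`Eq`, so its constants are
    // structurally comparable and may appear in pattern position.
    match a {
        DERIVED_ONE => println!("matched the structurally-equal constant"),
        _ => {}
    }

    // Rejected: `Manual` only has a hand-written `PartialEq`, so
    //     match b { MANUAL_ONE => {} _ => {} }
    // produces "to use a constant of type `Manual` in a pattern,
    // `Manual` must be annotated with `#[derive(PartialEq, Eq)]`".
    // Ordinary `==` comparison is still allowed:
    println!("equal to MANUAL_ONE: {}", b == MANUAL_ONE);
}

fn main() {
    demo(Derived(1), Manual(1));
}
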
+ fn recur(&self, cv: &'tcx ty::Const<'tcx>) -> Pat<'tcx> { + let id = self.id; + let span = self.span; + let tcx = self.tcx(); + let param_env = self.param_env; + + let adt_subpattern = |i, variant_opt| { + let field = Field::new(i); + let val = crate::const_eval::const_field( + tcx, param_env, variant_opt, field, cv + ); + self.recur(val) + }; + let adt_subpatterns = |n, variant_opt| { + (0..n).map(|i| { + let field = Field::new(i); + FieldPat { + field, + pattern: adt_subpattern(i, variant_opt), + } + }).collect::>() + }; + + + let kind = match cv.ty.kind { + ty::Float(_) => { + tcx.lint_hir( + ::rustc::lint::builtin::ILLEGAL_FLOATING_POINT_LITERAL_PATTERN, + id, + span, + "floating-point types cannot be used in patterns", + ); + PatKind::Constant { + value: cv, + } + } + ty::Adt(adt_def, _) if adt_def.is_union() => { + // Matching on union fields is unsafe, we can't hide it in constants + self.saw_const_match_error.set(true); + tcx.sess.span_err(span, "cannot use unions in constant patterns"); + PatKind::Wild + } + // keep old code until future-compat upgraded to errors. + ty::Adt(adt_def, _) if !self.type_marked_structural(cv.ty) => { + debug!("adt_def {:?} has !type_marked_structural for cv.ty: {:?}", + adt_def, cv.ty); + let path = tcx.def_path_str(adt_def.did); + let msg = format!( + "to use a constant of type `{}` in a pattern, \ + `{}` must be annotated with `#[derive(PartialEq, Eq)]`", + path, + path, + ); + self.saw_const_match_error.set(true); + tcx.sess.span_err(span, &msg); + PatKind::Wild + } + // keep old code until future-compat upgraded to errors. + ty::Ref(_, adt_ty @ ty::TyS { kind: ty::Adt(_, _), .. }, _) + if !self.type_marked_structural(adt_ty) => + { + let adt_def = if let ty::Adt(adt_def, _) = adt_ty.kind { + adt_def + } else { + unreachable!() + }; + + debug!("adt_def {:?} has !type_marked_structural for adt_ty: {:?}", + adt_def, adt_ty); + + // HACK(estebank): Side-step ICE #53708, but anything other than erroring here + // would be wrong. Returnging `PatKind::Wild` is not technically correct. 
+ let path = tcx.def_path_str(adt_def.did); + let msg = format!( + "to use a constant of type `{}` in a pattern, \ + `{}` must be annotated with `#[derive(PartialEq, Eq)]`", + path, + path, + ); + self.saw_const_match_error.set(true); + tcx.sess.span_err(span, &msg); + PatKind::Wild + } + ty::Adt(adt_def, substs) if adt_def.is_enum() => { + let variant_index = const_variant_index(tcx, self.param_env, cv); + let subpatterns = adt_subpatterns( + adt_def.variants[variant_index].fields.len(), + Some(variant_index), + ); + PatKind::Variant { + adt_def, + substs, + variant_index, + subpatterns, + } + } + ty::Adt(adt_def, _) => { + let struct_var = adt_def.non_enum_variant(); + PatKind::Leaf { + subpatterns: adt_subpatterns(struct_var.fields.len(), None), + } + } + ty::Tuple(fields) => { + PatKind::Leaf { + subpatterns: adt_subpatterns(fields.len(), None), + } + } + ty::Array(_, n) => { + PatKind::Array { + prefix: (0..n.eval_usize(tcx, self.param_env)) + .map(|i| adt_subpattern(i as usize, None)) + .collect(), + slice: None, + suffix: Vec::new(), + } + } + _ => { + PatKind::Constant { + value: cv, + } + } + }; + + Pat { + span, + ty: cv.ty, + kind: Box::new(kind), + } + } +} diff --git a/src/librustc_mir/hair/pattern/mod.rs b/src/librustc_mir/hair/pattern/mod.rs index 4aaa5e8ee2..1ecc78ba22 100644 --- a/src/librustc_mir/hair/pattern/mod.rs +++ b/src/librustc_mir/hair/pattern/mod.rs @@ -2,35 +2,30 @@ mod _match; mod check_match; +mod const_to_pat; pub(crate) use self::check_match::check_match; -use crate::const_eval::const_variant_index; - use crate::hair::util::UserAnnotatedTyHelpers; use crate::hair::constant::*; -use rustc::lint; use rustc::mir::{Field, BorrowKind, Mutability}; use rustc::mir::{UserTypeProjection}; -use rustc::mir::interpret::{GlobalId, ConstValue, sign_extend, AllocId, Pointer}; -use rustc::traits::{ObligationCause, PredicateObligation}; +use rustc::mir::interpret::{GlobalId, ConstValue, get_slice_bytes, sign_extend}; use rustc::ty::{self, Region, TyCtxt, AdtDef, Ty, UserType, DefIdTree}; use rustc::ty::{CanonicalUserType, CanonicalUserTypeAnnotation, CanonicalUserTypeAnnotations}; -use rustc::ty::subst::{SubstsRef, Kind}; -use rustc::ty::layout::{VariantIdx, Size}; -use rustc::hir::{self, PatKind, RangeEnd}; +use rustc::ty::subst::{SubstsRef, GenericArg}; +use rustc::ty::layout::VariantIdx; +use rustc::hir::{self, RangeEnd}; use rustc::hir::def::{CtorOf, Res, DefKind, CtorKind}; use rustc::hir::pat_util::EnumerateAndAdjustIterator; use rustc::hir::ptr::P; -use rustc_data_structures::indexed_vec::Idx; -use rustc_data_structures::fx::FxHashSet; +use rustc_index::vec::Idx; use std::cmp::Ordering; use std::fmt; use syntax::ast; -use syntax::symbol::sym; use syntax_pos::Span; #[derive(Clone, Debug)] @@ -48,25 +43,25 @@ pub enum BindingMode { } #[derive(Clone, Debug)] -pub struct FieldPattern<'tcx> { +pub struct FieldPat<'tcx> { pub field: Field, - pub pattern: Pattern<'tcx>, + pub pattern: Pat<'tcx>, } #[derive(Clone, Debug)] -pub struct Pattern<'tcx> { +pub struct Pat<'tcx> { pub ty: Ty<'tcx>, pub span: Span, - pub kind: Box>, + pub kind: Box>, } #[derive(Copy, Clone, Debug, PartialEq)] -pub struct PatternTypeProjection<'tcx> { +pub struct PatTyProj<'tcx> { pub user_ty: CanonicalUserType<'tcx>, } -impl<'tcx> PatternTypeProjection<'tcx> { +impl<'tcx> PatTyProj<'tcx> { pub(crate) fn from_user_type(user_annotation: CanonicalUserType<'tcx>) -> Self { Self { user_ty: user_annotation, @@ -92,7 +87,7 @@ impl<'tcx> PatternTypeProjection<'tcx> { #[derive(Copy, Clone, Debug, PartialEq)] 
pub struct Ascription<'tcx> { - pub user_ty: PatternTypeProjection<'tcx>, + pub user_ty: PatTyProj<'tcx>, /// Variance to use when relating the type `user_ty` to the **type of the value being /// matched**. Typically, this is `Variance::Covariant`, since the value being matched must /// have a type that is some subtype of the ascribed type. @@ -116,12 +111,12 @@ pub struct Ascription<'tcx> { } #[derive(Clone, Debug)] -pub enum PatternKind<'tcx> { +pub enum PatKind<'tcx> { Wild, AscribeUserType { ascription: Ascription<'tcx>, - subpattern: Pattern<'tcx>, + subpattern: Pat<'tcx>, }, /// `x`, `ref x`, `x @ P`, etc. @@ -131,7 +126,7 @@ pub enum PatternKind<'tcx> { mode: BindingMode, var: hir::HirId, ty: Ty<'tcx>, - subpattern: Option>, + subpattern: Option>, }, /// `Foo(...)` or `Foo{...}` or `Foo`, where `Foo` is a variant name from an ADT with @@ -140,57 +135,57 @@ pub enum PatternKind<'tcx> { adt_def: &'tcx AdtDef, substs: SubstsRef<'tcx>, variant_index: VariantIdx, - subpatterns: Vec>, + subpatterns: Vec>, }, /// `(...)`, `Foo(...)`, `Foo{...}`, or `Foo`, where `Foo` is a variant name from an ADT with /// a single variant. Leaf { - subpatterns: Vec>, + subpatterns: Vec>, }, /// `box P`, `&P`, `&mut P`, etc. Deref { - subpattern: Pattern<'tcx>, + subpattern: Pat<'tcx>, }, Constant { value: &'tcx ty::Const<'tcx>, }, - Range(PatternRange<'tcx>), + Range(PatRange<'tcx>), /// Matches against a slice, checking the length and extracting elements. /// irrefutable when there is a slice pattern and both `prefix` and `suffix` are empty. /// e.g., `&[ref xs @ ..]`. Slice { - prefix: Vec>, - slice: Option>, - suffix: Vec>, + prefix: Vec>, + slice: Option>, + suffix: Vec>, }, /// Fixed match against an array; irrefutable. Array { - prefix: Vec>, - slice: Option>, - suffix: Vec>, + prefix: Vec>, + slice: Option>, + suffix: Vec>, }, /// An or-pattern, e.g. `p | q`. /// Invariant: `pats.len() >= 2`. Or { - pats: Vec>, + pats: Vec>, }, } #[derive(Copy, Clone, Debug, PartialEq)] -pub struct PatternRange<'tcx> { +pub struct PatRange<'tcx> { pub lo: &'tcx ty::Const<'tcx>, pub hi: &'tcx ty::Const<'tcx>, pub end: RangeEnd, } -impl<'tcx> fmt::Display for Pattern<'tcx> { +impl<'tcx> fmt::Display for Pat<'tcx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { // Printing lists is a chore. let mut first = true; @@ -205,10 +200,10 @@ impl<'tcx> fmt::Display for Pattern<'tcx> { let mut start_or_comma = || start_or_continue(", "); match *self.kind { - PatternKind::Wild => write!(f, "_"), - PatternKind::AscribeUserType { ref subpattern, .. } => + PatKind::Wild => write!(f, "_"), + PatKind::AscribeUserType { ref subpattern, .. } => write!(f, "{}: _", subpattern), - PatternKind::Binding { mutability, name, mode, ref subpattern, .. } => { + PatKind::Binding { mutability, name, mode, ref subpattern, .. } => { let is_mut = match mode { BindingMode::ByValue => mutability == Mutability::Mut, BindingMode::ByRef(bk) => { @@ -225,13 +220,13 @@ impl<'tcx> fmt::Display for Pattern<'tcx> { } Ok(()) } - PatternKind::Variant { ref subpatterns, .. } | - PatternKind::Leaf { ref subpatterns } => { + PatKind::Variant { ref subpatterns, .. } | + PatKind::Leaf { ref subpatterns } => { let variant = match *self.kind { - PatternKind::Variant { adt_def, variant_index, .. } => { + PatKind::Variant { adt_def, variant_index, .. 
} => { Some(&adt_def.variants[variant_index]) } - _ => if let ty::Adt(adt, _) = self.ty.sty { + _ => if let ty::Adt(adt, _) = self.ty.kind { if !adt.is_enum() { Some(&adt.variants[VariantIdx::new(0)]) } else { @@ -252,7 +247,7 @@ impl<'tcx> fmt::Display for Pattern<'tcx> { let mut printed = 0; for p in subpatterns { - if let PatternKind::Wild = *p.pattern.kind { + if let PatKind::Wild = *p.pattern.kind { continue; } let name = variant.fields[p.field.index()].ident; @@ -294,8 +289,8 @@ impl<'tcx> fmt::Display for Pattern<'tcx> { Ok(()) } - PatternKind::Deref { ref subpattern } => { - match self.ty.sty { + PatKind::Deref { ref subpattern } => { + match self.ty.kind { ty::Adt(def, _) if def.is_box() => write!(f, "box ")?, ty::Ref(_, _, mutbl) => { write!(f, "&")?; @@ -307,19 +302,16 @@ impl<'tcx> fmt::Display for Pattern<'tcx> { } write!(f, "{}", subpattern) } - PatternKind::Constant { value } => { + PatKind::Constant { value } => { write!(f, "{}", value) } - PatternKind::Range(PatternRange { lo, hi, end }) => { + PatKind::Range(PatRange { lo, hi, end }) => { write!(f, "{}", lo)?; - match end { - RangeEnd::Included => write!(f, "..=")?, - RangeEnd::Excluded => write!(f, "..")?, - } + write!(f, "{}", end)?; write!(f, "{}", hi) } - PatternKind::Slice { ref prefix, ref slice, ref suffix } | - PatternKind::Array { ref prefix, ref slice, ref suffix } => { + PatKind::Slice { ref prefix, ref slice, ref suffix } | + PatKind::Array { ref prefix, ref slice, ref suffix } => { write!(f, "[")?; for p in prefix { write!(f, "{}{}", start_or_comma(), p)?; @@ -327,7 +319,7 @@ impl<'tcx> fmt::Display for Pattern<'tcx> { if let Some(ref slice) = *slice { write!(f, "{}", start_or_comma())?; match *slice.kind { - PatternKind::Wild => {} + PatKind::Wild => {} _ => write!(f, "{}", slice)? 
} write!(f, "..")?; @@ -337,7 +329,7 @@ impl<'tcx> fmt::Display for Pattern<'tcx> { } write!(f, "]") } - PatternKind::Or { ref pats } => { + PatKind::Or { ref pats } => { for pat in pats { write!(f, "{}{}", start_or_continue(" | "), pat)?; } @@ -347,7 +339,7 @@ impl<'tcx> fmt::Display for Pattern<'tcx> { } } -pub struct PatternContext<'a, 'tcx> { +pub struct PatCtxt<'a, 'tcx> { pub tcx: TyCtxt<'tcx>, pub param_env: ty::ParamEnv<'tcx>, pub tables: &'a ty::TypeckTables<'tcx>, @@ -356,31 +348,31 @@ pub struct PatternContext<'a, 'tcx> { include_lint_checks: bool, } -impl<'a, 'tcx> Pattern<'tcx> { +impl<'a, 'tcx> Pat<'tcx> { pub fn from_hir( tcx: TyCtxt<'tcx>, param_env_and_substs: ty::ParamEnvAnd<'tcx, SubstsRef<'tcx>>, tables: &'a ty::TypeckTables<'tcx>, pat: &'tcx hir::Pat, ) -> Self { - let mut pcx = PatternContext::new(tcx, param_env_and_substs, tables); + let mut pcx = PatCtxt::new(tcx, param_env_and_substs, tables); let result = pcx.lower_pattern(pat); if !pcx.errors.is_empty() { let msg = format!("encountered errors lowering pattern: {:?}", pcx.errors); tcx.sess.delay_span_bug(pat.span, &msg); } - debug!("Pattern::from_hir({:?}) = {:?}", pat, result); + debug!("Pat::from_hir({:?}) = {:?}", pat, result); result } } -impl<'a, 'tcx> PatternContext<'a, 'tcx> { +impl<'a, 'tcx> PatCtxt<'a, 'tcx> { pub fn new( tcx: TyCtxt<'tcx>, param_env_and_substs: ty::ParamEnvAnd<'tcx, SubstsRef<'tcx>>, tables: &'a ty::TypeckTables<'tcx>, ) -> Self { - PatternContext { + PatCtxt { tcx, param_env: param_env_and_substs.param_env, tables, @@ -395,7 +387,7 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { self } - pub fn lower_pattern(&mut self, pat: &'tcx hir::Pat) -> Pattern<'tcx> { + pub fn lower_pattern(&mut self, pat: &'tcx hir::Pat) -> Pat<'tcx> { // When implicit dereferences have been inserted in this pattern, the unadjusted lowered // pattern has the type that results *after* dereferencing. For example, in this code: // @@ -412,7 +404,7 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { // `vec![&&Option, &Option]`. // // Applying the adjustments, we want to instead output `&&Some(n)` (as a HAIR pattern). So - // we wrap the unadjusted pattern in `PatternKind::Deref` repeatedly, consuming the + // we wrap the unadjusted pattern in `PatKind::Deref` repeatedly, consuming the // adjustments in *reverse order* (last-in-first-out, so that the last `Deref` inserted // gets the least-dereferenced type). let unadjusted_pat = self.lower_pattern_unadjusted(pat); @@ -424,10 +416,10 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { .rev() .fold(unadjusted_pat, |pat, ref_ty| { debug!("{:?}: wrapping pattern with type {:?}", pat, ref_ty); - Pattern { + Pat { span: pat.span, ty: ref_ty, - kind: Box::new(PatternKind::Deref { subpattern: pat }), + kind: Box::new(PatKind::Deref { subpattern: pat }), } }, ) @@ -436,30 +428,30 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { fn lower_range_expr( &mut self, expr: &'tcx hir::Expr, - ) -> (PatternKind<'tcx>, Option>) { + ) -> (PatKind<'tcx>, Option>) { match self.lower_lit(expr) { - PatternKind::AscribeUserType { + PatKind::AscribeUserType { ascription: lo_ascription, - subpattern: Pattern { kind: box kind, .. }, + subpattern: Pat { kind: box kind, .. 
}, } => (kind, Some(lo_ascription)), kind => (kind, None), } } - fn lower_pattern_unadjusted(&mut self, pat: &'tcx hir::Pat) -> Pattern<'tcx> { + fn lower_pattern_unadjusted(&mut self, pat: &'tcx hir::Pat) -> Pat<'tcx> { let mut ty = self.tables.node_type(pat.hir_id); - let kind = match pat.node { - PatKind::Wild => PatternKind::Wild, + let kind = match pat.kind { + hir::PatKind::Wild => PatKind::Wild, - PatKind::Lit(ref value) => self.lower_lit(value), + hir::PatKind::Lit(ref value) => self.lower_lit(value), - PatKind::Range(ref lo_expr, ref hi_expr, end) => { + hir::PatKind::Range(ref lo_expr, ref hi_expr, end) => { let (lo, lo_ascription) = self.lower_range_expr(lo_expr); let (hi, hi_ascription) = self.lower_range_expr(hi_expr); let mut kind = match (lo, hi) { - (PatternKind::Constant { value: lo }, PatternKind::Constant { value: hi }) => { + (PatKind::Constant { value: lo }, PatKind::Constant { value: hi }) => { assert_eq!(lo.ty, ty); assert_eq!(hi.ty, ty); let cmp = compare_const_vals( @@ -471,7 +463,7 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { ); match (end, cmp) { (RangeEnd::Excluded, Some(Ordering::Less)) => - PatternKind::Range(PatternRange { lo, hi, end }), + PatKind::Range(PatRange { lo, hi, end }), (RangeEnd::Excluded, _) => { span_err!( self.tcx.sess, @@ -479,13 +471,13 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { E0579, "lower range bound must be less than upper", ); - PatternKind::Wild + PatKind::Wild } (RangeEnd::Included, Some(Ordering::Equal)) => { - PatternKind::Constant { value: lo } + PatKind::Constant { value: lo } } (RangeEnd::Included, Some(Ordering::Less)) => { - PatternKind::Range(PatternRange { lo, hi, end }) + PatKind::Range(PatRange { lo, hi, end }) } (RangeEnd::Included, _) => { let mut err = struct_span_err!( @@ -506,7 +498,7 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { to be less than or equal to the end of the range."); } err.emit(); - PatternKind::Wild + PatKind::Wild } } }, @@ -519,7 +511,7 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { ), ); - PatternKind::Wild + PatKind::Wild }, }; @@ -528,9 +520,9 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { // constants somewhere. Have them on the range pattern. for ascription in &[lo_ascription, hi_ascription] { if let Some(ascription) = ascription { - kind = PatternKind::AscribeUserType { + kind = PatKind::AscribeUserType { ascription: *ascription, - subpattern: Pattern { span: pat.span, ty, kind: Box::new(kind), }, + subpattern: Pat { span: pat.span, ty, kind: Box::new(kind), }, }; } } @@ -538,20 +530,20 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { kind } - PatKind::Path(ref qpath) => { + hir::PatKind::Path(ref qpath) => { return self.lower_path(qpath, pat.hir_id, pat.span); } - PatKind::Ref(ref subpattern, _) | - PatKind::Box(ref subpattern) => { - PatternKind::Deref { subpattern: self.lower_pattern(subpattern) } + hir::PatKind::Ref(ref subpattern, _) | + hir::PatKind::Box(ref subpattern) => { + PatKind::Deref { subpattern: self.lower_pattern(subpattern) } } - PatKind::Slice(ref prefix, ref slice, ref suffix) => { - match ty.sty { + hir::PatKind::Slice(ref prefix, ref slice, ref suffix) => { + match ty.kind { ty::Ref(_, ty, _) => - PatternKind::Deref { - subpattern: Pattern { + PatKind::Deref { + subpattern: Pat { ty, span: pat.span, kind: Box::new(self.slice_or_array_pattern( @@ -562,7 +554,7 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { ty::Array(..) 
=> self.slice_or_array_pattern(pat.span, ty, prefix, slice, suffix), ty::Error => { // Avoid ICE - return Pattern { span: pat.span, ty, kind: Box::new(PatternKind::Wild) }; + return Pat { span: pat.span, ty, kind: Box::new(PatKind::Wild) }; } _ => span_bug!( @@ -572,32 +564,32 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { } } - PatKind::Tuple(ref subpatterns, ddpos) => { - match ty.sty { + hir::PatKind::Tuple(ref subpatterns, ddpos) => { + match ty.kind { ty::Tuple(ref tys) => { let subpatterns = subpatterns.iter() .enumerate_and_adjust(tys.len(), ddpos) - .map(|(i, subpattern)| FieldPattern { + .map(|(i, subpattern)| FieldPat { field: Field::new(i), pattern: self.lower_pattern(subpattern) }) .collect(); - PatternKind::Leaf { subpatterns } + PatKind::Leaf { subpatterns } } ty::Error => { // Avoid ICE (#50577) - return Pattern { span: pat.span, ty, kind: Box::new(PatternKind::Wild) }; + return Pat { span: pat.span, ty, kind: Box::new(PatKind::Wild) }; } _ => span_bug!(pat.span, "unexpected type for tuple pattern: {:?}", ty), } } - PatKind::Binding(_, id, ident, ref sub) => { + hir::PatKind::Binding(_, id, ident, ref sub) => { let var_ty = self.tables.node_type(pat.hir_id); - if let ty::Error = var_ty.sty { + if let ty::Error = var_ty.kind { // Avoid ICE - return Pattern { span: pat.span, ty, kind: Box::new(PatternKind::Wild) }; + return Pat { span: pat.span, ty, kind: Box::new(PatKind::Wild) }; }; let bm = *self.tables.pat_binding_modes().get(pat.hir_id) .expect("missing binding mode"); @@ -617,14 +609,14 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { // A ref x pattern is the same node used for x, and as such it has // x's type, which is &T, where we want T (the type being matched). if let ty::BindByReference(_) = bm { - if let ty::Ref(_, rty, _) = ty.sty { + if let ty::Ref(_, rty, _) = ty.kind { ty = rty; } else { bug!("`ref {}` has wrong type {}", ident, ty); } } - PatternKind::Binding { + PatKind::Binding { mutability, mode, name: ident.name, @@ -634,12 +626,12 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { } } - PatKind::TupleStruct(ref qpath, ref subpatterns, ddpos) => { + hir::PatKind::TupleStruct(ref qpath, ref subpatterns, ddpos) => { let res = self.tables.qpath_res(qpath, pat.hir_id); - let adt_def = match ty.sty { + let adt_def = match ty.kind { ty::Adt(adt_def, _) => adt_def, ty::Error => { // Avoid ICE (#50585) - return Pattern { span: pat.span, ty, kind: Box::new(PatternKind::Wild) }; + return Pat { span: pat.span, ty, kind: Box::new(PatKind::Wild) }; } _ => span_bug!(pat.span, "tuple struct pattern not applied to an ADT {:?}", @@ -650,7 +642,7 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { let subpatterns = subpatterns.iter() .enumerate_and_adjust(variant_def.fields.len(), ddpos) - .map(|(i, field)| FieldPattern { + .map(|(i, field)| FieldPat { field: Field::new(i), pattern: self.lower_pattern(field), }) @@ -659,12 +651,12 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { self.lower_variant_or_leaf(res, pat.hir_id, pat.span, ty, subpatterns) } - PatKind::Struct(ref qpath, ref fields, _) => { + hir::PatKind::Struct(ref qpath, ref fields, _) => { let res = self.tables.qpath_res(qpath, pat.hir_id); let subpatterns = fields.iter() .map(|field| { - FieldPattern { + FieldPat { field: Field::new(self.tcx.field_index(field.hir_id, self.tables)), pattern: self.lower_pattern(&field.pat), @@ -675,35 +667,35 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { self.lower_variant_or_leaf(res, pat.hir_id, pat.span, ty, subpatterns) } - PatKind::Or(ref pats) => { - PatternKind::Or { + hir::PatKind::Or(ref 
pats) => { + PatKind::Or { pats: pats.iter().map(|p| self.lower_pattern(p)).collect(), } } }; - Pattern { + Pat { span: pat.span, ty, kind: Box::new(kind), } } - fn lower_patterns(&mut self, pats: &'tcx [P]) -> Vec> { + fn lower_patterns(&mut self, pats: &'tcx [P]) -> Vec> { pats.iter().map(|p| self.lower_pattern(p)).collect() } - fn lower_opt_pattern(&mut self, pat: &'tcx Option>) -> Option> + fn lower_opt_pattern(&mut self, pat: &'tcx Option>) -> Option> { pat.as_ref().map(|p| self.lower_pattern(p)) } fn flatten_nested_slice_patterns( &mut self, - prefix: Vec>, - slice: Option>, - suffix: Vec>) - -> (Vec>, Option>, Vec>) + prefix: Vec>, + slice: Option>, + suffix: Vec>) + -> (Vec>, Option>, Vec>) { let orig_slice = match slice { Some(orig_slice) => orig_slice, @@ -715,8 +707,8 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { // dance because of intentional borrow-checker stupidity. let kind = *orig_slice.kind; match kind { - PatternKind::Slice { prefix, slice, mut suffix } | - PatternKind::Array { prefix, slice, mut suffix } => { + PatKind::Slice { prefix, slice, mut suffix } | + PatKind::Array { prefix, slice, mut suffix } => { let mut orig_prefix = orig_prefix; orig_prefix.extend(prefix); @@ -725,7 +717,7 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { (orig_prefix, slice, suffix) } _ => { - (orig_prefix, Some(Pattern { + (orig_prefix, Some(Pat { kind: box kind, ..orig_slice }), orig_suffix) } @@ -739,7 +731,7 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { prefix: &'tcx [P], slice: &'tcx Option>, suffix: &'tcx [P]) - -> PatternKind<'tcx> + -> PatKind<'tcx> { let prefix = self.lower_patterns(prefix); let slice = self.lower_opt_pattern(slice); @@ -747,17 +739,17 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { let (prefix, slice, suffix) = self.flatten_nested_slice_patterns(prefix, slice, suffix); - match ty.sty { + match ty.kind { ty::Slice(..) => { // matching a slice or fixed-length array - PatternKind::Slice { prefix: prefix, slice: slice, suffix: suffix } + PatKind::Slice { prefix: prefix, slice: slice, suffix: suffix } } ty::Array(_, len) => { // fixed-length array let len = len.eval_usize(self.tcx, self.param_env); assert!(len >= prefix.len() as u64 + suffix.len() as u64); - PatternKind::Array { prefix: prefix, slice: slice, suffix: suffix } + PatKind::Array { prefix: prefix, slice: slice, suffix: suffix } } _ => { @@ -772,8 +764,8 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { hir_id: hir::HirId, span: Span, ty: Ty<'tcx>, - subpatterns: Vec>, - ) -> PatternKind<'tcx> { + subpatterns: Vec>, + ) -> PatKind<'tcx> { let res = match res { Res::Def(DefKind::Ctor(CtorOf::Variant, ..), variant_ctor_id) => { let variant_id = self.tcx.parent(variant_ctor_id).unwrap(); @@ -787,22 +779,22 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { let enum_id = self.tcx.parent(variant_id).unwrap(); let adt_def = self.tcx.adt_def(enum_id); if adt_def.is_enum() { - let substs = match ty.sty { + let substs = match ty.kind { ty::Adt(_, substs) | ty::FnDef(_, substs) => substs, ty::Error => { // Avoid ICE (#50585) - return PatternKind::Wild; + return PatKind::Wild; } _ => bug!("inappropriate type for def: {:?}", ty), }; - PatternKind::Variant { + PatKind::Variant { adt_def, substs, variant_index: adt_def.variant_index_with_id(variant_id), subpatterns, } } else { - PatternKind::Leaf { subpatterns } + PatKind::Leaf { subpatterns } } } @@ -813,25 +805,25 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { | Res::Def(DefKind::AssocTy, _) | Res::SelfTy(..) | Res::SelfCtor(..) 
=> { - PatternKind::Leaf { subpatterns } + PatKind::Leaf { subpatterns } } _ => { self.errors.push(PatternError::NonConstPath(span)); - PatternKind::Wild + PatKind::Wild } }; if let Some(user_ty) = self.user_substs_applied_to_ty_of_hir_id(hir_id) { debug!("lower_variant_or_leaf: kind={:?} user_ty={:?} span={:?}", kind, user_ty, span); - kind = PatternKind::AscribeUserType { - subpattern: Pattern { + kind = PatKind::AscribeUserType { + subpattern: Pat { span, ty, kind: Box::new(kind), }, ascription: Ascription { - user_ty: PatternTypeProjection::from_user_type(user_ty), + user_ty: PatTyProj::from_user_type(user_ty), user_ty_span: span, variance: ty::Variance::Covariant, }, @@ -848,7 +840,7 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { qpath: &hir::QPath, id: hir::HirId, span: Span) - -> Pattern<'tcx> { + -> Pat<'tcx> { let ty = self.tables.node_type(id); let res = self.tables.qpath_res(qpath, id); let is_associated_const = match res { @@ -871,18 +863,18 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { }; match self.tcx.at(span).const_eval(self.param_env.and(cid)) { Ok(value) => { - let pattern = self.const_to_pat(instance, value, id, span); + let pattern = self.const_to_pat(value, id, span); if !is_associated_const { return pattern; } let user_provided_types = self.tables().user_provided_types(); return if let Some(u_ty) = user_provided_types.get(id) { - let user_ty = PatternTypeProjection::from_user_type(*u_ty); - Pattern { + let user_ty = PatTyProj::from_user_type(*u_ty); + Pat { span, kind: Box::new( - PatternKind::AscribeUserType { + PatKind::AscribeUserType { subpattern: pattern, ascription: Ascription { /// Note that use `Contravariant` here. See the @@ -904,7 +896,7 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { span, "could not evaluate constant pattern", ); - PatternKind::Wild + PatKind::Wild } } }, @@ -914,14 +906,14 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { } else { PatternError::StaticInPattern(span) }); - PatternKind::Wild + PatKind::Wild }, } } _ => self.lower_variant_or_leaf(res, id, span, ty, vec![]), }; - Pattern { + Pat { span, ty, kind: Box::new(kind), @@ -932,368 +924,45 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { /// The special case for negation exists to allow things like `-128_i8` /// which would overflow if we tried to evaluate `128_i8` and then negate /// afterwards. 
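A minimal editorial illustration (not part of the patch) of the case the doc comment above describes: `i8::MIN` is `-128`, but the positive literal `128_i8` does not fit in `i8`, so a pattern like the one in this hypothetical `classify` function can only be lowered by const-evaluating the negated literal as a single unit rather than evaluating `128_i8` and negating afterwards.

    fn classify(x: i8) -> &'static str {
        match x {
            -128 => "smallest i8",
            _ => "anything else",
        }
    }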
- fn lower_lit(&mut self, expr: &'tcx hir::Expr) -> PatternKind<'tcx> { - match expr.node { + fn lower_lit(&mut self, expr: &'tcx hir::Expr) -> PatKind<'tcx> { + match expr.kind { hir::ExprKind::Lit(ref lit) => { let ty = self.tables.expr_ty(expr); match lit_to_const(&lit.node, self.tcx, ty, false) { Ok(val) => { - let instance = ty::Instance::new( - self.tables.local_id_root.expect("literal outside any scope"), - self.substs, - ); - *self.const_to_pat(instance, val, expr.hir_id, lit.span).kind + *self.const_to_pat(val, expr.hir_id, lit.span).kind }, Err(LitToConstError::UnparseableFloat) => { self.errors.push(PatternError::FloatBug); - PatternKind::Wild + PatKind::Wild }, - Err(LitToConstError::Reported) => PatternKind::Wild, + Err(LitToConstError::Reported) => PatKind::Wild, } }, hir::ExprKind::Path(ref qpath) => *self.lower_path(qpath, expr.hir_id, expr.span).kind, hir::ExprKind::Unary(hir::UnNeg, ref expr) => { let ty = self.tables.expr_ty(expr); - let lit = match expr.node { + let lit = match expr.kind { hir::ExprKind::Lit(ref lit) => lit, _ => span_bug!(expr.span, "not a literal: {:?}", expr), }; match lit_to_const(&lit.node, self.tcx, ty, true) { Ok(val) => { - let instance = ty::Instance::new( - self.tables.local_id_root.expect("literal outside any scope"), - self.substs, - ); - *self.const_to_pat(instance, val, expr.hir_id, lit.span).kind + *self.const_to_pat(val, expr.hir_id, lit.span).kind }, Err(LitToConstError::UnparseableFloat) => { self.errors.push(PatternError::FloatBug); - PatternKind::Wild + PatKind::Wild }, - Err(LitToConstError::Reported) => PatternKind::Wild, + Err(LitToConstError::Reported) => PatKind::Wild, } } _ => span_bug!(expr.span, "not a literal: {:?}", expr), } } - - /// Converts an evaluated constant to a pattern (if possible). - /// This means aggregate values (like structs and enums) are converted - /// to a pattern that matches the value (as if you'd compared via structural equality). - fn const_to_pat( - &self, - instance: ty::Instance<'tcx>, - cv: &'tcx ty::Const<'tcx>, - id: hir::HirId, - span: Span, - ) -> Pattern<'tcx> { - // This method is just a warpper handling a validity check; the heavy lifting is - // performed by the recursive const_to_pat_inner method, which is not meant to be - // invoked except by this method. - // - // once indirect_structural_match is a full fledged error, this - // level of indirection can be eliminated - - debug!("const_to_pat: cv={:#?} id={:?}", cv, id); - debug!("const_to_pat: cv.ty={:?} span={:?}", cv.ty, span); - - let mut saw_error = false; - let inlined_const_as_pat = self.const_to_pat_inner(instance, cv, id, span, &mut saw_error); - - if self.include_lint_checks && !saw_error { - // If we were able to successfully convert the const to some pat, double-check - // that the type of the const obeys `#[structural_match]` constraint. - if let Some(adt_def) = search_for_adt_without_structural_match(self.tcx, cv.ty) { - - let path = self.tcx.def_path_str(adt_def.did); - let msg = format!( - "to use a constant of type `{}` in a pattern, \ - `{}` must be annotated with `#[derive(PartialEq, Eq)]`", - path, - path, - ); - - // before issuing lint, double-check there even *is* a - // semantic PartialEq for us to dispatch to. - // - // (If there isn't, then we can safely issue a hard - // error, because that's never worked, due to compiler - // using PartialEq::eq in this scenario in the past.) 
- - let ty_is_partial_eq: bool = { - let partial_eq_trait_id = self.tcx.lang_items().eq_trait().unwrap(); - let obligation: PredicateObligation<'_> = - self.tcx.predicate_for_trait_def(self.param_env, - ObligationCause::misc(span, id), - partial_eq_trait_id, - 0, - cv.ty, - &[]); - self.tcx - .infer_ctxt() - .enter(|infcx| infcx.predicate_may_hold(&obligation)) - }; - - if !ty_is_partial_eq { - // span_fatal avoids ICE from resolution of non-existent method (rare case). - self.tcx.sess.span_fatal(span, &msg); - } else { - self.tcx.lint_hir(lint::builtin::INDIRECT_STRUCTURAL_MATCH, id, span, &msg); - } - } - } - - inlined_const_as_pat - } - - /// Recursive helper for `const_to_pat`; invoke that (instead of calling this directly). - fn const_to_pat_inner( - &self, - instance: ty::Instance<'tcx>, - cv: &'tcx ty::Const<'tcx>, - id: hir::HirId, - span: Span, - // This tracks if we signal some hard error for a given const - // value, so that we will not subsequently issue an irrelevant - // lint for the same const value. - saw_const_match_error: &mut bool, - ) -> Pattern<'tcx> { - - let mut adt_subpattern = |i, variant_opt| { - let field = Field::new(i); - let val = crate::const_eval::const_field( - self.tcx, self.param_env, variant_opt, field, cv - ); - self.const_to_pat_inner(instance, val, id, span, saw_const_match_error) - }; - let mut adt_subpatterns = |n, variant_opt| { - (0..n).map(|i| { - let field = Field::new(i); - FieldPattern { - field, - pattern: adt_subpattern(i, variant_opt), - } - }).collect::>() - }; - - - let kind = match cv.ty.sty { - ty::Float(_) => { - self.tcx.lint_hir( - ::rustc::lint::builtin::ILLEGAL_FLOATING_POINT_LITERAL_PATTERN, - id, - span, - "floating-point types cannot be used in patterns", - ); - PatternKind::Constant { - value: cv, - } - } - ty::Adt(adt_def, _) if adt_def.is_union() => { - // Matching on union fields is unsafe, we can't hide it in constants - *saw_const_match_error = true; - self.tcx.sess.span_err(span, "cannot use unions in constant patterns"); - PatternKind::Wild - } - // keep old code until future-compat upgraded to errors. - ty::Adt(adt_def, _) if !self.tcx.has_attr(adt_def.did, sym::structural_match) => { - let path = self.tcx.def_path_str(adt_def.did); - let msg = format!( - "to use a constant of type `{}` in a pattern, \ - `{}` must be annotated with `#[derive(PartialEq, Eq)]`", - path, - path, - ); - *saw_const_match_error = true; - self.tcx.sess.span_err(span, &msg); - PatternKind::Wild - } - // keep old code until future-compat upgraded to errors. - ty::Ref(_, ty::TyS { sty: ty::Adt(adt_def, _), .. }, _) - if !self.tcx.has_attr(adt_def.did, sym::structural_match) => { - // HACK(estebank): Side-step ICE #53708, but anything other than erroring here - // would be wrong. Returnging `PatternKind::Wild` is not technically correct. 
- let path = self.tcx.def_path_str(adt_def.did); - let msg = format!( - "to use a constant of type `{}` in a pattern, \ - `{}` must be annotated with `#[derive(PartialEq, Eq)]`", - path, - path, - ); - *saw_const_match_error = true; - self.tcx.sess.span_err(span, &msg); - PatternKind::Wild - } - ty::Adt(adt_def, substs) if adt_def.is_enum() => { - let variant_index = const_variant_index(self.tcx, self.param_env, cv); - let subpatterns = adt_subpatterns( - adt_def.variants[variant_index].fields.len(), - Some(variant_index), - ); - PatternKind::Variant { - adt_def, - substs, - variant_index, - subpatterns, - } - } - ty::Adt(adt_def, _) => { - let struct_var = adt_def.non_enum_variant(); - PatternKind::Leaf { - subpatterns: adt_subpatterns(struct_var.fields.len(), None), - } - } - ty::Tuple(fields) => { - PatternKind::Leaf { - subpatterns: adt_subpatterns(fields.len(), None), - } - } - ty::Array(_, n) => { - PatternKind::Array { - prefix: (0..n.eval_usize(self.tcx, self.param_env)) - .map(|i| adt_subpattern(i as usize, None)) - .collect(), - slice: None, - suffix: Vec::new(), - } - } - _ => { - PatternKind::Constant { - value: cv, - } - } - }; - - Pattern { - span, - ty: cv.ty, - kind: Box::new(kind), - } - } } -/// This method traverses the structure of `ty`, trying to find an -/// instance of an ADT (i.e. struct or enum) that was declared without -/// the `#[structural_match]` attribute. -/// -/// The "structure of a type" includes all components that would be -/// considered when doing a pattern match on a constant of that -/// type. -/// -/// * This means this method descends into fields of structs/enums, -/// and also descends into the inner type `T` of `&T` and `&mut T` -/// -/// * The traversal doesn't dereference unsafe pointers (`*const T`, -/// `*mut T`), and it does not visit the type arguments of an -/// instantiated generic like `PhantomData`. -/// -/// The reason we do this search is Rust currently require all ADT's -/// reachable from a constant's type to be annotated with -/// `#[structural_match]`, an attribute which essentially says that -/// the implementation of `PartialEq::eq` behaves *equivalently* to a -/// comparison against the unfolded structure. -/// -/// For more background on why Rust has this requirement, and issues -/// that arose when the requirement was not enforced completely, see -/// Rust RFC 1445, rust-lang/rust#61188, and rust-lang/rust#62307. -fn search_for_adt_without_structural_match<'tcx>(tcx: TyCtxt<'tcx>, - ty: Ty<'tcx>) - -> Option<&'tcx AdtDef> -{ - // Import here (not mod level), because `TypeFoldable::fold_with` - // conflicts with `PatternFoldable::fold_with` - use crate::rustc::ty::fold::TypeVisitor; - use crate::rustc::ty::TypeFoldable; - - let mut search = Search { tcx, found: None, seen: FxHashSet::default() }; - ty.visit_with(&mut search); - return search.found; - - struct Search<'tcx> { - tcx: TyCtxt<'tcx>, - - // records the first ADT we find without `#[structural_match` - found: Option<&'tcx AdtDef>, - - // tracks ADT's previously encountered during search, so that - // we will not recur on them again. - seen: FxHashSet<&'tcx AdtDef>, - } - - impl<'tcx> TypeVisitor<'tcx> for Search<'tcx> { - fn visit_ty(&mut self, ty: Ty<'tcx>) -> bool { - debug!("Search visiting ty: {:?}", ty); - - let (adt_def, substs) = match ty.sty { - ty::Adt(adt_def, substs) => (adt_def, substs), - ty::RawPtr(..) 
=> { - // `#[structural_match]` ignores substructure of - // `*const _`/`*mut _`, so skip super_visit_with - // - // (But still tell caller to continue search.) - return false; - } - ty::FnDef(..) | ty::FnPtr(..) => { - // types of formals and return in `fn(_) -> _` are also irrelevant - // - // (But still tell caller to continue search.) - return false; - } - ty::Array(_, n) if n.try_eval_usize(self.tcx, ty::ParamEnv::reveal_all()) == Some(0) - => { - // rust-lang/rust#62336: ignore type of contents - // for empty array. - return false; - } - _ => { - ty.super_visit_with(self); - return false; - } - }; - - if !self.tcx.has_attr(adt_def.did, sym::structural_match) { - self.found = Some(&adt_def); - debug!("Search found adt_def: {:?}", adt_def); - return true // Halt visiting! - } - - if self.seen.contains(adt_def) { - debug!("Search already seen adt_def: {:?}", adt_def); - // let caller continue its search - return false; - } - - self.seen.insert(adt_def); - - // `#[structural_match]` does not care about the - // instantiation of the generics in an ADT (it - // instead looks directly at its fields outside - // this match), so we skip super_visit_with. - // - // (Must not recur on substs for `PhantomData` cf - // rust-lang/rust#55028 and rust-lang/rust#55837; but also - // want to skip substs when only uses of generic are - // behind unsafe pointers `*const T`/`*mut T`.) - - // even though we skip super_visit_with, we must recur on - // fields of ADT. - let tcx = self.tcx; - for field_ty in adt_def.all_fields().map(|field| field.ty(tcx, substs)) { - if field_ty.visit_with(self) { - // found an ADT without `#[structural_match]`; halt visiting! - assert!(self.found.is_some()); - return true; - } - } - - // Even though we do not want to recur on substs, we do - // want our caller to continue its own search. - false - } - } -} - -impl UserAnnotatedTyHelpers<'tcx> for PatternContext<'_, 'tcx> { +impl UserAnnotatedTyHelpers<'tcx> for PatCtxt<'_, 'tcx> { fn tcx(&self) -> TyCtxt<'tcx> { self.tcx } @@ -1313,11 +982,11 @@ pub trait PatternFoldable<'tcx> : Sized { } pub trait PatternFolder<'tcx> : Sized { - fn fold_pattern(&mut self, pattern: &Pattern<'tcx>) -> Pattern<'tcx> { + fn fold_pattern(&mut self, pattern: &Pat<'tcx>) -> Pat<'tcx> { pattern.super_fold_with(self) } - fn fold_pattern_kind(&mut self, kind: &PatternKind<'tcx>) -> PatternKind<'tcx> { + fn fold_pattern_kind(&mut self, kind: &PatKind<'tcx>) -> PatKind<'tcx> { kind.super_fold_with(self) } } @@ -1357,26 +1026,26 @@ macro_rules! 
CloneImpls { CloneImpls!{ <'tcx> Span, Field, Mutability, ast::Name, hir::HirId, usize, ty::Const<'tcx>, Region<'tcx>, Ty<'tcx>, BindingMode, &'tcx AdtDef, - SubstsRef<'tcx>, &'tcx Kind<'tcx>, UserType<'tcx>, - UserTypeProjection, PatternTypeProjection<'tcx> + SubstsRef<'tcx>, &'tcx GenericArg<'tcx>, UserType<'tcx>, + UserTypeProjection, PatTyProj<'tcx> } -impl<'tcx> PatternFoldable<'tcx> for FieldPattern<'tcx> { +impl<'tcx> PatternFoldable<'tcx> for FieldPat<'tcx> { fn super_fold_with>(&self, folder: &mut F) -> Self { - FieldPattern { + FieldPat { field: self.field.fold_with(folder), pattern: self.pattern.fold_with(folder) } } } -impl<'tcx> PatternFoldable<'tcx> for Pattern<'tcx> { +impl<'tcx> PatternFoldable<'tcx> for Pat<'tcx> { fn fold_with>(&self, folder: &mut F) -> Self { folder.fold_pattern(self) } fn super_fold_with>(&self, folder: &mut F) -> Self { - Pattern { + Pat { ty: self.ty.fold_with(folder), span: self.span.fold_with(folder), kind: self.kind.fold_with(folder) @@ -1384,22 +1053,22 @@ impl<'tcx> PatternFoldable<'tcx> for Pattern<'tcx> { } } -impl<'tcx> PatternFoldable<'tcx> for PatternKind<'tcx> { +impl<'tcx> PatternFoldable<'tcx> for PatKind<'tcx> { fn fold_with>(&self, folder: &mut F) -> Self { folder.fold_pattern_kind(self) } fn super_fold_with>(&self, folder: &mut F) -> Self { match *self { - PatternKind::Wild => PatternKind::Wild, - PatternKind::AscribeUserType { + PatKind::Wild => PatKind::Wild, + PatKind::AscribeUserType { ref subpattern, ascription: Ascription { variance, ref user_ty, user_ty_span, }, - } => PatternKind::AscribeUserType { + } => PatKind::AscribeUserType { subpattern: subpattern.fold_with(folder), ascription: Ascription { user_ty: user_ty.fold_with(folder), @@ -1407,14 +1076,14 @@ impl<'tcx> PatternFoldable<'tcx> for PatternKind<'tcx> { user_ty_span, }, }, - PatternKind::Binding { + PatKind::Binding { mutability, name, mode, var, ty, ref subpattern, - } => PatternKind::Binding { + } => PatKind::Binding { mutability: mutability.fold_with(folder), name: name.fold_with(folder), mode: mode.fold_with(folder), @@ -1422,52 +1091,52 @@ impl<'tcx> PatternFoldable<'tcx> for PatternKind<'tcx> { ty: ty.fold_with(folder), subpattern: subpattern.fold_with(folder), }, - PatternKind::Variant { + PatKind::Variant { adt_def, substs, variant_index, ref subpatterns, - } => PatternKind::Variant { + } => PatKind::Variant { adt_def: adt_def.fold_with(folder), substs: substs.fold_with(folder), variant_index, subpatterns: subpatterns.fold_with(folder) }, - PatternKind::Leaf { + PatKind::Leaf { ref subpatterns, - } => PatternKind::Leaf { + } => PatKind::Leaf { subpatterns: subpatterns.fold_with(folder), }, - PatternKind::Deref { + PatKind::Deref { ref subpattern, - } => PatternKind::Deref { + } => PatKind::Deref { subpattern: subpattern.fold_with(folder), }, - PatternKind::Constant { + PatKind::Constant { value - } => PatternKind::Constant { + } => PatKind::Constant { value, }, - PatternKind::Range(range) => PatternKind::Range(range), - PatternKind::Slice { + PatKind::Range(range) => PatKind::Range(range), + PatKind::Slice { ref prefix, ref slice, ref suffix, - } => PatternKind::Slice { + } => PatKind::Slice { prefix: prefix.fold_with(folder), slice: slice.fold_with(folder), suffix: suffix.fold_with(folder) }, - PatternKind::Array { + PatKind::Array { ref prefix, ref slice, ref suffix - } => PatternKind::Array { + } => PatKind::Array { prefix: prefix.fold_with(folder), slice: slice.fold_with(folder), suffix: suffix.fold_with(folder) }, - PatternKind::Or { ref pats } => 
PatternKind::Or { pats: pats.fold_with(folder) }, + PatKind::Or { ref pats } => PatKind::Or { pats: pats.fold_with(folder) }, } } } @@ -1501,7 +1170,7 @@ pub fn compare_const_vals<'tcx>( if let (Some(a), Some(b)) = (a_bits, b_bits) { use ::rustc_apfloat::Float; - return match ty.sty { + return match ty.kind { ty::Float(ast::FloatTy::F32) => { let l = ::rustc_apfloat::ieee::Single::from_bits(a); let r = ::rustc_apfloat::ieee::Single::from_bits(b); @@ -1524,29 +1193,12 @@ pub fn compare_const_vals<'tcx>( } } - if let ty::Str = ty.sty { + if let ty::Str = ty.kind { match (a.val, b.val) { - ( - ConstValue::Slice { data: alloc_a, start: offset_a, end: end_a }, - ConstValue::Slice { data: alloc_b, start: offset_b, end: end_b }, - ) => { - let len_a = end_a - offset_a; - let len_b = end_b - offset_b; - let a = alloc_a.get_bytes( - &tcx, - // invent a pointer, only the offset is relevant anyway - Pointer::new(AllocId(0), Size::from_bytes(offset_a as u64)), - Size::from_bytes(len_a as u64), - ); - let b = alloc_b.get_bytes( - &tcx, - // invent a pointer, only the offset is relevant anyway - Pointer::new(AllocId(0), Size::from_bytes(offset_b as u64)), - Size::from_bytes(len_b as u64), - ); - if let (Ok(a), Ok(b)) = (a, b) { - return from_bool(a == b); - } + (ConstValue::Slice { .. }, ConstValue::Slice { .. }) => { + let a_bytes = get_slice_bytes(&tcx, a.val); + let b_bytes = get_slice_bytes(&tcx, b.val); + return from_bool(a_bytes == b_bytes); } _ => (), } diff --git a/src/librustc_mir/hair/util.rs b/src/librustc_mir/hair/util.rs index 4e014855df..d63541f7a3 100644 --- a/src/librustc_mir/hair/util.rs +++ b/src/librustc_mir/hair/util.rs @@ -17,7 +17,7 @@ crate trait UserAnnotatedTyHelpers<'tcx> { let mut user_ty = *user_provided_types.get(hir_id)?; debug!("user_subts_applied_to_ty_of_hir_id: user_ty={:?}", user_ty); let ty = self.tables().node_type(hir_id); - match ty.sty { + match ty.kind { ty::Adt(adt_def, ..) => { if let UserType::TypeOf(ref mut did, _) = &mut user_ty.value { *did = adt_def.did; diff --git a/src/librustc_mir/interpret/cast.rs b/src/librustc_mir/interpret/cast.rs index 210647ac1e..9ab347957f 100644 --- a/src/librustc_mir/interpret/cast.rs +++ b/src/librustc_mir/interpret/cast.rs @@ -34,7 +34,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { Pointer(PointerCast::ReifyFnPointer) => { // The src operand does not matter, just its type - match src.layout.ty.sty { + match src.layout.ty.kind { ty::FnDef(def_id, substs) => { // All reifications must be monomorphic, bail out otherwise. 
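// (Editorial aside, not part of the patch.) A `ReifyFnPointer` cast, which this
// arm handles, is produced in surface Rust by coercing a monomorphic function
// item to a function pointer; `square` here is a hypothetical example:
//
//     fn square(x: i32) -> i32 { x * x }
//     let f: fn(i32) -> i32 = square; // ReifyFnPointer cast happens here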
if src.layout.ty.needs_subst() { @@ -44,7 +44,14 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { if self.tcx.has_attr(def_id, sym::rustc_args_required_const) { bug!("reifying a fn ptr that requires const arguments"); } - let instance = self.resolve(def_id, substs)?; + + let instance = ty::Instance::resolve_for_fn_ptr( + *self.tcx, + self.param_env, + def_id, + substs, + ).ok_or_else(|| err_inval!(TooGeneric))?; + let fn_ptr = self.memory.create_fn_alloc(FnVal::Instance(instance)); self.write_scalar(Scalar::Ptr(fn_ptr.into()), dest)?; } @@ -54,7 +61,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { Pointer(PointerCast::UnsafeFnPointer) => { let src = self.read_immediate(src)?; - match dest.layout.ty.sty { + match dest.layout.ty.kind { ty::FnPtr(_) => { // No change to value self.write_immediate(*src, dest)?; @@ -65,7 +72,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { Pointer(PointerCast::ClosureFnPointer(_)) => { // The src operand does not matter, just its type - match src.layout.ty.sty { + match src.layout.ty.kind { ty::Closure(def_id, substs) => { // All reifications must be monomorphic, bail out otherwise. if src.layout.ty.needs_subst() { @@ -97,7 +104,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { use rustc::ty::TyKind::*; trace!("Casting {:?}: {:?} to {:?}", *src, src.layout.ty, dest_layout.ty); - match src.layout.ty.sty { + match src.layout.ty.kind { // Floating point Float(FloatTy::F32) => return Ok(self.cast_from_float(src.to_scalar()?.to_f32()?, dest_layout.ty)?.into()), @@ -176,7 +183,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { }; trace!("cast_from_int: {}, {}, {}", v, src_layout.ty, dest_layout.ty); use rustc::ty::TyKind::*; - match dest_layout.ty.sty { + match dest_layout.ty.kind { Int(_) | Uint(_) | RawPtr(_) => { let v = self.truncate(v, dest_layout); Ok(Scalar::from_uint(v, dest_layout.size)) @@ -214,7 +221,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { where F: Float + Into> + FloatConvert + FloatConvert { use rustc::ty::TyKind::*; - match dest_ty.sty { + match dest_ty.kind { // float -> uint Uint(t) => { let width = t.bit_width().unwrap_or_else(|| self.pointer_size().bits() as usize); @@ -244,14 +251,14 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { src: OpTy<'tcx, M::PointerTag>, dest: PlaceTy<'tcx, M::PointerTag>, // The pointee types - sty: Ty<'tcx>, - dty: Ty<'tcx>, + source_ty: Ty<'tcx>, + dest_ty: Ty<'tcx>, ) -> InterpResult<'tcx> { // A -> A conversion let (src_pointee_ty, dest_pointee_ty) = - self.tcx.struct_lockstep_tails_erasing_lifetimes(sty, dty, self.param_env); + self.tcx.struct_lockstep_tails_erasing_lifetimes(source_ty, dest_ty, self.param_env); - match (&src_pointee_ty.sty, &dest_pointee_ty.sty) { + match (&src_pointee_ty.kind, &dest_pointee_ty.kind) { (&ty::Array(_, length), &ty::Slice(_)) => { let ptr = self.read_immediate(src)?.to_scalar_ptr()?; // u64 cast is from usize to u64, which is always good @@ -287,7 +294,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { dest: PlaceTy<'tcx, M::PointerTag>, ) -> InterpResult<'tcx> { trace!("Unsizing {:?} into {:?}", src, dest); - match (&src.layout.ty.sty, &dest.layout.ty.sty) { + match (&src.layout.ty.kind, &dest.layout.ty.kind) { (&ty::Ref(_, s, _), &ty::Ref(_, d, _)) | (&ty::Ref(_, s, _), &ty::RawPtr(TypeAndMut { ty: d, .. })) | (&ty::RawPtr(TypeAndMut { ty: s, .. 
}), diff --git a/src/librustc_mir/interpret/eval_context.rs b/src/librustc_mir/interpret/eval_context.rs index 78996ed693..d929e958f0 100644 --- a/src/librustc_mir/interpret/eval_context.rs +++ b/src/librustc_mir/interpret/eval_context.rs @@ -12,7 +12,7 @@ use rustc::ty::layout::{ use rustc::ty::subst::SubstsRef; use rustc::ty::{self, Ty, TyCtxt, TypeFoldable}; use rustc::ty::query::TyCtxtAt; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc_index::vec::IndexVec; use rustc::mir::interpret::{ GlobalId, Scalar, Pointer, FrameInfo, AllocId, InterpResult, truncate, sign_extend, @@ -35,7 +35,7 @@ pub struct InterpCx<'mir, 'tcx, M: Machine<'mir, 'tcx>> { pub(crate) param_env: ty::ParamEnv<'tcx>, /// The virtual memory system. - pub(crate) memory: Memory<'mir, 'tcx, M>, + pub memory: Memory<'mir, 'tcx, M>, /// The virtual call stack. pub(crate) stack: Vec>, @@ -91,7 +91,7 @@ pub struct Frame<'mir, 'tcx, Tag=(), Extra=()> { pub extra: Extra, } -#[derive(Clone, Debug, Eq, PartialEq, Hash)] +#[derive(Clone, Eq, PartialEq, Debug)] // Miri debug-prints these pub enum StackPopCleanup { /// Jump to the next block in the caller, or cause UB if None (that's a function /// that may never return). Also store layout of return place so @@ -113,7 +113,7 @@ pub struct LocalState<'tcx, Tag=(), Id=AllocId> { } /// Current value of a local variable -#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +#[derive(Clone, PartialEq, Eq, Debug)] // Miri debug-prints these pub enum LocalValue { /// This local is not currently alive, and cannot be used at all. Dead, @@ -211,16 +211,6 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { } } - #[inline(always)] - pub fn memory(&self) -> &Memory<'mir, 'tcx, M> { - &self.memory - } - - #[inline(always)] - pub fn memory_mut(&mut self) -> &mut Memory<'mir, 'tcx, M> { - &mut self.memory - } - #[inline(always)] pub fn force_ptr( &self, @@ -387,7 +377,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { if !layout.is_unsized() { return Ok(Some((layout.size, layout.align.abi))); } - match layout.ty.sty { + match layout.ty.kind { ty::Adt(..) | ty::Tuple(..) => { // First get the size of all statically known fields. // Don't use type_of::sizing_type_of because that expects t to be sized, diff --git a/src/librustc_mir/interpret/intern.rs b/src/librustc_mir/interpret/intern.rs index 95647ce642..924529d7f5 100644 --- a/src/librustc_mir/interpret/intern.rs +++ b/src/librustc_mir/interpret/intern.rs @@ -6,7 +6,6 @@ use rustc::ty::{Ty, self}; use rustc::mir::interpret::{InterpResult, ErrorHandled}; use rustc::hir; -use rustc::hir::def_id::DefId; use super::validity::RefTracking; use rustc_data_structures::fx::FxHashSet; @@ -73,8 +72,7 @@ fn intern_shallow<'rt, 'mir, 'tcx>( ); // remove allocation let tcx = ecx.tcx; - let memory = ecx.memory_mut(); - let (kind, mut alloc) = match memory.alloc_map.remove(&alloc_id) { + let (kind, mut alloc) = match ecx.memory.alloc_map.remove(&alloc_id) { Some(entry) => entry, None => { // Pointer not found in local memory map. It is either a pointer to the global @@ -192,13 +190,13 @@ for // Handle Reference types, as these are the only relocations supported by const eval. // Raw pointers (and boxes) are handled by the `leftover_relocations` logic. 
let ty = mplace.layout.ty; - if let ty::Ref(_, referenced_ty, mutability) = ty.sty { + if let ty::Ref(_, referenced_ty, mutability) = ty.kind { let value = self.ecx.read_immediate(mplace.into())?; // Handle trait object vtables if let Ok(meta) = value.to_meta() { if let ty::Dynamic(..) = self.ecx.tcx.struct_tail_erasing_lifetimes( - referenced_ty, self.ecx.param_env).sty + referenced_ty, self.ecx.param_env).kind { if let Ok(vtable) = meta.unwrap().to_ptr() { // explitly choose `Immutable` here, since vtables are immutable, even @@ -228,7 +226,7 @@ for // we statically prevent `&mut T` via `const_qualif` and double check this here (InternMode::ConstBase, hir::Mutability::MutMutable) | (InternMode::Const, hir::Mutability::MutMutable) => { - match referenced_ty.sty { + match referenced_ty.kind { ty::Array(_, n) if n.eval_usize(self.ecx.tcx.tcx, self.ecx.param_env) == 0 => {} ty::Slice(_) @@ -271,12 +269,12 @@ for pub fn intern_const_alloc_recursive( ecx: &mut CompileTimeEvalContext<'mir, 'tcx>, - def_id: DefId, + // The `mutability` of the place, ignoring the type. + place_mut: Option, ret: MPlaceTy<'tcx>, ) -> InterpResult<'tcx> { let tcx = ecx.tcx; - // this `mutability` is the mutability of the place, ignoring the type - let (base_mutability, base_intern_mode) = match tcx.static_mutability(def_id) { + let (base_mutability, base_intern_mode) = match place_mut { Some(hir::Mutability::MutImmutable) => (Mutability::Immutable, InternMode::Static), // `static mut` doesn't care about interior mutability, it's mutable anyway Some(hir::Mutability::MutMutable) => (Mutability::Mutable, InternMode::Static), @@ -332,7 +330,7 @@ pub fn intern_const_alloc_recursive( let mut todo: Vec<_> = leftover_allocations.iter().cloned().collect(); while let Some(alloc_id) = todo.pop() { - if let Some((_, mut alloc)) = ecx.memory_mut().alloc_map.remove(&alloc_id) { + if let Some((_, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) { // We can't call the `intern_shallow` method here, as its logic is tailored to safe // references and a `leftover_allocations` set (where we only have a todo-list here). // So we hand-roll the interning logic here again. @@ -350,7 +348,7 @@ pub fn intern_const_alloc_recursive( todo.push(reloc); } } - } else if ecx.memory().dead_alloc_map.contains_key(&alloc_id) { + } else if ecx.memory.dead_alloc_map.contains_key(&alloc_id) { // dangling pointer throw_unsup!(ValidationFailure("encountered dangling pointer in final constant".into())) } diff --git a/src/librustc_mir/interpret/intrinsics.rs b/src/librustc_mir/interpret/intrinsics.rs index ec09e69ec8..c08e4c8960 100644 --- a/src/librustc_mir/interpret/intrinsics.rs +++ b/src/librustc_mir/interpret/intrinsics.rs @@ -3,6 +3,7 @@ //! and miri. use syntax::symbol::Symbol; +use syntax_pos::Span; use rustc::ty; use rustc::ty::layout::{LayoutOf, Primitive, Size}; use rustc::ty::subst::SubstsRef; @@ -12,9 +13,10 @@ use rustc::mir::BinOp; use rustc::mir::interpret::{InterpResult, Scalar, GlobalId, ConstValue}; use super::{ - Machine, PlaceTy, OpTy, InterpCx, + Machine, PlaceTy, OpTy, InterpCx, ImmTy, }; +mod caller_location; mod type_name; fn numeric_intrinsic<'tcx, Tag>( @@ -86,6 +88,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { /// Returns `true` if emulation happened. 
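// (Editorial aside, not part of the patch.) The new "caller_location" arm added
// in this function materialises a `core::panic::Location` value through
// `alloc_caller_location` (see the new intrinsics/caller_location.rs file later
// in this patch). Judging from the field writes there (`mplace_field` 0, 1, 2,
// with `file` stored as a pointer/length pair), the value has roughly this shape:
//
//     struct Location<'a> {
//         file: &'a str,
//         line: u32,
//         col: u32,
//     }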
pub fn emulate_intrinsic( &mut self, + span: Span, instance: ty::Instance<'tcx>, args: &[OpTy<'tcx, M::PointerTag>], dest: PlaceTy<'tcx, M::PointerTag>, @@ -94,6 +97,16 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { let intrinsic_name = &self.tcx.item_name(instance.def_id()).as_str()[..]; match intrinsic_name { + "caller_location" => { + let caller = self.tcx.sess.source_map().lookup_char_pos(span.lo()); + let location = self.alloc_caller_location( + Symbol::intern(&caller.file.name.to_string()), + caller.line as u32, + caller.col_display as u32 + 1, + )?; + self.write_scalar(location.ptr, dest)?; + } + "min_align_of" | "pref_align_of" | "needs_drop" | @@ -236,10 +249,78 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { let result = Scalar::from_uint(truncated_bits, layout.size); self.write_scalar(result, dest)?; } + + "ptr_offset_from" => { + let a = self.read_immediate(args[0])?.to_scalar()?.to_ptr()?; + let b = self.read_immediate(args[1])?.to_scalar()?.to_ptr()?; + if a.alloc_id != b.alloc_id { + throw_ub_format!( + "ptr_offset_from cannot compute offset of pointers into different \ + allocations.", + ); + } + let usize_layout = self.layout_of(self.tcx.types.usize)?; + let a_offset = ImmTy::from_uint(a.offset.bytes(), usize_layout); + let b_offset = ImmTy::from_uint(b.offset.bytes(), usize_layout); + let (val, _overflowed, _ty) = self.overflowing_binary_op( + BinOp::Sub, a_offset, b_offset, + )?; + let pointee_layout = self.layout_of(substs.type_at(0))?; + let isize_layout = self.layout_of(self.tcx.types.isize)?; + let val = ImmTy::from_scalar(val, isize_layout); + let size = ImmTy::from_int(pointee_layout.size.bytes(), isize_layout); + self.exact_div(val, size, dest)?; + } + "transmute" => { self.copy_op_transmute(args[0], dest)?; } + "simd_insert" => { + let index = self.read_scalar(args[1])?.to_u32()? as u64; + let scalar = args[2]; + let input = args[0]; + let (len, e_ty) = self.read_vector_ty(input); + assert!( + index < len, + "Index `{}` must be in bounds of vector type `{}`: `[0, {})`", + index, e_ty, len + ); + assert_eq!( + input.layout, dest.layout, + "Return type `{}` must match vector type `{}`", + dest.layout.ty, input.layout.ty + ); + assert_eq!( + scalar.layout.ty, e_ty, + "Scalar type `{}` must match vector element type `{}`", + scalar.layout.ty, e_ty + ); + for i in 0..len { + let place = self.place_field(dest, i)?; + let value = if i == index { + scalar + } else { + self.operand_field(input, i)? + }; + self.copy_op(value, place)?; + } + } + "simd_extract" => { + let index = self.read_scalar(args[1])?.to_u32()? 
as _; + let (len, e_ty) = self.read_vector_ty(args[0]); + assert!( + index < len, + "index `{}` is out-of-bounds of vector type `{}` with length `{}`", + index, e_ty, len + ); + assert_eq!( + e_ty, dest.layout.ty, + "Return type `{}` must match vector element type `{}`", + dest.layout.ty, e_ty + ); + self.copy_op(self.operand_field(args[0], index)?, dest)?; + } _ => return Ok(false), } @@ -256,18 +337,19 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { ) -> InterpResult<'tcx, bool> { let def_id = instance.def_id(); if Some(def_id) == self.tcx.lang_items().panic_fn() { - assert!(args.len() == 1); - // &(&'static str, &'static str, u32, u32) - let place = self.deref_operand(args[0])?; - let (msg, file, line, col) = ( - self.mplace_field(place, 0)?, - self.mplace_field(place, 1)?, - self.mplace_field(place, 2)?, - self.mplace_field(place, 3)?, + // &'static str, &core::panic::Location { &'static str, u32, u32 } + assert!(args.len() == 2); + + let msg_place = self.deref_operand(args[0])?; + let msg = Symbol::intern(self.read_str(msg_place)?); + + let location = self.deref_operand(args[1])?; + let (file, line, col) = ( + self.mplace_field(location, 0)?, + self.mplace_field(location, 1)?, + self.mplace_field(location, 2)?, ); - let msg_place = self.deref_operand(msg.into())?; - let msg = Symbol::intern(self.read_str(msg_place)?); let file_place = self.deref_operand(file.into())?; let file = Symbol::intern(self.read_str(file_place)?); let line = self.read_scalar(line.into())?.to_u32()?; @@ -295,4 +377,30 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { return Ok(false); } } + + pub fn exact_div( + &mut self, + a: ImmTy<'tcx, M::PointerTag>, + b: ImmTy<'tcx, M::PointerTag>, + dest: PlaceTy<'tcx, M::PointerTag>, + ) -> InterpResult<'tcx> { + // Performs an exact division, resulting in undefined behavior where + // `x % y != 0` or `y == 0` or `x == T::min_value() && y == -1`. + // First, check x % y != 0. + if self.binary_op(BinOp::Rem, a, b)?.to_bits()? != 0 { + // Then, check if `b` is -1, which is the "min_value / -1" case. 
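// (Editorial sketch, not part of the patch.) The contract `exact_div` enforces,
// restated over plain integers in a hypothetical helper: the divisor must be
// non-zero, the division must not be the overflowing MIN / -1 case, and the
// remainder must be zero.
//
//     fn exact_div_i64(a: i64, b: i64) -> i64 {
//         assert!(b != 0, "division by zero");
//         assert!(!(a == i64::min_value() && b == -1), "overflow");
//         assert!(a % b == 0, "division leaves a remainder");
//         a / b
//     }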
+ let minus1 = Scalar::from_int(-1, dest.layout.size); + let b = b.to_scalar().unwrap(); + if b == minus1 { + throw_ub_format!("exact_div: result of dividing MIN by -1 cannot be represented") + } else { + throw_ub_format!( + "exact_div: {} cannot be divided by {} without remainder", + a.to_scalar().unwrap(), + b, + ) + } + } + self.binop_ignore_overflow(BinOp::Div, a, b, dest) + } } diff --git a/src/librustc_mir/interpret/intrinsics/caller_location.rs b/src/librustc_mir/interpret/intrinsics/caller_location.rs new file mode 100644 index 0000000000..249d2f9ff5 --- /dev/null +++ b/src/librustc_mir/interpret/intrinsics/caller_location.rs @@ -0,0 +1,49 @@ +use rustc::middle::lang_items::PanicLocationLangItem; +use rustc::mir::interpret::{Pointer, PointerArithmetic, Scalar}; +use rustc::ty::subst::Subst; +use rustc_target::abi::{LayoutOf, Size}; +use syntax_pos::Symbol; + +use crate::interpret::{MemoryKind, MPlaceTy, intrinsics::{InterpCx, InterpResult, Machine}}; + +impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { + pub fn alloc_caller_location( + &mut self, + filename: Symbol, + line: u32, + col: u32, + ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> { + let line = Scalar::from_u32(line); + let col = Scalar::from_u32(col); + + let ptr_size = self.pointer_size(); + let u32_size = Size::from_bits(32); + + let loc_ty = self.tcx.type_of(self.tcx.require_lang_item(PanicLocationLangItem, None)) + .subst(*self.tcx, self.tcx.mk_substs([self.tcx.lifetimes.re_static.into()].iter())); + let loc_layout = self.layout_of(loc_ty)?; + + let file_alloc = self.tcx.allocate_bytes(filename.as_str().as_bytes()); + let file_ptr = Pointer::new(file_alloc, Size::ZERO); + let file = Scalar::Ptr(self.tag_static_base_pointer(file_ptr)); + let file_len = Scalar::from_uint(filename.as_str().len() as u128, ptr_size); + + let location = self.allocate(loc_layout, MemoryKind::Stack); + + let file_out = self.mplace_field(location, 0)?; + let file_ptr_out = self.force_ptr(self.mplace_field(file_out, 0)?.ptr)?; + let file_len_out = self.force_ptr(self.mplace_field(file_out, 1)?.ptr)?; + let line_out = self.force_ptr(self.mplace_field(location, 1)?.ptr)?; + let col_out = self.force_ptr(self.mplace_field(location, 2)?.ptr)?; + + let layout = &self.tcx.data_layout; + let alloc = self.memory.get_mut(file_ptr_out.alloc_id)?; + + alloc.write_scalar(layout, file_ptr_out, file.into(), ptr_size)?; + alloc.write_scalar(layout, file_len_out, file_len.into(), ptr_size)?; + alloc.write_scalar(layout, line_out, line.into(), u32_size)?; + alloc.write_scalar(layout, col_out, col.into(), u32_size)?; + + Ok(location) + } +} diff --git a/src/librustc_mir/interpret/intrinsics/type_name.rs b/src/librustc_mir/interpret/intrinsics/type_name.rs index 1e765a4ed9..f1f9fac08c 100644 --- a/src/librustc_mir/interpret/intrinsics/type_name.rs +++ b/src/librustc_mir/interpret/intrinsics/type_name.rs @@ -1,6 +1,6 @@ use rustc::ty::{ TyCtxt, Ty, - subst::{UnpackedKind, Kind}, + subst::{GenericArgKind, GenericArg}, print::{Printer, PrettyPrinter, Print}, self, }; @@ -32,7 +32,7 @@ impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> { } fn print_type(mut self, ty: Ty<'tcx>) -> Result { - match ty.sty { + match ty.kind { // Types without identity. 
| ty::Bool | ty::Char @@ -67,9 +67,8 @@ impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> { | ty::Opaque(def_id, substs) | ty::Projection(ty::ProjectionTy { item_def_id: def_id, substs }) | ty::UnnormalizedProjection(ty::ProjectionTy { item_def_id: def_id, substs }) - | ty::Closure(def_id, ty::ClosureSubsts { substs }) - | ty::Generator(def_id, ty::GeneratorSubsts { substs }, _) - => self.print_def_path(def_id, substs), + | ty::Closure(def_id, substs) + | ty::Generator(def_id, substs, _) => self.print_def_path(def_id, substs), ty::Foreign(def_id) => self.print_def_path(def_id, &[]), ty::GeneratorWitness(_) => { @@ -149,19 +148,19 @@ impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> { self.path.push_str("::"); - self.path.push_str(&disambiguated_data.data.as_interned_str().as_str()); + self.path.push_str(&disambiguated_data.data.as_symbol().as_str()); Ok(self) } fn path_generic_args( mut self, print_prefix: impl FnOnce(Self) -> Result, - args: &[Kind<'tcx>], + args: &[GenericArg<'tcx>], ) -> Result { self = print_prefix(self)?; let args = args.iter().cloned().filter(|arg| { match arg.unpack() { - UnpackedKind::Lifetime(_) => false, + GenericArgKind::Lifetime(_) => false, _ => true, } }); diff --git a/src/librustc_mir/interpret/machine.rs b/src/librustc_mir/interpret/machine.rs index bb74a50156..870e50a3cb 100644 --- a/src/librustc_mir/interpret/machine.rs +++ b/src/librustc_mir/interpret/machine.rs @@ -8,10 +8,12 @@ use std::hash::Hash; use rustc::hir::def_id::DefId; use rustc::mir; use rustc::ty::{self, Ty, TyCtxt}; +use syntax_pos::Span; use super::{ Allocation, AllocId, InterpResult, Scalar, AllocationExtra, InterpCx, PlaceTy, OpTy, ImmTy, MemoryKind, Pointer, Memory, + Frame, Operand, }; /// Whether this kind of memory is allowed to leak @@ -151,6 +153,7 @@ pub trait Machine<'mir, 'tcx>: Sized { /// If this returns successfully, the engine will take care of jumping to the next block. fn call_intrinsic( ecx: &mut InterpCx<'mir, 'tcx, Self>, + span: Span, instance: ty::Instance<'tcx>, args: &[OpTy<'tcx, Self::PointerTag>], dest: PlaceTy<'tcx, Self::PointerTag>, @@ -184,6 +187,22 @@ pub trait Machine<'mir, 'tcx>: Sized { dest: PlaceTy<'tcx, Self::PointerTag>, ) -> InterpResult<'tcx>; + /// Called to read the specified `local` from the `frame`. + fn access_local( + _ecx: &InterpCx<'mir, 'tcx, Self>, + frame: &Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>, + local: mir::Local, + ) -> InterpResult<'tcx, Operand> { + frame.locals[local].access() + } + + /// Called before a `StaticKind::Static` value is accessed. + fn before_access_static( + _allocation: &Allocation, + ) -> InterpResult<'tcx> { + Ok(()) + } + /// Called to initialize the "extra" state of an allocation and make the pointers /// it contains (in relocations) tagged. The way we construct allocations is /// to always first construct it without extra and then add the extra. diff --git a/src/librustc_mir/interpret/memory.rs b/src/librustc_mir/interpret/memory.rs index 62b1760508..d113ee3316 100644 --- a/src/librustc_mir/interpret/memory.rs +++ b/src/librustc_mir/interpret/memory.rs @@ -22,7 +22,7 @@ use super::{ Machine, AllocMap, MayLeak, ErrorHandled, CheckInAllocMsg, }; -#[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)] +#[derive(Debug, PartialEq, Copy, Clone)] pub enum MemoryKind { /// Error if deallocated except during a stack pop Stack, @@ -462,6 +462,8 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> { // Make sure we use the ID of the resolved memory, not the lazy one! 
let id = raw_const.alloc_id; let allocation = tcx.alloc_map.lock().unwrap_memory(id); + + M::before_access_static(allocation)?; Cow::Borrowed(allocation) } } @@ -783,6 +785,26 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> { self.get(ptr.alloc_id)?.read_c_str(self, ptr) } + /// Writes the given stream of bytes into memory. + /// + /// Performs appropriate bounds checks. + pub fn write_bytes( + &mut self, + ptr: Scalar, + src: impl IntoIterator, + ) -> InterpResult<'tcx> + { + let src = src.into_iter(); + let size = Size::from_bytes(src.size_hint().0 as u64); + // `write_bytes` checks that this lower bound matches the upper bound matches reality. + let ptr = match self.check_ptr_access(ptr, size, Align::from_bytes(1).unwrap())? { + Some(ptr) => ptr, + None => return Ok(()), // zero-sized access + }; + let tcx = self.tcx.tcx; + self.get_mut(ptr.alloc_id)?.write_bytes(&tcx, ptr, src) + } + /// Expects the caller to have checked bounds and alignment. pub fn copy( &mut self, diff --git a/src/librustc_mir/interpret/operand.rs b/src/librustc_mir/interpret/operand.rs index dd214c4a03..ae23971849 100644 --- a/src/librustc_mir/interpret/operand.rs +++ b/src/librustc_mir/interpret/operand.rs @@ -19,14 +19,14 @@ use super::{ }; pub use rustc::mir::interpret::ScalarMaybeUndef; -/// A `Value` represents a single immediate self-contained Rust value. +/// An `Immediate` represents a single immediate self-contained Rust value. /// /// For optimization of a few very common cases, there is also a representation for a pair of /// primitive values (`ScalarPair`). It allows Miri to avoid making allocations for checked binary /// operations and fat pointers. This idea was taken from rustc's codegen. /// In particular, thanks to `ScalarPair`, arithmetic operations and casts can be entirely /// defined on `Immediate`, and do not have to work with a `Place`. -#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +#[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum Immediate { Scalar(ScalarMaybeUndef), ScalarPair(ScalarMaybeUndef, ScalarMaybeUndef), @@ -123,7 +123,7 @@ impl<'tcx, Tag> ::std::ops::Deref for ImmTy<'tcx, Tag> { /// An `Operand` is the result of computing a `mir::Operand`. It can be immediate, /// or still in memory. The latter is an optimization, to delay reading that chunk of /// memory and to avoid having to store arbitrary-sized data here. -#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +#[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum Operand { Immediate(Immediate), Indirect(MemPlace), @@ -153,7 +153,7 @@ impl Operand { } } -#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +#[derive(Copy, Clone, Debug, PartialEq)] pub struct OpTy<'tcx, Tag=()> { op: Operand, // Keep this private, it helps enforce invariants pub layout: TyLayout<'tcx>, @@ -335,6 +335,17 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { } } + /// Read vector length and element type + pub fn read_vector_ty( + &self, op: OpTy<'tcx, M::PointerTag> + ) -> (u64, &rustc::ty::TyS<'tcx>) { + if let layout::Abi::Vector { .. } = op.layout.abi { + (op.layout.ty.simd_size(*self.tcx) as _, op.layout.ty.simd_type(*self.tcx)) + } else { + bug!("Type `{}` is not a SIMD vector type", op.layout.ty) + } + } + /// Read a scalar from a place pub fn read_scalar( &self, @@ -447,7 +458,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { // Do not read from ZST, they might not be initialized Operand::Immediate(Scalar::zst().into()) } else { - frame.locals[local].access()? 
+ M::access_local(&self, frame, local)? }; Ok(OpTy { op, layout }) } @@ -470,7 +481,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { // Evaluate a place with the goal of reading from it. This lets us sometimes // avoid allocations. - pub(super) fn eval_place_to_op( + pub fn eval_place_to_op( &self, place: &mir::Place<'tcx>, layout: Option>, @@ -578,8 +589,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { let ptr = self.tag_static_base_pointer(Pointer::new(id, offset)); Operand::Indirect(MemPlace::from_ptr(ptr, layout.align.abi)) }, - ConstValue::Scalar(x) => - Operand::Immediate(tag_scalar(x).into()), + ConstValue::Scalar(x) => Operand::Immediate(tag_scalar(x).into()), ConstValue::Slice { data, start, end } => { // We rely on mutability being set correctly in `data` to prevent writes // where none should happen. @@ -595,6 +605,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { } ConstValue::Param(..) | ConstValue::Infer(..) | + ConstValue::Bound(..) | ConstValue::Placeholder(..) | ConstValue::Unevaluated(..) => bug!("eval_const_to_op: Unexpected ConstValue {:?}", val), @@ -636,7 +647,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { let bits_discr = raw_discr .not_undef() .and_then(|raw_discr| self.force_bits(raw_discr, discr_val.layout.size)) - .map_err(|_| err_unsup!(InvalidDiscriminant(raw_discr.erase_tag())))?; + .map_err(|_| err_ub!(InvalidDiscriminant(raw_discr.erase_tag())))?; let real_discr = if discr_val.layout.ty.is_signed() { // going from layout tag type to typeck discriminant type // requires first sign extending with the discriminant layout @@ -653,16 +664,20 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { bits_discr }; // Make sure we catch invalid discriminants - let index = match &rval.layout.ty.sty { + let index = match rval.layout.ty.kind { ty::Adt(adt, _) => adt .discriminants(self.tcx.tcx) .find(|(_, var)| var.val == real_discr), - ty::Generator(def_id, substs, _) => substs - .discriminants(*def_id, self.tcx.tcx) - .find(|(_, var)| var.val == real_discr), + ty::Generator(def_id, substs, _) => { + let substs = substs.as_generator(); + substs + .discriminants(def_id, self.tcx.tcx) + .find(|(_, var)| var.val == real_discr) + } _ => bug!("tagged layout for non-adt non-generator"), + }.ok_or_else( - || err_unsup!(InvalidDiscriminant(raw_discr.erase_tag())) + || err_ub!(InvalidDiscriminant(raw_discr.erase_tag())) )?; (real_discr, index.0) }, @@ -674,7 +689,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { let variants_start = niche_variants.start().as_u32(); let variants_end = niche_variants.end().as_u32(); let raw_discr = raw_discr.not_undef().map_err(|_| { - err_unsup!(InvalidDiscriminant(ScalarMaybeUndef::Undef)) + err_ub!(InvalidDiscriminant(ScalarMaybeUndef::Undef)) })?; match raw_discr.to_bits_or_ptr(discr_val.layout.size, self) { Err(ptr) => { @@ -682,7 +697,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { let ptr_valid = niche_start == 0 && variants_start == variants_end && !self.memory.ptr_may_be_null(ptr); if !ptr_valid { - throw_unsup!(InvalidDiscriminant(raw_discr.erase_tag().into())) + throw_ub!(InvalidDiscriminant(raw_discr.erase_tag().into())) } (dataful_variant.as_u32() as u128, dataful_variant) }, diff --git a/src/librustc_mir/interpret/operator.rs b/src/librustc_mir/interpret/operator.rs index 470cc9346e..176b084f22 100644 --- a/src/librustc_mir/interpret/operator.rs +++ 
b/src/librustc_mir/interpret/operator.rs @@ -274,7 +274,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { trace!("Running binary op {:?}: {:?} ({:?}), {:?} ({:?})", bin_op, *left, left.layout.ty, *right, right.layout.ty); - match left.layout.ty.sty { + match left.layout.ty.kind { ty::Char => { assert_eq!(left.layout.ty, right.layout.ty); let left = left.to_scalar()?; @@ -348,7 +348,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { let val = val.to_scalar()?; trace!("Running unary op {:?}: {:?} ({:?})", un_op, val, layout.ty); - match layout.ty.sty { + match layout.ty.kind { ty::Bool => { let val = val.to_bool()?; let res = match un_op { diff --git a/src/librustc_mir/interpret/place.rs b/src/librustc_mir/interpret/place.rs index c3660fb7a2..0289c52fd3 100644 --- a/src/librustc_mir/interpret/place.rs +++ b/src/librustc_mir/interpret/place.rs @@ -193,7 +193,7 @@ impl<'tcx, Tag> MPlaceTy<'tcx, Tag> { pub(super) fn len(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, u64> { if self.layout.is_unsized() { // We need to consult `meta` metadata - match self.layout.ty.sty { + match self.layout.ty.kind { ty::Slice(..) | ty::Str => return self.mplace.meta.unwrap().to_usize(cx), _ => bug!("len not supported on unsized type {:?}", self.layout.ty), @@ -210,7 +210,7 @@ impl<'tcx, Tag> MPlaceTy<'tcx, Tag> { #[inline] pub(super) fn vtable(self) -> Scalar { - match self.layout.ty.sty { + match self.layout.ty.kind { ty::Dynamic(..) => self.mplace.meta.unwrap(), _ => bug!("vtable not supported on type {:?}", self.layout.ty), } @@ -377,8 +377,8 @@ where layout::FieldPlacement::Array { stride, .. } => { let len = base.len(self)?; if field >= len { - // This can be violated because this runs during promotion on code where the - // type system has not yet ensured that such things don't happen. + // This can be violated because the index (field) can be a runtime value + // provided by the user. debug!("tried to access element {} of array/slice with length {}", field, len); throw_panic!(BoundsCheck { len, index: field }); } @@ -386,7 +386,8 @@ where } layout::FieldPlacement::Union(count) => { assert!(field < count as u64, - "Tried to access field {} of union with {} fields", field, count); + "Tried to access field {} of union {:#?} with {} fields", + field, base.layout, count); // Offset is always 0 Size::from_bytes(0) } @@ -459,7 +460,7 @@ where // Compute meta and new layout let inner_len = len - to - from; - let (meta, ty) = match base.layout.ty.sty { + let (meta, ty) = match base.layout.ty.kind { // It is not nice to match on the type, but that seems to be the only way to // implement this. ty::Array(inner, _) => @@ -590,6 +591,13 @@ where StaticKind::Promoted(promoted, promoted_substs) => { let substs = self.subst_from_frame_and_normalize_erasing_regions(promoted_substs); let instance = ty::Instance::new(place_static.def_id, substs); + + // Even after getting `substs` from the frame, this instance may still be + // polymorphic because `ConstProp` will try to promote polymorphic MIR. 
+ if instance.needs_subst() { + throw_inval!(TooGeneric); + } + self.const_eval_raw(GlobalId { instance, promoted: Some(promoted), @@ -1023,9 +1031,13 @@ where variant_index: VariantIdx, dest: PlaceTy<'tcx, M::PointerTag>, ) -> InterpResult<'tcx> { + let variant_scalar = Scalar::from_u32(variant_index.as_u32()).into(); + match dest.layout.variants { layout::Variants::Single { index } => { - assert_eq!(index, variant_index); + if index != variant_index { + throw_ub!(InvalidDiscriminant(variant_scalar)); + } } layout::Variants::Multiple { discr_kind: layout::DiscriminantKind::Tag, @@ -1033,7 +1045,9 @@ where discr_index, .. } => { - assert!(dest.layout.ty.variant_range(*self.tcx).unwrap().contains(&variant_index)); + if !dest.layout.ty.variant_range(*self.tcx).unwrap().contains(&variant_index) { + throw_ub!(InvalidDiscriminant(variant_scalar)); + } let discr_val = dest.layout.ty.discriminant_for_variant(*self.tcx, variant_index).unwrap().val; @@ -1056,9 +1070,9 @@ where discr_index, .. } => { - assert!( - variant_index.as_usize() < dest.layout.ty.ty_adt_def().unwrap().variants.len(), - ); + if !variant_index.as_usize() < dest.layout.ty.ty_adt_def().unwrap().variants.len() { + throw_ub!(InvalidDiscriminant(variant_scalar)); + } if variant_index != dataful_variant { let variants_start = niche_variants.start().as_u32(); let variant_index_relative = variant_index.as_u32() diff --git a/src/librustc_mir/interpret/snapshot.rs b/src/librustc_mir/interpret/snapshot.rs index 2cac8bb0c5..7ce151e087 100644 --- a/src/librustc_mir/interpret/snapshot.rs +++ b/src/librustc_mir/interpret/snapshot.rs @@ -17,7 +17,7 @@ use rustc::mir::interpret::{ use rustc::ty::{self, TyCtxt}; use rustc::ty::layout::{Align, Size}; use rustc_data_structures::fx::FxHashSet; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc_index::vec::IndexVec; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use syntax::ast::Mutability; use syntax::source_map::Span; @@ -52,9 +52,9 @@ impl<'mir, 'tcx> InfiniteLoopDetector<'mir, 'tcx> { ) -> InterpResult<'tcx, ()> { // Compute stack's hash before copying anything let mut hcx = tcx.get_stable_hashing_context(); - let mut hasher = StableHasher::::new(); + let mut hasher = StableHasher::new(); stack.hash_stable(&mut hcx, &mut hasher); - let hash = hasher.finish(); + let hash = hasher.finish::(); // Check if we know that hash already if self.hashes.is_empty() { @@ -428,9 +428,9 @@ impl<'mir, 'tcx> Hash for InterpSnapshot<'mir, 'tcx> { fn hash(&self, state: &mut H) { // Implement in terms of hash stable, so that k1 == k2 -> hash(k1) == hash(k2) let mut hcx = self.memory.tcx.get_stable_hashing_context(); - let mut hasher = StableHasher::::new(); + let mut hasher = StableHasher::new(); self.hash_stable(&mut hcx, &mut hasher); - hasher.finish().hash(state) + hasher.finish::().hash(state) } } diff --git a/src/librustc_mir/interpret/step.rs b/src/librustc_mir/interpret/step.rs index affca10bf5..daca7a2578 100644 --- a/src/librustc_mir/interpret/step.rs +++ b/src/librustc_mir/interpret/step.rs @@ -132,7 +132,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { /// /// There is no separate `eval_rvalue` function. Instead, the code for handling each rvalue /// type writes its results directly into the memory specified by the place. 
- fn eval_rvalue_into_place( + pub fn eval_rvalue_into_place( &mut self, rvalue: &mir::Rvalue<'tcx>, place: &mir::Place<'tcx>, diff --git a/src/librustc_mir/interpret/terminator.rs b/src/librustc_mir/interpret/terminator.rs index 8310ef02f9..d90f2058aa 100644 --- a/src/librustc_mir/interpret/terminator.rs +++ b/src/librustc_mir/interpret/terminator.rs @@ -75,7 +75,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { }; let func = self.eval_operand(func, None)?; - let (fn_val, abi) = match func.layout.ty.sty { + let (fn_val, abi) = match func.layout.ty.kind { ty::FnPtr(sig) => { let caller_abi = sig.abi(); let fn_ptr = self.read_scalar(func)?.not_undef()?; @@ -140,12 +140,12 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { .read_immediate(self.eval_operand(len, None)?) .expect("can't eval len") .to_scalar()? - .to_bits(self.memory().pointer_size())? as u64; + .to_bits(self.memory.pointer_size())? as u64; let index = self .read_immediate(self.eval_operand(index, None)?) .expect("can't eval index") .to_scalar()? - .to_bits(self.memory().pointer_size())? as u64; + .to_bits(self.memory.pointer_size())? as u64; err_panic!(BoundsCheck { len, index }) } Overflow(op) => err_panic!(Overflow(*op)), @@ -249,16 +249,13 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { match instance.def { ty::InstanceDef::Intrinsic(..) => { - if caller_abi != Abi::RustIntrinsic { - throw_unsup!(FunctionAbiMismatch(caller_abi, Abi::RustIntrinsic)) - } // The intrinsic itself cannot diverge, so if we got here without a return // place... (can happen e.g., for transmute returning `!`) let dest = match dest { Some(dest) => dest, None => throw_ub!(Unreachable) }; - M::call_intrinsic(self, instance, args, dest)?; + M::call_intrinsic(self, span, instance, args, dest)?; // No stack frame gets pushed, the main loop will just act as if the // call completed. self.goto_block(ret)?; @@ -266,6 +263,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { Ok(()) } ty::InstanceDef::VtableShim(..) | + ty::InstanceDef::ReifyShim(..) | ty::InstanceDef::ClosureOnceShim { .. } | ty::InstanceDef::FnPtrShim(..) | ty::InstanceDef::DropGlue(..) | @@ -275,7 +273,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { { let callee_abi = { let instance_ty = instance.ty(*self.tcx); - match instance_ty.sty { + match instance_ty.kind { ty::FnDef(..) => instance_ty.fn_sig(*self.tcx).abi(), ty::Closure(..) => Abi::RustCall, @@ -482,7 +480,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { // implementation fail -- a problem shared by rustc. let place = self.force_allocation(place)?; - let (instance, place) = match place.layout.ty.sty { + let (instance, place) = match place.layout.ty.kind { ty::Dynamic(..) => { // Dropping a trait object. self.unpack_dyn_trait(place)? diff --git a/src/librustc_mir/interpret/validity.rs b/src/librustc_mir/interpret/validity.rs index 3e14ba3efc..3444fb60f3 100644 --- a/src/librustc_mir/interpret/validity.rs +++ b/src/librustc_mir/interpret/validity.rs @@ -188,7 +188,7 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, 'tcx, M layout: TyLayout<'tcx>, field: usize, ) -> PathElem { - match layout.ty.sty { + match layout.ty.kind { // generators and closures. 
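In the `terminator.rs` hunk above, the `len` and `index` operands of a failed `BoundsCheck` assertion are read back so the interpreter can raise the same panic an out-of-bounds index produces at run time. A standalone sketch of that surface-level behavior using only the standard library (the message quoted in the comment is the one `std` prints today):

```rust
fn main() {
    let xs = [1, 2, 3];
    let index = 5;

    // Checked access reports the failure as `None` instead of panicking.
    assert!(xs.get(index).is_none());

    // Direct indexing raises the bounds-check panic the interpreter models
    // with `BoundsCheck { len, index }`:
    // "index out of bounds: the len is 3 but the index is 5"
    let caught = std::panic::catch_unwind(|| xs[index]);
    assert!(caught.is_err());
}
```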
ty::Closure(def_id, _) | ty::Generator(def_id, _, _) => { let mut name = None; @@ -200,7 +200,7 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, 'tcx, M if let Some((&var_hir_id, _)) = upvars.get_index(field) { let node = self.ecx.tcx.hir().get(var_hir_id); if let hir::Node::Binding(pat) = node { - if let hir::PatKind::Binding(_, _, ident, _) = pat.node { + if let hir::PatKind::Binding(_, _, ident, _) = pat.kind { name = Some(ident.name); } } @@ -263,7 +263,7 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, 'tcx, M pointee: TyLayout<'tcx>, ) -> InterpResult<'tcx> { let tail = self.ecx.tcx.struct_tail_erasing_lifetimes(pointee.ty, self.ecx.param_env); - match tail.sty { + match tail.kind { ty::Dynamic(..) => { let vtable = meta.unwrap(); try_validation!( @@ -327,7 +327,7 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M> variant_id: VariantIdx, new_op: OpTy<'tcx, M::PointerTag> ) -> InterpResult<'tcx> { - let name = match old_op.layout.ty.sty { + let name = match old_op.layout.ty.kind { ty::Adt(adt, _) => PathElem::Variant(adt.variants[variant_id].ident.name), // Generators also have variants ty::Generator(..) => PathElem::GeneratorState(variant_id), @@ -344,7 +344,7 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M> match self.walk_value(op) { Ok(()) => Ok(()), Err(err) => match err.kind { - err_unsup!(InvalidDiscriminant(val)) => + err_ub!(InvalidDiscriminant(val)) => throw_validation_failure!( val, self.path, "a valid enum discriminant" ), @@ -362,7 +362,7 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M> let value = self.ecx.read_immediate(value)?; // Go over all the primitive types let ty = value.layout.ty; - match ty.sty { + match ty.kind { ty::Bool => { let value = value.to_scalar_or_undef(); try_validation!(value.to_bool(), @@ -581,7 +581,7 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M> op: OpTy<'tcx, M::PointerTag>, fields: impl Iterator>, ) -> InterpResult<'tcx> { - match op.layout.ty.sty { + match op.layout.ty.kind { ty::Str => { let mplace = op.assert_mem_place(); // strings are never immediate try_validation!(self.ecx.read_str(mplace), @@ -590,7 +590,7 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M> ty::Array(tys, ..) | ty::Slice(tys) if { // This optimization applies only for integer and floating point types // (i.e., types that can hold arbitrary bytes). - match tys.sty { + match tys.kind { ty::Int(..) | ty::Uint(..) | ty::Float(..) => true, _ => false, } diff --git a/src/librustc_mir/interpret/visitor.rs b/src/librustc_mir/interpret/visitor.rs index 91fbd307db..427f94f4fb 100644 --- a/src/librustc_mir/interpret/visitor.rs +++ b/src/librustc_mir/interpret/visitor.rs @@ -239,7 +239,7 @@ macro_rules! make_value_visitor { // Even for single variants, we might be able to get a more refined type: // If it is a trait object, switch to the actual type that was used to create it. - match v.layout().ty.sty { + match v.layout().ty.kind { ty::Dynamic(..) => { // immediate trait objects are not a thing let dest = v.to_op(self.ecx())?.assert_mem_place(); diff --git a/src/librustc_mir/lib.rs b/src/librustc_mir/lib.rs index 9a03719ab6..4d604cb025 100644 --- a/src/librustc_mir/lib.rs +++ b/src/librustc_mir/lib.rs @@ -6,6 +6,7 @@ Rust MIR: a lowered representation of Rust. Also: an experiment! 
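The `ty::Str` arm of the validator above rejects uninitialized or non-UTF-8 bytes behind a `str`. The invariant it enforces can be illustrated outside the interpreter with `std::str::from_utf8`, which performs the same well-formedness check:

```rust
fn main() {
    // Valid UTF-8 is accepted.
    assert_eq!(std::str::from_utf8(&[0x68, 0x69]), Ok("hi"));

    // A lone continuation byte is not valid UTF-8 and is rejected; the
    // validity pass treats such bytes behind a `str` the same way.
    assert!(std::str::from_utf8(&[0x80]).is_err());
}
```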
#![feature(nll)] #![feature(in_band_lifetimes)] +#![feature(inner_deref)] #![feature(slice_patterns)] #![feature(box_patterns)] #![feature(box_syntax)] @@ -13,6 +14,7 @@ Rust MIR: a lowered representation of Rust. Also: an experiment! #![feature(core_intrinsics)] #![feature(const_fn)] #![feature(decl_macro)] +#![feature(drain_filter)] #![feature(exhaustive_patterns)] #![feature(never_type)] #![feature(specialization)] @@ -21,9 +23,9 @@ Rust MIR: a lowered representation of Rust. Also: an experiment! #![feature(slice_concat_ext)] #![feature(trusted_len)] #![feature(try_blocks)] -#![feature(mem_take)] #![feature(associated_type_bounds)] #![feature(range_is_empty)] +#![feature(stmt_expr_attributes)] #![recursion_limit="256"] @@ -56,6 +58,7 @@ pub fn provide(providers: &mut Providers<'_>) { providers.const_eval = const_eval::const_eval_provider; providers.const_eval_raw = const_eval::const_eval_raw_provider; providers.check_match = hair::pattern::check_match; + providers.const_caller_location = const_eval::const_caller_location; providers.const_field = |tcx, param_env_and_value| { let (param_env, (value, field)) = param_env_and_value.into_parts(); const_eval::const_field(tcx, param_env, None, field, value) diff --git a/src/librustc_mir/lints.rs b/src/librustc_mir/lints.rs index 8c815a51b5..158b730b9b 100644 --- a/src/librustc_mir/lints.rs +++ b/src/librustc_mir/lints.rs @@ -1,4 +1,4 @@ -use rustc_data_structures::bit_set::BitSet; +use rustc_index::bit_set::BitSet; use rustc::hir::def_id::DefId; use rustc::hir::intravisit::FnKind; use rustc::hir::map::blocks::FnLikeNode; @@ -72,13 +72,11 @@ fn check_fn_for_unconditional_recursion( let caller_substs = &InternalSubsts::identity_for_item(tcx, def_id)[..trait_substs_count]; while let Some(bb) = reachable_without_self_call_queue.pop() { - if visited.contains(bb) { + if !visited.insert(bb) { //already done continue; } - visited.insert(bb); - let block = &basic_blocks[bb]; if let Some(ref terminator) = block.terminator { @@ -86,7 +84,7 @@ fn check_fn_for_unconditional_recursion( TerminatorKind::Call { ref func, .. 
} => { let func_ty = func.ty(body, tcx); - if let ty::FnDef(fn_def_id, substs) = func_ty.sty { + if let ty::FnDef(fn_def_id, substs) = func_ty.kind { let (call_fn_id, call_substs) = if let Some(instance) = Instance::resolve(tcx, param_env, diff --git a/src/librustc_mir/monomorphize/collector.rs b/src/librustc_mir/monomorphize/collector.rs index 1f7efebfda..5e31b80bec 100644 --- a/src/librustc_mir/monomorphize/collector.rs +++ b/src/librustc_mir/monomorphize/collector.rs @@ -194,12 +194,12 @@ use crate::monomorphize; use rustc::util::nodemap::{FxHashSet, FxHashMap, DefIdMap}; use rustc::util::common::time; -use rustc_data_structures::bit_set::GrowableBitSet; +use rustc_index::bit_set::GrowableBitSet; use rustc_data_structures::sync::{MTRef, MTLock, ParallelIterator, par_iter}; use std::iter; -#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)] +#[derive(PartialEq)] pub enum MonoItemCollectionMode { Eager, Lazy @@ -285,7 +285,11 @@ pub fn collect_crate_mono_items( tcx: TyCtxt<'_>, mode: MonoItemCollectionMode, ) -> (FxHashSet>, InliningMap<'_>) { + let _prof_timer = tcx.prof.generic_activity("monomorphization_collector"); + let roots = time(tcx.sess, "collecting roots", || { + let _prof_timer = tcx.prof + .generic_activity("monomorphization_collector_root_collections"); collect_roots(tcx, mode) }); @@ -295,6 +299,9 @@ pub fn collect_crate_mono_items( let mut inlining_map = MTLock::new(InliningMap::new()); { + let _prof_timer = tcx.prof + .generic_activity("monomorphization_collector_graph_walk"); + let visited: MTRef<'_, _> = &mut visited; let inlining_map: MTRef<'_, _> = &mut inlining_map; @@ -578,10 +585,11 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> { ty::ParamEnv::reveal_all(), &source_ty, ); - match source_ty.sty { + match source_ty.kind { ty::Closure(def_id, substs) => { let instance = Instance::resolve_closure( - self.tcx, def_id, substs, ty::ClosureKind::FnOnce); + self.tcx, def_id, + substs, ty::ClosureKind::FnOnce); if should_monomorphize_locally(self.tcx, &instance) { self.output.push(create_fn_mono_item(instance)); } @@ -712,11 +720,13 @@ fn visit_fn_use<'tcx>( is_direct_call: bool, output: &mut Vec>, ) { - if let ty::FnDef(def_id, substs) = ty.sty { - let instance = ty::Instance::resolve(tcx, - ty::ParamEnv::reveal_all(), - def_id, - substs).unwrap(); + if let ty::FnDef(def_id, substs) = ty.kind { + let resolver = if is_direct_call { + ty::Instance::resolve + } else { + ty::Instance::resolve_for_fn_ptr + }; + let instance = resolver(tcx, ty::ParamEnv::reveal_all(), def_id, substs).unwrap(); visit_instance_use(tcx, instance, is_direct_call, output); } } @@ -739,6 +749,7 @@ fn visit_instance_use<'tcx>( } } ty::InstanceDef::VtableShim(..) | + ty::InstanceDef::ReifyShim(..) | ty::InstanceDef::Virtual(..) | ty::InstanceDef::DropGlue(_, None) => { // don't need to emit shim if we are calling directly. @@ -765,6 +776,7 @@ fn should_monomorphize_locally<'tcx>(tcx: TyCtxt<'tcx>, instance: &Instance<'tcx let def_id = match instance.def { ty::InstanceDef::Item(def_id) => def_id, ty::InstanceDef::VtableShim(..) | + ty::InstanceDef::ReifyShim(..) | ty::InstanceDef::ClosureOnceShim { .. } | ty::InstanceDef::Virtual(..) | ty::InstanceDef::FnPtrShim(..) | @@ -874,7 +886,7 @@ fn find_vtable_types_for_unsizing<'tcx>( return false; } let tail = tcx.struct_tail_erasing_lifetimes(ty, param_env); - match tail.sty { + match tail.kind { ty::Foreign(..) => false, ty::Str | ty::Slice(..) | ty::Dynamic(..) 
=> true, _ => bug!("unexpected unsized tail: {:?}", tail), @@ -887,7 +899,7 @@ fn find_vtable_types_for_unsizing<'tcx>( } }; - match (&source_ty.sty, &target_ty.sty) { + match (&source_ty.kind, &target_ty.kind) { (&ty::Ref(_, a, _), &ty::Ref(_, b, _)) | (&ty::Ref(_, a, _), @@ -945,7 +957,7 @@ fn create_mono_items_for_vtable_methods<'tcx>( assert!(!trait_ty.needs_subst() && !trait_ty.has_escaping_bound_vars() && !impl_ty.needs_subst() && !impl_ty.has_escaping_bound_vars()); - if let ty::Dynamic(ref trait_ty, ..) = trait_ty.sty { + if let ty::Dynamic(ref trait_ty, ..) = trait_ty.kind { if let Some(principal) = trait_ty.principal() { let poly_trait_ref = principal.with_self_ty(tcx, impl_ty); assert!(!poly_trait_ref.has_escaping_bound_vars()); @@ -981,7 +993,7 @@ struct RootCollector<'a, 'tcx> { impl ItemLikeVisitor<'v> for RootCollector<'_, 'v> { fn visit_item(&mut self, item: &'v hir::Item) { - match item.node { + match item.kind { hir::ItemKind::ExternCrate(..) | hir::ItemKind::Use(..) | hir::ItemKind::ForeignMod(..) | @@ -1058,7 +1070,7 @@ impl ItemLikeVisitor<'v> for RootCollector<'_, 'v> { } fn visit_impl_item(&mut self, ii: &'v hir::ImplItem) { - match ii.node { + match ii.kind { hir::ImplItemKind::Method(hir::MethodSig { .. }, _) => { let def_id = self.tcx.hir().local_def_id(ii.hir_id); self.push_if_root(def_id); @@ -1141,7 +1153,7 @@ fn create_mono_items_for_default_impls<'tcx>( item: &'tcx hir::Item, output: &mut Vec>, ) { - match item.node { + match item.kind { hir::ItemKind::Impl(_, _, _, ref generics, .., ref impl_item_refs) => { for param in &generics.params { match param.kind { @@ -1264,7 +1276,14 @@ fn collect_const<'tcx>( ) { debug!("visiting const {:?}", constant); - match constant.val { + let param_env = ty::ParamEnv::reveal_all(); + let substituted_constant = tcx.subst_and_normalize_erasing_regions( + param_substs, + param_env, + &constant, + ); + + match substituted_constant.val { ConstValue::Scalar(Scalar::Ptr(ptr)) => collect_miri(tcx, ptr.alloc_id, output), ConstValue::Slice { data: alloc, start: _, end: _ } | @@ -1274,12 +1293,6 @@ fn collect_const<'tcx>( } } ConstValue::Unevaluated(def_id, substs) => { - let param_env = ty::ParamEnv::reveal_all(); - let substs = tcx.subst_and_normalize_erasing_regions( - param_substs, - param_env, - &substs, - ); let instance = ty::Instance::resolve(tcx, param_env, def_id, @@ -1296,7 +1309,7 @@ fn collect_const<'tcx>( tcx.def_span(def_id), "collection encountered polymorphic constant", ), } - } + }, _ => {}, } } diff --git a/src/librustc_mir/monomorphize/item.rs b/src/librustc_mir/monomorphize/item.rs deleted file mode 100644 index e63426281b..0000000000 --- a/src/librustc_mir/monomorphize/item.rs +++ /dev/null @@ -1,204 +0,0 @@ -use rustc::hir::def_id::LOCAL_CRATE; -use rustc::mir::mono::MonoItem; -use rustc::session::config::OptLevel; -use rustc::ty::{self, TyCtxt, Instance}; -use rustc::ty::subst::InternalSubsts; -use rustc::ty::print::obsolete::DefPathBasedNames; -use syntax::attr::InlineAttr; -use std::fmt; -use rustc::mir::mono::Linkage; -use syntax_pos::symbol::InternedString; -use syntax::source_map::Span; - -/// Describes how a monomorphization will be instantiated in object files. -#[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)] -pub enum InstantiationMode { - /// There will be exactly one instance of the given MonoItem. It will have - /// external linkage so that it can be linked to from other codegen units. 
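`find_vtable_types_for_unsizing` above looks at the struct tail to decide whether an unsizing coercion introduces a vtable, and therefore which trait methods the collector must keep. A standalone illustration with a hypothetical `Wrapper` type (nothing here is compiler code): unsizing to a slice only changes the pointer metadata to a length, while unsizing to a trait object makes the impl's methods reachable through a vtable.

```rust
use std::fmt::Display;

// The last field may be a dynamically sized type; everything before it is fixed.
struct Wrapper<T: ?Sized> {
    id: u32,
    tail: T,
}

fn main() {
    // Unsizing `[u8; 3]` to `[u8]`: the metadata is just a length,
    // so no vtable entries need to be collected.
    let fixed: &Wrapper<[u8; 3]> = &Wrapper { id: 1, tail: [1, 2, 3] };
    let sliced: &Wrapper<[u8]> = fixed;
    assert_eq!(sliced.id, 1);
    assert_eq!(sliced.tail.len(), 3);

    // Unsizing `u32` to `dyn Display`: the metadata is a vtable, so the
    // methods of `u32`'s `Display` impl become reachable and must be kept.
    let concrete: &Wrapper<u32> = &Wrapper { id: 2, tail: 7 };
    let object: &Wrapper<dyn Display> = concrete;
    assert_eq!(format!("{}", object.tail), "7");
}
```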
- GloballyShared { - /// In some compilation scenarios we may decide to take functions that - /// are typically `LocalCopy` and instead move them to `GloballyShared` - /// to avoid codegenning them a bunch of times. In this situation, - /// however, our local copy may conflict with other crates also - /// inlining the same function. - /// - /// This flag indicates that this situation is occurring, and informs - /// symbol name calculation that some extra mangling is needed to - /// avoid conflicts. Note that this may eventually go away entirely if - /// ThinLTO enables us to *always* have a globally shared instance of a - /// function within one crate's compilation. - may_conflict: bool, - }, - - /// Each codegen unit containing a reference to the given MonoItem will - /// have its own private copy of the function (with internal linkage). - LocalCopy, -} - -pub trait MonoItemExt<'tcx>: fmt::Debug { - fn as_mono_item(&self) -> &MonoItem<'tcx>; - - fn is_generic_fn(&self) -> bool { - match *self.as_mono_item() { - MonoItem::Fn(ref instance) => { - instance.substs.non_erasable_generics().next().is_some() - } - MonoItem::Static(..) | - MonoItem::GlobalAsm(..) => false, - } - } - - fn symbol_name(&self, tcx: TyCtxt<'tcx>) -> ty::SymbolName { - match *self.as_mono_item() { - MonoItem::Fn(instance) => tcx.symbol_name(instance), - MonoItem::Static(def_id) => { - tcx.symbol_name(Instance::mono(tcx, def_id)) - } - MonoItem::GlobalAsm(hir_id) => { - let def_id = tcx.hir().local_def_id(hir_id); - ty::SymbolName { - name: InternedString::intern(&format!("global_asm_{:?}", def_id)) - } - } - } - } - fn instantiation_mode(&self, tcx: TyCtxt<'tcx>) -> InstantiationMode { - let inline_in_all_cgus = - tcx.sess.opts.debugging_opts.inline_in_all_cgus.unwrap_or_else(|| { - tcx.sess.opts.optimize != OptLevel::No - }) && !tcx.sess.opts.cg.link_dead_code; - - match *self.as_mono_item() { - MonoItem::Fn(ref instance) => { - let entry_def_id = tcx.entry_fn(LOCAL_CRATE).map(|(id, _)| id); - // If this function isn't inlined or otherwise has explicit - // linkage, then we'll be creating a globally shared version. - if self.explicit_linkage(tcx).is_some() || - !instance.def.requires_local(tcx) || - Some(instance.def_id()) == entry_def_id - { - return InstantiationMode::GloballyShared { may_conflict: false } - } - - // At this point we don't have explicit linkage and we're an - // inlined function. If we're inlining into all CGUs then we'll - // be creating a local copy per CGU - if inline_in_all_cgus { - return InstantiationMode::LocalCopy - } - - // Finally, if this is `#[inline(always)]` we're sure to respect - // that with an inline copy per CGU, but otherwise we'll be - // creating one copy of this `#[inline]` function which may - // conflict with upstream crates as it could be an exported - // symbol. - match tcx.codegen_fn_attrs(instance.def_id()).inline { - InlineAttr::Always => InstantiationMode::LocalCopy, - _ => { - InstantiationMode::GloballyShared { may_conflict: true } - } - } - } - MonoItem::Static(..) | - MonoItem::GlobalAsm(..) => { - InstantiationMode::GloballyShared { may_conflict: false } - } - } - } - - fn explicit_linkage(&self, tcx: TyCtxt<'tcx>) -> Option { - let def_id = match *self.as_mono_item() { - MonoItem::Fn(ref instance) => instance.def_id(), - MonoItem::Static(def_id) => def_id, - MonoItem::GlobalAsm(..) 
=> return None, - }; - - let codegen_fn_attrs = tcx.codegen_fn_attrs(def_id); - codegen_fn_attrs.linkage - } - - /// Returns `true` if this instance is instantiable - whether it has no unsatisfied - /// predicates. - /// - /// In order to codegen an item, all of its predicates must hold, because - /// otherwise the item does not make sense. Type-checking ensures that - /// the predicates of every item that is *used by* a valid item *do* - /// hold, so we can rely on that. - /// - /// However, we codegen collector roots (reachable items) and functions - /// in vtables when they are seen, even if they are not used, and so they - /// might not be instantiable. For example, a programmer can define this - /// public function: - /// - /// pub fn foo<'a>(s: &'a mut ()) where &'a mut (): Clone { - /// <&mut () as Clone>::clone(&s); - /// } - /// - /// That function can't be codegened, because the method `<&mut () as Clone>::clone` - /// does not exist. Luckily for us, that function can't ever be used, - /// because that would require for `&'a mut (): Clone` to hold, so we - /// can just not emit any code, or even a linker reference for it. - /// - /// Similarly, if a vtable method has such a signature, and therefore can't - /// be used, we can just not emit it and have a placeholder (a null pointer, - /// which will never be accessed) in its place. - fn is_instantiable(&self, tcx: TyCtxt<'tcx>) -> bool { - debug!("is_instantiable({:?})", self); - let (def_id, substs) = match *self.as_mono_item() { - MonoItem::Fn(ref instance) => (instance.def_id(), instance.substs), - MonoItem::Static(def_id) => (def_id, InternalSubsts::empty()), - // global asm never has predicates - MonoItem::GlobalAsm(..) => return true - }; - - tcx.substitute_normalize_and_test_predicates((def_id, &substs)) - } - - fn to_string(&self, tcx: TyCtxt<'tcx>, debug: bool) -> String { - return match *self.as_mono_item() { - MonoItem::Fn(instance) => { - to_string_internal(tcx, "fn ", instance, debug) - }, - MonoItem::Static(def_id) => { - let instance = Instance::new(def_id, tcx.intern_substs(&[])); - to_string_internal(tcx, "static ", instance, debug) - }, - MonoItem::GlobalAsm(..) => { - "global_asm".to_string() - } - }; - - fn to_string_internal<'a, 'tcx>( - tcx: TyCtxt<'tcx>, - prefix: &str, - instance: Instance<'tcx>, - debug: bool, - ) -> String { - let mut result = String::with_capacity(32); - result.push_str(prefix); - let printer = DefPathBasedNames::new(tcx, false, false); - printer.push_instance_as_string(instance, &mut result, debug); - result - } - } - - fn local_span(&self, tcx: TyCtxt<'tcx>) -> Option { - match *self.as_mono_item() { - MonoItem::Fn(Instance { def, .. 
}) => { - tcx.hir().as_local_hir_id(def.def_id()) - } - MonoItem::Static(def_id) => { - tcx.hir().as_local_hir_id(def_id) - } - MonoItem::GlobalAsm(hir_id) => { - Some(hir_id) - } - }.map(|hir_id| tcx.hir().span(hir_id)) - } -} - -impl MonoItemExt<'tcx> for MonoItem<'tcx> { - fn as_mono_item(&self) -> &MonoItem<'tcx> { - self - } -} diff --git a/src/librustc_mir/monomorphize/partitioning.rs b/src/librustc_mir/monomorphize/partitioning.rs index c193911247..42f08771f8 100644 --- a/src/librustc_mir/monomorphize/partitioning.rs +++ b/src/librustc_mir/monomorphize/partitioning.rs @@ -96,7 +96,7 @@ use std::collections::hash_map::Entry; use std::cmp; use std::sync::Arc; -use syntax::symbol::InternedString; +use syntax::symbol::Symbol; use rustc::hir::CodegenFnAttrFlags; use rustc::hir::def::DefKind; use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE, CRATE_DEF_INDEX}; @@ -121,7 +121,7 @@ pub enum PartitioningStrategy { } // Anything we can't find a proper codegen unit for goes into this. -fn fallback_cgu_name(name_builder: &mut CodegenUnitNameBuilder<'_>) -> InternedString { +fn fallback_cgu_name(name_builder: &mut CodegenUnitNameBuilder<'_>) -> Symbol { name_builder.build_cgu_name(LOCAL_CRATE, &["fallback"], Some("cgu")) } @@ -134,10 +134,15 @@ pub fn partition<'tcx, I>( where I: Iterator>, { + let _prof_timer = tcx.prof.generic_activity("cgu_partitioning"); + // In the first step, we place all regular monomorphizations into their // respective 'home' codegen unit. Regular monomorphizations are all // functions and statics defined in the local crate. - let mut initial_partitioning = place_root_mono_items(tcx, mono_items); + let mut initial_partitioning = { + let _prof_timer = tcx.prof.generic_activity("cgu_partitioning_place_roots"); + place_root_mono_items(tcx, mono_items) + }; initial_partitioning.codegen_units.iter_mut().for_each(|cgu| cgu.estimate_size(tcx)); @@ -146,8 +151,8 @@ where // If the partitioning should produce a fixed count of codegen units, merge // until that count is reached. if let PartitioningStrategy::FixedUnitCount(count) = strategy { + let _prof_timer = tcx.prof.generic_activity("cgu_partitioning_merge_cgus"); merge_codegen_units(tcx, &mut initial_partitioning, count); - debug_dump(tcx, "POST MERGING:", initial_partitioning.codegen_units.iter()); } @@ -155,8 +160,11 @@ where // monomorphizations have to go into each codegen unit. These additional // monomorphizations can be drop-glue, functions from external crates, and // local functions the definition of which is marked with `#[inline]`. - let mut post_inlining = place_inlined_mono_items(initial_partitioning, - inlining_map); + let mut post_inlining = { + let _prof_timer = + tcx.prof.generic_activity("cgu_partitioning_place_inline_items"); + place_inlined_mono_items(initial_partitioning, inlining_map) + }; post_inlining.codegen_units.iter_mut().for_each(|cgu| cgu.estimate_size(tcx)); @@ -165,6 +173,8 @@ where // Next we try to make as many symbols "internal" as possible, so LLVM has // more freedom to optimize. if !tcx.sess.opts.cg.link_dead_code { + let _prof_timer = + tcx.prof.generic_activity("cgu_partitioning_internalize_symbols"); internalize_symbols(tcx, &mut post_inlining, inlining_map); } @@ -175,9 +185,7 @@ where internalization_candidates: _, } = post_inlining; - result.sort_by(|cgu1, cgu2| { - cgu1.name().cmp(cgu2.name()) - }); + result.sort_by_cached_key(|cgu| cgu.name().as_str()); result } @@ -193,7 +201,7 @@ struct PreInliningPartitioning<'tcx> { /// to keep track of that. 
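`partition` above now orders codegen units with `sort_by_cached_key` on the textual name rather than comparing interned symbols directly, which keeps the order independent of the order in which symbols happened to be interned. A small sketch of the API difference with hypothetical CGU names (`sort_by_cached_key` evaluates the key closure once per element and caches the owned keys):

```rust
fn main() {
    let mut cgus = vec!["cgu-3", "cgu-10", "cgu-2"];

    // The key closure runs once per element; the owned `String` keys are
    // cached, which is the point of this method when key building is costly.
    cgus.sort_by_cached_key(|name| name.to_string());

    // Plain lexicographic order of the cached keys:
    assert_eq!(cgus, ["cgu-10", "cgu-2", "cgu-3"]);
}
```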
#[derive(Clone, PartialEq, Eq, Debug)] enum MonoItemPlacement { - SingleCgu { cgu_name: InternedString }, + SingleCgu { cgu_name: Symbol }, MultipleCgus, } @@ -241,8 +249,8 @@ where None => fallback_cgu_name(cgu_name_builder), }; - let codegen_unit = codegen_units.entry(codegen_unit_name.clone()) - .or_insert_with(|| CodegenUnit::new(codegen_unit_name.clone())); + let codegen_unit = codegen_units.entry(codegen_unit_name) + .or_insert_with(|| CodegenUnit::new(codegen_unit_name)); let mut can_be_internalized = true; let (linkage, visibility) = mono_item_linkage_and_visibility( @@ -263,8 +271,7 @@ where // crate with just types (for example), we could wind up with no CGU. if codegen_units.is_empty() { let codegen_unit_name = fallback_cgu_name(cgu_name_builder); - codegen_units.insert(codegen_unit_name.clone(), - CodegenUnit::new(codegen_unit_name.clone())); + codegen_units.insert(codegen_unit_name, CodegenUnit::new(codegen_unit_name)); } PreInliningPartitioning { @@ -329,6 +336,7 @@ fn mono_item_visibility( // These are all compiler glue and such, never exported, always hidden. InstanceDef::VtableShim(..) | + InstanceDef::ReifyShim(..) | InstanceDef::FnPtrShim(..) | InstanceDef::Virtual(..) | InstanceDef::Intrinsic(..) | @@ -481,7 +489,7 @@ fn merge_codegen_units<'tcx>( // smallest into each other) we're sure to start off with a deterministic // order (sorted by name). This'll mean that if two cgus have the same size // the stable sort below will keep everything nice and deterministic. - codegen_units.sort_by_key(|cgu| *cgu.name()); + codegen_units.sort_by_cached_key(|cgu| cgu.name().as_str()); // Merge the two smallest codegen units until the target size is reached. while codegen_units.len() > target_cgu_count { @@ -494,6 +502,9 @@ fn merge_codegen_units<'tcx>( for (k, v) in smallest.items_mut().drain() { second_smallest.items_mut().insert(k, v); } + debug!("CodegenUnit {} merged in to CodegenUnit {}", + smallest.name(), + second_smallest.name()); } let cgu_name_builder = &mut CodegenUnitNameBuilder::new(tcx); @@ -523,7 +534,7 @@ fn place_inlined_mono_items<'tcx>(initial_partitioning: PreInliningPartitioning< follow_inlining(*root, inlining_map, &mut reachable); } - let mut new_codegen_unit = CodegenUnit::new(old_codegen_unit.name().clone()); + let mut new_codegen_unit = CodegenUnit::new(old_codegen_unit.name()); // Add all monomorphizations that are not already there. for mono_item in reachable { @@ -550,8 +561,8 @@ fn place_inlined_mono_items<'tcx>(initial_partitioning: PreInliningPartitioning< Entry::Occupied(e) => { let placement = e.into_mut(); debug_assert!(match *placement { - MonoItemPlacement::SingleCgu { ref cgu_name } => { - *cgu_name != *new_codegen_unit.name() + MonoItemPlacement::SingleCgu { cgu_name } => { + cgu_name != new_codegen_unit.name() } MonoItemPlacement::MultipleCgus => true, }); @@ -559,7 +570,7 @@ fn place_inlined_mono_items<'tcx>(initial_partitioning: PreInliningPartitioning< } Entry::Vacant(e) => { e.insert(MonoItemPlacement::SingleCgu { - cgu_name: new_codegen_unit.name().clone() + cgu_name: new_codegen_unit.name() }); } } @@ -624,7 +635,7 @@ fn internalize_symbols<'tcx>( // accessed from outside its defining codegen unit. 
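`place_inlined_mono_items` above uses the `HashMap` entry API to record whether an item ended up in exactly one codegen unit or in several (`MonoItemPlacement::SingleCgu` vs `MultipleCgus`). A simplified, self-contained sketch of that bookkeeping, with plain strings standing in for symbols and mono items:

```rust
use std::collections::hash_map::{Entry, HashMap};

#[derive(Debug, PartialEq)]
enum Placement {
    SingleCgu(&'static str),
    MultipleCgus,
}

fn record(
    placements: &mut HashMap<&'static str, Placement>,
    item: &'static str,
    cgu: &'static str,
) {
    match placements.entry(item) {
        // Already placed: if a different codegen unit now also holds a copy,
        // the item no longer lives in a single unit.
        Entry::Occupied(entry) => {
            let placement = entry.into_mut();
            if *placement != Placement::SingleCgu(cgu) {
                *placement = Placement::MultipleCgus;
            }
        }
        // First sighting: remember the single home unit.
        Entry::Vacant(entry) => {
            entry.insert(Placement::SingleCgu(cgu));
        }
    }
}

fn main() {
    let mut placements = HashMap::new();
    record(&mut placements, "inline_fn", "cgu_a");
    record(&mut placements, "local_fn", "cgu_a");
    record(&mut placements, "inline_fn", "cgu_b");

    assert_eq!(placements["inline_fn"], Placement::MultipleCgus);
    assert_eq!(placements["local_fn"], Placement::SingleCgu("cgu_a"));
}
```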
for cgu in &mut partitioning.codegen_units { let home_cgu = MonoItemPlacement::SingleCgu { - cgu_name: cgu.name().clone() + cgu_name: cgu.name() }; for (accessee, linkage_and_visibility) in cgu.items_mut() { @@ -664,6 +675,7 @@ fn characteristic_def_id_of_mono_item<'tcx>( let def_id = match instance.def { ty::InstanceDef::Item(def_id) => def_id, ty::InstanceDef::VtableShim(..) | + ty::InstanceDef::ReifyShim(..) | ty::InstanceDef::FnPtrShim(..) | ty::InstanceDef::ClosureOnceShim { .. } | ty::InstanceDef::Intrinsic(..) | @@ -702,7 +714,7 @@ fn characteristic_def_id_of_mono_item<'tcx>( } } -type CguNameCache = FxHashMap<(DefId, bool), InternedString>; +type CguNameCache = FxHashMap<(DefId, bool), Symbol>; fn compute_codegen_unit_name( tcx: TyCtxt<'_>, @@ -710,7 +722,7 @@ fn compute_codegen_unit_name( def_id: DefId, volatile: bool, cache: &mut CguNameCache, -) -> InternedString { +) -> Symbol { // Find the innermost module that is not nested within a function. let mut current_def_id = def_id; let mut cgu_def_id = None; @@ -747,7 +759,7 @@ fn compute_codegen_unit_name( let components = def_path .data .iter() - .map(|part| part.data.as_interned_str()); + .map(|part| part.data.as_symbol()); let volatile_suffix = if volatile { Some("volatile") @@ -762,7 +774,7 @@ fn compute_codegen_unit_name( fn numbered_codegen_unit_name( name_builder: &mut CodegenUnitNameBuilder<'_>, index: usize, -) -> InternedString { +) -> Symbol { name_builder.build_cgu_name_no_mangle(LOCAL_CRATE, &["cgu"], Some(index)) } @@ -774,7 +786,7 @@ where if cfg!(debug_assertions) { debug!("{}", label); for cgu in cgus { - debug!("CodegenUnit {}:", cgu.name()); + debug!("CodegenUnit {} estimated size {} :", cgu.name(), cgu.size_estimate()); for (mono_item, linkage) in cgu.items() { let symbol_name = mono_item.symbol_name(tcx).name.as_str(); @@ -782,10 +794,11 @@ where let symbol_hash = symbol_hash_start.map(|i| &symbol_name[i ..]) .unwrap_or(""); - debug!(" - {} [{:?}] [{}]", + debug!(" - {} [{:?}] [{}] estimated size {}", mono_item.to_string(tcx, true), linkage, - symbol_hash); + symbol_hash, + mono_item.size_estimate(tcx)); } debug!(""); @@ -913,7 +926,7 @@ fn collect_and_partition_mono_items( for (&mono_item, &linkage) in cgu.items() { item_to_cgus.entry(mono_item) .or_default() - .push((cgu.name().clone(), linkage)); + .push((cgu.name(), linkage)); } } @@ -975,7 +988,7 @@ pub fn provide(providers: &mut Providers<'_>) { providers.codegen_unit = |tcx, name| { let (_, all) = tcx.collect_and_partition_mono_items(LOCAL_CRATE); all.iter() - .find(|cgu| *cgu.name() == name) + .find(|cgu| cgu.name() == name) .cloned() .unwrap_or_else(|| panic!("failed to find cgu with name {:?}", name)) }; diff --git a/src/librustc_mir/shim.rs b/src/librustc_mir/shim.rs index 6daca5e261..177639956f 100644 --- a/src/librustc_mir/shim.rs +++ b/src/librustc_mir/shim.rs @@ -6,7 +6,7 @@ use rustc::ty::layout::VariantIdx; use rustc::ty::subst::{Subst, InternalSubsts}; use rustc::ty::query::Providers; -use rustc_data_structures::indexed_vec::{IndexVec, Idx}; +use rustc_index::vec::{IndexVec, Idx}; use rustc_target::spec::abi::Abi; use syntax_pos::{Span, sym}; @@ -66,9 +66,12 @@ fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceDef<'tcx>) -> &'tcx Some(arg_tys) ) } - ty::InstanceDef::Virtual(def_id, _) => { - // We are generating a call back to our def-id, which the - // codegen backend knows to turn to an actual virtual call. + // We are generating a call back to our def-id, which the + // codegen backend knows to turn to an actual virtual call. 
+ ty::InstanceDef::Virtual(def_id, _) | + // ...or we are generating a direct call to a function for which indirect calls must be + // codegen'd differently than direct ones (example: #[track_caller]) + ty::InstanceDef::ReifyShim(def_id) => { build_call_shim( tcx, def_id, @@ -79,7 +82,7 @@ fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceDef<'tcx>) -> &'tcx } ty::InstanceDef::ClosureOnceShim { call_once } => { let fn_mut = tcx.lang_items().fn_mut_trait().unwrap(); - let call_mut = tcx.global_tcx() + let call_mut = tcx .associated_items(fn_mut) .find(|it| it.kind == ty::AssocKind::Method) .unwrap().def_id; @@ -114,7 +117,7 @@ fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceDef<'tcx>) -> &'tcx run_passes(tcx, &mut result, instance, None, MirPhase::Const, &[ &add_moves_for_packed_drops::AddMovesForPackedDrops, - &no_landing_pads::NoLandingPads, + &no_landing_pads::NoLandingPads::new(tcx), &remove_noop_landing_pads::RemoveNoopLandingPads, &simplify::SimplifyCfg::new("make_shim"), &add_call_guards::CriticalCallEdges, @@ -167,9 +170,9 @@ fn build_drop_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, ty: Option>) debug!("build_drop_shim(def_id={:?}, ty={:?})", def_id, ty); // Check if this is a generator, if so, return the drop glue for it - if let Some(&ty::TyS { sty: ty::Generator(gen_def_id, substs, _), .. }) = ty { + if let Some(&ty::TyS { kind: ty::Generator(gen_def_id, substs, _), .. }) = ty { let body = &**tcx.optimized_mir(gen_def_id).generator_drop.as_ref().unwrap(); - return body.subst(tcx, substs.substs); + return body.subst(tcx, substs); } let substs = if let Some(ty) = ty { @@ -228,7 +231,7 @@ fn build_drop_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, ty: Option>) tcx, param_env }; - let dropee = dropee_ptr.deref(); + let dropee = tcx.mk_place_deref(dropee_ptr); let resume_block = elaborator.patch.resume_block(); elaborate_drops::elaborate_drop( &mut elaborator, @@ -309,9 +312,9 @@ fn build_clone_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, self_ty: Ty<'tcx>) - let is_copy = self_ty.is_copy_modulo_regions(tcx, param_env, builder.span); let dest = Place::return_place(); - let src = Place::from(Local::new(1+0)).deref(); + let src = tcx.mk_place_deref(Place::from(Local::new(1+0))); - match self_ty.sty { + match self_ty.kind { _ if is_copy => builder.copy_shim(), ty::Array(ty, len) => { let len = len.eval_usize(tcx, param_env); @@ -320,7 +323,7 @@ fn build_clone_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, self_ty: Ty<'tcx>) - ty::Closure(def_id, substs) => { builder.tuple_like_shim( dest, src, - substs.upvar_tys(def_id, tcx) + substs.as_closure().upvar_tys(def_id, tcx) ) } ty::Tuple(..) => builder.tuple_like_shim(dest, src, self_ty.tuple_fields()), @@ -412,7 +415,7 @@ impl CloneShimBuilder<'tcx> { } fn copy_shim(&mut self) { - let rcvr = Place::from(Local::new(1+0)).deref(); + let rcvr = self.tcx.mk_place_deref(Place::from(Local::new(1+0))); let ret_statement = self.make_statement( StatementKind::Assign( box( @@ -558,8 +561,8 @@ impl CloneShimBuilder<'tcx> { // BB #2 // `dest[i] = Clone::clone(src[beg])`; // Goto #3 if ok, #5 if unwinding happens. 
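`ReifyShim`, added above, covers functions whose indirect calls must be compiled differently from direct ones; the comment names `#[track_caller]` as the example. The attribute has since been stabilized, so the effect can be shown in plain Rust: a direct call receives the caller's location as an implicit argument, while an ordinary `fn` pointer has no slot for it, so the compiler routes the reified call through a shim.

```rust
#[track_caller]
fn whereami() -> &'static std::panic::Location<'static> {
    std::panic::Location::caller()
}

fn main() {
    // Direct call: the implicit location argument is filled in with this line.
    let direct = whereami();

    // Reified to a plain `fn` pointer: the call goes through a shim, and the
    // reported location no longer points at this call site.
    let reified: fn() -> &'static std::panic::Location<'static> = whereami;
    let indirect = reified();

    println!("direct:   {}", direct);
    println!("indirect: {}", indirect);
    assert_eq!(direct.file(), file!());
}
```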
- let dest_field = dest.clone().index(beg); - let src_field = src.index(beg); + let dest_field = self.tcx.mk_place_index(dest.clone(), beg); + let src_field = self.tcx.mk_place_index(src, beg); self.make_clone_call(dest_field, src_field, ty, BasicBlock::new(3), BasicBlock::new(5)); @@ -613,7 +616,7 @@ impl CloneShimBuilder<'tcx> { // BB #7 (cleanup) // `drop(dest[beg])`; self.block(vec![], TerminatorKind::Drop { - location: dest.index(beg), + location: self.tcx.mk_place_index(dest, beg), target: BasicBlock::new(8), unwind: None, }, true); @@ -645,9 +648,9 @@ impl CloneShimBuilder<'tcx> { let mut previous_field = None; for (i, ity) in tys.enumerate() { let field = Field::new(i); - let src_field = src.clone().field(field, ity); + let src_field = self.tcx.mk_place_field(src.clone(), field, ity); - let dest_field = dest.clone().field(field, ity); + let dest_field = self.tcx.mk_place_field(dest.clone(), field, ity); // #(2i + 1) is the cleanup block for the previous clone operation let cleanup_block = self.block_index_offset(1); @@ -718,14 +721,14 @@ fn build_call_shim<'tcx>( let rcvr = match rcvr_adjustment { Adjustment::Identity => Operand::Move(rcvr_l), - Adjustment::Deref => Operand::Copy(rcvr_l.deref()), + Adjustment::Deref => Operand::Copy(tcx.mk_place_deref(rcvr_l)), Adjustment::DerefMove => { // fn(Self, ...) -> fn(*mut Self, ...) let arg_ty = local_decls[rcvr_arg].ty; debug_assert!(tcx.generics_of(def_id).has_self && arg_ty == tcx.types.self_param); local_decls[rcvr_arg].ty = tcx.mk_mut_ptr(arg_ty); - Operand::Move(rcvr_l.deref()) + Operand::Move(tcx.mk_place_deref(rcvr_l)) } Adjustment::RefMut => { // let rcvr = &mut rcvr; @@ -769,7 +772,7 @@ fn build_call_shim<'tcx>( if let Some(untuple_args) = untuple_args { args.extend(untuple_args.iter().enumerate().map(|(i, ity)| { let arg_place = Place::from(Local::new(1+1)); - Operand::Move(arg_place.field(Field::new(i), *ity)) + Operand::Move(tcx.mk_place_field(arg_place, Field::new(i), *ity)) })); } else { args.extend((1..sig.inputs().len()).map(|i| { @@ -857,7 +860,7 @@ pub fn build_adt_ctor(tcx: TyCtxt<'_>, ctor_id: DefId) -> &Body<'_> { .expect("LBR in ADT constructor signature"); let sig = tcx.normalize_erasing_regions(param_env, sig); - let (adt_def, substs) = match sig.output().sty { + let (adt_def, substs) = match sig.output().kind { ty::Adt(adt_def, substs) => (adt_def, substs), _ => bug!("unexpected type for ADT ctor {:?}", sig.output()) }; @@ -898,6 +901,7 @@ pub fn build_adt_ctor(tcx: TyCtxt<'_>, ctor_id: DefId) -> &Body<'_> { )), AggregateKind::Adt(adt_def, variant_index, substs, None, None), source_info, + tcx, ).collect(); let start_block = BasicBlockData { diff --git a/src/librustc_mir/transform/add_call_guards.rs b/src/librustc_mir/transform/add_call_guards.rs index 15ecc6c379..bf3df1ae2f 100644 --- a/src/librustc_mir/transform/add_call_guards.rs +++ b/src/librustc_mir/transform/add_call_guards.rs @@ -1,6 +1,6 @@ use rustc::ty::TyCtxt; use rustc::mir::*; -use rustc_data_structures::indexed_vec::{Idx, IndexVec}; +use rustc_index::vec::{Idx, IndexVec}; use crate::transform::{MirPass, MirSource}; #[derive(PartialEq)] diff --git a/src/librustc_mir/transform/add_retag.rs b/src/librustc_mir/transform/add_retag.rs index 833c8b1646..b56a1b263f 100644 --- a/src/librustc_mir/transform/add_retag.rs +++ b/src/librustc_mir/transform/add_retag.rs @@ -37,7 +37,7 @@ fn is_stable( /// Determine whether this type may be a reference (or box), and thus needs retagging. 
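`tuple_like_shim` above builds MIR that clones each field in order and reassembles the aggregate; this is how `Clone` is provided for tuples, closures, and arrays, which cannot carry a hand-written impl. In surface Rust the generated shim is morally equivalent to this explicit impl for a hypothetical two-field type (an illustration of the shape, not the compiler's actual output):

```rust
#[derive(Debug, PartialEq)]
struct Pair(String, Vec<u8>);

// Field-by-field "tuple-like" clone: clone each field in order and rebuild
// the value from the parts.
impl Clone for Pair {
    fn clone(&self) -> Pair {
        Pair(self.0.clone(), self.1.clone())
    }
}

fn main() {
    let original = Pair("hello".to_string(), vec![1, 2, 3]);
    let copy = original.clone();
    assert_eq!(original, copy);
}
```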
fn may_be_reference<'tcx>(ty: Ty<'tcx>) -> bool { - match ty.sty { + match ty.kind { // Primitive types that are not references ty::Bool | ty::Char | ty::Float(_) | ty::Int(_) | ty::Uint(_) | diff --git a/src/librustc_mir/transform/check_consts/mod.rs b/src/librustc_mir/transform/check_consts/mod.rs new file mode 100644 index 0000000000..364e23ed8d --- /dev/null +++ b/src/librustc_mir/transform/check_consts/mod.rs @@ -0,0 +1,124 @@ +//! Check the bodies of `const`s, `static`s and `const fn`s for illegal operations. +//! +//! This module will eventually replace the parts of `qualify_consts.rs` that check whether a local +//! has interior mutability or needs to be dropped, as well as the visitor that emits errors when +//! it finds operations that are invalid in a certain context. + +use rustc::hir::{self, def_id::DefId}; +use rustc::mir; +use rustc::ty::{self, TyCtxt}; + +use std::fmt; + +pub use self::qualifs::Qualif; + +pub mod ops; +pub mod qualifs; +mod resolver; +pub mod validation; + +/// Information about the item currently being const-checked, as well as a reference to the global +/// context. +pub struct Item<'mir, 'tcx> { + pub body: &'mir mir::Body<'tcx>, + pub tcx: TyCtxt<'tcx>, + pub def_id: DefId, + pub param_env: ty::ParamEnv<'tcx>, + pub const_kind: Option, +} + +impl Item<'mir, 'tcx> { + pub fn new( + tcx: TyCtxt<'tcx>, + def_id: DefId, + body: &'mir mir::Body<'tcx>, + ) -> Self { + let param_env = tcx.param_env(def_id); + let const_kind = ConstKind::for_item(tcx, def_id); + + Item { + body, + tcx, + def_id, + param_env, + const_kind, + } + } + + /// Returns the kind of const context this `Item` represents (`const`, `static`, etc.). + /// + /// Panics if this `Item` is not const. + pub fn const_kind(&self) -> ConstKind { + self.const_kind.expect("`const_kind` must not be called on a non-const fn") + } +} + +/// The kinds of items which require compile-time evaluation. +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum ConstKind { + /// A `static` item. + Static, + /// A `static mut` item. + StaticMut, + /// A `const fn` item. + ConstFn, + /// A `const` item or an anonymous constant (e.g. in array lengths). + Const, +} + +impl ConstKind { + /// Returns the validation mode for the item with the given `DefId`, or `None` if this item + /// does not require validation (e.g. a non-const `fn`). + pub fn for_item(tcx: TyCtxt<'tcx>, def_id: DefId) -> Option { + use hir::BodyOwnerKind as HirKind; + + let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap(); + + let mode = match tcx.hir().body_owner_kind(hir_id) { + HirKind::Closure => return None, + + HirKind::Fn if tcx.is_const_fn(def_id) => ConstKind::ConstFn, + HirKind::Fn => return None, + + HirKind::Const => ConstKind::Const, + + HirKind::Static(hir::MutImmutable) => ConstKind::Static, + HirKind::Static(hir::MutMutable) => ConstKind::StaticMut, + }; + + Some(mode) + } + + pub fn is_static(self) -> bool { + match self { + ConstKind::Static | ConstKind::StaticMut => true, + ConstKind::ConstFn | ConstKind::Const => false, + } + } + + /// Returns `true` if the value returned by this item must be `Sync`. + /// + /// This returns false for `StaticMut` since all accesses to one are `unsafe` anyway. 
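`ConstKind::requires_sync` above encodes the rule that the final value of a `static` must be `Sync`, while `static mut` is exempt because every access is already `unsafe`. A standalone illustration of that language rule (the `*const u8` static below is just a stand-in for any non-`Sync` type):

```rust
use std::sync::atomic::{AtomicU32, Ordering};

// A plain `static` can be touched by every thread at once, so its type
// must be `Sync`; an atomic qualifies. `static BAD: Cell<u32>` would not.
static COUNTER: AtomicU32 = AtomicU32::new(0);

// `*const u8` is not `Sync`, but a `static mut` does not require `Sync`,
// because every access to it is `unsafe` anyway.
static mut SCRATCH: *const u8 = std::ptr::null();

fn main() {
    COUNTER.fetch_add(1, Ordering::Relaxed);
    assert_eq!(COUNTER.load(Ordering::Relaxed), 1);

    // Copy the pointer out of the `static mut` under `unsafe`.
    let scratch = unsafe { SCRATCH };
    assert!(scratch.is_null());
}
```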
+ pub fn requires_sync(self) -> bool { + match self { + ConstKind::Static => true, + ConstKind::ConstFn | ConstKind::Const | ConstKind::StaticMut => false, + } + } +} + +impl fmt::Display for ConstKind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + ConstKind::Const => write!(f, "constant"), + ConstKind::Static | ConstKind::StaticMut => write!(f, "static"), + ConstKind::ConstFn => write!(f, "constant function"), + } + } +} + +/// Returns `true` if this `DefId` points to one of the official `panic` lang items. +pub fn is_lang_panic_fn(tcx: TyCtxt<'tcx>, def_id: DefId) -> bool { + Some(def_id) == tcx.lang_items().panic_fn() || + Some(def_id) == tcx.lang_items().begin_panic_fn() +} diff --git a/src/librustc_mir/transform/check_consts/ops.rs b/src/librustc_mir/transform/check_consts/ops.rs new file mode 100644 index 0000000000..4b374cff80 --- /dev/null +++ b/src/librustc_mir/transform/check_consts/ops.rs @@ -0,0 +1,338 @@ +//! Concrete error types for all operations which may be invalid in a certain const context. + +use rustc::hir::def_id::DefId; +use rustc::mir::BorrowKind; +use rustc::session::config::nightly_options; +use rustc::ty::TyCtxt; +use syntax::feature_gate::{emit_feature_err, GateIssue}; +use syntax::symbol::sym; +use syntax_pos::{Span, Symbol}; + +use super::{ConstKind, Item}; + +/// An operation that is not *always* allowed in a const context. +pub trait NonConstOp: std::fmt::Debug { + /// Whether this operation can be evaluated by miri. + const IS_SUPPORTED_IN_MIRI: bool = true; + + /// Returns a boolean indicating whether the feature gate that would allow this operation is + /// enabled, or `None` if such a feature gate does not exist. + fn feature_gate(_tcx: TyCtxt<'tcx>) -> Option { + None + } + + /// Returns `true` if this operation is allowed in the given item. + /// + /// This check should assume that we are not in a non-const `fn`, where all operations are + /// legal. + fn is_allowed_in_item(&self, item: &Item<'_, '_>) -> bool { + Self::feature_gate(item.tcx).unwrap_or(false) + } + + fn emit_error(&self, item: &Item<'_, '_>, span: Span) { + let mut err = struct_span_err!( + item.tcx.sess, + span, + E0019, + "{} contains unimplemented expression type", + item.const_kind() + ); + if item.tcx.sess.teach(&err.get_code().unwrap()) { + err.note("A function call isn't allowed in the const's initialization expression \ + because the expression's value must be known at compile-time."); + err.note("Remember: you can't use a function call inside a const's initialization \ + expression! However, you can use it anywhere else."); + } + err.emit(); + } +} + +/// A `Downcast` projection. +#[derive(Debug)] +pub struct Downcast; +impl NonConstOp for Downcast {} + +/// A function call where the callee is a pointer. +#[derive(Debug)] +pub struct FnCallIndirect; +impl NonConstOp for FnCallIndirect { + fn emit_error(&self, item: &Item<'_, '_>, span: Span) { + let mut err = item.tcx.sess.struct_span_err( + span, + &format!("function pointers are not allowed in const fn")); + err.emit(); + } +} + +/// A function call where the callee is not marked as `const`. 
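The `FnCallNonConst` op defined just below corresponds to E0015: inside a const context, only `const fn`s (plus tuple struct and tuple variant constructors) may be called. A minimal example of the distinction it enforces, with a hypothetical `const fn` and an ordinary function:

```rust
const fn square(x: u32) -> u32 {
    x * x
}

fn at_runtime() -> u32 {
    4 // not `const`, so not callable during const evaluation
}

const N: u32 = square(3); // allowed: the callee is a `const fn`
// const M: u32 = at_runtime();
// error[E0015]: calls in constants are limited to constant functions, ...

fn main() {
    // `const fn` calls are also usable in array lengths.
    let buf = [0u8; square(2) as usize];
    assert_eq!(N, 9);
    assert_eq!(buf.len(), 4);
    assert_eq!(at_runtime(), 4);
}
```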
+#[derive(Debug)] +pub struct FnCallNonConst(pub DefId); +impl NonConstOp for FnCallNonConst { + fn emit_error(&self, item: &Item<'_, '_>, span: Span) { + let mut err = struct_span_err!( + item.tcx.sess, + span, + E0015, + "calls in {}s are limited to constant functions, \ + tuple structs and tuple variants", + item.const_kind(), + ); + err.emit(); + } +} + +/// A function call where the callee is not a function definition or function pointer, e.g. a +/// closure. +/// +/// This can be subdivided in the future to produce a better error message. +#[derive(Debug)] +pub struct FnCallOther; +impl NonConstOp for FnCallOther { + const IS_SUPPORTED_IN_MIRI: bool = false; +} + +/// A call to a `#[unstable]` const fn or `#[rustc_const_unstable]` function. +/// +/// Contains the name of the feature that would allow the use of this function. +#[derive(Debug)] +pub struct FnCallUnstable(pub DefId, pub Symbol); +impl NonConstOp for FnCallUnstable { + fn emit_error(&self, item: &Item<'_, '_>, span: Span) { + let FnCallUnstable(def_id, feature) = *self; + + let mut err = item.tcx.sess.struct_span_err(span, + &format!("`{}` is not yet stable as a const fn", + item.tcx.def_path_str(def_id))); + if nightly_options::is_nightly_build() { + help!(&mut err, + "add `#![feature({})]` to the \ + crate attributes to enable", + feature); + } + err.emit(); + } +} + +#[derive(Debug)] +pub struct HeapAllocation; +impl NonConstOp for HeapAllocation { + const IS_SUPPORTED_IN_MIRI: bool = false; + + fn emit_error(&self, item: &Item<'_, '_>, span: Span) { + let mut err = struct_span_err!(item.tcx.sess, span, E0010, + "allocations are not allowed in {}s", item.const_kind()); + err.span_label(span, format!("allocation not allowed in {}s", item.const_kind())); + if item.tcx.sess.teach(&err.get_code().unwrap()) { + err.note( + "The value of statics and constants must be known at compile time, \ + and they live for the entire lifetime of a program. Creating a boxed \ + value allocates memory on the heap at runtime, and therefore cannot \ + be done at compile time." + ); + } + err.emit(); + } +} + +#[derive(Debug)] +pub struct IfOrMatch; +impl NonConstOp for IfOrMatch {} + +#[derive(Debug)] +pub struct LiveDrop; +impl NonConstOp for LiveDrop { + fn emit_error(&self, item: &Item<'_, '_>, span: Span) { + struct_span_err!(item.tcx.sess, span, E0493, + "destructors cannot be evaluated at compile-time") + .span_label(span, format!("{}s cannot evaluate destructors", + item.const_kind())) + .emit(); + } +} + +#[derive(Debug)] +pub struct Loop; +impl NonConstOp for Loop {} + +#[derive(Debug)] +pub struct MutBorrow(pub BorrowKind); +impl NonConstOp for MutBorrow { + fn emit_error(&self, item: &Item<'_, '_>, span: Span) { + let kind = self.0; + if let BorrowKind::Mut { .. 
} = kind { + let mut err = struct_span_err!(item.tcx.sess, span, E0017, + "references in {}s may only refer \ + to immutable values", item.const_kind()); + err.span_label(span, format!("{}s require immutable values", + item.const_kind())); + if item.tcx.sess.teach(&err.get_code().unwrap()) { + err.note("References in statics and constants may only refer \ + to immutable values.\n\n\ + Statics are shared everywhere, and if they refer to \ + mutable data one might violate memory safety since \ + holding multiple mutable references to shared data \ + is not allowed.\n\n\ + If you really want global mutable state, try using \ + static mut or a global UnsafeCell."); + } + err.emit(); + } else { + span_err!(item.tcx.sess, span, E0492, + "cannot borrow a constant which may contain \ + interior mutability, create a static instead"); + } + } +} + +#[derive(Debug)] +pub struct MutDeref; +impl NonConstOp for MutDeref {} + +#[derive(Debug)] +pub struct Panic; +impl NonConstOp for Panic { + fn feature_gate(tcx: TyCtxt<'_>) -> Option { + Some(tcx.features().const_panic) + } + + fn emit_error(&self, item: &Item<'_, '_>, span: Span) { + emit_feature_err( + &item.tcx.sess.parse_sess, + sym::const_panic, + span, + GateIssue::Language, + &format!("panicking in {}s is unstable", item.const_kind()), + ); + } +} + +#[derive(Debug)] +pub struct RawPtrComparison; +impl NonConstOp for RawPtrComparison { + fn feature_gate(tcx: TyCtxt<'_>) -> Option { + Some(tcx.features().const_compare_raw_pointers) + } + + fn emit_error(&self, item: &Item<'_, '_>, span: Span) { + emit_feature_err( + &item.tcx.sess.parse_sess, + sym::const_compare_raw_pointers, + span, + GateIssue::Language, + &format!("comparing raw pointers inside {}", item.const_kind()), + ); + } +} + +#[derive(Debug)] +pub struct RawPtrDeref; +impl NonConstOp for RawPtrDeref { + fn feature_gate(tcx: TyCtxt<'_>) -> Option { + Some(tcx.features().const_raw_ptr_deref) + } + + fn emit_error(&self, item: &Item<'_, '_>, span: Span) { + emit_feature_err( + &item.tcx.sess.parse_sess, sym::const_raw_ptr_deref, + span, GateIssue::Language, + &format!( + "dereferencing raw pointers in {}s is unstable", + item.const_kind(), + ), + ); + } +} + +#[derive(Debug)] +pub struct RawPtrToIntCast; +impl NonConstOp for RawPtrToIntCast { + fn feature_gate(tcx: TyCtxt<'_>) -> Option { + Some(tcx.features().const_raw_ptr_to_usize_cast) + } + + fn emit_error(&self, item: &Item<'_, '_>, span: Span) { + emit_feature_err( + &item.tcx.sess.parse_sess, sym::const_raw_ptr_to_usize_cast, + span, GateIssue::Language, + &format!( + "casting pointers to integers in {}s is unstable", + item.const_kind(), + ), + ); + } +} + +/// An access to a (non-thread-local) `static`. +#[derive(Debug)] +pub struct StaticAccess; +impl NonConstOp for StaticAccess { + fn is_allowed_in_item(&self, item: &Item<'_, '_>) -> bool { + item.const_kind().is_static() + } + + fn emit_error(&self, item: &Item<'_, '_>, span: Span) { + let mut err = struct_span_err!(item.tcx.sess, span, E0013, + "{}s cannot refer to statics, use \ + a constant instead", item.const_kind()); + if item.tcx.sess.teach(&err.get_code().unwrap()) { + err.note( + "Static and const variables can refer to other const variables. \ + But a const variable cannot refer to a static variable." + ); + err.help( + "To fix this, the value can be extracted as a const and then used." + ); + } + err.emit(); + } +} + +/// An access to a thread-local `static`. 
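`LiveDrop` above is reported when a value with a destructor would be dropped *during* const evaluation (E0493); a value with a destructor may still flow out of a constant as its final value. A small standalone example of that distinction, with the rejected case left in a comment:

```rust
struct Noisy;

impl Drop for Noisy {
    fn drop(&mut self) {
        println!("dropped");
    }
}

// A value with a destructor may *be* the result of a constant...
const KEEP: Noisy = Noisy;

// ...but it must not be dropped while the constant is being evaluated:
// const DISCARD: u32 = { let _tmp = Noisy; 0 };
// error[E0493]: destructors cannot be evaluated at compile-time

fn main() {
    let kept = KEEP; // each use of the constant produces a fresh value
    drop(kept); // the destructor runs at run time, printing "dropped"
}
```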
+#[derive(Debug)] +pub struct ThreadLocalAccess; +impl NonConstOp for ThreadLocalAccess { + const IS_SUPPORTED_IN_MIRI: bool = false; + + fn emit_error(&self, item: &Item<'_, '_>, span: Span) { + span_err!(item.tcx.sess, span, E0625, + "thread-local statics cannot be \ + accessed at compile-time"); + } +} + +#[derive(Debug)] +pub struct Transmute; +impl NonConstOp for Transmute { + fn feature_gate(tcx: TyCtxt<'_>) -> Option { + Some(tcx.features().const_transmute) + } + + fn emit_error(&self, item: &Item<'_, '_>, span: Span) { + emit_feature_err( + &item.tcx.sess.parse_sess, sym::const_transmute, + span, GateIssue::Language, + &format!("The use of std::mem::transmute() \ + is gated in {}s", item.const_kind())); + } +} + +#[derive(Debug)] +pub struct UnionAccess; +impl NonConstOp for UnionAccess { + fn is_allowed_in_item(&self, item: &Item<'_, '_>) -> bool { + // Union accesses are stable in all contexts except `const fn`. + item.const_kind() != ConstKind::ConstFn || Self::feature_gate(item.tcx).unwrap() + } + + fn feature_gate(tcx: TyCtxt<'_>) -> Option { + Some(tcx.features().const_fn_union) + } + + fn emit_error(&self, item: &Item<'_, '_>, span: Span) { + emit_feature_err( + &item.tcx.sess.parse_sess, sym::const_fn_union, + span, GateIssue::Language, + "unions in const fn are unstable", + ); + } +} diff --git a/src/librustc_mir/transform/check_consts/qualifs.rs b/src/librustc_mir/transform/check_consts/qualifs.rs new file mode 100644 index 0000000000..840ad30301 --- /dev/null +++ b/src/librustc_mir/transform/check_consts/qualifs.rs @@ -0,0 +1,302 @@ +//! A copy of the `Qualif` trait in `qualify_consts.rs` that is suitable for the new validator. + +use rustc::mir::*; +use rustc::mir::interpret::ConstValue; +use rustc::ty::{self, Ty}; +use syntax_pos::DUMMY_SP; + +use super::{ConstKind, Item as ConstCx}; + +#[derive(Clone, Copy)] +pub struct QualifSet(u8); + +impl QualifSet { + fn contains(self) -> bool { + self.0 & (1 << Q::IDX) != 0 + } +} + +/// A "qualif"(-ication) is a way to look for something "bad" in the MIR that would disqualify some +/// code for promotion or prevent it from evaluating at compile time. So `return true` means +/// "I found something bad, no reason to go on searching". `false` is only returned if we +/// definitely cannot find anything bad anywhere. +/// +/// The default implementations proceed structurally. +pub trait Qualif { + const IDX: usize; + + /// The name of the file used to debug the dataflow analysis that computes this qualif. + const ANALYSIS_NAME: &'static str; + + /// Whether this `Qualif` is cleared when a local is moved from. + const IS_CLEARED_ON_MOVE: bool = false; + + /// Return the qualification that is (conservatively) correct for any value + /// of the type. + fn in_any_value_of_ty(_cx: &ConstCx<'_, 'tcx>, _ty: Ty<'tcx>) -> bool; + + fn in_static(_cx: &ConstCx<'_, 'tcx>, _static: &Static<'tcx>) -> bool { + // FIXME(eddyb) should we do anything here for value properties? + false + } + + fn in_projection_structurally( + cx: &ConstCx<'_, 'tcx>, + per_local: &impl Fn(Local) -> bool, + place: PlaceRef<'_, 'tcx>, + ) -> bool { + if let [proj_base @ .., elem] = place.projection { + let base_qualif = Self::in_place(cx, per_local, PlaceRef { + base: place.base, + projection: proj_base, + }); + let qualif = base_qualif && Self::in_any_value_of_ty( + cx, + Place::ty_from(place.base, proj_base, cx.body, cx.tcx) + .projection_ty(cx.tcx, elem) + .ty, + ); + match elem { + ProjectionElem::Deref | + ProjectionElem::Subslice { .. 
} | + ProjectionElem::Field(..) | + ProjectionElem::ConstantIndex { .. } | + ProjectionElem::Downcast(..) => qualif, + + ProjectionElem::Index(local) => qualif || per_local(*local), + } + } else { + bug!("This should be called if projection is not empty"); + } + } + + fn in_projection( + cx: &ConstCx<'_, 'tcx>, + per_local: &impl Fn(Local) -> bool, + place: PlaceRef<'_, 'tcx>, + ) -> bool { + Self::in_projection_structurally(cx, per_local, place) + } + + fn in_place( + cx: &ConstCx<'_, 'tcx>, + per_local: &impl Fn(Local) -> bool, + place: PlaceRef<'_, 'tcx>, + ) -> bool { + match place { + PlaceRef { + base: PlaceBase::Local(local), + projection: [], + } => per_local(*local), + PlaceRef { + base: PlaceBase::Static(box Static { + kind: StaticKind::Promoted(..), + .. + }), + projection: [], + } => bug!("qualifying already promoted MIR"), + PlaceRef { + base: PlaceBase::Static(static_), + projection: [], + } => { + Self::in_static(cx, static_) + }, + PlaceRef { + base: _, + projection: [.., _], + } => Self::in_projection(cx, per_local, place), + } + } + + fn in_operand( + cx: &ConstCx<'_, 'tcx>, + per_local: &impl Fn(Local) -> bool, + operand: &Operand<'tcx>, + ) -> bool { + match *operand { + Operand::Copy(ref place) | + Operand::Move(ref place) => Self::in_place(cx, per_local, place.as_ref()), + + Operand::Constant(ref constant) => { + if let ConstValue::Unevaluated(def_id, _) = constant.literal.val { + // Don't peek inside trait associated constants. + if cx.tcx.trait_of_item(def_id).is_some() { + Self::in_any_value_of_ty(cx, constant.literal.ty) + } else { + let (bits, _) = cx.tcx.at(constant.span).mir_const_qualif(def_id); + + let qualif = QualifSet(bits).contains::(); + + // Just in case the type is more specific than + // the definition, e.g., impl associated const + // with type parameters, take it into account. + qualif && Self::in_any_value_of_ty(cx, constant.literal.ty) + } + } else { + false + } + } + } + } + + fn in_rvalue_structurally( + cx: &ConstCx<'_, 'tcx>, + per_local: &impl Fn(Local) -> bool, + rvalue: &Rvalue<'tcx>, + ) -> bool { + match *rvalue { + Rvalue::NullaryOp(..) => false, + + Rvalue::Discriminant(ref place) | + Rvalue::Len(ref place) => Self::in_place(cx, per_local, place.as_ref()), + + Rvalue::Use(ref operand) | + Rvalue::Repeat(ref operand, _) | + Rvalue::UnaryOp(_, ref operand) | + Rvalue::Cast(_, ref operand, _) => Self::in_operand(cx, per_local, operand), + + Rvalue::BinaryOp(_, ref lhs, ref rhs) | + Rvalue::CheckedBinaryOp(_, ref lhs, ref rhs) => { + Self::in_operand(cx, per_local, lhs) || Self::in_operand(cx, per_local, rhs) + } + + Rvalue::Ref(_, _, ref place) => { + // Special-case reborrows to be more like a copy of the reference. + if let &[ref proj_base @ .., elem] = place.projection.as_ref() { + if ProjectionElem::Deref == elem { + let base_ty = Place::ty_from(&place.base, proj_base, cx.body, cx.tcx).ty; + if let ty::Ref(..) 
= base_ty.kind { + return Self::in_place(cx, per_local, PlaceRef { + base: &place.base, + projection: proj_base, + }); + } + } + } + + Self::in_place(cx, per_local, place.as_ref()) + } + + Rvalue::Aggregate(_, ref operands) => { + operands.iter().any(|o| Self::in_operand(cx, per_local, o)) + } + } + } + + fn in_rvalue( + cx: &ConstCx<'_, 'tcx>, + per_local: &impl Fn(Local) -> bool, + rvalue: &Rvalue<'tcx>, + ) -> bool { + Self::in_rvalue_structurally(cx, per_local, rvalue) + } + + fn in_call( + cx: &ConstCx<'_, 'tcx>, + _per_local: &impl Fn(Local) -> bool, + _callee: &Operand<'tcx>, + _args: &[Operand<'tcx>], + return_ty: Ty<'tcx>, + ) -> bool { + // Be conservative about the returned value of a const fn. + Self::in_any_value_of_ty(cx, return_ty) + } +} + +/// Constant containing interior mutability (`UnsafeCell`). +/// This must be ruled out to make sure that evaluating the constant at compile-time +/// and at *any point* during the run-time would produce the same result. In particular, +/// promotion of temporaries must not change program behavior; if the promoted could be +/// written to, that would be a problem. +pub struct HasMutInterior; + +impl Qualif for HasMutInterior { + const IDX: usize = 0; + const ANALYSIS_NAME: &'static str = "flow_has_mut_interior"; + + fn in_any_value_of_ty(cx: &ConstCx<'_, 'tcx>, ty: Ty<'tcx>) -> bool { + !ty.is_freeze(cx.tcx, cx.param_env, DUMMY_SP) + } + + fn in_rvalue( + cx: &ConstCx<'_, 'tcx>, + per_local: &impl Fn(Local) -> bool, + rvalue: &Rvalue<'tcx>, + ) -> bool { + match *rvalue { + // Returning `true` for `Rvalue::Ref` indicates the borrow isn't + // allowed in constants (and the `Checker` will error), and/or it + // won't be promoted, due to `&mut ...` or interior mutability. + Rvalue::Ref(_, kind, ref place) => { + let ty = place.ty(cx.body, cx.tcx).ty; + + if let BorrowKind::Mut { .. } = kind { + // In theory, any zero-sized value could be borrowed + // mutably without consequences. + match ty.kind { + // Inside a `static mut`, &mut [...] is also allowed. + | ty::Array(..) + | ty::Slice(_) + if cx.const_kind == Some(ConstKind::StaticMut) + => {}, + + // FIXME(eddyb): We only return false for `&mut []` outside a const + // context which seems unnecessary given that this is merely a ZST. + | ty::Array(_, len) + if len.try_eval_usize(cx.tcx, cx.param_env) == Some(0) + && cx.const_kind == None + => {}, + + _ => return true, + } + } + } + + Rvalue::Aggregate(ref kind, _) => { + if let AggregateKind::Adt(def, ..) = **kind { + if Some(def.did) == cx.tcx.lang_items().unsafe_cell_type() { + let ty = rvalue.ty(cx.body, cx.tcx); + assert_eq!(Self::in_any_value_of_ty(cx, ty), true); + return true; + } + } + } + + _ => {} + } + + Self::in_rvalue_structurally(cx, per_local, rvalue) + } +} + +/// Constant containing an ADT that implements `Drop`. +/// This must be ruled out (a) because we cannot run `Drop` during compile-time +/// as that might not be a `const fn`, and (b) because implicit promotion would +/// remove side-effects that occur as part of dropping that value. 
+pub struct NeedsDrop; + +impl Qualif for NeedsDrop { + const IDX: usize = 1; + const ANALYSIS_NAME: &'static str = "flow_needs_drop"; + const IS_CLEARED_ON_MOVE: bool = true; + + fn in_any_value_of_ty(cx: &ConstCx<'_, 'tcx>, ty: Ty<'tcx>) -> bool { + ty.needs_drop(cx.tcx, cx.param_env) + } + + fn in_rvalue( + cx: &ConstCx<'_, 'tcx>, + per_local: &impl Fn(Local) -> bool, + rvalue: &Rvalue<'tcx>, + ) -> bool { + if let Rvalue::Aggregate(ref kind, _) = *rvalue { + if let AggregateKind::Adt(def, ..) = **kind { + if def.has_dtor(cx.tcx) { + return true; + } + } + } + + Self::in_rvalue_structurally(cx, per_local, rvalue) + } +} diff --git a/src/librustc_mir/transform/check_consts/resolver.rs b/src/librustc_mir/transform/check_consts/resolver.rs new file mode 100644 index 0000000000..8909ef7db6 --- /dev/null +++ b/src/librustc_mir/transform/check_consts/resolver.rs @@ -0,0 +1,220 @@ +//! Propagate `Qualif`s between locals and query the results. +//! +//! This contains the dataflow analysis used to track `Qualif`s on complex control-flow graphs. + +use rustc::mir::visit::Visitor; +use rustc::mir::{self, BasicBlock, Local, Location}; +use rustc_index::bit_set::BitSet; + +use std::marker::PhantomData; + +use crate::dataflow::{self as old_dataflow, generic as dataflow}; +use super::{Item, Qualif}; + +/// A `Visitor` that propagates qualifs between locals. This defines the transfer function of +/// `FlowSensitiveAnalysis`. +/// +/// This transfer does nothing when encountering an indirect assignment. Consumers should rely on +/// the `IndirectlyMutableLocals` dataflow pass to see if a `Local` may have become qualified via +/// an indirect assignment or function call. +struct TransferFunction<'a, 'mir, 'tcx, Q> { + item: &'a Item<'mir, 'tcx>, + qualifs_per_local: &'a mut BitSet, + + _qualif: PhantomData, +} + +impl TransferFunction<'a, 'mir, 'tcx, Q> +where + Q: Qualif, +{ + fn new( + item: &'a Item<'mir, 'tcx>, + qualifs_per_local: &'a mut BitSet, + ) -> Self { + TransferFunction { + item, + qualifs_per_local, + _qualif: PhantomData, + } + } + + fn initialize_state(&mut self) { + self.qualifs_per_local.clear(); + + for arg in self.item.body.args_iter() { + let arg_ty = self.item.body.local_decls[arg].ty; + if Q::in_any_value_of_ty(self.item, arg_ty) { + self.qualifs_per_local.insert(arg); + } + } + } + + fn assign_qualif_direct(&mut self, place: &mir::Place<'tcx>, value: bool) { + debug_assert!(!place.is_indirect()); + + match (value, place.as_ref()) { + (true, mir::PlaceRef { base: &mir::PlaceBase::Local(local), .. }) => { + self.qualifs_per_local.insert(local); + } + + // For now, we do not clear the qualif if a local is overwritten in full by + // an unqualified rvalue (e.g. `y = 5`). This is to be consistent + // with aggregates where we overwrite all fields with assignments, which would not + // get this feature. 
+ (false, mir::PlaceRef { base: &mir::PlaceBase::Local(_local), projection: &[] }) => { + // self.qualifs_per_local.remove(*local); + } + + _ => {} + } + } + + fn apply_call_return_effect( + &mut self, + _block: BasicBlock, + func: &mir::Operand<'tcx>, + args: &[mir::Operand<'tcx>], + return_place: &mir::Place<'tcx>, + ) { + let return_ty = return_place.ty(self.item.body, self.item.tcx).ty; + let qualif = Q::in_call( + self.item, + &|l| self.qualifs_per_local.contains(l), + func, + args, + return_ty, + ); + if !return_place.is_indirect() { + self.assign_qualif_direct(return_place, qualif); + } + } +} + +impl Visitor<'tcx> for TransferFunction<'_, '_, 'tcx, Q> +where + Q: Qualif, +{ + fn visit_operand(&mut self, operand: &mir::Operand<'tcx>, location: Location) { + self.super_operand(operand, location); + + if !Q::IS_CLEARED_ON_MOVE { + return; + } + + // If a local with no projections is moved from (e.g. `x` in `y = x`), record that + // it no longer needs to be dropped. + if let mir::Operand::Move(place) = operand { + if let Some(local) = place.as_local() { + self.qualifs_per_local.remove(local); + } + } + } + + fn visit_assign( + &mut self, + place: &mir::Place<'tcx>, + rvalue: &mir::Rvalue<'tcx>, + location: Location, + ) { + let qualif = Q::in_rvalue(self.item, &|l| self.qualifs_per_local.contains(l), rvalue); + if !place.is_indirect() { + self.assign_qualif_direct(place, qualif); + } + + // We need to assign qualifs to the left-hand side before visiting `rvalue` since + // qualifs can be cleared on move. + self.super_assign(place, rvalue, location); + } + + fn visit_terminator_kind(&mut self, kind: &mir::TerminatorKind<'tcx>, location: Location) { + // The effect of assignment to the return place in `TerminatorKind::Call` is not applied + // here; that occurs in `apply_call_return_effect`. + + if let mir::TerminatorKind::DropAndReplace { value, location: dest, .. } = kind { + let qualif = Q::in_operand(self.item, &|l| self.qualifs_per_local.contains(l), value); + if !dest.is_indirect() { + self.assign_qualif_direct(dest, qualif); + } + } + + // We need to assign qualifs to the dropped location before visiting the operand that + // replaces it since qualifs can be cleared on move. + self.super_terminator_kind(kind, location); + } +} + +/// The dataflow analysis used to propagate qualifs on arbitrary CFGs. 
+pub(super) struct FlowSensitiveAnalysis<'a, 'mir, 'tcx, Q> {
+    item: &'a Item<'mir, 'tcx>,
+    _qualif: PhantomData<Q>,
+}
+
+impl<'a, 'mir, 'tcx, Q> FlowSensitiveAnalysis<'a, 'mir, 'tcx, Q>
+where
+    Q: Qualif,
+{
+    pub(super) fn new(_: Q, item: &'a Item<'mir, 'tcx>) -> Self {
+        FlowSensitiveAnalysis {
+            item,
+            _qualif: PhantomData,
+        }
+    }
+
+    fn transfer_function(
+        &self,
+        state: &'a mut BitSet<Local>,
+    ) -> TransferFunction<'a, 'mir, 'tcx, Q> {
+        TransferFunction::<Q>::new(self.item, state)
+    }
+}
+
+impl<Q> old_dataflow::BottomValue for FlowSensitiveAnalysis<'_, '_, '_, Q> {
+    const BOTTOM_VALUE: bool = false;
+}
+
+impl<Q> dataflow::Analysis<'tcx> for FlowSensitiveAnalysis<'_, '_, 'tcx, Q>
+where
+    Q: Qualif,
+{
+    type Idx = Local;
+
+    const NAME: &'static str = Q::ANALYSIS_NAME;
+
+    fn bits_per_block(&self, body: &mir::Body<'tcx>) -> usize {
+        body.local_decls.len()
+    }
+
+    fn initialize_start_block(&self, _body: &mir::Body<'tcx>, state: &mut BitSet<Self::Idx>) {
+        self.transfer_function(state).initialize_state();
+    }
+
+    fn apply_statement_effect(
+        &self,
+        state: &mut BitSet<Self::Idx>,
+        statement: &mir::Statement<'tcx>,
+        location: Location,
+    ) {
+        self.transfer_function(state).visit_statement(statement, location);
+    }
+
+    fn apply_terminator_effect(
+        &self,
+        state: &mut BitSet<Self::Idx>,
+        terminator: &mir::Terminator<'tcx>,
+        location: Location,
+    ) {
+        self.transfer_function(state).visit_terminator(terminator, location);
+    }
+
+    fn apply_call_return_effect(
+        &self,
+        state: &mut BitSet<Self::Idx>,
+        block: BasicBlock,
+        func: &mir::Operand<'tcx>,
+        args: &[mir::Operand<'tcx>],
+        return_place: &mir::Place<'tcx>,
+    ) {
+        self.transfer_function(state).apply_call_return_effect(block, func, args, return_place)
+    }
+}
diff --git a/src/librustc_mir/transform/check_consts/validation.rs b/src/librustc_mir/transform/check_consts/validation.rs
new file mode 100644
index 0000000000..244d434a51
--- /dev/null
+++ b/src/librustc_mir/transform/check_consts/validation.rs
@@ -0,0 +1,569 @@
+//! The `Visitor` responsible for actually checking a `mir::Body` for invalid operations.
+ +use rustc::mir::visit::{PlaceContext, Visitor, MutatingUseContext, NonMutatingUseContext}; +use rustc::mir::*; +use rustc::ty::cast::CastTy; +use rustc::ty; +use rustc_index::bit_set::BitSet; +use rustc_target::spec::abi::Abi; +use syntax::symbol::sym; +use syntax_pos::Span; + +use std::fmt; +use std::ops::Deref; + +use crate::dataflow::{self as old_dataflow, generic as dataflow}; +use self::old_dataflow::IndirectlyMutableLocals; +use super::ops::{self, NonConstOp}; +use super::qualifs::{HasMutInterior, NeedsDrop}; +use super::resolver::FlowSensitiveAnalysis; +use super::{ConstKind, Item, Qualif, is_lang_panic_fn}; + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum CheckOpResult { + Forbidden, + Unleashed, + Allowed, +} + +pub type IndirectlyMutableResults<'mir, 'tcx> = + old_dataflow::DataflowResultsCursor<'mir, 'tcx, IndirectlyMutableLocals<'mir, 'tcx>>; + +struct QualifCursor<'a, 'mir, 'tcx, Q: Qualif> { + cursor: dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'a, 'mir, 'tcx, Q>>, + in_any_value_of_ty: BitSet, +} + +impl QualifCursor<'a, 'mir, 'tcx, Q> { + pub fn new( + q: Q, + item: &'a Item<'mir, 'tcx>, + dead_unwinds: &BitSet, + ) -> Self { + let analysis = FlowSensitiveAnalysis::new(q, item); + let results = + dataflow::Engine::new(item.tcx, item.body, item.def_id, dead_unwinds, analysis) + .iterate_to_fixpoint(); + let cursor = dataflow::ResultsCursor::new(item.body, results); + + let mut in_any_value_of_ty = BitSet::new_empty(item.body.local_decls.len()); + for (local, decl) in item.body.local_decls.iter_enumerated() { + if Q::in_any_value_of_ty(item, decl.ty) { + in_any_value_of_ty.insert(local); + } + } + + QualifCursor { + cursor, + in_any_value_of_ty, + } + } +} + +pub struct Qualifs<'a, 'mir, 'tcx> { + has_mut_interior: QualifCursor<'a, 'mir, 'tcx, HasMutInterior>, + needs_drop: QualifCursor<'a, 'mir, 'tcx, NeedsDrop>, + indirectly_mutable: IndirectlyMutableResults<'mir, 'tcx>, +} + +impl Qualifs<'a, 'mir, 'tcx> { + fn indirectly_mutable(&mut self, local: Local, location: Location) -> bool { + self.indirectly_mutable.seek(location); + self.indirectly_mutable.get().contains(local) + } + + /// Returns `true` if `local` is `NeedsDrop` at the given `Location`. + /// + /// Only updates the cursor if absolutely necessary + fn needs_drop_lazy_seek(&mut self, local: Local, location: Location) -> bool { + if !self.needs_drop.in_any_value_of_ty.contains(local) { + return false; + } + + self.needs_drop.cursor.seek_before(location); + self.needs_drop.cursor.get().contains(local) + || self.indirectly_mutable(local, location) + } + + /// Returns `true` if `local` is `HasMutInterior`, but requires the `has_mut_interior` and + /// `indirectly_mutable` cursors to be updated beforehand. + fn has_mut_interior_eager_seek(&self, local: Local) -> bool { + if !self.has_mut_interior.in_any_value_of_ty.contains(local) { + return false; + } + + self.has_mut_interior.cursor.get().contains(local) + || self.indirectly_mutable.get().contains(local) + } +} + +pub struct Validator<'a, 'mir, 'tcx> { + item: &'a Item<'mir, 'tcx>, + qualifs: Qualifs<'a, 'mir, 'tcx>, + + /// The span of the current statement. + span: Span, + + /// True if the local was assigned the result of an illegal borrow (`ops::MutBorrow`). + /// + /// This is used to hide errors from {re,}borrowing the newly-assigned local, instead pointing + /// the user to the place where the illegal borrow occurred. 
This set is only populated once an + /// error has been emitted, so it will never cause an erroneous `mir::Body` to pass validation. + /// + /// FIXME(ecstaticmorse): assert at the end of checking that if `tcx.has_errors() == false`, + /// this set is empty. Note that if we start removing locals from + /// `derived_from_illegal_borrow`, just checking at the end won't be enough. + derived_from_illegal_borrow: BitSet, + + errors: Vec<(Span, String)>, + + /// Whether to actually emit errors or just store them in `errors`. + pub(crate) suppress_errors: bool, +} + +impl Deref for Validator<'_, 'mir, 'tcx> { + type Target = Item<'mir, 'tcx>; + + fn deref(&self) -> &Self::Target { + &self.item + } +} + +impl Validator<'a, 'mir, 'tcx> { + pub fn new( + item: &'a Item<'mir, 'tcx>, + ) -> Self { + let dead_unwinds = BitSet::new_empty(item.body.basic_blocks().len()); + + let needs_drop = QualifCursor::new( + NeedsDrop, + item, + &dead_unwinds, + ); + + let has_mut_interior = QualifCursor::new( + HasMutInterior, + item, + &dead_unwinds, + ); + + let indirectly_mutable = old_dataflow::do_dataflow( + item.tcx, + item.body, + item.def_id, + &item.tcx.get_attrs(item.def_id), + &dead_unwinds, + old_dataflow::IndirectlyMutableLocals::new(item.tcx, item.body, item.param_env), + |_, local| old_dataflow::DebugFormatted::new(&local), + ); + + let indirectly_mutable = old_dataflow::DataflowResultsCursor::new( + indirectly_mutable, + item.body, + ); + + let qualifs = Qualifs { + needs_drop, + has_mut_interior, + indirectly_mutable, + }; + + Validator { + span: item.body.span, + item, + qualifs, + errors: vec![], + derived_from_illegal_borrow: BitSet::new_empty(item.body.local_decls.len()), + suppress_errors: false, + } + } + + pub fn take_errors(&mut self) -> Vec<(Span, String)> { + std::mem::replace(&mut self.errors, vec![]) + } + + /// Emits an error at the given `span` if an expression cannot be evaluated in the current + /// context. Returns `Forbidden` if an error was emitted. + pub fn check_op_spanned(&mut self, op: O, span: Span) -> CheckOpResult + where + O: NonConstOp + fmt::Debug + { + trace!("check_op: op={:?}", op); + + if op.is_allowed_in_item(self) { + return CheckOpResult::Allowed; + } + + // If an operation is supported in miri (and is not already controlled by a feature gate) it + // can be turned on with `-Zunleash-the-miri-inside-of-you`. + let is_unleashable = O::IS_SUPPORTED_IN_MIRI + && O::feature_gate(self.tcx).is_none(); + + if is_unleashable && self.tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you { + self.tcx.sess.span_warn(span, "skipping const checks"); + return CheckOpResult::Unleashed; + } + + if !self.suppress_errors { + op.emit_error(self, span); + } + + self.errors.push((span, format!("{:?}", op))); + CheckOpResult::Forbidden + } + + /// Emits an error if an expression cannot be evaluated in the current context. + pub fn check_op(&mut self, op: impl NonConstOp + fmt::Debug) -> CheckOpResult { + let span = self.span; + self.check_op_spanned(op, span) + } +} + +impl Visitor<'tcx> for Validator<'_, 'mir, 'tcx> { + fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) { + trace!("visit_rvalue: rvalue={:?} location={:?}", rvalue, location); + + // Check nested operands and places. + if let Rvalue::Ref(_, kind, ref place) = *rvalue { + // Special-case reborrows to be more like a copy of a reference. 
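+            // For example, in `let r2 = &*r1;` where `r1: &T`, the borrow `&*r1` is a
+            // reborrow: it is handled below as a use of the reference `r1` itself
+            // rather than as a dereference of the place behind it.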
+ let mut reborrow_place = None; + if let &[ref proj_base @ .., elem] = place.projection.as_ref() { + if elem == ProjectionElem::Deref { + let base_ty = Place::ty_from(&place.base, proj_base, self.body, self.tcx).ty; + if let ty::Ref(..) = base_ty.kind { + reborrow_place = Some(proj_base); + } + } + } + + if let Some(proj) = reborrow_place { + let ctx = match kind { + BorrowKind::Shared => PlaceContext::NonMutatingUse( + NonMutatingUseContext::SharedBorrow, + ), + BorrowKind::Shallow => PlaceContext::NonMutatingUse( + NonMutatingUseContext::ShallowBorrow, + ), + BorrowKind::Unique => PlaceContext::NonMutatingUse( + NonMutatingUseContext::UniqueBorrow, + ), + BorrowKind::Mut { .. } => PlaceContext::MutatingUse( + MutatingUseContext::Borrow, + ), + }; + self.visit_place_base(&place.base, ctx, location); + self.visit_projection(&place.base, proj, ctx, location); + } else { + self.super_rvalue(rvalue, location); + } + } else { + self.super_rvalue(rvalue, location); + } + + match *rvalue { + Rvalue::Use(_) | + Rvalue::Repeat(..) | + Rvalue::UnaryOp(UnOp::Neg, _) | + Rvalue::UnaryOp(UnOp::Not, _) | + Rvalue::NullaryOp(NullOp::SizeOf, _) | + Rvalue::CheckedBinaryOp(..) | + Rvalue::Cast(CastKind::Pointer(_), ..) | + Rvalue::Discriminant(..) | + Rvalue::Len(_) | + Rvalue::Ref(..) | + Rvalue::Aggregate(..) => {} + + Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) => { + let operand_ty = operand.ty(self.body, self.tcx); + let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast"); + let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast"); + + if let (CastTy::Ptr(_), CastTy::Int(_)) + | (CastTy::FnPtr, CastTy::Int(_)) = (cast_in, cast_out) { + self.check_op(ops::RawPtrToIntCast); + } + } + + Rvalue::BinaryOp(op, ref lhs, _) => { + if let ty::RawPtr(_) | ty::FnPtr(..) = lhs.ty(self.body, self.tcx).kind { + assert!(op == BinOp::Eq || op == BinOp::Ne || + op == BinOp::Le || op == BinOp::Lt || + op == BinOp::Ge || op == BinOp::Gt || + op == BinOp::Offset); + + + self.check_op(ops::RawPtrComparison); + } + } + + Rvalue::NullaryOp(NullOp::Box, _) => { + self.check_op(ops::HeapAllocation); + } + } + } + + fn visit_place_base( + &mut self, + place_base: &PlaceBase<'tcx>, + context: PlaceContext, + location: Location, + ) { + trace!( + "visit_place_base: place_base={:?} context={:?} location={:?}", + place_base, + context, + location, + ); + self.super_place_base(place_base, context, location); + + match place_base { + PlaceBase::Local(_) => {} + PlaceBase::Static(box Static{ kind: StaticKind::Promoted(_, _), .. }) => { + bug!("Promotion must be run after const validation"); + } + + PlaceBase::Static(box Static{ kind: StaticKind::Static, def_id, .. 
}) => { + let is_thread_local = self.tcx.has_attr(*def_id, sym::thread_local); + if is_thread_local { + self.check_op(ops::ThreadLocalAccess); + } else if self.const_kind() == ConstKind::Static && context.is_mutating_use() { + // this is not strictly necessary as miri will also bail out + // For interior mutability we can't really catch this statically as that + // goes through raw pointers and intermediate temporaries, so miri has + // to catch this anyway + + self.tcx.sess.span_err( + self.span, + "cannot mutate statics in the initializer of another static", + ); + } else { + self.check_op(ops::StaticAccess); + } + } + } + } + + fn visit_assign(&mut self, dest: &Place<'tcx>, rvalue: &Rvalue<'tcx>, location: Location) { + trace!("visit_assign: dest={:?} rvalue={:?} location={:?}", dest, rvalue, location); + + // Error on mutable borrows or shared borrows of values with interior mutability. + // + // This replicates the logic at the start of `assign` in the old const checker. Note that + // it depends on `HasMutInterior` being set for mutable borrows as well as values with + // interior mutability. + if let Rvalue::Ref(_, kind, ref borrowed_place) = *rvalue { + // FIXME: Change the `in_*` methods to take a `FnMut` so we don't have to manually seek + // the cursors beforehand. + self.qualifs.has_mut_interior.cursor.seek_before(location); + self.qualifs.indirectly_mutable.seek(location); + + let rvalue_has_mut_interior = HasMutInterior::in_rvalue( + &self.item, + &|local| self.qualifs.has_mut_interior_eager_seek(local), + rvalue, + ); + + if rvalue_has_mut_interior { + let is_derived_from_illegal_borrow = match borrowed_place.as_local() { + // If an unprojected local was borrowed and its value was the result of an + // illegal borrow, suppress this error and mark the result of this borrow as + // illegal as well. + Some(borrowed_local) + if self.derived_from_illegal_borrow.contains(borrowed_local) => + { + true + } + + // Otherwise proceed normally: check the legality of a mutable borrow in this + // context. + _ => self.check_op(ops::MutBorrow(kind)) == CheckOpResult::Forbidden, + }; + + // When the target of the assignment is a local with no projections, mark it as + // derived from an illegal borrow if necessary. + // + // FIXME: should we also clear `derived_from_illegal_borrow` when a local is + // assigned a new value? + if is_derived_from_illegal_borrow { + if let Some(dest) = dest.as_local() { + self.derived_from_illegal_borrow.insert(dest); + } + } + } + } + + self.super_assign(dest, rvalue, location); + } + + fn visit_projection_elem( + &mut self, + place_base: &PlaceBase<'tcx>, + proj_base: &[PlaceElem<'tcx>], + elem: &PlaceElem<'tcx>, + context: PlaceContext, + location: Location, + ) { + trace!( + "visit_projection_elem: place_base={:?} proj_base={:?} elem={:?} \ + context={:?} location={:?}", + place_base, + proj_base, + elem, + context, + location, + ); + + self.super_projection_elem(place_base, proj_base, elem, context, location); + + match elem { + ProjectionElem::Deref => { + if context.is_mutating_use() { + self.check_op(ops::MutDeref); + } + + let base_ty = Place::ty_from(place_base, proj_base, self.body, self.tcx).ty; + if let ty::RawPtr(_) = base_ty.kind { + self.check_op(ops::RawPtrDeref); + } + } + + ProjectionElem::ConstantIndex {..} | + ProjectionElem::Subslice {..} | + ProjectionElem::Field(..) 
| + ProjectionElem::Index(_) => { + let base_ty = Place::ty_from(place_base, proj_base, self.body, self.tcx).ty; + match base_ty.ty_adt_def() { + Some(def) if def.is_union() => { + self.check_op(ops::UnionAccess); + } + + _ => {} + } + } + + ProjectionElem::Downcast(..) => { + self.check_op(ops::Downcast); + } + } + } + + + fn visit_source_info(&mut self, source_info: &SourceInfo) { + trace!("visit_source_info: source_info={:?}", source_info); + self.span = source_info.span; + } + + fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) { + trace!("visit_statement: statement={:?} location={:?}", statement, location); + + match statement.kind { + StatementKind::Assign(..) => { + self.super_statement(statement, location); + } + StatementKind::FakeRead(FakeReadCause::ForMatchedPlace, _) => { + self.check_op(ops::IfOrMatch); + } + // FIXME(eddyb) should these really do nothing? + StatementKind::FakeRead(..) | + StatementKind::SetDiscriminant { .. } | + StatementKind::StorageLive(_) | + StatementKind::StorageDead(_) | + StatementKind::InlineAsm {..} | + StatementKind::Retag { .. } | + StatementKind::AscribeUserType(..) | + StatementKind::Nop => {} + } + } + + fn visit_terminator_kind(&mut self, kind: &TerminatorKind<'tcx>, location: Location) { + trace!("visit_terminator_kind: kind={:?} location={:?}", kind, location); + self.super_terminator_kind(kind, location); + + match kind { + TerminatorKind::Call { func, .. } => { + let fn_ty = func.ty(self.body, self.tcx); + + let def_id = match fn_ty.kind { + ty::FnDef(def_id, _) => def_id, + + ty::FnPtr(_) => { + self.check_op(ops::FnCallIndirect); + return; + } + _ => { + self.check_op(ops::FnCallOther); + return; + } + }; + + // At this point, we are calling a function whose `DefId` is known... + + if let Abi::RustIntrinsic | Abi::PlatformIntrinsic = self.tcx.fn_sig(def_id).abi() { + assert!(!self.tcx.is_const_fn(def_id)); + + if self.tcx.item_name(def_id) == sym::transmute { + self.check_op(ops::Transmute); + return; + } + + // To preserve the current semantics, we return early, allowing all + // intrinsics (except `transmute`) to pass unchecked to miri. + // + // FIXME: We should keep a whitelist of allowed intrinsics (or at least a + // blacklist of unimplemented ones) and fail here instead. + return; + } + + if self.tcx.is_const_fn(def_id) { + return; + } + + if is_lang_panic_fn(self.tcx, def_id) { + self.check_op(ops::Panic); + } else if let Some(feature) = self.tcx.is_unstable_const_fn(def_id) { + // Exempt unstable const fns inside of macros with + // `#[allow_internal_unstable]`. + if !self.span.allows_unstable(feature) { + self.check_op(ops::FnCallUnstable(def_id, feature)); + } + } else { + self.check_op(ops::FnCallNonConst(def_id)); + } + + } + + // Forbid all `Drop` terminators unless the place being dropped is a local with no + // projections that cannot be `NeedsDrop`. + | TerminatorKind::Drop { location: dropped_place, .. } + | TerminatorKind::DropAndReplace { location: dropped_place, .. } + => { + let mut err_span = self.span; + + // Check to see if the type of this place can ever have a drop impl. If not, this + // `Drop` terminator is frivolous. + let ty_needs_drop = dropped_place + .ty(self.body, self.tcx) + .ty + .needs_drop(self.tcx, self.param_env); + + if !ty_needs_drop { + return; + } + + let needs_drop = if let Some(local) = dropped_place.as_local() { + // Use the span where the local was declared as the span of the drop error. 
+ err_span = self.body.local_decls[local].source_info.span; + self.qualifs.needs_drop_lazy_seek(local, location) + } else { + true + }; + + if needs_drop { + self.check_op_spanned(ops::LiveDrop, err_span); + } + } + + _ => {} + } + } +} diff --git a/src/librustc_mir/transform/check_unsafety.rs b/src/librustc_mir/transform/check_unsafety.rs index 61e32ca8de..d9b983ab79 100644 --- a/src/librustc_mir/transform/check_unsafety.rs +++ b/src/librustc_mir/transform/check_unsafety.rs @@ -1,5 +1,5 @@ use rustc_data_structures::fx::FxHashSet; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc_index::vec::IndexVec; use rustc_data_structures::sync::Lrc; use rustc::ty::query::Providers; @@ -12,7 +12,7 @@ use rustc::lint::builtin::{SAFE_EXTERN_STATICS, SAFE_PACKED_BORROWS, UNUSED_UNSA use rustc::mir::*; use rustc::mir::visit::{PlaceContext, Visitor, MutatingUseContext}; -use syntax::symbol::{InternedString, sym}; +use syntax::symbol::{Symbol, sym}; use std::ops::Bound; @@ -167,9 +167,8 @@ impl<'a, 'tcx> Visitor<'tcx> for UnsafetyChecker<'a, 'tcx> { (CastTy::FnPtr, CastTy::Int(_)) => { self.register_violations(&[UnsafetyViolation { source_info: self.source_info, - description: InternedString::intern("cast of pointer to int"), - details: InternedString::intern( - "casting pointers to integers in constants"), + description: Symbol::intern("cast of pointer to int"), + details: Symbol::intern("casting pointers to integers in constants"), kind: UnsafetyViolationKind::General, }], &[]); }, @@ -182,11 +181,11 @@ impl<'a, 'tcx> Visitor<'tcx> for UnsafetyChecker<'a, 'tcx> { // result of a comparison of addresses would differ between runtime and compile-time. Rvalue::BinaryOp(_, ref lhs, _) if self.const_context && self.tcx.features().const_compare_raw_pointers => { - if let ty::RawPtr(_) | ty::FnPtr(..) = lhs.ty(self.body, self.tcx).sty { + if let ty::RawPtr(_) | ty::FnPtr(..) 
= lhs.ty(self.body, self.tcx).kind { self.register_violations(&[UnsafetyViolation { source_info: self.source_info, - description: InternedString::intern("pointer operation"), - details: InternedString::intern("operations on pointers in constants"), + description: Symbol::intern("pointer operation"), + details: Symbol::intern("operations on pointers in constants"), kind: UnsafetyViolationKind::General, }], &[]); } @@ -219,8 +218,8 @@ impl<'a, 'tcx> Visitor<'tcx> for UnsafetyChecker<'a, 'tcx> { self.source_scope_local_data[source_info.scope].lint_root; self.register_violations(&[UnsafetyViolation { source_info, - description: InternedString::intern("use of extern static"), - details: InternedString::intern( + description: Symbol::intern("use of extern static"), + details: Symbol::intern( "extern statics are not controlled by the Rust type system: \ invalid data, aliasing violations or data races will cause \ undefined behavior"), @@ -240,8 +239,8 @@ impl<'a, 'tcx> Visitor<'tcx> for UnsafetyChecker<'a, 'tcx> { self.source_scope_local_data[source_info.scope].lint_root; self.register_violations(&[UnsafetyViolation { source_info, - description: InternedString::intern("borrow of packed field"), - details: InternedString::intern( + description: Symbol::intern("borrow of packed field"), + details: Symbol::intern( "fields of packed structs might be misaligned: dereferencing a \ misaligned pointer or even just creating a misaligned reference \ is undefined behavior"), @@ -274,7 +273,7 @@ impl<'a, 'tcx> Visitor<'tcx> for UnsafetyChecker<'a, 'tcx> { } } let base_ty = Place::ty_from(&place.base, proj_base, self.body, self.tcx).ty; - match base_ty.sty { + match base_ty.kind { ty::RawPtr(..) => { self.require_unsafe("dereference of raw pointer", "raw pointers may be NULL, dangling or unaligned; they can violate \ @@ -334,8 +333,8 @@ impl<'a, 'tcx> UnsafetyChecker<'a, 'tcx> { let source_info = self.source_info; self.register_violations(&[UnsafetyViolation { source_info, - description: InternedString::intern(description), - details: InternedString::intern(details), + description: Symbol::intern(description), + details: Symbol::intern(details), kind, }], &[]); } @@ -407,8 +406,8 @@ impl<'a, 'tcx> UnsafetyChecker<'a, 'tcx> { place: &Place<'tcx>, is_mut_use: bool, ) { - let mut cursor = &*place.projection; - while let [proj_base @ .., elem] = cursor { + let mut cursor = place.projection.as_ref(); + while let &[ref proj_base @ .., elem] = cursor { cursor = proj_base; match elem { @@ -416,7 +415,7 @@ impl<'a, 'tcx> UnsafetyChecker<'a, 'tcx> { let ty = Place::ty_from(&place.base, proj_base, &self.body.local_decls, self.tcx) .ty; - match ty.sty { + match ty.kind { ty::Adt(def, _) => match self.tcx.layout_scalar_valid_range(def.did) { (Bound::Unbounded, Bound::Unbounded) => {}, _ => { @@ -438,8 +437,8 @@ impl<'a, 'tcx> UnsafetyChecker<'a, 'tcx> { let source_info = self.source_info; self.register_violations(&[UnsafetyViolation { source_info, - description: InternedString::intern(description), - details: InternedString::intern(details), + description: Symbol::intern(description), + details: Symbol::intern(details), kind: UnsafetyViolationKind::GeneralAndConstFn, }], &[]); } @@ -577,7 +576,7 @@ fn is_enclosed( if used_unsafe.contains(&parent_id) { Some(("block".to_string(), parent_id)) } else if let Some(Node::Item(&hir::Item { - node: hir::ItemKind::Fn(_, header, _, _), + kind: hir::ItemKind::Fn(_, header, _, _), .. 
})) = tcx.hir().find(parent_id) { match header.unsafety { diff --git a/src/librustc_mir/transform/cleanup_post_borrowck.rs b/src/librustc_mir/transform/cleanup_post_borrowck.rs index ea173279aa..4fd4fe45cd 100644 --- a/src/librustc_mir/transform/cleanup_post_borrowck.rs +++ b/src/librustc_mir/transform/cleanup_post_borrowck.rs @@ -24,16 +24,22 @@ use crate::transform::{MirPass, MirSource}; pub struct CleanupNonCodegenStatements; -pub struct DeleteNonCodegenStatements; +pub struct DeleteNonCodegenStatements<'tcx> { + tcx: TyCtxt<'tcx>, +} impl<'tcx> MirPass<'tcx> for CleanupNonCodegenStatements { - fn run_pass(&self, _tcx: TyCtxt<'tcx>, _source: MirSource<'tcx>, body: &mut Body<'tcx>) { - let mut delete = DeleteNonCodegenStatements; + fn run_pass(&self, tcx: TyCtxt<'tcx>, _source: MirSource<'tcx>, body: &mut Body<'tcx>) { + let mut delete = DeleteNonCodegenStatements { tcx }; delete.visit_body(body); } } -impl<'tcx> MutVisitor<'tcx> for DeleteNonCodegenStatements { +impl<'tcx> MutVisitor<'tcx> for DeleteNonCodegenStatements<'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + fn visit_statement(&mut self, statement: &mut Statement<'tcx>, location: Location) { diff --git a/src/librustc_mir/transform/const_prop.rs b/src/librustc_mir/transform/const_prop.rs index 614d5d2a4a..2723690083 100644 --- a/src/librustc_mir/transform/const_prop.rs +++ b/src/librustc_mir/transform/const_prop.rs @@ -1,36 +1,42 @@ //! Propagates constants for early reporting of statically known //! assertion failures +use std::borrow::Cow; use std::cell::Cell; use rustc::hir::def::DefKind; +use rustc::hir::def_id::DefId; use rustc::mir::{ - AggregateKind, Constant, Location, Place, PlaceBase, Body, Operand, Rvalue, - Local, NullOp, UnOp, StatementKind, Statement, LocalKind, Static, StaticKind, - TerminatorKind, Terminator, ClearCrossCrate, SourceInfo, BinOp, ProjectionElem, - SourceScope, SourceScopeLocalData, LocalDecl, + AggregateKind, Constant, Location, Place, PlaceBase, Body, Operand, Rvalue, Local, UnOp, + StatementKind, Statement, LocalKind, TerminatorKind, Terminator, ClearCrossCrate, SourceInfo, + BinOp, SourceScope, SourceScopeLocalData, LocalDecl, BasicBlock, }; use rustc::mir::visit::{ Visitor, PlaceContext, MutatingUseContext, MutVisitor, NonMutatingUseContext, }; -use rustc::mir::interpret::{Scalar, GlobalId, InterpResult, PanicInfo}; +use rustc::mir::interpret::{Scalar, InterpResult, PanicInfo}; use rustc::ty::{self, Instance, ParamEnv, Ty, TyCtxt}; +use syntax::ast::Mutability; use syntax_pos::{Span, DUMMY_SP}; use rustc::ty::subst::InternalSubsts; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc_data_structures::fx::FxHashMap; +use rustc_index::vec::IndexVec; use rustc::ty::layout::{ - LayoutOf, TyLayout, LayoutError, HasTyCtxt, TargetDataLayout, HasDataLayout, + LayoutOf, TyLayout, LayoutError, HasTyCtxt, TargetDataLayout, HasDataLayout, Size, }; use crate::interpret::{ self, InterpCx, ScalarMaybeUndef, Immediate, OpTy, - ImmTy, MemoryKind, StackPopCleanup, LocalValue, LocalState, -}; -use crate::const_eval::{ - CompileTimeInterpreter, error_to_const_error, mk_eval_cx, + StackPopCleanup, LocalValue, LocalState, AllocId, Frame, + Allocation, MemoryKind, ImmTy, Pointer, Memory, PlaceTy, + Operand as InterpOperand, }; +use crate::const_eval::error_to_const_error; use crate::transform::{MirPass, MirSource}; +/// The maximum number of bytes that we'll allocate space for a return value. 
+const MAX_ALLOC_LIMIT: u64 = 1024; + pub struct ConstProp; impl<'tcx> MirPass<'tcx> for ConstProp { @@ -57,6 +63,14 @@ impl<'tcx> MirPass<'tcx> for ConstProp { return } + let is_generator = tcx.type_of(source.def_id()).is_generator(); + // FIXME(welseywiser) const prop doesn't work on generators because of query cycles + // computing their layout. + if is_generator { + trace!("ConstProp skipped for generator {:?}", source.def_id()); + return + } + trace!("ConstProp starting for {:?}", source.def_id()); // Steal some data we need from `body`. @@ -103,11 +117,155 @@ impl<'tcx> MirPass<'tcx> for ConstProp { } } +struct ConstPropMachine; + +impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine { + type MemoryKinds = !; + type PointerTag = (); + type ExtraFnVal = !; + + type FrameExtra = (); + type MemoryExtra = (); + type AllocExtra = (); + + type MemoryMap = FxHashMap, Allocation)>; + + const STATIC_KIND: Option = None; + + const CHECK_ALIGN: bool = false; + + #[inline(always)] + fn enforce_validity(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool { + false + } + + fn find_fn( + _ecx: &mut InterpCx<'mir, 'tcx, Self>, + _instance: ty::Instance<'tcx>, + _args: &[OpTy<'tcx>], + _dest: Option>, + _ret: Option, + ) -> InterpResult<'tcx, Option<&'mir Body<'tcx>>> { + Ok(None) + } + + fn call_extra_fn( + _ecx: &mut InterpCx<'mir, 'tcx, Self>, + fn_val: !, + _args: &[OpTy<'tcx>], + _dest: Option>, + _ret: Option, + ) -> InterpResult<'tcx> { + match fn_val {} + } + + fn call_intrinsic( + _ecx: &mut InterpCx<'mir, 'tcx, Self>, + _span: Span, + _instance: ty::Instance<'tcx>, + _args: &[OpTy<'tcx>], + _dest: PlaceTy<'tcx>, + ) -> InterpResult<'tcx> { + throw_unsup_format!("calling intrinsics isn't supported in ConstProp"); + } + + fn ptr_to_int( + _mem: &Memory<'mir, 'tcx, Self>, + _ptr: Pointer, + ) -> InterpResult<'tcx, u64> { + throw_unsup_format!("ptr-to-int casts aren't supported in ConstProp"); + } + + fn binary_ptr_op( + _ecx: &InterpCx<'mir, 'tcx, Self>, + _bin_op: BinOp, + _left: ImmTy<'tcx>, + _right: ImmTy<'tcx>, + ) -> InterpResult<'tcx, (Scalar, bool, Ty<'tcx>)> { + // We can't do this because aliasing of memory can differ between const eval and llvm + throw_unsup_format!("pointer arithmetic or comparisons aren't supported in ConstProp"); + } + + fn find_foreign_static( + _tcx: TyCtxt<'tcx>, + _def_id: DefId, + ) -> InterpResult<'tcx, Cow<'tcx, Allocation>> { + throw_unsup!(ReadForeignStatic) + } + + #[inline(always)] + fn tag_allocation<'b>( + _memory_extra: &(), + _id: AllocId, + alloc: Cow<'b, Allocation>, + _kind: Option>, + ) -> (Cow<'b, Allocation>, Self::PointerTag) { + // We do not use a tag so we can just cheaply forward the allocation + (alloc, ()) + } + + #[inline(always)] + fn tag_static_base_pointer( + _memory_extra: &(), + _id: AllocId, + ) -> Self::PointerTag { + () + } + + fn box_alloc( + _ecx: &mut InterpCx<'mir, 'tcx, Self>, + _dest: PlaceTy<'tcx>, + ) -> InterpResult<'tcx> { + throw_unsup_format!("can't const prop `box` keyword"); + } + + fn access_local( + _ecx: &InterpCx<'mir, 'tcx, Self>, + frame: &Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>, + local: Local, + ) -> InterpResult<'tcx, InterpOperand> { + let l = &frame.locals[local]; + + if l.value == LocalValue::Uninitialized { + throw_unsup_format!("tried to access an uninitialized local"); + } + + l.access() + } + + fn before_access_static( + allocation: &Allocation, + ) -> InterpResult<'tcx> { + // if the static allocation is mutable or if it has relocations (it may be legal to mutate + // the 
memory behind that in the future), then we can't const prop it + if allocation.mutability == Mutability::Mutable || allocation.relocations().len() > 0 { + throw_unsup_format!("can't eval mutable statics in ConstProp"); + } + + Ok(()) + } + + fn before_terminator(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> { + Ok(()) + } + + #[inline(always)] + fn stack_push(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> { + Ok(()) + } + + /// Called immediately before a stack frame gets popped. + #[inline(always)] + fn stack_pop(_ecx: &mut InterpCx<'mir, 'tcx, Self>, _extra: ()) -> InterpResult<'tcx> { + Ok(()) + } +} + type Const<'tcx> = OpTy<'tcx>; /// Finds optimization opportunities on the MIR. struct ConstPropagator<'mir, 'tcx> { - ecx: InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>, + ecx: InterpCx<'mir, 'tcx, ConstPropMachine>, tcx: TyCtxt<'tcx>, source: MirSource<'tcx>, can_const_prop: IndexVec, @@ -150,7 +308,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { let def_id = source.def_id(); let param_env = tcx.param_env(def_id); let span = tcx.def_span(def_id); - let mut ecx = mk_eval_cx(tcx, span, param_env); + let mut ecx = InterpCx::new(tcx.at(span), param_env, ConstPropMachine, ()); let can_const_prop = CanConstProp::check(body); ecx.push_stack_frame( @@ -180,34 +338,9 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { } fn get_const(&self, local: Local) -> Option> { - let l = &self.ecx.frame().locals[local]; - - // If the local is `Unitialized` or `Dead` then we haven't propagated a value into it. - // - // `InterpCx::access_local()` mostly takes care of this for us however, for ZSTs, - // it will synthesize a value for us. In doing so, that will cause the - // `get_const(l).is_empty()` assert right before we call `set_const()` in `visit_statement` - // to fail. 
- if let LocalValue::Uninitialized | LocalValue::Dead = l.value { - return None; - } - self.ecx.access_local(self.ecx.frame(), local, None).ok() } - fn set_const(&mut self, local: Local, c: Const<'tcx>) { - let frame = self.ecx.frame_mut(); - - if let Some(layout) = frame.locals[local].layout.get() { - debug_assert_eq!(c.layout, layout); - } - - frame.locals[local] = LocalState { - value: LocalValue::Live(*c), - layout: Cell::new(Some(c.layout)), - }; - } - fn remove_const(&mut self, local: Local) { self.ecx.frame_mut().locals[local] = LocalState { value: LocalValue::Uninitialized, @@ -282,53 +415,9 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { fn eval_place(&mut self, place: &Place<'tcx>, source_info: SourceInfo) -> Option> { trace!("eval_place(place={:?})", place); - let mut eval = match place.base { - PlaceBase::Local(loc) => self.get_const(loc).clone()?, - PlaceBase::Static(box Static {kind: StaticKind::Promoted(promoted, _), ..}) => { - let generics = self.tcx.generics_of(self.source.def_id()); - if generics.requires_monomorphization(self.tcx) { - // FIXME: can't handle code with generics - return None; - } - let substs = InternalSubsts::identity_for_item(self.tcx, self.source.def_id()); - let instance = Instance::new(self.source.def_id(), substs); - let cid = GlobalId { - instance, - promoted: Some(promoted), - }; - let res = self.use_ecx(source_info, |this| { - this.ecx.const_eval_raw(cid) - })?; - trace!("evaluated promoted {:?} to {:?}", promoted, res); - res.into() - } - _ => return None, - }; - - for (i, elem) in place.projection.iter().enumerate() { - let proj_base = &place.projection[..i]; - - match elem { - ProjectionElem::Field(field, _) => { - trace!("field proj on {:?}", proj_base); - eval = self.use_ecx(source_info, |this| { - this.ecx.operand_field(eval, field.index() as u64) - })?; - }, - ProjectionElem::Deref => { - trace!("processing deref"); - eval = self.use_ecx(source_info, |this| { - this.ecx.deref_operand(eval) - })?.into(); - } - // We could get more projections by using e.g., `operand_projection`, - // but we do not even have the stack frame set up properly so - // an `Index` projection would throw us off-track. - _ => return None, - } - } - - Some(eval) + self.use_ecx(source_info, |this| { + this.ecx.eval_place_to_op(place, None) + }) } fn eval_operand(&mut self, op: &Operand<'tcx>, source_info: SourceInfo) -> Option> { @@ -344,123 +433,66 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { rvalue: &Rvalue<'tcx>, place_layout: TyLayout<'tcx>, source_info: SourceInfo, - ) -> Option> { + place: &Place<'tcx>, + ) -> Option<()> { let span = source_info.span; - match *rvalue { - Rvalue::Use(ref op) => { - self.eval_operand(op, source_info) - }, - Rvalue::Ref(_, _, ref place) => { - let src = self.eval_place(place, source_info)?; - let mplace = src.try_as_mplace().ok()?; - Some(ImmTy::from_scalar(mplace.ptr.into(), place_layout).into()) - }, - Rvalue::Repeat(..) | - Rvalue::Aggregate(..) | - Rvalue::NullaryOp(NullOp::Box, _) | - Rvalue::Discriminant(..) => None, - Rvalue::Cast(kind, ref operand, _) => { - let op = self.eval_operand(operand, source_info)?; + // #66397: Don't try to eval into large places as that can cause an OOM + if place_layout.size >= Size::from_bytes(MAX_ALLOC_LIMIT) { + return None; + } + + let overflow_check = self.tcx.sess.overflow_checks(); + + // Perform any special handling for specific Rvalue types. + // Generally, checks here fall into one of two categories: + // 1. 
Additional checking to provide useful lints to the user + // - In this case, we will do some validation and then fall through to the + // end of the function which evals the assignment. + // 2. Working around bugs in other parts of the compiler + // - In this case, we'll return `None` from this function to stop evaluation. + match rvalue { + // Additional checking: if overflow checks are disabled (which is usually the case in + // release mode), then we need to do additional checking here to give lints to the user + // if an overflow would occur. + Rvalue::UnaryOp(UnOp::Neg, arg) if !overflow_check => { + trace!("checking UnaryOp(op = Neg, arg = {:?})", arg); + self.use_ecx(source_info, |this| { - let dest = this.ecx.allocate(place_layout, MemoryKind::Stack); - this.ecx.cast(op, kind, dest.into())?; - Ok(dest.into()) - }) - }, - Rvalue::Len(ref place) => { - let place = self.eval_place(&place, source_info)?; - let mplace = place.try_as_mplace().ok()?; + let ty = arg.ty(&this.local_decls, this.tcx); - if let ty::Slice(_) = mplace.layout.ty.sty { - let len = mplace.meta.unwrap().to_usize(&self.ecx).unwrap(); - - Some(ImmTy::from_uint( - len, - self.tcx.layout_of(self.param_env.and(self.tcx.types.usize)).ok()?, - ).into()) - } else { - trace!("not slice: {:?}", mplace.layout.ty.sty); - None - } - }, - Rvalue::NullaryOp(NullOp::SizeOf, ty) => { - type_size_of(self.tcx, self.param_env, ty).and_then(|n| Some( - ImmTy::from_uint( - n, - self.tcx.layout_of(self.param_env.and(self.tcx.types.usize)).ok()?, - ).into() - )) - } - Rvalue::UnaryOp(op, ref arg) => { - let def_id = if self.tcx.is_closure(self.source.def_id()) { - self.tcx.closure_base_def_id(self.source.def_id()) - } else { - self.source.def_id() - }; - let generics = self.tcx.generics_of(def_id); - if generics.requires_monomorphization(self.tcx) { - // FIXME: can't handle code with generics - return None; - } - - let arg = self.eval_operand(arg, source_info)?; - let oflo_check = self.tcx.sess.overflow_checks(); - let val = self.use_ecx(source_info, |this| { - let prim = this.ecx.read_immediate(arg)?; - match op { - UnOp::Neg => { - // We check overflow in debug mode already - // so should only check in release mode. - if !oflo_check - && prim.layout.ty.is_signed() - && prim.to_bits()? == (1 << (prim.layout.size.bits() - 1)) { - throw_panic!(OverflowNeg) - } - } - UnOp::Not => { - // Cannot overflow + if ty.is_integral() { + let arg = this.ecx.eval_operand(arg, None)?; + let prim = this.ecx.read_immediate(arg)?; + // Need to do overflow check here: For actual CTFE, MIR + // generation emits code that does this before calling the op. + if prim.to_bits()? == (1 << (prim.layout.size.bits() - 1)) { + throw_panic!(OverflowNeg) } } - // Now run the actual operation. - this.ecx.unary_op(op, prim) + + Ok(()) })?; - Some(val.into()) } - Rvalue::CheckedBinaryOp(op, ref left, ref right) | - Rvalue::BinaryOp(op, ref left, ref right) => { - trace!("rvalue binop {:?} for {:?} and {:?}", op, left, right); - let right = self.eval_operand(right, source_info)?; - let def_id = if self.tcx.is_closure(self.source.def_id()) { - self.tcx.closure_base_def_id(self.source.def_id()) - } else { - self.source.def_id() - }; - let generics = self.tcx.generics_of(def_id); - if generics.requires_monomorphization(self.tcx) { - // FIXME: can't handle code with generics - return None; - } + + // Additional checking: check for overflows on integer binary operations and report + // them to the user as lints. 
+ Rvalue::BinaryOp(op, left, right) => { + trace!("checking BinaryOp(op = {:?}, left = {:?}, right = {:?})", op, left, right); let r = self.use_ecx(source_info, |this| { - this.ecx.read_immediate(right) + this.ecx.read_immediate(this.ecx.eval_operand(right, None)?) })?; - if op == BinOp::Shr || op == BinOp::Shl { - let left_ty = left.ty(&self.local_decls, self.tcx); - let left_bits = self - .tcx - .layout_of(self.param_env.and(left_ty)) - .unwrap() - .size - .bits(); - let right_size = right.layout.size; + if *op == BinOp::Shr || *op == BinOp::Shl { + let left_bits = place_layout.size.bits(); + let right_size = r.layout.size; let r_bits = r.to_scalar().and_then(|r| r.to_bits(right_size)); if r_bits.ok().map_or(false, |b| b >= left_bits as u128) { let source_scope_local_data = match self.source_scope_local_data { ClearCrossCrate::Set(ref data) => data, ClearCrossCrate::Clear => return None, }; - let dir = if op == BinOp::Shr { + let dir = if *op == BinOp::Shr { "right" } else { "left" @@ -474,36 +506,59 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { return None; } } - let left = self.eval_operand(left, source_info)?; - let l = self.use_ecx(source_info, |this| { - this.ecx.read_immediate(left) - })?; - trace!("const evaluating {:?} for {:?} and {:?}", op, left, right); - let (val, overflow, _ty) = self.use_ecx(source_info, |this| { - this.ecx.overflowing_binary_op(op, l, r) - })?; - let val = if let Rvalue::CheckedBinaryOp(..) = *rvalue { - Immediate::ScalarPair( - val.into(), - Scalar::from_bool(overflow).into(), - ) - } else { - // We check overflow in debug mode already - // so should only check in release mode. - if !self.tcx.sess.overflow_checks() && overflow { - let err = err_panic!(Overflow(op)).into(); - let _: Option<()> = self.use_ecx(source_info, |_| Err(err)); + + // If overflow checking is enabled (like in debug mode by default), + // then we'll already catch overflow when we evaluate the `Assert` statement + // in MIR. However, if overflow checking is disabled, then there won't be any + // `Assert` statement and so we have to do additional checking here. + if !overflow_check { + self.use_ecx(source_info, |this| { + let l = this.ecx.read_immediate(this.ecx.eval_operand(left, None)?)?; + let (_, overflow, _ty) = this.ecx.overflowing_binary_op(*op, l, r)?; + + if overflow { + let err = err_panic!(Overflow(*op)).into(); + return Err(err); + } + + Ok(()) + })?; + } + } + + // Work around: avoid ICE in miri. FIXME(wesleywiser) + // The Miri engine ICEs when taking a reference to an uninitialized unsized + // local. There's nothing it can do here: taking a reference needs an allocation + // which needs to know the size. Normally that's okay as during execution + // (e.g. for CTFE) it can never happen. But here in const_prop + // unknown data is uninitialized, so if e.g. a function argument is unsized + // and has a reference taken, we get an ICE. 
+ Rvalue::Ref(_, _, place_ref) => { + trace!("checking Ref({:?})", place_ref); + + if let Some(local) = place_ref.as_local() { + let alive = + if let LocalValue::Live(_) = self.ecx.frame().locals[local].value { + true + } else { + false + }; + + if !alive { + trace!("skipping Ref({:?}) to uninitialized local", place); return None; } - Immediate::Scalar(val.into()) - }; - let res = ImmTy { - imm: val, - layout: place_layout, - }; - Some(res.into()) - }, + } + } + + _ => { } } + + self.use_ecx(source_info, |this| { + trace!("calling eval_rvalue_into_place(rvalue = {:?}, place = {:?})", rvalue, place); + this.ecx.eval_rvalue_into_place(rvalue, place)?; + Ok(()) + }) } fn operand_from_scalar(&self, scalar: Scalar, ty: Ty<'tcx>, span: Span) -> Operand<'tcx> { @@ -552,7 +607,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { ScalarMaybeUndef::Scalar(one), ScalarMaybeUndef::Scalar(two) ) => { - let ty = &value.layout.ty.sty; + let ty = &value.layout.ty.kind; if let ty::Tuple(substs) = ty { *rval = Rvalue::Aggregate( Box::new(AggregateKind::Tuple), @@ -577,14 +632,6 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { } } -fn type_size_of<'tcx>( - tcx: TyCtxt<'tcx>, - param_env: ty::ParamEnv<'tcx>, - ty: Ty<'tcx>, -) -> Option { - tcx.layout_of(param_env.and(ty)).ok().map(|layout| layout.size.bytes()) -} - struct CanConstProp { can_const_prop: IndexVec, // false at the beginning, once set, there are not allowed to be any more assignments @@ -649,6 +696,10 @@ impl<'tcx> Visitor<'tcx> for CanConstProp { } impl<'mir, 'tcx> MutVisitor<'tcx> for ConstPropagator<'mir, 'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + fn visit_constant( &mut self, constant: &mut Constant<'tcx>, @@ -670,29 +721,45 @@ impl<'mir, 'tcx> MutVisitor<'tcx> for ConstPropagator<'mir, 'tcx> { .ty(&self.local_decls, self.tcx) .ty; if let Ok(place_layout) = self.tcx.layout_of(self.param_env.and(place_ty)) { - if let Some(value) = self.const_prop(rval, place_layout, statement.source_info) { - if let Place { - base: PlaceBase::Local(local), - projection: box [], - } = *place { - trace!("checking whether {:?} can be stored to {:?}", value, local); + if let Some(local) = place.as_local() { + let source = statement.source_info; + if let Some(()) = self.const_prop(rval, place_layout, source, place) { if self.can_const_prop[local] { - trace!("storing {:?} to {:?}", value, local); - assert!(self.get_const(local).is_none()); - self.set_const(local, value); + trace!("propagated into {:?}", local); if self.should_const_prop() { + let value = + self.get_const(local).expect("local was dead/uninitialized"); + trace!("replacing {:?} with {:?}", rval, value); self.replace_with_const( rval, value, statement.source_info, ); } + } else { + trace!("can't propagate into {:?}", local); + self.remove_const(local); } } } } + } else { + match statement.kind { + StatementKind::StorageLive(local) | + StatementKind::StorageDead(local) if self.can_const_prop[local] => { + let frame = self.ecx.frame_mut(); + frame.locals[local].value = + if let StatementKind::StorageLive(_) = statement.kind { + LocalValue::Uninitialized + } else { + LocalValue::Dead + }; + } + _ => {} + } } + self.super_statement(statement, location); } diff --git a/src/librustc_mir/transform/copy_prop.rs b/src/librustc_mir/transform/copy_prop.rs index 28f97f41b5..4c26feac4a 100644 --- a/src/librustc_mir/transform/copy_prop.rs +++ b/src/librustc_mir/transform/copy_prop.rs @@ -19,9 +19,7 @@ //! (non-mutating) use of `SRC`. These restrictions are conservative and may be relaxed in the //! 
future. -use rustc::mir::{ - Constant, Local, LocalKind, Location, Place, PlaceBase, Body, Operand, Rvalue, StatementKind -}; +use rustc::mir::{Constant, Local, LocalKind, Location, Place, Body, Operand, Rvalue, StatementKind}; use rustc::mir::visit::MutVisitor; use rustc::ty::TyCtxt; use crate::transform::{MirPass, MirSource}; @@ -92,28 +90,32 @@ impl<'tcx> MirPass<'tcx> for CopyPropagation { }; // That use of the source must be an assignment. - match statement.kind { - StatementKind::Assign( - box( - Place { - base: PlaceBase::Local(local), - projection: box [], - }, - Rvalue::Use(ref operand) - ) - ) if local == dest_local => { - let maybe_action = match *operand { - Operand::Copy(ref src_place) | - Operand::Move(ref src_place) => { - Action::local_copy(&body, &def_use_analysis, src_place) + match &statement.kind { + StatementKind::Assign(box(place, Rvalue::Use(operand))) => { + if let Some(local) = place.as_local() { + if local == dest_local { + let maybe_action = match operand { + Operand::Copy(ref src_place) | + Operand::Move(ref src_place) => { + Action::local_copy(&body, &def_use_analysis, src_place) + } + Operand::Constant(ref src_constant) => { + Action::constant(src_constant) + } + }; + match maybe_action { + Some(this_action) => action = this_action, + None => continue, + } + } else { + debug!(" Can't copy-propagate local: source use is not an \ + assignment"); + continue } - Operand::Constant(ref src_constant) => { - Action::constant(src_constant) - } - }; - match maybe_action { - Some(this_action) => action = this_action, - None => continue, + } else { + debug!(" Can't copy-propagate local: source use is not an \ + assignment"); + continue } } _ => { @@ -124,7 +126,8 @@ impl<'tcx> MirPass<'tcx> for CopyPropagation { } } - changed = action.perform(body, &def_use_analysis, dest_local, location) || changed; + changed = + action.perform(body, &def_use_analysis, dest_local, location, tcx) || changed; // FIXME(pcwalton): Update the use-def chains to delete the instructions instead of // regenerating the chains. break @@ -148,31 +151,20 @@ fn eliminate_self_assignments( for def in dest_use_info.defs_not_including_drop() { let location = def.location; if let Some(stmt) = body[location.block].statements.get(location.statement_index) { - match stmt.kind { - StatementKind::Assign( - box( - Place { - base: PlaceBase::Local(local), - projection: box [], - }, - Rvalue::Use(Operand::Copy(Place { - base: PlaceBase::Local(src_local), - projection: box [], - })), - ) - ) | - StatementKind::Assign( - box( - Place { - base: PlaceBase::Local(local), - projection: box [], - }, - Rvalue::Use(Operand::Move(Place { - base: PlaceBase::Local(src_local), - projection: box [], - })), - ) - ) if local == dest_local && dest_local == src_local => {} + match &stmt.kind { + StatementKind::Assign(box (place, Rvalue::Use(Operand::Copy(src_place)))) + | StatementKind::Assign(box (place, Rvalue::Use(Operand::Move(src_place)))) => { + if let (Some(local), Some(src_local)) = + (place.as_local(), src_place.as_local()) + { + if local == dest_local && dest_local == src_local { + } else { + continue; + } + } else { + continue; + } + } _ => { continue; } @@ -198,10 +190,7 @@ impl<'tcx> Action<'tcx> { fn local_copy(body: &Body<'tcx>, def_use_analysis: &DefUseAnalysis, src_place: &Place<'tcx>) -> Option> { // The source must be a local. 
- let src_local = if let Place { - base: PlaceBase::Local(local), - projection: box [], - } = *src_place { + let src_local = if let Some(local) = src_place.as_local() { local } else { debug!(" Can't copy-propagate local: source is not a local"); @@ -256,7 +245,8 @@ impl<'tcx> Action<'tcx> { body: &mut Body<'tcx>, def_use_analysis: &DefUseAnalysis, dest_local: Local, - location: Location) + location: Location, + tcx: TyCtxt<'tcx>) -> bool { match self { Action::PropagateLocalCopy(src_local) => { @@ -280,7 +270,7 @@ impl<'tcx> Action<'tcx> { } // Replace all uses of the destination local with the source local. - def_use_analysis.replace_all_defs_and_uses_with(dest_local, body, src_local); + def_use_analysis.replace_all_defs_and_uses_with(dest_local, body, src_local, tcx); // Finally, zap the now-useless assignment instruction. debug!(" Deleting assignment"); @@ -304,7 +294,8 @@ impl<'tcx> Action<'tcx> { // Replace all uses of the destination local with the constant. let mut visitor = ConstantPropagationVisitor::new(dest_local, - src_constant); + src_constant, + tcx); for dest_place_use in &dest_local_info.defs_and_uses { visitor.visit_location(body, dest_place_use.location) } @@ -336,33 +327,42 @@ impl<'tcx> Action<'tcx> { struct ConstantPropagationVisitor<'tcx> { dest_local: Local, constant: Constant<'tcx>, + tcx: TyCtxt<'tcx>, uses_replaced: usize, } impl<'tcx> ConstantPropagationVisitor<'tcx> { - fn new(dest_local: Local, constant: Constant<'tcx>) + fn new(dest_local: Local, constant: Constant<'tcx>, tcx: TyCtxt<'tcx>) -> ConstantPropagationVisitor<'tcx> { ConstantPropagationVisitor { dest_local, constant, + tcx, uses_replaced: 0, } } } impl<'tcx> MutVisitor<'tcx> for ConstantPropagationVisitor<'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) { self.super_operand(operand, location); - match *operand { - Operand::Copy(Place { - base: PlaceBase::Local(local), - projection: box [], - }) | - Operand::Move(Place { - base: PlaceBase::Local(local), - projection: box [], - }) if local == self.dest_local => {} + match operand { + Operand::Copy(place) | + Operand::Move(place) => { + if let Some(local) = place.as_local() { + if local == self.dest_local { + } else { + return; + } + } else { + return; + } + } _ => return, } diff --git a/src/librustc_mir/transform/deaggregator.rs b/src/librustc_mir/transform/deaggregator.rs index c1224be632..cdde9e12ed 100644 --- a/src/librustc_mir/transform/deaggregator.rs +++ b/src/librustc_mir/transform/deaggregator.rs @@ -45,6 +45,7 @@ impl<'tcx> MirPass<'tcx> for Deaggregator { }), *kind, source_info, + tcx, )) }); } diff --git a/src/librustc_mir/transform/elaborate_drops.rs b/src/librustc_mir/transform/elaborate_drops.rs index a9c66b3c8c..f91a08bcd9 100644 --- a/src/librustc_mir/transform/elaborate_drops.rs +++ b/src/librustc_mir/transform/elaborate_drops.rs @@ -14,7 +14,7 @@ use rustc::ty::layout::VariantIdx; use rustc::hir; use rustc::mir::*; use rustc::util::nodemap::FxHashMap; -use rustc_data_structures::bit_set::BitSet; +use rustc_index::bit_set::BitSet; use std::fmt; use syntax_pos::Span; @@ -28,17 +28,7 @@ impl<'tcx> MirPass<'tcx> for ElaborateDrops { let param_env = tcx.param_env(src.def_id()).with_reveal_all(); let move_data = match MoveData::gather_moves(body, tcx) { Ok(move_data) => move_data, - Err((move_data, _move_errors)) => { - // The only way we should be allowing any move_errors - // in here is if we are in the migration path for the - // NLL-based 
MIR-borrowck. - // - // If we are in the migration path, we have already - // reported these errors as warnings to the user. So - // we will just ignore them here. - assert!(tcx.migrate_borrowck()); - move_data - } + Err(_) => bug!("No `move_errors` should be allowed in MIR borrowck"), }; let elaborate_patch = { let body = &*body; diff --git a/src/librustc_mir/transform/erase_regions.rs b/src/librustc_mir/transform/erase_regions.rs index 21ca339eb9..b30e2de4ca 100644 --- a/src/librustc_mir/transform/erase_regions.rs +++ b/src/librustc_mir/transform/erase_regions.rs @@ -23,9 +23,12 @@ impl EraseRegionsVisitor<'tcx> { } impl MutVisitor<'tcx> for EraseRegionsVisitor<'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + fn visit_ty(&mut self, ty: &mut Ty<'tcx>, _: TyContext) { *ty = self.tcx.erase_regions(ty); - self.super_ty(ty); } fn visit_region(&mut self, region: &mut ty::Region<'tcx>, _: Location) { @@ -40,10 +43,19 @@ impl MutVisitor<'tcx> for EraseRegionsVisitor<'tcx> { *substs = self.tcx.erase_regions(substs); } - fn visit_statement(&mut self, - statement: &mut Statement<'tcx>, - location: Location) { - self.super_statement(statement, location); + fn process_projection_elem( + &mut self, + elem: &PlaceElem<'tcx>, + ) -> Option> { + if let PlaceElem::Field(field, ty) = elem { + let new_ty = self.tcx.erase_regions(ty); + + if new_ty != *ty { + return Some(PlaceElem::Field(*field, new_ty)); + } + } + + None } } diff --git a/src/librustc_mir/transform/generator.rs b/src/librustc_mir/transform/generator.rs index caf588af85..911901be36 100644 --- a/src/librustc_mir/transform/generator.rs +++ b/src/librustc_mir/transform/generator.rs @@ -58,8 +58,8 @@ use rustc::ty::GeneratorSubsts; use rustc::ty::layout::VariantIdx; use rustc::ty::subst::SubstsRef; use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::indexed_vec::{Idx, IndexVec}; -use rustc_data_structures::bit_set::{BitSet, BitMatrix}; +use rustc_index::vec::{Idx, IndexVec}; +use rustc_index::bit_set::{BitSet, BitMatrix}; use std::borrow::Cow; use std::iter; use std::mem; @@ -74,12 +74,17 @@ use crate::util::liveness; pub struct StateTransform; -struct RenameLocalVisitor { +struct RenameLocalVisitor<'tcx> { from: Local, to: Local, + tcx: TyCtxt<'tcx>, } -impl<'tcx> MutVisitor<'tcx> for RenameLocalVisitor { +impl<'tcx> MutVisitor<'tcx> for RenameLocalVisitor<'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + fn visit_local(&mut self, local: &mut Local, _: PlaceContext, @@ -88,11 +93,29 @@ impl<'tcx> MutVisitor<'tcx> for RenameLocalVisitor { *local = self.to; } } + + fn process_projection_elem( + &mut self, + elem: &PlaceElem<'tcx>, + ) -> Option> { + match elem { + PlaceElem::Index(local) if *local == self.from => { + Some(PlaceElem::Index(self.to)) + } + _ => None, + } + } } -struct DerefArgVisitor; +struct DerefArgVisitor<'tcx> { + tcx: TyCtxt<'tcx>, +} + +impl<'tcx> MutVisitor<'tcx> for DerefArgVisitor<'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } -impl<'tcx> MutVisitor<'tcx> for DerefArgVisitor { fn visit_local(&mut self, local: &mut Local, _: PlaceContext, @@ -107,19 +130,30 @@ impl<'tcx> MutVisitor<'tcx> for DerefArgVisitor { if place.base == PlaceBase::Local(self_arg()) { replace_base(place, Place { base: PlaceBase::Local(self_arg()), - projection: Box::new([ProjectionElem::Deref]), - }); + projection: self.tcx().intern_place_elems(&vec![ProjectionElem::Deref]), + }, self.tcx); } else { - self.super_place(place, context, location); + self.visit_place_base(&mut place.base, context, location); + 
+ for elem in place.projection.iter() { + if let PlaceElem::Index(local) = elem { + assert_ne!(*local, self_arg()); + } + } } } } struct PinArgVisitor<'tcx> { ref_gen_ty: Ty<'tcx>, + tcx: TyCtxt<'tcx>, } impl<'tcx> MutVisitor<'tcx> for PinArgVisitor<'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + fn visit_local(&mut self, local: &mut Local, _: PlaceContext, @@ -127,28 +161,38 @@ impl<'tcx> MutVisitor<'tcx> for PinArgVisitor<'tcx> { assert_ne!(*local, self_arg()); } - fn visit_place(&mut self, - place: &mut Place<'tcx>, - context: PlaceContext, - location: Location) { + fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) { if place.base == PlaceBase::Local(self_arg()) { - replace_base(place, Place { - base: PlaceBase::Local(self_arg()), - projection: Box::new([ProjectionElem::Field(Field::new(0), self.ref_gen_ty)]), - }); + replace_base( + place, + Place { + base: PlaceBase::Local(self_arg()), + projection: self.tcx().intern_place_elems(&vec![ProjectionElem::Field( + Field::new(0), + self.ref_gen_ty, + )]), + }, + self.tcx, + ); } else { - self.super_place(place, context, location); + self.visit_place_base(&mut place.base, context, location); + + for elem in place.projection.iter() { + if let PlaceElem::Index(local) = elem { + assert_ne!(*local, self_arg()); + } + } } } } -fn replace_base(place: &mut Place<'tcx>, new_base: Place<'tcx>) { +fn replace_base<'tcx>(place: &mut Place<'tcx>, new_base: Place<'tcx>, tcx: TyCtxt<'tcx>) { place.base = new_base.base; let mut new_projection = new_base.projection.to_vec(); new_projection.append(&mut place.projection.to_vec()); - place.projection = new_projection.into_boxed_slice(); + place.projection = tcx.intern_place_elems(&new_projection); } fn self_arg() -> Local { @@ -202,13 +246,13 @@ impl TransformVisitor<'tcx> { // Create a Place referencing a generator struct field fn make_field(&self, variant_index: VariantIdx, idx: usize, ty: Ty<'tcx>) -> Place<'tcx> { let self_place = Place::from(self_arg()); - let base = self_place.downcast_unnamed(variant_index); + let base = self.tcx.mk_place_downcast_unnamed(self_place, variant_index); let mut projection = base.projection.to_vec(); projection.push(ProjectionElem::Field(Field::new(idx), ty)); Place { base: base.base, - projection: projection.into_boxed_slice(), + projection: self.tcx.intern_place_elems(&projection), } } @@ -240,6 +284,10 @@ impl TransformVisitor<'tcx> { } impl MutVisitor<'tcx> for TransformVisitor<'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + fn visit_local(&mut self, local: &mut Local, _: PlaceContext, @@ -247,17 +295,25 @@ impl MutVisitor<'tcx> for TransformVisitor<'tcx> { assert_eq!(self.remap.get(local), None); } - fn visit_place(&mut self, - place: &mut Place<'tcx>, - context: PlaceContext, - location: Location) { + fn visit_place( + &mut self, + place: &mut Place<'tcx>, + context: PlaceContext, + location: Location, + ) { if let PlaceBase::Local(l) = place.base { // Replace an Local in the remap with a generator struct access if let Some(&(ty, variant_index, idx)) = self.remap.get(&l) { - replace_base(place, self.make_field(variant_index, idx, ty)); + replace_base(place, self.make_field(variant_index, idx, ty), self.tcx); } } else { - self.super_place(place, context, location); + self.visit_place_base(&mut place.base, context, location); + + for elem in place.projection.iter() { + if let PlaceElem::Index(local) = elem { + assert_ne!(*local, self_arg()); + } + } } } @@ -343,7 +399,7 @@ fn 
make_generator_state_argument_indirect<'tcx>( body.local_decls.raw[1].ty = ref_gen_ty; // Add a deref to accesses of the generator state - DerefArgVisitor.visit_body(body); + DerefArgVisitor { tcx }.visit_body(body); } fn make_generator_state_argument_pinned<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { @@ -358,12 +414,13 @@ fn make_generator_state_argument_pinned<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body body.local_decls.raw[1].ty = pin_ref_gen_ty; // Add the Pin field access to accesses of the generator state - PinArgVisitor { ref_gen_ty }.visit_body(body); + PinArgVisitor { ref_gen_ty, tcx }.visit_body(body); } fn replace_result_variable<'tcx>( ret_ty: Ty<'tcx>, body: &mut Body<'tcx>, + tcx: TyCtxt<'tcx>, ) -> Local { let source_info = source_info(body); let new_ret = LocalDecl { @@ -384,6 +441,7 @@ fn replace_result_variable<'tcx>( RenameLocalVisitor { from: RETURN_PLACE, to: new_ret_local, + tcx, }.visit_body(body); new_ret_local @@ -508,10 +566,7 @@ fn locals_live_across_suspend_points( storage_liveness_map.insert(block, storage_liveness.clone()); requires_storage_cursor.seek(loc); - let mut storage_required = requires_storage_cursor.get().clone(); - - // Mark locals without storage statements as always requiring storage - storage_required.union(&ignored.0); + let storage_required = requires_storage_cursor.get().clone(); // Locals live are live at this point only if they are used across // suspension points (the `liveness` variable) @@ -711,7 +766,7 @@ fn compute_layout<'tcx>( // Erase regions from the types passed in from typeck so we can compare them with // MIR types let allowed_upvars = tcx.erase_regions(upvars); - let allowed = match interior.sty { + let allowed = match interior.kind { ty::GeneratorWitness(s) => tcx.erase_late_bound_regions(&s), _ => bug!(), }; @@ -835,17 +890,24 @@ fn elaborate_generator_drops<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, body: &mut for (block, block_data) in body.basic_blocks().iter_enumerated() { let (target, unwind, source_info) = match block_data.terminator() { - &Terminator { + Terminator { source_info, kind: TerminatorKind::Drop { - location: Place { - base: PlaceBase::Local(local), - projection: box [], - }, + location, target, unwind } - } if local == gen => (target, unwind, source_info), + } => { + if let Some(local) = location.as_local() { + if local == gen { + (target, unwind, source_info) + } else { + continue; + } + } else { + continue; + } + } _ => continue, }; let unwind = if block_data.is_cleanup { @@ -855,10 +917,10 @@ fn elaborate_generator_drops<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, body: &mut }; elaborate_drop( &mut elaborator, - source_info, + *source_info, &Place::from(gen), (), - target, + *target, unwind, block, ); @@ -1124,8 +1186,9 @@ impl<'tcx> MirPass<'tcx> for StateTransform { let gen_ty = body.local_decls.raw[1].ty; // Get the interior types and substs which typeck computed - let (upvars, interior, discr_ty, movable) = match gen_ty.sty { + let (upvars, interior, discr_ty, movable) = match gen_ty.kind { ty::Generator(_, substs, movability) => { + let substs = substs.as_generator(); (substs.upvar_tys(def_id, tcx).collect(), substs.witness(def_id, tcx), substs.discr_ty(tcx), @@ -1145,7 +1208,7 @@ impl<'tcx> MirPass<'tcx> for StateTransform { // We rename RETURN_PLACE which has type mir.return_ty to new_ret_local // RETURN_PLACE then is a fresh unused local with type ret_ty. 
- let new_ret_local = replace_result_variable(ret_ty, body); + let new_ret_local = replace_result_variable(ret_ty, body, tcx); // Extract locals which are live across suspension point into `layout` // `remap` gives a mapping from local indices onto generator struct indices diff --git a/src/librustc_mir/transform/inline.rs b/src/librustc_mir/transform/inline.rs index 5ad026dc14..5a34e3f471 100644 --- a/src/librustc_mir/transform/inline.rs +++ b/src/librustc_mir/transform/inline.rs @@ -3,8 +3,8 @@ use rustc::hir::CodegenFnAttrFlags; use rustc::hir::def_id::DefId; -use rustc_data_structures::bit_set::BitSet; -use rustc_data_structures::indexed_vec::{Idx, IndexVec}; +use rustc_index::bit_set::BitSet; +use rustc_index::vec::{Idx, IndexVec}; use rustc::mir::*; use rustc::mir::visit::*; @@ -177,7 +177,7 @@ impl Inliner<'tcx> { // Only consider direct calls to functions let terminator = bb_data.terminator(); if let TerminatorKind::Call { func: ref op, .. } = terminator.kind { - if let ty::FnDef(callee_def_id, substs) = op.ty(caller_body, self.tcx).sty { + if let ty::FnDef(callee_def_id, substs) = op.ty(caller_body, self.tcx).kind { let instance = Instance::resolve(self.tcx, param_env, callee_def_id, @@ -328,7 +328,7 @@ impl Inliner<'tcx> { } TerminatorKind::Call {func: Operand::Constant(ref f), .. } => { - if let ty::FnDef(def_id, _) = f.literal.ty.sty { + if let ty::FnDef(def_id, _) = f.literal.ty.kind { // Don't give intrinsics the extra penalty for calls let f = tcx.fn_sig(def_id); if f.abi() == Abi::RustIntrinsic || f.abi() == Abi::PlatformIntrinsic { @@ -461,7 +461,7 @@ impl Inliner<'tcx> { }; caller_body[callsite.bb] .statements.push(stmt); - tmp.deref() + self.tcx.mk_place_deref(tmp) } else { destination.0 }; @@ -481,6 +481,7 @@ impl Inliner<'tcx> { return_block, cleanup_block: cleanup, in_cleanup_block: false, + tcx: self.tcx, }; @@ -546,7 +547,7 @@ impl Inliner<'tcx> { assert!(args.next().is_none()); let tuple = Place::from(tuple); - let tuple_tys = if let ty::Tuple(s) = tuple.ty(caller_body, tcx).ty.sty { + let tuple_tys = if let ty::Tuple(s) = tuple.ty(caller_body, tcx).ty.kind { s } else { bug!("Closure arguments are not passed as a tuple"); @@ -559,7 +560,8 @@ impl Inliner<'tcx> { let tuple_tmp_args = tuple_tys.iter().enumerate().map(|(i, ty)| { // This is e.g., `tuple_tmp.0` in our example above. - let tuple_field = Operand::Move(tuple.clone().field( + let tuple_field = Operand::Move(tcx.mk_place_field( + tuple.clone(), Field::new(i), ty.expect_ty(), )); @@ -587,13 +589,12 @@ impl Inliner<'tcx> { // FIXME: Analysis of the usage of the arguments to avoid // unnecessary temporaries. 
- if let Operand::Move(Place { - base: PlaceBase::Local(local), - projection: box [], - }) = arg { - if caller_body.local_kind(local) == LocalKind::Temp { - // Reuse the operand if it's a temporary already - return local; + if let Operand::Move(place) = &arg { + if let Some(local) = place.as_local() { + if caller_body.local_kind(local) == LocalKind::Temp { + // Reuse the operand if it's a temporary already + return local; + } } } @@ -639,6 +640,7 @@ struct Integrator<'a, 'tcx> { return_block: BasicBlock, cleanup_block: Option, in_cleanup_block: bool, + tcx: TyCtxt<'tcx>, } impl<'a, 'tcx> Integrator<'a, 'tcx> { @@ -647,50 +649,67 @@ impl<'a, 'tcx> Integrator<'a, 'tcx> { debug!("updating target `{:?}`, new: `{:?}`", tgt, new); new } + + fn make_integrate_local(&self, local: &Local) -> Local { + if *local == RETURN_PLACE { + match self.destination.as_local() { + Some(l) => return l, + ref place => bug!("Return place is {:?}, not local", place), + } + } + + let idx = local.index() - 1; + if idx < self.args.len() { + return self.args[idx]; + } + + self.local_map[Local::new(idx - self.args.len())] + } } impl<'a, 'tcx> MutVisitor<'tcx> for Integrator<'a, 'tcx> { - fn visit_local(&mut self, - local: &mut Local, - _ctxt: PlaceContext, - _location: Location) { - if *local == RETURN_PLACE { - match self.destination { - Place { - base: PlaceBase::Local(l), - projection: box [], - } => { - *local = l; - return; - }, - ref place => bug!("Return place is {:?}, not local", place) - } - } - let idx = local.index() - 1; - if idx < self.args.len() { - *local = self.args[idx]; - return; - } - *local = self.local_map[Local::new(idx - self.args.len())]; + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx } - fn visit_place(&mut self, - place: &mut Place<'tcx>, - _ctxt: PlaceContext, - _location: Location) { + fn visit_local( + &mut self, + local: &mut Local, + _ctxt: PlaceContext, + _location: Location, + ) { + *local = self.make_integrate_local(local); + } - match place { - Place { - base: PlaceBase::Local(RETURN_PLACE), - projection: box [], - } => { - // Return pointer; update the place itself - *place = self.destination.clone(); - }, - _ => self.super_place(place, _ctxt, _location) + fn visit_place( + &mut self, + place: &mut Place<'tcx>, + context: PlaceContext, + location: Location, + ) { + if let Some(RETURN_PLACE) = place.as_local() { + // Return pointer; update the place itself + *place = self.destination.clone(); + } else { + self.super_place(place, context, location); } } + fn process_projection_elem( + &mut self, + elem: &PlaceElem<'tcx>, + ) -> Option> { + if let PlaceElem::Index(local) = elem { + let new_local = self.make_integrate_local(local); + + if new_local != *local { + return Some(PlaceElem::Index(new_local)) + } + } + + None + } + fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) { self.in_cleanup_block = data.is_cleanup; self.super_basic_block_data(block, data); diff --git a/src/librustc_mir/transform/instcombine.rs b/src/librustc_mir/transform/instcombine.rs index 0e04e63af4..a567ed668b 100644 --- a/src/librustc_mir/transform/instcombine.rs +++ b/src/librustc_mir/transform/instcombine.rs @@ -1,11 +1,12 @@ //! Performs various peephole optimizations. 
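// Illustrative sketch (not part of this patch; `demo` is a hypothetical function):
// the `&*` peephole that `InstCombine` looks for roughly corresponds, at the source
// level, to reborrowing through an existing reference, which the pass rewrites into
// a plain copy of that reference:
//
//     fn demo() {
//         let x = 42;
//         let r: &i32 = &x;
//         let via_reborrow: &i32 = &*r; // the `&*`-of-a-region-pointer pattern found below
//         let via_copy: &i32 = r;       // what the rvalue is effectively rewritten to
//         assert_eq!(*via_reborrow, *via_copy);
//     }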
-use rustc::mir::{Constant, Location, Place, PlaceBase, Body, Operand, ProjectionElem, Rvalue, - Local}; +use rustc::mir::{ + Constant, Location, Place, PlaceBase, PlaceRef, Body, Operand, ProjectionElem, Rvalue, Local +}; use rustc::mir::visit::{MutVisitor, Visitor}; use rustc::ty::{self, TyCtxt}; use rustc::util::nodemap::{FxHashMap, FxHashSet}; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::vec::Idx; use std::mem; use crate::transform::{MirPass, MirSource}; @@ -28,32 +29,33 @@ impl<'tcx> MirPass<'tcx> for InstCombine { }; // Then carry out those optimizations. - MutVisitor::visit_body(&mut InstCombineVisitor { optimizations }, body); + MutVisitor::visit_body(&mut InstCombineVisitor { optimizations, tcx }, body); } } pub struct InstCombineVisitor<'tcx> { optimizations: OptimizationList<'tcx>, + tcx: TyCtxt<'tcx>, } impl<'tcx> MutVisitor<'tcx> for InstCombineVisitor<'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + fn visit_rvalue(&mut self, rvalue: &mut Rvalue<'tcx>, location: Location) { if self.optimizations.and_stars.remove(&location) { debug!("replacing `&*`: {:?}", rvalue); - let new_place = match *rvalue { - Rvalue::Ref(_, _, Place { - ref mut base, - projection: ref mut projection @ box [.., _], - }) => { - if let box [proj_l @ .., proj_r] = projection { - let place = Place { - // Replace with dummy - base: mem::replace(base, PlaceBase::Local(Local::new(0))), - projection: proj_l.to_vec().into_boxed_slice(), - }; - *projection = vec![proj_r.clone()].into_boxed_slice(); + let new_place = match rvalue { + Rvalue::Ref(_, _, place) => { + if let &[ref proj_l @ .., proj_r] = place.projection.as_ref() { + place.projection = self.tcx().intern_place_elems(&vec![proj_r.clone()]); - place + Place { + // Replace with dummy + base: mem::replace(&mut place.base, PlaceBase::Local(Local::new(0))), + projection: self.tcx().intern_place_elems(proj_l), + } } else { unreachable!(); } @@ -91,18 +93,20 @@ impl OptimizationFinder<'b, 'tcx> { impl Visitor<'tcx> for OptimizationFinder<'b, 'tcx> { fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) { - if let Rvalue::Ref(_, _, Place { - base, - projection: box [proj_base @ .., ProjectionElem::Deref], - }) = rvalue { - if Place::ty_from(base, proj_base, self.body, self.tcx).ty.is_region_ptr() { - self.optimizations.and_stars.insert(location); + if let Rvalue::Ref(_, _, place) = rvalue { + if let PlaceRef { + base, + projection: &[ref proj_base @ .., ProjectionElem::Deref], + } = place.as_ref() { + if Place::ty_from(base, proj_base, self.body, self.tcx).ty.is_region_ptr() { + self.optimizations.and_stars.insert(location); + } } } if let Rvalue::Len(ref place) = *rvalue { let place_ty = place.ty(&self.body.local_decls, self.tcx).ty; - if let ty::Array(_, len) = place_ty.sty { + if let ty::Array(_, len) = place_ty.kind { let span = self.body.source_info(location).span; let constant = Constant { span, literal: len, user_ty: None }; self.optimizations.arrays_lengths.insert(location, constant); diff --git a/src/librustc_mir/transform/mod.rs b/src/librustc_mir/transform/mod.rs index ac291c2996..dbe6c78459 100644 --- a/src/librustc_mir/transform/mod.rs +++ b/src/librustc_mir/transform/mod.rs @@ -1,5 +1,5 @@ use crate::{build, shim}; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc_index::vec::IndexVec; use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE}; use rustc::mir::{Body, MirPhase, Promoted}; use rustc::ty::{TyCtxt, InstanceDef}; @@ -15,6 +15,7 @@ use syntax_pos::Span; pub mod add_retag; pub mod 
add_moves_for_packed_drops; pub mod cleanup_post_borrowck; +pub mod check_consts; pub mod check_unsafety; pub mod simplify_branches; pub mod simplify; @@ -227,7 +228,7 @@ fn run_optimization_passes<'tcx>( ) { run_passes(tcx, body, InstanceDef::Item(def_id), promoted, MirPhase::Optimized, &[ // Remove all things only needed by analysis - &no_landing_pads::NoLandingPads, + &no_landing_pads::NoLandingPads::new(tcx), &simplify_branches::SimplifyBranches::new("initial"), &remove_noop_landing_pads::RemoveNoopLandingPads, &cleanup_post_borrowck::CleanupNonCodegenStatements, @@ -237,7 +238,7 @@ fn run_optimization_passes<'tcx>( // These next passes must be executed together &add_call_guards::CriticalCallEdges, &elaborate_drops::ElaborateDrops, - &no_landing_pads::NoLandingPads, + &no_landing_pads::NoLandingPads::new(tcx), // AddMovesForPackedDrops needs to run after drop // elaboration. &add_moves_for_packed_drops::AddMovesForPackedDrops, @@ -256,7 +257,7 @@ fn run_optimization_passes<'tcx>( // Optimizations begin. - &uniform_array_move_out::RestoreSubsliceArrayMoveOut, + &uniform_array_move_out::RestoreSubsliceArrayMoveOut::new(tcx), &inline::Inline, // Lowering generator control-flow and variables @@ -291,10 +292,6 @@ fn optimized_mir(tcx: TyCtxt<'_>, def_id: DefId) -> &Body<'_> { // execute before we can steal. tcx.ensure().mir_borrowck(def_id); - if tcx.use_ast_borrowck() { - tcx.ensure().borrowck(def_id); - } - let (body, _) = tcx.mir_validated(def_id); let mut body = body.steal(); run_optimization_passes(tcx, &mut body, def_id, None); diff --git a/src/librustc_mir/transform/no_landing_pads.rs b/src/librustc_mir/transform/no_landing_pads.rs index 762bb5d448..fbd14d9ef6 100644 --- a/src/librustc_mir/transform/no_landing_pads.rs +++ b/src/librustc_mir/transform/no_landing_pads.rs @@ -6,9 +6,17 @@ use rustc::mir::*; use rustc::mir::visit::MutVisitor; use crate::transform::{MirPass, MirSource}; -pub struct NoLandingPads; +pub struct NoLandingPads<'tcx> { + tcx: TyCtxt<'tcx>, +} -impl<'tcx> MirPass<'tcx> for NoLandingPads { +impl<'tcx> NoLandingPads<'tcx> { + pub fn new(tcx: TyCtxt<'tcx>) -> Self { + NoLandingPads { tcx } + } +} + +impl<'tcx> MirPass<'tcx> for NoLandingPads<'tcx> { fn run_pass(&self, tcx: TyCtxt<'tcx>, _: MirSource<'tcx>, body: &mut Body<'tcx>) { no_landing_pads(tcx, body) } @@ -16,11 +24,15 @@ impl<'tcx> MirPass<'tcx> for NoLandingPads { pub fn no_landing_pads<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { if tcx.sess.no_landing_pads() { - NoLandingPads.visit_body(body); + NoLandingPads::new(tcx).visit_body(body); } } -impl<'tcx> MutVisitor<'tcx> for NoLandingPads { +impl<'tcx> MutVisitor<'tcx> for NoLandingPads<'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + fn visit_terminator_kind(&mut self, kind: &mut TerminatorKind<'tcx>, location: Location) { diff --git a/src/librustc_mir/transform/promote_consts.rs b/src/librustc_mir/transform/promote_consts.rs index 7d1b96b8be..83f3aafc55 100644 --- a/src/librustc_mir/transform/promote_consts.rs +++ b/src/librustc_mir/transform/promote_consts.rs @@ -14,16 +14,23 @@ use rustc::hir::def_id::DefId; use rustc::mir::*; +use rustc::mir::interpret::ConstValue; use rustc::mir::visit::{PlaceContext, MutatingUseContext, MutVisitor, Visitor}; use rustc::mir::traversal::ReversePostorder; +use rustc::ty::{self, List, TyCtxt}; use rustc::ty::subst::InternalSubsts; -use rustc::ty::TyCtxt; -use syntax_pos::Span; +use rustc::ty::cast::CastTy; +use syntax::ast::LitKind; +use syntax::symbol::sym; +use syntax_pos::{Span, DUMMY_SP}; -use 
rustc_data_structures::indexed_vec::{IndexVec, Idx}; +use rustc_index::vec::{IndexVec, Idx}; +use rustc_target::spec::abi::Abi; use std::{iter, mem, usize}; +use crate::transform::check_consts::{qualifs, Item, ConstKind, is_lang_panic_fn}; + /// State of a temporary during collection and promotion. #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum TempState { @@ -57,7 +64,7 @@ impl TempState { /// A "root candidate" for promotion, which will become the /// returned value in a promoted MIR, unless it's a subset /// of a larger candidate. -#[derive(Debug)] +#[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum Candidate { /// Borrow of a constant temporary. Ref(Location), @@ -73,13 +80,39 @@ pub enum Candidate { Argument { bb: BasicBlock, index: usize }, } -struct TempCollector<'tcx> { - temps: IndexVec, - span: Span, - body: &'tcx Body<'tcx>, +impl Candidate { + /// Returns `true` if we should use the "explicit" rules for promotability for this `Candidate`. + fn forces_explicit_promotion(&self) -> bool { + match self { + Candidate::Ref(_) | + Candidate::Repeat(_) => false, + Candidate::Argument { .. } => true, + } + } } -impl<'tcx> Visitor<'tcx> for TempCollector<'tcx> { +fn args_required_const(tcx: TyCtxt<'_>, def_id: DefId) -> Option> { + let attrs = tcx.get_attrs(def_id); + let attr = attrs.iter().find(|a| a.check_name(sym::rustc_args_required_const))?; + let mut ret = vec![]; + for meta in attr.meta_item_list()? { + match meta.literal()?.kind { + LitKind::Int(a, _) => { ret.push(a as usize); } + _ => return None, + } + } + Some(ret) +} + +struct Collector<'a, 'tcx> { + tcx: TyCtxt<'tcx>, + body: &'a Body<'tcx>, + temps: IndexVec, + candidates: Vec, + span: Span, +} + +impl<'tcx> Visitor<'tcx> for Collector<'_, 'tcx> { fn visit_local(&mut self, &index: &Local, context: PlaceContext, @@ -134,22 +167,594 @@ impl<'tcx> Visitor<'tcx> for TempCollector<'tcx> { *temp = TempState::Unpromotable; } + fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) { + self.super_rvalue(rvalue, location); + + match *rvalue { + Rvalue::Ref(..) => { + self.candidates.push(Candidate::Ref(location)); + } + Rvalue::Repeat(..) if self.tcx.features().const_in_array_repeat_expressions => { + // FIXME(#49147) only promote the element when it isn't `Copy` + // (so that code that can copy it at runtime is unaffected). + self.candidates.push(Candidate::Repeat(location)); + } + _ => {} + } + } + + fn visit_terminator_kind(&mut self, + kind: &TerminatorKind<'tcx>, + location: Location) { + self.super_terminator_kind(kind, location); + + if let TerminatorKind::Call { ref func, .. } = *kind { + if let ty::FnDef(def_id, _) = func.ty(self.body, self.tcx).kind { + let fn_sig = self.tcx.fn_sig(def_id); + if let Abi::RustIntrinsic | Abi::PlatformIntrinsic = fn_sig.abi() { + let name = self.tcx.item_name(def_id); + // FIXME(eddyb) use `#[rustc_args_required_const(2)]` for shuffles. 
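// Illustrative sketch (not part of this patch; `demo` is a hypothetical function):
// the candidates gathered by the collector correspond roughly to these source-level
// shapes. `Candidate::Argument` comes from `simd_shuffle*`/`#[rustc_args_required_const]`
// call arguments (handled just below) and has no direct stable-source equivalent.
//
//     fn demo() {
//         // Candidate::Ref: a borrow of a temporary holding a constant value;
//         // promotion gives the temporary a static location, so the reference
//         // is allowed to have the `'static` lifetime.
//         let p: &'static i32 = &42;
//
//         // Candidate::Repeat: an array repeat expression. With the
//         // `const_in_array_repeat_expressions` feature, a non-`Copy` element
//         // may be promoted instead of being required to implement `Copy`.
//         let zeros = [0u8; 16];            // accepted today: `0u8` is `Copy`
//         // let v = [Vec::<u8>::new(); 4]; // needs the feature gate: `Vec` is not `Copy`
//
//         let _ = (p, zeros);
//     }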
+ if name.as_str().starts_with("simd_shuffle") { + self.candidates.push(Candidate::Argument { + bb: location.block, + index: 2, + }); + } + } + + if let Some(constant_args) = args_required_const(self.tcx, def_id) { + for index in constant_args { + self.candidates.push(Candidate::Argument { bb: location.block, index }); + } + } + } + } + } + fn visit_source_info(&mut self, source_info: &SourceInfo) { self.span = source_info.span; } } -pub fn collect_temps(body: &Body<'_>, - rpo: &mut ReversePostorder<'_, '_>) -> IndexVec { - let mut collector = TempCollector { - temps: IndexVec::from_elem(TempState::Undefined, &body.local_decls), - span: body.span, +pub fn collect_temps_and_candidates( + tcx: TyCtxt<'tcx>, + body: &Body<'tcx>, + rpo: &mut ReversePostorder<'_, 'tcx>, +) -> (IndexVec, Vec) { + let mut collector = Collector { + tcx, body, + temps: IndexVec::from_elem(TempState::Undefined, &body.local_decls), + candidates: vec![], + span: body.span, }; for (bb, data) in rpo { collector.visit_basic_block_data(bb, data); } - collector.temps + (collector.temps, collector.candidates) +} + +/// Checks whether locals that appear in a promotion context (`Candidate`) are actually promotable. +/// +/// This wraps an `Item`, and has access to all fields of that `Item` via `Deref` coercion. +struct Validator<'a, 'tcx> { + item: Item<'a, 'tcx>, + temps: &'a IndexVec, + + /// Explicit promotion happens e.g. for constant arguments declared via + /// `rustc_args_required_const`. + /// Implicit promotion has almost the same rules, except that disallows `const fn` + /// except for those marked `#[rustc_promotable]`. This is to avoid changing + /// a legitimate run-time operation into a failing compile-time operation + /// e.g. due to addresses being compared inside the function. + explicit: bool, +} + +impl std::ops::Deref for Validator<'a, 'tcx> { + type Target = Item<'a, 'tcx>; + + fn deref(&self) -> &Self::Target { + &self.item + } +} + +struct Unpromotable; + +impl<'tcx> Validator<'_, 'tcx> { + fn validate_candidate(&self, candidate: Candidate) -> Result<(), Unpromotable> { + match candidate { + Candidate::Ref(loc) => { + assert!(!self.explicit); + + let statement = &self.body[loc.block].statements[loc.statement_index]; + match &statement.kind { + StatementKind::Assign(box(_, Rvalue::Ref(_, kind, place))) => { + match kind { + BorrowKind::Shared | BorrowKind::Mut { .. } => {} + + // FIXME(eddyb) these aren't promoted here but *could* + // be promoted as part of a larger value because + // `validate_rvalue` doesn't check them, need to + // figure out what is the intended behavior. + BorrowKind::Shallow | BorrowKind::Unique => return Err(Unpromotable), + } + + // We can only promote interior borrows of promotable temps (non-temps + // don't get promoted anyway). + let base = match place.base { + PlaceBase::Local(local) => local, + _ => return Err(Unpromotable), + }; + self.validate_local(base)?; + + if place.projection.contains(&ProjectionElem::Deref) { + return Err(Unpromotable); + } + + let mut has_mut_interior = + self.qualif_local::(base); + // HACK(eddyb) this should compute the same thing as + // `::in_projection` from + // `check_consts::qualifs` but without recursion. + if has_mut_interior { + // This allows borrowing fields which don't have + // `HasMutInterior`, from a type that does, e.g.: + // `let _: &'static _ = &(Cell::new(1), 2).1;` + let mut place_projection = &place.projection[..]; + // FIXME(eddyb) use a forward loop instead of a reverse one. 
+ while let [proj_base @ .., elem] = place_projection { + // FIXME(eddyb) this is probably excessive, with + // the exception of `union` member accesses. + let ty = + Place::ty_from(&place.base, proj_base, self.body, self.tcx) + .projection_ty(self.tcx, elem) + .ty; + if ty.is_freeze(self.tcx, self.param_env, DUMMY_SP) { + has_mut_interior = false; + break; + } + + place_projection = proj_base; + } + } + + // FIXME(eddyb) this duplicates part of `validate_rvalue`. + if has_mut_interior { + return Err(Unpromotable); + } + if self.qualif_local::(base) { + return Err(Unpromotable); + } + + if let BorrowKind::Mut { .. } = kind { + let ty = place.ty(self.body, self.tcx).ty; + + // In theory, any zero-sized value could be borrowed + // mutably without consequences. However, only &mut [] + // is allowed right now, and only in functions. + if self.const_kind == Some(ConstKind::StaticMut) { + // Inside a `static mut`, &mut [...] is also allowed. + match ty.kind { + ty::Array(..) | ty::Slice(_) => {} + _ => return Err(Unpromotable), + } + } else if let ty::Array(_, len) = ty.kind { + // FIXME(eddyb) the `self.is_non_const_fn` condition + // seems unnecessary, given that this is merely a ZST. + match len.try_eval_usize(self.tcx, self.param_env) { + Some(0) if self.const_kind.is_none() => {}, + _ => return Err(Unpromotable), + } + } else { + return Err(Unpromotable); + } + } + + Ok(()) + } + _ => bug!() + } + } + Candidate::Repeat(loc) => { + assert!(!self.explicit); + + let statement = &self.body[loc.block].statements[loc.statement_index]; + match &statement.kind { + StatementKind::Assign(box(_, Rvalue::Repeat(ref operand, _))) => { + if !self.tcx.features().const_in_array_repeat_expressions { + return Err(Unpromotable); + } + + self.validate_operand(operand) + } + _ => bug!() + } + }, + Candidate::Argument { bb, index } => { + assert!(self.explicit); + + let terminator = self.body[bb].terminator(); + match &terminator.kind { + TerminatorKind::Call { args, .. } => { + self.validate_operand(&args[index]) + } + _ => bug!() + } + } + } + } + + // FIXME(eddyb) maybe cache this? + fn qualif_local(&self, local: Local) -> bool { + let per_local = &|l| self.qualif_local::(l); + + if let TempState::Defined { location: loc, .. } = self.temps[local] { + let num_stmts = self.body[loc.block].statements.len(); + + if loc.statement_index < num_stmts { + let statement = &self.body[loc.block].statements[loc.statement_index]; + match &statement.kind { + StatementKind::Assign(box(_, rhs)) => { + Q::in_rvalue(&self.item, per_local, rhs) + } + _ => { + span_bug!(statement.source_info.span, "{:?} is not an assignment", + statement); + } + } + } else { + let terminator = self.body[loc.block].terminator(); + match &terminator.kind { + TerminatorKind::Call { func, args, .. } => { + let return_ty = self.body.local_decls[local].ty; + Q::in_call(&self.item, per_local, func, args, return_ty) + } + kind => { + span_bug!(terminator.source_info.span, "{:?} not promotable", kind); + } + } + } + } else { + let span = self.body.local_decls[local].source_info.span; + span_bug!(span, "{:?} not promotable, qualif_local shouldn't have been called", local); + } + } + + // FIXME(eddyb) maybe cache this? + fn validate_local(&self, local: Local) -> Result<(), Unpromotable> { + if let TempState::Defined { location: loc, .. 
} = self.temps[local] { + let num_stmts = self.body[loc.block].statements.len(); + + if loc.statement_index < num_stmts { + let statement = &self.body[loc.block].statements[loc.statement_index]; + match &statement.kind { + StatementKind::Assign(box(_, rhs)) => self.validate_rvalue(rhs), + _ => { + span_bug!(statement.source_info.span, "{:?} is not an assignment", + statement); + } + } + } else { + let terminator = self.body[loc.block].terminator(); + match &terminator.kind { + TerminatorKind::Call { func, args, .. } => self.validate_call(func, args), + kind => { + span_bug!(terminator.source_info.span, "{:?} not promotable", kind); + } + } + } + } else { + Err(Unpromotable) + } + } + + fn validate_place(&self, place: PlaceRef<'_, 'tcx>) -> Result<(), Unpromotable> { + match place { + PlaceRef { + base: PlaceBase::Local(local), + projection: [], + } => self.validate_local(*local), + PlaceRef { + base: PlaceBase::Static(box Static { + kind: StaticKind::Promoted { .. }, + .. + }), + projection: [], + } => bug!("qualifying already promoted MIR"), + PlaceRef { + base: PlaceBase::Static(box Static { + kind: StaticKind::Static, + def_id, + .. + }), + projection: [], + } => { + // Only allow statics (not consts) to refer to other statics. + // FIXME(eddyb) does this matter at all for promotion? + let is_static = self.const_kind.map_or(false, |k| k.is_static()); + if !is_static { + return Err(Unpromotable); + } + + let is_thread_local = self.tcx.has_attr(*def_id, sym::thread_local); + if is_thread_local { + return Err(Unpromotable); + } + + Ok(()) + } + PlaceRef { + base: _, + projection: [proj_base @ .., elem], + } => { + match *elem { + ProjectionElem::Deref | + ProjectionElem::Downcast(..) => return Err(Unpromotable), + + ProjectionElem::ConstantIndex {..} | + ProjectionElem::Subslice {..} => {} + + ProjectionElem::Index(local) => { + self.validate_local(local)?; + } + + ProjectionElem::Field(..) => { + if self.const_kind.is_none() { + let base_ty = + Place::ty_from(place.base, proj_base, self.body, self.tcx).ty; + if let Some(def) = base_ty.ty_adt_def() { + // No promotion of union field accesses. + if def.is_union() { + return Err(Unpromotable); + } + } + } + } + } + + self.validate_place(PlaceRef { + base: place.base, + projection: proj_base, + }) + } + } + } + + fn validate_operand(&self, operand: &Operand<'tcx>) -> Result<(), Unpromotable> { + match operand { + Operand::Copy(place) | + Operand::Move(place) => self.validate_place(place.as_ref()), + + Operand::Constant(constant) => { + if let ConstValue::Unevaluated(def_id, _) = constant.literal.val { + if self.tcx.trait_of_item(def_id).is_some() { + // Don't peek inside trait associated constants. + // (see below what we do for other consts, for now) + } else { + // HACK(eddyb) ensure that errors propagate correctly. + // FIXME(eddyb) remove this once the old promotion logic + // is gone - we can always promote constants even if they + // fail to pass const-checking, as compilation would've + // errored independently and promotion can't change that. 
+ let (bits, _) = self.tcx.at(constant.span).mir_const_qualif(def_id); + if bits == super::qualify_consts::QUALIF_ERROR_BIT { + self.tcx.sess.delay_span_bug( + constant.span, + "promote_consts: MIR had errors", + ); + return Err(Unpromotable); + } + } + } + + Ok(()) + } + } + } + + fn validate_rvalue(&self, rvalue: &Rvalue<'tcx>) -> Result<(), Unpromotable> { + match *rvalue { + Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) if self.const_kind.is_none() => { + let operand_ty = operand.ty(self.body, self.tcx); + let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast"); + let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast"); + match (cast_in, cast_out) { + (CastTy::Ptr(_), CastTy::Int(_)) | + (CastTy::FnPtr, CastTy::Int(_)) => { + // in normal functions, mark such casts as not promotable + return Err(Unpromotable); + } + _ => {} + } + } + + Rvalue::BinaryOp(op, ref lhs, _) if self.const_kind.is_none() => { + if let ty::RawPtr(_) | ty::FnPtr(..) = lhs.ty(self.body, self.tcx).kind { + assert!(op == BinOp::Eq || op == BinOp::Ne || + op == BinOp::Le || op == BinOp::Lt || + op == BinOp::Ge || op == BinOp::Gt || + op == BinOp::Offset); + + // raw pointer operations are not allowed inside promoteds + return Err(Unpromotable); + } + } + + Rvalue::NullaryOp(NullOp::Box, _) => return Err(Unpromotable), + + _ => {} + } + + match rvalue { + Rvalue::NullaryOp(..) => Ok(()), + + Rvalue::Discriminant(place) | + Rvalue::Len(place) => self.validate_place(place.as_ref()), + + Rvalue::Use(operand) | + Rvalue::Repeat(operand, _) | + Rvalue::UnaryOp(_, operand) | + Rvalue::Cast(_, operand, _) => self.validate_operand(operand), + + Rvalue::BinaryOp(_, lhs, rhs) | + Rvalue::CheckedBinaryOp(_, lhs, rhs) => { + self.validate_operand(lhs)?; + self.validate_operand(rhs) + } + + Rvalue::Ref(_, kind, place) => { + if let BorrowKind::Mut { .. } = kind { + let ty = place.ty(self.body, self.tcx).ty; + + // In theory, any zero-sized value could be borrowed + // mutably without consequences. However, only &mut [] + // is allowed right now, and only in functions. + if self.const_kind == Some(ConstKind::StaticMut) { + // Inside a `static mut`, &mut [...] is also allowed. + match ty.kind { + ty::Array(..) | ty::Slice(_) => {} + _ => return Err(Unpromotable), + } + } else if let ty::Array(_, len) = ty.kind { + // FIXME(eddyb): We only return `Unpromotable` for `&mut []` inside a + // const context which seems unnecessary given that this is merely a ZST. + match len.try_eval_usize(self.tcx, self.param_env) { + Some(0) if self.const_kind.is_none() => {}, + _ => return Err(Unpromotable), + } + } else { + return Err(Unpromotable); + } + } + + // Special-case reborrows to be more like a copy of the reference. + let mut place = place.as_ref(); + if let [proj_base @ .., ProjectionElem::Deref] = &place.projection { + let base_ty = + Place::ty_from(&place.base, proj_base, self.body, self.tcx).ty; + if let ty::Ref(..) = base_ty.kind { + place = PlaceRef { + base: &place.base, + projection: proj_base, + }; + } + } + + self.validate_place(place)?; + + // HACK(eddyb) this should compute the same thing as + // `::in_projection` from + // `check_consts::qualifs` but without recursion. + let mut has_mut_interior = match place.base { + PlaceBase::Local(local) => { + self.qualif_local::(*local) + } + PlaceBase::Static(_) => false, + }; + if has_mut_interior { + let mut place_projection = place.projection; + // FIXME(eddyb) use a forward loop instead of a reverse one. 
+ while let [proj_base @ .., elem] = place_projection { + // FIXME(eddyb) this is probably excessive, with + // the exception of `union` member accesses. + let ty = Place::ty_from(place.base, proj_base, self.body, self.tcx) + .projection_ty(self.tcx, elem) + .ty; + if ty.is_freeze(self.tcx, self.param_env, DUMMY_SP) { + has_mut_interior = false; + break; + } + + place_projection = proj_base; + } + } + if has_mut_interior { + return Err(Unpromotable); + } + + Ok(()) + } + + Rvalue::Aggregate(_, ref operands) => { + for o in operands { + self.validate_operand(o)?; + } + + Ok(()) + } + } + } + + fn validate_call( + &self, + callee: &Operand<'tcx>, + args: &[Operand<'tcx>], + ) -> Result<(), Unpromotable> { + let fn_ty = callee.ty(self.body, self.tcx); + + if !self.explicit && self.const_kind.is_none() { + if let ty::FnDef(def_id, _) = fn_ty.kind { + // Never promote runtime `const fn` calls of + // functions without `#[rustc_promotable]`. + if !self.tcx.is_promotable_const_fn(def_id) { + return Err(Unpromotable); + } + } + } + + let is_const_fn = match fn_ty.kind { + ty::FnDef(def_id, _) => { + self.tcx.is_const_fn(def_id) || + self.tcx.is_unstable_const_fn(def_id).is_some() || + is_lang_panic_fn(self.tcx, self.def_id) + } + _ => false, + }; + if !is_const_fn { + return Err(Unpromotable); + } + + self.validate_operand(callee)?; + for arg in args { + self.validate_operand(arg)?; + } + + Ok(()) + } +} + +// FIXME(eddyb) remove the differences for promotability in `static`, `const`, `const fn`. +pub fn validate_candidates( + tcx: TyCtxt<'tcx>, + body: &Body<'tcx>, + def_id: DefId, + temps: &IndexVec, + candidates: &[Candidate], +) -> Vec { + let mut validator = Validator { + item: Item::new(tcx, def_id, body), + temps, + explicit: false, + }; + + candidates.iter().copied().filter(|&candidate| { + validator.explicit = candidate.forces_explicit_promotion(); + + // FIXME(eddyb) also emit the errors for shuffle indices + // and `#[rustc_args_required_const]` arguments here. + + let is_promotable = validator.validate_candidate(candidate).is_ok(); + match candidate { + Candidate::Argument { bb, index } if !is_promotable => { + let span = body[bb].terminator().source_info.span; + let msg = format!("argument {} is required to be a constant", index + 1); + tcx.sess.span_err(span, &msg); + } + _ => () + } + + is_promotable + }).collect() } struct Promoter<'a, 'tcx> { @@ -191,6 +796,10 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> { }); } + fn is_temp_kind(&self, local: Local) -> bool { + self.source.local_kind(local) == LocalKind::Temp + } + /// Copies the initialization of this temp to the /// promoted MIR, recursing through temps. fn promote_temp(&mut self, temp: Local) -> Local { @@ -211,17 +820,17 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> { self.temps[temp] = TempState::PromotedOut; } - let no_stmts = self.source[loc.block].statements.len(); + let num_stmts = self.source[loc.block].statements.len(); let new_temp = self.promoted.local_decls.push( LocalDecl::new_temp(self.source.local_decls[temp].ty, self.source.local_decls[temp].source_info.span)); debug!("promote({:?} @ {:?}/{:?}, {:?})", - temp, loc, no_stmts, self.keep_original); + temp, loc, num_stmts, self.keep_original); // First, take the Rvalue or Call out of the source MIR, // or duplicate it, depending on keep_original. 
- if loc.statement_index < no_stmts { + if loc.statement_index < num_stmts { let (mut rvalue, source_info) = { let statement = &mut self.source[loc.block].statements[loc.statement_index]; let rhs = match statement.kind { @@ -317,7 +926,7 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> { ty, def_id, }), - projection: box [], + projection: List::empty(), } }; let (blocks, local_decls) = self.source.basic_blocks_and_local_decls_mut(); @@ -335,7 +944,7 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> { &mut place.base, promoted_place(ty, span).base, ), - projection: box [], + projection: List::empty(), }) } _ => bug!() @@ -392,14 +1001,30 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> { /// Replaces all temporaries with their promoted counterparts. impl<'a, 'tcx> MutVisitor<'tcx> for Promoter<'a, 'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) { - if self.source.local_kind(*local) == LocalKind::Temp { + if self.is_temp_kind(*local) { *local = self.promote_temp(*local); } } + + fn process_projection_elem( + &mut self, + elem: &PlaceElem<'tcx>, + ) -> Option> { + match elem { + PlaceElem::Index(local) if self.is_temp_kind(*local) => { + Some(PlaceElem::Index(self.promote_temp(*local))) + } + _ => None, + } + } } pub fn promote_candidates<'tcx>( @@ -418,14 +1043,13 @@ pub fn promote_candidates<'tcx>( match candidate { Candidate::Repeat(Location { block, statement_index }) | Candidate::Ref(Location { block, statement_index }) => { - match body[block].statements[statement_index].kind { - StatementKind::Assign(box(Place { - base: PlaceBase::Local(local), - projection: box [], - }, _)) => { - if temps[local] == TempState::PromotedOut { - // Already promoted. - continue; + match &body[block].statements[statement_index].kind { + StatementKind::Assign(box(place, _)) => { + if let Some(local) = place.as_local() { + if temps[local] == TempState::PromotedOut { + // Already promoted. + continue; + } } } _ => {} @@ -471,28 +1095,30 @@ pub fn promote_candidates<'tcx>( let promoted = |index: Local| temps[index] == TempState::PromotedOut; for block in body.basic_blocks_mut() { block.statements.retain(|statement| { - match statement.kind { - StatementKind::Assign(box(Place { - base: PlaceBase::Local(index), - projection: box [], - }, _)) | + match &statement.kind { + StatementKind::Assign(box(place, _)) => { + if let Some(index) = place.as_local() { + !promoted(index) + } else { + true + } + } StatementKind::StorageLive(index) | StatementKind::StorageDead(index) => { - !promoted(index) + !promoted(*index) } _ => true } }); let terminator = block.terminator_mut(); - match terminator.kind { - TerminatorKind::Drop { location: Place { - base: PlaceBase::Local(index), - projection: box [], - }, target, .. } => { - if promoted(index) { - terminator.kind = TerminatorKind::Goto { - target, - }; + match &terminator.kind { + TerminatorKind::Drop { location: place, target, .. } => { + if let Some(index) = place.as_local() { + if promoted(index) { + terminator.kind = TerminatorKind::Goto { + target: *target, + }; + } } } _ => {} @@ -501,3 +1127,28 @@ pub fn promote_candidates<'tcx>( promotions } + +/// This function returns `true` if the `const_in_array_repeat_expression` feature attribute should +/// be suggested. This function is probably quite expensive, it shouldn't be run in the happy path. +/// Feature attribute should be suggested if `operand` can be promoted and the feature is not +/// enabled. 
+crate fn should_suggest_const_in_array_repeat_expressions_attribute<'tcx>( + tcx: TyCtxt<'tcx>, + mir_def_id: DefId, + body: &Body<'tcx>, + operand: &Operand<'tcx>, +) -> bool { + let mut rpo = traversal::reverse_postorder(body); + let (temps, _) = collect_temps_and_candidates(tcx, body, &mut rpo); + let validator = Validator { + item: Item::new(tcx, mir_def_id, body), + temps: &temps, + explicit: false, + }; + + let should_promote = validator.validate_operand(operand).is_ok(); + let feature_flag = tcx.features().const_in_array_repeat_expressions; + debug!("should_suggest_const_in_array_repeat_expressions_flag: mir_def_id={:?} \ + should_promote={:?} feature_flag={:?}", mir_def_id, should_promote, feature_flag); + should_promote && !feature_flag +} diff --git a/src/librustc_mir/transform/qualify_consts.rs b/src/librustc_mir/transform/qualify_consts.rs index 795721f3b3..5463b94447 100644 --- a/src/librustc_mir/transform/qualify_consts.rs +++ b/src/librustc_mir/transform/qualify_consts.rs @@ -4,8 +4,8 @@ //! The Qualif flags below can be used to also provide better //! diagnostics as to why a constant rvalue wasn't promoted. -use rustc_data_structures::bit_set::BitSet; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc_index::bit_set::BitSet; +use rustc_index::vec::IndexVec; use rustc_data_structures::fx::FxHashSet; use rustc_target::spec::abi::Abi; use rustc::hir; @@ -34,6 +34,7 @@ use std::usize; use rustc::hir::HirId; use crate::transform::{MirPass, MirSource}; use super::promote_consts::{self, Candidate, TempState}; +use crate::transform::check_consts::ops::{self, NonConstOp}; /// What kind of item we are in. #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -205,6 +206,9 @@ trait Qualif { ProjectionElem::ConstantIndex { .. } | ProjectionElem::Downcast(..) => qualif, + // FIXME(eddyb) shouldn't this be masked *after* including the + // index local? Then again, it's `usize` which is neither + // `HasMutInterior` nor `NeedsDrop`. ProjectionElem::Index(local) => qualif || Self::in_local(cx, *local), } } else { @@ -291,10 +295,10 @@ trait Qualif { Rvalue::Ref(_, _, ref place) => { // Special-case reborrows to be more like a copy of the reference. - if let box [proj_base @ .., elem] = &place.projection { - if ProjectionElem::Deref == *elem { + if let &[ref proj_base @ .., elem] = place.projection.as_ref() { + if ProjectionElem::Deref == elem { let base_ty = Place::ty_from(&place.base, proj_base, cx.body, cx.tcx).ty; - if let ty::Ref(..) = base_ty.sty { + if let ty::Ref(..) = base_ty.kind { return Self::in_place(cx, PlaceRef { base: &place.base, projection: proj_base, @@ -365,11 +369,11 @@ impl Qualif for HasMutInterior { // is allowed right now, and only in functions. if cx.mode == Mode::StaticMut { // Inside a `static mut`, &mut [...] is also allowed. - match ty.sty { + match ty.kind { ty::Array(..) | ty::Slice(_) => {} _ => return true, } - } else if let ty::Array(_, len) = ty.sty { + } else if let ty::Array(_, len) = ty.kind { // FIXME(eddyb) the `cx.mode == Mode::NonConstFn` condition // seems unnecessary, given that this is merely a ZST. match len.try_eval_usize(cx.tcx, cx.param_env) { @@ -441,6 +445,7 @@ impl Qualif for IsNotPromotable { StaticKind::Promoted(_, _) => unreachable!(), StaticKind::Static => { // Only allow statics (not consts) to refer to other statics. + // FIXME(eddyb) does this matter at all for promotion? 
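// Illustrative note (not part of this patch): the "only statics may refer to other
// statics" rule checked here corresponds to source such as:
//
//     static A: i32 = 5;
//     static B: &i32 = &A;    // accepted: a static referring to another static
//     // const C: &i32 = &A;  // rejected: constants cannot refer to statics
//
// The promotion `Validator::validate_place` added earlier in this patch applies the
// same restriction.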
let allowed = cx.mode == Mode::Static || cx.mode == Mode::StaticMut; !allowed || @@ -500,7 +505,7 @@ impl Qualif for IsNotPromotable { } Rvalue::BinaryOp(op, ref lhs, _) if cx.mode == Mode::NonConstFn => { - if let ty::RawPtr(_) | ty::FnPtr(..) = lhs.ty(cx.body, cx.tcx).sty { + if let ty::RawPtr(_) | ty::FnPtr(..) = lhs.ty(cx.body, cx.tcx).kind { assert!(op == BinOp::Eq || op == BinOp::Ne || op == BinOp::Le || op == BinOp::Lt || op == BinOp::Ge || op == BinOp::Gt || @@ -526,7 +531,7 @@ impl Qualif for IsNotPromotable { _return_ty: Ty<'tcx>, ) -> bool { let fn_ty = callee.ty(cx.body, cx.tcx); - match fn_ty.sty { + match fn_ty.kind { ty::FnDef(def_id, _) => { match cx.tcx.fn_sig(def_id).abi() { Abi::RustIntrinsic | @@ -557,6 +562,9 @@ impl Qualif for IsNotPromotable { | "saturating_add" | "saturating_sub" | "transmute" + | "simd_insert" + | "simd_extract" + | "ptr_offset_from" => return true, _ => {} @@ -597,7 +605,7 @@ impl Qualif for IsNotImplicitlyPromotable { _return_ty: Ty<'tcx>, ) -> bool { if cx.mode == Mode::NonConstFn { - if let ty::FnDef(def_id, _) = callee.ty(cx.body, cx.tcx).sty { + if let ty::FnDef(def_id, _) = callee.ty(cx.body, cx.tcx).kind { // Never promote runtime `const fn` calls of // functions without `#[rustc_promotable]`. if !cx.tcx.is_promotable_const_fn(def_id) { @@ -671,12 +679,19 @@ struct Checker<'a, 'tcx> { temp_promotion_state: IndexVec, promotion_candidates: Vec, + unchecked_promotion_candidates: Vec, + + /// If `true`, do not emit errors to the user, merely collect them in `errors`. + suppress_errors: bool, + errors: Vec<(Span, String)>, } macro_rules! unleash_miri { ($this:expr) => {{ if $this.tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you { - $this.tcx.sess.span_warn($this.span, "skipping const checks"); + if $this.mode.requires_const_checking() && !$this.suppress_errors { + $this.tcx.sess.span_warn($this.span, "skipping const checks"); + } return; } }} @@ -694,7 +709,8 @@ impl<'a, 'tcx> Checker<'a, 'tcx> { fn new(tcx: TyCtxt<'tcx>, def_id: DefId, body: &'a Body<'tcx>, mode: Mode) -> Self { assert!(def_id.is_local()); let mut rpo = traversal::reverse_postorder(body); - let temps = promote_consts::collect_temps(body, &mut rpo); + let (temps, unchecked_promotion_candidates) = + promote_consts::collect_temps_and_candidates(tcx, body, &mut rpo); rpo.reset(); let param_env = tcx.param_env(def_id); @@ -732,16 +748,20 @@ impl<'a, 'tcx> Checker<'a, 'tcx> { def_id, rpo, temp_promotion_state: temps, - promotion_candidates: vec![] + promotion_candidates: vec![], + unchecked_promotion_candidates, + errors: vec![], + suppress_errors: false, } } // FIXME(eddyb) we could split the errors into meaningful // categories, but enabling full miri would make that // slightly pointless (even with feature-gating). - fn not_const(&mut self) { + fn not_const(&mut self, op: impl NonConstOp) { unleash_miri!(self); - if self.mode.requires_const_checking() { + if self.mode.requires_const_checking() && !self.suppress_errors { + self.record_error(op); let mut err = struct_span_err!( self.tcx.sess, self.span, @@ -759,6 +779,14 @@ impl<'a, 'tcx> Checker<'a, 'tcx> { } } + fn record_error(&mut self, op: impl NonConstOp) { + self.record_error_spanned(op, self.span); + } + + fn record_error_spanned(&mut self, op: impl NonConstOp, span: Span) { + self.errors.push((span, format!("{:?}", op))); + } + /// Assigns an rvalue/call qualification to the given destination. 
fn assign(&mut self, dest: &Place<'tcx>, source: ValueSource<'_, 'tcx>, location: Location) { trace!("assign: {:?} <- {:?}", dest, source); @@ -777,8 +805,10 @@ impl<'a, 'tcx> Checker<'a, 'tcx> { qualifs[HasMutInterior] = false; qualifs[IsNotPromotable] = true; - if self.mode.requires_const_checking() { + debug!("suppress_errors: {}", self.suppress_errors); + if self.mode.requires_const_checking() && !self.suppress_errors { if !self.tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you { + self.record_error(ops::MutBorrow(kind)); if let BorrowKind::Mut { .. } = kind { let mut err = struct_span_err!(self.tcx.sess, self.span, E0017, "references in {}s may only refer \ @@ -806,6 +836,10 @@ impl<'a, 'tcx> Checker<'a, 'tcx> { } else if let BorrowKind::Mut { .. } | BorrowKind::Shared = kind { // Don't promote BorrowKind::Shallow borrows, as they don't // reach codegen. + // FIXME(eddyb) the two other kinds of borrow (`Shallow` and `Unique`) + // aren't promoted here but *could* be promoted as part of a larger + // value because `IsNotPromotable` isn't being set for them, + // need to figure out what is the intended behavior. // We might have a candidate for promotion. let candidate = Candidate::Ref(location); @@ -845,13 +879,11 @@ impl<'a, 'tcx> Checker<'a, 'tcx> { } }, ValueSource::Rvalue(&Rvalue::Repeat(ref operand, _)) => { - let candidate = Candidate::Repeat(location); - let not_promotable = IsNotImplicitlyPromotable::in_operand(self, operand) || - IsNotPromotable::in_operand(self, operand); - debug!("assign: self.def_id={:?} operand={:?}", self.def_id, operand); - if !not_promotable && self.tcx.features().const_in_array_repeat_expressions { - debug!("assign: candidate={:?}", candidate); - self.promotion_candidates.push(candidate); + debug!("assign: self.cx.mode={:?} self.def_id={:?} location={:?} operand={:?}", + self.cx.mode, self.def_id, location, operand); + if self.should_promote_repeat_expression(operand) && + self.tcx.features().const_in_array_repeat_expressions { + self.promotion_candidates.push(Candidate::Repeat(location)); } }, _ => {}, @@ -923,16 +955,33 @@ impl<'a, 'tcx> Checker<'a, 'tcx> { /// Check a whole const, static initializer or const fn. fn check_const(&mut self) -> (u8, &'tcx BitSet) { + use crate::transform::check_consts as new_checker; + debug!("const-checking {} {:?}", self.mode, self.def_id); + // FIXME: Also use the new validator when features that require it (e.g. `const_if`) are + // enabled. 
+ let use_new_validator = self.tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you; + if use_new_validator { + debug!("Using dataflow-based const validator"); + } + + let item = new_checker::Item::new(self.tcx, self.def_id, self.body); + let mut validator = new_checker::validation::Validator::new(&item); + + validator.suppress_errors = !use_new_validator; + self.suppress_errors = use_new_validator; + let body = self.body; let mut seen_blocks = BitSet::new_empty(body.basic_blocks().len()); let mut bb = START_BLOCK; + let mut has_controlflow_error = false; loop { seen_blocks.insert(bb.index()); self.visit_basic_block_data(bb, &body[bb]); + validator.visit_basic_block_data(bb, &body[bb]); let target = match body[bb].terminator().kind { TerminatorKind::Goto { target } | @@ -968,41 +1017,78 @@ impl<'a, 'tcx> Checker<'a, 'tcx> { bb = target; } _ => { - self.not_const(); + has_controlflow_error = true; + self.not_const(ops::Loop); + validator.check_op(ops::Loop); break; } } } + // The new validation pass should agree with the old when running on simple const bodies + // (e.g. no `if` or `loop`). + if !use_new_validator { + let mut new_errors = validator.take_errors(); + + // FIXME: each checker sometimes emits the same error with the same span twice in a row. + self.errors.dedup(); + new_errors.dedup(); + + if self.errors != new_errors { + validator_mismatch( + self.tcx, + body, + std::mem::replace(&mut self.errors, vec![]), + new_errors, + ); + } + } // Collect all the temps we need to promote. let mut promoted_temps = BitSet::new_empty(self.temp_promotion_state.len()); - debug!("qualify_const: promotion_candidates={:?}", self.promotion_candidates); - for candidate in &self.promotion_candidates { - match *candidate { + // HACK(eddyb) don't try to validate promotion candidates if any + // parts of the control-flow graph were skipped due to an error. + let promotion_candidates = if has_controlflow_error { + let unleash_miri = self + .tcx + .sess + .opts + .debugging_opts + .unleash_the_miri_inside_of_you; + if !unleash_miri { + self.tcx.sess.delay_span_bug( + body.span, + "check_const: expected control-flow error(s)", + ); + } + self.promotion_candidates.clone() + } else { + self.valid_promotion_candidates() + }; + debug!("qualify_const: promotion_candidates={:?}", promotion_candidates); + for candidate in promotion_candidates { + match candidate { Candidate::Repeat(Location { block: bb, statement_index: stmt_idx }) => { if let StatementKind::Assign(box(_, Rvalue::Repeat( - Operand::Move(Place { - base: PlaceBase::Local(index), - projection: box [], - }), + Operand::Move(place), _ - ))) = self.body[bb].statements[stmt_idx].kind { - promoted_temps.insert(index); + ))) = &self.body[bb].statements[stmt_idx].kind { + if let Some(index) = place.as_local() { + promoted_temps.insert(index); + } } } Candidate::Ref(Location { block: bb, statement_index: stmt_idx }) => { if let StatementKind::Assign( box( _, - Rvalue::Ref(_, _, Place { - base: PlaceBase::Local(index), - projection: box [], - }) + Rvalue::Ref(_, _, place) ) - ) = self.body[bb].statements[stmt_idx].kind { - promoted_temps.insert(index); + ) = &self.body[bb].statements[stmt_idx].kind { + if let Some(index) = place.as_local() { + promoted_temps.insert(index); + } } } Candidate::Argument { .. } => {} @@ -1019,6 +1105,58 @@ impl<'a, 'tcx> Checker<'a, 'tcx> { (qualifs.encode_to_bits(), self.tcx.arena.alloc(promoted_temps)) } + + /// Get the subset of `unchecked_promotion_candidates` that are eligible + /// for promotion. 
+ // FIXME(eddyb) replace the old candidate gathering with this. + fn valid_promotion_candidates(&self) -> Vec { + // Sanity-check the promotion candidates. + let candidates = promote_consts::validate_candidates( + self.tcx, + self.body, + self.def_id, + &self.temp_promotion_state, + &self.unchecked_promotion_candidates, + ); + + if candidates != self.promotion_candidates { + let report = |msg, candidate| { + let span = match candidate { + Candidate::Ref(loc) | + Candidate::Repeat(loc) => self.body.source_info(loc).span, + Candidate::Argument { bb, .. } => { + self.body[bb].terminator().source_info.span + } + }; + self.tcx.sess.span_err(span, &format!("{}: {:?}", msg, candidate)); + }; + + for &c in &self.promotion_candidates { + if !candidates.contains(&c) { + report("invalidated old candidate", c); + } + } + + for &c in &candidates { + if !self.promotion_candidates.contains(&c) { + report("extra new candidate", c); + } + } + + bug!("promotion candidate validation mismatches (see above)"); + } + + candidates + } + + /// Returns `true` if the operand of a repeat expression is promotable. + fn should_promote_repeat_expression(&self, operand: &Operand<'tcx>) -> bool { + let not_promotable = IsNotImplicitlyPromotable::in_operand(self, operand) || + IsNotPromotable::in_operand(self, operand); + debug!("should_promote_repeat_expression: operand={:?} not_promotable={:?}", + operand, not_promotable); + !not_promotable + } } impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { @@ -1039,7 +1177,8 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { .get_attrs(*def_id) .iter() .any(|attr| attr.check_name(sym::thread_local)) { - if self.mode.requires_const_checking() { + if self.mode.requires_const_checking() && !self.suppress_errors { + self.record_error(ops::ThreadLocalAccess); span_err!(self.tcx.sess, self.span, E0625, "thread-local statics cannot be \ accessed at compile-time"); @@ -1049,7 +1188,10 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { // Only allow statics (not consts) to refer to other statics. 
if self.mode == Mode::Static || self.mode == Mode::StaticMut { - if self.mode == Mode::Static && context.is_mutating_use() { + if self.mode == Mode::Static + && context.is_mutating_use() + && !self.suppress_errors + { // this is not strictly necessary as miri will also bail out // For interior mutability we can't really catch this statically as that // goes through raw pointers and intermediate temporaries, so miri has @@ -1063,7 +1205,8 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { } unleash_miri!(self); - if self.mode.requires_const_checking() { + if self.mode.requires_const_checking() && !self.suppress_errors { + self.record_error(ops::StaticAccess); let mut err = struct_span_err!(self.tcx.sess, self.span, E0013, "{}s cannot refer to statics, use \ a constant instead", self.mode); @@ -1082,77 +1225,87 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { } } - fn visit_projection( + fn visit_projection_elem( &mut self, place_base: &PlaceBase<'tcx>, - proj: &[PlaceElem<'tcx>], + proj_base: &[PlaceElem<'tcx>], + elem: &PlaceElem<'tcx>, context: PlaceContext, location: Location, ) { debug!( - "visit_place_projection: proj={:?} context={:?} location={:?}", - proj, context, location, + "visit_projection_elem: place_base={:?} proj_base={:?} elem={:?} \ + context={:?} location={:?}", + place_base, + proj_base, + elem, + context, + location, ); - self.super_projection(place_base, proj, context, location); - if let [proj_base @ .., elem] = proj { - match elem { - ProjectionElem::Deref => { - if context.is_mutating_use() { - // `not_const` errors out in const contexts - self.not_const() + self.super_projection_elem(place_base, proj_base, elem, context, location); + + match elem { + ProjectionElem::Deref => { + if context.is_mutating_use() { + // `not_const` errors out in const contexts + self.not_const(ops::MutDeref) + } + let base_ty = Place::ty_from(place_base, proj_base, self.body, self.tcx).ty; + match self.mode { + Mode::NonConstFn => {} + _ if self.suppress_errors => {} + _ => { + if let ty::RawPtr(_) = base_ty.kind { + if !self.tcx.features().const_raw_ptr_deref { + self.record_error(ops::RawPtrDeref); + emit_feature_err( + &self.tcx.sess.parse_sess, sym::const_raw_ptr_deref, + self.span, GateIssue::Language, + &format!( + "dereferencing raw pointers in {}s is unstable", + self.mode, + ), + ); + } + } } - let base_ty = Place::ty_from(place_base, proj_base, self.body, self.tcx).ty; - match self.mode { - Mode::NonConstFn => {}, - _ => { - if let ty::RawPtr(_) = base_ty.sty { - if !self.tcx.features().const_raw_ptr_deref { + } + } + + ProjectionElem::ConstantIndex {..} | + ProjectionElem::Subslice {..} | + ProjectionElem::Field(..) | + ProjectionElem::Index(_) => { + let base_ty = Place::ty_from(place_base, proj_base, self.body, self.tcx).ty; + if let Some(def) = base_ty.ty_adt_def() { + if def.is_union() { + match self.mode { + Mode::ConstFn => { + if !self.tcx.features().const_fn_union + && !self.suppress_errors + { + self.record_error(ops::UnionAccess); emit_feature_err( - &self.tcx.sess.parse_sess, sym::const_raw_ptr_deref, + &self.tcx.sess.parse_sess, sym::const_fn_union, self.span, GateIssue::Language, - &format!( - "dereferencing raw pointers in {}s is unstable", - self.mode, - ), + "unions in const fn are unstable", ); } - } + }, + + | Mode::NonConstFn + | Mode::Static + | Mode::StaticMut + | Mode::Const + => {}, } } } + } - ProjectionElem::ConstantIndex {..} | - ProjectionElem::Subslice {..} | - ProjectionElem::Field(..) 
| - ProjectionElem::Index(_) => { - let base_ty = Place::ty_from(place_base, proj_base, self.body, self.tcx).ty; - if let Some(def) = base_ty.ty_adt_def() { - if def.is_union() { - match self.mode { - Mode::ConstFn => { - if !self.tcx.features().const_fn_union { - emit_feature_err( - &self.tcx.sess.parse_sess, sym::const_fn_union, - self.span, GateIssue::Language, - "unions in const fn are unstable", - ); - } - }, - - | Mode::NonConstFn - | Mode::Static - | Mode::StaticMut - | Mode::Const - => {}, - } - } - } - } - - ProjectionElem::Downcast(..) => { - self.not_const() - } + ProjectionElem::Downcast(..) => { + self.not_const(ops::Downcast) } } } @@ -1164,10 +1317,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { match *operand { Operand::Move(ref place) => { // Mark the consumed locals to indicate later drops are noops. - if let Place { - base: PlaceBase::Local(local), - projection: box [], - } = *place { + if let Some(local) = place.as_local() { self.cx.per_local[NeedsDrop].remove(local); } } @@ -1183,10 +1333,10 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { if let Rvalue::Ref(_, kind, ref place) = *rvalue { // Special-case reborrows. let mut reborrow_place = None; - if let box [proj_base @ .., elem] = &place.projection { - if *elem == ProjectionElem::Deref { + if let &[ref proj_base @ .., elem] = place.projection.as_ref() { + if elem == ProjectionElem::Deref { let base_ty = Place::ty_from(&place.base, proj_base, self.body, self.tcx).ty; - if let ty::Ref(..) = base_ty.sty { + if let ty::Ref(..) = base_ty.kind { reborrow_place = Some(proj_base); } } @@ -1237,9 +1387,12 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { (CastTy::Ptr(_), CastTy::Int(_)) | (CastTy::FnPtr, CastTy::Int(_)) if self.mode != Mode::NonConstFn => { unleash_miri!(self); - if !self.tcx.features().const_raw_ptr_to_usize_cast { + if !self.tcx.features().const_raw_ptr_to_usize_cast + && !self.suppress_errors + { // in const fn and constants require the feature gate // FIXME: make it unsafe inside const fn and constants + self.record_error(ops::RawPtrToIntCast); emit_feature_err( &self.tcx.sess.parse_sess, sym::const_raw_ptr_to_usize_cast, self.span, GateIssue::Language, @@ -1255,7 +1408,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { } Rvalue::BinaryOp(op, ref lhs, _) => { - if let ty::RawPtr(_) | ty::FnPtr(..) = lhs.ty(self.body, self.tcx).sty { + if let ty::RawPtr(_) | ty::FnPtr(..) 
= lhs.ty(self.body, self.tcx).kind { assert!(op == BinOp::Eq || op == BinOp::Ne || op == BinOp::Le || op == BinOp::Lt || op == BinOp::Ge || op == BinOp::Gt || @@ -1263,8 +1416,10 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { unleash_miri!(self); if self.mode.requires_const_checking() && - !self.tcx.features().const_compare_raw_pointers + !self.tcx.features().const_compare_raw_pointers && + !self.suppress_errors { + self.record_error(ops::RawPtrComparison); // require the feature gate inside constants and const fn // FIXME: make it unsafe to use these operations emit_feature_err( @@ -1280,7 +1435,8 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { Rvalue::NullaryOp(NullOp::Box, _) => { unleash_miri!(self); - if self.mode.requires_const_checking() { + if self.mode.requires_const_checking() && !self.suppress_errors { + self.record_error(ops::HeapAllocation); let mut err = struct_span_err!(self.tcx.sess, self.span, E0010, "allocations are not allowed in {}s", self.mode); err.span_label(self.span, format!("allocation not allowed in {}s", self.mode)); @@ -1314,7 +1470,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { let fn_ty = func.ty(self.body, self.tcx); let mut callee_def_id = None; let mut is_shuffle = false; - match fn_ty.sty { + match fn_ty.kind { ty::FnDef(def_id, _) => { callee_def_id = Some(def_id); match self.tcx.fn_sig(def_id).abi() { @@ -1325,9 +1481,12 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { // special intrinsic that can be called diretly without an intrinsic // feature gate needs a language feature gate "transmute" => { - if self.mode.requires_const_checking() { + if self.mode.requires_const_checking() + && !self.suppress_errors + { // const eval transmute calls only with the feature gate if !self.tcx.features().const_transmute { + self.record_error(ops::Transmute); emit_feature_err( &self.tcx.sess.parse_sess, sym::const_transmute, self.span, GateIssue::Language, @@ -1355,7 +1514,10 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { .opts .debugging_opts .unleash_the_miri_inside_of_you; - if self.tcx.is_const_fn(def_id) || unleash_miri { + if self.tcx.is_const_fn(def_id) + || unleash_miri + || self.suppress_errors + { // stable const fns or unstable const fns // with their feature gate active // FIXME(eddyb) move stability checks from `is_const_fn` here. @@ -1366,6 +1528,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { // since the macro is marked with the attribute. if !self.tcx.features().const_panic { // Don't allow panics in constants without the feature gate. + self.record_error(ops::Panic); emit_feature_err( &self.tcx.sess.parse_sess, sym::const_panic, @@ -1380,6 +1543,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { // functions without the feature gate active in this crate in // order to report a better error message than the one below. 
if !self.span.allows_unstable(feature) { + self.record_error(ops::FnCallUnstable(def_id, feature)); let mut err = self.tcx.sess.struct_span_err(self.span, &format!("`{}` is not yet stable as a const fn", self.tcx.def_path_str(def_id))); @@ -1392,6 +1556,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { err.emit(); } } else { + self.record_error(ops::FnCallNonConst(def_id)); let mut err = struct_span_err!( self.tcx.sess, self.span, @@ -1407,13 +1572,9 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { } } ty::FnPtr(_) => { - let unleash_miri = self - .tcx - .sess - .opts - .debugging_opts - .unleash_the_miri_inside_of_you; - if self.mode.requires_const_checking() && !unleash_miri { + unleash_miri!(self); + if self.mode.requires_const_checking() && !self.suppress_errors { + self.record_error(ops::FnCallIndirect); let mut err = self.tcx.sess.struct_span_err( self.span, "function pointers are not allowed in const fn" @@ -1422,7 +1583,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { } } _ => { - self.not_const(); + self.not_const(ops::FnCallOther); } } @@ -1446,20 +1607,12 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { // This is not a problem, because the argument explicitly // requests constness, in contrast to regular promotion // which happens even without the user requesting it. - // We can error out with a hard error if the argument is not - // constant here. + // + // `promote_consts` is responsible for emitting the error if + // the argument is not promotable. if !IsNotPromotable::in_operand(self, arg) { debug!("visit_terminator_kind: candidate={:?}", candidate); self.promotion_candidates.push(candidate); - } else { - if is_shuffle { - span_err!(self.tcx.sess, self.span, E0526, - "shuffle indices are not constant"); - } else { - self.tcx.sess.span_err(self.span, - &format!("argument {} is required to be a constant", - i + 1)); - } } } } @@ -1480,14 +1633,11 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { } // Deny *any* live drops anywhere other than functions. - if self.mode.requires_const_checking() { + if self.mode.requires_const_checking() && !self.suppress_errors { unleash_miri!(self); // HACK(eddyb): emulate a bit of dataflow analysis, // conservatively, that drop elaboration will do. - let needs_drop = if let Place { - base: PlaceBase::Local(local), - projection: box [], - } = *place { + let needs_drop = if let Some(local) = place.as_local() { if NeedsDrop::in_local(self, local) { Some(self.body.local_decls[local].source_info.span) } else { @@ -1501,6 +1651,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { // Double-check the type being dropped, to minimize false positives. let ty = place.ty(self.body, self.tcx).ty; if ty.needs_drop(self.tcx, self.param_env) { + self.record_error_spanned(ops::LiveDrop, span); struct_span_err!(self.tcx.sess, span, E0493, "destructors cannot be evaluated at compile-time") .span_label(span, format!("{}s cannot evaluate destructors", @@ -1545,7 +1696,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { self.super_statement(statement, location); } StatementKind::FakeRead(FakeReadCause::ForMatchedPlace, _) => { - self.not_const(); + self.not_const(ops::IfOrMatch); } // FIXME(eddyb) should these really do nothing? StatementKind::FakeRead(..) | @@ -1567,6 +1718,10 @@ pub fn provide(providers: &mut Providers<'_>) { }; } +// FIXME(eddyb) this is only left around for the validation logic +// in `promote_consts`, see the comment in `validate_operand`. 
+pub(super) const QUALIF_ERROR_BIT: u8 = 1 << IsNotPromotable::IDX; + fn mir_const_qualif(tcx: TyCtxt<'_>, def_id: DefId) -> (u8, &BitSet) { // N.B., this `borrow()` is guaranteed to be valid (i.e., the value // cannot yet be stolen), because `mir_validated()`, which steals @@ -1576,7 +1731,7 @@ fn mir_const_qualif(tcx: TyCtxt<'_>, def_id: DefId) -> (u8, &BitSet) { if body.return_ty().references_error() { tcx.sess.delay_span_bug(body.span, "mir_const_qualif: MIR had errors"); - return (1 << IsNotPromotable::IDX, tcx.arena.alloc(BitSet::new_empty(0))); + return (QUALIF_ERROR_BIT, tcx.arena.alloc(BitSet::new_empty(0))); } Checker::new(tcx, def_id, body, Mode::Const).check_const() @@ -1618,29 +1773,33 @@ impl<'tcx> MirPass<'tcx> for QualifyAndPromoteConstants<'tcx> { let (temps, candidates) = { let mut checker = Checker::new(tcx, def_id, body, mode); if let Mode::ConstFn = mode { - if tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you { - checker.check_const(); - } else if tcx.is_min_const_fn(def_id) { + let use_min_const_fn_checks = + !tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you && + tcx.is_min_const_fn(def_id); + if use_min_const_fn_checks { // Enforce `min_const_fn` for stable `const fn`s. use super::qualify_min_const_fn::is_min_const_fn; if let Err((span, err)) = is_min_const_fn(tcx, def_id, body) { error_min_const_fn_violation(tcx, span, err); - } else { - // this should not produce any errors, but better safe than sorry - // FIXME(#53819) - checker.check_const(); + return; } - } else { - // Enforce a constant-like CFG for `const fn`. - checker.check_const(); + + // `check_const` should not produce any errors, but better safe than sorry + // FIXME(#53819) + // NOTE(eddyb) `check_const` is actually needed for promotion inside + // `min_const_fn` functions. } + + // Enforce a constant-like CFG for `const fn`. + checker.check_const(); } else { while let Some((bb, data)) = checker.rpo.next() { checker.visit_basic_block_data(bb, data); } } - (checker.temp_promotion_state, checker.promotion_candidates) + let promotion_candidates = checker.valid_promotion_candidates(); + (checker.temp_promotion_state, promotion_candidates) }; // Do the actual promotion, now that we know what's viable. @@ -1732,16 +1891,17 @@ fn remove_drop_and_storage_dead_on_promoted_locals( } }); let terminator = block.terminator_mut(); - match terminator.kind { + match &terminator.kind { TerminatorKind::Drop { - location: Place { - base: PlaceBase::Local(index), - projection: box [], - }, + location, target, .. - } if promoted_temps.contains(index) => { - terminator.kind = TerminatorKind::Goto { target }; + } => { + if let Some(index) = location.as_local() { + if promoted_temps.contains(index) { + terminator.kind = TerminatorKind::Goto { target: *target }; + } + } } _ => {} } @@ -1766,10 +1926,66 @@ fn args_required_const(tcx: TyCtxt<'_>, def_id: DefId) -> Option { ret.insert(a as usize); } _ => return None, } } Some(ret) } + +fn validator_mismatch( + tcx: TyCtxt<'tcx>, + body: &Body<'tcx>, + mut old_errors: Vec<(Span, String)>, + mut new_errors: Vec<(Span, String)>, +) { + error!("old validator: {:?}", old_errors); + error!("new validator: {:?}", new_errors); + + // ICE on nightly if the validators do not emit exactly the same errors. + // Users can suppress this panic with an unstable compiler flag (hopefully after + // filing an issue). 
+ let opts = &tcx.sess.opts; + let strict_validation_enabled = opts.unstable_features.is_nightly_build() + && !opts.debugging_opts.suppress_const_validation_back_compat_ice; + + if !strict_validation_enabled { + return; + } + + // If this difference would cause a regression from the old to the new or vice versa, trigger + // the ICE. + if old_errors.is_empty() || new_errors.is_empty() { + span_bug!(body.span, "{}", VALIDATOR_MISMATCH_ERR); + } + + // HACK: Borrows that would allow mutation are forbidden in const contexts, but they cause the + // new validator to be more conservative about when a dropped local has been moved out of. + // + // Suppress the mismatch ICE in cases where the validators disagree only on the number of + // `LiveDrop` errors and both observe the same sequence of `MutBorrow`s. + + let is_live_drop = |(_, s): &mut (_, String)| s.starts_with("LiveDrop"); + let is_mut_borrow = |(_, s): &&(_, String)| s.starts_with("MutBorrow"); + + let old_live_drops: Vec<_> = old_errors.drain_filter(is_live_drop).collect(); + let new_live_drops: Vec<_> = new_errors.drain_filter(is_live_drop).collect(); + + let only_live_drops_differ = old_live_drops != new_live_drops && old_errors == new_errors; + + let old_mut_borrows = old_errors.iter().filter(is_mut_borrow); + let new_mut_borrows = new_errors.iter().filter(is_mut_borrow); + + let at_least_one_mut_borrow = old_mut_borrows.clone().next().is_some(); + + if only_live_drops_differ && at_least_one_mut_borrow && old_mut_borrows.eq(new_mut_borrows) { + return; + } + + span_bug!(body.span, "{}", VALIDATOR_MISMATCH_ERR); +} + +const VALIDATOR_MISMATCH_ERR: &str = + r"Disagreement between legacy and dataflow-based const validators. + After filing an issue, use `-Zsuppress-const-validation-back-compat-ice` to compile your code."; diff --git a/src/librustc_mir/transform/qualify_min_const_fn.rs b/src/librustc_mir/transform/qualify_min_const_fn.rs index be83c823d4..c4e44091bc 100644 --- a/src/librustc_mir/transform/qualify_min_const_fn.rs +++ b/src/librustc_mir/transform/qualify_min_const_fn.rs @@ -14,7 +14,7 @@ pub fn is_min_const_fn(tcx: TyCtxt<'tcx>, def_id: DefId, body: &'a Body<'tcx>) - let mut current = def_id; loop { let predicates = tcx.predicates_of(current); - for (predicate, _) in &predicates.predicates { + for (predicate, _) in predicates.predicates { match predicate { | Predicate::RegionOutlives(_) | Predicate::TypeOutlives(_) @@ -32,7 +32,7 @@ pub fn is_min_const_fn(tcx: TyCtxt<'tcx>, def_id: DefId, body: &'a Body<'tcx>) - if Some(pred.def_id()) == tcx.lang_items().sized_trait() { continue; } - match pred.skip_binder().self_ty().sty { + match pred.skip_binder().self_ty().kind { ty::Param(ref p) => { let generics = tcx.generics_of(current); let def = generics.type_param(p, tcx); @@ -79,7 +79,7 @@ pub fn is_min_const_fn(tcx: TyCtxt<'tcx>, def_id: DefId, body: &'a Body<'tcx>) - fn check_ty(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, span: Span, fn_def_id: DefId) -> McfResult { for ty in ty.walk() { - match ty.sty { + match ty.kind { ty::Ref(_, _, hir::Mutability::MutMutable) => return Err(( span, "mutable references in const fn are unstable".into(), @@ -259,8 +259,8 @@ fn check_place( def_id: DefId, body: &Body<'tcx> ) -> McfResult { - let mut cursor = &*place.projection; - while let [proj_base @ .., elem] = cursor { + let mut cursor = place.projection.as_ref(); + while let &[ref proj_base @ .., elem] = cursor { cursor = proj_base; match elem { ProjectionElem::Downcast(..) 
=> { @@ -342,7 +342,7 @@ fn check_terminator( cleanup: _, } => { let fn_ty = func.ty(body, tcx); - if let ty::FnDef(def_id, _) = fn_ty.sty { + if let ty::FnDef(def_id, _) = fn_ty.kind { // some intrinsics are waved through if called inside the // standard library. Users never need to call them directly diff --git a/src/librustc_mir/transform/remove_noop_landing_pads.rs b/src/librustc_mir/transform/remove_noop_landing_pads.rs index 70b11944e2..130393e2c4 100644 --- a/src/librustc_mir/transform/remove_noop_landing_pads.rs +++ b/src/librustc_mir/transform/remove_noop_landing_pads.rs @@ -1,6 +1,6 @@ use rustc::ty::TyCtxt; use rustc::mir::*; -use rustc_data_structures::bit_set::BitSet; +use rustc_index::bit_set::BitSet; use crate::transform::{MirPass, MirSource}; use crate::util::patch::MirPatch; @@ -32,7 +32,7 @@ impl RemoveNoopLandingPads { nop_landing_pads: &BitSet, ) -> bool { for stmt in &body[bb].statements { - match stmt.kind { + match &stmt.kind { StatementKind::FakeRead(..) | StatementKind::StorageLive(_) | StatementKind::StorageDead(_) | @@ -41,12 +41,13 @@ impl RemoveNoopLandingPads { // These are all nops in a landing pad } - StatementKind::Assign(box(Place { - base: PlaceBase::Local(_), - projection: box [], - }, Rvalue::Use(_))) => { - // Writing to a local (e.g., a drop flag) does not - // turn a landing pad to a non-nop + StatementKind::Assign(box(place, Rvalue::Use(_))) => { + if place.as_local().is_some() { + // Writing to a local (e.g., a drop flag) does not + // turn a landing pad to a non-nop + } else { + return false; + } } StatementKind::Assign { .. } | diff --git a/src/librustc_mir/transform/rustc_peek.rs b/src/librustc_mir/transform/rustc_peek.rs index 68fa082d29..aada7641df 100644 --- a/src/librustc_mir/transform/rustc_peek.rs +++ b/src/librustc_mir/transform/rustc_peek.rs @@ -3,19 +3,21 @@ use syntax::ast; use syntax::symbol::sym; use syntax_pos::Span; -use rustc::ty::{self, TyCtxt}; +use rustc::ty::{self, TyCtxt, Ty}; use rustc::hir::def_id::DefId; -use rustc::mir::{self, Body, Location}; -use rustc_data_structures::bit_set::BitSet; +use rustc::mir::{self, Body, Location, Local}; +use rustc_index::bit_set::BitSet; use crate::transform::{MirPass, MirSource}; use crate::dataflow::{do_dataflow, DebugFormatted}; use crate::dataflow::MoveDataParamEnv; use crate::dataflow::BitDenotation; use crate::dataflow::DataflowResults; +use crate::dataflow::DataflowResultsCursor; use crate::dataflow::{ DefinitelyInitializedPlaces, MaybeInitializedPlaces, MaybeUninitializedPlaces }; +use crate::dataflow::IndirectlyMutableLocals; use crate::dataflow::move_paths::{MovePathIndex, LookupResult}; use crate::dataflow::move_paths::{HasMoveData, MoveData}; @@ -50,6 +52,10 @@ impl<'tcx> MirPass<'tcx> for SanityCheck { do_dataflow(tcx, body, def_id, &attributes, &dead_unwinds, DefinitelyInitializedPlaces::new(tcx, body, &mdpe), |bd, i| DebugFormatted::new(&bd.move_data().move_paths[i])); + let flow_indirectly_mut = + do_dataflow(tcx, body, def_id, &attributes, &dead_unwinds, + IndirectlyMutableLocals::new(tcx, body, param_env), + |_, i| DebugFormatted::new(&i)); if has_rustc_mir_with(&attributes, sym::rustc_peek_maybe_init).is_some() { sanity_check_via_rustc_peek(tcx, body, def_id, &attributes, &flow_inits); @@ -60,6 +66,9 @@ impl<'tcx> MirPass<'tcx> for SanityCheck { if has_rustc_mir_with(&attributes, sym::rustc_peek_definite_init).is_some() { sanity_check_via_rustc_peek(tcx, body, def_id, &attributes, &flow_def_inits); } + if has_rustc_mir_with(&attributes, 
sym::rustc_peek_indirectly_mutable).is_some() { + sanity_check_via_rustc_peek(tcx, body, def_id, &attributes, &flow_indirectly_mut); + } if has_rustc_mir_with(&attributes, sym::stop_after_dataflow).is_some() { tcx.sess.fatal("stop_after_dataflow ended compilation"); } @@ -88,151 +97,204 @@ pub fn sanity_check_via_rustc_peek<'tcx, O>( def_id: DefId, _attributes: &[ast::Attribute], results: &DataflowResults<'tcx, O>, -) where - O: BitDenotation<'tcx, Idx = MovePathIndex> + HasMoveData<'tcx>, -{ +) where O: RustcPeekAt<'tcx> { debug!("sanity_check_via_rustc_peek def_id: {:?}", def_id); - // FIXME: this is not DRY. Figure out way to abstract this and - // `dataflow::build_sets`. (But note it is doing non-standard - // stuff, so such generalization may not be realistic.) - for bb in body.basic_blocks().indices() { - each_block(tcx, body, results, bb); + let mut cursor = DataflowResultsCursor::new(results, body); + + let peek_calls = body + .basic_blocks() + .iter_enumerated() + .filter_map(|(bb, block_data)| { + PeekCall::from_terminator(tcx, block_data.terminator()) + .map(|call| (bb, block_data, call)) + }); + + for (bb, block_data, call) in peek_calls { + // Look for a sequence like the following to indicate that we should be peeking at `_1`: + // _2 = &_1; + // rustc_peek(_2); + // + // /* or */ + // + // _2 = _1; + // rustc_peek(_2); + let (statement_index, peek_rval) = block_data + .statements + .iter() + .enumerate() + .filter_map(|(i, stmt)| value_assigned_to_local(stmt, call.arg).map(|rval| (i, rval))) + .next() + .expect("call to rustc_peek should be preceded by \ + assignment to temporary holding its argument"); + + match (call.kind, peek_rval) { + | (PeekCallKind::ByRef, mir::Rvalue::Ref(_, _, place)) + | (PeekCallKind::ByVal, mir::Rvalue::Use(mir::Operand::Move(place))) + | (PeekCallKind::ByVal, mir::Rvalue::Use(mir::Operand::Copy(place))) + => { + let loc = Location { block: bb, statement_index }; + cursor.seek(loc); + let state = cursor.get(); + results.operator().peek_at(tcx, place, state, call); + } + + _ => { + let msg = "rustc_peek: argument expression \ + must be either `place` or `&place`"; + tcx.sess.span_err(call.span, msg); + } + } } } -fn each_block<'tcx, O>( - tcx: TyCtxt<'tcx>, - body: &Body<'tcx>, - results: &DataflowResults<'tcx, O>, - bb: mir::BasicBlock, -) where - O: BitDenotation<'tcx, Idx = MovePathIndex> + HasMoveData<'tcx>, -{ - let move_data = results.0.operator.move_data(); - let mir::BasicBlockData { ref statements, ref terminator, is_cleanup: _ } = body[bb]; - - let (args, span) = match is_rustc_peek(tcx, terminator) { - Some(args_and_span) => args_and_span, - None => return, - }; - assert!(args.len() == 1); - let peek_arg_place = match args[0] { - mir::Operand::Copy(ref place @ mir::Place { - base: mir::PlaceBase::Local(_), - projection: box [], - }) | - mir::Operand::Move(ref place @ mir::Place { - base: mir::PlaceBase::Local(_), - projection: box [], - }) => Some(place), - _ => None, - }; - - let peek_arg_place = match peek_arg_place { - Some(arg) => arg, - None => { - tcx.sess.diagnostic().span_err( - span, "dataflow::sanity_check cannot feed a non-temp to rustc_peek."); - return; - } - }; - - let mut on_entry = results.0.sets.entry_set_for(bb.index()).to_owned(); - let mut trans = results.0.sets.trans_for(bb.index()).clone(); - - // Emulate effect of all statements in the block up to (but not - // including) the borrow within `peek_arg_place`. 
Do *not* include - // call to `peek_arg_place` itself (since we are peeking the state - // of the argument at time immediate preceding Call to - // `rustc_peek`). - - for (j, stmt) in statements.iter().enumerate() { - debug!("rustc_peek: ({:?},{}) {:?}", bb, j, stmt); - let (place, rvalue) = match stmt.kind { - mir::StatementKind::Assign(box(ref place, ref rvalue)) => { - (place, rvalue) +/// If `stmt` is an assignment where the LHS is the given local (with no projections), returns the +/// RHS of the assignment. +fn value_assigned_to_local<'a, 'tcx>( + stmt: &'a mir::Statement<'tcx>, + local: Local, +) -> Option<&'a mir::Rvalue<'tcx>> { + if let mir::StatementKind::Assign(box (place, rvalue)) = &stmt.kind { + if let Some(l) = place.as_local() { + if local == l { + return Some(&*rvalue); } - mir::StatementKind::FakeRead(..) | - mir::StatementKind::StorageLive(_) | - mir::StatementKind::StorageDead(_) | - mir::StatementKind::InlineAsm { .. } | - mir::StatementKind::Retag { .. } | - mir::StatementKind::AscribeUserType(..) | - mir::StatementKind::Nop => continue, - mir::StatementKind::SetDiscriminant{ .. } => - span_bug!(stmt.source_info.span, - "sanity_check should run before Deaggregator inserts SetDiscriminant"), - }; + } + } - if place == peek_arg_place { - if let mir::Rvalue::Ref(_, mir::BorrowKind::Shared, ref peeking_at_place) = *rvalue { - // Okay, our search is over. - match move_data.rev_lookup.find(peeking_at_place.as_ref()) { - LookupResult::Exact(peek_mpi) => { - let bit_state = on_entry.contains(peek_mpi); - debug!("rustc_peek({:?} = &{:?}) bit_state: {}", - place, peeking_at_place, bit_state); - if !bit_state { - tcx.sess.span_err(span, "rustc_peek: bit not set"); + None +} + +#[derive(Clone, Copy, Debug)] +enum PeekCallKind { + ByVal, + ByRef, +} + +impl PeekCallKind { + fn from_arg_ty(arg: Ty<'_>) -> Self { + match arg.kind { + ty::Ref(_, _, _) => PeekCallKind::ByRef, + _ => PeekCallKind::ByVal, + } + } +} + +#[derive(Clone, Copy, Debug)] +pub struct PeekCall { + arg: Local, + kind: PeekCallKind, + span: Span, +} + +impl PeekCall { + fn from_terminator<'tcx>( + tcx: TyCtxt<'tcx>, + terminator: &mir::Terminator<'tcx>, + ) -> Option { + use mir::Operand; + + let span = terminator.source_info.span; + if let mir::TerminatorKind::Call { func: Operand::Constant(func), args, .. } = + &terminator.kind + { + if let ty::FnDef(def_id, substs) = func.literal.ty.kind { + let sig = tcx.fn_sig(def_id); + let name = tcx.item_name(def_id); + if sig.abi() != Abi::RustIntrinsic || name != sym::rustc_peek { + return None; + } + + assert_eq!(args.len(), 1); + let kind = PeekCallKind::from_arg_ty(substs.type_at(0)); + let arg = match &args[0] { + Operand::Copy(place) | Operand::Move(place) => { + if let Some(local) = place.as_local() { + local + } else { + tcx.sess.diagnostic().span_err( + span, + "dataflow::sanity_check cannot feed a non-temp to rustc_peek.", + ); + return None; } } - LookupResult::Parent(..) => { - tcx.sess.span_err(span, "rustc_peek: argument untracked"); + _ => { + tcx.sess.diagnostic().span_err( + span, + "dataflow::sanity_check cannot feed a non-temp to rustc_peek.", + ); + return None; } - } - return; - } else { - // Our search should have been over, but the input - // does not match expectations of `rustc_peek` for - // this sanity_check. 
- let msg = "rustc_peek: argument expression \ - must be immediate borrow of form `&expr`"; - tcx.sess.span_err(span, msg); + }; + + return Some(PeekCall { + arg, + kind, + span, + }); } } - let lhs_mpi = move_data.rev_lookup.find(place.as_ref()); - - debug!("rustc_peek: computing effect on place: {:?} ({:?}) in stmt: {:?}", - place, lhs_mpi, stmt); - // reset GEN and KILL sets before emulating their effect. - trans.clear(); - results.0.operator.before_statement_effect( - &mut trans, - Location { block: bb, statement_index: j }); - results.0.operator.statement_effect( - &mut trans, - Location { block: bb, statement_index: j }); - trans.apply(&mut on_entry); + None } - - results.0.operator.before_terminator_effect( - &mut trans, - Location { block: bb, statement_index: statements.len() }); - - tcx.sess.span_err(span, &format!("rustc_peek: MIR did not match \ - anticipated pattern; note that \ - rustc_peek expects input of \ - form `&expr`")); } -fn is_rustc_peek<'a, 'tcx>( - tcx: TyCtxt<'tcx>, - terminator: &'a Option>, -) -> Option<(&'a [mir::Operand<'tcx>], Span)> { - if let Some(mir::Terminator { ref kind, source_info, .. }) = *terminator { - if let mir::TerminatorKind::Call { func: ref oper, ref args, .. } = *kind { - if let mir::Operand::Constant(ref func) = *oper { - if let ty::FnDef(def_id, _) = func.literal.ty.sty { - let abi = tcx.fn_sig(def_id).abi(); - let name = tcx.item_name(def_id); - if abi == Abi::RustIntrinsic && name == sym::rustc_peek { - return Some((args, source_info.span)); - } +pub trait RustcPeekAt<'tcx>: BitDenotation<'tcx> { + fn peek_at( + &self, + tcx: TyCtxt<'tcx>, + place: &mir::Place<'tcx>, + flow_state: &BitSet, + call: PeekCall, + ); +} + +impl<'tcx, O> RustcPeekAt<'tcx> for O + where O: BitDenotation<'tcx, Idx = MovePathIndex> + HasMoveData<'tcx>, +{ + fn peek_at( + &self, + tcx: TyCtxt<'tcx>, + place: &mir::Place<'tcx>, + flow_state: &BitSet, + call: PeekCall, + ) { + match self.move_data().rev_lookup.find(place.as_ref()) { + LookupResult::Exact(peek_mpi) => { + let bit_state = flow_state.contains(peek_mpi); + debug!("rustc_peek({:?} = &{:?}) bit_state: {}", + call.arg, place, bit_state); + if !bit_state { + tcx.sess.span_err(call.span, "rustc_peek: bit not set"); } } + + LookupResult::Parent(..) => { + tcx.sess.span_err(call.span, "rustc_peek: argument untracked"); + } + } + } +} + +impl<'tcx> RustcPeekAt<'tcx> for IndirectlyMutableLocals<'_, 'tcx> { + fn peek_at( + &self, + tcx: TyCtxt<'tcx>, + place: &mir::Place<'tcx>, + flow_state: &BitSet, + call: PeekCall, + ) { + warn!("peek_at: place={:?}", place); + let local = if let Some(l) = place.as_local() { + l + } else { + tcx.sess.span_err(call.span, "rustc_peek: argument was not a local"); + return; + }; + + if !flow_state.contains(local) { + tcx.sess.span_err(call.span, "rustc_peek: bit not set"); } } - return None; } diff --git a/src/librustc_mir/transform/simplify.rs b/src/librustc_mir/transform/simplify.rs index d4599ee08a..385fc7ed2c 100644 --- a/src/librustc_mir/transform/simplify.rs +++ b/src/librustc_mir/transform/simplify.rs @@ -27,11 +27,11 @@ //! naively generate still contains the `_a = ()` write in the unreachable block "after" the //! return. 
-use rustc_data_structures::bit_set::BitSet; -use rustc_data_structures::indexed_vec::{Idx, IndexVec}; +use rustc_index::bit_set::BitSet; +use rustc_index::vec::{Idx, IndexVec}; use rustc::ty::TyCtxt; use rustc::mir::*; -use rustc::mir::visit::{MutVisitor, Visitor, PlaceContext}; +use rustc::mir::visit::{MutVisitor, Visitor, PlaceContext, MutatingUseContext}; use rustc::session::config::DebugInfo; use std::borrow::Cow; use crate::transform::{MirPass, MirSource}; @@ -293,25 +293,33 @@ pub fn remove_dead_blocks(body: &mut Body<'_>) { pub struct SimplifyLocals; impl<'tcx> MirPass<'tcx> for SimplifyLocals { - fn run_pass(&self, tcx: TyCtxt<'tcx>, _: MirSource<'tcx>, body: &mut Body<'tcx>) { - let mut marker = DeclMarker { locals: BitSet::new_empty(body.local_decls.len()) }; - marker.visit_body(body); - // Return pointer and arguments are always live - marker.locals.insert(RETURN_PLACE); - for arg in body.args_iter() { - marker.locals.insert(arg); - } - - // We may need to keep dead user variables live for debuginfo. - if tcx.sess.opts.debuginfo == DebugInfo::Full { - for local in body.vars_iter() { - marker.locals.insert(local); + fn run_pass(&self, tcx: TyCtxt<'tcx>, source: MirSource<'tcx>, body: &mut Body<'tcx>) { + trace!("running SimplifyLocals on {:?}", source); + let locals = { + let mut marker = DeclMarker { + locals: BitSet::new_empty(body.local_decls.len()), + body, + }; + marker.visit_body(body); + // Return pointer and arguments are always live + marker.locals.insert(RETURN_PLACE); + for arg in body.args_iter() { + marker.locals.insert(arg); } - } - let map = make_local_map(&mut body.local_decls, marker.locals); + // We may need to keep dead user variables live for debuginfo. + if tcx.sess.opts.debuginfo == DebugInfo::Full { + for local in body.vars_iter() { + marker.locals.insert(local); + } + } + + marker.locals + }; + + let map = make_local_map(&mut body.local_decls, locals); // Update references to all vars and tmps now - LocalUpdater { map }.visit_body(body); + LocalUpdater { map, tcx }.visit_body(body); body.local_decls.shrink_to_fit(); } } @@ -334,39 +342,86 @@ fn make_local_map( map } -struct DeclMarker { +struct DeclMarker<'a, 'tcx> { pub locals: BitSet, + pub body: &'a Body<'tcx>, } -impl<'tcx> Visitor<'tcx> for DeclMarker { - fn visit_local(&mut self, local: &Local, ctx: PlaceContext, _: Location) { +impl<'a, 'tcx> Visitor<'tcx> for DeclMarker<'a, 'tcx> { + fn visit_local(&mut self, local: &Local, ctx: PlaceContext, location: Location) { // Ignore storage markers altogether, they get removed along with their otherwise unused // decls. // FIXME: Extend this to all non-uses. - if !ctx.is_storage_marker() { - self.locals.insert(*local); + if ctx.is_storage_marker() { + return; } + + // Ignore stores of constants because `ConstProp` and `CopyProp` can remove uses of many + // of these locals. However, if the local is still needed, then it will be referenced in + // another place and we'll mark it as being used there. + if ctx == PlaceContext::MutatingUse(MutatingUseContext::Store) { + let stmt = + &self.body.basic_blocks()[location.block].statements[location.statement_index]; + if let StatementKind::Assign(box (p, Rvalue::Use(Operand::Constant(c)))) = &stmt.kind { + match c.literal.val { + // Keep assignments from unevaluated constants around, since the evaluation + // may report errors, even if the use of the constant is dead code. + interpret::ConstValue::Unevaluated(..) 
=> {} + _ => if p.as_local().is_some() { + trace!("skipping store of const value {:?} to {:?}", c, p); + return; + }, + } + } + } + + self.locals.insert(*local); } } -struct LocalUpdater { +struct LocalUpdater<'tcx> { map: IndexVec>, + tcx: TyCtxt<'tcx>, } -impl<'tcx> MutVisitor<'tcx> for LocalUpdater { +impl<'tcx> MutVisitor<'tcx> for LocalUpdater<'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) { // Remove unnecessary StorageLive and StorageDead annotations. data.statements.retain(|stmt| { - match stmt.kind { + match &stmt.kind { StatementKind::StorageLive(l) | StatementKind::StorageDead(l) => { - self.map[l].is_some() + self.map[*l].is_some() + } + StatementKind::Assign(box (place, _)) => { + if let Some(local) = place.as_local() { + self.map[local].is_some() + } else { + true + } } _ => true } }); self.super_basic_block_data(block, data); } + fn visit_local(&mut self, l: &mut Local, _: PlaceContext, _: Location) { *l = self.map[*l].unwrap(); } + + fn process_projection_elem( + &mut self, + elem: &PlaceElem<'tcx>, + ) -> Option> { + match elem { + PlaceElem::Index(local) => { + Some(PlaceElem::Index(self.map[*local].unwrap())) + } + _ => None + } + } } diff --git a/src/librustc_mir/transform/uniform_array_move_out.rs b/src/librustc_mir/transform/uniform_array_move_out.rs index 34ad5cb5dc..e4c2f7d389 100644 --- a/src/librustc_mir/transform/uniform_array_move_out.rs +++ b/src/librustc_mir/transform/uniform_array_move_out.rs @@ -30,7 +30,7 @@ use rustc::ty; use rustc::ty::TyCtxt; use rustc::mir::*; use rustc::mir::visit::{Visitor, PlaceContext, NonUseContext}; -use rustc_data_structures::indexed_vec::{IndexVec}; +use rustc_index::vec::{IndexVec}; use crate::transform::{MirPass, MirSource}; use crate::util::patch::MirPatch; @@ -61,7 +61,7 @@ impl<'a, 'tcx> Visitor<'tcx> for UniformArrayMoveOutVisitor<'a, 'tcx> { rvalue: &Rvalue<'tcx>, location: Location) { if let Rvalue::Use(Operand::Move(ref src_place)) = rvalue { - if let box [proj_base @ .., elem] = &src_place.projection { + if let &[ref proj_base @ .., elem] = src_place.projection.as_ref() { if let ProjectionElem::ConstantIndex{offset: _, min_length: _, from_end: false} = elem { @@ -69,7 +69,7 @@ impl<'a, 'tcx> Visitor<'tcx> for UniformArrayMoveOutVisitor<'a, 'tcx> { } else { let place_ty = Place::ty_from(&src_place.base, proj_base, self.body, self.tcx).ty; - if let ty::Array(item_ty, const_size) = place_ty.sty { + if let ty::Array(item_ty, const_size) = place_ty.kind { if let Some(size) = const_size.try_eval_usize(self.tcx, self.param_env) { assert!(size <= u32::max_value() as u64, "uniform array move out doesn't supported @@ -116,16 +116,13 @@ impl<'a, 'tcx> UniformArrayMoveOutVisitor<'a, 'tcx> { min_length: size, from_end: false, }); - self.patch.add_assign(location, - Place::from(temp), - Rvalue::Use( - Operand::Move( - Place { - base: base.clone(), - projection: projection.into_boxed_slice(), - } - ) - ) + self.patch.add_assign( + location, + Place::from(temp), + Rvalue::Use(Operand::Move(Place { + base: base.clone(), + projection: self.tcx.intern_place_elems(&projection), + })), ); temp }).collect(); @@ -153,16 +150,13 @@ impl<'a, 'tcx> UniformArrayMoveOutVisitor<'a, 'tcx> { min_length: size, from_end: false, }); - self.patch.add_assign(location, - dst_place.clone(), - Rvalue::Use( - Operand::Move( - Place { - base: base.clone(), - projection: projection.into_boxed_slice(), - } - ) - ) + self.patch.add_assign( + location, + 
dst_place.clone(), + Rvalue::Use(Operand::Move(Place { + base: base.clone(), + projection: self.tcx.intern_place_elems(&projection), + })), ); } _ => {} @@ -185,9 +179,11 @@ impl<'a, 'tcx> UniformArrayMoveOutVisitor<'a, 'tcx> { // // replaced by _10 = move _2[:-1]; -pub struct RestoreSubsliceArrayMoveOut; +pub struct RestoreSubsliceArrayMoveOut<'tcx> { + tcx: TyCtxt<'tcx> +} -impl<'tcx> MirPass<'tcx> for RestoreSubsliceArrayMoveOut { +impl<'tcx> MirPass<'tcx> for RestoreSubsliceArrayMoveOut<'tcx> { fn run_pass(&self, tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, body: &mut Body<'tcx>) { let mut patch = MirPatch::new(body); let param_env = tcx.param_env(src.def_id()); @@ -203,18 +199,17 @@ impl<'tcx> MirPass<'tcx> for RestoreSubsliceArrayMoveOut { if let StatementKind::Assign(box(ref dst_place, ref rval)) = statement.kind { if let Rvalue::Aggregate(box AggregateKind::Array(_), ref items) = *rval { let items : Vec<_> = items.iter().map(|item| { - if let Operand::Move(Place { - base: PlaceBase::Local(local), - projection: box [], - }) = item { - let local_use = &visitor.locals_use[*local]; - let opt_index_and_place = - Self::try_get_item_source(local_use, body); - // each local should be used twice: - // in assign and in aggregate statements - if local_use.use_count == 2 && opt_index_and_place.is_some() { - let (index, src_place) = opt_index_and_place.unwrap(); - return Some((local_use, index, src_place)); + if let Operand::Move(place) = item { + if let Some(local) = place.as_local() { + let local_use = &visitor.locals_use[local]; + let opt_index_and_place = + Self::try_get_item_source(local_use, body); + // each local should be used twice: + // in assign and in aggregate statements + if local_use.use_count == 2 && opt_index_and_place.is_some() { + let (index, src_place) = opt_index_and_place.unwrap(); + return Some((local_use, index, src_place)); + } } } None @@ -224,13 +219,15 @@ impl<'tcx> MirPass<'tcx> for RestoreSubsliceArrayMoveOut { let opt_size = opt_src_place.and_then(|src_place| { let src_ty = Place::ty_from(src_place.base, src_place.projection, body, tcx).ty; - if let ty::Array(_, ref size_o) = src_ty.sty { + if let ty::Array(_, ref size_o) = src_ty.kind { size_o.try_eval_usize(tcx, param_env) } else { None } }); - Self::check_and_patch(*candidate, &items, opt_size, &mut patch, dst_place); + let restore_subslice = RestoreSubsliceArrayMoveOut { tcx }; + restore_subslice + .check_and_patch(*candidate, &items, opt_size, &mut patch, dst_place); } } } @@ -239,15 +236,20 @@ impl<'tcx> MirPass<'tcx> for RestoreSubsliceArrayMoveOut { } } -impl RestoreSubsliceArrayMoveOut { +impl RestoreSubsliceArrayMoveOut<'tcx> { + pub fn new(tcx: TyCtxt<'tcx>) -> Self { + RestoreSubsliceArrayMoveOut { tcx } + } + // Checks that source has size, all locals are inited from same source place and // indices is an integer interval. If all checks pass do the replacent. 
// items are Vec> - fn check_and_patch<'tcx>(candidate: Location, - items: &[Option<(&LocalUse, u32, PlaceRef<'_, 'tcx>)>], - opt_size: Option, - patch: &mut MirPatch<'tcx>, - dst_place: &Place<'tcx>) { + fn check_and_patch(&self, + candidate: Location, + items: &[Option<(&LocalUse, u32, PlaceRef<'_, 'tcx>)>], + opt_size: Option, + patch: &mut MirPatch<'tcx>, + dst_place: &Place<'tcx>) { let opt_src_place = items.first().and_then(|x| *x).map(|x| x.2); if opt_size.is_some() && items.iter().all( @@ -280,46 +282,40 @@ impl RestoreSubsliceArrayMoveOut { dst_place.clone(), Rvalue::Use(Operand::Move(Place { base: src_place.base.clone(), - projection: projection.into_boxed_slice(), + projection: self.tcx.intern_place_elems(&projection), })), ); } } - fn try_get_item_source<'a, 'tcx>(local_use: &LocalUse, - body: &'a Body<'tcx>) -> Option<(u32, PlaceRef<'a, 'tcx>)> { + fn try_get_item_source<'a>(local_use: &LocalUse, + body: &'a Body<'tcx>) -> Option<(u32, PlaceRef<'a, 'tcx>)> { if let Some(location) = local_use.first_use { let block = &body[location.block]; if block.statements.len() > location.statement_index { let statement = &block.statements[location.statement_index]; if let StatementKind::Assign( - box( - Place { - base: PlaceBase::Local(_), - projection: box [], - }, - Rvalue::Use(Operand::Move(Place { - base: _, - projection: box [.., ProjectionElem::ConstantIndex { - offset, min_length: _, from_end: false - }], - })), - ) + box(place, Rvalue::Use(Operand::Move(src_place))) ) = &statement.kind { - // FIXME remove once we can use slices patterns - if let StatementKind::Assign( - box( - _, - Rvalue::Use(Operand::Move(Place { + if let (Some(_), PlaceRef { + base: _, + projection: &[.., ProjectionElem::ConstantIndex { + offset, min_length: _, from_end: false + }], + }) = (place.as_local(), src_place.as_ref()) { + if let StatementKind::Assign( + box(_, Rvalue::Use(Operand::Move(place))) + ) = &statement.kind { + if let PlaceRef { base, - projection: box [proj_base @ .., _], - })), - ) - ) = &statement.kind { - return Some((*offset, PlaceRef { - base, - projection: proj_base, - })) + projection: &[ref proj_base @ .., _], + } = place.as_ref() { + return Some((offset, PlaceRef { + base, + projection: proj_base, + })) + } + } } } } diff --git a/src/librustc_mir/util/aggregate.rs b/src/librustc_mir/util/aggregate.rs index b3565d40b8..e6c3e4384d 100644 --- a/src/librustc_mir/util/aggregate.rs +++ b/src/librustc_mir/util/aggregate.rs @@ -1,7 +1,7 @@ use rustc::mir::*; -use rustc::ty::Ty; +use rustc::ty::{Ty, TyCtxt}; use rustc::ty::layout::VariantIdx; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::vec::Idx; use std::iter::TrustedLen; @@ -17,6 +17,7 @@ pub fn expand_aggregate<'tcx>( operands: impl Iterator, Ty<'tcx>)> + TrustedLen, kind: AggregateKind<'tcx>, source_info: SourceInfo, + tcx: TyCtxt<'tcx>, ) -> impl Iterator> + TrustedLen { let mut set_discriminant = None; let active_field_index = match kind { @@ -29,7 +30,7 @@ pub fn expand_aggregate<'tcx>( }, source_info, }); - lhs = lhs.downcast(adt_def, variant_index); + lhs = tcx.mk_place_downcast(lhs, adt_def, variant_index); } active_field_index } @@ -58,7 +59,7 @@ pub fn expand_aggregate<'tcx>( // FIXME(eddyb) `offset` should be u64. let offset = i as u32; assert_eq!(offset as usize, i); - lhs.clone().elem(ProjectionElem::ConstantIndex { + tcx.mk_place_elem(lhs.clone(), ProjectionElem::ConstantIndex { offset, // FIXME(eddyb) `min_length` doesn't appear to be used. 
min_length: offset + 1, @@ -66,7 +67,7 @@ pub fn expand_aggregate<'tcx>( }) } else { let field = Field::new(active_field_index.unwrap_or(i)); - lhs.clone().field(field, ty) + tcx.mk_place_field(lhs.clone(), field, ty) }; Statement { source_info, diff --git a/src/librustc_mir/util/alignment.rs b/src/librustc_mir/util/alignment.rs index a75c1af04f..f949fcf074 100644 --- a/src/librustc_mir/util/alignment.rs +++ b/src/librustc_mir/util/alignment.rs @@ -38,8 +38,8 @@ fn is_within_packed<'tcx, L>(tcx: TyCtxt<'tcx>, local_decls: &L, place: &Place<' where L: HasLocalDecls<'tcx>, { - let mut cursor = &*place.projection; - while let [proj_base @ .., elem] = cursor { + let mut cursor = place.projection.as_ref(); + while let &[ref proj_base @ .., elem] = cursor { cursor = proj_base; match elem { @@ -47,7 +47,7 @@ where ProjectionElem::Deref => break, ProjectionElem::Field(..) => { let ty = Place::ty_from(&place.base, proj_base, local_decls, tcx).ty; - match ty.sty { + match ty.kind { ty::Adt(def, _) if def.repr.packed() => { return true } diff --git a/src/librustc_mir/util/borrowck_errors.rs b/src/librustc_mir/util/borrowck_errors.rs index cf9ef55c17..96ba829358 100644 --- a/src/librustc_mir/util/borrowck_errors.rs +++ b/src/librustc_mir/util/borrowck_errors.rs @@ -324,7 +324,7 @@ impl<'cx, 'tcx> crate::borrow_check::MirBorrowckCtxt<'cx, 'tcx> { ty: Ty<'_>, is_index: Option, ) -> DiagnosticBuilder<'cx> { - let type_name = match (&ty.sty, is_index) { + let type_name = match (&ty.kind, is_index) { (&ty::Array(_, _), Some(true)) | (&ty::Array(_, _), None) => "array", (&ty::Slice(_), _) => "slice", _ => span_bug!(move_from_span, "this path should not cause illegal move"), diff --git a/src/librustc_mir/util/def_use.rs b/src/librustc_mir/util/def_use.rs index 59821440c6..725ec84ca6 100644 --- a/src/librustc_mir/util/def_use.rs +++ b/src/librustc_mir/util/def_use.rs @@ -1,8 +1,9 @@ //! Def-use analysis. -use rustc::mir::{Local, Location, Body}; +use rustc::mir::{Body, Local, Location, PlaceElem}; use rustc::mir::visit::{PlaceContext, MutVisitor, Visitor}; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc::ty::TyCtxt; +use rustc_index::vec::IndexVec; use std::mem; pub struct DefUseAnalysis { @@ -47,23 +48,26 @@ impl DefUseAnalysis { &self.info[local] } - fn mutate_defs_and_uses(&self, local: Local, body: &mut Body<'_>, mut callback: F) - where F: for<'a> FnMut(&'a mut Local, - PlaceContext, - Location) { + fn mutate_defs_and_uses( + &self, + local: Local, + body: &mut Body<'tcx>, + new_local: Local, + tcx: TyCtxt<'tcx>, + ) { for place_use in &self.info[local].defs_and_uses { - MutateUseVisitor::new(local, - &mut callback, - body).visit_location(body, place_use.location) + MutateUseVisitor::new(local, new_local, body, tcx) + .visit_location(body, place_use.location) } } // FIXME(pcwalton): this should update the def-use chains. 
pub fn replace_all_defs_and_uses_with(&self, local: Local, - body: &mut Body<'_>, - new_local: Local) { - self.mutate_defs_and_uses(local, body, |local, _, _| *local = new_local) + body: &mut Body<'tcx>, + new_local: Local, + tcx: TyCtxt<'tcx>) { + self.mutate_defs_and_uses(local, body, new_local, tcx) } } @@ -117,30 +121,46 @@ impl Info { } } -struct MutateUseVisitor { +struct MutateUseVisitor<'tcx> { query: Local, - callback: F, + new_local: Local, + tcx: TyCtxt<'tcx>, } -impl MutateUseVisitor { - fn new(query: Local, callback: F, _: &Body<'_>) - -> MutateUseVisitor - where F: for<'a> FnMut(&'a mut Local, PlaceContext, Location) { - MutateUseVisitor { - query, - callback, - } +impl MutateUseVisitor<'tcx> { + fn new( + query: Local, + new_local: Local, + _: &Body<'tcx>, + tcx: TyCtxt<'tcx>, + ) -> MutateUseVisitor<'tcx> { + MutateUseVisitor { query, new_local, tcx } } } -impl MutVisitor<'_> for MutateUseVisitor - where F: for<'a> FnMut(&'a mut Local, PlaceContext, Location) { +impl MutVisitor<'tcx> for MutateUseVisitor<'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + fn visit_local(&mut self, local: &mut Local, - context: PlaceContext, - location: Location) { + _context: PlaceContext, + _location: Location) { if *local == self.query { - (self.callback)(local, context, location) + *local = self.new_local; + } + } + + fn process_projection_elem( + &mut self, + elem: &PlaceElem<'tcx>, + ) -> Option> { + match elem { + PlaceElem::Index(local) if *local == self.query => { + Some(PlaceElem::Index(self.new_local)) + } + _ => None, } } } diff --git a/src/librustc_mir/util/elaborate_drops.rs b/src/librustc_mir/util/elaborate_drops.rs index 52ad97bbde..a1846a1fb5 100644 --- a/src/librustc_mir/util/elaborate_drops.rs +++ b/src/librustc_mir/util/elaborate_drops.rs @@ -7,7 +7,7 @@ use rustc::ty::{self, Ty, TyCtxt}; use rustc::ty::layout::VariantIdx; use rustc::ty::subst::SubstsRef; use rustc::ty::util::IntTypeExt; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::vec::Idx; use crate::util::patch::MirPatch; use std::convert::TryInto; @@ -200,13 +200,14 @@ where variant.fields.iter().enumerate().map(|(i, f)| { let field = Field::new(i); let subpath = self.elaborator.field_subpath(variant_path, field); + let tcx = self.tcx(); assert_eq!(self.elaborator.param_env().reveal, Reveal::All); - let field_ty = self.tcx().normalize_erasing_regions( + let field_ty = tcx.normalize_erasing_regions( self.elaborator.param_env(), - f.ty(self.tcx(), substs), + f.ty(tcx, substs), ); - (base_place.clone().field(field, field_ty), subpath) + (tcx.mk_place_field(base_place.clone(), field, field_ty), subpath) }).collect() } @@ -323,7 +324,7 @@ where debug!("open_drop_for_tuple({:?}, {:?})", self, tys); let fields = tys.iter().enumerate().map(|(i, &ty)| { - (self.place.clone().field(Field::new(i), ty), + (self.tcx().mk_place_field(self.place.clone(), Field::new(i), ty), self.elaborator.field_subpath(self.path, Field::new(i))) }).collect(); @@ -334,7 +335,7 @@ where fn open_drop_for_box(&mut self, adt: &'tcx ty::AdtDef, substs: SubstsRef<'tcx>) -> BasicBlock { debug!("open_drop_for_box({:?}, {:?}, {:?})", self, adt, substs); - let interior = self.place.clone().deref(); + let interior = self.tcx().mk_place_deref(self.place.clone()); let interior_path = self.elaborator.deref_subpath(self.path); let succ = self.succ; // FIXME(#43234) @@ -406,14 +407,19 @@ where }; let mut have_otherwise = false; + let tcx = self.tcx(); - for (variant_index, discr) in adt.discriminants(self.tcx()) { + for (variant_index, 
discr) in adt.discriminants(tcx) { let subpath = self.elaborator.downcast_subpath( self.path, variant_index); if let Some(variant_path) = subpath { - let base_place = self.place.clone().elem( - ProjectionElem::Downcast(Some(adt.variants[variant_index].ident.name), - variant_index)); + let base_place = tcx.mk_place_elem( + self.place.clone(), + ProjectionElem::Downcast( + Some(adt.variants[variant_index].ident.name), + variant_index, + ), + ); let fields = self.move_paths_for_fields( &base_place, variant_path, @@ -586,7 +592,7 @@ where BorrowKind::Mut { allow_two_phase_borrow: false }, Place { base: PlaceBase::Local(cur), - projection: Box::new([ProjectionElem::Deref]), + projection: tcx.intern_place_elems(&vec![ProjectionElem::Deref]), } ), Rvalue::BinaryOp(BinOp::Offset, move_(&Place::from(cur)), one)) @@ -594,7 +600,7 @@ where (Rvalue::Ref( tcx.lifetimes.re_erased, BorrowKind::Mut { allow_two_phase_borrow: false }, - self.place.clone().index(cur)), + tcx.mk_place_index(self.place.clone(), cur)), Rvalue::BinaryOp(BinOp::Add, move_(&Place::from(cur)), one)) }; @@ -627,7 +633,7 @@ where let loop_block = self.elaborator.patch().new_block(loop_block); self.elaborator.patch().patch_terminator(drop_block, TerminatorKind::Drop { - location: ptr.clone().deref(), + location: tcx.mk_place_deref(ptr.clone()), target: loop_block, unwind: unwind.into_option() }); @@ -644,18 +650,27 @@ where // ptr_based_loop // } + let tcx = self.tcx(); + if let Some(size) = opt_size { let size: u32 = size.try_into().unwrap_or_else(|_| { bug!("move out check isn't implemented for array sizes bigger than u32::MAX"); }); - let fields: Vec<(Place<'tcx>, Option)> = (0..size).map(|i| { - (self.place.clone().elem(ProjectionElem::ConstantIndex{ - offset: i, - min_length: size, - from_end: false - }), - self.elaborator.array_subpath(self.path, i, size)) - }).collect(); + let fields: Vec<(Place<'tcx>, Option)> = (0..size) + .map(|i| { + ( + tcx.mk_place_elem( + self.place.clone(), + ProjectionElem::ConstantIndex { + offset: i, + min_length: size, + from_end: false, + }, + ), + self.elaborator.array_subpath(self.path, i, size), + ) + }) + .collect(); if fields.iter().any(|(_,path)| path.is_some()) { let (succ, unwind) = self.drop_ladder_bottom(); @@ -664,7 +679,6 @@ where } let move_ = |place: &Place<'tcx>| Operand::Move(place.clone()); - let tcx = self.tcx(); let elem_size = &Place::from(self.new_temp(tcx.types.usize)); let len = &Place::from(self.new_temp(tcx.types.usize)); @@ -786,9 +800,9 @@ where /// ADT, both in the success case or if one of the destructors fail. fn open_drop(&mut self) -> BasicBlock { let ty = self.place_ty(self.place); - match ty.sty { + match ty.kind { ty::Closure(def_id, substs) => { - let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx()).collect(); + let tys : Vec<_> = substs.as_closure().upvar_tys(def_id, self.tcx()).collect(); self.open_drop_for_tuple(&tys) } // Note that `elaborate_drops` only drops the upvars of a generator, @@ -798,7 +812,7 @@ where // It effetively only contains upvars until the generator transformation runs. // See librustc_body/transform/generator.rs for more details. ty::Generator(def_id, substs, _) => { - let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx()).collect(); + let tys : Vec<_> = substs.as_generator().upvar_tys(def_id, self.tcx()).collect(); self.open_drop_for_tuple(&tys) } ty::Tuple(..) 
=> { @@ -900,8 +914,8 @@ where ); let args = adt.variants[VariantIdx::new(0)].fields.iter().enumerate().map(|(i, f)| { let field = Field::new(i); - let field_ty = f.ty(self.tcx(), substs); - Operand::Move(self.place.clone().field(field, field_ty)) + let field_ty = f.ty(tcx, substs); + Operand::Move(tcx.mk_place_field(self.place.clone(), field, field_ty)) }).collect(); let call = TerminatorKind::Call { diff --git a/src/librustc_mir/util/graphviz.rs b/src/librustc_mir/util/graphviz.rs index 9d142d9b70..ff2946d3a6 100644 --- a/src/librustc_mir/util/graphviz.rs +++ b/src/librustc_mir/util/graphviz.rs @@ -1,7 +1,7 @@ use rustc::hir::def_id::DefId; use rustc::mir::*; use rustc::ty::TyCtxt; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::vec::Idx; use std::fmt::Debug; use std::io::{self, Write}; @@ -16,10 +16,22 @@ pub fn write_mir_graphviz( where W: Write, { - for def_id in dump_mir_def_ids(tcx, single) { - let body = &tcx.optimized_mir(def_id); - write_mir_fn_graphviz(tcx, def_id, body, w)?; + let def_ids = dump_mir_def_ids(tcx, single); + + let use_subgraphs = def_ids.len() > 1; + if use_subgraphs { + writeln!(w, "digraph __crate__ {{")?; } + + for def_id in def_ids { + let body = &tcx.optimized_mir(def_id); + write_mir_fn_graphviz(tcx, def_id, body, use_subgraphs, w)?; + } + + if use_subgraphs { + writeln!(w, "}}")?; + } + Ok(()) } @@ -38,12 +50,16 @@ pub fn write_mir_fn_graphviz<'tcx, W>( tcx: TyCtxt<'tcx>, def_id: DefId, body: &Body<'_>, + subgraph: bool, w: &mut W, ) -> io::Result<()> where W: Write, { - writeln!(w, "digraph Mir_{} {{", graphviz_safe_def_name(def_id))?; + let kind = if subgraph { "subgraph" } else { "digraph" }; + let cluster = if subgraph { "cluster_" } else { "" }; // Prints a border around MIR + let def_name = graphviz_safe_def_name(def_id); + writeln!(w, "{} {}Mir_{} {{", kind, cluster, def_name)?; // Global graph properties writeln!(w, r#" graph [fontname="monospace"];"#)?; @@ -55,12 +71,12 @@ where // Nodes for (block, _) in body.basic_blocks().iter_enumerated() { - write_node(block, body, w)?; + write_node(def_id, block, body, w)?; } // Edges for (source, _) in body.basic_blocks().iter_enumerated() { - write_edges(source, body, w)?; + write_edges(def_id, source, body, w)?; } writeln!(w, "}}") } @@ -111,25 +127,37 @@ pub fn write_node_label(block: BasicBlock, fini(w)?; // Close the table - writeln!(w, "") + write!(w, "") } /// Write a graphviz DOT node for the given basic block. -fn write_node(block: BasicBlock, body: &Body<'_>, w: &mut W) -> io::Result<()> { +fn write_node( + def_id: DefId, + block: BasicBlock, + body: &Body<'_>, + w: &mut W, +) -> io::Result<()> { // Start a new node with the label to follow, in one of DOT's pseudo-HTML tables. - write!(w, r#" {} [shape="none", label=<"#, node(block))?; + write!(w, r#" {} [shape="none", label=<"#, node(def_id, block))?; write_node_label(block, body, w, 1, |_| Ok(()), |_| Ok(()))?; // Close the node label and the node itself. writeln!(w, ">];") } /// Write graphviz DOT edges with labels between the given basic block and all of its successors. 
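Once several MIR bodies share a single `digraph __crate__`, plain `bb0`-style node ids would collide between functions; that is why the node and edge writers below take a `DefId` and suffix every block name. A hedged, standalone illustration of the naming idea (the def-name strings are placeholders, not the exact output of `graphviz_safe_def_name`):

```rust
// Illustration only: mirrors the shape of the `node()` helper further below.
fn node_name(block_index: usize, def_name: &str) -> String {
    format!("bb{}__{}", block_index, def_name)
}

fn main() {
    // Two bodies both start at bb0; the per-function suffix keeps DOT ids unique.
    let a = node_name(0, "Mir_0_3"); // placeholder def name
    let b = node_name(0, "Mir_0_4"); // placeholder def name
    assert_ne!(a, b);
    println!("{} / {}", a, b);
}
```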
-fn write_edges(source: BasicBlock, body: &Body<'_>, w: &mut W) -> io::Result<()> { +fn write_edges( + def_id: DefId, + source: BasicBlock, + body: &Body<'_>, + w: &mut W, +) -> io::Result<()> { let terminator = body[source].terminator(); let labels = terminator.kind.fmt_successor_labels(); for (&target, label) in terminator.successors().zip(labels) { - writeln!(w, r#" {} -> {} [label="{}"];"#, node(source), node(target), label)?; + let src = node(def_id, source); + let trg = node(def_id, target); + writeln!(w, r#" {} -> {} [label="{}"];"#, src, trg, label)?; } Ok(()) @@ -181,8 +209,8 @@ fn write_graph_label<'tcx, W: Write>( writeln!(w, ">;") } -fn node(block: BasicBlock) -> String { - format!("bb{}", block.index()) +fn node(def_id: DefId, block: BasicBlock) -> String { + format!("bb{}__{}", block.index(), graphviz_safe_def_name(def_id)) } fn escape(t: &T) -> String { diff --git a/src/librustc_mir/util/liveness.rs b/src/librustc_mir/util/liveness.rs index b42eebc7ee..9757f4ac39 100644 --- a/src/librustc_mir/util/liveness.rs +++ b/src/librustc_mir/util/liveness.rs @@ -30,8 +30,8 @@ use rustc::mir::visit::{ use rustc::mir::Local; use rustc::mir::*; use rustc::ty::{self, TyCtxt}; -use rustc_data_structures::bit_set::BitSet; -use rustc_data_structures::indexed_vec::{Idx, IndexVec}; +use rustc_index::bit_set::BitSet; +use rustc_index::vec::{Idx, IndexVec}; use rustc_data_structures::work_queue::WorkQueue; use std::fs; use std::io::{self, Write}; diff --git a/src/librustc_mir/util/patch.rs b/src/librustc_mir/util/patch.rs index 2ea9924af7..a5f7e54015 100644 --- a/src/librustc_mir/util/patch.rs +++ b/src/librustc_mir/util/patch.rs @@ -1,6 +1,6 @@ use rustc::ty::Ty; use rustc::mir::*; -use rustc_data_structures::indexed_vec::{IndexVec, Idx}; +use rustc_index::vec::{IndexVec, Idx}; use syntax_pos::Span; /// This struct represents a patch to MIR, which can add diff --git a/src/librustc_mir/util/pretty.rs b/src/librustc_mir/util/pretty.rs index c35c9e4da9..89f298846d 100644 --- a/src/librustc_mir/util/pretty.rs +++ b/src/librustc_mir/util/pretty.rs @@ -3,7 +3,7 @@ use rustc::mir::*; use rustc::mir::visit::Visitor; use rustc::ty::{self, TyCtxt}; use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::vec::Idx; use std::fmt::Display; use std::fmt::Write as _; use std::fs; @@ -145,7 +145,7 @@ fn dump_matched_mir_node<'tcx, F>( let _: io::Result<()> = try { let mut file = create_dump_file(tcx, "dot", pass_num, pass_name, disambiguator, source)?; - write_mir_fn_graphviz(tcx, source.def_id(), body, &mut file)?; + write_mir_fn_graphviz(tcx, source.def_id(), body, false, &mut file)?; }; } } diff --git a/src/librustc_passes/Cargo.toml b/src/librustc_passes/Cargo.toml index 596ec6c19b..9d29a23031 100644 --- a/src/librustc_passes/Cargo.toml +++ b/src/librustc_passes/Cargo.toml @@ -15,3 +15,5 @@ rustc_data_structures = { path = "../librustc_data_structures" } syntax = { path = "../libsyntax" } syntax_pos = { path = "../libsyntax_pos" } errors = { path = "../librustc_errors", package = "rustc_errors" } +rustc_target = { path = "../librustc_target" } +rustc_index = { path = "../librustc_index" } diff --git a/src/librustc_passes/ast_validation.rs b/src/librustc_passes/ast_validation.rs index 5b78727fdd..e625334040 100644 --- a/src/librustc_passes/ast_validation.rs +++ b/src/librustc_passes/ast_validation.rs @@ -14,7 +14,7 @@ use rustc::session::Session; use rustc_data_structures::fx::FxHashMap; use syntax::ast::*; use syntax::attr; -use 
syntax::ext::proc_macro::is_proc_macro_attr; +use syntax::expand::is_proc_macro_attr; use syntax::feature_gate::is_builtin_attr; use syntax::source_map::Spanned; use syntax::symbol::{kw, sym}; @@ -73,6 +73,8 @@ struct AstValidator<'a> { /// these booleans. warning_period_57979_didnt_record_next_impl_trait: bool, warning_period_57979_impl_trait_in_proj: bool, + + lint_buffer: &'a mut lint::LintBuffer, } impl<'a> AstValidator<'a> { @@ -107,7 +109,7 @@ impl<'a> AstValidator<'a> { // rust-lang/rust#57979: bug in old `visit_generic_args` called // `walk_ty` rather than `visit_ty`, skipping outer `impl Trait` // if it happened to occur at `ty`. - if let TyKind::ImplTrait(..) = ty.node { + if let TyKind::ImplTrait(..) = ty.kind { self.warning_period_57979_didnt_record_next_impl_trait = true; } } @@ -126,7 +128,7 @@ impl<'a> AstValidator<'a> { // rust-lang/rust#57979: bug in old `visit_generic_args` called // `walk_ty` rather than `visit_ty`, skippping outer `impl Trait` // if it happened to occur at `ty`. - if let TyKind::ImplTrait(..) = ty.node { + if let TyKind::ImplTrait(..) = ty.kind { self.warning_period_57979_didnt_record_next_impl_trait = true; } self.visit_ty(ty); @@ -149,7 +151,7 @@ impl<'a> AstValidator<'a> { // Mirrors `visit::walk_ty`, but tracks relevant state. fn walk_ty(&mut self, t: &'a Ty) { - match t.node { + match t.kind { TyKind::ImplTrait(..) => { let outer_impl_trait = self.outer_impl_trait(t.span); self.with_impl_trait(Some(outer_impl_trait), |this| visit::walk_ty(this, t)) @@ -229,9 +231,9 @@ impl<'a> AstValidator<'a> { err.emit(); } - fn check_decl_no_pat(&self, decl: &FnDecl, report_err: ReportFn) { + fn check_decl_no_pat(decl: &FnDecl, mut report_err: F) { for arg in &decl.inputs { - match arg.pat.node { + match arg.pat.kind { PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), _, None) | PatKind::Wild => {} PatKind::Ident(BindingMode::ByValue(Mutability::Mutable), _, None) => @@ -263,7 +265,8 @@ impl<'a> AstValidator<'a> { let mut err = self.err_handler().struct_span_err(poly.span, &format!("`?Trait` is not permitted in {}", where_)); if is_trait { - err.note(&format!("traits are `?{}` by default", poly.trait_ref.path)); + let path_str = pprust::path_to_string(&poly.trait_ref.path); + err.note(&format!("traits are `?{}` by default", path_str)); } err.emit(); } @@ -286,11 +289,11 @@ impl<'a> AstValidator<'a> { // m!(S); // ``` fn check_expr_within_pat(&self, expr: &Expr, allow_paths: bool) { - match expr.node { + match expr.kind { ExprKind::Lit(..) | ExprKind::Err => {} ExprKind::Path(..) 
if allow_paths => {} ExprKind::Unary(UnOp::Neg, ref inner) - if match inner.node { ExprKind::Lit(_) => true, _ => false } => {} + if match inner.kind { ExprKind::Lit(_) => true, _ => false } => {} _ => self.err_handler().span_err(expr.span, "arbitrary expressions aren't allowed \ in patterns") } @@ -442,7 +445,7 @@ fn validate_generics_order<'a>( impl<'a> Visitor<'a> for AstValidator<'a> { fn visit_expr(&mut self, expr: &'a Expr) { - match &expr.node { + match &expr.kind { ExprKind::Closure(_, _, _, fn_decl, _, _) => { self.check_fn_decl(fn_decl); } @@ -456,10 +459,10 @@ impl<'a> Visitor<'a> for AstValidator<'a> { } fn visit_ty(&mut self, ty: &'a Ty) { - match ty.node { + match ty.kind { TyKind::BareFn(ref bfty) => { self.check_fn_decl(&bfty.decl); - self.check_decl_no_pat(&bfty.decl, |span, _| { + Self::check_decl_no_pat(&bfty.decl, |span, _| { struct_span_err!(self.session, span, E0561, "patterns aren't allowed in function pointer types").emit(); }); @@ -482,7 +485,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> { TyKind::ImplTrait(_, ref bounds) => { if self.is_impl_trait_banned { if self.warning_period_57979_impl_trait_in_proj { - self.session.buffer_lint( + self.lint_buffer.buffer_lint( NESTED_IMPL_TRAIT, ty.id, ty.span, "`impl Trait` is not allowed in path parameters"); } else { @@ -493,7 +496,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> { if let Some(outer_impl_trait) = self.outer_impl_trait { if outer_impl_trait.should_warn_instead_of_error() { - self.session.buffer_lint_with_diagnostic( + self.lint_buffer.buffer_lint_with_diagnostic( NESTED_IMPL_TRAIT, ty.id, ty.span, "nested `impl Trait` is not allowed", BuiltinLintDiagnostics::NestedImplTrait { @@ -538,10 +541,10 @@ impl<'a> Visitor<'a> for AstValidator<'a> { self.has_proc_macro_decls = true; } - match item.node { + match item.kind { ItemKind::Impl(unsafety, polarity, _, _, Some(..), ref ty, ref impl_items) => { self.invalid_visibility(&item.vis, None); - if let TyKind::Err = ty.node { + if let TyKind::Err = ty.kind { self.err_handler() .struct_span_err(item.span, "`impl Trait for .. 
{}` is an obsolete syntax") .help("use `auto trait Trait {}` instead").emit(); @@ -551,7 +554,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> { } for impl_item in impl_items { self.invalid_visibility(&impl_item.vis, None); - if let ImplItemKind::Method(ref sig, _) = impl_item.node { + if let ImplItemKind::Method(ref sig, _) = impl_item.kind { self.check_trait_fn_not_const(sig.header.constness); self.check_trait_fn_not_async(impl_item.span, sig.header.asyncness.node); } @@ -628,14 +631,14 @@ impl<'a> Visitor<'a> for AstValidator<'a> { } self.no_questions_in_bounds(bounds, "supertraits", true); for trait_item in trait_items { - if let TraitItemKind::Method(ref sig, ref block) = trait_item.node { + if let TraitItemKind::Method(ref sig, ref block) = trait_item.kind { self.check_fn_decl(&sig.decl); self.check_trait_fn_not_async(trait_item.span, sig.header.asyncness.node); self.check_trait_fn_not_const(sig.header.constness); if block.is_none() { - self.check_decl_no_pat(&sig.decl, |span, mut_ident| { + Self::check_decl_no_pat(&sig.decl, |span, mut_ident| { if mut_ident { - self.session.buffer_lint( + self.lint_buffer.buffer_lint( lint::builtin::PATTERNS_IN_FNS_WITHOUT_BODY, trait_item.id, span, "patterns aren't allowed in methods without bodies"); @@ -654,7 +657,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> { if attr::contains_name(&item.attrs, sym::warn_directory_ownership) { let lint = lint::builtin::LEGACY_DIRECTORY_OWNERSHIP; let msg = "cannot declare a new module at this location"; - self.session.buffer_lint(lint, item.id, item.span, msg); + self.lint_buffer.buffer_lint(lint, item.id, item.span, msg); } } ItemKind::Union(ref vdata, _) => { @@ -682,10 +685,10 @@ impl<'a> Visitor<'a> for AstValidator<'a> { } fn visit_foreign_item(&mut self, fi: &'a ForeignItem) { - match fi.node { + match fi.kind { ForeignItemKind::Fn(ref decl, _) => { self.check_fn_decl(decl); - self.check_decl_no_pat(decl, |span, _| { + Self::check_decl_no_pat(decl, |span, _| { struct_span_err!(self.session, span, E0130, "patterns aren't allowed in foreign function declarations") .span_label(span, "pattern not allowed in foreign function").emit(); @@ -786,7 +789,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> { } fn visit_pat(&mut self, pat: &'a Pat) { - match pat.node { + match pat.kind { PatKind::Lit(ref expr) => { self.check_expr_within_pat(expr, false); } @@ -832,17 +835,14 @@ impl<'a> Visitor<'a> for AstValidator<'a> { } fn visit_impl_item(&mut self, ii: &'a ImplItem) { - match ii.node { - ImplItemKind::Method(ref sig, _) => { - self.check_fn_decl(&sig.decl); - } - _ => {} + if let ImplItemKind::Method(ref sig, _) = ii.kind { + self.check_fn_decl(&sig.decl); } visit::walk_impl_item(self, ii); } } -pub fn check_crate(session: &Session, krate: &Crate) -> bool { +pub fn check_crate(session: &Session, krate: &Crate, lints: &mut lint::LintBuffer) -> bool { let mut validator = AstValidator { session, has_proc_macro_decls: false, @@ -851,6 +851,7 @@ pub fn check_crate(session: &Session, krate: &Crate) -> bool { is_assoc_ty_bound_banned: false, warning_period_57979_didnt_record_next_impl_trait: false, warning_period_57979_impl_trait_in_proj: false, + lint_buffer: lints, }; visit::walk_crate(&mut validator, krate); diff --git a/src/librustc/middle/dead.rs b/src/librustc_passes/dead.rs similarity index 93% rename from src/librustc/middle/dead.rs rename to src/librustc_passes/dead.rs index d4805a7c78..f2aef2c12c 100644 --- a/src/librustc/middle/dead.rs +++ b/src/librustc_passes/dead.rs @@ -2,18 +2,18 @@ // closely. 
The idea is that all reachable symbols are live, codes called // from live codes are live, and everything else is dead. -use crate::hir::Node; -use crate::hir::{self, PatKind, TyKind}; -use crate::hir::intravisit::{self, Visitor, NestedVisitorMap}; -use crate::hir::itemlikevisit::ItemLikeVisitor; +use rustc::hir::Node; +use rustc::hir::{self, PatKind, TyKind}; +use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap}; +use rustc::hir::itemlikevisit::ItemLikeVisitor; -use crate::hir::def::{CtorOf, Res, DefKind}; -use crate::hir::CodegenFnAttrFlags; -use crate::hir::def_id::{DefId, LOCAL_CRATE}; -use crate::lint; -use crate::middle::privacy; -use crate::ty::{self, DefIdTree, TyCtxt}; -use crate::util::nodemap::FxHashSet; +use rustc::hir::def::{CtorOf, Res, DefKind}; +use rustc::hir::CodegenFnAttrFlags; +use rustc::hir::def_id::{DefId, LOCAL_CRATE}; +use rustc::lint; +use rustc::middle::privacy; +use rustc::ty::{self, DefIdTree, TyCtxt}; +use rustc::util::nodemap::FxHashSet; use rustc_data_structures::fx::FxHashMap; @@ -117,7 +117,7 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> { } fn handle_field_access(&mut self, lhs: &hir::Expr, hir_id: hir::HirId) { - match self.tables.expr_ty_adjusted(lhs).sty { + match self.tables.expr_ty_adjusted(lhs).kind { ty::Adt(def, _) => { let index = self.tcx.field_index(hir_id, self.tables); self.insert_def_id(def.non_enum_variant().fields[index].did); @@ -128,12 +128,12 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> { } fn handle_field_pattern_match(&mut self, lhs: &hir::Pat, res: Res, pats: &[hir::FieldPat]) { - let variant = match self.tables.node_type(lhs.hir_id).sty { + let variant = match self.tables.node_type(lhs.hir_id).kind { ty::Adt(adt, _) => adt.variant_of_res(res), _ => span_bug!(lhs.span, "non-ADT in struct pattern") }; for pat in pats { - if let PatKind::Wild = pat.pat.node { + if let PatKind::Wild = pat.pat.kind { continue; } let index = self.tcx.field_index(pat.hir_id, self.tables); @@ -166,7 +166,7 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> { self.inherited_pub_visibility = false; match node { Node::Item(item) => { - match item.node { + match item.kind { hir::ItemKind::Struct(..) | hir::ItemKind::Union(..) => { let def_id = self.tcx.hir().local_def_id(item.hir_id); let def = self.tcx.adt_def(def_id); @@ -236,7 +236,7 @@ impl<'a, 'tcx> Visitor<'tcx> for MarkSymbolVisitor<'a, 'tcx> { } fn visit_expr(&mut self, expr: &'tcx hir::Expr) { - match expr.node { + match expr.kind { hir::ExprKind::Path(ref qpath @ hir::QPath::TypeRelative(..)) => { let res = self.tables.qpath_res(qpath, expr.hir_id); self.handle_res(res); @@ -248,7 +248,7 @@ impl<'a, 'tcx> Visitor<'tcx> for MarkSymbolVisitor<'a, 'tcx> { self.handle_field_access(&lhs, expr.hir_id); } hir::ExprKind::Struct(_, ref fields, _) => { - if let ty::Adt(ref adt, _) = self.tables.expr_ty(expr).sty { + if let ty::Adt(ref adt, _) = self.tables.expr_ty(expr).kind { self.mark_as_used_if_union(adt, fields); } } @@ -259,23 +259,17 @@ impl<'a, 'tcx> Visitor<'tcx> for MarkSymbolVisitor<'a, 'tcx> { } fn visit_arm(&mut self, arm: &'tcx hir::Arm) { - if arm.pats.len() == 1 { - let variants = arm.pats[0].necessary_variants(); - - // Inside the body, ignore constructions of variants - // necessary for the pattern to match. Those construction sites - // can't be reached unless the variant is constructed elsewhere. 
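The `ignore_variant_stack` handling above encodes the rule spelled out in the comment: building a variant inside an arm that can only be reached by matching that same variant does not, by itself, make the variant live. A hedged, standalone example of the behaviour this is meant to preserve (the enum and function names are made up, and the exact lint wording may differ):

```rust
// Illustration only: `Msg::Ping` is re-built solely inside the arm that matches
// it, so dead-code analysis is still expected to report the variant as unused.
enum Msg {
    Ping, // expected `dead_code` warning: variant is never constructed
    Pong,
}

fn echo(m: Msg) -> Msg {
    match m {
        Msg::Ping => Msg::Ping, // construction site ignored while in this arm
        Msg::Pong => Msg::Pong,
    }
}

fn main() {
    let _ = echo(Msg::Pong);
}
```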
- let len = self.ignore_variant_stack.len(); - self.ignore_variant_stack.extend_from_slice(&variants); - intravisit::walk_arm(self, arm); - self.ignore_variant_stack.truncate(len); - } else { - intravisit::walk_arm(self, arm); - } + // Inside the body, ignore constructions of variants + // necessary for the pattern to match. Those construction sites + // can't be reached unless the variant is constructed elsewhere. + let len = self.ignore_variant_stack.len(); + self.ignore_variant_stack.extend(arm.pat.necessary_variants()); + intravisit::walk_arm(self, arm); + self.ignore_variant_stack.truncate(len); } fn visit_pat(&mut self, pat: &'tcx hir::Pat) { - match pat.node { + match pat.kind { PatKind::Struct(ref path, ref fields, _) => { let res = self.tables.qpath_res(path, pat.hir_id); self.handle_field_pattern_match(pat, res, fields); @@ -298,7 +292,7 @@ impl<'a, 'tcx> Visitor<'tcx> for MarkSymbolVisitor<'a, 'tcx> { } fn visit_ty(&mut self, ty: &'tcx hir::Ty) { - match ty.node { + match ty.kind { TyKind::Def(item_id, _) => { let item = self.tcx.hir().expect_item(item_id.id); intravisit::walk_item(self, item); @@ -375,7 +369,7 @@ impl<'v, 'k, 'tcx> ItemLikeVisitor<'v> for LifeSeeder<'k, 'tcx> { if allow_dead_code { self.worklist.push(item.hir_id); } - match item.node { + match item.kind { hir::ItemKind::Enum(ref enum_def, _) => { if allow_dead_code { self.worklist.extend(enum_def.variants.iter().map(|variant| variant.id)); @@ -390,7 +384,7 @@ impl<'v, 'k, 'tcx> ItemLikeVisitor<'v> for LifeSeeder<'k, 'tcx> { hir::ItemKind::Trait(.., ref trait_item_refs) => { for trait_item_ref in trait_item_refs { let trait_item = self.krate.trait_item(trait_item_ref.id); - match trait_item.node { + match trait_item.kind { hir::TraitItemKind::Const(_, Some(_)) | hir::TraitItemKind::Method(_, hir::TraitMethod::Provided(_)) => { if has_allow_dead_code_or_lang_attr(self.tcx, @@ -488,7 +482,7 @@ struct DeadVisitor<'tcx> { impl DeadVisitor<'tcx> { fn should_warn_about_item(&mut self, item: &hir::Item) -> bool { - let should_warn = match item.node { + let should_warn = match item.kind { hir::ItemKind::Static(..) | hir::ItemKind::Const(..) | hir::ItemKind::Fn(..) @@ -577,7 +571,7 @@ impl Visitor<'tcx> for DeadVisitor<'tcx> { if self.should_warn_about_item(item) { // For items that have a definition with a signature followed by a // block, point only at the signature. - let span = match item.node { + let span = match item.kind { hir::ItemKind::Fn(..) | hir::ItemKind::Mod(..) | hir::ItemKind::Enum(..) | @@ -587,7 +581,7 @@ impl Visitor<'tcx> for DeadVisitor<'tcx> { hir::ItemKind::Impl(..) => self.tcx.sess.source_map().def_span(item.span), _ => item.span, }; - let participle = match item.node { + let participle = match item.kind { hir::ItemKind::Struct(..) 
=> "constructed", // Issue #52325 _ => "used" }; @@ -595,7 +589,7 @@ impl Visitor<'tcx> for DeadVisitor<'tcx> { item.hir_id, span, item.ident.name, - item.node.descriptive_variant(), + item.kind.descriptive_variant(), participle, ); } else { @@ -619,7 +613,7 @@ impl Visitor<'tcx> for DeadVisitor<'tcx> { fn visit_foreign_item(&mut self, fi: &'tcx hir::ForeignItem) { if self.should_warn_about_foreign_item(fi) { self.warn_dead_code(fi.hir_id, fi.span, fi.ident.name, - fi.node.descriptive_variant(), "used"); + fi.kind.descriptive_variant(), "used"); } intravisit::walk_foreign_item(self, fi); } @@ -632,7 +626,7 @@ impl Visitor<'tcx> for DeadVisitor<'tcx> { } fn visit_impl_item(&mut self, impl_item: &'tcx hir::ImplItem) { - match impl_item.node { + match impl_item.kind { hir::ImplItemKind::Const(_, body_id) => { if !self.symbol_is_live(impl_item.hir_id) { self.warn_dead_code(impl_item.hir_id, @@ -658,7 +652,7 @@ impl Visitor<'tcx> for DeadVisitor<'tcx> { // Overwrite so that we don't warn the trait item itself. fn visit_trait_item(&mut self, trait_item: &'tcx hir::TraitItem) { - match trait_item.node { + match trait_item.kind { hir::TraitItemKind::Const(_, Some(body_id)) | hir::TraitItemKind::Method(_, hir::TraitMethod::Provided(body_id)) => { self.visit_nested_body(body_id) diff --git a/src/librustc/middle/entry.rs b/src/librustc_passes/entry.rs similarity index 90% rename from src/librustc/middle/entry.rs rename to src/librustc_passes/entry.rs index ba27d332e4..6600471906 100644 --- a/src/librustc/middle/entry.rs +++ b/src/librustc_passes/entry.rs @@ -1,15 +1,15 @@ -use crate::hir::map as hir_map; -use crate::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId, LOCAL_CRATE}; -use crate::session::{config, Session}; -use crate::session::config::EntryFnType; +use rustc::hir::map as hir_map; +use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId, LOCAL_CRATE}; +use rustc::session::{config, Session}; +use rustc::session::config::EntryFnType; use syntax::attr; use syntax::entry::EntryPointType; use syntax::symbol::sym; use syntax_pos::Span; -use crate::hir::{HirId, Item, ItemKind, ImplItem, TraitItem}; -use crate::hir::itemlikevisit::ItemLikeVisitor; -use crate::ty::TyCtxt; -use crate::ty::query::Providers; +use rustc::hir::{HirId, Item, ItemKind, ImplItem, TraitItem}; +use rustc::hir::itemlikevisit::ItemLikeVisitor; +use rustc::ty::TyCtxt; +use rustc::ty::query::Providers; struct EntryContext<'a, 'tcx> { session: &'a Session, @@ -80,7 +80,7 @@ fn entry_fn(tcx: TyCtxt<'_>, cnum: CrateNum) -> Option<(DefId, EntryFnType)> { // Beware, this is duplicated in `libsyntax/entry.rs`, so make sure to keep // them in sync. fn entry_point_type(item: &Item, at_root: bool) -> EntryPointType { - match item.node { + match item.kind { ItemKind::Fn(..) => { if attr::contains_name(&item.attrs, sym::start) { EntryPointType::Start @@ -154,6 +154,14 @@ fn configure_main(tcx: TyCtxt<'_>, visitor: &EntryContext<'_, '_>) -> Option<(De } fn no_main_err(tcx: TyCtxt<'_>, visitor: &EntryContext<'_, '_>) { + let sp = tcx.hir().krate().span; + if *tcx.sess.parse_sess.reached_eof.borrow() { + // There's an unclosed brace that made the parser reach `Eof`, we shouldn't complain about + // the missing `fn main()` then as it might have been hidden inside an unclosed block. + tcx.sess.delay_span_bug(sp, "`main` not found, but expected unclosed brace error"); + return; + } + // There is no main function. 
let mut err = struct_err!(tcx.sess, E0601, "`main` function not found in crate `{}`", tcx.crate_name(LOCAL_CRATE)); @@ -173,7 +181,6 @@ fn no_main_err(tcx: TyCtxt<'_>, visitor: &EntryContext<'_, '_>) { } else { String::from("consider adding a `main` function at the crate level") }; - let sp = tcx.hir().krate().span; // The file may be empty, which leads to the diagnostic machinery not emitting this // note. This is a relatively simple way to detect that case and emit a span-less // note instead. diff --git a/src/librustc_passes/error_codes.rs b/src/librustc_passes/error_codes.rs index af07c790e2..e22e69a069 100644 --- a/src/librustc_passes/error_codes.rs +++ b/src/librustc_passes/error_codes.rs @@ -1,12 +1,15 @@ syntax::register_diagnostics! { -/* E0014: r##" +#### Note: this error code is no longer emitted by the compiler. + Constants can only be initialized by a constant value or, in a future version of Rust, a call to a const function. This error indicates the use of a path (like a::b, or x) denoting something other than one of these -allowed items. Erroneous code xample: +allowed items. -```compile_fail +Erroneous code example: + +``` const FOO: i32 = { let x = 0; x }; // 'x' isn't a constant nor a function! ``` @@ -18,10 +21,10 @@ const FOO: i32 = { const X : i32 = 0; X }; const FOO2: i32 = { 0 }; // but brackets are useless here ``` "##, -*/ E0130: r##" You declared a pattern as an argument in a foreign function declaration. + Erroneous code example: ```compile_fail @@ -53,6 +56,81 @@ extern { ``` "##, +// This shouldn't really ever trigger since the repeated value error comes first +E0136: r##" +A binary can only have one entry point, and by default that entry point is the +function `main()`. If there are multiple such functions, please rename one. + +Erroneous code example: + +```compile_fail,E0136 +fn main() { + // ... +} + +// ... + +fn main() { // error! + // ... +} +``` +"##, + +E0137: r##" +More than one function was declared with the `#[main]` attribute. + +Erroneous code example: + +```compile_fail,E0137 +#![feature(main)] + +#[main] +fn foo() {} + +#[main] +fn f() {} // error: multiple functions with a `#[main]` attribute +``` + +This error indicates that the compiler found multiple functions with the +`#[main]` attribute. This is an error because there must be a unique entry +point into a Rust program. Example: + +``` +#![feature(main)] + +#[main] +fn f() {} // ok! +``` +"##, + +E0138: r##" +More than one function was declared with the `#[start]` attribute. + +Erroneous code example: + +```compile_fail,E0138 +#![feature(start)] + +#[start] +fn foo(argc: isize, argv: *const *const u8) -> isize {} + +#[start] +fn f(argc: isize, argv: *const *const u8) -> isize {} +// error: multiple 'start' functions +``` + +This error indicates that the compiler found multiple functions with the +`#[start]` attribute. This is an error because there must be a unique entry +point into a Rust program. Example: + +``` +#![feature(start)] + +#[start] +fn foo(argc: isize, argv: *const *const u8) -> isize { 0 } // ok! +``` +"##, + E0197: r##" Inherent implementations (one that do not implement a trait but provide methods associated with a type) are always safe because they are not @@ -198,20 +276,115 @@ impl Foo for Bar { ``` "##, +E0512: r##" +Transmute with two differently sized types was attempted. Erroneous code +example: -E0590: r##" -`break` or `continue` must include a label when used in the condition of a -`while` loop. 
+```compile_fail,E0512 +fn takes_u8(_: u8) {} -Example of erroneous code: - -```compile_fail -while break {} +fn main() { + unsafe { takes_u8(::std::mem::transmute(0u16)); } + // error: cannot transmute between types of different sizes, + // or dependently-sized types +} ``` -To fix this, add a label specifying which loop is being broken out of: +Please use types with the same size or use the expected type directly. Example: + ``` -'foo: while break 'foo {} +fn takes_u8(_: u8) {} + +fn main() { + unsafe { takes_u8(::std::mem::transmute(0i8)); } // ok! + // or: + unsafe { takes_u8(0u8); } // ok! +} +``` +"##, + +E0561: r##" +A non-ident or non-wildcard pattern has been used as a parameter of a function +pointer type. + +Erroneous code example: + +```compile_fail,E0561 +type A1 = fn(mut param: u8); // error! +type A2 = fn(&param: u32); // error! +``` + +When using an alias over a function type, you cannot e.g. denote a parameter as +being mutable. + +To fix the issue, remove patterns (`_` is allowed though). Example: + +``` +type A1 = fn(param: u8); // ok! +type A2 = fn(_: u32); // ok! +``` + +You can also omit the parameter name: + +``` +type A3 = fn(i16); // ok! +``` +"##, + +E0567: r##" +Generics have been used on an auto trait. + +Erroneous code example: + +```compile_fail,E0567 +#![feature(optin_builtin_traits)] + +auto trait Generic<T> {} // error! + +fn main() {} +``` + +Since an auto trait is implemented on all existing types, the +compiler would not be able to infer the types of the trait's generic +parameters. + +To fix this issue, just remove the generics: + +``` +#![feature(optin_builtin_traits)] + +auto trait Generic {} // ok! + +fn main() {} +``` +"##, + +E0568: r##" +A super trait has been added to an auto trait. + +Erroneous code example: + +```compile_fail,E0568 +#![feature(optin_builtin_traits)] + +auto trait Bound : Copy {} // error! + +fn main() {} +``` + +Since an auto trait is implemented on all existing types, adding a super trait +would filter out a lot of those types. In the current example, almost none of +all the existing types could implement `Bound` because very few of them have the +`Copy` trait. + +To fix this issue, just remove the super trait: + +``` +#![feature(optin_builtin_traits)] + +auto trait Bound {} // ok! + +fn main() {} ``` "##, @@ -249,6 +422,115 @@ let result = loop { // ok! ``` "##, +E0590: r##" +`break` or `continue` must include a label when used in the condition of a +`while` loop. + +Example of erroneous code: + +```compile_fail +while break {} +``` + +To fix this, add a label specifying which loop is being broken out of: +``` +'foo: while break 'foo {} +``` +"##, + +E0591: r##" +Per [RFC 401][rfc401], if you have a function declaration `foo`: + +``` +// For the purposes of this explanation, all of these +// different kinds of `fn` declarations are equivalent: +struct S; +fn foo(x: S) { /* ... */ } +# #[cfg(for_demonstration_only)] +extern "C" { fn foo(x: S); } +# #[cfg(for_demonstration_only)] +impl S { fn foo(self) { /* ... */ } } +``` + +the type of `foo` is **not** `fn(S)`, as one might expect. +Rather, it is a unique, zero-sized marker type written here as `typeof(foo)`. +However, `typeof(foo)` can be _coerced_ to a function pointer `fn(S)`, +so you rarely notice this: + +``` +# struct S; +# fn foo(_: S) {} +let x: fn(S) = foo; // OK, coerces +``` + +The reason that this matters is that the type `fn(S)` is not specific to +any particular function: it's a function _pointer_.
So calling `x()` results +in a virtual call, whereas `foo()` is statically dispatched, because the type +of `foo` tells us precisely what function is being called. + +As noted above, coercions mean that most code doesn't have to be +concerned with this distinction. However, you can tell the difference +when using **transmute** to convert a fn item into a fn pointer. + +This is sometimes done as part of an FFI: + +```compile_fail,E0591 +extern "C" fn foo(userdata: Box<i32>) { + /* ... */ +} + +# fn callback(_: extern "C" fn(*mut i32)) {} +# use std::mem::transmute; +# unsafe { +let f: extern "C" fn(*mut i32) = transmute(foo); +callback(f); +# } +``` + +Here, transmute is being used to convert the types of the fn arguments. +This pattern is incorrect because the type of `foo` is a function +**item** (`typeof(foo)`), which is zero-sized, and the target type (`fn()`) +is a function pointer, which is not zero-sized. +This pattern should be rewritten. There are a few possible ways to do this: + +- change the original fn declaration to match the expected signature, + and do the cast in the fn body (the preferred option) +- cast the fn item to a fn pointer before calling transmute, as shown here: + + ``` + # extern "C" fn foo(_: Box<i32>) {} + # use std::mem::transmute; + # unsafe { + let f: extern "C" fn(*mut i32) = transmute(foo as extern "C" fn(_)); + let f: extern "C" fn(*mut i32) = transmute(foo as usize); // works too + # } + ``` + +The same applies to transmutes to `*mut fn()`, which were observed in practice. +Note though that use of this type is generally incorrect. +The intention is typically to describe a function pointer, but just `fn()` +alone suffices for that. `*mut fn()` is a pointer to a fn pointer. +(Since these values are typically just passed to C code, however, this rarely +makes a difference in practice.) + +[rfc401]: https://github.com/rust-lang/rfcs/blob/master/text/0401-coercions.md +"##, + +E0601: r##" +No `main` function was found in a binary crate. To fix this error, add a +`main` function. For example: + +``` +fn main() { + // Your program will start here. + println!("Hello world!"); +} +``` + +If you don't know the basics of Rust, you can look at the Rust Book to get +started: https://doc.rust-lang.org/book/ +"##, + E0642: r##" Trait methods currently cannot take patterns as arguments. @@ -270,6 +552,30 @@ trait Foo { ``` "##, +E0666: r##" +`impl Trait` types cannot appear nested in the +generic arguments of other `impl Trait` types. + +Example of erroneous code: + +```compile_fail,E0666 +trait MyGenericTrait<T> {} +trait MyInnerTrait {} + +fn foo(bar: impl MyGenericTrait<impl MyInnerTrait>) {} +``` + +Type parameters for `impl Trait` types must be +explicitly defined as named generic parameters: + +``` +trait MyGenericTrait<T> {} +trait MyInnerTrait {} + +fn foo<T: MyInnerTrait>(bar: impl MyGenericTrait<T>) {} +``` +"##, + E0695: r##" A `break` statement without a label appeared inside a labeled block. @@ -319,13 +625,10 @@ async fn foo() {} Switch to the Rust 2018 edition to use `async fn`. "##, + ; E0226, // only a single explicit lifetime bound is permitted E0472, // asm!
is unsupported on this target - E0561, // patterns aren't allowed in function pointer types - E0567, // auto traits can not have generic parameters - E0568, // auto traits can not have super traits - E0666, // nested `impl Trait` is illegal E0667, // `impl Trait` in projections E0696, // `continue` pointing to a labeled block E0706, // `async fn` in trait diff --git a/src/librustc/middle/intrinsicck.rs b/src/librustc_passes/intrinsicck.rs similarity index 91% rename from src/librustc/middle/intrinsicck.rs rename to src/librustc_passes/intrinsicck.rs index 1cc96c549e..91a7e9f5d7 100644 --- a/src/librustc/middle/intrinsicck.rs +++ b/src/librustc_passes/intrinsicck.rs @@ -1,14 +1,14 @@ -use crate::hir::def::{Res, DefKind}; -use crate::hir::def_id::DefId; -use crate::ty::{self, Ty, TyCtxt}; -use crate::ty::layout::{LayoutError, Pointer, SizeSkeleton, VariantIdx}; -use crate::ty::query::Providers; +use rustc::hir::def::{Res, DefKind}; +use rustc::hir::def_id::DefId; +use rustc::ty::{self, Ty, TyCtxt}; +use rustc::ty::layout::{LayoutError, Pointer, SizeSkeleton, VariantIdx}; +use rustc::ty::query::Providers; use rustc_target::spec::abi::Abi::RustIntrinsic; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::vec::Idx; use syntax_pos::{Span, sym}; -use crate::hir::intravisit::{self, Visitor, NestedVisitorMap}; -use crate::hir; +use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap}; +use rustc::hir; fn check_mod_intrinsics(tcx: TyCtxt<'_>, module_def_id: DefId) { tcx.hir().visit_item_likes_in_module( @@ -37,7 +37,7 @@ struct ExprVisitor<'tcx> { /// If the type is `Option`, it will return `T`, otherwise /// the type itself. Works on most `Option`-like types. fn unpack_option_like<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { - let (def, substs) = match ty.sty { + let (def, substs) = match ty.kind { ty::Adt(def, substs) => (def, substs), _ => return ty }; @@ -82,8 +82,8 @@ impl ExprVisitor<'tcx> { // Special-case transmutting from `typeof(function)` and // `Option` to present a clearer error. - let from = unpack_option_like(self.tcx.global_tcx(), from); - if let (&ty::FnDef(..), SizeSkeleton::Known(size_to)) = (&from.sty, sk_to) { + let from = unpack_option_like(self.tcx, from); + if let (&ty::FnDef(..), SizeSkeleton::Known(size_to)) = (&from.kind, sk_to) { if size_to == Pointer.size(&self.tcx) { struct_span_err!(self.tcx.sess, span, E0591, "can't transmute zero-sized type") @@ -150,7 +150,7 @@ impl Visitor<'tcx> for ExprVisitor<'tcx> { } fn visit_expr(&mut self, expr: &'tcx hir::Expr) { - let res = if let hir::ExprKind::Path(ref qpath) = expr.node { + let res = if let hir::ExprKind::Path(ref qpath) = expr.kind { self.tables.qpath_res(qpath, expr.hir_id) } else { Res::Err diff --git a/src/librustc_passes/layout_test.rs b/src/librustc_passes/layout_test.rs index 45a185dccf..06683c16e4 100644 --- a/src/librustc_passes/layout_test.rs +++ b/src/librustc_passes/layout_test.rs @@ -31,7 +31,7 @@ impl ItemLikeVisitor<'tcx> for VarianceTest<'tcx> { fn visit_item(&mut self, item: &'tcx hir::Item) { let item_def_id = self.tcx.hir().local_def_id(item.hir_id); - if let ItemKind::TyAlias(..) = item.node { + if let ItemKind::TyAlias(..) 
= item.kind { for attr in self.tcx.get_attrs(item_def_id).iter() { if attr.check_name(sym::rustc_layout) { self.dump_layout_of(item_def_id, item, attr); diff --git a/src/librustc_passes/lib.rs b/src/librustc_passes/lib.rs index a5a8315a1e..db59d8e101 100644 --- a/src/librustc_passes/lib.rs +++ b/src/librustc_passes/lib.rs @@ -8,24 +8,32 @@ #![feature(in_band_lifetimes)] #![feature(nll)] -#![cfg_attr(bootstrap, feature(bind_by_move_pattern_guards))] #![recursion_limit="256"] #[macro_use] extern crate rustc; +#[macro_use] +extern crate log; +#[macro_use] +extern crate syntax; use rustc::ty::query::Providers; pub mod error_codes; pub mod ast_validation; -pub mod rvalue_promotion; pub mod hir_stats; pub mod layout_test; pub mod loops; +pub mod dead; +pub mod entry; +mod liveness; +mod intrinsicck; pub fn provide(providers: &mut Providers<'_>) { - rvalue_promotion::provide(providers); + entry::provide(providers); loops::provide(providers); + liveness::provide(providers); + intrinsicck::provide(providers); } diff --git a/src/librustc/middle/liveness.rs b/src/librustc_passes/liveness.rs similarity index 85% rename from src/librustc/middle/liveness.rs rename to src/librustc_passes/liveness.rs index 00013bfc57..fb06808619 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc_passes/liveness.rs @@ -96,29 +96,29 @@ use self::LiveNodeKind::*; use self::VarKind::*; -use crate::hir::def::*; -use crate::hir::Node; -use crate::hir::ptr::P; -use crate::ty::{self, TyCtxt}; -use crate::ty::query::Providers; -use crate::lint; -use crate::util::nodemap::{HirIdMap, HirIdSet}; +use rustc::hir; +use rustc::hir::{Expr, HirId}; +use rustc::hir::def::*; +use rustc::hir::def_id::DefId; +use rustc::hir::intravisit::{self, Visitor, FnKind, NestedVisitorMap}; +use rustc::hir::Node; +use rustc::hir::ptr::P; +use rustc::ty::{self, TyCtxt}; +use rustc::ty::query::Providers; +use rustc::lint; +use rustc::util::nodemap::{HirIdMap, HirIdSet}; use errors::Applicability; -use std::collections::{BTreeMap, VecDeque}; +use rustc_data_structures::fx::FxIndexMap; +use std::collections::VecDeque; use std::{fmt, u32}; use std::io::prelude::*; use std::io; use std::rc::Rc; use syntax::ast; -use syntax::symbol::{kw, sym}; +use syntax::symbol::sym; use syntax_pos::Span; -use crate::hir; -use crate::hir::{Expr, HirId}; -use crate::hir::def_id::DefId; -use crate::hir::intravisit::{self, Visitor, FnKind, NestedVisitorMap}; - #[derive(Copy, Clone, PartialEq)] struct Variable(u32); @@ -372,8 +372,8 @@ fn visit_fn<'tcx>( let body = ir.tcx.hir().body(body_id); for param in &body.params { - let is_shorthand = match param.pat.node { - crate::hir::PatKind::Struct(..) => true, + let is_shorthand = match param.pat.kind { + rustc::hir::PatKind::Struct(..) => true, _ => false, }; param.pat.each_binding(|_bm, hir_id, _x, ident| { @@ -404,34 +404,29 @@ fn visit_fn<'tcx>( lsets.warn_about_unused_args(body, entry_ln); } -fn add_from_pat<'tcx>(ir: &mut IrMaps<'tcx>, pat: &P) { +fn add_from_pat(ir: &mut IrMaps<'_>, pat: &P) { // For struct patterns, take note of which fields used shorthand // (`x` rather than `x: x`). 
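As the comment above says, `add_from_pat` records which struct-pattern fields used the shorthand form, because the unused-binding suggestion for a shorthand field is later rendered as `field: _` rather than `_name`. A small standalone example of the distinction; the types and names are illustrative only:

```rust
// Illustration only: `x` is a shorthand field pattern (it stands for `x: x`),
// while `y: y_val` is written out explicitly.
struct Point {
    x: i32,
    y: i32,
}

fn main() {
    let p = Point { x: 1, y: 2 };
    let Point { x, y: y_val } = p;
    println!("{} {}", x, y_val);
}
```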
let mut shorthand_field_ids = HirIdSet::default(); let mut pats = VecDeque::new(); pats.push_back(pat); while let Some(pat) = pats.pop_front() { - use crate::hir::PatKind::*; - match pat.node { - Binding(_, _, _, ref inner_pat) => { + use rustc::hir::PatKind::*; + match &pat.kind { + Binding(.., inner_pat) => { pats.extend(inner_pat.iter()); } - Struct(_, ref fields, _) => { - for field in fields { - if field.is_shorthand { - shorthand_field_ids.insert(field.pat.hir_id); - } - } + Struct(_, fields, _) => { + let ids = fields.iter().filter(|f| f.is_shorthand).map(|f| f.pat.hir_id); + shorthand_field_ids.extend(ids); } - Ref(ref inner_pat, _) | - Box(ref inner_pat) => { + Ref(inner_pat, _) | Box(inner_pat) => { pats.push_back(inner_pat); } - TupleStruct(_, ref inner_pats, _) | - Tuple(ref inner_pats, _) => { + TupleStruct(_, inner_pats, _) | Tuple(inner_pats, _) | Or(inner_pats) => { pats.extend(inner_pats.iter()); } - Slice(ref pre_pats, ref inner_pat, ref post_pats) => { + Slice(pre_pats, inner_pat, post_pats) => { pats.extend(pre_pats.iter()); pats.extend(inner_pat.iter()); pats.extend(post_pats.iter()); @@ -440,7 +435,7 @@ fn add_from_pat<'tcx>(ir: &mut IrMaps<'tcx>, pat: &P) { } } - pat.each_binding(|_bm, hir_id, _sp, ident| { + pat.each_binding(|_, hir_id, _, ident| { ir.add_live_node_for_node(hir_id, VarDefNode(ident.span)); ir.add_variable(Local(LocalInfo { id: hir_id, @@ -456,14 +451,12 @@ fn visit_local<'tcx>(ir: &mut IrMaps<'tcx>, local: &'tcx hir::Local) { } fn visit_arm<'tcx>(ir: &mut IrMaps<'tcx>, arm: &'tcx hir::Arm) { - for pat in &arm.pats { - add_from_pat(ir, pat); - } + add_from_pat(ir, &arm.pat); intravisit::walk_arm(ir, arm); } fn visit_expr<'tcx>(ir: &mut IrMaps<'tcx>, expr: &'tcx Expr) { - match expr.node { + match expr.kind { // live nodes required for uses or definitions of variables: hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => { debug!("expr {}: path that leads to {:?}", expr.hir_id, path.res); @@ -734,35 +727,15 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { self.ir.variable(hir_id, span) } - fn pat_bindings(&mut self, pat: &hir::Pat, mut f: F) where - F: FnMut(&mut Liveness<'a, 'tcx>, LiveNode, Variable, Span, HirId), - { - pat.each_binding(|_bm, hir_id, sp, n| { - let ln = self.live_node(hir_id, sp); - let var = self.variable(hir_id, n.span); - f(self, ln, var, n.span, hir_id); - }) - } - - fn arm_pats_bindings(&mut self, pat: Option<&hir::Pat>, f: F) where - F: FnMut(&mut Liveness<'a, 'tcx>, LiveNode, Variable, Span, HirId), - { - if let Some(pat) = pat { - self.pat_bindings(pat, f); - } - } - - fn define_bindings_in_pat(&mut self, pat: &hir::Pat, succ: LiveNode) - -> LiveNode { - self.define_bindings_in_arm_pats(Some(pat), succ) - } - - fn define_bindings_in_arm_pats(&mut self, pat: Option<&hir::Pat>, succ: LiveNode) - -> LiveNode { - let mut succ = succ; - self.arm_pats_bindings(pat, |this, ln, var, _sp, _id| { - this.init_from_succ(ln, succ); - this.define(ln, var); + fn define_bindings_in_pat(&mut self, pat: &hir::Pat, mut succ: LiveNode) -> LiveNode { + // In an or-pattern, only consider the first pattern; any later patterns + // must have the same bindings, and we also consider the first pattern + // to be the "authoritative" set of ids. 
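The comment above leans on the language rule that every alternative of an or-pattern must bind exactly the same names, which is what lets liveness treat the first alternative's bindings as authoritative. A hedged, standalone example:

```rust
// Illustration only: both alternatives bind `v`, so looking at the first
// alternative already covers every binding introduced by the arm.
fn main() {
    let r: Result<u8, u8> = Err(3);
    let v = match r {
        Ok(v) | Err(v) => v,
    };
    // Something like `Ok(v) | Err(_)` would be rejected, because the
    // alternatives would bind different sets of names.
    println!("{}", v);
}
```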
+ pat.each_binding_or_first(&mut |_, hir_id, pat_sp, ident| { + let ln = self.live_node(hir_id, pat_sp); + let var = self.variable(hir_id, ident.span); + self.init_from_succ(ln, succ); + self.define(ln, var); succ = ln; }); succ @@ -974,7 +947,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { fn propagate_through_stmt(&mut self, stmt: &hir::Stmt, succ: LiveNode) -> LiveNode { - match stmt.node { + match stmt.kind { hir::StmtKind::Local(ref local) => { // Note: we mark the variable as defined regardless of whether // there is an initializer. Initially I had thought to only mark @@ -1018,7 +991,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { -> LiveNode { debug!("propagate_through_expr: {}", self.ir.tcx.hir().hir_to_pretty_string(expr.hir_id)); - match expr.node { + match expr.kind { // Interesting cases with control flow or which gen/kill hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => { self.access_path(expr.hir_id, path, succ, ACC_READ | ACC_USE) @@ -1076,12 +1049,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { arm.guard.as_ref().map(|hir::Guard::If(e)| &**e), body_succ ); - // only consider the first pattern; any later patterns must have - // the same bindings, and we also consider the first pattern to be - // the "authoritative" set of ids - let arm_succ = - self.define_bindings_in_arm_pats(arm.pats.first().map(|p| &**p), - guard_succ); + let arm_succ = self.define_bindings_in_pat(&arm.pat, guard_succ); self.merge_from_succ(ln, arm_succ, first_merge); first_merge = false; }; @@ -1291,7 +1259,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { // these errors are detected in the later pass borrowck. We // just ignore such cases and treat them as reads. - match expr.node { + match expr.kind { hir::ExprKind::Path(_) => succ, hir::ExprKind::Field(ref e, _) => self.propagate_through_expr(&e, succ), _ => self.propagate_through_expr(expr, succ) @@ -1300,7 +1268,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { // see comment on propagate_through_place() fn write_place(&mut self, expr: &Expr, succ: LiveNode, acc: u32) -> LiveNode { - match expr.node { + match expr.kind { hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => { self.access_path(expr.hir_id, path, succ, acc) } @@ -1388,74 +1356,36 @@ impl<'a, 'tcx> Visitor<'tcx> for Liveness<'a, 'tcx> { NestedVisitorMap::None } - fn visit_local(&mut self, l: &'tcx hir::Local) { - check_local(self, l); + fn visit_local(&mut self, local: &'tcx hir::Local) { + self.check_unused_vars_in_pat(&local.pat, None, |spans, hir_id, ln, var| { + if local.init.is_some() { + self.warn_about_dead_assign(spans, hir_id, ln, var); + } + }); + + intravisit::walk_local(self, local); } + fn visit_expr(&mut self, ex: &'tcx Expr) { check_expr(self, ex); } - fn visit_arm(&mut self, a: &'tcx hir::Arm) { - check_arm(self, a); + + fn visit_arm(&mut self, arm: &'tcx hir::Arm) { + self.check_unused_vars_in_pat(&arm.pat, None, |_, _, _, _| {}); + intravisit::walk_arm(self, arm); } } -fn check_local<'a, 'tcx>(this: &mut Liveness<'a, 'tcx>, local: &'tcx hir::Local) { - match local.init { - Some(_) => { - this.warn_about_unused_or_dead_vars_in_pat(&local.pat); - }, - None => { - this.pat_bindings(&local.pat, |this, ln, var, sp, id| { - let span = local.pat.simple_ident().map_or(sp, |ident| ident.span); - this.warn_about_unused(vec![span], id, ln, var); - }) - } - } - - intravisit::walk_local(this, local); -} - -fn check_arm<'a, 'tcx>(this: &mut Liveness<'a, 'tcx>, arm: &'tcx hir::Arm) { - // Only consider the variable from the first pattern; any later patterns must have - // the same bindings, and we also 
consider the first pattern to be the "authoritative" set of - // ids. However, we should take the spans of variables with the same name from the later - // patterns so the suggestions to prefix with underscores will apply to those too. - let mut vars: BTreeMap)> = Default::default(); - - for pat in &arm.pats { - this.arm_pats_bindings(Some(&*pat), |this, ln, var, sp, id| { - let name = this.ir.variable_name(var); - vars.entry(name) - .and_modify(|(.., spans)| { - spans.push(sp); - }) - .or_insert_with(|| { - (ln, var, id, vec![sp]) - }); - }); - } - - for (_, (ln, var, id, spans)) in vars { - this.warn_about_unused(spans, id, ln, var); - } - - intravisit::walk_arm(this, arm); -} - -fn check_expr<'a, 'tcx>(this: &mut Liveness<'a, 'tcx>, expr: &'tcx Expr) { - match expr.node { +fn check_expr<'tcx>(this: &mut Liveness<'_, 'tcx>, expr: &'tcx Expr) { + match expr.kind { hir::ExprKind::Assign(ref l, _) => { this.check_place(&l); - - intravisit::walk_expr(this, expr); } hir::ExprKind::AssignOp(_, ref l, _) => { if !this.tables.is_method_call(expr) { this.check_place(&l); } - - intravisit::walk_expr(this, expr); } hir::ExprKind::InlineAsm(ref ia, ref outputs, ref inputs) => { @@ -1470,8 +1400,6 @@ fn check_expr<'a, 'tcx>(this: &mut Liveness<'a, 'tcx>, expr: &'tcx Expr) { } this.visit_expr(output); } - - intravisit::walk_expr(this, expr); } // no correctness conditions related to liveness @@ -1484,15 +1412,15 @@ fn check_expr<'a, 'tcx>(this: &mut Liveness<'a, 'tcx>, expr: &'tcx Expr) { hir::ExprKind::Lit(_) | hir::ExprKind::Block(..) | hir::ExprKind::AddrOf(..) | hir::ExprKind::Struct(..) | hir::ExprKind::Repeat(..) | hir::ExprKind::Closure(..) | hir::ExprKind::Path(_) | hir::ExprKind::Yield(..) | - hir::ExprKind::Box(..) | hir::ExprKind::Type(..) | hir::ExprKind::Err => { - intravisit::walk_expr(this, expr); - } + hir::ExprKind::Box(..) | hir::ExprKind::Type(..) | hir::ExprKind::Err => {} } + + intravisit::walk_expr(this, expr); } -impl<'a, 'tcx> Liveness<'a, 'tcx> { +impl<'tcx> Liveness<'_, 'tcx> { fn check_place(&mut self, expr: &'tcx Expr) { - match expr.node { + match expr.kind { hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => { if let Res::Local(var_hid) = path.res { let upvars = self.ir.tcx.upvars(self.ir.body_owner); @@ -1503,7 +1431,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { // as being used. let ln = self.live_node(expr.hir_id, expr.span); let var = self.variable(var_hid, expr.span); - self.warn_about_dead_assign(expr.span, expr.hir_id, ln, var); + self.warn_about_dead_assign(vec![expr.span], expr.hir_id, ln, var); } } } @@ -1525,109 +1453,112 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { } fn warn_about_unused_args(&self, body: &hir::Body, entry_ln: LiveNode) { - for param in &body.params { - param.pat.each_binding(|_bm, hir_id, _, ident| { - let sp = ident.span; - let var = self.variable(hir_id, sp); - // Ignore unused self. 
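`warn_about_dead_assign`, now driven by a list of spans, backs the `unused_assignments` lint: an assignment whose value can never be read is reported. A minimal standalone example of the case it targets (the exact diagnostic text may differ):

```rust
// Illustration only: the second assignment's value is never read, so it is
// expected to trigger the `unused_assignments` lint.
fn main() {
    let mut count = 0;
    println!("{}", count); // the first value is read here
    count = 1; // expected warning: value assigned to `count` is never read
}
```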
- if ident.name != kw::SelfLower { - if !self.warn_about_unused(vec![sp], hir_id, entry_ln, var) { - if self.live_on_entry(entry_ln, var).is_none() { - self.report_dead_assign(hir_id, sp, var, true); - } - } + for p in &body.params { + self.check_unused_vars_in_pat(&p.pat, Some(entry_ln), |spans, hir_id, ln, var| { + if self.live_on_entry(ln, var).is_none() { + self.report_dead_assign(hir_id, spans, var, true); } - }) + }); } } - fn warn_about_unused_or_dead_vars_in_pat(&mut self, pat: &hir::Pat) { - self.pat_bindings(pat, |this, ln, var, sp, id| { - if !this.warn_about_unused(vec![sp], id, ln, var) { - this.warn_about_dead_assign(sp, id, ln, var); + fn check_unused_vars_in_pat( + &self, + pat: &hir::Pat, + entry_ln: Option, + on_used_on_entry: impl Fn(Vec, HirId, LiveNode, Variable), + ) { + // In an or-pattern, only consider the variable; any later patterns must have the same + // bindings, and we also consider the first pattern to be the "authoritative" set of ids. + // However, we should take the spans of variables with the same name from the later + // patterns so the suggestions to prefix with underscores will apply to those too. + let mut vars: FxIndexMap)> = <_>::default(); + + pat.each_binding(|_, hir_id, pat_sp, ident| { + let ln = entry_ln.unwrap_or_else(|| self.live_node(hir_id, pat_sp)); + let var = self.variable(hir_id, ident.span); + vars.entry(self.ir.variable_name(var)) + .and_modify(|(.., spans)| spans.push(ident.span)) + .or_insert_with(|| (ln, var, hir_id, vec![ident.span])); + }); + + for (_, (ln, var, id, spans)) in vars { + if self.used_on_entry(ln, var) { + on_used_on_entry(spans, id, ln, var); + } else { + self.report_unused(spans, id, ln, var); } - }) + } } - fn warn_about_unused(&self, - spans: Vec, - hir_id: HirId, - ln: LiveNode, - var: Variable) - -> bool { - if !self.used_on_entry(ln, var) { - let r = self.should_warn(var); - if let Some(name) = r { - // annoying: for parameters in funcs like `fn(x: i32) - // {ret}`, there is only one node, so asking about - // assigned_on_exit() is not meaningful. - let is_assigned = if ln == self.s.exit_ln { - false - } else { - self.assigned_on_exit(ln, var).is_some() - }; + fn report_unused(&self, spans: Vec, hir_id: HirId, ln: LiveNode, var: Variable) { + if let Some(name) = self.should_warn(var).filter(|name| name != "self") { + // annoying: for parameters in funcs like `fn(x: i32) + // {ret}`, there is only one node, so asking about + // assigned_on_exit() is not meaningful. + let is_assigned = if ln == self.s.exit_ln { + false + } else { + self.assigned_on_exit(ln, var).is_some() + }; - if is_assigned { - self.ir.tcx.lint_hir_note( - lint::builtin::UNUSED_VARIABLES, - hir_id, - spans, - &format!("variable `{}` is assigned to, but never used", name), - &format!("consider using `_{}` instead", name), - ); - } else if name != "self" { - let mut err = self.ir.tcx.struct_span_lint_hir( - lint::builtin::UNUSED_VARIABLES, - hir_id, - spans.clone(), - &format!("unused variable: `{}`", name), - ); + if is_assigned { + self.ir.tcx.lint_hir_note( + lint::builtin::UNUSED_VARIABLES, + hir_id, + spans, + &format!("variable `{}` is assigned to, but never used", name), + &format!("consider using `_{}` instead", name), + ); + } else { + let mut err = self.ir.tcx.struct_span_lint_hir( + lint::builtin::UNUSED_VARIABLES, + hir_id, + spans.clone(), + &format!("unused variable: `{}`", name), + ); - if self.ir.variable_is_shorthand(var) { - if let Node::Binding(pat) = self.ir.tcx.hir().get(hir_id) { - // Handle `ref` and `ref mut`. 
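The field-shorthand branch just above changes the suggestion from an underscore prefix to ignoring the field. A minimal made-up example that triggers it:

```rust
struct Point {
    x: i32,
    y: i32,
}

fn main() {
    let p = Point { x: 1, y: 2 };
    // `y` is bound through the shorthand but never read; because the binding
    // is a field shorthand, the lint suggests `y: _` ("try ignoring the
    // field") instead of renaming the variable to `_y`.
    let Point { x, y } = p;
    println!("{}", x);
}
```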
- let spans = spans.iter() - .map(|_span| (pat.span, format!("{}: _", name))) - .collect(); + if self.ir.variable_is_shorthand(var) { + if let Node::Binding(pat) = self.ir.tcx.hir().get(hir_id) { + // Handle `ref` and `ref mut`. + let spans = spans.iter() + .map(|_span| (pat.span, format!("{}: _", name))) + .collect(); - err.multipart_suggestion( - "try ignoring the field", - spans, - Applicability::MachineApplicable, - ); - } - } else { err.multipart_suggestion( - "consider prefixing with an underscore", - spans.iter().map(|span| (*span, format!("_{}", name))).collect(), + "try ignoring the field", + spans, Applicability::MachineApplicable, ); } - - err.emit() + } else { + err.multipart_suggestion( + "consider prefixing with an underscore", + spans.iter().map(|span| (*span, format!("_{}", name))).collect(), + Applicability::MachineApplicable, + ); } + + err.emit() } - true - } else { - false } } - fn warn_about_dead_assign(&self, sp: Span, hir_id: HirId, ln: LiveNode, var: Variable) { + fn warn_about_dead_assign(&self, spans: Vec, hir_id: HirId, ln: LiveNode, var: Variable) { if self.live_on_exit(ln, var).is_none() { - self.report_dead_assign(hir_id, sp, var, false); + self.report_dead_assign(hir_id, spans, var, false); } } - fn report_dead_assign(&self, hir_id: HirId, sp: Span, var: Variable, is_argument: bool) { + fn report_dead_assign(&self, hir_id: HirId, spans: Vec, var: Variable, is_param: bool) { if let Some(name) = self.should_warn(var) { - if is_argument { - self.ir.tcx.struct_span_lint_hir(lint::builtin::UNUSED_ASSIGNMENTS, hir_id, sp, + if is_param { + self.ir.tcx.struct_span_lint_hir(lint::builtin::UNUSED_ASSIGNMENTS, hir_id, spans, &format!("value passed to `{}` is never read", name)) .help("maybe it is overwritten before being read?") .emit(); } else { - self.ir.tcx.struct_span_lint_hir(lint::builtin::UNUSED_ASSIGNMENTS, hir_id, sp, + self.ir.tcx.struct_span_lint_hir(lint::builtin::UNUSED_ASSIGNMENTS, hir_id, spans, &format!("value assigned to `{}` is never read", name)) .help("maybe it is overwritten before being read?") .emit(); diff --git a/src/librustc_passes/loops.rs b/src/librustc_passes/loops.rs index dbfbec32a6..6c9e018faf 100644 --- a/src/librustc_passes/loops.rs +++ b/src/librustc_passes/loops.rs @@ -54,7 +54,7 @@ impl<'a, 'hir> Visitor<'hir> for CheckLoopVisitor<'a, 'hir> { } fn visit_expr(&mut self, e: &'hir hir::Expr) { - match e.node { + match e.kind { hir::ExprKind::Loop(ref b, _, source) => { self.with_context(Loop(source), |v| v.visit_block(&b)); } @@ -99,7 +99,7 @@ impl<'a, 'hir> Visitor<'hir> for CheckLoopVisitor<'a, 'hir> { let loop_kind = if loop_id == hir::DUMMY_HIR_ID { None } else { - Some(match self.hir_map.expect_expr(loop_id).node { + Some(match self.hir_map.expect_expr(loop_id).kind { hir::ExprKind::Loop(_, _, source) => source, ref r => span_bug!(e.span, "break label resolved to a non-loop: {:?}", r), diff --git a/src/librustc_passes/rvalue_promotion.rs b/src/librustc_passes/rvalue_promotion.rs deleted file mode 100644 index f2461f7016..0000000000 --- a/src/librustc_passes/rvalue_promotion.rs +++ /dev/null @@ -1,662 +0,0 @@ -// Verifies that the types and values of const and static items -// are safe. 
The rules enforced by this module are: -// -// - For each *mutable* static item, it checks that its **type**: -// - doesn't have a destructor -// - doesn't own a box -// -// - For each *immutable* static item, it checks that its **value**: -// - doesn't own a box -// - doesn't contain a struct literal or a call to an enum variant / struct constructor where -// - the type of the struct/enum has a dtor -// -// Rules Enforced Elsewhere: -// - It's not possible to take the address of a static item with unsafe interior. This is enforced -// by borrowck::gather_loans - -use rustc::ty::cast::CastTy; -use rustc::hir::def::{Res, DefKind, CtorKind}; -use rustc::hir::def_id::DefId; -use rustc::middle::expr_use_visitor as euv; -use rustc::middle::mem_categorization as mc; -use rustc::middle::mem_categorization::Categorization; -use rustc::ty::{self, Ty, TyCtxt}; -use rustc::ty::query::Providers; -use rustc::ty::subst::{InternalSubsts, SubstsRef}; -use rustc::util::nodemap::{ItemLocalSet, HirIdSet}; -use rustc::hir; -use syntax::symbol::sym; -use syntax_pos::{Span, DUMMY_SP}; -use log::debug; -use Promotability::*; -use std::ops::{BitAnd, BitAndAssign, BitOr}; - -pub fn provide(providers: &mut Providers<'_>) { - *providers = Providers { - rvalue_promotable_map, - const_is_rvalue_promotable_to_static, - ..*providers - }; -} - -fn const_is_rvalue_promotable_to_static(tcx: TyCtxt<'_>, def_id: DefId) -> bool { - assert!(def_id.is_local()); - - let hir_id = tcx.hir().as_local_hir_id(def_id) - .expect("rvalue_promotable_map invoked with non-local def-id"); - let body_id = tcx.hir().body_owned_by(hir_id); - tcx.rvalue_promotable_map(def_id).contains(&body_id.hir_id.local_id) -} - -fn rvalue_promotable_map(tcx: TyCtxt<'_>, def_id: DefId) -> &ItemLocalSet { - let outer_def_id = tcx.closure_base_def_id(def_id); - if outer_def_id != def_id { - return tcx.rvalue_promotable_map(outer_def_id); - } - - let mut visitor = CheckCrateVisitor { - tcx, - tables: &ty::TypeckTables::empty(None), - in_fn: false, - in_static: false, - mut_rvalue_borrows: Default::default(), - param_env: ty::ParamEnv::empty(), - identity_substs: InternalSubsts::empty(), - result: ItemLocalSet::default(), - }; - - // `def_id` should be a `Body` owner - let hir_id = tcx.hir().as_local_hir_id(def_id) - .expect("rvalue_promotable_map invoked with non-local def-id"); - let body_id = tcx.hir().body_owned_by(hir_id); - let _ = visitor.check_nested_body(body_id); - - tcx.arena.alloc(visitor.result) -} - -struct CheckCrateVisitor<'a, 'tcx> { - tcx: TyCtxt<'tcx>, - in_fn: bool, - in_static: bool, - mut_rvalue_borrows: HirIdSet, - param_env: ty::ParamEnv<'tcx>, - identity_substs: SubstsRef<'tcx>, - tables: &'a ty::TypeckTables<'tcx>, - result: ItemLocalSet, -} - -#[must_use] -#[derive(Debug, Clone, Copy, PartialEq)] -enum Promotability { - Promotable, - NotPromotable -} - -impl BitAnd for Promotability { - type Output = Self; - - fn bitand(self, rhs: Self) -> Self { - match (self, rhs) { - (Promotable, Promotable) => Promotable, - _ => NotPromotable, - } - } -} - -impl BitAndAssign for Promotability { - fn bitand_assign(&mut self, rhs: Self) { - *self = *self & rhs - } -} - -impl BitOr for Promotability { - type Output = Self; - - fn bitor(self, rhs: Self) -> Self { - match (self, rhs) { - (NotPromotable, NotPromotable) => NotPromotable, - _ => Promotable, - } - } -} - -impl<'a, 'tcx> CheckCrateVisitor<'a, 'tcx> { - // Returns true iff all the values of the type are promotable. 
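The `is_freeze`/`needs_drop` test above is the type-level gate of this (now removed) HIR promotion pass. A standalone sketch of the user-visible rule it encoded (the example code is made up for illustration):

```rust
// `&42` can be promoted to a `'static` allocation: `i32` is `Freeze` and has
// no destructor.
static ANSWER: &i32 = &42;

fn main() {
    let promoted: &'static i32 = &5; // the same rule applies to a local borrow

    // Types with interior mutability or a destructor fail that test;
    // uncommenting either line produces a lifetime error instead of a
    // promoted temporary:
    // let c: &'static std::cell::Cell<i32> = &std::cell::Cell::new(0);
    // let s: &'static String = &String::new();

    println!("{} {}", ANSWER, promoted);
}
```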
- fn type_promotability(&mut self, ty: Ty<'tcx>) -> Promotability { - debug!("type_promotability({})", ty); - - if ty.is_freeze(self.tcx, self.param_env, DUMMY_SP) && - !ty.needs_drop(self.tcx, self.param_env) { - Promotable - } else { - NotPromotable - } - } - - fn handle_const_fn_call( - &mut self, - def_id: DefId, - ) -> Promotability { - if self.tcx.is_promotable_const_fn(def_id) { - Promotable - } else { - NotPromotable - } - } - - /// While the `ExprUseVisitor` walks, we will identify which - /// expressions are borrowed, and insert their IDs into this - /// table. Actually, we insert the "borrow-id", which is normally - /// the ID of the expression being borrowed: but in the case of - /// `ref mut` borrows, the `id` of the pattern is - /// inserted. Therefore, later we remove that entry from the table - /// and transfer it over to the value being matched. This will - /// then prevent said value from being promoted. - fn remove_mut_rvalue_borrow(&mut self, pat: &hir::Pat) -> bool { - let mut any_removed = false; - pat.walk(|p| { - any_removed |= self.mut_rvalue_borrows.remove(&p.hir_id); - true - }); - any_removed - } -} - -impl<'a, 'tcx> CheckCrateVisitor<'a, 'tcx> { - fn check_nested_body(&mut self, body_id: hir::BodyId) -> Promotability { - let item_id = self.tcx.hir().body_owner(body_id); - let item_def_id = self.tcx.hir().local_def_id(item_id); - - let outer_in_fn = self.in_fn; - let outer_tables = self.tables; - let outer_param_env = self.param_env; - let outer_identity_substs = self.identity_substs; - - self.in_fn = false; - self.in_static = false; - - match self.tcx.hir().body_owner_kind(item_id) { - hir::BodyOwnerKind::Closure | - hir::BodyOwnerKind::Fn => self.in_fn = true, - hir::BodyOwnerKind::Static(_) => self.in_static = true, - _ => {} - }; - - - self.tables = self.tcx.typeck_tables_of(item_def_id); - self.param_env = self.tcx.param_env(item_def_id); - self.identity_substs = InternalSubsts::identity_for_item(self.tcx, item_def_id); - - let body = self.tcx.hir().body(body_id); - - let tcx = self.tcx; - let param_env = self.param_env; - let region_scope_tree = self.tcx.region_scope_tree(item_def_id); - let tables = self.tables; - euv::ExprUseVisitor::new( - self, - tcx, - item_def_id, - param_env, - ®ion_scope_tree, - tables, - None, - ).consume_body(body); - - let body_promotable = self.check_expr(&body.value); - self.in_fn = outer_in_fn; - self.tables = outer_tables; - self.param_env = outer_param_env; - self.identity_substs = outer_identity_substs; - body_promotable - } - - fn check_stmt(&mut self, stmt: &'tcx hir::Stmt) -> Promotability { - match stmt.node { - hir::StmtKind::Local(ref local) => { - if self.remove_mut_rvalue_borrow(&local.pat) { - if let Some(init) = &local.init { - self.mut_rvalue_borrows.insert(init.hir_id); - } - } - - if let Some(ref expr) = local.init { - let _ = self.check_expr(&expr); - } - NotPromotable - } - // Item statements are allowed - hir::StmtKind::Item(..) => Promotable, - hir::StmtKind::Expr(ref box_expr) | - hir::StmtKind::Semi(ref box_expr) => { - let _ = self.check_expr(box_expr); - NotPromotable - } - } - } - - fn check_expr(&mut self, ex: &'tcx hir::Expr) -> Promotability { - let node_ty = self.tables.node_type(ex.hir_id); - let mut outer = check_expr_kind(self, ex, node_ty); - outer &= check_adjustments(self, ex); - - // Handle borrows on (or inside the autorefs of) this expression. 
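The `mut_rvalue_borrows` bookkeeping above means a mutably borrowed temporary is never promoted. A made-up example of the distinction it draws:

```rust
fn main() {
    // A shared borrow of a literal is promoted and may outlive the function:
    let shared: &'static i32 = &0;

    // A mutable borrow of the same rvalue is not promoted; uncommenting this
    // line fails with "temporary value dropped while borrowed":
    // let exclusive: &'static mut i32 = &mut 0;

    // Without the `'static` requirement the temporary simply lives on the
    // stack for the duration of the binding:
    let ref mut on_stack = 0;
    *on_stack += *shared;
    println!("{}", on_stack);
}
```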
- if self.mut_rvalue_borrows.remove(&ex.hir_id) { - outer = NotPromotable - } - - if outer == Promotable { - self.result.insert(ex.hir_id.local_id); - } - outer - } - - fn check_block(&mut self, block: &'tcx hir::Block) -> Promotability { - let mut iter_result = Promotable; - for index in block.stmts.iter() { - iter_result &= self.check_stmt(index); - } - match block.expr { - Some(ref box_expr) => iter_result & self.check_expr(&*box_expr), - None => iter_result, - } - } -} - -/// This function is used to enforce the constraints on -/// const/static items. It walks through the *value* -/// of the item walking down the expression and evaluating -/// every nested expression. If the expression is not part -/// of a const/static item, it is qualified for promotion -/// instead of producing errors. -fn check_expr_kind<'a, 'tcx>( - v: &mut CheckCrateVisitor<'a, 'tcx>, - e: &'tcx hir::Expr, node_ty: Ty<'tcx>) -> Promotability { - - let ty_result = match node_ty.sty { - ty::Adt(def, _) if def.has_dtor(v.tcx) => { - NotPromotable - } - _ => Promotable - }; - - let node_result = match e.node { - hir::ExprKind::Box(ref expr) => { - let _ = v.check_expr(&expr); - NotPromotable - } - hir::ExprKind::Unary(op, ref expr) => { - let expr_promotability = v.check_expr(expr); - if v.tables.is_method_call(e) || op == hir::UnDeref { - return NotPromotable; - } - expr_promotability - } - hir::ExprKind::Binary(op, ref lhs, ref rhs) => { - let lefty = v.check_expr(lhs); - let righty = v.check_expr(rhs); - if v.tables.is_method_call(e) { - return NotPromotable; - } - match v.tables.node_type(lhs.hir_id).sty { - ty::RawPtr(_) | ty::FnPtr(..) => { - assert!(op.node == hir::BinOpKind::Eq || op.node == hir::BinOpKind::Ne || - op.node == hir::BinOpKind::Le || op.node == hir::BinOpKind::Lt || - op.node == hir::BinOpKind::Ge || op.node == hir::BinOpKind::Gt); - - NotPromotable - } - _ => lefty & righty - } - } - hir::ExprKind::Cast(ref from, _) => { - let expr_promotability = v.check_expr(from); - debug!("checking const cast(id={})", from.hir_id); - let cast_in = CastTy::from_ty(v.tables.expr_ty(from)); - let cast_out = CastTy::from_ty(v.tables.expr_ty(e)); - match (cast_in, cast_out) { - (Some(CastTy::FnPtr), Some(CastTy::Int(_))) | - (Some(CastTy::Ptr(_)), Some(CastTy::Int(_))) => NotPromotable, - (_, _) => expr_promotability - } - } - hir::ExprKind::Path(ref qpath) => { - let res = v.tables.qpath_res(qpath, e.hir_id); - match res { - Res::Def(DefKind::Ctor(..), _) - | Res::Def(DefKind::Fn, _) - | Res::Def(DefKind::Method, _) - | Res::SelfCtor(..) => - Promotable, - - // References to a static that are themselves within a static - // are inherently promotable with the exception - // of "#[thread_local]" statics, which may not - // outlive the current function - Res::Def(DefKind::Static, did) => { - - if v.in_static { - for attr in &v.tcx.get_attrs(did)[..] { - if attr.check_name(sym::thread_local) { - debug!("reference to `Static(id={:?})` is unpromotable \ - due to a `#[thread_local]` attribute", did); - return NotPromotable; - } - } - Promotable - } else { - debug!("reference to `Static(id={:?})` is unpromotable as it is not \ - referenced from a static", did); - NotPromotable - } - } - - Res::Def(DefKind::Const, did) | - Res::Def(DefKind::AssocConst, did) => { - let promotable = if v.tcx.trait_of_item(did).is_some() { - // Don't peek inside trait associated constants. 
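The `Static`/`Const` arms above accept references to other statics and to promotable constants inside a static initializer, with `#[thread_local]` statics excluded because they may not outlive the current function. A made-up example of the accepted cases:

```rust
const ZERO: i32 = 0;
static BASE: i32 = 7;

// A reference to another static, and a reference to a promotable constant,
// are both accepted in a static initializer.
static REF_TO_STATIC: &i32 = &BASE;
static REF_TO_CONST: &i32 = &ZERO;

fn main() {
    println!("{} {}", REF_TO_STATIC, REF_TO_CONST);
}
```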
- NotPromotable - } else if v.tcx.at(e.span).const_is_rvalue_promotable_to_static(did) { - Promotable - } else { - NotPromotable - }; - // Just in case the type is more specific than the definition, - // e.g., impl associated const with type parameters, check it. - // Also, trait associated consts are relaxed by this. - promotable | v.type_promotability(node_ty) - } - _ => NotPromotable - } - } - hir::ExprKind::Call(ref callee, ref hirvec) => { - let mut call_result = v.check_expr(callee); - for index in hirvec.iter() { - call_result &= v.check_expr(index); - } - let mut callee = &**callee; - loop { - callee = match callee.node { - hir::ExprKind::Block(ref block, _) => match block.expr { - Some(ref tail) => &tail, - None => break - }, - _ => break - }; - } - // The callee is an arbitrary expression, it doesn't necessarily have a definition. - let def = if let hir::ExprKind::Path(ref qpath) = callee.node { - v.tables.qpath_res(qpath, callee.hir_id) - } else { - Res::Err - }; - let def_result = match def { - Res::Def(DefKind::Ctor(_, CtorKind::Fn), _) | - Res::SelfCtor(..) => Promotable, - Res::Def(DefKind::Fn, did) => v.handle_const_fn_call(did), - Res::Def(DefKind::Method, did) => { - match v.tcx.associated_item(did).container { - ty::ImplContainer(_) => v.handle_const_fn_call(did), - ty::TraitContainer(_) => NotPromotable, - } - } - _ => NotPromotable, - }; - def_result & call_result - } - hir::ExprKind::MethodCall(ref _pathsegment, ref _span, ref hirvec) => { - let mut method_call_result = Promotable; - for index in hirvec.iter() { - method_call_result &= v.check_expr(index); - } - if let Some(def_id) = v.tables.type_dependent_def_id(e.hir_id) { - match v.tcx.associated_item(def_id).container { - ty::ImplContainer(_) => method_call_result & v.handle_const_fn_call(def_id), - ty::TraitContainer(_) => NotPromotable, - } - } else { - v.tcx.sess.delay_span_bug(e.span, "no type-dependent def for method call"); - NotPromotable - } - } - hir::ExprKind::Struct(ref _qpath, ref hirvec, ref option_expr) => { - let mut struct_result = Promotable; - for index in hirvec.iter() { - struct_result &= v.check_expr(&index.expr); - } - if let Some(ref expr) = *option_expr { - struct_result &= v.check_expr(&expr); - } - if let ty::Adt(adt, ..) = v.tables.expr_ty(e).sty { - // unsafe_cell_type doesn't necessarily exist with no_core - if Some(adt.did) == v.tcx.lang_items().unsafe_cell_type() { - return NotPromotable; - } - } - struct_result - } - - hir::ExprKind::Lit(_) | - hir::ExprKind::Err => Promotable, - - hir::ExprKind::AddrOf(_, ref expr) | - hir::ExprKind::Repeat(ref expr, _) | - hir::ExprKind::Type(ref expr, _) | - hir::ExprKind::DropTemps(ref expr) => { - v.check_expr(&expr) - } - - hir::ExprKind::Closure(_capture_clause, ref _box_fn_decl, - body_id, _span, _option_generator_movability) => { - let nested_body_promotable = v.check_nested_body(body_id); - // Paths in constant contexts cannot refer to local variables, - // as there are none, and thus closures can't have upvars there. 
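The upvar check above reflects that constant contexts have no locals, so any closure appearing there is necessarily capture-free. A made-up sketch of that situation (assuming the usual closure-to-`fn`-pointer coercion):

```rust
// A capture-free closure can be used in a constant context, here coerced to
// a `fn` pointer in a `static`; a closure that captured a local could not
// appear here, since there are no locals to capture.
static SUCC: fn(i32) -> i32 = |x| x + 1;

fn main() {
    println!("{}", SUCC(41));
}
```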
- let closure_def_id = v.tcx.hir().local_def_id(e.hir_id); - if !v.tcx.upvars(closure_def_id).map_or(true, |v| v.is_empty()) { - NotPromotable - } else { - nested_body_promotable - } - } - - hir::ExprKind::Field(ref expr, _ident) => { - let expr_promotability = v.check_expr(&expr); - if let Some(def) = v.tables.expr_ty(expr).ty_adt_def() { - if def.is_union() { - return NotPromotable; - } - } - expr_promotability - } - - hir::ExprKind::Block(ref box_block, ref _option_label) => { - v.check_block(box_block) - } - - hir::ExprKind::Index(ref lhs, ref rhs) => { - let lefty = v.check_expr(lhs); - let righty = v.check_expr(rhs); - if v.tables.is_method_call(e) { - return NotPromotable; - } - lefty & righty - } - - hir::ExprKind::Array(ref hirvec) => { - let mut array_result = Promotable; - for index in hirvec.iter() { - array_result &= v.check_expr(index); - } - array_result - } - - hir::ExprKind::Tup(ref hirvec) => { - let mut tup_result = Promotable; - for index in hirvec.iter() { - tup_result &= v.check_expr(index); - } - tup_result - } - - // Conditional control flow (possible to implement). - hir::ExprKind::Match(ref expr, ref hirvec_arm, ref _match_source) => { - // Compute the most demanding borrow from all the arms' - // patterns and set that on the discriminator. - let mut mut_borrow = false; - for pat in hirvec_arm.iter().flat_map(|arm| &arm.pats) { - mut_borrow = v.remove_mut_rvalue_borrow(pat); - } - if mut_borrow { - v.mut_rvalue_borrows.insert(expr.hir_id); - } - - let _ = v.check_expr(expr); - for index in hirvec_arm.iter() { - let _ = v.check_expr(&*index.body); - if let Some(hir::Guard::If(ref expr)) = index.guard { - let _ = v.check_expr(&expr); - } - } - NotPromotable - } - - hir::ExprKind::Loop(ref box_block, ref _option_label, ref _loop_source) => { - let _ = v.check_block(box_block); - NotPromotable - } - - // More control flow (also not very meaningful). - hir::ExprKind::Break(_, ref option_expr) | hir::ExprKind::Ret(ref option_expr) => { - if let Some(ref expr) = *option_expr { - let _ = v.check_expr(&expr); - } - NotPromotable - } - - hir::ExprKind::Continue(_) => { - NotPromotable - } - - // Generator expressions - hir::ExprKind::Yield(ref expr, _) => { - let _ = v.check_expr(&expr); - NotPromotable - } - - // Expressions with side-effects. - hir::ExprKind::AssignOp(_, ref lhs, ref rhs) | hir::ExprKind::Assign(ref lhs, ref rhs) => { - let _ = v.check_expr(lhs); - let _ = v.check_expr(rhs); - NotPromotable - } - - hir::ExprKind::InlineAsm(ref _inline_asm, ref hirvec_lhs, ref hirvec_rhs) => { - for index in hirvec_lhs.iter().chain(hirvec_rhs.iter()) { - let _ = v.check_expr(index); - } - NotPromotable - } - }; - ty_result & node_result -} - -/// Checks the adjustments of an expression. 
-fn check_adjustments<'a, 'tcx>( - v: &mut CheckCrateVisitor<'a, 'tcx>, - e: &hir::Expr) -> Promotability { - use rustc::ty::adjustment::*; - - let mut adjustments = v.tables.expr_adjustments(e).iter().peekable(); - while let Some(adjustment) = adjustments.next() { - match adjustment.kind { - Adjust::NeverToAny | - Adjust::Pointer(_) | - Adjust::Borrow(_) => {} - - Adjust::Deref(_) => { - if let Some(next_adjustment) = adjustments.peek() { - if let Adjust::Borrow(_) = next_adjustment.kind { - continue; - } - } - return NotPromotable; - } - } - } - Promotable -} - -impl<'a, 'tcx> euv::Delegate<'tcx> for CheckCrateVisitor<'a, 'tcx> { - fn consume(&mut self, - _consume_id: hir::HirId, - _consume_span: Span, - _cmt: &mc::cmt_<'_>, - _mode: euv::ConsumeMode) {} - - fn borrow(&mut self, - borrow_id: hir::HirId, - _borrow_span: Span, - cmt: &mc::cmt_<'tcx>, - _loan_region: ty::Region<'tcx>, - bk: ty::BorrowKind, - loan_cause: euv::LoanCause) { - debug!( - "borrow(borrow_id={:?}, cmt={:?}, bk={:?}, loan_cause={:?})", - borrow_id, - cmt, - bk, - loan_cause, - ); - - // Kind of hacky, but we allow Unsafe coercions in constants. - // These occur when we convert a &T or *T to a *U, as well as - // when making a thin pointer (e.g., `*T`) into a fat pointer - // (e.g., `*Trait`). - if let euv::LoanCause::AutoUnsafe = loan_cause { - return; - } - - let mut cur = cmt; - loop { - match cur.cat { - Categorization::ThreadLocal(..) | - Categorization::Rvalue(..) => { - if loan_cause == euv::MatchDiscriminant { - // Ignore the dummy immutable borrow created by EUV. - break; - } - if bk.to_mutbl_lossy() == hir::MutMutable { - self.mut_rvalue_borrows.insert(borrow_id); - } - break; - } - Categorization::StaticItem => { - break; - } - Categorization::Deref(ref cmt, _) | - Categorization::Downcast(ref cmt, _) | - Categorization::Interior(ref cmt, _) => { - cur = cmt; - } - - Categorization::Upvar(..) | - Categorization::Local(..) => break, - } - } - } - - fn decl_without_init(&mut self, _id: hir::HirId, _span: Span) {} - fn mutate(&mut self, - _assignment_id: hir::HirId, - _assignment_span: Span, - _assignee_cmt: &mc::cmt_<'_>, - _mode: euv::MutateMode) { - } - - fn matched_pat(&mut self, _: &hir::Pat, _: &mc::cmt_<'_>, _: euv::MatchMode) {} - - fn consume_pat(&mut self, - _consume_pat: &hir::Pat, - _cmt: &mc::cmt_<'_>, - _mode: euv::ConsumeMode) {} -} diff --git a/src/librustc_plugin/Cargo.toml b/src/librustc_plugin/Cargo.toml index 84a743ed1a..e8bf4e7ea8 100644 --- a/src/librustc_plugin/Cargo.toml +++ b/src/librustc_plugin/Cargo.toml @@ -14,5 +14,5 @@ doctest = false rustc = { path = "../librustc" } rustc_metadata = { path = "../librustc_metadata" } syntax = { path = "../libsyntax" } +syntax_expand = { path = "../libsyntax_expand" } syntax_pos = { path = "../libsyntax_pos" } -rustc_errors = { path = "../librustc_errors" } diff --git a/src/librustc_plugin/build.rs b/src/librustc_plugin/build.rs index f1bf1111cf..01559a95c9 100644 --- a/src/librustc_plugin/build.rs +++ b/src/librustc_plugin/build.rs @@ -15,7 +15,7 @@ struct RegistrarFinder { impl<'v> ItemLikeVisitor<'v> for RegistrarFinder { fn visit_item(&mut self, item: &hir::Item) { - if let hir::ItemKind::Fn(..) = item.node { + if let hir::ItemKind::Fn(..) 
= item.kind { if attr::contains_name(&item.attrs, sym::plugin_registrar) { self.registrars.push((item.hir_id, item.span)); } diff --git a/src/librustc_plugin/lib.rs b/src/librustc_plugin/lib.rs index 4e1a47c503..38738e2063 100644 --- a/src/librustc_plugin/lib.rs +++ b/src/librustc_plugin/lib.rs @@ -21,7 +21,7 @@ //! extern crate syntax_pos; //! //! use rustc_driver::plugin::Registry; -//! use syntax::ext::base::{ExtCtxt, MacResult}; +//! use syntax_expand::base::{ExtCtxt, MacResult}; //! use syntax_pos::Span; //! use syntax::tokenstream::TokenTree; //! diff --git a/src/librustc_plugin/load.rs b/src/librustc_plugin/load.rs index 4481892bcf..8ceb56b0fd 100644 --- a/src/librustc_plugin/load.rs +++ b/src/librustc_plugin/load.rs @@ -1,8 +1,8 @@ //! Used by `rustc` when loading a plugin. +use rustc::middle::cstore::MetadataLoader; use rustc::session::Session; -use rustc_metadata::creader::CrateLoader; -use rustc_metadata::cstore::CStore; +use rustc_metadata::locator; use crate::registry::Registry; use std::borrow::ToOwned; @@ -25,7 +25,7 @@ pub struct PluginRegistrar { struct PluginLoader<'a> { sess: &'a Session, - reader: CrateLoader<'a>, + metadata_loader: &'a dyn MetadataLoader, plugins: Vec, } @@ -37,11 +37,10 @@ fn call_malformed_plugin_attribute(sess: &Session, span: Span) { /// Read plugin metadata and dynamically load registrar functions. pub fn load_plugins(sess: &Session, - cstore: &CStore, + metadata_loader: &dyn MetadataLoader, krate: &ast::Crate, - crate_name: &str, addl_plugins: Option>) -> Vec { - let mut loader = PluginLoader::new(sess, cstore, crate_name); + let mut loader = PluginLoader { sess, metadata_loader, plugins: Vec::new() }; // do not report any error now. since crate attributes are // not touched by expansion, every use of plugin without @@ -80,16 +79,8 @@ pub fn load_plugins(sess: &Session, } impl<'a> PluginLoader<'a> { - fn new(sess: &'a Session, cstore: &'a CStore, crate_name: &str) -> Self { - PluginLoader { - sess, - reader: CrateLoader::new(sess, cstore, crate_name), - plugins: vec![], - } - } - fn load_plugin(&mut self, span: Span, name: Symbol, args: Vec) { - let registrar = self.reader.find_plugin_registrar(span, name); + let registrar = locator::find_plugin_registrar(self.sess, self.metadata_loader, span, name); if let Some((lib, disambiguator)) = registrar { let symbol = self.sess.generate_plugin_registrar_symbol(disambiguator); diff --git a/src/librustc_plugin/registry.rs b/src/librustc_plugin/registry.rs index bb3c950eda..2e23b8c870 100644 --- a/src/librustc_plugin/registry.rs +++ b/src/librustc_plugin/registry.rs @@ -1,11 +1,10 @@ //! Used by plugin crates to tell `rustc` about the plugins they provide. -use rustc::lint::{EarlyLintPassObject, LateLintPassObject, LintId, Lint}; +use rustc::lint::LintStore; use rustc::session::Session; -use rustc::util::nodemap::FxHashMap; -use syntax::ext::base::{SyntaxExtension, SyntaxExtensionKind, NamedSyntaxExtension}; -use syntax::ext::base::MacroExpanderFn; +use syntax_expand::base::{SyntaxExtension, SyntaxExtensionKind, NamedSyntaxExtension}; +use syntax_expand::base::MacroExpanderFn; use syntax::symbol::Symbol; use syntax::ast; use syntax::feature_gate::AttributeType; @@ -26,6 +25,9 @@ pub struct Registry<'a> { /// from the plugin registrar. pub sess: &'a Session, + /// The `LintStore` allows plugins to register new lints. 
+ pub lint_store: &'a mut LintStore, + #[doc(hidden)] pub args_hidden: Option>, @@ -35,15 +37,6 @@ pub struct Registry<'a> { #[doc(hidden)] pub syntax_exts: Vec, - #[doc(hidden)] - pub early_lint_passes: Vec, - - #[doc(hidden)] - pub late_lint_passes: Vec, - - #[doc(hidden)] - pub lint_groups: FxHashMap<&'static str, (Vec, Option<&'static str>)>, - #[doc(hidden)] pub llvm_passes: Vec, @@ -53,15 +46,13 @@ pub struct Registry<'a> { impl<'a> Registry<'a> { #[doc(hidden)] - pub fn new(sess: &'a Session, krate_span: Span) -> Registry<'a> { + pub fn new(sess: &'a Session, lint_store: &'a mut LintStore, krate_span: Span) -> Registry<'a> { Registry { sess, + lint_store, args_hidden: None, krate_span, syntax_exts: vec![], - early_lint_passes: vec![], - late_lint_passes: vec![], - lint_groups: FxHashMap::default(), llvm_passes: vec![], attributes: vec![], } @@ -99,27 +90,6 @@ impl<'a> Registry<'a> { self.register_syntax_extension(Symbol::intern(name), ext); } - /// Register a compiler lint pass. - pub fn register_early_lint_pass(&mut self, lint_pass: EarlyLintPassObject) { - self.early_lint_passes.push(lint_pass); - } - - /// Register a compiler lint pass. - pub fn register_late_lint_pass(&mut self, lint_pass: LateLintPassObject) { - self.late_lint_passes.push(lint_pass); - } - /// Register a lint group. - pub fn register_lint_group( - &mut self, - name: &'static str, - deprecated_name: Option<&'static str>, - to: Vec<&'static Lint> - ) { - self.lint_groups.insert(name, - (to.into_iter().map(|x| LintId::of(x)).collect(), - deprecated_name)); - } - /// Register an LLVM pass. /// /// Registration with LLVM itself is handled through static C++ objects with diff --git a/src/librustc_privacy/error_codes.rs b/src/librustc_privacy/error_codes.rs index 67066466f1..03afb547d3 100644 --- a/src/librustc_privacy/error_codes.rs +++ b/src/librustc_privacy/error_codes.rs @@ -1,8 +1,9 @@ syntax::register_diagnostics! { E0445: r##" -A private trait was used on a public type parameter bound. Erroneous code -examples: +A private trait was used on a public type parameter bound. + +Erroneous code examples: ```compile_fail,E0445 #![deny(private_in_public)] @@ -32,7 +33,9 @@ pub fn foo (t: T) {} // ok! "##, E0446: r##" -A private type was used in a public type signature. Erroneous code example: +A private type was used in a public type signature. + +Erroneous code example: ```compile_fail,E0446 #![deny(private_in_public)] @@ -65,7 +68,9 @@ mod Foo { E0447: r##" #### Note: this error code is no longer emitted by the compiler. -The `pub` keyword was used inside a function. Erroneous code example: +The `pub` keyword was used inside a function. + +Erroneous code example: ``` fn foo() { @@ -79,7 +84,11 @@ is invalid. "##, E0448: r##" -The `pub` keyword was used inside a public enum. Erroneous code example: +#### Note: this error code is no longer emitted by the compiler. + +The `pub` keyword was used inside a public enum. + +Erroneous code example: ```compile_fail pub enum Foo { @@ -106,7 +115,9 @@ pub enum Foo { "##, E0451: r##" -A struct constructor with private fields was invoked. Erroneous code example: +A struct constructor with private fields was invoked. 
+ +Erroneous code example: ```compile_fail,E0451 mod Bar { diff --git a/src/librustc_privacy/lib.rs b/src/librustc_privacy/lib.rs index 1e61f78c35..34cdec229a 100644 --- a/src/librustc_privacy/lib.rs +++ b/src/librustc_privacy/lib.rs @@ -64,7 +64,7 @@ trait DefIdVisitor<'tcx> { fn visit_trait(&mut self, trait_ref: TraitRef<'tcx>) -> bool { self.skeleton().visit_trait(trait_ref) } - fn visit_predicates(&mut self, predicates: &ty::GenericPredicates<'tcx>) -> bool { + fn visit_predicates(&mut self, predicates: ty::GenericPredicates<'tcx>) -> bool { self.skeleton().visit_predicates(predicates) } } @@ -88,7 +88,7 @@ where (!self.def_id_visitor.shallow() && substs.visit_with(self)) } - fn visit_predicates(&mut self, predicates: &ty::GenericPredicates<'tcx>) -> bool { + fn visit_predicates(&mut self, predicates: ty::GenericPredicates<'tcx>) -> bool { let ty::GenericPredicates { parent: _, predicates } = predicates; for (predicate, _span) in predicates { match predicate { @@ -129,7 +129,7 @@ where fn visit_ty(&mut self, ty: Ty<'tcx>) -> bool { let tcx = self.def_id_visitor.tcx(); // InternalSubsts are not visited here because they are visited below in `super_visit_with`. - match ty.sty { + match ty.kind { ty::Adt(&ty::AdtDef { did: def_id, .. }, ..) | ty::Foreign(def_id) | ty::FnDef(def_id, ..) | @@ -144,7 +144,7 @@ where // Default type visitor doesn't visit signatures of fn types. // Something like `fn() -> Priv {my_func}` is considered a private type even if // `my_func` is public, so we need to visit signatures. - if let ty::FnDef(..) = ty.sty { + if let ty::FnDef(..) = ty.kind { if tcx.fn_sig(def_id).visit_with(self) { return true; } @@ -240,7 +240,7 @@ fn def_id_visibility<'tcx>( } Node::ImplItem(impl_item) => { match tcx.hir().get(tcx.hir().get_parent_item(hir_id)) { - Node::Item(item) => match &item.node { + Node::Item(item) => match &item.kind { hir::ItemKind::Impl(.., None, _, _) => &impl_item.vis, hir::ItemKind::Impl(.., Some(trait_ref), _, _) => return def_id_visibility(tcx, trait_ref.path.res.def_id()), @@ -572,7 +572,7 @@ impl EmbargoVisitor<'tcx> { if let ty::Visibility::Public = vis { let item = self.tcx.hir().expect_item(hir_id); if let hir::ItemKind::Struct(ref struct_def, _) - | hir::ItemKind::Union(ref struct_def, _) = item.node + | hir::ItemKind::Union(ref struct_def, _) = item.kind { for field in struct_def.fields() { let field_vis = ty::Visibility::from_hir( @@ -630,12 +630,12 @@ impl EmbargoVisitor<'tcx> { .and_then(|def_id| self.tcx.hir().as_local_hir_id(def_id)) .map(|module_hir_id| self.tcx.hir().expect_item(module_hir_id)) { - if let hir::ItemKind::Mod(m) = &item.node { + if let hir::ItemKind::Mod(m) = &item.kind { for item_id in m.item_ids.as_ref() { let item = self.tcx.hir().expect_item(item_id.id); let def_id = self.tcx.hir().local_def_id(item_id.id); if !self.tcx.hygienic_eq(segment.ident, item.ident, def_id) { continue; } - if let hir::ItemKind::Use(..) = item.node { + if let hir::ItemKind::Use(..) = item.kind { self.update(item.hir_id, Some(AccessLevel::Exported)); } } @@ -653,7 +653,7 @@ impl Visitor<'tcx> for EmbargoVisitor<'tcx> { } fn visit_item(&mut self, item: &'tcx hir::Item) { - let inherited_item_level = match item.node { + let inherited_item_level = match item.kind { hir::ItemKind::Impl(..) => Option::::of_impl(item.hir_id, self.tcx, &self.access_levels), // Foreign modules inherit level from parents. 
@@ -673,7 +673,7 @@ impl Visitor<'tcx> for EmbargoVisitor<'tcx> { let item_level = self.update(item.hir_id, inherited_item_level); // Update levels of nested things. - match item.node { + match item.kind { hir::ItemKind::Enum(ref def, _) => { for variant in &def.variants { let variant_level = self.update(variant.id, item_level); @@ -727,7 +727,7 @@ impl Visitor<'tcx> for EmbargoVisitor<'tcx> { } // Mark all items in interfaces of reachable items as reachable. - match item.node { + match item.kind { // The interface is empty. hir::ItemKind::ExternCrate(..) => {} // All nested items are checked by `visit_item`. @@ -880,11 +880,11 @@ impl Visitor<'tcx> for EmbargoVisitor<'tcx> { self.tcx, self.tcx.hir().local_def_id(md.hir_id) ).unwrap(); - let mut module_id = self.tcx.hir().as_local_hir_id(macro_module_def_id).unwrap(); - if !self.tcx.hir().is_hir_id_module(module_id) { - // `module_id` doesn't correspond to a `mod`, return early (#63164). - return; - } + let mut module_id = match self.tcx.hir().as_local_hir_id(macro_module_def_id) { + Some(module_id) if self.tcx.hir().is_hir_id_module(module_id) => module_id, + // `module_id` doesn't correspond to a `mod`, return early (#63164, #65252). + _ => return, + }; let level = if md.vis.node.is_pub() { self.get(module_id) } else { None }; let new_level = self.update(md.hir_id, level); if new_level.is_none() { @@ -1028,7 +1028,7 @@ impl<'a, 'tcx> Visitor<'tcx> for NamePrivacyVisitor<'a, 'tcx> { } fn visit_expr(&mut self, expr: &'tcx hir::Expr) { - match expr.node { + match expr.kind { hir::ExprKind::Struct(ref qpath, ref fields, ref base) => { let res = self.tables.qpath_res(qpath, expr.hir_id); let adt = self.tables.expr_ty(expr).ty_adt_def().unwrap(); @@ -1062,7 +1062,7 @@ impl<'a, 'tcx> Visitor<'tcx> for NamePrivacyVisitor<'a, 'tcx> { } fn visit_pat(&mut self, pat: &'tcx hir::Pat) { - match pat.node { + match pat.kind { PatKind::Struct(ref qpath, ref fields, _) => { let res = self.tables.qpath_res(qpath, pat.hir_id); let adt = self.tables.pat_ty(pat).ty_adt_def().unwrap(); @@ -1197,7 +1197,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypePrivacyVisitor<'a, 'tcx> { // Do not check nested expressions if the error already happened. return; } - match expr.node { + match expr.kind { hir::ExprKind::Assign(.., ref rhs) | hir::ExprKind::Match(ref rhs, ..) => { // Do not report duplicate errors for `x = y` and `match x { ... }`. if self.check_expr_pat_type(rhs.hir_id, rhs.span) { @@ -1389,14 +1389,14 @@ impl<'a, 'b, 'tcx, 'v> Visitor<'v> for ObsoleteCheckTypeForPrivatenessVisitor<'a } fn visit_ty(&mut self, ty: &hir::Ty) { - if let hir::TyKind::Path(hir::QPath::Resolved(_, ref path)) = ty.node { + if let hir::TyKind::Path(hir::QPath::Resolved(_, ref path)) = ty.kind { if self.inner.path_is_private_type(path) { self.contains_private = true; // Found what we're looking for, so let's stop working. return } } - if let hir::TyKind::Path(_) = ty.node { + if let hir::TyKind::Path(_) = ty.kind { if self.at_outer_type { self.outer_type_is_public_path = true; } @@ -1417,7 +1417,7 @@ impl<'a, 'tcx> Visitor<'tcx> for ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx> { } fn visit_item(&mut self, item: &'tcx hir::Item) { - match item.node { + match item.kind { // Contents of a private mod can be re-exported, so we need // to check internals. 
hir::ItemKind::Mod(_) => {} @@ -1489,7 +1489,7 @@ impl<'a, 'tcx> Visitor<'tcx> for ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx> { impl_item_refs.iter() .any(|impl_item_ref| { let impl_item = self.tcx.hir().impl_item(impl_item_ref.id); - match impl_item.node { + match impl_item.kind { hir::ImplItemKind::Const(..) | hir::ImplItemKind::Method(..) => { self.access_levels.is_reachable( @@ -1515,7 +1515,7 @@ impl<'a, 'tcx> Visitor<'tcx> for ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx> { // don't erroneously report errors for private // types in private items. let impl_item = self.tcx.hir().impl_item(impl_item_ref.id); - match impl_item.node { + match impl_item.kind { hir::ImplItemKind::Const(..) | hir::ImplItemKind::Method(..) if self.item_is_public(&impl_item.hir_id, &impl_item.vis) => @@ -1548,7 +1548,7 @@ impl<'a, 'tcx> Visitor<'tcx> for ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx> { // Those in 3. are warned with this call. for impl_item_ref in impl_item_refs { let impl_item = self.tcx.hir().impl_item(impl_item_ref.id); - if let hir::ImplItemKind::TyAlias(ref ty) = impl_item.node { + if let hir::ImplItemKind::TyAlias(ref ty) = impl_item.kind { self.visit_ty(ty); } } @@ -1628,7 +1628,7 @@ impl<'a, 'tcx> Visitor<'tcx> for ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx> { } fn visit_ty(&mut self, t: &'tcx hir::Ty) { - if let hir::TyKind::Path(hir::QPath::Resolved(_, ref path)) = t.node { + if let hir::TyKind::Path(hir::QPath::Resolved(_, ref path)) = t.kind { if self.path_is_private_type(path) { self.old_error_set.insert(t.hir_id); } @@ -1853,7 +1853,7 @@ impl<'a, 'tcx> Visitor<'tcx> for PrivateItemsInPublicInterfacesVisitor<'a, 'tcx> let tcx = self.tcx; let item_visibility = ty::Visibility::from_hir(&item.vis, item.hir_id, tcx); - match item.node { + match item.kind { // Crates are always public. hir::ItemKind::ExternCrate(..) => {} // All nested items are checked by `visit_item`. 
diff --git a/src/librustc_resolve/Cargo.toml b/src/librustc_resolve/Cargo.toml index 548f982fe3..08ce7fd520 100644 --- a/src/librustc_resolve/Cargo.toml +++ b/src/librustc_resolve/Cargo.toml @@ -11,10 +11,10 @@ test = false doctest = false [dependencies] -bitflags = "1.0" -indexmap = "1" +bitflags = "1.2.1" log = "0.4" syntax = { path = "../libsyntax" } +syntax_expand = { path = "../libsyntax_expand" } rustc = { path = "../librustc" } arena = { path = "../libarena" } errors = { path = "../librustc_errors", package = "rustc_errors" } diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs index 11dcf5b4b0..0a966b252e 100644 --- a/src/librustc_resolve/build_reduced_graph.rs +++ b/src/librustc_resolve/build_reduced_graph.rs @@ -11,7 +11,7 @@ use crate::resolve_imports::ImportDirectiveSubclass::{self, GlobImport, SingleIm use crate::{Module, ModuleData, ModuleKind, NameBinding, NameBindingKind, Segment, ToNameBinding}; use crate::{ModuleOrUniformRoot, ParentScope, PerNS, Resolver, ResolverArenas, ExternPreludeEntry}; use crate::Namespace::{self, TypeNS, ValueNS, MacroNS}; -use crate::{ResolutionError, Determinacy, PathResult, CrateLint}; +use crate::{ResolutionError, VisResolutionError, Determinacy, PathResult, CrateLint}; use rustc::bug; use rustc::hir::def::{self, *}; @@ -32,15 +32,15 @@ use syntax::attr; use syntax::ast::{self, Block, ForeignItem, ForeignItemKind, Item, ItemKind, NodeId}; use syntax::ast::{MetaItemKind, StmtKind, TraitItem, TraitItemKind}; -use syntax::ext::base::{MacroKind, SyntaxExtension}; -use syntax::ext::expand::AstFragment; -use syntax::ext::hygiene::ExpnId; use syntax::feature_gate::is_builtin_attr; use syntax::parse::token::{self, Token}; -use syntax::{span_err, struct_span_err}; +use syntax::span_err; +use syntax::source_map::{respan, Spanned}; use syntax::symbol::{kw, sym}; use syntax::visit::{self, Visitor}; - +use syntax_expand::base::SyntaxExtension; +use syntax_expand::expand::AstFragment; +use syntax_pos::hygiene::{MacroKind, ExpnId}; use syntax_pos::{Span, DUMMY_SP}; use log::debug; @@ -92,7 +92,8 @@ impl<'a> Resolver<'a> { where T: ToNameBinding<'a>, { let binding = def.to_name_binding(self.arenas); - if let Err(old_binding) = self.try_define(parent, ident, ns, binding) { + let key = self.new_key(ident, ns); + if let Err(old_binding) = self.try_define(parent, key, binding) { self.report_conflict(parent, ident, ns, old_binding, &binding); } } @@ -102,24 +103,23 @@ impl<'a> Resolver<'a> { return self.module_map[&def_id] } - let macros_only = self.cstore.dep_kind_untracked(def_id.krate).macros_only(); - if let Some(&module) = self.extern_module_map.get(&(def_id, macros_only)) { + if let Some(&module) = self.extern_module_map.get(&def_id) { return module; } let (name, parent) = if def_id.index == CRATE_DEF_INDEX { - (self.cstore.crate_name_untracked(def_id.krate).as_interned_str(), None) + (self.cstore().crate_name_untracked(def_id.krate), None) } else { - let def_key = self.cstore.def_key(def_id); + let def_key = self.cstore().def_key(def_id); (def_key.disambiguated_data.data.get_opt_name().unwrap(), Some(self.get_module(DefId { index: def_key.parent.unwrap(), ..def_id }))) }; - let kind = ModuleKind::Def(DefKind::Mod, def_id, name.as_symbol()); + let kind = ModuleKind::Def(DefKind::Mod, def_id, name); let module = self.arenas.alloc_module(ModuleData::new( parent, kind, def_id, ExpnId::root(), DUMMY_SP )); - self.extern_module_map.insert((def_id, macros_only), module); + self.extern_module_map.insert(def_id, 
module); module } @@ -151,9 +151,8 @@ impl<'a> Resolver<'a> { return Some(ext.clone()); } - let ext = Lrc::new(match self.cstore.load_macro_untracked(def_id, &self.session) { - LoadedMacro::MacroDef(item) => - self.compile_macro(&item, self.cstore.crate_edition_untracked(def_id.krate)), + let ext = Lrc::new(match self.cstore().load_macro_untracked(def_id, &self.session) { + LoadedMacro::MacroDef(item, edition) => self.compile_macro(&item, edition), LoadedMacro::ProcMacro(ext) => ext, }); @@ -161,31 +160,21 @@ impl<'a> Resolver<'a> { Some(ext) } - // FIXME: `extra_placeholders` should be included into the `fragment` as regular placeholders. crate fn build_reduced_graph( &mut self, fragment: &AstFragment, - extra_placeholders: &[NodeId], parent_scope: ParentScope<'a>, ) -> LegacyScope<'a> { let mut def_collector = DefCollector::new(&mut self.definitions, parent_scope.expansion); fragment.visit_with(&mut def_collector); - for placeholder in extra_placeholders { - def_collector.visit_macro_invoc(*placeholder); - } - let mut visitor = BuildReducedGraphVisitor { r: self, parent_scope }; fragment.visit_with(&mut visitor); - for placeholder in extra_placeholders { - visitor.parent_scope.legacy = visitor.visit_invoc(*placeholder); - } - visitor.parent_scope.legacy } crate fn build_reduced_graph_external(&mut self, module: Module<'a>) { let def_id = module.def_id().expect("unpopulated module without a def-id"); - for child in self.cstore.item_children_untracked(def_id, self.session) { + for child in self.cstore().item_children_untracked(def_id, self.session) { let child = child.map_id(|_| panic!("unexpected id")); BuildReducedGraphVisitor { r: self, parent_scope: ParentScope::module(module) } .build_reduced_graph_for_external_crate_res(child); @@ -204,14 +193,25 @@ impl<'a> AsMut> for BuildReducedGraphVisitor<'a, '_> { impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { fn resolve_visibility(&mut self, vis: &ast::Visibility) -> ty::Visibility { + self.resolve_visibility_speculative(vis, false).unwrap_or_else(|err| { + self.r.report_vis_error(err); + ty::Visibility::Public + }) + } + + fn resolve_visibility_speculative<'ast>( + &mut self, + vis: &'ast ast::Visibility, + speculative: bool, + ) -> Result> { let parent_scope = &self.parent_scope; match vis.node { - ast::VisibilityKind::Public => ty::Visibility::Public, + ast::VisibilityKind::Public => Ok(ty::Visibility::Public), ast::VisibilityKind::Crate(..) => { - ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX)) + Ok(ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX))) } ast::VisibilityKind::Inherited => { - ty::Visibility::Restricted(parent_scope.module.normal_ancestor_id) + Ok(ty::Visibility::Restricted(parent_scope.module.normal_ancestor_id)) } ast::VisibilityKind::Restricted { ref path, id, .. 
} => { // For visibilities we are not ready to provide correct implementation of "uniform @@ -221,87 +221,68 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { let ident = path.segments.get(0).expect("empty path in visibility").ident; let crate_root = if ident.is_path_segment_keyword() { None - } else if ident.span.rust_2018() { - let msg = "relative paths are not supported in visibilities on 2018 edition"; - self.r.session.struct_span_err(ident.span, msg) - .span_suggestion( - path.span, - "try", - format!("crate::{}", path), - Applicability::MaybeIncorrect, - ) - .emit(); - return ty::Visibility::Public; - } else { - let ctxt = ident.span.ctxt(); + } else if ident.span.rust_2015() { Some(Segment::from_ident(Ident::new( - kw::PathRoot, path.span.shrink_to_lo().with_ctxt(ctxt) + kw::PathRoot, path.span.shrink_to_lo().with_ctxt(ident.span.ctxt()) ))) + } else { + return Err(VisResolutionError::Relative2018(ident.span, path)); }; let segments = crate_root.into_iter() .chain(path.segments.iter().map(|seg| seg.into())).collect::>(); - let expected_found_error = |this: &Self, res: Res| { - let path_str = Segment::names_to_string(&segments); - struct_span_err!(this.r.session, path.span, E0577, - "expected module, found {} `{}`", res.descr(), path_str) - .span_label(path.span, "not a module").emit(); - }; + let expected_found_error = |res| Err(VisResolutionError::ExpectedFound( + path.span, Segment::names_to_string(&segments), res + )); match self.r.resolve_path( &segments, Some(TypeNS), parent_scope, - true, + !speculative, path.span, CrateLint::SimplePath(id), ) { PathResult::Module(ModuleOrUniformRoot::Module(module)) => { let res = module.res().expect("visibility resolved to unnamed block"); - self.r.record_partial_res(id, PartialRes::new(res)); + if !speculative { + self.r.record_partial_res(id, PartialRes::new(res)); + } if module.is_normal() { if res == Res::Err { - ty::Visibility::Public + Ok(ty::Visibility::Public) } else { let vis = ty::Visibility::Restricted(res.def_id()); if self.r.is_accessible_from(vis, parent_scope.module) { - vis + Ok(vis) } else { - let msg = - "visibilities can only be restricted to ancestor modules"; - self.r.session.span_err(path.span, msg); - ty::Visibility::Public + Err(VisResolutionError::AncestorOnly(path.span)) } } } else { - expected_found_error(self, res); - ty::Visibility::Public + expected_found_error(res) } } - PathResult::Module(..) => { - self.r.session.span_err(path.span, "visibility must resolve to a module"); - ty::Visibility::Public - } - PathResult::NonModule(partial_res) => { - expected_found_error(self, partial_res.base_res()); - ty::Visibility::Public - } - PathResult::Failed { span, label, suggestion, .. } => { - self.r.report_error( - span, ResolutionError::FailedToResolve { label, suggestion } - ); - ty::Visibility::Public - } - PathResult::Indeterminate => { - span_err!(self.r.session, path.span, E0578, - "cannot determine resolution for the visibility"); - ty::Visibility::Public - } + PathResult::Module(..) => + Err(VisResolutionError::ModuleOnly(path.span)), + PathResult::NonModule(partial_res) => + expected_found_error(partial_res.base_res()), + PathResult::Failed { span, label, suggestion, .. 
} => + Err(VisResolutionError::FailedToResolve(span, label, suggestion)), + PathResult::Indeterminate => + Err(VisResolutionError::Indeterminate(path.span)), } } } } - fn insert_field_names(&mut self, def_id: DefId, field_names: Vec) { + fn insert_field_names_local(&mut self, def_id: DefId, vdata: &ast::VariantData) { + let field_names = vdata.fields().iter().map(|field| { + respan(field.span, field.ident.map_or(kw::Invalid, |ident| ident.name)) + }).collect(); + self.insert_field_names(def_id, field_names); + } + + fn insert_field_names(&mut self, def_id: DefId, field_names: Vec>) { if !field_names.is_empty() { self.r.field_names.insert(def_id, field_names); } @@ -309,7 +290,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { fn block_needs_anonymous_module(&mut self, block: &Block) -> bool { // If any statements are items, we need to create an anonymous module - block.stmts.iter().any(|statement| match statement.node { + block.stmts.iter().any(|statement| match statement.kind { StmtKind::Item(_) | StmtKind::Mac(_) => true, _ => false, }) @@ -348,9 +329,12 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { self.r.indeterminate_imports.push(directive); match directive.subclass { + // Don't add unresolved underscore imports to modules + SingleImport { target: Ident { name: kw::Underscore, .. }, .. } => {} SingleImport { target, type_ns_only, .. } => { self.r.per_ns(|this, ns| if !type_ns_only || ns == TypeNS { - let mut resolution = this.resolution(current_module, target, ns).borrow_mut(); + let key = this.new_key(target, ns); + let mut resolution = this.resolution(current_module, key).borrow_mut(); resolution.add_single_import(directive); }); } @@ -406,7 +390,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { }; match use_tree.kind { ast::UseTreeKind::Simple(rename, ..) => { - let mut ident = use_tree.ident().gensym_if_underscore(); + let mut ident = use_tree.ident(); let mut module_path = prefix; let mut source = module_path.pop().unwrap(); let mut type_ns_only = false; @@ -584,11 +568,11 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { let parent_scope = &self.parent_scope; let parent = parent_scope.module; let expansion = parent_scope.expansion; - let ident = item.ident.gensym_if_underscore(); + let ident = item.ident; let sp = item.span; let vis = self.resolve_visibility(&item.vis); - match item.node { + match item.kind { ItemKind::Use(ref use_tree) => { self.build_reduced_graph_for_use_tree( // This particular use tree @@ -616,6 +600,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { let crate_id = self.r.crate_loader.process_extern_crate( item, &self.r.definitions ); + self.r.extern_crate_map.insert(item.id, crate_id); self.r.get_module(DefId { krate: crate_id, index: CRATE_DEF_INDEX }) }; @@ -664,8 +649,6 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { self.r.define(parent, ident, TypeNS, imported_binding); } - ItemKind::GlobalAsm(..) => {} - ItemKind::Mod(..) if ident.name == kw::Invalid => {} // Crate root ItemKind::Mod(..) => { @@ -684,9 +667,6 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { self.parent_scope.module = module; } - // Handled in `rustc_metadata::{native_libs,link_args}` - ItemKind::ForeignMod(..) => {} - // These items live in the value namespace. ItemKind::Static(..) => { let res = Res::Def(DefKind::Static, self.r.definitions.local_def_id(item.id)); @@ -735,62 +715,52 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { } // These items live in both the type and value namespaces. 
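The namespace comment above corresponds to a familiar surface behavior; a made-up standalone example:

```rust
// A tuple struct introduces `Pair` in the type namespace *and* a constructor
// `Pair` in the value namespace, so one definition serves both positions.
struct Pair(i32, i32);

fn main() {
    let typed: Pair = Pair(1, 2);          // type use + constructor call
    let ctor: fn(i32, i32) -> Pair = Pair; // the constructor is an ordinary fn value
    let other = ctor(3, 4);
    println!("{} {}", typed.0 + other.0, typed.1 + other.1);
}
```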
- ItemKind::Struct(ref struct_def, _) => { + ItemKind::Struct(ref vdata, _) => { // Define a name in the type namespace. let def_id = self.r.definitions.local_def_id(item.id); let res = Res::Def(DefKind::Struct, def_id); self.r.define(parent, ident, TypeNS, (res, vis, sp, expansion)); - let mut ctor_vis = vis; - - let has_non_exhaustive = attr::contains_name(&item.attrs, sym::non_exhaustive); - - // If the structure is marked as non_exhaustive then lower the visibility - // to within the crate. - if has_non_exhaustive && vis == ty::Visibility::Public { - ctor_vis = ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX)); - } - // Record field names for error reporting. - let field_names = struct_def.fields().iter().filter_map(|field| { - let field_vis = self.resolve_visibility(&field.vis); - if ctor_vis.is_at_least(field_vis, &*self.r) { - ctor_vis = field_vis; - } - field.ident.map(|ident| ident.name) - }).collect(); - let item_def_id = self.r.definitions.local_def_id(item.id); - self.insert_field_names(item_def_id, field_names); + self.insert_field_names_local(def_id, vdata); // If this is a tuple or unit struct, define a name // in the value namespace as well. - if let Some(ctor_node_id) = struct_def.ctor_id() { + if let Some(ctor_node_id) = vdata.ctor_id() { + let mut ctor_vis = vis; + // If the structure is marked as non_exhaustive then lower the visibility + // to within the crate. + if vis == ty::Visibility::Public && + attr::contains_name(&item.attrs, sym::non_exhaustive) { + ctor_vis = ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX)); + } + for field in vdata.fields() { + // NOTE: The field may be an expansion placeholder, but expansion sets + // correct visibilities for unnamed field placeholders specifically, so the + // constructor visibility should still be determined correctly. + if let Ok(field_vis) = + self.resolve_visibility_speculative(&field.vis, true) { + if ctor_vis.is_at_least(field_vis, &*self.r) { + ctor_vis = field_vis; + } + } + } let ctor_res = Res::Def( - DefKind::Ctor(CtorOf::Struct, CtorKind::from_ast(struct_def)), + DefKind::Ctor(CtorOf::Struct, CtorKind::from_ast(vdata)), self.r.definitions.local_def_id(ctor_node_id), ); self.r.define(parent, ident, ValueNS, (ctor_res, ctor_vis, sp, expansion)); - self.r.struct_constructors.insert(res.def_id(), (ctor_res, ctor_vis)); + self.r.struct_constructors.insert(def_id, (ctor_res, ctor_vis)); } } ItemKind::Union(ref vdata, _) => { - let res = Res::Def(DefKind::Union, self.r.definitions.local_def_id(item.id)); + let def_id = self.r.definitions.local_def_id(item.id); + let res = Res::Def(DefKind::Union, def_id); self.r.define(parent, ident, TypeNS, (res, vis, sp, expansion)); // Record field names for error reporting. - let field_names = vdata.fields().iter().filter_map(|field| { - self.resolve_visibility(&field.vis); - field.ident.map(|ident| ident.name) - }).collect(); - let item_def_id = self.r.definitions.local_def_id(item.id); - self.insert_field_names(item_def_id, field_names); - } - - ItemKind::Impl(.., ref impl_items) => { - for impl_item in impl_items { - self.resolve_visibility(&impl_item.vis); - } + self.insert_field_names_local(def_id, vdata); } ItemKind::Trait(..) => { @@ -807,13 +777,16 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { self.parent_scope.module = module; } + // These items do not add names to modules. + ItemKind::Impl(..) | ItemKind::ForeignMod(..) | ItemKind::GlobalAsm(..) => {} + ItemKind::MacroDef(..) 
| ItemKind::Mac(_) => unreachable!(), } } /// Constructs the reduced graph for one foreign item. fn build_reduced_graph_for_foreign_item(&mut self, item: &ForeignItem) { - let (res, ns) = match item.node { + let (res, ns) = match item.kind { ForeignItemKind::Fn(..) => { (Res::Def(DefKind::Fn, self.r.definitions.local_def_id(item.id)), ValueNS) } @@ -849,22 +822,20 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { fn build_reduced_graph_for_external_crate_res(&mut self, child: Export) { let parent = self.parent_scope.module; let Export { ident, res, vis, span } = child; - // FIXME: We shouldn't create the gensym here, it should come from metadata, - // but metadata cannot encode gensyms currently, so we create it here. - // This is only a guess, two equivalent idents may incorrectly get different gensyms here. - let ident = ident.gensym_if_underscore(); let expansion = ExpnId::root(); // FIXME(jseyfried) intercrate hygiene // Record primary definitions. match res { Res::Def(kind @ DefKind::Mod, def_id) | Res::Def(kind @ DefKind::Enum, def_id) | Res::Def(kind @ DefKind::Trait, def_id) => { - let module = self.r.new_module(parent, - ModuleKind::Def(kind, def_id, ident.name), - def_id, - expansion, - span); - self.r.define(parent, ident, TypeNS, (module, vis, DUMMY_SP, expansion)); + let module = self.r.new_module( + parent, + ModuleKind::Def(kind, def_id, ident.name), + def_id, + expansion, + span, + ); + self.r.define(parent, ident, TypeNS, (module, vis, span, expansion)); } Res::Def(DefKind::Struct, _) | Res::Def(DefKind::Union, _) @@ -877,34 +848,35 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { | Res::Def(DefKind::AssocOpaqueTy, _) | Res::PrimTy(..) | Res::ToolMod => - self.r.define(parent, ident, TypeNS, (res, vis, DUMMY_SP, expansion)), + self.r.define(parent, ident, TypeNS, (res, vis, span, expansion)), Res::Def(DefKind::Fn, _) | Res::Def(DefKind::Method, _) | Res::Def(DefKind::Static, _) | Res::Def(DefKind::Const, _) | Res::Def(DefKind::AssocConst, _) | Res::Def(DefKind::Ctor(..), _) => - self.r.define(parent, ident, ValueNS, (res, vis, DUMMY_SP, expansion)), + self.r.define(parent, ident, ValueNS, (res, vis, span, expansion)), Res::Def(DefKind::Macro(..), _) | Res::NonMacroAttr(..) => - self.r.define(parent, ident, MacroNS, (res, vis, DUMMY_SP, expansion)), + self.r.define(parent, ident, MacroNS, (res, vis, span, expansion)), Res::Def(DefKind::TyParam, _) | Res::Def(DefKind::ConstParam, _) | Res::Local(..) | Res::SelfTy(..) | Res::SelfCtor(..) | Res::Err => bug!("unexpected resolution: {:?}", res) } // Record some extra data for better diagnostics. 
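The `insert_unused_macro` helper added a bit further down in this file exempts macros whose names begin with `_` from the `unused_macros` lint. A small sketch of the user-visible effect, not part of the patch:

```
macro_rules! unused { () => {} }   // warning: unused macro definition
macro_rules! _scratch { () => {} } // no warning: the name starts with `_`

fn main() {}
```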
+ let cstore = self.r.cstore(); match res { Res::Def(DefKind::Struct, def_id) | Res::Def(DefKind::Union, def_id) => { - let field_names = self.r.cstore.struct_field_names_untracked(def_id); + let field_names = cstore.struct_field_names_untracked(def_id, self.r.session); self.insert_field_names(def_id, field_names); } Res::Def(DefKind::Method, def_id) => { - if self.r.cstore.associated_item_cloned_untracked(def_id).method_has_self_argument { + if cstore.associated_item_cloned_untracked(def_id).method_has_self_argument { self.r.has_self.insert(def_id); } } Res::Def(DefKind::Ctor(CtorOf::Struct, ..), def_id) => { - let parent = self.r.cstore.def_key(def_id).parent; + let parent = cstore.def_key(def_id).parent; if let Some(struct_def_id) = parent.map(|index| DefId { index, ..def_id }) { self.r.struct_constructors.insert(struct_def_id, (res, vis)); } @@ -936,7 +908,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { span_err!(self.r.session, item.span, E0468, "an `extern crate` loading macros must be at the crate root"); } - if let ItemKind::ExternCrate(Some(orig_name)) = item.node { + if let ItemKind::ExternCrate(Some(orig_name)) = item.kind { if orig_name == kw::SelfLower { self.r.session.span_err(attr.span, "`macro_use` is not supported on `extern crate self`"); @@ -944,7 +916,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { } let ill_formed = |span| span_err!(self.r.session, span, E0466, "bad macro import"); match attr.meta() { - Some(meta) => match meta.node { + Some(meta) => match meta.kind { MetaItemKind::Word => { import_all = Some(meta.span); break; @@ -1061,10 +1033,19 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { None } + // Mark the given macro as unused unless its name starts with `_`. + // Macro uses will remove items from this set, and the remaining + // items will be reported as `unused_macros`. + fn insert_unused_macro(&mut self, ident: Ident, node_id: NodeId, span: Span) { + if !ident.as_str().starts_with("_") { + self.r.unused_macros.insert(node_id, span); + } + } + fn define_macro(&mut self, item: &ast::Item) -> LegacyScope<'a> { - let parent_scope = &self.parent_scope; + let parent_scope = self.parent_scope; let expansion = parent_scope.expansion; - let (ext, ident, span, is_legacy) = match &item.node { + let (ext, ident, span, is_legacy) = match &item.kind { ItemKind::MacroDef(def) => { let ext = Lrc::new(self.r.compile_macro(item, self.r.session.edition())); (ext, item.ident, item.span, def.legacy) @@ -1102,7 +1083,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { (res, vis, span, expansion, IsMacroExport)); } else { self.r.check_reserved_macro_name(ident, res); - self.r.unused_macros.insert(item.id, span); + self.insert_unused_macro(ident, item.id, span); } LegacyScope::Binding(self.r.arenas.alloc_legacy_binding(LegacyBinding { parent_legacy_scope: parent_scope.legacy, binding, ident @@ -1111,7 +1092,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { let module = parent_scope.module; let vis = self.resolve_visibility(&item.vis); if vis != ty::Visibility::Public { - self.r.unused_macros.insert(item.id, span); + self.insert_unused_macro(ident, item.id, span); } self.r.define(module, ident, MacroNS, (res, vis, span, expansion)); self.parent_scope.legacy @@ -1122,7 +1103,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { macro_rules! method { ($visit:ident: $ty:ty, $invoc:path, $walk:ident) => { fn $visit(&mut self, node: &'b $ty) { - if let $invoc(..) = node.node { + if let $invoc(..) 
= node.kind { self.visit_invoc(node.id); } else { visit::$walk(self, node); @@ -1132,13 +1113,12 @@ macro_rules! method { } impl<'a, 'b> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b> { - method!(visit_impl_item: ast::ImplItem, ast::ImplItemKind::Macro, walk_impl_item); method!(visit_expr: ast::Expr, ast::ExprKind::Mac, walk_expr); method!(visit_pat: ast::Pat, ast::PatKind::Mac, walk_pat); method!(visit_ty: ast::Ty, ast::TyKind::Mac, walk_ty); fn visit_item(&mut self, item: &'b Item) { - let macro_use = match item.node { + let macro_use = match item.kind { ItemKind::MacroDef(..) => { self.parent_scope.legacy = self.define_macro(item); return @@ -1161,7 +1141,7 @@ impl<'a, 'b> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b> { } fn visit_stmt(&mut self, stmt: &'b ast::Stmt) { - if let ast::StmtKind::Mac(..) = stmt.node { + if let ast::StmtKind::Mac(..) = stmt.kind { self.parent_scope.legacy = self.visit_invoc(stmt.id); } else { visit::walk_stmt(self, stmt); @@ -1169,7 +1149,7 @@ impl<'a, 'b> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b> { } fn visit_foreign_item(&mut self, foreign_item: &'b ForeignItem) { - if let ForeignItemKind::Macro(_) = foreign_item.node { + if let ForeignItemKind::Macro(_) = foreign_item.kind { self.visit_invoc(foreign_item.id); return; } @@ -1190,14 +1170,14 @@ impl<'a, 'b> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b> { fn visit_trait_item(&mut self, item: &'b TraitItem) { let parent = self.parent_scope.module; - if let TraitItemKind::Macro(_) = item.node { + if let TraitItemKind::Macro(_) = item.kind { self.visit_invoc(item.id); return } // Add the item to the trait info. let item_def_id = self.r.definitions.local_def_id(item.id); - let (res, ns) = match item.node { + let (res, ns) = match item.kind { TraitItemKind::Const(..) => (Res::Def(DefKind::AssocConst, item_def_id), ValueNS), TraitItemKind::Method(ref sig, _) => { if sig.decl.has_self() { @@ -1216,10 +1196,19 @@ impl<'a, 'b> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b> { visit::walk_trait_item(self, item); } + fn visit_impl_item(&mut self, item: &'b ast::ImplItem) { + if let ast::ImplItemKind::Macro(..) = item.kind { + self.visit_invoc(item.id); + } else { + self.resolve_visibility(&item.vis); + visit::walk_impl_item(self, item); + } + } + fn visit_token(&mut self, t: Token) { if let token::Interpolated(nt) = t.kind { if let token::NtExpr(ref expr) = *nt { - if let ast::ExprKind::Mac(..) = expr.node { + if let ast::ExprKind::Mac(..) = expr.kind { self.visit_invoc(expr.id); } } @@ -1277,6 +1266,7 @@ impl<'a, 'b> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b> { if sf.is_placeholder { self.visit_invoc(sf.id); } else { + self.resolve_visibility(&sf.vis); visit::walk_struct_field(self, sf); } } diff --git a/src/librustc_resolve/check_unused.rs b/src/librustc_resolve/check_unused.rs index 0d85be83e1..44b7a9fa04 100644 --- a/src/librustc_resolve/check_unused.rs +++ b/src/librustc_resolve/check_unused.rs @@ -103,7 +103,7 @@ impl<'a, 'b> Visitor<'a> for UnusedImportCheckVisitor<'a, 'b> { // whether they're used or not. Also ignore imports with a dummy span // because this means that they were generated in some fashion by the // compiler and we don't need to consider them. - if let ast::ItemKind::Use(..) = item.node { + if let ast::ItemKind::Use(..) 
= item.kind { if item.vis.node.is_pub() || item.span.is_dummy() { return; } @@ -232,7 +232,7 @@ impl Resolver<'_> { directive.span.is_dummy() => { if let ImportDirectiveSubclass::MacroUse = directive.subclass { if !directive.span.is_dummy() { - self.session.buffer_lint( + self.lint_buffer.buffer_lint( lint::builtin::MACRO_USE_EXTERN_CRATE, directive.id, directive.span, @@ -250,7 +250,7 @@ impl Resolver<'_> { ImportDirectiveSubclass::MacroUse => { let lint = lint::builtin::UNUSED_IMPORTS; let msg = "unused `#[macro_use]` import"; - self.session.buffer_lint(lint, directive.id, directive.span, msg); + self.lint_buffer.buffer_lint(lint, directive.id, directive.span, msg); } _ => {} } @@ -312,7 +312,7 @@ impl Resolver<'_> { "remove the unused import" }; - visitor.r.session.buffer_lint_with_diagnostic( + visitor.r.lint_buffer.buffer_lint_with_diagnostic( lint::builtin::UNUSED_IMPORTS, unused.use_tree_id, ms, diff --git a/src/librustc_resolve/diagnostics.rs b/src/librustc_resolve/diagnostics.rs index c479912b4e..6a8a678da0 100644 --- a/src/librustc_resolve/diagnostics.rs +++ b/src/librustc_resolve/diagnostics.rs @@ -10,18 +10,20 @@ use rustc::session::Session; use rustc::ty::{self, DefIdTree}; use rustc::util::nodemap::FxHashSet; use syntax::ast::{self, Ident, Path}; -use syntax::ext::base::MacroKind; use syntax::feature_gate::BUILTIN_ATTRIBUTES; +use syntax::print::pprust; use syntax::source_map::SourceMap; use syntax::struct_span_err; use syntax::symbol::{Symbol, kw}; use syntax::util::lev_distance::find_best_match_for_name; +use syntax_pos::hygiene::MacroKind; use syntax_pos::{BytePos, Span, MultiSpan}; use crate::resolve_imports::{ImportDirective, ImportDirectiveSubclass, ImportResolver}; use crate::{path_names_to_string, KNOWN_TOOLS}; -use crate::{BindingError, CrateLint, LegacyScope, Module, ModuleOrUniformRoot}; +use crate::{BindingError, CrateLint, HasGenericParams, LegacyScope, Module, ModuleOrUniformRoot}; use crate::{PathResult, ParentScope, ResolutionError, Resolver, Scope, ScopeSet, Segment}; +use crate::VisResolutionError; type Res = def::Res; @@ -58,21 +60,6 @@ fn reduce_impl_span_to_impl_keyword(cm: &SourceMap, impl_span: Span) -> Span { impl_span } -crate fn add_typo_suggestion( - err: &mut DiagnosticBuilder<'_>, suggestion: Option, span: Span -) -> bool { - if let Some(suggestion) = suggestion { - let msg = format!( - "{} {} with a similar name exists", suggestion.res.article(), suggestion.res.descr() - ); - err.span_suggestion( - span, &msg, suggestion.candidate.to_string(), Applicability::MaybeIncorrect - ); - return true; - } - false -} - impl<'a> Resolver<'a> { crate fn add_module_candidates( &mut self, @@ -80,11 +67,11 @@ impl<'a> Resolver<'a> { names: &mut Vec, filter_fn: &impl Fn(Res) -> bool, ) { - for (&(ident, _), resolution) in self.resolutions(module).borrow().iter() { + for (key, resolution) in self.resolutions(module).borrow().iter() { if let Some(binding) = resolution.borrow().binding { let res = binding.res(); if filter_fn(res) { - names.push(TypoSuggestion::from_res(ident.name, res)); + names.push(TypoSuggestion::from_res(key.ident.name, res)); } } } @@ -102,7 +89,7 @@ impl<'a> Resolver<'a> { &self, span: Span, resolution_error: ResolutionError<'_> ) -> DiagnosticBuilder<'_> { match resolution_error { - ResolutionError::GenericParamsFromOuterFunction(outer_res) => { + ResolutionError::GenericParamsFromOuterFunction(outer_res, has_generic_params) => { let mut err = struct_span_err!(self.session, span, E0401, @@ -148,22 +135,24 @@ impl<'a> Resolver<'a> { } } - // 
Try to retrieve the span of the function signature and generate a new message - // with a local type or const parameter. - let sugg_msg = &format!("try using a local generic parameter instead"); - if let Some((sugg_span, new_snippet)) = cm.generate_local_type_param_snippet(span) { - // Suggest the modification to the user - err.span_suggestion( - sugg_span, - sugg_msg, - new_snippet, - Applicability::MachineApplicable, - ); - } else if let Some(sp) = cm.generate_fn_name_span(span) { - err.span_label(sp, - format!("try adding a local generic parameter in this method instead")); - } else { - err.help(&format!("try using a local generic parameter instead")); + if has_generic_params == HasGenericParams::Yes { + // Try to retrieve the span of the function signature and generate a new + // message with a local type or const parameter. + let sugg_msg = &format!("try using a local generic parameter instead"); + if let Some((sugg_span, snippet)) = cm.generate_local_type_param_snippet(span) { + // Suggest the modification to the user + err.span_suggestion( + sugg_span, + sugg_msg, + snippet, + Applicability::MachineApplicable, + ); + } else if let Some(sp) = cm.generate_fn_name_span(span) { + err.span_label(sp, + format!("try adding a local generic parameter in this method instead")); + } else { + err.help(&format!("try using a local generic parameter instead")); + } } err @@ -354,19 +343,58 @@ impl<'a> Resolver<'a> { span, "defaulted type parameters cannot be forward declared".to_string()); err } - ResolutionError::ConstParamDependentOnTypeParam => { + ResolutionError::SelfInTyParamDefault => { let mut err = struct_span_err!( self.session, span, - E0671, - "const parameters cannot depend on type parameters" + E0735, + "type parameters cannot use `Self` in their defaults" ); - err.span_label(span, format!("const parameter depends on type parameter")); + err.span_label( + span, "`Self` in type parameter default".to_string()); err } } } + crate fn report_vis_error(&self, vis_resolution_error: VisResolutionError<'_>) { + match vis_resolution_error { + VisResolutionError::Relative2018(span, path) => { + let mut err = self.session.struct_span_err(span, + "relative paths are not supported in visibilities on 2018 edition"); + err.span_suggestion( + path.span, + "try", + format!("crate::{}", pprust::path_to_string(&path)), + Applicability::MaybeIncorrect, + ); + err + } + VisResolutionError::AncestorOnly(span) => { + struct_span_err!(self.session, span, E0742, + "visibilities can only be restricted to ancestor modules") + } + VisResolutionError::FailedToResolve(span, label, suggestion) => { + self.into_struct_error( + span, ResolutionError::FailedToResolve { label, suggestion } + ) + } + VisResolutionError::ExpectedFound(span, path_str, res) => { + let mut err = struct_span_err!(self.session, span, E0577, + "expected module, found {} `{}`", res.descr(), path_str); + err.span_label(span, "not a module"); + err + } + VisResolutionError::Indeterminate(span) => { + struct_span_err!(self.session, span, E0578, + "cannot determine resolution for the visibility") + } + VisResolutionError::ModuleOnly(span) => { + self.session.struct_span_err(span, "visibility must resolve to a module") + } + }.emit() + } + /// Lookup typo candidate in scope for a macro or import. fn early_lookup_typo_candidate( &mut self, @@ -516,7 +544,7 @@ impl<'a> Resolver<'a> { in_module_is_extern)) = worklist.pop() { // We have to visit module children in deterministic order to avoid // instabilities in reported imports (#43552). 
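The `GenericParamsFromOuterFunction` hunk above now gates the "try using a local generic parameter instead" suggestion on `HasGenericParams::Yes`. For reference, the E0401 case that suggestion targets looks roughly like this (illustrative only):

```compile_fail,E0401
fn outer<T>() {
    fn inner(_: T) {} // error[E0401]: can't use generic parameters from outer
                      // function; help: try using a local generic parameter
}
```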
- in_module.for_each_child_stable(self, |this, ident, ns, name_binding| { + in_module.for_each_child(self, |this, ident, ns, name_binding| { // avoid imports entirely if name_binding.is_import() && !name_binding.is_extern_crate() { return; } // avoid non-importable candidates as well @@ -638,7 +666,7 @@ impl<'a> Resolver<'a> { let suggestion = self.early_lookup_typo_candidate( ScopeSet::Macro(macro_kind), parent_scope, ident, is_expected ); - add_typo_suggestion(err, suggestion, ident.span); + self.add_typo_suggestion(err, suggestion, ident.span); if macro_kind == MacroKind::Derive && (ident.as_str() == "Send" || ident.as_str() == "Sync") { @@ -649,6 +677,33 @@ impl<'a> Resolver<'a> { err.help("have you added the `#[macro_use]` on the module/import?"); } } + + crate fn add_typo_suggestion( + &self, + err: &mut DiagnosticBuilder<'_>, + suggestion: Option, + span: Span, + ) -> bool { + if let Some(suggestion) = suggestion { + let msg = format!( + "{} {} with a similar name exists", suggestion.res.article(), suggestion.res.descr() + ); + err.span_suggestion( + span, &msg, suggestion.candidate.to_string(), Applicability::MaybeIncorrect + ); + let def_span = suggestion.res.opt_def_id() + .and_then(|def_id| self.definitions.opt_span(def_id)); + if let Some(span) = def_span { + err.span_label(span, &format!( + "similarly named {} `{}` defined here", + suggestion.res.descr(), + suggestion.candidate.as_str(), + )); + } + return true; + } + false + } } impl<'a, 'b> ImportResolver<'a, 'b> { @@ -836,7 +891,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> { } let resolutions = self.r.resolutions(crate_module).borrow(); - let resolution = resolutions.get(&(ident, MacroNS))?; + let resolution = resolutions.get(&self.r.new_key(ident, MacroNS))?; let binding = resolution.borrow().binding()?; if let Res::Def(DefKind::Macro(MacroKind::Bang), _) = binding.res() { let module_name = crate_module.kind.name().unwrap(); diff --git a/src/librustc_resolve/error_codes.rs b/src/librustc_resolve/error_codes.rs index adbff67cc8..c59959ae4f 100644 --- a/src/librustc_resolve/error_codes.rs +++ b/src/librustc_resolve/error_codes.rs @@ -8,9 +8,9 @@ Type parameter defaults can only use parameters that occur before them. Erroneous code example: ```compile_fail,E0128 -struct Foo { +struct Foo { field1: T, - filed2: U, + field2: U, } // error: type parameters with a default cannot use forward declared // identifiers @@ -20,9 +20,9 @@ Since type parameters are evaluated in-order, you may be able to fix this issue by doing: ``` -struct Foo { +struct Foo { field1: T, - filed2: U, + field2: U, } ``` @@ -974,7 +974,7 @@ function: struct Foo { a: bool }; let f = Foo(); -// error: expected function, found `Foo` +// error: expected function, tuple struct or tuple variant, found `Foo` // `Foo` is a struct name, but this expression uses it like a function name ``` @@ -992,7 +992,8 @@ yield this error: ```compile_fail,E0423 println(""); -// error: expected function, found macro `println` +// error: expected function, tuple struct or tuple variant, +// found macro `println` // did you mean `println!(...)`? (notice the trailing `!`) ``` @@ -1013,7 +1014,8 @@ fn h1() -> i32 { "##, E0424: r##" -The `self` keyword was used in a static method. +The `self` keyword was used inside of an associated function without a "`self` +receiver" parameter. Erroneous code example: @@ -1021,25 +1023,33 @@ Erroneous code example: struct Foo; impl Foo { - fn bar(self) {} + // `bar` is a method, because it has a receiver parameter. 
+ fn bar(&self) {} + // `foo` is not a method, because it has no receiver parameter. fn foo() { - self.bar(); // error: `self` is not available in a static method. + self.bar(); // error: `self` value is a keyword only available in + // methods with a `self` parameter } } ``` -Please check if the method's argument list should have contained `self`, -`&self`, or `&mut self` (in case you didn't want to create a static -method), and add it if so. Example: +The `self` keyword can only be used inside methods, which are associated +functions (functions defined inside of a `trait` or `impl` block) that have a +`self` receiver as its first parameter, like `self`, `&self`, `&mut self` or +`self: &mut Pin` (this last one is an example of an ["abitrary `self` +type"](https://github.com/rust-lang/rust/issues/44874)). + +Check if the associated function's parameter list should have contained a `self` +receiver for it to be a method, and add it if so. Example: ``` struct Foo; impl Foo { - fn bar(self) {} + fn bar(&self) {} - fn foo(self) { + fn foo(self) { // `foo` is now a method. self.bar(); // ok! } } @@ -1525,6 +1535,51 @@ match r { ``` "##, +E0531: r##" +An unknown tuple struct/variant has been used. + +Erroneous code example: + +```compile_fail,E0531 +let Type(x) = Type(12); // error! +match Bar(12) { + Bar(x) => {} // error! + _ => {} +} +``` + +In most cases, it's either a forgotten import or a typo. However, let's look at +how you can have such a type: + +```edition2018 +struct Type(u32); // this is a tuple struct + +enum Foo { + Bar(u32), // this is a tuple variant +} + +use Foo::*; // To use Foo's variant directly, we need to import them in + // the scope. +``` + +Either way, it should work fine with our previous code: + +```edition2018 +struct Type(u32); + +enum Foo { + Bar(u32), +} +use Foo::*; + +let Type(x) = Type(12); // ok! +match Type(12) { + Type(x) => {} // ok! + _ => {} +} +``` +"##, + E0532: r##" Pattern arm did not match expected kind. @@ -1538,7 +1593,7 @@ enum State { fn print_on_failure(state: &State) { match *state { - // error: expected unit struct/variant or constant, found tuple + // error: expected unit struct, unit variant or constant, found tuple // variant `State::Failed` State::Failed => println!("Failed"), _ => () @@ -1566,6 +1621,263 @@ fn print_on_failure(state: &State) { ``` "##, +E0573: r##" +Something other than a type has been used when one was expected. + +Erroneous code examples: + +```compile_fail,E0573 +enum Dragon { + Born, +} + +fn oblivion() -> Dragon::Born { // error! + Dragon::Born +} + +const HOBBIT: u32 = 2; +impl HOBBIT {} // error! + +enum Wizard { + Gandalf, + Saruman, +} + +trait Isengard { + fn wizard(_: Wizard::Saruman); // error! +} +``` + +In all these errors, a type was expected. For example, in the first error, if +we want to return the `Born` variant from the `Dragon` enum, we must set the +function to return the enum and not its variant: + +``` +enum Dragon { + Born, +} + +fn oblivion() -> Dragon { // ok! + Dragon::Born +} +``` + +In the second error, you can't implement something on an item, only on types. +We would need to create a new type if we wanted to do something similar: + +``` +struct Hobbit(u32); // we create a new type + +const HOBBIT: Hobbit = Hobbit(2); +impl Hobbit {} // ok! +``` + +In the third case, we tried to only expect one variant of the `Wizard` enum, +which is not possible. 
To make this work, we need to using pattern matching +over the `Wizard` enum: + +``` +enum Wizard { + Gandalf, + Saruman, +} + +trait Isengard { + fn wizard(w: Wizard) { // ok! + match w { + Wizard::Saruman => { + // do something + } + _ => {} // ignore everything else + } + } +} +``` +"##, + +E0574: r##" +Something other than a struct, variant or union has been used when one was +expected. + +Erroneous code example: + +```compile_fail,E0574 +mod Mordor {} + +let sauron = Mordor { x: () }; // error! + +enum Jak { + Daxter { i: isize }, +} + +let eco = Jak::Daxter { i: 1 }; +match eco { + Jak { i } => {} // error! +} +``` + +In all these errors, a type was expected. For example, in the first error, +we tried to instantiate the `Mordor` module, which is impossible. If you want +to instantiate a type inside a module, you can do it as follow: + +``` +mod Mordor { + pub struct TheRing { + pub x: usize, + } +} + +let sauron = Mordor::TheRing { x: 1 }; // ok! +``` + +In the second error, we tried to bind the `Jak` enum directly, which is not +possible: you can only bind one of its variants. To do so: + +``` +enum Jak { + Daxter { i: isize }, +} + +let eco = Jak::Daxter { i: 1 }; +match eco { + Jak::Daxter { i } => {} // ok! +} +``` +"##, + +E0575: r##" +Something other than a type or an associated type was given. + +Erroneous code example: + +```compile_fail,E0575 +enum Rick { Morty } + +let _: ::Morty; // error! + +trait Age { + type Empire; + fn Mythology() {} +} + +impl Age for u8 { + type Empire = u16; +} + +let _: ::Mythology; // error! +``` + +In both cases, we're declaring a variable (called `_`) and we're giving it a +type. However, `::Morty` and `::Mythology` aren't types, +therefore the compiler throws an error. + +`::Morty` is an enum variant, you cannot use a variant as a type, +you have to use the enum directly: + +``` +enum Rick { Morty } + +let _: Rick; // ok! +``` + +`::Mythology` is a trait method, which is definitely not a type. +However, the `Age` trait provides an associated type `Empire` which can be +used as a type: + +``` +trait Age { + type Empire; + fn Mythology() {} +} + +impl Age for u8 { + type Empire = u16; +} + +let _: ::Empire; // ok! +``` +"##, + +E0576: r##" +An associated item wasn't found in the given type. + +Erroneous code example: + +```compile_fail,E0576 +trait Hello { + type Who; + + fn hello() -> ::You; // error! +} +``` + +In this example, we tried to use the non-existent associated type `You` of the +`Hello` trait. To fix this error, use an existing associated type: + +``` +trait Hello { + type Who; + + fn hello() -> ::Who; // ok! +} +``` +"##, + +E0577: r##" +Something other than a module was found in visibility scope. + +Erroneous code example: + +```compile_fail,E0577,edition2018 +pub struct Sea; + +pub (in crate::Sea) struct Shark; // error! + +fn main() {} +``` + +`Sea` is not a module, therefore it is invalid to use it in a visibility path. +To fix this error we need to ensure `Sea` is a module. + +Please note that the visibility scope can only be applied on ancestors! + +```edition2018 +pub mod Sea { + pub (in crate::Sea) struct Shark; // ok! +} + +fn main() {} +``` +"##, + +E0578: r##" +A module cannot be found and therefore, the visibility cannot be determined. + +Erroneous code example: + +```compile_fail,E0578,edition2018 +foo!(); + +pub (in ::Sea) struct Shark; // error! 
+ +fn main() {} +``` + +Because of the call to the `foo` macro, the compiler guesses that the missing +module could be inside it and fails because the macro definition cannot be +found. + +To fix this error, please be sure that the module is in scope: + +```edition2018 +pub mod Sea { + pub (in crate::Sea) struct Shark; +} + +fn main() {} +``` +"##, + E0603: r##" A private item was used outside its scope. @@ -1600,7 +1912,7 @@ An item usage is ambiguous. Erroneous code example: -```compile_fail,E0659 +```compile_fail,edition2018,E0659 pub mod moon { pub fn foo() {} } @@ -1610,12 +1922,12 @@ pub mod earth { } mod collider { - pub use moon::*; - pub use earth::*; + pub use crate::moon::*; + pub use crate::earth::*; } fn main() { - collider::foo(); // ERROR: `foo` is ambiguous + crate::collider::foo(); // ERROR: `foo` is ambiguous } ``` @@ -1627,7 +1939,7 @@ functions collide. To solve this error, the best solution is generally to keep the path before the item when using it. Example: -``` +```edition2018 pub mod moon { pub fn foo() {} } @@ -1637,29 +1949,84 @@ pub mod earth { } mod collider { - pub use moon; - pub use earth; + pub use crate::moon; + pub use crate::earth; } fn main() { - collider::moon::foo(); // ok! - collider::earth::foo(); // ok! + crate::collider::moon::foo(); // ok! + crate::collider::earth::foo(); // ok! } ``` "##, E0671: r##" +#### Note: this error code is no longer emitted by the compiler. + Const parameters cannot depend on type parameters. The following is therefore invalid: -```compile_fail,E0671 + +```compile_fail,E0741 #![feature(const_generics)] -fn const_id() -> T { // error: const parameter - // depends on type parameter +fn const_id() -> T { // error N } ``` "##, + +E0735: r##" +Type parameter defaults cannot use `Self` on structs, enums, or unions. + +Erroneous code example: + +```compile_fail,E0735 +struct Foo> { + field1: Option, + field2: Option, +} +// error: type parameters cannot use `Self` in their defaults. +``` +"##, + +E0742: r##" +Visibility is restricted to a module which isn't an ancestor of the current +item. + +Erroneous code example: + +```compile_fail,E0742,edition2018 +pub mod Sea {} + +pub (in crate::Sea) struct Shark; // error! + +fn main() {} +``` + +To fix this error, we need to move the `Shark` struct inside the `Sea` module: + +```edition2018 +pub mod Sea { + pub (in crate::Sea) struct Shark; // ok! +} + +fn main() {} +``` + +Of course, you can do it as long as the module you're referring to is an +ancestor: + +```edition2018 +pub mod Earth { + pub mod Sea { + pub (in crate::Earth) struct Shark; // ok! + } +} + +fn main() {} +``` +"##, + ; // E0153, unused error code // E0157, unused error code @@ -1675,14 +2042,7 @@ fn const_id() -> T { // error: const parameter // E0419, merged into 531 // E0420, merged into 532 // E0421, merged into 531 - E0531, // unresolved pattern path kind `name` // E0427, merged into 530 // E0467, removed // E0470, removed - E0573, - E0574, - E0575, - E0576, - E0577, - E0578, } diff --git a/src/librustc_resolve/late.rs b/src/librustc_resolve/late.rs index aae283b745..004d86cee8 100644 --- a/src/librustc_resolve/late.rs +++ b/src/librustc_resolve/late.rs @@ -5,7 +5,6 @@ //! If you wonder why there's no `early.rs`, that's because it's split into three files - //! `build_reduced_graph.rs`, `macros.rs` and `resolve_imports.rs`. 
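The `HasGenericParams` flag introduced just below feeds the E0401 gating earlier in this diff: items such as `static`s and `const`s get `ItemRibKind(HasGenericParams::No)`, so the error is still reported but the compiler no longer suggests adding a generic parameter to an item that cannot take one. A hedged sketch of that case (illustrative only):

```compile_fail,E0401
fn f<T>() {
    // error[E0401]: can't use generic parameters from outer function.
    // A `static` cannot declare generics, so with `HasGenericParams::No`
    // no "try using a local generic parameter" suggestion is emitted.
    static X: Option<T> = None;
}
```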
-use GenericParameters::*; use RibKind::*; use crate::{path_names_to_string, BindingError, CrateLint, LexicalScopeBinding}; @@ -46,16 +45,6 @@ struct BindingInfo { binding_mode: BindingMode, } -#[derive(Copy, Clone)] -enum GenericParameters<'a, 'b> { - NoGenericParams, - HasGenericParams(// Type parameters. - &'b Generics, - - // The kind of the rib used for type parameters. - RibKind<'a>), -} - #[derive(Copy, Clone, PartialEq, Eq, Debug)] enum PatternSource { Match, @@ -85,6 +74,10 @@ enum PatBoundCtx { Or, } +/// Does this the item (from the item rib scope) allow generic parameters? +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +crate enum HasGenericParams { Yes, No } + /// The rib kind restricts certain accesses, /// e.g. to a `Res::Local` of an outer item. #[derive(Copy, Clone, Debug)] @@ -103,7 +96,7 @@ crate enum RibKind<'a> { FnItemRibKind, /// We passed through an item scope. Disallow upvars. - ItemRibKind, + ItemRibKind(HasGenericParams), /// We're in a constant item. Can't refer to dynamic stuff. ConstantItemRibKind, @@ -118,9 +111,6 @@ crate enum RibKind<'a> { /// from the default of a type parameter because they're not declared /// before said type parameter. Also see the `visit_generics` override. ForwardTyParamBanRibKind, - - /// We forbid the use of type parameters as the types of const parameters. - TyParamAsConstParamTy, } impl RibKind<'_> { @@ -134,9 +124,8 @@ impl RibKind<'_> { | ModuleRibKind(_) | MacroDefinition(_) => false, AssocItemRibKind - | ItemRibKind - | ForwardTyParamBanRibKind - | TyParamAsConstParamTy => true, + | ItemRibKind(_) + | ForwardTyParamBanRibKind => true, } } } @@ -210,21 +199,36 @@ impl<'a> PathSource<'a> { } fn descr_expected(self) -> &'static str { - match self { + match &self { PathSource::Type => "type", PathSource::Trait(_) => "trait", - PathSource::Pat => "unit struct/variant or constant", + PathSource::Pat => "unit struct, unit variant or constant", PathSource::Struct => "struct, variant or union type", - PathSource::TupleStruct => "tuple struct/variant", + PathSource::TupleStruct => "tuple struct or tuple variant", PathSource::TraitItem(ns) => match ns { TypeNS => "associated type", ValueNS => "method or associated constant", MacroNS => bug!("associated macro"), }, - PathSource::Expr(parent) => match parent.map(|p| &p.node) { + PathSource::Expr(parent) => match &parent.as_ref().map(|p| &p.kind) { // "function" here means "anything callable" rather than `DefKind::Fn`, // this is not precise but usually more helpful than just "value". - Some(&ExprKind::Call(..)) => "function", + Some(ExprKind::Call(call_expr, _)) => { + match &call_expr.kind { + ExprKind::Path(_, path) => { + let mut msg = "function"; + if let Some(segment) = path.segments.iter().last() { + if let Some(c) = segment.ident.to_string().chars().next() { + if c.is_uppercase() { + msg = "function, tuple struct or tuple variant"; + } + } + } + msg + } + _ => "function" + } + } _ => "value", }, } @@ -327,6 +331,31 @@ impl<'a> PathSource<'a> { } } +#[derive(Default)] +struct DiagnosticMetadata { + /// The current trait's associated types' ident, used for diagnostic suggestions. + current_trait_assoc_types: Vec, + + /// The current self type if inside an impl (used for better errors). + current_self_type: Option, + + /// The current self item if inside an ADT (used for better errors). + current_self_item: Option, + + /// The current enclosing funciton (used for better errors). + current_function: Option, + + /// A list of labels as of yet unused. 
Labels will be removed from this map when + /// they are used (in a `break` or `continue` statement) + unused_labels: FxHashMap, + + /// Only used for better errors on `fn(): fn()`. + current_type_ascription: Vec, + + /// Only used for better errors on `let : ;`. + current_let_binding: Option<(Span, Option, Option)>, +} + struct LateResolutionVisitor<'a, 'b> { r: &'b mut Resolver<'a>, @@ -343,21 +372,8 @@ struct LateResolutionVisitor<'a, 'b> { /// The trait that the current context can refer to. current_trait_ref: Option<(Module<'a>, TraitRef)>, - /// The current trait's associated types' ident, used for diagnostic suggestions. - current_trait_assoc_types: Vec, - - /// The current self type if inside an impl (used for better errors). - current_self_type: Option, - - /// The current self item if inside an ADT (used for better errors). - current_self_item: Option, - - /// A list of labels as of yet unused. Labels will be removed from this map when - /// they are used (in a `break` or `continue` statement) - unused_labels: FxHashMap, - - /// Only used for better errors on `fn(): fn()`. - current_type_ascription: Vec, + /// Fields used to add information to diagnostic errors. + diagnostic_metadata: DiagnosticMetadata, } /// Walks the whole crate in DFS order, visiting each item, resolving names as it goes. @@ -381,10 +397,21 @@ impl<'a, 'tcx> Visitor<'tcx> for LateResolutionVisitor<'a, '_> { self.resolve_expr(expr, None); } fn visit_local(&mut self, local: &'tcx Local) { + let local_spans = match local.pat.kind { + // We check for this to avoid tuple struct fields. + PatKind::Wild => None, + _ => Some(( + local.pat.span, + local.ty.as_ref().map(|ty| ty.span), + local.init.as_ref().map(|init| init.span), + )), + }; + let original = replace(&mut self.diagnostic_metadata.current_let_binding, local_spans); self.resolve_local(local); + self.diagnostic_metadata.current_let_binding = original; } fn visit_ty(&mut self, ty: &'tcx Ty) { - match ty.node { + match ty.kind { TyKind::Path(ref qself, ref path) => { self.smart_resolve_path(ty.id, qself.as_ref(), path, PathSource::Type); } @@ -406,19 +433,24 @@ impl<'a, 'tcx> Visitor<'tcx> for LateResolutionVisitor<'a, '_> { visit::walk_poly_trait_ref(self, tref, m); } fn visit_foreign_item(&mut self, foreign_item: &'tcx ForeignItem) { - let generic_params = match foreign_item.node { + match foreign_item.kind { ForeignItemKind::Fn(_, ref generics) => { - HasGenericParams(generics, ItemRibKind) + self.with_generic_param_rib(generics, ItemRibKind(HasGenericParams::Yes), |this| { + visit::walk_foreign_item(this, foreign_item); + }); } - ForeignItemKind::Static(..) => NoGenericParams, - ForeignItemKind::Ty => NoGenericParams, - ForeignItemKind::Macro(..) => NoGenericParams, - }; - self.with_generic_param_rib(generic_params, |this| { - visit::walk_foreign_item(this, foreign_item); - }); + ForeignItemKind::Static(..) => { + self.with_item_rib(HasGenericParams::No, |this| { + visit::walk_foreign_item(this, foreign_item); + }); + } + ForeignItemKind::Ty | ForeignItemKind::Macro(..) => { + visit::walk_foreign_item(self, foreign_item); + } + } } - fn visit_fn(&mut self, fn_kind: FnKind<'tcx>, declaration: &'tcx FnDecl, _: Span, _: NodeId) { + fn visit_fn(&mut self, fn_kind: FnKind<'tcx>, declaration: &'tcx FnDecl, sp: Span, _: NodeId) { + let previous_value = replace(&mut self.diagnostic_metadata.current_function, Some(sp)); debug!("(resolving function) entering function"); let rib_kind = match fn_kind { FnKind::ItemFn(..) 
=> FnItemRibKind, @@ -444,6 +476,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LateResolutionVisitor<'a, '_> { debug!("(resolving function) leaving function"); }) }); + self.diagnostic_metadata.current_function = previous_value; } fn visit_generics(&mut self, generics: &'tcx Generics) { @@ -468,17 +501,19 @@ impl<'a, 'tcx> Visitor<'tcx> for LateResolutionVisitor<'a, '_> { } })); - // We also ban access to type parameters for use as the types of const parameters. - let mut const_ty_param_ban_rib = Rib::new(TyParamAsConstParamTy); - const_ty_param_ban_rib.bindings.extend(generics.params.iter() - .filter(|param| { - if let GenericParamKind::Type { .. } = param.kind { - true - } else { - false - } - }) - .map(|param| (Ident::with_dummy_span(param.ident.name), Res::Err))); + // rust-lang/rust#61631: The type `Self` is essentially + // another type parameter. For ADTs, we consider it + // well-defined only after all of the ADT type parameters have + // been provided. Therefore, we do not allow use of `Self` + // anywhere in ADT type parameter defaults. + // + // (We however cannot ban `Self` for defaults on *all* generic + // lists; e.g. trait generics can usefully refer to `Self`, + // such as in the case of `trait Add`.) + if self.diagnostic_metadata.current_self_item.is_some() { + // (`Some` if + only if we are in ADT's generics.) + default_ban_rib.bindings.insert(Ident::with_dummy_span(kw::SelfUpper), Res::Err); + } for param in &generics.params { match param.kind { @@ -498,15 +533,10 @@ impl<'a, 'tcx> Visitor<'tcx> for LateResolutionVisitor<'a, '_> { default_ban_rib.bindings.remove(&Ident::with_dummy_span(param.ident.name)); } GenericParamKind::Const { ref ty } => { - self.ribs[TypeNS].push(const_ty_param_ban_rib); - for bound in ¶m.bounds { self.visit_param_bound(bound); } - self.visit_ty(ty); - - const_ty_param_ban_rib = self.ribs[TypeNS].pop().unwrap(); } } } @@ -533,11 +563,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { }, label_ribs: Vec::new(), current_trait_ref: None, - current_trait_assoc_types: Vec::new(), - current_self_type: None, - current_self_item: None, - unused_labels: Default::default(), - current_type_ascription: Vec::new(), + diagnostic_metadata: DiagnosticMetadata::default(), } } @@ -647,7 +673,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { fn resolve_adt(&mut self, item: &Item, generics: &Generics) { debug!("resolve_adt"); self.with_current_self_item(item, |this| { - this.with_generic_param_rib(HasGenericParams(generics, ItemRibKind), |this| { + this.with_generic_param_rib(generics, ItemRibKind(HasGenericParams::Yes), |this| { let item_def_id = this.r.definitions.local_def_id(item.id); this.with_self_rib(Res::SelfTy(None, Some(item_def_id)), |this| { visit::walk_item(this, item); @@ -700,16 +726,14 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { fn resolve_item(&mut self, item: &Item) { let name = item.ident.name; - debug!("(resolving item) resolving {} ({:?})", name, item.node); + debug!("(resolving item) resolving {} ({:?})", name, item.kind); - match item.node { + match item.kind { ItemKind::TyAlias(_, ref generics) | ItemKind::OpaqueTy(_, ref generics) | ItemKind::Fn(_, _, ref generics, _) => { - self.with_generic_param_rib( - HasGenericParams(generics, ItemRibKind), - |this| visit::walk_item(this, item) - ); + self.with_generic_param_rib(generics, ItemRibKind(HasGenericParams::Yes), + |this| visit::walk_item(this, item)); } ItemKind::Enum(_, ref generics) | @@ -727,7 +751,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { ItemKind::Trait(.., ref generics, ref bounds, 
ref trait_items) => { // Create a new rib for the trait-wide type parameters. - self.with_generic_param_rib(HasGenericParams(generics, ItemRibKind), |this| { + self.with_generic_param_rib(generics, ItemRibKind(HasGenericParams::Yes), |this| { let local_def_id = this.r.definitions.local_def_id(item.id); this.with_self_rib(Res::SelfTy(Some(local_def_id), None), |this| { this.visit_generics(generics); @@ -735,35 +759,32 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { for trait_item in trait_items { this.with_trait_items(trait_items, |this| { - let generic_params = HasGenericParams( - &trait_item.generics, - AssocItemRibKind, - ); - this.with_generic_param_rib(generic_params, |this| { - match trait_item.node { - TraitItemKind::Const(ref ty, ref default) => { - this.visit_ty(ty); + this.with_generic_param_rib(&trait_item.generics, AssocItemRibKind, + |this| { + match trait_item.kind { + TraitItemKind::Const(ref ty, ref default) => { + this.visit_ty(ty); - // Only impose the restrictions of - // ConstRibKind for an actual constant - // expression in a provided default. - if let Some(ref expr) = *default{ - this.with_constant_rib(|this| { - this.visit_expr(expr); - }); + // Only impose the restrictions of + // ConstRibKind for an actual constant + // expression in a provided default. + if let Some(ref expr) = *default{ + this.with_constant_rib(|this| { + this.visit_expr(expr); + }); + } } - } - TraitItemKind::Method(_, _) => { - visit::walk_trait_item(this, trait_item) - } - TraitItemKind::Type(..) => { - visit::walk_trait_item(this, trait_item) - } - TraitItemKind::Macro(_) => { - panic!("unexpanded macro in resolve!") - } - }; - }); + TraitItemKind::Method(_, _) => { + visit::walk_trait_item(this, trait_item) + } + TraitItemKind::Type(..) => { + visit::walk_trait_item(this, trait_item) + } + TraitItemKind::Macro(_) => { + panic!("unexpanded macro in resolve!") + } + }; + }); }); } }); @@ -772,7 +793,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { ItemKind::TraitAlias(ref generics, ref bounds) => { // Create a new rib for the trait-wide type parameters. 
- self.with_generic_param_rib(HasGenericParams(generics, ItemRibKind), |this| { + self.with_generic_param_rib(generics, ItemRibKind(HasGenericParams::Yes), |this| { let local_def_id = this.r.definitions.local_def_id(item.id); this.with_self_rib(Res::SelfTy(Some(local_def_id), None), |this| { this.visit_generics(generics); @@ -790,7 +811,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { ItemKind::Static(ref ty, _, ref expr) | ItemKind::Const(ref ty, ref expr) => { debug!("resolve_item ItemKind::Const"); - self.with_item_rib(|this| { + self.with_item_rib(HasGenericParams::No, |this| { this.visit_ty(ty); this.with_constant_rib(|this| { this.visit_expr(expr); @@ -811,91 +832,75 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { } } - fn with_generic_param_rib<'c, F>(&'c mut self, generic_params: GenericParameters<'a, 'c>, f: F) + fn with_generic_param_rib<'c, F>(&'c mut self, generics: &'c Generics, kind: RibKind<'a>, f: F) where F: FnOnce(&mut Self) { debug!("with_generic_param_rib"); - match generic_params { - HasGenericParams(generics, rib_kind) => { - let mut function_type_rib = Rib::new(rib_kind); - let mut function_value_rib = Rib::new(rib_kind); - let mut seen_bindings = FxHashMap::default(); - // We also can't shadow bindings from the parent item - if let AssocItemRibKind = rib_kind { - let mut add_bindings_for_ns = |ns| { - let parent_rib = self.ribs[ns].iter() - .rfind(|rib| if let ItemRibKind = rib.kind { true } else { false }) - .expect("associated item outside of an item"); - seen_bindings.extend( - parent_rib.bindings.iter().map(|(ident, _)| (*ident, ident.span)), - ); - }; - add_bindings_for_ns(ValueNS); - add_bindings_for_ns(TypeNS); - } - for param in &generics.params { - match param.kind { - GenericParamKind::Lifetime { .. } => {} - GenericParamKind::Type { .. } => { - let ident = param.ident.modern(); - debug!("with_generic_param_rib: {}", param.id); + let mut function_type_rib = Rib::new(kind); + let mut function_value_rib = Rib::new(kind); + let mut seen_bindings = FxHashMap::default(); - if seen_bindings.contains_key(&ident) { - let span = seen_bindings.get(&ident).unwrap(); - let err = ResolutionError::NameAlreadyUsedInParameterList( - ident.name, - *span, - ); - self.r.report_error(param.ident.span, err); - } - seen_bindings.entry(ident).or_insert(param.ident.span); + // We also can't shadow bindings from the parent item + if let AssocItemRibKind = kind { + let mut add_bindings_for_ns = |ns| { + let parent_rib = self.ribs[ns].iter() + .rfind(|r| if let ItemRibKind(_) = r.kind { true } else { false }) + .expect("associated item outside of an item"); + seen_bindings.extend( + parent_rib.bindings.iter().map(|(ident, _)| (*ident, ident.span)), + ); + }; + add_bindings_for_ns(ValueNS); + add_bindings_for_ns(TypeNS); + } - // Plain insert (no renaming). - let res = Res::Def( - DefKind::TyParam, - self.r.definitions.local_def_id(param.id), - ); - function_type_rib.bindings.insert(ident, res); - self.r.record_partial_res(param.id, PartialRes::new(res)); - } - GenericParamKind::Const { .. 
} => { - let ident = param.ident.modern(); - debug!("with_generic_param_rib: {}", param.id); - - if seen_bindings.contains_key(&ident) { - let span = seen_bindings.get(&ident).unwrap(); - let err = ResolutionError::NameAlreadyUsedInParameterList( - ident.name, - *span, - ); - self.r.report_error(param.ident.span, err); - } - seen_bindings.entry(ident).or_insert(param.ident.span); - - let res = Res::Def( - DefKind::ConstParam, - self.r.definitions.local_def_id(param.id), - ); - function_value_rib.bindings.insert(ident, res); - self.r.record_partial_res(param.id, PartialRes::new(res)); - } - } - } - self.ribs[ValueNS].push(function_value_rib); - self.ribs[TypeNS].push(function_type_rib); + for param in &generics.params { + if let GenericParamKind::Lifetime { .. } = param.kind { + continue; } - NoGenericParams => { - // Nothing to do. + let def_kind = match param.kind { + GenericParamKind::Type { .. } => DefKind::TyParam, + GenericParamKind::Const { .. } => DefKind::ConstParam, + _ => unreachable!(), + }; + + let ident = param.ident.modern(); + debug!("with_generic_param_rib: {}", param.id); + + if seen_bindings.contains_key(&ident) { + let span = seen_bindings.get(&ident).unwrap(); + let err = ResolutionError::NameAlreadyUsedInParameterList( + ident.name, + *span, + ); + self.r.report_error(param.ident.span, err); + } + seen_bindings.entry(ident).or_insert(param.ident.span); + + // Plain insert (no renaming). + let res = Res::Def(def_kind, self.r.definitions.local_def_id(param.id)); + + match param.kind { + GenericParamKind::Type { .. } => { + function_type_rib.bindings.insert(ident, res); + self.r.record_partial_res(param.id, PartialRes::new(res)); + } + GenericParamKind::Const { .. } => { + function_value_rib.bindings.insert(ident, res); + self.r.record_partial_res(param.id, PartialRes::new(res)); + } + _ => unreachable!(), } } + self.ribs[ValueNS].push(function_value_rib); + self.ribs[TypeNS].push(function_type_rib); + f(self); - if let HasGenericParams(..) 
= generic_params { - self.ribs[TypeNS].pop(); - self.ribs[ValueNS].pop(); - } + self.ribs[TypeNS].pop(); + self.ribs[ValueNS].pop(); } fn with_label_rib(&mut self, kind: RibKind<'a>, f: impl FnOnce(&mut Self)) { @@ -904,8 +909,9 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { self.label_ribs.pop(); } - fn with_item_rib(&mut self, f: impl FnOnce(&mut Self)) { - self.with_rib(ValueNS, ItemRibKind, |this| this.with_rib(TypeNS, ItemRibKind, f)) + fn with_item_rib(&mut self, has_generic_params: HasGenericParams, f: impl FnOnce(&mut Self)) { + let kind = ItemRibKind(has_generic_params); + self.with_rib(ValueNS, kind, |this| this.with_rib(TypeNS, kind, f)) } fn with_constant_rib(&mut self, f: impl FnOnce(&mut Self)) { @@ -917,16 +923,22 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { fn with_current_self_type(&mut self, self_type: &Ty, f: impl FnOnce(&mut Self) -> T) -> T { // Handle nested impls (inside fn bodies) - let previous_value = replace(&mut self.current_self_type, Some(self_type.clone())); + let previous_value = replace( + &mut self.diagnostic_metadata.current_self_type, + Some(self_type.clone()), + ); let result = f(self); - self.current_self_type = previous_value; + self.diagnostic_metadata.current_self_type = previous_value; result } fn with_current_self_item(&mut self, self_item: &Item, f: impl FnOnce(&mut Self) -> T) -> T { - let previous_value = replace(&mut self.current_self_item, Some(self_item.id)); + let previous_value = replace( + &mut self.diagnostic_metadata.current_self_item, + Some(self_item.id), + ); let result = f(self); - self.current_self_item = previous_value; + self.diagnostic_metadata.current_self_item = previous_value; result } @@ -937,14 +949,14 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { f: impl FnOnce(&mut Self) -> T, ) -> T { let trait_assoc_types = replace( - &mut self.current_trait_assoc_types, - trait_items.iter().filter_map(|item| match &item.node { + &mut self.diagnostic_metadata.current_trait_assoc_types, + trait_items.iter().filter_map(|item| match &item.kind { TraitItemKind::Type(bounds, _) if bounds.len() == 0 => Some(item.ident), _ => None, }).collect(), ); let result = f(self); - self.current_trait_assoc_types = trait_assoc_types; + self.diagnostic_metadata.current_trait_assoc_types = trait_assoc_types; result } @@ -1010,7 +1022,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { impl_items: &[ImplItem]) { debug!("resolve_implementation"); // If applicable, create a rib for the type parameters. - self.with_generic_param_rib(HasGenericParams(generics, ItemRibKind), |this| { + self.with_generic_param_rib(generics, ItemRibKind(HasGenericParams::Yes), |this| { // Dummy self type for better errors if `Self` is used in the trait path. this.with_self_rib(Res::SelfTy(None, None), |this| { // Resolve the trait reference, if necessary. @@ -1031,11 +1043,11 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { debug!("resolve_implementation with_self_rib_ns(ValueNS, ...)"); for impl_item in impl_items { // We also need a new scope for the impl item type parameters. - let generic_params = HasGenericParams(&impl_item.generics, - AssocItemRibKind); - this.with_generic_param_rib(generic_params, |this| { + this.with_generic_param_rib(&impl_item.generics, + AssocItemRibKind, + |this| { use crate::ResolutionError::*; - match impl_item.node { + match impl_item.kind { ImplItemKind::Const(..) 
=> { debug!( "resolve_implementation ImplItemKind::Const", @@ -1146,7 +1158,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { let mut binding_map = FxHashMap::default(); pat.walk(&mut |pat| { - match pat.node { + match pat.kind { PatKind::Ident(binding_mode, ident, ref sub_pat) if sub_pat.is_some() || self.is_base_res_local(pat.id) => { @@ -1246,7 +1258,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { /// Check the consistency of the outermost or-patterns. fn check_consistent_bindings_top(&mut self, pat: &Pat) { - pat.walk(&mut |pat| match pat.node { + pat.walk(&mut |pat| match pat.kind { PatKind::Or(ref ps) => { self.check_consistent_bindings(ps); false @@ -1308,8 +1320,8 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { ) { // Visit all direct subpatterns of this pattern. pat.walk(&mut |pat| { - debug!("resolve_pattern pat={:?} node={:?}", pat, pat.node); - match pat.node { + debug!("resolve_pattern pat={:?} node={:?}", pat, pat.kind); + match pat.kind { PatKind::Ident(bmode, ident, ref sub) => { // First try to resolve the identifier as some existing entity, // then fall back to a fresh binding. @@ -1536,7 +1548,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { if is_expected(ctor_res) && self.r.is_accessible_from(ctor_vis, self.parent_scope.module) { let lint = lint::builtin::LEGACY_CONSTRUCTOR_VISIBILITY; - self.r.session.buffer_lint(lint, id, span, + self.r.lint_buffer.buffer_lint(lint, id, span, "private struct constructors are not usable through \ re-exports in outer modules", ); @@ -1762,7 +1774,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { }; if result.base_res() == unqualified_result { let lint = lint::builtin::UNUSED_QUALIFICATIONS; - self.r.session.buffer_lint(lint, id, span, "unnecessary qualification") + self.r.lint_buffer.buffer_lint(lint, id, span, "unnecessary qualification") } } @@ -1771,7 +1783,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { fn with_resolved_label(&mut self, label: Option` in `impl for C in D` + err.span_label(impl_ty.span, &msg); + } else { + // Point at `C` in `impl for C in D` + err.span_label(tr.path.span, &msg); + } + } + err.note("define and implement a trait or new type instead"); + err.emit(); return; } Err(traits::OrphanCheckErr::UncoveredTy(param_ty)) => { - struct_span_err!(self.tcx.sess, - sp, - E0210, - "type parameter `{}` must be used as the type parameter \ - for some local type (e.g., `MyStruct<{}>`)", - param_ty, - param_ty) - .span_label(sp, - format!("type parameter `{}` must be used as the type \ - parameter for some local type", param_ty)) - .note("only traits defined in the current crate can be implemented \ - for a type parameter") - .emit(); + let mut sp = sp; + for param in &generics.params { + if param.name.ident().to_string() == param_ty.to_string() { + sp = param.span; + } + } + let mut err = struct_span_err!( + self.tcx.sess, + sp, + E0210, + "type parameter `{}` must be used as the type parameter for some local \ + type (e.g., `MyStruct<{}>`)", + param_ty, + param_ty + ); + err.span_label(sp, format!( + "type parameter `{}` must be used as the type parameter for some local \ + type", + param_ty, + )); + err.note("only traits defined in the current crate can be implemented for a \ + type parameter"); + err.emit(); return; } } @@ -102,7 +144,7 @@ impl ItemLikeVisitor<'v> for OrphanChecker<'tcx> { if self.tcx.trait_is_auto(trait_def_id) && !trait_def_id.is_local() { let self_ty = trait_ref.self_ty(); - let opt_self_def_id = match self_ty.sty { + let opt_self_def_id = match self_ty.kind { ty::Adt(self_def, _) => 
Some(self_def.did), ty::Foreign(did) => Some(did), _ => None, diff --git a/src/librustc_typeck/coherence/unsafety.rs b/src/librustc_typeck/coherence/unsafety.rs index 07fbfddd96..b7cc6feee4 100644 --- a/src/librustc_typeck/coherence/unsafety.rs +++ b/src/librustc_typeck/coherence/unsafety.rs @@ -71,7 +71,7 @@ impl UnsafetyChecker<'tcx> { impl ItemLikeVisitor<'v> for UnsafetyChecker<'tcx> { fn visit_item(&mut self, item: &'v hir::Item) { - if let hir::ItemKind::Impl(unsafety, polarity, _, ref generics, ..) = item.node { + if let hir::ItemKind::Impl(unsafety, polarity, _, ref generics, ..) = item.kind { self.check_unsafety_coherence(item, Some(generics), unsafety, polarity); } } diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs index d2e9203779..001d98aece 100644 --- a/src/librustc_typeck/collect.rs +++ b/src/librustc_typeck/collect.rs @@ -16,7 +16,7 @@ use crate::astconv::{AstConv, Bounds, SizedByDefault}; use crate::constrained_generic_params as cgp; -use crate::check::intrinsic::intrisic_operation_unsafety; +use crate::check::intrinsic::intrinsic_operation_unsafety; use crate::lint; use crate::middle::resolve_lifetime as rl; use crate::middle::weak_lang_items; @@ -25,7 +25,7 @@ use rustc::ty::query::Providers; use rustc::ty::subst::{Subst, InternalSubsts}; use rustc::ty::util::Discr; use rustc::ty::util::IntTypeExt; -use rustc::ty::subst::UnpackedKind; +use rustc::ty::subst::GenericArgKind; use rustc::ty::{self, AdtKind, DefIdTree, ToPolyTraitRef, Ty, TyCtxt, Const}; use rustc::ty::{ReprOptions, ToPredicate}; use rustc::util::captures::Captures; @@ -36,7 +36,7 @@ use syntax::ast; use syntax::ast::{Ident, MetaItemKind}; use syntax::attr::{InlineAttr, OptimizeAttr, list_contains_name, mark_used}; use syntax::feature_gate; -use syntax::symbol::{InternedString, kw, Symbol, sym}; +use syntax::symbol::{kw, Symbol, sym}; use syntax_pos::{Span, DUMMY_SP}; use rustc::hir::def::{CtorKind, Res, DefKind}; @@ -46,7 +46,7 @@ use rustc::hir::intravisit::{self, NestedVisitorMap, Visitor}; use rustc::hir::GenericParamKind; use rustc::hir::{self, CodegenFnAttrFlags, CodegenFnAttrs, Unsafety}; -use errors::{Applicability, DiagnosticId}; +use errors::{Applicability, DiagnosticId, StashKey}; struct OnlySelfBounds(bool); @@ -135,7 +135,7 @@ impl Visitor<'tcx> for CollectItemTypesVisitor<'tcx> { } fn visit_expr(&mut self, expr: &'tcx hir::Expr) { - if let hir::ExprKind::Closure(..) = expr.node { + if let hir::ExprKind::Closure(..) = expr.kind { let def_id = self.tcx.hir().local_def_id(expr.hir_id); self.tcx.generics_of(def_id); self.tcx.type_of(def_id); @@ -182,8 +182,7 @@ impl AstConv<'tcx> for ItemCtxt<'tcx> { self.tcx } - fn get_type_parameter_bounds(&self, span: Span, def_id: DefId) - -> &'tcx ty::GenericPredicates<'tcx> { + fn get_type_parameter_bounds(&self, span: Span, def_id: DefId) -> ty::GenericPredicates<'tcx> { self.tcx .at(span) .type_param_predicates((self.item_def_id, def_id)) @@ -254,7 +253,7 @@ impl AstConv<'tcx> for ItemCtxt<'tcx> { fn type_param_predicates( tcx: TyCtxt<'_>, (item_def_id, def_id): (DefId, DefId), -) -> &ty::GenericPredicates<'_> { +) -> ty::GenericPredicates<'_> { use rustc::hir::*; // In the AST, bounds can derive from two places. 
Either @@ -266,7 +265,7 @@ fn type_param_predicates( let param_owner_def_id = tcx.hir().local_def_id(param_owner); let generics = tcx.generics_of(param_owner_def_id); let index = generics.param_def_id_to_index[&def_id]; - let ty = tcx.mk_ty_param(index, tcx.hir().ty_param_name(param_id).as_interned_str()); + let ty = tcx.mk_ty_param(index, tcx.hir().ty_param_name(param_id)); // Don't look for bounds where the type parameter isn't in scope. let parent = if item_def_id == param_owner_def_id { @@ -275,10 +274,10 @@ fn type_param_predicates( tcx.generics_of(item_def_id).parent }; - let result = parent.map_or(&tcx.common.empty_predicates, |parent| { + let mut result = parent.map(|parent| { let icx = ItemCtxt::new(tcx, parent); icx.get_type_parameter_bounds(DUMMY_SP, def_id) - }); + }).unwrap_or_default(); let mut extend = None; let item_hir_id = tcx.hir().as_local_hir_id(item_def_id).unwrap(); @@ -288,7 +287,7 @@ fn type_param_predicates( Node::ImplItem(item) => &item.generics, Node::Item(item) => { - match item.node { + match item.kind { ItemKind::Fn(.., ref generics, _) | ItemKind::Impl(_, _, _, ref generics, ..) | ItemKind::TyAlias(_, ref generics) @@ -312,7 +311,7 @@ fn type_param_predicates( } } - Node::ForeignItem(item) => match item.node { + Node::ForeignItem(item) => match item.kind { ForeignItemKind::Fn(_, _, ref generics) => generics, _ => return result, }, @@ -321,9 +320,7 @@ fn type_param_predicates( }; let icx = ItemCtxt::new(tcx, item_def_id); - let mut result = (*result).clone(); - result.predicates.extend(extend.into_iter()); - result.predicates.extend( + let extra_predicates = extend.into_iter().chain( icx.type_parameter_bounds_in_generics(ast_generics, param_id, ty, OnlySelfBounds(true)) .into_iter() .filter(|(predicate, _)| { @@ -331,9 +328,12 @@ fn type_param_predicates( ty::Predicate::Trait(ref data) => data.skip_binder().self_ty().is_param(index), _ => false, } - }) + }), ); - tcx.arena.alloc(result) + result.predicates = tcx.arena.alloc_from_iter( + result.predicates.iter().copied().chain(extra_predicates), + ); + result } impl ItemCtxt<'tcx> { @@ -387,7 +387,7 @@ impl ItemCtxt<'tcx> { /// `ast_ty_to_ty`, because we want to avoid triggering an all-out /// conversion of the type to avoid inducing unnecessary cycles. fn is_param(tcx: TyCtxt<'_>, ast_ty: &hir::Ty, param_id: hir::HirId) -> bool { - if let hir::TyKind::Path(hir::QPath::Resolved(None, ref path)) = ast_ty.node { + if let hir::TyKind::Path(hir::QPath::Resolved(None, ref path)) = ast_ty.kind { match path.res { Res::SelfTy(Some(def_id), None) | Res::Def(DefKind::TyParam, def_id) => { def_id == tcx.hir().local_def_id(param_id) @@ -403,7 +403,7 @@ fn convert_item(tcx: TyCtxt<'_>, item_id: hir::HirId) { let it = tcx.hir().expect_item(item_id); debug!("convert: item {} with id {}", it.ident, it.hir_id); let def_id = tcx.hir().local_def_id(item_id); - match it.node { + match it.kind { // These don't define types. hir::ItemKind::ExternCrate(_) | hir::ItemKind::Use(..) @@ -415,7 +415,7 @@ fn convert_item(tcx: TyCtxt<'_>, item_id: hir::HirId) { tcx.generics_of(def_id); tcx.type_of(def_id); tcx.predicates_of(def_id); - if let hir::ForeignItemKind::Fn(..) = item.node { + if let hir::ForeignItemKind::Fn(..) = item.kind { tcx.fn_sig(def_id); } } @@ -474,7 +474,7 @@ fn convert_item(tcx: TyCtxt<'_>, item_id: hir::HirId) { tcx.generics_of(def_id); tcx.type_of(def_id); tcx.predicates_of(def_id); - if let hir::ItemKind::Fn(..) = it.node { + if let hir::ItemKind::Fn(..) 
= it.kind { tcx.fn_sig(def_id); } } @@ -486,12 +486,12 @@ fn convert_trait_item(tcx: TyCtxt<'_>, trait_item_id: hir::HirId) { let def_id = tcx.hir().local_def_id(trait_item.hir_id); tcx.generics_of(def_id); - match trait_item.node { + match trait_item.kind { hir::TraitItemKind::Const(..) | hir::TraitItemKind::Type(_, Some(_)) | hir::TraitItemKind::Method(..) => { tcx.type_of(def_id); - if let hir::TraitItemKind::Method(..) = trait_item.node { + if let hir::TraitItemKind::Method(..) = trait_item.kind { tcx.fn_sig(def_id); } } @@ -507,7 +507,7 @@ fn convert_impl_item(tcx: TyCtxt<'_>, impl_item_id: hir::HirId) { tcx.generics_of(def_id); tcx.type_of(def_id); tcx.predicates_of(def_id); - if let hir::ImplItemKind::Method(..) = tcx.hir().expect_impl_item(impl_item_id).node { + if let hir::ImplItemKind::Method(..) = tcx.hir().expect_impl_item(impl_item_id).kind { tcx.fn_sig(def_id); } } @@ -638,7 +638,7 @@ fn adt_def(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::AdtDef { }; let repr = ReprOptions::new(tcx, def_id); - let (kind, variants) = match item.node { + let (kind, variants) = match item.kind { ItemKind::Enum(ref def, _) => { let mut distance_from_explicit = 0; let variants = def.variants @@ -698,7 +698,7 @@ fn adt_def(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::AdtDef { fn super_predicates_of( tcx: TyCtxt<'_>, trait_def_id: DefId, -) -> &ty::GenericPredicates<'_> { +) -> ty::GenericPredicates<'_> { debug!("super_predicates(trait_def_id={:?})", trait_def_id); let trait_hir_id = tcx.hir().as_local_hir_id(trait_def_id).unwrap(); @@ -707,7 +707,7 @@ fn super_predicates_of( _ => bug!("trait_node_id {} is not an item", trait_hir_id), }; - let (generics, bounds) = match item.node { + let (generics, bounds) = match item.kind { hir::ItemKind::Trait(.., ref generics, ref supertraits, _) => (generics, supertraits), hir::ItemKind::TraitAlias(ref generics, ref supertraits) => (generics, supertraits), _ => span_bug!(item.span, "super_predicates invoked on non-trait"), @@ -732,28 +732,30 @@ fn super_predicates_of( generics, item.hir_id, self_param_ty, OnlySelfBounds(!is_trait_alias)); // Combine the two lists to form the complete set of superbounds: - let superbounds: Vec<_> = superbounds1.into_iter().chain(superbounds2).collect(); + let superbounds = &*tcx.arena.alloc_from_iter( + superbounds1.into_iter().chain(superbounds2) + ); // Now require that immediate supertraits are converted, // which will, in turn, reach indirect supertraits. - for &(pred, span) in &superbounds { + for &(pred, span) in superbounds { debug!("superbound: {:?}", pred); if let ty::Predicate::Trait(bound) = pred { tcx.at(span).super_predicates_of(bound.def_id()); } } - tcx.arena.alloc(ty::GenericPredicates { + ty::GenericPredicates { parent: None, predicates: superbounds, - }) + } } fn trait_def(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::TraitDef { let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap(); let item = tcx.hir().expect_item(hir_id); - let (is_auto, unsafety) = match item.node { + let (is_auto, unsafety) = match item.kind { hir::ItemKind::Trait(is_auto, unsafety, ..) => (is_auto == hir::IsAuto::Yes, unsafety), hir::ItemKind::TraitAlias(..) 
=> (false, hir::Unsafety::Normal), _ => span_bug!(item.span, "trait_def_of_item invoked on non-trait"), @@ -796,7 +798,7 @@ fn has_late_bound_regions<'tcx>(tcx: TyCtxt<'tcx>, node: Node<'tcx>) -> Option { self.outer_index.shift_in(1); intravisit::walk_ty(self, ty); @@ -860,25 +862,25 @@ fn has_late_bound_regions<'tcx>(tcx: TyCtxt<'tcx>, node: Node<'tcx>) -> Option match item.node { + Node::TraitItem(item) => match item.kind { hir::TraitItemKind::Method(ref sig, _) => { has_late_bound_regions(tcx, &item.generics, &sig.decl) } _ => None, }, - Node::ImplItem(item) => match item.node { + Node::ImplItem(item) => match item.kind { hir::ImplItemKind::Method(ref sig, _) => { has_late_bound_regions(tcx, &item.generics, &sig.decl) } _ => None, }, - Node::ForeignItem(item) => match item.node { + Node::ForeignItem(item) => match item.kind { hir::ForeignItemKind::Fn(ref fn_decl, _, ref generics) => { has_late_bound_regions(tcx, generics, fn_decl) } _ => None, }, - Node::Item(item) => match item.node { + Node::Item(item) => match item.kind { hir::ItemKind::Fn(ref fn_decl, .., ref generics, _) => { has_late_bound_regions(tcx, generics, fn_decl) } @@ -915,10 +917,10 @@ fn generics_of(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::Generics { } } Node::Expr(&hir::Expr { - node: hir::ExprKind::Closure(..), + kind: hir::ExprKind::Closure(..), .. }) => Some(tcx.closure_base_def_id(def_id)), - Node::Item(item) => match item.node { + Node::Item(item) => match item.kind { ItemKind::OpaqueTy(hir::OpaqueTy { impl_trait_fn, .. }) => impl_trait_fn, _ => None, }, @@ -935,7 +937,7 @@ fn generics_of(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::Generics { Node::ImplItem(item) => &item.generics, Node::Item(item) => { - match item.node { + match item.kind { ItemKind::Fn(.., ref generics, _) | ItemKind::Impl(_, _, _, ref generics, ..) => { generics } @@ -959,7 +961,7 @@ fn generics_of(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::Generics { opt_self = Some(ty::GenericParamDef { index: 0, - name: kw::SelfUpper.as_interned_str(), + name: kw::SelfUpper, def_id: tcx.hir().local_def_id(param_id), pure_wrt_drop: false, kind: ty::GenericParamDefKind::Type { @@ -977,7 +979,7 @@ fn generics_of(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::Generics { } } - Node::ForeignItem(item) => match item.node { + Node::ForeignItem(item) => match item.kind { ForeignItemKind::Static(..) => &no_generics, ForeignItemKind::Fn(_, _, ref generics) => generics, ForeignItemKind::Type => &no_generics, @@ -1004,7 +1006,7 @@ fn generics_of(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::Generics { early_lifetimes .enumerate() .map(|(i, param)| ty::GenericParamDef { - name: param.name.ident().as_interned_str(), + name: param.name.ident().name, index: own_start + i as u32, def_id: tcx.hir().local_def_id(param.hir_id), pure_wrt_drop: param.pure_wrt_drop, @@ -1058,7 +1060,7 @@ fn generics_of(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::Generics { let param_def = ty::GenericParamDef { index: type_start + i as u32, - name: param.name.ident().as_interned_str(), + name: param.name.ident().name, def_id: tcx.hir().local_def_id(param.hir_id), pure_wrt_drop: param.pure_wrt_drop, kind, @@ -1072,7 +1074,7 @@ fn generics_of(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::Generics { // cares about anything but the length is instantiation, // and we don't do that for closures. if let Node::Expr(&hir::Expr { - node: hir::ExprKind::Closure(.., gen), + kind: hir::ExprKind::Closure(.., gen), .. 
}) = node { @@ -1088,7 +1090,7 @@ fn generics_of(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::Generics { .enumerate() .map(|(i, &arg)| ty::GenericParamDef { index: type_start + i as u32, - name: InternedString::intern(arg), + name: Symbol::intern(arg), def_id, pure_wrt_drop: false, kind: ty::GenericParamDefKind::Type { @@ -1103,7 +1105,7 @@ fn generics_of(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::Generics { params.extend(upvars.iter().zip((dummy_args.len() as u32)..).map(|(_, i)| { ty::GenericParamDef { index: type_start + i, - name: InternedString::intern(""), + name: Symbol::intern(""), def_id, pure_wrt_drop: false, kind: ty::GenericParamDefKind::Type { @@ -1149,18 +1151,41 @@ fn infer_placeholder_type( def_id: DefId, body_id: hir::BodyId, span: Span, + item_ident: Ident, ) -> Ty<'_> { let ty = tcx.typeck_tables_of(def_id).node_type(body_id.hir_id); - let mut diag = bad_placeholder_type(tcx, span); - if ty != tcx.types.err { - diag.span_suggestion( - span, - "replace `_` with the correct type", - ty.to_string(), - Applicability::MaybeIncorrect, - ); + + // If this came from a free `const` or `static mut?` item, + // then the user may have written e.g. `const A = 42;`. + // In this case, the parser has stashed a diagnostic for + // us to improve in typeck so we do that now. + match tcx.sess.diagnostic().steal_diagnostic(span, StashKey::ItemNoType) { + Some(mut err) => { + // The parser provided a sub-optimal `HasPlaceholders` suggestion for the type. + // We are typeck and have the real type, so remove that and suggest the actual type. + err.suggestions.clear(); + err.span_suggestion( + span, + "provide a type for the item", + format!("{}: {}", item_ident, ty), + Applicability::MachineApplicable, + ) + .emit(); + } + None => { + let mut diag = bad_placeholder_type(tcx, span); + if ty != tcx.types.err { + diag.span_suggestion( + span, + "replace `_` with the correct type", + ty.to_string(), + Applicability::MaybeIncorrect, + ); + } + diag.emit(); + } } - diag.emit(); + ty } @@ -1184,15 +1209,15 @@ pub fn checked_type_of(tcx: TyCtxt<'_>, def_id: DefId, fail: bool) -> Option match item.node { + Node::TraitItem(item) => match item.kind { TraitItemKind::Method(..) => { let substs = InternalSubsts::identity_for_item(tcx, def_id); tcx.mk_fn_def(def_id, substs) } TraitItemKind::Const(ref ty, body_id) => { body_id.and_then(|body_id| { - if let hir::TyKind::Infer = ty.node { - Some(infer_placeholder_type(tcx, def_id, body_id, ty.span)) + if let hir::TyKind::Infer = ty.kind { + Some(infer_placeholder_type(tcx, def_id, body_id, ty.span, item.ident)) } else { None } @@ -1207,14 +1232,14 @@ pub fn checked_type_of(tcx: TyCtxt<'_>, def_id: DefId, fail: bool) -> Option match item.node { + Node::ImplItem(item) => match item.kind { ImplItemKind::Method(..) 
=> { let substs = InternalSubsts::identity_for_item(tcx, def_id); tcx.mk_fn_def(def_id, substs) } ImplItemKind::Const(ref ty, body_id) => { - if let hir::TyKind::Infer = ty.node { - infer_placeholder_type(tcx, def_id, body_id, ty.span) + if let hir::TyKind::Infer = ty.kind { + infer_placeholder_type(tcx, def_id, body_id, ty.span, item.ident) } else { icx.to_ty(ty) } @@ -1242,11 +1267,11 @@ pub fn checked_type_of(tcx: TyCtxt<'_>, def_id: DefId, fail: bool) -> Option { - match item.node { + match item.kind { ItemKind::Static(ref ty, .., body_id) | ItemKind::Const(ref ty, body_id) => { - if let hir::TyKind::Infer = ty.node { - infer_placeholder_type(tcx, def_id, body_id, ty.span) + if let hir::TyKind::Infer = ty.kind { + infer_placeholder_type(tcx, def_id, body_id, ty.span, item.ident) } else { icx.to_ty(ty) } @@ -1302,13 +1327,13 @@ pub fn checked_type_of(tcx: TyCtxt<'_>, def_id: DefId, fail: bool) -> Option match foreign_item.node { + Node::ForeignItem(foreign_item) => match foreign_item.kind { ForeignItemKind::Fn(..) => { let substs = InternalSubsts::identity_for_item(tcx, def_id); tcx.mk_fn_def(def_id, substs) @@ -1332,17 +1357,14 @@ pub fn checked_type_of(tcx: TyCtxt<'_>, def_id: DefId, fail: bool) -> Option icx.to_ty(&field.ty), Node::Expr(&hir::Expr { - node: hir::ExprKind::Closure(.., gen), + kind: hir::ExprKind::Closure(.., gen), .. }) => { if gen.is_some() { return Some(tcx.typeck_tables_of(def_id).node_type(hir_id)); } - let substs = ty::ClosureSubsts { - substs: InternalSubsts::identity_for_item(tcx, def_id), - }; - + let substs = InternalSubsts::identity_for_item(tcx, def_id); tcx.mk_closure(def_id, substs) } @@ -1350,15 +1372,15 @@ pub fn checked_type_of(tcx: TyCtxt<'_>, def_id: DefId, fail: bool) -> Option { @@ -1376,22 +1398,22 @@ pub fn checked_type_of(tcx: TyCtxt<'_>, def_id: DefId, fail: bool) -> Option { let path = match parent_node { Node::Ty(&hir::Ty { - node: hir::TyKind::Path(QPath::Resolved(_, ref path)), + kind: hir::TyKind::Path(QPath::Resolved(_, ref path)), .. }) | Node::Expr(&hir::Expr { - node: ExprKind::Path(QPath::Resolved(_, ref path)), + kind: ExprKind::Path(QPath::Resolved(_, ref path)), .. }) => { Some(&**path) } - Node::Expr(&hir::Expr { node: ExprKind::Struct(ref path, ..), .. }) => { + Node::Expr(&hir::Expr { kind: ExprKind::Struct(ref path, ..), .. }) => { if let QPath::Resolved(_, ref path) = **path { Some(&**path) } else { @@ -1488,9 +1510,42 @@ pub fn checked_type_of(tcx: TyCtxt<'_>, def_id: DefId, fail: bool) -> Option match ¶m.kind { - hir::GenericParamKind::Type { default: Some(ref ty), .. } | - hir::GenericParamKind::Const { ref ty, .. } => { - icx.to_ty(ty) + hir::GenericParamKind::Type { default: Some(ref ty), .. } => icx.to_ty(ty), + hir::GenericParamKind::Const { ty: ref hir_ty, .. 
} => { + let ty = icx.to_ty(hir_ty); + if !tcx.features().const_compare_raw_pointers { + let err = match ty.peel_refs().kind { + ty::FnPtr(_) => Some("function pointers"), + ty::RawPtr(_) => Some("raw pointers"), + _ => None, + }; + if let Some(unsupported_type) = err { + feature_gate::emit_feature_err( + &tcx.sess.parse_sess, + sym::const_compare_raw_pointers, + hir_ty.span, + feature_gate::GateIssue::Language, + &format!( + "using {} as const generic parameters is unstable", + unsupported_type + ), + ); + }; + } + if ty::search_for_structural_match_violation( + param.hir_id, param.span, tcx, ty).is_some() + { + struct_span_err!( + tcx.sess, + hir_ty.span, + E0741, + "the types of const generic parameters must derive `PartialEq` and `Eq`", + ).span_label( + hir_ty.span, + format!("`{}` doesn't derive both `PartialEq` and `Eq`", ty), + ).emit(); + } + ty } x => { if !fail { @@ -1557,8 +1612,8 @@ fn find_opaque_ty_constraints(tcx: TyCtxt<'_>, def_id: DefId) -> Ty<'_> { // Skipping binder is ok, since we only use this to find generic parameters and // their positions. for (idx, subst) in substs.iter().enumerate() { - if let UnpackedKind::Type(ty) = subst.unpack() { - if let ty::Param(p) = ty.sty { + if let GenericArgKind::Type(ty) = subst.unpack() { + if let ty::Param(p) = ty.kind { if index_map.insert(p, idx).is_some() { // There was already an entry for `p`, meaning a generic parameter // was used twice. @@ -1588,11 +1643,11 @@ fn find_opaque_ty_constraints(tcx: TyCtxt<'_>, def_id: DefId) -> Ty<'_> { let indices = concrete_type .subst(self.tcx, substs) .walk() - .filter_map(|t| match &t.sty { + .filter_map(|t| match &t.kind { ty::Param(p) => Some(*index_map.get(p).unwrap()), _ => None, }).collect(); - let is_param = |ty: Ty<'_>| match ty.sty { + let is_param = |ty: Ty<'_>| match ty.kind { ty::Param(_) => true, _ => false, }; @@ -1604,7 +1659,7 @@ fn find_opaque_ty_constraints(tcx: TyCtxt<'_>, def_id: DefId) -> Ty<'_> { } else if let Some((prev_span, prev_ty, ref prev_indices)) = self.found { let mut ty = concrete_type.walk().fuse(); let mut p_ty = prev_ty.walk().fuse(); - let iter_eq = (&mut ty).zip(&mut p_ty).all(|(t, p)| match (&t.sty, &p.sty) { + let iter_eq = (&mut ty).zip(&mut p_ty).all(|(t, p)| match (&t.kind, &p.kind) { // Type parameters are equal to any other type parameter for the purpose of // concrete type equality, as it is possible to obtain the same type just // by passing matching parameters to a function. @@ -1694,9 +1749,7 @@ fn find_opaque_ty_constraints(tcx: TyCtxt<'_>, def_id: DefId) -> Ty<'_> { } let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap(); - let scope = tcx.hir() - .get_defining_scope(hir_id) - .expect("could not get defining scope"); + let scope = tcx.hir().get_defining_scope(hir_id); let mut locator = ConstraintLocator { def_id, tcx, @@ -1746,7 +1799,7 @@ fn find_opaque_ty_constraints(tcx: TyCtxt<'_>, def_id: DefId) -> Ty<'_> { pub fn get_infer_ret_ty(output: &'_ hir::FunctionRetTy) -> Option<&hir::Ty> { if let hir::FunctionRetTy::Return(ref ty) = output { - if let hir::TyKind::Infer = ty.node { + if let hir::TyKind::Infer = ty.kind { return Some(&**ty) } } @@ -1763,15 +1816,15 @@ fn fn_sig(tcx: TyCtxt<'_>, def_id: DefId) -> ty::PolyFnSig<'_> { match tcx.hir().get(hir_id) { TraitItem(hir::TraitItem { - node: TraitItemKind::Method(MethodSig { header, decl }, TraitMethod::Provided(_)), + kind: TraitItemKind::Method(MethodSig { header, decl }, TraitMethod::Provided(_)), .. 
}) | ImplItem(hir::ImplItem { - node: ImplItemKind::Method(MethodSig { header, decl }, _), + kind: ImplItemKind::Method(MethodSig { header, decl }, _), .. }) | Item(hir::Item { - node: ItemKind::Fn(decl, header, _, _), + kind: ItemKind::Fn(decl, header, _, _), .. }) => match get_infer_ret_ty(&decl.output) { Some(ty) => { @@ -1793,14 +1846,14 @@ fn fn_sig(tcx: TyCtxt<'_>, def_id: DefId) -> ty::PolyFnSig<'_> { }, TraitItem(hir::TraitItem { - node: TraitItemKind::Method(MethodSig { header, decl }, _), + kind: TraitItemKind::Method(MethodSig { header, decl }, _), .. }) => { AstConv::ty_of_fn(&icx, header.unsafety, header.abi, decl) }, ForeignItem(&hir::ForeignItem { - node: ForeignItemKind::Fn(ref fn_decl, _, _), + kind: ForeignItemKind::Fn(ref fn_decl, _, _), .. }) => { let abi = tcx.hir().get_foreign_abi(hir_id); @@ -1824,7 +1877,7 @@ fn fn_sig(tcx: TyCtxt<'_>, def_id: DefId) -> ty::PolyFnSig<'_> { } Expr(&hir::Expr { - node: hir::ExprKind::Closure(..), + kind: hir::ExprKind::Closure(..), .. }) => { // Closure signatures are not like other function @@ -1837,7 +1890,7 @@ fn fn_sig(tcx: TyCtxt<'_>, def_id: DefId) -> ty::PolyFnSig<'_> { // the signature of a closure, you should use the // `closure_sig` method on the `ClosureSubsts`: // - // closure_substs.closure_sig(def_id, tcx) + // closure_substs.sig(def_id, tcx) // // or, inside of an inference context, you can use // @@ -1855,7 +1908,7 @@ fn impl_trait_ref(tcx: TyCtxt<'_>, def_id: DefId) -> Option> { let icx = ItemCtxt::new(tcx, def_id); let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap(); - match tcx.hir().expect_item(hir_id).node { + match tcx.hir().expect_item(hir_id).kind { hir::ItemKind::Impl(.., ref opt_trait_ref, _, _) => { opt_trait_ref.as_ref().map(|ast_trait_ref| { let selfty = tcx.type_of(def_id); @@ -1866,10 +1919,30 @@ fn impl_trait_ref(tcx: TyCtxt<'_>, def_id: DefId) -> Option> { } } -fn impl_polarity(tcx: TyCtxt<'_>, def_id: DefId) -> hir::ImplPolarity { +fn impl_polarity(tcx: TyCtxt<'_>, def_id: DefId) -> ty::ImplPolarity { let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap(); - match tcx.hir().expect_item(hir_id).node { - hir::ItemKind::Impl(_, polarity, ..) => polarity, + let is_rustc_reservation = tcx.has_attr(def_id, sym::rustc_reservation_impl); + let item = tcx.hir().expect_item(hir_id); + match &item.kind { + hir::ItemKind::Impl(_, hir::ImplPolarity::Negative, ..) 
=> { + if is_rustc_reservation { + tcx.sess.span_err(item.span, "reservation impls can't be negative"); + } + ty::ImplPolarity::Negative + } + hir::ItemKind::Impl(_, hir::ImplPolarity::Positive, _, _, None, _, _) => { + if is_rustc_reservation { + tcx.sess.span_err(item.span, "reservation impls can't be inherent"); + } + ty::ImplPolarity::Positive + } + hir::ItemKind::Impl(_, hir::ImplPolarity::Positive, _, _, Some(_tr), _, _) => { + if is_rustc_reservation { + ty::ImplPolarity::Reservation + } else { + ty::ImplPolarity::Positive + } + } ref item => bug!("impl_polarity: {:?} not an impl", item), } } @@ -1900,7 +1973,7 @@ fn early_bound_lifetimes_from_generics<'a, 'tcx: 'a>( fn predicates_defined_on( tcx: TyCtxt<'_>, def_id: DefId, -) -> &ty::GenericPredicates<'_> { +) -> ty::GenericPredicates<'_> { debug!("predicates_defined_on({:?})", def_id); let mut result = tcx.explicit_predicates_of(def_id); debug!( @@ -1910,15 +1983,18 @@ fn predicates_defined_on( ); let inferred_outlives = tcx.inferred_outlives_of(def_id); if !inferred_outlives.is_empty() { - let span = tcx.def_span(def_id); debug!( "predicates_defined_on: inferred_outlives_of({:?}) = {:?}", def_id, inferred_outlives, ); - let mut predicates = (*result).clone(); - predicates.predicates.extend(inferred_outlives.iter().map(|&p| (p, span))); - result = tcx.arena.alloc(predicates); + if result.predicates.is_empty() { + result.predicates = inferred_outlives; + } else { + result.predicates = tcx.arena.alloc_from_iter( + result.predicates.iter().chain(inferred_outlives).copied(), + ); + } } debug!("predicates_defined_on({:?}) = {:?}", def_id, result); result @@ -1927,7 +2003,7 @@ fn predicates_defined_on( /// Returns a list of all type predicates (explicit and implicit) for the definition with /// ID `def_id`. This includes all predicates returned by `predicates_defined_on`, plus /// `Self: Trait` predicates for traits. -fn predicates_of(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::GenericPredicates<'_> { +fn predicates_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::GenericPredicates<'_> { let mut result = tcx.predicates_defined_on(def_id); if tcx.is_trait(def_id) { @@ -1944,9 +2020,11 @@ fn predicates_of(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::GenericPredicates<'_> { // used, and adding the predicate into this list ensures // that this is done. let span = tcx.def_span(def_id); - let mut predicates = (*result).clone(); - predicates.predicates.push((ty::TraitRef::identity(tcx, def_id).to_predicate(), span)); - result = tcx.arena.alloc(predicates); + result.predicates = tcx.arena.alloc_from_iter( + result.predicates.iter().copied().chain( + std::iter::once((ty::TraitRef::identity(tcx, def_id).to_predicate(), span)) + ), + ); } debug!("predicates_of(def_id={:?}) = {:?}", def_id, result); result @@ -1957,7 +2035,7 @@ fn predicates_of(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::GenericPredicates<'_> { fn explicit_predicates_of( tcx: TyCtxt<'_>, def_id: DefId, -) -> &ty::GenericPredicates<'_> { +) -> ty::GenericPredicates<'_> { use rustc::hir::*; use rustc_data_structures::fx::FxHashSet; @@ -1966,6 +2044,7 @@ fn explicit_predicates_of( /// A data structure with unique elements, which preserves order of insertion. /// Preserving the order of insertion is important here so as not to break /// compile-fail UI tests. + // FIXME(eddyb) just use `IndexSet` from `indexmap`. 
struct UniquePredicates<'tcx> { predicates: Vec<(ty::Predicate<'tcx>, Span)>, uniques: FxHashSet<(ty::Predicate<'tcx>, Span)>, @@ -2012,7 +2091,7 @@ fn explicit_predicates_of( let ast_generics = match node { Node::TraitItem(item) => &item.generics, - Node::ImplItem(item) => match item.node { + Node::ImplItem(item) => match item.kind { ImplItemKind::OpaqueTy(ref bounds) => { let substs = InternalSubsts::identity_for_item(tcx, def_id); let opaque_ty = tcx.mk_opaque(def_id, substs); @@ -2033,7 +2112,7 @@ fn explicit_predicates_of( }, Node::Item(item) => { - match item.node { + match item.kind { ItemKind::Impl(_, _, defaultness, ref generics, ..) => { if defaultness.is_default() { is_default_impl_trait = tcx.impl_trait_ref(def_id); @@ -2075,10 +2154,10 @@ fn explicit_predicates_of( let bounds_predicates = bounds.predicates(tcx, opaque_ty); if impl_trait_fn.is_some() { // opaque types - return tcx.arena.alloc(ty::GenericPredicates { + return ty::GenericPredicates { parent: None, - predicates: bounds_predicates, - }); + predicates: tcx.arena.alloc_from_iter(bounds_predicates), + }; } else { // named opaque types predicates.extend(bounds_predicates); @@ -2090,7 +2169,7 @@ fn explicit_predicates_of( } } - Node::ForeignItem(item) => match item.node { + Node::ForeignItem(item) => match item.kind { ForeignItemKind::Static(..) => NO_GENERICS, ForeignItemKind::Fn(_, _, ref generics) => generics, ForeignItemKind::Type => NO_GENERICS, @@ -2131,7 +2210,7 @@ fn explicit_predicates_of( let region = tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion { def_id: tcx.hir().local_def_id(param.hir_id), index, - name: param.name.ident().as_interned_str(), + name: param.name.ident().name, })); index += 1; @@ -2154,7 +2233,7 @@ fn explicit_predicates_of( // type parameter (e.g., ``). for param in &ast_generics.params { if let GenericParamKind::Type { .. } = param.kind { - let name = param.name.ident().as_interned_str(); + let name = param.name.ident().name; let param_ty = ty::ParamTy::new(index, name).to_ty(tcx); index += 1; @@ -2175,7 +2254,7 @@ fn explicit_predicates_of( // That way, `where Ty:` is not a complete noop (see #53696) and `Ty` // is still checked for WF. if bound_pred.bounds.is_empty() { - if let ty::Param(_) = ty.sty { + if let ty::Param(_) = ty.kind { // This is a `where T:`, which can be in the HIR from the // transformation that moves `?Sized` to `T`'s declaration. // We can skip the predicate because type parameters are @@ -2238,7 +2317,7 @@ fn explicit_predicates_of( if let Some((self_trait_ref, trait_items)) = is_trait { predicates.extend(trait_items.iter().flat_map(|trait_item_ref| { let trait_item = tcx.hir().trait_item(trait_item_ref.id); - let bounds = match trait_item.node { + let bounds = match trait_item.kind { hir::TraitItemKind::Type(ref bounds, _) => bounds, _ => return Vec::new().into_iter() }; @@ -2267,7 +2346,7 @@ fn explicit_predicates_of( // in trait checking. See `setup_constraining_predicates` // for details. if let Node::Item(&Item { - node: ItemKind::Impl(..), + kind: ItemKind::Impl(..), .. 
}) = node { @@ -2281,10 +2360,10 @@ fn explicit_predicates_of( ); } - let result = tcx.arena.alloc(ty::GenericPredicates { + let result = ty::GenericPredicates { parent: generics.parent, - predicates, - }); + predicates: tcx.arena.alloc_from_iter(predicates), + }; debug!("explicit_predicates_of(def_id={:?}) = {:?}", def_id, result); result } @@ -2325,7 +2404,7 @@ fn compute_sig_of_foreign_fn_decl<'tcx>( abi: abi::Abi, ) -> ty::PolyFnSig<'tcx> { let unsafety = if abi == abi::Abi::RustIntrinsic { - intrisic_operation_unsafety(&*tcx.item_name(def_id).as_str()) + intrinsic_operation_unsafety(&*tcx.item_name(def_id).as_str()) } else { hir::Unsafety::Unsafe }; @@ -2374,10 +2453,10 @@ fn is_foreign_item(tcx: TyCtxt<'_>, def_id: DefId) -> bool { fn static_mutability(tcx: TyCtxt<'_>, def_id: DefId) -> Option { match tcx.hir().get_if_local(def_id) { Some(Node::Item(&hir::Item { - node: hir::ItemKind::Static(_, mutbl, _), .. + kind: hir::ItemKind::Static(_, mutbl, _), .. })) | Some(Node::ForeignItem( &hir::ForeignItem { - node: hir::ForeignItemKind::Static(_, mutbl), .. + kind: hir::ForeignItemKind::Static(_, mutbl), .. })) => Some(mutbl), Some(_) => None, _ => bug!("static_mutability applied to non-local def-id {:?}", def_id), @@ -2522,6 +2601,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs { let whitelist = tcx.target_features_whitelist(LOCAL_CRATE); let mut inline_span = None; + let mut link_ordinal_span = None; for attr in attrs.iter() { if attr.check_name(sym::cold) { codegen_fn_attrs.flags |= CodegenFnAttrFlags::COLD; @@ -2555,6 +2635,16 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs { codegen_fn_attrs.flags |= CodegenFnAttrFlags::USED; } else if attr.check_name(sym::thread_local) { codegen_fn_attrs.flags |= CodegenFnAttrFlags::THREAD_LOCAL; + } else if attr.check_name(sym::track_caller) { + if tcx.fn_sig(id).abi() != abi::Abi::Rust { + struct_span_err!( + tcx.sess, + attr.span, + E0737, + "Rust ABI is required to use `#[track_caller]`" + ).emit(); + } + codegen_fn_attrs.flags |= CodegenFnAttrFlags::TRACK_CALLER; } else if attr.check_name(sym::export_name) { if let Some(s) = attr.value_str() { if s.as_str().contains("\0") { @@ -2603,6 +2693,11 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs { } } else if attr.check_name(sym::link_name) { codegen_fn_attrs.link_name = attr.value_str(); + } else if attr.check_name(sym::link_ordinal) { + link_ordinal_span = Some(attr.span); + if let ordinal @ Some(_) = check_link_ordinal(tcx, attr) { + codegen_fn_attrs.link_ordinal = ordinal; + } } } @@ -2610,7 +2705,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs { if attr.path != sym::inline { return ia; } - match attr.meta().map(|i| i.node) { + match attr.meta().map(|i| i.kind) { Some(MetaItemKind::Word) => { mark_used(attr); InlineAttr::Hint @@ -2651,7 +2746,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs { return ia; } let err = |sp, s| span_err!(tcx.sess.diagnostic(), sp, E0722, "{}", s); - match attr.meta().map(|i| i.node) { + match attr.meta().map(|i| i.kind) { Some(MetaItemKind::Word) => { err(attr.span, "expected one argument"); ia @@ -2680,6 +2775,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs { // purpose functions as they wouldn't have the right target features // enabled. For that reason we also forbid #[inline(always)] as it can't be // respected. 
+ if codegen_fn_attrs.target_features.len() > 0 { if codegen_fn_attrs.inline == InlineAttr::Always { if let Some(span) = inline_span { @@ -2704,6 +2800,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs { codegen_fn_attrs.export_name = Some(name); codegen_fn_attrs.link_name = Some(name); } + check_link_name_xor_ordinal(tcx, &codegen_fn_attrs, link_ordinal_span); // Internal symbols to the standard library all have no_mangle semantics in // that they have defined symbol names present in the function name. This @@ -2714,3 +2811,48 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs { codegen_fn_attrs } + +fn check_link_ordinal(tcx: TyCtxt<'_>, attr: &ast::Attribute) -> Option { + use syntax::ast::{Lit, LitIntType, LitKind}; + let meta_item_list = attr.meta_item_list(); + let meta_item_list: Option<&[ast::NestedMetaItem]> = meta_item_list.as_ref().map(Vec::as_ref); + let sole_meta_list = match meta_item_list { + Some([item]) => item.literal(), + _ => None, + }; + if let Some(Lit { kind: LitKind::Int(ordinal, LitIntType::Unsuffixed), .. }) = sole_meta_list { + if *ordinal <= std::usize::MAX as u128 { + Some(*ordinal as usize) + } else { + let msg = format!( + "ordinal value in `link_ordinal` is too large: `{}`", + &ordinal + ); + tcx.sess.struct_span_err(attr.span, &msg) + .note("the value may not exceed `std::usize::MAX`") + .emit(); + None + } + } else { + tcx.sess.struct_span_err(attr.span, "illegal ordinal format in `link_ordinal`") + .note("an unsuffixed integer value, e.g., `1`, is expected") + .emit(); + None + } +} + +fn check_link_name_xor_ordinal( + tcx: TyCtxt<'_>, + codegen_fn_attrs: &CodegenFnAttrs, + inline_span: Option, +) { + if codegen_fn_attrs.link_name.is_none() || codegen_fn_attrs.link_ordinal.is_none() { + return; + } + let msg = "cannot use `#[link_name]` with `#[link_ordinal]`"; + if let Some(span) = inline_span { + tcx.sess.span_err(span, msg); + } else { + tcx.sess.err(msg); + } +} diff --git a/src/librustc_typeck/constrained_generic_params.rs b/src/librustc_typeck/constrained_generic_params.rs index dd44f86717..1fdf49fde5 100644 --- a/src/librustc_typeck/constrained_generic_params.rs +++ b/src/librustc_typeck/constrained_generic_params.rs @@ -55,7 +55,7 @@ struct ParameterCollector { impl<'tcx> TypeVisitor<'tcx> for ParameterCollector { fn visit_ty(&mut self, t: Ty<'tcx>) -> bool { - match t.sty { + match t.kind { ty::Projection(..) | ty::Opaque(..) if !self.include_nonconstraining => { // projections are not injective return false; @@ -86,11 +86,11 @@ impl<'tcx> TypeVisitor<'tcx> for ParameterCollector { pub fn identify_constrained_generic_params<'tcx>( tcx: TyCtxt<'tcx>, - predicates: &ty::GenericPredicates<'tcx>, + predicates: ty::GenericPredicates<'tcx>, impl_trait_ref: Option>, input_parameters: &mut FxHashSet, ) { - let mut predicates = predicates.predicates.clone(); + let mut predicates = predicates.predicates.to_vec(); setup_constraining_predicates(tcx, &mut predicates, impl_trait_ref, input_parameters); } diff --git a/src/librustc_typeck/error_codes.rs b/src/librustc_typeck/error_codes.rs index e11dcfafb8..f21fc2df8b 100644 --- a/src/librustc_typeck/error_codes.rs +++ b/src/librustc_typeck/error_codes.rs @@ -194,7 +194,7 @@ a guard. 
```compile_fail,E0029 let string = "salutations !"; -// The ordering relation for strings can't be evaluated at compile time, +// The ordering relation for strings cannot be evaluated at compile time, // so this doesn't work: match string { "hello" ..= "world" => {} @@ -348,7 +348,7 @@ fn main() { "##, E0044: r##" -You can't use type or const parameters on foreign items. +You cannot use type or const parameters on foreign items. Example of erroneous code: ```compile_fail,E0044 @@ -788,7 +788,7 @@ fn some_other_func() {} fn some_function() { SOME_CONST = 14; // error : a constant value cannot be changed! 1 = 3; // error : 1 isn't a valid place! - some_other_func() = 4; // error : we can't assign value to a function! + some_other_func() = 4; // error : we cannot assign value to a function! SomeStruct.x = 12; // error : SomeStruct a structure name but it is used // like a variable! } @@ -1873,13 +1873,14 @@ This fails because `&mut T` is not `Copy`, even when `T` is `Copy` (this differs from the behavior for `&T`, which is always `Copy`). "##, -/* E0205: r##" +#### Note: this error code is no longer emitted by the compiler. + An attempt to implement the `Copy` trait for an enum failed because one of the variants does not implement `Copy`. To fix this, you must implement `Copy` for the mentioned variant. Note that this may not be possible, as in the example of -```compile_fail,E0205 +```compile_fail,E0204 enum Foo { Bar(Vec), Baz, @@ -1892,7 +1893,7 @@ This fails because `Vec` does not implement `Copy` for any `T`. Here's another example that will fail: -```compile_fail,E0205 +```compile_fail,E0204 #[derive(Copy)] enum Foo<'a> { Bar(&'a mut bool), @@ -1903,7 +1904,6 @@ enum Foo<'a> { This fails because `&mut T` is not `Copy`, even when `T` is `Copy` (this differs from the behavior for `&T`, which is always `Copy`). "##, -*/ E0206: r##" You can only implement `Copy` for a struct or enum. Both of the following @@ -2126,8 +2126,9 @@ For information on the design of the orphan rules, see [RFC 1023]. [RFC 1023]: https://github.com/rust-lang/rfcs/blob/master/text/1023-rebalancing-coherence.md "##, -/* E0211: r##" +#### Note: this error code is no longer emitted by the compiler. + You used a function or type which doesn't fit the requirements for where it was used. Erroneous code examples: @@ -2174,7 +2175,7 @@ extern "rust-intrinsic" { } ``` -The second case example is a bit particular : the main function must always +The second case example is a bit particular: the main function must always have this definition: ```compile_fail @@ -2206,7 +2207,6 @@ impl Foo { } ``` "##, - */ E0220: r##" You used an associated type which isn't defined in the trait. @@ -2727,14 +2727,9 @@ impl CoerceUnsized> for MyType [`CoerceUnsized`]: https://doc.rust-lang.org/std/ops/trait.CoerceUnsized.html "##, -/* -// Associated consts can now be accessed through generic type parameters, and -// this error is no longer emitted. -// -// FIXME: consider whether to leave it in the error index, or remove it entirely -// as associated consts is not stabilized yet. - E0329: r##" +#### Note: this error code is no longer emitted by the compiler. + An attempt was made to access an associated constant through either a generic type parameter or `Self`. This is not supported yet. 
An example causing this error is shown below: @@ -2765,12 +2760,15 @@ trait Foo { struct MyStruct; +impl Foo for MyStruct { + const BAR: f64 = 0f64; +} + fn get_bar_good() -> f64 { ::BAR } ``` "##, -*/ E0366: r##" An attempt was made to implement `Drop` on a concrete specialization of a @@ -3610,6 +3608,43 @@ match r { ``` "##, +E0533: r##" +An item which isn't a unit struct, a variant, nor a constant has been used as a +match pattern. + +Erroneous code example: + +```compile_fail,E0533 +struct Tortoise; + +impl Tortoise { + fn turtle(&self) -> u32 { 0 } +} + +match 0u32 { + Tortoise::turtle => {} // Error! + _ => {} +} +if let Tortoise::turtle = 0u32 {} // Same error! +``` + +If you want to match against a value returned by a method, you need to bind the +value first: + +``` +struct Tortoise; + +impl Tortoise { + fn turtle(&self) -> u32 { 0 } +} + +match 0u32 { + x if x == Tortoise.turtle() => {} // Bound into `x` then we compare it! + _ => {} +} +``` +"##, + E0534: r##" The `inline` attribute was malformed. @@ -3856,6 +3891,52 @@ details. [issue #33685]: https://github.com/rust-lang/rust/issues/33685 "##, +E0587: r##" +A type has both `packed` and `align` representation hints. + +Erroneous code example: + +```compile_fail,E0587 +#[repr(packed, align(8))] // error! +struct Umbrella(i32); +``` + +You cannot use `packed` and `align` hints on a same type. If you want to pack a +type to a given size, you should provide a size to packed: + +``` +#[repr(packed)] // ok! +struct Umbrella(i32); +``` +"##, + +E0588: r##" +A type with `packed` representation hint has a field with `align` +representation hint. + +Erroneous code example: + +```compile_fail,E0588 +#[repr(align(16))] +struct Aligned(i32); + +#[repr(packed)] // error! +struct Packed(Aligned); +``` + +Just like you cannot have both `align` and `packed` representation hints on a +same type, a `packed` type cannot contain another type with the `align` +representation hint. However, you can do the opposite: + +``` +#[repr(packed)] +struct Packed(i32); + +#[repr(align(16))] // ok! +struct Aligned(Packed); +``` +"##, + E0592: r##" This error occurs when you defined methods or associated functions with same name. @@ -4264,7 +4345,7 @@ extern { unsafe { printf(::std::ptr::null(), 0f32); - // error: can't pass an `f32` to variadic function, cast to `c_double` + // error: cannot pass an `f32` to variadic function, cast to `c_double` } ``` @@ -4284,11 +4365,12 @@ enum X { Entry, } -X::Entry(); // error: expected function, found `X::Entry` +X::Entry(); // error: expected function, tuple struct or tuple variant, + // found `X::Entry` // Or even simpler: let x = 0i32; -x(); // error: expected function, found `i32` +x(); // error: expected function, tuple struct or tuple variant, found `i32` ``` Only functions and methods can be called using `()`. Example: @@ -4828,6 +4910,10 @@ assert_eq!(1, discriminant(&Enum::Struct{a: 7, b: 11})); ``` "##, +E0740: r##" +A `union` cannot have fields with destructors. +"##, + E0733: r##" Recursion in an `async fn` requires boxing. For example, this will not compile: @@ -4870,6 +4956,48 @@ fn foo_recursive(n: usize) -> Pin>> { The `Box<...>` ensures that the result is of known size, and the pin is required to keep it in the same place in memory. "##, + +E0737: r##" +`#[track_caller]` requires functions to have the `"Rust"` ABI for implicitly +receiving caller location. See [RFC 2091] for details on this and other +restrictions. 
+ +Erroneous code example: + +```compile_fail,E0737 +#![feature(track_caller)] + +#[track_caller] +extern "C" fn foo() {} +``` + +[RFC 2091]: https://github.com/rust-lang/rfcs/blob/master/text/2091-inline-semantic.md +"##, + +E0741: r##" +Only `structural_match` types (that is, types that derive `PartialEq` and `Eq`) +may be used as the types of const generic parameters. + +```compile_fail,E0741 +#![feature(const_generics)] + +struct A; + +struct B; // error! +``` + +To fix this example, we derive `PartialEq` and `Eq`. + +``` +#![feature(const_generics)] + +#[derive(PartialEq, Eq)] +struct A; + +struct B; // ok! +``` +"##, + ; // E0035, merged into E0087/E0089 // E0036, merged into E0087/E0089 @@ -4892,7 +5020,7 @@ and the pin is required to keep it in the same place in memory. // E0174, // E0182, // merged into E0229 E0183, -// E0187, // can't infer the kind of the closure +// E0187, // cannot infer the kind of the closure // E0188, // can not cast an immutable reference to a mutable pointer // E0189, // deprecated: can only cast a boxed pointer to a boxed object // E0190, // deprecated: can only cast a &-pointer to an &-object @@ -4935,22 +5063,19 @@ and the pin is required to keep it in the same place in memory. E0377, // the trait `CoerceUnsized` may only be implemented for a coercion // between structures with the same definition // E0558, // replaced with a generic attribute input check - E0533, // `{}` does not name a unit variant, unit struct or a constant // E0563, // cannot determine a type for this `impl Trait` removed in 6383de15 - E0564, // only named lifetimes are allowed in `impl Trait`, +// E0564, // only named lifetimes are allowed in `impl Trait`, // but `{}` was found in the type `{}` - E0587, // type has conflicting packed and align representation hints - E0588, // packed type cannot transitively contain a `[repr(align)]` type // E0611, // merged into E0616 // E0612, // merged into E0609 // E0613, // Removed (merged with E0609) E0627, // yield statement outside of generator literal - E0632, // cannot provide explicit type parameters when `impl Trait` is used - // in argument position. + E0632, // cannot provide explicit generic arguments when `impl Trait` is + // used in argument position E0634, // type has conflicting packed representaton hints E0640, // infer outlives requirements E0641, // cannot cast to/from a pointer with an unknown kind - E0645, // trait aliases not finished +// E0645, // trait aliases not finished E0719, // duplicate values for associated type binding E0722, // Malformed `#[optimize]` attribute E0724, // `#[ffi_returns_twice]` is only allowed in foreign functions diff --git a/src/librustc_typeck/impl_wf_check.rs b/src/librustc_typeck/impl_wf_check.rs index bc0f17c3bf..2d18800771 100644 --- a/src/librustc_typeck/impl_wf_check.rs +++ b/src/librustc_typeck/impl_wf_check.rs @@ -54,7 +54,7 @@ pub fn impl_wf_check(tcx: TyCtxt<'_>) { // but it's one that we must perform earlier than the rest of // WfCheck. 
for &module in tcx.hir().krate().modules.keys() { - tcx.ensure().check_mod_impl_wf(tcx.hir().local_def_id_from_node_id(module)); + tcx.ensure().check_mod_impl_wf(tcx.hir().local_def_id(module)); } } @@ -78,7 +78,7 @@ struct ImplWfCheck<'tcx> { impl ItemLikeVisitor<'tcx> for ImplWfCheck<'tcx> { fn visit_item(&mut self, item: &'tcx hir::Item) { - if let hir::ItemKind::Impl(.., ref impl_item_refs) = item.node { + if let hir::ItemKind::Impl(.., ref impl_item_refs) = item.kind { let impl_def_id = self.tcx.hir().local_def_id(item.hir_id); enforce_impl_params_are_constrained(self.tcx, impl_def_id, @@ -114,7 +114,7 @@ fn enforce_impl_params_are_constrained( let mut input_parameters = cgp::parameters_for_impl(impl_self_ty, impl_trait_ref); cgp::identify_constrained_generic_params( - tcx, &impl_predicates, impl_trait_ref, &mut input_parameters); + tcx, impl_predicates, impl_trait_ref, &mut input_parameters); // Disallow unconstrained lifetimes, but only if they appear in assoc types. let lifetimes_in_associated_types: FxHashSet<_> = impl_item_refs.iter() @@ -197,7 +197,7 @@ fn enforce_impl_items_are_distinct(tcx: TyCtxt<'_>, impl_item_refs: &[hir::ImplI let mut seen_value_items = FxHashMap::default(); for impl_item_ref in impl_item_refs { let impl_item = tcx.hir().impl_item(impl_item_ref.id); - let seen_items = match impl_item.node { + let seen_items = match impl_item.kind { hir::ImplItemKind::TyAlias(_) => &mut seen_type_items, _ => &mut seen_value_items, }; diff --git a/src/librustc_typeck/lib.rs b/src/librustc_typeck/lib.rs index 959483e443..9374113e1c 100644 --- a/src/librustc_typeck/lib.rs +++ b/src/librustc_typeck/lib.rs @@ -67,8 +67,6 @@ This API is completely unstable and subject to change. #![feature(nll)] #![feature(slice_patterns)] #![feature(never_type)] -#![feature(inner_deref)] -#![feature(mem_take)] #![recursion_limit="256"] @@ -159,10 +157,10 @@ fn check_main_fn_ty(tcx: TyCtxt<'_>, main_def_id: DefId) { let main_id = tcx.hir().as_local_hir_id(main_def_id).unwrap(); let main_span = tcx.def_span(main_def_id); let main_t = tcx.type_of(main_def_id); - match main_t.sty { + match main_t.kind { ty::FnDef(..) => { if let Some(Node::Item(it)) = tcx.hir().find(main_id) { - if let hir::ItemKind::Fn(.., ref generics, _) = it.node { + if let hir::ItemKind::Fn(.., ref generics, _) = it.kind { let mut error = false; if !generics.params.is_empty() { let msg = "`main` function is not allowed to have generic \ @@ -224,10 +222,10 @@ fn check_start_fn_ty(tcx: TyCtxt<'_>, start_def_id: DefId) { let start_id = tcx.hir().as_local_hir_id(start_def_id).unwrap(); let start_span = tcx.def_span(start_def_id); let start_t = tcx.type_of(start_def_id); - match start_t.sty { + match start_t.kind { ty::FnDef(..) 
=> { if let Some(Node::Item(it)) = tcx.hir().find(start_id) { - if let hir::ItemKind::Fn(.., ref generics, _) = it.node { + if let hir::ItemKind::Fn(.., ref generics, _) = it.kind { let mut error = false; if !generics.params.is_empty() { struct_span_err!(tcx.sess, generics.span, E0132, @@ -295,7 +293,7 @@ pub fn provide(providers: &mut Providers<'_>) { } pub fn check_crate(tcx: TyCtxt<'_>) -> Result<(), ErrorReported> { - tcx.sess.profiler(|p| p.start_activity("type-check crate")); + let _prof_timer = tcx.prof.generic_activity("type_check_crate"); // this ensures that later parts of type checking can assume that items // have valid types and not error @@ -303,7 +301,7 @@ pub fn check_crate(tcx: TyCtxt<'_>) -> Result<(), ErrorReported> { tcx.sess.track_errors(|| { time(tcx.sess, "type collecting", || { for &module in tcx.hir().krate().modules.keys() { - tcx.ensure().collect_mod_item_types(tcx.hir().local_def_id_from_node_id(module)); + tcx.ensure().collect_mod_item_types(tcx.hir().local_def_id(module)); } }); })?; @@ -338,7 +336,7 @@ pub fn check_crate(tcx: TyCtxt<'_>) -> Result<(), ErrorReported> { time(tcx.sess, "item-types checking", || { for &module in tcx.hir().krate().modules.keys() { - tcx.ensure().check_mod_item_types(tcx.hir().local_def_id_from_node_id(module)); + tcx.ensure().check_mod_item_types(tcx.hir().local_def_id(module)); } }); @@ -347,8 +345,6 @@ pub fn check_crate(tcx: TyCtxt<'_>) -> Result<(), ErrorReported> { check_unused::check_crate(tcx); check_for_entry_fn(tcx); - tcx.sess.profiler(|p| p.end_activity("type-check crate")); - if tcx.sess.err_count() == 0 { Ok(()) } else { diff --git a/src/librustc_typeck/outlives/explicit.rs b/src/librustc_typeck/outlives/explicit.rs index 40a57788c0..21e529f33c 100644 --- a/src/librustc_typeck/outlives/explicit.rs +++ b/src/librustc_typeck/outlives/explicit.rs @@ -30,11 +30,17 @@ impl<'tcx> ExplicitPredicatesMap<'tcx> { let mut required_predicates = RequiredPredicates::default(); // process predicates and convert to `RequiredPredicates` entry, see below - for (pred, _) in predicates.predicates.iter() { - match pred { + for &(predicate, span) in predicates.predicates { + match predicate { ty::Predicate::TypeOutlives(predicate) => { let OutlivesPredicate(ref ty, ref reg) = predicate.skip_binder(); - insert_outlives_predicate(tcx, (*ty).into(), reg, &mut required_predicates) + insert_outlives_predicate( + tcx, + (*ty).into(), + reg, + span, + &mut required_predicates, + ) } ty::Predicate::RegionOutlives(predicate) => { @@ -43,6 +49,7 @@ impl<'tcx> ExplicitPredicatesMap<'tcx> { tcx, (*reg1).into(), reg2, + span, &mut required_predicates, ) } diff --git a/src/librustc_typeck/outlives/implicit_infer.rs b/src/librustc_typeck/outlives/implicit_infer.rs index 644d723ded..74048b8d20 100644 --- a/src/librustc_typeck/outlives/implicit_infer.rs +++ b/src/librustc_typeck/outlives/implicit_infer.rs @@ -1,9 +1,10 @@ use rustc::hir::{self, Node}; use rustc::hir::def_id::DefId; use rustc::hir::itemlikevisit::ItemLikeVisitor; -use rustc::ty::subst::{Kind, Subst, UnpackedKind}; +use rustc::ty::subst::{GenericArg, Subst, GenericArgKind}; use rustc::ty::{self, Ty, TyCtxt}; use rustc::util::nodemap::FxHashMap; +use syntax_pos::Span; use super::explicit::ExplicitPredicatesMap; use super::utils::*; @@ -66,7 +67,7 @@ impl<'cx, 'tcx> ItemLikeVisitor<'tcx> for InferVisitor<'cx, 'tcx> { }; let mut item_required_predicates = RequiredPredicates::default(); - match item.node { + match item.kind { hir::ItemKind::Union(..) | hir::ItemKind::Enum(..) 
| hir::ItemKind::Struct(..) => { let adt_def = self.tcx.adt_def(item_did); @@ -79,9 +80,11 @@ impl<'cx, 'tcx> ItemLikeVisitor<'tcx> for InferVisitor<'cx, 'tcx> { // (struct/enum/union) there will be outlive // requirements for adt_def. let field_ty = self.tcx.type_of(field_def.did); + let field_span = self.tcx.def_span(field_def.did); insert_required_predicates_to_be_wf( self.tcx, field_ty, + field_span, self.global_inferred_outlives, &mut item_required_predicates, &mut self.explicit_map, @@ -118,19 +121,20 @@ impl<'cx, 'tcx> ItemLikeVisitor<'tcx> for InferVisitor<'cx, 'tcx> { fn insert_required_predicates_to_be_wf<'tcx>( tcx: TyCtxt<'tcx>, field_ty: Ty<'tcx>, + field_span: Span, global_inferred_outlives: &FxHashMap>, required_predicates: &mut RequiredPredicates<'tcx>, explicit_map: &mut ExplicitPredicatesMap<'tcx>, ) { for ty in field_ty.walk() { - match ty.sty { + match ty.kind { // The field is of type &'a T which means that we will have // a predicate requirement of T: 'a (T outlives 'a). // // We also want to calculate potential predicates for the T ty::Ref(region, rty, _) => { debug!("Ref"); - insert_outlives_predicate(tcx, rty.into(), region, required_predicates); + insert_outlives_predicate(tcx, rty.into(), region, field_span, required_predicates); } // For each Adt (struct/enum/union) type `Foo<'a, T>`, we @@ -158,7 +162,7 @@ fn insert_required_predicates_to_be_wf<'tcx>( // 'a` holds for `Foo`. debug!("Adt"); if let Some(unsubstituted_predicates) = global_inferred_outlives.get(&def.did) { - for unsubstituted_predicate in unsubstituted_predicates { + for (unsubstituted_predicate, &span) in unsubstituted_predicates { // `unsubstituted_predicate` is `U: 'b` in the // example above. So apply the substitution to // get `T: 'a` (or `predicate`): @@ -167,6 +171,7 @@ fn insert_required_predicates_to_be_wf<'tcx>( tcx, predicate.0, predicate.1, + span, required_predicates, ); } @@ -253,7 +258,7 @@ fn insert_required_predicates_to_be_wf<'tcx>( pub fn check_explicit_predicates<'tcx>( tcx: TyCtxt<'tcx>, def_id: DefId, - substs: &[Kind<'tcx>], + substs: &[GenericArg<'tcx>], required_predicates: &mut RequiredPredicates<'tcx>, explicit_map: &mut ExplicitPredicatesMap<'tcx>, ignored_self_ty: Option>, @@ -272,7 +277,7 @@ pub fn check_explicit_predicates<'tcx>( ); let explicit_predicates = explicit_map.explicit_predicates_of(tcx, def_id); - for outlives_predicate in explicit_predicates.iter() { + for (outlives_predicate, &span) in explicit_predicates { debug!("outlives_predicate = {:?}", &outlives_predicate); // Careful: If we are inferring the effects of a `dyn Trait<..>` @@ -310,7 +315,7 @@ pub fn check_explicit_predicates<'tcx>( // binding) and thus infer an outlives requirement that `X: // 'b`. 
if let Some(self_ty) = ignored_self_ty { - if let UnpackedKind::Type(ty) = outlives_predicate.0.unpack() { + if let GenericArgKind::Type(ty) = outlives_predicate.0.unpack() { if ty.walk().any(|ty| ty == self_ty) { debug!("skipping self ty = {:?}", &ty); continue; @@ -320,6 +325,6 @@ pub fn check_explicit_predicates<'tcx>( let predicate = outlives_predicate.subst(tcx, substs); debug!("predicate = {:?}", &predicate); - insert_outlives_predicate(tcx, predicate.0.into(), predicate.1, required_predicates); + insert_outlives_predicate(tcx, predicate.0.into(), predicate.1, span, required_predicates); } } diff --git a/src/librustc_typeck/outlives/mod.rs b/src/librustc_typeck/outlives/mod.rs index 6b8f6fccd4..6b861656d7 100644 --- a/src/librustc_typeck/outlives/mod.rs +++ b/src/librustc_typeck/outlives/mod.rs @@ -2,9 +2,10 @@ use hir::Node; use rustc::hir; use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE}; use rustc::ty::query::Providers; -use rustc::ty::subst::UnpackedKind; +use rustc::ty::subst::GenericArgKind; use rustc::ty::{self, CratePredicatesMap, TyCtxt}; use syntax::symbol::sym; +use syntax_pos::Span; mod explicit; mod implicit_infer; @@ -23,14 +24,14 @@ pub fn provide(providers: &mut Providers<'_>) { fn inferred_outlives_of( tcx: TyCtxt<'_>, item_def_id: DefId, -) -> &[ty::Predicate<'_>] { +) -> &[(ty::Predicate<'_>, Span)] { let id = tcx .hir() .as_local_hir_id(item_def_id) .expect("expected local def-id"); match tcx.hir().get(id) { - Node::Item(item) => match item.node { + Node::Item(item) => match item.kind { hir::ItemKind::Struct(..) | hir::ItemKind::Enum(..) | hir::ItemKind::Union(..) => { let crate_map = tcx.inferred_outlives_crate(LOCAL_CRATE); @@ -43,7 +44,7 @@ fn inferred_outlives_of( if tcx.has_attr(item_def_id, sym::rustc_outlives) { let mut pred: Vec = predicates .iter() - .map(|out_pred| match out_pred { + .map(|(out_pred, _)| match out_pred { ty::Predicate::RegionOutlives(p) => p.to_string(), ty::Predicate::TypeOutlives(p) => p.to_string(), err => bug!("unexpected predicate {:?}", err), @@ -96,27 +97,27 @@ fn inferred_outlives_crate( let predicates = global_inferred_outlives .iter() .map(|(&def_id, set)| { - let predicates = tcx.arena.alloc_from_iter(set + let predicates = &*tcx.arena.alloc_from_iter(set .iter() .filter_map( - |ty::OutlivesPredicate(kind1, region2)| match kind1.unpack() { - UnpackedKind::Type(ty1) => { - Some(ty::Predicate::TypeOutlives(ty::Binder::bind( + |(ty::OutlivesPredicate(kind1, region2), &span)| match kind1.unpack() { + GenericArgKind::Type(ty1) => { + Some((ty::Predicate::TypeOutlives(ty::Binder::bind( ty::OutlivesPredicate(ty1, region2) - ))) + )), span)) } - UnpackedKind::Lifetime(region1) => { - Some(ty::Predicate::RegionOutlives( + GenericArgKind::Lifetime(region1) => { + Some((ty::Predicate::RegionOutlives( ty::Binder::bind(ty::OutlivesPredicate(region1, region2)) - )) + ), span)) } - UnpackedKind::Const(_) => { + GenericArgKind::Const(_) => { // Generic consts don't impose any constraints. 
None } }, )); - (def_id, &*predicates) + (def_id, predicates) }).collect(); tcx.arena.alloc(ty::CratePredicatesMap { diff --git a/src/librustc_typeck/outlives/utils.rs b/src/librustc_typeck/outlives/utils.rs index 783890da63..361116e96d 100644 --- a/src/librustc_typeck/outlives/utils.rs +++ b/src/librustc_typeck/outlives/utils.rs @@ -1,19 +1,22 @@ use rustc::ty::outlives::Component; -use rustc::ty::subst::{Kind, UnpackedKind}; +use rustc::ty::subst::{GenericArg, GenericArgKind}; use rustc::ty::{self, Region, RegionKind, Ty, TyCtxt}; use smallvec::smallvec; -use std::collections::BTreeSet; +use std::collections::BTreeMap; +use syntax_pos::Span; /// Tracks the `T: 'a` or `'a: 'a` predicates that we have inferred /// must be added to the struct header. -pub type RequiredPredicates<'tcx> = BTreeSet, ty::Region<'tcx>>>; +pub type RequiredPredicates<'tcx> = + BTreeMap, ty::Region<'tcx>>, Span>; /// Given a requirement `T: 'a` or `'b: 'a`, deduce the /// outlives_component and add it to `required_predicates` pub fn insert_outlives_predicate<'tcx>( tcx: TyCtxt<'tcx>, - kind: Kind<'tcx>, + kind: GenericArg<'tcx>, outlived_region: Region<'tcx>, + span: Span, required_predicates: &mut RequiredPredicates<'tcx>, ) { // If the `'a` region is bound within the field type itself, we @@ -23,7 +26,7 @@ pub fn insert_outlives_predicate<'tcx>( } match kind.unpack() { - UnpackedKind::Type(ty) => { + GenericArgKind::Type(ty) => { // `T: 'outlived_region` for some type `T` // But T could be a lot of things: // e.g., if `T = &'b u32`, then `'b: 'outlived_region` is @@ -52,6 +55,7 @@ pub fn insert_outlives_predicate<'tcx>( tcx, r.into(), outlived_region, + span, required_predicates, ); } @@ -72,7 +76,8 @@ pub fn insert_outlives_predicate<'tcx>( // where clause that `U: 'a`. let ty: Ty<'tcx> = param_ty.to_ty(tcx); required_predicates - .insert(ty::OutlivesPredicate(ty.into(), outlived_region)); + .entry(ty::OutlivesPredicate(ty.into(), outlived_region)) + .or_insert(span); } Component::Projection(proj_ty) => { @@ -87,7 +92,8 @@ pub fn insert_outlives_predicate<'tcx>( // Here we want to add an explicit `where ::Item: 'a`. let ty: Ty<'tcx> = tcx.mk_projection(proj_ty.item_def_id, proj_ty.substs); required_predicates - .insert(ty::OutlivesPredicate(ty.into(), outlived_region)); + .entry(ty::OutlivesPredicate(ty.into(), outlived_region)) + .or_insert(span); } Component::EscapingProjection(_) => { @@ -112,14 +118,15 @@ pub fn insert_outlives_predicate<'tcx>( } } - UnpackedKind::Lifetime(r) => { + GenericArgKind::Lifetime(r) => { if !is_free_region(tcx, r) { return; } - required_predicates.insert(ty::OutlivesPredicate(kind, outlived_region)); + required_predicates.entry(ty::OutlivesPredicate(kind, outlived_region)) + .or_insert(span); } - UnpackedKind::Const(_) => { + GenericArgKind::Const(_) => { // Generic consts don't impose any constraints. } } @@ -160,9 +167,14 @@ fn is_free_region(tcx: TyCtxt<'_>, region: Region<'_>) -> bool { // ignore it. We can't put it on the struct header anyway. RegionKind::ReLateBound(..) => false, + // This can appear in `where Self: ` bounds (#64855): + // + // struct Bar(::Type) where Self: ; + // struct Baz<'a>(&'a Self) where Self: ; + RegionKind::ReEmpty => false, + // These regions don't appear in types from type declarations: - RegionKind::ReEmpty - | RegionKind::ReErased + RegionKind::ReErased | RegionKind::ReClosureBound(..) | RegionKind::ReScope(..) | RegionKind::ReVar(..) 
diff --git a/src/librustc_typeck/variance/constraints.rs b/src/librustc_typeck/variance/constraints.rs index 7ed9d6606f..4431abdaf5 100644 --- a/src/librustc_typeck/variance/constraints.rs +++ b/src/librustc_typeck/variance/constraints.rs @@ -4,7 +4,7 @@ //! We walk the set of items and, for each member, generate new constraints. use hir::def_id::DefId; -use rustc::ty::subst::{SubstsRef, UnpackedKind}; +use rustc::ty::subst::{SubstsRef, GenericArgKind}; use rustc::ty::{self, Ty, TyCtxt}; use rustc::hir; use rustc::hir::itemlikevisit::ItemLikeVisitor; @@ -68,7 +68,7 @@ pub fn add_constraints_from_crate<'a, 'tcx>(terms_cx: TermsContext<'a, 'tcx>) impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for ConstraintContext<'a, 'tcx> { fn visit_item(&mut self, item: &hir::Item) { - match item.node { + match item.kind { hir::ItemKind::Struct(ref struct_def, _) | hir::ItemKind::Union(ref struct_def, _) => { self.visit_node_helper(item.hir_id); @@ -94,7 +94,7 @@ impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for ConstraintContext<'a, 'tcx> { hir::ItemKind::ForeignMod(ref foreign_mod) => { for foreign_item in &foreign_mod.items { - if let hir::ForeignItemKind::Fn(..) = foreign_item.node { + if let hir::ForeignItemKind::Fn(..) = foreign_item.kind { self.visit_node_helper(foreign_item.hir_id); } } @@ -105,13 +105,13 @@ impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for ConstraintContext<'a, 'tcx> { } fn visit_trait_item(&mut self, trait_item: &hir::TraitItem) { - if let hir::TraitItemKind::Method(..) = trait_item.node { + if let hir::TraitItemKind::Method(..) = trait_item.kind { self.visit_node_helper(trait_item.hir_id); } } fn visit_impl_item(&mut self, impl_item: &hir::ImplItem) { - if let hir::ImplItemKind::Method(..) = impl_item.node { + if let hir::ImplItemKind::Method(..) = impl_item.kind { self.visit_node_helper(impl_item.hir_id); } } @@ -140,7 +140,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { let id = tcx.hir().as_local_hir_id(def_id).unwrap(); let inferred_start = self.terms_cx.inferred_starts[&id]; let current_item = &CurrentItem { inferred_start }; - match tcx.type_of(def_id).sty { + match tcx.type_of(def_id).kind { ty::Adt(def, _) => { // Not entirely obvious: constraints on structs/enums do not // affect the variance of their type parameters. See discussion @@ -232,13 +232,13 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { for k in substs { match k.unpack() { - UnpackedKind::Lifetime(lt) => { + GenericArgKind::Lifetime(lt) => { self.add_constraints_from_region(current, lt, variance_i) } - UnpackedKind::Type(ty) => { + GenericArgKind::Type(ty) => { self.add_constraints_from_ty(current, ty, variance_i) } - UnpackedKind::Const(_) => { + GenericArgKind::Const(_) => { // Consts impose no constraints. } } @@ -256,7 +256,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { ty, variance); - match ty.sty { + match ty.kind { ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Str | ty::Never | ty::Foreign(..) => { // leaf type -- noop @@ -387,13 +387,13 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { variance_decl, variance_i); match k.unpack() { - UnpackedKind::Lifetime(lt) => { + GenericArgKind::Lifetime(lt) => { self.add_constraints_from_region(current, lt, variance_i) } - UnpackedKind::Type(ty) => { + GenericArgKind::Type(ty) => { self.add_constraints_from_ty(current, ty, variance_i) } - UnpackedKind::Const(_) => { + GenericArgKind::Const(_) => { // Consts impose no constraints. 
} } diff --git a/src/librustc_typeck/variance/mod.rs b/src/librustc_typeck/variance/mod.rs index 343d7ea656..745dbee5fd 100644 --- a/src/librustc_typeck/variance/mod.rs +++ b/src/librustc_typeck/variance/mod.rs @@ -49,7 +49,7 @@ fn variances_of(tcx: TyCtxt<'_>, item_def_id: DefId) -> &[ty::Variance] { span_bug!(tcx.hir().span(id), "asked to compute variance for wrong kind of item") }; match tcx.hir().get(id) { - Node::Item(item) => match item.node { + Node::Item(item) => match item.kind { hir::ItemKind::Enum(..) | hir::ItemKind::Struct(..) | hir::ItemKind::Union(..) | @@ -58,19 +58,19 @@ fn variances_of(tcx: TyCtxt<'_>, item_def_id: DefId) -> &[ty::Variance] { _ => unsupported() }, - Node::TraitItem(item) => match item.node { + Node::TraitItem(item) => match item.kind { hir::TraitItemKind::Method(..) => {} _ => unsupported() }, - Node::ImplItem(item) => match item.node { + Node::ImplItem(item) => match item.kind { hir::ImplItemKind::Method(..) => {} _ => unsupported() }, - Node::ForeignItem(item) => match item.node { + Node::ForeignItem(item) => match item.kind { hir::ForeignItemKind::Fn(..) => {} _ => unsupported() diff --git a/src/librustc_typeck/variance/solve.rs b/src/librustc_typeck/variance/solve.rs index 1176c5ebb3..fbd476ef83 100644 --- a/src/librustc_typeck/variance/solve.rs +++ b/src/librustc_typeck/variance/solve.rs @@ -109,7 +109,7 @@ impl<'a, 'tcx> SolveContext<'a, 'tcx> { self.enforce_const_invariance(generics, variances); // Functions are permitted to have unused generic parameters: make those invariant. - if let ty::FnDef(..) = tcx.type_of(def_id).sty { + if let ty::FnDef(..) = tcx.type_of(def_id).kind { for variance in variances.iter_mut() { if *variance == ty::Bivariant { *variance = ty::Invariant; diff --git a/src/librustc_typeck/variance/terms.rs b/src/librustc_typeck/variance/terms.rs index e10837e52a..863a0b267f 100644 --- a/src/librustc_typeck/variance/terms.rs +++ b/src/librustc_typeck/variance/terms.rs @@ -131,7 +131,7 @@ impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for TermsContext<'a, 'tcx> { debug!("add_inferreds for item {}", self.tcx.hir().node_to_string(item.hir_id)); - match item.node { + match item.kind { hir::ItemKind::Struct(ref struct_def, _) | hir::ItemKind::Union(ref struct_def, _) => { self.add_inferreds_for_item(item.hir_id); @@ -157,7 +157,7 @@ impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for TermsContext<'a, 'tcx> { hir::ItemKind::ForeignMod(ref foreign_mod) => { for foreign_item in &foreign_mod.items { - if let hir::ForeignItemKind::Fn(..) = foreign_item.node { + if let hir::ForeignItemKind::Fn(..) = foreign_item.kind { self.add_inferreds_for_item(foreign_item.hir_id); } } @@ -168,13 +168,13 @@ impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for TermsContext<'a, 'tcx> { } fn visit_trait_item(&mut self, trait_item: &hir::TraitItem) { - if let hir::TraitItemKind::Method(..) = trait_item.node { + if let hir::TraitItemKind::Method(..) = trait_item.kind { self.add_inferreds_for_item(trait_item.hir_id); } } fn visit_impl_item(&mut self, impl_item: &hir::ImplItem) { - if let hir::ImplItemKind::Method(..) = impl_item.node { + if let hir::ImplItemKind::Method(..) 
= impl_item.kind { self.add_inferreds_for_item(impl_item.hir_id); } } diff --git a/src/librustdoc/Cargo.toml b/src/librustdoc/Cargo.toml index 0eb8b73016..e3de7fe204 100644 --- a/src/librustdoc/Cargo.toml +++ b/src/librustdoc/Cargo.toml @@ -11,5 +11,5 @@ path = "lib.rs" [dependencies] pulldown-cmark = { version = "0.5.3", default-features = false } minifier = "0.0.33" -rayon = { version = "0.2.0", package = "rustc-rayon" } +rayon = { version = "0.3.0", package = "rustc-rayon" } tempfile = "3" diff --git a/src/librustdoc/clean/auto_trait.rs b/src/librustdoc/clean/auto_trait.rs index 516be99ed6..b7f5ed9d00 100644 --- a/src/librustdoc/clean/auto_trait.rs +++ b/src/librustdoc/clean/auto_trait.rs @@ -104,7 +104,7 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> { // regardless of the choice of `T`. let params = ( self.cx.tcx.generics_of(param_env_def_id), - &&self.cx.tcx.common.empty_predicates, + ty::GenericPredicates::default(), ).clean(self.cx).params; Generics { @@ -119,7 +119,7 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> { source: Span::empty(), name: None, attrs: Default::default(), - visibility: None, + visibility: Inherited, def_id: self.cx.next_def_id(param_env_def_id.krate), stability: None, deprecation: None, @@ -489,7 +489,7 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> { let mut generic_params = ( tcx.generics_of(param_env_def_id), - &tcx.explicit_predicates_of(param_env_def_id), + tcx.explicit_predicates_of(param_env_def_id), ).clean(self.cx).params; let mut has_sized = FxHashSet::default(); diff --git a/src/librustdoc/clean/blanket_impl.rs b/src/librustdoc/clean/blanket_impl.rs index 490d4107c5..ff59dcab67 100644 --- a/src/librustdoc/clean/blanket_impl.rs +++ b/src/librustdoc/clean/blanket_impl.rs @@ -41,7 +41,7 @@ impl<'a, 'tcx> BlanketImplFinder<'a, 'tcx> { trait_def_id, impl_def_id); let trait_ref = self.cx.tcx.impl_trait_ref(impl_def_id).unwrap(); let may_apply = self.cx.tcx.infer_ctxt().enter(|infcx| { - match trait_ref.self_ty().sty { + match trait_ref.self_ty().kind { ty::Param(_) => {}, _ => return false, } @@ -99,7 +99,7 @@ impl<'a, 'tcx> BlanketImplFinder<'a, 'tcx> { source: self.cx.tcx.def_span(impl_def_id).clean(self.cx), name: None, attrs: Default::default(), - visibility: None, + visibility: Inherited, def_id: self.cx.next_def_id(impl_def_id.krate), stability: None, deprecation: None, @@ -107,7 +107,7 @@ impl<'a, 'tcx> BlanketImplFinder<'a, 'tcx> { unsafety: hir::Unsafety::Normal, generics: ( self.cx.tcx.generics_of(impl_def_id), - &self.cx.tcx.explicit_predicates_of(impl_def_id), + self.cx.tcx.explicit_predicates_of(impl_def_id), ).clean(self.cx), provided_trait_methods, // FIXME(eddyb) compute both `trait_` and `for_` from diff --git a/src/librustdoc/clean/cfg.rs b/src/librustdoc/clean/cfg.rs index c3092cea98..11f45c5f6d 100644 --- a/src/librustdoc/clean/cfg.rs +++ b/src/librustdoc/clean/cfg.rs @@ -9,7 +9,7 @@ use std::ops; use syntax::symbol::{Symbol, sym}; use syntax::ast::{MetaItem, MetaItemKind, NestedMetaItem, LitKind}; -use syntax::parse::ParseSess; +use syntax::sess::ParseSess; use syntax::feature_gate::Features; use syntax_pos::Span; @@ -68,9 +68,9 @@ impl Cfg { span: cfg.span }), }; - match cfg.node { + match cfg.kind { MetaItemKind::Word => Ok(Cfg::Cfg(name, None)), - MetaItemKind::NameValue(ref lit) => match lit.node { + MetaItemKind::NameValue(ref lit) => match lit.kind { LitKind::Str(value, _) => Ok(Cfg::Cfg(name, Some(value))), _ => Err(InvalidCfgError { // FIXME: if the main #[cfg] syntax decided to support non-string literals, @@ -346,6 +346,7 @@ impl<'a> 
fmt::Display for Html<'a> { "freebsd" => "FreeBSD", "fuchsia" => "Fuchsia", "haiku" => "Haiku", + "hermit" => "HermitCore", "ios" => "iOS", "l4re" => "L4Re", "linux" => "Linux", @@ -360,7 +361,7 @@ impl<'a> fmt::Display for Html<'a> { ("target_arch", Some(arch)) => match &*arch.as_str() { "aarch64" => "AArch64", "arm" => "ARM", - "asmjs" => "asm.js", + "asmjs" => "JavaScript", "mips" => "MIPS", "mips64" => "MIPS-64", "msp430" => "MSP430", diff --git a/src/librustdoc/clean/cfg/tests.rs b/src/librustdoc/clean/cfg/tests.rs index ec5d86b2c6..580320a735 100644 --- a/src/librustdoc/clean/cfg/tests.rs +++ b/src/librustdoc/clean/cfg/tests.rs @@ -17,7 +17,7 @@ fn name_value_cfg(name: &str, value: &str) -> Cfg { fn dummy_meta_item_word(name: &str) -> MetaItem { MetaItem { path: Path::from_ident(Ident::from_str(name)), - node: MetaItemKind::Word, + kind: MetaItemKind::Word, span: DUMMY_SP, } } @@ -26,7 +26,7 @@ macro_rules! dummy_meta_item_list { ($name:ident, [$($list:ident),* $(,)?]) => { MetaItem { path: Path::from_ident(Ident::from_str(stringify!($name))), - node: MetaItemKind::List(vec![ + kind: MetaItemKind::List(vec![ $( NestedMetaItem::MetaItem( dummy_meta_item_word(stringify!($list)), @@ -40,7 +40,7 @@ macro_rules! dummy_meta_item_list { ($name:ident, [$($list:expr),* $(,)?]) => { MetaItem { path: Path::from_ident(Ident::from_str(stringify!($name))), - node: MetaItemKind::List(vec![ + kind: MetaItemKind::List(vec![ $( NestedMetaItem::MetaItem($list), )* diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs index d71acb4fa7..b3b3750320 100644 --- a/src/librustdoc/clean/inline.rs +++ b/src/librustdoc/clean/inline.rs @@ -3,8 +3,8 @@ use std::iter::once; use syntax::ast; -use syntax::ext::base::MacroKind; use syntax::symbol::sym; +use syntax_pos::hygiene::MacroKind; use syntax_pos::Span; use rustc::hir; @@ -131,7 +131,7 @@ pub fn try_inline( name: Some(name.clean(cx)), attrs, inner, - visibility: Some(clean::Public), + visibility: clean::Public, stability: cx.tcx.lookup_stability(did).clean(cx), deprecation: cx.tcx.lookup_deprecation(did).clean(cx), def_id: did, @@ -193,7 +193,7 @@ pub fn build_external_trait(cx: &DocContext<'_>, did: DefId) -> clean::Trait { let auto_trait = cx.tcx.trait_def(did).has_auto_impl; let trait_items = cx.tcx.associated_items(did).map(|item| item.clean(cx)).collect(); let predicates = cx.tcx.predicates_of(did); - let generics = (cx.tcx.generics_of(did), &predicates).clean(cx); + let generics = (cx.tcx.generics_of(did), predicates).clean(cx); let generics = filter_non_trait_generics(did, generics); let (generics, supertrait_bounds) = separate_supertrait_bounds(generics); let is_spotlight = load_attrs(cx, did).clean(cx).has_doc_flag(sym::spotlight); @@ -220,7 +220,7 @@ fn build_external_function(cx: &DocContext<'_>, did: DefId) -> clean::Function { let asyncness = cx.tcx.asyncness(did); let predicates = cx.tcx.predicates_of(did); let (generics, decl) = clean::enter_impl_trait(cx, || { - ((cx.tcx.generics_of(did), &predicates).clean(cx), (did, sig).clean(cx)) + ((cx.tcx.generics_of(did), predicates).clean(cx), (did, sig).clean(cx)) }); let (all_types, ret_types) = clean::get_all_types(&generics, &decl, cx); clean::Function { @@ -241,7 +241,7 @@ fn build_enum(cx: &DocContext<'_>, did: DefId) -> clean::Enum { let predicates = cx.tcx.explicit_predicates_of(did); clean::Enum { - generics: (cx.tcx.generics_of(did), &predicates).clean(cx), + generics: (cx.tcx.generics_of(did), predicates).clean(cx), variants_stripped: false, variants: 
cx.tcx.adt_def(did).variants.clean(cx), } @@ -257,7 +257,7 @@ fn build_struct(cx: &DocContext<'_>, did: DefId) -> clean::Struct { CtorKind::Fn => doctree::Tuple, CtorKind::Const => doctree::Unit, }, - generics: (cx.tcx.generics_of(did), &predicates).clean(cx), + generics: (cx.tcx.generics_of(did), predicates).clean(cx), fields: variant.fields.clean(cx), fields_stripped: false, } @@ -269,7 +269,7 @@ fn build_union(cx: &DocContext<'_>, did: DefId) -> clean::Union { clean::Union { struct_type: doctree::Plain, - generics: (cx.tcx.generics_of(did), &predicates).clean(cx), + generics: (cx.tcx.generics_of(did), predicates).clean(cx), fields: variant.fields.clean(cx), fields_stripped: false, } @@ -280,7 +280,7 @@ fn build_type_alias(cx: &DocContext<'_>, did: DefId) -> clean::Typedef { clean::Typedef { type_: cx.tcx.type_of(did).clean(cx), - generics: (cx.tcx.generics_of(did), &predicates).clean(cx), + generics: (cx.tcx.generics_of(did), predicates).clean(cx), } } @@ -333,7 +333,7 @@ pub fn build_impl(cx: &DocContext<'_>, did: DefId, attrs: Option>, } let for_ = if let Some(hir_id) = tcx.hir().as_local_hir_id(did) { - match tcx.hir().expect_item(hir_id).node { + match tcx.hir().expect_item(hir_id).kind { hir::ItemKind::Impl(.., ref t, _) => { t.clean(cx) } @@ -355,7 +355,7 @@ pub fn build_impl(cx: &DocContext<'_>, did: DefId, attrs: Option>, let predicates = tcx.explicit_predicates_of(did); let (trait_items, generics) = if let Some(hir_id) = tcx.hir().as_local_hir_id(did) { - match tcx.hir().expect_item(hir_id).node { + match tcx.hir().expect_item(hir_id).kind { hir::ItemKind::Impl(.., ref gen, _, _, ref item_ids) => { ( item_ids.iter() @@ -376,7 +376,7 @@ pub fn build_impl(cx: &DocContext<'_>, did: DefId, attrs: Option>, } }).collect::>(), clean::enter_impl_trait(cx, || { - (tcx.generics_of(did), &predicates).clean(cx) + (tcx.generics_of(did), predicates).clean(cx) }), ) }; @@ -418,7 +418,7 @@ pub fn build_impl(cx: &DocContext<'_>, did: DefId, attrs: Option>, source: tcx.def_span(did).clean(cx), name: None, attrs, - visibility: Some(clean::Inherited), + visibility: clean::Inherited, stability: tcx.lookup_stability(did).clean(cx), deprecation: tcx.lookup_deprecation(did).clean(cx), def_id: did, @@ -479,9 +479,9 @@ fn build_static(cx: &DocContext<'_>, did: DefId, mutable: bool) -> clean::Static fn build_macro(cx: &DocContext<'_>, did: DefId, name: ast::Name) -> clean::ItemEnum { let imported_from = cx.tcx.original_crate_name(did.krate); - match cx.cstore.load_macro_untracked(did, cx.sess()) { - LoadedMacro::MacroDef(def) => { - let matchers: hir::HirVec = if let ast::ItemKind::MacroDef(ref def) = def.node { + match cx.enter_resolver(|r| r.cstore().load_macro_untracked(did, cx.sess())) { + LoadedMacro::MacroDef(def, _) => { + let matchers: hir::HirVec = if let ast::ItemKind::MacroDef(ref def) = def.kind { let tts: Vec<_> = def.stream().into_trees().collect(); tts.chunks(4).map(|arm| arm[0].span()).collect() } else { diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 95a5869e84..bdc0206223 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -9,7 +9,7 @@ mod simplify; mod auto_trait; mod blanket_impl; -use rustc_data_structures::indexed_vec::{IndexVec, Idx}; +use rustc_index::vec::{IndexVec, Idx}; use rustc_target::spec::abi::Abi; use rustc_typeck::hir_ty_to_ty; use rustc::infer::region_constraints::{RegionConstraintData, Constraint}; @@ -21,17 +21,17 @@ use rustc::hir; use rustc::hir::def::{CtorKind, DefKind, Res}; use rustc::hir::def_id::{CrateNum, 
DefId, CRATE_DEF_INDEX, LOCAL_CRATE}; use rustc::hir::ptr::P; -use rustc::ty::subst::{InternalSubsts, SubstsRef, UnpackedKind}; +use rustc::ty::subst::{InternalSubsts, SubstsRef, GenericArgKind}; use rustc::ty::{self, DefIdTree, TyCtxt, Region, RegionVid, Ty, AdtKind}; use rustc::ty::fold::TypeFolder; use rustc::ty::layout::VariantIdx; use rustc::util::nodemap::{FxHashMap, FxHashSet}; -use syntax::ast::{self, AttrStyle, Ident}; +use syntax::ast::{self, Attribute, AttrStyle, AttrItem, Ident}; use syntax::attr; -use syntax::ext::base::MacroKind; +use syntax::parse::lexer::comments; use syntax::source_map::DUMMY_SP; -use syntax::symbol::{Symbol, kw, sym}; -use syntax::symbol::InternedString; +use syntax_pos::symbol::{Symbol, kw, sym}; +use syntax_pos::hygiene::MacroKind; use syntax_pos::{self, Pos, FileName}; use std::collections::hash_map::Entry; @@ -187,7 +187,7 @@ pub fn krate(mut cx: &mut DocContext<'_>) -> Crate { source: Span::empty(), name: Some(prim.to_url_str().to_string()), attrs: attrs.clone(), - visibility: Some(Public), + visibility: Public, stability: get_stability(cx, def_id), deprecation: get_deprecation(cx, def_id), def_id, @@ -198,8 +198,8 @@ pub fn krate(mut cx: &mut DocContext<'_>) -> Crate { Item { source: Span::empty(), name: Some(kw.clone()), - attrs: attrs, - visibility: Some(Public), + attrs, + visibility: Public, stability: get_stability(cx, def_id), deprecation: get_deprecation(cx, def_id), def_id, @@ -275,7 +275,7 @@ impl Clean for CrateNum { let primitives = if root.is_local() { cx.tcx.hir().krate().module.item_ids.iter().filter_map(|&id| { let item = cx.tcx.hir().expect_item(id.id); - match item.node { + match item.kind { hir::ItemKind::Mod(_) => { as_primitive(Res::Def( DefKind::Mod, @@ -319,7 +319,7 @@ impl Clean for CrateNum { let keywords = if root.is_local() { cx.tcx.hir().krate().module.item_ids.iter().filter_map(|&id| { let item = cx.tcx.hir().expect_item(id.id); - match item.node { + match item.kind { hir::ItemKind::Mod(_) => { as_keyword(Res::Def( DefKind::Mod, @@ -361,7 +361,7 @@ pub struct Item { pub name: Option, pub attrs: Attributes, pub inner: ItemEnum, - pub visibility: Option, + pub visibility: Visibility, pub def_id: DefId, pub stability: Option, pub deprecation: Option, @@ -778,11 +778,11 @@ impl Attributes { fn extract_cfg(mi: &ast::MetaItem) -> Option<&ast::MetaItem> { use syntax::ast::NestedMetaItem::MetaItem; - if let ast::MetaItemKind::List(ref nmis) = mi.node { + if let ast::MetaItemKind::List(ref nmis) = mi.kind { if nmis.len() == 1 { if let MetaItem(ref cfg_mi) = nmis[0] { if cfg_mi.check_name(sym::cfg) { - if let ast::MetaItemKind::List(ref cfg_nmis) = cfg_mi.node { + if let ast::MetaItemKind::List(ref cfg_nmis) = cfg_mi.kind { if cfg_nmis.len() == 1 { if let MetaItem(ref content_mi) = cfg_nmis[0] { return Some(content_mi); @@ -859,8 +859,31 @@ impl Attributes { let mut cfg = Cfg::True; let mut doc_line = 0; + /// Converts `attr` to a normal `#[doc="foo"]` comment, if it is a + /// comment like `///` or `/** */`. (Returns `attr` unchanged for + /// non-sugared doc attributes.) 
+ pub fn with_desugared_doc(attr: &Attribute, f: impl FnOnce(&Attribute) -> T) -> T { + if attr.is_sugared_doc { + let comment = attr.value_str().unwrap(); + let meta = attr::mk_name_value_item_str( + Ident::with_dummy_span(sym::doc), + Symbol::intern(&comments::strip_doc_comment_decoration(&comment.as_str())), + DUMMY_SP, + ); + f(&Attribute { + item: AttrItem { path: meta.path, tokens: meta.kind.tokens(meta.span) }, + id: attr.id, + style: attr.style, + is_sugared_doc: true, + span: attr.span, + }) + } else { + f(attr) + } + } + let other_attrs = attrs.iter().filter_map(|attr| { - attr.with_desugared_doc(|attr| { + with_desugared_doc(attr, |attr| { if attr.check_name(sym::doc) { if let Some(mi) = attr.meta() { if let Some(value) = mi.value_str() { @@ -1098,33 +1121,33 @@ fn external_generic_args( substs: SubstsRef<'_>, ) -> GenericArgs { let mut skip_self = has_self; - let mut ty_sty = None; + let mut ty_kind = None; let args: Vec<_> = substs.iter().filter_map(|kind| match kind.unpack() { - UnpackedKind::Lifetime(lt) => { + GenericArgKind::Lifetime(lt) => { lt.clean(cx).and_then(|lt| Some(GenericArg::Lifetime(lt))) } - UnpackedKind::Type(_) if skip_self => { + GenericArgKind::Type(_) if skip_self => { skip_self = false; None } - UnpackedKind::Type(ty) => { - ty_sty = Some(&ty.sty); + GenericArgKind::Type(ty) => { + ty_kind = Some(&ty.kind); Some(GenericArg::Type(ty.clean(cx))) } - UnpackedKind::Const(ct) => Some(GenericArg::Const(ct.clean(cx))), + GenericArgKind::Const(ct) => Some(GenericArg::Const(ct.clean(cx))), }).collect(); match trait_did { // Attempt to sugar an external path like Fn<(A, B,), C> to Fn(A, B) -> C Some(did) if cx.tcx.lang_items().fn_trait_kind(did).is_some() => { - assert!(ty_sty.is_some()); - let inputs = match ty_sty { + assert!(ty_kind.is_some()); + let inputs = match ty_kind { Some(ty::Tuple(ref tys)) => tys.iter().map(|t| t.expect_ty().clean(cx)).collect(), _ => return GenericArgs::AngleBracketed { args, bindings }, }; let output = None; // FIXME(#20299) return type comes from a projection now - // match types[1].sty { + // match types[1].kind { // ty::Tuple(ref v) if v.is_empty() => None, // -> () // _ => Some(types[1].clean(cx)) // }; @@ -1162,9 +1185,9 @@ impl<'a, 'tcx> Clean for (&'a ty::TraitRef<'tcx>, Vec // collect any late bound regions let mut late_bounds = vec![]; for ty_s in trait_ref.input_types().skip(1) { - if let ty::Tuple(ts) = ty_s.sty { + if let ty::Tuple(ts) = ty_s.kind { for &ty_s in ts { - if let ty::Ref(ref reg, _, _) = ty_s.expect_ty().sty { + if let ty::Ref(ref reg, _, _) = ty_s.expect_ty().kind { if let &ty::RegionKind::ReLateBound(..) = *reg { debug!(" hit an ReLateBound {:?}", reg); if let Some(Lifetime(name)) = reg.clean(cx) { @@ -1307,7 +1330,7 @@ impl Clean> for ty::RegionKind { } } -#[derive(Clone, PartialEq, Eq, Debug, Hash)] +#[derive(Clone, Debug)] pub enum WherePredicate { BoundPredicate { ty: Type, bounds: Vec }, RegionPredicate { lifetime: Lifetime, bounds: Vec }, @@ -1570,7 +1593,7 @@ impl Clean for hir::GenericParam { did: cx.tcx.hir().local_def_id(self.hir_id), bounds: self.bounds.clean(cx), default: default.clean(cx), - synthetic: synthetic, + synthetic, }) } hir::GenericParamKind::Const { ref ty } => { @@ -1589,7 +1612,7 @@ impl Clean for hir::GenericParam { } // maybe use a Generic enum and use Vec? 
-#[derive(Clone, PartialEq, Eq, Debug, Default, Hash)] +#[derive(Clone, Debug, Default)] pub struct Generics { pub params: Vec, pub where_predicates: Vec, @@ -1664,8 +1687,7 @@ impl Clean for hir::Generics { } } -impl<'a, 'tcx> Clean for (&'a ty::Generics, - &'a &'tcx ty::GenericPredicates<'tcx>) { +impl<'a, 'tcx> Clean for (&'a ty::Generics, ty::GenericPredicates<'tcx>) { fn clean(&self, cx: &DocContext<'_>) -> Generics { use self::WherePredicate as WP; use std::collections::BTreeMap; @@ -1683,7 +1705,7 @@ impl<'a, 'tcx> Clean for (&'a ty::Generics, .filter_map(|param| match param.kind { ty::GenericParamDefKind::Lifetime => None, ty::GenericParamDefKind::Type { synthetic, .. } => { - if param.name.as_symbol() == kw::SelfUpper { + if param.name == kw::SelfUpper { assert_eq!(param.index, 0); return None; } @@ -1705,15 +1727,15 @@ impl<'a, 'tcx> Clean for (&'a ty::Generics, let mut projection = None; let param_idx = (|| { if let Some(trait_ref) = p.to_opt_poly_trait_ref() { - if let ty::Param(param) = trait_ref.self_ty().sty { + if let ty::Param(param) = trait_ref.self_ty().kind { return Some(param.index); } } else if let Some(outlives) = p.to_opt_type_outlives() { - if let ty::Param(param) = outlives.skip_binder().0.sty { + if let ty::Param(param) = outlives.skip_binder().0.kind { return Some(param.index); } } else if let ty::Predicate::Projection(p) = p { - if let ty::Param(param) = p.skip_binder().projection_ty.self_ty().sty { + if let ty::Param(param) = p.skip_binder().projection_ty.self_ty().kind { projection = Some(p); return Some(param.index); } @@ -1849,7 +1871,7 @@ fn get_real_types( cx: &DocContext<'_>, recurse: i32, ) -> FxHashSet { - let arg_s = arg.to_string(); + let arg_s = arg.print().to_string(); let mut res = FxHashSet::default(); if recurse >= 10 { // FIXME: remove this whole recurse thing when the recursion bug is fixed return res; @@ -2032,6 +2054,7 @@ impl Clean for doctree::Function<'_> { pub struct FnDecl { pub inputs: Arguments, pub output: FunctionRetTy, + pub c_variadic: bool, pub attrs: Attributes, } @@ -2110,6 +2133,7 @@ impl<'a, A: Copy> Clean for (&'a hir::FnDecl, A) FnDecl { inputs: (&self.0.inputs[..], self.1).clean(cx), output: self.0.output.clean(cx), + c_variadic: self.0.c_variadic, attrs: Attributes::default(), } } @@ -2127,6 +2151,7 @@ impl<'tcx> Clean for (DefId, ty::PolyFnSig<'tcx>) { FnDecl { output: Return(sig.skip_binder().output().clean(cx)), attrs: Attributes::default(), + c_variadic: sig.skip_binder().c_variadic, inputs: Arguments { values: sig.skip_binder().inputs().iter().map(|t| { Argument { @@ -2210,7 +2235,7 @@ impl Clean for doctree::Trait<'_> { let is_spotlight = attrs.has_doc_flag(sym::spotlight); Item { name: Some(self.name.clean(cx)), - attrs: attrs, + attrs, source: self.whence.clean(cx), def_id: cx.tcx.hir().local_def_id(self.id), visibility: self.vis.clean(cx), @@ -2280,7 +2305,7 @@ impl Clean for hir::PolyTraitRef { impl Clean for hir::TraitItem { fn clean(&self, cx: &DocContext<'_>) -> Item { - let inner = match self.node { + let inner = match self.kind { hir::TraitItemKind::Const(ref ty, default) => { AssocConstItem(ty.clean(cx), default.map(|e| print_const_expr(cx, e))) @@ -2311,7 +2336,7 @@ impl Clean for hir::TraitItem { attrs: self.attrs.clean(cx), source: self.span.clean(cx), def_id: local_did, - visibility: None, + visibility: Visibility::Inherited, stability: get_stability(cx, local_did), deprecation: get_deprecation(cx, local_did), inner, @@ -2321,7 +2346,7 @@ impl Clean for hir::TraitItem { impl Clean for hir::ImplItem { 
fn clean(&self, cx: &DocContext<'_>) -> Item { - let inner = match self.node { + let inner = match self.kind { hir::ImplItemKind::Const(ref ty, expr) => { AssocConstItem(ty.clean(cx), Some(print_const_expr(cx, expr))) @@ -2366,7 +2391,7 @@ impl Clean for ty::AssocItem { } ty::AssocKind::Method => { let generics = (cx.tcx.generics_of(self.def_id), - &cx.tcx.explicit_predicates_of(self.def_id)).clean(cx); + cx.tcx.explicit_predicates_of(self.def_id)).clean(cx); let sig = cx.tcx.fn_sig(self.def_id); let mut decl = (self.def_id, sig).clean(cx); @@ -2380,7 +2405,7 @@ impl Clean for ty::AssocItem { let self_arg_ty = *sig.input(0).skip_binder(); if self_arg_ty == self_ty { decl.inputs.values[0].type_ = Generic(String::from("Self")); - } else if let ty::Ref(_, ty, _) = self_arg_ty.sty { + } else if let ty::Ref(_, ty, _) = self_arg_ty.kind { if ty == self_ty { match decl.inputs.values[0].type_ { BorrowedRef{ref mut type_, ..} => { @@ -2445,7 +2470,7 @@ impl Clean for ty::AssocItem { // all of the generics from there and then look for bounds that are // applied to this associated type in question. let predicates = cx.tcx.explicit_predicates_of(did); - let generics = (cx.tcx.generics_of(did), &predicates).clean(cx); + let generics = (cx.tcx.generics_of(did), predicates).clean(cx); let mut bounds = generics.where_predicates.iter().filter_map(|pred| { let (name, self_type, trait_, bounds) = match *pred { WherePredicate::BoundPredicate { @@ -2497,7 +2522,7 @@ impl Clean for ty::AssocItem { let visibility = match self.container { ty::ImplContainer(_) => self.vis.clean(cx), - ty::TraitContainer(_) => None, + ty::TraitContainer(_) => Inherited, }; Item { @@ -2545,7 +2570,6 @@ pub enum Type { Slice(Box), Array(Box, String), Never, - CVarArgs, RawPointer(Mutability, Box), BorrowedRef { lifetime: Option, @@ -2583,7 +2607,6 @@ pub enum PrimitiveType { Reference, Fn, Never, - CVarArgs, } #[derive(Clone, Copy, Debug)] @@ -2787,7 +2810,6 @@ impl PrimitiveType { Reference => "reference", Fn => "fn", Never => "never", - CVarArgs => "...", } } @@ -2835,7 +2857,7 @@ impl Clean for hir::Ty { fn clean(&self, cx: &DocContext<'_>) -> Type { use rustc::hir::*; - match self.node { + match self.kind { TyKind::Never => Never, TyKind::Ptr(ref m) => RawPointer(m.mutbl.clean(cx), box m.ty.clean(cx)), TyKind::Rptr(ref l, ref m) => { @@ -2844,7 +2866,7 @@ impl Clean for hir::Ty { } else { Some(l.clean(cx)) }; - BorrowedRef {lifetime: lifetime, mutability: m.mutbl.clean(cx), + BorrowedRef {lifetime, mutability: m.mutbl.clean(cx), type_: box m.ty.clean(cx)} } TyKind::Slice(ref ty) => Slice(box ty.clean(cx)), @@ -2868,7 +2890,7 @@ impl Clean for hir::Ty { TyKind::Tup(ref tys) => Tuple(tys.clean(cx)), TyKind::Def(item_id, _) => { let item = cx.tcx.hir().expect_item(item_id.id); - if let hir::ItemKind::OpaqueTy(ref ty) = item.node { + if let hir::ItemKind::OpaqueTy(ref ty) = item.kind { ImplTrait(ty.bounds.clean(cx)) } else { unreachable!() @@ -2889,7 +2911,7 @@ impl Clean for hir::Ty { // Substitute private type aliases if let Some(hir_id) = cx.tcx.hir().as_local_hir_id(def_id) { if !cx.renderinfo.borrow().access_levels.is_exported(def_id) { - alias = Some(&cx.tcx.hir().expect_item(hir_id).node); + alias = Some(&cx.tcx.hir().expect_item(hir_id).kind); } } }; @@ -3000,7 +3022,7 @@ impl Clean for hir::Ty { TyKind::Path(hir::QPath::TypeRelative(ref qself, ref segment)) => { let mut res = Res::Err; let ty = hir_ty_to_ty(cx.tcx, self); - if let ty::Projection(proj) = ty.sty { + if let ty::Projection(proj) = ty.kind { res = 
Res::Def(DefKind::Trait, proj.trait_ref(cx.tcx).def_id); } let trait_path = hir::Path { @@ -3031,8 +3053,7 @@ impl Clean for hir::Ty { } TyKind::BareFn(ref barefn) => BareFunction(box barefn.clean(cx)), TyKind::Infer | TyKind::Err => Infer, - TyKind::Typeof(..) => panic!("unimplemented type {:?}", self.node), - TyKind::CVarArgs(_) => CVarArgs, + TyKind::Typeof(..) => panic!("unimplemented type {:?}", self.kind), } } } @@ -3040,7 +3061,7 @@ impl Clean for hir::Ty { impl<'tcx> Clean for Ty<'tcx> { fn clean(&self, cx: &DocContext<'_>) -> Type { debug!("cleaning type: {:?}", self); - match self.sty { + match self.kind { ty::Never => Never, ty::Bool => Primitive(PrimitiveType::Bool), ty::Char => Primitive(PrimitiveType::Char), @@ -3103,9 +3124,9 @@ impl<'tcx> Clean for Ty<'tcx> { let path = external_path(cx, cx.tcx.item_name(did), None, false, vec![], InternalSubsts::empty()); ResolvedPath { - path: path, + path, param_names: None, - did: did, + did, is_generic: false, } } @@ -3294,9 +3315,9 @@ pub enum Visibility { Restricted(DefId, Path), } -impl Clean> for hir::Visibility { - fn clean(&self, cx: &DocContext<'_>) -> Option { - Some(match self.node { +impl Clean for hir::Visibility { + fn clean(&self, cx: &DocContext<'_>) -> Visibility { + match self.node { hir::VisibilityKind::Public => Visibility::Public, hir::VisibilityKind::Inherited => Visibility::Inherited, hir::VisibilityKind::Crate(_) => Visibility::Crate, @@ -3305,13 +3326,13 @@ impl Clean> for hir::Visibility { let did = register_res(cx, path.res); Visibility::Restricted(did, path) } - }) + } } } -impl Clean> for ty::Visibility { - fn clean(&self, _: &DocContext<'_>) -> Option { - Some(if *self == ty::Visibility::Public { Public } else { Inherited }) +impl Clean for ty::Visibility { + fn clean(&self, _: &DocContext<'_>) -> Visibility { + if *self == ty::Visibility::Public { Public } else { Inherited } } } @@ -3428,7 +3449,7 @@ impl Clean for doctree::Variant<'_> { name: Some(self.name.clean(cx)), attrs: self.attrs.clean(cx), source: self.whence.clean(cx), - visibility: None, + visibility: Inherited, stability: cx.stability(self.id).clean(cx), deprecation: cx.deprecation(self.id).clean(cx), def_id: cx.tcx.hir().local_def_id(self.id), @@ -3471,7 +3492,7 @@ impl Clean for ty::VariantDef { name: Some(self.ident.clean(cx)), attrs: inline::load_attrs(cx, self.def_id).clean(cx), source: cx.tcx.def_span(self.def_id).clean(cx), - visibility: Some(Inherited), + visibility: Inherited, def_id: self.def_id, inner: VariantItem(Variant { kind }), stability: get_stability(cx, self.def_id), @@ -3574,16 +3595,6 @@ pub enum GenericArg { Const(Constant), } -impl fmt::Display for GenericArg { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - GenericArg::Lifetime(lt) => lt.fmt(f), - GenericArg::Type(ty) => ty.fmt(f), - GenericArg::Const(ct) => ct.fmt(f), - } - } -} - #[derive(Clone, PartialEq, Eq, Debug, Hash)] pub enum GenericArgs { AngleBracketed { @@ -3713,13 +3724,6 @@ impl Clean for ast::Name { } } -impl Clean for InternedString { - #[inline] - fn clean(&self, _: &DocContext<'_>) -> String { - self.to_string() - } -} - #[derive(Clone, Debug)] pub struct Typedef { pub type_: Type, @@ -3859,17 +3863,19 @@ impl Clean for hir::Mutability { } } -#[derive(Clone, PartialEq, Eq, Copy, Debug, Hash)] +#[derive(Clone, PartialEq, Debug)] pub enum ImplPolarity { Positive, Negative, } -impl Clean for hir::ImplPolarity { +impl Clean for ty::ImplPolarity { fn clean(&self, _: &DocContext<'_>) -> ImplPolarity { match self { - 
&hir::ImplPolarity::Positive => ImplPolarity::Positive, - &hir::ImplPolarity::Negative => ImplPolarity::Negative, + &ty::ImplPolarity::Positive | + // FIXME: do we want to do something else here? + &ty::ImplPolarity::Reservation => ImplPolarity::Positive, + &ty::ImplPolarity::Negative => ImplPolarity::Negative, } } } @@ -3901,6 +3907,7 @@ impl Clean> for doctree::Impl<'_> { let mut ret = Vec::new(); let trait_ = self.trait_.clean(cx); let items = self.items.iter().map(|ii| ii.clean(cx)).collect::>(); + let def_id = cx.tcx.hir().local_def_id(self.id); // If this impl block is an implementation of the Deref trait, then we // need to try inlining the target's inherent impl blocks as well. @@ -3919,7 +3926,7 @@ impl Clean> for doctree::Impl<'_> { name: None, attrs: self.attrs.clean(cx), source: self.whence.clean(cx), - def_id: cx.tcx.hir().local_def_id(self.id), + def_id, visibility: self.vis.clean(cx), stability: cx.stability(self.id).clean(cx), deprecation: cx.deprecation(self.id).clean(cx), @@ -3930,7 +3937,7 @@ impl Clean> for doctree::Impl<'_> { trait_, for_: self.for_.clean(cx), items, - polarity: Some(self.polarity.clean(cx)), + polarity: Some(cx.tcx.impl_polarity(def_id).clean(cx)), synthetic: false, blanket_impl: None, }) @@ -3987,7 +3994,6 @@ fn build_deref_target_impls(cx: &DocContext<'_>, Reference => None, Fn => None, Never => None, - CVarArgs => tcx.lang_items().va_list(), }; if let Some(did) = did { if !did.is_local() { @@ -4189,7 +4195,7 @@ fn name_from_pat(p: &hir::Pat) -> String { use rustc::hir::*; debug!("trying to get a name from pattern: {:?}", p); - match p.node { + match p.kind { PatKind::Wild => "_".to_string(), PatKind::Binding(_, _, ident, _) => ident.to_string(), PatKind::TupleStruct(ref p, ..) | PatKind::Path(ref p) => qpath_to_string(p), @@ -4275,7 +4281,7 @@ fn resolve_type(cx: &DocContext<'_>, return Generic(kw::SelfUpper.to_string()); } Res::Def(DefKind::TyParam, _) if path.segments.len() == 1 => { - return Generic(format!("{:#}", path)); + return Generic(format!("{:#}", path.print())); } Res::SelfTy(..) 
| Res::Def(DefKind::TyParam, _) @@ -4283,7 +4289,7 @@ fn resolve_type(cx: &DocContext<'_>, _ => false, }; let did = register_res(&*cx, path.res); - ResolvedPath { path: path, param_names: None, did: did, is_generic: is_generic } + ResolvedPath { path, param_names: None, did, is_generic } } pub fn register_res(cx: &DocContext<'_>, res: Res) -> DefId { @@ -4344,7 +4350,7 @@ impl Clean for doctree::Macro<'_> { name: Some(name.clone()), attrs: self.attrs.clean(cx), source: self.whence.clean(cx), - visibility: Some(Public), + visibility: Public, stability: cx.stability(self.hid).clean(cx), deprecation: cx.deprecation(self.hid).clean(cx), def_id: self.def_id, @@ -4372,7 +4378,7 @@ impl Clean for doctree::ProcMacro<'_> { name: Some(self.name.clean(cx)), attrs: self.attrs.clean(cx), source: self.whence.clean(cx), - visibility: Some(Public), + visibility: Public, stability: cx.stability(self.id).clean(cx), deprecation: cx.deprecation(self.id).clean(cx), def_id: cx.tcx.hir().local_def_id(self.id), @@ -4516,7 +4522,6 @@ struct RegionDeps<'tcx> { smaller: FxHashSet> } -#[derive(Eq, PartialEq, Hash, Debug)] enum SimpleBound { TraitBound(Vec, Vec, Vec, hir::TraitBoundModifier), Outlives(Lifetime), diff --git a/src/librustdoc/clean/simplify.rs b/src/librustdoc/clean/simplify.rs index 8758ab1969..853170542e 100644 --- a/src/librustdoc/clean/simplify.rs +++ b/src/librustdoc/clean/simplify.rs @@ -35,7 +35,7 @@ pub fn where_clauses(cx: &DocContext<'_>, clauses: Vec) -> Vec { match ty { clean::Generic(s) => params.entry(s).or_default() .extend(bounds), - t => tybounds.push((t, ty_bounds(bounds))), + t => tybounds.push((t, bounds)), } } WP::RegionPredicate { lifetime, bounds } => { @@ -45,11 +45,6 @@ pub fn where_clauses(cx: &DocContext<'_>, clauses: Vec) -> Vec { } } - // Simplify the type parameter bounds on all the generics - let mut params = params.into_iter().map(|(k, v)| { - (k, ty_bounds(v)) - }).collect::>(); - // Look for equality predicates on associated types that can be merged into // general bound predicates equalities.retain(|&(ref lhs, ref rhs)| { @@ -73,7 +68,7 @@ pub fn where_clauses(cx: &DocContext<'_>, clauses: Vec) -> Vec { // And finally, let's reassemble everything let mut clauses = Vec::new(); clauses.extend(lifetimes.into_iter().map(|(lt, bounds)| { - WP::RegionPredicate { lifetime: lt, bounds: bounds } + WP::RegionPredicate { lifetime: lt, bounds } })); clauses.extend(params.into_iter().map(|(k, v)| { WP::BoundPredicate { @@ -82,10 +77,10 @@ pub fn where_clauses(cx: &DocContext<'_>, clauses: Vec) -> Vec { } })); clauses.extend(tybounds.into_iter().map(|(ty, bounds)| { - WP::BoundPredicate { ty: ty, bounds: bounds } + WP::BoundPredicate { ty, bounds } })); clauses.extend(equalities.into_iter().map(|(lhs, rhs)| { - WP::EqPredicate { lhs: lhs, rhs: rhs } + WP::EqPredicate { lhs, rhs } })); clauses } @@ -122,9 +117,9 @@ pub fn merge_bounds( }, }); } - PP::Parenthesized { ref mut output, .. } => { - assert!(output.is_none()); - if *rhs != clean::Type::Tuple(Vec::new()) { + PP::Parenthesized { ref mut output, .. 
} => match output { + Some(o) => assert_eq!(o, rhs), + None => if *rhs != clean::Type::Tuple(Vec::new()) { *output = Some(rhs.clone()); } } @@ -137,7 +132,7 @@ pub fn ty_params(mut params: Vec) -> Vec { - *bounds = ty_bounds(mem::take(bounds)); + *bounds = mem::take(bounds); } _ => panic!("expected only type parameters"), } @@ -145,10 +140,6 @@ pub fn ty_params(mut params: Vec) -> Vec) -> Vec { - bounds -} - fn trait_is_same_or_supertrait(cx: &DocContext<'_>, child: DefId, trait_: DefId) -> bool { if child == trait_ { diff --git a/src/librustdoc/config.rs b/src/librustdoc/config.rs index 19ea781430..0b8d4d6c30 100644 --- a/src/librustdoc/config.rs +++ b/src/librustdoc/config.rs @@ -53,6 +53,8 @@ pub struct Options { pub codegen_options_strs: Vec, /// Debugging (`-Z`) options to pass to the compiler. pub debugging_options: DebuggingOptions, + /// Debugging (`-Z`) options strings to pass to the compiler. + pub debugging_options_strs: Vec, /// The target used to compile the crate against. pub target: TargetTriple, /// Edition used when reading the crate. Defaults to "2015". Also used by default when @@ -343,11 +345,7 @@ impl Options { let output = matches.opt_str("o") .map(|s| PathBuf::from(&s)) .unwrap_or_else(|| PathBuf::from("doc")); - let mut cfgs = matches.opt_strs("cfg"); - cfgs.push("rustdoc".to_string()); - if should_test { - cfgs.push("doctest".to_string()); - } + let cfgs = matches.opt_strs("cfg"); let extension_css = matches.opt_str("e").map(|s| PathBuf::from(&s)); @@ -482,6 +480,7 @@ impl Options { let generate_redirect_pages = matches.opt_present("generate-redirect-pages"); let test_builder = matches.opt_str("test-builder").map(PathBuf::from); let codegen_options_strs = matches.opt_strs("C"); + let debugging_options_strs = matches.opt_strs("Z"); let lib_strs = matches.opt_strs("L"); let extern_strs = matches.opt_strs("extern"); let runtool = matches.opt_str("runtool"); @@ -503,6 +502,7 @@ impl Options { codegen_options, codegen_options_strs, debugging_options, + debugging_options_strs, target, edition, maybe_sysroot, diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index 010e4cf6cd..b227f432a4 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -5,14 +5,13 @@ use rustc::hir::HirId; use rustc::middle::cstore::CrateStore; use rustc::middle::privacy::AccessLevels; use rustc::ty::{Ty, TyCtxt}; -use rustc::lint::{self, LintPass}; +use rustc::lint; use rustc::session::config::ErrorOutputType; use rustc::session::DiagnosticOutput; use rustc::util::nodemap::{FxHashMap, FxHashSet}; use rustc_interface::interface; use rustc_driver::abort_on_err; use rustc_resolve as resolve; -use rustc_metadata::cstore::CStore; use syntax::source_map; use syntax::attr; @@ -43,7 +42,6 @@ pub struct DocContext<'tcx> { pub tcx: TyCtxt<'tcx>, pub resolver: Rc>, - pub cstore: Lrc, /// Later on moved into `html::render::CACHE_KEY` pub renderinfo: RefCell, /// Later on moved through `clean::Crate` into `html::render::CACHE_KEY` @@ -117,9 +115,7 @@ impl<'tcx> DocContext<'tcx> { .def_path_table() .next_id() } else { - self.cstore - .def_path_table(crate_num) - .next_id() + self.enter_resolver(|r| r.cstore().def_path_table(crate_num).next_id()) }; DefId { @@ -234,7 +230,7 @@ pub fn run_core(options: RustdocOptions) -> (clean::Crate, RenderInfo, RenderOpt error_format, libs, externs, - cfgs, + mut cfgs, codegen_options, debugging_options, target, @@ -250,6 +246,9 @@ pub fn run_core(options: RustdocOptions) -> (clean::Crate, RenderInfo, RenderOpt .. 
} = options; + // Add the rustdoc cfg into the doc build. + cfgs.push("rustdoc".to_string()); + let cpath = Some(input.clone()); let input = Input::File(input); @@ -270,10 +269,9 @@ pub fn run_core(options: RustdocOptions) -> (clean::Crate, RenderInfo, RenderOpt whitelisted_lints.extend(lint_opts.iter().map(|(lint, _)| lint).cloned()); let lints = || { - lint::builtin::HardwiredLints - .get_lints() + lint::builtin::HardwiredLints::get_lints() .into_iter() - .chain(rustc_lint::SoftLints.get_lints().into_iter()) + .chain(rustc_lint::SoftLints::get_lints().into_iter()) }; let lint_opts = lints().filter_map(|lint| { @@ -326,7 +324,7 @@ pub fn run_core(options: RustdocOptions) -> (clean::Crate, RenderInfo, RenderOpt let config = interface::Config { opts: sessopts, - crate_cfg: config::parse_cfgspecs(cfgs), + crate_cfg: interface::parse_cfgspecs(cfgs), input, input_path: cpath, output_file: None, @@ -336,6 +334,7 @@ pub fn run_core(options: RustdocOptions) -> (clean::Crate, RenderInfo, RenderOpt stderr: None, crate_name, lint_caps, + register_lints: None, }; interface::run_compiler_in_existing_thread_pool(config, |compiler| { @@ -373,7 +372,6 @@ pub fn run_core(options: RustdocOptions) -> (clean::Crate, RenderInfo, RenderOpt let mut ctxt = DocContext { tcx, resolver, - cstore: compiler.cstore().clone(), external_traits: Default::default(), active_extern_traits: Default::default(), renderinfo: RefCell::new(renderinfo), diff --git a/src/librustdoc/doctree.rs b/src/librustdoc/doctree.rs index 6e453561f6..002ca6fe98 100644 --- a/src/librustdoc/doctree.rs +++ b/src/librustdoc/doctree.rs @@ -4,7 +4,7 @@ pub use self::StructType::*; use syntax::ast; use syntax::ast::Name; -use syntax::ext::base::MacroKind; +use syntax_pos::hygiene::MacroKind; use syntax_pos::{self, Span}; use rustc::hir; @@ -59,7 +59,7 @@ impl Module<'hir> { fns : Vec::new(), mods : Vec::new(), typedefs : Vec::new(), - opaque_tys : Vec::new(), + opaque_tys : Vec::new(), statics : Vec::new(), constants : Vec::new(), traits : Vec::new(), diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs index dcd32192ff..4cde868201 100644 --- a/src/librustdoc/html/format.rs +++ b/src/librustdoc/html/format.rs @@ -99,10 +99,6 @@ impl Buffer { self.into_inner() } - crate fn with_formatter) -> fmt::Result>(&mut self, t: T) { - self.from_display(display_fn(move |f| (t)(f))); - } - crate fn from_display(&mut self, t: T) { if self.for_html { write!(self, "{}", t); @@ -112,30 +108,6 @@ impl Buffer { } } -/// Helper to render an optional visibility with a space after it (if the -/// visibility is preset) -#[derive(Copy, Clone)] -pub struct VisSpace<'a>(pub &'a Option); -/// Similarly to VisSpace, this structure is used to render a function style with a -/// space after it. -#[derive(Copy, Clone)] -pub struct UnsafetySpace(pub hir::Unsafety); -/// Similarly to VisSpace, this structure is used to render a function constness -/// with a space after it. -#[derive(Copy, Clone)] -pub struct ConstnessSpace(pub hir::Constness); -/// Similarly to VisSpace, this structure is used to render a function asyncness -/// with a space after it. -#[derive(Copy, Clone)] -pub struct AsyncSpace(pub hir::IsAsync); -/// Similar to VisSpace, but used for mutability -#[derive(Copy, Clone)] -pub struct MutableSpace(pub clean::Mutability); -/// Wrapper struct for emitting type parameter bounds. 
-pub struct GenericBounds<'a>(pub &'a [clean::GenericBound]); -pub struct AbiSpace(pub Abi); -pub struct DefaultSpace(pub bool); - /// Wrapper struct for properly emitting a function or method declaration. pub struct Function<'a> { /// The declaration to emit. @@ -161,102 +133,89 @@ pub struct WhereClause<'a>{ pub end_newline: bool, } -impl<'a> VisSpace<'a> { - pub fn get(self) -> &'a Option { - let VisSpace(v) = self; v - } -} - -impl UnsafetySpace { - pub fn get(&self) -> hir::Unsafety { - let UnsafetySpace(v) = *self; v - } -} - -impl ConstnessSpace { - pub fn get(&self) -> hir::Constness { - let ConstnessSpace(v) = *self; v - } -} - -fn comma_sep(items: &[T]) -> impl fmt::Display + '_ { +fn comma_sep(items: impl Iterator) -> impl fmt::Display { display_fn(move |f| { - for (i, item) in items.iter().enumerate() { + for (i, item) in items.enumerate() { if i != 0 { write!(f, ", ")?; } - fmt::Display::fmt(item, f)?; + fmt::Display::fmt(&item, f)?; } Ok(()) }) } -impl<'a> fmt::Display for GenericBounds<'a> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { +crate fn print_generic_bounds(bounds: &[clean::GenericBound]) -> impl fmt::Display + '_ { + display_fn(move |f| { let mut bounds_dup = FxHashSet::default(); - let &GenericBounds(bounds) = self; - for (i, bound) in bounds.iter().filter(|b| bounds_dup.insert(b.to_string())).enumerate() { + for (i, bound) in bounds.iter().filter(|b| { + bounds_dup.insert(b.print().to_string()) + }).enumerate() { if i > 0 { f.write_str(" + ")?; } - fmt::Display::fmt(bound, f)?; + fmt::Display::fmt(&bound.print(), f)?; } Ok(()) + }) +} + +impl clean::GenericParamDef { + crate fn print(&self) -> impl fmt::Display + '_ { + display_fn(move |f| { + match self.kind { + clean::GenericParamDefKind::Lifetime => write!(f, "{}", self.name), + clean::GenericParamDefKind::Type { ref bounds, ref default, .. } => { + f.write_str(&self.name)?; + + if !bounds.is_empty() { + if f.alternate() { + write!(f, ": {:#}", print_generic_bounds(bounds))?; + } else { + write!(f, ": {}", print_generic_bounds(bounds))?; + } + } + + if let Some(ref ty) = default { + if f.alternate() { + write!(f, " = {:#}", ty.print())?; + } else { + write!(f, " = {}", ty.print())?; + } + } + + Ok(()) + } + clean::GenericParamDefKind::Const { ref ty, .. } => { + f.write_str("const ")?; + f.write_str(&self.name)?; + + if f.alternate() { + write!(f, ": {:#}", ty.print()) + } else { + write!(f, ": {}", ty.print()) + } + } + } + }) } } -impl fmt::Display for clean::GenericParamDef { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.kind { - clean::GenericParamDefKind::Lifetime => write!(f, "{}", self.name), - clean::GenericParamDefKind::Type { ref bounds, ref default, .. } => { - f.write_str(&self.name)?; - - if !bounds.is_empty() { - if f.alternate() { - write!(f, ": {:#}", GenericBounds(bounds))?; - } else { - write!(f, ": {}", GenericBounds(bounds))?; - } - } - - if let Some(ref ty) = default { - if f.alternate() { - write!(f, " = {:#}", ty)?; - } else { - write!(f, " = {}", ty)?; - } - } - - Ok(()) +impl clean::Generics { + crate fn print(&self) -> impl fmt::Display + '_ { + display_fn(move |f| { + let real_params = self.params + .iter() + .filter(|p| !p.is_synthetic_type_param()) + .collect::>(); + if real_params.is_empty() { + return Ok(()); } - clean::GenericParamDefKind::Const { ref ty, .. 
} => { - f.write_str("const ")?; - f.write_str(&self.name)?; - - if f.alternate() { - write!(f, ": {:#}", ty) - } else { - write!(f, ": {}", ty) - } + if f.alternate() { + write!(f, "<{:#}>", comma_sep(real_params.iter().map(|g| g.print()))) + } else { + write!(f, "<{}>", comma_sep(real_params.iter().map(|g| g.print()))) } - } - } -} - -impl fmt::Display for clean::Generics { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let real_params = self.params - .iter() - .filter(|p| !p.is_synthetic_type_param()) - .collect::>(); - if real_params.is_empty() { - return Ok(()); - } - if f.alternate() { - write!(f, "<{:#}>", comma_sep(&real_params)) - } else { - write!(f, "<{}>", comma_sep(&real_params)) - } + }) } } @@ -287,24 +246,26 @@ impl<'a> fmt::Display for WhereClause<'a> { &clean::WherePredicate::BoundPredicate { ref ty, ref bounds } => { let bounds = bounds; if f.alternate() { - clause.push_str(&format!("{:#}: {:#}", ty, GenericBounds(bounds))); + clause.push_str(&format!("{:#}: {:#}", + ty.print(), print_generic_bounds(bounds))); } else { - clause.push_str(&format!("{}: {}", ty, GenericBounds(bounds))); + clause.push_str(&format!("{}: {}", + ty.print(), print_generic_bounds(bounds))); } } &clean::WherePredicate::RegionPredicate { ref lifetime, ref bounds } => { clause.push_str(&format!("{}: {}", - lifetime, + lifetime.print(), bounds.iter() - .map(|b| b.to_string()) + .map(|b| b.print().to_string()) .collect::>() .join(" + "))); } &clean::WherePredicate::EqPredicate { ref lhs, ref rhs } => { if f.alternate() { - clause.push_str(&format!("{:#} == {:#}", lhs, rhs)); + clause.push_str(&format!("{:#} == {:#}", lhs.print(), rhs.print())); } else { - clause.push_str(&format!("{} == {}", lhs, rhs)); + clause.push_str(&format!("{} == {}", lhs.print(), rhs.print())); } } } @@ -336,153 +297,164 @@ impl<'a> fmt::Display for WhereClause<'a> { } } -impl fmt::Display for clean::Lifetime { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(self.get_ref())?; - Ok(()) +impl clean::Lifetime { + crate fn print(&self) -> &str { + self.get_ref() } } -impl fmt::Display for clean::Constant { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Display::fmt(&self.expr, f) +impl clean::Constant { + crate fn print(&self) -> &str { + &self.expr } } -impl fmt::Display for clean::PolyTrait { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if !self.generic_params.is_empty() { - if f.alternate() { - write!(f, "for<{:#}> ", comma_sep(&self.generic_params))?; - } else { - write!(f, "for<{}> ", comma_sep(&self.generic_params))?; - } - } - if f.alternate() { - write!(f, "{:#}", self.trait_) - } else { - write!(f, "{}", self.trait_) - } - } -} - -impl fmt::Display for clean::GenericBound { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self { - clean::GenericBound::Outlives(ref lt) => { - write!(f, "{}", *lt) - } - clean::GenericBound::TraitBound(ref ty, modifier) => { - let modifier_str = match modifier { - hir::TraitBoundModifier::None => "", - hir::TraitBoundModifier::Maybe => "?", - }; +impl clean::PolyTrait { + fn print(&self) -> impl fmt::Display + '_ { + display_fn(move |f| { + if !self.generic_params.is_empty() { if f.alternate() { - write!(f, "{}{:#}", modifier_str, *ty) + write!(f, "for<{:#}> ", + comma_sep(self.generic_params.iter().map(|g| g.print())))?; } else { - write!(f, "{}{}", modifier_str, *ty) + write!(f, "for<{}> ", + comma_sep(self.generic_params.iter().map(|g| g.print())))?; } } - } + if f.alternate() { + 
write!(f, "{:#}", self.trait_.print()) + } else { + write!(f, "{}", self.trait_.print()) + } + }) } } -impl fmt::Display for clean::GenericArgs { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self { - clean::GenericArgs::AngleBracketed { ref args, ref bindings } => { - if !args.is_empty() || !bindings.is_empty() { +impl clean::GenericBound { + crate fn print(&self) -> impl fmt::Display + '_ { + display_fn(move |f| { + match self { + clean::GenericBound::Outlives(lt) => { + write!(f, "{}", lt.print()) + } + clean::GenericBound::TraitBound(ty, modifier) => { + let modifier_str = match modifier { + hir::TraitBoundModifier::None => "", + hir::TraitBoundModifier::Maybe => "?", + }; if f.alternate() { - f.write_str("<")?; + write!(f, "{}{:#}", modifier_str, ty.print()) } else { - f.write_str("<")?; + write!(f, "{}{}", modifier_str, ty.print()) } + } + } + }) + } +} + +impl clean::GenericArgs { + fn print(&self) -> impl fmt::Display + '_ { + display_fn(move |f| { + match *self { + clean::GenericArgs::AngleBracketed { ref args, ref bindings } => { + if !args.is_empty() || !bindings.is_empty() { + if f.alternate() { + f.write_str("<")?; + } else { + f.write_str("<")?; + } + let mut comma = false; + for arg in args { + if comma { + f.write_str(", ")?; + } + comma = true; + if f.alternate() { + write!(f, "{:#}", arg.print())?; + } else { + write!(f, "{}", arg.print())?; + } + } + for binding in bindings { + if comma { + f.write_str(", ")?; + } + comma = true; + if f.alternate() { + write!(f, "{:#}", binding.print())?; + } else { + write!(f, "{}", binding.print())?; + } + } + if f.alternate() { + f.write_str(">")?; + } else { + f.write_str(">")?; + } + } + } + clean::GenericArgs::Parenthesized { ref inputs, ref output } => { + f.write_str("(")?; let mut comma = false; - for arg in args { + for ty in inputs { if comma { f.write_str(", ")?; } comma = true; if f.alternate() { - write!(f, "{:#}", *arg)?; + write!(f, "{:#}", ty.print())?; } else { - write!(f, "{}", *arg)?; + write!(f, "{}", ty.print())?; } } - for binding in bindings { - if comma { - f.write_str(", ")?; - } - comma = true; + f.write_str(")")?; + if let Some(ref ty) = *output { if f.alternate() { - write!(f, "{:#}", *binding)?; + write!(f, " -> {:#}", ty.print())?; } else { - write!(f, "{}", *binding)?; + write!(f, " -> {}", ty.print())?; } } - if f.alternate() { - f.write_str(">")?; - } else { - f.write_str(">")?; - } } } - clean::GenericArgs::Parenthesized { ref inputs, ref output } => { - f.write_str("(")?; - let mut comma = false; - for ty in inputs { - if comma { - f.write_str(", ")?; - } - comma = true; - if f.alternate() { - write!(f, "{:#}", *ty)?; - } else { - write!(f, "{}", *ty)?; - } - } - f.write_str(")")?; - if let Some(ref ty) = *output { - if f.alternate() { - write!(f, " -> {:#}", ty)?; - } else { - write!(f, " -> {}", ty)?; - } - } - } - } - Ok(()) + Ok(()) + }) } } -impl fmt::Display for clean::PathSegment { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.name)?; - if f.alternate() { - write!(f, "{:#}", self.args) - } else { - write!(f, "{}", self.args) - } +impl clean::PathSegment { + crate fn print(&self) -> impl fmt::Display + '_ { + display_fn(move |f| { + f.write_str(&self.name)?; + if f.alternate() { + write!(f, "{:#}", self.args.print()) + } else { + write!(f, "{}", self.args.print()) + } + }) } } -impl fmt::Display for clean::Path { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if self.global { - f.write_str("::")? 
- } - - for (i, seg) in self.segments.iter().enumerate() { - if i > 0 { +impl clean::Path { + crate fn print(&self) -> impl fmt::Display + '_ { + display_fn(move |f| { + if self.global { f.write_str("::")? } - if f.alternate() { - write!(f, "{:#}", seg)?; - } else { - write!(f, "{}", seg)?; + + for (i, seg) in self.segments.iter().enumerate() { + if i > 0 { + f.write_str("::")? + } + if f.alternate() { + write!(f, "{:#}", seg.print())?; + } else { + write!(f, "{}", seg.print())?; + } } - } - Ok(()) + Ok(()) + }) } } @@ -516,7 +488,7 @@ pub fn href(did: DefId) -> Option<(String, ItemType, Vec)> { url.push_str("/index.html"); } _ => { - url.push_str(shortty.css_class()); + url.push_str(shortty.as_str()); url.push_str("."); url.push_str(fqp.last().unwrap()); url.push_str(".html"); @@ -537,7 +509,7 @@ fn resolved_path(w: &mut fmt::Formatter<'_>, did: DefId, path: &clean::Path, } } if w.alternate() { - write!(w, "{}{:#}", &last.name, last.args)?; + write!(w, "{}{:#}", &last.name, last.args.print())?; } else { let path = if use_absolute { if let Some((_, _, fqp)) = href(did) { @@ -550,7 +522,7 @@ fn resolved_path(w: &mut fmt::Formatter<'_>, did: DefId, path: &clean::Path, } else { anchor(did, &last.name).to_string() }; - write!(w, "{}{}", path, last.args)?; + write!(w, "{}{}", path, last.args.print())?; } Ok(()) } @@ -606,7 +578,7 @@ fn tybounds(param_names: &Option>) -> impl fmt::Display Some(ref params) => { for param in params { write!(f, " + ")?; - fmt::Display::fmt(param, f)?; + fmt::Display::fmt(¶m.print(), f)?; } Ok(()) } @@ -644,14 +616,15 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter<'_>, use_absolute: bool) -> clean::BareFunction(ref decl) => { if f.alternate() { write!(f, "{}{:#}fn{:#}{:#}", - UnsafetySpace(decl.unsafety), - AbiSpace(decl.abi), - comma_sep(&decl.generic_params), - decl.decl) + decl.unsafety.print_with_space(), + print_abi_with_space(decl.abi), + decl.print_generic_params(), + decl.decl.print()) } else { - write!(f, "{}{}", UnsafetySpace(decl.unsafety), AbiSpace(decl.abi))?; + write!(f, "{}{}", + decl.unsafety.print_with_space(), print_abi_with_space(decl.abi))?; primitive_link(f, PrimitiveType::Fn, "fn")?; - write!(f, "{}{}", comma_sep(&decl.generic_params), decl.decl) + write!(f, "{}{}", decl.print_generic_params(), decl.decl.print()) } } clean::Tuple(ref typs) => { @@ -660,28 +633,30 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter<'_>, use_absolute: bool) -> &[ref one] => { primitive_link(f, PrimitiveType::Tuple, "(")?; // Carry `f.alternate()` into this display w/o branching manually. 
- fmt::Display::fmt(one, f)?; + fmt::Display::fmt(&one.print(), f)?; primitive_link(f, PrimitiveType::Tuple, ",)") } many => { primitive_link(f, PrimitiveType::Tuple, "(")?; - fmt::Display::fmt(&comma_sep(many), f)?; + for (i, item) in many.iter().enumerate() { + if i != 0 { write!(f, ", ")?; } + fmt::Display::fmt(&item.print(), f)?; + } primitive_link(f, PrimitiveType::Tuple, ")") } } } clean::Slice(ref t) => { primitive_link(f, PrimitiveType::Slice, "[")?; - fmt::Display::fmt(t, f)?; + fmt::Display::fmt(&t.print(), f)?; primitive_link(f, PrimitiveType::Slice, "]") } clean::Array(ref t, ref n) => { primitive_link(f, PrimitiveType::Array, "[")?; - fmt::Display::fmt(t, f)?; + fmt::Display::fmt(&t.print(), f)?; primitive_link(f, PrimitiveType::Array, &format!("; {}]", n)) } clean::Never => primitive_link(f, PrimitiveType::Never, "!"), - clean::CVarArgs => primitive_link(f, PrimitiveType::CVarArgs, "..."), clean::RawPointer(m, ref t) => { let m = match m { clean::Immutable => "const", @@ -691,24 +666,24 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter<'_>, use_absolute: bool) -> clean::Generic(_) | clean::ResolvedPath {is_generic: true, ..} => { if f.alternate() { primitive_link(f, clean::PrimitiveType::RawPointer, - &format!("*{} {:#}", m, t)) + &format!("*{} {:#}", m, t.print())) } else { primitive_link(f, clean::PrimitiveType::RawPointer, - &format!("*{} {}", m, t)) + &format!("*{} {}", m, t.print())) } } _ => { primitive_link(f, clean::PrimitiveType::RawPointer, &format!("*{} ", m))?; - fmt::Display::fmt(t, f) + fmt::Display::fmt(&t.print(), f) } } } clean::BorrowedRef{ lifetime: ref l, mutability, type_: ref ty} => { - let lt = match *l { - Some(ref l) => format!("{} ", *l), - _ => String::new(), + let lt = match l { + Some(l) => format!("{} ", l.print()), + _ => String::new() }; - let m = MutableSpace(mutability); + let m = mutability.print_with_space(); let amp = if f.alternate() { "&".to_string() } else { @@ -720,19 +695,19 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter<'_>, use_absolute: bool) -> clean::Generic(_) => { if f.alternate() { primitive_link(f, PrimitiveType::Slice, - &format!("{}{}{}[{:#}]", amp, lt, m, **bt)) + &format!("{}{}{}[{:#}]", amp, lt, m, bt.print())) } else { primitive_link(f, PrimitiveType::Slice, - &format!("{}{}{}[{}]", amp, lt, m, **bt)) + &format!("{}{}{}[{}]", amp, lt, m, bt.print())) } } _ => { primitive_link(f, PrimitiveType::Slice, &format!("{}{}{}[", amp, lt, m))?; if f.alternate() { - write!(f, "{:#}", **bt)?; + write!(f, "{:#}", bt.print())?; } else { - write!(f, "{}", **bt)?; + write!(f, "{}", bt.print())?; } primitive_link(f, PrimitiveType::Slice, "]") } @@ -756,9 +731,9 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter<'_>, use_absolute: bool) -> } clean::ImplTrait(ref bounds) => { if f.alternate() { - write!(f, "impl {:#}", GenericBounds(bounds)) + write!(f, "impl {:#}", print_generic_bounds(bounds)) } else { - write!(f, "impl {}", GenericBounds(bounds)) + write!(f, "impl {}", print_generic_bounds(bounds)) } } clean::QPath { ref name, ref self_type, ref trait_ } => { @@ -770,15 +745,15 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter<'_>, use_absolute: bool) -> }; if f.alternate() { if should_show_cast { - write!(f, "<{:#} as {:#}>::", self_type, trait_)? + write!(f, "<{:#} as {:#}>::", self_type.print(), trait_.print())? } else { - write!(f, "{:#}::", self_type)? + write!(f, "{:#}::", self_type.print())? } } else { if should_show_cast { - write!(f, "<{} as {}>::", self_type, trait_)? 
+ write!(f, "<{} as {}>::", self_type.print(), trait_.print())? } else { - write!(f, "{}::", self_type)? + write!(f, "{}::", self_type.print())? } }; match *trait_ { @@ -818,55 +793,64 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter<'_>, use_absolute: bool) -> } } -impl fmt::Display for clean::Type { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt_type(self, f, false) +impl clean::Type { + crate fn print(&self) -> impl fmt::Display + '_ { + display_fn(move |f| { + fmt_type(self, f, false) + }) } } -fn fmt_impl(i: &clean::Impl, - f: &mut fmt::Formatter<'_>, - link_trait: bool, - use_absolute: bool) -> fmt::Result { - if f.alternate() { - write!(f, "impl{:#} ", i.generics)?; - } else { - write!(f, "impl{} ", i.generics)?; +impl clean::Impl { + crate fn print(&self) -> impl fmt::Display + '_ { + self.print_inner(true, false) } - if let Some(ref ty) = i.trait_ { - if i.polarity == Some(clean::ImplPolarity::Negative) { - write!(f, "!")?; - } - - if link_trait { - fmt::Display::fmt(ty, f)?; - } else { - match *ty { - clean::ResolvedPath { param_names: None, ref path, is_generic: false, .. } => { - let last = path.segments.last().unwrap(); - fmt::Display::fmt(&last.name, f)?; - fmt::Display::fmt(&last.args, f)?; - } - _ => unreachable!(), + fn print_inner( + &self, + link_trait: bool, + use_absolute: bool, + ) -> impl fmt::Display + '_ { + display_fn(move |f| { + if f.alternate() { + write!(f, "impl{:#} ", self.generics.print())?; + } else { + write!(f, "impl{} ", self.generics.print())?; } - } - write!(f, " for ")?; - } - if let Some(ref ty) = i.blanket_impl { - fmt_type(ty, f, use_absolute)?; - } else { - fmt_type(&i.for_, f, use_absolute)?; - } + if let Some(ref ty) = self.trait_ { + if self.polarity == Some(clean::ImplPolarity::Negative) { + write!(f, "!")?; + } - fmt::Display::fmt(&WhereClause { gens: &i.generics, indent: 0, end_newline: true }, f)?; - Ok(()) -} + if link_trait { + fmt::Display::fmt(&ty.print(), f)?; + } else { + match ty { + clean::ResolvedPath { param_names: None, path, is_generic: false, .. 
} => { + let last = path.segments.last().unwrap(); + fmt::Display::fmt(&last.name, f)?; + fmt::Display::fmt(&last.args.print(), f)?; + } + _ => unreachable!(), + } + } + write!(f, " for ")?; + } -impl fmt::Display for clean::Impl { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt_impl(self, f, true, false) + if let Some(ref ty) = self.blanket_impl { + fmt_type(ty, f, use_absolute)?; + } else { + fmt_type(&self.for_, f, use_absolute)?; + } + + fmt::Display::fmt(&WhereClause { + gens: &self.generics, + indent: 0, + end_newline: true, + }, f)?; + Ok(()) + }) } } @@ -874,275 +858,324 @@ impl fmt::Display for clean::Impl { pub fn fmt_impl_for_trait_page(i: &clean::Impl, f: &mut Buffer, use_absolute: bool) { - f.with_formatter(|f| fmt_impl(i, f, false, use_absolute)) + f.from_display(i.print_inner(false, use_absolute)) } -impl fmt::Display for clean::Arguments { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - for (i, input) in self.values.iter().enumerate() { - if !input.name.is_empty() { - write!(f, "{}: ", input.name)?; - } - if f.alternate() { - write!(f, "{:#}", input.type_)?; - } else { - write!(f, "{}", input.type_)?; - } - if i + 1 < self.values.len() { write!(f, ", ")?; } - } - Ok(()) - } -} - -impl fmt::Display for clean::FunctionRetTy { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self { - clean::Return(clean::Tuple(ref tys)) if tys.is_empty() => Ok(()), - clean::Return(ref ty) if f.alternate() => write!(f, " -> {:#}", ty), - clean::Return(ref ty) => write!(f, " -> {}", ty), - clean::DefaultReturn => Ok(()), - } - } -} - -impl fmt::Display for clean::FnDecl { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if f.alternate() { - write!(f, "({args:#}){arrow:#}", args = self.inputs, arrow = self.output) - } else { - write!(f, "({args}){arrow}", args = self.inputs, arrow = self.output) - } - } -} - -impl<'a> fmt::Display for Function<'a> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let &Function { decl, header_len, indent, asyncness } = self; - let amp = if f.alternate() { "&" } else { "&" }; - let mut args = String::new(); - let mut args_plain = String::new(); - for (i, input) in decl.inputs.values.iter().enumerate() { - if i == 0 { - args.push_str("
"); - } - - if let Some(selfty) = input.to_self() { - match selfty { - clean::SelfValue => { - args.push_str("self"); - args_plain.push_str("self"); - } - clean::SelfBorrowed(Some(ref lt), mtbl) => { - args.push_str(&format!("{}{} {}self", amp, *lt, MutableSpace(mtbl))); - args_plain.push_str(&format!("&{} {}self", *lt, MutableSpace(mtbl))); - } - clean::SelfBorrowed(None, mtbl) => { - args.push_str(&format!("{}{}self", amp, MutableSpace(mtbl))); - args_plain.push_str(&format!("&{}self", MutableSpace(mtbl))); - } - clean::SelfExplicit(ref typ) => { - if f.alternate() { - args.push_str(&format!("self: {:#}", *typ)); - } else { - args.push_str(&format!("self: {}", *typ)); - } - args_plain.push_str(&format!("self: {:#}", *typ)); - } - } - } else { - if i > 0 { - args.push_str("
"); - args_plain.push_str(" "); - } +impl clean::Arguments { + crate fn print(&self) -> impl fmt::Display + '_ { + display_fn(move |f| { + for (i, input) in self.values.iter().enumerate() { if !input.name.is_empty() { - args.push_str(&format!("{}: ", input.name)); - args_plain.push_str(&format!("{}: ", input.name)); + write!(f, "{}: ", input.name)?; } - if f.alternate() { - args.push_str(&format!("{:#}", input.type_)); + write!(f, "{:#}", input.type_.print())?; } else { - args.push_str(&input.type_.to_string()); + write!(f, "{}", input.type_.print())?; } - args_plain.push_str(&format!("{:#}", input.type_)); + if i + 1 < self.values.len() { write!(f, ", ")?; } } - if i + 1 < decl.inputs.values.len() { - args.push(','); - args_plain.push(','); - } - } - - let args_plain = format!("({})", args_plain); - - let output = if let hir::IsAsync::Async = asyncness { - Cow::Owned(decl.sugared_async_return_type()) - } else { - Cow::Borrowed(&decl.output) - }; - - let arrow_plain = format!("{:#}", &output); - let arrow = if f.alternate() { - format!("{:#}", &output) - } else { - output.to_string() - }; - - let declaration_len = header_len + args_plain.len() + arrow_plain.len(); - let output = if declaration_len > 80 { - let full_pad = format!("
{}", " ".repeat(indent + 4)); - let close_pad = format!("
{}", " ".repeat(indent)); - format!("({args}{close}){arrow}", - args = args.replace("
", &full_pad), - close = close_pad, - arrow = arrow) - } else { - format!("({args}){arrow}", args = args.replace("
", ""), arrow = arrow) - }; - - if f.alternate() { - write!(f, "{}", output.replace("
", "\n")) - } else { - write!(f, "{}", output) - } + Ok(()) + }) } } -impl<'a> fmt::Display for VisSpace<'a> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self.get() { - Some(clean::Public) => f.write_str("pub "), - Some(clean::Inherited) | None => Ok(()), - Some(clean::Visibility::Crate) => write!(f, "pub(crate) "), - Some(clean::Visibility::Restricted(did, ref path)) => { - f.write_str("pub(")?; - if path.segments.len() != 1 - || (path.segments[0].name != "self" && path.segments[0].name != "super") - { - f.write_str("in ")?; +impl clean::FunctionRetTy { + crate fn print(&self) -> impl fmt::Display + '_ { + display_fn(move |f| { + match self { + clean::Return(clean::Tuple(tys)) if tys.is_empty() => Ok(()), + clean::Return(ty) if f.alternate() => write!(f, " -> {:#}", ty.print()), + clean::Return(ty) => write!(f, " -> {}", ty.print()), + clean::DefaultReturn => Ok(()), + } + }) + } +} + +impl clean::BareFunctionDecl { + fn print_generic_params(&self) -> impl fmt::Display + '_ { + comma_sep(self.generic_params.iter().map(|g| g.print())) + } +} + +impl clean::FnDecl { + crate fn print(&self) -> impl fmt::Display + '_ { + display_fn(move |f| { + let ellipsis = if self.c_variadic { ", ..." } else { "" }; + if f.alternate() { + write!(f, + "({args:#}{ellipsis}){arrow:#}", + args = self.inputs.print(), ellipsis = ellipsis, arrow = self.output.print()) + } else { + write!(f, + "({args}{ellipsis}){arrow}", + args = self.inputs.print(), ellipsis = ellipsis, arrow = self.output.print()) + } + }) + } +} + + +impl Function<'_> { + crate fn print(&self) -> impl fmt::Display + '_ { + display_fn(move |f| { + let &Function { decl, header_len, indent, asyncness } = self; + let amp = if f.alternate() { "&" } else { "&" }; + let mut args = String::new(); + let mut args_plain = String::new(); + for (i, input) in decl.inputs.values.iter().enumerate() { + if i == 0 { + args.push_str("
"); } - resolved_path(f, did, path, true, false)?; - f.write_str(") ") - } - } - } -} -impl fmt::Display for UnsafetySpace { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.get() { - hir::Unsafety::Unsafe => write!(f, "unsafe "), - hir::Unsafety::Normal => Ok(()) - } - } -} - -impl fmt::Display for ConstnessSpace { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.get() { - hir::Constness::Const => write!(f, "const "), - hir::Constness::NotConst => Ok(()) - } - } -} - -impl fmt::Display for AsyncSpace { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.0 { - hir::IsAsync::Async => write!(f, "async "), - hir::IsAsync::NotAsync => Ok(()), - } - } -} - -impl fmt::Display for clean::Import { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self { - clean::Import::Simple(ref name, ref src) => { - if *name == src.path.last_name() { - write!(f, "use {};", *src) + if let Some(selfty) = input.to_self() { + match selfty { + clean::SelfValue => { + args.push_str("self"); + args_plain.push_str("self"); + } + clean::SelfBorrowed(Some(ref lt), mtbl) => { + args.push_str( + &format!("{}{} {}self", amp, lt.print(), mtbl.print_with_space())); + args_plain.push_str( + &format!("&{} {}self", lt.print(), mtbl.print_with_space())); + } + clean::SelfBorrowed(None, mtbl) => { + args.push_str(&format!("{}{}self", amp, mtbl.print_with_space())); + args_plain.push_str(&format!("&{}self", mtbl.print_with_space())); + } + clean::SelfExplicit(ref typ) => { + if f.alternate() { + args.push_str(&format!("self: {:#}", typ.print())); + } else { + args.push_str(&format!("self: {}", typ.print())); + } + args_plain.push_str(&format!("self: {:#}", typ.print())); + } + } } else { - write!(f, "use {} as {};", *src, *name) - } - } - clean::Import::Glob(ref src) => { - if src.path.segments.is_empty() { - write!(f, "use *;") - } else { - write!(f, "use {}::*;", *src) - } - } - } - } -} - -impl fmt::Display for clean::ImportSource { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.did { - Some(did) => resolved_path(f, did, &self.path, true, false), - _ => { - for (i, seg) in self.path.segments.iter().enumerate() { if i > 0 { - write!(f, "::")? + args.push_str("
"); + args_plain.push_str(" "); + } + if !input.name.is_empty() { + args.push_str(&format!("{}: ", input.name)); + args_plain.push_str(&format!("{}: ", input.name)); } - write!(f, "{}", seg.name)?; - } - Ok(()) - } - } - } -} -impl fmt::Display for clean::TypeBinding { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.name)?; - match self.kind { - clean::TypeBindingKind::Equality { ref ty } => { - if f.alternate() { - write!(f, " = {:#}", ty)?; - } else { - write!(f, " = {}", ty)?; - } - } - clean::TypeBindingKind::Constraint { ref bounds } => { - if !bounds.is_empty() { if f.alternate() { - write!(f, ": {:#}", GenericBounds(bounds))?; + args.push_str(&format!("{:#}", input.type_.print())); } else { - write!(f, ": {}", GenericBounds(bounds))?; + args.push_str(&input.type_.print().to_string()); + } + args_plain.push_str(&format!("{:#}", input.type_.print())); + } + if i + 1 < decl.inputs.values.len() { + args.push(','); + args_plain.push(','); + } + } + + let mut args_plain = format!("({})", args_plain); + + if decl.c_variadic { + args.push_str(",
..."); + args_plain.push_str(", ..."); + } + + let output = if let hir::IsAsync::Async = asyncness { + Cow::Owned(decl.sugared_async_return_type()) + } else { + Cow::Borrowed(&decl.output) + }; + + let arrow_plain = format!("{:#}", &output.print()); + let arrow = if f.alternate() { + format!("{:#}", &output.print()) + } else { + output.print().to_string() + }; + + let declaration_len = header_len + args_plain.len() + arrow_plain.len(); + let output = if declaration_len > 80 { + let full_pad = format!("
{}", " ".repeat(indent + 4)); + let close_pad = format!("
{}", " ".repeat(indent)); + format!("({args}{close}){arrow}", + args = args.replace("
", &full_pad), + close = close_pad, + arrow = arrow) + } else { + format!("({args}){arrow}", args = args.replace("
", ""), arrow = arrow) + }; + + if f.alternate() { + write!(f, "{}", output.replace("
", "\n")) + } else { + write!(f, "{}", output) + } + }) + } +} + +impl clean::Visibility { + crate fn print_with_space(&self) -> impl fmt::Display + '_ { + display_fn(move |f| { + match *self { + clean::Public => f.write_str("pub "), + clean::Inherited => Ok(()), + clean::Visibility::Crate => write!(f, "pub(crate) "), + clean::Visibility::Restricted(did, ref path) => { + f.write_str("pub(")?; + if path.segments.len() != 1 + || (path.segments[0].name != "self" && path.segments[0].name != "super") + { + f.write_str("in ")?; + } + resolved_path(f, did, path, true, false)?; + f.write_str(") ") + } + } + }) + } +} + +crate trait PrintWithSpace { + fn print_with_space(&self) -> &str; +} + +impl PrintWithSpace for hir::Unsafety { + fn print_with_space(&self) -> &str { + match self { + hir::Unsafety::Unsafe => "unsafe ", + hir::Unsafety::Normal => "" + } + } +} + +impl PrintWithSpace for hir::Constness { + fn print_with_space(&self) -> &str { + match self { + hir::Constness::Const => "const ", + hir::Constness::NotConst => "" + } + } +} + +impl PrintWithSpace for hir::IsAsync { + fn print_with_space(&self) -> &str { + match self { + hir::IsAsync::Async => "async ", + hir::IsAsync::NotAsync => "", + } + } +} + +impl clean::Import { + crate fn print(&self) -> impl fmt::Display + '_ { + display_fn(move |f| { + match *self { + clean::Import::Simple(ref name, ref src) => { + if *name == src.path.last_name() { + write!(f, "use {};", src.print()) + } else { + write!(f, "use {} as {};", src.print(), *name) + } + } + clean::Import::Glob(ref src) => { + if src.path.segments.is_empty() { + write!(f, "use *;") + } else { + write!(f, "use {}::*;", src.print()) } } } - } - Ok(()) + }) } } -impl fmt::Display for MutableSpace { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self { - MutableSpace(clean::Immutable) => Ok(()), - MutableSpace(clean::Mutable) => write!(f, "mut "), +impl clean::ImportSource { + crate fn print(&self) -> impl fmt::Display + '_ { + display_fn(move |f| { + match self.did { + Some(did) => resolved_path(f, did, &self.path, true, false), + _ => { + for (i, seg) in self.path.segments.iter().enumerate() { + if i > 0 { + write!(f, "::")? 
+ } + write!(f, "{}", seg.name)?; + } + Ok(()) + } + } + }) + } +} + +impl clean::TypeBinding { + crate fn print(&self) -> impl fmt::Display + '_ { + display_fn(move |f| { + f.write_str(&self.name)?; + match self.kind { + clean::TypeBindingKind::Equality { ref ty } => { + if f.alternate() { + write!(f, " = {:#}", ty.print())?; + } else { + write!(f, " = {}", ty.print())?; + } + } + clean::TypeBindingKind::Constraint { ref bounds } => { + if !bounds.is_empty() { + if f.alternate() { + write!(f, ": {:#}", print_generic_bounds(bounds))?; + } else { + write!(f, ": {}", print_generic_bounds(bounds))?; + } + } + } + } + Ok(()) + }) + } +} + +impl clean::Mutability { + crate fn print_with_space(&self) -> &str { + match self { + clean::Immutable => "", + clean::Mutable => "mut ", } } } -impl fmt::Display for AbiSpace { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { +crate fn print_abi_with_space(abi: Abi) -> impl fmt::Display { + display_fn(move |f| { let quot = if f.alternate() { "\"" } else { """ }; - match self.0 { + match abi { Abi::Rust => Ok(()), abi => write!(f, "extern {0}{1}{0} ", quot, abi.name()), } + }) +} + +crate fn print_default_space<'a>(v: bool) -> &'a str { + if v { + "default " + } else { + "" } } -impl fmt::Display for DefaultSpace { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if self.0 { - write!(f, "default ") - } else { - Ok(()) - } +impl clean::GenericArg { + crate fn print(&self) -> impl fmt::Display + '_ { + display_fn(move |f| { + match self { + clean::GenericArg::Lifetime(lt) => fmt::Display::fmt(<.print(), f), + clean::GenericArg::Type(ty) => fmt::Display::fmt(&ty.print(), f), + clean::GenericArg::Const(ct) => fmt::Display::fmt(&ct.print(), f), + } + }) } } diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index 5d86ee9721..30c9453a64 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -14,7 +14,7 @@ use std::io::prelude::*; use syntax::source_map::{SourceMap, FilePathMapping}; use syntax::parse::lexer; use syntax::parse::token::{self, Token}; -use syntax::parse; +use syntax::sess::ParseSess; use syntax::symbol::{kw, sym}; use syntax_pos::{Span, FileName}; @@ -33,7 +33,7 @@ pub fn render_with_highlighting( class, tooltip).unwrap(); } - let sess = parse::ParseSess::new(FilePathMapping::empty()); + let sess = ParseSess::new(FilePathMapping::empty()); let fm = sess.source_map().new_source_file( FileName::Custom(String::from("rustdoc-highlighting")), src.to_owned(), diff --git a/src/librustdoc/html/item_type.rs b/src/librustdoc/html/item_type.rs index cf51a4eb5a..f5e4592489 100644 --- a/src/librustdoc/html/item_type.rs +++ b/src/librustdoc/html/item_type.rs @@ -1,7 +1,7 @@ //! Item types. use std::fmt; -use syntax::ext::base::MacroKind; +use syntax_pos::hygiene::MacroKind; use crate::clean; /// Item type. Corresponds to `clean::ItemEnum` variants. 
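The `html/format.rs` changes above replace the old standalone `fmt::Display` impls with `print()` methods that return `impl fmt::Display + '_`, building the displayable value from a closure through the `display_fn` adapter seen in the new code. A minimal sketch of how such an adapter can be written (an illustration only, assuming a plain `Fn` closure; the helper actually added by this patch may use different bounds or internals) looks like this:

```rust
use std::fmt;

// Minimal sketch: wrap a closure so it can be used anywhere a `Display`
// value is expected (`write!`, `format!`, `.to_string()`, ...).
// `Display::fmt` can be called more than once for the same value, so this
// sketch requires `Fn` rather than `FnOnce` to stay on the safe side.
struct DisplayFn<F>(F);

impl<F> fmt::Display for DisplayFn<F>
where
    F: Fn(&mut fmt::Formatter<'_>) -> fmt::Result,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Simply forward the caller's formatter to the stored closure.
        (self.0)(f)
    }
}

fn display_fn(f: impl Fn(&mut fmt::Formatter<'_>) -> fmt::Result) -> impl fmt::Display {
    DisplayFn(f)
}

fn main() {
    let greeting = display_fn(|f| write!(f, "hello, {}", "world"));
    // The adapter formats like any other `Display` value.
    assert_eq!(greeting.to_string(), "hello, world");
    println!("{}", greeting);
}
```

Because the closure receives the caller's `Formatter`, the alternate-form flag is carried through automatically, which is why the new `print()` methods can keep branching on `f.alternate()` (the `{:#}` vs `{}` distinction) exactly as the removed `Display` impls did.
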
@@ -46,14 +46,6 @@ pub enum ItemType { } -#[derive(Copy, Eq, PartialEq, Clone)] -pub enum NameSpace { - Type, - Value, - Macro, - Keyword, -} - impl<'a> From<&'a clean::Item> for ItemType { fn from(item: &'a clean::Item) -> ItemType { let inner = match item.inner { @@ -120,7 +112,7 @@ impl From for ItemType { } impl ItemType { - pub fn css_class(&self) -> &'static str { + pub fn as_str(&self) -> &'static str { match *self { ItemType::Module => "mod", ItemType::ExternCrate => "externcrate", @@ -151,7 +143,7 @@ impl ItemType { } } - pub fn name_space(&self) -> NameSpace { + pub fn name_space(&self) -> &'static str { match *self { ItemType::Struct | ItemType::Union | @@ -163,7 +155,7 @@ impl ItemType { ItemType::AssocType | ItemType::OpaqueTy | ItemType::TraitAlias | - ItemType::ForeignType => NameSpace::Type, + ItemType::ForeignType => NAMESPACE_TYPE, ItemType::ExternCrate | ItemType::Import | @@ -175,20 +167,20 @@ impl ItemType { ItemType::StructField | ItemType::Variant | ItemType::Constant | - ItemType::AssocConst => NameSpace::Value, + ItemType::AssocConst => NAMESPACE_VALUE, ItemType::Macro | ItemType::ProcAttribute | - ItemType::ProcDerive => NameSpace::Macro, + ItemType::ProcDerive => NAMESPACE_MACRO, - ItemType::Keyword => NameSpace::Keyword, + ItemType::Keyword => NAMESPACE_KEYWORD, } } } impl fmt::Display for ItemType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.css_class().fmt(f) + write!(f, "{}", self.as_str()) } } @@ -196,20 +188,3 @@ pub const NAMESPACE_TYPE: &'static str = "t"; pub const NAMESPACE_VALUE: &'static str = "v"; pub const NAMESPACE_MACRO: &'static str = "m"; pub const NAMESPACE_KEYWORD: &'static str = "k"; - -impl NameSpace { - pub fn to_static_str(&self) -> &'static str { - match *self { - NameSpace::Type => NAMESPACE_TYPE, - NameSpace::Value => NAMESPACE_VALUE, - NameSpace::Macro => NAMESPACE_MACRO, - NameSpace::Keyword => NAMESPACE_KEYWORD, - } - } -} - -impl fmt::Display for NameSpace { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.to_static_str().fmt(f) - } -} diff --git a/src/librustdoc/html/layout.rs b/src/librustdoc/html/layout.rs index 56074f4ab1..697dee0216 100644 --- a/src/librustdoc/html/layout.rs +++ b/src/librustdoc/html/layout.rs @@ -1,7 +1,7 @@ use std::path::PathBuf; use crate::externalfiles::ExternalHtml; -use crate::html::render::SlashChecker; +use crate::html::render::ensure_trailing_slash; use crate::html::format::{Buffer, Print}; #[derive(Clone)] @@ -106,53 +106,6 @@ pub fn render(
{content}
\
\
\ - \ {after_content}\ ", name = it.name.as_ref().map(|x| &x[..]).unwrap_or(""), - ty = it.type_().css_class(), + ty = it.type_(), path = relpath); if parentlen == 0 { // There is no sidebar-items.js beyond the crate root path @@ -4301,8 +3736,7 @@ fn get_methods( ) -> Vec { i.items.iter().filter_map(|item| { match item.name { - // Maybe check with clean::Visibility::Public as well? - Some(ref name) if !name.is_empty() && item.visibility.is_some() && item.is_method() => { + Some(ref name) if !name.is_empty() && item.is_method() => { if !for_deref || should_render_item(item, deref_mut) { Some(format!("
{}", get_next_url(used_links, format!("method.{}", name)), @@ -4339,12 +3773,12 @@ fn sidebar_assoc_items(it: &clean::Item) -> String { let mut used_links = FxHashSet::default(); { - let used_links_bor = Rc::new(RefCell::new(&mut used_links)); + let used_links_bor = &mut used_links; let mut ret = v.iter() .filter(|i| i.inner_impl().trait_.is_none()) .flat_map(move |i| get_methods(i.inner_impl(), false, - &mut used_links_bor.borrow_mut(), false)) + used_links_bor, false)) .collect::>(); // We want links' order to be reproducible so we don't use unstable sort. ret.sort(); @@ -4370,9 +3804,10 @@ fn sidebar_assoc_items(it: &clean::Item) -> String { if let Some(impls) = inner_impl { out.push_str(""); out.push_str(&format!("Methods from {}<Target={}>", - Escape(&format!("{:#}", - impl_.inner_impl().trait_.as_ref().unwrap())), - Escape(&format!("{:#}", target)))); + Escape(&format!( + "{:#}", impl_.inner_impl().trait_.as_ref().unwrap().print() + )), + Escape(&format!("{:#}", target.print())))); out.push_str(""); let mut ret = impls.iter() .filter(|i| i.inner_impl().trait_.is_none()) @@ -4397,9 +3832,9 @@ fn sidebar_assoc_items(it: &clean::Item) -> String { .filter_map(|i| { let is_negative_impl = is_negative_impl(i.inner_impl()); if let Some(ref i) = i.inner_impl().trait_ { - let i_display = format!("{:#}", i); + let i_display = format!("{:#}", i.print()); let out = Escape(&i_display); - let encoded = small_url_encode(&format!("{:#}", i)); + let encoded = small_url_encode(&format!("{:#}", i.print())); let generated = format!("{}{}", encoded, if is_negative_impl { "!" } else { "" }, @@ -4471,14 +3906,17 @@ fn sidebar_struct(buf: &mut Buffer, it: &clean::Item, s: &clean::Struct) { } fn get_id_for_impl_on_foreign_type(for_: &clean::Type, trait_: &clean::Type) -> String { - small_url_encode(&format!("impl-{:#}-for-{:#}", trait_, for_)) + small_url_encode(&format!("impl-{:#}-for-{:#}", trait_.print(), for_.print())) } fn extract_for_impl_name(item: &clean::Item) -> Option<(String, String)> { match item.inner { clean::ItemEnum::ImplItem(ref i) => { if let Some(ref trait_) = i.trait_ { - Some((format!("{:#}", i.for_), get_id_for_impl_on_foreign_type(&i.for_, trait_))) + Some(( + format!("{:#}", i.for_.print()), + get_id_for_impl_on_foreign_type(&i.for_, trait_), + )) } else { None } @@ -4602,7 +4040,7 @@ fn sidebar_trait(buf: &mut Buffer, it: &clean::Item, t: &clean::Trait) { write!(buf, "
<div class=\"block items\">{}</div>
", sidebar) } -fn sidebar_primitive(buf: &mut Buffer, it: &clean::Item, _p: &clean::PrimitiveType) { +fn sidebar_primitive(buf: &mut Buffer, it: &clean::Item) { let sidebar = sidebar_assoc_items(it); if !sidebar.is_empty() { @@ -4610,7 +4048,7 @@ fn sidebar_primitive(buf: &mut Buffer, it: &clean::Item, _p: &clean::PrimitiveTy } } -fn sidebar_typedef(buf: &mut Buffer, it: &clean::Item, _t: &clean::Typedef) { +fn sidebar_typedef(buf: &mut Buffer, it: &clean::Item) { let sidebar = sidebar_assoc_items(it); if !sidebar.is_empty() { @@ -4702,7 +4140,7 @@ fn item_ty_to_strs(ty: &ItemType) -> (&'static str, &'static str) { } } -fn sidebar_module(buf: &mut Buffer, _it: &clean::Item, items: &[clean::Item]) { +fn sidebar_module(buf: &mut Buffer, items: &[clean::Item]) { let mut sidebar = String::new(); if items.iter().any(|it| it.type_() == ItemType::ExternCrate || @@ -4780,16 +4218,12 @@ fn item_proc_macro(w: &mut Buffer, cx: &Context, it: &clean::Item, m: &clean::Pr document(w, cx, it) } -fn item_primitive(w: &mut Buffer, cx: &Context, - it: &clean::Item, - _p: &clean::PrimitiveType) { +fn item_primitive(w: &mut Buffer, cx: &Context, it: &clean::Item) { document(w, cx, it); render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All) } -fn item_keyword(w: &mut Buffer, cx: &Context, - it: &clean::Item, - _p: &str) { +fn item_keyword(w: &mut Buffer, cx: &Context, it: &clean::Item) { document(w, cx, it) } @@ -4799,37 +4233,6 @@ fn make_item_keywords(it: &clean::Item) -> String { format!("{}, {}", BASIC_KEYWORDS, it.name.as_ref().unwrap()) } -fn get_index_search_type(item: &clean::Item) -> Option { - let (all_types, ret_types) = match item.inner { - clean::FunctionItem(ref f) => (&f.all_types, &f.ret_types), - clean::MethodItem(ref m) => (&m.all_types, &m.ret_types), - clean::TyMethodItem(ref m) => (&m.all_types, &m.ret_types), - _ => return None, - }; - - let inputs = all_types.iter().map(|arg| { - get_index_type(&arg) - }).filter(|a| a.name.is_some()).collect(); - let output = ret_types.iter().map(|arg| { - get_index_type(&arg) - }).filter(|a| a.name.is_some()).collect::>(); - let output = if output.is_empty() { - None - } else { - Some(output) - }; - - Some(IndexItemFunctionType { inputs, output }) -} - -fn get_index_type(clean_type: &clean::Type) -> Type { - let t = Type { - name: get_index_type_name(clean_type, true).map(|s| s.to_ascii_lowercase()), - generics: get_generics(clean_type), - }; - t -} - /// Returns a list of all paths used in the type. /// This is used to help deduplicate imported impls /// for reexported types. If any of the contained @@ -4887,39 +4290,6 @@ fn collect_paths_for_type(first_ty: clean::Type) -> Vec { out } -fn get_index_type_name(clean_type: &clean::Type, accept_generic: bool) -> Option { - match *clean_type { - clean::ResolvedPath { ref path, .. } => { - let segments = &path.segments; - let path_segment = segments.into_iter().last().unwrap_or_else(|| panic!( - "get_index_type_name(clean_type: {:?}, accept_generic: {:?}) had length zero path", - clean_type, accept_generic - )); - Some(path_segment.name.clone()) - } - clean::Generic(ref s) if accept_generic => Some(s.clone()), - clean::Primitive(ref p) => Some(format!("{:?}", p)), - clean::BorrowedRef { ref type_, .. } => get_index_type_name(type_, accept_generic), - // FIXME: add all from clean::Type. 
- _ => None - } -} - -fn get_generics(clean_type: &clean::Type) -> Option> { - clean_type.generics() - .and_then(|types| { - let r = types.iter() - .filter_map(|t| get_index_type_name(t, false)) - .map(|s| s.to_ascii_lowercase()) - .collect::>(); - if r.is_empty() { - None - } else { - Some(r) - } - }) -} - -pub fn cache() -> Arc { +crate fn cache() -> Arc { CACHE_KEY.with(|c| c.borrow().clone()) } diff --git a/src/librustdoc/html/render/cache.rs b/src/librustdoc/html/render/cache.rs new file mode 100644 index 0000000000..65dd119c27 --- /dev/null +++ b/src/librustdoc/html/render/cache.rs @@ -0,0 +1,675 @@ +use crate::clean::{self, GetDefId, AttributesExt}; +use crate::fold::DocFolder; +use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId}; +use rustc::middle::privacy::AccessLevels; +use rustc_data_structures::fx::{FxHashMap, FxHashSet}; +use std::mem; +use std::path::{Path, PathBuf}; +use std::collections::BTreeMap; +use syntax::source_map::FileName; +use syntax::symbol::sym; +use serialize::json::{ToJson, Json, as_json}; + +use super::{ItemType, IndexItem, IndexItemFunctionType, Impl, shorten, plain_summary_line}; +use super::{Type, RenderInfo}; + +/// Indicates where an external crate can be found. +pub enum ExternalLocation { + /// Remote URL root of the external crate + Remote(String), + /// This external crate can be found in the local doc/ folder + Local, + /// The external crate could not be found. + Unknown, +} + +/// This cache is used to store information about the `clean::Crate` being +/// rendered in order to provide more useful documentation. This contains +/// information like all implementors of a trait, all traits a type implements, +/// documentation for all known traits, etc. +/// +/// This structure purposefully does not implement `Clone` because it's intended +/// to be a fairly large and expensive structure to clone. Instead this adheres +/// to `Send` so it may be stored in a `Arc` instance and shared among the various +/// rendering threads. +#[derive(Default)] +crate struct Cache { + /// Maps a type ID to all known implementations for that type. This is only + /// recognized for intra-crate `ResolvedPath` types, and is used to print + /// out extra documentation on the page of an enum/struct. + /// + /// The values of the map are a list of implementations and documentation + /// found on that implementation. + pub impls: FxHashMap>, + + /// Maintains a mapping of local crate `NodeId`s to the fully qualified name + /// and "short type description" of that node. This is used when generating + /// URLs when a type is being linked to. External paths are not located in + /// this map because the `External` type itself has all the information + /// necessary. + pub paths: FxHashMap, ItemType)>, + + /// Similar to `paths`, but only holds external paths. This is only used for + /// generating explicit hyperlinks to other crates. + pub external_paths: FxHashMap, ItemType)>, + + /// Maps local `DefId`s of exported types to fully qualified paths. + /// Unlike 'paths', this mapping ignores any renames that occur + /// due to 'use' statements. + /// + /// This map is used when writing out the special 'implementors' + /// javascript file. By using the exact path that the type + /// is declared with, we ensure that each path will be identical + /// to the path used if the corresponding type is inlined. By + /// doing this, we can detect duplicate impls on a trait page, and only display + /// the impl for the inlined type. 
+ pub exact_paths: FxHashMap>, + + /// This map contains information about all known traits of this crate. + /// Implementations of a crate should inherit the documentation of the + /// parent trait if no extra documentation is specified, and default methods + /// should show up in documentation about trait implementations. + pub traits: FxHashMap, + + /// When rendering traits, it's often useful to be able to list all + /// implementors of the trait, and this mapping is exactly, that: a mapping + /// of trait ids to the list of known implementors of the trait + pub implementors: FxHashMap>, + + /// Cache of where external crate documentation can be found. + pub extern_locations: FxHashMap, + + /// Cache of where documentation for primitives can be found. + pub primitive_locations: FxHashMap, + + // Note that external items for which `doc(hidden)` applies to are shown as + // non-reachable while local items aren't. This is because we're reusing + // the access levels from the privacy check pass. + pub access_levels: AccessLevels, + + /// The version of the crate being documented, if given from the `--crate-version` flag. + pub crate_version: Option, + + // Private fields only used when initially crawling a crate to build a cache + + stack: Vec, + parent_stack: Vec, + parent_is_trait_impl: bool, + search_index: Vec, + stripped_mod: bool, + pub deref_trait_did: Option, + pub deref_mut_trait_did: Option, + pub owned_box_did: Option, + masked_crates: FxHashSet, + + // In rare case where a structure is defined in one module but implemented + // in another, if the implementing module is parsed before defining module, + // then the fully qualified name of the structure isn't presented in `paths` + // yet when its implementation methods are being indexed. Caches such methods + // and their parent id here and indexes them at the end of crate parsing. + orphan_impl_items: Vec<(DefId, clean::Item)>, + + // Similarly to `orphan_impl_items`, sometimes trait impls are picked up + // even though the trait itself is not exported. This can happen if a trait + // was defined in function/expression scope, since the impl will be picked + // up by `collect-trait-impls` but the trait won't be scraped out in the HIR + // crawl. In order to prevent crashes when looking for spotlight traits or + // when gathering trait documentation on a type, hold impls here while + // folding and add them to the cache later on if we find the trait. + orphan_trait_impls: Vec<(DefId, FxHashSet, Impl)>, + + /// Aliases added through `#[doc(alias = "...")]`. Since a few items can have the same alias, + /// we need the alias element to have an array of items. 
+ pub(super) aliases: FxHashMap>, +} + +impl Cache { + pub fn from_krate( + renderinfo: RenderInfo, + extern_html_root_urls: &BTreeMap, + dst: &Path, + mut krate: clean::Crate, + ) -> (clean::Crate, String, Cache) { + // Crawl the crate to build various caches used for the output + let RenderInfo { + inlined: _, + external_paths, + exact_paths, + access_levels, + deref_trait_did, + deref_mut_trait_did, + owned_box_did, + } = renderinfo; + + let external_paths = external_paths.into_iter() + .map(|(k, (v, t))| (k, (v, ItemType::from(t)))) + .collect(); + + let mut cache = Cache { + impls: Default::default(), + external_paths, + exact_paths, + paths: Default::default(), + implementors: Default::default(), + stack: Vec::new(), + parent_stack: Vec::new(), + search_index: Vec::new(), + parent_is_trait_impl: false, + extern_locations: Default::default(), + primitive_locations: Default::default(), + stripped_mod: false, + access_levels, + crate_version: krate.version.take(), + orphan_impl_items: Vec::new(), + orphan_trait_impls: Vec::new(), + traits: krate.external_traits.replace(Default::default()), + deref_trait_did, + deref_mut_trait_did, + owned_box_did, + masked_crates: mem::take(&mut krate.masked_crates), + aliases: Default::default(), + }; + + // Cache where all our extern crates are located + for &(n, ref e) in &krate.externs { + let src_root = match e.src { + FileName::Real(ref p) => match p.parent() { + Some(p) => p.to_path_buf(), + None => PathBuf::new(), + }, + _ => PathBuf::new(), + }; + let extern_url = extern_html_root_urls.get(&e.name).map(|u| &**u); + cache.extern_locations.insert(n, (e.name.clone(), src_root, + extern_location(e, extern_url, &dst))); + + let did = DefId { krate: n, index: CRATE_DEF_INDEX }; + cache.external_paths.insert(did, (vec![e.name.to_string()], ItemType::Module)); + } + + // Cache where all known primitives have their documentation located. + // + // Favor linking to as local extern as possible, so iterate all crates in + // reverse topological order. + for &(_, ref e) in krate.externs.iter().rev() { + for &(def_id, prim, _) in &e.primitives { + cache.primitive_locations.insert(prim, def_id); + } + } + for &(def_id, prim, _) in &krate.primitives { + cache.primitive_locations.insert(prim, def_id); + } + + cache.stack.push(krate.name.clone()); + krate = cache.fold_crate(krate); + + for (trait_did, dids, impl_) in cache.orphan_trait_impls.drain(..) { + if cache.traits.contains_key(&trait_did) { + for did in dids { + cache.impls.entry(did).or_insert(vec![]).push(impl_.clone()); + } + } + } + + // Build our search index + let index = build_index(&krate, &mut cache); + + (krate, index, cache) + } +} + +impl DocFolder for Cache { + fn fold_item(&mut self, item: clean::Item) -> Option { + if item.def_id.is_local() { + debug!("folding {} \"{:?}\", id {:?}", item.type_(), item.name, item.def_id); + } + + // If this is a stripped module, + // we don't want it or its children in the search index. + let orig_stripped_mod = match item.inner { + clean::StrippedItem(box clean::ModuleItem(..)) => { + mem::replace(&mut self.stripped_mod, true) + } + _ => self.stripped_mod, + }; + + // If the impl is from a masked crate or references something from a + // masked crate then remove it completely. 
+ if let clean::ImplItem(ref i) = item.inner { + if self.masked_crates.contains(&item.def_id.krate) || + i.trait_.def_id().map_or(false, |d| self.masked_crates.contains(&d.krate)) || + i.for_.def_id().map_or(false, |d| self.masked_crates.contains(&d.krate)) { + return None; + } + } + + // Propagate a trait method's documentation to all implementors of the + // trait. + if let clean::TraitItem(ref t) = item.inner { + self.traits.entry(item.def_id).or_insert_with(|| t.clone()); + } + + // Collect all the implementors of traits. + if let clean::ImplItem(ref i) = item.inner { + if let Some(did) = i.trait_.def_id() { + if i.blanket_impl.is_none() { + self.implementors.entry(did).or_default().push(Impl { + impl_item: item.clone(), + }); + } + } + } + + // Index this method for searching later on. + if let Some(ref s) = item.name { + let (parent, is_inherent_impl_item) = match item.inner { + clean::StrippedItem(..) => ((None, None), false), + clean::AssocConstItem(..) | + clean::TypedefItem(_, true) if self.parent_is_trait_impl => { + // skip associated items in trait impls + ((None, None), false) + } + clean::AssocTypeItem(..) | + clean::TyMethodItem(..) | + clean::StructFieldItem(..) | + clean::VariantItem(..) => { + ((Some(*self.parent_stack.last().unwrap()), + Some(&self.stack[..self.stack.len() - 1])), + false) + } + clean::MethodItem(..) | clean::AssocConstItem(..) => { + if self.parent_stack.is_empty() { + ((None, None), false) + } else { + let last = self.parent_stack.last().unwrap(); + let did = *last; + let path = match self.paths.get(&did) { + // The current stack not necessarily has correlation + // for where the type was defined. On the other + // hand, `paths` always has the right + // information if present. + Some(&(ref fqp, ItemType::Trait)) | + Some(&(ref fqp, ItemType::Struct)) | + Some(&(ref fqp, ItemType::Union)) | + Some(&(ref fqp, ItemType::Enum)) => + Some(&fqp[..fqp.len() - 1]), + Some(..) => Some(&*self.stack), + None => None + }; + ((Some(*last), path), true) + } + } + _ => ((None, Some(&*self.stack)), false) + }; + + match parent { + (parent, Some(path)) if is_inherent_impl_item || (!self.stripped_mod) => { + debug_assert!(!item.is_stripped()); + + // A crate has a module at its root, containing all items, + // which should not be indexed. The crate-item itself is + // inserted later on when serializing the search-index. + if item.def_id.index != CRATE_DEF_INDEX { + self.search_index.push(IndexItem { + ty: item.type_(), + name: s.to_string(), + path: path.join("::"), + desc: shorten(plain_summary_line(item.doc_value())), + parent, + parent_idx: None, + search_type: get_index_search_type(&item), + }); + } + } + (Some(parent), None) if is_inherent_impl_item => { + // We have a parent, but we don't know where they're + // defined yet. Wait for later to index this item. + self.orphan_impl_items.push((parent, item.clone())); + } + _ => {} + } + } + + // Keep track of the fully qualified path for this item. + let pushed = match item.name { + Some(ref n) if !n.is_empty() => { + self.stack.push(n.to_string()); + true + } + _ => false, + }; + + match item.inner { + clean::StructItem(..) | clean::EnumItem(..) | + clean::TypedefItem(..) | clean::TraitItem(..) | + clean::FunctionItem(..) | clean::ModuleItem(..) | + clean::ForeignFunctionItem(..) | clean::ForeignStaticItem(..) | + clean::ConstantItem(..) | clean::StaticItem(..) | + clean::UnionItem(..) | clean::ForeignTypeItem | + clean::MacroItem(..) | clean::ProcMacroItem(..) 
+ if !self.stripped_mod => { + // Re-exported items mean that the same id can show up twice + // in the rustdoc ast that we're looking at. We know, + // however, that a re-exported item doesn't show up in the + // `public_items` map, so we can skip inserting into the + // paths map if there was already an entry present and we're + // not a public item. + if !self.paths.contains_key(&item.def_id) || + self.access_levels.is_public(item.def_id) + { + self.paths.insert(item.def_id, + (self.stack.clone(), item.type_())); + } + self.add_aliases(&item); + } + // Link variants to their parent enum because pages aren't emitted + // for each variant. + clean::VariantItem(..) if !self.stripped_mod => { + let mut stack = self.stack.clone(); + stack.pop(); + self.paths.insert(item.def_id, (stack, ItemType::Enum)); + } + + clean::PrimitiveItem(..) => { + self.add_aliases(&item); + self.paths.insert(item.def_id, (self.stack.clone(), + item.type_())); + } + + _ => {} + } + + // Maintain the parent stack + let orig_parent_is_trait_impl = self.parent_is_trait_impl; + let parent_pushed = match item.inner { + clean::TraitItem(..) | clean::EnumItem(..) | clean::ForeignTypeItem | + clean::StructItem(..) | clean::UnionItem(..) => { + self.parent_stack.push(item.def_id); + self.parent_is_trait_impl = false; + true + } + clean::ImplItem(ref i) => { + self.parent_is_trait_impl = i.trait_.is_some(); + match i.for_ { + clean::ResolvedPath{ did, .. } => { + self.parent_stack.push(did); + true + } + ref t => { + let prim_did = t.primitive_type().and_then(|t| { + self.primitive_locations.get(&t).cloned() + }); + match prim_did { + Some(did) => { + self.parent_stack.push(did); + true + } + None => false, + } + } + } + } + _ => false + }; + + // Once we've recursively found all the generics, hoard off all the + // implementations elsewhere. + let ret = self.fold_item_recur(item).and_then(|item| { + if let clean::Item { inner: clean::ImplItem(_), .. } = item { + // Figure out the id of this impl. This may map to a + // primitive rather than always to a struct/enum. + // Note: matching twice to restrict the lifetime of the `i` borrow. + let mut dids = FxHashSet::default(); + if let clean::Item { inner: clean::ImplItem(ref i), .. } = item { + match i.for_ { + clean::ResolvedPath { did, .. } | + clean::BorrowedRef { + type_: box clean::ResolvedPath { did, .. }, .. 
+ } => { + dids.insert(did); + } + ref t => { + let did = t.primitive_type().and_then(|t| { + self.primitive_locations.get(&t).cloned() + }); + + if let Some(did) = did { + dids.insert(did); + } + } + } + + if let Some(generics) = i.trait_.as_ref().and_then(|t| t.generics()) { + for bound in generics { + if let Some(did) = bound.def_id() { + dids.insert(did); + } + } + } + } else { + unreachable!() + }; + let impl_item = Impl { + impl_item: item, + }; + if impl_item.trait_did().map_or(true, |d| self.traits.contains_key(&d)) { + for did in dids { + self.impls.entry(did).or_insert(vec![]).push(impl_item.clone()); + } + } else { + let trait_did = impl_item.trait_did().unwrap(); + self.orphan_trait_impls.push((trait_did, dids, impl_item)); + } + None + } else { + Some(item) + } + }); + + if pushed { self.stack.pop().unwrap(); } + if parent_pushed { self.parent_stack.pop().unwrap(); } + self.stripped_mod = orig_stripped_mod; + self.parent_is_trait_impl = orig_parent_is_trait_impl; + ret + } +} + +impl Cache { + fn add_aliases(&mut self, item: &clean::Item) { + if item.def_id.index == CRATE_DEF_INDEX { + return + } + if let Some(ref item_name) = item.name { + let path = self.paths.get(&item.def_id) + .map(|p| p.0[..p.0.len() - 1].join("::")) + .unwrap_or("std".to_owned()); + for alias in item.attrs.lists(sym::doc) + .filter(|a| a.check_name(sym::alias)) + .filter_map(|a| a.value_str() + .map(|s| s.to_string().replace("\"", ""))) + .filter(|v| !v.is_empty()) + .collect::>() + .into_iter() { + self.aliases.entry(alias) + .or_insert(Vec::with_capacity(1)) + .push(IndexItem { + ty: item.type_(), + name: item_name.to_string(), + path: path.clone(), + desc: shorten(plain_summary_line(item.doc_value())), + parent: None, + parent_idx: None, + search_type: get_index_search_type(&item), + }); + } + } + } +} + +/// Attempts to find where an external crate is located, given that we're +/// rendering in to the specified source destination. +fn extern_location(e: &clean::ExternalCrate, extern_url: Option<&str>, dst: &Path) + -> ExternalLocation +{ + use ExternalLocation::*; + // See if there's documentation generated into the local directory + let local_location = dst.join(&e.name); + if local_location.is_dir() { + return Local; + } + + if let Some(url) = extern_url { + let mut url = url.to_string(); + if !url.ends_with("/") { + url.push('/'); + } + return Remote(url); + } + + // Failing that, see if there's an attribute specifying where to find this + // external crate + e.attrs.lists(sym::doc) + .filter(|a| a.check_name(sym::html_root_url)) + .filter_map(|a| a.value_str()) + .map(|url| { + let mut url = url.to_string(); + if !url.ends_with("/") { + url.push('/') + } + Remote(url) + }).next().unwrap_or(Unknown) // Well, at least we tried. +} + +/// Builds the search index from the collected metadata +fn build_index(krate: &clean::Crate, cache: &mut Cache) -> String { + let mut nodeid_to_pathid = FxHashMap::default(); + let mut crate_items = Vec::with_capacity(cache.search_index.len()); + let mut crate_paths = Vec::::new(); + + let Cache { ref mut search_index, + ref orphan_impl_items, + ref paths, .. } = *cache; + + // Attach all orphan items to the type's definition if the type + // has since been learned. 
+ for &(did, ref item) in orphan_impl_items { + if let Some(&(ref fqp, _)) = paths.get(&did) { + search_index.push(IndexItem { + ty: item.type_(), + name: item.name.clone().unwrap(), + path: fqp[..fqp.len() - 1].join("::"), + desc: shorten(plain_summary_line(item.doc_value())), + parent: Some(did), + parent_idx: None, + search_type: get_index_search_type(&item), + }); + } + } + + // Reduce `NodeId` in paths into smaller sequential numbers, + // and prune the paths that do not appear in the index. + let mut lastpath = String::new(); + let mut lastpathid = 0usize; + + for item in search_index { + item.parent_idx = item.parent.map(|nodeid| { + if nodeid_to_pathid.contains_key(&nodeid) { + *nodeid_to_pathid.get(&nodeid).unwrap() + } else { + let pathid = lastpathid; + nodeid_to_pathid.insert(nodeid, pathid); + lastpathid += 1; + + let &(ref fqp, short) = paths.get(&nodeid).unwrap(); + crate_paths.push(((short as usize), fqp.last().unwrap().clone()).to_json()); + pathid + } + }); + + // Omit the parent path if it is same to that of the prior item. + if lastpath == item.path { + item.path.clear(); + } else { + lastpath = item.path.clone(); + } + crate_items.push(item.to_json()); + } + + let crate_doc = krate.module.as_ref().map(|module| { + shorten(plain_summary_line(module.doc_value())) + }).unwrap_or(String::new()); + + let mut crate_data = BTreeMap::new(); + crate_data.insert("doc".to_owned(), Json::String(crate_doc)); + crate_data.insert("i".to_owned(), Json::Array(crate_items)); + crate_data.insert("p".to_owned(), Json::Array(crate_paths)); + + // Collect the index into a string + format!("searchIndex[{}] = {};", + as_json(&krate.name), + Json::Object(crate_data)) +} + +fn get_index_search_type(item: &clean::Item) -> Option { + let (all_types, ret_types) = match item.inner { + clean::FunctionItem(ref f) => (&f.all_types, &f.ret_types), + clean::MethodItem(ref m) => (&m.all_types, &m.ret_types), + clean::TyMethodItem(ref m) => (&m.all_types, &m.ret_types), + _ => return None, + }; + + let inputs = all_types.iter().map(|arg| { + get_index_type(&arg) + }).filter(|a| a.name.is_some()).collect(); + let output = ret_types.iter().map(|arg| { + get_index_type(&arg) + }).filter(|a| a.name.is_some()).collect::>(); + let output = if output.is_empty() { + None + } else { + Some(output) + }; + + Some(IndexItemFunctionType { inputs, output }) +} + +fn get_index_type(clean_type: &clean::Type) -> Type { + let t = Type { + name: get_index_type_name(clean_type, true).map(|s| s.to_ascii_lowercase()), + generics: get_generics(clean_type), + }; + t +} + +fn get_index_type_name(clean_type: &clean::Type, accept_generic: bool) -> Option { + match *clean_type { + clean::ResolvedPath { ref path, .. } => { + let segments = &path.segments; + let path_segment = segments.into_iter().last().unwrap_or_else(|| panic!( + "get_index_type_name(clean_type: {:?}, accept_generic: {:?}) had length zero path", + clean_type, accept_generic + )); + Some(path_segment.name.clone()) + } + clean::Generic(ref s) if accept_generic => Some(s.clone()), + clean::Primitive(ref p) => Some(format!("{:?}", p)), + clean::BorrowedRef { ref type_, .. } => get_index_type_name(type_, accept_generic), + // FIXME: add all from clean::Type. 
+ _ => None + } +} + +fn get_generics(clean_type: &clean::Type) -> Option> { + clean_type.generics() + .and_then(|types| { + let r = types.iter() + .filter_map(|t| get_index_type_name(t, false)) + .map(|s| s.to_ascii_lowercase()) + .collect::>(); + if r.is_empty() { + None + } else { + Some(r) + } + }) +} diff --git a/src/librustdoc/html/static/main.js b/src/librustdoc/html/static/main.js index 17a940cc4c..de19ca3ed3 100644 --- a/src/librustdoc/html/static/main.js +++ b/src/librustdoc/html/static/main.js @@ -79,6 +79,7 @@ function getSearchElement() { "derive", "traitalias"]; + var disableShortcuts = getCurrentValue("rustdoc-disable-shortcuts") === "true"; var search_input = getSearchInput(); // On the search screen, so you remain on the last tab you opened. @@ -294,7 +295,7 @@ function getSearchElement() { function handleShortcut(ev) { // Don't interfere with browser shortcuts - if (ev.ctrlKey || ev.altKey || ev.metaKey) { + if (ev.ctrlKey || ev.altKey || ev.metaKey || disableShortcuts === true) { return; } @@ -378,9 +379,13 @@ function getSearchElement() { set_fragment(cur_id); } - } else if (hasClass(document.getElementById("help"), "hidden") === false) { - addClass(document.getElementById("help"), "hidden"); - removeClass(document.body, "blur"); + } else if (hasClass(getHelpElement(), "hidden") === false) { + var help = getHelpElement(); + var is_inside_help_popup = ev.target !== help && help.contains(ev.target); + if (is_inside_help_popup === false) { + addClass(help, "hidden"); + removeClass(document.body, "blur"); + } } else { // Making a collapsed element visible on onhashchange seems // too late @@ -1217,7 +1222,7 @@ function getSearchElement() { } dst = dst[0]; if (window.location.pathname === dst.pathname) { - addClass(document.getElementById("search"), "hidden"); + addClass(getSearchElement(), "hidden"); removeClass(main, "hidden"); document.location.href = dst.href; } @@ -2453,7 +2458,7 @@ function getSearchElement() { function putBackSearch(search_input) { if (search_input.value !== "") { addClass(main, "hidden"); - removeClass(document.getElementById("search"), "hidden"); + removeClass(getSearchElement(), "hidden"); if (browserSupportsHistoryApi()) { history.replaceState(search_input.value, "", @@ -2552,6 +2557,53 @@ function getSearchElement() { } window.addSearchOptions = addSearchOptions; + + function buildHelperPopup() { + var popup = document.createElement("aside"); + addClass(popup, "hidden"); + popup.id = "help"; + + var container = document.createElement("div"); + var shortcuts = [ + ["?", "Show this help dialog"], + ["S", "Focus the search field"], + ["↑", "Move up in search results"], + ["↓", "Move down in search results"], + ["↹", "Switch tab"], + ["⏎", "Go to active search result"], + ["+", "Expand all sections"], + ["-", "Collapse all sections"], + ].map(x => "
" + x[0] + "
" + x[1] + "
").join(""); + var div_shortcuts = document.createElement("div"); + addClass(div_shortcuts, "shortcuts"); + div_shortcuts.innerHTML = "

Keyboard Shortcuts

" + shortcuts + "
"; + + var infos = [ + "Prefix searches with a type followed by a colon (e.g., fn:) to \ + restrict the search to a given type.", + "Accepted types are: fn, mod, struct, \ + enum, trait, type, macro, \ + and const.", + "Search functions by type signature (e.g., vec -> usize or \ + * -> vec)", + "Search multiple things at once by splitting your query with comma (e.g., \ + str,u8 or String,struct:Vec,test)", + "You can look for items with an exact name by putting double quotes around \ + your request: \"string\"", + "Look for items inside another one by searching for a path: vec::Vec", + ].map(x => "

" + x + "

").join(""); + var div_infos = document.createElement("div"); + addClass(div_infos, "infos"); + div_infos.innerHTML = "

Search Tricks

" + infos; + + container.appendChild(div_shortcuts); + container.appendChild(div_infos); + + popup.appendChild(container); + insertAfter(popup, getSearchElement()); + } + + buildHelperPopup(); }()); // Sets the focus on the search bar at the top of the page diff --git a/src/librustdoc/html/static/noscript.css b/src/librustdoc/html/static/noscript.css index 4a434d49e4..832bd9ba2d 100644 --- a/src/librustdoc/html/static/noscript.css +++ b/src/librustdoc/html/static/noscript.css @@ -5,3 +5,11 @@ .loading-content { display: none; } + +#main > h2 + div, #main > h3 + div { + display: block; +} + +#main > h2 + h3 { + display: flex; +} diff --git a/src/librustdoc/html/static/rustdoc.css b/src/librustdoc/html/static/rustdoc.css index 244b24af43..64c858238d 100644 --- a/src/librustdoc/html/static/rustdoc.css +++ b/src/librustdoc/html/static/rustdoc.css @@ -183,7 +183,7 @@ nav.sub { position: fixed; left: 0; top: 0; - height: 100vh; + bottom: 0; overflow: auto; } @@ -573,7 +573,7 @@ h4 > code, h3 > code, .invisible > code { margin-top: 0; } -nav { +nav:not(.sidebar) { border-bottom: 1px solid; padding-bottom: 10px; margin-bottom: 10px; diff --git a/src/librustdoc/html/static/themes/dark.css b/src/librustdoc/html/static/themes/dark.css index e44ae2ad10..c3116dbe7a 100644 --- a/src/librustdoc/html/static/themes/dark.css +++ b/src/librustdoc/html/static/themes/dark.css @@ -129,7 +129,7 @@ pre { pre.rust .comment { color: #8d8d8b; } pre.rust .doccomment { color: #8ca375; } -nav { +nav:not(.sidebar) { border-bottom-color: #4e4e4e; } nav.main .current { diff --git a/src/librustdoc/html/static/themes/light.css b/src/librustdoc/html/static/themes/light.css index 4c37000dde..e2bf9f9d2f 100644 --- a/src/librustdoc/html/static/themes/light.css +++ b/src/librustdoc/html/static/themes/light.css @@ -129,7 +129,7 @@ pre { pre.rust .comment { color: #8E908C; } pre.rust .doccomment { color: #4D4D4C; } -nav { +nav:not(.sidebar) { border-bottom-color: #e0e0e0; } nav.main .current { diff --git a/src/librustdoc/html/toc.rs b/src/librustdoc/html/toc.rs index 2da7aceae8..0fb2f8dd79 100644 --- a/src/librustdoc/html/toc.rs +++ b/src/librustdoc/html/toc.rs @@ -1,10 +1,7 @@ //! Table-of-contents creation. -use std::fmt; -use std::string::String; - /// A (recursive) table of contents -#[derive(PartialEq)] +#[derive(Debug, PartialEq)] pub struct Toc { /// The levels are strictly decreasing, i.e. /// @@ -28,7 +25,7 @@ impl Toc { } } -#[derive(PartialEq)] +#[derive(Debug, PartialEq)] pub struct TocEntry { level: u32, sec_number: String, @@ -165,25 +162,23 @@ impl TocBuilder { } } -impl fmt::Debug for Toc { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Display::fmt(self, f) - } -} - -impl fmt::Display for Toc { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(fmt, "
    ")?; +impl Toc { + fn print_inner(&self, v: &mut String) { + v.push_str("
      "); for entry in &self.entries { - // recursively format this table of contents (the - // `{children}` is the key). - write!(fmt, - "\n
    • {num} {name}{children}
    • ", + // recursively format this table of contents + v.push_str(&format!("\n
    • {num} {name}", id = entry.id, - num = entry.sec_number, name = entry.name, - children = entry.children)? + num = entry.sec_number, name = entry.name)); + entry.children.print_inner(&mut *v); + v.push_str("
    • "); } - write!(fmt, "
    ") + v.push_str("
"); + } + crate fn print(&self) -> String { + let mut v = String::new(); + self.print_inner(&mut v); + v } } diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index 0b9e717221..8cd32a3d1b 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -1,7 +1,6 @@ #![doc(html_root_url = "https://doc.rust-lang.org/nightly/", html_playground_url = "https://play.rust-lang.org/")] -#![cfg_attr(bootstrap, feature(bind_by_move_pattern_guards))] #![feature(rustc_private)] #![feature(arbitrary_self_types)] #![feature(box_patterns)] @@ -15,9 +14,7 @@ #![feature(crate_visibility_modifier)] #![feature(const_fn)] #![feature(drain_filter)] -#![feature(inner_deref)] #![feature(never_type)] -#![feature(mem_take)] #![feature(unicode_internals)] #![recursion_limit="256"] @@ -26,6 +23,7 @@ extern crate getopts; extern crate env_logger; extern crate rustc; extern crate rustc_data_structures; +extern crate rustc_index; extern crate rustc_driver; extern crate rustc_resolve; extern crate rustc_lint; @@ -36,6 +34,7 @@ extern crate rustc_typeck; extern crate rustc_lexer; extern crate serialize; extern crate syntax; +extern crate syntax_expand; extern crate syntax_pos; extern crate test as testing; #[macro_use] extern crate log; @@ -487,8 +486,8 @@ where R: 'static + Send, krate.version = crate_version; f(Output { - krate: krate, - renderinfo: renderinfo, + krate, + renderinfo, renderopts, }) }); diff --git a/src/librustdoc/passes/calculate_doc_coverage.rs b/src/librustdoc/passes/calculate_doc_coverage.rs index 4ee09f7096..dc1ca8d766 100644 --- a/src/librustdoc/passes/calculate_doc_coverage.rs +++ b/src/librustdoc/passes/calculate_doc_coverage.rs @@ -142,7 +142,8 @@ impl fold::DocFolder for CoverageCalculator { } clean::ImplItem(ref impl_) => { if let Some(ref tr) = impl_.trait_ { - debug!("impl {:#} for {:#} in {}", tr, impl_.for_, i.source.filename); + debug!("impl {:#} for {:#} in {}", + tr.print(), impl_.for_.print(), i.source.filename); // don't count trait impls, the missing-docs lint doesn't so we shouldn't // either @@ -151,11 +152,11 @@ impl fold::DocFolder for CoverageCalculator { // inherent impls *can* be documented, and those docs show up, but in most // cases it doesn't make sense, as all methods on a type are in one single // impl block - debug!("impl {:#} in {}", impl_.for_, i.source.filename); + debug!("impl {:#} in {}", impl_.for_.print(), i.source.filename); } } _ => { - debug!("counting {} {:?} in {}", i.type_(), i.name, i.source.filename); + debug!("counting {:?} {:?} in {}", i.type_(), i.name, i.source.filename); self.items.entry(i.source.filename.clone()) .or_default() .count_item(has_docs); diff --git a/src/librustdoc/passes/check_code_block_syntax.rs b/src/librustdoc/passes/check_code_block_syntax.rs index 32044e48b6..10e15ab888 100644 --- a/src/librustdoc/passes/check_code_block_syntax.rs +++ b/src/librustdoc/passes/check_code_block_syntax.rs @@ -1,6 +1,7 @@ use errors::Applicability; use syntax::parse::lexer::{StringReader as Lexer}; -use syntax::parse::{ParseSess, token}; +use syntax::parse::token; +use syntax::sess::ParseSess; use syntax::source_map::FilePathMapping; use syntax_pos::{InnerSpan, FileName}; diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs index d6073cdc1e..ab34f8daad 100644 --- a/src/librustdoc/passes/collect_intra_doc_links.rs +++ b/src/librustdoc/passes/collect_intra_doc_links.rs @@ -7,7 +7,7 @@ use rustc::ty; use rustc_resolve::ParentScope; use syntax; use syntax::ast::{self, Ident}; -use 
syntax::ext::base::SyntaxExtensionKind; +use syntax_expand::base::SyntaxExtensionKind; use syntax::feature_gate::UnstableFeatures; use syntax::symbol::Symbol; use syntax_pos::DUMMY_SP; @@ -155,7 +155,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> { }; Ok((ty_res, Some(format!("{}.{}", out, item_name)))) } else { - match cx.tcx.type_of(did).sty { + match cx.tcx.type_of(did).kind { ty::Adt(def, _) => { if let Some(item) = if def.is_enum() { def.all_fields().find(|item| item.ident.name == item_name) @@ -237,7 +237,7 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> { }); if parent_node.is_some() { - debug!("got parent node for {} {:?}, id {:?}", item.type_(), item.name, item.def_id); + debug!("got parent node for {:?} {:?}, id {:?}", item.type_(), item.name, item.def_id); } let current_item = match item.inner { @@ -322,9 +322,26 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> { continue; } + // In order to correctly resolve intra-doc-links we need to + // pick a base AST node to work from. If the documentation for + // this module came from an inner comment (//!) then we anchor + // our name resolution *inside* the module. If, on the other + // hand it was an outer comment (///) then we anchor the name + // resolution in the parent module on the basis that the names + // used are more likely to be intended to be parent names. For + // this, we set base_node to None for inner comments since + // we've already pushed this node onto the resolution stack but + // for outer comments we explicitly try and resolve against the + // parent_node first. + let base_node = if item.is_mod() && item.attrs.inner_docs { + None + } else { + parent_node + }; + match kind { Some(ns @ ValueNS) => { - if let Ok(res) = self.resolve(path_str, ns, ¤t_item, parent_node) { + if let Ok(res) = self.resolve(path_str, ns, ¤t_item, base_node) { res } else { resolution_failure(cx, &item, path_str, &dox, link_range); @@ -335,7 +352,7 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> { } } Some(ns @ TypeNS) => { - if let Ok(res) = self.resolve(path_str, ns, ¤t_item, parent_node) { + if let Ok(res) = self.resolve(path_str, ns, ¤t_item, base_node) { res } else { resolution_failure(cx, &item, path_str, &dox, link_range); @@ -348,10 +365,10 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> { let candidates = PerNS { macro_ns: macro_resolve(cx, path_str).map(|res| (res, None)), type_ns: self - .resolve(path_str, TypeNS, ¤t_item, parent_node) + .resolve(path_str, TypeNS, ¤t_item, base_node) .ok(), value_ns: self - .resolve(path_str, ValueNS, ¤t_item, parent_node) + .resolve(path_str, ValueNS, ¤t_item, base_node) .ok() .and_then(|(res, fragment)| { // Constructors are picked up in the type namespace. @@ -432,13 +449,13 @@ fn macro_resolve(cx: &DocContext<'_>, path_str: &str) -> Option { let path = ast::Path::from_ident(Ident::from_str(path_str)); cx.enter_resolver(|resolver| { if let Ok((Some(ext), res)) = resolver.resolve_macro_path( - &path, None, &ParentScope::module(resolver.graph_root), false, false + &path, None, &ParentScope::module(resolver.graph_root()), false, false ) { if let SyntaxExtensionKind::LegacyBang { .. 
} = ext.kind { return Some(res.map_id(|_| panic!("unexpected id"))); } } - if let Some(res) = resolver.all_macros.get(&Symbol::intern(path_str)) { + if let Some(res) = resolver.all_macros().get(&Symbol::intern(path_str)) { return Some(res.map_id(|_| panic!("unexpected id"))); } None diff --git a/src/librustdoc/passes/mod.rs b/src/librustdoc/passes/mod.rs index 14f8b16dc3..f6560218a7 100644 --- a/src/librustdoc/passes/mod.rs +++ b/src/librustdoc/passes/mod.rs @@ -153,7 +153,7 @@ impl<'a> DocFolder for Stripper<'a> { // We need to recurse into stripped modules to strip things // like impl methods but when doing so we must not add any // items to the `retained` set. - debug!("Stripper: recursing into stripped {} {:?}", i.type_(), i.name); + debug!("Stripper: recursing into stripped {:?} {:?}", i.type_(), i.name); let old = mem::replace(&mut self.update_retained, false); let ret = self.fold_item_recur(i); self.update_retained = old; @@ -178,20 +178,20 @@ impl<'a> DocFolder for Stripper<'a> { | clean::ForeignTypeItem => { if i.def_id.is_local() { if !self.access_levels.is_exported(i.def_id) { - debug!("Stripper: stripping {} {:?}", i.type_(), i.name); + debug!("Stripper: stripping {:?} {:?}", i.type_(), i.name); return None; } } } clean::StructFieldItem(..) => { - if i.visibility != Some(clean::Public) { + if i.visibility != clean::Public { return StripItem(i).strip(); } } clean::ModuleItem(..) => { - if i.def_id.is_local() && i.visibility != Some(clean::Public) { + if i.def_id.is_local() && i.visibility != clean::Public { debug!("Stripper: stripping module {:?}", i.name); let old = mem::replace(&mut self.update_retained, false); let ret = StripItem(self.fold_item_recur(i).unwrap()).strip(); @@ -299,7 +299,7 @@ impl DocFolder for ImportStripper { fn fold_item(&mut self, i: Item) -> Option { match i.inner { clean::ExternCrateItem(..) | clean::ImportItem(..) - if i.visibility != Some(clean::Public) => + if i.visibility != clean::Public => { None } diff --git a/src/librustdoc/passes/strip_hidden.rs b/src/librustdoc/passes/strip_hidden.rs index da8977544f..0159e03f6f 100644 --- a/src/librustdoc/passes/strip_hidden.rs +++ b/src/librustdoc/passes/strip_hidden.rs @@ -39,7 +39,7 @@ struct Stripper<'a> { impl<'a> DocFolder for Stripper<'a> { fn fold_item(&mut self, i: Item) -> Option { if i.attrs.lists(sym::doc).has_word(sym::hidden) { - debug!("strip_hidden: stripping {} {:?}", i.type_(), i.name); + debug!("strip_hidden: stripping {:?} {:?}", i.type_(), i.name); // use a dedicated hidden item for given item type if any match i.inner { clean::StructFieldItem(..) | clean::ModuleItem(..) 
=> { diff --git a/src/librustdoc/test.rs b/src/librustdoc/test.rs index 424239c998..07dc1e4e91 100644 --- a/src/librustdoc/test.rs +++ b/src/librustdoc/test.rs @@ -62,9 +62,12 @@ pub fn run(options: Options) -> i32 { ..config::Options::default() }; + let mut cfgs = options.cfgs.clone(); + cfgs.push("rustdoc".to_owned()); + cfgs.push("doctest".to_owned()); let config = interface::Config { opts: sessopts, - crate_cfg: config::parse_cfgspecs(options.cfgs.clone()), + crate_cfg: interface::parse_cfgspecs(cfgs), input, input_path: None, output_file: None, @@ -74,6 +77,7 @@ pub fn run(options: Options) -> i32 { stderr: None, crate_name: options.crate_name.clone(), lint_caps: Default::default(), + register_lints: None, }; let mut test_args = options.test_args.clone(); @@ -277,6 +281,9 @@ fn run_test( for codegen_options_str in &options.codegen_options_strs { compiler.arg("-C").arg(&codegen_options_str); } + for debugging_option_str in &options.debugging_options_strs { + compiler.arg("-Z").arg(&debugging_option_str); + } if no_run { compiler.arg("--emit=metadata"); } @@ -391,7 +398,7 @@ pub fn make_test(s: &str, // Uses libsyntax to parse the doctest and find if there's a main fn and the extern // crate already is included. let (already_has_main, already_has_extern_crate, found_macro) = with_globals(edition, || { - use crate::syntax::{parse::{self, ParseSess}, source_map::FilePathMapping}; + use crate::syntax::{parse, sess::ParseSess, source_map::FilePathMapping}; use errors::emitter::EmitterWriter; use errors::Handler; @@ -425,7 +432,7 @@ pub fn make_test(s: &str, match parser.parse_item() { Ok(Some(item)) => { if !found_main { - if let ast::ItemKind::Fn(..) = item.node { + if let ast::ItemKind::Fn(..) = item.kind { if item.ident.name == sym::main { found_main = true; } @@ -433,7 +440,7 @@ pub fn make_test(s: &str, } if !found_extern_crate { - if let ast::ItemKind::ExternCrate(original) = item.node { + if let ast::ItemKind::ExternCrate(original) = item.kind { // This code will never be reached if `cratename` is none because // `found_extern_crate` is initialized to `true` if it is none. let cratename = cratename.unwrap(); @@ -446,7 +453,7 @@ pub fn make_test(s: &str, } if !found_macro { - if let ast::ItemKind::Mac(..) = item.node { + if let ast::ItemKind::Mac(..) 
= item.kind { found_macro = true; } } @@ -702,6 +709,7 @@ impl Tester for Collector { // compiler failures are test failures should_panic: testing::ShouldPanic::No, allow_fail: config.allow_fail, + test_type: testing::TestType::DocTest, }, testfn: testing::DynTestFn(box move || { let res = run_test( @@ -882,7 +890,7 @@ impl<'a, 'hir> intravisit::Visitor<'hir> for HirCollector<'a, 'hir> { } fn visit_item(&mut self, item: &'hir hir::Item) { - let name = if let hir::ItemKind::Impl(.., ref ty, _) = item.node { + let name = if let hir::ItemKind::Impl(.., ref ty, _) = item.kind { self.map.hir_to_pretty_string(ty.hir_id) } else { item.ident.to_string() diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs index ee330cb321..5a83569f02 100644 --- a/src/librustdoc/visit_ast.rs +++ b/src/librustdoc/visit_ast.rs @@ -8,9 +8,9 @@ use rustc::middle::privacy::AccessLevel; use rustc::util::nodemap::{FxHashSet, FxHashMap}; use rustc::ty::TyCtxt; use syntax::ast; -use syntax::ext::base::MacroKind; use syntax::source_map::Spanned; use syntax::symbol::sym; +use syntax_pos::hygiene::MacroKind; use syntax_pos::{self, Span}; use std::mem; @@ -320,7 +320,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { if !self.view_item_stack.insert(res_hir_id) { return false } let ret = match tcx.hir().get(res_hir_id) { - Node::Item(&hir::Item { node: hir::ItemKind::Mod(ref m), .. }) if glob => { + Node::Item(&hir::Item { kind: hir::ItemKind::Mod(ref m), .. }) if glob => { let prev = mem::replace(&mut self.inlining, true); for i in &m.item_ids { let i = self.cx.tcx.hir().expect_item(i.id); @@ -361,7 +361,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { self.store_path(def_id); } - match item.node { + match item.kind { hir::ItemKind::ForeignMod(ref fm) => { for item in &fm.items { self.visit_foreign_item(item, None, om); @@ -561,7 +561,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { om.foreigns.push(ForeignItem { id: item.hir_id, name: renamed.unwrap_or(item.ident).name, - kind: &item.node, + kind: &item.kind, vis: &item.vis, attrs: &item.attrs, whence: item.span diff --git a/src/libserialize/collection_impls.rs b/src/libserialize/collection_impls.rs index d981740780..f2e9be14c8 100644 --- a/src/libserialize/collection_impls.rs +++ b/src/libserialize/collection_impls.rs @@ -143,7 +143,7 @@ impl Decodable for BTreeSet } impl Encodable for HashMap - where K: Encodable + Hash + Eq, + where K: Encodable + Eq, V: Encodable, S: BuildHasher, { @@ -180,7 +180,7 @@ impl Decodable for HashMap } impl Encodable for HashSet - where T: Encodable + Hash + Eq, + where T: Encodable + Eq, S: BuildHasher, { fn encode(&self, s: &mut E) -> Result<(), E::Error> { diff --git a/src/libserialize/json.rs b/src/libserialize/json.rs index d0007074a8..d2e360f5e2 100644 --- a/src/libserialize/json.rs +++ b/src/libserialize/json.rs @@ -1053,12 +1053,12 @@ impl Json { /// a value associated with the provided key is found. If no value is found /// or the Json value is not an Object, returns `None`. 
pub fn search(&self, key: &str) -> Option<&Json> { - match self { - &Json::Object(ref map) => { + match *self { + Json::Object(ref map) => { match map.get(key) { Some(json_value) => Some(json_value), None => { - for (_, v) in map { + for v in map.values() { match v.search(key) { x if x.is_some() => return x, _ => () @@ -1487,12 +1487,12 @@ impl> Parser { } fn parse_number(&mut self) -> JsonEvent { - let mut neg = false; - - if self.ch_is('-') { + let neg = if self.ch_is('-') { self.bump(); - neg = true; - } + true + } else { + false + }; let res = match self.parse_u64() { Ok(res) => res, @@ -2162,10 +2162,9 @@ impl crate::Decoder for Decoder { let s = self.read_str()?; { let mut it = s.chars(); - match (it.next(), it.next()) { + if let (Some(c), None) = (it.next(), it.next()) { // exactly one character - (Some(c), None) => return Ok(c), - _ => () + return Ok(c); } } Err(ExpectedError("single character string".to_owned(), s.to_string())) diff --git a/src/libstd/Cargo.toml b/src/libstd/Cargo.toml index af1d2402f8..c55911a33f 100644 --- a/src/libstd/Cargo.toml +++ b/src/libstd/Cargo.toml @@ -23,7 +23,7 @@ libc = { version = "0.2.51", default-features = false, features = ['rustc-dep-of compiler_builtins = { version = "0.1.16" } profiler_builtins = { path = "../libprofiler_builtins", optional = true } unwind = { path = "../libunwind" } -hashbrown = { version = "0.5.0", features = ['rustc-dep-of-std'] } +hashbrown = { version = "0.6.2", default-features = false, features = ['rustc-dep-of-std'] } [dependencies.backtrace_rs] package = "backtrace" @@ -50,12 +50,12 @@ dlmalloc = { version = "0.1", features = ['rustc-dep-of-std'] } [target.x86_64-fortanix-unknown-sgx.dependencies] fortanix-sgx-abi = { version = "0.3.2", features = ['rustc-dep-of-std'] } +[target.'cfg(all(any(target_arch = "x86_64", target_arch = "aarch64"), target_os = "hermit"))'.dependencies] +hermit-abi = { version = "0.1", features = ['rustc-dep-of-std'] } + [target.wasm32-wasi.dependencies] wasi = { version = "0.7.0", features = ['rustc-dep-of-std', 'alloc'] } -[build-dependencies] -cc = "1.0" - [features] default = ["std_detect_file_io", "std_detect_dlsym_getauxval"] diff --git a/src/libstd/backtrace.rs b/src/libstd/backtrace.rs index 61c42a5607..9f400713a8 100644 --- a/src/libstd/backtrace.rs +++ b/src/libstd/backtrace.rs @@ -113,7 +113,7 @@ pub struct Backtrace { /// The current status of a backtrace, indicating whether it was captured or /// whether it is empty for some other reason. #[non_exhaustive] -#[derive(Debug)] +#[derive(Debug, PartialEq, Eq)] pub enum BacktraceStatus { /// Capturing a backtrace is not supported, likely because it's not /// implemented for the current platform. 
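The `libstd/backtrace.rs` hunk above only adds `PartialEq`/`Eq` derives to `BacktraceStatus`. As a hedged illustration (not part of the patch), this is the kind of direct comparison the derive enables, assuming the then-unstable `std::backtrace` API of this period (`Backtrace::capture`, `status`, `BacktraceStatus::Captured`):

```rust
// Sketch only: relies on the then-unstable `backtrace` feature.
#![feature(backtrace)]

use std::backtrace::{Backtrace, BacktraceStatus};

fn main() {
    let bt = Backtrace::capture();
    // With `PartialEq`/`Eq` derived, the status can be compared with `==`
    // instead of having to match on it.
    if bt.status() == BacktraceStatus::Captured {
        println!("{}", bt);
    } else {
        println!("no backtrace captured; try setting RUST_BACKTRACE=1");
    }
}
```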
diff --git a/src/libstd/build.rs b/src/libstd/build.rs index 8db7bc12cd..1f839f1653 100644 --- a/src/libstd/build.rs +++ b/src/libstd/build.rs @@ -54,5 +54,7 @@ fn main() { } println!("cargo:rustc-link-lib=c"); println!("cargo:rustc-link-lib=compiler_rt"); + } else if target.contains("hermit") { + println!("cargo:rustc-link-lib=hermit"); } } diff --git a/src/libstd/collections/hash/map.rs b/src/libstd/collections/hash/map.rs index a0538986a2..de2f12c9f3 100644 --- a/src/libstd/collections/hash/map.rs +++ b/src/libstd/collections/hash/map.rs @@ -192,14 +192,9 @@ use crate::sys; /// ``` /// use std::collections::HashMap; /// -/// fn main() { -/// let timber_resources: HashMap<&str, i32> = -/// [("Norway", 100), -/// ("Denmark", 50), -/// ("Iceland", 10)] -/// .iter().cloned().collect(); -/// // use the values stored in map -/// } +/// let timber_resources: HashMap<&str, i32> = [("Norway", 100), ("Denmark", 50), ("Iceland", 10)] +/// .iter().cloned().collect(); +/// // use the values stored in map /// ``` #[derive(Clone)] @@ -714,7 +709,6 @@ where /// # Examples /// /// ``` - /// #![feature(map_get_key_value)] /// use std::collections::HashMap; /// /// let mut map = HashMap::new(); @@ -722,7 +716,7 @@ where /// assert_eq!(map.get_key_value(&1), Some((&1, &"a"))); /// assert_eq!(map.get_key_value(&2), None); /// ``` - #[unstable(feature = "map_get_key_value", issue = "49347")] + #[stable(feature = "map_get_key_value", since = "1.40.0")] #[inline] pub fn get_key_value(&self, k: &Q) -> Option<(&K, &V)> where @@ -1824,7 +1818,7 @@ impl<'a, K, V> Iterator for Keys<'a, K, V> { type Item = &'a K; #[inline] - fn next(&mut self) -> Option<(&'a K)> { + fn next(&mut self) -> Option<&'a K> { self.inner.next().map(|(k, _)| k) } #[inline] @@ -1847,7 +1841,7 @@ impl<'a, K, V> Iterator for Values<'a, K, V> { type Item = &'a V; #[inline] - fn next(&mut self) -> Option<(&'a V)> { + fn next(&mut self) -> Option<&'a V> { self.inner.next().map(|(_, v)| v) } #[inline] @@ -1870,7 +1864,7 @@ impl<'a, K, V> Iterator for ValuesMut<'a, K, V> { type Item = &'a mut V; #[inline] - fn next(&mut self) -> Option<(&'a mut V)> { + fn next(&mut self) -> Option<&'a mut V> { self.inner.next().map(|(_, v)| v) } #[inline] @@ -2036,6 +2030,31 @@ impl<'a, K, V> Entry<'a, K, V> { Vacant(entry) => Vacant(entry), } } + + /// Sets the value of the entry, and returns an OccupiedEntry. + /// + /// # Examples + /// + /// ``` + /// #![feature(entry_insert)] + /// use std::collections::HashMap; + /// + /// let mut map: HashMap<&str, String> = HashMap::new(); + /// let entry = map.entry("poneyland").insert("hoho".to_string()); + /// + /// assert_eq!(entry.key(), &"poneyland"); + /// ``` + #[inline] + #[unstable(feature = "entry_insert", issue = "65225")] + pub fn insert(self, value: V) -> OccupiedEntry<'a, K, V> { + match self { + Occupied(mut entry) => { + entry.insert(value); + entry + }, + Vacant(entry) => entry.insert_entry(value), + } + } } impl<'a, K, V: Default> Entry<'a, K, V> { @@ -2353,6 +2372,28 @@ impl<'a, K: 'a, V: 'a> VacantEntry<'a, K, V> { pub fn insert(self, value: V) -> &'a mut V { self.base.insert(value) } + + /// Sets the value of the entry with the VacantEntry's key, + /// and returns an OccupiedEntry. 
+ /// + /// # Examples + /// + /// ``` + /// use std::collections::HashMap; + /// use std::collections::hash_map::Entry; + /// + /// let mut map: HashMap<&str, u32> = HashMap::new(); + /// + /// if let Entry::Vacant(o) = map.entry("poneyland") { + /// o.insert(37); + /// } + /// assert_eq!(map["poneyland"], 37); + /// ``` + #[inline] + fn insert_entry(self, value: V) -> OccupiedEntry<'a, K, V> { + let base = self.base.insert_entry(value); + OccupiedEntry { base } + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -2368,6 +2409,8 @@ where } } +/// Inserts all new key-values from the iterator and replaces values with existing +/// keys with new values returned from the iterator. #[stable(feature = "rust1", since = "1.0.0")] impl Extend<(K, V)> for HashMap where diff --git a/src/libstd/collections/hash/set.rs b/src/libstd/collections/hash/set.rs index 26db651ef8..092fb44346 100644 --- a/src/libstd/collections/hash/set.rs +++ b/src/libstd/collections/hash/set.rs @@ -93,11 +93,9 @@ use super::map::{self, HashMap, Keys, RandomState}; /// ``` /// use std::collections::HashSet; /// -/// fn main() { -/// let viking_names: HashSet<&'static str> = -/// [ "Einar", "Olaf", "Harald" ].iter().cloned().collect(); -/// // use the values stored in the set -/// } +/// let viking_names: HashSet<&'static str> = +/// [ "Einar", "Olaf", "Harald" ].iter().cloned().collect(); +/// // use the values stored in the set /// ``` /// /// [`Cell`]: ../../std/cell/struct.Cell.html diff --git a/src/libstd/error.rs b/src/libstd/error.rs index 4a1bb75d58..6b9a35fccc 100644 --- a/src/libstd/error.rs +++ b/src/libstd/error.rs @@ -269,8 +269,8 @@ impl<'a, E: Error + 'a> From for Box { #[stable(feature = "rust1", since = "1.0.0")] impl<'a, E: Error + Send + Sync + 'a> From for Box { - /// Converts a type of [`Error`] + [`trait@Send`] + [`trait@Sync`] into a box of - /// dyn [`Error`] + [`trait@Send`] + [`trait@Sync`]. + /// Converts a type of [`Error`] + [`Send`] + [`Sync`] into a box of + /// dyn [`Error`] + [`Send`] + [`Sync`]. /// /// [`Error`]: ../error/trait.Error.html /// @@ -313,7 +313,7 @@ impl<'a, E: Error + Send + Sync + 'a> From for Box for Box { - /// Converts a [`String`] into a box of dyn [`Error`] + [`trait@Send`] + [`trait@Sync`]. + /// Converts a [`String`] into a box of dyn [`Error`] + [`Send`] + [`Sync`]. /// /// [`Error`]: ../error/trait.Error.html /// @@ -377,7 +377,7 @@ impl From for Box { #[stable(feature = "rust1", since = "1.0.0")] impl<'a> From<&str> for Box { - /// Converts a [`str`] into a box of dyn [`Error`] + [`trait@Send`] + [`trait@Sync`]. + /// Converts a [`str`] into a box of dyn [`Error`] + [`Send`] + [`Sync`]. /// /// [`Error`]: ../error/trait.Error.html /// @@ -420,7 +420,7 @@ impl From<&str> for Box { #[stable(feature = "cow_box_error", since = "1.22.0")] impl<'a, 'b> From> for Box { - /// Converts a [`Cow`] into a box of dyn [`Error`] + [`trait@Send`] + [`trait@Sync`]. + /// Converts a [`Cow`] into a box of dyn [`Error`] + [`Send`] + [`Sync`]. 
/// /// [`Cow`]: ../borrow/enum.Cow.html /// [`Error`]: ../error/trait.Error.html diff --git a/src/libstd/ffi/c_str.rs b/src/libstd/ffi/c_str.rs index d7f4cc5d1f..6dcda98631 100644 --- a/src/libstd/ffi/c_str.rs +++ b/src/libstd/ffi/c_str.rs @@ -327,7 +327,31 @@ impl CString { /// [`NulError`]: struct.NulError.html #[stable(feature = "rust1", since = "1.0.0")] pub fn new>>(t: T) -> Result { - Self::_new(t.into()) + trait SpecIntoVec { + fn into_vec(self) -> Vec; + } + impl>> SpecIntoVec for T { + default fn into_vec(self) -> Vec { + self.into() + } + } + // Specialization for avoiding reallocation. + impl SpecIntoVec for &'_ [u8] { + fn into_vec(self) -> Vec { + let mut v = Vec::with_capacity(self.len() + 1); + v.extend(self); + v + } + } + impl SpecIntoVec for &'_ str { + fn into_vec(self) -> Vec { + let mut v = Vec::with_capacity(self.len() + 1); + v.extend(self.as_bytes()); + v + } + } + + Self::_new(SpecIntoVec::into_vec(t)) } fn _new(bytes: Vec) -> Result { @@ -919,7 +943,7 @@ impl Error for IntoStringError { "C string contained non-utf8 bytes" } - fn cause(&self) -> Option<&dyn Error> { + fn source(&self) -> Option<&(dyn Error + 'static)> { Some(&self.error) } } diff --git a/src/libstd/ffi/mod.rs b/src/libstd/ffi/mod.rs index 69fcfa8b39..28d9906eb9 100644 --- a/src/libstd/ffi/mod.rs +++ b/src/libstd/ffi/mod.rs @@ -163,7 +163,7 @@ pub use self::c_str::{FromBytesWithNulError}; #[stable(feature = "rust1", since = "1.0.0")] pub use self::os_str::{OsString, OsStr}; -#[stable(feature = "raw_os", since = "1.1.0")] +#[stable(feature = "core_c_void", since = "1.30.0")] pub use core::ffi::c_void; #[unstable(feature = "c_variadic", diff --git a/src/libstd/fs.rs b/src/libstd/fs.rs index b5265fe369..30db6a58d4 100644 --- a/src/libstd/fs.rs +++ b/src/libstd/fs.rs @@ -29,7 +29,7 @@ use crate::time::SystemTime; /// /// # Examples /// -/// Creates a new file and write bytes to it: +/// Creates a new file and write bytes to it (you can also use [`write`]): /// /// ```no_run /// use std::fs::File; @@ -42,7 +42,7 @@ use crate::time::SystemTime; /// } /// ``` /// -/// Read the contents of a file into a [`String`]: +/// Read the contents of a file into a [`String`] (you can also use [`read`]): /// /// ```no_run /// use std::fs::File; @@ -89,6 +89,8 @@ use crate::time::SystemTime; /// [`Write`]: ../io/trait.Write.html /// [`BufReader`]: ../io/struct.BufReader.html /// [`sync_all`]: struct.File.html#method.sync_all +/// [`read`]: fn.read.html +/// [`write`]: fn.write.html #[stable(feature = "rust1", since = "1.0.0")] pub struct File { inner: fs_imp::File, @@ -114,6 +116,9 @@ pub struct Metadata(fs_imp::FileAttr); /// information like the entry's path and possibly other metadata can be /// learned. /// +/// The order in which this iterator returns entries is platform and filesystem +/// dependent. +/// /// # Errors /// /// This [`io::Result`] will be an [`Err`] if there's some sort of intermittent @@ -394,6 +399,37 @@ impl File { OpenOptions::new().write(true).create(true).truncate(true).open(path.as_ref()) } + /// Returns a new OpenOptions object. + /// + /// This function returns a new OpenOptions object that you can use to + /// open or create a file with specific options if `open()` or `create()` + /// are not appropriate. + /// + /// It is equivalent to `OpenOptions::new()` but allows you to write more + /// readable code. Instead of `OpenOptions::new().read(true).open("foo.txt")` + /// you can write `File::with_options().read(true).open("foo.txt"). 
This + /// also avoids the need to import `OpenOptions`. + /// + /// See the [`OpenOptions::new`] function for more details. + /// + /// [`OpenOptions::new`]: struct.OpenOptions.html#method.new + /// + /// # Examples + /// + /// ```no_run + /// #![feature(with_options)] + /// use std::fs::File; + /// + /// fn main() -> std::io::Result<()> { + /// let mut f = File::with_options().read(true).open("foo.txt")?; + /// Ok(()) + /// } + /// ``` + #[unstable(feature = "with_options", issue = "65439")] + pub fn with_options() -> OpenOptions { + OpenOptions::new() + } + /// Attempts to sync all OS-internal metadata to disk. /// /// This function will attempt to ensure that all in-memory data reaches the @@ -1087,13 +1123,14 @@ impl Metadata { /// Returns the creation time listed in this metadata. /// - /// The returned value corresponds to the `birthtime` field of `stat` on - /// Unix platforms and the `ftCreationTime` field on Windows platforms. + /// The returned value corresponds to the `btime` field of `statx` on + /// Linux kernel starting from to 4.11, the `birthtime` field of `stat` on other + /// Unix platforms, and the `ftCreationTime` field on Windows platforms. /// /// # Errors /// /// This field may not be available on all platforms, and will return an - /// `Err` on platforms where it is not available. + /// `Err` on platforms or filesystems where it is not available. /// /// # Examples /// @@ -1106,7 +1143,7 @@ impl Metadata { /// if let Ok(time) = metadata.created() { /// println!("{:?}", time); /// } else { - /// println!("Not supported on this platform"); + /// println!("Not supported on this platform or filesystem"); /// } /// Ok(()) /// } @@ -1962,6 +1999,9 @@ pub fn remove_dir_all>(path: P) -> io::Result<()> { /// /// [changes]: ../io/index.html#platform-specific-behavior /// +/// The order in which this iterator returns entries is platform and filesystem +/// dependent. +/// /// # Errors /// /// This function will return an error in the following situations, but is not @@ -1994,6 +2034,25 @@ pub fn remove_dir_all>(path: P) -> io::Result<()> { /// Ok(()) /// } /// ``` +/// +/// ```rust,no_run +/// use std::{fs, io}; +/// +/// fn main() -> io::Result<()> { +/// let mut entries = fs::read_dir(".")? +/// .map(|res| res.map(|e| e.path())) +/// .collect::, io::Error>>()?; +/// +/// // The order in which `read_dir` returns entries is not guaranteed. If reproducible +/// // ordering is required the entries should be explicitly sorted. +/// +/// entries.sort(); +/// +/// // The entries have now been sorted by their path. +/// +/// Ok(()) +/// } +/// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn read_dir>(path: P) -> io::Result { fs_imp::readdir(path.as_ref()).map(ReadDir) @@ -3087,8 +3146,10 @@ mod tests { #[cfg(windows)] let invalid_options = 87; // ERROR_INVALID_PARAMETER - #[cfg(unix)] + #[cfg(all(unix, not(target_os = "vxworks")))] let invalid_options = "Invalid argument"; + #[cfg(target_os = "vxworks")] + let invalid_options = "invalid argument"; // Test various combinations of creation modes and access modes. 
// @@ -3416,5 +3477,18 @@ mod tests { check!(a.created()); check!(b.created()); } + + if cfg!(target_os = "linux") { + // Not always available + match (a.created(), b.created()) { + (Ok(t1), Ok(t2)) => assert!(t1 <= t2), + (Err(e1), Err(e2)) if e1.kind() == ErrorKind::Other && + e2.kind() == ErrorKind::Other => {} + (a, b) => panic!( + "creation time must be always supported or not supported: {:?} {:?}", + a, b, + ), + } + } } } diff --git a/src/libstd/future.rs b/src/libstd/future.rs index 80b2c5f488..c65f71fb1a 100644 --- a/src/libstd/future.rs +++ b/src/libstd/future.rs @@ -26,7 +26,7 @@ pub fn from_generator>(x: T) -> impl Future>(T); // We rely on the fact that async/await futures are immovable in order to create diff --git a/src/libstd/io/stdio.rs b/src/libstd/io/stdio.rs index c798ee0e22..6574ef13db 100644 --- a/src/libstd/io/stdio.rs +++ b/src/libstd/io/stdio.rs @@ -762,7 +762,7 @@ pub fn set_print(sink: Option>) -> Option 12 { +/// break; +/// } +/// last = x; +/// } +/// +/// assert_eq!(last, 12); +/// println!("{}", last); +/// ``` +/// +/// A break expression is normally associated with the innermost loop enclosing the +/// `break` but a label can be used to specify which enclosing loop is affected. +/// +///```rust +/// 'outer: for i in 1..=5 { +/// println!("outer iteration (i): {}", i); +/// +/// 'inner: for j in 1..=200 { +/// println!(" inner iteration (j): {}", j); +/// if j >= 3 { +/// // breaks from inner loop, let's outer loop continue. +/// break; +/// } +/// if i >= 2 { +/// // breaks from outer loop, and directly to "Bye". +/// break 'outer; +/// } +/// } +/// } +/// println!("Bye."); +///``` +/// +/// When associated with `loop`, a break expression may be used to return a value from that loop. +/// This is only valid with `loop` and not with any other type of loop. +/// If no value is specified, `break;` returns `()`. +/// Every `break` within a loop must return the same type. +/// +/// ```rust +/// let (mut a, mut b) = (1, 1); +/// let result = loop { +/// if b > 10 { +/// break b; +/// } +/// let c = a + b; +/// a = b; +/// b = c; +/// }; +/// // first number in Fibonacci sequence over 10: +/// assert_eq!(result, 13); +/// println!("{}", result); +/// ``` +/// +/// For more details consult the [Reference on "break expression"] and the [Reference on "break and +/// loop values"]. +/// +/// [Reference on "break expression"]: ../reference/expressions/loop-expr.html#break-expressions +/// [Reference on "break and loop values"]: +/// ../reference/expressions/loop-expr.html#break-and-loop-values /// -/// [not yet complete]: https://github.com/rust-lang/rust/issues/34601 mod break_keyword { } #[doc(keyword = "const")] @@ -96,9 +159,40 @@ mod const_keyword { } // /// Skip to the next iteration of a loop. /// -/// The documentation for this keyword is [not yet complete]. Pull requests welcome! +/// When `continue` is encountered, the current iteration is terminated, returning control to the +/// loop head, typically continuing with the next iteration. /// -/// [not yet complete]: https://github.com/rust-lang/rust/issues/34601 +///```rust +/// // Printing odd numbers by skipping even ones +/// for number in 1..=10 { +/// if number % 2 == 0 { +/// continue; +/// } +/// println!("{}", number); +/// } +///``` +/// +/// Like `break`, `continue` is normally associated with the innermost enclosing loop, but labels +/// may be used to specify the affected loop. 
+/// +///```rust +/// // Print Odd numbers under 30 with unit <= 5 +/// 'tens: for ten in 0..3 { +/// 'units: for unit in 0..=9 { +/// if unit % 2 == 0 { +/// continue; +/// } +/// if unit > 5 { +/// continue 'tens; +/// } +/// println!("{}", ten * 10 + unit); +/// } +/// } +///``` +/// +/// See [continue expressions] from the reference for more details. +/// +/// [continue expressions]: ../reference/expressions/loop-expr.html#continue-expressions mod continue_keyword { } #[doc(keyword = "crate")] diff --git a/src/libstd/lib.rs b/src/libstd/lib.rs index 21aeb9c26f..c7adad896a 100644 --- a/src/libstd/lib.rs +++ b/src/libstd/lib.rs @@ -220,7 +220,7 @@ #![cfg_attr(test, feature(print_internals, set_stdio, update_panic_count))] #![cfg_attr(all(target_vendor = "fortanix", target_env = "sgx"), - feature(slice_index_methods, decl_macro, coerce_unsized, + feature(slice_index_methods, coerce_unsized, sgx_platform, ptr_wrapping_offset_from))] #![cfg_attr(all(test, target_vendor = "fortanix", target_env = "sgx"), feature(fixed_size_array, maybe_uninit_extra))] @@ -238,7 +238,6 @@ #![feature(array_error_internals)] #![feature(asm)] #![feature(associated_type_bounds)] -#![cfg_attr(bootstrap, feature(bind_by_move_pattern_guards))] #![feature(box_syntax)] #![feature(c_variadic)] #![feature(cfg_target_has_atomic)] @@ -252,6 +251,7 @@ #![feature(container_error_extra)] #![feature(core_intrinsics)] #![feature(custom_test_frameworks)] +#![feature(decl_macro)] #![feature(doc_alias)] #![feature(doc_cfg)] #![feature(doc_keyword)] @@ -276,13 +276,14 @@ #![feature(link_args)] #![feature(linkage)] #![feature(log_syntax)] +#![feature(manually_drop_take)] +#![feature(matches_macro)] #![feature(maybe_uninit_ref)] #![feature(maybe_uninit_slice)] -#![feature(mem_take)] #![feature(needs_panic_runtime)] #![feature(never_type)] #![feature(nll)] -#![feature(non_exhaustive)] +#![cfg_attr(bootstrap, feature(non_exhaustive))] #![feature(on_unimplemented)] #![feature(optin_builtin_traits)] #![feature(panic_info_message)] @@ -299,6 +300,7 @@ #![feature(slice_concat_ext)] #![feature(slice_internals)] #![feature(slice_patterns)] +#![feature(specialization)] #![feature(staged_api)] #![feature(std_internals)] #![feature(stdsimd)] @@ -306,7 +308,6 @@ #![feature(str_internals)] #![feature(test)] #![feature(thread_local)] -#![feature(todo_macro)] #![feature(toowned_clone_into)] #![feature(trace_macros)] #![feature(try_reserve)] @@ -528,6 +529,7 @@ pub use core::{ writeln, // Unstable todo, + matches, }; // Re-export built-in macros defined through libcore. 
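The `libstd/lib.rs` hunk above turns on `feature(matches_macro)` and re-exports the new `matches!` macro from libcore. As a rough, hedged sketch of what that macro does (not taken from the patch; the feature gate reflects its unstable status at the time):

```rust
// Sketch only: `matches!` evaluates a pattern match to a `bool`.
#![feature(matches_macro)]

fn main() {
    let c = 'f';
    // true: 'f' matches the range pattern 'a'..='z'
    assert!(matches!(c, 'a'..='z'));
    // false: `None` does not match `Some(_)`
    assert!(!matches!(None::<i32>, Some(_)));
}
```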
diff --git a/src/libstd/net/addr.rs b/src/libstd/net/addr.rs index ca86a17505..f9255b82fc 100644 --- a/src/libstd/net/addr.rs +++ b/src/libstd/net/addr.rs @@ -217,11 +217,9 @@ impl SocketAddr { /// ``` /// use std::net::{IpAddr, Ipv4Addr, SocketAddr}; /// - /// fn main() { - /// let socket = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 8080); - /// assert_eq!(socket.is_ipv4(), true); - /// assert_eq!(socket.is_ipv6(), false); - /// } + /// let socket = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 8080); + /// assert_eq!(socket.is_ipv4(), true); + /// assert_eq!(socket.is_ipv6(), false); /// ``` #[stable(feature = "sockaddr_checker", since = "1.16.0")] pub fn is_ipv4(&self) -> bool { @@ -244,12 +242,9 @@ impl SocketAddr { /// ``` /// use std::net::{IpAddr, Ipv6Addr, SocketAddr}; /// - /// fn main() { - /// let socket = SocketAddr::new( - /// IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 0, 1)), 8080); - /// assert_eq!(socket.is_ipv4(), false); - /// assert_eq!(socket.is_ipv6(), true); - /// } + /// let socket = SocketAddr::new(IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 0, 1)), 8080); + /// assert_eq!(socket.is_ipv4(), false); + /// assert_eq!(socket.is_ipv6(), true); /// ``` #[stable(feature = "sockaddr_checker", since = "1.16.0")] pub fn is_ipv6(&self) -> bool { diff --git a/src/libstd/net/ip.rs b/src/libstd/net/ip.rs index 6b504056e5..70b68d1348 100644 --- a/src/libstd/net/ip.rs +++ b/src/libstd/net/ip.rs @@ -197,11 +197,8 @@ impl IpAddr { /// /// use std::net::{IpAddr, Ipv4Addr, Ipv6Addr}; /// - /// fn main() { - /// assert_eq!(IpAddr::V4(Ipv4Addr::new(80, 9, 12, 3)).is_global(), true); - /// assert_eq!(IpAddr::V6(Ipv6Addr::new(0, 0, 0x1c9, 0, 0, 0xafc8, 0, 0x1)).is_global(), - /// true); - /// } + /// assert_eq!(IpAddr::V4(Ipv4Addr::new(80, 9, 12, 3)).is_global(), true); + /// assert_eq!(IpAddr::V6(Ipv6Addr::new(0, 0, 0x1c9, 0, 0, 0xafc8, 0, 0x1)).is_global(), true); /// ``` pub fn is_global(&self) -> bool { match self { @@ -251,11 +248,11 @@ impl IpAddr { /// /// use std::net::{IpAddr, Ipv4Addr, Ipv6Addr}; /// - /// fn main() { - /// assert_eq!(IpAddr::V4(Ipv4Addr::new(203, 0, 113, 6)).is_documentation(), true); - /// assert_eq!(IpAddr::V6(Ipv6Addr::new(0x2001, 0xdb8, 0, 0, 0, 0, 0, 0)) - /// .is_documentation(), true); - /// } + /// assert_eq!(IpAddr::V4(Ipv4Addr::new(203, 0, 113, 6)).is_documentation(), true); + /// assert_eq!( + /// IpAddr::V6(Ipv6Addr::new(0x2001, 0xdb8, 0, 0, 0, 0, 0, 0)).is_documentation(), + /// true + /// ); /// ``` pub fn is_documentation(&self) -> bool { match self { @@ -275,11 +272,8 @@ impl IpAddr { /// ``` /// use std::net::{IpAddr, Ipv4Addr, Ipv6Addr}; /// - /// fn main() { - /// assert_eq!(IpAddr::V4(Ipv4Addr::new(203, 0, 113, 6)).is_ipv4(), true); - /// assert_eq!(IpAddr::V6(Ipv6Addr::new(0x2001, 0xdb8, 0, 0, 0, 0, 0, 0)).is_ipv4(), - /// false); - /// } + /// assert_eq!(IpAddr::V4(Ipv4Addr::new(203, 0, 113, 6)).is_ipv4(), true); + /// assert_eq!(IpAddr::V6(Ipv6Addr::new(0x2001, 0xdb8, 0, 0, 0, 0, 0, 0)).is_ipv4(), false); /// ``` #[stable(feature = "ipaddr_checker", since = "1.16.0")] pub fn is_ipv4(&self) -> bool { @@ -300,11 +294,8 @@ impl IpAddr { /// ``` /// use std::net::{IpAddr, Ipv4Addr, Ipv6Addr}; /// - /// fn main() { - /// assert_eq!(IpAddr::V4(Ipv4Addr::new(203, 0, 113, 6)).is_ipv6(), false); - /// assert_eq!(IpAddr::V6(Ipv6Addr::new(0x2001, 0xdb8, 0, 0, 0, 0, 0, 0)).is_ipv6(), - /// true); - /// } + /// assert_eq!(IpAddr::V4(Ipv4Addr::new(203, 0, 113, 6)).is_ipv6(), false); + /// 
assert_eq!(IpAddr::V6(Ipv6Addr::new(0x2001, 0xdb8, 0, 0, 0, 0, 0, 0)).is_ipv6(), true); /// ``` #[stable(feature = "ipaddr_checker", since = "1.16.0")] pub fn is_ipv6(&self) -> bool { @@ -526,48 +517,46 @@ impl Ipv4Addr { /// /// use std::net::Ipv4Addr; /// - /// fn main() { - /// // private addresses are not global - /// assert_eq!(Ipv4Addr::new(10, 254, 0, 0).is_global(), false); - /// assert_eq!(Ipv4Addr::new(192, 168, 10, 65).is_global(), false); - /// assert_eq!(Ipv4Addr::new(172, 16, 10, 65).is_global(), false); + /// // private addresses are not global + /// assert_eq!(Ipv4Addr::new(10, 254, 0, 0).is_global(), false); + /// assert_eq!(Ipv4Addr::new(192, 168, 10, 65).is_global(), false); + /// assert_eq!(Ipv4Addr::new(172, 16, 10, 65).is_global(), false); /// - /// // the 0.0.0.0/8 block is not global - /// assert_eq!(Ipv4Addr::new(0, 1, 2, 3).is_global(), false); - /// // in particular, the unspecified address is not global - /// assert_eq!(Ipv4Addr::new(0, 0, 0, 0).is_global(), false); + /// // the 0.0.0.0/8 block is not global + /// assert_eq!(Ipv4Addr::new(0, 1, 2, 3).is_global(), false); + /// // in particular, the unspecified address is not global + /// assert_eq!(Ipv4Addr::new(0, 0, 0, 0).is_global(), false); /// - /// // the loopback address is not global - /// assert_eq!(Ipv4Addr::new(127, 0, 0, 1).is_global(), false); + /// // the loopback address is not global + /// assert_eq!(Ipv4Addr::new(127, 0, 0, 1).is_global(), false); /// - /// // link local addresses are not global - /// assert_eq!(Ipv4Addr::new(169, 254, 45, 1).is_global(), false); + /// // link local addresses are not global + /// assert_eq!(Ipv4Addr::new(169, 254, 45, 1).is_global(), false); /// - /// // the broadcast address is not global - /// assert_eq!(Ipv4Addr::new(255, 255, 255, 255).is_global(), false); + /// // the broadcast address is not global + /// assert_eq!(Ipv4Addr::new(255, 255, 255, 255).is_global(), false); /// - /// // the broadcast address is not global - /// assert_eq!(Ipv4Addr::new(192, 0, 2, 255).is_global(), false); - /// assert_eq!(Ipv4Addr::new(198, 51, 100, 65).is_global(), false); - /// assert_eq!(Ipv4Addr::new(203, 0, 113, 6).is_global(), false); + /// // the broadcast address is not global + /// assert_eq!(Ipv4Addr::new(192, 0, 2, 255).is_global(), false); + /// assert_eq!(Ipv4Addr::new(198, 51, 100, 65).is_global(), false); + /// assert_eq!(Ipv4Addr::new(203, 0, 113, 6).is_global(), false); /// - /// // shared addresses are not global - /// assert_eq!(Ipv4Addr::new(100, 100, 0, 0).is_global(), false); + /// // shared addresses are not global + /// assert_eq!(Ipv4Addr::new(100, 100, 0, 0).is_global(), false); /// - /// // addresses reserved for protocol assignment are not global - /// assert_eq!(Ipv4Addr::new(192, 0, 0, 0).is_global(), false); - /// assert_eq!(Ipv4Addr::new(192, 0, 0, 255).is_global(), false); + /// // addresses reserved for protocol assignment are not global + /// assert_eq!(Ipv4Addr::new(192, 0, 0, 0).is_global(), false); + /// assert_eq!(Ipv4Addr::new(192, 0, 0, 255).is_global(), false); /// - /// // addresses reserved for future use are not global - /// assert_eq!(Ipv4Addr::new(250, 10, 20, 30).is_global(), false); + /// // addresses reserved for future use are not global + /// assert_eq!(Ipv4Addr::new(250, 10, 20, 30).is_global(), false); /// - /// // addresses reserved for network devices benchmarking are not global - /// assert_eq!(Ipv4Addr::new(198, 18, 0, 0).is_global(), false); + /// // addresses reserved for network devices benchmarking are not global + 
/// assert_eq!(Ipv4Addr::new(198, 18, 0, 0).is_global(), false); /// - /// // All the other addresses are global - /// assert_eq!(Ipv4Addr::new(1, 1, 1, 1).is_global(), true); - /// assert_eq!(Ipv4Addr::new(80, 9, 12, 3).is_global(), true); - /// } + /// // All the other addresses are global + /// assert_eq!(Ipv4Addr::new(1, 1, 1, 1).is_global(), true); + /// assert_eq!(Ipv4Addr::new(80, 9, 12, 3).is_global(), true); /// ``` pub fn is_global(&self) -> bool { // check if this address is 192.0.0.9 or 192.0.0.10. These addresses are the only two @@ -600,11 +589,9 @@ impl Ipv4Addr { /// #![feature(ip)] /// use std::net::Ipv4Addr; /// - /// fn main() { - /// assert_eq!(Ipv4Addr::new(100, 64, 0, 0).is_shared(), true); - /// assert_eq!(Ipv4Addr::new(100, 127, 255, 255).is_shared(), true); - /// assert_eq!(Ipv4Addr::new(100, 128, 0, 0).is_shared(), false); - /// } + /// assert_eq!(Ipv4Addr::new(100, 64, 0, 0).is_shared(), true); + /// assert_eq!(Ipv4Addr::new(100, 127, 255, 255).is_shared(), true); + /// assert_eq!(Ipv4Addr::new(100, 128, 0, 0).is_shared(), false); /// ``` pub fn is_shared(&self) -> bool { self.octets()[0] == 100 && (self.octets()[1] & 0b1100_0000 == 0b0100_0000) @@ -631,14 +618,12 @@ impl Ipv4Addr { /// #![feature(ip)] /// use std::net::Ipv4Addr; /// - /// fn main() { - /// assert_eq!(Ipv4Addr::new(192, 0, 0, 0).is_ietf_protocol_assignment(), true); - /// assert_eq!(Ipv4Addr::new(192, 0, 0, 8).is_ietf_protocol_assignment(), true); - /// assert_eq!(Ipv4Addr::new(192, 0, 0, 9).is_ietf_protocol_assignment(), true); - /// assert_eq!(Ipv4Addr::new(192, 0, 0, 255).is_ietf_protocol_assignment(), true); - /// assert_eq!(Ipv4Addr::new(192, 0, 1, 0).is_ietf_protocol_assignment(), false); - /// assert_eq!(Ipv4Addr::new(191, 255, 255, 255).is_ietf_protocol_assignment(), false); - /// } + /// assert_eq!(Ipv4Addr::new(192, 0, 0, 0).is_ietf_protocol_assignment(), true); + /// assert_eq!(Ipv4Addr::new(192, 0, 0, 8).is_ietf_protocol_assignment(), true); + /// assert_eq!(Ipv4Addr::new(192, 0, 0, 9).is_ietf_protocol_assignment(), true); + /// assert_eq!(Ipv4Addr::new(192, 0, 0, 255).is_ietf_protocol_assignment(), true); + /// assert_eq!(Ipv4Addr::new(192, 0, 1, 0).is_ietf_protocol_assignment(), false); + /// assert_eq!(Ipv4Addr::new(191, 255, 255, 255).is_ietf_protocol_assignment(), false); /// ``` pub fn is_ietf_protocol_assignment(&self) -> bool { self.octets()[0] == 192 && self.octets()[1] == 0 && self.octets()[2] == 0 @@ -658,12 +643,10 @@ impl Ipv4Addr { /// #![feature(ip)] /// use std::net::Ipv4Addr; /// - /// fn main() { - /// assert_eq!(Ipv4Addr::new(198, 17, 255, 255).is_benchmarking(), false); - /// assert_eq!(Ipv4Addr::new(198, 18, 0, 0).is_benchmarking(), true); - /// assert_eq!(Ipv4Addr::new(198, 19, 255, 255).is_benchmarking(), true); - /// assert_eq!(Ipv4Addr::new(198, 20, 0, 0).is_benchmarking(), false); - /// } + /// assert_eq!(Ipv4Addr::new(198, 17, 255, 255).is_benchmarking(), false); + /// assert_eq!(Ipv4Addr::new(198, 18, 0, 0).is_benchmarking(), true); + /// assert_eq!(Ipv4Addr::new(198, 19, 255, 255).is_benchmarking(), true); + /// assert_eq!(Ipv4Addr::new(198, 20, 0, 0).is_benchmarking(), false); /// ``` pub fn is_benchmarking(&self) -> bool { self.octets()[0] == 198 && (self.octets()[1] & 0xfe) == 18 @@ -690,15 +673,12 @@ impl Ipv4Addr { /// #![feature(ip)] /// use std::net::Ipv4Addr; /// - /// fn main() { - /// assert_eq!(Ipv4Addr::new(240, 0, 0, 0).is_reserved(), true); - /// assert_eq!(Ipv4Addr::new(255, 255, 255, 254).is_reserved(), true); + /// 
assert_eq!(Ipv4Addr::new(240, 0, 0, 0).is_reserved(), true); + /// assert_eq!(Ipv4Addr::new(255, 255, 255, 254).is_reserved(), true); /// - /// assert_eq!(Ipv4Addr::new(239, 255, 255, 255).is_reserved(), false); - /// // The broadcast address is not considered as reserved for future use by this - /// // implementation - /// assert_eq!(Ipv4Addr::new(255, 255, 255, 255).is_reserved(), false); - /// } + /// assert_eq!(Ipv4Addr::new(239, 255, 255, 255).is_reserved(), false); + /// // The broadcast address is not considered as reserved for future use by this implementation + /// assert_eq!(Ipv4Addr::new(255, 255, 255, 255).is_reserved(), false); /// ``` pub fn is_reserved(&self) -> bool { self.octets()[0] & 240 == 240 && !self.is_broadcast() @@ -788,8 +768,10 @@ impl Ipv4Addr { /// ``` /// use std::net::{Ipv4Addr, Ipv6Addr}; /// - /// assert_eq!(Ipv4Addr::new(192, 0, 2, 255).to_ipv6_compatible(), - /// Ipv6Addr::new(0, 0, 0, 0, 0, 0, 49152, 767)); + /// assert_eq!( + /// Ipv4Addr::new(192, 0, 2, 255).to_ipv6_compatible(), + /// Ipv6Addr::new(0, 0, 0, 0, 0, 0, 49152, 767) + /// ); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn to_ipv6_compatible(&self) -> Ipv6Addr { @@ -1161,11 +1143,9 @@ impl Ipv6Addr { /// /// use std::net::Ipv6Addr; /// - /// fn main() { - /// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff).is_global(), true); - /// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0x1).is_global(), false); - /// assert_eq!(Ipv6Addr::new(0, 0, 0x1c9, 0, 0, 0xafc8, 0, 0x1).is_global(), true); - /// } + /// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff).is_global(), true); + /// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0x1).is_global(), false); + /// assert_eq!(Ipv6Addr::new(0, 0, 0x1c9, 0, 0, 0xafc8, 0, 0x1).is_global(), true); /// ``` pub fn is_global(&self) -> bool { match self.multicast_scope() { @@ -1189,11 +1169,8 @@ impl Ipv6Addr { /// /// use std::net::Ipv6Addr; /// - /// fn main() { - /// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff).is_unique_local(), - /// false); - /// assert_eq!(Ipv6Addr::new(0xfc02, 0, 0, 0, 0, 0, 0, 0).is_unique_local(), true); - /// } + /// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff).is_unique_local(), false); + /// assert_eq!(Ipv6Addr::new(0xfc02, 0, 0, 0, 0, 0, 0, 0).is_unique_local(), true); /// ``` pub fn is_unique_local(&self) -> bool { (self.segments()[0] & 0xfe00) == 0xfc00 @@ -1223,21 +1200,19 @@ impl Ipv6Addr { /// /// use std::net::Ipv6Addr; /// - /// fn main() { - /// let ip = Ipv6Addr::new(0xfe80, 0, 0, 0, 0, 0, 0, 0); - /// assert!(ip.is_unicast_link_local_strict()); + /// let ip = Ipv6Addr::new(0xfe80, 0, 0, 0, 0, 0, 0, 0); + /// assert!(ip.is_unicast_link_local_strict()); /// - /// let ip = Ipv6Addr::new(0xfe80, 0, 0, 0, 0xffff, 0xffff, 0xffff, 0xffff); - /// assert!(ip.is_unicast_link_local_strict()); + /// let ip = Ipv6Addr::new(0xfe80, 0, 0, 0, 0xffff, 0xffff, 0xffff, 0xffff); + /// assert!(ip.is_unicast_link_local_strict()); /// - /// let ip = Ipv6Addr::new(0xfe80, 0, 0, 1, 0, 0, 0, 0); - /// assert!(!ip.is_unicast_link_local_strict()); - /// assert!(ip.is_unicast_link_local()); + /// let ip = Ipv6Addr::new(0xfe80, 0, 0, 1, 0, 0, 0, 0); + /// assert!(!ip.is_unicast_link_local_strict()); + /// assert!(ip.is_unicast_link_local()); /// - /// let ip = Ipv6Addr::new(0xfe81, 0, 0, 0, 0, 0, 0, 0); - /// assert!(!ip.is_unicast_link_local_strict()); - /// assert!(ip.is_unicast_link_local()); - /// } + /// let ip = Ipv6Addr::new(0xfe81, 0, 0, 0, 0, 0, 0, 0); + /// 
assert!(!ip.is_unicast_link_local_strict()); + /// assert!(ip.is_unicast_link_local()); /// ``` /// /// # See also @@ -1284,21 +1259,19 @@ impl Ipv6Addr { /// /// use std::net::Ipv6Addr; /// - /// fn main() { - /// let ip = Ipv6Addr::new(0xfe80, 0, 0, 0, 0, 0, 0, 0); - /// assert!(ip.is_unicast_link_local()); + /// let ip = Ipv6Addr::new(0xfe80, 0, 0, 0, 0, 0, 0, 0); + /// assert!(ip.is_unicast_link_local()); /// - /// let ip = Ipv6Addr::new(0xfe80, 0, 0, 0, 0xffff, 0xffff, 0xffff, 0xffff); - /// assert!(ip.is_unicast_link_local()); + /// let ip = Ipv6Addr::new(0xfe80, 0, 0, 0, 0xffff, 0xffff, 0xffff, 0xffff); + /// assert!(ip.is_unicast_link_local()); /// - /// let ip = Ipv6Addr::new(0xfe80, 0, 0, 1, 0, 0, 0, 0); - /// assert!(ip.is_unicast_link_local()); - /// assert!(!ip.is_unicast_link_local_strict()); + /// let ip = Ipv6Addr::new(0xfe80, 0, 0, 1, 0, 0, 0, 0); + /// assert!(ip.is_unicast_link_local()); + /// assert!(!ip.is_unicast_link_local_strict()); /// - /// let ip = Ipv6Addr::new(0xfe81, 0, 0, 0, 0, 0, 0, 0); - /// assert!(ip.is_unicast_link_local()); - /// assert!(!ip.is_unicast_link_local_strict()); - /// } + /// let ip = Ipv6Addr::new(0xfe81, 0, 0, 0, 0, 0, 0, 0); + /// assert!(ip.is_unicast_link_local()); + /// assert!(!ip.is_unicast_link_local_strict()); /// ``` /// /// # See also @@ -1336,11 +1309,11 @@ impl Ipv6Addr { /// /// use std::net::Ipv6Addr; /// - /// fn main() { - /// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff).is_unicast_site_local(), - /// false); - /// assert_eq!(Ipv6Addr::new(0xfec2, 0, 0, 0, 0, 0, 0, 0).is_unicast_site_local(), true); - /// } + /// assert_eq!( + /// Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff).is_unicast_site_local(), + /// false + /// ); + /// assert_eq!(Ipv6Addr::new(0xfec2, 0, 0, 0, 0, 0, 0, 0).is_unicast_site_local(), true); /// ``` /// /// # Warning @@ -1369,11 +1342,8 @@ impl Ipv6Addr { /// /// use std::net::Ipv6Addr; /// - /// fn main() { - /// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff).is_documentation(), - /// false); - /// assert_eq!(Ipv6Addr::new(0x2001, 0xdb8, 0, 0, 0, 0, 0, 0).is_documentation(), true); - /// } + /// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff).is_documentation(), false); + /// assert_eq!(Ipv6Addr::new(0x2001, 0xdb8, 0, 0, 0, 0, 0, 0).is_documentation(), true); /// ``` pub fn is_documentation(&self) -> bool { (self.segments()[0] == 0x2001) && (self.segments()[1] == 0xdb8) @@ -1407,11 +1377,8 @@ impl Ipv6Addr { /// /// use std::net::Ipv6Addr; /// - /// fn main() { - /// assert_eq!(Ipv6Addr::new(0x2001, 0xdb8, 0, 0, 0, 0, 0, 0).is_unicast_global(), false); - /// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff).is_unicast_global(), - /// true); - /// } + /// assert_eq!(Ipv6Addr::new(0x2001, 0xdb8, 0, 0, 0, 0, 0, 0).is_unicast_global(), false); + /// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff).is_unicast_global(), true); /// ``` pub fn is_unicast_global(&self) -> bool { !self.is_multicast() @@ -1431,11 +1398,11 @@ impl Ipv6Addr { /// /// use std::net::{Ipv6Addr, Ipv6MulticastScope}; /// - /// fn main() { - /// assert_eq!(Ipv6Addr::new(0xff0e, 0, 0, 0, 0, 0, 0, 0).multicast_scope(), - /// Some(Ipv6MulticastScope::Global)); - /// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x2ff).multicast_scope(), None); - /// } + /// assert_eq!( + /// Ipv6Addr::new(0xff0e, 0, 0, 0, 0, 0, 0, 0).multicast_scope(), + /// Some(Ipv6MulticastScope::Global) + /// ); + /// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 
0x2ff).multicast_scope(), None); /// ``` pub fn multicast_scope(&self) -> Option { if self.is_multicast() { diff --git a/src/libstd/net/udp.rs b/src/libstd/net/udp.rs index a5e7cd992f..a9e4457f42 100644 --- a/src/libstd/net/udp.rs +++ b/src/libstd/net/udp.rs @@ -185,7 +185,6 @@ impl UdpSocket { /// # Examples /// /// ```no_run - /// #![feature(udp_peer_addr)] /// use std::net::{Ipv4Addr, SocketAddr, SocketAddrV4, UdpSocket}; /// /// let socket = UdpSocket::bind("127.0.0.1:34254").expect("couldn't bind to address"); @@ -199,14 +198,13 @@ impl UdpSocket { /// [`NotConnected`]: ../../std/io/enum.ErrorKind.html#variant.NotConnected /// /// ```no_run - /// #![feature(udp_peer_addr)] /// use std::net::UdpSocket; /// /// let socket = UdpSocket::bind("127.0.0.1:34254").expect("couldn't bind to address"); /// assert_eq!(socket.peer_addr().unwrap_err().kind(), - /// ::std::io::ErrorKind::NotConnected); + /// std::io::ErrorKind::NotConnected); /// ``` - #[unstable(feature = "udp_peer_addr", issue = "59127")] + #[stable(feature = "udp_peer_addr", since = "1.40.0")] pub fn peer_addr(&self) -> io::Result { self.0.peer_addr() } diff --git a/src/libstd/os/hermit/fs.rs b/src/libstd/os/hermit/fs.rs deleted file mode 100644 index eb28a839ba..0000000000 --- a/src/libstd/os/hermit/fs.rs +++ /dev/null @@ -1,377 +0,0 @@ -#![stable(feature = "metadata_ext", since = "1.1.0")] - -use crate::fs::Metadata; -use crate::sys_common::AsInner; - -#[allow(deprecated)] -use crate::os::hermit::raw; - -/// OS-specific extensions to [`fs::Metadata`]. -/// -/// [`fs::Metadata`]: ../../../../std/fs/struct.Metadata.html -#[stable(feature = "metadata_ext", since = "1.1.0")] -pub trait MetadataExt { - /// Gain a reference to the underlying `stat` structure which contains - /// the raw information returned by the OS. - /// - /// The contents of the returned [`stat`] are **not** consistent across - /// Unix platforms. The `os::unix::fs::MetadataExt` trait contains the - /// cross-Unix abstractions contained within the raw stat. - /// - /// [`stat`]: ../../../../std/os/linux/raw/struct.stat.html - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::io; - /// use std::os::linux::fs::MetadataExt; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// let stat = meta.as_raw_stat(); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext", since = "1.1.0")] - #[rustc_deprecated(since = "1.8.0", - reason = "deprecated in favor of the accessor \ - methods of this trait")] - #[allow(deprecated)] - fn as_raw_stat(&self) -> &raw::stat; - - /// Returns the device ID on which this file resides. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::io; - /// use std::os::linux::fs::MetadataExt; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// println!("{}", meta.st_dev()); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext2", since = "1.8.0")] - fn st_dev(&self) -> u64; - /// Returns the inode number. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::io; - /// use std::os::linux::fs::MetadataExt; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// println!("{}", meta.st_ino()); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext2", since = "1.8.0")] - fn st_ino(&self) -> u64; - /// Returns the file type and mode. 
- /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::io; - /// use std::os::linux::fs::MetadataExt; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// println!("{}", meta.st_mode()); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext2", since = "1.8.0")] - fn st_mode(&self) -> u32; - /// Returns the number of hard links to file. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::io; - /// use std::os::linux::fs::MetadataExt; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// println!("{}", meta.st_nlink()); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext2", since = "1.8.0")] - fn st_nlink(&self) -> u64; - /// Returns the user ID of the file owner. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::io; - /// use std::os::linux::fs::MetadataExt; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// println!("{}", meta.st_uid()); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext2", since = "1.8.0")] - fn st_uid(&self) -> u32; - /// Returns the group ID of the file owner. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::io; - /// use std::os::linux::fs::MetadataExt; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// println!("{}", meta.st_gid()); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext2", since = "1.8.0")] - fn st_gid(&self) -> u32; - /// Returns the device ID that this file represents. Only relevant for special file. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::io; - /// use std::os::linux::fs::MetadataExt; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// println!("{}", meta.st_rdev()); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext2", since = "1.8.0")] - fn st_rdev(&self) -> u64; - /// Returns the size of the file (if it is a regular file or a symbolic link) in bytes. - /// - /// The size of a symbolic link is the length of the pathname it contains, - /// without a terminating null byte. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::io; - /// use std::os::linux::fs::MetadataExt; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// println!("{}", meta.st_size()); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext2", since = "1.8.0")] - fn st_size(&self) -> u64; - /// Returns the last access time. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::io; - /// use std::os::linux::fs::MetadataExt; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// println!("{}", meta.st_atime()); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext2", since = "1.8.0")] - fn st_atime(&self) -> i64; - /// Returns the last access time, nano seconds part. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::io; - /// use std::os::linux::fs::MetadataExt; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// println!("{}", meta.st_atime_nsec()); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext2", since = "1.8.0")] - fn st_atime_nsec(&self) -> i64; - /// Returns the last modification time. 
- /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::io; - /// use std::os::linux::fs::MetadataExt; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// println!("{}", meta.st_mtime()); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext2", since = "1.8.0")] - fn st_mtime(&self) -> i64; - /// Returns the last modification time, nano seconds part. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::io; - /// use std::os::linux::fs::MetadataExt; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// println!("{}", meta.st_mtime_nsec()); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext2", since = "1.8.0")] - fn st_mtime_nsec(&self) -> i64; - /// Returns the last status change time. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::io; - /// use std::os::linux::fs::MetadataExt; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// println!("{}", meta.st_ctime()); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext2", since = "1.8.0")] - fn st_ctime(&self) -> i64; - /// Returns the last status change time, nano seconds part. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::io; - /// use std::os::linux::fs::MetadataExt; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// println!("{}", meta.st_ctime_nsec()); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext2", since = "1.8.0")] - fn st_ctime_nsec(&self) -> i64; - /// Returns the "preferred" blocksize for efficient filesystem I/O. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::io; - /// use std::os::linux::fs::MetadataExt; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// println!("{}", meta.st_blksize()); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext2", since = "1.8.0")] - fn st_blksize(&self) -> u64; - /// Returns the number of blocks allocated to the file, 512-byte units. 
- /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::io; - /// use std::os::linux::fs::MetadataExt; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// println!("{}", meta.st_blocks()); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext2", since = "1.8.0")] - fn st_blocks(&self) -> u64; -} - -#[stable(feature = "metadata_ext", since = "1.1.0")] -impl MetadataExt for Metadata { - #[allow(deprecated)] - fn as_raw_stat(&self) -> &raw::stat { - unsafe { - &*(self.as_inner().as_inner() as *const libc::stat64 - as *const raw::stat) - } - } - fn st_dev(&self) -> u64 { - self.as_inner().as_inner().st_dev as u64 - } - fn st_ino(&self) -> u64 { - self.as_inner().as_inner().st_ino as u64 - } - fn st_mode(&self) -> u32 { - self.as_inner().as_inner().st_mode as u32 - } - fn st_nlink(&self) -> u64 { - self.as_inner().as_inner().st_nlink as u64 - } - fn st_uid(&self) -> u32 { - self.as_inner().as_inner().st_uid as u32 - } - fn st_gid(&self) -> u32 { - self.as_inner().as_inner().st_gid as u32 - } - fn st_rdev(&self) -> u64 { - self.as_inner().as_inner().st_rdev as u64 - } - fn st_size(&self) -> u64 { - self.as_inner().as_inner().st_size as u64 - } - fn st_atime(&self) -> i64 { - self.as_inner().as_inner().st_atime as i64 - } - fn st_atime_nsec(&self) -> i64 { - self.as_inner().as_inner().st_atime_nsec as i64 - } - fn st_mtime(&self) -> i64 { - self.as_inner().as_inner().st_mtime as i64 - } - fn st_mtime_nsec(&self) -> i64 { - self.as_inner().as_inner().st_mtime_nsec as i64 - } - fn st_ctime(&self) -> i64 { - self.as_inner().as_inner().st_ctime as i64 - } - fn st_ctime_nsec(&self) -> i64 { - self.as_inner().as_inner().st_ctime_nsec as i64 - } - fn st_blksize(&self) -> u64 { - self.as_inner().as_inner().st_blksize as u64 - } - fn st_blocks(&self) -> u64 { - self.as_inner().as_inner().st_blocks as u64 - } -} diff --git a/src/libstd/os/hermit/mod.rs b/src/libstd/os/hermit/mod.rs deleted file mode 100644 index 4dee2a6d43..0000000000 --- a/src/libstd/os/hermit/mod.rs +++ /dev/null @@ -1,6 +0,0 @@ -//! HermitCore-specific definitions - -#![stable(feature = "raw_ext", since = "1.1.0")] - -pub mod raw; -pub mod fs; diff --git a/src/libstd/os/hermit/raw.rs b/src/libstd/os/hermit/raw.rs deleted file mode 100644 index 0e232a808a..0000000000 --- a/src/libstd/os/hermit/raw.rs +++ /dev/null @@ -1,17 +0,0 @@ -//! HermitCore-specific raw type definitions - -#![stable(feature = "raw_ext", since = "1.1.0")] -#![rustc_deprecated(since = "1.8.0", - reason = "these type aliases are no longer supported by \ - the standard library, the `libc` crate on \ - crates.io should be used instead for the correct \ - definitions")] -#![allow(deprecated)] -#![allow(missing_debug_implementations)] - -#[stable(feature = "pthread_t", since = "1.8.0")] -pub use libc::pthread_t; - -#[doc(inline)] -#[stable(feature = "raw_ext", since = "1.1.0")] -pub use libc::{dev_t, mode_t, off_t, ino_t, nlink_t, blksize_t, blkcnt_t, stat, time_t}; diff --git a/src/libstd/os/mod.rs b/src/libstd/os/mod.rs index fcd81f0a1b..d44c8ca544 100644 --- a/src/libstd/os/mod.rs +++ b/src/libstd/os/mod.rs @@ -49,7 +49,6 @@ cfg_if::cfg_if! 
{ #[cfg(target_os = "solaris")] pub mod solaris; #[cfg(target_os = "emscripten")] pub mod emscripten; #[cfg(target_os = "fuchsia")] pub mod fuchsia; -#[cfg(target_os = "hermit")] pub mod hermit; #[cfg(target_os = "redox")] pub mod redox; #[cfg(target_os = "wasi")] pub mod wasi; #[cfg(target_os = "vxworks")] pub mod vxworks; diff --git a/src/libstd/panic.rs b/src/libstd/panic.rs index 1d4fd98dd7..577673b7e4 100644 --- a/src/libstd/panic.rs +++ b/src/libstd/panic.rs @@ -12,7 +12,9 @@ use crate::ops::{Deref, DerefMut}; use crate::panicking; use crate::ptr::{Unique, NonNull}; use crate::rc::Rc; -use crate::sync::{Arc, Mutex, RwLock, atomic}; +use crate::sync::{Arc, Mutex, RwLock}; +#[cfg(not(bootstrap))] +use crate::sync::atomic; use crate::task::{Context, Poll}; use crate::thread::Result; @@ -240,49 +242,49 @@ impl RefUnwindSafe for Mutex {} #[stable(feature = "unwind_safe_lock_refs", since = "1.12.0")] impl RefUnwindSafe for RwLock {} -#[cfg(target_has_atomic = "ptr")] +#[cfg(target_has_atomic_load_store = "ptr")] #[stable(feature = "unwind_safe_atomic_refs", since = "1.14.0")] impl RefUnwindSafe for atomic::AtomicIsize {} -#[cfg(target_has_atomic = "8")] +#[cfg(target_has_atomic_load_store = "8")] #[unstable(feature = "integer_atomics", issue = "32976")] impl RefUnwindSafe for atomic::AtomicI8 {} -#[cfg(target_has_atomic = "16")] +#[cfg(target_has_atomic_load_store = "16")] #[unstable(feature = "integer_atomics", issue = "32976")] impl RefUnwindSafe for atomic::AtomicI16 {} -#[cfg(target_has_atomic = "32")] +#[cfg(target_has_atomic_load_store = "32")] #[unstable(feature = "integer_atomics", issue = "32976")] impl RefUnwindSafe for atomic::AtomicI32 {} -#[cfg(target_has_atomic = "64")] +#[cfg(target_has_atomic_load_store = "64")] #[unstable(feature = "integer_atomics", issue = "32976")] impl RefUnwindSafe for atomic::AtomicI64 {} -#[cfg(target_has_atomic = "128")] +#[cfg(target_has_atomic_load_store = "128")] #[unstable(feature = "integer_atomics", issue = "32976")] impl RefUnwindSafe for atomic::AtomicI128 {} -#[cfg(target_has_atomic = "ptr")] +#[cfg(target_has_atomic_load_store = "ptr")] #[stable(feature = "unwind_safe_atomic_refs", since = "1.14.0")] impl RefUnwindSafe for atomic::AtomicUsize {} -#[cfg(target_has_atomic = "8")] +#[cfg(target_has_atomic_load_store = "8")] #[unstable(feature = "integer_atomics", issue = "32976")] impl RefUnwindSafe for atomic::AtomicU8 {} -#[cfg(target_has_atomic = "16")] +#[cfg(target_has_atomic_load_store = "16")] #[unstable(feature = "integer_atomics", issue = "32976")] impl RefUnwindSafe for atomic::AtomicU16 {} -#[cfg(target_has_atomic = "32")] +#[cfg(target_has_atomic_load_store = "32")] #[unstable(feature = "integer_atomics", issue = "32976")] impl RefUnwindSafe for atomic::AtomicU32 {} -#[cfg(target_has_atomic = "64")] +#[cfg(target_has_atomic_load_store = "64")] #[unstable(feature = "integer_atomics", issue = "32976")] impl RefUnwindSafe for atomic::AtomicU64 {} -#[cfg(target_has_atomic = "128")] +#[cfg(target_has_atomic_load_store = "128")] #[unstable(feature = "integer_atomics", issue = "32976")] impl RefUnwindSafe for atomic::AtomicU128 {} -#[cfg(target_has_atomic = "8")] +#[cfg(target_has_atomic_load_store = "8")] #[stable(feature = "unwind_safe_atomic_refs", since = "1.14.0")] impl RefUnwindSafe for atomic::AtomicBool {} -#[cfg(target_has_atomic = "ptr")] +#[cfg(target_has_atomic_load_store = "ptr")] #[stable(feature = "unwind_safe_atomic_refs", since = "1.14.0")] impl RefUnwindSafe for atomic::AtomicPtr {} diff --git a/src/libstd/panicking.rs 
b/src/libstd/panicking.rs index 28fb402440..f76969146f 100644 --- a/src/libstd/panicking.rs +++ b/src/libstd/panicking.rs @@ -12,12 +12,13 @@ use core::panic::{BoxMeUp, PanicInfo, Location}; use crate::any::Any; use crate::fmt; use crate::intrinsics; -use crate::mem; -use crate::ptr; +use crate::mem::{self, ManuallyDrop}; use crate::raw; +use crate::sync::atomic::{AtomicBool, Ordering}; use crate::sys::stdio::panic_output; use crate::sys_common::rwlock::RWLock; -use crate::sys_common::{thread_info, util, backtrace}; +use crate::sys_common::{thread_info, util}; +use crate::sys_common::backtrace::{self, RustBacktrace}; use crate::thread; #[cfg(not(test))] @@ -158,16 +159,10 @@ pub fn take_hook() -> Box) + 'static + Sync + Send> { fn default_hook(info: &PanicInfo<'_>) { // If this is a double panic, make sure that we print a backtrace // for this panic. Otherwise only print it if logging is enabled. - let log_backtrace = if cfg!(feature = "backtrace") { - let panics = update_panic_count(0); - - if panics >= 2 { - Some(backtrace_rs::PrintFmt::Full) - } else { - backtrace::log_enabled() - } + let backtrace_env = if update_panic_count(0) >= 2 { + RustBacktrace::Print(backtrace_rs::PrintFmt::Full) } else { - None + backtrace::rust_backtrace_env() }; // The current implementation always returns `Some`. @@ -187,16 +182,16 @@ fn default_hook(info: &PanicInfo<'_>) { let _ = writeln!(err, "thread '{}' panicked at '{}', {}", name, msg, location); - if cfg!(feature = "backtrace") { - use crate::sync::atomic::{AtomicBool, Ordering}; + static FIRST_PANIC: AtomicBool = AtomicBool::new(true); - static FIRST_PANIC: AtomicBool = AtomicBool::new(true); - - if let Some(format) = log_backtrace { - let _ = backtrace::print(err, format); - } else if FIRST_PANIC.compare_and_swap(true, false, Ordering::SeqCst) { - let _ = writeln!(err, "note: run with `RUST_BACKTRACE=1` \ - environment variable to display a backtrace."); + match backtrace_env { + RustBacktrace::Print(format) => drop(backtrace::print(err, format)), + RustBacktrace::Disabled => {} + RustBacktrace::RuntimeDisabled => { + if FIRST_PANIC.swap(false, Ordering::SeqCst) { + let _ = writeln!(err, "note: run with `RUST_BACKTRACE=1` \ + environment variable to display a backtrace."); + } } } }; @@ -222,7 +217,7 @@ pub fn update_panic_count(amt: isize) -> usize { PANIC_COUNT.with(|c| { let next = (c.get() as isize + amt) as usize; c.set(next); - return next + next }) } @@ -231,10 +226,9 @@ pub use realstd::rt::update_panic_count; /// Invoke a closure, capturing the cause of an unwinding panic if one occurs. 
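The `r#try` hunk just below swaps the scratch union's plain `f`/`r` fields for `ManuallyDrop` wrappers and retires the `unions_with_drop_fields` allowance. A minimal, self-contained sketch of that pattern (illustrative only; `Slot` is a made-up name, not from the patch):

```rust
use std::mem::ManuallyDrop;

// `Slot` stands in for the patch's scratch union: union fields may not have
// drop glue, so the payload is wrapped in ManuallyDrop and moved back out
// explicitly, exactly once.
union Slot<T> {
    value: ManuallyDrop<T>,
}

fn main() {
    let mut slot = Slot { value: ManuallyDrop::new(String::from("hello")) };
    // Reading a union field is unsafe; `take` hands the String back to the
    // caller without any implicit drop ever running.
    let s = unsafe { ManuallyDrop::take(&mut slot.value) };
    assert_eq!(s, "hello");
}
```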
pub unsafe fn r#try R>(f: F) -> Result> { - #[allow(unions_with_drop_fields)] union Data { - f: F, - r: R, + f: ManuallyDrop, + r: ManuallyDrop, } // We do some sketchy operations with ownership here for the sake of @@ -265,7 +259,7 @@ pub unsafe fn r#try R>(f: F) -> Result> let mut any_data = 0; let mut any_vtable = 0; let mut data = Data { - f, + f: ManuallyDrop::new(f) }; let r = __rust_maybe_catch_panic(do_call::, @@ -275,7 +269,7 @@ pub unsafe fn r#try R>(f: F) -> Result> return if r == 0 { debug_assert!(update_panic_count(0) == 0); - Ok(data.r) + Ok(ManuallyDrop::into_inner(data.r)) } else { update_panic_count(-1); debug_assert!(update_panic_count(0) == 0); @@ -288,8 +282,9 @@ pub unsafe fn r#try R>(f: F) -> Result> fn do_call R, R>(data: *mut u8) { unsafe { let data = data as *mut Data; - let f = ptr::read(&mut (*data).f); - ptr::write(&mut (*data).r, f()); + let data = &mut (*data); + let f = ManuallyDrop::take(&mut data.f); + data.r = ManuallyDrop::new(f()); } } } @@ -328,10 +323,8 @@ pub fn begin_panic_fmt(msg: &fmt::Arguments<'_>, } let (file, line, col) = *file_line_col; - let info = PanicInfo::internal_constructor( - Some(msg), - Location::internal_constructor(file, line, col), - ); + let location = Location::internal_constructor(file, line, col); + let info = PanicInfo::internal_constructor(Some(msg), &location); continue_panic_fmt(&info) } @@ -458,10 +451,8 @@ fn rust_panic_with_hook(payload: &mut dyn BoxMeUp, } unsafe { - let mut info = PanicInfo::internal_constructor( - message, - Location::internal_constructor(file, line, col), - ); + let location = Location::internal_constructor(file, line, col); + let mut info = PanicInfo::internal_constructor(message, &location); HOOK_LOCK.read(); match HOOK { // Some platforms know that printing to stderr won't ever actually diff --git a/src/libstd/path.rs b/src/libstd/path.rs index fd6ff1032b..6d6bc76064 100644 --- a/src/libstd/path.rs +++ b/src/libstd/path.rs @@ -1363,20 +1363,24 @@ impl PathBuf { } fn _set_extension(&mut self, extension: &OsStr) -> bool { - if self.file_name().is_none() { - return false; - } - - let mut stem = match self.file_stem() { - Some(stem) => stem.to_os_string(), - None => OsString::new(), + let file_stem = match self.file_stem() { + None => return false, + Some(f) => os_str_as_u8_slice(f), }; - if !os_str_as_u8_slice(extension).is_empty() { - stem.push("."); - stem.push(extension); + // truncate until right after the file stem + let end_file_stem = file_stem[file_stem.len()..].as_ptr() as usize; + let start = os_str_as_u8_slice(&self.inner).as_ptr() as usize; + let v = self.as_mut_vec(); + v.truncate(end_file_stem.wrapping_sub(start)); + + // add the new extension, if any + let new = os_str_as_u8_slice(extension); + if !new.is_empty() { + v.reserve_exact(new.len() + 1); + v.push(b'.'); + v.extend_from_slice(new); } - self.set_file_name(&stem); true } @@ -1627,7 +1631,7 @@ impl<'a> From> for PathBuf { #[stable(feature = "shared_from_slice2", since = "1.24.0")] impl From for Arc { - /// Converts a Path into a Rc by copying the Path data into a new Rc buffer. + /// Converts a `PathBuf` into an `Arc` by moving the `PathBuf` data into a new `Arc` buffer. #[inline] fn from(s: PathBuf) -> Arc { let arc: Arc = Arc::from(s.into_os_string()); @@ -1637,7 +1641,7 @@ impl From for Arc { #[stable(feature = "shared_from_slice2", since = "1.24.0")] impl From<&Path> for Arc { - /// Converts a Path into a Rc by copying the Path data into a new Rc buffer. 
+ /// Converts a `Path` into an `Arc` by copying the `Path` data into a new `Arc` buffer. #[inline] fn from(s: &Path) -> Arc { let arc: Arc = Arc::from(s.as_os_str()); @@ -1647,7 +1651,7 @@ impl From<&Path> for Arc { #[stable(feature = "shared_from_slice2", since = "1.24.0")] impl From for Rc { - /// Converts a Path into a Rc by copying the Path data into a new Rc buffer. + /// Converts a `PathBuf` into an `Rc` by moving the `PathBuf` data into a new `Rc` buffer. #[inline] fn from(s: PathBuf) -> Rc { let rc: Rc = Rc::from(s.into_os_string()); @@ -1657,7 +1661,7 @@ impl From for Rc { #[stable(feature = "shared_from_slice2", since = "1.24.0")] impl From<&Path> for Rc { - /// Converts a Path into a Rc by copying the Path data into a new Rc buffer. + /// Converts a `Path` into an `Rc` by copying the `Path` data into a new `Rc` buffer. #[inline] fn from(s: &Path) -> Rc { let rc: Rc = Rc::from(s.as_os_str()); @@ -2219,6 +2223,7 @@ impl Path { /// assert_eq!(Path::new("/etc").join("passwd"), PathBuf::from("/etc/passwd")); /// ``` #[stable(feature = "rust1", since = "1.0.0")] + #[must_use] pub fn join>(&self, path: P) -> PathBuf { self._join(path.as_ref()) } diff --git a/src/libstd/primitive_docs.rs b/src/libstd/primitive_docs.rs index 45816ffd22..a72951c034 100644 --- a/src/libstd/primitive_docs.rs +++ b/src/libstd/primitive_docs.rs @@ -426,14 +426,12 @@ mod prim_unit { } /// /// use std::mem; /// -/// fn main() { -/// unsafe { -/// let my_num: *mut i32 = libc::malloc(mem::size_of::()) as *mut i32; -/// if my_num.is_null() { -/// panic!("failed to allocate memory"); -/// } -/// libc::free(my_num as *mut libc::c_void); +/// unsafe { +/// let my_num: *mut i32 = libc::malloc(mem::size_of::()) as *mut i32; +/// if my_num.is_null() { +/// panic!("failed to allocate memory"); /// } +/// libc::free(my_num as *mut libc::c_void); /// } /// ``` /// @@ -566,7 +564,9 @@ mod prim_array { } #[doc(alias = "[")] #[doc(alias = "]")] #[doc(alias = "[]")] -/// A dynamically-sized view into a contiguous sequence, `[T]`. +/// A dynamically-sized view into a contiguous sequence, `[T]`. Contiguous here +/// means that elements are laid out so that every element is the same +/// distance from its neighbors. /// /// *[See also the `std::slice` module](slice/index.html).* /// diff --git a/src/libstd/process.rs b/src/libstd/process.rs index da136ca6bf..4b0cf8312f 100644 --- a/src/libstd/process.rs +++ b/src/libstd/process.rs @@ -1488,12 +1488,12 @@ impl Child { /// } /// /// fn main() { -/// ::std::process::exit(match run_app() { -/// Ok(_) => 0, -/// Err(err) => { -/// eprintln!("error: {:?}", err); -/// 1 -/// } +/// std::process::exit(match run_app() { +/// Ok(_) => 0, +/// Err(err) => { +/// eprintln!("error: {:?}", err); +/// 1 +/// } /// }); /// } /// ``` diff --git a/src/libstd/rt.rs b/src/libstd/rt.rs index cf45eb0dab..63e35d5ed9 100644 --- a/src/libstd/rt.rs +++ b/src/libstd/rt.rs @@ -44,12 +44,9 @@ fn lang_start_internal(main: &(dyn Fn() -> i32 + Sync + crate::panic::RefUnwindS sys::args::init(argc, argv); // Let's run some code! 
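The next hunk drops the `backtrace` feature split, so `lang_start_internal` always runs `main` under `catch_unwind` and converts an unwound panic into exit code 101. A rough user-level analogue of that control flow (a sketch, not part of the patch; `fallible_main` is a made-up stand-in):

```rust
use std::panic;
use std::process;

fn fallible_main() -> i32 {
    // Stand-in for the user's `main`; a panic here unwinds into catch_unwind.
    0
}

fn main() {
    // Panics from fallible_main are caught and mapped to exit code 101,
    // mirroring the `exit_code.unwrap_or(101)` in lang_start_internal.
    let exit_code = panic::catch_unwind(fallible_main).unwrap_or(101);
    process::exit(exit_code);
}
```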
- #[cfg(feature = "backtrace")] let exit_code = panic::catch_unwind(|| { sys_common::backtrace::__rust_begin_short_backtrace(move || main()) }); - #[cfg(not(feature = "backtrace"))] - let exit_code = panic::catch_unwind(move || main()); sys_common::cleanup(); exit_code.unwrap_or(101) as isize diff --git a/src/libstd/sync/mpsc/mod.rs b/src/libstd/sync/mpsc/mod.rs index 69ecd20106..c2884a28f3 100644 --- a/src/libstd/sync/mpsc/mod.rs +++ b/src/libstd/sync/mpsc/mod.rs @@ -1581,10 +1581,6 @@ impl error::Error for SendError { fn description(&self) -> &str { "sending on a closed channel" } - - fn cause(&self) -> Option<&dyn error::Error> { - None - } } #[stable(feature = "rust1", since = "1.0.0")] @@ -1624,10 +1620,6 @@ impl error::Error for TrySendError { } } } - - fn cause(&self) -> Option<&dyn error::Error> { - None - } } #[stable(feature = "mpsc_error_conversions", since = "1.24.0")] @@ -1652,10 +1644,6 @@ impl error::Error for RecvError { fn description(&self) -> &str { "receiving on a closed channel" } - - fn cause(&self) -> Option<&dyn error::Error> { - None - } } #[stable(feature = "rust1", since = "1.0.0")] @@ -1685,10 +1673,6 @@ impl error::Error for TryRecvError { } } } - - fn cause(&self) -> Option<&dyn error::Error> { - None - } } #[stable(feature = "mpsc_error_conversions", since = "1.24.0")] @@ -1726,10 +1710,6 @@ impl error::Error for RecvTimeoutError { } } } - - fn cause(&self) -> Option<&dyn error::Error> { - None - } } #[stable(feature = "mpsc_error_conversions", since = "1.24.0")] diff --git a/src/libstd/sync/once.rs b/src/libstd/sync/once.rs index e529b8c422..e28fbca7fa 100644 --- a/src/libstd/sync/once.rs +++ b/src/libstd/sync/once.rs @@ -60,10 +60,9 @@ use crate::thread::{self, Thread}; /// A synchronization primitive which can be used to run a one-time global /// initialization. Useful for one-time initialization for FFI or related -/// functionality. This type can only be constructed with the [`ONCE_INIT`] -/// value or the equivalent [`Once::new`] constructor. +/// functionality. This type can only be constructed with the [`Once::new`] +/// constructor. /// -/// [`ONCE_INIT`]: constant.ONCE_INIT.html /// [`Once::new`]: struct.Once.html#method.new /// /// # Examples diff --git a/src/libstd/sys/hermit/alloc.rs b/src/libstd/sys/hermit/alloc.rs new file mode 100644 index 0000000000..86cc446363 --- /dev/null +++ b/src/libstd/sys/hermit/alloc.rs @@ -0,0 +1,35 @@ +use crate::alloc::{GlobalAlloc, Layout, System}; +use crate::ptr; +use crate::sys::hermit::abi; + +#[stable(feature = "alloc_system_type", since = "1.28.0")] +unsafe impl GlobalAlloc for System { + #[inline] + unsafe fn alloc(&self, layout: Layout) -> *mut u8 { + abi::malloc(layout.size(), layout.align()) + } + + unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 { + let addr = abi::malloc(layout.size(), layout.align()); + + if !addr.is_null() { + ptr::write_bytes( + addr, + 0x00, + layout.size() + ); + } + + addr + } + + #[inline] + unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) { + abi::free(ptr, layout.size(), layout.align()) + } + + #[inline] + unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 { + abi::realloc(ptr, layout.size(), layout.align(), new_size) + } +} diff --git a/src/libstd/sys/hermit/args.rs b/src/libstd/sys/hermit/args.rs new file mode 100644 index 0000000000..5b1f3add51 --- /dev/null +++ b/src/libstd/sys/hermit/args.rs @@ -0,0 +1,82 @@ +use crate::ffi::OsString; +use crate::marker::PhantomData; +use crate::vec; + +/// One-time global initialization. 
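A brief aside on the new `sys/hermit/alloc.rs` above, which implements `GlobalAlloc` for `System` by forwarding to the HermitCore ABI's `malloc`, `realloc`, and `free`: the same forwarding shape can be sketched in user code (illustrative only; `Forwarding` is a made-up type that delegates to `System` rather than the hermit ABI):

```rust
use std::alloc::{GlobalAlloc, Layout, System};

// A trivial allocator that forwards every request to System, shaped like
// the hermit impl above. alloc_zeroed and realloc use the trait defaults.
struct Forwarding;

unsafe impl GlobalAlloc for Forwarding {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        System.alloc(layout)
    }
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        System.dealloc(ptr, layout)
    }
}

#[global_allocator]
static ALLOC: Forwarding = Forwarding;

fn main() {
    // Every heap allocation below now routes through Forwarding.
    let v = vec![1u8, 2, 3];
    assert_eq!(v, [1, 2, 3]);
}
```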
+pub unsafe fn init(argc: isize, argv: *const *const u8) { imp::init(argc, argv) } + +/// One-time global cleanup. +pub unsafe fn cleanup() { imp::cleanup() } + +/// Returns the command line arguments +pub fn args() -> Args { + imp::args() +} + +pub struct Args { + iter: vec::IntoIter, + _dont_send_or_sync_me: PhantomData<*mut ()>, +} + +impl Args { + pub fn inner_debug(&self) -> &[OsString] { + self.iter.as_slice() + } +} + +impl Iterator for Args { + type Item = OsString; + fn next(&mut self) -> Option { self.iter.next() } + fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } +} + +impl ExactSizeIterator for Args { + fn len(&self) -> usize { self.iter.len() } +} + +impl DoubleEndedIterator for Args { + fn next_back(&mut self) -> Option { self.iter.next_back() } +} + +mod imp { + use crate::sys_common::os_str_bytes::*; + use crate::ptr; + use crate::ffi::{CStr, OsString}; + use crate::marker::PhantomData; + use super::Args; + + use crate::sys_common::mutex::Mutex; + + static mut ARGC: isize = 0; + static mut ARGV: *const *const u8 = ptr::null(); + static LOCK: Mutex = Mutex::new(); + + pub unsafe fn init(argc: isize, argv: *const *const u8) { + let _guard = LOCK.lock(); + ARGC = argc; + ARGV = argv; + } + + pub unsafe fn cleanup() { + let _guard = LOCK.lock(); + ARGC = 0; + ARGV = ptr::null(); + } + + pub fn args() -> Args { + Args { + iter: clone().into_iter(), + _dont_send_or_sync_me: PhantomData + } + } + + fn clone() -> Vec { + unsafe { + let _guard = LOCK.lock(); + (0..ARGC).map(|i| { + let cstr = CStr::from_ptr(*ARGV.offset(i) as *const i8); + OsStringExt::from_vec(cstr.to_bytes().to_vec()) + }).collect() + } + } +} diff --git a/src/libstd/sys/hermit/cmath.rs b/src/libstd/sys/hermit/cmath.rs new file mode 100644 index 0000000000..fa7783122c --- /dev/null +++ b/src/libstd/sys/hermit/cmath.rs @@ -0,0 +1,29 @@ +// These symbols are all defined in `compiler-builtins` +extern { + pub fn acos(n: f64) -> f64; + pub fn acosf(n: f32) -> f32; + pub fn asin(n: f64) -> f64; + pub fn asinf(n: f32) -> f32; + pub fn atan(n: f64) -> f64; + pub fn atan2(a: f64, b: f64) -> f64; + pub fn atan2f(a: f32, b: f32) -> f32; + pub fn atanf(n: f32) -> f32; + pub fn cbrt(n: f64) -> f64; + pub fn cbrtf(n: f32) -> f32; + pub fn cosh(n: f64) -> f64; + pub fn coshf(n: f32) -> f32; + pub fn expm1(n: f64) -> f64; + pub fn expm1f(n: f32) -> f32; + pub fn fdim(a: f64, b: f64) -> f64; + pub fn fdimf(a: f32, b: f32) -> f32; + pub fn hypot(x: f64, y: f64) -> f64; + pub fn hypotf(x: f32, y: f32) -> f32; + pub fn log1p(n: f64) -> f64; + pub fn log1pf(n: f32) -> f32; + pub fn sinh(n: f64) -> f64; + pub fn sinhf(n: f32) -> f32; + pub fn tan(n: f64) -> f64; + pub fn tanf(n: f32) -> f32; + pub fn tanh(n: f64) -> f64; + pub fn tanhf(n: f32) -> f32; +} diff --git a/src/libstd/sys/hermit/condvar.rs b/src/libstd/sys/hermit/condvar.rs new file mode 100644 index 0000000000..8e52b3da1b --- /dev/null +++ b/src/libstd/sys/hermit/condvar.rs @@ -0,0 +1,62 @@ +use crate::cmp; +use crate::sys::hermit::abi; +use crate::sys::mutex::Mutex; +use crate::time::Duration; + +pub struct Condvar { + identifier: usize, +} + +impl Condvar { + pub const fn new() -> Condvar { + Condvar { identifier: 0 } + } + + #[inline] + pub unsafe fn init(&mut self) { + // nothing to do + } + + pub unsafe fn notify_one(&self) { + let _ = abi::notify(self.id(), 1); + } + + #[inline] + pub unsafe fn notify_all(&self) { + let _ = abi::notify(self.id(), -1 /* =all */); + } + + pub unsafe fn wait(&self, mutex: &Mutex) { + // add current task to the wait queue 
+ let _ = abi::add_queue(self.id(), -1 /* no timeout */); + mutex.unlock(); + let _ = abi::wait(self.id()); + mutex.lock(); + } + + pub unsafe fn wait_timeout(&self, mutex: &Mutex, dur: Duration) -> bool { + let nanos = dur.as_nanos(); + let nanos = cmp::min(i64::max_value() as u128, nanos); + + // add current task to the wait queue + let _ = abi::add_queue(self.id(), nanos as i64); + + mutex.unlock(); + // If the return value is !0 then a timeout happened, so we return + // `false` as we weren't actually notified. + let ret = abi::wait(self.id()) == 0; + mutex.lock(); + + ret + } + + #[inline] + pub unsafe fn destroy(&self) { + let _ = abi::destroy_queue(self.id()); + } + + #[inline] + fn id(&self) -> usize { + &self.identifier as *const usize as usize + } +} diff --git a/src/libstd/sys/hermit/env.rs b/src/libstd/sys/hermit/env.rs new file mode 100644 index 0000000000..7a0fcb31ef --- /dev/null +++ b/src/libstd/sys/hermit/env.rs @@ -0,0 +1,9 @@ +pub mod os { + pub const FAMILY: &str = ""; + pub const OS: &str = "hermit"; + pub const DLL_PREFIX: &str = ""; + pub const DLL_SUFFIX: &str = ""; + pub const DLL_EXTENSION: &str = ""; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} diff --git a/src/libstd/sys/hermit/fast_thread_local.rs b/src/libstd/sys/hermit/fast_thread_local.rs new file mode 100644 index 0000000000..05464787a0 --- /dev/null +++ b/src/libstd/sys/hermit/fast_thread_local.rs @@ -0,0 +1,4 @@ +#![cfg(target_thread_local)] +#![unstable(feature = "thread_local_internals", issue = "0")] + +pub use crate::sys_common::thread_local::register_dtor_fallback as register_dtor; diff --git a/src/libstd/sys/hermit/fd.rs b/src/libstd/sys/hermit/fd.rs new file mode 100644 index 0000000000..84c5473664 --- /dev/null +++ b/src/libstd/sys/hermit/fd.rs @@ -0,0 +1,82 @@ +#![unstable(reason = "not public", issue = "0", feature = "fd")] + +use crate::io::{self, Read, ErrorKind}; +use crate::mem; +use crate::sys::cvt; +use crate::sys::hermit::abi; +use crate::sys_common::AsInner; + +#[derive(Debug)] +pub struct FileDesc { + fd: i32, +} + +impl FileDesc { + pub fn new(fd: i32) -> FileDesc { + FileDesc { fd } + } + + pub fn raw(&self) -> i32 { self.fd } + + /// Extracts the actual file descriptor without closing it. 
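The `into_raw` body that follows relies on `mem::forget` to keep `FileDesc`'s `Drop` impl (which calls `abi::close`) from running, handing ownership of the raw descriptor to the caller. A compact sketch of that idiom (illustrative; `Handle` is a made-up type):

```rust
use std::mem;

// `Handle` owns a raw descriptor; Drop stands in for the abi::close call
// performed by FileDesc's Drop impl.
struct Handle(i32);

impl Drop for Handle {
    fn drop(&mut self) {
        println!("closing fd {}", self.0);
    }
}

impl Handle {
    fn into_raw(self) -> i32 {
        let fd = self.0;
        // Forgetting self skips Drop, so the descriptor is not closed here;
        // the caller now owns it.
        mem::forget(self);
        fd
    }
}

fn main() {
    let raw = Handle(3).into_raw();
    assert_eq!(raw, 3); // nothing was printed: Drop never ran
}
```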
+ pub fn into_raw(self) -> i32 { + let fd = self.fd; + mem::forget(self); + fd + } + + pub fn read(&self, buf: &mut [u8]) -> io::Result { + let result = unsafe { abi::read(self.fd, buf.as_mut_ptr(), buf.len()) }; + cvt(result as i32) + } + + pub fn read_to_end(&self, buf: &mut Vec) -> io::Result { + let mut me = self; + (&mut me).read_to_end(buf) + } + + pub fn write(&self, buf: &[u8]) -> io::Result { + let result = unsafe { abi::write(self.fd, buf.as_ptr(), buf.len()) }; + cvt(result as i32) + } + + pub fn duplicate(&self) -> io::Result { + self.duplicate_path(&[]) + } + pub fn duplicate_path(&self, _path: &[u8]) -> io::Result { + Err(io::Error::new(ErrorKind::Other, "duplicate isn't supported")) + } + + pub fn nonblocking(&self) -> io::Result { + Ok(false) + } + + pub fn set_cloexec(&self) -> io::Result<()> { + Err(io::Error::new(ErrorKind::Other, "cloexec isn't supported")) + } + + pub fn set_nonblocking(&self, _nonblocking: bool) -> io::Result<()> { + Err(io::Error::new(ErrorKind::Other, "nonblocking isn't supported")) + } +} + +impl<'a> Read for &'a FileDesc { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + (**self).read(buf) + } +} + +impl AsInner for FileDesc { + fn as_inner(&self) -> &i32 { &self.fd } +} + +impl Drop for FileDesc { + fn drop(&mut self) { + // Note that errors are ignored when closing a file descriptor. The + // reason for this is that if an error occurs we don't actually know if + // the file descriptor was closed or not, and if we retried (for + // something like EINTR), we might close another valid file descriptor + // (opened after we closed ours. + let _ = unsafe { abi::close(self.fd) }; + } +} diff --git a/src/libstd/sys/hermit/fs.rs b/src/libstd/sys/hermit/fs.rs new file mode 100644 index 0000000000..f8e5844a16 --- /dev/null +++ b/src/libstd/sys/hermit/fs.rs @@ -0,0 +1,387 @@ +use crate::ffi::{OsString, CString, CStr}; +use crate::fmt; +use crate::io::{self, Error, ErrorKind}; +use crate::hash::{Hash, Hasher}; +use crate::io::{SeekFrom, IoSlice, IoSliceMut}; +use crate::path::{Path, PathBuf}; +use crate::sys::time::SystemTime; +use crate::sys::{unsupported, Void}; +use crate::sys::hermit::abi; +use crate::sys::hermit::fd::FileDesc; +use crate::sys::cvt; +use crate::sys_common::os_str_bytes::OsStrExt; + +pub use crate::sys_common::fs::copy; +//pub use crate::sys_common::fs::remove_dir_all; + +fn cstr(path: &Path) -> io::Result { + Ok(CString::new(path.as_os_str().as_bytes())?) 
+} +//const O_ACCMODE: i32 = 00000003; +const O_RDONLY: i32 = 00000000; +const O_WRONLY: i32 = 00000001; +const O_RDWR: i32 = 00000002; +const O_CREAT: i32 = 00000100; +const O_EXCL: i32 = 00000200; +const O_TRUNC: i32 = 00001000; +const O_APPEND: i32 = 00002000; + +#[derive(Debug)] +pub struct File(FileDesc); + +pub struct FileAttr(Void); + +pub struct ReadDir(Void); + +pub struct DirEntry(Void); + +#[derive(Clone, Debug)] +pub struct OpenOptions { + // generic + read: bool, + write: bool, + append: bool, + truncate: bool, + create: bool, + create_new: bool, + // system-specific + mode: i32 +} + +pub struct FilePermissions(Void); + +pub struct FileType(Void); + +#[derive(Debug)] +pub struct DirBuilder { } + +impl FileAttr { + pub fn size(&self) -> u64 { + match self.0 {} + } + + pub fn perm(&self) -> FilePermissions { + match self.0 {} + } + + pub fn file_type(&self) -> FileType { + match self.0 {} + } + + pub fn modified(&self) -> io::Result { + match self.0 {} + } + + pub fn accessed(&self) -> io::Result { + match self.0 {} + } + + pub fn created(&self) -> io::Result { + match self.0 {} + } +} + +impl Clone for FileAttr { + fn clone(&self) -> FileAttr { + match self.0 {} + } +} + +impl FilePermissions { + pub fn readonly(&self) -> bool { + match self.0 {} + } + + pub fn set_readonly(&mut self, _readonly: bool) { + match self.0 {} + } +} + +impl Clone for FilePermissions { + fn clone(&self) -> FilePermissions { + match self.0 {} + } +} + +impl PartialEq for FilePermissions { + fn eq(&self, _other: &FilePermissions) -> bool { + match self.0 {} + } +} + +impl Eq for FilePermissions { +} + +impl fmt::Debug for FilePermissions { + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.0 {} + } +} + +impl FileType { + pub fn is_dir(&self) -> bool { + match self.0 {} + } + + pub fn is_file(&self) -> bool { + match self.0 {} + } + + pub fn is_symlink(&self) -> bool { + match self.0 {} + } +} + +impl Clone for FileType { + fn clone(&self) -> FileType { + match self.0 {} + } +} + +impl Copy for FileType {} + +impl PartialEq for FileType { + fn eq(&self, _other: &FileType) -> bool { + match self.0 {} + } +} + +impl Eq for FileType { +} + +impl Hash for FileType { + fn hash(&self, _h: &mut H) { + match self.0 {} + } +} + +impl fmt::Debug for FileType { + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.0 {} + } +} + +impl fmt::Debug for ReadDir { + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.0 {} + } +} + +impl Iterator for ReadDir { + type Item = io::Result; + + fn next(&mut self) -> Option> { + match self.0 {} + } +} + +impl DirEntry { + pub fn path(&self) -> PathBuf { + match self.0 {} + } + + pub fn file_name(&self) -> OsString { + match self.0 {} + } + + pub fn metadata(&self) -> io::Result { + match self.0 {} + } + + pub fn file_type(&self) -> io::Result { + match self.0 {} + } +} + +impl OpenOptions { + pub fn new() -> OpenOptions { + OpenOptions { + // generic + read: false, + write: false, + append: false, + truncate: false, + create: false, + create_new: false, + // system-specific + mode: 0x777 + } + } + + pub fn read(&mut self, read: bool) { self.read = read; } + pub fn write(&mut self, write: bool) { self.write = write; } + pub fn append(&mut self, append: bool) { self.append = append; } + pub fn truncate(&mut self, truncate: bool) { self.truncate = truncate; } + pub fn create(&mut self, create: bool) { self.create = create; } + pub fn create_new(&mut self, create_new: bool) { self.create_new = create_new; } + + fn 
get_access_mode(&self) -> io::Result { + match (self.read, self.write, self.append) { + (true, false, false) => Ok(O_RDONLY), + (false, true, false) => Ok(O_WRONLY), + (true, true, false) => Ok(O_RDWR), + (false, _, true) => Ok(O_WRONLY | O_APPEND), + (true, _, true) => Ok(O_RDWR | O_APPEND), + (false, false, false) => { + Err(io::Error::new(ErrorKind::InvalidInput, "invalid access mode")) + }, + } + } + + fn get_creation_mode(&self) -> io::Result { + match (self.write, self.append) { + (true, false) => {} + (false, false) => + if self.truncate || self.create || self.create_new { + return Err(io::Error::new(ErrorKind::InvalidInput, "invalid creation mode")); + }, + (_, true) => + if self.truncate && !self.create_new { + return Err(io::Error::new(ErrorKind::InvalidInput, "invalid creation mode")); + }, + } + + Ok(match (self.create, self.truncate, self.create_new) { + (false, false, false) => 0, + (true, false, false) => O_CREAT, + (false, true, false) => O_TRUNC, + (true, true, false) => O_CREAT | O_TRUNC, + (_, _, true) => O_CREAT | O_EXCL, + }) + } +} + +impl File { + pub fn open(path: &Path, opts: &OpenOptions) -> io::Result { + let path = cstr(path)?; + File::open_c(&path, opts) + } + + pub fn open_c(path: &CStr, opts: &OpenOptions) -> io::Result { + let mut flags = opts.get_access_mode()?; + flags = flags | opts.get_creation_mode()?; + + let mode; + if flags & O_CREAT == O_CREAT { + mode = opts.mode; + } else { + mode = 0; + } + + let fd = unsafe { cvt(abi::open(path.as_ptr(), flags, mode))? }; + Ok(File(FileDesc::new(fd as i32))) + } + + pub fn file_attr(&self) -> io::Result { + Err(Error::from_raw_os_error(22)) + } + + pub fn fsync(&self) -> io::Result<()> { + Err(Error::from_raw_os_error(22)) + } + + pub fn datasync(&self) -> io::Result<()> { + self.fsync() + } + + pub fn truncate(&self, _size: u64) -> io::Result<()> { + Err(Error::from_raw_os_error(22)) + } + + pub fn read(&self, buf: &mut [u8]) -> io::Result { + self.0.read(buf) + } + + pub fn read_vectored(&self, bufs: &mut [IoSliceMut<'_>]) -> io::Result { + crate::io::default_read_vectored(|buf| self.read(buf), bufs) + } + + pub fn write(&self, buf: &[u8]) -> io::Result { + self.0.write(buf) + } + + pub fn write_vectored(&self, bufs: &[IoSlice<'_>]) -> io::Result { + crate::io::default_write_vectored(|buf| self.write(buf), bufs) + } + + pub fn flush(&self) -> io::Result<()> { + Ok(()) + } + + pub fn seek(&self, _pos: SeekFrom) -> io::Result { + Err(Error::from_raw_os_error(22)) + } + + pub fn duplicate(&self) -> io::Result { + Err(Error::from_raw_os_error(22)) + } + + pub fn set_permissions(&self, _perm: FilePermissions) -> io::Result<()> { + Err(Error::from_raw_os_error(22)) + } + + pub fn diverge(&self) -> ! { + loop {} + } +} + +impl DirBuilder { + pub fn new() -> DirBuilder { + DirBuilder { } + } + + pub fn mkdir(&self, _p: &Path) -> io::Result<()> { + unsupported() + } +} + +pub fn readdir(_p: &Path) -> io::Result { + unsupported() +} + +pub fn unlink(path: &Path) -> io::Result<()> { + let name = cstr(path)?; + let _ = unsafe { cvt(abi::unlink(name.as_ptr()))? 
}; + Ok(()) +} + +pub fn rename(_old: &Path, _new: &Path) -> io::Result<()> { + unsupported() +} + +pub fn set_perm(_p: &Path, perm: FilePermissions) -> io::Result<()> { + match perm.0 {} +} + +pub fn rmdir(_p: &Path) -> io::Result<()> { + unsupported() +} + +pub fn remove_dir_all(_path: &Path) -> io::Result<()> { + //unsupported() + Ok(()) +} + +pub fn readlink(_p: &Path) -> io::Result { + unsupported() +} + +pub fn symlink(_src: &Path, _dst: &Path) -> io::Result<()> { + unsupported() +} + +pub fn link(_src: &Path, _dst: &Path) -> io::Result<()> { + unsupported() +} + +pub fn stat(_p: &Path) -> io::Result { + unsupported() +} + +pub fn lstat(_p: &Path) -> io::Result { + unsupported() +} + +pub fn canonicalize(_p: &Path) -> io::Result { + unsupported() +} diff --git a/src/libstd/sys/hermit/io.rs b/src/libstd/sys/hermit/io.rs new file mode 100644 index 0000000000..976e122463 --- /dev/null +++ b/src/libstd/sys/hermit/io.rs @@ -0,0 +1,46 @@ +use crate::mem; + +pub struct IoSlice<'a>(&'a [u8]); + +impl<'a> IoSlice<'a> { + #[inline] + pub fn new(buf: &'a [u8]) -> IoSlice<'a> { + IoSlice(buf) + } + + #[inline] + pub fn advance(&mut self, n: usize) { + self.0 = &self.0[n..] + } + + #[inline] + pub fn as_slice(&self) -> &[u8] { + self.0 + } +} + +pub struct IoSliceMut<'a>(&'a mut [u8]); + +impl<'a> IoSliceMut<'a> { + #[inline] + pub fn new(buf: &'a mut [u8]) -> IoSliceMut<'a> { + IoSliceMut(buf) + } + + #[inline] + pub fn advance(&mut self, n: usize) { + let slice = mem::replace(&mut self.0, &mut []); + let (_, remaining) = slice.split_at_mut(n); + self.0 = remaining; + } + + #[inline] + pub fn as_slice(&self) -> &[u8] { + self.0 + } + + #[inline] + pub fn as_mut_slice(&mut self) -> &mut [u8] { + self.0 + } +} diff --git a/src/libstd/sys/hermit/memchr.rs b/src/libstd/sys/hermit/memchr.rs new file mode 100644 index 0000000000..9967482197 --- /dev/null +++ b/src/libstd/sys/hermit/memchr.rs @@ -0,0 +1 @@ +pub use core::slice::memchr::{memchr, memrchr}; diff --git a/src/libstd/sys/hermit/mod.rs b/src/libstd/sys/hermit/mod.rs new file mode 100644 index 0000000000..d435963176 --- /dev/null +++ b/src/libstd/sys/hermit/mod.rs @@ -0,0 +1,147 @@ +//! System bindings for HermitCore +//! +//! This module contains the facade (aka platform-specific) implementations of +//! OS level functionality for HermitCore. +//! +//! This is all super highly experimental and not actually intended for +//! wide/production use yet, it's still all in the experimental category. This +//! will likely change over time. +//! +//! Currently all functions here are basically stubs that immediately return +//! errors. The hope is that with a portability lint we can turn actually just +//! remove all this and just omit parts of the standard library if we're +//! compiling for wasm. That way it's a compile time error for something that's +//! guaranteed to be a runtime error! 
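Most of the hermit submodules declared below (`fs`, `net`, `pipe`, `process`) are stubs built on the uninhabited `Void` enum defined later in this module: a type wrapping `Void` can never be constructed, so each method body reduces to an empty `match`. A compact sketch of the pattern (illustrative; `StreamStub` is a made-up example):

```rust
use std::io;
use std::net::SocketAddr;

// `Void` mirrors the uninhabited enum from sys/hermit/mod.rs.
enum Void {}

// A stub type wrapping Void can be named in signatures but never created.
struct StreamStub(Void);

impl StreamStub {
    fn peer_addr(&self) -> io::Result<SocketAddr> {
        // No value of type Void exists, so this match needs no arms and the
        // body is statically unreachable.
        match self.0 {}
    }
}

fn main() {
    // StreamStub satisfies its interface without ever being constructible,
    // which is exactly how the unsupported-platform stubs work.
}
```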
+ +use crate::os::raw::c_char; +use crate::intrinsics; + +pub mod alloc; +pub mod args; +pub mod condvar; +pub mod stdio; +pub mod memchr; +pub mod io; +pub mod mutex; +pub mod rwlock; +pub mod os; +pub mod cmath; +pub mod thread; +pub mod env; +pub mod fs; +pub mod fd; +pub mod net; +pub mod path; +pub mod pipe; +pub mod process; +pub mod stack_overflow; +pub mod time; +pub mod thread_local; +pub mod fast_thread_local; + +pub use crate::sys_common::os_str_bytes as os_str; +use crate::io::ErrorKind; + +#[allow(unused_extern_crates)] +pub extern crate hermit_abi as abi; + +pub fn unsupported() -> crate::io::Result { + Err(unsupported_err()) +} + +pub fn unsupported_err() -> crate::io::Error { + crate::io::Error::new(crate::io::ErrorKind::Other, + "operation not supported on HermitCore yet") +} + +// This enum is used as the storage for a bunch of types which can't actually +// exist. +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)] +pub enum Void {} + +pub unsafe fn strlen(start: *const c_char) -> usize { + let mut str = start; + + while *str != 0 { + str = str.offset(1); + } + + (str as usize) - (start as usize) +} + +#[no_mangle] +pub extern "C" fn floor(x: f64) -> f64 { + unsafe { + intrinsics::floorf64(x) + } +} + +pub unsafe fn abort_internal() -> ! { + abi::abort(); +} + +// FIXME: just a workaround to test the system +pub fn hashmap_random_keys() -> (u64, u64) { + (1, 2) +} + +// This function is needed by the panic runtime. The symbol is named in +// pre-link args for the target specification, so keep that in sync. +#[cfg(not(test))] +#[no_mangle] +// NB. used by both libunwind and libpanic_abort +pub unsafe extern "C" fn __rust_abort() { + abort_internal(); +} + +#[cfg(not(test))] +pub fn init() { + unsafe { + let _ = net::init(); + } +} + +#[cfg(not(test))] +#[no_mangle] +pub unsafe extern "C" fn runtime_entry(argc: i32, argv: *const *const c_char, + env: *const *const c_char) -> ! 
{ + extern "C" { + fn main(argc: isize, argv: *const *const c_char) -> i32; + } + + // initialize environment + os::init_environment(env as *const *const i8); + + let result = main(argc as isize, argv); + + abi::exit(result); +} + +pub fn decode_error_kind(errno: i32) -> ErrorKind { + match errno { + x if x == 13 as i32 => ErrorKind::PermissionDenied, + x if x == 98 as i32 => ErrorKind::AddrInUse, + x if x == 99 as i32 => ErrorKind::AddrNotAvailable, + x if x == 11 as i32 => ErrorKind::WouldBlock, + x if x == 103 as i32 => ErrorKind::ConnectionAborted, + x if x == 111 as i32 => ErrorKind::ConnectionRefused, + x if x == 104 as i32 => ErrorKind::ConnectionReset, + x if x == 17 as i32 => ErrorKind::AlreadyExists, + x if x == 4 as i32 => ErrorKind::Interrupted, + x if x == 22 as i32 => ErrorKind::InvalidInput, + x if x == 2 as i32 => ErrorKind::NotFound, + x if x == 107 as i32 => ErrorKind::NotConnected, + x if x == 1 as i32 => ErrorKind::PermissionDenied, + x if x == 32 as i32 => ErrorKind::BrokenPipe, + x if x == 110 as i32 => ErrorKind::TimedOut, + _ => ErrorKind::Other, + } +} + +pub fn cvt(result: i32) -> crate::io::Result { + if result < 0 { + Err(crate::io::Error::from_raw_os_error(-result)) + } else { + Ok(result as usize) + } +} diff --git a/src/libstd/sys/hermit/mutex.rs b/src/libstd/sys/hermit/mutex.rs new file mode 100644 index 0000000000..9414bf8fbb --- /dev/null +++ b/src/libstd/sys/hermit/mutex.rs @@ -0,0 +1,77 @@ +use crate::ptr; +use crate::ffi::c_void; +use crate::sys::hermit::abi; + +pub struct Mutex { + inner: *const c_void +} + +unsafe impl Send for Mutex {} +unsafe impl Sync for Mutex {} + +impl Mutex { + pub const fn new() -> Mutex { + Mutex { inner: ptr::null() } + } + + #[inline] + pub unsafe fn init(&mut self) { + let _ = abi::sem_init(&mut self.inner as *mut *const c_void, 1); + } + + #[inline] + pub unsafe fn lock(&self) { + let _ = abi::sem_timedwait(self.inner, 0); + } + + #[inline] + pub unsafe fn unlock(&self) { + let _ = abi::sem_post(self.inner); + } + + #[inline] + pub unsafe fn try_lock(&self) -> bool { + let result = abi::sem_trywait(self.inner); + result == 0 + } + + #[inline] + pub unsafe fn destroy(&self) { + let _ = abi::sem_destroy(self.inner); + } +} + +pub struct ReentrantMutex { + inner: *const c_void +} + +impl ReentrantMutex { + pub unsafe fn uninitialized() -> ReentrantMutex { + ReentrantMutex { inner: ptr::null() } + } + + #[inline] + pub unsafe fn init(&mut self) { + let _ = abi::recmutex_init(&mut self.inner as *mut *const c_void); + } + + #[inline] + pub unsafe fn lock(&self) { + let _ = abi::recmutex_lock(self.inner); + } + + #[inline] + pub unsafe fn try_lock(&self) -> bool { + true + } + + #[inline] + pub unsafe fn unlock(&self) { + let _ = abi::recmutex_unlock(self.inner); + } + + #[inline] + pub unsafe fn destroy(&self) { + let _ = abi::recmutex_destroy(self.inner); + } +} diff --git a/src/libstd/sys/hermit/net.rs b/src/libstd/sys/hermit/net.rs new file mode 100644 index 0000000000..5b7ff64227 --- /dev/null +++ b/src/libstd/sys/hermit/net.rs @@ -0,0 +1,364 @@ +use crate::fmt; +use crate::convert::TryFrom; +use crate::io::{self, IoSlice, IoSliceMut}; +use crate::net::{SocketAddr, Shutdown, Ipv4Addr, Ipv6Addr}; +use crate::str; +use crate::sys::{unsupported, Void}; +use crate::time::Duration; + +//// Iinitializes HermitCore's network stack +pub unsafe fn init() -> io::Result<()> { + Ok(()) +} + +pub struct TcpStream(Void); + +impl TcpStream { + pub fn connect(_: io::Result<&SocketAddr>) -> io::Result { + unsupported() + } + + pub fn 
connect_timeout(_: &SocketAddr, _: Duration) -> io::Result { + unsupported() + } + + pub fn set_read_timeout(&self, _: Option) -> io::Result<()> { + match self.0 {} + } + + pub fn set_write_timeout(&self, _: Option) -> io::Result<()> { + match self.0 {} + } + + pub fn read_timeout(&self) -> io::Result> { + match self.0 {} + } + + pub fn write_timeout(&self) -> io::Result> { + match self.0 {} + } + + pub fn peek(&self, _: &mut [u8]) -> io::Result { + match self.0 {} + } + + pub fn read(&self, _: &mut [u8]) -> io::Result { + match self.0 {} + } + + pub fn read_vectored(&self, _: &mut [IoSliceMut<'_>]) -> io::Result { + match self.0 {} + } + + pub fn write(&self, _: &[u8]) -> io::Result { + match self.0 {} + } + + pub fn write_vectored(&self, _: &[IoSlice<'_>]) -> io::Result { + match self.0 {} + } + + pub fn peer_addr(&self) -> io::Result { + match self.0 {} + } + + pub fn socket_addr(&self) -> io::Result { + match self.0 {} + } + + pub fn shutdown(&self, _: Shutdown) -> io::Result<()> { + match self.0 {} + } + + pub fn duplicate(&self) -> io::Result { + match self.0 {} + } + + pub fn set_nodelay(&self, _: bool) -> io::Result<()> { + match self.0 {} + } + + pub fn nodelay(&self) -> io::Result { + match self.0 {} + } + + pub fn set_ttl(&self, _: u32) -> io::Result<()> { + match self.0 {} + } + + pub fn ttl(&self) -> io::Result { + match self.0 {} + } + + pub fn take_error(&self) -> io::Result> { + match self.0 {} + } + + pub fn set_nonblocking(&self, _: bool) -> io::Result<()> { + match self.0 {} + } +} + +impl fmt::Debug for TcpStream { + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.0 {} + } +} + +pub struct TcpListener(Void); + +impl TcpListener { + pub fn bind(_: io::Result<&SocketAddr>) -> io::Result { + unsupported() + } + + pub fn socket_addr(&self) -> io::Result { + match self.0 {} + } + + pub fn accept(&self) -> io::Result<(TcpStream, SocketAddr)> { + match self.0 {} + } + + pub fn duplicate(&self) -> io::Result { + match self.0 {} + } + + pub fn set_ttl(&self, _: u32) -> io::Result<()> { + match self.0 {} + } + + pub fn ttl(&self) -> io::Result { + match self.0 {} + } + + pub fn set_only_v6(&self, _: bool) -> io::Result<()> { + match self.0 {} + } + + pub fn only_v6(&self) -> io::Result { + match self.0 {} + } + + pub fn take_error(&self) -> io::Result> { + match self.0 {} + } + + pub fn set_nonblocking(&self, _: bool) -> io::Result<()> { + match self.0 {} + } +} + +impl fmt::Debug for TcpListener { + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.0 {} + } +} + +pub struct UdpSocket(Void); + +impl UdpSocket { + pub fn bind(_: io::Result<&SocketAddr>) -> io::Result { + unsupported() + } + + pub fn peer_addr(&self) -> io::Result { + match self.0 {} + } + + pub fn socket_addr(&self) -> io::Result { + match self.0 {} + } + + pub fn recv_from(&self, _: &mut [u8]) -> io::Result<(usize, SocketAddr)> { + match self.0 {} + } + + pub fn peek_from(&self, _: &mut [u8]) -> io::Result<(usize, SocketAddr)> { + match self.0 {} + } + + pub fn send_to(&self, _: &[u8], _: &SocketAddr) -> io::Result { + match self.0 {} + } + + pub fn duplicate(&self) -> io::Result { + match self.0 {} + } + + pub fn set_read_timeout(&self, _: Option) -> io::Result<()> { + match self.0 {} + } + + pub fn set_write_timeout(&self, _: Option) -> io::Result<()> { + match self.0 {} + } + + pub fn read_timeout(&self) -> io::Result> { + match self.0 {} + } + + pub fn write_timeout(&self) -> io::Result> { + match self.0 {} + } + + pub fn set_broadcast(&self, _: bool) -> 
io::Result<()> { + match self.0 {} + } + + pub fn broadcast(&self) -> io::Result { + match self.0 {} + } + + pub fn set_multicast_loop_v4(&self, _: bool) -> io::Result<()> { + match self.0 {} + } + + pub fn multicast_loop_v4(&self) -> io::Result { + match self.0 {} + } + + pub fn set_multicast_ttl_v4(&self, _: u32) -> io::Result<()> { + match self.0 {} + } + + pub fn multicast_ttl_v4(&self) -> io::Result { + match self.0 {} + } + + pub fn set_multicast_loop_v6(&self, _: bool) -> io::Result<()> { + match self.0 {} + } + + pub fn multicast_loop_v6(&self) -> io::Result { + match self.0 {} + } + + pub fn join_multicast_v4(&self, _: &Ipv4Addr, _: &Ipv4Addr) + -> io::Result<()> { + match self.0 {} + } + + pub fn join_multicast_v6(&self, _: &Ipv6Addr, _: u32) + -> io::Result<()> { + match self.0 {} + } + + pub fn leave_multicast_v4(&self, _: &Ipv4Addr, _: &Ipv4Addr) + -> io::Result<()> { + match self.0 {} + } + + pub fn leave_multicast_v6(&self, _: &Ipv6Addr, _: u32) + -> io::Result<()> { + match self.0 {} + } + + pub fn set_ttl(&self, _: u32) -> io::Result<()> { + match self.0 {} + } + + pub fn ttl(&self) -> io::Result { + match self.0 {} + } + + pub fn take_error(&self) -> io::Result> { + match self.0 {} + } + + pub fn set_nonblocking(&self, _: bool) -> io::Result<()> { + match self.0 {} + } + + pub fn recv(&self, _: &mut [u8]) -> io::Result { + match self.0 {} + } + + pub fn peek(&self, _: &mut [u8]) -> io::Result { + match self.0 {} + } + + pub fn send(&self, _: &[u8]) -> io::Result { + match self.0 {} + } + + pub fn connect(&self, _: io::Result<&SocketAddr>) -> io::Result<()> { + match self.0 {} + } +} + +impl fmt::Debug for UdpSocket { + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.0 {} + } +} + +pub struct LookupHost(Void); + +impl LookupHost { + pub fn port(&self) -> u16 { + match self.0 {} + } +} + +impl Iterator for LookupHost { + type Item = SocketAddr; + fn next(&mut self) -> Option { + match self.0 {} + } +} + +impl TryFrom<&str> for LookupHost { + type Error = io::Error; + + fn try_from(_v: &str) -> io::Result { + unsupported() + } +} + +impl<'a> TryFrom<(&'a str, u16)> for LookupHost { + type Error = io::Error; + + fn try_from(_v: (&'a str, u16)) -> io::Result { + unsupported() + } +} + +#[allow(nonstandard_style)] +pub mod netc { + pub const AF_INET: u8 = 0; + pub const AF_INET6: u8 = 1; + pub type sa_family_t = u8; + + #[derive(Copy, Clone)] + pub struct in_addr { + pub s_addr: u32, + } + + #[derive(Copy, Clone)] + pub struct sockaddr_in { + pub sin_family: sa_family_t, + pub sin_port: u16, + pub sin_addr: in_addr, + } + + #[derive(Copy, Clone)] + pub struct in6_addr { + pub s6_addr: [u8; 16], + } + + #[derive(Copy, Clone)] + pub struct sockaddr_in6 { + pub sin6_family: sa_family_t, + pub sin6_port: u16, + pub sin6_addr: in6_addr, + pub sin6_flowinfo: u32, + pub sin6_scope_id: u32, + } + + #[derive(Copy, Clone)] + pub struct sockaddr { + } + + pub type socklen_t = usize; +} diff --git a/src/libstd/sys/hermit/os.rs b/src/libstd/sys/hermit/os.rs new file mode 100644 index 0000000000..8a25cbcf07 --- /dev/null +++ b/src/libstd/sys/hermit/os.rs @@ -0,0 +1,174 @@ +use crate::error::Error as StdError; +use crate::ffi::{CStr, OsString, OsStr}; +use crate::fmt; +use crate::io; +use crate::marker::PhantomData; +use crate::memchr; +use crate::path::{self, PathBuf}; +use crate::ptr; +use crate::str; +use crate::sys::{unsupported, Void}; +use crate::collections::HashMap; +use crate::vec; +use crate::sync::Mutex; +use crate::sys_common::os_str_bytes::*; +use 
crate::sys::hermit::abi; + +pub fn errno() -> i32 { + 0 +} + +pub fn error_string(_errno: i32) -> String { + "operation successful".to_string() +} + +pub fn getcwd() -> io::Result { + unsupported() +} + +pub fn chdir(_: &path::Path) -> io::Result<()> { + unsupported() +} + +pub struct SplitPaths<'a>(&'a Void); + +pub fn split_paths(_unparsed: &OsStr) -> SplitPaths<'_> { + panic!("unsupported") +} + +impl<'a> Iterator for SplitPaths<'a> { + type Item = PathBuf; + fn next(&mut self) -> Option { + match *self.0 {} + } +} + +#[derive(Debug)] +pub struct JoinPathsError; + +pub fn join_paths(_paths: I) -> Result + where I: Iterator, T: AsRef +{ + Err(JoinPathsError) +} + +impl fmt::Display for JoinPathsError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + "not supported on hermit yet".fmt(f) + } +} + +impl StdError for JoinPathsError { + fn description(&self) -> &str { + "not supported on hermit yet" + } +} + +pub fn current_exe() -> io::Result { + unsupported() +} + +static mut ENV: Option>> = None; + +pub fn init_environment(env: *const *const i8) { + unsafe { + ENV = Some(Mutex::new(HashMap::new())); + + let mut guard = ENV.as_ref().unwrap().lock().unwrap(); + let mut environ = env; + while environ != ptr::null() && *environ != ptr::null() { + if let Some((key,value)) = parse(CStr::from_ptr(*environ).to_bytes()) { + guard.insert(key, value); + } + environ = environ.offset(1); + } + } + + fn parse(input: &[u8]) -> Option<(OsString, OsString)> { + // Strategy (copied from glibc): Variable name and value are separated + // by an ASCII equals sign '='. Since a variable name must not be + // empty, allow variable names starting with an equals sign. Skip all + // malformed lines. + if input.is_empty() { + return None; + } + let pos = memchr::memchr(b'=', &input[1..]).map(|p| p + 1); + pos.map(|p| ( + OsStringExt::from_vec(input[..p].to_vec()), + OsStringExt::from_vec(input[p+1..].to_vec()), + )) + } +} + +pub struct Env { + iter: vec::IntoIter<(OsString, OsString)>, + _dont_send_or_sync_me: PhantomData<*mut ()>, +} + +impl Iterator for Env { + type Item = (OsString, OsString); + fn next(&mut self) -> Option<(OsString, OsString)> { self.iter.next() } + fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } +} + +/// Returns a vector of (variable, value) byte-vector pairs for all the +/// environment variables of the current process. +pub fn env() -> Env { + unsafe { + let guard = ENV.as_ref().unwrap().lock().unwrap(); + let mut result = Vec::new(); + + for (key, value) in guard.iter() { + result.push((key.clone(), value.clone())); + } + + return Env { + iter: result.into_iter(), + _dont_send_or_sync_me: PhantomData, + } + } +} + +pub fn getenv(k: &OsStr) -> io::Result> { + unsafe { + match ENV.as_ref().unwrap().lock().unwrap().get_mut(k) { + Some(value) => { Ok(Some(value.clone())) }, + None => { Ok(None) }, + } + } +} + +pub fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> { + unsafe { + let (k, v) = (k.to_owned(), v.to_owned()); + ENV.as_ref().unwrap().lock().unwrap().insert(k, v); + } + Ok(()) +} + +pub fn unsetenv(k: &OsStr) -> io::Result<()> { + unsafe { + ENV.as_ref().unwrap().lock().unwrap().remove(k); + } + Ok(()) +} + +pub fn temp_dir() -> PathBuf { + panic!("no filesystem on hermit") +} + +pub fn home_dir() -> Option { + None +} + +pub fn exit(code: i32) -> ! 
{
+    unsafe {
+        abi::exit(code);
+    }
+}
+
+pub fn getpid() -> u32 {
+    unsafe {
+        abi::getpid()
+    }
+}
diff --git a/src/libstd/sys/hermit/path.rs b/src/libstd/sys/hermit/path.rs
new file mode 100644
index 0000000000..7a18395610
--- /dev/null
+++ b/src/libstd/sys/hermit/path.rs
@@ -0,0 +1,19 @@
+use crate::path::Prefix;
+use crate::ffi::OsStr;
+
+#[inline]
+pub fn is_sep_byte(b: u8) -> bool {
+    b == b'/'
+}
+
+#[inline]
+pub fn is_verbatim_sep(b: u8) -> bool {
+    b == b'/'
+}
+
+pub fn parse_prefix(_: &OsStr) -> Option<Prefix<'_>> {
+    None
+}
+
+pub const MAIN_SEP_STR: &str = "/";
+pub const MAIN_SEP: char = '/';
diff --git a/src/libstd/sys/hermit/pipe.rs b/src/libstd/sys/hermit/pipe.rs
new file mode 100644
index 0000000000..9f07f05436
--- /dev/null
+++ b/src/libstd/sys/hermit/pipe.rs
@@ -0,0 +1,33 @@
+use crate::io::{self, IoSlice, IoSliceMut};
+use crate::sys::Void;
+
+pub struct AnonPipe(Void);
+
+impl AnonPipe {
+    pub fn read(&self, _buf: &mut [u8]) -> io::Result<usize> {
+        match self.0 {}
+    }
+
+    pub fn read_vectored(&self, _bufs: &mut [IoSliceMut<'_>]) -> io::Result<usize> {
+        match self.0 {}
+    }
+
+    pub fn write(&self, _buf: &[u8]) -> io::Result<usize> {
+        match self.0 {}
+    }
+
+    pub fn write_vectored(&self, _bufs: &[IoSlice<'_>]) -> io::Result<usize> {
+        match self.0 {}
+    }
+
+    pub fn diverge(&self) -> ! {
+        match self.0 {}
+    }
+}
+
+pub fn read2(p1: AnonPipe,
+             _v1: &mut Vec<u8>,
+             _p2: AnonPipe,
+             _v2: &mut Vec<u8>) -> io::Result<()> {
+    match p1.0 {}
+}
diff --git a/src/libstd/sys/hermit/process.rs b/src/libstd/sys/hermit/process.rs
new file mode 100644
index 0000000000..edf933d10e
--- /dev/null
+++ b/src/libstd/sys/hermit/process.rs
@@ -0,0 +1,154 @@
+use crate::ffi::OsStr;
+use crate::fmt;
+use crate::io;
+use crate::sys::fs::File;
+use crate::sys::pipe::AnonPipe;
+use crate::sys::{unsupported, Void};
+use crate::sys_common::process::CommandEnv;
+
+pub use crate::ffi::OsString as EnvKey;
+
+////////////////////////////////////////////////////////////////////////////////
+// Command
+////////////////////////////////////////////////////////////////////////////////
+
+pub struct Command {
+    env: CommandEnv,
+}
+
+// passed back to std::process with the pipes connected to the child, if any
+// were requested
+pub struct StdioPipes {
+    pub stdin: Option<AnonPipe>,
+    pub stdout: Option<AnonPipe>,
+    pub stderr: Option<AnonPipe>,
+}
+
+pub enum Stdio {
+    Inherit,
+    Null,
+    MakePipe,
+}
+
+impl Command {
+    pub fn new(_program: &OsStr) -> Command {
+        Command {
+            env: Default::default()
+        }
+    }
+
+    pub fn arg(&mut self, _arg: &OsStr) {
+    }
+
+    pub fn env_mut(&mut self) -> &mut CommandEnv {
+        &mut self.env
+    }
+
+    pub fn cwd(&mut self, _dir: &OsStr) {
+    }
+
+    pub fn stdin(&mut self, _stdin: Stdio) {
+    }
+
+    pub fn stdout(&mut self, _stdout: Stdio) {
+    }
+
+    pub fn stderr(&mut self, _stderr: Stdio) {
+    }
+
+    pub fn spawn(&mut self, _default: Stdio, _needs_stdin: bool)
+        -> io::Result<(Process, StdioPipes)> {
+        unsupported()
+    }
+}
+
+impl From<AnonPipe> for Stdio {
+    fn from(pipe: AnonPipe) -> Stdio {
+        pipe.diverge()
+    }
+}
+
+impl From<File> for Stdio {
+    fn from(file: File) -> Stdio {
+        file.diverge()
+    }
+}
+
+impl fmt::Debug for Command {
+    fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        Ok(())
+    }
+}
+
+pub struct ExitStatus(Void);
+
+impl ExitStatus {
+    pub fn success(&self) -> bool {
+        match self.0 {}
+    }
+
+    pub fn code(&self) -> Option<i32> {
+        match self.0 {}
+    }
+}
+
+impl Clone for ExitStatus {
+    fn clone(&self) -> ExitStatus {
+        match self.0 {}
+    }
+}
+
+impl Copy for ExitStatus {}
+
+impl PartialEq for ExitStatus {
+    fn eq(&self, _other: &ExitStatus) -> bool {
+ match self.0 {} + } +} + +impl Eq for ExitStatus { +} + +impl fmt::Debug for ExitStatus { + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.0 {} + } +} + +impl fmt::Display for ExitStatus { + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.0 {} + } +} + +#[derive(PartialEq, Eq, Clone, Copy, Debug)] +pub struct ExitCode(bool); + +impl ExitCode { + pub const SUCCESS: ExitCode = ExitCode(false); + pub const FAILURE: ExitCode = ExitCode(true); + + pub fn as_i32(&self) -> i32 { + self.0 as i32 + } +} + +pub struct Process(Void); + +impl Process { + pub fn id(&self) -> u32 { + match self.0 {} + } + + pub fn kill(&mut self) -> io::Result<()> { + match self.0 {} + } + + pub fn wait(&mut self) -> io::Result { + match self.0 {} + } + + pub fn try_wait(&mut self) -> io::Result> { + match self.0 {} + } +} diff --git a/src/libstd/sys/hermit/rwlock.rs b/src/libstd/sys/hermit/rwlock.rs new file mode 100644 index 0000000000..990e755111 --- /dev/null +++ b/src/libstd/sys/hermit/rwlock.rs @@ -0,0 +1,51 @@ +use super::mutex::Mutex; + +pub struct RWLock { + mutex: Mutex +} + +unsafe impl Send for RWLock {} +unsafe impl Sync for RWLock {} + +impl RWLock { + pub const fn new() -> RWLock { + RWLock { + mutex: Mutex::new() + } + } + + #[inline] + pub unsafe fn read(&self) { + self.mutex.lock(); + } + + #[inline] + pub unsafe fn try_read(&self) -> bool { + self.mutex.try_lock() + } + + #[inline] + pub unsafe fn write(&self) { + self.mutex.lock(); + } + + #[inline] + pub unsafe fn try_write(&self) -> bool { + self.mutex.try_lock() + } + + #[inline] + pub unsafe fn read_unlock(&self) { + self.mutex.unlock(); + } + + #[inline] + pub unsafe fn write_unlock(&self) { + self.mutex.unlock(); + } + + #[inline] + pub unsafe fn destroy(&self) { + self.mutex.destroy(); + } +} diff --git a/src/libstd/sys/hermit/stack_overflow.rs b/src/libstd/sys/hermit/stack_overflow.rs new file mode 100644 index 0000000000..b339e433e7 --- /dev/null +++ b/src/libstd/sys/hermit/stack_overflow.rs @@ -0,0 +1,15 @@ +pub struct Handler; + +impl Handler { + pub unsafe fn new() -> Handler { + Handler + } +} + +#[inline] +pub unsafe fn init() { +} + +#[inline] +pub unsafe fn cleanup() { +} diff --git a/src/libstd/sys/hermit/stdio.rs b/src/libstd/sys/hermit/stdio.rs new file mode 100644 index 0000000000..9505f02fda --- /dev/null +++ b/src/libstd/sys/hermit/stdio.rs @@ -0,0 +1,119 @@ +use crate::io; +use crate::io::{IoSlice, IoSliceMut}; +use crate::sys::hermit::abi; + +pub struct Stdin; +pub struct Stdout; +pub struct Stderr; + +impl Stdin { + pub fn new() -> io::Result { + Ok(Stdin) + } + + pub fn read(&self, data: &mut [u8]) -> io::Result { + self.read_vectored(&mut [IoSliceMut::new(data)]) + } + + pub fn read_vectored(&self, _data: &mut [IoSliceMut<'_>]) -> io::Result { + //ManuallyDrop::new(unsafe { WasiFd::from_raw(libc::STDIN_FILENO as u32) }) + // .read(data) + Ok(0) + } + +} + +impl Stdout { + pub fn new() -> io::Result { + Ok(Stdout) + } + + pub fn write(&self, data: &[u8]) -> io::Result { + let len; + + unsafe { + len = abi::write(1, data.as_ptr() as *const u8, data.len()) + } + + if len < 0 { + Err(io::Error::new(io::ErrorKind::Other, "Stdout is not able to print")) + } else { + Ok(len as usize) + } + } + + pub fn write_vectored(&self, data: &[IoSlice<'_>]) -> io::Result { + let len; + + unsafe { + len = abi::write(1, data.as_ptr() as *const u8, data.len()) + } + + if len < 0 { + Err(io::Error::new(io::ErrorKind::Other, "Stdout is not able to print")) + } else { + Ok(len as usize) + } + } + 
+ pub fn flush(&self) -> io::Result<()> { + Ok(()) + } +} + +impl Stderr { + pub fn new() -> io::Result { + Ok(Stderr) + } + + pub fn write(&self, data: &[u8]) -> io::Result { + let len; + + unsafe { + len = abi::write(2, data.as_ptr() as *const u8, data.len()) + } + + if len < 0 { + Err(io::Error::new(io::ErrorKind::Other, "Stderr is not able to print")) + } else { + Ok(len as usize) + } + } + + pub fn write_vectored(&self, data: &[IoSlice<'_>]) -> io::Result { + let len; + + unsafe { + len = abi::write(2, data.as_ptr() as *const u8, data.len()) + } + + if len < 0 { + Err(io::Error::new(io::ErrorKind::Other, "Stderr is not able to print")) + } else { + Ok(len as usize) + } + } + + pub fn flush(&self) -> io::Result<()> { + Ok(()) + } +} + +impl io::Write for Stderr { + fn write(&mut self, data: &[u8]) -> io::Result { + (&*self).write(data) + } + fn flush(&mut self) -> io::Result<()> { + (&*self).flush() + } +} + +pub const STDIN_BUF_SIZE: usize = 0; + +pub fn is_ebadf(_err: &io::Error) -> bool { + true +} + +pub fn panic_output() -> Option { + Stderr::new().ok() +} diff --git a/src/libstd/sys/hermit/thread.rs b/src/libstd/sys/hermit/thread.rs new file mode 100644 index 0000000000..99a9c830c9 --- /dev/null +++ b/src/libstd/sys/hermit/thread.rs @@ -0,0 +1,116 @@ +#![allow(dead_code)] + +use crate::ffi::CStr; +use crate::io; +use crate::sys::hermit::abi; +use crate::time::Duration; +use crate::mem; +use crate::fmt; +use core::u32; + +use crate::sys_common::thread::*; + +pub type Tid = abi::Tid; + +/// Priority of a task +#[derive(PartialEq, Eq, PartialOrd, Ord, Debug, Clone, Copy)] +pub struct Priority(u8); + +impl Priority { + pub const fn into(self) -> u8 { + self.0 + } + + pub const fn from(x: u8) -> Self { + Priority(x) + } +} + +impl fmt::Display for Priority { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +pub const NORMAL_PRIO: Priority = Priority::from(2); + +pub struct Thread { + tid: Tid +} + +unsafe impl Send for Thread {} +unsafe impl Sync for Thread {} + +pub const DEFAULT_MIN_STACK_SIZE: usize = 262144; + +impl Thread { + pub unsafe fn new_with_coreid(_stack: usize, p: Box, core_id: isize) + -> io::Result + { + let p = box p; + let mut tid: Tid = u32::MAX; + let ret = abi::spawn(&mut tid as *mut Tid, thread_start, + &*p as *const _ as *const u8 as usize, + Priority::into(NORMAL_PRIO), core_id); + + return if ret == 0 { + mem::forget(p); // ownership passed to pthread_create + Ok(Thread { tid: tid }) + } else { + Err(io::Error::new(io::ErrorKind::Other, "Unable to create thread!")) + }; + + extern fn thread_start(main: usize) { + unsafe { + start_thread(main as *mut u8); + } + } + } + + pub unsafe fn new(stack: usize, p: Box) + -> io::Result + { + Thread::new_with_coreid(stack, p, -1 /* = no specific core */) + } + + #[inline] + pub fn yield_now() { + unsafe { + abi::yield_now(); + } + } + + #[inline] + pub fn set_name(_name: &CStr) { + // nope + } + + #[inline] + pub fn sleep(dur: Duration) { + unsafe { + abi::usleep(dur.as_micros() as u64); + } + } + + pub fn join(self) { + unsafe { + let _ = abi::join(self.tid); + } + } + + #[inline] + pub fn id(&self) -> Tid { self.tid } + + #[inline] + pub fn into_id(self) -> Tid { + let id = self.tid; + mem::forget(self); + id + } +} + +pub mod guard { + pub type Guard = !; + pub unsafe fn current() -> Option { None } + pub unsafe fn init() -> Option { None } +} diff --git a/src/libstd/sys/hermit/thread_local.rs b/src/libstd/sys/hermit/thread_local.rs new file mode 100644 index 
0000000000..4bc8c4d588
--- /dev/null
+++ b/src/libstd/sys/hermit/thread_local.rs
@@ -0,0 +1,61 @@
+#![allow(dead_code)] // not used on all platforms
+
+use crate::collections::BTreeMap;
+use crate::ptr;
+use crate::sync::atomic::{AtomicUsize, Ordering};
+
+pub type Key = usize;
+
+type Dtor = unsafe extern fn(*mut u8);
+
+static NEXT_KEY: AtomicUsize = AtomicUsize::new(0);
+
+static mut KEYS: *mut BTreeMap<Key, Option<Dtor>> = ptr::null_mut();
+
+#[thread_local]
+static mut LOCALS: *mut BTreeMap<Key, *mut u8> = ptr::null_mut();
+
+unsafe fn keys() -> &'static mut BTreeMap<Key, Option<Dtor>> {
+    if KEYS == ptr::null_mut() {
+        KEYS = Box::into_raw(Box::new(BTreeMap::new()));
+    }
+    &mut *KEYS
+}
+
+unsafe fn locals() -> &'static mut BTreeMap<Key, *mut u8> {
+    if LOCALS == ptr::null_mut() {
+        LOCALS = Box::into_raw(Box::new(BTreeMap::new()));
+    }
+    &mut *LOCALS
+}
+
+#[inline]
+pub unsafe fn create(dtor: Option<Dtor>) -> Key {
+    let key = NEXT_KEY.fetch_add(1, Ordering::SeqCst);
+    keys().insert(key, dtor);
+    key
+}
+
+#[inline]
+pub unsafe fn get(key: Key) -> *mut u8 {
+    if let Some(&entry) = locals().get(&key) {
+        entry
+    } else {
+        ptr::null_mut()
+    }
+}
+
+#[inline]
+pub unsafe fn set(key: Key, value: *mut u8) {
+    locals().insert(key, value);
+}
+
+#[inline]
+pub unsafe fn destroy(key: Key) {
+    keys().remove(&key);
+}
+
+#[inline]
+pub fn requires_synchronized_create() -> bool {
+    false
+}
diff --git a/src/libstd/sys/hermit/time.rs b/src/libstd/sys/hermit/time.rs
new file mode 100644
index 0000000000..8372189546
--- /dev/null
+++ b/src/libstd/sys/hermit/time.rs
@@ -0,0 +1,176 @@
+#![allow(dead_code)]
+
+use crate::time::Duration;
+use crate::cmp::Ordering;
+use crate::convert::TryInto;
+use core::hash::{Hash, Hasher};
+use crate::sys::hermit::abi;
+use crate::sys::hermit::abi::{CLOCK_REALTIME, CLOCK_MONOTONIC, NSEC_PER_SEC};
+use crate::sys::hermit::abi::timespec;
+
+#[derive(Copy, Clone, Debug)]
+struct Timespec {
+    t: timespec
+}
+
+impl Timespec {
+    const fn zero() -> Timespec {
+        Timespec {
+            t: timespec { tv_sec: 0, tv_nsec: 0 },
+        }
+    }
+
+    fn sub_timespec(&self, other: &Timespec) -> Result<Duration, Duration> {
+        if self >= other {
+            Ok(if self.t.tv_nsec >= other.t.tv_nsec {
+                Duration::new((self.t.tv_sec - other.t.tv_sec) as u64,
+                              (self.t.tv_nsec - other.t.tv_nsec) as u32)
+            } else {
+                Duration::new((self.t.tv_sec - 1 - other.t.tv_sec) as u64,
+                              self.t.tv_nsec as u32 + (NSEC_PER_SEC as u32) -
+                              other.t.tv_nsec as u32)
+            })
+        } else {
+            match other.sub_timespec(self) {
+                Ok(d) => Err(d),
+                Err(d) => Ok(d),
+            }
+        }
+    }
+
+    fn checked_add_duration(&self, other: &Duration) -> Option<Timespec> {
+        let mut secs = other
+            .as_secs()
+            .try_into() // <- target type would be `libc::time_t`
+            .ok()
+            .and_then(|secs| self.t.tv_sec.checked_add(secs))?;
+
+        // Nano calculations can't overflow because nanos are <1B which fit
+        // in a u32.
+        let mut nsec = other.subsec_nanos() + self.t.tv_nsec as u32;
+        if nsec >= NSEC_PER_SEC as u32 {
+            nsec -= NSEC_PER_SEC as u32;
+            secs = secs.checked_add(1)?;
+        }
+        Some(Timespec {
+            t: timespec {
+                tv_sec: secs,
+                tv_nsec: nsec as _,
+            },
+        })
+    }
+
+    fn checked_sub_duration(&self, other: &Duration) -> Option<Timespec> {
+        let mut secs = other
+            .as_secs()
+            .try_into() // <- target type would be `libc::time_t`
+            .ok()
+            .and_then(|secs| self.t.tv_sec.checked_sub(secs))?;
+
+        // Similar to above, nanos can't overflow.
+ let mut nsec = self.t.tv_nsec as i32 - other.subsec_nanos() as i32; + if nsec < 0 { + nsec += NSEC_PER_SEC as i32; + secs = secs.checked_sub(1)?; + } + Some(Timespec { + t: timespec { + tv_sec: secs, + tv_nsec: nsec as _, + }, + }) + } +} + +impl PartialEq for Timespec { + fn eq(&self, other: &Timespec) -> bool { + self.t.tv_sec == other.t.tv_sec && self.t.tv_nsec == other.t.tv_nsec + } +} + +impl Eq for Timespec {} + +impl PartialOrd for Timespec { + fn partial_cmp(&self, other: &Timespec) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for Timespec { + fn cmp(&self, other: &Timespec) -> Ordering { + let me = (self.t.tv_sec, self.t.tv_nsec); + let other = (other.t.tv_sec, other.t.tv_nsec); + me.cmp(&other) + } +} + +impl Hash for Timespec { + fn hash(&self, state: &mut H) { + self.t.tv_sec.hash(state); + self.t.tv_nsec.hash(state); + } +} + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)] +pub struct Instant { + t: Timespec, +} + +impl Instant { + pub fn now() -> Instant { + let mut time: Timespec = Timespec::zero(); + let _ = unsafe { abi::clock_gettime(CLOCK_MONOTONIC, &mut time.t as *mut timespec) }; + + Instant { t: time } + } + + pub const fn zero() -> Instant { + Instant { t: Timespec::zero() } + } + + pub fn actually_monotonic() -> bool { + true + } + + pub fn checked_sub_instant(&self, other: &Instant) -> Option { + self.t.sub_timespec(&other.t).ok() + } + + pub fn checked_add_duration(&self, other: &Duration) -> Option { + Some(Instant { t: self.t.checked_add_duration(other)? }) + } + + pub fn checked_sub_duration(&self, other: &Duration) -> Option { + Some(Instant { t: self.t.checked_sub_duration(other)? }) + } +} + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] +pub struct SystemTime { + t: Timespec, +} + +pub const UNIX_EPOCH: SystemTime = SystemTime { + t: Timespec::zero(), +}; + +impl SystemTime { + pub fn now() -> SystemTime { + let mut time: Timespec = Timespec::zero(); + let _ = unsafe { abi::clock_gettime(CLOCK_REALTIME, &mut time.t as *mut timespec) }; + + SystemTime { t: time } + } + + pub fn sub_time(&self, other: &SystemTime) -> Result { + self.t.sub_timespec(&other.t) + } + + pub fn checked_add_duration(&self, other: &Duration) -> Option { + Some(SystemTime { t: self.t.checked_add_duration(other)? }) + } + + pub fn checked_sub_duration(&self, other: &Duration) -> Option { + Some(SystemTime { t: self.t.checked_sub_duration(other)? }) + } +} diff --git a/src/libstd/sys/mod.rs b/src/libstd/sys/mod.rs index 5a5859a6ad..16b0539cdb 100644 --- a/src/libstd/sys/mod.rs +++ b/src/libstd/sys/mod.rs @@ -35,6 +35,9 @@ cfg_if::cfg_if! { } else if #[cfg(target_os = "cloudabi")] { mod cloudabi; pub use self::cloudabi::*; + } else if #[cfg(target_os = "hermit")] { + mod hermit; + pub use self::hermit::*; } else if #[cfg(target_os = "wasi")] { mod wasi; pub use self::wasi::*; @@ -60,6 +63,7 @@ cfg_if::cfg_if! 
{ #[stable(feature = "rust1", since = "1.0.0")] pub use self::ext as unix_ext; } else if #[cfg(any(target_os = "cloudabi", + target_os = "hermit", target_arch = "wasm32", all(target_vendor = "fortanix", target_env = "sgx")))] { // On CloudABI and wasm right now the module below doesn't compile diff --git a/src/libstd/sys/sgx/abi/entry.S b/src/libstd/sys/sgx/abi/entry.S index c35e49b1dc..cd26c7ca20 100644 --- a/src/libstd/sys/sgx/abi/entry.S +++ b/src/libstd/sys/sgx/abi/entry.S @@ -119,8 +119,14 @@ sgx_entry: mov %rbx,%gs:tcsls_tcs_addr stmxcsr %gs:tcsls_user_mxcsr fnstcw %gs:tcsls_user_fcw + /* reset user state */ - cld /* x86-64 ABI requires DF to be unset at function entry/exit */ +/* - DF flag: x86-64 ABI requires DF to be unset at function entry/exit */ +/* - AC flag: AEX on misaligned memory accesses leaks side channel info */ + pushfq + andq $~0x40400, (%rsp) + popfq + /* check for debug buffer pointer */ testb $0xff,DEBUG(%rip) jz .Lskip_debug_init diff --git a/src/libstd/sys/unix/alloc.rs b/src/libstd/sys/unix/alloc.rs index f47dc92d2d..cf4900b489 100644 --- a/src/libstd/sys/unix/alloc.rs +++ b/src/libstd/sys/unix/alloc.rs @@ -53,7 +53,6 @@ unsafe impl GlobalAlloc for System { } #[cfg(any(target_os = "android", - target_os = "hermit", target_os = "redox", target_os = "solaris"))] #[inline] @@ -79,7 +78,6 @@ unsafe fn aligned_malloc(layout: &Layout) -> *mut u8 { } #[cfg(not(any(target_os = "android", - target_os = "hermit", target_os = "redox", target_os = "solaris")))] #[inline] diff --git a/src/libstd/sys/unix/args.rs b/src/libstd/sys/unix/args.rs index 288e9b5c12..82ef35ea7b 100644 --- a/src/libstd/sys/unix/args.rs +++ b/src/libstd/sys/unix/args.rs @@ -56,7 +56,6 @@ impl DoubleEndedIterator for Args { target_os = "haiku", target_os = "l4re", target_os = "fuchsia", - target_os = "hermit", target_os = "redox"))] mod imp { use crate::os::unix::prelude::*; diff --git a/src/libstd/sys/unix/condvar.rs b/src/libstd/sys/unix/condvar.rs index 0a93fbf8ea..6be844ded1 100644 --- a/src/libstd/sys/unix/condvar.rs +++ b/src/libstd/sys/unix/condvar.rs @@ -31,7 +31,6 @@ impl Condvar { target_os = "ios", target_os = "l4re", target_os = "android", - target_os = "hermit", target_os = "redox"))] pub unsafe fn init(&mut self) {} @@ -39,7 +38,6 @@ impl Condvar { target_os = "ios", target_os = "l4re", target_os = "android", - target_os = "hermit", target_os = "redox")))] pub unsafe fn init(&mut self) { use crate::mem::MaybeUninit; @@ -78,8 +76,7 @@ impl Condvar { // from changes made to the system time. 
#[cfg(not(any(target_os = "macos", target_os = "ios", - target_os = "android", - target_os = "hermit")))] + target_os = "android")))] pub unsafe fn wait_timeout(&self, mutex: &Mutex, dur: Duration) -> bool { use crate::mem; @@ -109,7 +106,7 @@ impl Condvar { // This implementation is modeled after libcxx's condition_variable // https://github.com/llvm-mirror/libcxx/blob/release_35/src/condition_variable.cpp#L46 // https://github.com/llvm-mirror/libcxx/blob/release_35/include/__mutex_base#L367 - #[cfg(any(target_os = "macos", target_os = "ios", target_os = "android", target_os = "hermit"))] + #[cfg(any(target_os = "macos", target_os = "ios", target_os = "android"))] pub unsafe fn wait_timeout(&self, mutex: &Mutex, mut dur: Duration) -> bool { use crate::ptr; use crate::time::Instant; diff --git a/src/libstd/sys/unix/env.rs b/src/libstd/sys/unix/env.rs index d724eeb8b3..984bcfa450 100644 --- a/src/libstd/sys/unix/env.rs +++ b/src/libstd/sys/unix/env.rs @@ -152,17 +152,6 @@ pub mod os { pub const EXE_EXTENSION: &str = ""; } -#[cfg(target_os = "hermit")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "hermit"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".so"; - pub const DLL_EXTENSION: &str = "so"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} - #[cfg(target_os = "redox")] pub mod os { pub const FAMILY: &str = "unix"; diff --git a/src/libstd/sys/unix/fast_thread_local.rs b/src/libstd/sys/unix/fast_thread_local.rs index 952ba40ee8..d7e733b7fa 100644 --- a/src/libstd/sys/unix/fast_thread_local.rs +++ b/src/libstd/sys/unix/fast_thread_local.rs @@ -10,7 +10,8 @@ // fallback implementation to use as well. // // Due to rust-lang/rust#18804, make sure this is not generic! -#[cfg(any(target_os = "linux", target_os = "fuchsia", target_os = "hermit", target_os = "redox"))] +#[cfg(any(target_os = "linux", target_os = "fuchsia", target_os = "hermit", target_os = "redox", + target_os = "emscripten"))] pub unsafe fn register_dtor(t: *mut u8, dtor: unsafe extern fn(*mut u8)) { use crate::mem; use crate::sys_common::thread_local::register_dtor_fallback; diff --git a/src/libstd/sys/unix/fd.rs b/src/libstd/sys/unix/fd.rs index ac43526b50..ba611a6b7e 100644 --- a/src/libstd/sys/unix/fd.rs +++ b/src/libstd/sys/unix/fd.rs @@ -71,22 +71,7 @@ impl FileDesc { #[cfg(target_os = "android")] use super::android::cvt_pread64; - #[cfg(target_os = "emscripten")] - unsafe fn cvt_pread64(fd: c_int, buf: *mut c_void, count: usize, offset: i64) - -> io::Result - { - use crate::convert::TryInto; - use libc::pread64; - // pread64 on emscripten actually takes a 32 bit offset - if let Ok(o) = offset.try_into() { - cvt(pread64(fd, buf, count, o)) - } else { - Err(io::Error::new(io::ErrorKind::InvalidInput, - "cannot pread >2GB")) - } - } - - #[cfg(not(any(target_os = "android", target_os = "emscripten")))] + #[cfg(not(target_os = "android"))] unsafe fn cvt_pread64(fd: c_int, buf: *mut c_void, count: usize, offset: i64) -> io::Result { @@ -128,22 +113,7 @@ impl FileDesc { #[cfg(target_os = "android")] use super::android::cvt_pwrite64; - #[cfg(target_os = "emscripten")] - unsafe fn cvt_pwrite64(fd: c_int, buf: *const c_void, count: usize, offset: i64) - -> io::Result - { - use crate::convert::TryInto; - use libc::pwrite64; - // pwrite64 on emscripten actually takes a 32 bit offset - if let Ok(o) = offset.try_into() { - cvt(pwrite64(fd, buf, count, o)) - } else { - Err(io::Error::new(io::ErrorKind::InvalidInput, - "cannot pwrite >2GB")) - } - } - - 
#[cfg(not(any(target_os = "android", target_os = "emscripten")))] + #[cfg(not(target_os = "android"))] unsafe fn cvt_pwrite64(fd: c_int, buf: *const c_void, count: usize, offset: i64) -> io::Result { diff --git a/src/libstd/sys/unix/fs.rs b/src/libstd/sys/unix/fs.rs index 3b1eb86b84..cbf751bec9 100644 --- a/src/libstd/sys/unix/fs.rs +++ b/src/libstd/sys/unix/fs.rs @@ -41,11 +41,151 @@ pub use crate::sys_common::fs::remove_dir_all; pub struct File(FileDesc); -#[derive(Clone)] -pub struct FileAttr { - stat: stat64, +// FIXME: This should be available on Linux with all `target_arch` and `target_env`. +// https://github.com/rust-lang/libc/issues/1545 +macro_rules! cfg_has_statx { + ({ $($then_tt:tt)* } else { $($else_tt:tt)* }) => { + cfg_if::cfg_if! { + if #[cfg(all(target_os = "linux", target_env = "gnu", any( + target_arch = "x86", + target_arch = "arm", + // target_arch = "mips", + target_arch = "powerpc", + target_arch = "x86_64", + // target_arch = "aarch64", + target_arch = "powerpc64", + // target_arch = "mips64", + // target_arch = "s390x", + target_arch = "sparc64", + )))] { + $($then_tt)* + } else { + $($else_tt)* + } + } + }; + ($($block_inner:tt)*) => { + #[cfg(all(target_os = "linux", target_env = "gnu", any( + target_arch = "x86", + target_arch = "arm", + // target_arch = "mips", + target_arch = "powerpc", + target_arch = "x86_64", + // target_arch = "aarch64", + target_arch = "powerpc64", + // target_arch = "mips64", + // target_arch = "s390x", + target_arch = "sparc64", + )))] + { + $($block_inner)* + } + }; } +cfg_has_statx! {{ + #[derive(Clone)] + pub struct FileAttr { + stat: stat64, + statx_extra_fields: Option, + } + + #[derive(Clone)] + struct StatxExtraFields { + // This is needed to check if btime is supported by the filesystem. + stx_mask: u32, + stx_btime: libc::statx_timestamp, + } + + // We prefer `statx` on Linux if available, which contains file creation time. + // Default `stat64` contains no creation time. + unsafe fn try_statx( + fd: c_int, + path: *const libc::c_char, + flags: i32, + mask: u32, + ) -> Option> { + use crate::sync::atomic::{AtomicU8, Ordering}; + + // Linux kernel prior to 4.11 or glibc prior to glibc 2.28 don't support `statx` + // We store the availability in global to avoid unnecessary syscalls. + // 0: Unknown + // 1: Not available + // 2: Available + static STATX_STATE: AtomicU8 = AtomicU8::new(0); + syscall! { + fn statx( + fd: c_int, + pathname: *const libc::c_char, + flags: c_int, + mask: libc::c_uint, + statxbuf: *mut libc::statx + ) -> c_int + } + + match STATX_STATE.load(Ordering::Relaxed) { + 0 => { + // It is a trick to call `statx` with NULL pointers to check if the syscall + // is available. According to the manual, it is expected to fail with EFAULT. + // We do this mainly for performance, since it is nearly hundreds times + // faster than a normal successfull call. + let err = cvt(statx(0, ptr::null(), 0, libc::STATX_ALL, ptr::null_mut())) + .err() + .and_then(|e| e.raw_os_error()); + // We don't check `err == Some(libc::ENOSYS)` because the syscall may be limited + // and returns `EPERM`. Listing all possible errors seems not a good idea. 
+ // See: https://github.com/rust-lang/rust/issues/65662 + if err != Some(libc::EFAULT) { + STATX_STATE.store(1, Ordering::Relaxed); + return None; + } + STATX_STATE.store(2, Ordering::Relaxed); + } + 1 => return None, + _ => {} + } + + let mut buf: libc::statx = mem::zeroed(); + if let Err(err) = cvt(statx(fd, path, flags, mask, &mut buf)) { + return Some(Err(err)); + } + + // We cannot fill `stat64` exhaustively because of private padding fields. + let mut stat: stat64 = mem::zeroed(); + // `c_ulong` on gnu-mips, `dev_t` otherwise + stat.st_dev = libc::makedev(buf.stx_dev_major, buf.stx_dev_minor) as _; + stat.st_ino = buf.stx_ino as libc::ino64_t; + stat.st_nlink = buf.stx_nlink as libc::nlink_t; + stat.st_mode = buf.stx_mode as libc::mode_t; + stat.st_uid = buf.stx_uid as libc::uid_t; + stat.st_gid = buf.stx_gid as libc::gid_t; + stat.st_rdev = libc::makedev(buf.stx_rdev_major, buf.stx_rdev_minor) as _; + stat.st_size = buf.stx_size as off64_t; + stat.st_blksize = buf.stx_blksize as libc::blksize_t; + stat.st_blocks = buf.stx_blocks as libc::blkcnt64_t; + stat.st_atime = buf.stx_atime.tv_sec as libc::time_t; + // `i64` on gnu-x86_64-x32, `c_ulong` otherwise. + stat.st_atime_nsec = buf.stx_atime.tv_nsec as _; + stat.st_mtime = buf.stx_mtime.tv_sec as libc::time_t; + stat.st_mtime_nsec = buf.stx_mtime.tv_nsec as _; + stat.st_ctime = buf.stx_ctime.tv_sec as libc::time_t; + stat.st_ctime_nsec = buf.stx_ctime.tv_nsec as _; + + let extra = StatxExtraFields { + stx_mask: buf.stx_mask, + stx_btime: buf.stx_btime, + }; + + Some(Ok(FileAttr { stat, statx_extra_fields: Some(extra) })) + } + +} else { + #[derive(Clone)] + pub struct FileAttr { + stat: stat64, + } +}} + // all DirEntry's will have a reference to this struct struct InnerReadDir { dirp: Dir, @@ -97,6 +237,20 @@ pub struct FileType { mode: mode_t } #[derive(Debug)] pub struct DirBuilder { mode: mode_t } +cfg_has_statx! {{ + impl FileAttr { + fn from_stat64(stat: stat64) -> Self { + Self { stat, statx_extra_fields: None } + } + } +} else { + impl FileAttr { + fn from_stat64(stat: stat64) -> Self { + Self { stat } + } + } +}} + impl FileAttr { pub fn size(&self) -> u64 { self.stat.st_size as u64 } pub fn perm(&self) -> FilePermissions { @@ -164,6 +318,22 @@ impl FileAttr { target_os = "macos", target_os = "ios")))] pub fn created(&self) -> io::Result { + cfg_has_statx! { + if let Some(ext) = &self.statx_extra_fields { + return if (ext.stx_mask & libc::STATX_BTIME) != 0 { + Ok(SystemTime::from(libc::timespec { + tv_sec: ext.stx_btime.tv_sec as libc::time_t, + tv_nsec: ext.stx_btime.tv_nsec as _, + })) + } else { + Err(io::Error::new( + io::ErrorKind::Other, + "creation time is not available for the filesystem", + )) + }; + } + } + Err(io::Error::new(io::ErrorKind::Other, "creation time is not available on this platform \ currently")) @@ -306,12 +476,25 @@ impl DirEntry { #[cfg(any(target_os = "linux", target_os = "emscripten", target_os = "android"))] pub fn metadata(&self) -> io::Result { - let fd = cvt(unsafe {dirfd(self.dir.inner.dirp.0)})?; + let fd = cvt(unsafe { dirfd(self.dir.inner.dirp.0) })?; + let name = self.entry.d_name.as_ptr(); + + cfg_has_statx! 
{ + if let Some(ret) = unsafe { try_statx( + fd, + name, + libc::AT_SYMLINK_NOFOLLOW | libc::AT_STATX_SYNC_AS_STAT, + libc::STATX_ALL, + ) } { + return ret; + } + } + let mut stat: stat64 = unsafe { mem::zeroed() }; cvt(unsafe { - fstatat64(fd, self.entry.d_name.as_ptr(), &mut stat, libc::AT_SYMLINK_NOFOLLOW) + fstatat64(fd, name, &mut stat, libc::AT_SYMLINK_NOFOLLOW) })?; - Ok(FileAttr { stat }) + Ok(FileAttr::from_stat64(stat)) } #[cfg(not(any(target_os = "linux", target_os = "emscripten", target_os = "android")))] @@ -319,12 +502,12 @@ impl DirEntry { lstat(&self.path()) } - #[cfg(any(target_os = "solaris", target_os = "haiku", target_os = "hermit"))] + #[cfg(any(target_os = "solaris", target_os = "haiku"))] pub fn file_type(&self) -> io::Result { lstat(&self.path()).map(|m| m.file_type()) } - #[cfg(not(any(target_os = "solaris", target_os = "haiku", target_os = "hermit")))] + #[cfg(not(any(target_os = "solaris", target_os = "haiku")))] pub fn file_type(&self) -> io::Result { match self.entry.d_type { libc::DT_CHR => Ok(FileType { mode: libc::S_IFCHR }), @@ -347,7 +530,6 @@ impl DirEntry { target_os = "haiku", target_os = "l4re", target_os = "fuchsia", - target_os = "hermit", target_os = "redox"))] pub fn ino(&self) -> u64 { self.entry.d_ino as u64 @@ -378,8 +560,7 @@ impl DirEntry { target_os = "linux", target_os = "emscripten", target_os = "l4re", - target_os = "haiku", - target_os = "hermit"))] + target_os = "haiku"))] fn name_bytes(&self) -> &[u8] { unsafe { CStr::from_ptr(self.entry.d_name.as_ptr()).to_bytes() @@ -517,11 +698,24 @@ impl File { } pub fn file_attr(&self) -> io::Result { + let fd = self.0.raw(); + + cfg_has_statx! { + if let Some(ret) = unsafe { try_statx( + fd, + b"\0" as *const _ as *const libc::c_char, + libc::AT_EMPTY_PATH | libc::AT_STATX_SYNC_AS_STAT, + libc::STATX_ALL, + ) } { + return ret; + } + } + let mut stat: stat64 = unsafe { mem::zeroed() }; cvt(unsafe { - fstat64(self.0.raw(), &mut stat) + fstat64(fd, &mut stat) })?; - Ok(FileAttr { stat }) + Ok(FileAttr::from_stat64(stat)) } pub fn fsync(&self) -> io::Result<()> { @@ -602,8 +796,6 @@ impl File { SeekFrom::End(off) => (libc::SEEK_END, off), SeekFrom::Current(off) => (libc::SEEK_CUR, off), }; - #[cfg(target_os = "emscripten")] - let pos = pos as i32; let n = cvt(unsafe { lseek64(self.0.raw(), pos, whence) })?; Ok(n as u64) } @@ -798,20 +990,44 @@ pub fn link(src: &Path, dst: &Path) -> io::Result<()> { pub fn stat(p: &Path) -> io::Result { let p = cstr(p)?; + + cfg_has_statx! { + if let Some(ret) = unsafe { try_statx( + libc::AT_FDCWD, + p.as_ptr(), + libc::AT_STATX_SYNC_AS_STAT, + libc::STATX_ALL, + ) } { + return ret; + } + } + let mut stat: stat64 = unsafe { mem::zeroed() }; cvt(unsafe { stat64(p.as_ptr(), &mut stat) })?; - Ok(FileAttr { stat }) + Ok(FileAttr::from_stat64(stat)) } pub fn lstat(p: &Path) -> io::Result { let p = cstr(p)?; + + cfg_has_statx! 
{ + if let Some(ret) = unsafe { try_statx( + libc::AT_FDCWD, + p.as_ptr(), + libc::AT_SYMLINK_NOFOLLOW | libc::AT_STATX_SYNC_AS_STAT, + libc::STATX_ALL, + ) } { + return ret; + } + } + let mut stat: stat64 = unsafe { mem::zeroed() }; cvt(unsafe { lstat64(p.as_ptr(), &mut stat) })?; - Ok(FileAttr { stat }) + Ok(FileAttr::from_stat64(stat)) } pub fn canonicalize(p: &Path) -> io::Result { diff --git a/src/libstd/sys/unix/mod.rs b/src/libstd/sys/unix/mod.rs index b1f7aac8b4..d0bed0f038 100644 --- a/src/libstd/sys/unix/mod.rs +++ b/src/libstd/sys/unix/mod.rs @@ -16,7 +16,6 @@ use crate::io::ErrorKind; #[cfg(all(not(rustdoc), target_os = "emscripten"))] pub use crate::os::emscripten as platform; #[cfg(all(not(rustdoc), target_os = "fuchsia"))] pub use crate::os::fuchsia as platform; #[cfg(all(not(rustdoc), target_os = "l4re"))] pub use crate::os::linux as platform; -#[cfg(all(not(rustdoc), target_os = "hermit"))] pub use crate::os::hermit as platform; #[cfg(all(not(rustdoc), target_os = "redox"))] pub use crate::os::redox as platform; pub use self::rand::hashmap_random_keys; diff --git a/src/libstd/sys/unix/os.rs b/src/libstd/sys/unix/os.rs index 169bb57ef7..10cdb25999 100644 --- a/src/libstd/sys/unix/os.rs +++ b/src/libstd/sys/unix/os.rs @@ -43,7 +43,6 @@ extern { #[cfg_attr(any(target_os = "netbsd", target_os = "openbsd", target_os = "android", - target_os = "hermit", target_os = "redox", target_env = "newlib"), link_name = "__errno")] @@ -394,7 +393,7 @@ pub fn current_exe() -> io::Result { crate::fs::read_to_string("sys:exe").map(PathBuf::from) } -#[cfg(any(target_os = "fuchsia", target_os = "l4re", target_os = "hermit"))] +#[cfg(any(target_os = "fuchsia", target_os = "l4re"))] pub fn current_exe() -> io::Result { use crate::io::ErrorKind; Err(io::Error::new(ErrorKind::Other, "Not yet implemented!")) diff --git a/src/libstd/sys/unix/process/mod.rs b/src/libstd/sys/unix/process/mod.rs index 056a20345f..553e980f08 100644 --- a/src/libstd/sys/unix/process/mod.rs +++ b/src/libstd/sys/unix/process/mod.rs @@ -1,5 +1,5 @@ -pub use self::process_common::{Command, ExitStatus, ExitCode, Stdio, StdioPipes}; -pub use self::process_inner::Process; +pub use self::process_common::{Command, ExitCode, Stdio, StdioPipes}; +pub use self::process_inner::{ExitStatus, Process}; pub use crate::ffi::OsString as EnvKey; mod process_common; diff --git a/src/libstd/sys/unix/process/process_common.rs b/src/libstd/sys/unix/process/process_common.rs index 713d308555..4edd2ebf8c 100644 --- a/src/libstd/sys/unix/process/process_common.rs +++ b/src/libstd/sys/unix/process/process_common.rs @@ -393,57 +393,6 @@ impl fmt::Debug for Command { } } -/// Unix exit statuses -#[derive(PartialEq, Eq, Clone, Copy, Debug)] -pub struct ExitStatus(c_int); - -impl ExitStatus { - pub fn new(status: c_int) -> ExitStatus { - ExitStatus(status) - } - - fn exited(&self) -> bool { - unsafe { libc::WIFEXITED(self.0) } - } - - pub fn success(&self) -> bool { - self.code() == Some(0) - } - - pub fn code(&self) -> Option { - if self.exited() { - Some(unsafe { libc::WEXITSTATUS(self.0) }) - } else { - None - } - } - - pub fn signal(&self) -> Option { - if !self.exited() { - Some(unsafe { libc::WTERMSIG(self.0) }) - } else { - None - } - } -} - -impl From for ExitStatus { - fn from(a: c_int) -> ExitStatus { - ExitStatus(a) - } -} - -impl fmt::Display for ExitStatus { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if let Some(code) = self.code() { - write!(f, "exit code: {}", code) - } else { - let signal = self.signal().unwrap(); - 
write!(f, "signal: {}", signal) - } - } -} - #[derive(PartialEq, Eq, Clone, Copy, Debug)] pub struct ExitCode(u8); diff --git a/src/libstd/sys/unix/process/process_fuchsia.rs b/src/libstd/sys/unix/process/process_fuchsia.rs index fff9fc6b3b..2b1a3ecfd7 100644 --- a/src/libstd/sys/unix/process/process_fuchsia.rs +++ b/src/libstd/sys/unix/process/process_fuchsia.rs @@ -1,11 +1,13 @@ +use crate::convert::TryInto; use crate::io; +use crate::fmt; use crate::mem; use crate::ptr; use crate::sys::process::zircon::{Handle, zx_handle_t}; use crate::sys::process::process_common::*; -use libc::size_t; +use libc::{c_int, size_t}; //////////////////////////////////////////////////////////////////////////////// // Command @@ -160,7 +162,7 @@ impl Process { return Err(io::Error::new(io::ErrorKind::InvalidData, "Failed to get exit status of process")); } - Ok(ExitStatus::new(proc_info.rec.return_code)) + Ok(ExitStatus(proc_info.return_code)) } pub fn try_wait(&mut self) -> io::Result> { @@ -190,6 +192,36 @@ impl Process { return Err(io::Error::new(io::ErrorKind::InvalidData, "Failed to get exit status of process")); } - Ok(Some(ExitStatus::new(proc_info.rec.return_code))) + Ok(Some(ExitStatus(proc_info.return_code))) + } +} + +#[derive(PartialEq, Eq, Clone, Copy, Debug)] +pub struct ExitStatus(i64); + +impl ExitStatus { + pub fn success(&self) -> bool { + self.code() == Some(0) + } + + pub fn code(&self) -> Option { + // FIXME: support extracting return code as an i64 + self.0.try_into().ok() + } + + pub fn signal(&self) -> Option { + None + } +} + +impl From for ExitStatus { + fn from(a: c_int) -> ExitStatus { + ExitStatus(a as i64) + } +} + +impl fmt::Display for ExitStatus { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "exit code: {}", self.0) } } diff --git a/src/libstd/sys/unix/process/process_unix.rs b/src/libstd/sys/unix/process/process_unix.rs index e6a742bd45..507dc68926 100644 --- a/src/libstd/sys/unix/process/process_unix.rs +++ b/src/libstd/sys/unix/process/process_unix.rs @@ -1,3 +1,4 @@ +use crate::fmt; use crate::io::{self, Error, ErrorKind}; use crate::ptr; use crate::sys::cvt; @@ -441,3 +442,54 @@ impl Process { } } } + +/// Unix exit statuses +#[derive(PartialEq, Eq, Clone, Copy, Debug)] +pub struct ExitStatus(c_int); + +impl ExitStatus { + pub fn new(status: c_int) -> ExitStatus { + ExitStatus(status) + } + + fn exited(&self) -> bool { + unsafe { libc::WIFEXITED(self.0) } + } + + pub fn success(&self) -> bool { + self.code() == Some(0) + } + + pub fn code(&self) -> Option { + if self.exited() { + Some(unsafe { libc::WEXITSTATUS(self.0) }) + } else { + None + } + } + + pub fn signal(&self) -> Option { + if !self.exited() { + Some(unsafe { libc::WTERMSIG(self.0) }) + } else { + None + } + } +} + +impl From for ExitStatus { + fn from(a: c_int) -> ExitStatus { + ExitStatus(a) + } +} + +impl fmt::Display for ExitStatus { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if let Some(code) = self.code() { + write!(f, "exit code: {}", code) + } else { + let signal = self.signal().unwrap(); + write!(f, "signal: {}", signal) + } + } +} diff --git a/src/libstd/sys/unix/process/zircon.rs b/src/libstd/sys/unix/process/zircon.rs index 1ba48de3c0..188a6b5f2d 100644 --- a/src/libstd/sys/unix/process/zircon.rs +++ b/src/libstd/sys/unix/process/zircon.rs @@ -65,29 +65,14 @@ impl Drop for Handle { } } -// Common ZX_INFO header -#[derive(Default)] -#[repr(C)] -pub struct zx_info_header_t { - pub topic: u32, // identifies the info struct - pub avail_topic_size: u16, 
// “native” size of the struct - pub topic_size: u16, // size of the returned struct (<=topic_size) - pub avail_count: u32, // number of records the kernel has - pub count: u32, // number of records returned (limited by buffer size) -} - -#[derive(Default)] -#[repr(C)] -pub struct zx_record_process_t { - pub return_code: c_int, -} - // Returned for topic ZX_INFO_PROCESS #[derive(Default)] #[repr(C)] pub struct zx_info_process_t { - pub hdr: zx_info_header_t, - pub rec: zx_record_process_t, + pub return_code: i64, + pub started: bool, + pub exited: bool, + pub debugger_attached: bool, } extern { diff --git a/src/libstd/sys/unix/rand.rs b/src/libstd/sys/unix/rand.rs index c5be176330..be112f6fc0 100644 --- a/src/libstd/sys/unix/rand.rs +++ b/src/libstd/sys/unix/rand.rs @@ -8,7 +8,7 @@ pub fn hashmap_random_keys() -> (u64, u64) { mem::size_of_val(&v)); imp::fill_bytes(view); } - return v + v } #[cfg(all(unix, diff --git a/src/libstd/sys/unix/thread.rs b/src/libstd/sys/unix/thread.rs index 988881e359..72b0ac493d 100644 --- a/src/libstd/sys/unix/thread.rs +++ b/src/libstd/sys/unix/thread.rs @@ -140,7 +140,6 @@ impl Thread { target_os = "haiku", target_os = "l4re", target_os = "emscripten", - target_os = "hermit", target_os = "redox"))] pub fn set_name(_name: &CStr) { // Newlib, Illumos, Haiku, and Emscripten have no way to set a thread name. diff --git a/src/libstd/sys/unix/time.rs b/src/libstd/sys/unix/time.rs index fd6796ad22..a9122defa5 100644 --- a/src/libstd/sys/unix/time.rs +++ b/src/libstd/sys/unix/time.rs @@ -371,9 +371,9 @@ mod inner { } } - #[cfg(not(any(target_os = "dragonfly", target_os = "hermit")))] + #[cfg(not(target_os = "dragonfly"))] pub type clock_t = libc::c_int; - #[cfg(any(target_os = "dragonfly", target_os = "hermit"))] + #[cfg(target_os = "dragonfly")] pub type clock_t = libc::c_ulong; fn now(clock: clock_t) -> Timespec { diff --git a/src/libstd/sys/vxworks/ext/io.rs b/src/libstd/sys/vxworks/ext/io.rs index 6bcc59495e..df6255a3e9 100644 --- a/src/libstd/sys/vxworks/ext/io.rs +++ b/src/libstd/sys/vxworks/ext/io.rs @@ -6,7 +6,8 @@ use crate::fs; use crate::os::raw; use crate::sys; use crate::io; -use crate::sys_common::{AsInner, FromInner, IntoInner}; +use crate::sys_common::{self, AsInner, FromInner, IntoInner}; +use crate::net; /// Raw file descriptors. 
#[stable(feature = "rust1", since = "1.0.0")] @@ -110,3 +111,61 @@ impl<'a> AsRawFd for io::StdoutLock<'a> { impl<'a> AsRawFd for io::StderrLock<'a> { fn as_raw_fd(&self) -> RawFd { libc::STDERR_FILENO } } + +#[stable(feature = "rust1", since = "1.0.0")] +impl AsRawFd for net::TcpStream { + fn as_raw_fd(&self) -> RawFd { *self.as_inner().socket().as_inner() } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl AsRawFd for net::TcpListener { + fn as_raw_fd(&self) -> RawFd { *self.as_inner().socket().as_inner() } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl AsRawFd for net::UdpSocket { + fn as_raw_fd(&self) -> RawFd { *self.as_inner().socket().as_inner() } +} + +#[stable(feature = "from_raw_os", since = "1.1.0")] +impl FromRawFd for net::TcpStream { + unsafe fn from_raw_fd(fd: RawFd) -> net::TcpStream { + let socket = sys::net::Socket::from_inner(fd); + net::TcpStream::from_inner(sys_common::net::TcpStream::from_inner(socket)) + } +} + +#[stable(feature = "from_raw_os", since = "1.1.0")] +impl FromRawFd for net::TcpListener { + unsafe fn from_raw_fd(fd: RawFd) -> net::TcpListener { + let socket = sys::net::Socket::from_inner(fd); + net::TcpListener::from_inner(sys_common::net::TcpListener::from_inner(socket)) + } +} + +#[stable(feature = "from_raw_os", since = "1.1.0")] +impl FromRawFd for net::UdpSocket { + unsafe fn from_raw_fd(fd: RawFd) -> net::UdpSocket { + let socket = sys::net::Socket::from_inner(fd); + net::UdpSocket::from_inner(sys_common::net::UdpSocket::from_inner(socket)) + } +} + +#[stable(feature = "into_raw_os", since = "1.4.0")] +impl IntoRawFd for net::TcpStream { + fn into_raw_fd(self) -> RawFd { + self.into_inner().into_socket().into_inner() + } +} +#[stable(feature = "into_raw_os", since = "1.4.0")] +impl IntoRawFd for net::TcpListener { + fn into_raw_fd(self) -> RawFd { + self.into_inner().into_socket().into_inner() + } +} +#[stable(feature = "into_raw_os", since = "1.4.0")] +impl IntoRawFd for net::UdpSocket { + fn into_raw_fd(self) -> RawFd { + self.into_inner().into_socket().into_inner() + } +} diff --git a/src/libstd/sys/vxworks/ext/mod.rs b/src/libstd/sys/vxworks/ext/mod.rs index c2ebc38c30..d0f467b303 100644 --- a/src/libstd/sys/vxworks/ext/mod.rs +++ b/src/libstd/sys/vxworks/ext/mod.rs @@ -1,4 +1,3 @@ -// Uhhh #![stable(feature = "rust1", since = "1.0.0")] #![allow(missing_docs)] @@ -7,7 +6,6 @@ pub mod ffi; pub mod fs; pub mod raw; pub mod process; -pub mod net; #[stable(feature = "rust1", since = "1.0.0")] pub mod prelude { diff --git a/src/libstd/sys/vxworks/ext/net.rs b/src/libstd/sys/vxworks/ext/net.rs deleted file mode 100644 index 3f0a7e9e84..0000000000 --- a/src/libstd/sys/vxworks/ext/net.rs +++ /dev/null @@ -1,1825 +0,0 @@ -#![stable(feature = "unix_socket", since = "1.10.0")] - -//! 
Unix-specific networking functionality - -#[cfg(unix)] -use libc; - -use crate::ascii; -use crate::ffi::OsStr; -use crate::fmt; -use crate::io::{self, Initializer, IoSlice, IoSliceMut}; -use crate::mem; -use crate::net::{self, Shutdown}; -use crate::os::unix::ffi::OsStrExt; -use crate::os::unix::io::{RawFd, AsRawFd, FromRawFd, IntoRawFd}; -use crate::path::Path; -use crate::time::Duration; -use crate::sys::{self, cvt}; -use crate::sys::net::Socket; -use crate::sys_common::{self, AsInner, FromInner, IntoInner}; - -const MSG_NOSIGNAL: libc::c_int = 0x0; - -fn sun_path_offset(addr: &libc::sockaddr_un) -> usize { - // Work with an actual instance of the type since using a null pointer is UB - let base = addr as *const _ as usize; - let path = &addr.sun_path as *const _ as usize; - path - base -} - -unsafe fn sockaddr_un(path: &Path) -> io::Result<(libc::sockaddr_un, libc::socklen_t)> { - let mut addr: libc::sockaddr_un = mem::zeroed(); - addr.sun_family = libc::AF_UNIX as libc::sa_family_t; - - let bytes = path.as_os_str().as_bytes(); - - if bytes.contains(&0) { - return Err(io::Error::new(io::ErrorKind::InvalidInput, - "paths may not contain interior null bytes")); - } - - if bytes.len() >= addr.sun_path.len() { - return Err(io::Error::new(io::ErrorKind::InvalidInput, - "path must be shorter than SUN_LEN")); - } - for (dst, src) in addr.sun_path.iter_mut().zip(bytes.iter()) { - *dst = *src as libc::c_char; - } - // null byte for pathname addresses is already there because we zeroed the - // struct - - let mut len = sun_path_offset(&addr) + bytes.len(); - match bytes.get(0) { - Some(&0) | None => {} - Some(_) => len += 1, - } - Ok((addr, len as libc::socklen_t)) -} - -enum AddressKind<'a> { - Unnamed, - Pathname(&'a Path), - Abstract(&'a [u8]), -} - -/// An address associated with a Unix socket. -/// -/// # Examples -/// -/// ``` -/// use std::os::unix::net::UnixListener; -/// -/// let socket = match UnixListener::bind("/tmp/sock") { -/// Ok(sock) => sock, -/// Err(e) => { -/// println!("Couldn't bind: {:?}", e); -/// return -/// } -/// }; -/// let addr = socket.local_addr().expect("Couldn't get local address"); -/// ``` -#[derive(Clone)] -#[stable(feature = "unix_socket", since = "1.10.0")] -pub struct SocketAddr { - addr: libc::sockaddr_un, - len: libc::socklen_t, -} - -impl SocketAddr { - fn new(f: F) -> io::Result - where F: FnOnce(*mut libc::sockaddr, *mut libc::socklen_t) -> libc::c_int - { - unsafe { - let mut addr: libc::sockaddr_un = mem::zeroed(); - let mut len = mem::size_of::() as libc::socklen_t; - cvt(f(&mut addr as *mut _ as *mut _, &mut len))?; - SocketAddr::from_parts(addr, len) - } - } - - fn from_parts(addr: libc::sockaddr_un, mut len: libc::socklen_t) -> io::Result { - if len == 0 { - // When there is a datagram from unnamed unix socket - // linux returns zero bytes of address - len = sun_path_offset(&addr) as libc::socklen_t; // i.e., zero-length address - } else if addr.sun_family != libc::AF_UNIX as libc::sa_family_t { - return Err(io::Error::new(io::ErrorKind::InvalidInput, - "file descriptor did not correspond to a Unix socket")); - } - - Ok(SocketAddr { - addr, - len, - }) - } - - /// Returns `true` if the address is unnamed. 
- /// - /// # Examples - /// - /// A named address: - /// - /// ```no_run - /// use std::os::unix::net::UnixListener; - /// - /// let socket = UnixListener::bind("/tmp/sock").unwrap(); - /// let addr = socket.local_addr().expect("Couldn't get local address"); - /// assert_eq!(addr.is_unnamed(), false); - /// ``` - /// - /// An unnamed address: - /// - /// ``` - /// use std::os::unix::net::UnixDatagram; - /// - /// let socket = UnixDatagram::unbound().unwrap(); - /// let addr = socket.local_addr().expect("Couldn't get local address"); - /// assert_eq!(addr.is_unnamed(), true); - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn is_unnamed(&self) -> bool { - if let AddressKind::Unnamed = self.address() { - true - } else { - false - } - } - - /// Returns the contents of this address if it is a `pathname` address. - /// - /// # Examples - /// - /// With a pathname: - /// - /// ```no_run - /// use std::os::unix::net::UnixListener; - /// use std::path::Path; - /// - /// let socket = UnixListener::bind("/tmp/sock").unwrap(); - /// let addr = socket.local_addr().expect("Couldn't get local address"); - /// assert_eq!(addr.as_pathname(), Some(Path::new("/tmp/sock"))); - /// ``` - /// - /// Without a pathname: - /// - /// ``` - /// use std::os::unix::net::UnixDatagram; - /// - /// let socket = UnixDatagram::unbound().unwrap(); - /// let addr = socket.local_addr().expect("Couldn't get local address"); - /// assert_eq!(addr.as_pathname(), None); - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn as_pathname(&self) -> Option<&Path> { - if let AddressKind::Pathname(path) = self.address() { - Some(path) - } else { - None - } - } - - fn address<'a>(&'a self) -> AddressKind<'a> { - let len = self.len as usize - sun_path_offset(&self.addr); - let path = unsafe { mem::transmute::<&[libc::c_char], &[u8]>(&self.addr.sun_path) }; - - if self.addr.sun_path[0] == 0 { - AddressKind::Abstract(&path[1..len]) - } else { - AddressKind::Pathname(OsStr::from_bytes(&path[..len - 1]).as_ref()) - } - } -} - -#[stable(feature = "unix_socket", since = "1.10.0")] -impl fmt::Debug for SocketAddr { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.address() { - AddressKind::Unnamed => write!(fmt, "(unnamed)"), - AddressKind::Abstract(name) => write!(fmt, "{} (abstract)", AsciiEscaped(name)), - AddressKind::Pathname(path) => write!(fmt, "{:?} (pathname)", path), - } - } -} - -struct AsciiEscaped<'a>(&'a [u8]); - -impl<'a> fmt::Display for AsciiEscaped<'a> { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(fmt, "\"")?; - for byte in self.0.iter().cloned().flat_map(ascii::escape_default) { - write!(fmt, "{}", byte as char)?; - } - write!(fmt, "\"") - } -} - -/// A Unix stream socket. 
-/// -/// # Examples -/// -/// ```no_run -/// use std::os::unix::net::UnixStream; -/// use std::io::prelude::*; -/// -/// let mut stream = UnixStream::connect("/path/to/my/socket").unwrap(); -/// stream.write_all(b"hello world").unwrap(); -/// let mut response = String::new(); -/// stream.read_to_string(&mut response).unwrap(); -/// println!("{}", response); -/// ``` -#[stable(feature = "unix_socket", since = "1.10.0")] -pub struct UnixStream(Socket); - -#[stable(feature = "unix_socket", since = "1.10.0")] -impl fmt::Debug for UnixStream { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - let mut builder = fmt.debug_struct("UnixStream"); - builder.field("fd", self.0.as_inner()); - if let Ok(addr) = self.local_addr() { - builder.field("local", &addr); - } - if let Ok(addr) = self.peer_addr() { - builder.field("peer", &addr); - } - builder.finish() - } -} - -impl UnixStream { - /// Connects to the socket named by `path`. - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixStream; - /// - /// let socket = match UnixStream::connect("/tmp/sock") { - /// Ok(sock) => sock, - /// Err(e) => { - /// println!("Couldn't connect: {:?}", e); - /// return - /// } - /// }; - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn connect>(path: P) -> io::Result { - fn inner(path: &Path) -> io::Result { - unsafe { - let inner = Socket::new_raw(libc::AF_UNIX, libc::SOCK_STREAM)?; - let (addr, len) = sockaddr_un(path)?; - - cvt(libc::connect(*inner.as_inner(), &addr as *const _ as *const _, len))?; - Ok(UnixStream(inner)) - } - } - inner(path.as_ref()) - } - - /// Creates an unnamed pair of connected sockets. - /// - /// Returns two `UnixStream`s which are connected to each other. - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixStream; - /// - /// let (sock1, sock2) = match UnixStream::pair() { - /// Ok((sock1, sock2)) => (sock1, sock2), - /// Err(e) => { - /// println!("Couldn't create a pair of sockets: {:?}", e); - /// return - /// } - /// }; - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn pair() -> io::Result<(UnixStream, UnixStream)> { - let (i1, i2) = Socket::new_pair(libc::AF_UNIX, libc::SOCK_STREAM)?; - Ok((UnixStream(i1), UnixStream(i2))) - } - - /// Creates a new independently owned handle to the underlying socket. - /// - /// The returned `UnixStream` is a reference to the same stream that this - /// object references. Both handles will read and write the same stream of - /// data, and options set on one stream will be propagated to the other - /// stream. - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixStream; - /// - /// let socket = UnixStream::connect("/tmp/sock").unwrap(); - /// let sock_copy = socket.try_clone().expect("Couldn't clone socket"); - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn try_clone(&self) -> io::Result { - self.0.duplicate().map(UnixStream) - } - - /// Returns the socket address of the local half of this connection. 
- /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixStream; - /// - /// let socket = UnixStream::connect("/tmp/sock").unwrap(); - /// let addr = socket.local_addr().expect("Couldn't get local address"); - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn local_addr(&self) -> io::Result { - SocketAddr::new(|addr, len| unsafe { libc::getsockname(*self.0.as_inner(), addr, len) }) - } - - /// Returns the socket address of the remote half of this connection. - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixStream; - /// - /// let socket = UnixStream::connect("/tmp/sock").unwrap(); - /// let addr = socket.peer_addr().expect("Couldn't get peer address"); - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn peer_addr(&self) -> io::Result { - SocketAddr::new(|addr, len| unsafe { libc::getpeername(*self.0.as_inner(), addr, len) }) - } - - /// Sets the read timeout for the socket. - /// - /// If the provided value is [`None`], then [`read`] calls will block - /// indefinitely. An [`Err`] is returned if the zero [`Duration`] is passed to this - /// method. - /// - /// [`None`]: ../../../../std/option/enum.Option.html#variant.None - /// [`Err`]: ../../../../std/result/enum.Result.html#variant.Err - /// [`read`]: ../../../../std/io/trait.Read.html#tymethod.read - /// [`Duration`]: ../../../../std/time/struct.Duration.html - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixStream; - /// use std::time::Duration; - /// - /// let socket = UnixStream::connect("/tmp/sock").unwrap(); - /// socket.set_read_timeout(Some(Duration::new(1, 0))).expect("Couldn't set read timeout"); - /// ``` - /// - /// An [`Err`] is returned if the zero [`Duration`] is passed to this - /// method: - /// - /// ```no_run - /// use std::io; - /// use std::os::unix::net::UnixStream; - /// use std::time::Duration; - /// - /// let socket = UnixStream::connect("/tmp/sock").unwrap(); - /// let result = socket.set_read_timeout(Some(Duration::new(0, 0))); - /// let err = result.unwrap_err(); - /// assert_eq!(err.kind(), io::ErrorKind::InvalidInput) - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn set_read_timeout(&self, timeout: Option) -> io::Result<()> { - self.0.set_timeout(timeout, libc::SO_RCVTIMEO) - } - - /// Sets the write timeout for the socket. - /// - /// If the provided value is [`None`], then [`write`] calls will block - /// indefinitely. An [`Err`] is returned if the zero [`Duration`] is - /// passed to this method. 
- /// - /// [`None`]: ../../../../std/option/enum.Option.html#variant.None - /// [`Err`]: ../../../../std/result/enum.Result.html#variant.Err - /// [`write`]: ../../../../std/io/trait.Write.html#tymethod.write - /// [`Duration`]: ../../../../std/time/struct.Duration.html - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixStream; - /// use std::time::Duration; - /// - /// let socket = UnixStream::connect("/tmp/sock").unwrap(); - /// socket.set_write_timeout(Some(Duration::new(1, 0))).expect("Couldn't set write timeout"); - /// ``` - /// - /// An [`Err`] is returned if the zero [`Duration`] is passed to this - /// method: - /// - /// ```no_run - /// use std::io; - /// use std::net::UdpSocket; - /// use std::time::Duration; - /// - /// let socket = UdpSocket::bind("127.0.0.1:34254").unwrap(); - /// let result = socket.set_write_timeout(Some(Duration::new(0, 0))); - /// let err = result.unwrap_err(); - /// assert_eq!(err.kind(), io::ErrorKind::InvalidInput) - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn set_write_timeout(&self, timeout: Option) -> io::Result<()> { - self.0.set_timeout(timeout, libc::SO_SNDTIMEO) - } - - /// Returns the read timeout of this socket. - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixStream; - /// use std::time::Duration; - /// - /// let socket = UnixStream::connect("/tmp/sock").unwrap(); - /// socket.set_read_timeout(Some(Duration::new(1, 0))).expect("Couldn't set read timeout"); - /// assert_eq!(socket.read_timeout().unwrap(), Some(Duration::new(1, 0))); - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn read_timeout(&self) -> io::Result> { - self.0.timeout(libc::SO_RCVTIMEO) - } - - /// Returns the write timeout of this socket. - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixStream; - /// use std::time::Duration; - /// - /// let socket = UnixStream::connect("/tmp/sock").unwrap(); - /// socket.set_write_timeout(Some(Duration::new(1, 0))).expect("Couldn't set write timeout"); - /// assert_eq!(socket.write_timeout().unwrap(), Some(Duration::new(1, 0))); - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn write_timeout(&self) -> io::Result> { - self.0.timeout(libc::SO_SNDTIMEO) - } - - /// Moves the socket into or out of nonblocking mode. - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixStream; - /// - /// let socket = UnixStream::connect("/tmp/sock").unwrap(); - /// socket.set_nonblocking(true).expect("Couldn't set nonblocking"); - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> { - self.0.set_nonblocking(nonblocking) - } - - /// Returns the value of the `SO_ERROR` option. - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixStream; - /// - /// let socket = UnixStream::connect("/tmp/sock").unwrap(); - /// if let Ok(Some(err)) = socket.take_error() { - /// println!("Got error: {:?}", err); - /// } - /// ``` - /// - /// # Platform specific - /// On Redox this always returns `None`. - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn take_error(&self) -> io::Result> { - self.0.take_error() - } - - /// Shuts down the read, write, or both halves of this connection. - /// - /// This function will cause all pending and future I/O calls on the - /// specified portions to immediately return with an appropriate value - /// (see the documentation of [`Shutdown`]). 
- /// - /// [`Shutdown`]: ../../../../std/net/enum.Shutdown.html - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixStream; - /// use std::net::Shutdown; - /// - /// let socket = UnixStream::connect("/tmp/sock").unwrap(); - /// socket.shutdown(Shutdown::Both).expect("shutdown function failed"); - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn shutdown(&self, how: Shutdown) -> io::Result<()> { - self.0.shutdown(how) - } -} - -#[stable(feature = "unix_socket", since = "1.10.0")] -impl io::Read for UnixStream { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - io::Read::read(&mut &*self, buf) - } - - fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> io::Result { - io::Read::read_vectored(&mut &*self, bufs) - } - - #[inline] - unsafe fn initializer(&self) -> Initializer { - Initializer::nop() - } -} - -#[stable(feature = "unix_socket", since = "1.10.0")] -impl<'a> io::Read for &'a UnixStream { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - self.0.read(buf) - } - - fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> io::Result { - self.0.read_vectored(bufs) - } - - #[inline] - unsafe fn initializer(&self) -> Initializer { - Initializer::nop() - } -} - -#[stable(feature = "unix_socket", since = "1.10.0")] -impl io::Write for UnixStream { - fn write(&mut self, buf: &[u8]) -> io::Result { - io::Write::write(&mut &*self, buf) - } - - fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> io::Result { - io::Write::write_vectored(&mut &*self, bufs) - } - - fn flush(&mut self) -> io::Result<()> { - io::Write::flush(&mut &*self) - } -} - -#[stable(feature = "unix_socket", since = "1.10.0")] -impl<'a> io::Write for &'a UnixStream { - fn write(&mut self, buf: &[u8]) -> io::Result { - self.0.write(buf) - } - - fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> io::Result { - self.0.write_vectored(bufs) - } - - fn flush(&mut self) -> io::Result<()> { - Ok(()) - } -} - -#[stable(feature = "unix_socket", since = "1.10.0")] -impl AsRawFd for UnixStream { - fn as_raw_fd(&self) -> RawFd { - *self.0.as_inner() - } -} - -#[stable(feature = "unix_socket", since = "1.10.0")] -impl FromRawFd for UnixStream { - unsafe fn from_raw_fd(fd: RawFd) -> UnixStream { - UnixStream(Socket::from_inner(fd)) - } -} - -#[stable(feature = "unix_socket", since = "1.10.0")] -impl IntoRawFd for UnixStream { - fn into_raw_fd(self) -> RawFd { - self.0.into_inner() - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl AsRawFd for net::TcpStream { - fn as_raw_fd(&self) -> RawFd { *self.as_inner().socket().as_inner() } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl AsRawFd for net::TcpListener { - fn as_raw_fd(&self) -> RawFd { *self.as_inner().socket().as_inner() } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl AsRawFd for net::UdpSocket { - fn as_raw_fd(&self) -> RawFd { *self.as_inner().socket().as_inner() } -} - -#[stable(feature = "from_raw_os", since = "1.1.0")] -impl FromRawFd for net::TcpStream { - unsafe fn from_raw_fd(fd: RawFd) -> net::TcpStream { - let socket = sys::net::Socket::from_inner(fd); - net::TcpStream::from_inner(sys_common::net::TcpStream::from_inner(socket)) - } -} - -#[stable(feature = "from_raw_os", since = "1.1.0")] -impl FromRawFd for net::TcpListener { - unsafe fn from_raw_fd(fd: RawFd) -> net::TcpListener { - let socket = sys::net::Socket::from_inner(fd); - net::TcpListener::from_inner(sys_common::net::TcpListener::from_inner(socket)) - } -} - -#[stable(feature = "from_raw_os", since = "1.1.0")] 
-impl FromRawFd for net::UdpSocket { - unsafe fn from_raw_fd(fd: RawFd) -> net::UdpSocket { - let socket = sys::net::Socket::from_inner(fd); - net::UdpSocket::from_inner(sys_common::net::UdpSocket::from_inner(socket)) - } -} - -#[stable(feature = "into_raw_os", since = "1.4.0")] -impl IntoRawFd for net::TcpStream { - fn into_raw_fd(self) -> RawFd { - self.into_inner().into_socket().into_inner() - } -} -#[stable(feature = "into_raw_os", since = "1.4.0")] -impl IntoRawFd for net::TcpListener { - fn into_raw_fd(self) -> RawFd { - self.into_inner().into_socket().into_inner() - } -} -#[stable(feature = "into_raw_os", since = "1.4.0")] -impl IntoRawFd for net::UdpSocket { - fn into_raw_fd(self) -> RawFd { - self.into_inner().into_socket().into_inner() - } -} - -/// A structure representing a Unix domain socket server. -/// -/// # Examples -/// -/// ```no_run -/// use std::thread; -/// use std::os::unix::net::{UnixStream, UnixListener}; -/// -/// fn handle_client(stream: UnixStream) { -/// // ... -/// } -/// -/// let listener = UnixListener::bind("/path/to/the/socket").unwrap(); -/// -/// // accept connections and process them, spawning a new thread for each one -/// for stream in listener.incoming() { -/// match stream { -/// Ok(stream) => { -/// /* connection succeeded */ -/// thread::spawn(|| handle_client(stream)); -/// } -/// Err(err) => { -/// /* connection failed */ -/// break; -/// } -/// } -/// } -/// ``` -#[stable(feature = "unix_socket", since = "1.10.0")] -pub struct UnixListener(Socket); - -#[stable(feature = "unix_socket", since = "1.10.0")] -impl fmt::Debug for UnixListener { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - let mut builder = fmt.debug_struct("UnixListener"); - builder.field("fd", self.0.as_inner()); - if let Ok(addr) = self.local_addr() { - builder.field("local", &addr); - } - builder.finish() - } -} - -impl UnixListener { - /// Creates a new `UnixListener` bound to the specified socket. - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixListener; - /// - /// let listener = match UnixListener::bind("/path/to/the/socket") { - /// Ok(sock) => sock, - /// Err(e) => { - /// println!("Couldn't connect: {:?}", e); - /// return - /// } - /// }; - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn bind>(path: P) -> io::Result { - fn inner(path: &Path) -> io::Result { - unsafe { - let inner = Socket::new_raw(libc::AF_UNIX, libc::SOCK_STREAM)?; - let (addr, len) = sockaddr_un(path)?; - - cvt(libc::bind(*inner.as_inner(), &addr as *const _ as *const _, len as _))?; - cvt(libc::listen(*inner.as_inner(), 128))?; - - Ok(UnixListener(inner)) - } - } - inner(path.as_ref()) - } - - /// Accepts a new incoming connection to this listener. - /// - /// This function will block the calling thread until a new Unix connection - /// is established. When established, the corresponding [`UnixStream`] and - /// the remote peer's address will be returned. 
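The `AsRawFd`/`IntoRawFd`/`FromRawFd` impls above allow a socket to be handed across an FFI boundary and reclaimed later. A small round-trip sketch follows; the usual ownership rules apply (the descriptor must not end up closed twice).

```rust
use std::io::{Read, Write};
use std::os::unix::io::{FromRawFd, IntoRawFd};
use std::os::unix::net::UnixStream;

fn main() -> std::io::Result<()> {
    let (sender, mut receiver) = UnixStream::pair()?;

    // Give up ownership of the descriptor; the `UnixStream` wrapper is
    // consumed and will no longer close the fd on drop.
    let fd = sender.into_raw_fd();

    // ... the raw fd could cross an FFI boundary here ...

    // Reclaim ownership. Safety: `fd` is open, refers to a stream socket,
    // and nothing else owns it at this point.
    let mut sender = unsafe { UnixStream::from_raw_fd(fd) };
    sender.write_all(b"ping")?;

    let mut buf = [0u8; 4];
    receiver.read_exact(&mut buf)?;
    assert_eq!(&buf, b"ping");
    Ok(())
}
```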
- /// - /// [`UnixStream`]: ../../../../std/os/unix/net/struct.UnixStream.html - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixListener; - /// - /// let listener = UnixListener::bind("/path/to/the/socket").unwrap(); - /// - /// match listener.accept() { - /// Ok((socket, addr)) => println!("Got a client: {:?}", addr), - /// Err(e) => println!("accept function failed: {:?}", e), - /// } - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn accept(&self) -> io::Result<(UnixStream, SocketAddr)> { - let mut storage: libc::sockaddr_un = unsafe { mem::zeroed() }; - let mut len = mem::size_of_val(&storage) as libc::socklen_t; - let sock = self.0.accept(&mut storage as *mut _ as *mut _, &mut len)?; - let addr = SocketAddr::from_parts(storage, len)?; - Ok((UnixStream(sock), addr)) - } - - /// Creates a new independently owned handle to the underlying socket. - /// - /// The returned `UnixListener` is a reference to the same socket that this - /// object references. Both handles can be used to accept incoming - /// connections and options set on one listener will affect the other. - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixListener; - /// - /// let listener = UnixListener::bind("/path/to/the/socket").unwrap(); - /// - /// let listener_copy = listener.try_clone().expect("try_clone failed"); - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn try_clone(&self) -> io::Result { - self.0.duplicate().map(UnixListener) - } - - /// Returns the local socket address of this listener. - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixListener; - /// - /// let listener = UnixListener::bind("/path/to/the/socket").unwrap(); - /// - /// let addr = listener.local_addr().expect("Couldn't get local address"); - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn local_addr(&self) -> io::Result { - SocketAddr::new(|addr, len| unsafe { libc::getsockname(*self.0.as_inner(), addr, len) }) - } - - /// Moves the socket into or out of nonblocking mode. - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixListener; - /// - /// let listener = UnixListener::bind("/path/to/the/socket").unwrap(); - /// - /// listener.set_nonblocking(true).expect("Couldn't set non blocking"); - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> { - self.0.set_nonblocking(nonblocking) - } - - /// Returns the value of the `SO_ERROR` option. - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixListener; - /// - /// let listener = UnixListener::bind("/tmp/sock").unwrap(); - /// - /// if let Ok(Some(err)) = listener.take_error() { - /// println!("Got error: {:?}", err); - /// } - /// ``` - /// - /// # Platform specific - /// On Redox this always returns `None`. - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn take_error(&self) -> io::Result> { - self.0.take_error() - } - - /// Returns an iterator over incoming connections. - /// - /// The iterator will never return [`None`] and will also not yield the - /// peer's [`SocketAddr`] structure. - /// - /// [`None`]: ../../../../std/option/enum.Option.html#variant.None - /// [`SocketAddr`]: struct.SocketAddr.html - /// - /// # Examples - /// - /// ```no_run - /// use std::thread; - /// use std::os::unix::net::{UnixStream, UnixListener}; - /// - /// fn handle_client(stream: UnixStream) { - /// // ... 
- /// } - /// - /// let listener = UnixListener::bind("/path/to/the/socket").unwrap(); - /// - /// for stream in listener.incoming() { - /// match stream { - /// Ok(stream) => { - /// thread::spawn(|| handle_client(stream)); - /// } - /// Err(err) => { - /// break; - /// } - /// } - /// } - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn incoming<'a>(&'a self) -> Incoming<'a> { - Incoming { listener: self } - } -} - -#[stable(feature = "unix_socket", since = "1.10.0")] -impl AsRawFd for UnixListener { - fn as_raw_fd(&self) -> RawFd { - *self.0.as_inner() - } -} - -#[stable(feature = "unix_socket", since = "1.10.0")] -impl FromRawFd for UnixListener { - unsafe fn from_raw_fd(fd: RawFd) -> UnixListener { - UnixListener(Socket::from_inner(fd)) - } -} - -#[stable(feature = "unix_socket", since = "1.10.0")] -impl IntoRawFd for UnixListener { - fn into_raw_fd(self) -> RawFd { - self.0.into_inner() - } -} - -#[stable(feature = "unix_socket", since = "1.10.0")] -impl<'a> IntoIterator for &'a UnixListener { - type Item = io::Result; - type IntoIter = Incoming<'a>; - - fn into_iter(self) -> Incoming<'a> { - self.incoming() - } -} - -/// An iterator over incoming connections to a [`UnixListener`]. -/// -/// It will never return [`None`]. -/// -/// [`None`]: ../../../../std/option/enum.Option.html#variant.None -/// [`UnixListener`]: struct.UnixListener.html -/// -/// # Examples -/// -/// ```no_run -/// use std::thread; -/// use std::os::unix::net::{UnixStream, UnixListener}; -/// -/// fn handle_client(stream: UnixStream) { -/// // ... -/// } -/// -/// let listener = UnixListener::bind("/path/to/the/socket").unwrap(); -/// -/// for stream in listener.incoming() { -/// match stream { -/// Ok(stream) => { -/// thread::spawn(|| handle_client(stream)); -/// } -/// Err(err) => { -/// break; -/// } -/// } -/// } -/// ``` -#[derive(Debug)] -#[stable(feature = "unix_socket", since = "1.10.0")] -pub struct Incoming<'a> { - listener: &'a UnixListener, -} - -#[stable(feature = "unix_socket", since = "1.10.0")] -impl<'a> Iterator for Incoming<'a> { - type Item = io::Result; - - fn next(&mut self) -> Option> { - Some(self.listener.accept().map(|s| s.0)) - } - - fn size_hint(&self) -> (usize, Option) { - (usize::max_value(), None) - } -} - -/// A Unix datagram socket. -/// -/// # Examples -/// -/// ```no_run -/// use std::os::unix::net::UnixDatagram; -/// -/// let socket = UnixDatagram::bind("/path/to/my/socket").unwrap(); -/// socket.send_to(b"hello world", "/path/to/other/socket").unwrap(); -/// let mut buf = [0; 100]; -/// let (count, address) = socket.recv_from(&mut buf).unwrap(); -/// println!("socket {:?} sent {:?}", address, &buf[..count]); -/// ``` -#[stable(feature = "unix_socket", since = "1.10.0")] -pub struct UnixDatagram(Socket); - -#[stable(feature = "unix_socket", since = "1.10.0")] -impl fmt::Debug for UnixDatagram { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - let mut builder = fmt.debug_struct("UnixDatagram"); - builder.field("fd", self.0.as_inner()); - if let Ok(addr) = self.local_addr() { - builder.field("local", &addr); - } - if let Ok(addr) = self.peer_addr() { - builder.field("peer", &addr); - } - builder.finish() - } -} - -impl UnixDatagram { - /// Creates a Unix datagram socket bound to the given path. 
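The `Incoming` iterator never terminates on its own, so a caller that wants a bounded or cooperative accept loop either limits it with ordinary iterator adapters or puts the listener into nonblocking mode and treats `WouldBlock` as "nothing to accept yet". Here is a sketch of the latter, with a placeholder socket path.

```rust
use std::io;
use std::os::unix::net::UnixListener;

fn poll_once(listener: &UnixListener) -> io::Result<bool> {
    // With `set_nonblocking(true)`, `accept` returns immediately instead of
    // parking the thread until a peer connects.
    match listener.accept() {
        Ok((_stream, addr)) => {
            println!("accepted connection from {:?}", addr);
            Ok(true)
        }
        // No pending connection right now; not an error for a poll loop.
        Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => Ok(false),
        Err(e) => Err(e),
    }
}

fn main() -> io::Result<()> {
    // Placeholder path; the file must not already exist.
    let listener = UnixListener::bind("/tmp/example-listener.sock")?;
    listener.set_nonblocking(true)?;
    while !poll_once(&listener)? {
        std::thread::sleep(std::time::Duration::from_millis(10));
    }
    Ok(())
}
```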
- /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixDatagram; - /// - /// let sock = match UnixDatagram::bind("/path/to/the/socket") { - /// Ok(sock) => sock, - /// Err(e) => { - /// println!("Couldn't bind: {:?}", e); - /// return - /// } - /// }; - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn bind>(path: P) -> io::Result { - fn inner(path: &Path) -> io::Result { - unsafe { - let socket = UnixDatagram::unbound()?; - let (addr, len) = sockaddr_un(path)?; - - cvt(libc::bind(*socket.0.as_inner(), &addr as *const _ as *const _, len as _))?; - - Ok(socket) - } - } - inner(path.as_ref()) - } - - /// Creates a Unix Datagram socket which is not bound to any address. - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixDatagram; - /// - /// let sock = match UnixDatagram::unbound() { - /// Ok(sock) => sock, - /// Err(e) => { - /// println!("Couldn't unbound: {:?}", e); - /// return - /// } - /// }; - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn unbound() -> io::Result { - let inner = Socket::new_raw(libc::AF_UNIX, libc::SOCK_DGRAM)?; - Ok(UnixDatagram(inner)) - } - - /// Creates an unnamed pair of connected sockets. - /// - /// Returns two `UnixDatagrams`s which are connected to each other. - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixDatagram; - /// - /// let (sock1, sock2) = match UnixDatagram::pair() { - /// Ok((sock1, sock2)) => (sock1, sock2), - /// Err(e) => { - /// println!("Couldn't unbound: {:?}", e); - /// return - /// } - /// }; - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn pair() -> io::Result<(UnixDatagram, UnixDatagram)> { - let (i1, i2) = Socket::new_pair(libc::AF_UNIX, libc::SOCK_DGRAM)?; - Ok((UnixDatagram(i1), UnixDatagram(i2))) - } - - /// Connects the socket to the specified address. - /// - /// The [`send`] method may be used to send data to the specified address. - /// [`recv`] and [`recv_from`] will only receive data from that address. - /// - /// [`send`]: #method.send - /// [`recv`]: #method.recv - /// [`recv_from`]: #method.recv_from - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixDatagram; - /// - /// let sock = UnixDatagram::unbound().unwrap(); - /// match sock.connect("/path/to/the/socket") { - /// Ok(sock) => sock, - /// Err(e) => { - /// println!("Couldn't connect: {:?}", e); - /// return - /// } - /// }; - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn connect>(&self, path: P) -> io::Result<()> { - fn inner(d: &UnixDatagram, path: &Path) -> io::Result<()> { - unsafe { - let (addr, len) = sockaddr_un(path)?; - - cvt(libc::connect(*d.0.as_inner(), &addr as *const _ as *const _, len))?; - - Ok(()) - } - } - inner(self, path.as_ref()) - } - - /// Creates a new independently owned handle to the underlying socket. - /// - /// The returned `UnixDatagram` is a reference to the same socket that this - /// object references. Both handles can be used to accept incoming - /// connections and options set on one side will affect the other. 
- /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixDatagram; - /// - /// let sock = UnixDatagram::bind("/path/to/the/socket").unwrap(); - /// - /// let sock_copy = sock.try_clone().expect("try_clone failed"); - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn try_clone(&self) -> io::Result { - self.0.duplicate().map(UnixDatagram) - } - - /// Returns the address of this socket. - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixDatagram; - /// - /// let sock = UnixDatagram::bind("/path/to/the/socket").unwrap(); - /// - /// let addr = sock.local_addr().expect("Couldn't get local address"); - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn local_addr(&self) -> io::Result { - SocketAddr::new(|addr, len| unsafe { libc::getsockname(*self.0.as_inner(), addr, len) }) - } - - /// Returns the address of this socket's peer. - /// - /// The [`connect`] method will connect the socket to a peer. - /// - /// [`connect`]: #method.connect - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixDatagram; - /// - /// let sock = UnixDatagram::unbound().unwrap(); - /// sock.connect("/path/to/the/socket").unwrap(); - /// - /// let addr = sock.peer_addr().expect("Couldn't get peer address"); - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn peer_addr(&self) -> io::Result { - SocketAddr::new(|addr, len| unsafe { libc::getpeername(*self.0.as_inner(), addr, len) }) - } - - /// Receives data from the socket. - /// - /// On success, returns the number of bytes read and the address from - /// whence the data came. - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixDatagram; - /// - /// let sock = UnixDatagram::unbound().unwrap(); - /// let mut buf = vec![0; 10]; - /// match sock.recv_from(buf.as_mut_slice()) { - /// Ok((size, sender)) => println!("received {} bytes from {:?}", size, sender), - /// Err(e) => println!("recv_from function failed: {:?}", e), - /// } - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn recv_from(&self, buf: &mut [u8]) -> io::Result<(usize, SocketAddr)> { - let mut count = 0; - let addr = SocketAddr::new(|addr, len| { - unsafe { - count = libc::recvfrom(*self.0.as_inner(), - buf.as_mut_ptr() as *mut _, - buf.len(), - 0, - addr, - len); - if count > 0 { - 1 - } else if count == 0 { - 0 - } else { - -1 - } - } - })?; - - Ok((count as usize, addr)) - } - - /// Receives data from the socket. - /// - /// On success, returns the number of bytes read. - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixDatagram; - /// - /// let sock = UnixDatagram::bind("/path/to/the/socket").unwrap(); - /// let mut buf = vec![0; 10]; - /// sock.recv(buf.as_mut_slice()).expect("recv function failed"); - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn recv(&self, buf: &mut [u8]) -> io::Result { - self.0.read(buf) - } - - /// Sends data on the socket to the specified address. - /// - /// On success, returns the number of bytes written. 
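Only a socket bound to a path can be addressed with `send_to`; an unbound socket can still send, and `recv_from` reports the sender's address so a bound peer can be answered. The following sketch shows a one-shot exchange between two bound sockets, with placeholder paths.

```rust
use std::io;
use std::os::unix::net::UnixDatagram;

fn main() -> io::Result<()> {
    // Placeholder paths; both must be free to bind.
    let server = UnixDatagram::bind("/tmp/dgram-server.sock")?;
    let client = UnixDatagram::bind("/tmp/dgram-client.sock")?;

    // Datagram sockets preserve message boundaries: one send_to is one recv_from.
    client.send_to(b"ping", "/tmp/dgram-server.sock")?;

    let mut buf = [0u8; 16];
    let (len, peer) = server.recv_from(&mut buf)?;
    assert_eq!(&buf[..len], b"ping");

    // Reply to whoever sent the datagram, using the address recv_from reported.
    if let Some(path) = peer.as_pathname() {
        server.send_to(b"pong", path)?;
    }
    Ok(())
}
```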
- /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixDatagram; - /// - /// let sock = UnixDatagram::unbound().unwrap(); - /// sock.send_to(b"omelette au fromage", "/some/sock").expect("send_to function failed"); - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn send_to>(&self, buf: &[u8], path: P) -> io::Result { - fn inner(d: &UnixDatagram, buf: &[u8], path: &Path) -> io::Result { - unsafe { - let (addr, len) = sockaddr_un(path)?; - - let count = cvt(libc::sendto(*d.0.as_inner(), - buf.as_ptr() as *const _, - buf.len(), - MSG_NOSIGNAL, - &addr as *const _ as *const _, - len))?; - Ok(count as usize) - } - } - inner(self, buf, path.as_ref()) - } - - /// Sends data on the socket to the socket's peer. - /// - /// The peer address may be set by the `connect` method, and this method - /// will return an error if the socket has not already been connected. - /// - /// On success, returns the number of bytes written. - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixDatagram; - /// - /// let sock = UnixDatagram::unbound().unwrap(); - /// sock.connect("/some/sock").expect("Couldn't connect"); - /// sock.send(b"omelette au fromage").expect("send_to function failed"); - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn send(&self, buf: &[u8]) -> io::Result { - self.0.write(buf) - } - - /// Sets the read timeout for the socket. - /// - /// If the provided value is [`None`], then [`recv`] and [`recv_from`] calls will - /// block indefinitely. An [`Err`] is returned if the zero [`Duration`] - /// is passed to this method. - /// - /// [`None`]: ../../../../std/option/enum.Option.html#variant.None - /// [`Err`]: ../../../../std/result/enum.Result.html#variant.Err - /// [`recv`]: #method.recv - /// [`recv_from`]: #method.recv_from - /// [`Duration`]: ../../../../std/time/struct.Duration.html - /// - /// # Examples - /// - /// ``` - /// use std::os::unix::net::UnixDatagram; - /// use std::time::Duration; - /// - /// let sock = UnixDatagram::unbound().unwrap(); - /// sock.set_read_timeout(Some(Duration::new(1, 0))).expect("set_read_timeout function failed"); - /// ``` - /// - /// An [`Err`] is returned if the zero [`Duration`] is passed to this - /// method: - /// - /// ```no_run - /// use std::io; - /// use std::os::unix::net::UnixDatagram; - /// use std::time::Duration; - /// - /// let socket = UnixDatagram::unbound().unwrap(); - /// let result = socket.set_read_timeout(Some(Duration::new(0, 0))); - /// let err = result.unwrap_err(); - /// assert_eq!(err.kind(), io::ErrorKind::InvalidInput) - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn set_read_timeout(&self, timeout: Option) -> io::Result<()> { - self.0.set_timeout(timeout, libc::SO_RCVTIMEO) - } - - /// Sets the write timeout for the socket. - /// - /// If the provided value is [`None`], then [`send`] and [`send_to`] calls will - /// block indefinitely. An [`Err`] is returned if the zero [`Duration`] is passed to this - /// method. 
- /// - /// [`None`]: ../../../../std/option/enum.Option.html#variant.None - /// [`send`]: #method.send - /// [`send_to`]: #method.send_to - /// [`Duration`]: ../../../../std/time/struct.Duration.html - /// - /// # Examples - /// - /// ``` - /// use std::os::unix::net::UnixDatagram; - /// use std::time::Duration; - /// - /// let sock = UnixDatagram::unbound().unwrap(); - /// sock.set_write_timeout(Some(Duration::new(1, 0))) - /// .expect("set_write_timeout function failed"); - /// ``` - /// - /// An [`Err`] is returned if the zero [`Duration`] is passed to this - /// method: - /// - /// ```no_run - /// use std::io; - /// use std::os::unix::net::UnixDatagram; - /// use std::time::Duration; - /// - /// let socket = UnixDatagram::unbound().unwrap(); - /// let result = socket.set_write_timeout(Some(Duration::new(0, 0))); - /// let err = result.unwrap_err(); - /// assert_eq!(err.kind(), io::ErrorKind::InvalidInput) - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn set_write_timeout(&self, timeout: Option) -> io::Result<()> { - self.0.set_timeout(timeout, libc::SO_SNDTIMEO) - } - - /// Returns the read timeout of this socket. - /// - /// # Examples - /// - /// ``` - /// use std::os::unix::net::UnixDatagram; - /// use std::time::Duration; - /// - /// let sock = UnixDatagram::unbound().unwrap(); - /// sock.set_read_timeout(Some(Duration::new(1, 0))).expect("set_read_timeout function failed"); - /// assert_eq!(sock.read_timeout().unwrap(), Some(Duration::new(1, 0))); - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn read_timeout(&self) -> io::Result> { - self.0.timeout(libc::SO_RCVTIMEO) - } - - /// Returns the write timeout of this socket. - /// - /// # Examples - /// - /// ``` - /// use std::os::unix::net::UnixDatagram; - /// use std::time::Duration; - /// - /// let sock = UnixDatagram::unbound().unwrap(); - /// sock.set_write_timeout(Some(Duration::new(1, 0))) - /// .expect("set_write_timeout function failed"); - /// assert_eq!(sock.write_timeout().unwrap(), Some(Duration::new(1, 0))); - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn write_timeout(&self) -> io::Result> { - self.0.timeout(libc::SO_SNDTIMEO) - } - - /// Moves the socket into or out of nonblocking mode. - /// - /// # Examples - /// - /// ``` - /// use std::os::unix::net::UnixDatagram; - /// - /// let sock = UnixDatagram::unbound().unwrap(); - /// sock.set_nonblocking(true).expect("set_nonblocking function failed"); - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> { - self.0.set_nonblocking(nonblocking) - } - - /// Returns the value of the `SO_ERROR` option. - /// - /// # Examples - /// - /// ```no_run - /// use std::os::unix::net::UnixDatagram; - /// - /// let sock = UnixDatagram::unbound().unwrap(); - /// if let Ok(Some(err)) = sock.take_error() { - /// println!("Got error: {:?}", err); - /// } - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn take_error(&self) -> io::Result> { - self.0.take_error() - } - - /// Shut down the read, write, or both halves of this connection. - /// - /// This function will cause all pending and future I/O calls on the - /// specified portions to immediately return with an appropriate value - /// (see the documentation of [`Shutdown`]). 
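As with `UnixStream`, an expired read timeout surfaces as an error rather than a short read, and the exact `ErrorKind` differs by platform (`WouldBlock` or `TimedOut`). A sketch of a receive helper that folds both into "no datagram arrived in time":

```rust
use std::io;
use std::os::unix::net::UnixDatagram;
use std::time::Duration;

/// Waits up to `timeout` for one datagram; `Ok(None)` means the clock ran out.
fn recv_with_timeout(
    sock: &UnixDatagram,
    buf: &mut [u8],
    timeout: Duration,
) -> io::Result<Option<usize>> {
    sock.set_read_timeout(Some(timeout))?;
    match sock.recv(buf) {
        Ok(n) => Ok(Some(n)),
        // Different platforms report an expired SO_RCVTIMEO differently.
        Err(ref e)
            if e.kind() == io::ErrorKind::WouldBlock
                || e.kind() == io::ErrorKind::TimedOut =>
        {
            Ok(None)
        }
        Err(e) => Err(e),
    }
}
```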
- /// - /// [`Shutdown`]: ../../../../std/net/enum.Shutdown.html - /// - /// ```no_run - /// use std::os::unix::net::UnixDatagram; - /// use std::net::Shutdown; - /// - /// let sock = UnixDatagram::unbound().unwrap(); - /// sock.shutdown(Shutdown::Both).expect("shutdown function failed"); - /// ``` - #[stable(feature = "unix_socket", since = "1.10.0")] - pub fn shutdown(&self, how: Shutdown) -> io::Result<()> { - self.0.shutdown(how) - } -} - -#[stable(feature = "unix_socket", since = "1.10.0")] -impl AsRawFd for UnixDatagram { - fn as_raw_fd(&self) -> RawFd { - *self.0.as_inner() - } -} - -#[stable(feature = "unix_socket", since = "1.10.0")] -impl FromRawFd for UnixDatagram { - unsafe fn from_raw_fd(fd: RawFd) -> UnixDatagram { - UnixDatagram(Socket::from_inner(fd)) - } -} - -#[stable(feature = "unix_socket", since = "1.10.0")] -impl IntoRawFd for UnixDatagram { - fn into_raw_fd(self) -> RawFd { - self.0.into_inner() - } -} - -#[cfg(all(test, not(target_os = "emscripten")))] -mod test { - use crate::thread; - use crate::io::{self, ErrorKind}; - use crate::io::prelude::*; - use crate::time::Duration; - use crate::sys_common::io::test::tmpdir; - - use super::*; - - macro_rules! or_panic { - ($e:expr) => { - match $e { - Ok(e) => e, - Err(e) => panic!("{}", e), - } - } - } - - #[test] - fn basic() { - let dir = tmpdir(); - let socket_path = dir.path().join("sock"); - let msg1 = b"hello"; - let msg2 = b"world!"; - - let listener = or_panic!(UnixListener::bind(&socket_path)); - let thread = thread::spawn(move || { - let mut stream = or_panic!(listener.accept()).0; - let mut buf = [0; 5]; - or_panic!(stream.read(&mut buf)); - assert_eq!(&msg1[..], &buf[..]); - or_panic!(stream.write_all(msg2)); - }); - - let mut stream = or_panic!(UnixStream::connect(&socket_path)); - assert_eq!(Some(&*socket_path), - stream.peer_addr().unwrap().as_pathname()); - or_panic!(stream.write_all(msg1)); - let mut buf = vec![]; - or_panic!(stream.read_to_end(&mut buf)); - assert_eq!(&msg2[..], &buf[..]); - drop(stream); - - thread.join().unwrap(); - } - - #[test] - fn vectored() { - let (mut s1, mut s2) = or_panic!(UnixStream::pair()); - - let len = or_panic!(s1.write_vectored( - &[IoSlice::new(b"hello"), IoSlice::new(b" "), IoSlice::new(b"world!")], - )); - assert_eq!(len, 12); - - let mut buf1 = [0; 6]; - let mut buf2 = [0; 7]; - let len = or_panic!(s2.read_vectored( - &mut [IoSliceMut::new(&mut buf1), IoSliceMut::new(&mut buf2)], - )); - assert_eq!(len, 12); - assert_eq!(&buf1, b"hello "); - assert_eq!(&buf2, b"world!\0"); - } - - #[test] - fn pair() { - let msg1 = b"hello"; - let msg2 = b"world!"; - - let (mut s1, mut s2) = or_panic!(UnixStream::pair()); - let thread = thread::spawn(move || { - // s1 must be moved in or the test will hang! 
- let mut buf = [0; 5]; - or_panic!(s1.read(&mut buf)); - assert_eq!(&msg1[..], &buf[..]); - or_panic!(s1.write_all(msg2)); - }); - - or_panic!(s2.write_all(msg1)); - let mut buf = vec![]; - or_panic!(s2.read_to_end(&mut buf)); - assert_eq!(&msg2[..], &buf[..]); - drop(s2); - - thread.join().unwrap(); - } - - #[test] - fn try_clone() { - let dir = tmpdir(); - let socket_path = dir.path().join("sock"); - let msg1 = b"hello"; - let msg2 = b"world"; - - let listener = or_panic!(UnixListener::bind(&socket_path)); - let thread = thread::spawn(move || { - let mut stream = or_panic!(listener.accept()).0; - or_panic!(stream.write_all(msg1)); - or_panic!(stream.write_all(msg2)); - }); - - let mut stream = or_panic!(UnixStream::connect(&socket_path)); - let mut stream2 = or_panic!(stream.try_clone()); - - let mut buf = [0; 5]; - or_panic!(stream.read(&mut buf)); - assert_eq!(&msg1[..], &buf[..]); - or_panic!(stream2.read(&mut buf)); - assert_eq!(&msg2[..], &buf[..]); - - thread.join().unwrap(); - } - - #[test] - fn iter() { - let dir = tmpdir(); - let socket_path = dir.path().join("sock"); - - let listener = or_panic!(UnixListener::bind(&socket_path)); - let thread = thread::spawn(move || { - for stream in listener.incoming().take(2) { - let mut stream = or_panic!(stream); - let mut buf = [0]; - or_panic!(stream.read(&mut buf)); - } - }); - - for _ in 0..2 { - let mut stream = or_panic!(UnixStream::connect(&socket_path)); - or_panic!(stream.write_all(&[0])); - } - - thread.join().unwrap(); - } - - #[test] - fn long_path() { - let dir = tmpdir(); - let socket_path = dir.path() - .join("asdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfa\ - sasdfasdfasdasdfasdfasdfadfasdfasdfasdfasdfasdf"); - match UnixStream::connect(&socket_path) { - Err(ref e) if e.kind() == io::ErrorKind::InvalidInput => {} - Err(e) => panic!("unexpected error {}", e), - Ok(_) => panic!("unexpected success"), - } - - match UnixListener::bind(&socket_path) { - Err(ref e) if e.kind() == io::ErrorKind::InvalidInput => {} - Err(e) => panic!("unexpected error {}", e), - Ok(_) => panic!("unexpected success"), - } - - match UnixDatagram::bind(&socket_path) { - Err(ref e) if e.kind() == io::ErrorKind::InvalidInput => {} - Err(e) => panic!("unexpected error {}", e), - Ok(_) => panic!("unexpected success"), - } - } - - #[test] - fn timeouts() { - let dir = tmpdir(); - let socket_path = dir.path().join("sock"); - - let _listener = or_panic!(UnixListener::bind(&socket_path)); - - let stream = or_panic!(UnixStream::connect(&socket_path)); - let dur = Duration::new(15410, 0); - - assert_eq!(None, or_panic!(stream.read_timeout())); - - or_panic!(stream.set_read_timeout(Some(dur))); - assert_eq!(Some(dur), or_panic!(stream.read_timeout())); - - assert_eq!(None, or_panic!(stream.write_timeout())); - - or_panic!(stream.set_write_timeout(Some(dur))); - assert_eq!(Some(dur), or_panic!(stream.write_timeout())); - - or_panic!(stream.set_read_timeout(None)); - assert_eq!(None, or_panic!(stream.read_timeout())); - - or_panic!(stream.set_write_timeout(None)); - assert_eq!(None, or_panic!(stream.write_timeout())); - } - - #[test] - fn test_read_timeout() { - let dir = tmpdir(); - let socket_path = dir.path().join("sock"); - - let _listener = or_panic!(UnixListener::bind(&socket_path)); - - let mut stream = or_panic!(UnixStream::connect(&socket_path)); - or_panic!(stream.set_read_timeout(Some(Duration::from_millis(1000)))); - - let mut buf = [0; 10]; - let kind = stream.read_exact(&mut buf).err().expect("expected error").kind(); - assert!(kind == 
ErrorKind::WouldBlock || kind == ErrorKind::TimedOut, - "unexpected_error: {:?}", kind); - } - - #[test] - fn test_read_with_timeout() { - let dir = tmpdir(); - let socket_path = dir.path().join("sock"); - - let listener = or_panic!(UnixListener::bind(&socket_path)); - - let mut stream = or_panic!(UnixStream::connect(&socket_path)); - or_panic!(stream.set_read_timeout(Some(Duration::from_millis(1000)))); - - let mut other_end = or_panic!(listener.accept()).0; - or_panic!(other_end.write_all(b"hello world")); - - let mut buf = [0; 11]; - or_panic!(stream.read(&mut buf)); - assert_eq!(b"hello world", &buf[..]); - - let kind = stream.read_exact(&mut buf).err().expect("expected error").kind(); - assert!(kind == ErrorKind::WouldBlock || kind == ErrorKind::TimedOut, - "unexpected_error: {:?}", kind); - } - - // Ensure the `set_read_timeout` and `set_write_timeout` calls return errors - // when passed zero Durations - #[test] - fn test_unix_stream_timeout_zero_duration() { - let dir = tmpdir(); - let socket_path = dir.path().join("sock"); - - let listener = or_panic!(UnixListener::bind(&socket_path)); - let stream = or_panic!(UnixStream::connect(&socket_path)); - - let result = stream.set_write_timeout(Some(Duration::new(0, 0))); - let err = result.unwrap_err(); - assert_eq!(err.kind(), ErrorKind::InvalidInput); - - let result = stream.set_read_timeout(Some(Duration::new(0, 0))); - let err = result.unwrap_err(); - assert_eq!(err.kind(), ErrorKind::InvalidInput); - - drop(listener); - } - - #[test] - fn test_unix_datagram() { - let dir = tmpdir(); - let path1 = dir.path().join("sock1"); - let path2 = dir.path().join("sock2"); - - let sock1 = or_panic!(UnixDatagram::bind(&path1)); - let sock2 = or_panic!(UnixDatagram::bind(&path2)); - - let msg = b"hello world"; - or_panic!(sock1.send_to(msg, &path2)); - let mut buf = [0; 11]; - or_panic!(sock2.recv_from(&mut buf)); - assert_eq!(msg, &buf[..]); - } - - #[test] - fn test_unnamed_unix_datagram() { - let dir = tmpdir(); - let path1 = dir.path().join("sock1"); - - let sock1 = or_panic!(UnixDatagram::bind(&path1)); - let sock2 = or_panic!(UnixDatagram::unbound()); - - let msg = b"hello world"; - or_panic!(sock2.send_to(msg, &path1)); - let mut buf = [0; 11]; - let (usize, addr) = or_panic!(sock1.recv_from(&mut buf)); - assert_eq!(usize, 11); - assert!(addr.is_unnamed()); - assert_eq!(msg, &buf[..]); - } - - #[test] - fn test_connect_unix_datagram() { - let dir = tmpdir(); - let path1 = dir.path().join("sock1"); - let path2 = dir.path().join("sock2"); - - let bsock1 = or_panic!(UnixDatagram::bind(&path1)); - let bsock2 = or_panic!(UnixDatagram::bind(&path2)); - let sock = or_panic!(UnixDatagram::unbound()); - or_panic!(sock.connect(&path1)); - - // Check send() - let msg = b"hello there"; - or_panic!(sock.send(msg)); - let mut buf = [0; 11]; - let (usize, addr) = or_panic!(bsock1.recv_from(&mut buf)); - assert_eq!(usize, 11); - assert!(addr.is_unnamed()); - assert_eq!(msg, &buf[..]); - - // Changing default socket works too - or_panic!(sock.connect(&path2)); - or_panic!(sock.send(msg)); - or_panic!(bsock2.recv_from(&mut buf)); - } - - #[test] - fn test_unix_datagram_recv() { - let dir = tmpdir(); - let path1 = dir.path().join("sock1"); - - let sock1 = or_panic!(UnixDatagram::bind(&path1)); - let sock2 = or_panic!(UnixDatagram::unbound()); - or_panic!(sock2.connect(&path1)); - - let msg = b"hello world"; - or_panic!(sock2.send(msg)); - let mut buf = [0; 11]; - let size = or_panic!(sock1.recv(&mut buf)); - assert_eq!(size, 11); - assert_eq!(msg, &buf[..]); 
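The `basic` test above signals end-of-stream by dropping the writer so that `read_to_end` returns; `shutdown(Shutdown::Write)` gives the same effect while keeping the socket open for reading the reply. A sketch using a socket pair:

```rust
use std::io::{self, Read, Write};
use std::net::Shutdown;
use std::os::unix::net::UnixStream;
use std::thread;

fn main() -> io::Result<()> {
    let (mut a, mut b) = UnixStream::pair()?;

    let echo = thread::spawn(move || -> io::Result<()> {
        let mut req = Vec::new();
        // Returns once `a` shuts down its write half.
        b.read_to_end(&mut req)?;
        b.write_all(&req)?;
        Ok(())
    });

    a.write_all(b"hello")?;
    // Signal EOF to the peer without closing our read half.
    a.shutdown(Shutdown::Write)?;

    let mut reply = Vec::new();
    a.read_to_end(&mut reply)?;
    assert_eq!(reply, b"hello");

    echo.join().unwrap()
}
```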
- } - - #[test] - fn datagram_pair() { - let msg1 = b"hello"; - let msg2 = b"world!"; - - let (s1, s2) = or_panic!(UnixDatagram::pair()); - let thread = thread::spawn(move || { - // s1 must be moved in or the test will hang! - let mut buf = [0; 5]; - or_panic!(s1.recv(&mut buf)); - assert_eq!(&msg1[..], &buf[..]); - or_panic!(s1.send(msg2)); - }); - - or_panic!(s2.send(msg1)); - let mut buf = [0; 6]; - or_panic!(s2.recv(&mut buf)); - assert_eq!(&msg2[..], &buf[..]); - drop(s2); - - thread.join().unwrap(); - } - - // Ensure the `set_read_timeout` and `set_write_timeout` calls return errors - // when passed zero Durations - #[test] - fn test_unix_datagram_timeout_zero_duration() { - let dir = tmpdir(); - let path = dir.path().join("sock"); - - let datagram = or_panic!(UnixDatagram::bind(&path)); - - let result = datagram.set_write_timeout(Some(Duration::new(0, 0))); - let err = result.unwrap_err(); - assert_eq!(err.kind(), ErrorKind::InvalidInput); - - let result = datagram.set_read_timeout(Some(Duration::new(0, 0))); - let err = result.unwrap_err(); - assert_eq!(err.kind(), ErrorKind::InvalidInput); - } - - #[test] - fn abstract_namespace_not_allowed() { - assert!(UnixStream::connect("\0asdf").is_err()); - } -} diff --git a/src/libstd/sys/vxworks/fs.rs b/src/libstd/sys/vxworks/fs.rs index 51fdb1c0e5..adb08d8005 100644 --- a/src/libstd/sys/vxworks/fs.rs +++ b/src/libstd/sys/vxworks/fs.rs @@ -400,13 +400,27 @@ impl FromInner for File { impl fmt::Debug for File { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fn get_path(_fd: c_int) -> Option { - // FIXME(#:(): implement this for VxWorks - None + fn get_path(fd: c_int) -> Option { + let mut buf = vec![0;libc::PATH_MAX as usize]; + let n = unsafe { libc::ioctl(fd, libc::FIOGETNAME, buf.as_ptr()) }; + if n == -1 { + return None; + } + let l = buf.iter().position(|&c| c == 0).unwrap(); + buf.truncate(l as usize); + Some(PathBuf::from(OsString::from_vec(buf))) } - fn get_mode(_fd: c_int) -> Option<(bool, bool)> { - // FIXME(#:(): implement this for VxWorks - None + fn get_mode(fd: c_int) -> Option<(bool, bool)> { + let mode = unsafe { libc::fcntl(fd, libc::F_GETFL) }; + if mode == -1 { + return None; + } + match mode & libc::O_ACCMODE { + libc::O_RDONLY => Some((true, false)), + libc::O_RDWR => Some((true, true)), + libc::O_WRONLY => Some((false, true)), + _ => None + } } let fd = self.0.raw(); diff --git a/src/libstd/sys/vxworks/rwlock.rs b/src/libstd/sys/vxworks/rwlock.rs index 718f422ed1..19b123f2b6 100644 --- a/src/libstd/sys/vxworks/rwlock.rs +++ b/src/libstd/sys/vxworks/rwlock.rs @@ -25,7 +25,7 @@ impl RWLock { let r = libc::pthread_rwlock_rdlock(self.inner.get()); if r == libc::EAGAIN { panic!("rwlock maximum reader count exceeded"); - } else if r == libc::EDEADLK || *self.write_locked.get() { + } else if r == libc::EDEADLK || (r == 0 && *self.write_locked.get()) { if r == 0 { self.raw_unlock(); } diff --git a/src/libstd/sys/wasi/thread.rs b/src/libstd/sys/wasi/thread.rs index 28a504f197..6ce4142028 100644 --- a/src/libstd/sys/wasi/thread.rs +++ b/src/libstd/sys/wasi/thread.rs @@ -31,10 +31,10 @@ impl Thread { let nanos = dur.as_nanos(); assert!(nanos <= u64::max_value() as u128); - const CLOCK_ID: wasi::Userdata = 0x0123_45678; + const USERDATA: wasi::Userdata = 0x0123_45678; let clock = wasi::raw::__wasi_subscription_u_clock_t { - identifier: CLOCK_ID, + identifier: 0, clock_id: wasi::CLOCK_MONOTONIC, timeout: nanos as u64, precision: 0, @@ -42,7 +42,7 @@ impl Thread { }; let in_ = [wasi::Subscription { - userdata: 0, + 
userdata: USERDATA, type_: wasi::EVENTTYPE_CLOCK, u: wasi::raw::__wasi_subscription_u { clock: clock }, }]; @@ -53,7 +53,7 @@ impl Thread { }; match (res, event) { (Ok(1), wasi::Event { - userdata: CLOCK_ID, + userdata: USERDATA, error: 0, type_: wasi::EVENTTYPE_CLOCK, .. diff --git a/src/libstd/sys/windows/fs.rs b/src/libstd/sys/windows/fs.rs index 204f6af5fc..4160123c9a 100644 --- a/src/libstd/sys/windows/fs.rs +++ b/src/libstd/sys/windows/fs.rs @@ -412,7 +412,7 @@ impl File { pub fn duplicate(&self) -> io::Result { Ok(File { - handle: self.handle.duplicate(0, true, c::DUPLICATE_SAME_ACCESS)?, + handle: self.handle.duplicate(0, false, c::DUPLICATE_SAME_ACCESS)?, }) } diff --git a/src/libstd/sys/windows/handle.rs b/src/libstd/sys/windows/handle.rs index 3e5aa69335..3986cda1a5 100644 --- a/src/libstd/sys/windows/handle.rs +++ b/src/libstd/sys/windows/handle.rs @@ -46,7 +46,7 @@ impl Handle { pub fn into_raw(self) -> c::HANDLE { let ret = self.raw(); mem::forget(self); - return ret; + ret } } diff --git a/src/libstd/sys/windows/mutex.rs b/src/libstd/sys/windows/mutex.rs index 37cbdcefce..79dec1adf4 100644 --- a/src/libstd/sys/windows/mutex.rs +++ b/src/libstd/sys/windows/mutex.rs @@ -144,7 +144,7 @@ fn kind() -> Kind { Some(..) => Kind::SRWLock, }; KIND.store(ret as usize, Ordering::SeqCst); - return ret; + ret } pub struct ReentrantMutex { inner: UnsafeCell> } diff --git a/src/libstd/sys/windows/process.rs b/src/libstd/sys/windows/process.rs index 8658deb854..096b7bea8a 100644 --- a/src/libstd/sys/windows/process.rs +++ b/src/libstd/sys/windows/process.rs @@ -257,7 +257,7 @@ impl Stdio { let ret = io.duplicate(0, true, c::DUPLICATE_SAME_ACCESS); io.into_raw(); - return ret + ret } Err(..) => Ok(Handle::new(c::INVALID_HANDLE_VALUE)), } @@ -472,9 +472,8 @@ fn make_command_line(prog: &OsStr, args: &[OsString]) -> io::Result> { cmd.push('"' as u16); } - let mut iter = arg.encode_wide(); let mut backslashes: usize = 0; - while let Some(x) = iter.next() { + for x in arg.encode_wide() { if x == '\\' as u16 { backslashes += 1; } else { diff --git a/src/libstd/sys/windows/rand.rs b/src/libstd/sys/windows/rand.rs index c9bcb5d741..993831bec1 100644 --- a/src/libstd/sys/windows/rand.rs +++ b/src/libstd/sys/windows/rand.rs @@ -13,7 +13,7 @@ pub fn hashmap_random_keys() -> (u64, u64) { panic!("couldn't generate random bytes: {}", io::Error::last_os_error()); } - return v + v } #[cfg(target_vendor = "uwp")] diff --git a/src/libstd/sys/windows/thread_local.rs b/src/libstd/sys/windows/thread_local.rs index 4c9734fa0a..728257cdd4 100644 --- a/src/libstd/sys/windows/thread_local.rs +++ b/src/libstd/sys/windows/thread_local.rs @@ -52,7 +52,7 @@ pub unsafe fn create(dtor: Option) -> Key { if let Some(f) = dtor { register_dtor(key, f); } - return key; + key } #[inline] diff --git a/src/libstd/sys/windows/time.rs b/src/libstd/sys/windows/time.rs index e0f0e3a1a4..bd533c93d4 100644 --- a/src/libstd/sys/windows/time.rs +++ b/src/libstd/sys/windows/time.rs @@ -80,7 +80,7 @@ impl SystemTime { unsafe { let mut t: SystemTime = mem::zeroed(); c::GetSystemTimeAsFileTime(&mut t.t); - return t + t } } @@ -228,7 +228,7 @@ mod perf_counter { FREQUENCY = frequency; STATE.store(2, SeqCst); } - return frequency; + frequency } } diff --git a/src/libstd/sys_common/backtrace.rs b/src/libstd/sys_common/backtrace.rs index 01711d415d..9c406ec39c 100644 --- a/src/libstd/sys_common/backtrace.rs +++ b/src/libstd/sys_common/backtrace.rs @@ -7,6 +7,7 @@ use crate::io; use crate::borrow::Cow; use crate::io::prelude::*; use 
crate::path::{self, Path, PathBuf}; +use crate::sync::atomic::{self, Ordering}; use crate::sys::mutex::Mutex; use backtrace_rs::{BacktraceFmt, BytesOrWideString, PrintFmt}; @@ -115,8 +116,10 @@ unsafe fn _print_fmt(fmt: &mut fmt::Formatter<'_>, print_fmt: PrintFmt) -> fmt:: Ok(()) } -/// Fixed frame used to clean the backtrace with `RUST_BACKTRACE=1`. -#[inline(never)] +/// Fixed frame used to clean the backtrace with `RUST_BACKTRACE=1`. Note that +/// this is only inline(never) when backtraces in libstd are enabled, otherwise +/// it's fine to optimize away. +#[cfg_attr(feature = "backtrace", inline(never))] pub fn __rust_begin_short_backtrace(f: F) -> T where F: FnOnce() -> T, @@ -126,42 +129,49 @@ where f() } +pub enum RustBacktrace { + Print(PrintFmt), + Disabled, + RuntimeDisabled, +} + // For now logging is turned off by default, and this function checks to see // whether the magical environment variable is present to see if it's turned on. -pub fn log_enabled() -> Option { - use crate::sync::atomic::{self, Ordering}; +pub fn rust_backtrace_env() -> RustBacktrace { + // If the `backtrace` feature of this crate isn't enabled quickly return + // `None` so this can be constant propagated all over the place to turn + // optimize away callers. + if !cfg!(feature = "backtrace") { + return RustBacktrace::Disabled; + } // Setting environment variables for Fuchsia components isn't a standard // or easily supported workflow. For now, always display backtraces. if cfg!(target_os = "fuchsia") { - return Some(PrintFmt::Full); + return RustBacktrace::Print(PrintFmt::Full); } static ENABLED: atomic::AtomicIsize = atomic::AtomicIsize::new(0); match ENABLED.load(Ordering::SeqCst) { 0 => {} - 1 => return None, - 2 => return Some(PrintFmt::Short), - _ => return Some(PrintFmt::Full), + 1 => return RustBacktrace::RuntimeDisabled, + 2 => return RustBacktrace::Print(PrintFmt::Short), + _ => return RustBacktrace::Print(PrintFmt::Full), } - let val = env::var_os("RUST_BACKTRACE").and_then(|x| { - if &x == "0" { - None - } else if &x == "full" { - Some(PrintFmt::Full) - } else { - Some(PrintFmt::Short) - } - }); - ENABLED.store( - match val { - Some(v) => v as isize, - None => 1, - }, - Ordering::SeqCst, - ); - val + let (format, cache) = env::var_os("RUST_BACKTRACE") + .map(|x| { + if &x == "0" { + (RustBacktrace::RuntimeDisabled, 1) + } else if &x == "full" { + (RustBacktrace::Print(PrintFmt::Full), 3) + } else { + (RustBacktrace::Print(PrintFmt::Short), 2) + } + }) + .unwrap_or((RustBacktrace::RuntimeDisabled, 1)); + ENABLED.store(cache, Ordering::SeqCst); + format } /// Prints the filename of the backtrace frame. diff --git a/src/libstd/sys_common/mod.rs b/src/libstd/sys_common/mod.rs index cba3eca538..7a0bcd03d7 100644 --- a/src/libstd/sys_common/mod.rs +++ b/src/libstd/sys_common/mod.rs @@ -49,6 +49,7 @@ pub mod mutex; unix, target_os = "redox", target_os = "cloudabi", + target_os = "hermit", target_arch = "wasm32", all(target_vendor = "fortanix", target_env = "sgx")))] pub mod os_str_bytes; @@ -67,6 +68,7 @@ pub mod fs; cfg_if::cfg_if! 
{ if #[cfg(any(target_os = "cloudabi", target_os = "l4re", + target_os = "hermit", all(target_arch = "wasm32", not(target_os = "emscripten")), all(target_vendor = "fortanix", target_env = "sgx")))] { pub use crate::sys::net; diff --git a/src/libstd/sys_common/os_str_bytes.rs b/src/libstd/sys_common/os_str_bytes.rs index d734f412bf..3753269adf 100644 --- a/src/libstd/sys_common/os_str_bytes.rs +++ b/src/libstd/sys_common/os_str_bytes.rs @@ -193,7 +193,7 @@ impl Slice { pub trait OsStringExt { /// Creates an [`OsString`] from a byte vector. /// - /// See the module docmentation for an example. + /// See the module documentation for an example. /// /// [`OsString`]: ../../../ffi/struct.OsString.html #[stable(feature = "rust1", since = "1.0.0")] @@ -201,7 +201,7 @@ pub trait OsStringExt { /// Yields the underlying byte vector of this [`OsString`]. /// - /// See the module docmentation for an example. + /// See the module documentation for an example. /// /// [`OsString`]: ../../../ffi/struct.OsString.html #[stable(feature = "rust1", since = "1.0.0")] @@ -226,14 +226,14 @@ pub trait OsStrExt { #[stable(feature = "rust1", since = "1.0.0")] /// Creates an [`OsStr`] from a byte slice. /// - /// See the module docmentation for an example. + /// See the module documentation for an example. /// /// [`OsStr`]: ../../../ffi/struct.OsStr.html fn from_bytes(slice: &[u8]) -> &Self; /// Gets the underlying byte view of the [`OsStr`] slice. /// - /// See the module docmentation for an example. + /// See the module documentation for an example. /// /// [`OsStr`]: ../../../ffi/struct.OsStr.html #[stable(feature = "rust1", since = "1.0.0")] diff --git a/src/libstd/thread/local.rs b/src/libstd/thread/local.rs index f85b5d632f..cfaab4e22e 100644 --- a/src/libstd/thread/local.rs +++ b/src/libstd/thread/local.rs @@ -236,8 +236,8 @@ impl LocalKey { #[stable(feature = "rust1", since = "1.0.0")] pub fn with(&'static self, f: F) -> R where F: FnOnce(&T) -> R { - self.try_with(f).expect("cannot access a TLS value during or \ - after it is destroyed") + self.try_with(f).expect("cannot access a Thread Local Storage value \ + during or after destruction") } /// Acquires a reference to the value in this TLS key. 
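The reworded panic message only fires through `with`; code that may run while a thread is tearing down (for example inside another thread-local's destructor) can call `try_with` and handle the failure instead. A minimal sketch:

```rust
use std::cell::Cell;

thread_local! {
    static COUNTER: Cell<u32> = Cell::new(0);
}

fn bump() -> Option<u32> {
    // `try_with` returns Err(AccessError) instead of panicking if the key
    // is being (or has been) destroyed, e.g. when called from another
    // thread-local's destructor during thread exit.
    COUNTER
        .try_with(|c| {
            c.set(c.get() + 1);
            c.get()
        })
        .ok()
}

fn main() {
    assert_eq!(bump(), Some(1));
    assert_eq!(bump(), Some(2));
}
```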
@@ -509,9 +509,8 @@ pub mod os { pub unsafe fn get(&'static self, init: fn() -> T) -> Option<&'static T> { let ptr = self.os.get() as *mut Value; if ptr as usize > 1 { - match (*ptr).inner.get() { - Some(ref value) => return Some(value), - None => {}, + if let Some(ref value) = (*ptr).inner.get() { + return Some(value); } } self.try_initialize(init) diff --git a/src/libstd/thread/mod.rs b/src/libstd/thread/mod.rs index 764041d2f4..0ffa6ace2e 100644 --- a/src/libstd/thread/mod.rs +++ b/src/libstd/thread/mod.rs @@ -465,12 +465,9 @@ impl Builder { } thread_info::set(imp::guard::current(), their_thread); - #[cfg(feature = "backtrace")] let try_result = panic::catch_unwind(panic::AssertUnwindSafe(|| { crate::sys_common::backtrace::__rust_begin_short_backtrace(f) })); - #[cfg(not(feature = "backtrace"))] - let try_result = panic::catch_unwind(panic::AssertUnwindSafe(f)); *their_packet.get() = Some(try_result); }; diff --git a/src/libstd/time.rs b/src/libstd/time.rs index 3bf2b8be1f..e1ae01b602 100644 --- a/src/libstd/time.rs +++ b/src/libstd/time.rs @@ -15,10 +15,10 @@ use crate::cmp; use crate::error::Error; use crate::fmt; -use crate::ops::{Add, Sub, AddAssign, SubAssign}; +use crate::ops::{Add, AddAssign, Sub, SubAssign}; use crate::sys::time; -use crate::sys_common::FromInner; use crate::sys_common::mutex::Mutex; +use crate::sys_common::FromInner; #[stable(feature = "time", since = "1.3.0")] pub use core::time::Duration; @@ -216,17 +216,17 @@ impl Instant { // * https://bugzilla.mozilla.org/show_bug.cgi?id=1487778 - a similar // Firefox bug // - // It simply seems that this it just happens so that a lot in the wild - // we're seeing panics across various platforms where consecutive calls + // It seems that this just happens a lot in the wild. + // We're seeing panics across various platforms where consecutive calls // to `Instant::now`, such as via the `elapsed` function, are panicking // as they're going backwards. Placed here is a last-ditch effort to try // to fix things up. We keep a global "latest now" instance which is // returned instead of what the OS says if the OS goes backwards. // - // To hopefully mitigate the impact of this though a few platforms are + // To hopefully mitigate the impact of this, a few platforms are // whitelisted as "these at least haven't gone backwards yet". 
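The recovery strategy described in this comment, remembering the newest value handed out and never returning anything earlier, can be sketched outside libstd as a small wrapper type; this stand-in uses a `Mutex<Option<Instant>>` in place of the internal `sys_common` lock.

```rust
use std::sync::Mutex;
use std::time::Instant;

/// Hands out `Instant`s that never appear to go backwards, even if the
/// raw OS clock occasionally does.
struct MonotonicNow {
    latest: Mutex<Option<Instant>>,
}

impl MonotonicNow {
    fn new() -> Self {
        MonotonicNow { latest: Mutex::new(None) }
    }

    fn now(&self) -> Instant {
        let os_now = Instant::now();
        let mut latest = self.latest.lock().unwrap();
        match *latest {
            // The OS went backwards relative to what we already returned:
            // keep handing out the high-water mark instead.
            Some(prev) if prev > os_now => prev,
            _ => {
                *latest = Some(os_now);
                os_now
            }
        }
    }
}

fn main() {
    let clock = MonotonicNow::new();
    let a = clock.now();
    let b = clock.now();
    assert!(b >= a);
}
```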
if time::Instant::actually_monotonic() { - return Instant(os_now) + return Instant(os_now); } static LOCK: Mutex = Mutex::new(); @@ -353,8 +353,7 @@ impl Add for Instant { /// /// [`checked_add`]: ../../std/time/struct.Instant.html#method.checked_add fn add(self, other: Duration) -> Instant { - self.checked_add(other) - .expect("overflow when adding duration to instant") + self.checked_add(other).expect("overflow when adding duration to instant") } } @@ -370,8 +369,7 @@ impl Sub for Instant { type Output = Instant; fn sub(self, other: Duration) -> Instant { - self.checked_sub(other) - .expect("overflow when subtracting duration from instant") + self.checked_sub(other).expect("overflow when subtracting duration from instant") } } @@ -464,8 +462,7 @@ impl SystemTime { /// println!("{:?}", difference); /// ``` #[stable(feature = "time2", since = "1.8.0")] - pub fn duration_since(&self, earlier: SystemTime) - -> Result { + pub fn duration_since(&self, earlier: SystemTime) -> Result { self.0.sub_time(&earlier.0).map_err(SystemTimeError) } @@ -532,8 +529,7 @@ impl Add for SystemTime { /// /// [`checked_add`]: ../../std/time/struct.SystemTime.html#method.checked_add fn add(self, dur: Duration) -> SystemTime { - self.checked_add(dur) - .expect("overflow when adding duration to instant") + self.checked_add(dur).expect("overflow when adding duration to instant") } } @@ -549,8 +545,7 @@ impl Sub for SystemTime { type Output = SystemTime; fn sub(self, dur: Duration) -> SystemTime { - self.checked_sub(dur) - .expect("overflow when subtracting duration from instant") + self.checked_sub(dur).expect("overflow when subtracting duration from instant") } } @@ -626,7 +621,9 @@ impl SystemTimeError { #[stable(feature = "time2", since = "1.8.0")] impl Error for SystemTimeError { - fn description(&self) -> &str { "other time was not earlier than self" } + fn description(&self) -> &str { + "other time was not earlier than self" + } } #[stable(feature = "time2", since = "1.8.0")] @@ -644,17 +641,16 @@ impl FromInner for SystemTime { #[cfg(test)] mod tests { - use super::{Instant, SystemTime, Duration, UNIX_EPOCH}; + use super::{Duration, Instant, SystemTime, UNIX_EPOCH}; macro_rules! 
assert_almost_eq { - ($a:expr, $b:expr) => ({ + ($a:expr, $b:expr) => {{ let (a, b) = ($a, $b); if a != b { - let (a, b) = if a > b {(a, b)} else {(b, a)}; - assert!(a - Duration::new(0, 1000) <= b, - "{:?} is not almost equal to {:?}", a, b); + let (a, b) = if a > b { (a, b) } else { (b, a) }; + assert!(a - Duration::new(0, 1000) <= b, "{:?} is not almost equal to {:?}", a, b); } - }) + }}; } #[test] @@ -729,7 +725,7 @@ mod tests { fn instant_saturating_duration_since_nopanic() { let a = Instant::now(); let ret = (a - Duration::new(1, 0)).saturating_duration_since(a); - assert_eq!(ret, Duration::new(0,0)); + assert_eq!(ret, Duration::new(0, 0)); } #[test] @@ -755,15 +751,14 @@ mod tests { let second = Duration::new(1, 0); assert_almost_eq!(a.duration_since(a - second).unwrap(), second); - assert_almost_eq!(a.duration_since(a + second).unwrap_err() - .duration(), second); + assert_almost_eq!(a.duration_since(a + second).unwrap_err().duration(), second); assert_almost_eq!(a - second + second, a); assert_almost_eq!(a.checked_sub(second).unwrap().checked_add(second).unwrap(), a); let one_second_from_epoch = UNIX_EPOCH + Duration::new(1, 0); - let one_second_from_epoch2 = UNIX_EPOCH + Duration::new(0, 500_000_000) - + Duration::new(0, 500_000_000); + let one_second_from_epoch2 = + UNIX_EPOCH + Duration::new(0, 500_000_000) + Duration::new(0, 500_000_000); assert_eq!(one_second_from_epoch, one_second_from_epoch2); // checked_add_duration will not panic on overflow diff --git a/src/libsyntax/Cargo.toml b/src/libsyntax/Cargo.toml index d4a9acc156..3ce47e6a7b 100644 --- a/src/libsyntax/Cargo.toml +++ b/src/libsyntax/Cargo.toml @@ -10,7 +10,7 @@ path = "lib.rs" doctest = false [dependencies] -bitflags = "1.0" +bitflags = "1.2.1" rustc_serialize = { path = "../libserialize", package = "serialize" } log = "0.4" scoped-tls = "1.0" @@ -18,7 +18,7 @@ lazy_static = "1.0.0" syntax_pos = { path = "../libsyntax_pos" } errors = { path = "../librustc_errors", package = "rustc_errors" } rustc_data_structures = { path = "../librustc_data_structures" } +rustc_index = { path = "../librustc_index" } rustc_lexer = { path = "../librustc_lexer" } -rustc_macros = { path = "../librustc_macros" } rustc_target = { path = "../librustc_target" } smallvec = { version = "0.6.7", features = ["union", "may_dangle"] } diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index b634dcca7f..8b96704884 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -1,35 +1,63 @@ -// The Rust abstract syntax tree. +//! The Rust abstract syntax tree module. +//! +//! This module contains common structures forming the language AST. +//! Two main entities in the module are [`Item`] (which represents an AST element with +//! additional metadata), and [`ItemKind`] (which represents a concrete type and contains +//! information specific to the type of the item). +//! +//! Other module items that worth mentioning: +//! - [`Ty`] and [`TyKind`]: A parsed Rust type. +//! - [`Expr`] and [`ExprKind`]: A parsed Rust expression. +//! - [`Pat`] and [`PatKind`]: A parsed Rust pattern. Patterns are often dual to expressions. +//! - [`Stmt`] and [`StmtKind`]: An executable action that does not return a value. +//! - [`FnDecl`], [`FnHeader`] and [`Param`]: Metadata associated with a function declaration. +//! - [`Generics`], [`GenericParam`], [`WhereClause`]: Metadata associated with generic parameters. +//! - [`EnumDef`] and [`Variant`]: Enum declaration. +//! - [`Lit`] and [`LitKind`]: Literal expressions. +//! 
- [`MacroDef`], [`MacStmtStyle`], [`Mac`], [`MacDelimeter`]: Macro definition and invocation. +//! - [`Attribute`]: Metadata associated with item. +//! - [`UnOp`], [`UnOpKind`], [`BinOp`], [`BinOpKind`]: Unary and binary operators. pub use GenericArgs::*; pub use UnsafeSource::*; -pub use crate::symbol::{Ident, Symbol as Name}; pub use crate::util::parser::ExprPrecedence; -use crate::ext::hygiene::ExpnId; +pub use rustc_target::abi::FloatTy; +pub use syntax_pos::symbol::{Ident, Symbol as Name}; + use crate::parse::token::{self, DelimToken}; -use crate::print::pprust; use crate::ptr::P; use crate::source_map::{dummy_spanned, respan, Spanned}; -use crate::symbol::{kw, sym, Symbol}; use crate::tokenstream::TokenStream; -use crate::ThinVec; -use rustc_data_structures::indexed_vec::Idx; -#[cfg(target_arch = "x86_64")] -use rustc_data_structures::static_assert_size; -use rustc_target::spec::abi::Abi; -use syntax_pos::{Span, DUMMY_SP}; +use syntax_pos::symbol::{kw, sym, Symbol}; +use syntax_pos::{Span, DUMMY_SP, ExpnId}; use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::sync::Lrc; +use rustc_data_structures::thin_vec::ThinVec; +use rustc_index::vec::Idx; use rustc_serialize::{self, Decoder, Encoder}; -use std::fmt; +use rustc_target::spec::abi::Abi; -pub use rustc_target::abi::FloatTy; +#[cfg(target_arch = "x86_64")] +use rustc_data_structures::static_assert_size; + +use std::fmt; #[cfg(test)] mod tests; +/// A "Label" is an identifier of some point in sources, +/// e.g. in the following code: +/// +/// ```rust +/// 'outer: loop { +/// break 'outer; +/// } +/// ``` +/// +/// `'outer` is a label. #[derive(Clone, RustcEncodable, RustcDecodable, Copy)] pub struct Label { pub ident: Ident, @@ -41,6 +69,8 @@ impl fmt::Debug for Label { } } +/// A "Lifetime" is an annotation of the scope in which variable +/// can be used, e.g. `'a` in `&'a i32`. #[derive(Clone, RustcEncodable, RustcDecodable, Copy)] pub struct Lifetime { pub id: NodeId, @@ -70,7 +100,7 @@ impl fmt::Display for Lifetime { /// along with a bunch of supporting information. /// /// E.g., `std::cmp::PartialEq`. -#[derive(Clone, RustcEncodable, RustcDecodable)] +#[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Path { pub span: Span, /// The segments in the path: the things separated by `::`. @@ -86,18 +116,6 @@ impl PartialEq for Path { } } -impl fmt::Debug for Path { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "path({})", pprust::path_to_string(self)) - } -} - -impl fmt::Display for Path { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", pprust::path_to_string(self)) - } -} - impl Path { // Convert a span and an identifier to the corresponding // one-segment path. @@ -175,10 +193,14 @@ impl GenericArgs { } } +/// Concrete argument in the sequence of generic args. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum GenericArg { + /// `'a` in `Foo<'a>` Lifetime(Lifetime), + /// `Bar` in `Foo` Type(P), + /// `1` in `Foo<1>` Const(AnonConst), } @@ -241,9 +263,8 @@ impl ParenthesizedArgs { // hack to ensure that we don't try to access the private parts of `NodeId` in this module mod node_id_inner { - use rustc_data_structures::indexed_vec::Idx; - use rustc_data_structures::newtype_index; - newtype_index! { + use rustc_index::vec::Idx; + rustc_index::newtype_index! 
{ pub struct NodeId { ENCODABLE = custom DEBUG_FORMAT = "NodeId({})" @@ -472,7 +493,7 @@ pub enum NestedMetaItem { #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct MetaItem { pub path: Path, - pub node: MetaItemKind, + pub kind: MetaItemKind, pub span: Span, } @@ -508,24 +529,18 @@ pub struct Block { pub span: Span, } -#[derive(Clone, RustcEncodable, RustcDecodable)] +#[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Pat { pub id: NodeId, - pub node: PatKind, + pub kind: PatKind, pub span: Span, } -impl fmt::Debug for Pat { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "pat({}: {})", self.id, pprust::pat_to_string(self)) - } -} - impl Pat { /// Attempt reparsing the pattern as a type. /// This is intended for use by diagnostics. pub(super) fn to_ty(&self) -> Option> { - let node = match &self.node { + let kind = match &self.kind { // In a type expression `_` is an inference variable. PatKind::Wild => TyKind::Infer, // An IDENT pattern with no binding mode would be valid as path to a type. E.g. `u32`. @@ -555,7 +570,7 @@ impl Pat { }; Some(P(Ty { - node, + kind, id: self.id, span: self.span, })) @@ -569,16 +584,25 @@ impl Pat { return; } - match &self.node { + match &self.kind { + // Walk into the pattern associated with `Ident` (if any). PatKind::Ident(_, _, Some(p)) => p.walk(it), + + // Walk into each field of struct. PatKind::Struct(_, fields, _) => fields.iter().for_each(|field| field.pat.walk(it)), + + // Sequence of patterns. PatKind::TupleStruct(_, s) | PatKind::Tuple(s) | PatKind::Slice(s) | PatKind::Or(s) => s.iter().for_each(|p| p.walk(it)), + + // Trivial wrappers over inner patterns. PatKind::Box(s) | PatKind::Ref(s, _) | PatKind::Paren(s) => s.walk(it), + + // These patterns do not contain subpatterns, skip. PatKind::Wild | PatKind::Rest | PatKind::Lit(_) @@ -591,7 +615,7 @@ impl Pat { /// Is this a `..` pattern? pub fn is_rest(&self) -> bool { - match self.node { + match self.kind { PatKind::Rest => true, _ => false, } @@ -630,7 +654,9 @@ pub enum RangeEnd { #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum RangeSyntax { + /// `...` DotDotDot, + /// `..=` DotDotEq, } @@ -789,6 +815,8 @@ impl BinOpKind { pub fn is_comparison(&self) -> bool { use BinOpKind::*; + // Note for developers: please keep this as is; + // we want compilation to fail if another variant is added. match *self { Eq | Lt | Le | Ne | Gt | Ge => true, And | Or | Add | Sub | Mul | Div | Rem | BitXor | BitAnd | BitOr | Shl | Shr => false, @@ -803,6 +831,9 @@ impl BinOpKind { pub type BinOp = Spanned; +/// Unary operator. +/// +/// Note that `&data` is not an operator, it's an `AddrOf` expression. 
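Throughout this patch the payload field on AST nodes is renamed from `node` to `kind` (`MetaItem`, `Pat`, `Stmt`, `Expr`, `Ty`, `Item`, and the trait/impl/foreign item types). The sketch below is a toy stand-in for those compiler-internal types, not the real ones; it only shows how a typical call site changes from `e.node` to `e.kind`:

```rust
// Toy mirror of the `node` -> `kind` rename; the real types live in libsyntax.
enum ExprKind {
    Lit(i64),
    Paren(Box<Expr>),
}

struct Expr {
    // Previously `node: ExprKind`; call sites change from `e.node` to `e.kind`.
    kind: ExprKind,
}

fn is_literal(e: &Expr) -> bool {
    match e.kind {
        ExprKind::Lit(_) => true,
        ExprKind::Paren(ref inner) => is_literal(inner),
    }
}

fn main() {
    let e = Expr {
        kind: ExprKind::Paren(Box::new(Expr { kind: ExprKind::Lit(1) })),
    };
    assert!(is_literal(&e));
}
```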
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy)] pub enum UnOp { /// The `*` operator for dereferencing @@ -832,59 +863,46 @@ impl UnOp { } /// A statement -#[derive(Clone, RustcEncodable, RustcDecodable)] +#[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Stmt { pub id: NodeId, - pub node: StmtKind, + pub kind: StmtKind, pub span: Span, } impl Stmt { pub fn add_trailing_semicolon(mut self) -> Self { - self.node = match self.node { + self.kind = match self.kind { StmtKind::Expr(expr) => StmtKind::Semi(expr), StmtKind::Mac(mac) => { StmtKind::Mac(mac.map(|(mac, _style, attrs)| (mac, MacStmtStyle::Semicolon, attrs))) } - node => node, + kind => kind, }; self } pub fn is_item(&self) -> bool { - match self.node { + match self.kind { StmtKind::Item(_) => true, _ => false, } } pub fn is_expr(&self) -> bool { - match self.node { + match self.kind { StmtKind::Expr(_) => true, _ => false, } } } -impl fmt::Debug for Stmt { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!( - f, - "stmt({}: {})", - self.id.to_string(), - pprust::stmt_to_string(self) - ) - } -} - -#[derive(Clone, RustcEncodable, RustcDecodable)] +#[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum StmtKind { /// A local (let) binding. Local(P), - /// An item definition. Item(P), - /// Expr without trailing semi-colon. Expr(P), /// Expr with a trailing semi-colon. @@ -931,14 +949,18 @@ pub struct Local { #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Arm { pub attrs: Vec, + /// Match arm pattern, e.g. `10` in `match foo { 10 => {}, _ => {} }` pub pat: P, + /// Match arm guard, e.g. `n > 10` in `match foo { n if n > 10 => {}, _ => {} }` pub guard: Option>, + /// Match arm body. pub body: P, pub span: Span, pub id: NodeId, pub is_placeholder: bool, } +/// Access of a named (e.g., `obj.foo`) or unnamed (e.g., `obj.0`) struct field. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Field { pub ident: Ident, @@ -974,10 +996,10 @@ pub struct AnonConst { } /// An expression. -#[derive(Clone, RustcEncodable, RustcDecodable)] +#[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Expr { pub id: NodeId, - pub node: ExprKind, + pub kind: ExprKind, pub span: Span, pub attrs: ThinVec, } @@ -990,12 +1012,12 @@ impl Expr { /// Returns `true` if this expression would be valid somewhere that expects a value; /// for example, an `if` condition. pub fn returns(&self) -> bool { - if let ExprKind::Block(ref block, _) = self.node { - match block.stmts.last().map(|last_stmt| &last_stmt.node) { + if let ExprKind::Block(ref block, _) = self.kind { + match block.stmts.last().map(|last_stmt| &last_stmt.kind) { // Implicit return Some(&StmtKind::Expr(_)) => true, Some(&StmtKind::Semi(ref expr)) => { - if let ExprKind::Ret(_) = expr.node { + if let ExprKind::Ret(_) = expr.kind { // Last statement is explicit return. true } else { @@ -1012,7 +1034,7 @@ impl Expr { } fn to_bound(&self) -> Option { - match &self.node { + match &self.kind { ExprKind::Path(None, path) => Some(GenericBound::Trait( PolyTraitRef::new(Vec::new(), path.clone(), self.span), TraitBoundModifier::None, @@ -1021,18 +1043,25 @@ impl Expr { } } + /// Attempts to reparse as `Ty` (for diagnostic purposes). pub(super) fn to_ty(&self) -> Option> { - let node = match &self.node { + let kind = match &self.kind { + // Trivial conversions. 
ExprKind::Path(qself, path) => TyKind::Path(qself.clone(), path.clone()), ExprKind::Mac(mac) => TyKind::Mac(mac.clone()), + ExprKind::Paren(expr) => expr.to_ty().map(TyKind::Paren)?, + ExprKind::AddrOf(mutbl, expr) => expr .to_ty() .map(|ty| TyKind::Rptr(None, MutTy { ty, mutbl: *mutbl }))?, + ExprKind::Repeat(expr, expr_len) => { expr.to_ty().map(|ty| TyKind::Array(ty, expr_len.clone()))? } + ExprKind::Array(exprs) if exprs.len() == 1 => exprs[0].to_ty().map(TyKind::Slice)?, + ExprKind::Tup(exprs) => { let tys = exprs .iter() @@ -1040,6 +1069,10 @@ impl Expr { .collect::>>()?; TyKind::Tup(tys) } + + // If binary operator is `Add` and both `lhs` and `rhs` are trait bounds, + // then type of result is trait object. + // Othewise we don't assume the result type. ExprKind::Binary(binop, lhs, rhs) if binop.node == BinOpKind::Add => { if let (Some(lhs), Some(rhs)) = (lhs.to_bound(), rhs.to_bound()) { TyKind::TraitObject(vec![lhs, rhs], TraitObjectSyntax::None) @@ -1047,18 +1080,20 @@ impl Expr { return None; } } + + // This expression doesn't look like a type syntactically. _ => return None, }; Some(P(Ty { - node, + kind, id: self.id, span: self.span, })) } pub fn precedence(&self) -> ExprPrecedence { - match self.node { + match self.kind { ExprKind::Box(_) => ExprPrecedence::Box, ExprKind::Array(_) => ExprPrecedence::Array, ExprKind::Call(..) => ExprPrecedence::Call, @@ -1101,12 +1136,6 @@ impl Expr { } } -impl fmt::Debug for Expr { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "expr({}: {})", self.id, pprust::expr_to_string(self)) - } -} - /// Limit types of a range (inclusive or exclusive) #[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)] pub enum RangeLimits { @@ -1279,10 +1308,12 @@ pub struct QSelf { pub position: usize, } -/// A capture clause. +/// A capture clause used in closures and `async` blocks. #[derive(Clone, Copy, PartialEq, RustcEncodable, RustcDecodable, Debug)] pub enum CaptureBy { + /// `move |x| y + x`. Value, + /// `move` keyword was not specified. Ref, } @@ -1331,9 +1362,11 @@ impl MacDelimiter { } } +/// Represents a macro definition. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct MacroDef { pub tokens: TokenStream, + /// `true` if macro was defined with `macro_rules`. pub legacy: bool, } @@ -1343,6 +1376,7 @@ impl MacroDef { } } +// Clippy uses Hash and PartialEq #[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy, Hash, PartialEq)] pub enum StrStyle { /// A regular string, like `"foo"`. @@ -1361,20 +1395,26 @@ pub struct Lit { /// The "semantic" representation of the literal lowered from the original tokens. /// Strings are unescaped, hexadecimal forms are eliminated, etc. /// FIXME: Remove this and only create the semantic representation during lowering to HIR. - pub node: LitKind, + pub kind: LitKind, pub span: Span, } +// Clippy uses Hash and PartialEq +/// Type of the integer literal based on provided suffix. #[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy, Hash, PartialEq)] pub enum LitIntType { + /// e.g. `42_i32`. Signed(IntTy), + /// e.g. `42_u32`. Unsigned(UintTy), + /// e.g. `42`. Unsuffixed, } /// Literal kind. /// /// E.g., `"foo"`, `42`, `12.34`, or `bool`. +// Clippy uses Hash and PartialEq #[derive(Clone, RustcEncodable, RustcDecodable, Debug, Hash, PartialEq)] pub enum LitKind { /// A string literal (`"foo"`). @@ -1425,7 +1465,16 @@ impl LitKind { /// Returns `true` if this literal has no suffix. 
/// Note: this will return true for literals with prefixes such as raw strings and byte strings. pub fn is_unsuffixed(&self) -> bool { + !self.is_suffixed() + } + + /// Returns `true` if this literal has a suffix. + pub fn is_suffixed(&self) -> bool { match *self { + // suffixed variants + LitKind::Int(_, LitIntType::Signed(..)) + | LitKind::Int(_, LitIntType::Unsigned(..)) + | LitKind::Float(..) => true, // unsuffixed variants LitKind::Str(..) | LitKind::ByteStr(..) @@ -1434,18 +1483,9 @@ impl LitKind { | LitKind::Int(_, LitIntType::Unsuffixed) | LitKind::FloatUnsuffixed(..) | LitKind::Bool(..) - | LitKind::Err(..) => true, - // suffixed variants - LitKind::Int(_, LitIntType::Signed(..)) - | LitKind::Int(_, LitIntType::Unsigned(..)) - | LitKind::Float(..) => false, + | LitKind::Err(..) => false, } } - - /// Returns `true` if this literal has a suffix. - pub fn is_suffixed(&self) -> bool { - !self.is_unsuffixed() - } } // N.B., If you change this, you'll probably want to change the corresponding @@ -1474,7 +1514,7 @@ pub struct TraitItem { pub ident: Ident, pub attrs: Vec, pub generics: Generics, - pub node: TraitItemKind, + pub kind: TraitItemKind, pub span: Span, /// See `Item::tokens` for what this is. pub tokens: Option, @@ -1497,7 +1537,7 @@ pub struct ImplItem { pub defaultness: Defaultness, pub attrs: Vec, pub generics: Generics, - pub node: ImplItemKind, + pub kind: ImplItemKind, pub span: Span, /// See `Item::tokens` for what this is. pub tokens: Option, @@ -1661,19 +1701,13 @@ pub enum AssocTyConstraintKind { }, } -#[derive(Clone, RustcEncodable, RustcDecodable)] +#[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Ty { pub id: NodeId, - pub node: TyKind, + pub kind: TyKind, pub span: Span, } -impl fmt::Debug for Ty { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "type({})", pprust::ty_to_string(self)) - } -} - #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct BareFnTy { pub unsafety: Unsafety, @@ -1820,12 +1854,13 @@ pub enum SelfKind { pub type ExplicitSelf = Spanned; impl Param { + /// Attempts to cast parameter to `ExplicitSelf`. pub fn to_self(&self) -> Option { - if let PatKind::Ident(BindingMode::ByValue(mutbl), ident, _) = self.pat.node { + if let PatKind::Ident(BindingMode::ByValue(mutbl), ident, _) = self.pat.kind { if ident.name == kw::SelfLower { - return match self.ty.node { + return match self.ty.kind { TyKind::ImplicitSelf => Some(respan(self.pat.span, SelfKind::Value(mutbl))), - TyKind::Rptr(lt, MutTy { ref ty, mutbl }) if ty.node.is_implicit_self() => { + TyKind::Rptr(lt, MutTy { ref ty, mutbl }) if ty.kind.is_implicit_self() => { Some(respan(self.pat.span, SelfKind::Region(lt, mutbl))) } _ => Some(respan( @@ -1838,26 +1873,28 @@ impl Param { None } + /// Returns `true` if parameter is `self`. pub fn is_self(&self) -> bool { - if let PatKind::Ident(_, ident, _) = self.pat.node { + if let PatKind::Ident(_, ident, _) = self.pat.kind { ident.name == kw::SelfLower } else { false } } + /// Builds a `Param` object from `ExplicitSelf`. 
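`Param::to_self` and `Param::is_self` above classify explicit `self` receivers. As a reminder of the surface forms they map from, here is a small runnable example; the `SelfKind` notes in the comments are an informal reading of the match arms above, not part of the example itself:

```rust
struct Counter {
    n: u32,
}

impl Counter {
    fn get(&self) -> u32 {
        // `&self`: roughly SelfKind::Region(.., Immutable) in the mapping above.
        self.n
    }
    fn bump(&mut self) {
        // `&mut self`: roughly SelfKind::Region(.., Mutable).
        self.n += 1;
    }
    fn into_inner(self) -> u32 {
        // `self` by value: roughly SelfKind::Value(Immutable).
        self.n
    }
}

fn main() {
    let mut c = Counter { n: 0 };
    c.bump();
    assert_eq!(c.get(), 1);
    assert_eq!(c.into_inner(), 1);
}
```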
pub fn from_self(attrs: ThinVec, eself: ExplicitSelf, eself_ident: Ident) -> Param { let span = eself.span.to(eself_ident.span); let infer_ty = P(Ty { id: DUMMY_NODE_ID, - node: TyKind::ImplicitSelf, + kind: TyKind::ImplicitSelf, span, }); let param = |mutbl, ty| Param { attrs, pat: P(Pat { id: DUMMY_NODE_ID, - node: PatKind::Ident(BindingMode::ByValue(mutbl), eself_ident, None), + kind: PatKind::Ident(BindingMode::ByValue(mutbl), eself_ident, None), span, }), span, @@ -1872,7 +1909,7 @@ impl Param { Mutability::Immutable, P(Ty { id: DUMMY_NODE_ID, - node: TyKind::Rptr( + kind: TyKind::Rptr( lt, MutTy { ty: infer_ty, @@ -1886,14 +1923,16 @@ impl Param { } } -/// A header (not the body) of a function declaration. +/// A signature (not the body) of a function declaration. /// /// E.g., `fn foo(bar: baz)`. +/// +/// Please note that it's different from `FnHeader` structure +/// which contains metadata about function safety, asyncness, constness and ABI. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct FnDecl { pub inputs: Vec, pub output: FunctionRetTy, - pub c_variadic: bool, } impl FnDecl { @@ -1901,7 +1940,13 @@ impl FnDecl { self.inputs.get(0).and_then(Param::to_self) } pub fn has_self(&self) -> bool { - self.inputs.get(0).map(Param::is_self).unwrap_or(false) + self.inputs.get(0).map_or(false, Param::is_self) + } + pub fn c_variadic(&self) -> bool { + self.inputs.last().map_or(false, |arg| match arg.ty.kind { + TyKind::CVarArgs => true, + _ => false, + }) } } @@ -1954,6 +1999,8 @@ pub enum Constness { NotConst, } +/// Item defaultness. +/// For details see the [RFC #2532](https://github.com/rust-lang/rfcs/pull/2532). #[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)] pub enum Defaultness { Default, @@ -2045,6 +2092,7 @@ pub struct EnumDef { pub variants: Vec, } +/// Enum variant. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Variant { /// Name of the variant. @@ -2135,18 +2183,31 @@ impl rustc_serialize::Decodable for AttrId { } } +#[derive(Clone, RustcEncodable, RustcDecodable, Debug)] +pub struct AttrItem { + pub path: Path, + pub tokens: TokenStream, +} + /// Metadata associated with an item. /// Doc-comments are promoted to attributes that have `is_sugared_doc = true`. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Attribute { + pub item: AttrItem, pub id: AttrId, + /// Denotes if the attribute decorates the following construct (outer) + /// or the construct this attribute is contained within (inner). pub style: AttrStyle, - pub path: Path, - pub tokens: TokenStream, pub is_sugared_doc: bool, pub span: Span, } +// Compatibility impl to avoid churn, consider removing. +impl std::ops::Deref for Attribute { + type Target = AttrItem; + fn deref(&self) -> &Self::Target { &self.item } +} + /// `TraitRef`s appear in impls. 
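The `Attribute`/`AttrItem` split above relies on field access auto-deref so that existing `attr.path` and `attr.tokens` call sites keep compiling. A self-contained toy mirror of that compatibility impl (using `String` in place of the real `Path`/`TokenStream` types, purely to keep the sketch runnable):

```rust
use std::ops::Deref;

struct AttrItem {
    path: String,
    tokens: String,
}

struct Attribute {
    item: AttrItem,
    is_sugared_doc: bool,
}

// The compatibility `Deref` keeps `attr.path` / `attr.tokens` working even
// though those fields now live on `attr.item`.
impl Deref for Attribute {
    type Target = AttrItem;
    fn deref(&self) -> &AttrItem {
        &self.item
    }
}

fn main() {
    let attr = Attribute {
        item: AttrItem { path: "inline".to_string(), tokens: String::new() },
        is_sugared_doc: false,
    };
    // Field access auto-derefs through `item`, which is what the compat impl relies on.
    assert_eq!(attr.path, "inline");
    assert!(attr.tokens.is_empty());
    assert!(!attr.is_sugared_doc);
}
```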
/// /// Resolution maps each `TraitRef`'s `ref_id` to its defining trait; that's all @@ -2269,7 +2330,7 @@ pub struct Item { pub ident: Ident, pub attrs: Vec, pub id: NodeId, - pub node: ItemKind, + pub kind: ItemKind, pub vis: Visibility, pub span: Span, @@ -2421,7 +2482,7 @@ impl ItemKind { pub struct ForeignItem { pub ident: Ident, pub attrs: Vec, - pub node: ForeignItemKind, + pub kind: ForeignItemKind, pub id: NodeId, pub span: Span, pub vis: Visibility, diff --git a/src/libsyntax/attr/builtin.rs b/src/libsyntax/attr/builtin.rs index b5037b75f7..5217f63660 100644 --- a/src/libsyntax/attr/builtin.rs +++ b/src/libsyntax/attr/builtin.rs @@ -2,9 +2,9 @@ use crate::ast::{self, Attribute, MetaItem, NestedMetaItem}; use crate::early_buffered_lints::BufferedEarlyLintId; -use crate::ext::base::ExtCtxt; use crate::feature_gate::{Features, GatedCfg}; -use crate::parse::ParseSess; +use crate::print::pprust; +use crate::sess::ParseSess; use errors::{Applicability, Handler}; use syntax_pos::hygiene::Transparency; @@ -31,12 +31,16 @@ pub struct AttributeTemplate { } impl AttributeTemplate { + pub fn only_word() -> Self { + Self { word: true, list: None, name_value_str: None } + } + /// Checks that the given meta-item is compatible with this template. fn compatible(&self, meta_item_kind: &ast::MetaItemKind) -> bool { match meta_item_kind { ast::MetaItemKind::Word => self.word, ast::MetaItemKind::List(..) => self.list.is_some(), - ast::MetaItemKind::NameValue(lit) if lit.node.is_str() => self.name_value_str.is_some(), + ast::MetaItemKind::NameValue(lit) if lit.kind.is_str() => self.name_value_str.is_some(), ast::MetaItemKind::NameValue(..) => false, } } @@ -80,7 +84,7 @@ fn handle_errors(sess: &ParseSess, span: Span, error: AttrError) { } } -#[derive(Copy, Clone, Hash, PartialEq, RustcEncodable, RustcDecodable)] +#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable)] pub enum InlineAttr { None, Hint, @@ -88,7 +92,7 @@ pub enum InlineAttr { Never, } -#[derive(Copy, Clone, Hash, PartialEq, RustcEncodable, RustcDecodable)] +#[derive(Clone, RustcEncodable, RustcDecodable)] pub enum OptimizeAttr { None, Speed, @@ -106,7 +110,7 @@ pub fn find_unwind_attr(diagnostic: Option<&Handler>, attrs: &[Attribute]) -> Op attrs.iter().fold(None, |ia, attr| { if attr.check_name(sym::unwind) { if let Some(meta) = attr.meta() { - if let MetaItemKind::List(items) = meta.node { + if let MetaItemKind::List(items) = meta.kind { if items.len() == 1 { if items[0].check_name(sym::allowed) { return Some(UnwindAttr::Allowed); @@ -239,11 +243,15 @@ fn find_stability_generic<'a, I>(sess: &ParseSess, allow_const_fn_ptr = true; } // attributes with data - else if let Some(MetaItem { node: MetaItemKind::List(ref metas), .. }) = meta { + else if let Some(MetaItem { kind: MetaItemKind::List(ref metas), .. 
}) = meta { let meta = meta.as_ref().unwrap(); let get = |meta: &MetaItem, item: &mut Option| { if item.is_some() { - handle_errors(sess, meta.span, AttrError::MultipleItem(meta.path.to_string())); + handle_errors( + sess, + meta.span, + AttrError::MultipleItem(pprust::path_to_string(&meta.path)), + ); return false } if let Some(v) = meta.value_str() { @@ -271,7 +279,10 @@ fn find_stability_generic<'a, I>(sess: &ParseSess, handle_errors( sess, mi.span, - AttrError::UnknownMetaItem(mi.path.to_string(), expected), + AttrError::UnknownMetaItem( + pprust::path_to_string(&mi.path), + expected, + ), ); continue 'outer } @@ -362,7 +373,7 @@ fn find_stability_generic<'a, I>(sess: &ParseSess, sess, meta.span(), AttrError::UnknownMetaItem( - mi.path.to_string(), + pprust::path_to_string(&mi.path), &["feature", "reason", "issue", "soft"] ), ); @@ -434,7 +445,8 @@ fn find_stability_generic<'a, I>(sess: &ParseSess, sess, meta.span(), AttrError::UnknownMetaItem( - mi.path.to_string(), &["since", "note"], + pprust::path_to_string(&mi.path), + &["since", "note"], ), ); continue 'outer @@ -534,17 +546,17 @@ pub fn cfg_matches(cfg: &ast::MetaItem, sess: &ParseSess, features: Option<&Feat if cfg.path.segments.len() != 1 { return error(cfg.path.span, "`cfg` predicate key must be an identifier"); } - match &cfg.node { + match &cfg.kind { MetaItemKind::List(..) => { error(cfg.span, "unexpected parentheses after `cfg` predicate key") } - MetaItemKind::NameValue(lit) if !lit.node.is_str() => { + MetaItemKind::NameValue(lit) if !lit.kind.is_str() => { handle_errors( sess, lit.span, AttrError::UnsupportedLiteral( "literal in `cfg` predicate value must be a string", - lit.node.is_bytestr() + lit.kind.is_bytestr() ), ); true @@ -563,7 +575,7 @@ pub fn eval_condition(cfg: &ast::MetaItem, sess: &ParseSess, eval: &mut F) -> bool where F: FnMut(&ast::MetaItem) -> bool { - match cfg.node { + match cfg.kind { ast::MetaItemKind::List(ref mis) => { for mi in mis.iter() { if !mi.is_meta_item() { @@ -597,8 +609,11 @@ pub fn eval_condition(cfg: &ast::MetaItem, sess: &ParseSess, eval: &mut F) !eval_condition(mis[0].meta_item().unwrap(), sess, eval) }, _ => { - span_err!(sess.span_diagnostic, cfg.span, E0537, - "invalid predicate `{}`", cfg.path); + span_err!( + sess.span_diagnostic, cfg.span, E0537, + "invalid predicate `{}`", + pprust::path_to_string(&cfg.path) + ); false } } @@ -609,8 +624,7 @@ pub fn eval_condition(cfg: &ast::MetaItem, sess: &ParseSess, eval: &mut F) } } - -#[derive(RustcEncodable, RustcDecodable, PartialEq, PartialOrd, Clone, Debug, Eq, Hash)] +#[derive(RustcEncodable, RustcDecodable, Clone)] pub struct Deprecation { pub since: Option, pub note: Option, @@ -641,8 +655,11 @@ fn find_deprecation_generic<'a, I>(sess: &ParseSess, break } - let meta = attr.meta().unwrap(); - depr = match &meta.node { + let meta = match attr.meta() { + Some(meta) => meta, + None => continue, + }; + depr = match &meta.kind { MetaItemKind::Word => Some(Deprecation { since: None, note: None }), MetaItemKind::NameValue(..) 
=> { meta.value_str().map(|note| { @@ -653,7 +670,9 @@ fn find_deprecation_generic<'a, I>(sess: &ParseSess, let get = |meta: &MetaItem, item: &mut Option| { if item.is_some() { handle_errors( - sess, meta.span, AttrError::MultipleItem(meta.path.to_string()) + sess, + meta.span, + AttrError::MultipleItem(pprust::path_to_string(&meta.path)), ); return false } @@ -668,7 +687,7 @@ fn find_deprecation_generic<'a, I>(sess: &ParseSess, AttrError::UnsupportedLiteral( "literal in `deprecated` \ value must be a string", - lit.node.is_bytestr() + lit.kind.is_bytestr() ), ); } else { @@ -691,8 +710,10 @@ fn find_deprecation_generic<'a, I>(sess: &ParseSess, handle_errors( sess, meta.span(), - AttrError::UnknownMetaItem(mi.path.to_string(), - &["since", "note"]), + AttrError::UnknownMetaItem( + pprust::path_to_string(&mi.path), + &["since", "note"], + ), ); continue 'outer } @@ -730,7 +751,7 @@ pub enum ReprAttr { ReprAlign(u32), } -#[derive(Eq, Hash, PartialEq, Debug, RustcEncodable, RustcDecodable, Copy, Clone)] +#[derive(Eq, PartialEq, Debug, RustcEncodable, RustcDecodable, Copy, Clone)] pub enum IntType { SignedInt(ast::IntTy), UnsignedInt(ast::UintTy) @@ -811,14 +832,14 @@ pub fn find_repr_attrs(sess: &ParseSess, attr: &Attribute) -> Vec { let mut literal_error = None; if name == sym::align { recognised = true; - match parse_alignment(&value.node) { + match parse_alignment(&value.kind) { Ok(literal) => acc.push(ReprAlign(literal)), Err(message) => literal_error = Some(message) }; } else if name == sym::packed { recognised = true; - match parse_alignment(&value.node) { + match parse_alignment(&value.kind) { Ok(literal) => acc.push(ReprPacked(literal)), Err(message) => literal_error = Some(message) }; @@ -830,11 +851,11 @@ pub fn find_repr_attrs(sess: &ParseSess, attr: &Attribute) -> Vec { } else { if let Some(meta_item) = item.meta_item() { if meta_item.check_name(sym::align) { - if let MetaItemKind::NameValue(ref value) = meta_item.node { + if let MetaItemKind::NameValue(ref value) = meta_item.kind { recognised = true; let mut err = struct_span_err!(diagnostic, item.span(), E0693, "incorrect `repr(align)` attribute format"); - match value.node { + match value.kind { ast::LitKind::Int(int, ast::LitIntType::Unsuffixed) => { err.span_suggestion( item.span(), @@ -921,14 +942,7 @@ pub fn find_transparency( (transparency.map_or(fallback, |t| t.0), error) } -pub fn check_builtin_macro_attribute(ecx: &ExtCtxt<'_>, meta_item: &MetaItem, name: Symbol) { - // All the built-in macro attributes are "words" at the moment. 
- let template = AttributeTemplate { word: true, list: None, name_value_str: None }; - let attr = ecx.attribute(meta_item.clone()); - check_builtin_attribute(ecx.parse_sess, &attr, name, template); -} - -crate fn check_builtin_attribute( +pub fn check_builtin_attribute( sess: &ParseSess, attr: &ast::Attribute, name: Symbol, template: AttributeTemplate ) { // Some special attributes like `cfg` must be checked @@ -941,7 +955,7 @@ crate fn check_builtin_attribute( name == sym::test || name == sym::bench; match attr.parse_meta(sess) { - Ok(meta) => if !should_skip(name) && !template.compatible(&meta.node) { + Ok(meta) => if !should_skip(name) && !template.compatible(&meta.kind) { let error_msg = format!("malformed `{}` attribute input", name); let mut msg = "attribute must be of the form ".to_owned(); let mut suggestions = vec![]; diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs index 9d06b926f9..3e240a855e 100644 --- a/src/libsyntax/attr/mod.rs +++ b/src/libsyntax/attr/mod.rs @@ -9,19 +9,20 @@ pub use StabilityLevel::*; pub use crate::ast::Attribute; use crate::ast; -use crate::ast::{AttrId, AttrStyle, Name, Ident, Path, PathSegment}; +use crate::ast::{AttrItem, AttrId, AttrStyle, Name, Ident, Path, PathSegment}; use crate::ast::{MetaItem, MetaItemKind, NestedMetaItem}; use crate::ast::{Lit, LitKind, Expr, Item, Local, Stmt, StmtKind, GenericParam}; use crate::mut_visit::visit_clobber; -use crate::source_map::{BytePos, Spanned, DUMMY_SP}; -use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; -use crate::parse::parser::Parser; -use crate::parse::{ParseSess, PResult}; +use crate::source_map::{BytePos, Spanned}; +use crate::parse::lexer::comments::doc_comment_style; +use crate::parse; +use crate::parse::PResult; use crate::parse::token::{self, Token}; use crate::ptr::P; +use crate::sess::ParseSess; use crate::symbol::{sym, Symbol}; use crate::ThinVec; -use crate::tokenstream::{TokenStream, TokenTree, DelimSpan}; +use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint}; use crate::GLOBALS; use log::debug; @@ -174,7 +175,7 @@ impl Attribute { pub fn meta_item_list(&self) -> Option> { match self.meta() { - Some(MetaItem { node: MetaItemKind::List(list), .. }) => Some(list), + Some(MetaItem { kind: MetaItemKind::List(list), .. }) => Some(list), _ => None } } @@ -210,16 +211,16 @@ impl MetaItem { // #[attribute(name = "value")] // ^^^^^^^^^^^^^^ pub fn name_value_literal(&self) -> Option<&Lit> { - match &self.node { + match &self.kind { MetaItemKind::NameValue(v) => Some(v), _ => None, } } pub fn value_str(&self) -> Option { - match self.node { + match self.kind { MetaItemKind::NameValue(ref v) => { - match v.node { + match v.kind { LitKind::Str(ref s, _) => Some(*s), _ => None, } @@ -229,14 +230,14 @@ impl MetaItem { } pub fn meta_item_list(&self) -> Option<&[NestedMetaItem]> { - match self.node { + match self.kind { MetaItemKind::List(ref l) => Some(&l[..]), _ => None } } pub fn is_word(&self) -> bool { - match self.node { + match self.kind { MetaItemKind::Word => true, _ => false, } @@ -255,95 +256,37 @@ impl MetaItem { } } -impl Attribute { - /// Extracts the `MetaItem` from inside this `Attribute`. 
- pub fn meta(&self) -> Option { +impl AttrItem { + crate fn meta(&self, span: Span) -> Option { let mut tokens = self.tokens.trees().peekable(); Some(MetaItem { path: self.path.clone(), - node: if let Some(node) = MetaItemKind::from_tokens(&mut tokens) { + kind: if let Some(kind) = MetaItemKind::from_tokens(&mut tokens) { if tokens.peek().is_some() { return None; } - node + kind } else { return None; }, - span: self.span, + span, }) } +} - pub fn parse<'a, T, F>(&self, sess: &'a ParseSess, mut f: F) -> PResult<'a, T> - where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>, - { - let mut parser = Parser::new( - sess, - self.tokens.clone(), - None, - false, - false, - Some("attribute"), - ); - let result = f(&mut parser)?; - if parser.token != token::Eof { - parser.unexpected()?; - } - Ok(result) - } - - pub fn parse_list<'a, T, F>(&self, sess: &'a ParseSess, mut f: F) -> PResult<'a, Vec> - where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>, - { - if self.tokens.is_empty() { - return Ok(Vec::new()); - } - self.parse(sess, |parser| { - parser.expect(&token::OpenDelim(token::Paren))?; - let mut list = Vec::new(); - while !parser.eat(&token::CloseDelim(token::Paren)) { - list.push(f(parser)?); - if !parser.eat(&token::Comma) { - parser.expect(&token::CloseDelim(token::Paren))?; - break - } - } - Ok(list) - }) +impl Attribute { + /// Extracts the MetaItem from inside this Attribute. + pub fn meta(&self) -> Option { + self.item.meta(self.span) } pub fn parse_meta<'a>(&self, sess: &'a ParseSess) -> PResult<'a, MetaItem> { Ok(MetaItem { path: self.path.clone(), - node: self.parse(sess, |parser| parser.parse_meta_item_kind())?, + kind: parse::parse_in_attr(sess, self, |p| p.parse_meta_item_kind())?, span: self.span, }) } - - /// Converts `self` to a normal `#[doc="foo"]` comment, if it is a - /// comment like `///` or `/** */`. (Returns `self` unchanged for - /// non-sugared doc attributes.) 
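The removed `with_desugared_doc` helper (like `mk_sugared_doc_attr` later in this file) deals with doc comments being sugar for `#[doc = "..."]` attributes. A runnable illustration of that equivalence at the surface-syntax level:

```rust
// A sugared doc comment...
/// Adds one.
fn sugared(x: u32) -> u32 {
    x + 1
}

// ...is sugar for the explicit `doc` attribute it desugars to.
#[doc = "Adds one."]
fn desugared(x: u32) -> u32 {
    x + 1
}

fn main() {
    assert_eq!(sugared(1), desugared(1));
}
```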
- pub fn with_desugared_doc(&self, f: F) -> T where - F: FnOnce(&Attribute) -> T, - { - if self.is_sugared_doc { - let comment = self.value_str().unwrap(); - let meta = mk_name_value_item_str( - Ident::with_dummy_span(sym::doc), - Symbol::intern(&strip_doc_comment_decoration(&comment.as_str())), - DUMMY_SP, - ); - f(&Attribute { - id: self.id, - style: self.style, - path: meta.path, - tokens: meta.node.tokens(meta.span), - is_sugared_doc: true, - span: self.span, - }) - } else { - f(self) - } - } } /* Constructors */ @@ -356,15 +299,15 @@ pub fn mk_name_value_item_str(ident: Ident, str: Symbol, str_span: Span) -> Meta pub fn mk_name_value_item(ident: Ident, lit_kind: LitKind, lit_span: Span) -> MetaItem { let lit = Lit::from_lit_kind(lit_kind, lit_span); let span = ident.span.to(lit_span); - MetaItem { path: Path::from_ident(ident), span, node: MetaItemKind::NameValue(lit) } + MetaItem { path: Path::from_ident(ident), span, kind: MetaItemKind::NameValue(lit) } } pub fn mk_list_item(ident: Ident, items: Vec) -> MetaItem { - MetaItem { path: Path::from_ident(ident), span: ident.span, node: MetaItemKind::List(items) } + MetaItem { path: Path::from_ident(ident), span: ident.span, kind: MetaItemKind::List(items) } } pub fn mk_word_item(ident: Ident) -> MetaItem { - MetaItem { path: Path::from_ident(ident), span: ident.span, node: MetaItemKind::Word } + MetaItem { path: Path::from_ident(ident), span: ident.span, kind: MetaItemKind::Word } } pub fn mk_nested_word_item(ident: Ident) -> NestedMetaItem { @@ -384,10 +327,9 @@ crate fn mk_attr_id() -> AttrId { pub fn mk_attr(style: AttrStyle, path: Path, tokens: TokenStream, span: Span) -> Attribute { Attribute { + item: AttrItem { path, tokens }, id: mk_attr_id(), style, - path, - tokens, is_sugared_doc: false, span, } @@ -395,12 +337,12 @@ pub fn mk_attr(style: AttrStyle, path: Path, tokens: TokenStream, span: Span) -> /// Returns an inner attribute with the given value and span. pub fn mk_attr_inner(item: MetaItem) -> Attribute { - mk_attr(AttrStyle::Inner, item.path, item.node.tokens(item.span), item.span) + mk_attr(AttrStyle::Inner, item.path, item.kind.tokens(item.span), item.span) } /// Returns an outer attribute with the given value and span. 
pub fn mk_attr_outer(item: MetaItem) -> Attribute { - mk_attr(AttrStyle::Outer, item.path, item.node.tokens(item.span), item.span) + mk_attr(AttrStyle::Outer, item.path, item.kind.tokens(item.span), item.span) } pub fn mk_sugared_doc_attr(text: Symbol, span: Span) -> Attribute { @@ -408,10 +350,12 @@ pub fn mk_sugared_doc_attr(text: Symbol, span: Span) -> Attribute { let lit_kind = LitKind::Str(text, ast::StrStyle::Cooked); let lit = Lit::from_lit_kind(lit_kind, span); Attribute { + item: AttrItem { + path: Path::from_ident(Ident::with_dummy_span(sym::doc).with_span_pos(span)), + tokens: MetaItemKind::NameValue(lit).tokens(span), + }, id: mk_attr_id(), style, - path: Path::from_ident(Ident::with_dummy_span(sym::doc).with_span_pos(span)), - tokens: MetaItemKind::NameValue(lit).tokens(span), is_sugared_doc: true, span, } @@ -469,7 +413,7 @@ pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: Symbol) -> Option } impl MetaItem { - fn tokens(&self) -> TokenStream { + fn token_trees_and_joints(&self) -> Vec { let mut idents = vec![]; let mut last_pos = BytePos(0 as u32); for (i, segment) in self.path.segments.iter().enumerate() { @@ -483,8 +427,8 @@ impl MetaItem { idents.push(TokenTree::Token(Token::from_ast_ident(segment.ident)).into()); last_pos = segment.ident.span.hi(); } - self.node.tokens(self.span).append_to_tree_and_joint_vec(&mut idents); - TokenStream::new(idents) + idents.extend(self.kind.token_trees_and_joints(self.span)); + idents } fn from_tokens(tokens: &mut iter::Peekable) -> Option @@ -524,32 +468,33 @@ impl MetaItem { } Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. })) => match *nt { token::Nonterminal::NtIdent(ident, _) => Path::from_ident(ident), - token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()), + token::Nonterminal::NtMeta(ref item) => return item.meta(item.path.span), token::Nonterminal::NtPath(ref path) => path.clone(), _ => return None, }, _ => return None, }; let list_closing_paren_pos = tokens.peek().map(|tt| tt.span().hi()); - let node = MetaItemKind::from_tokens(tokens)?; - let hi = match node { + let kind = MetaItemKind::from_tokens(tokens)?; + let hi = match kind { MetaItemKind::NameValue(ref lit) => lit.span.hi(), MetaItemKind::List(..) 
=> list_closing_paren_pos.unwrap_or(path.span.hi()), _ => path.span.hi(), }; let span = path.span.with_hi(hi); - Some(MetaItem { path, node, span }) + Some(MetaItem { path, kind, span }) } } impl MetaItemKind { - pub fn tokens(&self, span: Span) -> TokenStream { + pub fn token_trees_and_joints(&self, span: Span) -> Vec { match *self { - MetaItemKind::Word => TokenStream::empty(), + MetaItemKind::Word => vec![], MetaItemKind::NameValue(ref lit) => { - let mut vec = vec![TokenTree::token(token::Eq, span).into()]; - lit.tokens().append_to_tree_and_joint_vec(&mut vec); - TokenStream::new(vec) + vec![ + TokenTree::token(token::Eq, span).into(), + lit.token_tree().into(), + ] } MetaItemKind::List(ref list) => { let mut tokens = Vec::new(); @@ -557,17 +502,26 @@ impl MetaItemKind { if i > 0 { tokens.push(TokenTree::token(token::Comma, span).into()); } - item.tokens().append_to_tree_and_joint_vec(&mut tokens); + tokens.extend(item.token_trees_and_joints()) } - TokenTree::Delimited( - DelimSpan::from_single(span), - token::Paren, - TokenStream::new(tokens).into(), - ).into() + vec![ + TokenTree::Delimited( + DelimSpan::from_single(span), + token::Paren, + TokenStream::new(tokens).into(), + ).into() + ] } } } + // Premature conversions of `TokenTree`s to `TokenStream`s can hurt + // performance. Do not use this function if `token_trees_and_joints()` can + // be used instead. + pub fn tokens(&self, span: Span) -> TokenStream { + TokenStream::new(self.token_trees_and_joints(span)) + } + fn from_tokens(tokens: &mut iter::Peekable) -> Option where I: Iterator, { @@ -609,10 +563,10 @@ impl NestedMetaItem { } } - fn tokens(&self) -> TokenStream { + fn token_trees_and_joints(&self) -> Vec { match *self { - NestedMetaItem::MetaItem(ref item) => item.tokens(), - NestedMetaItem::Literal(ref lit) => lit.tokens(), + NestedMetaItem::MetaItem(ref item) => item.token_trees_and_joints(), + NestedMetaItem::Literal(ref lit) => vec![lit.token_tree().into()], } } @@ -702,11 +656,11 @@ impl HasAttrs for StmtKind { impl HasAttrs for Stmt { fn attrs(&self) -> &[ast::Attribute] { - self.node.attrs() + self.kind.attrs() } fn visit_attrs)>(&mut self, f: F) { - self.node.visit_attrs(f); + self.kind.visit_attrs(f); } } diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs index 7eeea4e7bd..6003fd1d28 100644 --- a/src/libsyntax/config.rs +++ b/src/libsyntax/config.rs @@ -10,8 +10,9 @@ use crate::attr; use crate::ast; use crate::edition::Edition; use crate::mut_visit::*; -use crate::parse::{token, ParseSess}; +use crate::parse; use crate::ptr::P; +use crate::sess::ParseSess; use crate::symbol::sym; use crate::util::map_in_place::MapInPlace; @@ -56,6 +57,7 @@ pub fn features(mut krate: ast::Crate, sess: &ParseSess, edition: Edition, (krate, features) } +#[macro_export] macro_rules! configure { ($this:ident, $node:ident) => { match $this.configure($node) { @@ -111,25 +113,8 @@ impl<'a> StripUnconfigured<'a> { return vec![]; } - let (cfg_predicate, expanded_attrs) = match attr.parse(self.sess, |parser| { - parser.expect(&token::OpenDelim(token::Paren))?; - - let cfg_predicate = parser.parse_meta_item()?; - parser.expect(&token::Comma)?; - - // Presumably, the majority of the time there will only be one attr. 
- let mut expanded_attrs = Vec::with_capacity(1); - - while !parser.check(&token::CloseDelim(token::Paren)) { - let lo = parser.token.span.lo(); - let (path, tokens) = parser.parse_meta_item_unrestricted()?; - expanded_attrs.push((path, tokens, parser.prev_span.with_lo(lo))); - parser.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Paren)])?; - } - - parser.expect(&token::CloseDelim(token::Paren))?; - Ok((cfg_predicate, expanded_attrs)) - }) { + let res = parse::parse_in_attr(self.sess, &attr, |p| p.parse_cfg_attr()); + let (cfg_predicate, expanded_attrs) = match res { Ok(result) => result, Err(mut e) => { e.emit(); @@ -150,11 +135,10 @@ impl<'a> StripUnconfigured<'a> { // `cfg_attr` inside of another `cfg_attr`. E.g. // `#[cfg_attr(false, cfg_attr(true, some_attr))]`. expanded_attrs.into_iter() - .flat_map(|(path, tokens, span)| self.process_cfg_attr(ast::Attribute { + .flat_map(|(item, span)| self.process_cfg_attr(ast::Attribute { + item, id: attr::mk_attr_id(), style: attr.style, - path, - tokens, is_sugared_doc: false, span, })) @@ -298,7 +282,7 @@ impl<'a> StripUnconfigured<'a> { } pub fn configure_pat(&mut self, pat: &mut P) { - if let ast::PatKind::Struct(_path, fields, _etc) = &mut pat.node { + if let ast::PatKind::Struct(_path, fields, _etc) = &mut pat.kind { fields.flat_map_in_place(|field| self.configure(field)); } } @@ -321,13 +305,13 @@ impl<'a> MutVisitor for StripUnconfigured<'a> { fn visit_expr(&mut self, expr: &mut P) { self.configure_expr(expr); - self.configure_expr_kind(&mut expr.node); + self.configure_expr_kind(&mut expr.kind); noop_visit_expr(expr, self); } fn filter_map_expr(&mut self, expr: P) -> Option> { let mut expr = configure!(self, expr); - self.configure_expr_kind(&mut expr.node); + self.configure_expr_kind(&mut expr.kind); noop_visit_expr(&mut expr, self); Some(expr) } diff --git a/src/libsyntax/early_buffered_lints.rs b/src/libsyntax/early_buffered_lints.rs index 36c1da2929..5cc953b906 100644 --- a/src/libsyntax/early_buffered_lints.rs +++ b/src/libsyntax/early_buffered_lints.rs @@ -11,6 +11,7 @@ use syntax_pos::MultiSpan; pub enum BufferedEarlyLintId { IllFormedAttributeInput, MetaVariableMisuse, + IncompleteInclude, } /// Stores buffered lint info which can later be passed to `librustc`. diff --git a/src/libsyntax/entry.rs b/src/libsyntax/entry.rs index 0b6cf30bd2..34b5b1e5b5 100644 --- a/src/libsyntax/entry.rs +++ b/src/libsyntax/entry.rs @@ -13,7 +13,7 @@ pub enum EntryPointType { // Beware, this is duplicated in librustc/middle/entry.rs, make sure to keep // them in sync. pub fn entry_point_type(item: &Item, depth: usize) -> EntryPointType { - match item.node { + match item.kind { ItemKind::Fn(..) => { if attr::contains_name(&item.attrs, sym::start) { EntryPointType::Start diff --git a/src/libsyntax/error_codes.rs b/src/libsyntax/error_codes.rs index 9925dd8ada..941df5ea57 100644 --- a/src/libsyntax/error_codes.rs +++ b/src/libsyntax/error_codes.rs @@ -144,6 +144,44 @@ fn deprecated_function() {} ``` "##, +E0550: r##" +More than one `deprecated` attribute has been put on an item. + +Erroneous code example: + +```compile_fail,E0550 +#[deprecated(note = "because why not?")] +#[deprecated(note = "right?")] // error! +fn the_banished() {} +``` + +The `deprecated` attribute can only be present **once** on an item. + +``` +#[deprecated(note = "because why not, right?")] +fn the_banished() {} // ok! +``` +"##, + +E0551: r##" +An invalid meta-item was used inside an attribute. 
+ +Erroneous code example: + +```compile_fail,E0551 +#[deprecated(note)] // error! +fn i_am_deprecated() {} +``` + +Meta items are the key-value pairs inside of an attribute. To fix this issue, +you need to give a value to the `note` key. Example: + +``` +#[deprecated(note = "because")] // ok! +fn i_am_deprecated() {} +``` +"##, + E0552: r##" A unrecognized representation attribute was used. @@ -189,6 +227,25 @@ If you need the feature, make sure to use a nightly release of the compiler (but be warned that the feature may be removed or altered in the future). "##, +E0556: r##" +The `feature` attribute was badly formed. + +Erroneous code example: + +```compile_fail,E0556 +#![feature(foo_bar_baz, foo(bar), foo = "baz", foo)] // error! +#![feature] // error! +#![feature = "foo"] // error! +``` + +The `feature` attribute only accept a "feature flag" and can only be used on +nightly. Example: + +```ignore (only works in nightly) +#![feature(flag)] +``` +"##, + E0557: r##" A feature attribute named a feature that has been removed. @@ -238,6 +295,33 @@ named `file_that_doesnt_exist.rs` or `file_that_doesnt_exist/mod.rs` in the same directory. "##, +E0584: r##" +A doc comment that is not attached to anything has been encountered. + +Erroneous code example: + +```compile_fail,E0584 +trait Island { + fn lost(); + + /// I'm lost! +} +``` + +A little reminder: a doc comment has to be placed before the item it's supposed +to document. So if you want to document the `Island` trait, you need to put a +doc comment before it, not inside it. Same goes for the `lost` method: the doc +comment needs to be before it: + +``` +/// I'm THE island! +trait Island { + /// I'm lost! + fn lost(); +} +``` +"##, + E0585: r##" A documentation comment that doesn't document anything was found. @@ -403,7 +487,6 @@ Erroneous code example: // `test_2018_feature` is // included in the Rust 2018 edition ``` - "##, E0725: r##" @@ -421,6 +504,20 @@ Delete the offending feature attribute, or add it to the list of allowed features in the `-Z allow_features` flag. "##, +E0743: r##" +C-variadic has been used on a non-foreign function. + +Erroneous code example: + +```compile_fail,E0743 +fn foo2(x: u8, ...) {} // error! +``` + +Only foreign functions can use C-variadic (`...`). It is used to give an +undefined number of parameters to a given function (like `printf` in C). The +equivalent in Rust would be to use macros directly. +"##, + ; E0539, // incorrect meta item @@ -435,12 +532,8 @@ features in the `-Z allow_features` flag. // rustc_deprecated attribute must be paired with either stable or unstable // attribute E0549, - E0550, // multiple deprecated attributes - E0551, // incorrect meta item E0553, // multiple rustc_const_unstable attributes // E0555, // replaced with a generic attribute input check - E0556, // malformed feature, expected just one word - E0584, // file for module `..` found at both .. and .. 
E0629, // missing 'feature' (rustc_const_unstable) // rustc_const_unstable attribute must be paired with stable/unstable // attribute diff --git a/src/libsyntax/ext/allocator.rs b/src/libsyntax/expand/allocator.rs similarity index 98% rename from src/libsyntax/ext/allocator.rs rename to src/libsyntax/expand/allocator.rs index 99aeb5414c..20487b9af0 100644 --- a/src/libsyntax/ext/allocator.rs +++ b/src/libsyntax/expand/allocator.rs @@ -1,5 +1,5 @@ use crate::{ast, attr, visit}; -use crate::symbol::{sym, Symbol}; +use syntax_pos::symbol::{sym, Symbol}; use syntax_pos::Span; #[derive(Clone, Copy)] diff --git a/src/libsyntax/expand/mod.rs b/src/libsyntax/expand/mod.rs new file mode 100644 index 0000000000..038f60287b --- /dev/null +++ b/src/libsyntax/expand/mod.rs @@ -0,0 +1,21 @@ +//! Definitions shared by macros / syntax extensions and e.g. librustc. + +use crate::ast::Attribute; +use syntax_pos::symbol::sym; + +pub mod allocator; + +bitflags::bitflags! { + /// Built-in derives that need some extra tracking beyond the usual macro functionality. + #[derive(Default)] + pub struct SpecialDerives: u8 { + const PARTIAL_EQ = 1 << 0; + const EQ = 1 << 1; + const COPY = 1 << 2; + } +} + +pub fn is_proc_macro_attr(attr: &Attribute) -> bool { + [sym::proc_macro, sym::proc_macro_attribute, sym::proc_macro_derive] + .iter().any(|kind| attr.check_name(*kind)) +} diff --git a/src/libsyntax/feature_gate/accepted.rs b/src/libsyntax/feature_gate/accepted.rs index 5538daf388..a1cf2d4210 100644 --- a/src/libsyntax/feature_gate/accepted.rs +++ b/src/libsyntax/feature_gate/accepted.rs @@ -245,6 +245,14 @@ declare_features! ( (accepted, bind_by_move_pattern_guards, "1.39.0", Some(15287), None), /// Allows attributes in formal function parameters. (accepted, param_attrs, "1.39.0", Some(60406), None), + /// Allows macro invocations in `extern {}` blocks. + (accepted, macros_in_extern, "1.40.0", Some(49476), None), + /// Allows future-proofing enums/structs with the `#[non_exhaustive]` attribute (RFC 2008). + (accepted, non_exhaustive, "1.40.0", Some(44109), None), + /// Allows calling constructor functions in `const fn`. + (accepted, const_constructor, "1.40.0", Some(61456), None), + /// Allows the use of `#[cfg(doctest)]`, set when rustdoc is collecting doctests. + (accepted, cfg_doctest, "1.40.0", Some(62210), None), // ------------------------------------------------------------------------- // feature-group-end: accepted features diff --git a/src/libsyntax/feature_gate/active.rs b/src/libsyntax/feature_gate/active.rs index 38c16dbac6..736a363bbf 100644 --- a/src/libsyntax/feature_gate/active.rs +++ b/src/libsyntax/feature_gate/active.rs @@ -330,8 +330,13 @@ declare_features! ( /// Allows exhaustive pattern matching on types that contain uninhabited types. (active, exhaustive_patterns, "1.13.0", Some(51085), None), - /// Allows untagged unions `union U { ... }`. - (active, untagged_unions, "1.13.0", Some(32836), None), + /// Allows `union`s to implement `Drop`. Moreover, `union`s may now include fields + /// that don't implement `Copy` as long as they don't have any drop glue. + /// This is checked recursively. On encountering type variable where no progress can be made, + /// `T: Copy` is used as a substitute for "no drop glue". + /// + /// NOTE: A limited form of `union U { ... }` was accepted in 1.19.0. + (active, untagged_unions, "1.13.0", Some(55149), None), /// Allows `#[link(..., cfg(..))]`. (active, link_cfg, "1.14.0", Some(37406), None), @@ -378,9 +383,6 @@ declare_features! 
( /// Allows `#[doc(include = "some-file")]`. (active, external_doc, "1.22.0", Some(44732), None), - /// Allows future-proofing enums/structs with the `#[non_exhaustive]` attribute (RFC 2008). - (active, non_exhaustive, "1.22.0", Some(44109), None), - /// Allows using `crate` as visibility modifier, synonymous with `pub(crate)`. (active, crate_visibility_modifier, "1.23.0", Some(53120), None), @@ -402,9 +404,6 @@ declare_features! ( /// Allows infering `'static` outlives requirements (RFC 2093). (active, infer_static_outlives_requirements, "1.26.0", Some(54185), None), - /// Allows macro invocations in `extern {}` blocks. - (active, macros_in_extern, "1.27.0", Some(49476), None), - /// Allows accessing fields of unions inside `const` functions. (active, const_fn_union, "1.27.0", Some(51909), None), @@ -489,9 +488,6 @@ declare_features! ( /// Allows the user of associated type bounds. (active, associated_type_bounds, "1.34.0", Some(52662), None), - /// Allows calling constructor functions in `const fn`. - (active, const_constructor, "1.37.0", Some(61456), None), - /// Allows `if/while p && let q = r && ...` chains. (active, let_chains, "1.37.0", Some(53667), None), @@ -510,9 +506,6 @@ declare_features! ( /// Allows `async || body` closures. (active, async_closure, "1.37.0", Some(62290), None), - /// Allows the use of `#[cfg(doctest)]`; set when rustdoc is collecting doctests. - (active, cfg_doctest, "1.37.0", Some(62210), None), - /// Allows `[x; N]` where `x` is a constant (RFC 2203). (active, const_in_array_repeat_expressions, "1.37.0", Some(49147), None), @@ -522,6 +515,24 @@ declare_features! ( /// Allows the use of or-patterns (e.g., `0 | 1`). (active, or_patterns, "1.38.0", Some(54883), None), + /// Allows the definition of `const extern fn` and `const unsafe extern fn`. + (active, const_extern_fn, "1.40.0", Some(64926), None), + + /// Allows the use of raw-dylibs (RFC 2627). + (active, raw_dylib, "1.40.0", Some(58713), None), + + /// Allows `#[track_caller]` to be used which provides + /// accurate caller location reporting during panic (RFC 2091). + (active, track_caller, "1.40.0", Some(47809), None), + + /// Allows making `dyn Trait` well-formed even if `Trait` is not object safe. + /// In that case, `dyn Trait: Trait` does not hold. Moreover, coercions and + /// casts in safe Rust to `dyn Trait` for such a `Trait` is also forbidden. + (active, object_safe_for_dispatch, "1.40.0", Some(43561), None), + + /// Allows using the `efiapi` ABI. 
+ (active, abi_efiapi, "1.40.0", Some(65815), None), + // ------------------------------------------------------------------------- // feature-group-end: actual feature gates // ------------------------------------------------------------------------- @@ -536,4 +547,6 @@ pub const INCOMPLETE_FEATURES: &[Symbol] = &[ sym::const_generics, sym::or_patterns, sym::let_chains, + sym::raw_dylib, + sym::track_caller, ]; diff --git a/src/libsyntax/feature_gate/builtin_attrs.rs b/src/libsyntax/feature_gate/builtin_attrs.rs index b6e13200f3..eb811c3e0f 100644 --- a/src/libsyntax/feature_gate/builtin_attrs.rs +++ b/src/libsyntax/feature_gate/builtin_attrs.rs @@ -9,8 +9,8 @@ use super::active::Features; use crate::ast; use crate::attr::AttributeTemplate; +use crate::sess::ParseSess; use crate::symbol::{Symbol, sym}; -use crate::parse::ParseSess; use syntax_pos::Span; use rustc_data_structures::fx::FxHashMap; @@ -29,8 +29,8 @@ const GATED_CFGS: &[(Symbol, Symbol, GateFn)] = &[ // (name in cfg, feature, function to check if the feature is enabled) (sym::target_thread_local, sym::cfg_target_thread_local, cfg_fn!(cfg_target_thread_local)), (sym::target_has_atomic, sym::cfg_target_has_atomic, cfg_fn!(cfg_target_has_atomic)), + (sym::target_has_atomic_load_store, sym::cfg_target_has_atomic, cfg_fn!(cfg_target_has_atomic)), (sym::rustdoc, sym::doc_cfg, cfg_fn!(doc_cfg)), - (sym::doctest, sym::cfg_doctest, cfg_fn!(cfg_doctest)), ]; #[derive(Debug)] @@ -251,6 +251,7 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ ungated!(path, Normal, template!(NameValueStr: "file")), ungated!(no_std, CrateLevel, template!(Word)), ungated!(no_implicit_prelude, Normal, template!(Word)), + ungated!(non_exhaustive, Whitelisted, template!(Word)), // Runtime ungated!(windows_subsystem, Whitelisted, template!(NameValueStr: "windows|console")), @@ -276,12 +277,35 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ "the `link_args` attribute is experimental and not portable across platforms, \ it is recommended to use `#[link(name = \"foo\")] instead", ), + gated!( + link_ordinal, Whitelisted, template!(List: "ordinal"), raw_dylib, + experimental!(link_ordinal) + ), // Plugins: - ungated!(plugin_registrar, Normal, template!(Word)), - gated!( - plugin, CrateLevel, template!(List: "name|name(args)"), - "compiler plugins are experimental and possibly buggy", + ( + sym::plugin_registrar, Normal, template!(Word), + Gated( + Stability::Deprecated( + "https://github.com/rust-lang/rust/pull/64675", + Some("may be removed in a future compiler version"), + ), + sym::plugin_registrar, + "compiler plugins are deprecated", + cfg_fn!(plugin_registrar) + ) + ), + ( + sym::plugin, CrateLevel, template!(List: "name|name(args)"), + Gated( + Stability::Deprecated( + "https://github.com/rust-lang/rust/pull/64675", + Some("may be removed in a future compiler version"), + ), + sym::plugin, + "compiler plugins are deprecated", + cfg_fn!(plugin) + ) ), // Testing: @@ -290,9 +314,6 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ test_runner, CrateLevel, template!(List: "path"), custom_test_frameworks, "custom test frameworks are an unstable feature", ), - - // RFC #2008 - gated!(non_exhaustive, Whitelisted, template!(Word), experimental!(non_exhaustive)), // RFC #1268 gated!(marker, Normal, template!(Word), marker_trait_attr, experimental!(marker)), gated!( @@ -307,6 +328,7 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ ), gated!(ffi_returns_twice, Whitelisted, template!(Word), experimental!(ffi_returns_twice)), + 
gated!(track_caller, Whitelisted, template!(Word), experimental!(track_caller)), // ========================================================================== // Internal attributes: Stability, deprecation, and unsafe: @@ -457,7 +479,6 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ // ========================================================================== // Internal attributes, Misc: // ========================================================================== - gated!( lang, Normal, template!(NameValueStr: "name"), lang_items, "language items are subject to change", @@ -498,6 +519,10 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ overflow checking behavior of several libcore functions that are inlined \ across crates and will never be stable", ), + rustc_attr!(rustc_reservation_impl, Normal, template!(NameValueStr: "reservation message"), + "the `#[rustc_reservation_impl]` attribute is internally used \ + for reserving for `for From for T` impl" + ), rustc_attr!( rustc_test_marker, Normal, template!(Word), "the `#[rustc_test_marker]` attribute is used internally to track tests", diff --git a/src/libsyntax/feature_gate/check.rs b/src/libsyntax/feature_gate/check.rs index b50ca1ad1c..13a24ca046 100644 --- a/src/libsyntax/feature_gate/check.rs +++ b/src/libsyntax/feature_gate/check.rs @@ -5,14 +5,14 @@ use super::builtin_attrs::{AttributeGate, BUILTIN_ATTRIBUTE_MAP}; use crate::ast::{ self, AssocTyConstraint, AssocTyConstraintKind, NodeId, GenericParam, GenericParamKind, - PatKind, RangeEnd, + PatKind, RangeEnd, VariantData, }; use crate::attr::{self, check_builtin_attribute}; use crate::source_map::Spanned; use crate::edition::{ALL_EDITIONS, Edition}; use crate::visit::{self, FnKind, Visitor}; -use crate::parse::{token, ParseSess}; -use crate::parse::parser::Parser; +use crate::parse::token; +use crate::sess::ParseSess; use crate::symbol::{Symbol, sym}; use crate::tokenstream::TokenTree; @@ -56,7 +56,7 @@ macro_rules! gate_feature { }; } -crate fn check_attribute(attr: &ast::Attribute, parse_sess: &ParseSess, features: &Features) { +pub fn check_attribute(attr: &ast::Attribute, parse_sess: &ParseSess, features: &Features) { PostExpansionVisitor { parse_sess, features }.visit_attribute(attr) } @@ -234,6 +234,10 @@ impl<'a> PostExpansionVisitor<'a> { gate_feature_post!(&self, abi_amdgpu_kernel, span, "amdgpu-kernel ABI is experimental and subject to change"); }, + Abi::EfiApi => { + gate_feature_post!(&self, abi_efiapi, span, + "efiapi ABI is experimental and subject to change"); + }, // Stable Abi::Cdecl | Abi::Stdcall | @@ -246,6 +250,70 @@ impl<'a> PostExpansionVisitor<'a> { Abi::System => {} } } + + fn maybe_report_invalid_custom_discriminants(&self, variants: &[ast::Variant]) { + let has_fields = variants.iter().any(|variant| match variant.data { + VariantData::Tuple(..) | VariantData::Struct(..) => true, + VariantData::Unit(..) => false, + }); + + let discriminant_spans = variants.iter().filter(|variant| match variant.data { + VariantData::Tuple(..) | VariantData::Struct(..) => false, + VariantData::Unit(..) 
=> true, + }) + .filter_map(|variant| variant.disr_expr.as_ref().map(|c| c.value.span)) + .collect::>(); + + if !discriminant_spans.is_empty() && has_fields { + let mut err = feature_err( + self.parse_sess, + sym::arbitrary_enum_discriminant, + discriminant_spans.clone(), + crate::feature_gate::GateIssue::Language, + "custom discriminant values are not allowed in enums with tuple or struct variants", + ); + for sp in discriminant_spans { + err.span_label(sp, "disallowed custom discriminant"); + } + for variant in variants.iter() { + match &variant.data { + VariantData::Struct(..) => { + err.span_label( + variant.span, + "struct variant defined here", + ); + } + VariantData::Tuple(..) => { + err.span_label( + variant.span, + "tuple variant defined here", + ); + } + VariantData::Unit(..) => {} + } + } + err.emit(); + } + } + + fn check_gat(&self, generics: &ast::Generics, span: Span) { + if !generics.params.is_empty() { + gate_feature_post!( + &self, + generic_associated_types, + span, + "generic associated types are unstable" + ); + } + if !generics.where_clause.predicates.is_empty() { + gate_feature_post!( + &self, + generic_associated_types, + span, + "where clauses on associated types are unstable" + ); + } + } } impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { @@ -302,7 +370,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { } fn visit_item(&mut self, i: &'a ast::Item) { - match i.node { + match i.kind { ast::ItemKind::ForeignMod(ref foreign_module) => { self.check_abi(foreign_module.abi, i.span); } @@ -353,7 +421,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { let has_feature = self.features.arbitrary_enum_discriminant; if !has_feature && !i.span.allows_unstable(sym::arbitrary_enum_discriminant) { - Parser::maybe_report_invalid_custom_discriminants(self.parse_sess, &variants); + self.maybe_report_invalid_custom_discriminants(&variants); } } @@ -408,7 +476,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { } fn visit_foreign_item(&mut self, i: &'a ast::ForeignItem) { - match i.node { + match i.kind { ast::ForeignItemKind::Fn(..) | ast::ForeignItemKind::Static(..) => { let link_name = attr::first_attr_value_str_by_name(&i.attrs, sym::link_name); @@ -432,7 +500,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { } fn visit_ty(&mut self, ty: &'a ast::Ty) { - match ty.node { + match ty.kind { ast::TyKind::BareFn(ref bare_fn_ty) => { self.check_abi(bare_fn_ty.abi, ty.span); } @@ -447,7 +515,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { fn visit_fn_ret_ty(&mut self, ret_ty: &'a ast::FunctionRetTy) { if let ast::FunctionRetTy::Ty(ref output_ty) = *ret_ty { - if let ast::TyKind::Never = output_ty.node { + if let ast::TyKind::Never = output_ty.kind { // Do nothing. 
} else { self.visit_ty(output_ty) @@ -456,7 +524,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { } fn visit_expr(&mut self, e: &'a ast::Expr) { - match e.node { + match e.kind { ast::ExprKind::Box(_) => { gate_feature_post!(&self, box_syntax, e.span, EXPLAIN_BOX_SYNTAX); } @@ -487,11 +555,11 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { } fn visit_pat(&mut self, pattern: &'a ast::Pat) { - match &pattern.node { + match &pattern.kind { PatKind::Slice(pats) => { for pat in &*pats { let span = pat.span; - let inner_pat = match &pat.node { + let inner_pat = match &pat.kind { PatKind::Ident(.., Some(pat)) => pat, _ => pat, }; @@ -531,7 +599,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { self.check_abi(header.abi, span); } - if fn_decl.c_variadic { + if fn_decl.c_variadic() { gate_feature_post!(&self, c_variadic, span, "C-variadic functions are unstable"); } @@ -559,12 +627,12 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { } fn visit_trait_item(&mut self, ti: &'a ast::TraitItem) { - match ti.node { + match ti.kind { ast::TraitItemKind::Method(ref sig, ref block) => { if block.is_none() { self.check_abi(sig.header.abi, ti.span); } - if sig.decl.c_variadic { + if sig.decl.c_variadic() { gate_feature_post!(&self, c_variadic, ti.span, "C-variadic functions are unstable"); } @@ -579,14 +647,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { gate_feature_post!(&self, associated_type_defaults, ti.span, "associated type defaults are unstable"); } - if !ti.generics.params.is_empty() { - gate_feature_post!(&self, generic_associated_types, ti.span, - "generic associated types are unstable"); - } - if !ti.generics.where_clause.predicates.is_empty() { - gate_feature_post!(&self, generic_associated_types, ti.span, - "where clauses on associated types are unstable"); - } + self.check_gat(&ti.generics, ti.span); } _ => {} } @@ -600,8 +661,13 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { "specialization is unstable"); } - match ii.node { - ast::ImplItemKind::Method(..) => {} + match ii.kind { + ast::ImplItemKind::Method(ref sig, _) => { + if sig.decl.c_variadic() { + gate_feature_post!(&self, c_variadic, ii.span, + "C-variadic functions are unstable"); + } + } ast::ImplItemKind::OpaqueTy(..) 
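A minimal sketch (not from this patch) of the trait items the new `check_gat` helper gates in one place: the generic parameters and the where-clause on the associated type correspond to the two `gate_feature_post!` calls above. Generic associated types were stabilized later, so this compiles on recent stable without a feature attribute.

```rust
// Both halves of `check_gat` fire on this associated type when the
// `generic_associated_types` feature is not enabled:
//   - non-empty generics      -> "generic associated types are unstable"
//   - non-empty where-clause  -> "where clauses on associated types are unstable"
trait Collection {
    type Iter<'a>
    where
        Self: 'a;
}

fn main() {}
```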
=> { gate_feature_post!( &self, @@ -611,14 +677,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { ); } ast::ImplItemKind::TyAlias(_) => { - if !ii.generics.params.is_empty() { - gate_feature_post!(&self, generic_associated_types, ii.span, - "generic associated types are unstable"); - } - if !ii.generics.where_clause.predicates.is_empty() { - gate_feature_post!(&self, generic_associated_types, ii.span, - "where clauses on associated types are unstable"); - } + self.check_gat(&ii.generics, ii.span); } _ => {} } @@ -764,7 +823,7 @@ pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute], } if let Some(allowed) = allow_features.as_ref() { - if allowed.iter().find(|f| *f == name.as_str()).is_none() { + if allowed.iter().find(|&f| f == &name.as_str() as &str).is_none() { span_err!(span_handler, mi.span(), E0725, "the feature `{}` is not in the list of allowed features", name); @@ -816,6 +875,38 @@ pub fn check_crate(krate: &ast::Crate, gate_all!(async_closure, "async closures are unstable"); gate_all!(yields, generators, "yield syntax is experimental"); gate_all!(or_patterns, "or-patterns syntax is experimental"); + gate_all!(const_extern_fn, "`const extern fn` definitions are unstable"); + + // All uses of `gate_all!` below this point were added in #65742, + // and subsequently disabled (with the non-early gating readded). + macro_rules! gate_all { + ($gate:ident, $msg:literal) => { + // FIXME(eddyb) do something more useful than always + // disabling these uses of early feature-gatings. + if false { + for span in &*parse_sess.gated_spans.$gate.borrow() { + gate_feature!(&visitor, $gate, *span, $msg); + } + } + } + } + + gate_all!(trait_alias, "trait aliases are experimental"); + gate_all!(associated_type_bounds, "associated type bounds are unstable"); + gate_all!(crate_visibility_modifier, "`crate` visibility modifier is experimental"); + gate_all!(const_generics, "const generics are unstable"); + gate_all!(decl_macro, "`macro` is experimental"); + gate_all!(box_patterns, "box pattern syntax is experimental"); + gate_all!(exclusive_range_pattern, "exclusive range pattern syntax is experimental"); + gate_all!(try_blocks, "`try` blocks are unstable"); + gate_all!(label_break_value, "labels on blocks are unstable"); + gate_all!(box_syntax, "box expression syntax is experimental; you can call `Box::new` instead"); + + // To avoid noise about type ascription in common syntax errors, + // only emit if it is the *only* error. (Also check it last.) 
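One of the syntactically collected gates listed above, `label_break_value`, as a standalone example (not from this patch). At the time it required `#![feature(label_break_value)]`; the feature was stabilized later, so the sketch compiles on recent toolchains.

```rust
fn main() {
    // `break` with a value out of a labeled block: the span of this expression
    // is what ends up in `gated_spans.label_break_value` during parsing.
    let n = 'found: {
        for candidate in [2, 3, 5, 7] {
            if candidate > 4 {
                break 'found candidate;
            }
        }
        0
    };
    assert_eq!(n, 5);
}
```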
+ if parse_sess.span_diagnostic.err_count() == 0 { + gate_all!(type_ascription, "type ascription is experimental"); + } visit::walk_crate(&mut visitor, krate); } @@ -849,25 +940,19 @@ impl UnstableFeatures { pub fn is_nightly_build(&self) -> bool { match *self { UnstableFeatures::Allow | UnstableFeatures::Cheat => true, - _ => false, + UnstableFeatures::Disallow => false, } } } fn maybe_stage_features(span_handler: &Handler, krate: &ast::Crate, unstable: UnstableFeatures) { - let allow_features = match unstable { - UnstableFeatures::Allow => true, - UnstableFeatures::Disallow => false, - UnstableFeatures::Cheat => true - }; - if !allow_features { - for attr in &krate.attrs { - if attr.check_name(sym::feature) { - let release_channel = option_env!("CFG_RELEASE_CHANNEL").unwrap_or("(unknown)"); - span_err!(span_handler, attr.span, E0554, - "`#![feature]` may not be used on the {} release channel", - release_channel); - } + if !unstable.is_nightly_build() { + for attr in krate.attrs.iter().filter(|attr| attr.check_name(sym::feature)) { + span_err!( + span_handler, attr.span, E0554, + "`#![feature]` may not be used on the {} release channel", + option_env!("CFG_RELEASE_CHANNEL").unwrap_or("(unknown)") + ); } } } diff --git a/src/libsyntax/feature_gate/mod.rs b/src/libsyntax/feature_gate/mod.rs index ca13ab3620..ba970618c0 100644 --- a/src/libsyntax/feature_gate/mod.rs +++ b/src/libsyntax/feature_gate/mod.rs @@ -58,8 +58,7 @@ pub use builtin_attrs::{ deprecated_attributes, is_builtin_attr, is_builtin_attr_name, }; pub use check::{ - check_crate, get_features, feature_err, emit_feature_err, + check_crate, check_attribute, get_features, feature_err, emit_feature_err, Stability, GateIssue, UnstableFeatures, EXPLAIN_STMT_ATTR_SYNTAX, EXPLAIN_UNSIZED_TUPLE_COERCION, }; -crate use check::check_attribute; diff --git a/src/libsyntax/json.rs b/src/libsyntax/json.rs index 5cdea3aabb..0b15793837 100644 --- a/src/libsyntax/json.rs +++ b/src/libsyntax/json.rs @@ -12,7 +12,7 @@ use crate::source_map::{SourceMap, FilePathMapping}; use errors::registry::Registry; -use errors::{SubDiagnostic, CodeSuggestion, SourceMapper}; +use errors::{SubDiagnostic, CodeSuggestion, SourceMapper, SourceMapperDyn}; use errors::{DiagnosticId, Applicability}; use errors::emitter::{Emitter, HumanReadableErrorType}; @@ -25,6 +25,9 @@ use std::sync::{Arc, Mutex}; use rustc_serialize::json::{as_json, as_pretty_json}; +#[cfg(test)] +mod tests; + pub struct JsonEmitter { dst: Box, registry: Option, @@ -89,8 +92,8 @@ impl JsonEmitter { } impl Emitter for JsonEmitter { - fn emit_diagnostic(&mut self, db: &errors::Diagnostic) { - let data = Diagnostic::from_errors_diagnostic(db, self); + fn emit_diagnostic(&mut self, diag: &errors::Diagnostic) { + let data = Diagnostic::from_errors_diagnostic(diag, self); let result = if self.pretty { writeln!(&mut self.dst, "{}", as_pretty_json(&data)) } else { @@ -112,6 +115,17 @@ impl Emitter for JsonEmitter { panic!("failed to print notification: {:?}", e); } } + + fn source_map(&self) -> Option<&Lrc> { + Some(&self.sm) + } + + fn should_show_explain(&self) -> bool { + match self.json_rendered { + HumanReadableErrorType::Short(_) => false, + _ => true, + } + } } // The following data types are provided just for serialisation. 
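A standalone sketch of the simplification above: the open-coded `allow_features` match that `maybe_stage_features` used to perform computes exactly `is_nightly_build()`. The types below are local to this example, not rustc's.

```rust
// Local re-statement of the enum and the rewritten method, to show the
// equivalence the refactor relies on.
enum UnstableFeatures {
    Allow,
    Disallow,
    Cheat,
}

impl UnstableFeatures {
    fn is_nightly_build(&self) -> bool {
        match *self {
            UnstableFeatures::Allow | UnstableFeatures::Cheat => true,
            UnstableFeatures::Disallow => false,
        }
    }
}

fn main() {
    // Same truth table as the removed `allow_features` match: E0554 is only
    // emitted when this returns `false` (a non-nightly build).
    assert!(UnstableFeatures::Allow.is_nightly_build());
    assert!(UnstableFeatures::Cheat.is_nightly_build());
    assert!(!UnstableFeatures::Disallow.is_nightly_build());
}
```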
@@ -198,10 +212,10 @@ struct ArtifactNotification<'a> { } impl Diagnostic { - fn from_errors_diagnostic(db: &errors::Diagnostic, + fn from_errors_diagnostic(diag: &errors::Diagnostic, je: &JsonEmitter) -> Diagnostic { - let sugg = db.suggestions.iter().map(|sugg| { + let sugg = diag.suggestions.iter().map(|sugg| { Diagnostic { message: sugg.msg.clone(), code: None, @@ -230,30 +244,30 @@ impl Diagnostic { let output = buf.clone(); je.json_rendered.new_emitter( Box::new(buf), Some(je.sm.clone()), false, None, je.external_macro_backtrace - ).ui_testing(je.ui_testing).emit_diagnostic(db); + ).ui_testing(je.ui_testing).emit_diagnostic(diag); let output = Arc::try_unwrap(output.0).unwrap().into_inner().unwrap(); let output = String::from_utf8(output).unwrap(); Diagnostic { - message: db.message(), - code: DiagnosticCode::map_opt_string(db.code.clone(), je), - level: db.level.to_str(), - spans: DiagnosticSpan::from_multispan(&db.span, je), - children: db.children.iter().map(|c| { + message: diag.message(), + code: DiagnosticCode::map_opt_string(diag.code.clone(), je), + level: diag.level.to_str(), + spans: DiagnosticSpan::from_multispan(&diag.span, je), + children: diag.children.iter().map(|c| { Diagnostic::from_sub_diagnostic(c, je) }).chain(sugg).collect(), rendered: Some(output), } } - fn from_sub_diagnostic(db: &SubDiagnostic, je: &JsonEmitter) -> Diagnostic { + fn from_sub_diagnostic(diag: &SubDiagnostic, je: &JsonEmitter) -> Diagnostic { Diagnostic { - message: db.message(), + message: diag.message(), code: None, - level: db.level.to_str(), - spans: db.render_span.as_ref() + level: diag.level.to_str(), + spans: diag.render_span.as_ref() .map(|sp| DiagnosticSpan::from_multispan(sp, je)) - .unwrap_or_else(|| DiagnosticSpan::from_multispan(&db.span, je)), + .unwrap_or_else(|| DiagnosticSpan::from_multispan(&diag.span, je)), children: vec![], rendered: None, } @@ -325,8 +339,8 @@ impl DiagnosticSpan { DiagnosticSpan { file_name: start.file.name.to_string(), - byte_start: span.lo().0 - start.file.start_pos.0, - byte_end: span.hi().0 - start.file.start_pos.0, + byte_start: start.file.original_relative_byte_pos(span.lo()).0, + byte_end: start.file.original_relative_byte_pos(span.hi()).0, line_start: start.line, line_end: end.line, column_start: start.col.0 + 1, diff --git a/src/libsyntax/json/tests.rs b/src/libsyntax/json/tests.rs new file mode 100644 index 0000000000..eb0d9ef394 --- /dev/null +++ b/src/libsyntax/json/tests.rs @@ -0,0 +1,186 @@ +use super::*; + +use crate::json::JsonEmitter; +use crate::source_map::{FilePathMapping, SourceMap}; +use crate::tests::Shared; +use crate::with_default_globals; + +use errors::emitter::{ColorConfig, HumanReadableErrorType}; +use errors::Handler; +use rustc_serialize::json::decode; +use syntax_pos::{BytePos, Span}; + +use std::str; + +#[derive(RustcDecodable, Debug, PartialEq, Eq)] +struct TestData { + spans: Vec, +} + +#[derive(RustcDecodable, Debug, PartialEq, Eq)] +struct SpanTestData { + pub byte_start: u32, + pub byte_end: u32, + pub line_start: u32, + pub column_start: u32, + pub line_end: u32, + pub column_end: u32, +} + +/// Test the span yields correct positions in JSON. 
+fn test_positions(code: &str, span: (u32, u32), expected_output: SpanTestData) { + let expected_output = TestData { spans: vec![expected_output] }; + + with_default_globals(|| { + let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); + sm.new_source_file(Path::new("test.rs").to_owned().into(), code.to_owned()); + + let output = Arc::new(Mutex::new(Vec::new())); + let je = JsonEmitter::new( + Box::new(Shared { data: output.clone() }), + None, + sm, + true, + HumanReadableErrorType::Short(ColorConfig::Never), + false, + ); + + let span = Span::with_root_ctxt(BytePos(span.0), BytePos(span.1)); + let handler = Handler::with_emitter(true, None, Box::new(je)); + handler.span_err(span, "foo"); + + let bytes = output.lock().unwrap(); + let actual_output = str::from_utf8(&bytes).unwrap(); + let actual_output: TestData = decode(actual_output).unwrap(); + + assert_eq!(expected_output, actual_output) + }) +} + +#[test] +fn empty() { + test_positions( + " ", + (0, 1), + SpanTestData { + byte_start: 0, + byte_end: 1, + line_start: 1, + column_start: 1, + line_end: 1, + column_end: 2, + }, + ) +} + +#[test] +fn bom() { + test_positions( + "\u{feff} ", + (0, 1), + SpanTestData { + byte_start: 3, + byte_end: 4, + line_start: 1, + column_start: 1, + line_end: 1, + column_end: 2, + }, + ) +} + +#[test] +fn lf_newlines() { + test_positions( + "\nmod foo;\nmod bar;\n", + (5, 12), + SpanTestData { + byte_start: 5, + byte_end: 12, + line_start: 2, + column_start: 5, + line_end: 3, + column_end: 3, + }, + ) +} + +#[test] +fn crlf_newlines() { + test_positions( + "\r\nmod foo;\r\nmod bar;\r\n", + (5, 12), + SpanTestData { + byte_start: 6, + byte_end: 14, + line_start: 2, + column_start: 5, + line_end: 3, + column_end: 3, + }, + ) +} + +#[test] +fn crlf_newlines_with_bom() { + test_positions( + "\u{feff}\r\nmod foo;\r\nmod bar;\r\n", + (5, 12), + SpanTestData { + byte_start: 9, + byte_end: 17, + line_start: 2, + column_start: 5, + line_end: 3, + column_end: 3, + }, + ) +} + +#[test] +fn span_before_crlf() { + test_positions( + "foo\r\nbar", + (2, 3), + SpanTestData { + byte_start: 2, + byte_end: 3, + line_start: 1, + column_start: 3, + line_end: 1, + column_end: 4, + }, + ) +} + +#[test] +fn span_on_crlf() { + test_positions( + "foo\r\nbar", + (3, 4), + SpanTestData { + byte_start: 3, + byte_end: 5, + line_start: 1, + column_start: 4, + line_end: 2, + column_end: 1, + }, + ) +} + +#[test] +fn span_after_crlf() { + test_positions( + "foo\r\nbar", + (4, 5), + SpanTestData { + byte_start: 5, + byte_end: 6, + line_start: 2, + column_start: 1, + line_end: 2, + column_end: 2, + }, + ) +} diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index b4ae1e87bc..7be6e6c7e1 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -7,27 +7,21 @@ #![doc(html_root_url = "https://doc.rust-lang.org/nightly/", test(attr(deny(warnings))))] -#![cfg_attr(bootstrap, feature(bind_by_move_pattern_guards))] #![feature(box_syntax)] #![feature(const_fn)] #![feature(const_transmute)] #![feature(crate_visibility_modifier)] #![feature(label_break_value)] -#![feature(mem_take)] #![feature(nll)] -#![feature(proc_macro_diagnostic)] -#![feature(proc_macro_internals)] -#![feature(proc_macro_span)] #![feature(try_trait)] +#![feature(slice_patterns)] #![feature(unicode_internals)] #![recursion_limit="256"] -extern crate proc_macro; - pub use errors; use rustc_data_structures::sync::Lock; -use rustc_data_structures::bit_set::GrowableBitSet; +use rustc_index::bit_set::GrowableBitSet; pub use rustc_data_structures::thin_vec::ThinVec; 
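The tests above pin down why `DiagnosticSpan` now uses `original_relative_byte_pos`: spans are produced against a normalized buffer (BOM stripped, CRLF folded to LF), while JSON consumers want byte offsets into the file as it exists on disk. A small worked example of that mapping in plain Rust, using the same input as the `crlf_newlines` test above:

```rust
fn main() {
    // The file as it exists on disk, with CRLF line endings.
    let on_disk = "\r\nmod foo;\r\nmod bar;\r\n";
    // Normalization applied when the file enters the source map.
    let normalized = on_disk.replace("\r\n", "\n");

    // In the normalized buffer, `foo;\nmo` occupies bytes 5..12 ...
    assert_eq!(&normalized[5..12], "foo;\nmo");
    // ... but in the on-disk file the same text starts at byte 6 and ends at
    // byte 14, because one CRLF before the span and one inside it were each
    // folded into a single LF. That 6..14 range is exactly the
    // `byte_start`/`byte_end` pair the `crlf_newlines` test expects.
    assert_eq!(&on_disk[6..14], "foo;\r\nmo");
}
```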
use ast::AttrId; use syntax_pos::edition::Edition; @@ -35,43 +29,7 @@ use syntax_pos::edition::Edition; #[cfg(test)] mod tests; -const MACRO_ARGUMENTS: Option<&'static str> = Some("macro arguments"); - -// A variant of 'try!' that panics on an Err. This is used as a crutch on the -// way towards a non-panic!-prone parser. It should be used for fatal parsing -// errors; eventually we plan to convert all code using panictry to just use -// normal try. -#[macro_export] -macro_rules! panictry { - ($e:expr) => ({ - use std::result::Result::{Ok, Err}; - use errors::FatalError; - match $e { - Ok(e) => e, - Err(mut e) => { - e.emit(); - FatalError.raise() - } - } - }) -} - -// A variant of 'panictry!' that works on a Vec instead of a single DiagnosticBuilder. -macro_rules! panictry_buffer { - ($handler:expr, $e:expr) => ({ - use std::result::Result::{Ok, Err}; - use errors::FatalError; - match $e { - Ok(e) => e, - Err(errs) => { - for e in errs { - $handler.emit_diagnostic(&e); - } - FatalError.raise() - } - } - }) -} +pub const MACRO_ARGUMENTS: Option<&'static str> = Some("macro arguments"); #[macro_export] macro_rules! unwrap_or { @@ -92,7 +50,7 @@ pub struct Globals { impl Globals { fn new(edition: Edition) -> Globals { Globals { - // We have no idea how many attributes their will be, so just + // We have no idea how many attributes there will be, so just // initiate the vectors with 0 bits. We'll grow them as necessary. used_attrs: Lock::new(GrowableBitSet::new_empty()), known_attrs: Lock::new(GrowableBitSet::new_empty()), @@ -137,9 +95,9 @@ pub mod json; pub mod ast; pub mod attr; +pub mod expand; pub mod source_map; -#[macro_use] -pub mod config; +#[macro_use] pub mod config; pub mod entry; pub mod feature_gate; pub mod mut_visit; @@ -148,6 +106,7 @@ pub mod ptr; pub mod show_span; pub use syntax_pos::edition; pub use syntax_pos::symbol; +pub mod sess; pub mod tokenstream; pub mod visit; @@ -157,24 +116,4 @@ pub mod print { mod helpers; } -pub mod ext { - mod placeholders; - mod proc_macro_server; - - pub use syntax_pos::hygiene; - pub mod allocator; - pub mod base; - pub mod build; - pub mod expand; - pub mod proc_macro; - - pub mod tt { - pub mod transcribe; - pub mod macro_check; - pub mod macro_parser; - pub mod macro_rules; - pub mod quoted; - } -} - pub mod early_buffered_lints; diff --git a/src/libsyntax/mut_visit.rs b/src/libsyntax/mut_visit.rs index 5a37222ee5..60ee17d09b 100644 --- a/src/libsyntax/mut_visit.rs +++ b/src/libsyntax/mut_visit.rs @@ -432,9 +432,9 @@ pub fn noop_visit_ty_constraint( } pub fn noop_visit_ty(ty: &mut P, vis: &mut T) { - let Ty { id, node, span } = ty.deref_mut(); + let Ty { id, kind, span } = ty.deref_mut(); vis.visit_id(id); - match node { + match kind { TyKind::Infer | TyKind::ImplicitSelf | TyKind::Err | TyKind::Never | TyKind::CVarArgs => {} TyKind::Slice(ty) => vis.visit_ty(ty), @@ -550,7 +550,8 @@ pub fn noop_visit_local(local: &mut P, vis: &mut T) { } pub fn noop_visit_attribute(attr: &mut Attribute, vis: &mut T) { - let Attribute { id: _, style: _, path, tokens, is_sugared_doc: _, span } = attr; + let Attribute { item: AttrItem { path, tokens }, id: _, style: _, is_sugared_doc: _, span } + = attr; vis.visit_path(path); vis.visit_tts(tokens); vis.visit_span(span); @@ -576,8 +577,8 @@ pub fn noop_visit_meta_list_item(li: &mut NestedMetaItem, vis: &m } pub fn noop_visit_meta_item(mi: &mut MetaItem, vis: &mut T) { - let MetaItem { path: _, node, span } = mi; - match node { + let MetaItem { path: _, kind, span } = mi; + match kind { MetaItemKind::Word 
=> {} MetaItemKind::List(mis) => visit_vec(mis, |mi| vis.visit_meta_list_item(mi)), MetaItemKind::NameValue(_s) => {} @@ -609,10 +610,8 @@ pub fn noop_visit_tt(tt: &mut TokenTree, vis: &mut T) { } pub fn noop_visit_tts(TokenStream(tts): &mut TokenStream, vis: &mut T) { - visit_opt(tts, |tts| { - let tts = Lrc::make_mut(tts); - visit_vec(tts, |(tree, _is_joint)| vis.visit_tt(tree)); - }) + let tts = Lrc::make_mut(tts); + visit_vec(tts, |(tree, _is_joint)| vis.visit_tt(tree)); } // Applies ident visitor if it's an ident; applies other visits to interpolated nodes. @@ -681,7 +680,10 @@ pub fn noop_visit_interpolated(nt: &mut token::Nonterminal, vis: token::NtIdent(ident, _is_raw) => vis.visit_ident(ident), token::NtLifetime(ident) => vis.visit_ident(ident), token::NtLiteral(expr) => vis.visit_expr(expr), - token::NtMeta(meta) => vis.visit_meta_item(meta), + token::NtMeta(AttrItem { path, tokens }) => { + vis.visit_path(path); + vis.visit_tts(tokens); + } token::NtPath(path) => vis.visit_path(path), token::NtTT(tt) => vis.visit_tt(tt), token::NtImplItem(item) => @@ -717,7 +719,7 @@ pub fn noop_visit_asyncness(asyncness: &mut IsAsync, vis: &mut T) } pub fn noop_visit_fn_decl(decl: &mut P, vis: &mut T) { - let FnDecl { inputs, output, c_variadic: _ } = decl.deref_mut(); + let FnDecl { inputs, output } = decl.deref_mut(); inputs.flat_map_in_place(|param| vis.flat_map_param(param)); match output { FunctionRetTy::Default(span) => vis.visit_span(span), @@ -921,12 +923,12 @@ pub fn noop_visit_item_kind(kind: &mut ItemKind, vis: &mut T) { pub fn noop_flat_map_trait_item(mut item: TraitItem, vis: &mut T) -> SmallVec<[TraitItem; 1]> { - let TraitItem { id, ident, attrs, generics, node, span, tokens: _ } = &mut item; + let TraitItem { id, ident, attrs, generics, kind, span, tokens: _ } = &mut item; vis.visit_id(id); vis.visit_ident(ident); visit_attrs(attrs, vis); vis.visit_generics(generics); - match node { + match kind { TraitItemKind::Const(ty, default) => { vis.visit_ty(ty); visit_opt(default, |default| vis.visit_expr(default)); @@ -951,14 +953,14 @@ pub fn noop_flat_map_trait_item(mut item: TraitItem, vis: &mut T) pub fn noop_flat_map_impl_item(mut item: ImplItem, visitor: &mut T) -> SmallVec<[ImplItem; 1]> { - let ImplItem { id, ident, vis, defaultness: _, attrs, generics, node, span, tokens: _ } = + let ImplItem { id, ident, vis, defaultness: _, attrs, generics, kind, span, tokens: _ } = &mut item; visitor.visit_id(id); visitor.visit_ident(ident); visitor.visit_vis(vis); visit_attrs(attrs, visitor); visitor.visit_generics(generics); - match node { + match kind { ImplItemKind::Const(ty, expr) => { visitor.visit_ty(ty); visitor.visit_expr(expr); @@ -994,7 +996,7 @@ pub fn noop_visit_crate(krate: &mut Crate, vis: &mut T) { id: DUMMY_NODE_ID, vis: respan(span.shrink_to_lo(), VisibilityKind::Public), span, - node: ItemKind::Mod(module), + kind: ItemKind::Mod(module), tokens: None, }); let items = vis.flat_map_item(item); @@ -1004,8 +1006,8 @@ pub fn noop_visit_crate(krate: &mut Crate, vis: &mut T) { let module = Mod { inner: span, items: vec![], inline: true }; Crate { module, attrs: vec![], span } } else if len == 1 { - let Item { attrs, span, node, .. } = items.into_iter().next().unwrap().into_inner(); - match node { + let Item { attrs, span, kind, .. 
} = items.into_iter().next().unwrap().into_inner(); + match kind { ItemKind::Mod(module) => Crate { module, attrs, span }, _ => panic!("visitor converted a module to not a module"), } @@ -1018,11 +1020,11 @@ pub fn noop_visit_crate(krate: &mut Crate, vis: &mut T) { // Mutates one item into possibly many items. pub fn noop_flat_map_item(mut item: P, visitor: &mut T) -> SmallVec<[P; 1]> { - let Item { ident, attrs, id, node, vis, span, tokens: _ } = item.deref_mut(); + let Item { ident, attrs, id, kind, vis, span, tokens: _ } = item.deref_mut(); visitor.visit_ident(ident); visit_attrs(attrs, visitor); visitor.visit_id(id); - visitor.visit_item_kind(node); + visitor.visit_item_kind(kind); visitor.visit_vis(vis); visitor.visit_span(span); @@ -1035,10 +1037,10 @@ pub fn noop_flat_map_item(mut item: P, visitor: &mut T) pub fn noop_flat_map_foreign_item(mut item: ForeignItem, visitor: &mut T) -> SmallVec<[ForeignItem; 1]> { - let ForeignItem { ident, attrs, node, id, span, vis } = &mut item; + let ForeignItem { ident, attrs, kind, id, span, vis } = &mut item; visitor.visit_ident(ident); visit_attrs(attrs, visitor); - match node { + match kind { ForeignItemKind::Fn(fdec, generics) => { visitor.visit_fn_decl(fdec); visitor.visit_generics(generics); @@ -1055,9 +1057,9 @@ pub fn noop_flat_map_foreign_item(mut item: ForeignItem, visitor: } pub fn noop_visit_pat(pat: &mut P, vis: &mut T) { - let Pat { id, node, span } = pat.deref_mut(); + let Pat { id, kind, span } = pat.deref_mut(); vis.visit_id(id); - match node { + match kind { PatKind::Wild | PatKind::Rest => {} PatKind::Ident(_binding_mode, ident, sub) => { vis.visit_ident(ident); @@ -1097,8 +1099,8 @@ pub fn noop_visit_anon_const(AnonConst { id, value }: &mut AnonCo vis.visit_expr(value); } -pub fn noop_visit_expr(Expr { node, id, span, attrs }: &mut Expr, vis: &mut T) { - match node { +pub fn noop_visit_expr(Expr { kind, id, span, attrs }: &mut Expr, vis: &mut T) { + match kind { ExprKind::Box(expr) => vis.visit_expr(expr), ExprKind::Array(exprs) => visit_exprs(exprs, vis), ExprKind::Repeat(expr, count) => { @@ -1247,19 +1249,19 @@ pub fn noop_filter_map_expr(mut e: P, vis: &mut T) -> Optio Some({ vis.visit_expr(&mut e); e }) } -pub fn noop_flat_map_stmt(Stmt { node, mut span, mut id }: Stmt, vis: &mut T) +pub fn noop_flat_map_stmt(Stmt { kind, mut span, mut id }: Stmt, vis: &mut T) -> SmallVec<[Stmt; 1]> { vis.visit_id(&mut id); vis.visit_span(&mut span); - noop_flat_map_stmt_kind(node, vis).into_iter().map(|node| { - Stmt { id, node, span } + noop_flat_map_stmt_kind(kind, vis).into_iter().map(|kind| { + Stmt { id, kind, span } }).collect() } -pub fn noop_flat_map_stmt_kind(node: StmtKind, vis: &mut T) +pub fn noop_flat_map_stmt_kind(kind: StmtKind, vis: &mut T) -> SmallVec<[StmtKind; 1]> { - match node { + match kind { StmtKind::Local(mut local) => smallvec![StmtKind::Local({ vis.visit_local(&mut local); local })], StmtKind::Item(item) => vis.flat_map_item(item).into_iter().map(StmtKind::Item).collect(), diff --git a/src/libsyntax/parse/classify.rs b/src/libsyntax/parse/classify.rs index 6ebfab3a13..4456068875 100644 --- a/src/libsyntax/parse/classify.rs +++ b/src/libsyntax/parse/classify.rs @@ -12,7 +12,7 @@ use crate::ast; /// |x| 5 /// isn't parsed as (if true {...} else {...} | x) | 5 pub fn expr_requires_semi_to_be_stmt(e: &ast::Expr) -> bool { - match e.node { + match e.kind { ast::ExprKind::If(..) | ast::ExprKind::Match(..) | ast::ExprKind::Block(..) 
| diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index 5121a9ef7b..ac79ce323b 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -176,7 +176,7 @@ fn split_block_comment_into_lines( // it appears this function is called only from pprust... that's // probably not a good thing. -pub fn gather_comments(sess: &ParseSess, path: FileName, src: String) -> Vec { +crate fn gather_comments(sess: &ParseSess, path: FileName, src: String) -> Vec { let cm = SourceMap::new(sess.source_map().path_mapping().clone()); let source_file = cm.new_source_file(path, src); let text = (*source_file.src.as_ref().unwrap()).clone(); diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index d16889a91e..48cf117f65 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -1,5 +1,5 @@ -use crate::parse::ParseSess; use crate::parse::token::{self, Token, TokenKind}; +use crate::sess::ParseSess; use crate::symbol::{sym, Symbol}; use crate::parse::unescape_error_reporting::{emit_unescape_error, push_escaped_char}; @@ -8,9 +8,7 @@ use syntax_pos::{BytePos, Pos, Span}; use rustc_lexer::Base; use rustc_lexer::unescape; -use std::borrow::Cow; use std::char; -use std::iter; use std::convert::TryInto; use rustc_data_structures::sync::Lrc; use log::debug; @@ -25,7 +23,7 @@ mod unicode_chars; #[derive(Clone, Debug)] pub struct UnmatchedBrace { pub expected_delim: token::DelimToken, - pub found_delim: token::DelimToken, + pub found_delim: Option, pub found_span: Span, pub unclosed_span: Option, pub candidate_span: Option, @@ -49,7 +47,7 @@ impl<'a> StringReader<'a> { source_file: Lrc, override_span: Option) -> Self { if source_file.src.is_none() { - sess.span_diagnostic.bug(&format!("Cannot lex source_file without source: {}", + sess.span_diagnostic.bug(&format!("cannot lex `source_file` without source: {}", source_file.name)); } @@ -181,18 +179,7 @@ impl<'a> StringReader<'a> { let string = self.str_from(start); // comments with only more "/"s are not doc comments let tok = if is_doc_comment(string) { - let mut idx = 0; - loop { - idx = match string[idx..].find('\r') { - None => break, - Some(it) => idx + it + 1 - }; - if string[idx..].chars().next() != Some('\n') { - self.err_span_(start + BytePos(idx as u32 - 1), - start + BytePos(idx as u32), - "bare CR not allowed in doc-comment"); - } - } + self.forbid_bare_cr(start, string, "bare CR not allowed in doc-comment"); token::DocComment(Symbol::intern(string)) } else { token::Comment @@ -217,15 +204,10 @@ impl<'a> StringReader<'a> { } let tok = if is_doc_comment { - let has_cr = string.contains('\r'); - let string = if has_cr { - self.translate_crlf(start, - string, - "bare CR not allowed in block doc-comment") - } else { - string.into() - }; - token::DocComment(Symbol::intern(&string[..])) + self.forbid_bare_cr(start, + string, + "bare CR not allowed in block doc-comment"); + token::DocComment(Symbol::intern(string)) } else { token::Comment }; @@ -491,49 +473,16 @@ impl<'a> StringReader<'a> { &self.src[self.src_index(start)..self.src_index(end)] } - /// Converts CRLF to LF in the given string, raising an error on bare CR. 
- fn translate_crlf<'b>(&self, start: BytePos, s: &'b str, errmsg: &'b str) -> Cow<'b, str> { - let mut chars = s.char_indices().peekable(); - while let Some((i, ch)) = chars.next() { - if ch == '\r' { - if let Some((lf_idx, '\n')) = chars.peek() { - return translate_crlf_(self, start, s, *lf_idx, chars, errmsg).into(); - } - let pos = start + BytePos(i as u32); - let end_pos = start + BytePos((i + ch.len_utf8()) as u32); - self.err_span_(pos, end_pos, errmsg); - } - } - return s.into(); - - fn translate_crlf_(rdr: &StringReader<'_>, - start: BytePos, - s: &str, - mut j: usize, - mut chars: iter::Peekable>, - errmsg: &str) - -> String { - let mut buf = String::with_capacity(s.len()); - // Skip first CR - buf.push_str(&s[.. j - 1]); - while let Some((i, ch)) = chars.next() { - if ch == '\r' { - if j < i { - buf.push_str(&s[j..i]); - } - let next = i + ch.len_utf8(); - j = next; - if chars.peek().map(|(_, ch)| *ch) != Some('\n') { - let pos = start + BytePos(i as u32); - let end_pos = start + BytePos(next as u32); - rdr.err_span_(pos, end_pos, errmsg); - } - } - } - if j < s.len() { - buf.push_str(&s[j..]); - } - buf + fn forbid_bare_cr(&self, start: BytePos, s: &str, errmsg: &str) { + let mut idx = 0; + loop { + idx = match s[idx..].find('\r') { + None => break, + Some(it) => idx + it + 1 + }; + self.err_span_(start + BytePos(idx as u32 - 1), + start + BytePos(idx as u32), + errmsg); } } diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs index e5ba7e4530..de8ac2c71e 100644 --- a/src/libsyntax/parse/lexer/tokentrees.rs +++ b/src/libsyntax/parse/lexer/tokentrees.rs @@ -1,7 +1,9 @@ +use rustc_data_structures::fx::FxHashMap; use syntax_pos::Span; +use super::{StringReader, UnmatchedBrace}; + use crate::print::pprust::token_to_string; -use crate::parse::lexer::{StringReader, UnmatchedBrace}; use crate::parse::token::{self, Token}; use crate::parse::PResult; use crate::tokenstream::{DelimSpan, IsJoint::{self, *}, TokenStream, TokenTree, TreeAndJoint}; @@ -16,6 +18,7 @@ impl<'a> StringReader<'a> { unmatched_braces: Vec::new(), matching_delim_spans: Vec::new(), last_unclosed_found_span: None, + last_delim_empty_block_spans: FxHashMap::default() }; let res = tt_reader.parse_all_token_trees(); (res, tt_reader.unmatched_braces) @@ -34,6 +37,7 @@ struct TokenTreesReader<'a> { /// Used only for error recovery when arriving to EOF with mismatched braces. matching_delim_spans: Vec<(token::DelimToken, Span, Span)>, last_unclosed_found_span: Option, + last_delim_empty_block_spans: FxHashMap } impl<'a> TokenTreesReader<'a> { @@ -76,6 +80,13 @@ impl<'a> TokenTreesReader<'a> { .struct_span_err(self.token.span, msg); for &(_, sp) in &self.open_braces { err.span_label(sp, "un-closed delimiter"); + self.unmatched_braces.push(UnmatchedBrace { + expected_delim: token::DelimToken::Brace, + found_delim: None, + found_span: self.token.span, + unclosed_span: Some(sp), + candidate_span: None, + }); } if let Some((delim, _)) = self.open_braces.last() { @@ -121,13 +132,20 @@ impl<'a> TokenTreesReader<'a> { // Correct delimiter. 
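The `translate_crlf` machinery removed above is replaced by a plain scan; here is a standalone sketch of that scan (the helper name is local to this example). CRLF pairs are already folded to LF when the file is loaded into the source map, so only genuinely bare carriage returns reach this check.

```rust
// Reports the byte position of every bare `\r` in a doc comment, mirroring
// the loop in `forbid_bare_cr` above.
fn bare_cr_positions(s: &str) -> Vec<usize> {
    let mut found = Vec::new();
    let mut idx = 0;
    loop {
        idx = match s[idx..].find('\r') {
            None => break,
            Some(it) => idx + it + 1,
        };
        found.push(idx - 1);
    }
    found
}

fn main() {
    assert!(bare_cr_positions("/// a well-formed doc comment").is_empty());
    // A stray carriage return inside a doc comment is now an error rather than
    // something to be silently rewritten.
    assert_eq!(bare_cr_positions("/// bad\rdoc"), [7]);
}
```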
token::CloseDelim(d) if d == delim => { let (open_brace, open_brace_span) = self.open_braces.pop().unwrap(); + let close_brace_span = self.token.span; + + if tts.is_empty() { + let empty_block_span = open_brace_span.to(close_brace_span); + self.last_delim_empty_block_spans.insert(delim, empty_block_span); + } + if self.open_braces.len() == 0 { // Clear up these spans to avoid suggesting them as we've found // properly matched delimiters so far for an entire block. self.matching_delim_spans.clear(); } else { self.matching_delim_spans.push( - (open_brace, open_brace_span, self.token.span), + (open_brace, open_brace_span, close_brace_span), ); } // Parse the close delimiter. @@ -159,7 +177,7 @@ impl<'a> TokenTreesReader<'a> { let (tok, _) = self.open_braces.pop().unwrap(); self.unmatched_braces.push(UnmatchedBrace { expected_delim: tok, - found_delim: other, + found_delim: Some(other), found_span: self.token.span, unclosed_span: unclosed_delimiter, candidate_span: candidate, @@ -193,13 +211,20 @@ impl<'a> TokenTreesReader<'a> { tts.into() ).into()) }, - token::CloseDelim(_) => { + token::CloseDelim(delim) => { // An unexpected closing delimiter (i.e., there is no // matching opening delimiter). let token_str = token_to_string(&self.token); let msg = format!("unexpected close delimiter: `{}`", token_str); let mut err = self.string_reader.sess.span_diagnostic .struct_span_err(self.token.span, &msg); + + if let Some(span) = self.last_delim_empty_block_spans.remove(&delim) { + err.span_label( + span, + "this block is empty, you might have not meant to close it" + ); + } err.span_label(self.token.span, "unexpected close delimiter"); Err(err) }, diff --git a/src/libsyntax/parse/literal.rs b/src/libsyntax/parse/literal.rs index 36233de3cf..7952e293a5 100644 --- a/src/libsyntax/parse/literal.rs +++ b/src/libsyntax/parse/literal.rs @@ -1,14 +1,10 @@ //! Code related to parsing literals. use crate::ast::{self, Lit, LitKind}; -use crate::parse::parser::Parser; -use crate::parse::PResult; -use crate::parse::token::{self, Token, TokenKind}; -use crate::print::pprust; +use crate::parse::token::{self, Token}; use crate::symbol::{kw, sym, Symbol}; -use crate::tokenstream::{TokenStream, TokenTree}; +use crate::tokenstream::TokenTree; -use errors::{Applicability, Handler}; use log::debug; use rustc_data_structures::sync::Lrc; use syntax_pos::Span; @@ -28,72 +24,6 @@ crate enum LitError { IntTooLarge, } -impl LitError { - fn report(&self, diag: &Handler, lit: token::Lit, span: Span) { - let token::Lit { kind, suffix, .. } = lit; - match *self { - // `NotLiteral` is not an error by itself, so we don't report - // it and give the parser opportunity to try something else. - LitError::NotLiteral => {} - // `LexerError` *is* an error, but it was already reported - // by lexer, so here we don't report it the second time. - LitError::LexerError => {} - LitError::InvalidSuffix => { - expect_no_suffix( - diag, span, &format!("{} {} literal", kind.article(), kind.descr()), suffix - ); - } - LitError::InvalidIntSuffix => { - let suf = suffix.expect("suffix error with no suffix").as_str(); - if looks_like_width_suffix(&['i', 'u'], &suf) { - // If it looks like a width, try to be helpful. 
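The new `last_delim_empty_block_spans` bookkeeping exists for inputs like the one held in the string below; the malformed program is kept as a string so the example itself compiles.

```rust
// The first `}` accidentally closes `main` while it is still empty, leaving a
// stray `}` at the end. Parsing this yields "unexpected close delimiter" on the
// final brace plus the new label on `fn main() {}`:
// "this block is empty, you might have not meant to close it".
const MALFORMED: &str = r#"
fn main() {}
    println!("hello");
}
"#;

fn main() {
    // Nothing to compute; the point is the shape of `MALFORMED` above.
    println!("{}", MALFORMED);
}
```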
- let msg = format!("invalid width `{}` for integer literal", &suf[1..]); - diag.struct_span_err(span, &msg) - .help("valid widths are 8, 16, 32, 64 and 128") - .emit(); - } else { - let msg = format!("invalid suffix `{}` for integer literal", suf); - diag.struct_span_err(span, &msg) - .span_label(span, format!("invalid suffix `{}`", suf)) - .help("the suffix must be one of the integral types (`u32`, `isize`, etc)") - .emit(); - } - } - LitError::InvalidFloatSuffix => { - let suf = suffix.expect("suffix error with no suffix").as_str(); - if looks_like_width_suffix(&['f'], &suf) { - // If it looks like a width, try to be helpful. - let msg = format!("invalid width `{}` for float literal", &suf[1..]); - diag.struct_span_err(span, &msg) - .help("valid widths are 32 and 64") - .emit(); - } else { - let msg = format!("invalid suffix `{}` for float literal", suf); - diag.struct_span_err(span, &msg) - .span_label(span, format!("invalid suffix `{}`", suf)) - .help("valid suffixes are `f32` and `f64`") - .emit(); - } - } - LitError::NonDecimalFloat(base) => { - let descr = match base { - 16 => "hexadecimal", - 8 => "octal", - 2 => "binary", - _ => unreachable!(), - }; - diag.struct_span_err(span, &format!("{} float literal is not supported", descr)) - .span_label(span, "not supported") - .emit(); - } - LitError::IntTooLarge => { - diag.struct_span_err(span, "integer literal is too large") - .emit(); - } - } - } -} - impl LitKind { /// Converts literal token into a semantic literal. fn from_lit_token(lit: token::Lit) -> Result { @@ -204,7 +134,7 @@ impl LitKind { let (kind, symbol, suffix) = match *self { LitKind::Str(symbol, ast::StrStyle::Cooked) => { // Don't re-intern unless the escaped string is different. - let s = &symbol.as_str(); + let s: &str = &symbol.as_str(); let escaped = s.escape_default().to_string(); let symbol = if escaped == *s { symbol } else { Symbol::intern(&escaped) }; (token::Str, symbol, None) @@ -254,8 +184,8 @@ impl LitKind { impl Lit { /// Converts literal token into an AST literal. - fn from_lit_token(token: token::Lit, span: Span) -> Result { - Ok(Lit { token, node: LitKind::from_lit_token(token)?, span }) + crate fn from_lit_token(token: token::Lit, span: Span) -> Result { + Ok(Lit { token, kind: LitKind::from_lit_token(token)?, span }) } /// Converts arbitrary token into an AST literal. @@ -267,7 +197,7 @@ impl Lit { lit, token::Interpolated(ref nt) => { if let token::NtExpr(expr) | token::NtLiteral(expr) = &**nt { - if let ast::ExprKind::Lit(lit) = &expr.node { + if let ast::ExprKind::Lit(lit) = &expr.kind { return Ok(lit.clone()); } } @@ -282,113 +212,20 @@ impl Lit { /// Attempts to recover an AST literal from semantic literal. /// This function is used when the original token doesn't exist (e.g. the literal is created /// by an AST-based macro) or unavailable (e.g. from HIR pretty-printing). - pub fn from_lit_kind(node: LitKind, span: Span) -> Lit { - Lit { token: node.to_lit_token(), node, span } + pub fn from_lit_kind(kind: LitKind, span: Span) -> Lit { + Lit { token: kind.to_lit_token(), kind, span } } - /// Losslessly convert an AST literal into a token stream. - crate fn tokens(&self) -> TokenStream { + /// Losslessly convert an AST literal into a token tree. 
+ crate fn token_tree(&self) -> TokenTree { let token = match self.token.kind { token::Bool => token::Ident(self.token.symbol, false), _ => token::Literal(self.token), }; - TokenTree::token(token, self.span).into() + TokenTree::token(token, self.span) } } -impl<'a> Parser<'a> { - /// Matches `lit = true | false | token_lit`. - crate fn parse_lit(&mut self) -> PResult<'a, Lit> { - let mut recovered = None; - if self.token == token::Dot { - // Attempt to recover `.4` as `0.4`. - recovered = self.look_ahead(1, |next_token| { - if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) - = next_token.kind { - if self.token.span.hi() == next_token.span.lo() { - let s = String::from("0.") + &symbol.as_str(); - let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix); - return Some(Token::new(kind, self.token.span.to(next_token.span))); - } - } - None - }); - if let Some(token) = &recovered { - self.bump(); - self.diagnostic() - .struct_span_err(token.span, "float literals must have an integer part") - .span_suggestion( - token.span, - "must have an integer part", - pprust::token_to_string(token), - Applicability::MachineApplicable, - ) - .emit(); - } - } - - let token = recovered.as_ref().unwrap_or(&self.token); - match Lit::from_token(token) { - Ok(lit) => { - self.bump(); - Ok(lit) - } - Err(LitError::NotLiteral) => { - let msg = format!("unexpected token: {}", self.this_token_descr()); - Err(self.span_fatal(token.span, &msg)) - } - Err(err) => { - let (lit, span) = (token.expect_lit(), token.span); - self.bump(); - err.report(&self.sess.span_diagnostic, lit, span); - // Pack possible quotes and prefixes from the original literal into - // the error literal's symbol so they can be pretty-printed faithfully. - let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None); - let symbol = Symbol::intern(&suffixless_lit.to_string()); - let lit = token::Lit::new(token::Err, symbol, lit.suffix); - Lit::from_lit_token(lit, span).map_err(|_| unreachable!()) - } - } - } -} - -crate fn expect_no_suffix(diag: &Handler, sp: Span, kind: &str, suffix: Option) { - if let Some(suf) = suffix { - let mut err = if kind == "a tuple index" && - [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suf) { - // #59553: warn instead of reject out of hand to allow the fix to percolate - // through the ecosystem when people fix their macros - let mut err = diag.struct_span_warn( - sp, - &format!("suffixes on {} are invalid", kind), - ); - err.note(&format!( - "`{}` is *temporarily* accepted on tuple index fields as it was \ - incorrectly accepted on stable for a few releases", - suf, - )); - err.help( - "on proc macros, you'll want to use `syn::Index::from` or \ - `proc_macro::Literal::*_unsuffixed` for code that will desugar \ - to tuple field access", - ); - err.note( - "for more context, see https://github.com/rust-lang/rust/issues/60210", - ); - err - } else { - diag.struct_span_err(sp, &format!("suffixes on {} are invalid", kind)) - }; - err.span_label(sp, format!("invalid suffix `{}`", suf)); - err.emit(); - } -} - -// Checks if `s` looks like i32 or u1234 etc. -fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool { - s.len() > 1 && s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit()) -} - fn strip_underscores(symbol: Symbol) -> Symbol { // Do not allocate a new string unless necessary. 
let s = symbol.as_str(); @@ -426,15 +263,12 @@ fn integer_lit(symbol: Symbol, suffix: Option) -> Result 1 && s.as_bytes()[0] == b'0' { - match s.as_bytes()[1] { - b'x' => base = 16, - b'o' => base = 8, - b'b' => base = 2, - _ => {} - } - } + let base = match s.as_bytes() { + [b'0', b'x', ..] => 16, + [b'0', b'o', ..] => 8, + [b'0', b'b', ..] => 2, + _ => 10, + }; let ty = match suffix { Some(suf) => match suf { diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index fa4c104312..6d8ecdf805 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -1,40 +1,33 @@ //! The main parser interface. -use crate::ast::{self, CrateConfig, NodeId}; -use crate::early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId}; -use crate::source_map::{SourceMap, FilePathMapping}; -use crate::feature_gate::UnstableFeatures; -use crate::parse::parser::Parser; -use crate::parse::parser::emit_unclosed_delims; -use crate::parse::token::TokenKind; -use crate::tokenstream::{TokenStream, TokenTree}; +use crate::ast; +use crate::parse::parser::{Parser, emit_unclosed_delims, make_unclosed_delims_error}; +use crate::parse::token::Nonterminal; +use crate::tokenstream::{self, TokenStream, TokenTree}; use crate::print::pprust; -use crate::symbol::Symbol; +use crate::sess::ParseSess; -use errors::{Applicability, FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder}; -use rustc_data_structures::fx::{FxHashSet, FxHashMap}; +use errors::{FatalError, Level, Diagnostic, DiagnosticBuilder}; #[cfg(target_arch = "x86_64")] use rustc_data_structures::static_assert_size; -use rustc_data_structures::sync::{Lrc, Lock, Once}; -use syntax_pos::{Span, SourceFile, FileName, MultiSpan}; -use syntax_pos::edition::Edition; -use syntax_pos::hygiene::ExpnId; +use rustc_data_structures::sync::Lrc; +use syntax_pos::{Span, SourceFile, FileName}; use std::borrow::Cow; -use std::path::{Path, PathBuf}; +use std::path::Path; use std::str; +use log::info; + #[cfg(test)] mod tests; #[macro_use] pub mod parser; -pub mod attr; pub mod lexer; pub mod token; crate mod classify; -crate mod diagnostics; crate mod literal; crate mod unescape_error_reporting; @@ -45,110 +38,6 @@ pub type PResult<'a, T> = Result>; #[cfg(target_arch = "x86_64")] static_assert_size!(PResult<'_, bool>, 16); -/// Collected spans during parsing for places where a certain feature was -/// used and should be feature gated accordingly in `check_crate`. -#[derive(Default)] -pub struct GatedSpans { - /// Spans collected for gating `let_chains`, e.g. `if a && let b = c {}`. - pub let_chains: Lock>, - /// Spans collected for gating `async_closure`, e.g. `async || ..`. - pub async_closure: Lock>, - /// Spans collected for gating `yield e?` expressions (`generators` gate). - pub yields: Lock>, - /// Spans collected for gating `or_patterns`, e.g. `Some(Foo | Bar)`. - pub or_patterns: Lock>, -} - -/// Info about a parsing session. -pub struct ParseSess { - pub span_diagnostic: Handler, - pub unstable_features: UnstableFeatures, - pub config: CrateConfig, - pub edition: Edition, - pub missing_fragment_specifiers: Lock>, - /// Places where raw identifiers were used. This is used for feature-gating raw identifiers. - pub raw_identifier_spans: Lock>, - /// Used to determine and report recursive module inclusions. 
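The base detection rewritten earlier in this hunk, extracted as a runnable sketch (the `base_of` helper name is local to this example, not rustc's): matching on the byte prefix with a subslice pattern replaces the old index-and-branch code, which lines up with the `#![feature(slice_patterns)]` addition to the crate root earlier in this diff.

```rust
// Determine the radix of an integer literal from its prefix, mirroring the
// new `match s.as_bytes()` above.
fn base_of(s: &str) -> u32 {
    match s.as_bytes() {
        [b'0', b'x', ..] => 16,
        [b'0', b'o', ..] => 8,
        [b'0', b'b', ..] => 2,
        _ => 10,
    }
}

fn main() {
    assert_eq!(base_of("0x1f"), 16);
    assert_eq!(base_of("0o17"), 8);
    assert_eq!(base_of("0b11"), 2);
    assert_eq!(base_of("1_000"), 10);
    assert_eq!(base_of("0"), 10); // a lone zero has no radix prefix
}
```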
- included_mod_stack: Lock>, - source_map: Lrc, - pub buffered_lints: Lock>, - /// Contains the spans of block expressions that could have been incomplete based on the - /// operation token that followed it, but that the parser cannot identify without further - /// analysis. - pub ambiguous_block_expr_parse: Lock>, - pub injected_crate_name: Once, - pub gated_spans: GatedSpans, -} - -impl ParseSess { - pub fn new(file_path_mapping: FilePathMapping) -> Self { - let cm = Lrc::new(SourceMap::new(file_path_mapping)); - let handler = Handler::with_tty_emitter( - ColorConfig::Auto, - true, - None, - Some(cm.clone()), - ); - ParseSess::with_span_handler(handler, cm) - } - - pub fn with_span_handler(handler: Handler, source_map: Lrc) -> Self { - Self { - span_diagnostic: handler, - unstable_features: UnstableFeatures::from_environment(), - config: FxHashSet::default(), - edition: ExpnId::root().expn_data().edition, - missing_fragment_specifiers: Lock::new(FxHashSet::default()), - raw_identifier_spans: Lock::new(Vec::new()), - included_mod_stack: Lock::new(vec![]), - source_map, - buffered_lints: Lock::new(vec![]), - ambiguous_block_expr_parse: Lock::new(FxHashMap::default()), - injected_crate_name: Once::new(), - gated_spans: GatedSpans::default(), - } - } - - #[inline] - pub fn source_map(&self) -> &SourceMap { - &self.source_map - } - - pub fn buffer_lint>(&self, - lint_id: BufferedEarlyLintId, - span: S, - id: NodeId, - msg: &str, - ) { - self.buffered_lints.with_lock(|buffered_lints| { - buffered_lints.push(BufferedEarlyLint{ - span: span.into(), - id, - msg: msg.into(), - lint_id, - }); - }); - } - - /// Extend an error with a suggestion to wrap an expression with parentheses to allow the - /// parser to continue parsing the following operation as part of the same expression. - pub fn expr_parentheses_needed( - &self, - err: &mut DiagnosticBuilder<'_>, - span: Span, - alt_snippet: Option, - ) { - if let Some(snippet) = self.source_map().span_to_snippet(span).ok().or(alt_snippet) { - err.span_suggestion( - span, - "parentheses are required to parse this as an expression", - format!("({})", snippet), - Applicability::MachineApplicable, - ); - } - } -} - #[derive(Clone)] pub struct Directory<'a> { pub path: Cow<'a, Path>, @@ -170,6 +59,23 @@ pub enum DirectoryOwnership { // uses a HOF to parse anything, and includes file and // `source_str`. +/// A variant of 'panictry!' that works on a Vec instead of a single DiagnosticBuilder. +macro_rules! 
panictry_buffer { + ($handler:expr, $e:expr) => ({ + use std::result::Result::{Ok, Err}; + use errors::FatalError; + match $e { + Ok(e) => e, + Err(errs) => { + for e in errs { + $handler.emit_diagnostic(&e); + } + FatalError.raise() + } + } + }) +} + pub fn parse_crate_from_file<'a>(input: &Path, sess: &'a ParseSess) -> PResult<'a, ast::Crate> { let mut parser = new_parser_from_file(sess, input); parser.parse_crate_mod() @@ -202,7 +108,7 @@ pub fn parse_stream_from_source_str( sess.source_map().new_source_file(name, source), override_span, ); - emit_unclosed_delims(&mut errors, &sess.span_diagnostic); + emit_unclosed_delims(&mut errors, &sess); stream } @@ -336,18 +242,9 @@ pub fn maybe_file_to_stream( err.buffer(&mut buffer); // Not using `emit_unclosed_delims` to use `db.buffer` for unmatched in unmatched_braces { - let mut db = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!( - "incorrect close delimiter: `{}`", - pprust::token_kind_to_string(&token::CloseDelim(unmatched.found_delim)), - )); - db.span_label(unmatched.found_span, "incorrect close delimiter"); - if let Some(sp) = unmatched.candidate_span { - db.span_label(sp, "close delimiter possibly meant for this"); + if let Some(err) = make_unclosed_delims_error(unmatched, &sess) { + err.buffer(&mut buffer); } - if let Some(sp) = unmatched.unclosed_span { - db.span_label(sp, "un-closed delimiter"); - } - db.buffer(&mut buffer); } Err(buffer) } @@ -382,26 +279,152 @@ pub fn stream_to_parser_with_base_dir<'a>( Parser::new(sess, stream, Some(base_dir), true, false, None) } -/// A sequence separator. -pub struct SeqSep { - /// The separator token. - pub sep: Option, - /// `true` if a trailing separator is allowed. - pub trailing_sep_allowed: bool, +/// Runs the given subparser `f` on the tokens of the given `attr`'s item. +pub fn parse_in_attr<'a, T>( + sess: &'a ParseSess, + attr: &ast::Attribute, + mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, +) -> PResult<'a, T> { + let mut parser = Parser::new( + sess, + attr.tokens.clone(), + None, + false, + false, + Some("attribute"), + ); + let result = f(&mut parser)?; + if parser.token != token::Eof { + parser.unexpected()?; + } + Ok(result) } -impl SeqSep { - pub fn trailing_allowed(t: TokenKind) -> SeqSep { - SeqSep { - sep: Some(t), - trailing_sep_allowed: true, - } - } +// NOTE(Centril): The following probably shouldn't be here but it acknowledges the +// fact that architecturally, we are using parsing (read on below to understand why). - pub fn none() -> SeqSep { - SeqSep { - sep: None, - trailing_sep_allowed: false, +pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> TokenStream { + // A `Nonterminal` is often a parsed AST item. At this point we now + // need to convert the parsed AST to an actual token stream, e.g. + // un-parse it basically. + // + // Unfortunately there's not really a great way to do that in a + // guaranteed lossless fashion right now. The fallback here is to just + // stringify the AST node and reparse it, but this loses all span + // information. + // + // As a result, some AST nodes are annotated with the token stream they + // came from. Here we attempt to extract these lossless token streams + // before we fall back to the stringification. 
+ let tokens = match *nt { + Nonterminal::NtItem(ref item) => { + prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span) } + Nonterminal::NtTraitItem(ref item) => { + prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span) + } + Nonterminal::NtImplItem(ref item) => { + prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span) + } + Nonterminal::NtIdent(ident, is_raw) => { + Some(tokenstream::TokenTree::token(token::Ident(ident.name, is_raw), ident.span).into()) + } + Nonterminal::NtLifetime(ident) => { + Some(tokenstream::TokenTree::token(token::Lifetime(ident.name), ident.span).into()) + } + Nonterminal::NtTT(ref tt) => { + Some(tt.clone().into()) + } + _ => None, + }; + + // FIXME(#43081): Avoid this pretty-print + reparse hack + let source = pprust::nonterminal_to_string(nt); + let filename = FileName::macro_expansion_source_code(&source); + let tokens_for_real = parse_stream_from_source_str(filename, source, sess, Some(span)); + + // During early phases of the compiler the AST could get modified + // directly (e.g., attributes added or removed) and the internal cache + // of tokens my not be invalidated or updated. Consequently if the + // "lossless" token stream disagrees with our actual stringification + // (which has historically been much more battle-tested) then we go + // with the lossy stream anyway (losing span information). + // + // Note that the comparison isn't `==` here to avoid comparing spans, + // but it *also* is a "probable" equality which is a pretty weird + // definition. We mostly want to catch actual changes to the AST + // like a `#[cfg]` being processed or some weird `macro_rules!` + // expansion. + // + // What we *don't* want to catch is the fact that a user-defined + // literal like `0xf` is stringified as `15`, causing the cached token + // stream to not be literal `==` token-wise (ignoring spans) to the + // token stream we got from stringification. + // + // Instead the "probably equal" check here is "does each token + // recursively have the same discriminant?" We basically don't look at + // the token values here and assume that such fine grained token stream + // modifications, including adding/removing typically non-semantic + // tokens such as extra braces and commas, don't happen. 
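A plain-Rust illustration (no rustc types) of the trade-off the comment above describes; the `0xf`-to-`15` stringification it mentions is why strict token equality cannot be used here.

```rust
fn main() {
    // Source whose token stream was cached on the AST node.
    let cached_source = "const N: u32 = 0xf;";
    // The pretty-print + reparse fallback loses the literal's original
    // spelling; per the comment above, `0xf` comes back as `15`.
    let reparsed_source = "const N: u32 = 15;";

    // The two programs mean the same thing ...
    assert_eq!(0xf_u32, 15);
    // ... yet their tokens are not textually equal, which is why the comparison
    // only checks that the streams have the same shape (same discriminants),
    // ignoring token values.
    assert_ne!(cached_source, reparsed_source);
}
```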
+ if let Some(tokens) = tokens { + if tokens.probably_equal_for_proc_macro(&tokens_for_real) { + return tokens + } + info!("cached tokens found, but they're not \"probably equal\", \ + going with stringified version"); } + return tokens_for_real +} + +fn prepend_attrs( + sess: &ParseSess, + attrs: &[ast::Attribute], + tokens: Option<&tokenstream::TokenStream>, + span: syntax_pos::Span +) -> Option { + let tokens = tokens?; + if attrs.len() == 0 { + return Some(tokens.clone()) + } + let mut builder = tokenstream::TokenStreamBuilder::new(); + for attr in attrs { + assert_eq!(attr.style, ast::AttrStyle::Outer, + "inner attributes should prevent cached tokens from existing"); + + let source = pprust::attribute_to_string(attr); + let macro_filename = FileName::macro_expansion_source_code(&source); + if attr.is_sugared_doc { + let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span)); + builder.push(stream); + continue + } + + // synthesize # [ $path $tokens ] manually here + let mut brackets = tokenstream::TokenStreamBuilder::new(); + + // For simple paths, push the identifier directly + if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() { + let ident = attr.path.segments[0].ident; + let token = token::Ident(ident.name, ident.as_str().starts_with("r#")); + brackets.push(tokenstream::TokenTree::token(token, ident.span)); + + // ... and for more complicated paths, fall back to a reparse hack that + // should eventually be removed. + } else { + let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span)); + brackets.push(stream); + } + + brackets.push(attr.tokens.clone()); + + // The span we list here for `#` and for `[ ... ]` are both wrong in + // that it encompasses more than each token, but it hopefully is "good + // enough" for now at least. 
+ builder.push(tokenstream::TokenTree::token(token::Pound, attr.span)); + let delim_span = tokenstream::DelimSpan::from_single(attr.span); + builder.push(tokenstream::TokenTree::Delimited( + delim_span, token::DelimToken::Bracket, brackets.build().into())); + } + builder.push(tokens.clone()); + Some(builder.build()) } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 204f9c3409..e6e9d37962 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -1,33 +1,33 @@ +pub mod attr; mod expr; mod pat; mod item; -pub use item::AliasKind; mod module; -pub use module::{ModulePath, ModulePathSuccess}; mod ty; mod path; pub use path::PathStyle; mod stmt; mod generics; +mod diagnostics; +use diagnostics::Error; use crate::ast::{ - self, DUMMY_NODE_ID, AttrStyle, Attribute, BindingMode, CrateSugar, FnDecl, Ident, - IsAsync, MacDelimiter, Mutability, Param, StrStyle, SelfKind, TyKind, Visibility, - VisibilityKind, Unsafety, + self, DUMMY_NODE_ID, AttrStyle, Attribute, CrateSugar, Ident, + IsAsync, MacDelimiter, Mutability, StrStyle, Visibility, VisibilityKind, Unsafety, }; -use crate::parse::{ParseSess, PResult, Directory, DirectoryOwnership, SeqSep, literal, token}; -use crate::parse::diagnostics::{Error, dummy_arg}; +use crate::parse::{PResult, Directory, DirectoryOwnership}; use crate::parse::lexer::UnmatchedBrace; use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; -use crate::parse::token::{Token, TokenKind, DelimToken}; +use crate::parse::token::{self, Token, TokenKind, DelimToken}; use crate::print::pprust; use crate::ptr::P; -use crate::source_map::{self, respan}; +use crate::sess::ParseSess; +use crate::source_map::respan; use crate::symbol::{kw, sym, Symbol}; use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint}; use crate::ThinVec; -use errors::{Applicability, DiagnosticId, FatalError}; +use errors::{Applicability, DiagnosticBuilder, DiagnosticId, FatalError}; use rustc_target::spec::abi::{self, Abi}; use syntax_pos::{Span, BytePos, DUMMY_SP, FileName}; use log::debug; @@ -44,14 +44,14 @@ bitflags::bitflags! { } #[derive(Clone, Copy, PartialEq, Debug)] -crate enum SemiColonMode { +enum SemiColonMode { Break, Ignore, Comma, } #[derive(Clone, Copy, PartialEq, Debug)] -crate enum BlockMode { +enum BlockMode { Break, Ignore, } @@ -86,13 +86,6 @@ macro_rules! maybe_recover_from_interpolated_ty_qpath { } } -fn maybe_append(mut lhs: Vec, mut rhs: Option>) -> Vec { - if let Some(ref mut rhs) = rhs { - lhs.append(rhs); - } - lhs -} - #[derive(Debug, Clone, Copy, PartialEq)] enum PrevTokenKind { DocComment, @@ -124,39 +117,38 @@ pub struct Parser<'a> { prev_token_kind: PrevTokenKind, restrictions: Restrictions, /// Used to determine the path to externally loaded source files. - crate directory: Directory<'a>, + pub(super) directory: Directory<'a>, /// `true` to parse sub-modules in other files. - pub recurse_into_file_modules: bool, + pub(super) recurse_into_file_modules: bool, /// Name of the root module this parser originated from. If `None`, then the /// name is not known. This does not change while the parser is descending /// into modules, and sub-parsers have new values for this name. pub root_module_name: Option, - crate expected_tokens: Vec, + expected_tokens: Vec, token_cursor: TokenCursor, desugar_doc_comments: bool, /// `true` we should configure out of line modules as we parse. 
- pub cfg_mods: bool, + cfg_mods: bool, /// This field is used to keep track of how many left angle brackets we have seen. This is /// required in order to detect extra leading left angle brackets (`<` characters) and error /// appropriately. /// /// See the comments in the `parse_path_segment` function for more details. - crate unmatched_angle_bracket_count: u32, - crate max_angle_bracket_count: u32, + unmatched_angle_bracket_count: u32, + max_angle_bracket_count: u32, /// A list of all unclosed delimiters found by the lexer. If an entry is used for error recovery /// it gets removed from here. Every entry left at the end gets emitted as an independent /// error. - crate unclosed_delims: Vec, - crate last_unexpected_token_span: Option, - crate last_type_ascription: Option<(Span, bool /* likely path typo */)>, + pub(super) unclosed_delims: Vec, + last_unexpected_token_span: Option, + pub last_type_ascription: Option<(Span, bool /* likely path typo */)>, /// If present, this `Parser` is not parsing Rust code but rather a macro call. - crate subparser_name: Option<&'static str>, + subparser_name: Option<&'static str>, } impl<'a> Drop for Parser<'a> { fn drop(&mut self) { - let diag = self.diagnostic(); - emit_unclosed_delims(&mut self.unclosed_delims, diag); + emit_unclosed_delims(&mut self.unclosed_delims, &self.sess); } } @@ -194,7 +186,7 @@ struct TokenCursorFrame { /// You can find some more example usage of this in the `collect_tokens` method /// on the parser. #[derive(Clone)] -crate enum LastToken { +enum LastToken { Collecting(Vec), Was(Option), } @@ -285,10 +277,10 @@ impl TokenCursor { token::NoDelim, &if doc_comment_style(&name.as_str()) == AttrStyle::Inner { [TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body] - .iter().cloned().collect::().into() + .iter().cloned().collect::() } else { [TokenTree::token(token::Pound, sp), body] - .iter().cloned().collect::().into() + .iter().cloned().collect::() }, ))); @@ -297,7 +289,7 @@ impl TokenCursor { } #[derive(Clone, PartialEq)] -crate enum TokenType { +enum TokenType { Token(TokenKind), Keyword(Symbol), Operator, @@ -309,7 +301,7 @@ crate enum TokenType { } impl TokenType { - crate fn to_string(&self) -> String { + fn to_string(&self) -> String { match *self { TokenType::Token(ref t) => format!("`{}`", pprust::token_kind_to_string(t)), TokenType::Keyword(kw) => format!("`{}`", kw), @@ -324,11 +316,35 @@ impl TokenType { } #[derive(Copy, Clone, Debug)] -crate enum TokenExpectType { +enum TokenExpectType { Expect, NoExpect, } +/// A sequence separator. +struct SeqSep { + /// The separator token. + sep: Option, + /// `true` if a trailing separator is allowed. 
+ trailing_sep_allowed: bool, +} + +impl SeqSep { + fn trailing_allowed(t: TokenKind) -> SeqSep { + SeqSep { + sep: Some(t), + trailing_sep_allowed: true, + } + } + + fn none() -> SeqSep { + SeqSep { + sep: None, + trailing_sep_allowed: false, + } + } +} + impl<'a> Parser<'a> { pub fn new( sess: &'a ParseSess, @@ -405,7 +421,7 @@ impl<'a> Parser<'a> { pprust::token_to_string(&self.token) } - crate fn token_descr(&self) -> Option<&'static str> { + fn token_descr(&self) -> Option<&'static str> { Some(match &self.token.kind { _ if self.token.is_special_ident() => "reserved identifier", _ if self.token.is_used_keyword() => "keyword", @@ -415,7 +431,7 @@ impl<'a> Parser<'a> { }) } - crate fn this_token_descr(&self) -> String { + pub(super) fn this_token_descr(&self) -> String { if let Some(prefix) = self.token_descr() { format!("{} `{}`", prefix, self.this_token_to_string()) } else { @@ -426,7 +442,9 @@ impl<'a> Parser<'a> { crate fn unexpected(&mut self) -> PResult<'a, T> { match self.expect_one_of(&[], &[]) { Err(e) => Err(e), - Ok(_) => unreachable!(), + // We can get `Ok(true)` from `recover_closing_delimiter` + // which is called in `expected_one_of_not_found`. + Ok(_) => FatalError.raise(), } } @@ -465,7 +483,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_ident(&mut self) -> PResult<'a, ast::Ident> { + fn parse_ident(&mut self) -> PResult<'a, ast::Ident> { self.parse_ident_common(true) } @@ -498,7 +516,7 @@ impl<'a> Parser<'a> { /// /// This method will automatically add `tok` to `expected_tokens` if `tok` is not /// encountered. - crate fn check(&mut self, tok: &TokenKind) -> bool { + fn check(&mut self, tok: &TokenKind) -> bool { let is_present = self.token == *tok; if !is_present { self.expected_tokens.push(TokenType::Token(tok.clone())); } is_present @@ -511,14 +529,16 @@ impl<'a> Parser<'a> { is_present } + /// If the next token is the given keyword, returns `true` without eating it. + /// An expectation is also added for diagnostics purposes. fn check_keyword(&mut self, kw: Symbol) -> bool { self.expected_tokens.push(TokenType::Keyword(kw)); self.token.is_keyword(kw) } - /// If the next token is the given keyword, eats it and returns - /// `true`. Otherwise, returns `false`. - pub fn eat_keyword(&mut self, kw: Symbol) -> bool { + /// If the next token is the given keyword, eats it and returns `true`. + /// Otherwise, returns `false`. An expectation is also added for diagnostics purposes. 
+ fn eat_keyword(&mut self, kw: Symbol) -> bool { if self.check_keyword(kw) { self.bump(); true @@ -547,40 +567,38 @@ impl<'a> Parser<'a> { } } - crate fn check_ident(&mut self) -> bool { - if self.token.is_ident() { + fn check_or_expected(&mut self, ok: bool, typ: TokenType) -> bool { + if ok { true } else { - self.expected_tokens.push(TokenType::Ident); + self.expected_tokens.push(typ); false } } + fn check_ident(&mut self) -> bool { + self.check_or_expected(self.token.is_ident(), TokenType::Ident) + } + fn check_path(&mut self) -> bool { - if self.token.is_path_start() { - true - } else { - self.expected_tokens.push(TokenType::Path); - false - } + self.check_or_expected(self.token.is_path_start(), TokenType::Path) } fn check_type(&mut self) -> bool { - if self.token.can_begin_type() { - true - } else { - self.expected_tokens.push(TokenType::Type); - false - } + self.check_or_expected(self.token.can_begin_type(), TokenType::Type) } fn check_const_arg(&mut self) -> bool { - if self.token.can_begin_const_arg() { - true - } else { - self.expected_tokens.push(TokenType::Const); - false - } + self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const) + } + + /// Checks to see if the next token is either `+` or `+=`. + /// Otherwise returns `false`. + fn check_plus(&mut self) -> bool { + self.check_or_expected( + self.token.is_like_plus(), + TokenType::Token(token::BinOp(token::Plus)), + ) } /// Expects and consumes a `+`. if `+=` is seen, replaces it with a `=` @@ -604,18 +622,6 @@ impl<'a> Parser<'a> { } } - /// Checks to see if the next token is either `+` or `+=`. - /// Otherwise returns `false`. - fn check_plus(&mut self) -> bool { - if self.token.is_like_plus() { - true - } - else { - self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus))); - false - } - } - /// Expects and consumes an `&`. If `&&` is seen, replaces it with a single /// `&` and continues. If an `&` is not seen, signals an error. fn expect_and(&mut self) -> PResult<'a, ()> { @@ -650,10 +656,6 @@ impl<'a> Parser<'a> { } } - fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option) { - literal::expect_no_suffix(&self.sess.span_diagnostic, sp, kind, suffix) - } - /// Attempts to consume a `<`. If `<<` is seen, replaces it with a single /// `<` and continue. If `<-` is seen, replaces it with a single `<` /// and continue. If a `<` is not seen, returns false. @@ -739,7 +741,7 @@ impl<'a> Parser<'a> { /// Parses a sequence, including the closing delimiter. The function /// `f` must consume tokens until reaching the next separator or /// closing bracket. - pub fn parse_seq_to_end( + fn parse_seq_to_end( &mut self, ket: &TokenKind, sep: SeqSep, @@ -755,7 +757,7 @@ impl<'a> Parser<'a> { /// Parses a sequence, not including the closing delimiter. The function /// `f` must consume tokens until reaching the next separator or /// closing bracket. - pub fn parse_seq_to_before_end( + fn parse_seq_to_before_end( &mut self, ket: &TokenKind, sep: SeqSep, @@ -773,7 +775,7 @@ impl<'a> Parser<'a> { }) } - crate fn parse_seq_to_before_tokens( + fn parse_seq_to_before_tokens( &mut self, kets: &[&TokenKind], sep: SeqSep, @@ -910,15 +912,15 @@ impl<'a> Parser<'a> { self.expected_tokens.clear(); } - pub fn look_ahead(&self, dist: usize, f: F) -> R where - F: FnOnce(&Token) -> R, - { + /// Look-ahead `dist` tokens of `self.token` and get access to that token there. + /// When `dist == 0` then the current token is looked at. 
+ pub fn look_ahead(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R { if dist == 0 { - return f(&self.token); + return looker(&self.token); } let frame = &self.token_cursor.frame; - f(&match frame.tree_cursor.look_ahead(dist - 1) { + looker(&match frame.tree_cursor.look_ahead(dist - 1) { Some(tree) => match tree { TokenTree::Token(token) => token, TokenTree::Delimited(dspan, delim, _) => @@ -954,109 +956,6 @@ impl<'a> Parser<'a> { } } - fn is_named_argument(&self) -> bool { - let offset = match self.token.kind { - token::Interpolated(ref nt) => match **nt { - token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon), - _ => 0, - } - token::BinOp(token::And) | token::AndAnd => 1, - _ if self.token.is_keyword(kw::Mut) => 1, - _ => 0, - }; - - self.look_ahead(offset, |t| t.is_ident()) && - self.look_ahead(offset + 1, |t| t == &token::Colon) - } - - /// Skips unexpected attributes and doc comments in this position and emits an appropriate - /// error. - /// This version of parse param doesn't necessarily require identifier names. - fn parse_param_general( - &mut self, - is_self_allowed: bool, - is_trait_item: bool, - allow_c_variadic: bool, - is_name_required: impl Fn(&token::Token) -> bool, - ) -> PResult<'a, Param> { - let lo = self.token.span; - let attrs = self.parse_outer_attributes()?; - - // Possibly parse `self`. Recover if we parsed it and it wasn't allowed here. - if let Some(mut param) = self.parse_self_param()? { - param.attrs = attrs.into(); - return if is_self_allowed { - Ok(param) - } else { - self.recover_bad_self_param(param, is_trait_item) - }; - } - - let is_name_required = is_name_required(&self.token); - let (pat, ty) = if is_name_required || self.is_named_argument() { - debug!("parse_param_general parse_pat (is_name_required:{})", is_name_required); - - let pat = self.parse_fn_param_pat()?; - if let Err(mut err) = self.expect(&token::Colon) { - if let Some(ident) = self.parameter_without_type( - &mut err, - pat, - is_name_required, - is_trait_item, - ) { - err.emit(); - return Ok(dummy_arg(ident)); - } else { - return Err(err); - } - } - - self.eat_incorrect_doc_comment_for_param_type(); - (pat, self.parse_ty_common(true, true, allow_c_variadic)?) - } else { - debug!("parse_param_general ident_to_pat"); - let parser_snapshot_before_ty = self.clone(); - self.eat_incorrect_doc_comment_for_param_type(); - let mut ty = self.parse_ty_common(true, true, allow_c_variadic); - if ty.is_ok() && self.token != token::Comma && - self.token != token::CloseDelim(token::Paren) { - // This wasn't actually a type, but a pattern looking like a type, - // so we are going to rollback and re-parse for recovery. - ty = self.unexpected(); - } - match ty { - Ok(ty) => { - let ident = Ident::new(kw::Invalid, self.prev_span); - let bm = BindingMode::ByValue(Mutability::Immutable); - let pat = self.mk_pat_ident(ty.span, bm, ident); - (pat, ty) - } - Err(mut err) => { - // If this is a C-variadic argument and we hit an error, return the - // error. - if self.token == token::DotDotDot { - return Err(err); - } - // Recover from attempting to parse the argument as a type without pattern. - err.cancel(); - mem::replace(self, parser_snapshot_before_ty); - self.recover_arg_parse()? - } - } - }; - - let span = lo.to(self.token.span); - - Ok(Param { - attrs: attrs.into(), - id: ast::DUMMY_NODE_ID, - is_placeholder: false, - pat, - span, - ty, - }) - } - /// Parses mutability (`mut` or nothing). 
fn parse_mutability(&mut self) -> Mutability { if self.eat_keyword(kw::Mut) { @@ -1066,6 +965,17 @@ impl<'a> Parser<'a> { } } + /// Possibly parses mutability (`const` or `mut`). + fn parse_const_or_mut(&mut self) -> Option { + if self.eat_keyword(kw::Mut) { + Some(Mutability::Mutable) + } else if self.eat_keyword(kw::Const) { + Some(Mutability::Immutable) + } else { + None + } + } + fn parse_field_name(&mut self) -> PResult<'a, Ident> { if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind { @@ -1100,9 +1010,10 @@ impl<'a> Parser<'a> { Ok((delim, tts.into())) } - fn parse_or_use_outer_attributes(&mut self, - already_parsed_attrs: Option>) - -> PResult<'a, ThinVec> { + fn parse_or_use_outer_attributes( + &mut self, + already_parsed_attrs: Option>, + ) -> PResult<'a, ThinVec> { if let Some(attrs) = already_parsed_attrs { Ok(attrs) } else { @@ -1110,7 +1021,7 @@ impl<'a> Parser<'a> { } } - crate fn process_potential_macro_variable(&mut self) { + pub fn process_potential_macro_variable(&mut self) { self.token = match self.token.kind { token::Dollar if self.token.span.from_expansion() && self.look_ahead(1, |t| t.is_ident()) => { @@ -1144,7 +1055,7 @@ impl<'a> Parser<'a> { } /// Parses a single token tree from the input. - crate fn parse_token_tree(&mut self) -> TokenTree { + pub fn parse_token_tree(&mut self) -> TokenTree { match self.token.kind { token::OpenDelim(..) => { let frame = mem::replace(&mut self.token_cursor.frame, @@ -1189,206 +1100,12 @@ impl<'a> Parser<'a> { /// Evaluates the closure with restrictions in place. /// /// Afters the closure is evaluated, restrictions are reset. - fn with_res(&mut self, r: Restrictions, f: F) -> T - where F: FnOnce(&mut Self) -> T - { + fn with_res(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) -> T { let old = self.restrictions; - self.restrictions = r; - let r = f(self); + self.restrictions = res; + let res = f(self); self.restrictions = old; - return r; - - } - - fn parse_fn_params(&mut self, named_params: bool, allow_c_variadic: bool) - -> PResult<'a, (Vec , bool)> { - let sp = self.token.span; - let mut c_variadic = false; - let (params, _): (Vec>, _) = self.parse_paren_comma_seq(|p| { - let do_not_enforce_named_arguments_for_c_variadic = - |token: &token::Token| -> bool { - if token == &token::DotDotDot { - false - } else { - named_params - } - }; - match p.parse_param_general( - false, - false, - allow_c_variadic, - do_not_enforce_named_arguments_for_c_variadic - ) { - Ok(param) => { - if let TyKind::CVarArgs = param.ty.node { - c_variadic = true; - if p.token != token::CloseDelim(token::Paren) { - let span = p.token.span; - p.span_err(span, - "`...` must be the last argument of a C-variadic function"); - Ok(None) - } else { - Ok(Some(param)) - } - } else { - Ok(Some(param)) - } - }, - Err(mut e) => { - e.emit(); - let lo = p.prev_span; - // Skip every token until next possible arg or end. - p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(token::Paren)]); - // Create a placeholder argument for proper arg count (issue #34264). - let span = lo.to(p.prev_span); - Ok(Some(dummy_arg(Ident::new(kw::Invalid, span)))) - } - } - })?; - - let params: Vec<_> = params.into_iter().filter_map(|x| x).collect(); - - if c_variadic && params.len() <= 1 { - self.span_err(sp, - "C-variadic function must be declared with at least one named argument"); - } - - Ok((params, c_variadic)) - } - - /// Returns the parsed optional self parameter and whether a self shortcut was used. 
- /// - /// See `parse_self_param_with_attrs` to collect attributes. - fn parse_self_param(&mut self) -> PResult<'a, Option> { - let expect_ident = |this: &mut Self| match this.token.kind { - // Preserve hygienic context. - token::Ident(name, _) => - { let span = this.token.span; this.bump(); Ident::new(name, span) } - _ => unreachable!() - }; - let isolated_self = |this: &mut Self, n| { - this.look_ahead(n, |t| t.is_keyword(kw::SelfLower)) && - this.look_ahead(n + 1, |t| t != &token::ModSep) - }; - - // Parse optional `self` parameter of a method. - // Only a limited set of initial token sequences is considered `self` parameters; anything - // else is parsed as a normal function parameter list, so some lookahead is required. - let eself_lo = self.token.span; - let (eself, eself_ident, eself_hi) = match self.token.kind { - token::BinOp(token::And) => { - // `&self` - // `&mut self` - // `&'lt self` - // `&'lt mut self` - // `¬_self` - (if isolated_self(self, 1) { - self.bump(); - SelfKind::Region(None, Mutability::Immutable) - } else if self.is_keyword_ahead(1, &[kw::Mut]) && - isolated_self(self, 2) { - self.bump(); - self.bump(); - SelfKind::Region(None, Mutability::Mutable) - } else if self.look_ahead(1, |t| t.is_lifetime()) && - isolated_self(self, 2) { - self.bump(); - let lt = self.expect_lifetime(); - SelfKind::Region(Some(lt), Mutability::Immutable) - } else if self.look_ahead(1, |t| t.is_lifetime()) && - self.is_keyword_ahead(2, &[kw::Mut]) && - isolated_self(self, 3) { - self.bump(); - let lt = self.expect_lifetime(); - self.bump(); - SelfKind::Region(Some(lt), Mutability::Mutable) - } else { - return Ok(None); - }, expect_ident(self), self.prev_span) - } - token::BinOp(token::Star) => { - // `*self` - // `*const self` - // `*mut self` - // `*not_self` - // Emit special error for `self` cases. - let msg = "cannot pass `self` by raw pointer"; - (if isolated_self(self, 1) { - self.bump(); - self.struct_span_err(self.token.span, msg) - .span_label(self.token.span, msg) - .emit(); - SelfKind::Value(Mutability::Immutable) - } else if self.look_ahead(1, |t| t.is_mutability()) && - isolated_self(self, 2) { - self.bump(); - self.bump(); - self.struct_span_err(self.token.span, msg) - .span_label(self.token.span, msg) - .emit(); - SelfKind::Value(Mutability::Immutable) - } else { - return Ok(None); - }, expect_ident(self), self.prev_span) - } - token::Ident(..) => { - if isolated_self(self, 0) { - // `self` - // `self: TYPE` - let eself_ident = expect_ident(self); - let eself_hi = self.prev_span; - (if self.eat(&token::Colon) { - let ty = self.parse_ty()?; - SelfKind::Explicit(ty, Mutability::Immutable) - } else { - SelfKind::Value(Mutability::Immutable) - }, eself_ident, eself_hi) - } else if self.token.is_keyword(kw::Mut) && - isolated_self(self, 1) { - // `mut self` - // `mut self: TYPE` - self.bump(); - let eself_ident = expect_ident(self); - let eself_hi = self.prev_span; - (if self.eat(&token::Colon) { - let ty = self.parse_ty()?; - SelfKind::Explicit(ty, Mutability::Mutable) - } else { - SelfKind::Value(Mutability::Mutable) - }, eself_ident, eself_hi) - } else { - return Ok(None); - } - } - _ => return Ok(None), - }; - - let eself = source_map::respan(eself_lo.to(eself_hi), eself); - Ok(Some(Param::from_self(ThinVec::default(), eself, eself_ident))) - } - - /// Parses the parameter list and result type of a function that may have a `self` parameter. 
- fn parse_fn_decl_with_self( - &mut self, - is_name_required: impl Copy + Fn(&token::Token) -> bool, - ) -> PResult<'a, P> { - // Parse the arguments, starting out with `self` being allowed... - let mut is_self_allowed = true; - let (mut inputs, _): (Vec<_>, _) = self.parse_paren_comma_seq(|p| { - let res = p.parse_param_general(is_self_allowed, true, false, is_name_required); - // ...but now that we've parsed the first argument, `self` is no longer allowed. - is_self_allowed = false; - res - })?; - - // Replace duplicated recovered params with `_` pattern to avoid unecessary errors. - self.deduplicate_recovered_params_names(&mut inputs); - - Ok(P(FnDecl { - inputs, - output: self.parse_ret_ty(true)?, - c_variadic: false - })) + res } fn is_crate_vis(&self) -> bool { @@ -1406,6 +1123,7 @@ impl<'a> Parser<'a> { self.expected_tokens.push(TokenType::Keyword(kw::Crate)); if self.is_crate_vis() { self.bump(); // `crate` + self.sess.gated_spans.crate_visibility_modifier.borrow_mut().push(self.prev_span); return Ok(respan(self.prev_span, VisibilityKind::Crate(CrateSugar::JustCrate))); } @@ -1422,100 +1140,118 @@ impl<'a> Parser<'a> { // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`. // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so // by the following tokens. - if self.is_keyword_ahead(1, &[kw::Crate]) && - self.look_ahead(2, |t| t != &token::ModSep) // account for `pub(crate::foo)` + if self.is_keyword_ahead(1, &[kw::Crate]) + && self.look_ahead(2, |t| t != &token::ModSep) // account for `pub(crate::foo)` { - // `pub(crate)` + // Parse `pub(crate)`. self.bump(); // `(` self.bump(); // `crate` self.expect(&token::CloseDelim(token::Paren))?; // `)` - let vis = respan( - lo.to(self.prev_span), - VisibilityKind::Crate(CrateSugar::PubCrate), - ); - return Ok(vis) + let vis = VisibilityKind::Crate(CrateSugar::PubCrate); + return Ok(respan(lo.to(self.prev_span), vis)); } else if self.is_keyword_ahead(1, &[kw::In]) { - // `pub(in path)` + // Parse `pub(in path)`. self.bump(); // `(` self.bump(); // `in` let path = self.parse_path(PathStyle::Mod)?; // `path` self.expect(&token::CloseDelim(token::Paren))?; // `)` - let vis = respan(lo.to(self.prev_span), VisibilityKind::Restricted { + let vis = VisibilityKind::Restricted { path: P(path), id: ast::DUMMY_NODE_ID, - }); - return Ok(vis) - } else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren)) && - self.is_keyword_ahead(1, &[kw::Super, kw::SelfLower]) + }; + return Ok(respan(lo.to(self.prev_span), vis)); + } else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren)) + && self.is_keyword_ahead(1, &[kw::Super, kw::SelfLower]) { - // `pub(self)` or `pub(super)` + // Parse `pub(self)` or `pub(super)`. 
self.bump(); // `(` let path = self.parse_path(PathStyle::Mod)?; // `super`/`self` self.expect(&token::CloseDelim(token::Paren))?; // `)` - let vis = respan(lo.to(self.prev_span), VisibilityKind::Restricted { + let vis = VisibilityKind::Restricted { path: P(path), id: ast::DUMMY_NODE_ID, - }); - return Ok(vis) - } else if !can_take_tuple { // Provide this diagnostic if this is not a tuple struct - // `pub(something) fn ...` or `struct X { pub(something) y: Z }` - self.bump(); // `(` - let msg = "incorrect visibility restriction"; - let suggestion = r##"some possible visibility restrictions are: -`pub(crate)`: visible only on the current crate -`pub(super)`: visible only in the current module's parent -`pub(in path::to::module)`: visible only on the specified path"##; - let path = self.parse_path(PathStyle::Mod)?; - let sp = path.span; - let help_msg = format!("make this visible only to module `{}` with `in`", path); - self.expect(&token::CloseDelim(token::Paren))?; // `)` - struct_span_err!(self.sess.span_diagnostic, sp, E0704, "{}", msg) - .help(suggestion) - .span_suggestion( - sp, - &help_msg, - format!("in {}", path), - Applicability::MachineApplicable, - ) - .emit(); // Emit diagnostic, but continue with public visibility. + }; + return Ok(respan(lo.to(self.prev_span), vis)); + } else if !can_take_tuple { // Provide this diagnostic if this is not a tuple struct. + self.recover_incorrect_vis_restriction()?; + // Emit diagnostic, but continue with public visibility. } } Ok(respan(lo, VisibilityKind::Public)) } + /// Recovery for e.g. `pub(something) fn ...` or `struct X { pub(something) y: Z }` + fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> { + self.bump(); // `(` + let path = self.parse_path(PathStyle::Mod)?; + self.expect(&token::CloseDelim(token::Paren))?; // `)` + + let msg = "incorrect visibility restriction"; + let suggestion = r##"some possible visibility restrictions are: +`pub(crate)`: visible only on the current crate +`pub(super)`: visible only in the current module's parent +`pub(in path::to::module)`: visible only on the specified path"##; + + let path_str = pprust::path_to_string(&path); + + struct_span_err!(self.sess.span_diagnostic, path.span, E0704, "{}", msg) + .help(suggestion) + .span_suggestion( + path.span, + &format!("make this visible only to module `{}` with `in`", path_str), + format!("in {}", path_str), + Applicability::MachineApplicable, + ) + .emit(); + + Ok(()) + } + + /// Parses `extern` followed by an optional ABI string, or nothing. + fn parse_extern_abi(&mut self) -> PResult<'a, Abi> { + if self.eat_keyword(kw::Extern) { + Ok(self.parse_opt_abi()?.unwrap_or(Abi::C)) + } else { + Ok(Abi::Rust) + } + } + /// Parses a string as an ABI spec on an extern type or module. Consumes /// the `extern` keyword, if one is found. 
fn parse_opt_abi(&mut self) -> PResult<'a, Option> { match self.token.kind { token::Literal(token::Lit { kind: token::Str, symbol, suffix }) | token::Literal(token::Lit { kind: token::StrRaw(..), symbol, suffix }) => { - let sp = self.token.span; - self.expect_no_suffix(sp, "an ABI spec", suffix); + self.expect_no_suffix(self.token.span, "an ABI spec", suffix); self.bump(); match abi::lookup(&symbol.as_str()) { Some(abi) => Ok(Some(abi)), None => { - let prev_span = self.prev_span; - struct_span_err!( - self.sess.span_diagnostic, - prev_span, - E0703, - "invalid ABI: found `{}`", - symbol - ) - .span_label(prev_span, "invalid ABI") - .help(&format!("valid ABIs: {}", abi::all_names().join(", "))) - .emit(); + self.error_on_invalid_abi(symbol); Ok(None) } } } - _ => Ok(None), } } + /// Emit an error where `symbol` is an invalid ABI. + fn error_on_invalid_abi(&self, symbol: Symbol) { + let prev_span = self.prev_span; + struct_span_err!( + self.sess.span_diagnostic, + prev_span, + E0703, + "invalid ABI: found `{}`", + symbol + ) + .span_label(prev_span, "invalid ABI") + .help(&format!("valid ABIs: {}", abi::all_names().join(", "))) + .emit(); + } + /// We are parsing `async fn`. If we are on Rust 2015, emit an error. fn ban_async_in_2015(&self, async_span: Span) { if async_span.rust_2015() { @@ -1529,9 +1265,10 @@ impl<'a> Parser<'a> { } } - fn collect_tokens(&mut self, f: F) -> PResult<'a, (R, TokenStream)> - where F: FnOnce(&mut Self) -> PResult<'a, R> - { + fn collect_tokens( + &mut self, + f: impl FnOnce(&mut Self) -> PResult<'a, R>, + ) -> PResult<'a, (R, TokenStream)> { // Record all tokens we parse when parsing this item. let mut tokens = Vec::new(); let prev_collecting = match self.token_cursor.frame.last_token { @@ -1552,7 +1289,7 @@ impl<'a> Parser<'a> { // This can happen due to a bad interaction of two unrelated recovery mechanisms with // mismatched delimiters *and* recovery lookahead on the likely typo `pub ident(` // (#62881). - return Ok((ret?, TokenStream::new(vec![]))); + return Ok((ret?, TokenStream::default())); } else { &mut self.token_cursor.stack[prev].last_token }; @@ -1567,7 +1304,7 @@ impl<'a> Parser<'a> { // This can happen due to a bad interaction of two unrelated recovery mechanisms // with mismatched delimiters *and* recovery lookahead on the likely typo // `pub ident(` (#62895, different but similar to the case above). 
- return Ok((ret?, TokenStream::new(vec![]))); + return Ok((ret?, TokenStream::default())); } }; @@ -1605,7 +1342,7 @@ impl<'a> Parser<'a> { *t == token::BinOp(token::Star)) } - pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option)> { + fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option)> { let ret = match self.token.kind { token::Literal(token::Lit { kind: token::Str, symbol, suffix }) => (symbol, ast::StrStyle::Cooked, suffix), @@ -1632,41 +1369,33 @@ impl<'a> Parser<'a> { } } } - - fn report_invalid_macro_expansion_item(&self) { - self.struct_span_err( - self.prev_span, - "macros that expand to items must be delimited with braces or followed by a semicolon", - ).multipart_suggestion( - "change the delimiters to curly braces", - vec![ - (self.prev_span.with_hi(self.prev_span.lo() + BytePos(1)), String::from(" {")), - (self.prev_span.with_lo(self.prev_span.hi() - BytePos(1)), '}'.to_string()), - ], - Applicability::MaybeIncorrect, - ).span_suggestion( - self.sess.source_map.next_point(self.prev_span), - "add a semicolon", - ';'.to_string(), - Applicability::MaybeIncorrect, - ).emit(); - } } -pub fn emit_unclosed_delims(unclosed_delims: &mut Vec, handler: &errors::Handler) { - for unmatched in unclosed_delims.iter() { - let mut err = handler.struct_span_err(unmatched.found_span, &format!( - "incorrect close delimiter: `{}`", - pprust::token_kind_to_string(&token::CloseDelim(unmatched.found_delim)), - )); - err.span_label(unmatched.found_span, "incorrect close delimiter"); - if let Some(sp) = unmatched.candidate_span { - err.span_label(sp, "close delimiter possibly meant for this"); - } - if let Some(sp) = unmatched.unclosed_span { - err.span_label(sp, "un-closed delimiter"); - } - err.emit(); +crate fn make_unclosed_delims_error( + unmatched: UnmatchedBrace, + sess: &ParseSess, +) -> Option> { + // `None` here means an `Eof` was found. We already emit those errors elsewhere, we add them to + // `unmatched_braces` only for error recovery in the `Parser`. + let found_delim = unmatched.found_delim?; + let mut err = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!( + "incorrect close delimiter: `{}`", + pprust::token_kind_to_string(&token::CloseDelim(found_delim)), + )); + err.span_label(unmatched.found_span, "incorrect close delimiter"); + if let Some(sp) = unmatched.candidate_span { + err.span_label(sp, "close delimiter possibly meant for this"); + } + if let Some(sp) = unmatched.unclosed_span { + err.span_label(sp, "un-closed delimiter"); + } + Some(err) +} + +pub fn emit_unclosed_delims(unclosed_delims: &mut Vec, sess: &ParseSess) { + *sess.reached_eof.borrow_mut() |= unclosed_delims.iter() + .any(|unmatched_delim| unmatched_delim.found_delim.is_none()); + for unmatched in unclosed_delims.drain(..) 
{ + make_unclosed_delims_error(unmatched, sess).map(|mut e| e.emit()); } - unclosed_delims.clear(); } diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/parser/attr.rs similarity index 82% rename from src/libsyntax/parse/attr.rs rename to src/libsyntax/parse/parser/attr.rs index cf6151d17b..188a144cac 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/parser/attr.rs @@ -1,13 +1,11 @@ +use super::{SeqSep, PResult, Parser, TokenType, PathStyle}; use crate::attr; use crate::ast; -use crate::parse::{SeqSep, PResult}; use crate::parse::token::{self, Nonterminal, DelimToken}; -use crate::parse::parser::{Parser, TokenType, PathStyle}; use crate::tokenstream::{TokenStream, TokenTree}; use crate::source_map::Span; use log::debug; -use smallvec::smallvec; #[derive(Debug)] enum InnerAttributeParsePolicy<'a> { @@ -20,7 +18,7 @@ const DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG: &str = "an inner attribute is not \ impl<'a> Parser<'a> { /// Parses attributes that appear before an item. - crate fn parse_outer_attributes(&mut self) -> PResult<'a, Vec> { + pub(super) fn parse_outer_attributes(&mut self) -> PResult<'a, Vec> { let mut attrs: Vec = Vec::new(); let mut just_parsed_doc_comment = false; loop { @@ -84,13 +82,14 @@ impl<'a> Parser<'a> { /// The same as `parse_attribute`, except it takes in an `InnerAttributeParsePolicy` /// that prescribes how to handle inner attributes. - fn parse_attribute_with_inner_parse_policy(&mut self, - inner_parse_policy: InnerAttributeParsePolicy<'_>) - -> PResult<'a, ast::Attribute> { + fn parse_attribute_with_inner_parse_policy( + &mut self, + inner_parse_policy: InnerAttributeParsePolicy<'_> + ) -> PResult<'a, ast::Attribute> { debug!("parse_attribute_with_inner_parse_policy: inner_parse_policy={:?} self.token={:?}", inner_parse_policy, self.token); - let (span, path, tokens, style) = match self.token.kind { + let (span, item, style) = match self.token.kind { token::Pound => { let lo = self.token.span; self.bump(); @@ -107,7 +106,7 @@ impl<'a> Parser<'a> { }; self.expect(&token::OpenDelim(token::Bracket))?; - let (path, tokens) = self.parse_meta_item_unrestricted()?; + let item = self.parse_attr_item()?; self.expect(&token::CloseDelim(token::Bracket))?; let hi = self.prev_span; @@ -142,7 +141,7 @@ impl<'a> Parser<'a> { } } - (attr_sp, path, tokens, style) + (attr_sp, item, style) } _ => { let token_str = self.this_token_to_string(); @@ -151,10 +150,9 @@ impl<'a> Parser<'a> { }; Ok(ast::Attribute { + item, id: attr::mk_attr_id(), style, - path, - tokens, is_sugared_doc: false, span, }) @@ -167,19 +165,19 @@ impl<'a> Parser<'a> { /// PATH `[` TOKEN_STREAM `]` /// PATH `{` TOKEN_STREAM `}` /// PATH - /// PATH `=` TOKEN_TREE + /// PATH `=` UNSUFFIXED_LIT /// The delimiters or `=` are still put into the resulting token stream. 
- pub fn parse_meta_item_unrestricted(&mut self) -> PResult<'a, (ast::Path, TokenStream)> { - let meta = match self.token.kind { + pub fn parse_attr_item(&mut self) -> PResult<'a, ast::AttrItem> { + let item = match self.token.kind { token::Interpolated(ref nt) => match **nt { - Nonterminal::NtMeta(ref meta) => Some(meta.clone()), + Nonterminal::NtMeta(ref item) => Some(item.clone()), _ => None, }, _ => None, }; - Ok(if let Some(meta) = meta { + Ok(if let Some(item) = item { self.bump(); - (meta.path, meta.node.tokens(meta.span)) + item } else { let path = self.parse_path(PathStyle::Mod)?; let tokens = if self.check(&token::OpenDelim(DelimToken::Paren)) || @@ -194,19 +192,19 @@ impl<'a> Parser<'a> { is_interpolated_expr = true; } } - let tokens = if is_interpolated_expr { + let token_tree = if is_interpolated_expr { // We need to accept arbitrary interpolated expressions to continue // supporting things like `doc = $expr` that work on stable. // Non-literal interpolated expressions are rejected after expansion. - self.parse_token_tree().into() + self.parse_token_tree() } else { - self.parse_unsuffixed_lit()?.tokens() + self.parse_unsuffixed_lit()?.token_tree() }; - TokenStream::from_streams(smallvec![eq.into(), tokens]) + TokenStream::new(vec![eq.into(), token_tree.into()]) } else { - TokenStream::empty() + TokenStream::default() }; - (path, tokens) + ast::AttrItem { path, tokens } }) } @@ -249,7 +247,7 @@ impl<'a> Parser<'a> { let lit = self.parse_lit()?; debug!("checking if {:?} is unusuffixed", lit); - if !lit.node.is_unsuffixed() { + if !lit.kind.is_unsuffixed() { let msg = "suffixed literals are not allowed in attributes"; self.diagnostic().struct_span_err(lit.span, msg) .help("instead of using a suffixed literal \ @@ -261,9 +259,30 @@ impl<'a> Parser<'a> { Ok(lit) } + /// Parses `cfg_attr(pred, attr_item_list)` where `attr_item_list` is comma-delimited. + crate fn parse_cfg_attr(&mut self) -> PResult<'a, (ast::MetaItem, Vec<(ast::AttrItem, Span)>)> { + self.expect(&token::OpenDelim(token::Paren))?; + + let cfg_predicate = self.parse_meta_item()?; + self.expect(&token::Comma)?; + + // Presumably, the majority of the time there will only be one attr. + let mut expanded_attrs = Vec::with_capacity(1); + + while !self.check(&token::CloseDelim(token::Paren)) { + let lo = self.token.span.lo(); + let item = self.parse_attr_item()?; + expanded_attrs.push((item, self.prev_span.with_lo(lo))); + self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Paren)])?; + } + + self.expect(&token::CloseDelim(token::Paren))?; + Ok((cfg_predicate, expanded_attrs)) + } + /// Matches the following grammar (per RFC 1559). /// - /// meta_item : IDENT ( '=' UNSUFFIXED_LIT | '(' meta_item_inner? ')' )? ; + /// meta_item : PATH ( '=' UNSUFFIXED_LIT | '(' meta_item_inner? ')' )? ; /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? 
; pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> { let nt_meta = match self.token.kind { @@ -274,16 +293,21 @@ impl<'a> Parser<'a> { _ => None, }; - if let Some(meta) = nt_meta { - self.bump(); - return Ok(meta); + if let Some(item) = nt_meta { + return match item.meta(item.path.span) { + Some(meta) => { + self.bump(); + Ok(meta) + } + None => self.unexpected(), + } } let lo = self.token.span; let path = self.parse_path(PathStyle::Mod)?; - let node = self.parse_meta_item_kind()?; + let kind = self.parse_meta_item_kind()?; let span = lo.to(self.prev_span); - Ok(ast::MetaItem { path, node, span }) + Ok(ast::MetaItem { path, kind, span }) } crate fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> { diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/parser/diagnostics.rs similarity index 71% rename from src/libsyntax/parse/diagnostics.rs rename to src/libsyntax/parse/parser/diagnostics.rs index 59de5f1412..fcf3b4c0aa 100644 --- a/src/libsyntax/parse/diagnostics.rs +++ b/src/libsyntax/parse/parser/diagnostics.rs @@ -1,11 +1,12 @@ +use super::{ + BlockMode, PathStyle, SemiColonMode, TokenType, TokenExpectType, + SeqSep, PResult, Parser +}; use crate::ast::{ self, Param, BinOpKind, BindingMode, BlockCheckMode, Expr, ExprKind, Ident, Item, ItemKind, - Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind, VariantData, + Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind, }; -use crate::feature_gate::{feature_err, UnstableFeatures}; -use crate::parse::{SeqSep, PResult, Parser, ParseSess}; -use crate::parse::parser::{BlockMode, PathStyle, SemiColonMode, TokenType, TokenExpectType}; -use crate::parse::token::{self, TokenKind}; +use crate::parse::token::{self, TokenKind, token_can_begin_expr}; use crate::print::pprust; use crate::ptr::P; use crate::symbol::{kw, sym}; @@ -17,15 +18,17 @@ use syntax_pos::{Span, DUMMY_SP, MultiSpan, SpanSnippetError}; use log::{debug, trace}; use std::mem; +const TURBOFISH: &'static str = "use `::<...>` instead of `<...>` to specify type arguments"; + /// Creates a placeholder argument. 
-crate fn dummy_arg(ident: Ident) -> Param { +pub(super) fn dummy_arg(ident: Ident) -> Param { let pat = P(Pat { id: ast::DUMMY_NODE_ID, - node: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), ident, None), + kind: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), ident, None), span: ident.span, }); let ty = Ty { - node: TyKind::Err, + kind: TyKind::Err, span: ident.span, id: ast::DUMMY_NODE_ID }; @@ -121,7 +124,7 @@ impl Error { } } -pub trait RecoverQPath: Sized + 'static { +pub(super) trait RecoverQPath: Sized + 'static { const PATH_STYLE: PathStyle = PathStyle::Expr; fn to_ty(&self) -> Option>; fn recovered(qself: Option, path: ast::Path) -> Self; @@ -135,7 +138,7 @@ impl RecoverQPath for Ty { fn recovered(qself: Option, path: ast::Path) -> Self { Self { span: path.span, - node: TyKind::Path(qself, path), + kind: TyKind::Path(qself, path), id: ast::DUMMY_NODE_ID, } } @@ -148,7 +151,7 @@ impl RecoverQPath for Pat { fn recovered(qself: Option, path: ast::Path) -> Self { Self { span: path.span, - node: PatKind::Path(qself, path), + kind: PatKind::Path(qself, path), id: ast::DUMMY_NODE_ID, } } @@ -161,51 +164,61 @@ impl RecoverQPath for Expr { fn recovered(qself: Option, path: ast::Path) -> Self { Self { span: path.span, - node: ExprKind::Path(qself, path), + kind: ExprKind::Path(qself, path), attrs: ThinVec::new(), id: ast::DUMMY_NODE_ID, } } } +/// Control whether the closing delimiter should be consumed when calling `Parser::consume_block`. +crate enum ConsumeClosingDelim { + Yes, + No, +} + impl<'a> Parser<'a> { pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> { self.span_fatal(self.token.span, m) } - pub fn span_fatal>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { + crate fn span_fatal>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { self.sess.span_diagnostic.struct_span_fatal(sp, m) } - pub fn span_fatal_err>(&self, sp: S, err: Error) -> DiagnosticBuilder<'a> { + pub(super) fn span_fatal_err>( + &self, + sp: S, + err: Error, + ) -> DiagnosticBuilder<'a> { err.span_err(sp, self.diagnostic()) } - pub fn bug(&self, m: &str) -> ! { + pub(super) fn bug(&self, m: &str) -> ! { self.sess.span_diagnostic.span_bug(self.token.span, m) } - pub fn span_err>(&self, sp: S, m: &str) { + pub(super) fn span_err>(&self, sp: S, m: &str) { self.sess.span_diagnostic.span_err(sp, m) } - crate fn struct_span_err>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { + pub fn struct_span_err>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { self.sess.span_diagnostic.struct_span_err(sp, m) } - crate fn span_bug>(&self, sp: S, m: &str) -> ! { + pub fn span_bug>(&self, sp: S, m: &str) -> ! 
{ self.sess.span_diagnostic.span_bug(sp, m) } - crate fn diagnostic(&self) -> &'a errors::Handler { + pub(super) fn diagnostic(&self) -> &'a errors::Handler { &self.sess.span_diagnostic } - crate fn span_to_snippet(&self, span: Span) -> Result { + pub(super) fn span_to_snippet(&self, span: Span) -> Result { self.sess.source_map().span_to_snippet(span) } - crate fn expected_ident_found(&self) -> DiagnosticBuilder<'a> { + pub(super) fn expected_ident_found(&self) -> DiagnosticBuilder<'a> { let mut err = self.struct_span_err( self.token.span, &format!("expected identifier, found {}", self.this_token_descr()), @@ -236,7 +249,7 @@ impl<'a> Parser<'a> { err } - pub fn expected_one_of_not_found( + pub(super) fn expected_one_of_not_found( &mut self, edible: &[TokenKind], inedible: &[TokenKind], @@ -267,23 +280,23 @@ impl<'a> Parser<'a> { expected.sort_by_cached_key(|x| x.to_string()); expected.dedup(); let expect = tokens_to_string(&expected[..]); - let actual = self.this_token_to_string(); + let actual = self.this_token_descr(); let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 { let short_expect = if expected.len() > 6 { format!("{} possible tokens", expected.len()) } else { expect.clone() }; - (format!("expected one of {}, found `{}`", expect, actual), + (format!("expected one of {}, found {}", expect, actual), (self.sess.source_map().next_point(self.prev_span), format!("expected one of {} here", short_expect))) } else if expected.is_empty() { - (format!("unexpected token: `{}`", actual), + (format!("unexpected token: {}", actual), (self.prev_span, "unexpected token after this".to_string())) } else { - (format!("expected {}, found `{}`", expect, actual), + (format!("expected {}, found {}", expect, actual), (self.sess.source_map().next_point(self.prev_span), - format!("expected {} here", expect))) + format!("expected {}", expect))) }; self.last_unexpected_token_span = Some(self.token.span); let mut err = self.fatal(&msg_exp); @@ -319,58 +332,28 @@ impl<'a> Parser<'a> { } } - let is_semi_suggestable = expected.iter().any(|t| match t { - TokenType::Token(token::Semi) => true, // We expect a `;` here. - _ => false, - }) && ( // A `;` would be expected before the current keyword. - self.token.is_keyword(kw::Break) || - self.token.is_keyword(kw::Continue) || - self.token.is_keyword(kw::For) || - self.token.is_keyword(kw::If) || - self.token.is_keyword(kw::Let) || - self.token.is_keyword(kw::Loop) || - self.token.is_keyword(kw::Match) || - self.token.is_keyword(kw::Return) || - self.token.is_keyword(kw::While) - ); let sm = self.sess.source_map(); - match (sm.lookup_line(self.token.span.lo()), sm.lookup_line(sp.lo())) { - (Ok(ref a), Ok(ref b)) if a.line != b.line && is_semi_suggestable => { - // The spans are in different lines, expected `;` and found `let` or `return`. - // High likelihood that it is only a missing `;`. 
- err.span_suggestion_short( - label_sp, - "a semicolon may be missing here", - ";".to_string(), - Applicability::MaybeIncorrect, - ); - err.emit(); - return Ok(true); - } - (Ok(ref a), Ok(ref b)) if a.line == b.line => { - // When the spans are in the same line, it means that the only content between - // them is whitespace, point at the found token in that case: - // - // X | () => { syntax error }; - // | ^^^^^ expected one of 8 possible tokens here - // - // instead of having: - // - // X | () => { syntax error }; - // | -^^^^^ unexpected token - // | | - // | expected one of 8 possible tokens here - err.span_label(self.token.span, label_exp); - } - _ if self.prev_span == syntax_pos::DUMMY_SP => { - // Account for macro context where the previous span might not be - // available to avoid incorrect output (#54841). - err.span_label(self.token.span, "unexpected token"); - } - _ => { - err.span_label(sp, label_exp); - err.span_label(self.token.span, "unexpected token"); - } + if self.prev_span == DUMMY_SP { + // Account for macro context where the previous span might not be + // available to avoid incorrect output (#54841). + err.span_label(self.token.span, label_exp); + } else if !sm.is_multiline(self.token.span.shrink_to_hi().until(sp.shrink_to_lo())) { + // When the spans are in the same line, it means that the only content between + // them is whitespace, point at the found token in that case: + // + // X | () => { syntax error }; + // | ^^^^^ expected one of 8 possible tokens here + // + // instead of having: + // + // X | () => { syntax error }; + // | -^^^^^ unexpected token + // | | + // | expected one of 8 possible tokens here + err.span_label(self.token.span, label_exp); + } else { + err.span_label(sp, label_exp); + err.span_label(self.token.span, "unexpected token"); } self.maybe_annotate_with_ascription(&mut err, false); Err(err) @@ -386,14 +369,17 @@ impl<'a> Parser<'a> { let next_pos = sm.lookup_char_pos(self.token.span.lo()); let op_pos = sm.lookup_char_pos(sp.hi()); + let allow_unstable = self.sess.unstable_features.is_nightly_build(); + if likely_path { err.span_suggestion( sp, "maybe write a path separator here", "::".to_string(), - match self.sess.unstable_features { - UnstableFeatures::Disallow => Applicability::MachineApplicable, - _ => Applicability::MaybeIncorrect, + if allow_unstable { + Applicability::MaybeIncorrect + } else { + Applicability::MachineApplicable }, ); } else if op_pos.line != next_pos.line && maybe_expected_semicolon { @@ -403,14 +389,13 @@ impl<'a> Parser<'a> { ";".to_string(), Applicability::MaybeIncorrect, ); - } else if let UnstableFeatures::Disallow = self.sess.unstable_features { - err.span_label(sp, "tried to parse a type due to this"); - } else { + } else if allow_unstable { err.span_label(sp, "tried to parse a type due to this type ascription"); - } - if let UnstableFeatures::Disallow = self.sess.unstable_features { - // Give extra information about type ascription only if it's a nightly compiler. } else { + err.span_label(sp, "tried to parse a type due to this"); + } + if allow_unstable { + // Give extra information about type ascription only if it's a nightly compiler. err.note("`#![feature(type_ascription)]` lets you annotate an expression with a \ type: `: `"); err.note("for more information, see \ @@ -421,7 +406,7 @@ impl<'a> Parser<'a> { /// Eats and discards tokens until one of `kets` is encountered. Respects token trees, /// passes through any errors encountered. Used for error recovery. 
- crate fn eat_to_tokens(&mut self, kets: &[&TokenKind]) { + pub(super) fn eat_to_tokens(&mut self, kets: &[&TokenKind]) { if let Err(ref mut err) = self.parse_seq_to_before_tokens( kets, SeqSep::none(), @@ -439,7 +424,7 @@ impl<'a> Parser<'a> { /// let _ = vec![1, 2, 3].into_iter().collect::>>>(); /// ^^ help: remove extra angle brackets /// ``` - crate fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: TokenKind) { + pub(super) fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: TokenKind) { // This function is intended to be invoked after parsing a path segment where there are two // cases: // @@ -543,38 +528,157 @@ impl<'a> Parser<'a> { } /// Produces an error if comparison operators are chained (RFC #558). - /// We only need to check the LHS, not the RHS, because all comparison ops - /// have same precedence and are left-associative. - crate fn check_no_chained_comparison(&self, lhs: &Expr, outer_op: &AssocOp) -> PResult<'a, ()> { - debug_assert!(outer_op.is_comparison(), - "check_no_chained_comparison: {:?} is not comparison", - outer_op); - match lhs.node { + /// We only need to check the LHS, not the RHS, because all comparison ops have same + /// precedence (see `fn precedence`) and are left-associative (see `fn fixity`). + /// + /// This can also be hit if someone incorrectly writes `foo()` when they should have used + /// the turbofish (`foo::()`) syntax. We attempt some heuristic recovery if that is the + /// case. + /// + /// Keep in mind that given that `outer_op.is_comparison()` holds and comparison ops are left + /// associative we can infer that we have: + /// + /// outer_op + /// / \ + /// inner_op r2 + /// / \ + /// l1 r1 + pub(super) fn check_no_chained_comparison( + &mut self, + lhs: &Expr, + outer_op: &AssocOp, + ) -> PResult<'a, Option>> { + debug_assert!( + outer_op.is_comparison(), + "check_no_chained_comparison: {:?} is not comparison", + outer_op, + ); + + let mk_err_expr = |this: &Self, span| { + Ok(Some(this.mk_expr(span, ExprKind::Err, ThinVec::new()))) + }; + + match lhs.kind { ExprKind::Binary(op, _, _) if op.node.is_comparison() => { // Respan to include both operators. - let op_span = op.span.to(self.token.span); + let op_span = op.span.to(self.prev_span); let mut err = self.struct_span_err( op_span, "chained comparison operators require parentheses", ); + + let suggest = |err: &mut DiagnosticBuilder<'_>| { + err.span_suggestion_verbose( + op_span.shrink_to_lo(), + TURBOFISH, + "::".to_string(), + Applicability::MaybeIncorrect, + ); + }; + if op.node == BinOpKind::Lt && *outer_op == AssocOp::Less || // Include `<` to provide this recommendation *outer_op == AssocOp::Greater // even in a case like the following: { // Foo>> - err.help( - "use `::<...>` instead of `<...>` if you meant to specify type arguments"); - err.help("or use `(...)` if you meant to specify fn arguments"); - // These cases cause too many knock-down errors, bail out (#61329). - return Err(err); + if *outer_op == AssocOp::Less { + let snapshot = self.clone(); + self.bump(); + // So far we have parsed `foo(` or `foo< bar >::`, so we rewind the + // parser and bail out. + mem::replace(self, snapshot.clone()); + } + } + return if token::ModSep == self.token.kind { + // We have some certainty that this was a bad turbofish at this point. + // `foo< bar >::` + suggest(&mut err); + + let snapshot = self.clone(); + self.bump(); // `::` + + // Consume the rest of the likely `foo::new()` or return at `foo`. 
+ match self.parse_expr() { + Ok(_) => { + // 99% certain that the suggestion is correct, continue parsing. + err.emit(); + // FIXME: actually check that the two expressions in the binop are + // paths and resynthesize new fn call expression instead of using + // `ExprKind::Err` placeholder. + mk_err_expr(self, lhs.span.to(self.prev_span)) + } + Err(mut expr_err) => { + expr_err.cancel(); + // Not entirely sure now, but we bubble the error up with the + // suggestion. + mem::replace(self, snapshot); + Err(err) + } + } + } else if token::OpenDelim(token::Paren) == self.token.kind { + // We have high certainty that this was a bad turbofish at this point. + // `foo< bar >(` + suggest(&mut err); + // Consume the fn call arguments. + match self.consume_fn_args() { + Err(()) => Err(err), + Ok(()) => { + err.emit(); + // FIXME: actually check that the two expressions in the binop are + // paths and resynthesize new fn call expression instead of using + // `ExprKind::Err` placeholder. + mk_err_expr(self, lhs.span.to(self.prev_span)) + } + } + } else { + // All we know is that this is `foo < bar >` and *nothing* else. Try to + // be helpful, but don't attempt to recover. + err.help(TURBOFISH); + err.help("or use `(...)` if you meant to specify fn arguments"); + // These cases cause too many knock-down errors, bail out (#61329). + Err(err) + }; } err.emit(); } _ => {} } - Ok(()) + Ok(None) } - crate fn maybe_report_ambiguous_plus( + fn consume_fn_args(&mut self) -> Result<(), ()> { + let snapshot = self.clone(); + self.bump(); // `(` + + // Consume the fn call arguments. + let modifiers = [ + (token::OpenDelim(token::Paren), 1), + (token::CloseDelim(token::Paren), -1), + ]; + self.consume_tts(1, &modifiers[..]); + + if self.token.kind == token::Eof { + // Not entirely sure that what we consumed were fn arguments, rollback. + mem::replace(self, snapshot); + Err(()) + } else { + // 99% certain that the suggestion is correct, continue parsing. + Ok(()) + } + } + + pub(super) fn maybe_report_ambiguous_plus( &mut self, allow_plus: bool, impl_dyn_multi: bool, @@ -593,55 +697,7 @@ impl<'a> Parser<'a> { } } - crate fn maybe_report_invalid_custom_discriminants( - sess: &ParseSess, - variants: &[ast::Variant], - ) { - let has_fields = variants.iter().any(|variant| match variant.data { - VariantData::Tuple(..) | VariantData::Struct(..) => true, - VariantData::Unit(..) => false, - }); - - let discriminant_spans = variants.iter().filter(|variant| match variant.data { - VariantData::Tuple(..) | VariantData::Struct(..) => false, - VariantData::Unit(..) => true, - }) - .filter_map(|variant| variant.disr_expr.as_ref().map(|c| c.value.span)) - .collect::>(); - - if !discriminant_spans.is_empty() && has_fields { - let mut err = feature_err( - sess, - sym::arbitrary_enum_discriminant, - discriminant_spans.clone(), - crate::feature_gate::GateIssue::Language, - "custom discriminant values are not allowed in enums with tuple or struct variants", - ); - for sp in discriminant_spans { - err.span_label(sp, "disallowed custom discriminant"); - } - for variant in variants.iter() { - match &variant.data { - VariantData::Struct(..) => { - err.span_label( - variant.span, - "struct variant defined here", - ); - } - VariantData::Tuple(..) => { - err.span_label( - variant.span, - "tuple variant defined here", - ); - } - VariantData::Unit(..) 
=> {} - } - } - err.emit(); - } - } - - crate fn maybe_recover_from_bad_type_plus( + pub(super) fn maybe_recover_from_bad_type_plus( &mut self, allow_plus: bool, ty: &Ty, @@ -663,7 +719,7 @@ impl<'a> Parser<'a> { pprust::ty_to_string(ty) ); - match ty.node { + match ty.kind { TyKind::Rptr(ref lifetime, ref mut_ty) => { let sum_with_parens = pprust::to_string(|s| { s.s.word("&"); @@ -695,7 +751,7 @@ impl<'a> Parser<'a> { /// Tries to recover from associated item paths like `[T]::AssocItem` / `(T, U)::AssocItem`. /// Attempts to convert the base expression/pattern/type into a type, parses the `::AssocItem` /// tail, and combines them into a `::AssocItem` expression/pattern/type. - crate fn maybe_recover_from_bad_qpath( + pub(super) fn maybe_recover_from_bad_qpath( &mut self, base: P, allow_recovery: bool, @@ -711,7 +767,7 @@ impl<'a> Parser<'a> { /// Given an already parsed `Ty`, parses the `::AssocItem` tail and /// combines them into a `::AssocItem` expression/pattern/type. - crate fn maybe_recover_from_bad_qpath_stage_2( + pub(super) fn maybe_recover_from_bad_qpath_stage_2( &mut self, ty_span: Span, ty: P, @@ -734,7 +790,7 @@ impl<'a> Parser<'a> { // This is a best-effort recovery. path.span, "try", - format!("<{}>::{}", ty_str, path), + format!("<{}>::{}", ty_str, pprust::path_to_string(&path)), Applicability::MaybeIncorrect, ) .emit(); @@ -750,7 +806,7 @@ impl<'a> Parser<'a> { ))) } - crate fn maybe_consume_incorrect_semicolon(&mut self, items: &[P]) -> bool { + pub(super) fn maybe_consume_incorrect_semicolon(&mut self, items: &[P]) -> bool { if self.eat(&token::Semi) { let mut err = self.struct_span_err(self.prev_span, "expected item, found `;`"); err.span_suggestion_short( @@ -761,7 +817,7 @@ impl<'a> Parser<'a> { ); if !items.is_empty() { let previous_item = &items[items.len() - 1]; - let previous_item_kind_name = match previous_item.node { + let previous_item_kind_name = match previous_item.kind { // Say "braced struct" because tuple-structs and // braceless-empty-struct declarations do take a semicolon. ItemKind::Struct(..) => Some("braced struct"), @@ -786,7 +842,7 @@ impl<'a> Parser<'a> { /// Creates a `DiagnosticBuilder` for an unexpected token `t` and tries to recover if it is a /// closing delimiter. - pub fn unexpected_try_recover( + pub(super) fn unexpected_try_recover( &mut self, t: &TokenKind, ) -> PResult<'a, bool /* recovered */> { @@ -822,21 +878,65 @@ impl<'a> Parser<'a> { } } let sm = self.sess.source_map(); - match (sm.lookup_line(prev_sp.lo()), sm.lookup_line(sp.lo())) { - (Ok(ref a), Ok(ref b)) if a.line == b.line => { - // When the spans are in the same line, it means that the only content - // between them is whitespace, point only at the found token. - err.span_label(sp, label_exp); - } - _ => { - err.span_label(prev_sp, label_exp); - err.span_label(sp, "unexpected token"); - } + if !sm.is_multiline(prev_sp.until(sp)) { + // When the spans are in the same line, it means that the only content + // between them is whitespace, point only at the found token. 
+ err.span_label(sp, label_exp); + } else { + err.span_label(prev_sp, label_exp); + err.span_label(sp, "unexpected token"); } Err(err) } - crate fn parse_semi_or_incorrect_foreign_fn_body( + pub(super) fn expect_semi(&mut self) -> PResult<'a, ()> { + if self.eat(&token::Semi) { + return Ok(()); + } + let sm = self.sess.source_map(); + let msg = format!("expected `;`, found `{}`", self.this_token_descr()); + let appl = Applicability::MachineApplicable; + if self.token.span == DUMMY_SP || self.prev_span == DUMMY_SP { + // Likely inside a macro, can't provide meaninful suggestions. + return self.expect(&token::Semi).map(|_| ()); + } else if !sm.is_multiline(self.prev_span.until(self.token.span)) { + // The current token is in the same line as the prior token, not recoverable. + } else if self.look_ahead(1, |t| t == &token::CloseDelim(token::Brace) + || token_can_begin_expr(t) && t.kind != token::Colon + ) && [token::Comma, token::Colon].contains(&self.token.kind) { + // Likely typo: `,` → `;` or `:` → `;`. This is triggered if the current token is + // either `,` or `:`, and the next token could either start a new statement or is a + // block close. For example: + // + // let x = 32: + // let y = 42; + self.bump(); + let sp = self.prev_span; + self.struct_span_err(sp, &msg) + .span_suggestion(sp, "change this to `;`", ";".to_string(), appl) + .emit(); + return Ok(()) + } else if self.look_ahead(0, |t| t == &token::CloseDelim(token::Brace) || ( + token_can_begin_expr(t) + && t != &token::Semi + && t != &token::Pound // Avoid triggering with too many trailing `#` in raw string. + )) { + // Missing semicolon typo. This is triggered if the next token could either start a + // new statement or is a block close. For example: + // + // let x = 32 + // let y = 42; + let sp = self.prev_span.shrink_to_hi(); + self.struct_span_err(sp, &msg) + .span_label(self.token.span, "unexpected token") + .span_suggestion_short(sp, "add `;` here", ";".to_string(), appl) + .emit(); + return Ok(()) + } + self.expect(&token::Semi).map(|_| ()) // Error unconditionally + } + + pub(super) fn parse_semi_or_incorrect_foreign_fn_body( &mut self, ident: &Ident, extern_sp: Span, @@ -863,7 +963,7 @@ impl<'a> Parser<'a> { Err(mut err) => { err.cancel(); mem::replace(self, parser_snapshot); - self.expect(&token::Semi)?; + self.expect_semi()?; } } } else { @@ -874,7 +974,7 @@ impl<'a> Parser<'a> { /// Consumes alternative await syntaxes like `await!()`, `await `, /// `await? `, `await()`, and `await { }`. - crate fn parse_incorrect_await_syntax( + pub(super) fn parse_incorrect_await_syntax( &mut self, lo: Span, await_sp: Span, @@ -915,7 +1015,7 @@ impl<'a> Parser<'a> { .unwrap_or_else(|_| pprust::expr_to_string(&expr)); let suggestion = format!("{}.await{}", expr_str, if is_question { "?" } else { "" }); let sp = lo.to(hi); - let app = match expr.node { + let app = match expr.kind { ExprKind::Try(_) => Applicability::MaybeIncorrect, // `await ?` _ => Applicability::MachineApplicable, }; @@ -926,7 +1026,7 @@ impl<'a> Parser<'a> { } /// If encountering `future.await()`, consumes and emits an error. - crate fn recover_from_await_method_call(&mut self) { + pub(super) fn recover_from_await_method_call(&mut self) { if self.token == token::OpenDelim(token::Paren) && self.look_ahead(1, |t| t == &token::CloseDelim(token::Paren)) { @@ -949,7 +1049,7 @@ impl<'a> Parser<'a> { /// and suggest writing `for $pat in $expr` instead. /// /// This should be called before parsing the `$block`. 
- crate fn recover_parens_around_for_head( + pub(super) fn recover_parens_around_for_head( &mut self, pat: P, expr: &Expr, @@ -978,7 +1078,7 @@ impl<'a> Parser<'a> { .emit(); // Unwrap `(pat)` into `pat` to avoid the `unused_parens` lint. - pat.and_then(|pat| match pat.node { + pat.and_then(|pat| match pat.kind { PatKind::Paren(pat) => pat, _ => P(pat), }) @@ -987,7 +1087,7 @@ impl<'a> Parser<'a> { } } - crate fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool { + pub(super) fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool { self.token.is_ident() && if let ast::ExprKind::Path(..) = node { true } else { false } && !self.token.is_reserved_ident() && // v `foo:bar(baz)` @@ -1001,7 +1101,7 @@ impl<'a> Parser<'a> { self.look_ahead(2, |t| t == &token::Lt)) // `foo:bar::` } - crate fn recover_seq_parse_error( + pub(super) fn recover_seq_parse_error( &mut self, delim: token::DelimToken, lo: Span, @@ -1011,14 +1111,14 @@ impl<'a> Parser<'a> { Ok(x) => x, Err(mut err) => { err.emit(); - // Recover from parse error. - self.consume_block(delim); + // Recover from parse error, callers expect the closing delim to be consumed. + self.consume_block(delim, ConsumeClosingDelim::Yes); self.mk_expr(lo.to(self.prev_span), ExprKind::Err, ThinVec::new()) } } } - crate fn recover_closing_delimiter( + pub(super) fn recover_closing_delimiter( &mut self, tokens: &[TokenKind], mut err: DiagnosticBuilder<'a>, @@ -1041,6 +1141,11 @@ impl<'a> Parser<'a> { // Don't attempt to recover from this unclosed delimiter more than once. let unmatched = self.unclosed_delims.remove(pos); let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim)); + if unmatched.found_delim.is_none() { + // We encountered `Eof`, set this fact here to avoid complaining about missing + // `fn main()` when we found place to suggest the closing brace. + *self.sess.reached_eof.borrow_mut() = true; + } // We want to suggest the inclusion of the closing delimiter where it makes // the most sense, which is immediately after the last token: @@ -1060,17 +1165,29 @@ impl<'a> Parser<'a> { delim.to_string(), Applicability::MaybeIncorrect, ); - err.emit(); - self.expected_tokens.clear(); // reduce errors - Ok(true) + if unmatched.found_delim.is_none() { + // Encountered `Eof` when lexing blocks. Do not recover here to avoid knockdown + // errors which would be emitted elsewhere in the parser and let other error + // recovery consume the rest of the file. + Err(err) + } else { + err.emit(); + self.expected_tokens.clear(); // Reduce the number of errors. + Ok(true) + } } _ => Err(err), } } /// Recovers from `pub` keyword in places where it seems _reasonable_ but isn't valid. - crate fn eat_bad_pub(&mut self) { - if self.token.is_keyword(kw::Pub) { + pub(super) fn eat_bad_pub(&mut self) { + // When `unclosed_delims` is populated, it means that the code being parsed is already + // quite malformed, which might mean that, for example, a pub struct definition could be + // parsed as being a trait item, which is invalid and this error would trigger + // unconditionally, resulting in misleading diagnostics. Because of this, we only attempt + // this nice to have recovery for code that is otherwise well formed. + if self.token.is_keyword(kw::Pub) && self.unclosed_delims.is_empty() { match self.parse_visibility(false) { Ok(vis) => { self.diagnostic() @@ -1087,7 +1204,7 @@ impl<'a> Parser<'a> { /// statement. This is something of a best-effort heuristic. /// /// We terminate when we find an unmatched `}` (without consuming it). 
- crate fn recover_stmt(&mut self) { + pub(super) fn recover_stmt(&mut self) { self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore) } @@ -1098,7 +1215,11 @@ impl<'a> Parser<'a> { /// /// If `break_on_block` is `Break`, then we will stop consuming tokens /// after finding (and consuming) a brace-delimited block. - crate fn recover_stmt_(&mut self, break_on_semi: SemiColonMode, break_on_block: BlockMode) { + pub(super) fn recover_stmt_( + &mut self, + break_on_semi: SemiColonMode, + break_on_block: BlockMode, + ) { let mut brace_depth = 0; let mut bracket_depth = 0; let mut in_block = false; @@ -1166,7 +1287,7 @@ impl<'a> Parser<'a> { } } - crate fn check_for_for_in_in_typo(&mut self, in_span: Span) { + pub(super) fn check_for_for_in_in_typo(&mut self, in_span: Span) { if self.eat_keyword(kw::In) { // a common typo: `for _ in in bar {}` self.struct_span_err(self.prev_span, "expected iterable, found keyword `in`") @@ -1180,14 +1301,14 @@ impl<'a> Parser<'a> { } } - crate fn expected_semi_or_open_brace(&mut self) -> PResult<'a, ast::TraitItem> { + pub(super) fn expected_semi_or_open_brace(&mut self) -> PResult<'a, T> { let token_str = self.this_token_descr(); let mut err = self.fatal(&format!("expected `;` or `{{`, found {}", token_str)); err.span_label(self.token.span, "expected `;` or `{`"); Err(err) } - crate fn eat_incorrect_doc_comment_for_param_type(&mut self) { + pub(super) fn eat_incorrect_doc_comment_for_param_type(&mut self) { if let token::DocComment(_) = self.token.kind { self.struct_span_err( self.token.span, @@ -1215,11 +1336,12 @@ impl<'a> Parser<'a> { } } - crate fn parameter_without_type( + pub(super) fn parameter_without_type( &mut self, err: &mut DiagnosticBuilder<'_>, pat: P, require_name: bool, + is_self_allowed: bool, is_trait_item: bool, ) -> Option { // If we find a pattern followed by an identifier, it could be an (incorrect) @@ -1237,18 +1359,31 @@ impl<'a> Parser<'a> { Applicability::HasPlaceholders, ); return Some(ident); - } else if let PatKind::Ident(_, ident, _) = pat.node { + } else if let PatKind::Ident(_, ident, _) = pat.kind { if require_name && ( is_trait_item || self.token == token::Comma || + self.token == token::Lt || self.token == token::CloseDelim(token::Paren) - ) { // `fn foo(a, b) {}` or `fn foo(usize, usize) {}` - err.span_suggestion( - pat.span, - "if this was a parameter name, give it a type", - format!("{}: TypeName", ident), - Applicability::HasPlaceholders, - ); + ) { // `fn foo(a, b) {}`, `fn foo(a, b) {}` or `fn foo(usize, usize) {}` + if is_self_allowed { + err.span_suggestion( + pat.span, + "if this is a `self` type, give it a parameter name", + format!("self: {}", ident), + Applicability::MaybeIncorrect, + ); + } + // Avoid suggesting that `fn foo(HashMap)` is fixed with a change to + // `fn foo(HashMap: TypeName)`. + if self.token != token::Lt { + err.span_suggestion( + pat.span, + "if this was a parameter name, give it a type", + format!("{}: TypeName", ident), + Applicability::HasPlaceholders, + ); + } err.span_suggestion( pat.span, "if this is a type, explicitly ignore the parameter name", @@ -1256,13 +1391,15 @@ impl<'a> Parser<'a> { Applicability::MachineApplicable, ); err.note("anonymous parameters are removed in the 2018 edition (see RFC 1685)"); - return Some(ident); + + // Don't attempt to recover by using the `X` in `X` as the parameter name. 
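            // Illustrative case: in `fn foo(Vec<u8>)` the ident parsed here is `Vec`,
            // followed by `<`, and it must not be reused as the parameter name.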
+ return if self.token == token::Lt { None } else { Some(ident) }; } } None } - crate fn recover_arg_parse(&mut self) -> PResult<'a, (P, P)> { + pub(super) fn recover_arg_parse(&mut self) -> PResult<'a, (P, P)> { let pat = self.parse_pat(Some("argument name"))?; self.expect(&token::Colon)?; let ty = self.parse_ty()?; @@ -1283,20 +1420,20 @@ impl<'a> Parser<'a> { // Pretend the pattern is `_`, to avoid duplicate errors from AST validation. let pat = P(Pat { - node: PatKind::Wild, + kind: PatKind::Wild, span: pat.span, id: ast::DUMMY_NODE_ID }); Ok((pat, ty)) } - crate fn recover_bad_self_param( + pub(super) fn recover_bad_self_param( &mut self, mut param: ast::Param, is_trait_item: bool, ) -> PResult<'a, ast::Param> { let sp = param.pat.span; - param.ty.node = TyKind::Err; + param.ty.kind = TyKind::Err; let mut err = self.struct_span_err(sp, "unexpected `self` parameter in function"); if is_trait_item { err.span_label(sp, "must be the first associated function parameter"); @@ -1308,15 +1445,26 @@ impl<'a> Parser<'a> { Ok(param) } - crate fn consume_block(&mut self, delim: token::DelimToken) { + pub(super) fn consume_block( + &mut self, + delim: token::DelimToken, + consume_close: ConsumeClosingDelim, + ) { let mut brace_depth = 0; loop { if self.eat(&token::OpenDelim(delim)) { brace_depth += 1; - } else if self.eat(&token::CloseDelim(delim)) { + } else if self.check(&token::CloseDelim(delim)) { if brace_depth == 0 { + if let ConsumeClosingDelim::Yes = consume_close { + // Some of the callers of this method expect to be able to parse the + // closing delimiter themselves, so we leave it alone. Otherwise we advance + // the parser. + self.bump(); + } return; } else { + self.bump(); brace_depth -= 1; continue; } @@ -1328,7 +1476,7 @@ impl<'a> Parser<'a> { } } - crate fn expected_expression_found(&self) -> DiagnosticBuilder<'a> { + pub(super) fn expected_expression_found(&self) -> DiagnosticBuilder<'a> { let (span, msg) = match (&self.token.kind, self.subparser_name) { (&token::Eof, Some(origin)) => { let sp = self.sess.source_map().next_point(self.token.span); @@ -1348,6 +1496,23 @@ impl<'a> Parser<'a> { err } + fn consume_tts( + &mut self, + mut acc: i64, // `i64` because malformed code can have more closing delims than opening. + // Not using `FxHashMap` due to `token::TokenKind: !Eq + !Hash`. + modifier: &[(token::TokenKind, i64)], + ) { + while acc > 0 { + if let Some((_, val)) = modifier.iter().find(|(t, _)| *t == self.token.kind) { + acc += *val; + } + if self.token.kind == token::Eof { + break; + } + self.bump(); + } + } + /// Replace duplicated recovered parameters with `_` pattern to avoid unecessary errors. /// /// This is necessary because at this point we don't know whether we parsed a function with @@ -1356,11 +1521,11 @@ impl<'a> Parser<'a> { /// the parameters are *names* (so we don't emit errors about not being able to find `b` in /// the local scope), but if we find the same name multiple times, like in `fn foo(i8, i8)`, /// we deduplicate them to not complain about duplicated parameter names. 
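A small illustration of the situation that doc comment describes (the snippet is mine, not from the patch):

    // Deliberately malformed in the 2018 edition -- both parameters are bare types:
    //     trait Foo { fn frob(i8, i8); }
    // Without deduplication, recovery would register two parameters both "named"
    // `i8`; with it, the duplicate pattern becomes `_`. One accepted spelling:
    trait Foo {
        fn frob(_: i8, _: i8);
    }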
- crate fn deduplicate_recovered_params_names(&self, fn_inputs: &mut Vec) { + pub(super) fn deduplicate_recovered_params_names(&self, fn_inputs: &mut Vec) { let mut seen_inputs = FxHashSet::default(); for input in fn_inputs.iter_mut() { let opt_ident = if let (PatKind::Ident(_, ident, _), TyKind::Err) = ( - &input.pat.node, &input.ty.node, + &input.pat.kind, &input.ty.kind, ) { Some(*ident) } else { @@ -1368,7 +1533,7 @@ impl<'a> Parser<'a> { }; if let Some(ident) = opt_ident { if seen_inputs.contains(&ident) { - input.pat.node = PatKind::Wild; + input.pat.kind = PatKind::Wild; } seen_inputs.insert(ident); } diff --git a/src/libsyntax/parse/parser/expr.rs b/src/libsyntax/parse/parser/expr.rs index d0c865a7b8..97b1092452 100644 --- a/src/libsyntax/parse/parser/expr.rs +++ b/src/libsyntax/parse/parser/expr.rs @@ -1,18 +1,18 @@ -use super::{ - Parser, PResult, Restrictions, PrevTokenKind, TokenType, PathStyle, BlockMode, SemiColonMode, - SeqSep, TokenExpectType, -}; +use super::{Parser, PResult, Restrictions, PrevTokenKind, TokenType, PathStyle, BlockMode}; +use super::{SemiColonMode, SeqSep, TokenExpectType}; use super::pat::{GateOr, PARAM_EXPECTED}; +use super::diagnostics::Error; + +use crate::parse::literal::LitError; use crate::ast::{ self, DUMMY_NODE_ID, Attribute, AttrStyle, Ident, CaptureBy, BlockCheckMode, Expr, ExprKind, RangeLimits, Label, Movability, IsAsync, Arm, Ty, TyKind, - FunctionRetTy, Param, FnDecl, BinOpKind, BinOp, UnOp, Mac, AnonConst, Field, + FunctionRetTy, Param, FnDecl, BinOpKind, BinOp, UnOp, Mac, AnonConst, Field, Lit, }; use crate::maybe_recover_from_interpolated_ty_qpath; use crate::parse::classify; -use crate::parse::token::{self, Token}; -use crate::parse::diagnostics::Error; +use crate::parse::token::{self, Token, TokenKind}; use crate::print::pprust; use crate::ptr::P; use crate::source_map::{self, Span}; @@ -20,6 +20,7 @@ use crate::symbol::{kw, sym}; use crate::util::parser::{AssocOp, Fixity, prec_let_scrutinee_needs_par}; use errors::Applicability; +use syntax_pos::Symbol; use std::mem; use rustc_data_structures::thin_vec::ThinVec; @@ -210,7 +211,7 @@ impl<'a> Parser<'a> { // it refers to. Interpolated identifiers are unwrapped early and never show up here // as `PrevTokenKind::Interpolated` so if LHS is a single identifier we always process // it as "interpolated", it doesn't change the answer for non-interpolated idents. - let lhs_span = match (self.prev_token_kind, &lhs.node) { + let lhs_span = match (self.prev_token_kind, &lhs.kind) { (PrevTokenKind::Interpolated, _) => self.prev_span, (PrevTokenKind::Ident, &ExprKind::Path(None, ref path)) if path.segments.len() == 1 => self.prev_span, @@ -238,17 +239,20 @@ impl<'a> Parser<'a> { self.bump(); if op.is_comparison() { - self.check_no_chained_comparison(&lhs, &op)?; + if let Some(expr) = self.check_no_chained_comparison(&lhs, &op)? 
{ + return Ok(expr); + } } // Special cases: if op == AssocOp::As { lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Cast)?; continue } else if op == AssocOp::Colon { - let maybe_path = self.could_ascription_be_path(&lhs.node); + let maybe_path = self.could_ascription_be_path(&lhs.kind); self.last_type_ascription = Some((self.prev_span, maybe_path)); lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Type)?; + self.sess.gated_spans.type_ascription.borrow_mut().push(lhs.span); continue } else if op == AssocOp::DotDot || op == AssocOp::DotDotEq { // If we didn’t have to handle `x..`/`x..=`, it would be pretty easy to @@ -420,7 +424,7 @@ impl<'a> Parser<'a> { self.struct_span_err(span_of_tilde, "`~` cannot be used as a unary operator") .span_suggestion_short( span_of_tilde, - "use `!` to perform bitwise negation", + "use `!` to perform bitwise not", "!".to_owned(), Applicability::MachineApplicable ) @@ -450,7 +454,9 @@ impl<'a> Parser<'a> { self.bump(); let e = self.parse_prefix_expr(None); let (span, e) = self.interpolated_or_expr_span(e)?; - (lo.to(span), ExprKind::Box(e)) + let span = lo.to(span); + self.sess.gated_spans.box_syntax.borrow_mut().push(span); + (span, ExprKind::Box(e)) } token::Ident(..) if self.token.is_ident_named(sym::not) => { // `not` is just an ordinary identifier in Rust-the-language, @@ -550,12 +556,15 @@ impl<'a> Parser<'a> { // Report non-fatal diagnostics, keep `x as usize` as an expression // in AST and continue parsing. - let msg = format!("`<` is interpreted as a start of generic \ - arguments for `{}`, not a {}", path, op_noun); + let msg = format!( + "`<` is interpreted as a start of generic arguments for `{}`, not a {}", + pprust::path_to_string(&path), + op_noun, + ); let span_after_type = parser_snapshot_after_type.token.span; let expr = mk_expr(self, P(Ty { span: path.span, - node: TyKind::Path(None, path), + kind: TyKind::Path(None, path), id: DUMMY_NODE_ID, })); @@ -614,7 +623,7 @@ impl<'a> Parser<'a> { expr.map(|mut expr| { attrs.extend::>(expr.attrs.into()); expr.attrs = attrs; - match expr.node { + match expr.kind { ExprKind::If(..) if !expr.attrs.is_empty() => { // Just point to the first attribute in there... let span = expr.attrs[0].span; @@ -1067,8 +1076,167 @@ impl<'a> Parser<'a> { self.maybe_recover_from_bad_qpath(expr, true) } + /// Matches `lit = true | false | token_lit`. + pub(super) fn parse_lit(&mut self) -> PResult<'a, Lit> { + let mut recovered = None; + if self.token == token::Dot { + // Attempt to recover `.4` as `0.4`. 
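            // For instance (illustrative): `let x = .4;` is rejected with
            // "float literals must have an integer part" and a machine-applicable
            // suggestion to write `0.4`, produced by the branch below.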
+ recovered = self.look_ahead(1, |next_token| { + if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) + = next_token.kind { + if self.token.span.hi() == next_token.span.lo() { + let s = String::from("0.") + &symbol.as_str(); + let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix); + return Some(Token::new(kind, self.token.span.to(next_token.span))); + } + } + None + }); + if let Some(token) = &recovered { + self.bump(); + self.struct_span_err(token.span, "float literals must have an integer part") + .span_suggestion( + token.span, + "must have an integer part", + pprust::token_to_string(token), + Applicability::MachineApplicable, + ) + .emit(); + } + } + + let token = recovered.as_ref().unwrap_or(&self.token); + match Lit::from_token(token) { + Ok(lit) => { + self.bump(); + Ok(lit) + } + Err(LitError::NotLiteral) => { + let msg = format!("unexpected token: {}", self.this_token_descr()); + Err(self.span_fatal(token.span, &msg)) + } + Err(err) => { + let (lit, span) = (token.expect_lit(), token.span); + self.bump(); + self.error_literal_from_token(err, lit, span); + // Pack possible quotes and prefixes from the original literal into + // the error literal's symbol so they can be pretty-printed faithfully. + let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None); + let symbol = Symbol::intern(&suffixless_lit.to_string()); + let lit = token::Lit::new(token::Err, symbol, lit.suffix); + Lit::from_lit_token(lit, span).map_err(|_| unreachable!()) + } + } + } + + fn error_literal_from_token(&self, err: LitError, lit: token::Lit, span: Span) { + // Checks if `s` looks like i32 or u1234 etc. + fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool { + s.len() > 1 + && s.starts_with(first_chars) + && s[1..].chars().all(|c| c.is_ascii_digit()) + } + + let token::Lit { kind, suffix, .. } = lit; + match err { + // `NotLiteral` is not an error by itself, so we don't report + // it and give the parser opportunity to try something else. + LitError::NotLiteral => {} + // `LexerError` *is* an error, but it was already reported + // by lexer, so here we don't report it the second time. + LitError::LexerError => {} + LitError::InvalidSuffix => { + self.expect_no_suffix( + span, + &format!("{} {} literal", kind.article(), kind.descr()), + suffix, + ); + } + LitError::InvalidIntSuffix => { + let suf = suffix.expect("suffix error with no suffix").as_str(); + if looks_like_width_suffix(&['i', 'u'], &suf) { + // If it looks like a width, try to be helpful. + let msg = format!("invalid width `{}` for integer literal", &suf[1..]); + self.struct_span_err(span, &msg) + .help("valid widths are 8, 16, 32, 64 and 128") + .emit(); + } else { + let msg = format!("invalid suffix `{}` for integer literal", suf); + self.struct_span_err(span, &msg) + .span_label(span, format!("invalid suffix `{}`", suf)) + .help("the suffix must be one of the integral types (`u32`, `isize`, etc)") + .emit(); + } + } + LitError::InvalidFloatSuffix => { + let suf = suffix.expect("suffix error with no suffix").as_str(); + if looks_like_width_suffix(&['f'], &suf) { + // If it looks like a width, try to be helpful. 
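                    // Illustrative inputs: `2.0f16` lands here and gets
                    // "valid widths are 32 and 64", while a non-width suffix such as
                    // `2.0foo` falls through to the generic `f32`/`f64` help below.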
+ let msg = format!("invalid width `{}` for float literal", &suf[1..]); + self.struct_span_err(span, &msg) + .help("valid widths are 32 and 64") + .emit(); + } else { + let msg = format!("invalid suffix `{}` for float literal", suf); + self.struct_span_err(span, &msg) + .span_label(span, format!("invalid suffix `{}`", suf)) + .help("valid suffixes are `f32` and `f64`") + .emit(); + } + } + LitError::NonDecimalFloat(base) => { + let descr = match base { + 16 => "hexadecimal", + 8 => "octal", + 2 => "binary", + _ => unreachable!(), + }; + self.struct_span_err(span, &format!("{} float literal is not supported", descr)) + .span_label(span, "not supported") + .emit(); + } + LitError::IntTooLarge => { + self.struct_span_err(span, "integer literal is too large") + .emit(); + } + } + } + + pub(super) fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option) { + if let Some(suf) = suffix { + let mut err = if kind == "a tuple index" + && [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suf) + { + // #59553: warn instead of reject out of hand to allow the fix to percolate + // through the ecosystem when people fix their macros + let mut err = self.sess.span_diagnostic.struct_span_warn( + sp, + &format!("suffixes on {} are invalid", kind), + ); + err.note(&format!( + "`{}` is *temporarily* accepted on tuple index fields as it was \ + incorrectly accepted on stable for a few releases", + suf, + )); + err.help( + "on proc macros, you'll want to use `syn::Index::from` or \ + `proc_macro::Literal::*_unsuffixed` for code that will desugar \ + to tuple field access", + ); + err.note( + "for more context, see https://github.com/rust-lang/rust/issues/60210", + ); + err + } else { + self.struct_span_err(sp, &format!("suffixes on {} are invalid", kind)) + }; + err.span_label(sp, format!("invalid suffix `{}`", suf)); + err.emit(); + } + } + /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`). - crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P> { + pub fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P> { maybe_whole_expr!(self); let minus_lo = self.token.span; @@ -1088,13 +1256,17 @@ impl<'a> Parser<'a> { } /// Parses a block or unsafe block. - crate fn parse_block_expr( + pub(super) fn parse_block_expr( &mut self, opt_label: Option

(&mut self, mut predicate: P) -> Option where @@ -3285,40 +3331,6 @@ macro_rules! iterator { Some(next_back_unchecked!(self)) } } - - #[inline] - fn try_rfold(&mut self, init: B, mut f: F) -> R where - Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try - { - // manual unrolling is needed when there are conditional exits from the loop - let mut accum = init; - unsafe { - while len!(self) >= 4 { - accum = f(accum, next_back_unchecked!(self))?; - accum = f(accum, next_back_unchecked!(self))?; - accum = f(accum, next_back_unchecked!(self))?; - accum = f(accum, next_back_unchecked!(self))?; - } - // inlining is_empty everywhere makes a huge performance difference - while !is_empty!(self) { - accum = f(accum, next_back_unchecked!(self))?; - } - } - Try::from_ok(accum) - } - - #[inline] - fn rfold(mut self, init: Acc, mut f: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, - { - // Let LLVM unroll this, rather than using the default - // impl that would force the manual unrolling above - let mut accum = init; - while let Some(x) = self.next_back() { - accum = f(accum, x); - } - accum - } } #[stable(feature = "fused", since = "1.26.0")] diff --git a/src/libcore/str/mod.rs b/src/libcore/str/mod.rs index a6ec757faf..f67012d8f2 100644 --- a/src/libcore/str/mod.rs +++ b/src/libcore/str/mod.rs @@ -176,7 +176,7 @@ Section: Creating a string /// ``` /// fn from_utf8_lossy(mut input: &[u8], mut push: F) where F: FnMut(&str) { /// loop { -/// match ::std::str::from_utf8(input) { +/// match std::str::from_utf8(input) { /// Ok(valid) => { /// push(valid); /// break @@ -184,7 +184,7 @@ Section: Creating a string /// Err(error) => { /// let (valid, after_valid) = input.split_at(error.valid_up_to()); /// unsafe { -/// push(::std::str::from_utf8_unchecked(valid)) +/// push(std::str::from_utf8_unchecked(valid)) /// } /// push("\u{FFFD}"); /// @@ -2090,7 +2090,6 @@ impl str { /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] - #[cfg_attr(bootstrap, rustc_const_unstable(feature = "const_str_len"))] pub const fn len(&self) -> usize { self.as_bytes().len() } @@ -2110,7 +2109,6 @@ impl str { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - #[cfg_attr(bootstrap, rustc_const_unstable(feature = "const_str_len"))] pub const fn is_empty(&self) -> bool { self.len() == 0 } @@ -2168,9 +2166,9 @@ impl str { /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline(always)] - #[cfg_attr(bootstrap, rustc_const_unstable(feature = "const_str_as_bytes"))] // SAFETY: const sound because we transmute two types with the same layout - #[cfg_attr(not(bootstrap), allow_internal_unstable(const_fn_union))] + #[allow(unused_attributes)] + #[allow_internal_unstable(const_fn_union)] pub const fn as_bytes(&self) -> &[u8] { #[repr(C)] union Slices<'a> { diff --git a/src/libcore/sync/atomic.rs b/src/libcore/sync/atomic.rs index c9ccef972c..73d5abf1ae 100644 --- a/src/libcore/sync/atomic.rs +++ b/src/libcore/sync/atomic.rs @@ -18,11 +18,11 @@ //! //! Each method takes an [`Ordering`] which represents the strength of //! the memory barrier for that operation. These orderings are the -//! same as [LLVM atomic orderings][1]. For more information see the [nomicon][2]. +//! same as the [C++20 atomic orderings][1]. For more information see the [nomicon][2]. //! //! [`Ordering`]: enum.Ordering.html //! -//! [1]: https://llvm.org/docs/LangRef.html#memory-model-for-concurrent-operations +//! [1]: https://en.cppreference.com/w/cpp/atomic/memory_order //! [2]: ../../../nomicon/atomics.html //! //! 
Atomic variables are safe to share between threads (they implement [`Sync`]) @@ -113,8 +113,8 @@ //! ``` #![stable(feature = "rust1", since = "1.0.0")] -#![cfg_attr(not(target_has_atomic = "8"), allow(dead_code))] -#![cfg_attr(not(target_has_atomic = "8"), allow(unused_imports))] +#![cfg_attr(not(target_has_atomic_load_store = "8"), allow(dead_code))] +#![cfg_attr(not(target_has_atomic_load_store = "8"), allow(unused_imports))] use self::Ordering::*; @@ -160,14 +160,14 @@ pub fn spin_loop_hint() { /// This type has the same in-memory representation as a [`bool`]. /// /// [`bool`]: ../../../std/primitive.bool.html -#[cfg(target_has_atomic = "8")] +#[cfg(any(bootstrap, target_has_atomic_load_store = "8"))] #[stable(feature = "rust1", since = "1.0.0")] #[repr(C, align(1))] pub struct AtomicBool { v: UnsafeCell, } -#[cfg(target_has_atomic = "8")] +#[cfg(any(bootstrap, target_has_atomic_load_store = "8"))] #[stable(feature = "rust1", since = "1.0.0")] impl Default for AtomicBool { /// Creates an `AtomicBool` initialized to `false`. @@ -177,14 +177,14 @@ impl Default for AtomicBool { } // Send is implicitly implemented for AtomicBool. -#[cfg(target_has_atomic = "8")] +#[cfg(any(bootstrap, target_has_atomic_load_store = "8"))] #[stable(feature = "rust1", since = "1.0.0")] unsafe impl Sync for AtomicBool {} /// A raw pointer type which can be safely shared between threads. /// /// This type has the same in-memory representation as a `*mut T`. -#[cfg(target_has_atomic = "ptr")] +#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))] #[stable(feature = "rust1", since = "1.0.0")] #[cfg_attr(target_pointer_width = "16", repr(C, align(2)))] #[cfg_attr(target_pointer_width = "32", repr(C, align(4)))] @@ -193,7 +193,7 @@ pub struct AtomicPtr { p: UnsafeCell<*mut T>, } -#[cfg(target_has_atomic = "ptr")] +#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))] #[stable(feature = "rust1", since = "1.0.0")] impl Default for AtomicPtr { /// Creates a null `AtomicPtr`. @@ -202,10 +202,10 @@ impl Default for AtomicPtr { } } -#[cfg(target_has_atomic = "ptr")] +#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))] #[stable(feature = "rust1", since = "1.0.0")] unsafe impl Send for AtomicPtr {} -#[cfg(target_has_atomic = "ptr")] +#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))] #[stable(feature = "rust1", since = "1.0.0")] unsafe impl Sync for AtomicPtr {} @@ -217,8 +217,8 @@ unsafe impl Sync for AtomicPtr {} /// operations synchronize other memory while additionally preserving a total order of such /// operations across all threads. /// -/// Rust's memory orderings are [the same as -/// LLVM's](https://llvm.org/docs/LangRef.html#memory-model-for-concurrent-operations). +/// Rust's memory orderings are [the same as those of +/// C++20](https://en.cppreference.com/w/cpp/atomic/memory_order). /// /// For more information see the [nomicon]. /// @@ -231,9 +231,9 @@ unsafe impl Sync for AtomicPtr {} pub enum Ordering { /// No ordering constraints, only atomic operations. /// - /// Corresponds to LLVM's [`Monotonic`] ordering. + /// Corresponds to [`memory_order_relaxed`] in C++20. 
/// - /// [`Monotonic`]: https://llvm.org/docs/Atomics.html#monotonic + /// [`memory_order_relaxed`]: https://en.cppreference.com/w/cpp/atomic/memory_order#Relaxed_ordering #[stable(feature = "rust1", since = "1.0.0")] Relaxed, /// When coupled with a store, all previous operations become ordered @@ -246,11 +246,12 @@ pub enum Ordering { /// /// This ordering is only applicable for operations that can perform a store. /// - /// Corresponds to LLVM's [`Release`] ordering. + /// Corresponds to [`memory_order_release`] in C++20. /// - /// [`Release`]: https://llvm.org/docs/Atomics.html#release - /// [`Acquire`]: https://llvm.org/docs/Atomics.html#acquire - /// [`Relaxed`]: https://llvm.org/docs/Atomics.html#monotonic + /// [`Release`]: #variant.Release + /// [`Acquire`]: #variant.Acquire + /// [`Relaxed`]: #variant.Relaxed + /// [`memory_order_release`]: https://en.cppreference.com/w/cpp/atomic/memory_order#Release-Acquire_ordering #[stable(feature = "rust1", since = "1.0.0")] Release, /// When coupled with a load, if the loaded value was written by a store operation with @@ -263,11 +264,12 @@ pub enum Ordering { /// /// This ordering is only applicable for operations that can perform a load. /// - /// Corresponds to LLVM's [`Acquire`] ordering. + /// Corresponds to [`memory_order_acquire`] in C++20. /// - /// [`Acquire`]: https://llvm.org/docs/Atomics.html#acquire - /// [`Release`]: https://llvm.org/docs/Atomics.html#release - /// [`Relaxed`]: https://llvm.org/docs/Atomics.html#monotonic + /// [`Acquire`]: #variant.Acquire + /// [`Release`]: #variant.Release + /// [`Relaxed`]: #variant.Relaxed + /// [`memory_order_acquire`]: https://en.cppreference.com/w/cpp/atomic/memory_order#Release-Acquire_ordering #[stable(feature = "rust1", since = "1.0.0")] Acquire, /// Has the effects of both [`Acquire`] and [`Release`] together: @@ -275,28 +277,28 @@ pub enum Ordering { /// /// Notice that in the case of `compare_and_swap`, it is possible that the operation ends up /// not performing any store and hence it has just [`Acquire`] ordering. However, - /// [`AcqRel`][`AcquireRelease`] will never perform [`Relaxed`] accesses. + /// `AcqRel` will never perform [`Relaxed`] accesses. /// /// This ordering is only applicable for operations that combine both loads and stores. /// - /// Corresponds to LLVM's [`AcquireRelease`] ordering. + /// Corresponds to [`memory_order_acq_rel`] in C++20. /// - /// [`AcquireRelease`]: https://llvm.org/docs/Atomics.html#acquirerelease - /// [`Acquire`]: https://llvm.org/docs/Atomics.html#acquire - /// [`Release`]: https://llvm.org/docs/Atomics.html#release - /// [`Relaxed`]: https://llvm.org/docs/Atomics.html#monotonic + /// [`memory_order_acq_rel`]: https://en.cppreference.com/w/cpp/atomic/memory_order#Release-Acquire_ordering + /// [`Acquire`]: #variant.Acquire + /// [`Release`]: #variant.Release + /// [`Relaxed`]: #variant.Relaxed #[stable(feature = "rust1", since = "1.0.0")] AcqRel, /// Like [`Acquire`]/[`Release`]/[`AcqRel`] (for load, store, and load-with-store /// operations, respectively) with the additional guarantee that all threads see all /// sequentially consistent operations in the same order. /// - /// Corresponds to LLVM's [`SequentiallyConsistent`] ordering. + /// Corresponds to [`memory_order_seq_cst`] in C++20. 
/// - /// [`SequentiallyConsistent`]: https://llvm.org/docs/Atomics.html#sequentiallyconsistent - /// [`Acquire`]: https://llvm.org/docs/Atomics.html#acquire - /// [`Release`]: https://llvm.org/docs/Atomics.html#release - /// [`AcqRel`]: https://llvm.org/docs/Atomics.html#acquirerelease + /// [`memory_order_seq_cst`]: https://en.cppreference.com/w/cpp/atomic/memory_order#Sequentially-consistent_ordering + /// [`Acquire`]: #variant.Acquire + /// [`Release`]: #variant.Release + /// [`AcqRel`]: #variant.AcqRel #[stable(feature = "rust1", since = "1.0.0")] SeqCst, } @@ -304,7 +306,7 @@ pub enum Ordering { /// An [`AtomicBool`] initialized to `false`. /// /// [`AtomicBool`]: struct.AtomicBool.html -#[cfg(target_has_atomic = "8")] +#[cfg(any(bootstrap, target_has_atomic_load_store = "8"))] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_deprecated( since = "1.34.0", @@ -313,7 +315,7 @@ pub enum Ordering { )] pub const ATOMIC_BOOL_INIT: AtomicBool = AtomicBool::new(false); -#[cfg(target_has_atomic = "8")] +#[cfg(any(bootstrap, target_has_atomic_load_store = "8"))] impl AtomicBool { /// Creates a new `AtomicBool`. /// @@ -462,7 +464,7 @@ impl AtomicBool { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - #[cfg(target_has_atomic = "cas")] + #[cfg(target_has_atomic = "8")] pub fn swap(&self, val: bool, order: Ordering) -> bool { unsafe { atomic_swap(self.v.get(), val as u8, order) != 0 } } @@ -500,7 +502,7 @@ impl AtomicBool { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - #[cfg(target_has_atomic = "cas")] + #[cfg(target_has_atomic = "8")] pub fn compare_and_swap(&self, current: bool, new: bool, order: Ordering) -> bool { match self.compare_exchange(current, new, order, strongest_failure_ordering(order)) { Ok(x) => x, @@ -551,7 +553,7 @@ impl AtomicBool { /// ``` #[inline] #[stable(feature = "extended_compare_and_swap", since = "1.10.0")] - #[cfg(target_has_atomic = "cas")] + #[cfg(target_has_atomic = "8")] pub fn compare_exchange(&self, current: bool, new: bool, @@ -607,7 +609,7 @@ impl AtomicBool { /// ``` #[inline] #[stable(feature = "extended_compare_and_swap", since = "1.10.0")] - #[cfg(target_has_atomic = "cas")] + #[cfg(target_has_atomic = "8")] pub fn compare_exchange_weak(&self, current: bool, new: bool, @@ -658,7 +660,7 @@ impl AtomicBool { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - #[cfg(target_has_atomic = "cas")] + #[cfg(target_has_atomic = "8")] pub fn fetch_and(&self, val: bool, order: Ordering) -> bool { unsafe { atomic_and(self.v.get(), val as u8, order) != 0 } } @@ -700,7 +702,7 @@ impl AtomicBool { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - #[cfg(target_has_atomic = "cas")] + #[cfg(target_has_atomic = "8")] pub fn fetch_nand(&self, val: bool, order: Ordering) -> bool { // We can't use atomic_nand here because it can result in a bool with // an invalid value. 
This happens because the atomic operation is done @@ -753,7 +755,7 @@ impl AtomicBool { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - #[cfg(target_has_atomic = "cas")] + #[cfg(target_has_atomic = "8")] pub fn fetch_or(&self, val: bool, order: Ordering) -> bool { unsafe { atomic_or(self.v.get(), val as u8, order) != 0 } } @@ -794,13 +796,13 @@ impl AtomicBool { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - #[cfg(target_has_atomic = "cas")] + #[cfg(target_has_atomic = "8")] pub fn fetch_xor(&self, val: bool, order: Ordering) -> bool { unsafe { atomic_xor(self.v.get(), val as u8, order) != 0 } } } -#[cfg(target_has_atomic = "ptr")] +#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))] impl AtomicPtr { /// Creates a new `AtomicPtr`. /// @@ -951,7 +953,7 @@ impl AtomicPtr { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - #[cfg(target_has_atomic = "cas")] + #[cfg(target_has_atomic = "ptr")] pub fn swap(&self, ptr: *mut T, order: Ordering) -> *mut T { unsafe { atomic_swap(self.p.get() as *mut usize, ptr as usize, order) as *mut T } } @@ -987,7 +989,7 @@ impl AtomicPtr { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - #[cfg(target_has_atomic = "cas")] + #[cfg(target_has_atomic = "ptr")] pub fn compare_and_swap(&self, current: *mut T, new: *mut T, order: Ordering) -> *mut T { match self.compare_exchange(current, new, order, strongest_failure_ordering(order)) { Ok(x) => x, @@ -1029,7 +1031,7 @@ impl AtomicPtr { /// ``` #[inline] #[stable(feature = "extended_compare_and_swap", since = "1.10.0")] - #[cfg(target_has_atomic = "cas")] + #[cfg(target_has_atomic = "ptr")] pub fn compare_exchange(&self, current: *mut T, new: *mut T, @@ -1089,7 +1091,7 @@ impl AtomicPtr { /// ``` #[inline] #[stable(feature = "extended_compare_and_swap", since = "1.10.0")] - #[cfg(target_has_atomic = "cas")] + #[cfg(target_has_atomic = "ptr")] pub fn compare_exchange_weak(&self, current: *mut T, new: *mut T, @@ -1110,7 +1112,7 @@ impl AtomicPtr { } } -#[cfg(target_has_atomic = "8")] +#[cfg(any(bootstrap, target_has_atomic_load_store = "8"))] #[stable(feature = "atomic_bool_from", since = "1.24.0")] impl From for AtomicBool { /// Converts a `bool` into an `AtomicBool`. @@ -1126,16 +1128,17 @@ impl From for AtomicBool { fn from(b: bool) -> Self { Self::new(b) } } -#[cfg(target_has_atomic = "ptr")] +#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))] #[stable(feature = "atomic_from", since = "1.23.0")] impl From<*mut T> for AtomicPtr { #[inline] fn from(p: *mut T) -> Self { Self::new(p) } } -#[cfg(target_has_atomic = "ptr")] +#[cfg(any(bootstrap, target_has_atomic_load_store = "8"))] macro_rules! 
atomic_int { - ($stable:meta, + ($cfg_cas:meta, + $stable:meta, $stable_cxchg:meta, $stable_debug:meta, $stable_access:meta, @@ -1356,7 +1359,7 @@ assert_eq!(some_var.swap(10, Ordering::Relaxed), 5); ```"), #[inline] #[$stable] - #[cfg(target_has_atomic = "cas")] + #[$cfg_cas] pub fn swap(&self, val: $int_type, order: Ordering) -> $int_type { unsafe { atomic_swap(self.v.get(), val, order) } } @@ -1396,7 +1399,7 @@ assert_eq!(some_var.load(Ordering::Relaxed), 10); ```"), #[inline] #[$stable] - #[cfg(target_has_atomic = "cas")] + #[$cfg_cas] pub fn compare_and_swap(&self, current: $int_type, new: $int_type, @@ -1454,7 +1457,7 @@ assert_eq!(some_var.load(Ordering::Relaxed), 10); ```"), #[inline] #[$stable_cxchg] - #[cfg(target_has_atomic = "cas")] + #[$cfg_cas] pub fn compare_exchange(&self, current: $int_type, new: $int_type, @@ -1506,7 +1509,7 @@ loop { ```"), #[inline] #[$stable_cxchg] - #[cfg(target_has_atomic = "cas")] + #[$cfg_cas] pub fn compare_exchange_weak(&self, current: $int_type, new: $int_type, @@ -1544,7 +1547,7 @@ assert_eq!(foo.load(Ordering::SeqCst), 10); ```"), #[inline] #[$stable] - #[cfg(target_has_atomic = "cas")] + #[$cfg_cas] pub fn fetch_add(&self, val: $int_type, order: Ordering) -> $int_type { unsafe { atomic_add(self.v.get(), val, order) } } @@ -1576,7 +1579,7 @@ assert_eq!(foo.load(Ordering::SeqCst), 10); ```"), #[inline] #[$stable] - #[cfg(target_has_atomic = "cas")] + #[$cfg_cas] pub fn fetch_sub(&self, val: $int_type, order: Ordering) -> $int_type { unsafe { atomic_sub(self.v.get(), val, order) } } @@ -1611,7 +1614,7 @@ assert_eq!(foo.load(Ordering::SeqCst), 0b100001); ```"), #[inline] #[$stable] - #[cfg(target_has_atomic = "cas")] + #[$cfg_cas] pub fn fetch_and(&self, val: $int_type, order: Ordering) -> $int_type { unsafe { atomic_and(self.v.get(), val, order) } } @@ -1647,7 +1650,7 @@ assert_eq!(foo.load(Ordering::SeqCst), !(0x13 & 0x31)); ```"), #[inline] #[$stable_nand] - #[cfg(target_has_atomic = "cas")] + #[$cfg_cas] pub fn fetch_nand(&self, val: $int_type, order: Ordering) -> $int_type { unsafe { atomic_nand(self.v.get(), val, order) } } @@ -1682,7 +1685,7 @@ assert_eq!(foo.load(Ordering::SeqCst), 0b111111); ```"), #[inline] #[$stable] - #[cfg(target_has_atomic = "cas")] + #[$cfg_cas] pub fn fetch_or(&self, val: $int_type, order: Ordering) -> $int_type { unsafe { atomic_or(self.v.get(), val, order) } } @@ -1717,7 +1720,7 @@ assert_eq!(foo.load(Ordering::SeqCst), 0b011110); ```"), #[inline] #[$stable] - #[cfg(target_has_atomic = "cas")] + #[$cfg_cas] pub fn fetch_xor(&self, val: $int_type, order: Ordering) -> $int_type { unsafe { atomic_xor(self.v.get(), val, order) } } @@ -1767,7 +1770,7 @@ assert_eq!(x.load(Ordering::SeqCst), 9); #[unstable(feature = "no_more_cas", reason = "no more CAS loops in user code", issue = "48655")] - #[cfg(target_has_atomic = "cas")] + #[$cfg_cas] pub fn fetch_update(&self, mut f: F, fetch_order: Ordering, @@ -1828,7 +1831,7 @@ assert!(max_foo == 42); #[unstable(feature = "atomic_min_max", reason = "easier and faster min/max than writing manual CAS loop", issue = "48655")] - #[cfg(target_has_atomic = "cas")] + #[$cfg_cas] pub fn fetch_max(&self, val: $int_type, order: Ordering) -> $int_type { unsafe { $max_fn(self.v.get(), val, order) } } @@ -1880,7 +1883,7 @@ assert_eq!(min_foo, 12); #[unstable(feature = "atomic_min_max", reason = "easier and faster min/max than writing manual CAS loop", issue = "48655")] - #[cfg(target_has_atomic = "cas")] + #[$cfg_cas] pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type { 
unsafe { $min_fn(self.v.get(), val, order) } } @@ -1890,8 +1893,9 @@ assert_eq!(min_foo, 12); } } -#[cfg(target_has_atomic = "8")] +#[cfg(any(bootstrap, target_has_atomic_load_store = "8"))] atomic_int! { + cfg(target_has_atomic = "8"), stable(feature = "integer_atomics_stable", since = "1.34.0"), stable(feature = "integer_atomics_stable", since = "1.34.0"), stable(feature = "integer_atomics_stable", since = "1.34.0"), @@ -1906,8 +1910,9 @@ atomic_int! { "AtomicI8::new(0)", i8 AtomicI8 ATOMIC_I8_INIT } -#[cfg(target_has_atomic = "8")] +#[cfg(any(bootstrap, target_has_atomic_load_store = "8"))] atomic_int! { + cfg(target_has_atomic = "8"), stable(feature = "integer_atomics_stable", since = "1.34.0"), stable(feature = "integer_atomics_stable", since = "1.34.0"), stable(feature = "integer_atomics_stable", since = "1.34.0"), @@ -1922,8 +1927,9 @@ atomic_int! { "AtomicU8::new(0)", u8 AtomicU8 ATOMIC_U8_INIT } -#[cfg(target_has_atomic = "16")] +#[cfg(any(bootstrap, target_has_atomic_load_store = "16"))] atomic_int! { + cfg(target_has_atomic = "16"), stable(feature = "integer_atomics_stable", since = "1.34.0"), stable(feature = "integer_atomics_stable", since = "1.34.0"), stable(feature = "integer_atomics_stable", since = "1.34.0"), @@ -1938,8 +1944,9 @@ atomic_int! { "AtomicI16::new(0)", i16 AtomicI16 ATOMIC_I16_INIT } -#[cfg(target_has_atomic = "16")] +#[cfg(any(bootstrap, target_has_atomic_load_store = "16"))] atomic_int! { + cfg(target_has_atomic = "16"), stable(feature = "integer_atomics_stable", since = "1.34.0"), stable(feature = "integer_atomics_stable", since = "1.34.0"), stable(feature = "integer_atomics_stable", since = "1.34.0"), @@ -1954,8 +1961,9 @@ atomic_int! { "AtomicU16::new(0)", u16 AtomicU16 ATOMIC_U16_INIT } -#[cfg(target_has_atomic = "32")] +#[cfg(any(bootstrap, target_has_atomic_load_store = "32"))] atomic_int! { + cfg(target_has_atomic = "32"), stable(feature = "integer_atomics_stable", since = "1.34.0"), stable(feature = "integer_atomics_stable", since = "1.34.0"), stable(feature = "integer_atomics_stable", since = "1.34.0"), @@ -1970,8 +1978,9 @@ atomic_int! { "AtomicI32::new(0)", i32 AtomicI32 ATOMIC_I32_INIT } -#[cfg(target_has_atomic = "32")] +#[cfg(any(bootstrap, target_has_atomic_load_store = "32"))] atomic_int! { + cfg(target_has_atomic = "32"), stable(feature = "integer_atomics_stable", since = "1.34.0"), stable(feature = "integer_atomics_stable", since = "1.34.0"), stable(feature = "integer_atomics_stable", since = "1.34.0"), @@ -1986,8 +1995,12 @@ atomic_int! { "AtomicU32::new(0)", u32 AtomicU32 ATOMIC_U32_INIT } -#[cfg(target_has_atomic = "64")] +#[cfg(any( + all(bootstrap, target_has_atomic = "64"), + target_has_atomic_load_store = "64" +))] atomic_int! { + cfg(target_has_atomic = "64"), stable(feature = "integer_atomics_stable", since = "1.34.0"), stable(feature = "integer_atomics_stable", since = "1.34.0"), stable(feature = "integer_atomics_stable", since = "1.34.0"), @@ -2002,8 +2015,12 @@ atomic_int! { "AtomicI64::new(0)", i64 AtomicI64 ATOMIC_I64_INIT } -#[cfg(target_has_atomic = "64")] +#[cfg(any( + all(bootstrap, target_has_atomic = "64"), + target_has_atomic_load_store = "64" +))] atomic_int! { + cfg(target_has_atomic = "64"), stable(feature = "integer_atomics_stable", since = "1.34.0"), stable(feature = "integer_atomics_stable", since = "1.34.0"), stable(feature = "integer_atomics_stable", since = "1.34.0"), @@ -2018,8 +2035,9 @@ atomic_int! 
{ "AtomicU64::new(0)", u64 AtomicU64 ATOMIC_U64_INIT } -#[cfg(target_has_atomic = "128")] +#[cfg(target_has_atomic_load_store = "128")] atomic_int! { + cfg(target_has_atomic = "128"), unstable(feature = "integer_atomics", issue = "32976"), unstable(feature = "integer_atomics", issue = "32976"), unstable(feature = "integer_atomics", issue = "32976"), @@ -2034,8 +2052,9 @@ atomic_int! { "AtomicI128::new(0)", i128 AtomicI128 ATOMIC_I128_INIT } -#[cfg(target_has_atomic = "128")] +#[cfg(target_has_atomic_load_store = "128")] atomic_int! { + cfg(target_has_atomic = "128"), unstable(feature = "integer_atomics", issue = "32976"), unstable(feature = "integer_atomics", issue = "32976"), unstable(feature = "integer_atomics", issue = "32976"), @@ -2050,20 +2069,24 @@ atomic_int! { "AtomicU128::new(0)", u128 AtomicU128 ATOMIC_U128_INIT } +#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))] #[cfg(target_pointer_width = "16")] macro_rules! ptr_width { () => { 2 } } +#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))] #[cfg(target_pointer_width = "32")] macro_rules! ptr_width { () => { 4 } } +#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))] #[cfg(target_pointer_width = "64")] macro_rules! ptr_width { () => { 8 } } -#[cfg(target_has_atomic = "ptr")] +#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))] atomic_int!{ + cfg(target_has_atomic = "ptr"), stable(feature = "rust1", since = "1.0.0"), stable(feature = "extended_compare_and_swap", since = "1.10.0"), stable(feature = "atomic_debug", since = "1.3.0"), @@ -2078,8 +2101,9 @@ atomic_int!{ "AtomicIsize::new(0)", isize AtomicIsize ATOMIC_ISIZE_INIT } -#[cfg(target_has_atomic = "ptr")] +#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))] atomic_int!{ + cfg(target_has_atomic = "ptr"), stable(feature = "rust1", since = "1.0.0"), stable(feature = "extended_compare_and_swap", since = "1.10.0"), stable(feature = "atomic_debug", since = "1.3.0"), @@ -2096,7 +2120,7 @@ atomic_int!{ } #[inline] -#[cfg(target_has_atomic = "cas")] +#[cfg(target_has_atomic = "8")] fn strongest_failure_ordering(order: Ordering) -> Ordering { match order { Release => Relaxed, @@ -2130,7 +2154,7 @@ unsafe fn atomic_load(dst: *const T, order: Ordering) -> T { } #[inline] -#[cfg(target_has_atomic = "cas")] +#[cfg(target_has_atomic = "8")] unsafe fn atomic_swap(dst: *mut T, val: T, order: Ordering) -> T { match order { Acquire => intrinsics::atomic_xchg_acq(dst, val), @@ -2143,7 +2167,7 @@ unsafe fn atomic_swap(dst: *mut T, val: T, order: Ordering) -> T { /// Returns the previous value (like __sync_fetch_and_add). #[inline] -#[cfg(target_has_atomic = "cas")] +#[cfg(target_has_atomic = "8")] unsafe fn atomic_add(dst: *mut T, val: T, order: Ordering) -> T { match order { Acquire => intrinsics::atomic_xadd_acq(dst, val), @@ -2156,7 +2180,7 @@ unsafe fn atomic_add(dst: *mut T, val: T, order: Ordering) -> T { /// Returns the previous value (like __sync_fetch_and_sub). 
#[inline] -#[cfg(target_has_atomic = "cas")] +#[cfg(target_has_atomic = "8")] unsafe fn atomic_sub(dst: *mut T, val: T, order: Ordering) -> T { match order { Acquire => intrinsics::atomic_xsub_acq(dst, val), @@ -2168,7 +2192,7 @@ unsafe fn atomic_sub(dst: *mut T, val: T, order: Ordering) -> T { } #[inline] -#[cfg(target_has_atomic = "cas")] +#[cfg(target_has_atomic = "8")] unsafe fn atomic_compare_exchange(dst: *mut T, old: T, new: T, @@ -2193,7 +2217,7 @@ unsafe fn atomic_compare_exchange(dst: *mut T, } #[inline] -#[cfg(target_has_atomic = "cas")] +#[cfg(target_has_atomic = "8")] unsafe fn atomic_compare_exchange_weak(dst: *mut T, old: T, new: T, @@ -2218,7 +2242,7 @@ unsafe fn atomic_compare_exchange_weak(dst: *mut T, } #[inline] -#[cfg(target_has_atomic = "cas")] +#[cfg(target_has_atomic = "8")] unsafe fn atomic_and(dst: *mut T, val: T, order: Ordering) -> T { match order { Acquire => intrinsics::atomic_and_acq(dst, val), @@ -2230,7 +2254,7 @@ unsafe fn atomic_and(dst: *mut T, val: T, order: Ordering) -> T { } #[inline] -#[cfg(target_has_atomic = "cas")] +#[cfg(target_has_atomic = "8")] unsafe fn atomic_nand(dst: *mut T, val: T, order: Ordering) -> T { match order { Acquire => intrinsics::atomic_nand_acq(dst, val), @@ -2242,7 +2266,7 @@ unsafe fn atomic_nand(dst: *mut T, val: T, order: Ordering) -> T { } #[inline] -#[cfg(target_has_atomic = "cas")] +#[cfg(target_has_atomic = "8")] unsafe fn atomic_or(dst: *mut T, val: T, order: Ordering) -> T { match order { Acquire => intrinsics::atomic_or_acq(dst, val), @@ -2254,7 +2278,7 @@ unsafe fn atomic_or(dst: *mut T, val: T, order: Ordering) -> T { } #[inline] -#[cfg(target_has_atomic = "cas")] +#[cfg(target_has_atomic = "8")] unsafe fn atomic_xor(dst: *mut T, val: T, order: Ordering) -> T { match order { Acquire => intrinsics::atomic_xor_acq(dst, val), @@ -2267,7 +2291,7 @@ unsafe fn atomic_xor(dst: *mut T, val: T, order: Ordering) -> T { /// returns the max value (signed comparison) #[inline] -#[cfg(target_has_atomic = "cas")] +#[cfg(target_has_atomic = "8")] unsafe fn atomic_max(dst: *mut T, val: T, order: Ordering) -> T { match order { Acquire => intrinsics::atomic_max_acq(dst, val), @@ -2280,7 +2304,7 @@ unsafe fn atomic_max(dst: *mut T, val: T, order: Ordering) -> T { /// returns the min value (signed comparison) #[inline] -#[cfg(target_has_atomic = "cas")] +#[cfg(target_has_atomic = "8")] unsafe fn atomic_min(dst: *mut T, val: T, order: Ordering) -> T { match order { Acquire => intrinsics::atomic_min_acq(dst, val), @@ -2293,7 +2317,7 @@ unsafe fn atomic_min(dst: *mut T, val: T, order: Ordering) -> T { /// returns the max value (signed comparison) #[inline] -#[cfg(target_has_atomic = "cas")] +#[cfg(target_has_atomic = "8")] unsafe fn atomic_umax(dst: *mut T, val: T, order: Ordering) -> T { match order { Acquire => intrinsics::atomic_umax_acq(dst, val), @@ -2306,7 +2330,7 @@ unsafe fn atomic_umax(dst: *mut T, val: T, order: Ordering) -> T { /// returns the min value (signed comparison) #[inline] -#[cfg(target_has_atomic = "cas")] +#[cfg(target_has_atomic = "8")] unsafe fn atomic_umin(dst: *mut T, val: T, order: Ordering) -> T { match order { Acquire => intrinsics::atomic_umin_acq(dst, val), @@ -2504,7 +2528,7 @@ pub fn compiler_fence(order: Ordering) { } -#[cfg(target_has_atomic = "8")] +#[cfg(any(bootstrap, target_has_atomic_load_store = "8"))] #[stable(feature = "atomic_debug", since = "1.3.0")] impl fmt::Debug for AtomicBool { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -2512,7 +2536,7 @@ impl fmt::Debug for AtomicBool 
{ } } -#[cfg(target_has_atomic = "ptr")] +#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))] #[stable(feature = "atomic_debug", since = "1.3.0")] impl fmt::Debug for AtomicPtr { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -2520,7 +2544,7 @@ impl fmt::Debug for AtomicPtr { } } -#[cfg(target_has_atomic = "ptr")] +#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))] #[stable(feature = "atomic_pointer", since = "1.24.0")] impl fmt::Pointer for AtomicPtr { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { diff --git a/src/libcore/task/wake.rs b/src/libcore/task/wake.rs index 65e2936428..6f841bd2ad 100644 --- a/src/libcore/task/wake.rs +++ b/src/libcore/task/wake.rs @@ -217,7 +217,7 @@ impl fmt::Debug for Context<'_> { /// This handle encapsulates a [`RawWaker`] instance, which defines the /// executor-specific wakeup behavior. /// -/// Implements [`Clone`], [`trait@Send`], and [`trait@Sync`]. +/// Implements [`Clone`], [`Send`], and [`Sync`]. /// /// [`RawWaker`]: struct.RawWaker.html #[repr(transparent)] diff --git a/src/libcore/tests/array.rs b/src/libcore/tests/array.rs index 9e133ac568..4f3b79c78b 100644 --- a/src/libcore/tests/array.rs +++ b/src/libcore/tests/array.rs @@ -1,4 +1,4 @@ -use core::array::FixedSizeArray; +use core::array::{FixedSizeArray, IntoIter}; use core::convert::TryFrom; #[test] @@ -40,3 +40,208 @@ fn array_try_from() { 30 31 32 } } + + +#[test] +fn iterator_collect() { + let arr = [0, 1, 2, 5, 9]; + let v: Vec<_> = IntoIter::new(arr.clone()).collect(); + assert_eq!(&arr[..], &v[..]); +} + +#[test] +fn iterator_rev_collect() { + let arr = [0, 1, 2, 5, 9]; + let v: Vec<_> = IntoIter::new(arr.clone()).rev().collect(); + assert_eq!(&v[..], &[9, 5, 2, 1, 0]); +} + +#[test] +fn iterator_nth() { + let v = [0, 1, 2, 3, 4]; + for i in 0..v.len() { + assert_eq!(IntoIter::new(v.clone()).nth(i).unwrap(), v[i]); + } + assert_eq!(IntoIter::new(v.clone()).nth(v.len()), None); + + let mut iter = IntoIter::new(v); + assert_eq!(iter.nth(2).unwrap(), v[2]); + assert_eq!(iter.nth(1).unwrap(), v[4]); +} + +#[test] +fn iterator_last() { + let v = [0, 1, 2, 3, 4]; + assert_eq!(IntoIter::new(v).last().unwrap(), 4); + assert_eq!(IntoIter::new([0]).last().unwrap(), 0); + + let mut it = IntoIter::new([0, 9, 2, 4]); + assert_eq!(it.next_back(), Some(4)); + assert_eq!(it.last(), Some(2)); +} + +#[test] +fn iterator_clone() { + let mut it = IntoIter::new([0, 2, 4, 6, 8]); + assert_eq!(it.next(), Some(0)); + assert_eq!(it.next_back(), Some(8)); + let mut clone = it.clone(); + assert_eq!(it.next_back(), Some(6)); + assert_eq!(clone.next_back(), Some(6)); + assert_eq!(it.next_back(), Some(4)); + assert_eq!(clone.next_back(), Some(4)); + assert_eq!(it.next(), Some(2)); + assert_eq!(clone.next(), Some(2)); +} + +#[test] +fn iterator_fused() { + let mut it = IntoIter::new([0, 9, 2]); + assert_eq!(it.next(), Some(0)); + assert_eq!(it.next(), Some(9)); + assert_eq!(it.next(), Some(2)); + assert_eq!(it.next(), None); + assert_eq!(it.next(), None); + assert_eq!(it.next(), None); + assert_eq!(it.next(), None); + assert_eq!(it.next(), None); +} + +#[test] +fn iterator_len() { + let mut it = IntoIter::new([0, 1, 2, 5, 9]); + assert_eq!(it.size_hint(), (5, Some(5))); + assert_eq!(it.len(), 5); + assert_eq!(it.is_empty(), false); + + assert_eq!(it.next(), Some(0)); + assert_eq!(it.size_hint(), (4, Some(4))); + assert_eq!(it.len(), 4); + assert_eq!(it.is_empty(), false); + + assert_eq!(it.next_back(), Some(9)); + assert_eq!(it.size_hint(), (3, Some(3))); + 
assert_eq!(it.len(), 3); + assert_eq!(it.is_empty(), false); + + // Empty + let it = IntoIter::new([] as [String; 0]); + assert_eq!(it.size_hint(), (0, Some(0))); + assert_eq!(it.len(), 0); + assert_eq!(it.is_empty(), true); +} + +#[test] +fn iterator_count() { + let v = [0, 1, 2, 3, 4]; + assert_eq!(IntoIter::new(v.clone()).count(), 5); + + let mut iter2 = IntoIter::new(v); + iter2.next(); + iter2.next(); + assert_eq!(iter2.count(), 3); +} + +#[test] +fn iterator_flat_map() { + assert!((0..5).flat_map(|i| IntoIter::new([2 * i, 2 * i + 1])).eq(0..10)); +} + +#[test] +fn iterator_debug() { + let arr = [0, 1, 2, 5, 9]; + assert_eq!( + format!("{:?}", IntoIter::new(arr)), + "IntoIter([0, 1, 2, 5, 9])", + ); +} + +#[test] +fn iterator_drops() { + use core::cell::Cell; + + // This test makes sure the correct number of elements are dropped. The `R` + // type is just a reference to a `Cell` that is incremented when an `R` is + // dropped. + + #[derive(Clone)] + struct Foo<'a>(&'a Cell); + + impl Drop for Foo<'_> { + fn drop(&mut self) { + self.0.set(self.0.get() + 1); + } + } + + fn five(i: &Cell) -> [Foo<'_>; 5] { + // This is somewhat verbose because `Foo` does not implement `Copy` + // since it implements `Drop`. Consequently, we cannot write + // `[Foo(i); 5]`. + [Foo(i), Foo(i), Foo(i), Foo(i), Foo(i)] + } + + // Simple: drop new iterator. + let i = Cell::new(0); + { + IntoIter::new(five(&i)); + } + assert_eq!(i.get(), 5); + + // Call `next()` once. + let i = Cell::new(0); + { + let mut iter = IntoIter::new(five(&i)); + let _x = iter.next(); + assert_eq!(i.get(), 0); + assert_eq!(iter.count(), 4); + assert_eq!(i.get(), 4); + } + assert_eq!(i.get(), 5); + + // Check `clone` and calling `next`/`next_back`. + let i = Cell::new(0); + { + let mut iter = IntoIter::new(five(&i)); + iter.next(); + assert_eq!(i.get(), 1); + iter.next_back(); + assert_eq!(i.get(), 2); + + let mut clone = iter.clone(); + assert_eq!(i.get(), 2); + + iter.next(); + assert_eq!(i.get(), 3); + + clone.next(); + assert_eq!(i.get(), 4); + + assert_eq!(clone.count(), 2); + assert_eq!(i.get(), 6); + } + assert_eq!(i.get(), 8); + + // Check via `nth`. + let i = Cell::new(0); + { + let mut iter = IntoIter::new(five(&i)); + let _x = iter.nth(2); + assert_eq!(i.get(), 2); + let _y = iter.last(); + assert_eq!(i.get(), 3); + } + assert_eq!(i.get(), 5); + + // Check every element. 
+ let i = Cell::new(0); + for (index, _x) in IntoIter::new(five(&i)).enumerate() { + assert_eq!(i.get(), index); + } + assert_eq!(i.get(), 5); + + let i = Cell::new(0); + for (index, _x) in IntoIter::new(five(&i)).rev().enumerate() { + assert_eq!(i.get(), index); + } + assert_eq!(i.get(), 5); +} diff --git a/src/libcore/tests/fmt/builders.rs b/src/libcore/tests/fmt/builders.rs index 200659b91b..2557244328 100644 --- a/src/libcore/tests/fmt/builders.rs +++ b/src/libcore/tests/fmt/builders.rs @@ -319,6 +319,46 @@ mod debug_map { format!("{:#?}", Bar)); } + #[test] + fn test_entry_err() { + // Ensure errors in a map entry don't trigger panics (#65231) + use std::fmt::Write; + + struct ErrorFmt; + + impl fmt::Debug for ErrorFmt { + fn fmt(&self, _: &mut fmt::Formatter<'_>) -> fmt::Result { + Err(fmt::Error) + } + } + + struct KeyValue(usize, K, V); + + impl fmt::Debug for KeyValue + where + K: fmt::Debug, + V: fmt::Debug, + { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut map = fmt.debug_map(); + + for _ in 0..self.0 { + map.entry(&self.1, &self.2); + } + + map.finish() + } + } + + let mut buf = String::new(); + + assert!(write!(&mut buf, "{:?}", KeyValue(1, ErrorFmt, "bar")).is_err()); + assert!(write!(&mut buf, "{:?}", KeyValue(1, "foo", ErrorFmt)).is_err()); + + assert!(write!(&mut buf, "{:?}", KeyValue(2, ErrorFmt, "bar")).is_err()); + assert!(write!(&mut buf, "{:?}", KeyValue(2, "foo", ErrorFmt)).is_err()); + } + #[test] #[should_panic] fn test_invalid_key_when_entry_is_incomplete() { diff --git a/src/libcore/tests/lib.rs b/src/libcore/tests/lib.rs index 3566135602..b28ed2eaa0 100644 --- a/src/libcore/tests/lib.rs +++ b/src/libcore/tests/lib.rs @@ -31,6 +31,7 @@ #![feature(slice_partition_dedup)] #![feature(int_error_matching)] #![feature(const_fn)] +#![feature(array_value_iter)] #![feature(iter_partition_in_place)] #![feature(iter_is_partitioned)] #![feature(iter_order_by)] diff --git a/src/libfmt_macros/lib.rs b/src/libfmt_macros/lib.rs index f9c1be20b8..d22420e76d 100644 --- a/src/libfmt_macros/lib.rs +++ b/src/libfmt_macros/lib.rs @@ -410,7 +410,7 @@ impl<'a> Parser<'a> { &self.input[start..self.input.len()] } - /// Parses an Argument structure, or what's contained within braces inside the format string + /// Parses an `Argument` structure, or what's contained within braces inside the format string. fn argument(&mut self) -> Argument<'a> { let pos = self.position(); let format = self.format(); @@ -464,7 +464,7 @@ impl<'a> Parser<'a> { } /// Parses a format specifier at the current position, returning all of the - /// relevant information in the FormatSpec struct. + /// relevant information in the `FormatSpec` struct. fn format(&mut self) -> FormatSpec<'a> { let mut spec = FormatSpec { fill: None, @@ -571,7 +571,7 @@ impl<'a> Parser<'a> { spec } - /// Parses a Count parameter at the current position. This does not check + /// Parses a `Count` parameter at the current position. This does not check /// for 'CountIsNextParam' because that is only used in precision, not /// width. 
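    // Illustrative `Count` inputs (mine, not from this file), e.g. inside a test:
    //     assert_eq!(format!("{:8.3}", 3.14159), "   3.142"); // literal width and precision
    //     assert_eq!(format!("{:>w$}", "hi", w = 4), "  hi"); // width from the named argument `w`
    //     assert_eq!(format!("{:.*}", 2, 3.14159), "3.14");   // precision is the next parameter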
fn count(&mut self, start: usize) -> (Count, Option) { diff --git a/src/libpanic_abort/lib.rs b/src/libpanic_abort/lib.rs index fd144d6b67..5509f47bc8 100644 --- a/src/libpanic_abort/lib.rs +++ b/src/libpanic_abort/lib.rs @@ -54,7 +54,8 @@ pub unsafe extern fn __rust_start_panic(_payload: usize) -> u32 { core::intrinsics::abort(); } - #[cfg(all(target_vendor="fortanix", target_env="sgx"))] + #[cfg(any(target_os = "hermit", + all(target_vendor="fortanix", target_env="sgx")))] unsafe fn abort() -> ! { // call std::sys::abort_internal extern "C" { pub fn __rust_abort() -> !; } diff --git a/src/libpanic_unwind/dwarf/eh.rs b/src/libpanic_unwind/dwarf/eh.rs index 07fa297184..1e9e7e4b83 100644 --- a/src/libpanic_unwind/dwarf/eh.rs +++ b/src/libpanic_unwind/dwarf/eh.rs @@ -51,7 +51,7 @@ pub enum EHAction { pub const USING_SJLJ_EXCEPTIONS: bool = cfg!(all(target_os = "ios", target_arch = "arm")); -pub unsafe fn find_eh_action(lsda: *const u8, context: &EHContext<'_>) +pub unsafe fn find_eh_action(lsda: *const u8, context: &EHContext<'_>, foreign_exception: bool) -> Result { if lsda.is_null() { @@ -96,7 +96,7 @@ pub unsafe fn find_eh_action(lsda: *const u8, context: &EHContext<'_>) return Ok(EHAction::None) } else { let lpad = lpad_base + cs_lpad; - return Ok(interpret_cs_action(cs_action, lpad)) + return Ok(interpret_cs_action(cs_action, lpad, foreign_exception)) } } } @@ -121,16 +121,23 @@ pub unsafe fn find_eh_action(lsda: *const u8, context: &EHContext<'_>) // Can never have null landing pad for sjlj -- that would have // been indicated by a -1 call site index. let lpad = (cs_lpad + 1) as usize; - return Ok(interpret_cs_action(cs_action, lpad)) + return Ok(interpret_cs_action(cs_action, lpad, foreign_exception)) } } } } -fn interpret_cs_action(cs_action: u64, lpad: usize) -> EHAction { +fn interpret_cs_action(cs_action: u64, lpad: usize, foreign_exception: bool) -> EHAction { if cs_action == 0 { + // If cs_action is 0 then this is a cleanup (Drop::drop). We run these + // for both Rust panics and foriegn exceptions. EHAction::Cleanup(lpad) + } else if foreign_exception { + // catch_unwind should not catch foreign exceptions, only Rust panics. + // Instead just continue unwinding. + EHAction::None } else { + // Stop unwinding Rust panics at catch_unwind. EHAction::Catch(lpad) } } diff --git a/src/libpanic_unwind/emcc.rs b/src/libpanic_unwind/emcc.rs index 18e9006468..3d0d5a4151 100644 --- a/src/libpanic_unwind/emcc.rs +++ b/src/libpanic_unwind/emcc.rs @@ -15,14 +15,48 @@ use alloc::boxed::Box; use libc::{self, c_int}; use unwind as uw; +// This matches the layout of std::type_info in C++ +#[repr(C)] +struct TypeInfo { + vtable: *const usize, + name: *const u8, +} +unsafe impl Sync for TypeInfo {} + +extern "C" { + // The leading `\x01` byte here is actually a magical signal to LLVM to + // *not* apply any other mangling like prefixing with a `_` character. + // + // This symbol is the vtable used by C++'s `std::type_info`. Objects of type + // `std::type_info`, type descriptors, have a pointer to this table. Type + // descriptors are referenced by the C++ EH structures defined above and + // that we construct below. + // + // Note that the real size is larger than 3 usize, but we only need our + // vtable to point to the third element. 
+ #[link_name = "\x01_ZTVN10__cxxabiv117__class_type_infoE"] + static CLASS_TYPE_INFO_VTABLE: [usize; 3]; +} + +// std::type_info for a rust_panic class +#[lang = "eh_catch_typeinfo"] +static EXCEPTION_TYPE_INFO: TypeInfo = TypeInfo { + // Normally we would use .as_ptr().add(2) but this doesn't work in a const context. + vtable: unsafe { &CLASS_TYPE_INFO_VTABLE[2] }, + // This intentionally doesn't use the normal name mangling scheme because + // we don't want C++ to be able to produce or catch Rust panics. + name: b"rust_panic\0".as_ptr(), +}; + pub fn payload() -> *mut u8 { ptr::null_mut() } pub unsafe fn cleanup(ptr: *mut u8) -> Box { assert!(!ptr.is_null()); - let ex = ptr::read(ptr as *mut _); - __cxa_free_exception(ptr as *mut _); + let adjusted_ptr = __cxa_begin_catch(ptr as *mut libc::c_void); + let ex = ptr::read(adjusted_ptr as *mut _); + __cxa_end_catch(); ex } @@ -32,11 +66,8 @@ pub unsafe fn panic(data: Box) -> u32 { if exception == ptr::null_mut() { return uw::_URC_FATAL_PHASE1_ERROR as u32; } - let exception = exception as *mut Box; - ptr::write(exception, data); - __cxa_throw(exception as *mut _, ptr::null_mut(), ptr::null_mut()); - - unreachable!() + ptr::write(exception as *mut _, data); + __cxa_throw(exception as *mut _, &EXCEPTION_TYPE_INFO, ptr::null_mut()); } #[lang = "eh_personality"] @@ -52,10 +83,11 @@ unsafe extern "C" fn rust_eh_personality(version: c_int, extern "C" { fn __cxa_allocate_exception(thrown_size: libc::size_t) -> *mut libc::c_void; - fn __cxa_free_exception(thrown_exception: *mut libc::c_void); + fn __cxa_begin_catch(thrown_exception: *mut libc::c_void) -> *mut libc::c_void; + fn __cxa_end_catch(); fn __cxa_throw(thrown_exception: *mut libc::c_void, - tinfo: *mut libc::c_void, - dest: *mut libc::c_void); + tinfo: *const TypeInfo, + dest: *mut libc::c_void) -> !; fn __gxx_personality_v0(version: c_int, actions: uw::_Unwind_Action, exception_class: uw::_Unwind_Exception_Class, diff --git a/src/libpanic_unwind/gcc.rs b/src/libpanic_unwind/gcc.rs index 236ed15050..4f572fe21b 100644 --- a/src/libpanic_unwind/gcc.rs +++ b/src/libpanic_unwind/gcc.rs @@ -133,133 +133,176 @@ const UNWIND_DATA_REG: (i32, i32) = (0, 1); // R0, R1 // https://github.com/gcc-mirror/gcc/blob/master/libstdc++-v3/libsupc++/eh_personality.cc // https://github.com/gcc-mirror/gcc/blob/trunk/libgcc/unwind-c.c -// The personality routine for most of our targets, except ARM, which has a slightly different ABI -// (however, iOS goes here as it uses SjLj unwinding). Also, the 64-bit Windows implementation -// lives in seh64_gnu.rs -#[cfg(all(any(target_os = "ios", target_os = "netbsd", not(target_arch = "arm"))))] -#[lang = "eh_personality"] -#[no_mangle] -#[allow(unused)] -unsafe extern "C" fn rust_eh_personality(version: c_int, - actions: uw::_Unwind_Action, - exception_class: uw::_Unwind_Exception_Class, - exception_object: *mut uw::_Unwind_Exception, - context: *mut uw::_Unwind_Context) - -> uw::_Unwind_Reason_Code { - if version != 1 { - return uw::_URC_FATAL_PHASE1_ERROR; - } - let eh_action = match find_eh_action(context) { - Ok(action) => action, - Err(_) => return uw::_URC_FATAL_PHASE1_ERROR, - }; - if actions as i32 & uw::_UA_SEARCH_PHASE as i32 != 0 { - match eh_action { - EHAction::None | - EHAction::Cleanup(_) => return uw::_URC_CONTINUE_UNWIND, - EHAction::Catch(_) => return uw::_URC_HANDLER_FOUND, - EHAction::Terminate => return uw::_URC_FATAL_PHASE1_ERROR, +cfg_if::cfg_if! 
{ + if #[cfg(all(target_arch = "arm", not(target_os = "ios"), not(target_os = "netbsd")))] { + // ARM EHABI personality routine. + // http://infocenter.arm.com/help/topic/com.arm.doc.ihi0038b/IHI0038B_ehabi.pdf + // + // iOS uses the default routine instead since it uses SjLj unwinding. + #[lang = "eh_personality"] + #[no_mangle] + unsafe extern "C" fn rust_eh_personality(state: uw::_Unwind_State, + exception_object: *mut uw::_Unwind_Exception, + context: *mut uw::_Unwind_Context) + -> uw::_Unwind_Reason_Code { + let state = state as c_int; + let action = state & uw::_US_ACTION_MASK as c_int; + let search_phase = if action == uw::_US_VIRTUAL_UNWIND_FRAME as c_int { + // Backtraces on ARM will call the personality routine with + // state == _US_VIRTUAL_UNWIND_FRAME | _US_FORCE_UNWIND. In those cases + // we want to continue unwinding the stack, otherwise all our backtraces + // would end at __rust_try + if state & uw::_US_FORCE_UNWIND as c_int != 0 { + return continue_unwind(exception_object, context); + } + true + } else if action == uw::_US_UNWIND_FRAME_STARTING as c_int { + false + } else if action == uw::_US_UNWIND_FRAME_RESUME as c_int { + return continue_unwind(exception_object, context); + } else { + return uw::_URC_FAILURE; + }; + + // The DWARF unwinder assumes that _Unwind_Context holds things like the function + // and LSDA pointers, however ARM EHABI places them into the exception object. + // To preserve signatures of functions like _Unwind_GetLanguageSpecificData(), which + // take only the context pointer, GCC personality routines stash a pointer to + // exception_object in the context, using location reserved for ARM's + // "scratch register" (r12). + uw::_Unwind_SetGR(context, + uw::UNWIND_POINTER_REG, + exception_object as uw::_Unwind_Ptr); + // ...A more principled approach would be to provide the full definition of ARM's + // _Unwind_Context in our libunwind bindings and fetch the required data from there + // directly, bypassing DWARF compatibility functions. + + let exception_class = (*exception_object).exception_class; + let foreign_exception = exception_class != rust_exception_class(); + let eh_action = match find_eh_action(context, foreign_exception) { + Ok(action) => action, + Err(_) => return uw::_URC_FAILURE, + }; + if search_phase { + match eh_action { + EHAction::None | + EHAction::Cleanup(_) => return continue_unwind(exception_object, context), + EHAction::Catch(_) => return uw::_URC_HANDLER_FOUND, + EHAction::Terminate => return uw::_URC_FAILURE, + } + } else { + match eh_action { + EHAction::None => return continue_unwind(exception_object, context), + EHAction::Cleanup(lpad) | + EHAction::Catch(lpad) => { + uw::_Unwind_SetGR(context, UNWIND_DATA_REG.0, + exception_object as uintptr_t); + uw::_Unwind_SetGR(context, UNWIND_DATA_REG.1, 0); + uw::_Unwind_SetIP(context, lpad); + return uw::_URC_INSTALL_CONTEXT; + } + EHAction::Terminate => return uw::_URC_FAILURE, + } + } + + // On ARM EHABI the personality routine is responsible for actually + // unwinding a single stack frame before returning (ARM EHABI Sec. 6.1). 
+ unsafe fn continue_unwind(exception_object: *mut uw::_Unwind_Exception, + context: *mut uw::_Unwind_Context) + -> uw::_Unwind_Reason_Code { + if __gnu_unwind_frame(exception_object, context) == uw::_URC_NO_REASON { + uw::_URC_CONTINUE_UNWIND + } else { + uw::_URC_FAILURE + } + } + // defined in libgcc + extern "C" { + fn __gnu_unwind_frame(exception_object: *mut uw::_Unwind_Exception, + context: *mut uw::_Unwind_Context) + -> uw::_Unwind_Reason_Code; + } } } else { - match eh_action { - EHAction::None => return uw::_URC_CONTINUE_UNWIND, - EHAction::Cleanup(lpad) | - EHAction::Catch(lpad) => { - uw::_Unwind_SetGR(context, UNWIND_DATA_REG.0, exception_object as uintptr_t); - uw::_Unwind_SetGR(context, UNWIND_DATA_REG.1, 0); - uw::_Unwind_SetIP(context, lpad); - return uw::_URC_INSTALL_CONTEXT; + // Default personality routine, which is used directly on most targets + // and indirectly on Windows x86_64 via SEH. + unsafe extern "C" fn rust_eh_personality_impl(version: c_int, + actions: uw::_Unwind_Action, + exception_class: uw::_Unwind_Exception_Class, + exception_object: *mut uw::_Unwind_Exception, + context: *mut uw::_Unwind_Context) + -> uw::_Unwind_Reason_Code { + if version != 1 { + return uw::_URC_FATAL_PHASE1_ERROR; + } + let foreign_exception = exception_class != rust_exception_class(); + let eh_action = match find_eh_action(context, foreign_exception) { + Ok(action) => action, + Err(_) => return uw::_URC_FATAL_PHASE1_ERROR, + }; + if actions as i32 & uw::_UA_SEARCH_PHASE as i32 != 0 { + match eh_action { + EHAction::None | + EHAction::Cleanup(_) => uw::_URC_CONTINUE_UNWIND, + EHAction::Catch(_) => uw::_URC_HANDLER_FOUND, + EHAction::Terminate => uw::_URC_FATAL_PHASE1_ERROR, + } + } else { + match eh_action { + EHAction::None => uw::_URC_CONTINUE_UNWIND, + EHAction::Cleanup(lpad) | + EHAction::Catch(lpad) => { + uw::_Unwind_SetGR(context, UNWIND_DATA_REG.0, + exception_object as uintptr_t); + uw::_Unwind_SetGR(context, UNWIND_DATA_REG.1, 0); + uw::_Unwind_SetIP(context, lpad); + uw::_URC_INSTALL_CONTEXT + } + EHAction::Terminate => uw::_URC_FATAL_PHASE2_ERROR, + } + } + } + + cfg_if::cfg_if! { + if #[cfg(all(windows, target_arch = "x86_64", target_env = "gnu"))] { + // On x86_64 MinGW targets, the unwinding mechanism is SEH however the unwind + // handler data (aka LSDA) uses GCC-compatible encoding. + #[lang = "eh_personality"] + #[no_mangle] + #[allow(nonstandard_style)] + unsafe extern "C" fn rust_eh_personality(exceptionRecord: *mut uw::EXCEPTION_RECORD, + establisherFrame: uw::LPVOID, + contextRecord: *mut uw::CONTEXT, + dispatcherContext: *mut uw::DISPATCHER_CONTEXT) + -> uw::EXCEPTION_DISPOSITION { + uw::_GCC_specific_handler(exceptionRecord, + establisherFrame, + contextRecord, + dispatcherContext, + rust_eh_personality_impl) + } + } else { + // The personality routine for most of our targets. + #[lang = "eh_personality"] + #[no_mangle] + unsafe extern "C" fn rust_eh_personality(version: c_int, + actions: uw::_Unwind_Action, + exception_class: uw::_Unwind_Exception_Class, + exception_object: *mut uw::_Unwind_Exception, + context: *mut uw::_Unwind_Context) + -> uw::_Unwind_Reason_Code { + rust_eh_personality_impl(version, + actions, + exception_class, + exception_object, + context) + } } - EHAction::Terminate => return uw::_URC_FATAL_PHASE2_ERROR, } } } -// ARM EHABI personality routine. 
-// http://infocenter.arm.com/help/topic/com.arm.doc.ihi0038b/IHI0038B_ehabi.pdf -#[cfg(all(target_arch = "arm", not(target_os = "ios"), not(target_os = "netbsd")))] -#[lang = "eh_personality"] -#[no_mangle] -unsafe extern "C" fn rust_eh_personality(state: uw::_Unwind_State, - exception_object: *mut uw::_Unwind_Exception, - context: *mut uw::_Unwind_Context) - -> uw::_Unwind_Reason_Code { - let state = state as c_int; - let action = state & uw::_US_ACTION_MASK as c_int; - let search_phase = if action == uw::_US_VIRTUAL_UNWIND_FRAME as c_int { - // Backtraces on ARM will call the personality routine with - // state == _US_VIRTUAL_UNWIND_FRAME | _US_FORCE_UNWIND. In those cases - // we want to continue unwinding the stack, otherwise all our backtraces - // would end at __rust_try - if state & uw::_US_FORCE_UNWIND as c_int != 0 { - return continue_unwind(exception_object, context); - } - true - } else if action == uw::_US_UNWIND_FRAME_STARTING as c_int { - false - } else if action == uw::_US_UNWIND_FRAME_RESUME as c_int { - return continue_unwind(exception_object, context); - } else { - return uw::_URC_FAILURE; - }; - - // The DWARF unwinder assumes that _Unwind_Context holds things like the function - // and LSDA pointers, however ARM EHABI places them into the exception object. - // To preserve signatures of functions like _Unwind_GetLanguageSpecificData(), which - // take only the context pointer, GCC personality routines stash a pointer to exception_object - // in the context, using location reserved for ARM's "scratch register" (r12). - uw::_Unwind_SetGR(context, - uw::UNWIND_POINTER_REG, - exception_object as uw::_Unwind_Ptr); - // ...A more principled approach would be to provide the full definition of ARM's - // _Unwind_Context in our libunwind bindings and fetch the required data from there directly, - // bypassing DWARF compatibility functions. - - let eh_action = match find_eh_action(context) { - Ok(action) => action, - Err(_) => return uw::_URC_FAILURE, - }; - if search_phase { - match eh_action { - EHAction::None | - EHAction::Cleanup(_) => return continue_unwind(exception_object, context), - EHAction::Catch(_) => return uw::_URC_HANDLER_FOUND, - EHAction::Terminate => return uw::_URC_FAILURE, - } - } else { - match eh_action { - EHAction::None => return continue_unwind(exception_object, context), - EHAction::Cleanup(lpad) | - EHAction::Catch(lpad) => { - uw::_Unwind_SetGR(context, UNWIND_DATA_REG.0, exception_object as uintptr_t); - uw::_Unwind_SetGR(context, UNWIND_DATA_REG.1, 0); - uw::_Unwind_SetIP(context, lpad); - return uw::_URC_INSTALL_CONTEXT; - } - EHAction::Terminate => return uw::_URC_FAILURE, - } - } - - // On ARM EHABI the personality routine is responsible for actually - // unwinding a single stack frame before returning (ARM EHABI Sec. 6.1). 
- unsafe fn continue_unwind(exception_object: *mut uw::_Unwind_Exception, - context: *mut uw::_Unwind_Context) - -> uw::_Unwind_Reason_Code { - if __gnu_unwind_frame(exception_object, context) == uw::_URC_NO_REASON { - uw::_URC_CONTINUE_UNWIND - } else { - uw::_URC_FAILURE - } - } - // defined in libgcc - extern "C" { - fn __gnu_unwind_frame(exception_object: *mut uw::_Unwind_Exception, - context: *mut uw::_Unwind_Context) - -> uw::_Unwind_Reason_Code; - } -} - -unsafe fn find_eh_action(context: *mut uw::_Unwind_Context) +unsafe fn find_eh_action(context: *mut uw::_Unwind_Context, foreign_exception: bool) -> Result { let lsda = uw::_Unwind_GetLanguageSpecificData(context) as *const u8; @@ -273,11 +316,11 @@ unsafe fn find_eh_action(context: *mut uw::_Unwind_Context) get_text_start: &|| uw::_Unwind_GetTextRelBase(context), get_data_start: &|| uw::_Unwind_GetDataRelBase(context), }; - eh::find_eh_action(lsda, &eh_context) + eh::find_eh_action(lsda, &eh_context, foreign_exception) } // See docs in the `unwind` module. -#[cfg(all(target_os="windows", target_arch = "x86", target_env="gnu"))] +#[cfg(all(target_os="windows", any(target_arch = "x86", target_arch = "x86_64"), target_env="gnu"))] #[lang = "eh_unwind_resume"] #[unwind(allowed)] unsafe extern "C" fn rust_eh_unwind_resume(panic_ctx: *mut u8) -> ! { diff --git a/src/libpanic_unwind/hermit.rs b/src/libpanic_unwind/hermit.rs new file mode 100644 index 0000000000..8bee6ff09e --- /dev/null +++ b/src/libpanic_unwind/hermit.rs @@ -0,0 +1,21 @@ +//! Unwinding for *hermit* target. +//! +//! Right now we don't support this, so this is just stubs. + +use alloc::boxed::Box; +use core::ptr; +use core::any::Any; + +pub fn payload() -> *mut u8 { + ptr::null_mut() +} + +pub unsafe fn cleanup(_ptr: *mut u8) -> Box { + extern "C" { pub fn __rust_abort() -> !; } + __rust_abort(); +} + +pub unsafe fn panic(_data: Box) -> u32 { + extern "C" { pub fn __rust_abort() -> !; } + __rust_abort(); +} diff --git a/src/libpanic_unwind/lib.rs b/src/libpanic_unwind/lib.rs index 06e6e768f4..d2a0ef7b1d 100644 --- a/src/libpanic_unwind/lib.rs +++ b/src/libpanic_unwind/lib.rs @@ -5,9 +5,8 @@ //! essentially gets categorized into three buckets currently: //! //! 1. MSVC targets use SEH in the `seh.rs` file. -//! 2. The 64-bit MinGW target half-uses SEH and half-use gcc-like information -//! in the `seh64_gnu.rs` module. -//! 3. All other targets use libunwind/libgcc in the `gcc/mod.rs` module. +//! 2. Emscripten uses C++ exceptions in the `emcc.rs` file. +//! 3. All other targets use libunwind/libgcc in the `gcc.rs` file. //! //! More documentation about each implementation can be found in the respective //! module. @@ -43,15 +42,15 @@ cfg_if::cfg_if! { } else if #[cfg(target_arch = "wasm32")] { #[path = "dummy.rs"] mod imp; + } else if #[cfg(target_os = "hermit")] { + #[path = "hermit.rs"] + mod imp; } else if #[cfg(all(target_env = "msvc", target_arch = "aarch64"))] { #[path = "dummy.rs"] mod imp; } else if #[cfg(target_env = "msvc")] { #[path = "seh.rs"] mod imp; - } else if #[cfg(all(windows, target_arch = "x86_64", target_env = "gnu"))] { - #[path = "seh64_gnu.rs"] - mod imp; } else { // Rust runtime's startup objects depend on these symbols, so make them public. #[cfg(all(target_os="windows", target_arch = "x86", target_env="gnu"))] @@ -62,7 +61,6 @@ cfg_if::cfg_if! { } mod dwarf; -mod windows; // Entry point for catching an exception, implemented using the `try` intrinsic // in the compiler. 
diff --git a/src/libpanic_unwind/seh.rs b/src/libpanic_unwind/seh.rs index 809e461981..621813a2fe 100644 --- a/src/libpanic_unwind/seh.rs +++ b/src/libpanic_unwind/seh.rs @@ -51,9 +51,7 @@ use alloc::boxed::Box; use core::any::Any; use core::mem; use core::raw; - -use crate::windows as c; -use libc::{c_int, c_uint}; +use libc::{c_int, c_uint, c_void}; // First up, a whole bunch of type definitions. There's a few platform-specific // oddities here, and a lot that's just blatantly copied from LLVM. The purpose @@ -76,18 +74,19 @@ use libc::{c_int, c_uint}; // sort of operation. For example, if you compile this C++ code on MSVC and emit // the LLVM IR: // -// #include +// #include +// +// struct rust_panic { +// uint64_t x[2]; +// } // // void foo() { -// uint64_t a[2] = {0, 1}; +// rust_panic a = {0, 1}; // throw a; // } // // That's essentially what we're trying to emulate. Most of the constant values -// below were just copied from LLVM, I'm at least not 100% sure what's going on -// everywhere. For example the `.PA_K\0` and `.PEA_K\0` strings below (stuck in -// the names of a few of these) I'm not actually sure what they do, but it seems -// to mirror what LLVM does! +// below were just copied from LLVM, // // In any case, these structures are all constructed in a similar manner, and // it's just somewhat verbose for us. @@ -98,10 +97,9 @@ use libc::{c_int, c_uint}; #[macro_use] mod imp { pub type ptr_t = *mut u8; - pub const OFFSET: i32 = 4; + #[cfg(bootstrap)] pub const NAME1: [u8; 7] = [b'.', b'P', b'A', b'_', b'K', 0, 0]; - pub const NAME2: [u8; 7] = [b'.', b'P', b'A', b'X', 0, 0, 0]; macro_rules! ptr { (0) => (core::ptr::null_mut()); @@ -113,10 +111,9 @@ mod imp { #[macro_use] mod imp { pub type ptr_t = u32; - pub const OFFSET: i32 = 8; + #[cfg(bootstrap)] pub const NAME1: [u8; 7] = [b'.', b'P', b'E', b'A', b'_', b'K', 0]; - pub const NAME2: [u8; 7] = [b'.', b'P', b'E', b'A', b'X', 0, 0]; extern "C" { pub static __ImageBase: u8; @@ -141,7 +138,7 @@ pub struct _ThrowInfo { #[repr(C)] pub struct _CatchableTypeArray { pub nCatchableTypes: c_int, - pub arrayOfCatchableTypes: [imp::ptr_t; 2], + pub arrayOfCatchableTypes: [imp::ptr_t; 1], } #[repr(C)] @@ -164,9 +161,19 @@ pub struct _PMD { pub struct _TypeDescriptor { pub pVFTable: *const u8, pub spare: *mut u8, + #[cfg(bootstrap)] pub name: [u8; 7], + #[cfg(not(bootstrap))] + pub name: [u8; 11], } +// Note that we intentionally ignore name mangling rules here: we don't want C++ +// to be able to catch Rust panics by simply declaring a `struct rust_panic`. 
+#[cfg(bootstrap)] +use imp::NAME1 as TYPE_NAME; +#[cfg(not(bootstrap))] +const TYPE_NAME: [u8; 11] = *b"rust_panic\0"; + static mut THROW_INFO: _ThrowInfo = _ThrowInfo { attributes: 0, pnfnUnwind: ptr!(0), @@ -175,31 +182,22 @@ static mut THROW_INFO: _ThrowInfo = _ThrowInfo { }; static mut CATCHABLE_TYPE_ARRAY: _CatchableTypeArray = _CatchableTypeArray { - nCatchableTypes: 2, - arrayOfCatchableTypes: [ptr!(0), ptr!(0)], + nCatchableTypes: 1, + arrayOfCatchableTypes: [ptr!(0)], }; -static mut CATCHABLE_TYPE1: _CatchableType = _CatchableType { - properties: 1, +static mut CATCHABLE_TYPE: _CatchableType = _CatchableType { + properties: 0, pType: ptr!(0), thisDisplacement: _PMD { mdisp: 0, pdisp: -1, vdisp: 0, }, - sizeOrOffset: imp::OFFSET, - copy_function: ptr!(0), -}; - -static mut CATCHABLE_TYPE2: _CatchableType = _CatchableType { - properties: 1, - pType: ptr!(0), - thisDisplacement: _PMD { - mdisp: 0, - pdisp: -1, - vdisp: 0, - }, - sizeOrOffset: imp::OFFSET, + #[cfg(bootstrap)] + sizeOrOffset: mem::size_of::<*mut u64>() as c_int, + #[cfg(not(bootstrap))] + sizeOrOffset: mem::size_of::<[u64; 2]>() as c_int, copy_function: ptr!(0), }; @@ -215,22 +213,17 @@ extern "C" { static TYPE_INFO_VTABLE: *const u8; } -// We use #[lang = "msvc_try_filter"] here as this is the type descriptor which +// We use #[lang = "eh_catch_typeinfo"] here as this is the type descriptor which // we'll use in LLVM's `catchpad` instruction which ends up also being passed as // an argument to the C++ personality function. // // Again, I'm not entirely sure what this is describing, it just seems to work. -#[cfg_attr(not(test), lang = "msvc_try_filter")] -static mut TYPE_DESCRIPTOR1: _TypeDescriptor = _TypeDescriptor { +#[cfg_attr(bootstrap, lang = "msvc_try_filter")] +#[cfg_attr(not(any(test, bootstrap)), lang = "eh_catch_typeinfo")] +static mut TYPE_DESCRIPTOR: _TypeDescriptor = _TypeDescriptor { pVFTable: unsafe { &TYPE_INFO_VTABLE } as *const _ as *const _, spare: core::ptr::null_mut(), - name: imp::NAME1, -}; - -static mut TYPE_DESCRIPTOR2: _TypeDescriptor = _TypeDescriptor { - pVFTable: unsafe { &TYPE_INFO_VTABLE } as *const _ as *const _, - spare: core::ptr::null_mut(), - name: imp::NAME2, + name: TYPE_NAME, }; pub unsafe fn panic(data: Box) -> u32 { @@ -246,6 +239,11 @@ pub unsafe fn panic(data: Box) -> u32 { let ptrs = mem::transmute::<_, raw::TraitObject>(data); let mut ptrs = [ptrs.data as u64, ptrs.vtable as u64]; let mut ptrs_ptr = ptrs.as_mut_ptr(); + let throw_ptr = if cfg!(bootstrap) { + &mut ptrs_ptr as *mut _ as *mut _ + } else { + ptrs_ptr as *mut _ + }; // This... may seems surprising, and justifiably so. On 32-bit MSVC the // pointers between these structure are just that, pointers. 
On 64-bit MSVC, @@ -270,17 +268,17 @@ pub unsafe fn panic(data: Box) -> u32 { atomic_store(&mut THROW_INFO.pCatchableTypeArray as *mut _ as *mut u32, ptr!(&CATCHABLE_TYPE_ARRAY as *const _) as u32); atomic_store(&mut CATCHABLE_TYPE_ARRAY.arrayOfCatchableTypes[0] as *mut _ as *mut u32, - ptr!(&CATCHABLE_TYPE1 as *const _) as u32); - atomic_store(&mut CATCHABLE_TYPE_ARRAY.arrayOfCatchableTypes[1] as *mut _ as *mut u32, - ptr!(&CATCHABLE_TYPE2 as *const _) as u32); - atomic_store(&mut CATCHABLE_TYPE1.pType as *mut _ as *mut u32, - ptr!(&TYPE_DESCRIPTOR1 as *const _) as u32); - atomic_store(&mut CATCHABLE_TYPE2.pType as *mut _ as *mut u32, - ptr!(&TYPE_DESCRIPTOR2 as *const _) as u32); + ptr!(&CATCHABLE_TYPE as *const _) as u32); + atomic_store(&mut CATCHABLE_TYPE.pType as *mut _ as *mut u32, + ptr!(&TYPE_DESCRIPTOR as *const _) as u32); - c::_CxxThrowException(&mut ptrs_ptr as *mut _ as *mut _, - &mut THROW_INFO as *mut _ as *mut _); - u32::max_value() + extern "system" { + #[unwind(allowed)] + pub fn _CxxThrowException(pExceptionObject: *mut c_void, pThrowInfo: *mut u8) -> !; + } + + _CxxThrowException(throw_ptr, + &mut THROW_INFO as *mut _ as *mut _); } pub fn payload() -> [u64; 2] { diff --git a/src/libpanic_unwind/seh64_gnu.rs b/src/libpanic_unwind/seh64_gnu.rs deleted file mode 100644 index 457ffcd34f..0000000000 --- a/src/libpanic_unwind/seh64_gnu.rs +++ /dev/null @@ -1,127 +0,0 @@ -//! Unwinding implementation of top of native Win64 SEH, -//! however the unwind handler data (aka LSDA) uses GCC-compatible encoding. - -#![allow(nonstandard_style)] -#![allow(private_no_mangle_fns)] - -use alloc::boxed::Box; - -use core::any::Any; -use core::intrinsics; -use core::ptr; -use crate::dwarf::eh::{EHContext, EHAction, find_eh_action}; -use crate::windows as c; - -// Define our exception codes: -// according to http://msdn.microsoft.com/en-us/library/het71c37(v=VS.80).aspx, -// [31:30] = 3 (error), 2 (warning), 1 (info), 0 (success) -// [29] = 1 (user-defined) -// [28] = 0 (reserved) -// we define bits: -// [24:27] = type -// [0:23] = magic -const ETYPE: c::DWORD = 0b1110_u32 << 28; -const MAGIC: c::DWORD = 0x525354; // "RST" - -const RUST_PANIC: c::DWORD = ETYPE | (1 << 24) | MAGIC; - -#[repr(C)] -struct PanicData { - data: Box, -} - -pub unsafe fn panic(data: Box) -> u32 { - let panic_ctx = Box::new(PanicData { data }); - let params = [Box::into_raw(panic_ctx) as c::ULONG_PTR]; - c::RaiseException(RUST_PANIC, - c::EXCEPTION_NONCONTINUABLE, - params.len() as c::DWORD, - ¶ms as *const c::ULONG_PTR); - u32::max_value() -} - -pub fn payload() -> *mut u8 { - ptr::null_mut() -} - -pub unsafe fn cleanup(ptr: *mut u8) -> Box { - let panic_ctx = Box::from_raw(ptr as *mut PanicData); - return panic_ctx.data; -} - -// SEH doesn't support resuming unwinds after calling a landing pad like -// libunwind does. For this reason, MSVC compiler outlines landing pads into -// separate functions that can be called directly from the personality function -// but are nevertheless able to find and modify stack frame of the "parent" -// function. -// -// Since this cannot be done with libdwarf-style landing pads, -// rust_eh_personality instead catches RUST_PANICs, runs the landing pad, then -// reraises the exception. -// -// Note that it makes certain assumptions about the exception: -// -// 1. That RUST_PANIC is non-continuable, so no lower stack frame may choose to -// resume execution. -// 2. That the first parameter of the exception is a pointer to an extra data -// area (PanicData). 
-// Since these assumptions do not generally hold true for foreign exceptions -// (system faults, C++ exceptions, etc), we make no attempt to invoke our -// landing pads (and, thus, destructors!) for anything other than RUST_PANICs. -// This is considered acceptable, because the behavior of throwing exceptions -// through a C ABI boundary is undefined. - -#[lang = "eh_personality"] -#[cfg(not(test))] -unsafe extern "C" fn rust_eh_personality(exceptionRecord: *mut c::EXCEPTION_RECORD, - establisherFrame: c::LPVOID, - contextRecord: *mut c::CONTEXT, - dispatcherContext: *mut c::DISPATCHER_CONTEXT) - -> c::EXCEPTION_DISPOSITION { - let er = &*exceptionRecord; - let dc = &*dispatcherContext; - - if er.ExceptionFlags & c::EXCEPTION_UNWIND == 0 { - // we are in the dispatch phase - if er.ExceptionCode == RUST_PANIC { - if let Some(lpad) = find_landing_pad(dc) { - c::RtlUnwindEx(establisherFrame, - lpad as c::LPVOID, - exceptionRecord, - er.ExceptionInformation[0] as c::LPVOID, // pointer to PanicData - contextRecord, - dc.HistoryTable); - } - } - } - c::ExceptionContinueSearch -} - -#[lang = "eh_unwind_resume"] -#[unwind(allowed)] -unsafe extern "C" fn rust_eh_unwind_resume(panic_ctx: c::LPVOID) -> ! { - let params = [panic_ctx as c::ULONG_PTR]; - c::RaiseException(RUST_PANIC, - c::EXCEPTION_NONCONTINUABLE, - params.len() as c::DWORD, - ¶ms as *const c::ULONG_PTR); - intrinsics::abort(); -} - -unsafe fn find_landing_pad(dc: &c::DISPATCHER_CONTEXT) -> Option { - let eh_ctx = EHContext { - // The return address points 1 byte past the call instruction, - // which could be in the next IP range in LSDA range table. - ip: dc.ControlPc as usize - 1, - func_start: dc.ImageBase as usize + (*dc.FunctionEntry).BeginAddress as usize, - get_text_start: &|| dc.ImageBase as usize, - get_data_start: &|| unimplemented!(), - }; - match find_eh_action(dc.HandlerData, &eh_ctx) { - Err(_) | - Ok(EHAction::None) => None, - Ok(EHAction::Cleanup(lpad)) | - Ok(EHAction::Catch(lpad)) => Some(lpad), - Ok(EHAction::Terminate) => intrinsics::abort(), - } -} diff --git a/src/libpanic_unwind/windows.rs b/src/libpanic_unwind/windows.rs deleted file mode 100644 index 3257a9d25a..0000000000 --- a/src/libpanic_unwind/windows.rs +++ /dev/null @@ -1,86 +0,0 @@ -#![allow(nonstandard_style)] -#![allow(dead_code)] -#![cfg(windows)] - -use libc::{c_long, c_ulong, c_void}; - -pub type DWORD = c_ulong; -pub type LONG = c_long; -pub type ULONG_PTR = usize; -pub type LPVOID = *mut c_void; - -pub const EXCEPTION_MAXIMUM_PARAMETERS: usize = 15; -pub const EXCEPTION_NONCONTINUABLE: DWORD = 0x1; // Noncontinuable exception -pub const EXCEPTION_UNWINDING: DWORD = 0x2; // Unwind is in progress -pub const EXCEPTION_EXIT_UNWIND: DWORD = 0x4; // Exit unwind is in progress -pub const EXCEPTION_TARGET_UNWIND: DWORD = 0x20; // Target unwind in progress -pub const EXCEPTION_COLLIDED_UNWIND: DWORD = 0x40; // Collided exception handler call -pub const EXCEPTION_UNWIND: DWORD = EXCEPTION_UNWINDING | EXCEPTION_EXIT_UNWIND | - EXCEPTION_TARGET_UNWIND | - EXCEPTION_COLLIDED_UNWIND; - -#[repr(C)] -pub struct EXCEPTION_RECORD { - pub ExceptionCode: DWORD, - pub ExceptionFlags: DWORD, - pub ExceptionRecord: *mut EXCEPTION_RECORD, - pub ExceptionAddress: LPVOID, - pub NumberParameters: DWORD, - pub ExceptionInformation: [LPVOID; EXCEPTION_MAXIMUM_PARAMETERS], -} - -#[repr(C)] -pub struct EXCEPTION_POINTERS { - pub ExceptionRecord: *mut EXCEPTION_RECORD, - pub ContextRecord: *mut CONTEXT, -} - -pub enum UNWIND_HISTORY_TABLE {} - -#[repr(C)] -pub struct 
RUNTIME_FUNCTION { - pub BeginAddress: DWORD, - pub EndAddress: DWORD, - pub UnwindData: DWORD, -} - -pub enum CONTEXT {} - -#[repr(C)] -pub struct DISPATCHER_CONTEXT { - pub ControlPc: LPVOID, - pub ImageBase: LPVOID, - pub FunctionEntry: *const RUNTIME_FUNCTION, - pub EstablisherFrame: LPVOID, - pub TargetIp: LPVOID, - pub ContextRecord: *const CONTEXT, - pub LanguageHandler: LPVOID, - pub HandlerData: *const u8, - pub HistoryTable: *const UNWIND_HISTORY_TABLE, -} - -#[repr(C)] -pub enum EXCEPTION_DISPOSITION { - ExceptionContinueExecution, - ExceptionContinueSearch, - ExceptionNestedException, - ExceptionCollidedUnwind, -} -pub use self::EXCEPTION_DISPOSITION::*; - -extern "system" { - #[unwind(allowed)] - pub fn RaiseException(dwExceptionCode: DWORD, - dwExceptionFlags: DWORD, - nNumberOfArguments: DWORD, - lpArguments: *const ULONG_PTR); - #[unwind(allowed)] - pub fn RtlUnwindEx(TargetFrame: LPVOID, - TargetIp: LPVOID, - ExceptionRecord: *const EXCEPTION_RECORD, - ReturnValue: LPVOID, - OriginalContext: *const CONTEXT, - HistoryTable: *const UNWIND_HISTORY_TABLE); - #[unwind(allowed)] - pub fn _CxxThrowException(pExceptionObject: *mut c_void, pThrowInfo: *mut u8); -} diff --git a/src/libproc_macro/bridge/client.rs b/src/libproc_macro/bridge/client.rs index 5c543165bc..9643dba997 100644 --- a/src/libproc_macro/bridge/client.rs +++ b/src/libproc_macro/bridge/client.rs @@ -15,8 +15,9 @@ macro_rules! define_handles { } impl HandleCounters { - // FIXME(#53451) public to work around `Cannot create local mono-item` ICE. - pub extern "C" fn get() -> &'static Self { + // FIXME(eddyb) use a reference to the `static COUNTERS`, intead of + // a wrapper `fn` pointer, once `const fn` can reference `static`s. + extern "C" fn get() -> &'static Self { static COUNTERS: HandleCounters = HandleCounters { $($oty: AtomicUsize::new(1),)* $($ity: AtomicUsize::new(1),)* @@ -333,16 +334,19 @@ impl Bridge<'_> { #[repr(C)] #[derive(Copy, Clone)] pub struct Client { + // FIXME(eddyb) use a reference to the `static COUNTERS`, intead of + // a wrapper `fn` pointer, once `const fn` can reference `static`s. pub(super) get_handle_counters: extern "C" fn() -> &'static HandleCounters, pub(super) run: extern "C" fn(Bridge<'_>, F) -> Buffer, pub(super) f: F, } -// FIXME(#53451) public to work around `Cannot create local mono-item` ICE, -// affecting not only the function itself, but also the `BridgeState` `thread_local!`. -pub extern "C" fn __run_expand1( +/// Client-side helper for handling client panics, entering the bridge, +/// deserializing input and serializing output. +// FIXME(eddyb) maybe replace `Bridge::enter` with this? +fn run_client DecodeMut<'a, 's, ()>, R: Encode<()>>( mut bridge: Bridge<'_>, - f: fn(crate::TokenStream) -> crate::TokenStream, + f: impl FnOnce(A) -> R, ) -> Buffer { // The initial `cached_buffer` contains the input. let mut b = bridge.cached_buffer.take(); @@ -350,12 +354,12 @@ pub extern "C" fn __run_expand1( panic::catch_unwind(panic::AssertUnwindSafe(|| { bridge.enter(|| { let reader = &mut &b[..]; - let input = TokenStream::decode(reader, &mut ()); + let input = A::decode(reader, &mut ()); // Put the `cached_buffer` back in the `Bridge`, for requests. Bridge::with(|bridge| bridge.cached_buffer = b.take()); - let output = f(crate::TokenStream(input)).0; + let output = f(input); // Take the `cached_buffer` back out, for the output value. 
b = Bridge::with(|bridge| bridge.cached_buffer.take()); @@ -383,65 +387,35 @@ pub extern "C" fn __run_expand1( impl Client crate::TokenStream> { pub const fn expand1(f: fn(crate::TokenStream) -> crate::TokenStream) -> Self { + extern "C" fn run( + bridge: Bridge<'_>, + f: impl FnOnce(crate::TokenStream) -> crate::TokenStream, + ) -> Buffer { + run_client(bridge, |input| f(crate::TokenStream(input)).0) + } Client { get_handle_counters: HandleCounters::get, - run: __run_expand1, + run, f, } } } -// FIXME(#53451) public to work around `Cannot create local mono-item` ICE, -// affecting not only the function itself, but also the `BridgeState` `thread_local!`. -pub extern "C" fn __run_expand2( - mut bridge: Bridge<'_>, - f: fn(crate::TokenStream, crate::TokenStream) -> crate::TokenStream, -) -> Buffer { - // The initial `cached_buffer` contains the input. - let mut b = bridge.cached_buffer.take(); - - panic::catch_unwind(panic::AssertUnwindSafe(|| { - bridge.enter(|| { - let reader = &mut &b[..]; - let input = TokenStream::decode(reader, &mut ()); - let input2 = TokenStream::decode(reader, &mut ()); - - // Put the `cached_buffer` back in the `Bridge`, for requests. - Bridge::with(|bridge| bridge.cached_buffer = b.take()); - - let output = f(crate::TokenStream(input), crate::TokenStream(input2)).0; - - // Take the `cached_buffer` back out, for the output value. - b = Bridge::with(|bridge| bridge.cached_buffer.take()); - - // HACK(eddyb) Separate encoding a success value (`Ok(output)`) - // from encoding a panic (`Err(e: PanicMessage)`) to avoid - // having handles outside the `bridge.enter(|| ...)` scope, and - // to catch panics that could happen while encoding the success. - // - // Note that panics should be impossible beyond this point, but - // this is defensively trying to avoid any accidental panicking - // reaching the `extern "C"` (which should `abort` but may not - // at the moment, so this is also potentially preventing UB). - b.clear(); - Ok::<_, ()>(output).encode(&mut b, &mut ()); - }) - })) - .map_err(PanicMessage::from) - .unwrap_or_else(|e| { - b.clear(); - Err::<(), _>(e).encode(&mut b, &mut ()); - }); - b -} - impl Client crate::TokenStream> { pub const fn expand2( f: fn(crate::TokenStream, crate::TokenStream) -> crate::TokenStream ) -> Self { + extern "C" fn run( + bridge: Bridge<'_>, + f: impl FnOnce(crate::TokenStream, crate::TokenStream) -> crate::TokenStream, + ) -> Buffer { + run_client(bridge, |(input, input2)| { + f(crate::TokenStream(input), crate::TokenStream(input2)).0 + }) + } Client { get_handle_counters: HandleCounters::get, - run: __run_expand2, + run, f, } } diff --git a/src/libproc_macro/bridge/mod.rs b/src/libproc_macro/bridge/mod.rs index 3c48466fff..c26b59f473 100644 --- a/src/libproc_macro/bridge/mod.rs +++ b/src/libproc_macro/bridge/mod.rs @@ -148,6 +148,7 @@ macro_rules! 
with_api { fn debug($self: $S::Span) -> String; fn def_site() -> $S::Span; fn call_site() -> $S::Span; + fn mixed_site() -> $S::Span; fn source_file($self: $S::Span) -> $S::SourceFile; fn parent($self: $S::Span) -> Option<$S::Span>; fn source($self: $S::Span) -> $S::Span; diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs index d408fef751..6166561d87 100644 --- a/src/libproc_macro/lib.rs +++ b/src/libproc_macro/lib.rs @@ -25,8 +25,7 @@ #![feature(extern_types)] #![feature(in_band_lifetimes)] #![feature(optin_builtin_traits)] -#![feature(mem_take)] -#![feature(non_exhaustive)] +#![cfg_attr(bootstrap, feature(non_exhaustive))] #![feature(rustc_attrs)] #![feature(specialization)] @@ -227,7 +226,7 @@ pub mod token_stream { /// To quote `$` itself, use `$$`. #[unstable(feature = "proc_macro_quote", issue = "54722")] #[allow_internal_unstable(proc_macro_def_site)] -#[cfg_attr(not(bootstrap), rustc_builtin_macro)] +#[rustc_builtin_macro] pub macro quote ($($t:tt)*) { /* compiler built-in */ } #[unstable(feature = "proc_macro_internals", issue = "27812")] @@ -271,6 +270,15 @@ impl Span { Span(bridge::client::Span::call_site()) } + /// A span that represents `macro_rules` hygiene, and sometimes resolves at the macro + /// definition site (local variables, labels, `$crate`) and sometimes at the macro + /// call site (everything else). + /// The span location is taken from the call-site. + #[unstable(feature = "proc_macro_mixed_site", issue = "65049")] + pub fn mixed_site() -> Span { + Span(bridge::client::Span::mixed_site()) + } + /// The original source file into which this span points. #[unstable(feature = "proc_macro_span", issue = "54725")] pub fn source_file(&self) -> SourceFile { diff --git a/src/librustc/Cargo.toml b/src/librustc/Cargo.toml index 0834faf132..de67f46eba 100644 --- a/src/librustc/Cargo.toml +++ b/src/librustc/Cargo.toml @@ -10,29 +10,34 @@ path = "lib.rs" doctest = false [dependencies] +# Prevent cc from upgrading all the way to 1.0.46, +# which fails the build (see e.g. #65445.) +cc = "=1.0.37" + arena = { path = "../libarena" } -bitflags = "1.0" +bitflags = "1.2.1" fmt_macros = { path = "../libfmt_macros" } graphviz = { path = "../libgraphviz" } jobserver = "0.1" num_cpus = "1.0" scoped-tls = "1.0" log = { version = "0.4", features = ["release_max_level_info", "std"] } -rustc-rayon = "0.2.0" -rustc-rayon-core = "0.2.0" +rustc-rayon = "0.3.0" +rustc-rayon-core = "0.3.0" polonius-engine = "0.10.0" rustc_apfloat = { path = "../librustc_apfloat" } rustc_target = { path = "../librustc_target" } rustc_macros = { path = "../librustc_macros" } rustc_data_structures = { path = "../librustc_data_structures" } +rustc_index = { path = "../librustc_index" } errors = { path = "../librustc_errors", package = "rustc_errors" } rustc_serialize = { path = "../libserialize", package = "serialize" } syntax = { path = "../libsyntax" } syntax_pos = { path = "../libsyntax_pos" } -backtrace = "0.3.3" +backtrace = "0.3.40" parking_lot = "0.9" byteorder = { version = "1.3" } chalk-engine = { version = "0.9.0", default-features=false } rustc_fs_util = { path = "../librustc_fs_util" } -smallvec = { version = "0.6.7", features = ["union", "may_dangle"] } -measureme = "0.3" +smallvec = { version = "0.6.8", features = ["union", "may_dangle"] } +measureme = "0.4" diff --git a/src/librustc/arena.rs b/src/librustc/arena.rs index d4fc1b1283..3daf0fc9df 100644 --- a/src/librustc/arena.rs +++ b/src/librustc/arena.rs @@ -26,12 +26,12 @@ macro_rules! 
arena_types { [] steal_mir: rustc::ty::steal::Steal>, [] mir: rustc::mir::Body<$tcx>, [] steal_promoted: rustc::ty::steal::Steal< - rustc_data_structures::indexed_vec::IndexVec< + rustc_index::vec::IndexVec< rustc::mir::Promoted, rustc::mir::Body<$tcx> > >, - [] promoted: rustc_data_structures::indexed_vec::IndexVec< + [] promoted: rustc_index::vec::IndexVec< rustc::mir::Promoted, rustc::mir::Body<$tcx> >, @@ -45,7 +45,7 @@ macro_rules! arena_types { [decode] specialization_graph: rustc::traits::specialization_graph::Graph, [] region_scope_tree: rustc::middle::region::ScopeTree, [] item_local_set: rustc::util::nodemap::ItemLocalSet, - [decode] mir_const_qualif: rustc_data_structures::bit_set::BitSet, + [decode] mir_const_qualif: rustc_index::bit_set::BitSet, [] trait_impls_of: rustc::ty::trait_def::TraitImpls, [] dropck_outlives: rustc::infer::canonical::Canonical<'tcx, @@ -86,7 +86,6 @@ macro_rules! arena_types { rustc::infer::canonical::QueryResponse<'tcx, rustc::ty::Ty<'tcx>> >, [few] crate_inherent_impls: rustc::ty::CrateInherentImpls, - [decode] borrowck: rustc::middle::borrowck::BorrowCheckResult, [few] upstream_monomorphizations: rustc::util::nodemap::DefIdMap< rustc_data_structures::fx::FxHashMap< @@ -99,7 +98,6 @@ macro_rules! arena_types { rustc::hir::def_id::DefId, >, [few] resolve_lifetimes: rustc::middle::resolve_lifetime::ResolveLifetimes, - [decode] generic_predicates: rustc::ty::GenericPredicates<'tcx>, [few] lint_levels: rustc::lint::LintLevelMap, [few] stability_index: rustc::middle::stability::Index<'tcx>, [few] features: syntax::feature_gate::Features, diff --git a/src/librustc/dep_graph/dep_node.rs b/src/librustc/dep_graph/dep_node.rs index 3d5e7dd0af..dea8d70aaf 100644 --- a/src/librustc/dep_graph/dep_node.rs +++ b/src/librustc/dep_graph/dep_node.rs @@ -59,7 +59,7 @@ use crate::ich::{Fingerprint, StableHashingContext}; use rustc_data_structures::stable_hasher::{StableHasher, HashStable}; use std::fmt; use std::hash::Hash; -use syntax_pos::symbol::InternedString; +use syntax_pos::symbol::Symbol; use crate::traits; use crate::traits::query::{ CanonicalProjectionGoal, CanonicalTyGoal, CanonicalTypeOpAscribeUserTypeGoal, @@ -114,7 +114,6 @@ macro_rules! define_dep_nodes { impl DepKind { #[allow(unreachable_code)] - #[inline] pub fn can_reconstruct_query_key<$tcx>(&self) -> bool { match *self { $( @@ -150,7 +149,6 @@ macro_rules! define_dep_nodes { } } - #[inline(always)] pub fn is_eval_always(&self) -> bool { match *self { $( @@ -199,7 +197,6 @@ macro_rules! define_dep_nodes { impl DepNode { #[allow(unreachable_code, non_snake_case)] - #[inline(always)] pub fn new<'tcx>(tcx: TyCtxt<'tcx>, dep: DepConstructor<'tcx>) -> DepNode @@ -219,14 +216,16 @@ macro_rules! define_dep_nodes { hash }; - if cfg!(debug_assertions) && - !dep_node.kind.can_reconstruct_query_key() && - (tcx.sess.opts.debugging_opts.incremental_info || - tcx.sess.opts.debugging_opts.query_dep_graph) + #[cfg(debug_assertions)] { - tcx.dep_graph.register_dep_node_debug_str(dep_node, || { - arg.to_debug_str(tcx) - }); + if !dep_node.kind.can_reconstruct_query_key() && + (tcx.sess.opts.debugging_opts.incremental_info || + tcx.sess.opts.debugging_opts.query_dep_graph) + { + tcx.dep_graph.register_dep_node_debug_str(dep_node, || { + arg.to_debug_str(tcx) + }); + } } return dep_node; @@ -242,14 +241,16 @@ macro_rules! 
define_dep_nodes { hash }; - if cfg!(debug_assertions) && - !dep_node.kind.can_reconstruct_query_key() && - (tcx.sess.opts.debugging_opts.incremental_info || - tcx.sess.opts.debugging_opts.query_dep_graph) + #[cfg(debug_assertions)] { - tcx.dep_graph.register_dep_node_debug_str(dep_node, || { - tupled_args.to_debug_str(tcx) - }); + if !dep_node.kind.can_reconstruct_query_key() && + (tcx.sess.opts.debugging_opts.incremental_info || + tcx.sess.opts.debugging_opts.query_dep_graph) + { + tcx.dep_graph.register_dep_node_debug_str(dep_node, || { + tupled_args.to_debug_str(tcx) + }); + } } return dep_node; @@ -267,7 +268,6 @@ macro_rules! define_dep_nodes { /// Construct a DepNode from the given DepKind and DefPathHash. This /// method will assert that the given DepKind actually requires a /// single DefId/DefPathHash parameter. - #[inline(always)] pub fn from_def_path_hash(kind: DepKind, def_path_hash: DefPathHash) -> DepNode { @@ -281,7 +281,6 @@ macro_rules! define_dep_nodes { /// Creates a new, parameterless DepNode. This method will assert /// that the DepNode corresponding to the given DepKind actually /// does not require any parameters. - #[inline(always)] pub fn new_no_params(kind: DepKind) -> DepNode { debug_assert!(!kind.has_params()); DepNode { @@ -300,7 +299,6 @@ macro_rules! define_dep_nodes { /// DepNode. Condition (2) might not be fulfilled if a DepNode /// refers to something from the previous compilation session that /// has been removed. - #[inline] pub fn extract_def_id(&self, tcx: TyCtxt<'_>) -> Option { if self.kind.can_reconstruct_query_key() { let def_path_hash = DefPathHash(self.hash); @@ -386,14 +384,12 @@ impl fmt::Debug for DepNode { impl DefPathHash { - #[inline(always)] pub fn to_dep_node(self, kind: DepKind) -> DepNode { DepNode::from_def_path_hash(kind, self) } } impl DefId { - #[inline(always)] pub fn to_dep_node(self, tcx: TyCtxt<'_>, kind: DepKind) -> DepNode { DepNode::from_def_path_hash(kind, tcx.def_path_hash(self)) } @@ -430,7 +426,7 @@ rustc_dep_node_append!([define_dep_nodes!][ <'tcx> [anon] TraitSelect, - [] CompileCodegenUnit(InternedString), + [] CompileCodegenUnit(Symbol), [eval_always] Analysis(CrateNum), ]); diff --git a/src/librustc/dep_graph/dep_tracking_map.rs b/src/librustc/dep_graph/dep_tracking_map.rs deleted file mode 100644 index ee22d0b755..0000000000 --- a/src/librustc/dep_graph/dep_tracking_map.rs +++ /dev/null @@ -1,87 +0,0 @@ -use rustc_data_structures::fx::FxHashMap; -use std::cell::RefCell; -use std::hash::Hash; -use std::marker::PhantomData; -use crate::util::common::MemoizationMap; - -use super::{DepKind, DepNodeIndex, DepGraph}; - -/// A DepTrackingMap offers a subset of the `Map` API and ensures that -/// we make calls to `read` and `write` as appropriate. We key the -/// maps with a unique type for brevity. -pub struct DepTrackingMap { - phantom: PhantomData, - graph: DepGraph, - map: FxHashMap, -} - -pub trait DepTrackingMapConfig { - type Key: Eq + Hash + Clone; - type Value: Clone; - fn to_dep_kind() -> DepKind; -} - -impl DepTrackingMap { - pub fn new(graph: DepGraph) -> DepTrackingMap { - DepTrackingMap { - phantom: PhantomData, - graph, - map: Default::default(), - } - } -} - -impl MemoizationMap for RefCell> { - type Key = M::Key; - type Value = M::Value; - - /// Memoizes an entry in the dep-tracking-map. If the entry is not - /// already present, then `op` will be executed to compute its value. 
- /// The resulting dependency graph looks like this: - /// - /// [op] -> Map(key) -> CurrentTask - /// - /// Here, `[op]` represents whatever nodes `op` reads in the - /// course of execution; `Map(key)` represents the node for this - /// map, and `CurrentTask` represents the current task when - /// `memoize` is invoked. - /// - /// **Important:** when `op` is invoked, the current task will be - /// switched to `Map(key)`. Therefore, if `op` makes use of any - /// HIR nodes or shared state accessed through its closure - /// environment, it must explicitly register a read of that - /// state. As an example, see `type_of_item` in `collect`, - /// which looks something like this: - /// - /// ``` - /// fn type_of_item(..., item: &hir::Item) -> Ty<'tcx> { - /// let item_def_id = ccx.tcx.hir().local_def_id(it.hir_id); - /// ccx.tcx.item_types.memoized(item_def_id, || { - /// ccx.tcx.dep_graph.read(DepNode::Hir(item_def_id)); // (*) - /// compute_type_of_item(ccx, item) - /// }); - /// } - /// ``` - /// - /// The key is the line marked `(*)`: the closure implicitly - /// accesses the body of the item `item`, so we register a read - /// from `Hir(item_def_id)`. - fn memoize(&self, key: M::Key, op: OP) -> M::Value - where OP: FnOnce() -> M::Value - { - let graph; - { - let this = self.borrow(); - if let Some(&(ref result, dep_node)) = this.map.get(&key) { - this.graph.read_index(dep_node); - return result.clone(); - } - graph = this.graph.clone(); - } - - let (result, dep_node) = graph.with_anon_task(M::to_dep_kind(), op); - self.borrow_mut().map.insert(key, (result.clone(), dep_node)); - graph.read_index(dep_node); - result - } -} diff --git a/src/librustc/dep_graph/graph.rs b/src/librustc/dep_graph/graph.rs index e76a70350b..0104507f70 100644 --- a/src/librustc/dep_graph/graph.rs +++ b/src/librustc/dep_graph/graph.rs @@ -1,15 +1,16 @@ use errors::Diagnostic; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; -use rustc_data_structures::indexed_vec::{Idx, IndexVec}; +use rustc_index::vec::{Idx, IndexVec}; use smallvec::SmallVec; -use rustc_data_structures::sync::{Lrc, Lock, AtomicU32, Ordering}; +use rustc_data_structures::sync::{Lrc, Lock, AtomicU32, AtomicU64, Ordering}; +use rustc_data_structures::sharded::{self, Sharded}; +use std::sync::atomic::Ordering::SeqCst; use std::env; use std::hash::Hash; use std::collections::hash_map::Entry; use std::mem; use crate::ty::{self, TyCtxt}; -use crate::util::common::{ProfileQueriesMsg, profq_msg}; use parking_lot::{Mutex, Condvar}; use crate::ich::{StableHashingContext, StableHashingContextProvider, Fingerprint}; @@ -26,15 +27,15 @@ pub struct DepGraph { data: Option>, } -newtype_index! { +rustc_index::newtype_index! { pub struct DepNodeIndex { .. } } impl DepNodeIndex { - const INVALID: DepNodeIndex = DepNodeIndex::MAX; + pub const INVALID: DepNodeIndex = DepNodeIndex::MAX; } -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +#[derive(PartialEq)] pub enum DepNodeColor { Red, Green(DepNodeIndex) @@ -54,7 +55,7 @@ struct DepGraphData { /// tracking. The `current` field is the dependency graph of only the /// current compilation session: We don't merge the previous dep-graph into /// current one anymore. - current: Lock, + current: CurrentDepGraph, /// The dep-graph from the previous compilation session. It contains all /// nodes and edges as well as all fingerprints of nodes that have them. 
@@ -75,9 +76,6 @@ struct DepGraphData { previous_work_products: FxHashMap, dep_node_debug: Lock>, - - // Used for testing, only populated when -Zquery-dep-graph is specified. - loaded_from_cache: Lock>, } pub fn hash_result(hcx: &mut StableHashingContext<'_>, result: &R) -> Option @@ -99,12 +97,11 @@ impl DepGraph { data: Some(Lrc::new(DepGraphData { previous_work_products: prev_work_products, dep_node_debug: Default::default(), - current: Lock::new(CurrentDepGraph::new(prev_graph_node_count)), + current: CurrentDepGraph::new(prev_graph_node_count), emitting_diagnostics: Default::default(), emitting_diagnostics_cond_var: Condvar::new(), previous: prev_graph, colors: DepNodeColorMap::new(prev_graph_node_count), - loaded_from_cache: Default::default(), })), } } @@ -122,13 +119,12 @@ impl DepGraph { } pub fn query(&self) -> DepGraphQuery { - let current_dep_graph = self.data.as_ref().unwrap().current.borrow(); - let nodes: Vec<_> = current_dep_graph.data.iter().map(|n| n.node).collect(); + let data = self.data.as_ref().unwrap().current.data.lock(); + let nodes: Vec<_> = data.iter().map(|n| n.node).collect(); let mut edges = Vec::new(); - for (from, edge_targets) in current_dep_graph.data.iter() - .map(|d| (d.node, &d.edges)) { + for (from, edge_targets) in data.iter().map(|d| (d.node, &d.edges)) { for &edge_target in edge_targets.iter() { - let to = current_dep_graph.data[edge_target].node; + let to = data[edge_target].node; edges.push((from, to)); } } @@ -207,7 +203,7 @@ impl DepGraph { read_set: Default::default(), }), |data, key, fingerprint, task| { - data.borrow_mut().complete_task(key, task.unwrap(), fingerprint) + data.complete_task(key, task.unwrap(), fingerprint) }, hash_result) } @@ -228,7 +224,7 @@ impl DepGraph { self.with_task_impl(key, cx, input, true, identity_fn, |_| None, |data, key, fingerprint, _| { - data.borrow_mut().alloc_node(key, SmallVec::new(), fingerprint) + data.alloc_node(key, SmallVec::new(), fingerprint) }, hash_result::) } @@ -241,7 +237,7 @@ impl DepGraph { no_tcx: bool, task: fn(C, A) -> R, create_task: fn(DepNode) -> Option, - finish_task_and_alloc_depnode: fn(&Lock, + finish_task_and_alloc_depnode: fn(&CurrentDepGraph, DepNode, Fingerprint, Option) -> DepNodeIndex, @@ -260,10 +256,6 @@ impl DepGraph { // - we can get an idea of the runtime cost. 
let mut hcx = cx.get_stable_hashing_context(); - if cfg!(debug_assertions) { - profq_msg(hcx.sess(), ProfileQueriesMsg::TaskBegin(key.clone())) - }; - let result = if no_tcx { task(cx, arg) } else { @@ -279,10 +271,6 @@ impl DepGraph { }) }; - if cfg!(debug_assertions) { - profq_msg(hcx.sess(), ProfileQueriesMsg::TaskEnd) - }; - let current_fingerprint = hash_result(&mut hcx, &result); let dep_node_index = finish_task_and_alloc_depnode( @@ -363,7 +351,6 @@ impl DepGraph { (r, task_deps.into_inner()) }); let dep_node_index = data.current - .borrow_mut() .complete_anon_task(dep_kind, task_deps); (result, dep_node_index) } else { @@ -387,8 +374,7 @@ impl DepGraph { self.with_task_impl(key, cx, arg, false, task, |_| None, |data, key, fingerprint, _| { - let mut current = data.borrow_mut(); - current.alloc_node(key, smallvec![], fingerprint) + data.alloc_node(key, smallvec![], fingerprint) }, hash_result) } @@ -396,9 +382,9 @@ impl DepGraph { #[inline] pub fn read(&self, v: DepNode) { if let Some(ref data) = self.data { - let current = data.current.borrow_mut(); - if let Some(&dep_node_index) = current.node_to_node_index.get(&v) { - std::mem::drop(current); + let map = data.current.node_to_node_index.get_shard_by_value(&v).lock(); + if let Some(dep_node_index) = map.get(&v).copied() { + std::mem::drop(map); data.read_index(dep_node_index); } else { bug!("DepKind {:?} should be pre-allocated but isn't.", v.kind) @@ -419,8 +405,9 @@ impl DepGraph { .as_ref() .unwrap() .current - .borrow_mut() .node_to_node_index + .get_shard_by_value(dep_node) + .lock() .get(dep_node) .cloned() .unwrap() @@ -429,7 +416,11 @@ impl DepGraph { #[inline] pub fn dep_node_exists(&self, dep_node: &DepNode) -> bool { if let Some(ref data) = self.data { - data.current.borrow_mut().node_to_node_index.contains_key(dep_node) + data.current + .node_to_node_index + .get_shard_by_value(&dep_node) + .lock() + .contains_key(dep_node) } else { false } @@ -437,8 +428,8 @@ impl DepGraph { #[inline] pub fn fingerprint_of(&self, dep_node_index: DepNodeIndex) -> Fingerprint { - let current = self.data.as_ref().expect("dep graph enabled").current.borrow_mut(); - current.data[dep_node_index].fingerprint + let data = self.data.as_ref().expect("dep graph enabled").current.data.lock(); + data[dep_node_index].fingerprint } pub fn prev_fingerprint_of(&self, dep_node: &DepNode) -> Option { @@ -492,32 +483,29 @@ impl DepGraph { pub fn edge_deduplication_data(&self) -> Option<(u64, u64)> { if cfg!(debug_assertions) { - let current_dep_graph = self.data.as_ref().unwrap().current.borrow(); + let current_dep_graph = &self.data.as_ref().unwrap().current; - Some((current_dep_graph.total_read_count, - current_dep_graph.total_duplicate_read_count)) + Some((current_dep_graph.total_read_count.load(SeqCst), + current_dep_graph.total_duplicate_read_count.load(SeqCst))) } else { None } } pub fn serialize(&self) -> SerializedDepGraph { - let current_dep_graph = self.data.as_ref().unwrap().current.borrow(); + let data = self.data.as_ref().unwrap().current.data.lock(); let fingerprints: IndexVec = - current_dep_graph.data.iter().map(|d| d.fingerprint).collect(); + data.iter().map(|d| d.fingerprint).collect(); let nodes: IndexVec = - current_dep_graph.data.iter().map(|d| d.node).collect(); + data.iter().map(|d| d.node).collect(); - let total_edge_count: usize = current_dep_graph.data.iter() - .map(|d| d.edges.len()) - .sum(); + let total_edge_count: usize = data.iter().map(|d| d.edges.len()).sum(); let mut edge_list_indices = 
IndexVec::with_capacity(nodes.len()); let mut edge_list_data = Vec::with_capacity(total_edge_count); - for (current_dep_node_index, edges) in current_dep_graph.data.iter_enumerated() - .map(|(i, d)| (i, &d.edges)) { + for (current_dep_node_index, edges) in data.iter_enumerated().map(|(i, d)| (i, &d.edges)) { let start = edge_list_data.len() as u32; // This should really just be a memcpy :/ edge_list_data.extend(edges.iter().map(|i| SerializedDepNodeIndex::new(i.index()))); @@ -590,7 +578,7 @@ impl DepGraph { // mark it as green by recursively marking all of its // dependencies green. self.try_mark_previous_green( - tcx.global_tcx(), + tcx, data, prev_index, &dep_node @@ -613,7 +601,11 @@ impl DepGraph { #[cfg(not(parallel_compiler))] { - debug_assert!(!data.current.borrow().node_to_node_index.contains_key(dep_node)); + debug_assert!(!data.current + .node_to_node_index + .get_shard_by_value(dep_node) + .lock() + .contains_key(dep_node)); debug_assert!(data.colors.get(prev_dep_node_index).is_none()); } @@ -746,15 +738,13 @@ impl DepGraph { // There may be multiple threads trying to mark the same dep node green concurrently let dep_node_index = { - let mut current = data.current.borrow_mut(); - // Copy the fingerprint from the previous graph, // so we don't have to recompute it let fingerprint = data.previous.fingerprint_by_index(prev_dep_node_index); // We allocating an entry for the node in the current dependency graph and // adding all the appropriate edges imported from the previous graph - current.intern_node(*dep_node, current_deps, fingerprint) + data.current.intern_node(*dep_node, current_deps, fingerprint) }; // ... emitting any stored diagnostic ... @@ -858,6 +848,8 @@ impl DepGraph { // This method will only load queries that will end up in the disk cache. // Other queries will not be executed. pub fn exec_cache_promotions(&self, tcx: TyCtxt<'_>) { + let _prof_timer = tcx.prof.generic_activity("incr_comp_query_cache_promotion"); + let data = self.data.as_ref().unwrap(); for prev_index in data.colors.values.indices() { match data.colors.get(prev_index) { @@ -874,25 +866,6 @@ impl DepGraph { } } } - - pub fn mark_loaded_from_cache(&self, dep_node_index: DepNodeIndex, state: bool) { - debug!("mark_loaded_from_cache({:?}, {})", - self.data.as_ref().unwrap().current.borrow().data[dep_node_index].node, - state); - - self.data - .as_ref() - .unwrap() - .loaded_from_cache - .borrow_mut() - .insert(dep_node_index, state); - } - - pub fn was_loaded_from_cache(&self, dep_node: &DepNode) -> Option { - let data = self.data.as_ref().unwrap(); - let dep_node_index = data.current.borrow().node_to_node_index[dep_node]; - data.loaded_from_cache.borrow().get(&dep_node_index).cloned() - } } /// A "work product" is an intermediate result that we save into the @@ -947,9 +920,27 @@ struct DepNodeData { fingerprint: Fingerprint, } +/// `CurrentDepGraph` stores the dependency graph for the current session. +/// It will be populated as we run queries or tasks. +/// +/// The nodes in it are identified by an index (`DepNodeIndex`). +/// The data for each node is stored in its `DepNodeData`, found in the `data` field. +/// +/// We never remove nodes from the graph: they are only added. +/// +/// This struct uses two locks internally. The `data` and `node_to_node_index` fields are +/// locked separately. Operations that take a `DepNodeIndex` typically just access +/// the data field. 
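Aside: to make the sharded `node_to_node_index` side of this design concrete, here is a minimal, hypothetical sketch of the sharding idea. It is not the `rustc_data_structures::sharded::Sharded` API; `ShardedMap` and its methods are invented for illustration, and `std::sync::Mutex` plus the default hasher stand in for the real primitives. Each key is routed by its hash to one of a fixed number of independently locked maps, so lookups and inserts for different nodes usually take different locks:

```rust
// Hypothetical sketch of hash-based sharding; not the real Sharded type.
use std::collections::HashMap;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::sync::Mutex;

const SHARDS: usize = 8;

struct ShardedMap<K, V> {
    shards: Vec<Mutex<HashMap<K, V>>>,
}

impl<K: Hash + Eq, V> ShardedMap<K, V> {
    fn new() -> Self {
        ShardedMap {
            shards: (0..SHARDS).map(|_| Mutex::new(HashMap::new())).collect(),
        }
    }

    // Pick the shard for a key from its hash; only that shard's lock is taken.
    fn shard_for(&self, key: &K) -> &Mutex<HashMap<K, V>> {
        let mut hasher = DefaultHasher::new();
        key.hash(&mut hasher);
        &self.shards[(hasher.finish() as usize) % SHARDS]
    }

    fn insert(&self, key: K, value: V) {
        self.shard_for(&key).lock().unwrap().insert(key, value);
    }

    fn contains_key(&self, key: &K) -> bool {
        self.shard_for(key).lock().unwrap().contains_key(key)
    }
}

fn main() {
    let map = ShardedMap::new();
    map.insert("dep_node", 1u32);
    assert!(map.contains_key(&"dep_node"));
}
```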
+/// +/// The only operation that must manipulate both locks is adding new nodes, in which case +/// we first acquire the `node_to_node_index` lock and then, once a new node is to be inserted, +/// acquire the lock on `data.` pub(super) struct CurrentDepGraph { - data: IndexVec, - node_to_node_index: FxHashMap, + data: Lock>, + node_to_node_index: Sharded>, + + /// Used to trap when a specific edge is added to the graph. + /// This is used for debug purposes and is only active with `debug_assertions`. #[allow(dead_code)] forbidden_edge: Option, @@ -966,8 +957,10 @@ pub(super) struct CurrentDepGraph { /// the `DepGraph` is created. anon_id_seed: Fingerprint, - total_read_count: u64, - total_duplicate_read_count: u64, + /// These are simple counters that are for profiling and + /// debugging and only active with `debug_assertions`. + total_read_count: AtomicU64, + total_duplicate_read_count: AtomicU64, } impl CurrentDepGraph { @@ -1001,20 +994,20 @@ impl CurrentDepGraph { let new_node_count_estimate = (prev_graph_node_count * 102) / 100 + 200; CurrentDepGraph { - data: IndexVec::with_capacity(new_node_count_estimate), - node_to_node_index: FxHashMap::with_capacity_and_hasher( - new_node_count_estimate, + data: Lock::new(IndexVec::with_capacity(new_node_count_estimate)), + node_to_node_index: Sharded::new(|| FxHashMap::with_capacity_and_hasher( + new_node_count_estimate / sharded::SHARDS, Default::default(), - ), + )), anon_id_seed: stable_hasher.finish(), forbidden_edge, - total_read_count: 0, - total_duplicate_read_count: 0, + total_read_count: AtomicU64::new(0), + total_duplicate_read_count: AtomicU64::new(0), } } fn complete_task( - &mut self, + &self, node: DepNode, task_deps: TaskDeps, fingerprint: Fingerprint @@ -1022,7 +1015,7 @@ impl CurrentDepGraph { self.alloc_node(node, task_deps.reads, fingerprint) } - fn complete_anon_task(&mut self, kind: DepKind, task_deps: TaskDeps) -> DepNodeIndex { + fn complete_anon_task(&self, kind: DepKind, task_deps: TaskDeps) -> DepNodeIndex { debug_assert!(!kind.is_eval_always()); let mut hasher = StableHasher::new(); @@ -1047,28 +1040,30 @@ impl CurrentDepGraph { } fn alloc_node( - &mut self, + &self, dep_node: DepNode, edges: SmallVec<[DepNodeIndex; 8]>, fingerprint: Fingerprint ) -> DepNodeIndex { - debug_assert!(!self.node_to_node_index.contains_key(&dep_node)); + debug_assert!(!self.node_to_node_index + .get_shard_by_value(&dep_node) + .lock() + .contains_key(&dep_node)); self.intern_node(dep_node, edges, fingerprint) } fn intern_node( - &mut self, + &self, dep_node: DepNode, edges: SmallVec<[DepNodeIndex; 8]>, fingerprint: Fingerprint ) -> DepNodeIndex { - debug_assert_eq!(self.node_to_node_index.len(), self.data.len()); - - match self.node_to_node_index.entry(dep_node) { + match self.node_to_node_index.get_shard_by_value(&dep_node).lock().entry(dep_node) { Entry::Occupied(entry) => *entry.get(), Entry::Vacant(entry) => { - let dep_node_index = DepNodeIndex::new(self.data.len()); - self.data.push(DepNodeData { + let mut data = self.data.lock(); + let dep_node_index = DepNodeIndex::new(data.len()); + data.push(DepNodeData { node: dep_node, edges, fingerprint @@ -1087,7 +1082,7 @@ impl DepGraphData { if let Some(task_deps) = icx.task_deps { let mut task_deps = task_deps.lock(); if cfg!(debug_assertions) { - self.current.lock().total_read_count += 1; + self.current.total_read_count.fetch_add(1, SeqCst); } if task_deps.read_set.insert(source) { task_deps.reads.push(source); @@ -1095,9 +1090,9 @@ impl DepGraphData { #[cfg(debug_assertions)] { if let 
Some(target) = task_deps.node { - let graph = self.current.lock(); - if let Some(ref forbidden_edge) = graph.forbidden_edge { - let source = graph.data[source].node; + let data = self.current.data.lock(); + if let Some(ref forbidden_edge) = self.current.forbidden_edge { + let source = data[source].node; if forbidden_edge.test(&source, &target) { bug!("forbidden edge {:?} -> {:?} created", source, @@ -1107,7 +1102,7 @@ impl DepGraphData { } } } else if cfg!(debug_assertions) { - self.current.lock().total_duplicate_read_count += 1; + self.current.total_duplicate_read_count.fetch_add(1, SeqCst); } } }) diff --git a/src/librustc/dep_graph/mod.rs b/src/librustc/dep_graph/mod.rs index 1535e6d349..43f3d7e89c 100644 --- a/src/librustc/dep_graph/mod.rs +++ b/src/librustc/dep_graph/mod.rs @@ -1,6 +1,5 @@ pub mod debug; mod dep_node; -mod dep_tracking_map; mod graph; mod prev; mod query; @@ -8,7 +7,6 @@ mod safe; mod serialized; pub mod cgu_reuse_tracker; -pub use self::dep_tracking_map::{DepTrackingMap, DepTrackingMapConfig}; pub use self::dep_node::{DepNode, DepKind, DepConstructor, WorkProductId, RecoverKey, label_strs}; pub use self::graph::{DepGraph, WorkProduct, DepNodeIndex, DepNodeColor, TaskDeps, hash_result}; pub use self::graph::WorkProductFileKind; diff --git a/src/librustc/dep_graph/serialized.rs b/src/librustc/dep_graph/serialized.rs index b64f71ed90..4302195755 100644 --- a/src/librustc/dep_graph/serialized.rs +++ b/src/librustc/dep_graph/serialized.rs @@ -2,9 +2,9 @@ use crate::dep_graph::DepNode; use crate::ich::Fingerprint; -use rustc_data_structures::indexed_vec::{IndexVec, Idx}; +use rustc_index::vec::{IndexVec, Idx}; -newtype_index! { +rustc_index::newtype_index! { pub struct SerializedDepNodeIndex { .. } } diff --git a/src/librustc/error_codes.rs b/src/librustc/error_codes.rs index f6564f1fcd..3e35add961 100644 --- a/src/librustc/error_codes.rs +++ b/src/librustc/error_codes.rs @@ -259,8 +259,8 @@ trait Foo { This is similar to the second sub-error, but subtler. It happens in situations like the following: -```compile_fail -trait Super {} +```compile_fail,E0038 +trait Super {} trait Trait: Super { } @@ -270,17 +270,21 @@ struct Foo; impl Super for Foo{} impl Trait for Foo {} + +fn main() { + let x: Box; +} ``` Here, the supertrait might have methods as follows: ``` -trait Super { - fn get_a(&self) -> A; // note that this is object safe! +trait Super { + fn get_a(&self) -> &A; // note that this is object safe! } ``` -If the trait `Foo` was deriving from something like `Super` or +If the trait `Trait` was deriving from something like `Super` or `Super` (where `Foo` itself is `Foo`), this is okay, because given a type `get_a()` will definitely return an object of that type. @@ -466,67 +470,6 @@ fn main() { ``` "##, -// This shouldn't really ever trigger since the repeated value error comes first -E0136: r##" -A binary can only have one entry point, and by default that entry point is the -function `main()`. If there are multiple such functions, please rename one. -"##, - -E0137: r##" -More than one function was declared with the `#[main]` attribute. - -Erroneous code example: - -```compile_fail,E0137 -#![feature(main)] - -#[main] -fn foo() {} - -#[main] -fn f() {} // error: multiple functions with a `#[main]` attribute -``` - -This error indicates that the compiler found multiple functions with the -`#[main]` attribute. This is an error because there must be a unique entry -point into a Rust program. Example: - -``` -#![feature(main)] - -#[main] -fn f() {} // ok! 
-``` -"##, - -E0138: r##" -More than one function was declared with the `#[start]` attribute. - -Erroneous code example: - -```compile_fail,E0138 -#![feature(start)] - -#[start] -fn foo(argc: isize, argv: *const *const u8) -> isize {} - -#[start] -fn f(argc: isize, argv: *const *const u8) -> isize {} -// error: multiple 'start' functions -``` - -This error indicates that the compiler found multiple functions with the -`#[start]` attribute. This is an error because there must be a unique entry -point into a Rust program. Example: - -``` -#![feature(start)] - -#[start] -fn foo(argc: isize, argv: *const *const u8) -> isize { 0 } // ok! -``` -"##, - E0139: r##" #### Note: this error code is no longer emitted by the compiler. @@ -1580,8 +1523,51 @@ where ``` "##, +E0495: r##" +A lifetime cannot be determined in the given situation. + +Erroneous code example: + +```compile_fail,E0495 +fn transmute_lifetime<'a, 'b, T>(t: &'a (T,)) -> &'b T { + match (&t,) { // error! + ((u,),) => u, + } +} + +let y = Box::new((42,)); +let x = transmute_lifetime(&y); +``` + +In this code, you have two ways to solve this issue: + 1. Enforce that `'a` lives at least as long as `'b`. + 2. Use the same lifetime requirement for both input and output values. + +So for the first solution, you can do it by replacing `'a` with `'a: 'b`: + +``` +fn transmute_lifetime<'a: 'b, 'b, T>(t: &'a (T,)) -> &'b T { + match (&t,) { // ok! + ((u,),) => u, + } +} +``` + +In the second you can do it by simply removing `'b` so they both use `'a`: + +``` +fn transmute_lifetime<'a, T>(t: &'a (T,)) -> &'a T { + match (&t,) { // ok! + ((u,),) => u, + } +} +``` +"##, + E0496: r##" -A lifetime name is shadowing another lifetime name. Erroneous code example: +A lifetime name is shadowing another lifetime name. + +Erroneous code example: ```compile_fail,E0496 struct Foo<'a> { @@ -1613,8 +1599,11 @@ fn main() { "##, E0497: r##" -A stability attribute was used outside of the standard library. Erroneous code -example: +#### Note: this error code is no longer emitted by the compiler. + +A stability attribute was used outside of the standard library. + +Erroneous code example: ```compile_fail #[stable] // error: stability attributes may not be used outside of the @@ -1626,33 +1615,6 @@ It is not possible to use stability attributes outside of the standard library. Also, for now, it is not possible to write deprecation messages either. "##, -E0512: r##" -Transmute with two differently sized types was attempted. Erroneous code -example: - -```compile_fail,E0512 -fn takes_u8(_: u8) {} - -fn main() { - unsafe { takes_u8(::std::mem::transmute(0u16)); } - // error: cannot transmute between types of different sizes, - // or dependently-sized types -} -``` - -Please use types with same size or use the expected type directly. Example: - -``` -fn takes_u8(_: u8) {} - -fn main() { - unsafe { takes_u8(::std::mem::transmute(0i8)); } // ok! - // or: - unsafe { takes_u8(0u8); } // ok! -} -``` -"##, - E0517: r##" This error indicates that a `#[repr(..)]` attribute was placed on an unsupported item. @@ -1787,6 +1749,27 @@ To understand better how closures work in Rust, read: https://doc.rust-lang.org/book/ch13-01-closures.html "##, +E0566: r##" +Conflicting representation hints have been used on a same item. + +Erroneous code example: + +``` +#[repr(u32, u64)] // warning! +enum Repr { A } +``` + +In most cases (if not all), using just one representation hint is more than +enough. 
If you want to have a representation hint depending on the current +architecture, use `cfg_attr`. Example: + +``` +#[cfg_attr(linux, repr(u32))] +#[cfg_attr(not(linux), repr(u64))] +enum Repr { A } +``` +"##, + E0580: r##" The `main` function was incorrectly declared. @@ -1847,84 +1830,6 @@ See [RFC 1522] for more details. [RFC 1522]: https://github.com/rust-lang/rfcs/blob/master/text/1522-conservative-impl-trait.md "##, -E0591: r##" -Per [RFC 401][rfc401], if you have a function declaration `foo`: - -``` -// For the purposes of this explanation, all of these -// different kinds of `fn` declarations are equivalent: -struct S; -fn foo(x: S) { /* ... */ } -# #[cfg(for_demonstration_only)] -extern "C" { fn foo(x: S); } -# #[cfg(for_demonstration_only)] -impl S { fn foo(self) { /* ... */ } } -``` - -the type of `foo` is **not** `fn(S)`, as one might expect. -Rather, it is a unique, zero-sized marker type written here as `typeof(foo)`. -However, `typeof(foo)` can be _coerced_ to a function pointer `fn(S)`, -so you rarely notice this: - -``` -# struct S; -# fn foo(_: S) {} -let x: fn(S) = foo; // OK, coerces -``` - -The reason that this matter is that the type `fn(S)` is not specific to -any particular function: it's a function _pointer_. So calling `x()` results -in a virtual call, whereas `foo()` is statically dispatched, because the type -of `foo` tells us precisely what function is being called. - -As noted above, coercions mean that most code doesn't have to be -concerned with this distinction. However, you can tell the difference -when using **transmute** to convert a fn item into a fn pointer. - -This is sometimes done as part of an FFI: - -```compile_fail,E0591 -extern "C" fn foo(userdata: Box) { - /* ... */ -} - -# fn callback(_: extern "C" fn(*mut i32)) {} -# use std::mem::transmute; -# unsafe { -let f: extern "C" fn(*mut i32) = transmute(foo); -callback(f); -# } -``` - -Here, transmute is being used to convert the types of the fn arguments. -This pattern is incorrect because, because the type of `foo` is a function -**item** (`typeof(foo)`), which is zero-sized, and the target type (`fn()`) -is a function pointer, which is not zero-sized. -This pattern should be rewritten. There are a few possible ways to do this: - -- change the original fn declaration to match the expected signature, - and do the cast in the fn body (the preferred option) -- cast the fn item fo a fn pointer before calling transmute, as shown here: - - ``` - # extern "C" fn foo(_: Box) {} - # use std::mem::transmute; - # unsafe { - let f: extern "C" fn(*mut i32) = transmute(foo as extern "C" fn(_)); - let f: extern "C" fn(*mut i32) = transmute(foo as usize); // works too - # } - ``` - -The same applies to transmutes to `*mut fn()`, which were observed in practice. -Note though that use of this type is generally incorrect. -The intention is typically to describe a function pointer, but just `fn()` -alone suffices for that. `*mut fn()` is a pointer to a fn pointer. -(Since these values are typically just passed to C code, however, this rarely -makes a difference in practice.) - -[rfc401]: https://github.com/rust-lang/rfcs/blob/master/text/0401-coercions.md -"##, - E0593: r##" You tried to supply an `Fn`-based type with an incorrect number of arguments than what was expected. @@ -1941,21 +1846,6 @@ fn main() { ``` "##, -E0601: r##" -No `main` function was found in a binary crate. To fix this error, add a -`main` function. For example: - -``` -fn main() { - // Your program will start here. 
- println!("Hello world!"); -} -``` - -If you don't know the basics of Rust, you can go look to the Rust Book to get -started: https://doc.rust-lang.org/book/ -"##, - E0602: r##" An unknown lint was used on the command line. @@ -2115,6 +2005,24 @@ a (non-transparent) struct containing a single float, while `Grams` is a transparent wrapper around a float. This can make a difference for the ABI. "##, +E0697: r##" +A closure has been used as `static`. + +Erroneous code example: + +```compile_fail,E0697 +fn main() { + static || {}; // used as `static` +} +``` + +Closures cannot be used as `static`. They "save" the environment, +and as such a static closure would save only a static environment +which would consist only of variables with a static lifetime. Given +this it would be better to use a proper function. The easiest fix +is to remove the `static` keyword. +"##, + E0698: r##" When using generators (or async) all type variables must be bound so a generator can be constructed. @@ -2137,8 +2045,8 @@ so that a generator can then be constructed: async fn bar() -> () {} async fn foo() { - bar::().await; - // ^^^^^^^^ specify type explicitly + bar::().await; + // ^^^^^^^^ specify type explicitly } ``` "##, @@ -2197,8 +2105,6 @@ on something other than a struct or enum. Examples of erroneous code: ```compile_fail,E0701 -# #![feature(non_exhaustive)] - #[non_exhaustive] trait Foo { } ``` @@ -2217,6 +2123,171 @@ Examples of erroneous code: static X: u32 = 42; ``` "##, + +E0728: r##" +[`await`] has been used outside [`async`] function or block. + +Erroneous code examples: + +```edition2018,compile_fail,E0728 +# use std::pin::Pin; +# use std::future::Future; +# use std::task::{Context, Poll}; +# +# struct WakeOnceThenComplete(bool); +# +# fn wake_and_yield_once() -> WakeOnceThenComplete { +# WakeOnceThenComplete(false) +# } +# +# impl Future for WakeOnceThenComplete { +# type Output = (); +# fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<()> { +# if self.0 { +# Poll::Ready(()) +# } else { +# cx.waker().wake_by_ref(); +# self.0 = true; +# Poll::Pending +# } +# } +# } +# +fn foo() { + wake_and_yield_once().await // `await` is used outside `async` context +} +``` + +[`await`] is used to suspend the current computation until the given +future is ready to produce a value. So it is legal only within +an [`async`] context, like an `async fn` or an `async` block. + +```edition2018 +# use std::pin::Pin; +# use std::future::Future; +# use std::task::{Context, Poll}; +# +# struct WakeOnceThenComplete(bool); +# +# fn wake_and_yield_once() -> WakeOnceThenComplete { +# WakeOnceThenComplete(false) +# } +# +# impl Future for WakeOnceThenComplete { +# type Output = (); +# fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<()> { +# if self.0 { +# Poll::Ready(()) +# } else { +# cx.waker().wake_by_ref(); +# self.0 = true; +# Poll::Pending +# } +# } +# } +# +async fn foo() { + wake_and_yield_once().await // `await` is used within `async` function +} + +fn bar(x: u8) -> impl Future { + async move { + wake_and_yield_once().await; // `await` is used within `async` block + x + } +} +``` + +[`async`]: https://doc.rust-lang.org/std/keyword.async.html +[`await`]: https://doc.rust-lang.org/std/keyword.await.html +"##, + +E0734: r##" +A stability attribute has been used outside of the standard library. 
+ +Erroneous code examples: + +```compile_fail,E0734 +#[rustc_deprecated(since = "b", reason = "text")] // invalid +#[stable(feature = "a", since = "b")] // invalid +#[unstable(feature = "b", issue = "0")] // invalid +fn foo(){} +``` + +These attributes are meant to only be used by the standard library and are +rejected in your own crates. +"##, + +E0736: r##" +`#[track_caller]` and `#[naked]` cannot both be applied to the same function. + +Erroneous code example: + +```compile_fail,E0736 +#![feature(track_caller)] + +#[naked] +#[track_caller] +fn foo() {} +``` + +This is primarily due to ABI incompatibilities between the two attributes. +See [RFC 2091] for details on this and other limitations. + +[RFC 2091]: https://github.com/rust-lang/rfcs/blob/master/text/2091-inline-semantic.md +"##, + +E0738: r##" +`#[track_caller]` cannot be used in traits yet. This is due to limitations in +the compiler which are likely to be temporary. See [RFC 2091] for details on +this and other restrictions. + +Erroneous example with a trait method implementation: + +```compile_fail,E0738 +#![feature(track_caller)] + +trait Foo { + fn bar(&self); +} + +impl Foo for u64 { + #[track_caller] + fn bar(&self) {} +} +``` + +Erroneous example with a blanket trait method implementation: + +```compile_fail,E0738 +#![feature(track_caller)] + +trait Foo { + #[track_caller] + fn bar(&self) {} + fn baz(&self); +} +``` + +Erroneous example with a trait method declaration: + +```compile_fail,E0738 +#![feature(track_caller)] + +trait Foo { + fn bar(&self) {} + + #[track_caller] + fn baz(&self); +} +``` + +Note that while the compiler may be able to support the attribute in traits in +the future, [RFC 2091] prohibits their implementation without a follow-up RFC. + +[RFC 2091]: https://github.com/rust-lang/rfcs/blob/master/text/2091-inline-semantic.md +"##, + ; // E0006, // merged with E0005 // E0101, // replaced with E0282 @@ -2226,7 +2297,7 @@ static X: u32 = 42; // E0272, // on_unimplemented #0 // E0273, // on_unimplemented #1 // E0274, // on_unimplemented #2 - E0278, // requirement is not satisfied +// E0278, // requirement is not satisfied E0279, // requirement is not satisfied E0280, // requirement is not satisfied // E0285, // overflow evaluation builtin bounds @@ -2258,9 +2329,6 @@ static X: u32 = 42; E0488, // lifetime of variable does not enclose its declaration E0489, // type/lifetime parameter not in scope here E0490, // a value of type `..` is borrowed for too long - E0495, // cannot infer an appropriate lifetime due to conflicting - // requirements - E0566, // conflicting representation hints E0623, // lifetime mismatch where both parameters are anonymous regions E0628, // generators cannot have explicit parameters E0631, // type mismatch in closure arguments @@ -2268,15 +2336,14 @@ static X: u32 = 42; E0657, // `impl Trait` can only capture lifetimes bound at the fn level E0687, // in-band lifetimes cannot be used in `fn`/`Fn` syntax E0688, // in-band lifetimes cannot be mixed with explicit lifetime binders - E0697, // closures cannot be static - E0707, // multiple elided lifetimes used in arguments of `async fn` +// E0707, // multiple elided lifetimes used in arguments of `async fn` E0708, // `async` non-`move` closures with parameters are not currently // supported - E0709, // multiple different lifetimes used in arguments of `async fn` +// E0709, // multiple different lifetimes used in arguments of `async fn` E0710, // an unknown tool name found in scoped lint E0711, // a feature has been declared with 
conflicting stability attributes // E0702, // replaced with a generic attribute input check E0726, // non-explicit (not `'_`) elided lifetime in unsupported position E0727, // `async` generators are not yet supported - E0728, // `await` must be in an `async` function or block + E0739, // invalid track_caller application/syntax } diff --git a/src/librustc/hir/check_attr.rs b/src/librustc/hir/check_attr.rs index 1df09429e5..96562002aa 100644 --- a/src/librustc/hir/check_attr.rs +++ b/src/librustc/hir/check_attr.rs @@ -4,16 +4,24 @@ //! conflicts between multiple such attributes attached to the same //! item. -use crate::hir; +use crate::hir::{self, HirId, HirVec, Attribute, Item, ItemKind, TraitItem, TraitItemKind}; +use crate::hir::DUMMY_HIR_ID; use crate::hir::def_id::DefId; use crate::hir::intravisit::{self, Visitor, NestedVisitorMap}; +use crate::lint::builtin::UNUSED_ATTRIBUTES; use crate::ty::TyCtxt; use crate::ty::query::Providers; use std::fmt::{self, Display}; -use syntax::symbol::sym; +use syntax::{attr, symbol::sym}; use syntax_pos::Span; +#[derive(Copy, Clone, PartialEq)] +pub(crate) enum MethodKind { + Trait { body: bool }, + Inherent, +} + #[derive(Copy, Clone, PartialEq)] pub(crate) enum Target { ExternCrate, @@ -35,6 +43,12 @@ pub(crate) enum Target { Impl, Expression, Statement, + AssocConst, + Method(MethodKind), + AssocTy, + ForeignFn, + ForeignStatic, + ForeignTy, } impl Display for Target { @@ -59,29 +73,76 @@ impl Display for Target { Target::Impl => "item", Target::Expression => "expression", Target::Statement => "statement", + Target::AssocConst => "associated const", + Target::Method(_) => "method", + Target::AssocTy => "associated type", + Target::ForeignFn => "foreign function", + Target::ForeignStatic => "foreign static item", + Target::ForeignTy => "foreign type", }) } } impl Target { - pub(crate) fn from_item(item: &hir::Item) -> Target { - match item.node { - hir::ItemKind::ExternCrate(..) => Target::ExternCrate, - hir::ItemKind::Use(..) => Target::Use, - hir::ItemKind::Static(..) => Target::Static, - hir::ItemKind::Const(..) => Target::Const, - hir::ItemKind::Fn(..) => Target::Fn, - hir::ItemKind::Mod(..) => Target::Mod, - hir::ItemKind::ForeignMod(..) => Target::ForeignMod, - hir::ItemKind::GlobalAsm(..) => Target::GlobalAsm, - hir::ItemKind::TyAlias(..) => Target::TyAlias, - hir::ItemKind::OpaqueTy(..) => Target::OpaqueTy, - hir::ItemKind::Enum(..) => Target::Enum, - hir::ItemKind::Struct(..) => Target::Struct, - hir::ItemKind::Union(..) => Target::Union, - hir::ItemKind::Trait(..) => Target::Trait, - hir::ItemKind::TraitAlias(..) => Target::TraitAlias, - hir::ItemKind::Impl(..) => Target::Impl, + pub(crate) fn from_item(item: &Item) -> Target { + match item.kind { + ItemKind::ExternCrate(..) => Target::ExternCrate, + ItemKind::Use(..) => Target::Use, + ItemKind::Static(..) => Target::Static, + ItemKind::Const(..) => Target::Const, + ItemKind::Fn(..) => Target::Fn, + ItemKind::Mod(..) => Target::Mod, + ItemKind::ForeignMod(..) => Target::ForeignMod, + ItemKind::GlobalAsm(..) => Target::GlobalAsm, + ItemKind::TyAlias(..) => Target::TyAlias, + ItemKind::OpaqueTy(..) => Target::OpaqueTy, + ItemKind::Enum(..) => Target::Enum, + ItemKind::Struct(..) => Target::Struct, + ItemKind::Union(..) => Target::Union, + ItemKind::Trait(..) => Target::Trait, + ItemKind::TraitAlias(..) => Target::TraitAlias, + ItemKind::Impl(..) => Target::Impl, + } + } + + fn from_trait_item(trait_item: &TraitItem) -> Target { + match trait_item.kind { + TraitItemKind::Const(..) 
=> Target::AssocConst, + TraitItemKind::Method(_, hir::TraitMethod::Required(_)) => { + Target::Method(MethodKind::Trait { body: false }) + } + TraitItemKind::Method(_, hir::TraitMethod::Provided(_)) => { + Target::Method(MethodKind::Trait { body: true }) + } + TraitItemKind::Type(..) => Target::AssocTy, + } + } + + fn from_foreign_item(foreign_item: &hir::ForeignItem) -> Target { + match foreign_item.kind { + hir::ForeignItemKind::Fn(..) => Target::ForeignFn, + hir::ForeignItemKind::Static(..) => Target::ForeignStatic, + hir::ForeignItemKind::Type => Target::ForeignTy, + } + } + + fn from_impl_item<'tcx>(tcx: TyCtxt<'tcx>, impl_item: &hir::ImplItem) -> Target { + match impl_item.kind { + hir::ImplItemKind::Const(..) => Target::AssocConst, + hir::ImplItemKind::Method(..) => { + let parent_hir_id = tcx.hir().get_parent_item(impl_item.hir_id); + let containing_item = tcx.hir().expect_item(parent_hir_id); + let containing_impl_is_for_trait = match &containing_item.kind { + hir::ItemKind::Impl(_, _, _, _, tr, _, _) => tr.is_some(), + _ => bug!("parent of an ImplItem must be an Impl"), + }; + if containing_impl_is_for_trait { + Target::Method(MethodKind::Trait { body: true }) + } else { + Target::Method(MethodKind::Inherent) + } + } + hir::ImplItemKind::TyAlias(..) | hir::ImplItemKind::OpaqueTy(..) => Target::AssocTy, } } } @@ -92,79 +153,194 @@ struct CheckAttrVisitor<'tcx> { impl CheckAttrVisitor<'tcx> { /// Checks any attribute. - fn check_attributes(&self, item: &hir::Item, target: Target) { - if target == Target::Fn || target == Target::Const { - self.tcx.codegen_fn_attrs(self.tcx.hir().local_def_id(item.hir_id)); - } else if let Some(a) = item.attrs.iter().find(|a| a.check_name(sym::target_feature)) { - self.tcx.sess.struct_span_err(a.span, "attribute should be applied to a function") - .span_label(item.span, "not a function") - .emit(); + fn check_attributes( + &self, + hir_id: HirId, + attrs: &HirVec, + span: &Span, + target: Target, + item: Option<&Item>, + ) { + let mut is_valid = true; + for attr in attrs { + is_valid &= if attr.check_name(sym::inline) { + self.check_inline(hir_id, attr, span, target) + } else if attr.check_name(sym::non_exhaustive) { + self.check_non_exhaustive(attr, span, target) + } else if attr.check_name(sym::marker) { + self.check_marker(attr, span, target) + } else if attr.check_name(sym::target_feature) { + self.check_target_feature(attr, span, target) + } else if attr.check_name(sym::track_caller) { + self.check_track_caller(&attr.span, attrs, span, target) + } else { + true + }; } - for attr in &item.attrs { - if attr.check_name(sym::inline) { - self.check_inline(attr, &item.span, target) - } else if attr.check_name(sym::non_exhaustive) { - self.check_non_exhaustive(attr, item, target) - } else if attr.check_name(sym::marker) { - self.check_marker(attr, item, target) + if !is_valid { + return; + } + + if target == Target::Fn { + self.tcx.codegen_fn_attrs(self.tcx.hir().local_def_id(hir_id)); + } + + self.check_repr(attrs, span, target, item); + self.check_used(attrs, target); + } + + /// Checks if an `#[inline]` is applied to a function or a closure. Returns `true` if valid. 
+ fn check_inline(&self, hir_id: HirId, attr: &Attribute, span: &Span, target: Target) -> bool { + match target { + Target::Fn | Target::Closure | Target::Method(MethodKind::Trait { body: true }) + | Target::Method(MethodKind::Inherent) => true, + Target::Method(MethodKind::Trait { body: false }) | Target::ForeignFn => { + self.tcx.struct_span_lint_hir( + UNUSED_ATTRIBUTES, + hir_id, + attr.span, + "`#[inline]` is ignored on function prototypes", + ).emit(); + true + } + // FIXME(#65833): We permit associated consts to have an `#[inline]` attribute with + // just a lint, because we previously erroneously allowed it and some crates used it + // accidentally, to to be compatible with crates depending on them, we can't throw an + // error here. + Target::AssocConst => { + self.tcx.struct_span_lint_hir( + UNUSED_ATTRIBUTES, + hir_id, + attr.span, + "`#[inline]` is ignored on constants", + ).warn("this was previously accepted by the compiler but is \ + being phased out; it will become a hard error in \ + a future release!") + .note("for more information, see issue #65833 \ + ") + .emit(); + true + } + _ => { + struct_span_err!( + self.tcx.sess, + attr.span, + E0518, + "attribute should be applied to function or closure", + ).span_label(*span, "not a function or closure") + .emit(); + false } } - - self.check_repr(item, target); - self.check_used(item, target); } - /// Checks if an `#[inline]` is applied to a function or a closure. - fn check_inline(&self, attr: &hir::Attribute, span: &Span, target: Target) { - if target != Target::Fn && target != Target::Closure { - struct_span_err!(self.tcx.sess, - attr.span, - E0518, - "attribute should be applied to function or closure") - .span_label(*span, "not a function or closure") + /// Checks if a `#[track_caller]` is applied to a non-naked function. Returns `true` if valid. + fn check_track_caller( + &self, + attr_span: &Span, + attrs: &HirVec, + span: &Span, + target: Target, + ) -> bool { + match target { + Target::Fn if attr::contains_name(attrs, sym::naked) => { + struct_span_err!( + self.tcx.sess, + *attr_span, + E0736, + "cannot use `#[track_caller]` with `#[naked]`", + ).emit(); + false + } + Target::Fn | Target::Method(MethodKind::Inherent) => true, + Target::Method(_) => { + struct_span_err!( + self.tcx.sess, + *attr_span, + E0738, + "`#[track_caller]` may not be used on trait methods", + ).emit(); + false + } + _ => { + struct_span_err!( + self.tcx.sess, + *attr_span, + E0739, + "attribute should be applied to function" + ) + .span_label(*span, "not a function") .emit(); + false + } } } - /// Checks if the `#[non_exhaustive]` attribute on an `item` is valid. - fn check_non_exhaustive(&self, attr: &hir::Attribute, item: &hir::Item, target: Target) { + /// Checks if the `#[non_exhaustive]` attribute on an `item` is valid. Returns `true` if valid. + fn check_non_exhaustive( + &self, + attr: &Attribute, + span: &Span, + target: Target, + ) -> bool { match target { - Target::Struct | Target::Enum => { /* Valid */ }, + Target::Struct | Target::Enum => true, _ => { struct_span_err!(self.tcx.sess, attr.span, E0701, "attribute can only be applied to a struct or enum") - .span_label(item.span, "not a struct or enum") + .span_label(*span, "not a struct or enum") .emit(); - return; + false } } } - /// Checks if the `#[marker]` attribute on an `item` is valid. - fn check_marker(&self, attr: &hir::Attribute, item: &hir::Item, target: Target) { + /// Checks if the `#[marker]` attribute on an `item` is valid. Returns `true` if valid. 
+ fn check_marker(&self, attr: &Attribute, span: &Span, target: Target) -> bool { match target { - Target::Trait => { /* Valid */ }, + Target::Trait => true, _ => { self.tcx.sess .struct_span_err(attr.span, "attribute can only be applied to a trait") - .span_label(item.span, "not a trait") + .span_label(*span, "not a trait") .emit(); - return; + false } } } + /// Checks if the `#[target_feature]` attribute on `item` is valid. Returns `true` if valid. + fn check_target_feature(&self, attr: &Attribute, span: &Span, target: Target) -> bool { + match target { + Target::Fn | Target::Method(MethodKind::Trait { body: true }) + | Target::Method(MethodKind::Inherent) => true, + _ => { + self.tcx.sess + .struct_span_err(attr.span, "attribute should be applied to a function") + .span_label(*span, "not a function") + .emit(); + false + }, + } + } + /// Checks if the `#[repr]` attributes on `item` are valid. - fn check_repr(&self, item: &hir::Item, target: Target) { + fn check_repr( + &self, + attrs: &HirVec, + span: &Span, + target: Target, + item: Option<&Item>, + ) { // Extract the names of all repr hints, e.g., [foo, bar, align] for: // ``` // #[repr(foo)] // #[repr(bar, align(8))] // ``` - let hints: Vec<_> = item.attrs + let hints: Vec<_> = attrs .iter() .filter(|attr| attr.check_name(sym::repr)) .filter_map(|attr| attr.meta_item_list()) @@ -222,7 +398,7 @@ impl CheckAttrVisitor<'tcx> { }; self.emit_repr_error( hint.span(), - item.span, + *span, &format!("attribute should be applied to {}", allowed_targets), &format!("not {} {}", article, allowed_targets), ) @@ -241,7 +417,7 @@ impl CheckAttrVisitor<'tcx> { // Warn on repr(u8, u16), repr(C, simd), and c-like-enum-repr(C, u8) if (int_reprs > 1) || (is_simd && is_c) - || (int_reprs == 1 && is_c && is_c_like_enum(item)) { + || (int_reprs == 1 && is_c && item.map_or(false, |item| is_c_like_enum(item))) { let hint_spans: Vec<_> = hint_spans.collect(); span_warn!(self.tcx.sess, hint_spans, E0566, "conflicting representation hints"); @@ -262,10 +438,10 @@ impl CheckAttrVisitor<'tcx> { fn check_stmt_attributes(&self, stmt: &hir::Stmt) { // When checking statements ignore expressions, they will be checked later - if let hir::StmtKind::Local(ref l) = stmt.node { + if let hir::StmtKind::Local(ref l) = stmt.kind { for attr in l.attrs.iter() { if attr.check_name(sym::inline) { - self.check_inline(attr, &stmt.span, Target::Statement); + self.check_inline(DUMMY_HIR_ID, attr, &stmt.span, Target::Statement); } if attr.check_name(sym::repr) { self.emit_repr_error( @@ -280,13 +456,13 @@ impl CheckAttrVisitor<'tcx> { } fn check_expr_attributes(&self, expr: &hir::Expr) { - let target = match expr.node { + let target = match expr.kind { hir::ExprKind::Closure(..) 
=> Target::Closure, _ => Target::Expression, }; for attr in expr.attrs.iter() { if attr.check_name(sym::inline) { - self.check_inline(attr, &expr.span, target); + self.check_inline(DUMMY_HIR_ID, attr, &expr.span, target); } if attr.check_name(sym::repr) { self.emit_repr_error( @@ -299,8 +475,8 @@ impl CheckAttrVisitor<'tcx> { } } - fn check_used(&self, item: &hir::Item, target: Target) { - for attr in &item.attrs { + fn check_used(&self, attrs: &HirVec, target: Target) { + for attr in attrs { if attr.check_name(sym::used) && target != Target::Static { self.tcx.sess .span_err(attr.span, "attribute must be applied to a `static` variable"); @@ -314,12 +490,29 @@ impl Visitor<'tcx> for CheckAttrVisitor<'tcx> { NestedVisitorMap::OnlyBodies(&self.tcx.hir()) } - fn visit_item(&mut self, item: &'tcx hir::Item) { + fn visit_item(&mut self, item: &'tcx Item) { let target = Target::from_item(item); - self.check_attributes(item, target); + self.check_attributes(item.hir_id, &item.attrs, &item.span, target, Some(item)); intravisit::walk_item(self, item) } + fn visit_trait_item(&mut self, trait_item: &'tcx TraitItem) { + let target = Target::from_trait_item(trait_item); + self.check_attributes(trait_item.hir_id, &trait_item.attrs, &trait_item.span, target, None); + intravisit::walk_trait_item(self, trait_item) + } + + fn visit_foreign_item(&mut self, f_item: &'tcx hir::ForeignItem) { + let target = Target::from_foreign_item(f_item); + self.check_attributes(f_item.hir_id, &f_item.attrs, &f_item.span, target, None); + intravisit::walk_foreign_item(self, f_item) + } + + fn visit_impl_item(&mut self, impl_item: &'tcx hir::ImplItem) { + let target = Target::from_impl_item(self.tcx, impl_item); + self.check_attributes(impl_item.hir_id, &impl_item.attrs, &impl_item.span, target, None); + intravisit::walk_impl_item(self, impl_item) + } fn visit_stmt(&mut self, stmt: &'tcx hir::Stmt) { self.check_stmt_attributes(stmt); @@ -332,12 +525,12 @@ impl Visitor<'tcx> for CheckAttrVisitor<'tcx> { } } -fn is_c_like_enum(item: &hir::Item) -> bool { - if let hir::ItemKind::Enum(ref def, _) = item.node { +fn is_c_like_enum(item: &Item) -> bool { + if let ItemKind::Enum(ref def, _) = item.kind { for variant in &def.variants { match variant.data { hir::VariantData::Unit(..) => { /* continue */ } - _ => { return false; } + _ => return false, } } true diff --git a/src/librustc/hir/def.rs b/src/librustc/hir/def.rs index f7d31ca06e..d4d7af92fe 100644 --- a/src/librustc/hir/def.rs +++ b/src/librustc/hir/def.rs @@ -6,8 +6,8 @@ use crate::ty; use crate::util::nodemap::DefIdMap; use syntax::ast; -use syntax::ext::base::MacroKind; use syntax::ast::NodeId; +use syntax_pos::hygiene::MacroKind; use syntax_pos::Span; use rustc_macros::HashStable; diff --git a/src/librustc/hir/def_id.rs b/src/librustc/hir/def_id.rs index d0bdc14913..13200b38f2 100644 --- a/src/librustc/hir/def_id.rs +++ b/src/librustc/hir/def_id.rs @@ -1,9 +1,9 @@ use crate::ty::{self, TyCtxt}; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::vec::Idx; use std::fmt; use std::u32; -newtype_index! { +rustc_index::newtype_index! { pub struct CrateId { ENCODABLE = custom } @@ -87,7 +87,7 @@ impl fmt::Display for CrateNum { impl rustc_serialize::UseSpecializedEncodable for CrateNum {} impl rustc_serialize::UseSpecializedDecodable for CrateNum {} -newtype_index! { +rustc_index::newtype_index! { /// A DefIndex is an index into the hir-map for a crate, identifying a /// particular definition. 
It should really be considered an interned /// shorthand for a particular DefPath. diff --git a/src/librustc/hir/intravisit.rs b/src/librustc/hir/intravisit.rs index 1f125de967..920635d838 100644 --- a/src/librustc/hir/intravisit.rs +++ b/src/librustc/hir/intravisit.rs @@ -203,7 +203,7 @@ pub trait Visitor<'v>: Sized { /// Invoked to visit the body of a function, method or closure. Like /// visit_nested_item, does nothing by default unless you override - /// `nested_visit_map` to return other htan `None`, in which case it will walk + /// `nested_visit_map` to return other than `None`, in which case it will walk /// the body. fn visit_nested_body(&mut self, id: BodyId) { let opt_body = self.nested_visit_map().intra().map(|map| map.body(id)); @@ -465,7 +465,7 @@ pub fn walk_param<'v, V: Visitor<'v>>(visitor: &mut V, param: &'v Param) { pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) { visitor.visit_vis(&item.vis); visitor.visit_ident(item.ident); - match item.node { + match item.kind { ItemKind::ExternCrate(orig_name) => { visitor.visit_id(item.hir_id); if let Some(orig_name) = orig_name { @@ -594,7 +594,7 @@ pub fn walk_variant<'v, V: Visitor<'v>>(visitor: &mut V, pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty) { visitor.visit_id(typ.hir_id); - match typ.node { + match typ.kind { TyKind::Slice(ref ty) => { visitor.visit_ty(ty) } @@ -633,9 +633,6 @@ pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty) { TyKind::Typeof(ref expression) => { visitor.visit_anon_const(expression) } - TyKind::CVarArgs(ref lt) => { - visitor.visit_lifetime(lt) - } TyKind::Infer | TyKind::Err => {} } } @@ -696,7 +693,7 @@ pub fn walk_assoc_type_binding<'v, V: Visitor<'v>>(visitor: &mut V, pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) { visitor.visit_id(pattern.hir_id); - match pattern.node { + match pattern.kind { PatKind::TupleStruct(ref qpath, ref children, _) => { visitor.visit_qpath(qpath, pattern.hir_id, pattern.span); walk_list!(visitor, visit_pat, children); @@ -743,7 +740,7 @@ pub fn walk_foreign_item<'v, V: Visitor<'v>>(visitor: &mut V, foreign_item: &'v visitor.visit_vis(&foreign_item.vis); visitor.visit_ident(foreign_item.ident); - match foreign_item.node { + match foreign_item.kind { ForeignItemKind::Fn(ref function_declaration, ref param_names, ref generics) => { visitor.visit_generics(generics); visitor.visit_fn_decl(function_declaration); @@ -856,7 +853,7 @@ pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v Trai visitor.visit_ident(trait_item.ident); walk_list!(visitor, visit_attribute, &trait_item.attrs); visitor.visit_generics(&trait_item.generics); - match trait_item.node { + match trait_item.kind { TraitItemKind::Const(ref ty, default) => { visitor.visit_id(trait_item.hir_id); visitor.visit_ty(ty); @@ -905,7 +902,7 @@ pub fn walk_impl_item<'v, V: Visitor<'v>>(visitor: &mut V, impl_item: &'v ImplIt ref defaultness, ref attrs, ref generics, - ref node, + ref kind, span: _, } = *impl_item; @@ -914,7 +911,7 @@ pub fn walk_impl_item<'v, V: Visitor<'v>>(visitor: &mut V, impl_item: &'v ImplIt visitor.visit_defaultness(defaultness); walk_list!(visitor, visit_attribute, attrs); visitor.visit_generics(generics); - match *node { + match *kind { ImplItemKind::Const(ref ty, body) => { visitor.visit_id(impl_item.hir_id); visitor.visit_ty(ty); @@ -974,7 +971,7 @@ pub fn walk_block<'v, V: Visitor<'v>>(visitor: &mut V, block: &'v Block) { pub fn walk_stmt<'v, V: Visitor<'v>>(visitor: &mut V, statement: &'v Stmt) { 
visitor.visit_id(statement.hir_id); - match statement.node { + match statement.kind { StmtKind::Local(ref local) => visitor.visit_local(local), StmtKind::Item(item) => visitor.visit_nested_item(item), StmtKind::Expr(ref expression) | @@ -992,7 +989,7 @@ pub fn walk_anon_const<'v, V: Visitor<'v>>(visitor: &mut V, constant: &'v AnonCo pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) { visitor.visit_id(expression.hir_id); walk_list!(visitor, visit_attribute, expression.attrs.iter()); - match expression.node { + match expression.kind { ExprKind::Box(ref subexpression) => { visitor.visit_expr(subexpression) } @@ -1103,7 +1100,7 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) { pub fn walk_arm<'v, V: Visitor<'v>>(visitor: &mut V, arm: &'v Arm) { visitor.visit_id(arm.hir_id); - walk_list!(visitor, visit_pat, &arm.pats); + visitor.visit_pat(&arm.pat); if let Some(ref g) = arm.guard { match g { Guard::If(ref e) => visitor.visit_expr(e), diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index 2ad5424755..e665e058e6 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -43,6 +43,7 @@ use crate::hir::def_id::{DefId, DefIndex, CRATE_DEF_INDEX}; use crate::hir::def::{Namespace, Res, DefKind, PartialRes, PerNS}; use crate::hir::{GenericArg, ConstArg}; use crate::hir::ptr::P; +use crate::lint; use crate::lint::builtin::{self, PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES, ELIDED_LIFETIMES_IN_PATHS}; use crate::middle::cstore::CrateStore; @@ -52,7 +53,7 @@ use crate::util::common::FN_OUTPUT_NAME; use crate::util::nodemap::{DefIdMap, NodeMap}; use errors::Applicability; use rustc_data_structures::fx::FxHashSet; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc_index::vec::IndexVec; use rustc_data_structures::thin_vec::ThinVec; use rustc_data_structures::sync::Lrc; @@ -64,14 +65,15 @@ use syntax::ast; use syntax::ptr::P as AstP; use syntax::ast::*; use syntax::errors; -use syntax::ext::base::SpecialDerives; -use syntax::ext::hygiene::ExpnId; +use syntax::expand::SpecialDerives; use syntax::print::pprust; +use syntax::parse::token::{self, Nonterminal, Token}; +use syntax::tokenstream::{TokenStream, TokenTree}; +use syntax::sess::ParseSess; use syntax::source_map::{respan, ExpnData, ExpnKind, DesugaringKind, Spanned}; use syntax::symbol::{kw, sym, Symbol}; -use syntax::tokenstream::{TokenStream, TokenTree}; -use syntax::parse::token::{self, Token}; use syntax::visit::{self, Visitor}; +use syntax_pos::hygiene::ExpnId; use syntax_pos::Span; const HIR_ID_COUNTER_LOCKED: u32 = 0xFFFFFFFF; @@ -82,10 +84,13 @@ pub struct LoweringContext<'a> { /// Used to assign IDs to HIR nodes that do not directly correspond to AST nodes. sess: &'a Session, - cstore: &'a dyn CrateStore, - resolver: &'a mut dyn Resolver, + /// HACK(Centril): there is a cyclic dependency between the parser and lowering + /// if we don't have this function pointer. To avoid that dependency so that + /// librustc is independent of the parser, we use dynamic dispatch here. + nt_to_tokenstream: NtToTokenstream, + /// The items being lowered are collected here. items: BTreeMap, @@ -97,7 +102,7 @@ pub struct LoweringContext<'a> { trait_impls: BTreeMap>, - modules: BTreeMap, + modules: BTreeMap, generator_kind: Option, @@ -141,7 +146,7 @@ pub struct LoweringContext<'a> { /// vector. 
in_scope_lifetimes: Vec, - current_module: NodeId, + current_module: hir::HirId, type_def_lifetime_params: DefIdMap, @@ -154,6 +159,8 @@ pub struct LoweringContext<'a> { } pub trait Resolver { + fn cstore(&self) -> &dyn CrateStore; + /// Obtains resolution for a `NodeId` with a single resolution. fn get_partial_res(&mut self, id: NodeId) -> Option; @@ -178,8 +185,12 @@ pub trait Resolver { ) -> (ast::Path, Res); fn has_derives(&self, node_id: NodeId, derives: SpecialDerives) -> bool; + + fn lint_buffer(&mut self) -> &mut lint::LintBuffer; } +type NtToTokenstream = fn(&Nonterminal, &ParseSess, Span) -> TokenStream; + /// Context of `impl Trait` in code, which determines whether it is allowed in an HIR subtree, /// and if so, what meaning it has. #[derive(Debug)] @@ -232,21 +243,23 @@ impl<'a> ImplTraitContext<'a> { pub fn lower_crate( sess: &Session, - cstore: &dyn CrateStore, dep_graph: &DepGraph, krate: &Crate, resolver: &mut dyn Resolver, + nt_to_tokenstream: NtToTokenstream, ) -> hir::Crate { // We're constructing the HIR here; we don't care what we will // read, since we haven't even constructed the *input* to // incr. comp. yet. dep_graph.assert_ignored(); + let _prof_timer = sess.prof.generic_activity("hir_lowering"); + LoweringContext { crate_root: sess.parse_sess.injected_crate_name.try_get().copied(), sess, - cstore, resolver, + nt_to_tokenstream, items: BTreeMap::new(), trait_items: BTreeMap::new(), impl_items: BTreeMap::new(), @@ -262,7 +275,7 @@ pub fn lower_crate( is_in_dyn_type: false, anonymous_lifetime_mode: AnonymousLifetimeMode::PassThrough, type_def_lifetime_params: Default::default(), - current_module: CRATE_NODE_ID, + current_module: hir::CRATE_HIR_ID, current_hir_id_owner: vec![(CRATE_DEF_INDEX, 0)], item_local_id_counters: Default::default(), node_id_to_hir_id: IndexVec::new(), @@ -346,7 +359,7 @@ struct ImplTraitTypeIdVisitor<'a> { ids: &'a mut SmallVec<[NodeId; 1]> } impl<'a, 'b> Visitor<'a> for ImplTraitTypeIdVisitor<'b> { fn visit_ty(&mut self, ty: &'a Ty) { - match ty.node { + match ty.kind { | TyKind::Typeof(_) | TyKind::BareFn(_) => return, @@ -425,7 +438,7 @@ impl<'a> LoweringContext<'a> { impl<'tcx, 'interner> Visitor<'tcx> for MiscCollector<'tcx, 'interner> { fn visit_pat(&mut self, p: &'tcx Pat) { - if let PatKind::Paren(..) | PatKind::Rest = p.node { + if let PatKind::Paren(..) | PatKind::Rest = p.kind { // Doesn't generate a HIR node } else if let Some(owner) = self.hir_id_owner { self.lctx.lower_node_id_with_owner(p.id, owner); @@ -434,39 +447,10 @@ impl<'a> LoweringContext<'a> { visit::walk_pat(self, p) } - // HACK(or_patterns; Centril | dlrobertson): Avoid creating - // HIR nodes for `PatKind::Or` for the top level of a `ast::Arm`. - // This is a temporary hack that should go away once we push down - // `arm.pats: HirVec>` -> `arm.pat: P` to HIR. // Centril - fn visit_arm(&mut self, arm: &'tcx Arm) { - match &arm.pat.node { - PatKind::Or(pats) => pats.iter().for_each(|p| self.visit_pat(p)), - _ => self.visit_pat(&arm.pat), - } - walk_list!(self, visit_expr, &arm.guard); - self.visit_expr(&arm.body); - walk_list!(self, visit_attribute, &arm.attrs); - } - - // HACK(or_patterns; Centril | dlrobertson): Same as above. 
// Centril - fn visit_expr(&mut self, e: &'tcx Expr) { - if let ExprKind::Let(pat, scrutinee) = &e.node { - walk_list!(self, visit_attribute, e.attrs.iter()); - match &pat.node { - PatKind::Or(pats) => pats.iter().for_each(|p| self.visit_pat(p)), - _ => self.visit_pat(&pat), - } - self.visit_expr(scrutinee); - self.visit_expr_post(e); - return; - } - visit::walk_expr(self, e) - } - fn visit_item(&mut self, item: &'tcx Item) { let hir_id = self.lctx.allocate_hir_id_counter(item.id); - match item.node { + match item.kind { ItemKind::Struct(_, ref generics) | ItemKind::Union(_, ref generics) | ItemKind::Enum(_, ref generics) @@ -498,7 +482,7 @@ impl<'a> LoweringContext<'a> { fn visit_trait_item(&mut self, item: &'tcx TraitItem) { self.lctx.allocate_hir_id_counter(item.id); - match item.node { + match item.kind { TraitItemKind::Method(_, None) => { // Ignore patterns in trait methods without bodies self.with_hir_id_owner(None, |this| { @@ -526,7 +510,7 @@ impl<'a> LoweringContext<'a> { } fn visit_ty(&mut self, t: &'tcx Ty) { - match t.node { + match t.kind { // Mirrors the case in visit::walk_ty TyKind::BareFn(ref f) => { walk_list!( @@ -809,15 +793,15 @@ impl<'a> LoweringContext<'a> { // really show up for end-user. let (str_name, kind) = match hir_name { ParamName::Plain(ident) => ( - ident.as_interned_str(), + ident.name, hir::LifetimeParamKind::InBand, ), ParamName::Fresh(_) => ( - kw::UnderscoreLifetime.as_interned_str(), + kw::UnderscoreLifetime, hir::LifetimeParamKind::Elided, ), ParamName::Error => ( - kw::UnderscoreLifetime.as_interned_str(), + kw::UnderscoreLifetime, hir::LifetimeParamKind::Error, ), }; @@ -997,7 +981,7 @@ impl<'a> LoweringContext<'a> { if id.is_local() { self.resolver.definitions().def_key(id.index) } else { - self.cstore.def_key(id) + self.resolver.cstore().def_key(id) } } @@ -1017,10 +1001,12 @@ impl<'a> LoweringContext<'a> { // lower attributes (we use the AST version) there is nowhere to keep // the `HirId`s. We don't actually need HIR version of attributes anyway. Attribute { + item: AttrItem { + path: attr.path.clone(), + tokens: self.lower_token_stream(attr.tokens.clone()), + }, id: attr.id, style: attr.style, - path: attr.path.clone(), - tokens: self.lower_token_stream(attr.tokens.clone()), is_sugared_doc: attr.is_sugared_doc, span: attr.span, } @@ -1047,7 +1033,7 @@ impl<'a> LoweringContext<'a> { fn lower_token(&mut self, token: Token) -> TokenStream { match token.kind { token::Interpolated(nt) => { - let tts = nt.to_tokenstream(&self.sess.parse_sess, token.span); + let tts = (self.nt_to_tokenstream)(&nt, &self.sess.parse_sess, token.span); self.lower_token_stream(tts) } _ => TokenTree::Token(token).into(), @@ -1133,7 +1119,7 @@ impl<'a> LoweringContext<'a> { let ty = this.lower_ty( &Ty { id: this.sess.next_node_id(), - node: TyKind::ImplTrait(impl_trait_node_id, bounds.clone()), + kind: TyKind::ImplTrait(impl_trait_node_id, bounds.clone()), span: constraint.span, }, itctx, @@ -1194,14 +1180,14 @@ impl<'a> LoweringContext<'a> { let id = self.lower_node_id(t.id); let qpath = self.lower_qpath(t.id, qself, path, param_mode, itctx); let ty = self.ty_path(id, t.span, qpath); - if let hir::TyKind::TraitObject(..) = ty.node { + if let hir::TyKind::TraitObject(..) 
= ty.kind { self.maybe_lint_bare_trait(t.span, t.id, qself.is_none() && path.is_global()); } ty } fn lower_ty_direct(&mut self, t: &Ty, mut itctx: ImplTraitContext<'_>) -> hir::Ty { - let kind = match t.node { + let kind = match t.kind { TyKind::Infer => hir::TyKind::Infer, TyKind::Err => hir::TyKind::Err, TyKind::Slice(ref ty) => hir::TyKind::Slice(self.lower_ty(ty, itctx)), @@ -1364,17 +1350,12 @@ impl<'a> LoweringContext<'a> { } } } - TyKind::Mac(_) => bug!("`TyMac` should have been expanded by now"), - TyKind::CVarArgs => { - // Create the implicit lifetime of the "spoofed" `VaListImpl`. - let span = self.sess.source_map().next_point(t.span.shrink_to_lo()); - let lt = self.new_implicit_lifetime(span); - hir::TyKind::CVarArgs(lt) - }, + TyKind::Mac(_) => bug!("`TyKind::Mac` should have been expanded by now"), + TyKind::CVarArgs => bug!("`TyKind::CVarArgs` should have been handled elsewhere"), }; hir::Ty { - node: kind, + kind, span: t.span, hir_id: self.lower_node_id(t.id), } @@ -1474,7 +1455,7 @@ impl<'a> LoweringContext<'a> { hir_id: opaque_ty_id, ident: Ident::invalid(), attrs: Default::default(), - node: opaque_ty_item_kind, + kind: opaque_ty_item_kind, vis: respan(span.shrink_to_lo(), hir::VisibilityKind::Inherited), span: opaque_ty_span, }; @@ -1534,7 +1515,7 @@ impl<'a> LoweringContext<'a> { fn visit_ty(&mut self, t: &'v hir::Ty) { // Don't collect elided lifetimes used inside of `fn()` syntax. - if let hir::TyKind::BareFn(_) = t.node { + if let hir::TyKind::BareFn(_) = t.kind { let old_collect_elided_lifetimes = self.collect_elided_lifetimes; self.collect_elided_lifetimes = false; @@ -1610,7 +1591,7 @@ impl<'a> LoweringContext<'a> { self.context.resolver.definitions().create_def_with_parent( self.parent, def_node_id, - DefPathData::LifetimeNs(name.ident().as_interned_str()), + DefPathData::LifetimeNs(name.ident().name), ExpnId::root(), lifetime.span); @@ -1747,8 +1728,8 @@ impl<'a> LoweringContext<'a> { return n; } assert!(!def_id.is_local()); - let item_generics = - self.cstore.item_generics_cloned_untracked(def_id, self.sess); + let item_generics = self.resolver.cstore() + .item_generics_cloned_untracked(def_id, self.sess); let n = item_generics.own_counts().lifetimes; self.type_def_lifetime_params.insert(def_id, n); n @@ -1879,7 +1860,7 @@ impl<'a> LoweringContext<'a> { GenericArgs::Parenthesized(ref data) => match parenthesized_generic_args { ParenthesizedGenericArgs::Ok => self.lower_parenthesized_parameter_data(data), ParenthesizedGenericArgs::Warn => { - self.sess.buffer_lint( + self.resolver.lint_buffer().buffer_lint( PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES, CRATE_NODE_ID, data.span, @@ -1893,15 +1874,16 @@ impl<'a> LoweringContext<'a> { if let Ok(snippet) = self.sess.source_map().span_to_snippet(data.span) { // Do not suggest going from `Trait()` to `Trait<>` if data.inputs.len() > 0 { - let split = snippet.find('(').unwrap(); - let trait_name = &snippet[0..split]; - let args = &snippet[split + 1 .. snippet.len() - 1]; - err.span_suggestion( - data.span, - "use angle brackets instead", - format!("{}<{}>", trait_name, args), - Applicability::MaybeIncorrect, - ); + if let Some(split) = snippet.find('(') { + let trait_name = &snippet[0..split]; + let args = &snippet[split + 1 .. 
snippet.len() - 1]; + err.span_suggestion( + data.span, + "use angle brackets instead", + format!("{}<{}>", trait_name, args), + Applicability::MaybeIncorrect, + ); + } } }; err.emit(); @@ -1975,7 +1957,7 @@ impl<'a> LoweringContext<'a> { } AnonymousLifetimeMode::PassThrough | AnonymousLifetimeMode::ReportError => { - self.sess.buffer_lint_with_diagnostic( + self.resolver.lint_buffer().buffer_lint_with_diagnostic( ELIDED_LIFETIMES_IN_PATHS, CRATE_NODE_ID, path_span, @@ -2055,7 +2037,7 @@ impl<'a> LoweringContext<'a> { .map(|ty| this.lower_ty_direct(ty, ImplTraitContext::disallowed())) .collect(); let mk_tup = |this: &mut Self, tys, span| { - hir::Ty { node: hir::TyKind::Tup(tys), hir_id: this.next_id(), span } + hir::Ty { kind: hir::TyKind::Tup(tys), hir_id: this.next_id(), span } }; ( hir::GenericArgs { @@ -2122,9 +2104,16 @@ impl<'a> LoweringContext<'a> { } fn lower_fn_params_to_names(&mut self, decl: &FnDecl) -> hir::HirVec { - decl.inputs + // Skip the `...` (`CVarArgs`) trailing arguments from the AST, + // as they are not explicit in HIR/Ty function signatures. + // (instead, the `c_variadic` flag is set to `true`) + let mut inputs = &decl.inputs[..]; + if decl.c_variadic() { + inputs = &inputs[..inputs.len() - 1]; + } + inputs .iter() - .map(|param| match param.pat.node { + .map(|param| match param.pat.kind { PatKind::Ident(_, ident, _) => ident, _ => Ident::new(kw::Invalid, param.pat.span), }) @@ -2159,10 +2148,19 @@ impl<'a> LoweringContext<'a> { self.anonymous_lifetime_mode }; + let c_variadic = decl.c_variadic(); + // Remember how many lifetimes were already around so that we can // only look at the lifetime parameters introduced by the arguments. let inputs = self.with_anonymous_lifetime_mode(lt_mode, |this| { - decl.inputs + // Skip the `...` (`CVarArgs`) trailing arguments from the AST, + // as they are not explicit in HIR/Ty function signatures. + // (instead, the `c_variadic` flag is set to `true`) + let mut inputs = &decl.inputs[..]; + if c_variadic { + inputs = &inputs[..inputs.len() - 1]; + } + inputs .iter() .map(|param| { if let Some((_, ibty)) = &mut in_band_ty_params { @@ -2184,9 +2182,7 @@ impl<'a> LoweringContext<'a> { match decl.output { FunctionRetTy::Ty(ref ty) => match in_band_ty_params { Some((def_id, _)) if impl_trait_return_allow => { - hir::Return(self.lower_ty(ty, - ImplTraitContext::OpaqueTy(Some(def_id)) - )) + hir::Return(self.lower_ty(ty, ImplTraitContext::OpaqueTy(Some(def_id)))) } _ => { hir::Return(self.lower_ty(ty, ImplTraitContext::disallowed())) @@ -2199,27 +2195,27 @@ impl<'a> LoweringContext<'a> { P(hir::FnDecl { inputs, output, - c_variadic: decl.c_variadic, + c_variadic, implicit_self: decl.inputs.get(0).map_or( hir::ImplicitSelfKind::None, |arg| { - let is_mutable_pat = match arg.pat.node { + let is_mutable_pat = match arg.pat.kind { PatKind::Ident(BindingMode::ByValue(mt), _, _) | PatKind::Ident(BindingMode::ByRef(mt), _, _) => mt == Mutability::Mutable, _ => false, }; - match arg.ty.node { + match arg.ty.kind { TyKind::ImplicitSelf if is_mutable_pat => hir::ImplicitSelfKind::Mut, TyKind::ImplicitSelf => hir::ImplicitSelfKind::Imm, // Given we are only considering `ImplicitSelf` types, we needn't consider // the case where we have a mutable pattern to a reference as that would // no longer be an `ImplicitSelf`. 
- TyKind::Rptr(_, ref mt) if mt.ty.node.is_implicit_self() && + TyKind::Rptr(_, ref mt) if mt.ty.kind.is_implicit_self() && mt.mutbl == ast::Mutability::Mutable => hir::ImplicitSelfKind::MutRef, - TyKind::Rptr(_, ref mt) if mt.ty.node.is_implicit_self() => + TyKind::Rptr(_, ref mt) if mt.ty.kind.is_implicit_self() => hir::ImplicitSelfKind::ImmRef, _ => hir::ImplicitSelfKind::None, } @@ -2434,7 +2430,7 @@ impl<'a> LoweringContext<'a> { let opaque_ty_ref = hir::TyKind::Def(hir::ItemId { id: opaque_ty_id }, generic_args.into()); hir::FunctionRetTy::Return(P(hir::Ty { - node: opaque_ty_ref, + kind: opaque_ty_ref, span, hir_id: self.next_id(), })) @@ -2455,7 +2451,7 @@ impl<'a> LoweringContext<'a> { FunctionRetTy::Default(ret_ty_span) => { P(hir::Ty { hir_id: self.next_id(), - node: hir::TyKind::Tup(hir_vec![]), + kind: hir::TyKind::Tup(hir_vec![]), span: *ret_ty_span, }) } @@ -2691,7 +2687,7 @@ impl<'a> LoweringContext<'a> { for (index, stmt) in b.stmts.iter().enumerate() { if index == b.stmts.len() - 1 { - if let StmtKind::Expr(ref e) = stmt.node { + if let StmtKind::Expr(ref e) = stmt.kind { expr = Some(P(self.lower_expr(e))); } else { stmts.extend(self.lower_stmt(stmt)); @@ -2719,7 +2715,7 @@ impl<'a> LoweringContext<'a> { } fn lower_pat(&mut self, p: &Pat) -> P { - let node = match p.node { + let node = match p.kind { PatKind::Wild => hir::PatKind::Wild, PatKind::Ident(ref binding_mode, ident, ref sub) => { let lower_sub = |this: &mut Self| sub.as_ref().map(|x| this.lower_pat(x)); @@ -2836,7 +2832,7 @@ impl<'a> LoweringContext<'a> { let mut iter = pats.iter(); while let Some(pat) = iter.next() { // Interpret the first `((ref mut?)? x @)? ..` pattern as a subslice pattern. - match pat.node { + match pat.kind { PatKind::Rest => { prev_rest_span = Some(pat.span); slice = Some(self.pat_wild_with_node_id_of(pat)); @@ -2858,7 +2854,7 @@ impl<'a> LoweringContext<'a> { while let Some(pat) = iter.next() { // There was a previous subslice pattern; make sure we don't allow more. - let rest_span = match pat.node { + let rest_span = match pat.kind { PatKind::Rest => Some(pat.span), PatKind::Ident(.., Some(ref sub)) if sub.is_rest() => { // The `HirValidator` is merciless; add a `_` pattern to avoid ICEs. @@ -2915,10 +2911,10 @@ impl<'a> LoweringContext<'a> { } /// Construct a `Pat` with the `HirId` of `p.id` lowered. - fn pat_with_node_id_of(&mut self, p: &Pat, node: hir::PatKind) -> P { + fn pat_with_node_id_of(&mut self, p: &Pat, kind: hir::PatKind) -> P { P(hir::Pat { hir_id: self.lower_node_id(p.id), - node, + kind, span: p.span, }) } @@ -2962,7 +2958,7 @@ impl<'a> LoweringContext<'a> { } fn lower_stmt(&mut self, s: &Stmt) -> SmallVec<[hir::Stmt; 1]> { - let node = match s.node { + let kind = match s.kind { StmtKind::Local(ref l) => { let (l, item_ids) = self.lower_local(l); let mut ids: SmallVec<[hir::Stmt; 1]> = item_ids @@ -2975,7 +2971,7 @@ impl<'a> LoweringContext<'a> { ids.push({ hir::Stmt { hir_id: self.lower_node_id(s.id), - node: hir::StmtKind::Local(P(l)), + kind: hir::StmtKind::Local(P(l)), span: s.span, } }); @@ -2993,7 +2989,7 @@ impl<'a> LoweringContext<'a> { hir::Stmt { hir_id, - node: hir::StmtKind::Item(item_id), + kind: hir::StmtKind::Item(item_id), span: s.span, } }) @@ -3005,7 +3001,7 @@ impl<'a> LoweringContext<'a> { }; smallvec![hir::Stmt { hir_id: self.lower_node_id(s.id), - node, + kind, span: s.span, }] } @@ -3042,8 +3038,8 @@ impl<'a> LoweringContext<'a> { // Helper methods for building HIR. 
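The subslice-pattern handling above (treating a single `..`, optionally bound as `x @ ..`, as the rest pattern) corresponds to ordinary slice patterns in surface Rust. A small illustration that compiles on current stable Rust; the variable names are arbitrary and only meant to show the `((ref mut?)? x @)? ..` form the comment refers to:

```rust
fn main() {
    let arr = [1, 2, 3, 4, 5];

    // Exactly one `..` rest pattern is allowed per slice pattern; binding it
    // with `middle @ ..` is the form the lowering code special-cases.
    let [first, middle @ .., last] = arr;

    assert_eq!(first, 1);
    assert_eq!(middle, [2, 3, 4]);
    assert_eq!(last, 5);
}
```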
- fn stmt(&mut self, span: Span, node: hir::StmtKind) -> hir::Stmt { - hir::Stmt { span, node, hir_id: self.next_id() } + fn stmt(&mut self, span: Span, kind: hir::StmtKind) -> hir::Stmt { + hir::Stmt { span, kind, hir_id: self.next_id() } } fn stmt_expr(&mut self, span: Span, expr: hir::Expr) -> hir::Stmt { @@ -3143,7 +3139,7 @@ impl<'a> LoweringContext<'a> { ( P(hir::Pat { hir_id, - node: hir::PatKind::Binding(bm, hir_id, ident.with_span_pos(span), None), + kind: hir::PatKind::Binding(bm, hir_id, ident.with_span_pos(span), None), span, }), hir_id @@ -3154,10 +3150,10 @@ impl<'a> LoweringContext<'a> { self.pat(span, hir::PatKind::Wild) } - fn pat(&mut self, span: Span, pat: hir::PatKind) -> P { + fn pat(&mut self, span: Span, kind: hir::PatKind) -> P { P(hir::Pat { hir_id: self.next_id(), - node: pat, + kind, span, }) } @@ -3195,7 +3191,7 @@ impl<'a> LoweringContext<'a> { } fn ty_path(&mut self, mut hir_id: hir::HirId, span: Span, qpath: hir::QPath) -> hir::Ty { - let node = match qpath { + let kind = match qpath { hir::QPath::Resolved(None, path) => { // Turn trait object paths into `TyKind::TraitObject` instead. match path.res { @@ -3219,9 +3215,10 @@ impl<'a> LoweringContext<'a> { } _ => hir::TyKind::Path(qpath), }; + hir::Ty { hir_id, - node, + kind, span, } } @@ -3296,10 +3293,14 @@ impl<'a> LoweringContext<'a> { let id = self.sess.next_node_id(); self.new_named_lifetime(id, span, hir::LifetimeName::Error) } - // This is the normal case. - AnonymousLifetimeMode::PassThrough => self.new_implicit_lifetime(span), - - AnonymousLifetimeMode::ReportError => self.new_error_lifetime(None, span), + // `PassThrough` is the normal case. + // `new_error_lifetime`, which would usually be used in the case of `ReportError`, + // is unsuitable here, as these can occur from missing lifetime parameters in a + // `PathSegment`, for which there is no associated `'_` or `&T` with no explicit + // lifetime. Instead, we simply create an implicit lifetime, which will be checked + // later, at which point a suitable error will be emitted. + | AnonymousLifetimeMode::PassThrough + | AnonymousLifetimeMode::ReportError => self.new_implicit_lifetime(span), } } @@ -3349,7 +3350,7 @@ impl<'a> LoweringContext<'a> { } } - fn maybe_lint_bare_trait(&self, span: Span, id: NodeId, is_global: bool) { + fn maybe_lint_bare_trait(&mut self, span: Span, id: NodeId, is_global: bool) { // FIXME(davidtwco): This is a hack to detect macros which produce spans of the // call site which do not have a macro backtrace. See #61963. let is_macro_callsite = self.sess.source_map() @@ -3357,7 +3358,7 @@ impl<'a> LoweringContext<'a> { .map(|snippet| snippet.starts_with("#[")) .unwrap_or(true); if !is_macro_callsite { - self.sess.buffer_lint_with_diagnostic( + self.resolver.lint_buffer().buffer_lint_with_diagnostic( builtin::BARE_TRAIT_OBJECTS, id, span, @@ -3409,7 +3410,7 @@ pub fn is_range_literal(sess: &Session, expr: &hir::Expr) -> bool { } }; - match expr.node { + match expr.kind { // All built-in range literals but `..=` and `..` desugar to `Struct`s. ExprKind::Struct(ref qpath, _, _) => { if let QPath::Resolved(None, ref path) = **qpath { @@ -3424,8 +3425,8 @@ pub fn is_range_literal(sess: &Session, expr: &hir::Expr) -> bool { // `..=` desugars into `::std::ops::RangeInclusive::new(...)`. 
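To make the checks above concrete: `is_range_literal` can inspect the desugared form because, by the time HIR is built, range syntax has already been replaced by the `std::ops` range types, with only `..=` going through a constructor call. A stable-Rust illustration of that mapping (purely illustrative, not part of the patch):

```rust
use std::ops::{Range, RangeInclusive, RangeTo};

fn main() {
    // `a..b` is sugar for a `Range` struct literal...
    assert_eq!(2..5, Range { start: 2, end: 5 });
    // ...and `..b` for a `RangeTo` struct literal.
    assert_eq!(..5, RangeTo { end: 5 });

    // `a..=b`, by contrast, becomes a call to `RangeInclusive::new`,
    // which is why the lowering code checks for a `new` path segment.
    assert_eq!(2..=5, RangeInclusive::new(2, 5));
}
```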
ExprKind::Call(ref func, _) => { - if let ExprKind::Path(QPath::TypeRelative(ref ty, ref segment)) = func.node { - if let TyKind::Path(QPath::Resolved(None, ref path)) = ty.node { + if let ExprKind::Path(QPath::TypeRelative(ref ty, ref segment)) = func.kind { + if let TyKind::Path(QPath::Resolved(None, ref path)) = ty.kind { let new_call = segment.ident.as_str() == "new"; return is_range_path(&path) && is_lit(sess, &expr.span) && new_call; } diff --git a/src/librustc/hir/lowering/expr.rs b/src/librustc/hir/lowering/expr.rs index 990728fa0e..73db762a64 100644 --- a/src/librustc/hir/lowering/expr.rs +++ b/src/librustc/hir/lowering/expr.rs @@ -17,7 +17,7 @@ impl LoweringContext<'_> { } pub(super) fn lower_expr(&mut self, e: &Expr) -> hir::Expr { - let kind = match e.node { + let kind = match e.kind { ExprKind::Box(ref inner) => hir::ExprKind::Box(P(self.lower_expr(inner))), ExprKind::Array(ref exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)), ExprKind::Repeat(ref expr, ref count) => { @@ -54,7 +54,7 @@ impl LoweringContext<'_> { let ohs = P(self.lower_expr(ohs)); hir::ExprKind::Unary(op, ohs) } - ExprKind::Lit(ref l) => hir::ExprKind::Lit(respan(l.span, l.node.clone())), + ExprKind::Lit(ref l) => hir::ExprKind::Lit(respan(l.span, l.kind.clone())), ExprKind::Cast(ref expr, ref ty) => { let expr = P(self.lower_expr(expr)); hir::ExprKind::Cast(expr, self.lower_ty(ty, ImplTraitContext::disallowed())) @@ -89,9 +89,14 @@ impl LoweringContext<'_> { hir::MatchSource::Normal, ), ExprKind::Async(capture_clause, closure_node_id, ref block) => { - self.make_async_expr(capture_clause, closure_node_id, None, block.span, |this| { - this.with_new_scopes(|this| this.lower_block_expr(block)) - }) + self.make_async_expr( + capture_clause, + closure_node_id, + None, + block.span, + hir::AsyncGeneratorKind::Block, + |this| this.with_new_scopes(|this| this.lower_block_expr(block)), + ) } ExprKind::Await(ref expr) => self.lower_expr_await(e.span, expr), ExprKind::Closure( @@ -184,7 +189,7 @@ impl LoweringContext<'_> { hir::Expr { hir_id: self.lower_node_id(e.id), - node: kind, + kind, span: e.span, attrs: e.attrs.clone(), } @@ -230,11 +235,20 @@ impl LoweringContext<'_> { /// ``` fn lower_expr_let(&mut self, span: Span, pat: &Pat, scrutinee: &Expr) -> hir::ExprKind { // If we got here, the `let` expression is not allowed. - self.sess - .struct_span_err(span, "`let` expressions are not supported here") - .note("only supported directly in conditions of `if`- and `while`-expressions") - .note("as well as when nested within `&&` and parenthesis in those conditions") - .emit(); + + if self.sess.opts.unstable_features.is_nightly_build() { + self.sess + .struct_span_err(span, "`let` expressions are not supported here") + .note("only supported directly in conditions of `if`- and `while`-expressions") + .note("as well as when nested within `&&` and parenthesis in those conditions") + .emit(); + } + else { + self.sess + .struct_span_err(span, "expected expression, found statement (`let`)") + .note("variable declaration using `let` is a statement") + .emit(); + } // For better recovery, we emit: // ``` @@ -247,14 +261,14 @@ impl LoweringContext<'_> { // 4. The return type of the block is `bool` which seems like what the user wanted. 
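For readers following the recovery strategy described in the comment above: an out-of-place `let <pat> = <expr>` is replaced by a `bool`-producing `match`, which in surface Rust looks roughly like the following (the names are invented for the example):

```rust
fn main() {
    let opt = Some(3);

    // Roughly what an unsupported `let Some(x) = opt` condition is
    // recovered into: a match that yields `bool`, so the surrounding
    // condition can still be type checked sensibly.
    let cond = match opt {
        Some(_x) => true,
        _ => false,
    };

    assert!(cond);
}
```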
let scrutinee = self.lower_expr(scrutinee); let then_arm = { - let pat = self.lower_pat_top_hack(pat); + let pat = self.lower_pat(pat); let expr = self.expr_bool(span, true); self.arm(pat, P(expr)) }; let else_arm = { let pat = self.pat_wild(span); let expr = self.expr_bool(span, false); - self.arm(hir_vec![pat], P(expr)) + self.arm(pat, P(expr)) }; hir::ExprKind::Match( P(scrutinee), @@ -278,15 +292,15 @@ impl LoweringContext<'_> { None => (self.expr_block_empty(span), false), Some(els) => (self.lower_expr(els), true), }; - let else_arm = self.arm(hir_vec![else_pat], P(else_expr)); + let else_arm = self.arm(else_pat, P(else_expr)); // Handle then + scrutinee: let then_expr = self.lower_block_expr(then); - let (then_pat, scrutinee, desugar) = match cond.node { + let (then_pat, scrutinee, desugar) = match cond.kind { // ` => `: ExprKind::Let(ref pat, ref scrutinee) => { let scrutinee = self.lower_expr(scrutinee); - let pat = self.lower_pat_top_hack(pat); + let pat = self.lower_pat(pat); (pat, scrutinee, hir::MatchSource::IfLetDesugar { contains_else_clause }) } // `true => `: @@ -303,7 +317,7 @@ impl LoweringContext<'_> { // let temporaries live outside of `cond`. let cond = self.expr_drop_temps(span_block, P(cond), ThinVec::new()); let pat = self.pat_bool(span, true); - (hir_vec![pat], cond, hir::MatchSource::IfDesugar { contains_else_clause }) + (pat, cond, hir::MatchSource::IfDesugar { contains_else_clause }) } }; let then_arm = self.arm(then_pat, P(then_expr)); @@ -327,12 +341,12 @@ impl LoweringContext<'_> { let else_arm = { let else_pat = self.pat_wild(span); let else_expr = self.expr_break(span, ThinVec::new()); - self.arm(hir_vec![else_pat], else_expr) + self.arm(else_pat, else_expr) }; // Handle then + scrutinee: let then_expr = self.lower_block_expr(body); - let (then_pat, scrutinee, desugar, source) = match cond.node { + let (then_pat, scrutinee, desugar, source) = match cond.kind { ExprKind::Let(ref pat, ref scrutinee) => { // to: // @@ -343,7 +357,7 @@ impl LoweringContext<'_> { // } // } let scrutinee = self.with_loop_condition_scope(|t| t.lower_expr(scrutinee)); - let pat = self.lower_pat_top_hack(pat); + let pat = self.lower_pat(pat); (pat, scrutinee, hir::MatchSource::WhileLetDesugar, hir::LoopSource::WhileLet) } _ => { @@ -371,7 +385,7 @@ impl LoweringContext<'_> { let cond = self.expr_drop_temps(span_block, P(cond), ThinVec::new()); // `true => `: let pat = self.pat_bool(span, true); - (hir_vec![pat], cond, hir::MatchSource::WhileDesugar, hir::LoopSource::While) + (pat, cond, hir::MatchSource::WhileDesugar, hir::LoopSource::While) } }; let then_arm = self.arm(then_pat, P(then_expr)); @@ -392,19 +406,35 @@ impl LoweringContext<'_> { ) } + /// Desugar `try { ; }` into `{ ; ::std::ops::Try::from_ok() }`, + /// `try { ; }` into `{ ; ::std::ops::Try::from_ok(()) }` + /// and save the block id to use it as a break target for desugaring of the `?` operator. 
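Because `try` blocks and `Try::from_ok` are unstable, here is a rough stable-Rust approximation of the shape the doc comment above describes: the block's tail expression is "ok-wrapped", and `?` leaves early along the error path. The immediately-invoked closure is only a stand-in for the block the real lowering produces, not what the compiler emits:

```rust
fn main() {
    // Conceptually `try { "21".parse::<i32>()? * 2 }`:
    let result: Result<i32, std::num::ParseIntError> = (|| {
        let doubled = "21".parse::<i32>()? * 2; // `?` exits with the error
        Ok(doubled)                             // tail expression is ok-wrapped
    })();

    assert_eq!(result, Ok(42));
}
```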
fn lower_expr_try_block(&mut self, body: &Block) -> hir::ExprKind { self.with_catch_scope(body.id, |this| { - let unstable_span = this.mark_span_with_reason( + let mut block = this.lower_block(body, true).into_inner(); + + let try_span = this.mark_span_with_reason( DesugaringKind::TryBlock, body.span, this.allow_try_trait.clone(), ); - let mut block = this.lower_block(body, true).into_inner(); - let tail = block.expr.take().map_or_else( - || this.expr_unit(this.sess.source_map().end_point(unstable_span)), + + // Final expression of the block (if present) or `()` with span at the end of block + let tail_expr = block.expr.take().map_or_else( + || this.expr_unit(this.sess.source_map().end_point(try_span)), |x: P| x.into_inner(), ); - block.expr = Some(this.wrap_in_try_constructor(sym::from_ok, tail, unstable_span)); + + let ok_wrapped_span = this.mark_span_with_reason( + DesugaringKind::TryBlock, + tail_expr.span, + None + ); + + // `::std::ops::Try::from_ok($tail_expr)` + block.expr = Some(this.wrap_in_try_constructor( + sym::from_ok, try_span, tail_expr, ok_wrapped_span)); + hir::ExprKind::Block(P(block), None) }) } @@ -412,19 +442,20 @@ impl LoweringContext<'_> { fn wrap_in_try_constructor( &mut self, method: Symbol, - e: hir::Expr, - unstable_span: Span, + method_span: Span, + expr: hir::Expr, + overall_span: Span, ) -> P { let path = &[sym::ops, sym::Try, method]; - let from_err = P(self.expr_std_path(unstable_span, path, None, ThinVec::new())); - P(self.expr_call(e.span, from_err, hir_vec![e])) + let constructor = P(self.expr_std_path(method_span, path, None, ThinVec::new())); + P(self.expr_call(overall_span, constructor, hir_vec![expr])) } fn lower_arm(&mut self, arm: &Arm) -> hir::Arm { hir::Arm { hir_id: self.next_id(), attrs: self.lower_attrs(&arm.attrs), - pats: self.lower_pat_top_hack(&arm.pat), + pat: self.lower_pat(&arm.pat), guard: match arm.guard { Some(ref x) => Some(hir::Guard::If(P(self.lower_expr(x)))), _ => None, @@ -434,22 +465,13 @@ impl LoweringContext<'_> { } } - /// HACK(or_patterns; Centril | dlrobertson): For now we don't push down top level or-patterns - /// `p | q` into `hir::PatKind::Or(...)` as post-lowering bits of the compiler are not ready - /// to deal with it. This should by fixed by pushing it down to HIR and then HAIR. 
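The removal of `lower_pat_top_hack` and the switch from a `pats` list to a single `pat` per arm mean that alternatives such as `p | q` are now kept as one or-pattern on the arm. In surface Rust the construct in question is simply:

```rust
fn main() {
    let n = 2;

    // With this change, `1 | 2 | 3` is lowered to one arm holding a single
    // or-pattern, rather than an arm carrying a list of patterns.
    let size = match n {
        1 | 2 | 3 => "small",
        _ => "big",
    };

    assert_eq!(size, "small");
}
```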
- fn lower_pat_top_hack(&mut self, pat: &Pat) -> HirVec> { - match pat.node { - PatKind::Or(ref ps) => ps.iter().map(|x| self.lower_pat(x)).collect(), - _ => hir_vec![self.lower_pat(pat)], - } - } - pub(super) fn make_async_expr( &mut self, capture_clause: CaptureBy, closure_node_id: NodeId, ret_ty: Option>, span: Span, + async_gen_kind: hir::AsyncGeneratorKind, body: impl FnOnce(&mut LoweringContext<'_>) -> hir::Expr, ) -> hir::ExprKind { let capture_clause = self.lower_capture_clause(capture_clause); @@ -460,16 +482,15 @@ impl LoweringContext<'_> { let ast_decl = FnDecl { inputs: vec![], output, - c_variadic: false }; let decl = self.lower_fn_decl(&ast_decl, None, /* impl trait allowed */ false, None); let body_id = self.lower_fn_body(&ast_decl, |this| { - this.generator_kind = Some(hir::GeneratorKind::Async); + this.generator_kind = Some(hir::GeneratorKind::Async(async_gen_kind)); body(this) }); // `static || -> { body }`: - let generator_node = hir::ExprKind::Closure( + let generator_kind = hir::ExprKind::Closure( capture_clause, decl, body_id, @@ -478,7 +499,7 @@ impl LoweringContext<'_> { ); let generator = hir::Expr { hir_id: self.lower_node_id(closure_node_id), - node: generator_node, + kind: generator_kind, span, attrs: ThinVec::new(), }; @@ -516,7 +537,7 @@ impl LoweringContext<'_> { /// ``` fn lower_expr_await(&mut self, await_span: Span, expr: &Expr) -> hir::ExprKind { match self.generator_kind { - Some(hir::GeneratorKind::Async) => {}, + Some(hir::GeneratorKind::Async(_)) => {}, Some(hir::GeneratorKind::Gen) | None => { let mut err = struct_span_err!( @@ -592,7 +613,7 @@ impl LoweringContext<'_> { ); P(this.expr(await_span, expr_break, ThinVec::new())) }); - self.arm(hir_vec![ready_pat], break_x) + self.arm(ready_pat, break_x) }; // `::std::task::Poll::Pending => {}` @@ -603,7 +624,7 @@ impl LoweringContext<'_> { hir_vec![], ); let empty_block = P(self.expr_block_empty(span)); - self.arm(hir_vec![pending_pat], empty_block) + self.arm(pending_pat, empty_block) }; let inner_match_stmt = { @@ -635,7 +656,7 @@ impl LoweringContext<'_> { // loop { .. } let loop_expr = P(hir::Expr { hir_id: loop_hir_id, - node: hir::ExprKind::Loop( + kind: hir::ExprKind::Loop( loop_block, None, hir::LoopSource::Loop, @@ -645,7 +666,7 @@ impl LoweringContext<'_> { }); // mut pinned => loop { ... } - let pinned_arm = self.arm(hir_vec![pinned_pat], loop_expr); + let pinned_arm = self.arm(pinned_pat, loop_expr); // match { // mut pinned => loop { .. 
} @@ -715,14 +736,13 @@ impl LoweringContext<'_> { E0628, "generators cannot have explicit parameters" ); - self.sess.abort_if_errors(); } Some(match movability { Movability::Movable => hir::GeneratorMovability::Movable, Movability::Static => hir::GeneratorMovability::Static, }) }, - Some(hir::GeneratorKind::Async) => { + Some(hir::GeneratorKind::Async(_)) => { bug!("non-`async` closure body turned `async` during lowering"); }, None => { @@ -750,7 +770,6 @@ impl LoweringContext<'_> { let outer_decl = FnDecl { inputs: decl.inputs.clone(), output: FunctionRetTy::Default(fn_decl_span), - c_variadic: false, }; // We need to lower the declaration outside the new scope, because we // have to conserve the state of being inside a loop condition for the @@ -782,10 +801,12 @@ impl LoweringContext<'_> { None }; let async_body = this.make_async_expr( - capture_clause, closure_id, async_ret_ty, body.span, - |this| { - this.with_new_scopes(|this| this.lower_expr(body)) - } + capture_clause, + closure_id, + async_ret_ty, + body.span, + hir::AsyncGeneratorKind::Closure, + |this| this.with_new_scopes(|this| this.lower_expr(body)), ); this.expr(fn_decl_span, async_body, ThinVec::new()) }); @@ -1001,14 +1022,14 @@ impl LoweringContext<'_> { fn lower_expr_yield(&mut self, span: Span, opt_expr: Option<&Expr>) -> hir::ExprKind { match self.generator_kind { Some(hir::GeneratorKind::Gen) => {}, - Some(hir::GeneratorKind::Async) => { + Some(hir::GeneratorKind::Async(_)) => { span_err!( self.sess, span, E0727, "`async` generators are not yet supported", ); - self.sess.abort_if_errors(); + return hir::ExprKind::Err; }, None => self.generator_kind = Some(hir::GeneratorKind::Gen), } @@ -1050,10 +1071,9 @@ impl LoweringContext<'_> { ) -> hir::Expr { // expand let mut head = self.lower_expr(head); - let head_sp = head.span; let desugared_span = self.mark_span_with_reason( DesugaringKind::ForLoop, - head_sp, + head.span, None, ); head.span = desugared_span; @@ -1079,7 +1099,7 @@ impl LoweringContext<'_> { ThinVec::new(), )); let some_pat = self.pat_some(pat.span, val_pat); - self.arm(hir_vec![some_pat], assign) + self.arm(some_pat, assign) }; // `::std::option::Option::None => break` @@ -1087,7 +1107,7 @@ impl LoweringContext<'_> { let break_expr = self.with_loop_scope(e.id, |this| this.expr_break(e.span, ThinVec::new())); let pat = self.pat_none(e.span); - self.arm(hir_vec![pat], break_expr) + self.arm(pat, break_expr) }; // `mut iter` @@ -1099,21 +1119,21 @@ impl LoweringContext<'_> { // `match ::std::iter::Iterator::next(&mut iter) { ... 
}` let match_expr = { - let iter = P(self.expr_ident(head_sp, iter, iter_pat_nid)); - let ref_mut_iter = self.expr_mut_addr_of(head_sp, iter); + let iter = P(self.expr_ident(desugared_span, iter, iter_pat_nid)); + let ref_mut_iter = self.expr_mut_addr_of(desugared_span, iter); let next_path = &[sym::iter, sym::Iterator, sym::next]; let next_expr = P(self.expr_call_std_path( - head_sp, + desugared_span, next_path, hir_vec![ref_mut_iter], )); let arms = hir_vec![pat_arm, break_arm]; - self.expr_match(head_sp, next_expr, arms, hir::MatchSource::ForLoopDesugar) + self.expr_match(desugared_span, next_expr, arms, hir::MatchSource::ForLoopDesugar) }; - let match_stmt = self.stmt_expr(head_sp, match_expr); + let match_stmt = self.stmt_expr(desugared_span, match_expr); - let next_expr = P(self.expr_ident(head_sp, next_ident, next_pat_hid)); + let next_expr = P(self.expr_ident(desugared_span, next_ident, next_pat_hid)); // `let mut __next` let next_let = self.stmt_let_pat( @@ -1128,7 +1148,7 @@ impl LoweringContext<'_> { let pat = self.lower_pat(pat); let pat_let = self.stmt_let_pat( ThinVec::new(), - head_sp, + desugared_span, Some(next_expr), pat, hir::LocalSource::ForLoopDesugar, @@ -1145,34 +1165,34 @@ impl LoweringContext<'_> { )); // `[opt_ident]: loop { ... }` - let loop_expr = hir::ExprKind::Loop( + let kind = hir::ExprKind::Loop( loop_block, self.lower_label(opt_label), hir::LoopSource::ForLoop, ); let loop_expr = P(hir::Expr { hir_id: self.lower_node_id(e.id), - node: loop_expr, + kind, span: e.span, attrs: ThinVec::new(), }); // `mut iter => { ... }` - let iter_arm = self.arm(hir_vec![iter_pat], loop_expr); + let iter_arm = self.arm(iter_pat, loop_expr); // `match ::std::iter::IntoIterator::into_iter() { ... }` let into_iter_expr = { let into_iter_path = &[sym::iter, sym::IntoIterator, sym::into_iter]; P(self.expr_call_std_path( - head_sp, + desugared_span, into_iter_path, hir_vec![head], )) }; let match_expr = P(self.expr_match( - head_sp, + desugared_span, into_iter_expr, hir_vec![iter_arm], hir::MatchSource::ForLoopDesugar, @@ -1184,7 +1204,7 @@ impl LoweringContext<'_> { // surrounding scope of the `match` since the `match` is not a terminating scope. // // Also, add the attributes to the outer returned expr node. 
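Assembling the pieces quoted in the comments above, the `for`-loop lowering built by this function corresponds to roughly the following hand-expanded stable Rust (the `__next` name mirrors the comment; everything else is illustrative):

```rust
fn main() {
    let v = vec![1, 2, 3];
    let mut sum = 0;

    // Hand-expanded equivalent of `for x in v { sum += x; }`.
    match IntoIterator::into_iter(v) {
        mut iter => loop {
            let __next = match Iterator::next(&mut iter) {
                Some(val) => val,
                None => break,
            };
            let x = __next;
            sum += x;
        },
    };

    assert_eq!(sum, 6);
}
```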
- self.expr_drop_temps(head_sp, match_expr, e.attrs.clone()) + self.expr_drop_temps(desugared_span, match_expr, e.attrs.clone()) } /// Desugar `ExprKind::Try` from: `?` into: @@ -1244,7 +1264,7 @@ impl LoweringContext<'_> { ThinVec::from(attrs.clone()), )); let ok_pat = self.pat_ok(span, val_pat); - self.arm(hir_vec![ok_pat], val_expr) + self.arm(ok_pat, val_expr) }; // `Err(err) => #[allow(unreachable_code)] @@ -1258,7 +1278,7 @@ impl LoweringContext<'_> { self.expr_call_std_path(try_span, from_path, hir_vec![err_expr]) }; let from_err_expr = - self.wrap_in_try_constructor(sym::from_error, from_expr, unstable_span); + self.wrap_in_try_constructor(sym::from_error, unstable_span, from_expr, try_span); let thin_attrs = ThinVec::from(attrs); let catch_scope = self.catch_scopes.last().map(|x| *x); let ret_expr = if let Some(catch_node) = catch_scope { @@ -1279,7 +1299,7 @@ impl LoweringContext<'_> { }; let err_pat = self.pat_err(try_span, err_local); - self.arm(hir_vec![err_pat], ret_expr) + self.arm(err_pat, ret_expr) }; hir::ExprKind::Match( @@ -1453,15 +1473,10 @@ impl LoweringContext<'_> { pub(super) fn expr( &mut self, span: Span, - node: hir::ExprKind, + kind: hir::ExprKind, attrs: ThinVec ) -> hir::Expr { - hir::Expr { - hir_id: self.next_id(), - node, - span, - attrs, - } + hir::Expr { hir_id: self.next_id(), kind, span, attrs } } fn field(&mut self, ident: Ident, expr: P, span: Span) -> hir::Field { @@ -1474,14 +1489,11 @@ impl LoweringContext<'_> { } } - /// HACK(or_patterns; Centril | dlrobertson): For now we don't push down top level or-patterns - /// `p | q` into `hir::PatKind::Or(...)` as post-lowering bits of the compiler are not ready - /// to deal with it. This should by fixed by pushing it down to HIR and then HAIR. - fn arm(&mut self, pats: HirVec>, expr: P) -> hir::Arm { + fn arm(&mut self, pat: P, expr: P) -> hir::Arm { hir::Arm { hir_id: self.next_id(), attrs: hir_vec![], - pats, + pat, guard: None, span: expr.span, body: expr, diff --git a/src/librustc/hir/lowering/item.rs b/src/librustc/hir/lowering/item.rs index 61be40a6b9..9da87090c7 100644 --- a/src/librustc/hir/lowering/item.rs +++ b/src/librustc/hir/lowering/item.rs @@ -18,7 +18,7 @@ use smallvec::SmallVec; use syntax::attr; use syntax::ast::*; use syntax::visit::{self, Visitor}; -use syntax::ext::base::SpecialDerives; +use syntax::expand::SpecialDerives; use syntax::source_map::{respan, DesugaringKind, Spanned}; use syntax::symbol::{kw, sym}; use syntax_pos::Span; @@ -45,14 +45,16 @@ impl<'tcx, 'interner> ItemLowerer<'tcx, 'interner> { impl<'tcx, 'interner> Visitor<'tcx> for ItemLowerer<'tcx, 'interner> { fn visit_mod(&mut self, m: &'tcx Mod, _s: Span, _attrs: &[Attribute], n: NodeId) { - self.lctx.modules.insert(n, hir::ModuleItems { + let hir_id = self.lctx.lower_node_id(n); + + self.lctx.modules.insert(hir_id, hir::ModuleItems { items: BTreeSet::new(), trait_items: BTreeSet::new(), impl_items: BTreeSet::new(), }); let old = self.lctx.current_module; - self.lctx.current_module = n; + self.lctx.current_module = hir_id; visit::walk_mod(self, m); self.lctx.current_module = old; } @@ -71,7 +73,7 @@ impl<'tcx, 'interner> Visitor<'tcx> for ItemLowerer<'tcx, 'interner> { if let Some(hir_id) = item_hir_id { self.lctx.with_parent_item_lifetime_defs(hir_id, |this| { let this = &mut ItemLowerer { lctx: this }; - if let ItemKind::Impl(.., ref opt_trait_ref, _, _) = item.node { + if let ItemKind::Impl(.., ref opt_trait_ref, _, _) = item.kind { this.with_trait_impl_ref(opt_trait_ref, |this| { visit::walk_item(this, item) }); 
@@ -117,7 +119,7 @@ impl LoweringContext<'_> { ) -> T { let old_len = self.in_scope_lifetimes.len(); - let parent_generics = match self.items.get(&parent_hir_id).unwrap().node { + let parent_generics = match self.items.get(&parent_hir_id).unwrap().kind { hir::ItemKind::Impl(_, _, _, ref generics, ..) | hir::ItemKind::Trait(_, _, ref generics, ..) => { &generics.params[..] @@ -166,7 +168,7 @@ impl LoweringContext<'_> { } pub(super) fn lower_item_id(&mut self, i: &Item) -> SmallVec<[hir::ItemId; 1]> { - let node_ids = match i.node { + let node_ids = match i.kind { ItemKind::Use(ref use_tree) => { let mut vec = smallvec![i.id]; self.lower_item_id_use_tree(use_tree, i.id, &mut vec); @@ -233,7 +235,7 @@ impl LoweringContext<'_> { } let attrs = attrs.into(); - if let ItemKind::MacroDef(ref def) = i.node { + if let ItemKind::MacroDef(ref def) = i.kind { if !def.legacy || attr::contains_name(&i.attrs, sym::macro_export) { let body = self.lower_token_stream(def.stream()); let hir_id = self.lower_node_id(i.id); @@ -252,13 +254,13 @@ impl LoweringContext<'_> { return None; } - let node = self.lower_item_kind(i.id, &mut ident, &attrs, &mut vis, &i.node); + let kind = self.lower_item_kind(i.id, &mut ident, &attrs, &mut vis, &i.kind); Some(hir::Item { hir_id: self.lower_node_id(i.id), ident, attrs, - node, + kind, vis, span: i.span, }) @@ -540,7 +542,7 @@ impl LoweringContext<'_> { let res = this.lower_res(res); let path = this.lower_path_extra(res, &path, ParamMode::Explicit, None); - let item = hir::ItemKind::Use(P(path), hir::UseKind::Single); + let kind = hir::ItemKind::Use(P(path), hir::UseKind::Single); let vis = this.rebuild_vis(&vis); this.insert_item( @@ -548,7 +550,7 @@ impl LoweringContext<'_> { hir_id: new_id, ident, attrs: attrs.into_iter().cloned().collect(), - node: item, + kind, vis, span, }, @@ -556,8 +558,7 @@ impl LoweringContext<'_> { }); } - let path = - P(self.lower_path_extra(ret_res, &path, ParamMode::Explicit, None)); + let path = P(self.lower_path_extra(ret_res, &path, ParamMode::Explicit, None)); hir::ItemKind::Use(path, hir::UseKind::Single) } UseTreeKind::Glob => { @@ -621,7 +622,7 @@ impl LoweringContext<'_> { let mut vis = this.rebuild_vis(&vis); let mut ident = *ident; - let item = this.lower_use_tree(use_tree, + let kind = this.lower_use_tree(use_tree, &prefix, id, &mut vis, @@ -633,7 +634,7 @@ impl LoweringContext<'_> { hir_id: new_hir_id, ident, attrs: attrs.into_iter().cloned().collect(), - node: item, + kind, vis, span: use_tree.span, }, @@ -710,7 +711,7 @@ impl LoweringContext<'_> { hir_id: self.lower_node_id(i.id), ident: i.ident, attrs: self.lower_attrs(&i.attrs), - node: match i.node { + kind: match i.kind { ForeignItemKind::Fn(ref fdec, ref generics) => { let (generics, (fn_dec, fn_args)) = self.add_in_band_defs( generics, @@ -787,7 +788,7 @@ impl LoweringContext<'_> { } fn lower_struct_field(&mut self, (index, f): (usize, &StructField)) -> hir::StructField { - let ty = if let TyKind::Path(ref qself, ref path) = f.ty.node { + let ty = if let TyKind::Path(ref qself, ref path) = f.ty.kind { let t = self.lower_path_ty( &f.ty, qself, @@ -816,7 +817,7 @@ impl LoweringContext<'_> { fn lower_trait_item(&mut self, i: &TraitItem) -> hir::TraitItem { let trait_item_def_id = self.resolver.definitions().local_def_id(i.id); - let (generics, node) = match i.node { + let (generics, kind) = match i.kind { TraitItemKind::Const(ref ty, ref default) => ( self.lower_generics(&i.generics, ImplTraitContext::disallowed()), hir::TraitItemKind::Const( @@ -850,14 +851,14 @@ impl 
LoweringContext<'_> { } TraitItemKind::Type(ref bounds, ref default) => { let generics = self.lower_generics(&i.generics, ImplTraitContext::disallowed()); - let node = hir::TraitItemKind::Type( + let kind = hir::TraitItemKind::Type( self.lower_param_bounds(bounds, ImplTraitContext::disallowed()), default .as_ref() .map(|x| self.lower_ty(x, ImplTraitContext::disallowed())), ); - (generics, node) + (generics, kind) }, TraitItemKind::Macro(..) => bug!("macro item shouldn't exist at this point"), }; @@ -867,13 +868,13 @@ impl LoweringContext<'_> { ident: i.ident, attrs: self.lower_attrs(&i.attrs), generics, - node, + kind, span: i.span, } } fn lower_trait_item_ref(&mut self, i: &TraitItem) -> hir::TraitItemRef { - let (kind, has_default) = match i.node { + let (kind, has_default) = match i.kind { TraitItemKind::Const(_, ref default) => { (hir::AssocItemKind::Const, default.is_some()) } @@ -900,7 +901,7 @@ impl LoweringContext<'_> { fn lower_impl_item(&mut self, i: &ImplItem) -> hir::ImplItem { let impl_item_def_id = self.resolver.definitions().local_def_id(i.id); - let (generics, node) = match i.node { + let (generics, kind) = match i.kind { ImplItemKind::Const(ref ty, ref expr) => ( self.lower_generics(&i.generics, ImplTraitContext::disallowed()), hir::ImplItemKind::Const( @@ -944,7 +945,7 @@ impl LoweringContext<'_> { generics, vis: self.lower_visibility(&i.vis, None), defaultness: self.lower_defaultness(i.defaultness, true /* [1] */), - node, + kind, span: i.span, } @@ -958,7 +959,7 @@ impl LoweringContext<'_> { span: i.span, vis: self.lower_visibility(&i.vis, Some(i.id)), defaultness: self.lower_defaultness(i.defaultness, true /* [1] */), - kind: match i.node { + kind: match i.kind { ImplItemKind::Const(..) => hir::AssocItemKind::Const, ImplItemKind::TyAlias(..) => hir::AssocItemKind::Type, ImplItemKind::OpaqueTy(..) => hir::AssocItemKind::OpaqueTy, @@ -1131,7 +1132,7 @@ impl LoweringContext<'_> { // Check if this is a binding pattern, if so, we can optimize and avoid adding a // `let = __argN;` statement. In this case, we do not rename the parameter. - let (ident, is_simple_parameter) = match parameter.pat.node { + let (ident, is_simple_parameter) = match parameter.pat.kind { hir::PatKind::Binding(hir::BindingAnnotation::Unannotated, _, ident, _) => (ident, true), _ => { @@ -1221,7 +1222,11 @@ impl LoweringContext<'_> { } let async_expr = this.make_async_expr( - CaptureBy::Value, closure_id, None, body.span, + CaptureBy::Value, + closure_id, + None, + body.span, + hir::AsyncGeneratorKind::Fn, |this| { // Create a block from the user's function body: let user_body = this.lower_block_expr(body); @@ -1341,7 +1346,7 @@ impl LoweringContext<'_> { ); }; // Check if the where clause type is a plain type parameter. - match bound_pred.bounded_ty.node { + match bound_pred.bounded_ty.kind { TyKind::Path(None, ref path) if path.segments.len() == 1 && bound_pred.bound_generic_params.is_empty() => diff --git a/src/librustc/hir/map/blocks.rs b/src/librustc/hir/map/blocks.rs index 351f5818f7..f670d5abe8 100644 --- a/src/librustc/hir/map/blocks.rs +++ b/src/librustc/hir/map/blocks.rs @@ -37,19 +37,25 @@ trait MaybeFnLike { fn is_fn_like(&self) -> bool; } impl MaybeFnLike for ast::Item { fn is_fn_like(&self) -> bool { - match self.node { ast::ItemKind::Fn(..) => true, _ => false, } + match self.kind { + ast::ItemKind::Fn(..) => true, + _ => false, + } } } impl MaybeFnLike for ast::ImplItem { fn is_fn_like(&self) -> bool { - match self.node { ast::ImplItemKind::Method(..) 
=> true, _ => false, } + match self.kind { + ast::ImplItemKind::Method(..) => true, + _ => false, + } } } impl MaybeFnLike for ast::TraitItem { fn is_fn_like(&self) -> bool { - match self.node { + match self.kind { ast::TraitItemKind::Method(_, ast::TraitMethod::Provided(_)) => true, _ => false, } @@ -58,7 +64,7 @@ impl MaybeFnLike for ast::TraitItem { impl MaybeFnLike for ast::Expr { fn is_fn_like(&self) -> bool { - match self.node { + match self.kind { ast::ExprKind::Closure(..) => true, _ => false, } @@ -212,7 +218,7 @@ impl<'a> FnLikeNode<'a> { C: FnOnce(ClosureParts<'a>) -> A, { match self.node { - map::Node::Item(i) => match i.node { + map::Node::Item(i) => match i.kind { ast::ItemKind::Fn(ref decl, header, ref generics, block) => item_fn(ItemFnParts { id: i.hir_id, @@ -227,21 +233,21 @@ impl<'a> FnLikeNode<'a> { }), _ => bug!("item FnLikeNode that is not fn-like"), }, - map::Node::TraitItem(ti) => match ti.node { + map::Node::TraitItem(ti) => match ti.kind { ast::TraitItemKind::Method(ref sig, ast::TraitMethod::Provided(body)) => { method(ti.hir_id, ti.ident, sig, None, body, ti.span, &ti.attrs) } _ => bug!("trait method FnLikeNode that is not fn-like"), }, map::Node::ImplItem(ii) => { - match ii.node { + match ii.kind { ast::ImplItemKind::Method(ref sig, body) => { method(ii.hir_id, ii.ident, sig, Some(&ii.vis), body, ii.span, &ii.attrs) } _ => bug!("impl method FnLikeNode that is not fn-like") } }, - map::Node::Expr(e) => match e.node { + map::Node::Expr(e) => match e.kind { ast::ExprKind::Closure(_, ref decl, block, _fn_decl_span, _gen) => closure(ClosureParts::new(&decl, block, e.hir_id, e.span, &e.attrs)), _ => bug!("expr FnLikeNode that is not fn-like"), diff --git a/src/librustc/hir/map/collector.rs b/src/librustc/hir/map/collector.rs index 4179cf2ff8..b0fa844c81 100644 --- a/src/librustc/hir/map/collector.rs +++ b/src/librustc/hir/map/collector.rs @@ -5,7 +5,7 @@ use crate::hir::map::HirEntryMap; use crate::hir::def_id::{LOCAL_CRATE, CrateNum}; use crate::hir::intravisit::{Visitor, NestedVisitorMap}; use rustc_data_structures::svh::Svh; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc_index::vec::IndexVec; use crate::ich::Fingerprint; use crate::middle::cstore::CrateStore; use crate::session::CrateDisambiguator; @@ -17,7 +17,7 @@ use syntax_pos::Span; use std::iter::repeat; use crate::ich::StableHashingContext; -use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult}; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; /// A visitor that walks over the HIR and collects `Node`s into a HIR map. 
pub(super) struct NodeCollector<'a, 'hir> { @@ -149,7 +149,7 @@ impl<'a, 'hir> NodeCollector<'a, 'hir> { let mut collector = NodeCollector { krate, source_map: sess.source_map(), - map: vec![None; definitions.def_index_count()], + map: IndexVec::from_elem_n(IndexVec::new(), definitions.def_index_count()), parent_node: hir::CRATE_HIR_ID, current_signature_dep_index: root_mod_sig_dep_index, current_full_dep_index: root_mod_full_dep_index, @@ -186,13 +186,13 @@ impl<'a, 'hir> NodeCollector<'a, 'hir> { }); let mut upstream_crates: Vec<_> = cstore.crates_untracked().iter().map(|&cnum| { - let name = cstore.crate_name_untracked(cnum).as_interned_str(); + let name = cstore.crate_name_untracked(cnum); let disambiguator = cstore.crate_disambiguator_untracked(cnum).to_fingerprint(); let hash = cstore.crate_hash_untracked(cnum); (name, disambiguator, hash) }).collect(); - upstream_crates.sort_unstable_by_key(|&(name, dis, _)| (name, dis)); + upstream_crates.sort_unstable_by_key(|&(name, dis, _)| (name.as_str(), dis)); // We hash the final, remapped names of all local source files so we // don't have to include the path prefix remapping commandline args. @@ -227,12 +227,8 @@ impl<'a, 'hir> NodeCollector<'a, 'hir> { fn insert_entry(&mut self, id: HirId, entry: Entry<'hir>) { debug!("hir_map: {:?} => {:?}", id, entry); - let local_map = &mut self.map[id.owner.index()]; + let local_map = &mut self.map[id.owner]; let i = id.local_id.as_u32() as usize; - if local_map.is_none() { - *local_map = Some(IndexVec::with_capacity(i + 1)); - } - let local_map = local_map.as_mut().unwrap(); let len = local_map.len(); if i >= len { local_map.extend(repeat(None).take(i - len + 1)); @@ -378,7 +374,7 @@ impl<'a, 'hir> Visitor<'hir> for NodeCollector<'a, 'hir> { self.with_dep_node_owner(i.hir_id.owner, i, |this| { this.insert(i.span, i.hir_id, Node::Item(i)); this.with_parent(i.hir_id, |this| { - if let ItemKind::Struct(ref struct_def, _) = i.node { + if let ItemKind::Struct(ref struct_def, _) = i.kind { // If this is a tuple or unit-like struct, register the constructor. if let Some(ctor_hir_id) = struct_def.ctor_hir_id() { this.insert(i.span, ctor_hir_id, Node::Ctor(struct_def)); @@ -427,7 +423,7 @@ impl<'a, 'hir> Visitor<'hir> for NodeCollector<'a, 'hir> { } fn visit_pat(&mut self, pat: &'hir Pat) { - let node = if let PatKind::Binding(..) = pat.node { + let node = if let PatKind::Binding(..) = pat.kind { Node::Binding(pat) } else { Node::Pat(pat) @@ -602,9 +598,7 @@ impl<'hir, T> HashStable> for HirItemLike where T: HashStable>, { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'hir>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'hir>, hasher: &mut StableHasher) { hcx.while_hashing_hir_bodies(self.hash_bodies, |hcx| { self.item_like.hash_stable(hcx, hasher); }); diff --git a/src/librustc/hir/map/def_collector.rs b/src/librustc/hir/map/def_collector.rs index d1cc7a8ce9..70dc2248e0 100644 --- a/src/librustc/hir/map/def_collector.rs +++ b/src/librustc/hir/map/def_collector.rs @@ -2,10 +2,10 @@ use crate::hir::map::definitions::*; use crate::hir::def_id::DefIndex; use syntax::ast::*; -use syntax::ext::hygiene::ExpnId; use syntax::visit; use syntax::symbol::{kw, sym}; use syntax::parse::token::{self, Token}; +use syntax_pos::hygiene::ExpnId; use syntax_pos::Span; /// Creates `DefId`s for nodes in the AST. @@ -57,7 +57,7 @@ impl<'a> DefCollector<'a> { // For async functions, we need to create their inner defs inside of a // closure to match their desugared representation. 
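The comment above refers to the standard `async fn` desugaring: the body ends up as an `async` block (a generator, closure-like construct) nested inside the outer function, which is why the def collector has to create the inner defs one level down. A hedged sketch of that shape in surface Rust; both forms compile on stable and no executor is involved:

```rust
use std::future::Future;

// Surface form:
async fn add_one(x: u32) -> u32 {
    x + 1
}

// Roughly the shape it desugars to: the body becomes a nested `async`
// block inside an ordinary fn returning `impl Future`.
fn add_one_desugared(x: u32) -> impl Future<Output = u32> {
    async move { x + 1 }
}

fn main() {
    // Only demonstrates that both forms yield a future of the same output.
    let _fut1 = add_one(1);
    let _fut2 = add_one_desugared(1);
}
```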
- let fn_def_data = DefPathData::ValueNs(name.as_interned_str()); + let fn_def_data = DefPathData::ValueNs(name); let fn_def = self.create_def(id, fn_def_data, span); return self.with_parent(fn_def, |this| { this.create_def(return_impl_trait_id, DefPathData::ImplTrait, span); @@ -75,22 +75,22 @@ impl<'a> DefCollector<'a> { } fn collect_field(&mut self, field: &'a StructField, index: Option) { + let index = |this: &Self| index.unwrap_or_else(|| { + let node_id = NodeId::placeholder_from_expn_id(this.expansion); + this.definitions.placeholder_field_index(node_id) + }); + if field.is_placeholder { + self.definitions.set_placeholder_field_index(field.id, index(self)); self.visit_macro_invoc(field.id); } else { - let name = field.ident.map(|ident| ident.name) - .or_else(|| index.map(sym::integer)) - .unwrap_or_else(|| { - let node_id = NodeId::placeholder_from_expn_id(self.expansion); - sym::integer(self.definitions.placeholder_field_indices[&node_id]) - }) - .as_interned_str(); + let name = field.ident.map_or_else(|| sym::integer(index(self)), |ident| ident.name); let def = self.create_def(field.id, DefPathData::ValueNs(name), field.span); self.with_parent(def, |this| visit::walk_struct_field(this, field)); } } - pub fn visit_macro_invoc(&mut self, id: NodeId) { + fn visit_macro_invoc(&mut self, id: NodeId) { self.definitions.set_invocation_parent(id.placeholder_to_expn_id(), self.parent_def); } } @@ -101,7 +101,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> { // Pick the def data. This need not be unique, but the more // information we encapsulate into, the better - let def_data = match i.node { + let def_data = match i.kind { ItemKind::Impl(..) => DefPathData::Impl, ItemKind::Mod(..) if i.ident.name == kw::Invalid => { return visit::walk_item(self, i); @@ -109,7 +109,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> { ItemKind::Mod(..) | ItemKind::Trait(..) | ItemKind::TraitAlias(..) | ItemKind::Enum(..) | ItemKind::Struct(..) | ItemKind::Union(..) | ItemKind::OpaqueTy(..) | ItemKind::ExternCrate(..) | ItemKind::ForeignMod(..) | - ItemKind::TyAlias(..) => DefPathData::TypeNs(i.ident.as_interned_str()), + ItemKind::TyAlias(..) => DefPathData::TypeNs(i.ident.name), ItemKind::Fn( ref decl, ref header, @@ -127,8 +127,8 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> { ) } ItemKind::Static(..) | ItemKind::Const(..) | ItemKind::Fn(..) => - DefPathData::ValueNs(i.ident.as_interned_str()), - ItemKind::MacroDef(..) => DefPathData::MacroNs(i.ident.as_interned_str()), + DefPathData::ValueNs(i.ident.name), + ItemKind::MacroDef(..) => DefPathData::MacroNs(i.ident.name), ItemKind::Mac(..) => return self.visit_macro_invoc(i.id), ItemKind::GlobalAsm(..) => DefPathData::Misc, ItemKind::Use(..) => { @@ -138,7 +138,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> { let def = self.create_def(i.id, def_data, i.span); self.with_parent(def, |this| { - match i.node { + match i.kind { ItemKind::Struct(ref struct_def, _) | ItemKind::Union(ref struct_def, _) => { // If this is a unit or tuple-like struct, register the constructor. 
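The reason the constructor gets a def of its own, as the comment above says, is that for tuple and unit structs the name is also a value: it can be used as a standalone function. A short illustration (the type name is made up):

```rust
struct Celsius(f64);

fn main() {
    // `Celsius` used as a type and as a struct-literal-like call...
    let a: Celsius = Celsius(21.5);

    // ...and as a plain function value — the separate "ctor" definition.
    let ctor: fn(f64) -> Celsius = Celsius;
    let b = ctor(36.6);

    assert!(a.0 < b.0);
}
```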
if let Some(ctor_hir_id) = struct_def.ctor_id() { @@ -157,12 +157,12 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> { } fn visit_foreign_item(&mut self, foreign_item: &'a ForeignItem) { - if let ForeignItemKind::Macro(_) = foreign_item.node { + if let ForeignItemKind::Macro(_) = foreign_item.kind { return self.visit_macro_invoc(foreign_item.id); } let def = self.create_def(foreign_item.id, - DefPathData::ValueNs(foreign_item.ident.as_interned_str()), + DefPathData::ValueNs(foreign_item.ident.name), foreign_item.span); self.with_parent(def, |this| { @@ -175,7 +175,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> { return self.visit_macro_invoc(v.id); } let def = self.create_def(v.id, - DefPathData::TypeNs(v.ident.as_interned_str()), + DefPathData::TypeNs(v.ident.name), v.span); self.with_parent(def, |this| { if let Some(ctor_hir_id) = v.data.ctor_id() { @@ -191,9 +191,6 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> { // and every such attribute expands into a single field after it's resolved. for (index, field) in data.fields().iter().enumerate() { self.collect_field(field, Some(index)); - if field.is_placeholder && field.ident.is_none() { - self.definitions.placeholder_field_indices.insert(field.id, index); - } } } @@ -202,7 +199,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> { self.visit_macro_invoc(param.id); return; } - let name = param.ident.as_interned_str(); + let name = param.ident.name; let def_path_data = match param.kind { GenericParamKind::Lifetime { .. } => DefPathData::LifetimeNs(name), GenericParamKind::Type { .. } => DefPathData::TypeNs(name), @@ -214,11 +211,11 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> { } fn visit_trait_item(&mut self, ti: &'a TraitItem) { - let def_data = match ti.node { + let def_data = match ti.kind { TraitItemKind::Method(..) | TraitItemKind::Const(..) => - DefPathData::ValueNs(ti.ident.as_interned_str()), + DefPathData::ValueNs(ti.ident.name), TraitItemKind::Type(..) => { - DefPathData::TypeNs(ti.ident.as_interned_str()) + DefPathData::TypeNs(ti.ident.name) }, TraitItemKind::Macro(..) => return self.visit_macro_invoc(ti.id), }; @@ -228,7 +225,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> { } fn visit_impl_item(&mut self, ii: &'a ImplItem) { - let def_data = match ii.node { + let def_data = match ii.kind { ImplItemKind::Method(MethodSig { ref header, ref decl, @@ -243,12 +240,10 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> { body, ) } - ImplItemKind::Method(..) | ImplItemKind::Const(..) => - DefPathData::ValueNs(ii.ident.as_interned_str()), + ImplItemKind::Method(..) | + ImplItemKind::Const(..) => DefPathData::ValueNs(ii.ident.name), ImplItemKind::TyAlias(..) | - ImplItemKind::OpaqueTy(..) => { - DefPathData::TypeNs(ii.ident.as_interned_str()) - }, + ImplItemKind::OpaqueTy(..) => DefPathData::TypeNs(ii.ident.name), ImplItemKind::Macro(..) => return self.visit_macro_invoc(ii.id), }; @@ -257,7 +252,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> { } fn visit_pat(&mut self, pat: &'a Pat) { - match pat.node { + match pat.kind { PatKind::Mac(..) => return self.visit_macro_invoc(pat.id), _ => visit::walk_pat(self, pat), } @@ -271,7 +266,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> { } fn visit_expr(&mut self, expr: &'a Expr) { - let parent_def = match expr.node { + let parent_def = match expr.kind { ExprKind::Mac(..) => return self.visit_macro_invoc(expr.id), ExprKind::Closure(_, asyncness, ..) 
=> { // Async closures desugar to closures inside of closures, so @@ -292,7 +287,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> { } fn visit_ty(&mut self, ty: &'a Ty) { - match ty.node { + match ty.kind { TyKind::Mac(..) => return self.visit_macro_invoc(ty.id), TyKind::ImplTrait(node_id, _) => { self.create_def(node_id, DefPathData::ImplTrait, ty.span); @@ -303,7 +298,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> { } fn visit_stmt(&mut self, stmt: &'a Stmt) { - match stmt.node { + match stmt.kind { StmtKind::Mac(..) => self.visit_macro_invoc(stmt.id), _ => visit::walk_stmt(self, stmt), } @@ -312,7 +307,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> { fn visit_token(&mut self, t: Token) { if let token::Interpolated(nt) = t.kind { if let token::NtExpr(ref expr) = *nt { - if let ExprKind::Mac(..) = expr.node { + if let ExprKind::Mac(..) = expr.kind { self.visit_macro_invoc(expr.id); } } diff --git a/src/librustc/hir/map/definitions.rs b/src/librustc/hir/map/definitions.rs index 187bc59332..450ab94717 100644 --- a/src/librustc/hir/map/definitions.rs +++ b/src/librustc/hir/map/definitions.rs @@ -11,14 +11,14 @@ use crate::session::CrateDisambiguator; use crate::util::nodemap::NodeMap; use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::indexed_vec::{IndexVec}; +use rustc_index::vec::{IndexVec}; use rustc_data_structures::stable_hasher::StableHasher; use std::borrow::Borrow; use std::fmt::Write; use std::hash::Hash; use syntax::ast; -use syntax::ext::hygiene::ExpnId; -use syntax::symbol::{Symbol, sym, InternedString}; +use syntax_pos::symbol::{Symbol, sym}; +use syntax_pos::hygiene::ExpnId; use syntax_pos::{Span, DUMMY_SP}; /// The `DefPathTable` maps `DefIndex`es to `DefKey`s and vice versa. @@ -27,8 +27,8 @@ use syntax_pos::{Span, DUMMY_SP}; /// There is one `DefPathTable` for each crate. #[derive(Clone, Default, RustcDecodable, RustcEncodable)] pub struct DefPathTable { - index_to_key: Vec, - def_path_hashes: Vec, + index_to_key: IndexVec, + def_path_hashes: IndexVec, } impl DefPathTable { @@ -53,14 +53,14 @@ impl DefPathTable { #[inline(always)] pub fn def_key(&self, index: DefIndex) -> DefKey { - self.index_to_key[index.index()].clone() + self.index_to_key[index] } #[inline(always)] pub fn def_path_hash(&self, index: DefIndex) -> DefPathHash { - let ret = self.def_path_hashes[index.index()]; - debug!("def_path_hash({:?}) = {:?}", index, ret); - return ret + let hash = self.def_path_hashes[index]; + debug!("def_path_hash({:?}) = {:?}", index, hash); + hash } pub fn add_def_path_hashes_to(&self, @@ -92,7 +92,7 @@ impl DefPathTable { pub struct Definitions { table: DefPathTable, node_to_def_index: NodeMap, - def_index_to_node: Vec, + def_index_to_node: IndexVec, pub(super) node_to_hir_id: IndexVec, /// If `ExpnId` is an ID of some macro expansion, /// then `DefId` is the normal module (`mod`) in which the expanded macro was defined. @@ -105,13 +105,13 @@ pub struct Definitions { /// we know what parent node that fragment should be attached to thanks to this table. invocation_parents: FxHashMap, /// Indices of unnamed struct or variant fields with unresolved attributes. - pub(super) placeholder_field_indices: NodeMap, + pub placeholder_field_indices: NodeMap, } /// A unique identifier that we can use to lookup a definition /// precisely. It combines the index of the definition's parent (if /// any) with a `DisambiguatedDefPathData`. 
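Several tables in this file move from a `Vec<T>` indexed via `index.index()` to an `IndexVec<DefIndex, T>` indexed by the typed index directly. The benefit is that a raw `usize` from elsewhere can no longer be used by mistake. A minimal hand-rolled sketch of the idea (a toy stand-in, not rustc's actual `IndexVec`):

```rust
use std::marker::PhantomData;
use std::ops::Index;

#[derive(Copy, Clone, Debug)]
struct DefIndex(usize);

/// A vector that can only be indexed by its dedicated index type,
/// never by a bare `usize`.
struct TypedVec<I, T> {
    raw: Vec<T>,
    _marker: PhantomData<I>,
}

impl<T> Index<DefIndex> for TypedVec<DefIndex, T> {
    type Output = T;
    fn index(&self, i: DefIndex) -> &T {
        &self.raw[i.0]
    }
}

fn main() {
    let table: TypedVec<DefIndex, &str> = TypedVec {
        raw: vec!["crate root", "some item"],
        _marker: PhantomData,
    };
    // `table[DefIndex(1)]` compiles; `table[1usize]` would be rejected.
    assert_eq!(table[DefIndex(1)], "some item");
}
```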
-#[derive(Clone, PartialEq, Debug, Hash, RustcEncodable, RustcDecodable)] +#[derive(Copy, Clone, PartialEq, Debug, RustcEncodable, RustcDecodable)] pub struct DefKey { /// The parent path. pub parent: Option, @@ -136,7 +136,9 @@ impl DefKey { ::std::mem::discriminant(data).hash(&mut hasher); if let Some(name) = data.get_opt_name() { - name.hash(&mut hasher); + // Get a stable hash by considering the symbol chars rather than + // the symbol index. + name.as_str().hash(&mut hasher); } disambiguator.hash(&mut hasher); @@ -162,13 +164,13 @@ impl DefKey { /// between them. This introduces some artificial ordering dependency /// but means that if you have, e.g., two impls for the same type in /// the same module, they do get distinct `DefId`s. -#[derive(Clone, PartialEq, Debug, Hash, RustcEncodable, RustcDecodable)] +#[derive(Copy, Clone, PartialEq, Debug, RustcEncodable, RustcDecodable)] pub struct DisambiguatedDefPathData { pub data: DefPathData, pub disambiguator: u32 } -#[derive(Clone, Debug, Hash, RustcEncodable, RustcDecodable)] +#[derive(Clone, Debug, RustcEncodable, RustcDecodable)] pub struct DefPath { /// The path leading from the crate root to the item. pub data: Vec, @@ -218,7 +220,7 @@ impl DefPath { for component in &self.data { write!(s, "::{}[{}]", - component.data.as_interned_str(), + component.data.as_symbol(), component.disambiguator) .unwrap(); } @@ -238,11 +240,11 @@ impl DefPath { for component in &self.data { if component.disambiguator == 0 { - write!(s, "::{}", component.data.as_interned_str()).unwrap(); + write!(s, "::{}", component.data.as_symbol()).unwrap(); } else { write!(s, "{}[{}]", - component.data.as_interned_str(), + component.data.as_symbol(), component.disambiguator) .unwrap(); } @@ -262,11 +264,11 @@ impl DefPath { opt_delimiter.map(|d| s.push(d)); opt_delimiter = Some('-'); if component.disambiguator == 0 { - write!(s, "{}", component.data.as_interned_str()).unwrap(); + write!(s, "{}", component.data.as_symbol()).unwrap(); } else { write!(s, "{}[{}]", - component.data.as_interned_str(), + component.data.as_symbol(), component.disambiguator) .unwrap(); } @@ -275,7 +277,7 @@ impl DefPath { } } -#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub enum DefPathData { // Root: these should only be used for the root nodes, because // they are treated specially by the `def_path` function. @@ -290,13 +292,13 @@ pub enum DefPathData { /// An impl. Impl, /// Something in the type namespace. - TypeNs(InternedString), + TypeNs(Symbol), /// Something in the value namespace. - ValueNs(InternedString), + ValueNs(Symbol), /// Something in the macro namespace. - MacroNs(InternedString), + MacroNs(Symbol), /// Something in the lifetime namespace. - LifetimeNs(InternedString), + LifetimeNs(Symbol), /// A closure expression. ClosureExpr, @@ -311,7 +313,7 @@ pub enum DefPathData { /// Identifies a piece of crate metadata that is global to a whole crate /// (as opposed to just one item). `GlobalMetaData` components are only /// supposed to show up right below the crate root. 
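The `name.as_str().hash(..)` change above hashes the symbol's text rather than its interner index when computing the stable hash. The distinction matters because interner indices depend on the order in which names happen to be interned, while the characters do not. A self-contained illustration (the two "sessions" are hypothetical):

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn hash_one<T: Hash + ?Sized>(value: &T) -> u64 {
    let mut hasher = DefaultHasher::new();
    value.hash(&mut hasher);
    hasher.finish()
}

fn main() {
    // Two hypothetical sessions interning the same names in a different
    // order: the index assigned to "bar" differs between them...
    let session_a = ["foo", "bar"];
    let session_b = ["bar", "foo"];
    let idx_a = session_a.iter().position(|s| *s == "bar").unwrap();
    let idx_b = session_b.iter().position(|s| *s == "bar").unwrap();
    assert_ne!(hash_one(&idx_a), hash_one(&idx_b));

    // ...while hashing the characters of the name itself does not depend
    // on interning order, which is what the `DefKey` hashing above relies on.
    assert_eq!(hash_one("bar"), hash_one("bar"));
}
```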
- GlobalMetaData(InternedString), + GlobalMetaData(Symbol), } #[derive(Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Debug, @@ -357,7 +359,7 @@ impl Definitions { #[inline] pub fn opt_def_index(&self, node: ast::NodeId) -> Option { - self.node_to_def_index.get(&node).cloned() + self.node_to_def_index.get(&node).copied() } #[inline] @@ -373,7 +375,7 @@ impl Definitions { #[inline] pub fn as_local_node_id(&self, def_id: DefId) -> Option { if def_id.krate == LOCAL_CRATE { - let node_id = self.def_index_to_node[def_id.index.index()]; + let node_id = self.def_index_to_node[def_id.index]; if node_id != ast::DUMMY_NODE_ID { return Some(node_id); } @@ -402,7 +404,7 @@ impl Definitions { #[inline] pub fn def_index_to_hir_id(&self, def_index: DefIndex) -> hir::HirId { - let node_id = self.def_index_to_node[def_index.index()]; + let node_id = self.def_index_to_node[def_index]; self.node_to_hir_id[node_id] } @@ -411,7 +413,7 @@ impl Definitions { #[inline] pub fn opt_span(&self, def_id: DefId) -> Option { if def_id.krate == LOCAL_CRATE { - self.def_index_to_span.get(&def_id.index).cloned() + self.def_index_to_span.get(&def_id.index).copied() } else { None } @@ -470,7 +472,7 @@ impl Definitions { // Find the next free disambiguator for this key. let disambiguator = { - let next_disamb = self.next_disambiguator.entry((parent, data.clone())).or_insert(0); + let next_disamb = self.next_disambiguator.entry((parent, data)).or_insert(0); let disambiguator = *next_disamb; *next_disamb = next_disamb.checked_add(1).expect("disambiguator overflow"); disambiguator @@ -523,7 +525,7 @@ impl Definitions { } pub fn expansion_that_defined(&self, index: DefIndex) -> ExpnId { - self.expansions_that_defined.get(&index).cloned().unwrap_or(ExpnId::root()) + self.expansions_that_defined.get(&index).copied().unwrap_or(ExpnId::root()) } pub fn parent_module_of_macro_def(&self, expn_id: ExpnId) -> DefId { @@ -542,10 +544,19 @@ impl Definitions { let old_parent = self.invocation_parents.insert(invoc_id, parent); assert!(old_parent.is_none(), "parent `DefIndex` is reset for an invocation"); } + + pub fn placeholder_field_index(&self, node_id: ast::NodeId) -> usize { + self.placeholder_field_indices[&node_id] + } + + pub fn set_placeholder_field_index(&mut self, node_id: ast::NodeId, index: usize) { + let old_index = self.placeholder_field_indices.insert(node_id, index); + assert!(old_index.is_none(), "placeholder field index is reset for a node ID"); + } } impl DefPathData { - pub fn get_opt_name(&self) -> Option { + pub fn get_opt_name(&self) -> Option { use self::DefPathData::*; match *self { TypeNs(name) | @@ -564,15 +575,15 @@ impl DefPathData { } } - pub fn as_interned_str(&self) -> InternedString { + pub fn as_symbol(&self) -> Symbol { use self::DefPathData::*; - let s = match *self { + match *self { TypeNs(name) | ValueNs(name) | MacroNs(name) | LifetimeNs(name) | GlobalMetaData(name) => { - return name + name } // Note that this does not show up in user print-outs. CrateRoot => sym::double_braced_crate, @@ -582,13 +593,11 @@ impl DefPathData { Ctor => sym::double_braced_constructor, AnonConst => sym::double_braced_constant, ImplTrait => sym::double_braced_opaque, - }; - - s.as_interned_str() + } } pub fn to_string(&self) -> String { - self.as_interned_str().to_string() + self.as_symbol().to_string() } } @@ -599,7 +608,6 @@ macro_rules! 
define_global_metadata_kind { (pub enum GlobalMetaDataKind { $($variant:ident),* }) => ( - #[derive(Clone, Copy, Debug, Hash, RustcEncodable, RustcDecodable)] pub enum GlobalMetaDataKind { $($variant),* } @@ -611,7 +619,7 @@ macro_rules! define_global_metadata_kind { definitions.create_def_with_parent( CRATE_DEF_INDEX, ast::DUMMY_NODE_ID, - DefPathData::GlobalMetaData(instance.name().as_interned_str()), + DefPathData::GlobalMetaData(instance.name()), ExpnId::root(), DUMMY_SP ); @@ -625,7 +633,7 @@ macro_rules! define_global_metadata_kind { let def_key = DefKey { parent: Some(CRATE_DEF_INDEX), disambiguated_data: DisambiguatedDefPathData { - data: DefPathData::GlobalMetaData(self.name().as_interned_str()), + data: DefPathData::GlobalMetaData(self.name()), disambiguator: 0, } }; diff --git a/src/librustc/hir/map/hir_id_validator.rs b/src/librustc/hir/map/hir_id_validator.rs index 889659382d..b66c2ce117 100644 --- a/src/librustc/hir/map/hir_id_validator.rs +++ b/src/librustc/hir/map/hir_id_validator.rs @@ -10,7 +10,7 @@ pub fn check_crate(hir_map: &hir::map::Map<'_>) { let errors = Lock::new(Vec::new()); par_iter(&hir_map.krate().modules).for_each(|(module_id, _)| { - let local_def_id = hir_map.local_def_id_from_node_id(*module_id); + let local_def_id = hir_map.local_def_id(*module_id); hir_map.visit_item_likes_in_module(local_def_id, &mut OuterVisitor { hir_map, errors: &errors, diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs index 5cec8a593f..acadd77cc3 100644 --- a/src/librustc/hir/map/mod.rs +++ b/src/librustc/hir/map/mod.rs @@ -17,14 +17,12 @@ use crate::util::common::time; use rustc_target::spec::abi::Abi; use rustc_data_structures::svh::Svh; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc_index::vec::IndexVec; use syntax::ast::{self, Name, NodeId}; use syntax::source_map::Spanned; -use syntax::ext::base::MacroKind; +use syntax_pos::hygiene::MacroKind; use syntax_pos::{Span, DUMMY_SP}; -use std::result::Result::Err; - pub mod blocks; mod collector; mod def_collector; @@ -50,28 +48,28 @@ impl<'hir> Entry<'hir> { fn fn_decl(&self) -> Option<&'hir FnDecl> { match self.node { Node::Item(ref item) => { - match item.node { + match item.kind { ItemKind::Fn(ref fn_decl, _, _, _) => Some(fn_decl), _ => None, } } Node::TraitItem(ref item) => { - match item.node { + match item.kind { TraitItemKind::Method(ref method_sig, _) => Some(&method_sig.decl), _ => None } } Node::ImplItem(ref item) => { - match item.node { + match item.kind { ImplItemKind::Method(ref method_sig, _) => Some(&method_sig.decl), _ => None, } } Node::Expr(ref expr) => { - match expr.node { + match expr.kind { ExprKind::Closure(_, ref fn_decl, ..) 
=> Some(fn_decl), _ => None, } @@ -84,7 +82,7 @@ impl<'hir> Entry<'hir> { fn associated_body(self) -> Option { match self.node { Node::Item(item) => { - match item.node { + match item.kind { ItemKind::Const(_, body) | ItemKind::Static(.., body) | ItemKind::Fn(_, _, _, body) => Some(body), @@ -93,7 +91,7 @@ impl<'hir> Entry<'hir> { } Node::TraitItem(item) => { - match item.node { + match item.kind { TraitItemKind::Const(_, Some(body)) | TraitItemKind::Method(_, TraitMethod::Provided(body)) => Some(body), _ => None @@ -101,7 +99,7 @@ impl<'hir> Entry<'hir> { } Node::ImplItem(item) => { - match item.node { + match item.kind { ImplItemKind::Const(_, body) | ImplItemKind::Method(_, body) => Some(body), _ => None, @@ -111,7 +109,7 @@ impl<'hir> Entry<'hir> { Node::AnonConst(constant) => Some(constant.body), Node::Expr(expr) => { - match expr.node { + match expr.kind { ExprKind::Closure(.., body, _, _) => Some(body), _ => None, } @@ -158,9 +156,9 @@ impl Forest { /// This type is effectively a `HashMap>`, /// but it is implemented as 2 layers of arrays. -/// - first we have `A = Vec>` mapping a `DefIndex`'s index to an inner value +/// - first we have `A = IndexVec` mapping `DefIndex`s to an inner value /// - which is `B = IndexVec>` which gives you the `Entry`. -pub(super) type HirEntryMap<'hir> = Vec>>>>; +pub(super) type HirEntryMap<'hir> = IndexVec>>>; /// Represents a mapping from `NodeId`s to AST elements and their parent `NodeId`s. #[derive(Clone)] @@ -183,11 +181,49 @@ pub struct Map<'hir> { hir_to_node_id: FxHashMap, } +struct ParentHirIterator<'map> { + current_id: HirId, + map: &'map Map<'map>, +} + +impl<'map> ParentHirIterator<'map> { + fn new(current_id: HirId, map: &'map Map<'map>) -> ParentHirIterator<'map> { + ParentHirIterator { + current_id, + map, + } + } +} + +impl<'map> Iterator for ParentHirIterator<'map> { + type Item = (HirId, Node<'map>); + + fn next(&mut self) -> Option { + if self.current_id == CRATE_HIR_ID { + return None; + } + loop { // There are nodes that do not have entries, so we need to skip them. + let parent_id = self.map.get_parent_node(self.current_id); + + if parent_id == self.current_id { + self.current_id = CRATE_HIR_ID; + return None; + } + + self.current_id = parent_id; + if let Some(entry) = self.map.find_entry(parent_id) { + return Some((parent_id, entry.node)); + } + // If this `HirId` doesn't have an `Entry`, skip it and look for its `parent_id`. + } + } +} + impl<'hir> Map<'hir> { #[inline] fn lookup(&self, id: HirId) -> Option<&Entry<'hir>> { - let local_map = self.map.get(id.owner.index())?; - local_map.as_ref()?.get(id.local_id)?.as_ref() + let local_map = self.map.get(id.owner)?; + local_map.get(id.local_id)?.as_ref() } /// Registers a read in the dependency graph of the AST node with @@ -293,7 +329,7 @@ impl<'hir> Map<'hir> { Some(match node { Node::Item(item) => { - match item.node { + match item.kind { ItemKind::Static(..) => DefKind::Static, ItemKind::Const(..) => DefKind::Const, ItemKind::Fn(..) => DefKind::Fn, @@ -313,21 +349,21 @@ impl<'hir> Map<'hir> { } } Node::ForeignItem(item) => { - match item.node { + match item.kind { ForeignItemKind::Fn(..) => DefKind::Fn, ForeignItemKind::Static(..) => DefKind::Static, ForeignItemKind::Type => DefKind::ForeignTy, } } Node::TraitItem(item) => { - match item.node { + match item.kind { TraitItemKind::Const(..) => DefKind::AssocConst, TraitItemKind::Method(..) => DefKind::Method, TraitItemKind::Type(..) 
=> DefKind::AssocTy, } } Node::ImplItem(item) => { - match item.node { + match item.kind { ImplItemKind::Const(..) => DefKind::AssocConst, ImplItemKind::Method(..) => DefKind::Method, ImplItemKind::TyAlias(..) => DefKind::AssocTy, @@ -453,22 +489,22 @@ impl<'hir> Map<'hir> { pub fn body_owner_kind(&self, id: HirId) -> BodyOwnerKind { match self.get(id) { - Node::Item(&Item { node: ItemKind::Const(..), .. }) | - Node::TraitItem(&TraitItem { node: TraitItemKind::Const(..), .. }) | - Node::ImplItem(&ImplItem { node: ImplItemKind::Const(..), .. }) | + Node::Item(&Item { kind: ItemKind::Const(..), .. }) | + Node::TraitItem(&TraitItem { kind: TraitItemKind::Const(..), .. }) | + Node::ImplItem(&ImplItem { kind: ImplItemKind::Const(..), .. }) | Node::AnonConst(_) => { BodyOwnerKind::Const } Node::Ctor(..) | - Node::Item(&Item { node: ItemKind::Fn(..), .. }) | - Node::TraitItem(&TraitItem { node: TraitItemKind::Method(..), .. }) | - Node::ImplItem(&ImplItem { node: ImplItemKind::Method(..), .. }) => { + Node::Item(&Item { kind: ItemKind::Fn(..), .. }) | + Node::TraitItem(&TraitItem { kind: TraitItemKind::Method(..), .. }) | + Node::ImplItem(&ImplItem { kind: ImplItemKind::Method(..), .. }) => { BodyOwnerKind::Fn } - Node::Item(&Item { node: ItemKind::Static(_, m, _), .. }) => { + Node::Item(&Item { kind: ItemKind::Static(_, m, _), .. }) => { BodyOwnerKind::Static(m) } - Node::Expr(&Expr { node: ExprKind::Closure(..), .. }) => { + Node::Expr(&Expr { kind: ExprKind::Closure(..), .. }) => { BodyOwnerKind::Closure } node => bug!("{:#?} is not a body node", node), @@ -477,8 +513,8 @@ impl<'hir> Map<'hir> { pub fn ty_param_owner(&self, id: HirId) -> HirId { match self.get(id) { - Node::Item(&Item { node: ItemKind::Trait(..), .. }) | - Node::Item(&Item { node: ItemKind::TraitAlias(..), .. }) => id, + Node::Item(&Item { kind: ItemKind::Trait(..), .. }) | + Node::Item(&Item { kind: ItemKind::TraitAlias(..), .. }) => id, Node::GenericParam(_) => self.get_parent_node(id), _ => bug!("ty_param_owner: {} not a type parameter", self.node_to_string(id)) } @@ -486,8 +522,8 @@ impl<'hir> Map<'hir> { pub fn ty_param_name(&self, id: HirId) -> Name { match self.get(id) { - Node::Item(&Item { node: ItemKind::Trait(..), .. }) | - Node::Item(&Item { node: ItemKind::TraitAlias(..), .. }) => kw::SelfUpper, + Node::Item(&Item { kind: ItemKind::Trait(..), .. }) | + Node::Item(&Item { kind: ItemKind::TraitAlias(..), .. }) => kw::SelfUpper, Node::GenericParam(param) => param.name.ident().name, _ => bug!("ty_param_name: {} not a type parameter", self.node_to_string(id)), } @@ -517,7 +553,7 @@ impl<'hir> Map<'hir> { match self.find_entry(hir_id).unwrap().node { Node::Item(&Item { span, - node: ItemKind::Mod(ref m), + kind: ItemKind::Mod(ref m), .. 
}) => (m, span, hir_id), Node::Crate => (&self.forest.krate.module, self.forest.krate.span, hir_id), @@ -536,9 +572,7 @@ impl<'hir> Map<'hir> { // in the expect_* calls the loops below self.read(hir_id); - let node_id = self.hir_to_node_id[&hir_id]; - - let module = &self.forest.krate.modules[&node_id]; + let module = &self.forest.krate.modules[&hir_id]; for id in &module.items { visitor.visit_item(self.expect_item(*id)); @@ -570,7 +604,7 @@ impl<'hir> Map<'hir> { Node::ImplItem(ref impl_item) => Some(&impl_item.generics), Node::TraitItem(ref trait_item) => Some(&trait_item.generics), Node::Item(ref item) => { - match item.node { + match item.kind { ItemKind::Fn(_, _, ref generics, _) | ItemKind::TyAlias(_, ref generics) | ItemKind::Enum(_, ref generics) | @@ -636,7 +670,7 @@ impl<'hir> Map<'hir> { Some(Node::TraitItem(_)) | Some(Node::ImplItem(_)) => true, Some(Node::Expr(e)) => { - match e.node { + match e.kind { ExprKind::Closure(..) => true, _ => false, } @@ -651,24 +685,24 @@ impl<'hir> Map<'hir> { let parent_id = self.get_parent_item(hir_id); match self.get(parent_id) { Node::Item(&Item { - node: ItemKind::Const(..), + kind: ItemKind::Const(..), .. }) | Node::TraitItem(&TraitItem { - node: TraitItemKind::Const(..), + kind: TraitItemKind::Const(..), .. }) | Node::ImplItem(&ImplItem { - node: ImplItemKind::Const(..), + kind: ImplItemKind::Const(..), .. }) | Node::AnonConst(_) | Node::Item(&Item { - node: ItemKind::Static(..), + kind: ItemKind::Static(..), .. }) => true, Node::Item(&Item { - node: ItemKind::Fn(_, header, ..), + kind: ItemKind::Fn(_, header, ..), .. }) => header.constness == Constness::Const, _ => false, @@ -678,51 +712,12 @@ impl<'hir> Map<'hir> { /// Wether `hir_id` corresponds to a `mod` or a crate. pub fn is_hir_id_module(&self, hir_id: HirId) -> bool { match self.lookup(hir_id) { - Some(Entry { node: Node::Item(Item { node: ItemKind::Mod(_), .. }), .. }) | + Some(Entry { node: Node::Item(Item { kind: ItemKind::Mod(_), .. }), .. }) | Some(Entry { node: Node::Crate, .. }) => true, _ => false, } } - - /// If there is some error when walking the parents (e.g., a node does not - /// have a parent in the map or a node can't be found), then we return the - /// last good `HirId` we found. Note that reaching the crate root (`id == 0`), - /// is not an error, since items in the crate module have the crate root as - /// parent. - fn walk_parent_nodes(&self, - start_id: HirId, - found: F, - bail_early: F2) - -> Result - where F: Fn(&Node<'hir>) -> bool, F2: Fn(&Node<'hir>) -> bool - { - let mut id = start_id; - loop { - let parent_id = self.get_parent_node(id); - if parent_id == CRATE_HIR_ID { - return Ok(CRATE_HIR_ID); - } - if parent_id == id { - return Err(id); - } - - if let Some(entry) = self.find_entry(parent_id) { - if let Node::Crate = entry.node { - return Err(id); - } - if found(&entry.node) { - return Ok(parent_id); - } else if bail_early(&entry.node) { - return Err(parent_id); - } - id = parent_id; - } else { - return Err(id); - } - } - } - /// Retrieves the `HirId` for `id`'s enclosing method, unless there's a /// `while` or `loop` before reaching it, as block tail returns are not /// available in them. @@ -746,29 +741,46 @@ impl<'hir> Map<'hir> { /// } /// ``` pub fn get_return_block(&self, id: HirId) -> Option { - let match_fn = |node: &Node<'_>| { - match *node { + let mut iter = ParentHirIterator::new(id, &self).peekable(); + let mut ignore_tail = false; + if let Some(entry) = self.find_entry(id) { + if let Node::Expr(Expr { kind: ExprKind::Ret(_), .. 
}) = entry.node { + // When dealing with `return` statements, we don't care about climbing only tail + // expressions. + ignore_tail = true; + } + } + while let Some((hir_id, node)) = iter.next() { + if let (Some((_, next_node)), false) = (iter.peek(), ignore_tail) { + match next_node { + Node::Block(Block { expr: None, .. }) => return None, + Node::Block(Block { expr: Some(expr), .. }) => { + if hir_id != expr.hir_id { + // The current node is not the tail expression of its parent. + return None; + } + } + _ => {} + } + } + match node { Node::Item(_) | Node::ForeignItem(_) | Node::TraitItem(_) | - Node::Expr(Expr { node: ExprKind::Closure(..), ..}) | - Node::ImplItem(_) => true, - _ => false, - } - }; - let match_non_returning_block = |node: &Node<'_>| { - match *node { + Node::Expr(Expr { kind: ExprKind::Closure(..), ..}) | + Node::ImplItem(_) => return Some(hir_id), Node::Expr(ref expr) => { - match expr.node { - ExprKind::Loop(..) | ExprKind::Ret(..) => true, - _ => false, + match expr.kind { + // Ignore `return`s on the first iteration + ExprKind::Loop(..) | ExprKind::Ret(..) => return None, + _ => {} } } - _ => false, + Node::Local(_) => return None, + _ => {} } - }; - - self.walk_parent_nodes(id, match_fn, match_non_returning_block).ok() + } + None } /// Retrieves the `HirId` for `id`'s parent item, or `id` itself if no @@ -776,16 +788,17 @@ impl<'hir> Map<'hir> { /// in the HIR which is recorded by the map and is an item, either an item /// in a module, trait, or impl. pub fn get_parent_item(&self, hir_id: HirId) -> HirId { - match self.walk_parent_nodes(hir_id, |node| match *node { - Node::Item(_) | - Node::ForeignItem(_) | - Node::TraitItem(_) | - Node::ImplItem(_) => true, - _ => false, - }, |_| false) { - Ok(id) => id, - Err(id) => id, + for (hir_id, node) in ParentHirIterator::new(hir_id, &self) { + match node { + Node::Crate | + Node::Item(_) | + Node::ForeignItem(_) | + Node::TraitItem(_) | + Node::ImplItem(_) => return hir_id, + _ => {} + } } + hir_id } /// Returns the `DefId` of `id`'s nearest module parent, or `id` itself if no @@ -797,64 +810,94 @@ impl<'hir> Map<'hir> { /// Returns the `HirId` of `id`'s nearest module parent, or `id` itself if no /// module parent is in this map. pub fn get_module_parent_node(&self, hir_id: HirId) -> HirId { - match self.walk_parent_nodes(hir_id, |node| match *node { - Node::Item(&Item { node: ItemKind::Mod(_), .. }) => true, - _ => false, - }, |_| false) { - Ok(id) => id, - Err(id) => id, + for (hir_id, node) in ParentHirIterator::new(hir_id, &self) { + if let Node::Item(&Item { kind: ItemKind::Mod(_), .. }) = node { + return hir_id; + } } + CRATE_HIR_ID + } + + /// When on a match arm tail expression or on a match arm, give back the enclosing `match` + /// expression. + /// + /// Used by error reporting when there's a type error in a match arm caused by the `match` + /// expression needing to be unit. + pub fn get_match_if_cause(&self, hir_id: HirId) -> Option<&Expr> { + for (_, node) in ParentHirIterator::new(hir_id, &self) { + match node { + Node::Item(_) | + Node::ForeignItem(_) | + Node::TraitItem(_) | + Node::ImplItem(_) => break, + Node::Expr(expr) => match expr.kind { + ExprKind::Match(_, _, _) => return Some(expr), + _ => {} + }, + Node::Stmt(stmt) => match stmt.kind { + StmtKind::Local(_) => break, + _ => {} + } + _ => {} + } + } + None } /// Returns the nearest enclosing scope. A scope is roughly an item or block. 
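For reference, a minimal sketch of how the `ParentHirIterator` introduced above is meant to be used from `Map` methods; `find_enclosing_fn` is a hypothetical helper, not part of this patch, and only illustrates the climb-until-match pattern that replaces `walk_parent_nodes`:

impl<'hir> Map<'hir> {
    // Hypothetical helper (illustration only): find the nearest enclosing `fn` item.
    fn find_enclosing_fn(&self, hir_id: HirId) -> Option<HirId> {
        // Walk from `hir_id` towards the crate root, stopping at the first `fn` item.
        for (parent_id, node) in ParentHirIterator::new(hir_id, &self) {
            if let Node::Item(&Item { kind: ItemKind::Fn(..), .. }) = node {
                return Some(parent_id);
            }
        }
        None
    }
}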
pub fn get_enclosing_scope(&self, hir_id: HirId) -> Option { - self.walk_parent_nodes(hir_id, |node| match *node { - Node::Item(i) => { - match i.node { - ItemKind::Fn(..) - | ItemKind::Mod(..) - | ItemKind::Enum(..) - | ItemKind::Struct(..) - | ItemKind::Union(..) - | ItemKind::Trait(..) - | ItemKind::Impl(..) => true, - _ => false, - } - }, - Node::ForeignItem(fi) => { - match fi.node { - ForeignItemKind::Fn(..) => true, - _ => false, - } - }, - Node::TraitItem(ti) => { - match ti.node { - TraitItemKind::Method(..) => true, - _ => false, - } - }, - Node::ImplItem(ii) => { - match ii.node { - ImplItemKind::Method(..) => true, - _ => false, - } - }, - Node::Block(_) => true, - _ => false, - }, |_| false).ok() + for (hir_id, node) in ParentHirIterator::new(hir_id, &self) { + if match node { + Node::Item(i) => { + match i.kind { + ItemKind::Fn(..) + | ItemKind::Mod(..) + | ItemKind::Enum(..) + | ItemKind::Struct(..) + | ItemKind::Union(..) + | ItemKind::Trait(..) + | ItemKind::Impl(..) => true, + _ => false, + } + }, + Node::ForeignItem(fi) => { + match fi.kind { + ForeignItemKind::Fn(..) => true, + _ => false, + } + }, + Node::TraitItem(ti) => { + match ti.kind { + TraitItemKind::Method(..) => true, + _ => false, + } + }, + Node::ImplItem(ii) => { + match ii.kind { + ImplItemKind::Method(..) => true, + _ => false, + } + }, + Node::Block(_) => true, + _ => false, + } { + return Some(hir_id); + } + } + None } /// Returns the defining scope for an opaque type definition. - pub fn get_defining_scope(&self, id: HirId) -> Option { + pub fn get_defining_scope(&self, id: HirId) -> HirId { let mut scope = id; loop { - scope = self.get_enclosing_scope(scope)?; + scope = self.get_enclosing_scope(scope).unwrap_or(CRATE_HIR_ID); if scope == CRATE_HIR_ID { - return Some(CRATE_HIR_ID); + return CRATE_HIR_ID; } match self.get(scope) { Node::Item(i) => { - match i.node { + match i.kind { ItemKind::OpaqueTy(OpaqueTy { impl_trait_fn: None, .. }) => {} _ => break, } @@ -863,7 +906,7 @@ impl<'hir> Map<'hir> { _ => break, } } - Some(scope) + scope } pub fn get_parent_did(&self, id: HirId) -> DefId { @@ -874,7 +917,7 @@ impl<'hir> Map<'hir> { let parent = self.get_parent_item(hir_id); if let Some(entry) = self.find_entry(parent) { if let Entry { - node: Node::Item(Item { node: ItemKind::ForeignMod(ref nm), .. }), .. } = entry + node: Node::Item(Item { kind: ItemKind::ForeignMod(ref nm), .. }), .. } = entry { self.read(hir_id); // reveals some of the content of a node return nm.abi; @@ -907,7 +950,7 @@ impl<'hir> Map<'hir> { pub fn expect_variant_data(&self, id: HirId) -> &'hir VariantData { match self.find(id) { Some(Node::Item(i)) => { - match i.node { + match i.kind { ItemKind::Struct(ref struct_def, _) | ItemKind::Union(ref struct_def, _) => struct_def, _ => bug!("struct ID bound to non-struct {}", self.node_to_string(id)) @@ -950,7 +993,7 @@ impl<'hir> Map<'hir> { Node::Field(f) => f.ident.name, Node::Lifetime(lt) => lt.name.ident().name, Node::GenericParam(param) => param.name.ident().name, - Node::Binding(&Pat { node: PatKind::Binding(_, _, l, _), .. }) => l.name, + Node::Binding(&Pat { kind: PatKind::Binding(_, _, l, _), .. }) => l.name, Node::Ctor(..) 
=> self.name(self.get_parent_item(id)), _ => bug!("no name for {}", self.node_to_string(id)) } @@ -970,7 +1013,7 @@ impl<'hir> Map<'hir> { Some(Node::Variant(ref v)) => Some(&v.attrs[..]), Some(Node::Field(ref f)) => Some(&f.attrs[..]), Some(Node::Expr(ref e)) => Some(&*e.attrs), - Some(Node::Stmt(ref s)) => Some(s.node.attrs()), + Some(Node::Stmt(ref s)) => Some(s.kind.attrs()), Some(Node::Arm(ref a)) => Some(&*a.attrs), Some(Node::GenericParam(param)) => Some(¶m.attrs[..]), // Unit/tuple structs/variants take the attributes straight from @@ -988,14 +1031,12 @@ impl<'hir> Map<'hir> { // see the comment on `HirEntryMap`. // Iterate over all the indices and return a reference to // local maps and their index given that they exist. - self.map.iter().enumerate().filter_map(|(i, local_map)| { - local_map.as_ref().map(|m| (i, m)) - }).flat_map(move |(array_index, local_map)| { + self.map.iter_enumerated().flat_map(move |(owner, local_map)| { // Iterate over each valid entry in the local map. local_map.iter_enumerated().filter_map(move |(i, entry)| entry.map(move |_| { // Reconstruct the `HirId` based on the 3 indices we used to find it. HirId { - owner: DefIndex::from(array_index), + owner, local_id: i, } })) @@ -1066,6 +1107,14 @@ impl<'hir> Map<'hir> { self.as_local_hir_id(id).map(|id| self.span(id)) } + pub fn res_span(&self, res: Res) -> Option { + match res { + Res::Err => None, + Res::Local(id) => Some(self.span(id)), + res => self.span_if_local(res.opt_def_id()?), + } + } + pub fn node_to_string(&self, id: HirId) -> String { hir_id_to_string(self, id, true) } @@ -1125,7 +1174,7 @@ impl<'a> NodesMatchingSuffix<'a> { } fn item_is_mod(item: &Item) -> bool { - match item.node { + match item.kind { ItemKind::Mod(_) => true, _ => false, } @@ -1171,6 +1220,8 @@ pub fn map_crate<'hir>(sess: &crate::session::Session, forest: &'hir Forest, definitions: &'hir Definitions) -> Map<'hir> { + let _prof_timer = sess.prof.generic_activity("build_hir_map"); + // Build the reverse mapping of `node_to_hir_id`. let hir_to_node_id = definitions.node_to_hir_id.iter_enumerated() .map(|(node_id, &hir_id)| (hir_id, node_id)).collect(); @@ -1288,7 +1339,7 @@ fn hir_id_to_string(map: &Map<'_>, id: HirId, include_id: bool) -> String { match map.find(id) { Some(Node::Item(item)) => { - let item_str = match item.node { + let item_str = match item.kind { ItemKind::ExternCrate(..) => "extern crate", ItemKind::Use(..) => "use", ItemKind::Static(..) => "static", @@ -1312,7 +1363,7 @@ fn hir_id_to_string(map: &Map<'_>, id: HirId, include_id: bool) -> String { format!("foreign item {}{}", path_str(), id_str) } Some(Node::ImplItem(ii)) => { - match ii.node { + match ii.kind { ImplItemKind::Const(..) => { format!("assoc const {} in {}{}", ii.ident, path_str(), id_str) } @@ -1328,7 +1379,7 @@ fn hir_id_to_string(map: &Map<'_>, id: HirId, include_id: bool) -> String { } } Some(Node::TraitItem(ti)) => { - let kind = match ti.node { + let kind = match ti.kind { TraitItemKind::Const(..) => "assoc constant", TraitItemKind::Method(..) => "trait method", TraitItemKind::Type(..) 
=> "assoc type", diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index 92a8c00804..0edc41e6b4 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -19,7 +19,7 @@ use crate::ty::query::Providers; use crate::util::nodemap::{NodeMap, FxHashSet}; use errors::FatalError; -use syntax_pos::{Span, DUMMY_SP, symbol::InternedString, MultiSpan}; +use syntax_pos::{Span, DUMMY_SP, MultiSpan}; use syntax::source_map::Spanned; use syntax::ast::{self, CrateSugar, Ident, Name, NodeId, AsmDialect}; use syntax::ast::{Attribute, Label, LitKind, StrStyle, FloatTy, IntTy, UintTy}; @@ -122,9 +122,9 @@ impl fmt::Display for HirId { // Hack to ensure that we don't try to access the private parts of `ItemLocalId` in this module. mod item_local_id_inner { - use rustc_data_structures::indexed_vec::Idx; + use rustc_index::vec::Idx; use rustc_macros::HashStable; - newtype_index! { + rustc_index::newtype_index! { /// An `ItemLocalId` uniquely identifies something within a given "item-like"; /// that is, within a `hir::Item`, `hir::TraitItem`, or `hir::ImplItem`. There is no /// guarantee that the numerical value of a given `ItemLocalId` corresponds to @@ -479,7 +479,7 @@ impl GenericArgs { match arg { GenericArg::Lifetime(_) => {} GenericArg::Type(ref ty) => { - if let TyKind::Tup(ref tys) = ty.node { + if let TyKind::Tup(ref tys) = ty.kind { return tys; } break; @@ -628,9 +628,9 @@ impl Generics { own_counts } - pub fn get_named(&self, name: InternedString) -> Option<&GenericParam> { + pub fn get_named(&self, name: Symbol) -> Option<&GenericParam> { for param in &self.params { - if name == param.name.ident().as_interned_str() { + if name == param.name.ident().name { return Some(param); } } @@ -669,6 +669,12 @@ impl WhereClause { Some(self.span) } } + + /// The `WhereClause` under normal circumstances points at either the predicates or the empty + /// space where the `where` clause should be. Only of use for diagnostic suggestions. + pub fn span_for_predicates_or_empty_place(&self) -> Span { + self.span + } } /// A single predicate in a where-clause. @@ -766,7 +772,7 @@ pub struct Crate { /// A list of modules written out in the order in which they /// appear in the crate. This includes the main crate module. - pub modules: BTreeMap, + pub modules: BTreeMap, } impl Crate { @@ -861,7 +867,7 @@ pub struct Block { pub span: Span, /// If true, then there may exist `break 'a` values that aim to /// break out of this block early. - /// Used by `'label: {}` blocks and by `catch` statements. + /// Used by `'label: {}` blocks and by `try {}` blocks. 
pub targeted_by_break: bool, } @@ -869,7 +875,7 @@ pub struct Block { pub struct Pat { #[stable_hasher(ignore)] pub hir_id: HirId, - pub node: PatKind, + pub kind: PatKind, pub span: Span, } @@ -882,44 +888,61 @@ impl fmt::Debug for Pat { impl Pat { // FIXME(#19596) this is a workaround, but there should be a better way - fn walk_(&self, it: &mut G) -> bool - where G: FnMut(&Pat) -> bool - { + fn walk_short_(&self, it: &mut impl FnMut(&Pat) -> bool) -> bool { if !it(self) { return false; } - match self.node { - PatKind::Binding(.., Some(ref p)) => p.walk_(it), - PatKind::Struct(_, ref fields, _) => { - fields.iter().all(|field| field.pat.walk_(it)) - } - PatKind::TupleStruct(_, ref s, _) | PatKind::Tuple(ref s, _) => { - s.iter().all(|p| p.walk_(it)) - } - PatKind::Or(ref pats) => pats.iter().all(|p| p.walk_(it)), - PatKind::Box(ref s) | PatKind::Ref(ref s, _) => { - s.walk_(it) - } - PatKind::Slice(ref before, ref slice, ref after) => { + use PatKind::*; + match &self.kind { + Wild | Lit(_) | Range(..) | Binding(.., None) | Path(_) => true, + Box(s) | Ref(s, _) | Binding(.., Some(s)) => s.walk_short_(it), + Struct(_, fields, _) => fields.iter().all(|field| field.pat.walk_short_(it)), + TupleStruct(_, s, _) | Tuple(s, _) | Or(s) => s.iter().all(|p| p.walk_short_(it)), + Slice(before, slice, after) => { before.iter() .chain(slice.iter()) .chain(after.iter()) - .all(|p| p.walk_(it)) - } - PatKind::Wild | - PatKind::Lit(_) | - PatKind::Range(..) | - PatKind::Binding(..) | - PatKind::Path(_) => { - true + .all(|p| p.walk_short_(it)) } } } - pub fn walk(&self, mut it: F) -> bool - where F: FnMut(&Pat) -> bool - { + /// Walk the pattern in left-to-right order, + /// short circuiting (with `.all(..)`) if `false` is returned. + /// + /// Note that when visiting e.g. `Tuple(ps)`, + /// if visiting `ps[0]` returns `false`, + /// then `ps[1]` will not be visited. + pub fn walk_short(&self, mut it: impl FnMut(&Pat) -> bool) -> bool { + self.walk_short_(&mut it) + } + + // FIXME(#19596) this is a workaround, but there should be a better way + fn walk_(&self, it: &mut impl FnMut(&Pat) -> bool) { + if !it(self) { + return; + } + + use PatKind::*; + match &self.kind { + Wild | Lit(_) | Range(..) | Binding(.., None) | Path(_) => {}, + Box(s) | Ref(s, _) | Binding(.., Some(s)) => s.walk_(it), + Struct(_, fields, _) => fields.iter().for_each(|field| field.pat.walk_(it)), + TupleStruct(_, s, _) | Tuple(s, _) | Or(s) => s.iter().for_each(|p| p.walk_(it)), + Slice(before, slice, after) => { + before.iter() + .chain(slice.iter()) + .chain(after.iter()) + .for_each(|p| p.walk_(it)) + } + } + } + + /// Walk the pattern in left-to-right order. + /// + /// If `it(pat)` returns `false`, the children are not visited. + pub fn walk(&self, mut it: impl FnMut(&Pat) -> bool) { self.walk_(&mut it) } } @@ -972,6 +995,15 @@ pub enum RangeEnd { Excluded, } +impl fmt::Display for RangeEnd { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(match self { + RangeEnd::Included => "..=", + RangeEnd::Excluded => "..", + }) + } +} + #[derive(RustcEncodable, RustcDecodable, Debug, HashStable)] pub enum PatKind { /// Represents a wildcard pattern (i.e., `_`). 
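For reference, a minimal usage sketch of the `walk_short` API added above; `has_binding` is a hypothetical free function (not part of this patch) that mirrors what the reworked `Pat::contains_bindings` in `pat_util.rs` does later in this diff:

// Hypothetical helper: does `pat` bind at least one name?
fn has_binding(pat: &Pat) -> bool {
    let mut found = false;
    pat.walk_short(|p| {
        if let PatKind::Binding(..) = p.kind {
            found = true;
            false // Short-circuit: the remaining sub-patterns are not visited.
        } else {
            true // Keep descending into child patterns.
        }
    });
    found
}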
@@ -1036,9 +1068,16 @@ impl Mutability { MutImmutable => MutImmutable, } } + + pub fn invert(self) -> Self { + match self { + MutMutable => MutImmutable, + MutImmutable => MutMutable, + } + } } -#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Hash, HashStable)] +#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, HashStable)] pub enum BinOpKind { /// The `+` operator (addition). Add, @@ -1172,7 +1211,7 @@ impl Into for BinOpKind { pub type BinOp = Spanned; -#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Hash, HashStable)] +#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, HashStable)] pub enum UnOp { /// The `*` operator (deferencing). UnDeref, @@ -1204,7 +1243,7 @@ impl UnOp { #[derive(RustcEncodable, RustcDecodable)] pub struct Stmt { pub hir_id: HirId, - pub node: StmtKind, + pub kind: StmtKind, pub span: Span, } @@ -1259,15 +1298,15 @@ pub struct Local { } /// Represents a single arm of a `match` expression, e.g. -/// ` (if ) => `. +/// ` (if ) => `. #[derive(RustcEncodable, RustcDecodable, Debug, HashStable)] pub struct Arm { #[stable_hasher(ignore)] pub hir_id: HirId, pub span: Span, pub attrs: HirVec, - /// Multiple patterns can be combined with `|` - pub pats: HirVec>, + /// If this pattern and the optional guard matches, then `body` is evaluated. + pub pat: P, /// Optional guard clause. pub guard: Option, /// The expression the arm evaluates to if this arm matches. @@ -1342,24 +1381,54 @@ impl Body { hir_id: self.value.hir_id, } } + + pub fn generator_kind(&self) -> Option { + self.generator_kind + } } /// The type of source expression that caused this generator to be created. -// Not `IsAsync` because we want to eventually add support for `AsyncGen` -#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, HashStable, - RustcEncodable, RustcDecodable, Hash, Debug, Copy)] +#[derive(Clone, PartialEq, Eq, HashStable, RustcEncodable, RustcDecodable, Debug, Copy)] pub enum GeneratorKind { - /// An `async` block or function. - Async, + /// An explicit `async` block or the body of an async function. + Async(AsyncGeneratorKind), + /// A generator literal created via a `yield` inside a closure. Gen, } impl fmt::Display for GeneratorKind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + GeneratorKind::Async(k) => fmt::Display::fmt(k, f), + GeneratorKind::Gen => f.write_str("generator"), + } + } +} + +/// In the case of a generator created as part of an async construct, +/// which kind of async construct caused it to be created? +/// +/// This helps error messages but is also used to drive coercions in +/// type-checking (see #60424). +#[derive(Clone, PartialEq, Eq, HashStable, RustcEncodable, RustcDecodable, Debug, Copy)] +pub enum AsyncGeneratorKind { + /// An explicit `async` block written by the user. + Block, + + /// An explicit `async` block written by the user. + Closure, + + /// The `async` block generated as the body of an async function. 
+ Fn, +} + +impl fmt::Display for AsyncGeneratorKind { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(match self { - GeneratorKind::Async => "`async` object", - GeneratorKind::Gen => "generator", + AsyncGeneratorKind::Block => "`async` block", + AsyncGeneratorKind::Closure => "`async` closure body", + AsyncGeneratorKind::Fn => "`async fn` body", }) } } @@ -1406,7 +1475,7 @@ pub struct AnonConst { #[derive(RustcEncodable, RustcDecodable)] pub struct Expr { pub hir_id: HirId, - pub node: ExprKind, + pub kind: ExprKind, pub attrs: ThinVec, pub span: Span, } @@ -1417,7 +1486,7 @@ static_assert_size!(Expr, 72); impl Expr { pub fn precedence(&self) -> ExprPrecedence { - match self.node { + match self.kind { ExprKind::Box(_) => ExprPrecedence::Box, ExprKind::Array(_) => ExprPrecedence::Array, ExprKind::Call(..) => ExprPrecedence::Call, @@ -1450,7 +1519,7 @@ impl Expr { } pub fn is_place_expr(&self) -> bool { - match self.node { + match self.kind { ExprKind::Path(QPath::Resolved(_, ref path)) => { match path.res { Res::Local(..) @@ -1503,6 +1572,19 @@ impl Expr { } } } + + /// If `Self.kind` is `ExprKind::DropTemps(expr)`, drill down until we get a non-`DropTemps` + /// `Expr`. This is used in suggestions to ignore this `ExprKind` as it is semantically + /// silent, only signaling the ownership system. By doing this, suggestions that check the + /// `ExprKind` of any given `Expr` for presentation don't have to care about `DropTemps` + /// beyond remembering to call this function before doing analysis on it. + pub fn peel_drop_temps(&self) -> &Self { + let mut expr = self; + while let ExprKind::DropTemps(inner) = &expr.kind { + expr = inner; + } + expr + } } impl fmt::Debug for Expr { @@ -1535,7 +1617,7 @@ pub enum ExprKind { /// Thus, `x.foo::(a, b, c, d)` is represented as /// `ExprKind::MethodCall(PathSegment { foo, [Bar, Baz] }, [x, a, b, c, d])`. MethodCall(P, Span, HirVec), - /// A tuple (e.g., `(a, b, c ,d)`). + /// A tuple (e.g., `(a, b, c, d)`). Tup(HirVec), /// A binary operation (e.g., `a + b`, `a * b`). Binary(BinOp, P, P), @@ -1741,6 +1823,7 @@ pub struct Destination { pub enum GeneratorMovability { /// May contain self-references, `!Unpin`. Static, + /// Must not contain self-references, `Unpin`. Movable, } @@ -1802,7 +1885,7 @@ pub struct TraitItem { pub hir_id: HirId, pub attrs: HirVec, pub generics: Generics, - pub node: TraitItemKind, + pub kind: TraitItemKind, pub span: Span, } @@ -1845,7 +1928,7 @@ pub struct ImplItem { pub defaultness: Defaultness, pub attrs: HirVec, pub generics: Generics, - pub node: ImplItemKind, + pub kind: ImplItemKind, pub span: Span, } @@ -1911,7 +1994,7 @@ impl TypeBinding { #[derive(RustcEncodable, RustcDecodable)] pub struct Ty { pub hir_id: HirId, - pub node: TyKind, + pub kind: TyKind, pub span: Span, } @@ -1999,9 +2082,6 @@ pub enum TyKind { Infer, /// Placeholder for a type that has failed to be defined. Err, - /// Placeholder for C-variadic arguments. We "spoof" the `VaListImpl` created - /// from the variadic arguments. This type is only valid up to typeck. - CVarArgs(Lifetime), } #[derive(Copy, Clone, RustcEncodable, RustcDecodable, Debug, HashStable)] @@ -2388,7 +2468,7 @@ pub struct Item { pub ident: Ident, pub hir_id: HirId, pub attrs: HirVec, - pub node: ItemKind, + pub kind: ItemKind, pub vis: Visibility, pub span: Span, } @@ -2432,7 +2512,7 @@ pub enum ItemKind { Fn(P, FnHeader, Generics, BodyId), /// A module. Mod(Mod), - /// An external module. + /// An external module, e.g. `extern { .. }`. 
ForeignMod(ForeignMod), /// Module-level inline assembly (from `global_asm!`). GlobalAsm(P), @@ -2553,7 +2633,7 @@ pub struct ForeignItem { #[stable_hasher(project(name))] pub ident: Ident, pub attrs: HirVec, - pub node: ForeignItemKind, + pub kind: ForeignItemKind, pub hir_id: HirId, pub span: Span, pub vis: Visibility, @@ -2626,6 +2706,11 @@ pub struct CodegenFnAttrs { /// probably isn't set when this is set, this is for foreign items while /// `#[export_name]` is for Rust-defined functions. pub link_name: Option, + /// The `#[link_ordinal = "..."]` attribute, indicating an ordinal an + /// imported function has in the dynamic library. Note that this must not + /// be set when `link_name` is set. This is for foreign items with the + /// "raw-dylib" kind. + pub link_ordinal: Option, /// The `#[target_feature(enable = "...")]` attribute and the enabled /// features (only enabled features are supported right now). pub target_features: Vec, @@ -2671,9 +2756,11 @@ bitflags! { /// `#[used]`: indicates that LLVM can't eliminate this function (but the /// linker can!). const USED = 1 << 9; - /// #[ffi_returns_twice], indicates that an extern function can return + /// `#[ffi_returns_twice]`, indicates that an extern function can return /// multiple times - const FFI_RETURNS_TWICE = 1 << 10; + const FFI_RETURNS_TWICE = 1 << 10; + /// `#[track_caller]`: allow access to the caller location + const TRACK_CALLER = 1 << 11; } } @@ -2685,6 +2772,7 @@ impl CodegenFnAttrs { optimize: OptimizeAttr::None, export_name: None, link_name: None, + link_ordinal: None, target_features: vec![], linkage: None, link_section: None, diff --git a/src/librustc/hir/pat_util.rs b/src/librustc/hir/pat_util.rs index 0d2c7d393b..feb0d97822 100644 --- a/src/librustc/hir/pat_util.rs +++ b/src/librustc/hir/pat_util.rs @@ -45,7 +45,7 @@ impl EnumerateAndAdjustIterator for T { impl hir::Pat { pub fn is_refutable(&self) -> bool { - match self.node { + match self.kind { PatKind::Lit(_) | PatKind::Range(..) | PatKind::Path(hir::QPath::Resolved(Some(..), _)) | @@ -66,50 +66,70 @@ impl hir::Pat { /// Call `f` on every "binding" in a pattern, e.g., on `a` in /// `match foo() { Some(a) => (), None => () }` - pub fn each_binding(&self, mut f: F) - where F: FnMut(hir::BindingAnnotation, HirId, Span, ast::Ident), - { + pub fn each_binding(&self, mut f: impl FnMut(hir::BindingAnnotation, HirId, Span, ast::Ident)) { self.walk(|p| { - if let PatKind::Binding(binding_mode, _, ident, _) = p.node { + if let PatKind::Binding(binding_mode, _, ident, _) = p.kind { f(binding_mode, p.hir_id, p.span, ident); } true }); } + /// Call `f` on every "binding" in a pattern, e.g., on `a` in + /// `match foo() { Some(a) => (), None => () }`. + /// + /// When encountering an or-pattern `p_0 | ... | p_n` only `p_0` will be visited. + pub fn each_binding_or_first( + &self, + f: &mut impl FnMut(hir::BindingAnnotation, HirId, Span, ast::Ident), + ) { + self.walk(|p| match &p.kind { + PatKind::Or(ps) => { + ps[0].each_binding_or_first(f); + false + }, + PatKind::Binding(bm, _, ident, _) => { + f(*bm, p.hir_id, p.span, *ident); + true + } + _ => true, + }) + } + /// Checks if the pattern contains any patterns that bind something to /// an ident, e.g., `foo`, or `Foo(foo)` or `foo @ Bar(..)`. pub fn contains_bindings(&self) -> bool { - let mut contains_bindings = false; - self.walk(|p| { - if let PatKind::Binding(..) = p.node { - contains_bindings = true; - false // there's at least one binding, can short circuit now. 
- } else { - true - } - }); - contains_bindings + self.satisfies(|p| match p.kind { + PatKind::Binding(..) => true, + _ => false, + }) } /// Checks if the pattern contains any patterns that bind something to /// an ident or wildcard, e.g., `foo`, or `Foo(_)`, `foo @ Bar(..)`, pub fn contains_bindings_or_wild(&self) -> bool { - let mut contains_bindings = false; - self.walk(|p| { - match p.node { - PatKind::Binding(..) | PatKind::Wild => { - contains_bindings = true; - false // there's at least one binding/wildcard, can short circuit now. - } - _ => true + self.satisfies(|p| match p.kind { + PatKind::Binding(..) | PatKind::Wild => true, + _ => false, + }) + } + + /// Checks if the pattern satisfies the given predicate on some sub-pattern. + fn satisfies(&self, pred: impl Fn(&Self) -> bool) -> bool { + let mut satisfies = false; + self.walk_short(|p| { + if pred(p) { + satisfies = true; + false // Found one, can short circuit now. + } else { + true } }); - contains_bindings + satisfies } pub fn simple_ident(&self) -> Option { - match self.node { + match self.kind { PatKind::Binding(hir::BindingAnnotation::Unannotated, _, ident, None) | PatKind::Binding(hir::BindingAnnotation::Mutable, _, ident, None) => Some(ident), _ => None, @@ -119,20 +139,20 @@ impl hir::Pat { /// Returns variants that are necessary to exist for the pattern to match. pub fn necessary_variants(&self) -> Vec { let mut variants = vec![]; - self.walk(|p| { - match p.node { - PatKind::Path(hir::QPath::Resolved(_, ref path)) | - PatKind::TupleStruct(hir::QPath::Resolved(_, ref path), ..) | - PatKind::Struct(hir::QPath::Resolved(_, ref path), ..) => { - match path.res { - Res::Def(DefKind::Variant, id) => variants.push(id), - Res::Def(DefKind::Ctor(CtorOf::Variant, ..), id) => variants.push(id), - _ => () - } + self.walk(|p| match &p.kind { + PatKind::Or(_) => false, + PatKind::Path(hir::QPath::Resolved(_, path)) | + PatKind::TupleStruct(hir::QPath::Resolved(_, path), ..) | + PatKind::Struct(hir::QPath::Resolved(_, path), ..) => { + if let Res::Def(DefKind::Variant, id) + | Res::Def(DefKind::Ctor(CtorOf::Variant, ..), id) + = path.res + { + variants.push(id); } - _ => () + true } - true + _ => true, }); variants.sort(); variants.dedup(); @@ -148,33 +168,14 @@ impl hir::Pat { let mut result = None; self.each_binding(|annotation, _, _, _| { match annotation { - hir::BindingAnnotation::Ref => { - match result { - None | Some(hir::MutImmutable) => result = Some(hir::MutImmutable), - _ => (), - } + hir::BindingAnnotation::Ref => match result { + None | Some(hir::MutImmutable) => result = Some(hir::MutImmutable), + _ => {} } hir::BindingAnnotation::RefMut => result = Some(hir::MutMutable), - _ => (), + _ => {} } }); result } } - -impl hir::Arm { - /// Checks if the patterns for this arm contain any `ref` or `ref mut` - /// bindings, and if yes whether its containing mutable ones or just immutables ones. - pub fn contains_explicit_ref_binding(&self) -> Option { - // FIXME(tschottdorf): contains_explicit_ref_binding() must be removed - // for #42640 (default match binding modes). - // - // See #44848. 
- self.pats.iter() - .filter_map(|pat| pat.contains_explicit_ref_binding()) - .max_by_key(|m| match *m { - hir::MutMutable => 1, - hir::MutImmutable => 0, - }) - } -} diff --git a/src/librustc/hir/print.rs b/src/librustc/hir/print.rs index cfbfb5eceb..64b355f6ec 100644 --- a/src/librustc/hir/print.rs +++ b/src/librustc/hir/print.rs @@ -1,10 +1,10 @@ use rustc_target::spec::abi::Abi; use syntax::ast; use syntax::source_map::{SourceMap, Spanned}; -use syntax::parse::ParseSess; use syntax::print::pp::{self, Breaks}; use syntax::print::pp::Breaks::{Consistent, Inconsistent}; use syntax::print::pprust::{self, Comments, PrintState}; +use syntax::sess::ParseSess; use syntax::symbol::kw; use syntax::util::parser::{self, AssocOp, Fixity}; use syntax_pos::{self, BytePos, FileName}; @@ -286,7 +286,7 @@ impl<'a> State<'a> { pub fn print_type(&mut self, ty: &hir::Ty) { self.maybe_print_comment(ty.span.lo()); self.ibox(0); - match ty.node { + match ty.kind { hir::TyKind::Slice(ref ty) => { self.s.word("["); self.print_type(&ty); @@ -361,9 +361,6 @@ impl<'a> State<'a> { self.s.word("/*ERROR*/"); self.pclose(); } - hir::TyKind::CVarArgs(_) => { - self.s.word("..."); - } } self.end() } @@ -372,7 +369,7 @@ impl<'a> State<'a> { self.hardbreak_if_not_bol(); self.maybe_print_comment(item.span.lo()); self.print_outer_attributes(&item.attrs); - match item.node { + match item.kind { hir::ForeignItemKind::Fn(ref decl, ref arg_names, ref generics) => { self.head(""); self.print_fn(decl, @@ -474,7 +471,7 @@ impl<'a> State<'a> { self.maybe_print_comment(item.span.lo()); self.print_outer_attributes(&item.attrs); self.ann.pre(self, AnnNode::Item(item)); - match item.node { + match item.kind { hir::ItemKind::ExternCrate(orig_name) => { self.head(visibility_qualified(&item.vis, "extern crate")); if let Some(orig_name) = orig_name { @@ -858,7 +855,7 @@ impl<'a> State<'a> { self.hardbreak_if_not_bol(); self.maybe_print_comment(ti.span.lo()); self.print_outer_attributes(&ti.attrs); - match ti.node { + match ti.kind { hir::TraitItemKind::Const(ref ty, default) => { let vis = Spanned { span: syntax_pos::DUMMY_SP, node: hir::VisibilityKind::Inherited }; @@ -896,7 +893,7 @@ impl<'a> State<'a> { self.print_outer_attributes(&ii.attrs); self.print_defaultness(ii.defaultness); - match ii.node { + match ii.kind { hir::ImplItemKind::Const(ref ty, expr) => { self.print_associated_const(ii.ident, &ty, Some(expr), &ii.vis); } @@ -944,7 +941,7 @@ impl<'a> State<'a> { pub fn print_stmt(&mut self, st: &hir::Stmt) { self.maybe_print_comment(st.span.lo()); - match st.node { + match st.kind { hir::StmtKind::Local(ref loc) => { self.print_local(loc.init.as_deref(), |this| this.print_local_decl(&loc)); } @@ -961,7 +958,7 @@ impl<'a> State<'a> { self.s.word(";"); } } - if stmt_ends_with_semi(&st.node) { + if stmt_ends_with_semi(&st.kind) { self.s.word(";"); } self.maybe_print_trailing_comment(st.span, None) @@ -1035,7 +1032,7 @@ impl<'a> State<'a> { /// Print an expr using syntax that's acceptable in a condition position, such as the `cond` in /// `if cond { ... }`. pub fn print_expr_as_cond(&mut self, expr: &hir::Expr) { - let needs_par = match expr.node { + let needs_par = match expr.kind { // These cases need parens due to the parse error observed in #26461: `if return {}` // parses as the erroneous construct `if (return {})`, not `if (return) {}`. hir::ExprKind::Closure(..) 
| @@ -1119,11 +1116,10 @@ impl<'a> State<'a> { } fn print_expr_call(&mut self, func: &hir::Expr, args: &[hir::Expr]) { - let prec = - match func.node { - hir::ExprKind::Field(..) => parser::PREC_FORCE_PAREN, - _ => parser::PREC_POSTFIX, - }; + let prec = match func.kind { + hir::ExprKind::Field(..) => parser::PREC_FORCE_PAREN, + _ => parser::PREC_POSTFIX, + }; self.print_expr_maybe_paren(func, prec); self.print_call_post(args) @@ -1161,7 +1157,7 @@ impl<'a> State<'a> { Fixity::None => (prec + 1, prec + 1), }; - let left_prec = match (&lhs.node, op.node) { + let left_prec = match (&lhs.kind, op.node) { // These cases need parens: `x as i32 < y` has the parser thinking that `i32 < y` is // the beginning of a path type. It starts trying to parse `x as (i32 < y ...` instead // of `(x as i32) < ...`. We need to convince it _not_ to do that. @@ -1200,7 +1196,7 @@ impl<'a> State<'a> { self.print_outer_attributes(&expr.attrs); self.ibox(INDENT_UNIT); self.ann.pre(self, AnnNode::Expr(expr)); - match expr.node { + match expr.kind { hir::ExprKind::Box(ref expr) => { self.word_space("box"); self.print_expr_maybe_paren(expr, parser::PREC_PREFIX); @@ -1527,9 +1523,17 @@ impl<'a> State<'a> { colons_before_params) } hir::QPath::TypeRelative(ref qself, ref item_segment) => { - self.s.word("<"); - self.print_type(qself); - self.s.word(">"); + // If we've got a compound-qualified-path, let's push an additional pair of angle + // brackets, so that we pretty-print `<::C>` as `::C`, instead of just + // `A::B::C` (since the latter could be ambiguous to the user) + if let hir::TyKind::Path(hir::QPath::Resolved(None, _)) = &qself.kind { + self.print_type(qself); + } else { + self.s.word("<"); + self.print_type(qself); + self.s.word(">"); + } + self.s.word("::"); self.print_ident(item_segment.ident); self.print_generic_args(item_segment.generic_args(), @@ -1618,7 +1622,7 @@ impl<'a> State<'a> { self.ann.pre(self, AnnNode::Pat(pat)); // Pat isn't normalized, but the beauty of it // is that it doesn't matter - match pat.node { + match pat.kind { PatKind::Wild => self.s.word("_"), PatKind::Binding(binding_mode, _, ident, ref sub) => { match binding_mode { @@ -1711,7 +1715,7 @@ impl<'a> State<'a> { self.pclose(); } PatKind::Box(ref inner) => { - let is_range_inner = match inner.node { + let is_range_inner = match inner.kind { PatKind::Range(..) => true, _ => false, }; @@ -1725,7 +1729,7 @@ impl<'a> State<'a> { } } PatKind::Ref(ref inner, mutbl) => { - let is_range_inner = match inner.node { + let is_range_inner = match inner.kind { PatKind::Range(..) => true, _ => false, }; @@ -1758,7 +1762,7 @@ impl<'a> State<'a> { if !before.is_empty() { self.word_space(","); } - if let PatKind::Wild = p.node { + if let PatKind::Wild = p.kind { // Print nothing. 
} else { self.print_pat(&p); @@ -1790,16 +1794,7 @@ impl<'a> State<'a> { self.ann.pre(self, AnnNode::Arm(arm)); self.ibox(0); self.print_outer_attributes(&arm.attrs); - let mut first = true; - for p in &arm.pats { - if first { - first = false; - } else { - self.s.space(); - self.word_space("|"); - } - self.print_pat(&p); - } + self.print_pat(&arm.pat); self.s.space(); if let Some(ref g) = arm.guard { match g { @@ -1812,7 +1807,7 @@ impl<'a> State<'a> { } self.word_space("=>"); - match arm.body.node { + match arm.body.kind { hir::ExprKind::Block(ref blk, opt_label) => { if let Some(label) = opt_label { self.print_ident(label.ident); @@ -1890,7 +1885,7 @@ impl<'a> State<'a> { s.ann.nested(s, Nested::BodyParamPat(body_id, i)); i += 1; - if let hir::TyKind::Infer = ty.node { + if let hir::TyKind::Infer = ty.kind { // Print nothing. } else { s.s.word(":"); @@ -2231,7 +2226,7 @@ impl<'a> State<'a> { // // Duplicated from `parse::classify`, but adapted for the HIR. fn expr_requires_semi_to_be_stmt(e: &hir::Expr) -> bool { - match e.node { + match e.kind { hir::ExprKind::Match(..) | hir::ExprKind::Block(..) | hir::ExprKind::Loop(..) => false, @@ -2282,7 +2277,7 @@ fn bin_op_to_assoc_op(op: hir::BinOpKind) -> AssocOp { /// parens or other delimiters, e.g., `X { y: 1 }`, `X { y: 1 }.method()`, `foo == X { y: 1 }` and /// `X { y: 1 } == foo` all do, but `(X { y: 1 }) == foo` does not. fn contains_exterior_struct_lit(value: &hir::Expr) -> bool { - match value.node { + match value.kind { hir::ExprKind::Struct(..) => true, hir::ExprKind::Assign(ref lhs, ref rhs) | diff --git a/src/librustc/hir/ptr.rs b/src/librustc/hir/ptr.rs index 1976b4c9e5..7ee461a859 100644 --- a/src/librustc/hir/ptr.rs +++ b/src/librustc/hir/ptr.rs @@ -9,10 +9,9 @@ use std::{slice, vec}; use rustc_serialize::{Encodable, Decodable, Encoder, Decoder}; -use rustc_data_structures::stable_hasher::{StableHasher, StableHasherResult, - HashStable}; +use rustc_data_structures::stable_hasher::{StableHasher, HashStable}; /// An owned smart pointer. -#[derive(Hash, PartialEq, Eq)] +#[derive(PartialEq, Eq)] pub struct P { ptr: Box } @@ -133,9 +132,7 @@ impl Decodable for P<[T]> { impl HashStable for P where T: ?Sized + HashStable { - fn hash_stable(&self, - hcx: &mut CTX, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) { (**self).hash_stable(hcx, hasher); } } diff --git a/src/librustc/hir/upvars.rs b/src/librustc/hir/upvars.rs index cc532cb064..5c5f7f6120 100644 --- a/src/librustc/hir/upvars.rs +++ b/src/librustc/hir/upvars.rs @@ -47,7 +47,7 @@ impl Visitor<'tcx> for LocalCollector { } fn visit_pat(&mut self, pat: &'tcx hir::Pat) { - if let hir::PatKind::Binding(_, hir_id, ..) = pat.node { + if let hir::PatKind::Binding(_, hir_id, ..) = pat.kind { self.locals.insert(hir_id); } intravisit::walk_pat(self, pat); @@ -82,7 +82,7 @@ impl Visitor<'tcx> for CaptureCollector<'a, 'tcx> { } fn visit_expr(&mut self, expr: &'tcx hir::Expr) { - if let hir::ExprKind::Closure(..) = expr.node { + if let hir::ExprKind::Closure(..) 
= expr.kind { let closure_def_id = self.tcx.hir().local_def_id(expr.hir_id); if let Some(upvars) = self.tcx.upvars(closure_def_id) { // Every capture of a closure expression is a local in scope, diff --git a/src/librustc/ich/hcx.rs b/src/librustc/ich/hcx.rs index 182a9ade8c..957dab3941 100644 --- a/src/librustc/ich/hcx.rs +++ b/src/librustc/ich/hcx.rs @@ -13,14 +13,13 @@ use std::cell::RefCell; use syntax::ast; use syntax::source_map::SourceMap; -use syntax::ext::hygiene::SyntaxContext; use syntax::symbol::Symbol; use syntax::tokenstream::DelimSpan; use syntax_pos::{Span, DUMMY_SP}; -use syntax_pos::hygiene; +use syntax_pos::hygiene::{self, SyntaxContext}; use rustc_data_structures::stable_hasher::{ - HashStable, StableHasher, StableHasherResult, ToStableHashKey, + HashStable, StableHasher, ToStableHashKey, }; use rustc_data_structures::fx::{FxHashSet, FxHashMap}; use smallvec::SmallVec; @@ -219,9 +218,7 @@ impl<'a> StableHashingContextProvider<'a> for StableHashingContext<'a> { impl<'a> crate::dep_graph::DepGraphSafe for StableHashingContext<'a> {} impl<'a> HashStable> for hir::BodyId { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { if hcx.hash_bodies() { hcx.body_resolver.body(*self).hash_stable(hcx, hasher); } @@ -230,9 +227,7 @@ impl<'a> HashStable> for hir::BodyId { impl<'a> HashStable> for hir::HirId { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { match hcx.node_id_hashing_mode { NodeIdHashingMode::Ignore => { // Don't do anything. @@ -263,9 +258,7 @@ impl<'a> ToStableHashKey> for hir::HirId { } impl<'a> HashStable> for ast::NodeId { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { match hcx.node_id_hashing_mode { NodeIdHashingMode::Ignore => { // Don't do anything. @@ -298,9 +291,7 @@ impl<'a> HashStable> for Span { /// codepoint offsets. For the purpose of the hash that's sufficient. /// Also, hashing filenames is expensive so we avoid doing it twice when the /// span starts and ends in the same file, which is almost always the case. 
- fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { const TAG_VALID_SPAN: u8 = 0; const TAG_INVALID_SPAN: u8 = 1; const TAG_EXPANSION: u8 = 0; @@ -379,24 +370,18 @@ impl<'a> HashStable> for Span { } impl<'a> HashStable> for DelimSpan { - fn hash_stable( - &self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher, - ) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { self.open.hash_stable(hcx, hasher); self.close.hash_stable(hcx, hasher); } } -pub fn hash_stable_trait_impls<'a, W>( +pub fn hash_stable_trait_impls<'a>( hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher, + hasher: &mut StableHasher, blanket_impls: &[DefId], non_blanket_impls: &FxHashMap>, -) where - W: StableHasherResult, -{ +) { { let mut blanket_impls: SmallVec<[_; 8]> = blanket_impls .iter() diff --git a/src/librustc/ich/impls_hir.rs b/src/librustc/ich/impls_hir.rs index 6e6492d042..c0255e5b8a 100644 --- a/src/librustc/ich/impls_hir.rs +++ b/src/librustc/ich/impls_hir.rs @@ -6,9 +6,7 @@ use crate::hir::map::DefPathHash; use crate::hir::def_id::{DefId, LocalDefId, CrateNum, CRATE_DEF_INDEX}; use crate::ich::{StableHashingContext, NodeIdHashingMode, Fingerprint}; -use rustc_data_structures::stable_hasher::{ - HashStable, ToStableHashKey, StableHasher, StableHasherResult, -}; +use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey, StableHasher}; use smallvec::SmallVec; use std::mem; use syntax::ast; @@ -16,9 +14,7 @@ use syntax::attr; impl<'a> HashStable> for DefId { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { hcx.def_path_hash(*self).hash_stable(hcx, hasher); } } @@ -34,9 +30,7 @@ impl<'a> ToStableHashKey> for DefId { impl<'a> HashStable> for LocalDefId { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { hcx.def_path_hash(self.to_def_id()).hash_stable(hcx, hasher); } } @@ -52,9 +46,7 @@ impl<'a> ToStableHashKey> for LocalDefId { impl<'a> HashStable> for CrateNum { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { hcx.def_path_hash(DefId { krate: *self, index: CRATE_DEF_INDEX @@ -92,9 +84,7 @@ for hir::ItemLocalId { // in "DefPath Mode". 
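These impls all follow the same shape once `StableHasher` is no longer generic over its result type; a minimal sketch for a hypothetical local newtype (`ItemCount` is not part of this patch), assuming `StableHashingContext` and `StableHasher` are in scope as in this module:

struct ItemCount(u64);

impl<'a> HashStable<StableHashingContext<'a>> for ItemCount {
    #[inline]
    fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
        // Delegate to the integer's `HashStable` impl; the context is threaded through unchanged.
        self.0.hash_stable(hcx, hasher);
    }
}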
impl<'a> HashStable> for hir::ItemId { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { let hir::ItemId { id } = *self; @@ -106,9 +96,7 @@ impl<'a> HashStable> for hir::ItemId { } impl<'a> HashStable> for hir::TraitItemId { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { let hir::TraitItemId { hir_id } = * self; @@ -120,9 +108,7 @@ impl<'a> HashStable> for hir::TraitItemId { } impl<'a> HashStable> for hir::ImplItemId { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { let hir::ImplItemId { hir_id } = * self; @@ -138,17 +124,15 @@ impl_stable_hash_for!(struct ast::Label { }); impl<'a> HashStable> for hir::Ty { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { hcx.while_hashing_hir_bodies(true, |hcx| { let hir::Ty { hir_id: _, - ref node, + ref kind, ref span, } = *self; - node.hash_stable(hcx, hasher); + kind.hash_stable(hcx, hasher); span.hash_stable(hcx, hasher); }) } @@ -158,7 +142,7 @@ impl_stable_hash_for_spanned!(hir::BinOpKind); impl_stable_hash_for!(struct hir::Stmt { hir_id, - node, + kind, span, }); @@ -166,19 +150,17 @@ impl_stable_hash_for!(struct hir::Stmt { impl_stable_hash_for_spanned!(ast::Name); impl<'a> HashStable> for hir::Expr { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { hcx.while_hashing_hir_bodies(true, |hcx| { let hir::Expr { hir_id: _, ref span, - ref node, + ref kind, ref attrs } = *self; span.hash_stable(hcx, hasher); - node.hash_stable(hcx, hasher); + kind.hash_stable(hcx, hasher); attrs.hash_stable(hcx, hasher); }) } @@ -192,15 +174,13 @@ impl_stable_hash_for!(struct ast::Ident { }); impl<'a> HashStable> for hir::TraitItem { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { let hir::TraitItem { hir_id: _, ident, ref attrs, ref generics, - ref node, + ref kind, span } = *self; @@ -208,7 +188,7 @@ impl<'a> HashStable> for hir::TraitItem { ident.name.hash_stable(hcx, hasher); attrs.hash_stable(hcx, hasher); generics.hash_stable(hcx, hasher); - node.hash_stable(hcx, hasher); + kind.hash_stable(hcx, hasher); span.hash_stable(hcx, hasher); }); } @@ -216,9 +196,7 @@ impl<'a> HashStable> for hir::TraitItem { impl<'a> HashStable> for hir::ImplItem { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { let hir::ImplItem { hir_id: _, ident, @@ -226,7 +204,7 @@ impl<'a> HashStable> for hir::ImplItem { defaultness, ref attrs, ref generics, - ref node, + ref kind, span } = *self; @@ -236,7 +214,7 @@ impl<'a> HashStable> for hir::ImplItem { defaultness.hash_stable(hcx, hasher); attrs.hash_stable(hcx, hasher); generics.hash_stable(hcx, hasher); - node.hash_stable(hcx, hasher); + kind.hash_stable(hcx, hasher); span.hash_stable(hcx, hasher); }); } @@ -248,9 +226,7 @@ 
impl_stable_hash_for!(enum ast::CrateSugar { }); impl<'a> HashStable> for hir::VisibilityKind { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { hir::VisibilityKind::Public | @@ -273,9 +249,7 @@ impl<'a> HashStable> for hir::VisibilityKind { impl_stable_hash_for_spanned!(hir::VisibilityKind); impl<'a> HashStable> for hir::Mod { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { let hir::Mod { inner: ref inner_span, ref item_ids, @@ -305,14 +279,12 @@ impl_stable_hash_for_spanned!(hir::Variant); impl<'a> HashStable> for hir::Item { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { let hir::Item { ident, ref attrs, hir_id: _, - ref node, + ref kind, ref vis, span } = *self; @@ -320,7 +292,7 @@ impl<'a> HashStable> for hir::Item { hcx.hash_hir_item_like(|hcx| { ident.name.hash_stable(hcx, hasher); attrs.hash_stable(hcx, hasher); - node.hash_stable(hcx, hasher); + kind.hash_stable(hcx, hasher); vis.hash_stable(hcx, hasher); span.hash_stable(hcx, hasher); }); @@ -328,9 +300,7 @@ impl<'a> HashStable> for hir::Item { } impl<'a> HashStable> for hir::Body { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { let hir::Body { params, value, @@ -359,9 +329,7 @@ impl<'a> ToStableHashKey> for hir::BodyId { impl<'a> HashStable> for hir::def_id::DefIndex { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { hcx.local_def_path_hash(*self).hash_stable(hcx, hasher); } } @@ -376,17 +344,13 @@ impl<'a> ToStableHashKey> for hir::def_id::DefIndex { } impl<'a> HashStable> for crate::middle::lang_items::LangItem { - fn hash_stable(&self, - _: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, _: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { ::std::hash::Hash::hash(self, hasher); } } impl<'a> HashStable> for hir::TraitCandidate { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| { let hir::TraitCandidate { def_id, @@ -418,17 +382,13 @@ impl<'a> ToStableHashKey> for hir::TraitCandidate { } impl<'hir> HashStable> for attr::InlineAttr { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'hir>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'hir>, hasher: &mut StableHasher) { mem::discriminant(self).hash_stable(hcx, hasher); } } impl<'hir> HashStable> for attr::OptimizeAttr { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'hir>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'hir>, hasher: &mut StableHasher) { mem::discriminant(self).hash_stable(hcx, hasher); } } diff --git a/src/librustc/ich/impls_syntax.rs b/src/librustc/ich/impls_syntax.rs index a33181e592..4fd4e25f64 
100644 --- a/src/librustc/ich/impls_syntax.rs +++ b/src/librustc/ich/impls_syntax.rs @@ -9,53 +9,49 @@ use std::mem; use syntax::ast; use syntax::feature_gate; use syntax::parse::token; -use syntax::symbol::InternedString; +use syntax::symbol::LocalInternedString; use syntax::tokenstream; use syntax_pos::SourceFile; use crate::hir::def_id::{DefId, CrateNum, CRATE_DEF_INDEX}; use smallvec::SmallVec; -use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey, - StableHasher, StableHasherResult}; +use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey, StableHasher}; -impl<'a> HashStable> for InternedString { +impl<'a> HashStable> for LocalInternedString { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { - self.with(|s| s.hash_stable(hcx, hasher)) + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { + let str = self as &str; + str.hash_stable(hcx, hasher) } } -impl<'a> ToStableHashKey> for InternedString { - type KeyType = InternedString; +impl<'a> ToStableHashKey> for LocalInternedString { + type KeyType = LocalInternedString; #[inline] fn to_stable_hash_key(&self, _: &StableHashingContext<'a>) - -> InternedString { + -> LocalInternedString { self.clone() } } impl<'a> HashStable> for ast::Name { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { self.as_str().hash_stable(hcx, hasher); } } impl<'a> ToStableHashKey> for ast::Name { - type KeyType = InternedString; + type KeyType = LocalInternedString; #[inline] fn to_stable_hash_key(&self, _: &StableHashingContext<'a>) - -> InternedString { - self.as_interned_str() + -> LocalInternedString { + self.as_str() } } @@ -64,7 +60,7 @@ impl_stable_hash_for!(enum ::syntax::ast::AsmDialect { Intel }); -impl_stable_hash_for!(enum ::syntax::ext::base::MacroKind { +impl_stable_hash_for!(enum ::syntax_pos::hygiene::MacroKind { Bang, Attr, Derive, @@ -84,6 +80,7 @@ impl_stable_hash_for!(enum ::rustc_target::spec::abi::Abi { Msp430Interrupt, X86Interrupt, AmdGpuKernel, + EfiApi, Rust, C, System, @@ -110,9 +107,7 @@ impl_stable_hash_for!(enum ::syntax::edition::Edition { impl<'a> HashStable> for ::syntax::attr::StabilityLevel { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { ::syntax::attr::StabilityLevel::Unstable { ref reason, ref issue, ref is_soft } => { @@ -142,7 +137,7 @@ impl_stable_hash_for!(enum ::syntax::ast::LitIntType { }); impl_stable_hash_for!(struct ::syntax::ast::Lit { - node, + kind, token, span }); @@ -172,9 +167,7 @@ impl_stable_hash_for!(enum ::syntax::ast::StrStyle { Cooked, Raw(pounds) }); impl_stable_hash_for!(enum ::syntax::ast::AttrStyle { Outer, Inner }); impl<'a> HashStable> for [ast::Attribute] { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { if self.len() == 0 { self.len().hash_stable(hcx, hasher); return @@ -197,9 +190,7 @@ impl<'a> HashStable> for [ast::Attribute] { } impl<'a> HashStable> for ast::Path { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut 
StableHashingContext<'a>, hasher: &mut StableHasher) { self.segments.len().hash_stable(hcx, hasher); for segment in &self.segments { segment.ident.name.hash_stable(hcx, hasher); @@ -207,37 +198,34 @@ impl<'a> HashStable> for ast::Path { } } +impl_stable_hash_for!(struct ::syntax::ast::AttrItem { + path, + tokens, +}); + impl<'a> HashStable> for ast::Attribute { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { // Make sure that these have been filtered out. debug_assert!(!self.ident().map_or(false, |ident| hcx.is_ignored_attr(ident.name))); debug_assert!(!self.is_sugared_doc); let ast::Attribute { + ref item, id: _, style, - ref path, - ref tokens, is_sugared_doc: _, span, } = *self; + item.hash_stable(hcx, hasher); style.hash_stable(hcx, hasher); - path.hash_stable(hcx, hasher); - for tt in tokens.trees() { - tt.hash_stable(hcx, hasher); - } span.hash_stable(hcx, hasher); } } impl<'a> HashStable> for tokenstream::TokenTree { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { tokenstream::TokenTree::Token(ref token) => { @@ -256,9 +244,7 @@ for tokenstream::TokenTree { impl<'a> HashStable> for tokenstream::TokenStream { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { for sub_tt in self.trees() { sub_tt.hash_stable(hcx, hasher); } @@ -285,9 +271,7 @@ impl_stable_hash_for!(struct token::Lit { }); impl<'a> HashStable> for token::TokenKind { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { token::Eq | @@ -361,7 +345,7 @@ impl_stable_hash_for!(enum ::syntax::ast::NestedMetaItem { impl_stable_hash_for!(struct ::syntax::ast::MetaItem { path, - node, + kind, span }); @@ -426,9 +410,7 @@ impl_stable_hash_for!(enum ::syntax_pos::FileName { }); impl<'a> HashStable> for SourceFile { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { let SourceFile { name: _, // We hash the smaller name_hash instead of this name_hash, @@ -444,6 +426,7 @@ impl<'a> HashStable> for SourceFile { ref lines, ref multibyte_chars, ref non_narrow_chars, + ref normalized_pos, } = *self; (name_hash as u64).hash_stable(hcx, hasher); @@ -472,6 +455,12 @@ impl<'a> HashStable> for SourceFile { for &char_pos in non_narrow_chars.iter() { stable_non_narrow_char(char_pos, start_pos).hash_stable(hcx, hasher); } + + normalized_pos.len().hash_stable(hcx, hasher); + for &char_pos in normalized_pos.iter() { + stable_normalized_pos(char_pos, start_pos).hash_stable(hcx, hasher); + } + } } @@ -501,12 +490,20 @@ fn stable_non_narrow_char(swc: ::syntax_pos::NonNarrowChar, (pos.0 - source_file_start.0, width as u32) } +fn stable_normalized_pos(np: ::syntax_pos::NormalizedPos, + source_file_start: ::syntax_pos::BytePos) + -> (u32, u32) { + let ::syntax_pos::NormalizedPos { + pos, + diff + } = np; + + (pos.0 - source_file_start.0, diff) +} + + impl<'tcx> HashStable> for 
feature_gate::Features { - fn hash_stable( - &self, - hcx: &mut StableHashingContext<'tcx>, - hasher: &mut StableHasher, - ) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'tcx>, hasher: &mut StableHasher) { // Unfortunately we cannot exhaustively list fields here, since the // struct is macro generated. self.declared_lang_features.hash_stable(hcx, hasher); diff --git a/src/librustc/ich/impls_ty.rs b/src/librustc/ich/impls_ty.rs index f230c53728..c643baf112 100644 --- a/src/librustc/ich/impls_ty.rs +++ b/src/librustc/ich/impls_ty.rs @@ -3,8 +3,7 @@ use crate::ich::{Fingerprint, StableHashingContext, NodeIdHashingMode}; use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey, - StableHasher, StableHasherResult}; +use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey, StableHasher}; use std::cell::RefCell; use std::mem; use crate::middle::region; @@ -15,9 +14,7 @@ impl<'a, 'tcx, T> HashStable> for &'tcx ty::List where T: HashStable>, { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { thread_local! { static CACHE: RefCell> = RefCell::new(Default::default()); @@ -56,19 +53,15 @@ where } } -impl<'a, 'tcx> HashStable> for ty::subst::Kind<'tcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { +impl<'a, 'tcx> HashStable> for ty::subst::GenericArg<'tcx> { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { self.unpack().hash_stable(hcx, hasher); } } impl<'a> HashStable> for ty::RegionKind { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { ty::ReErased | @@ -112,31 +105,21 @@ for ty::RegionKind { impl<'a> HashStable> for ty::RegionVid { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { self.index().hash_stable(hcx, hasher); } } impl<'a, 'tcx> HashStable> for ty::ConstVid<'tcx> { #[inline] - fn hash_stable( - &self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher, - ) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { self.index.hash_stable(hcx, hasher); } } impl<'tcx> HashStable> for ty::BoundVar { #[inline] - fn hash_stable( - &self, - hcx: &mut StableHashingContext<'tcx>, - hasher: &mut StableHasher, - ) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'tcx>, hasher: &mut StableHasher) { self.index().hash_stable(hcx, hasher); } } @@ -145,20 +128,14 @@ impl<'a, T> HashStable> for ty::Binder where T: HashStable>, { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { self.skip_binder().hash_stable(hcx, hasher); } } // AllocIds get resolved to whatever they point to (to be stable) impl<'a> HashStable> for mir::interpret::AllocId { - fn hash_stable( - &self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher, - ) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { ty::tls::with_opt(|tcx| { trace!("hashing {:?}", *self); let tcx = 
tcx.expect("can't hash AllocIds during hir lowering"); @@ -174,11 +151,7 @@ for mir::interpret::Relocations where Tag: HashStable>, { - fn hash_stable( - &self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher, - ) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { self.len().hash_stable(hcx, hasher); for reloc in self.iter() { reloc.hash_stable(hcx, hasher); @@ -201,9 +174,7 @@ impl<'a> ToStableHashKey> for region::Scope { } impl<'a> HashStable> for ty::TyVid { - fn hash_stable(&self, - _hcx: &mut StableHashingContext<'a>, - _hasher: &mut StableHasher) { + fn hash_stable(&self, _hcx: &mut StableHashingContext<'a>, _hasher: &mut StableHasher) { // `TyVid` values are confined to an inference context and hence // should not be hashed. bug!("ty::TyKind::hash_stable() - can't hash a TyVid {:?}.", *self) @@ -211,9 +182,7 @@ impl<'a> HashStable> for ty::TyVid { } impl<'a> HashStable> for ty::IntVid { - fn hash_stable(&self, - _hcx: &mut StableHashingContext<'a>, - _hasher: &mut StableHasher) { + fn hash_stable(&self, _hcx: &mut StableHashingContext<'a>, _hasher: &mut StableHasher) { // `IntVid` values are confined to an inference context and hence // should not be hashed. bug!("ty::TyKind::hash_stable() - can't hash an IntVid {:?}.", *self) @@ -221,9 +190,7 @@ impl<'a> HashStable> for ty::IntVid { } impl<'a> HashStable> for ty::FloatVid { - fn hash_stable(&self, - _hcx: &mut StableHashingContext<'a>, - _hasher: &mut StableHasher) { + fn hash_stable(&self, _hcx: &mut StableHashingContext<'a>, _hasher: &mut StableHasher) { // `FloatVid` values are confined to an inference context and hence // should not be hashed. bug!("ty::TyKind::hash_stable() - can't hash a FloatVid {:?}.", *self) @@ -234,18 +201,14 @@ impl<'a, T> HashStable> for ty::steal::Steal where T: HashStable>, { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { self.borrow().hash_stable(hcx, hasher); } } impl<'a> HashStable> for crate::middle::privacy::AccessLevels { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| { let crate::middle::privacy::AccessLevels { ref map diff --git a/src/librustc/infer/canonical/canonicalizer.rs b/src/librustc/infer/canonical/canonicalizer.rs index db724875b8..49a2c90bdb 100644 --- a/src/librustc/infer/canonical/canonicalizer.rs +++ b/src/librustc/infer/canonical/canonicalizer.rs @@ -13,12 +13,12 @@ use crate::infer::InferCtxt; use crate::mir::interpret::ConstValue; use std::sync::atomic::Ordering; use crate::ty::fold::{TypeFoldable, TypeFolder}; -use crate::ty::subst::Kind; +use crate::ty::subst::GenericArg; use crate::ty::{self, BoundVar, InferConst, List, Ty, TyCtxt, TypeFlags}; use crate::ty::flags::FlagComputation; use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::indexed_vec::Idx; +use rustc_index::vec::Idx; use smallvec::SmallVec; impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> { @@ -282,7 +282,7 @@ struct Canonicalizer<'cx, 'tcx> { query_state: &'cx mut OriginalQueryValues<'tcx>, // Note that indices is only used once `var_values` is big enough to be // heap-allocated. 
- indices: FxHashMap, BoundVar>, + indices: FxHashMap, BoundVar>, canonicalize_region_mode: &'cx dyn CanonicalizeRegionMode, needs_canonical_flags: TypeFlags, @@ -343,7 +343,7 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for Canonicalizer<'cx, 'tcx> { } fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> { - match t.sty { + match t.kind { ty::Infer(ty::TyVar(vid)) => { debug!("canonical: type var found with vid {:?}", vid); match self.infcx.unwrap().probe_ty_var(vid) { @@ -468,7 +468,7 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for Canonicalizer<'cx, 'tcx> { ConstValue::Infer(InferConst::Fresh(_)) => { bug!("encountered a fresh const during canonicalization") } - ConstValue::Infer(InferConst::Canonical(debruijn, _)) => { + ConstValue::Bound(debruijn, _) => { if debruijn >= self.binder_index { bug!("escaping bound type during canonicalization") } else { @@ -566,7 +566,7 @@ impl<'cx, 'tcx> Canonicalizer<'cx, 'tcx> { /// or returns an existing variable if `kind` has already been /// seen. `kind` is expected to be an unbound variable (or /// potentially a free region). - fn canonical_var(&mut self, info: CanonicalVarInfo, kind: Kind<'tcx>) -> BoundVar { + fn canonical_var(&mut self, info: CanonicalVarInfo, kind: GenericArg<'tcx>) -> BoundVar { let Canonicalizer { variables, query_state, @@ -700,8 +700,8 @@ impl<'cx, 'tcx> Canonicalizer<'cx, 'tcx> { let var = self.canonical_var(info, const_var.into()); self.tcx().mk_const( ty::Const { - val: ConstValue::Infer(InferConst::Canonical(self.binder_index, var.into())), - ty: const_var.ty, + val: ConstValue::Bound(self.binder_index, var.into()), + ty: self.fold_ty(const_var.ty), } ) } diff --git a/src/librustc/infer/canonical/mod.rs b/src/librustc/infer/canonical/mod.rs index 6840611d4b..d833feeeb0 100644 --- a/src/librustc/infer/canonical/mod.rs +++ b/src/librustc/infer/canonical/mod.rs @@ -25,15 +25,15 @@ use crate::infer::{InferCtxt, RegionVariableOrigin, TypeVariableOrigin, TypeVari use crate::infer::{ConstVariableOrigin, ConstVariableOriginKind}; use crate::infer::region_constraints::MemberConstraint; use crate::mir::interpret::ConstValue; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc_index::vec::IndexVec; use rustc_macros::HashStable; use rustc_serialize::UseSpecializedDecodable; use smallvec::SmallVec; use std::ops::Index; use syntax::source_map::Span; use crate::ty::fold::TypeFoldable; -use crate::ty::subst::Kind; -use crate::ty::{self, BoundVar, InferConst, Lift, List, Region, TyCtxt}; +use crate::ty::subst::GenericArg; +use crate::ty::{self, BoundVar, Lift, List, Region, TyCtxt}; mod canonicalizer; @@ -66,14 +66,14 @@ impl<'tcx> UseSpecializedDecodable for CanonicalVarInfos<'tcx> {} /// canonicalized query response. #[derive(Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable, HashStable)] pub struct CanonicalVarValues<'tcx> { - pub var_values: IndexVec>, + pub var_values: IndexVec>, } /// When we canonicalize a value to form a query, we wind up replacing /// various parts of it with canonical variables. This struct stores /// those replaced bits to remember for when we process the query /// result. -#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)] +#[derive(Clone, Debug)] pub struct OriginalQueryValues<'tcx> { /// Map from the universes that appear in the query to the /// universes in the caller context. For the time being, we only @@ -83,7 +83,7 @@ pub struct OriginalQueryValues<'tcx> { /// This is equivalent to `CanonicalVarValues`, but using a /// `SmallVec` yields a significant performance win. 
- pub var_values: SmallVec<[Kind<'tcx>; 8]>, + pub var_values: SmallVec<[GenericArg<'tcx>; 8]>, } impl Default for OriginalQueryValues<'tcx> { @@ -308,7 +308,7 @@ impl<'tcx, V> Canonical<'tcx, V> { } pub type QueryOutlivesConstraint<'tcx> = - ty::Binder, Region<'tcx>>>; + ty::Binder, Region<'tcx>>>; impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> { /// Creates a substitution S for the canonical value with fresh @@ -359,7 +359,7 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> { variables: &List, universe_map: impl Fn(ty::UniverseIndex) -> ty::UniverseIndex, ) -> CanonicalVarValues<'tcx> { - let var_values: IndexVec> = variables + let var_values: IndexVec> = variables .iter() .map(|info| self.instantiate_canonical_var(span, *info, &universe_map)) .collect(); @@ -376,7 +376,7 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> { span: Span, cv_info: CanonicalVarInfo, universe_map: impl Fn(ty::UniverseIndex) -> ty::UniverseIndex, - ) -> Kind<'tcx> { + ) -> GenericArg<'tcx> { match cv_info.kind { CanonicalVarKind::Ty(ty_kind) => { let ty = match ty_kind { @@ -495,24 +495,22 @@ impl<'tcx> CanonicalVarValues<'tcx> { /// we'll return a substitution `subst` with: /// `subst.var_values == [Type(^0), Lifetime(^1), Type(^2)]`. pub fn make_identity(&self, tcx: TyCtxt<'tcx>) -> Self { - use crate::ty::subst::UnpackedKind; + use crate::ty::subst::GenericArgKind; CanonicalVarValues { var_values: self.var_values.iter() .zip(0..) .map(|(kind, i)| match kind.unpack() { - UnpackedKind::Type(..) => tcx.mk_ty( + GenericArgKind::Type(..) => tcx.mk_ty( ty::Bound(ty::INNERMOST, ty::BoundVar::from_u32(i).into()) ).into(), - UnpackedKind::Lifetime(..) => tcx.mk_region( + GenericArgKind::Lifetime(..) => tcx.mk_region( ty::ReLateBound(ty::INNERMOST, ty::BoundRegion::BrAnon(i)) ).into(), - UnpackedKind::Const(ct) => { + GenericArgKind::Const(ct) => { tcx.mk_const(ty::Const { ty: ct.ty, - val: ConstValue::Infer( - InferConst::Canonical(ty::INNERMOST, ty::BoundVar::from_u32(i)) - ), + val: ConstValue::Bound(ty::INNERMOST, ty::BoundVar::from_u32(i)), }).into() } }) @@ -522,8 +520,8 @@ impl<'tcx> CanonicalVarValues<'tcx> { } impl<'a, 'tcx> IntoIterator for &'a CanonicalVarValues<'tcx> { - type Item = Kind<'tcx>; - type IntoIter = ::std::iter::Cloned<::std::slice::Iter<'a, Kind<'tcx>>>; + type Item = GenericArg<'tcx>; + type IntoIter = ::std::iter::Cloned<::std::slice::Iter<'a, GenericArg<'tcx>>>; fn into_iter(self) -> Self::IntoIter { self.var_values.iter().cloned() @@ -570,9 +568,9 @@ BraceStructLiftImpl! 
{ } impl<'tcx> Index for CanonicalVarValues<'tcx> { - type Output = Kind<'tcx>; + type Output = GenericArg<'tcx>; - fn index(&self, value: BoundVar) -> &Kind<'tcx> { + fn index(&self, value: BoundVar) -> &GenericArg<'tcx> { &self.var_values[value] } } diff --git a/src/librustc/infer/canonical/query_response.rs b/src/librustc/infer/canonical/query_response.rs index 79c5538626..7ad6006012 100644 --- a/src/librustc/infer/canonical/query_response.rs +++ b/src/librustc/infer/canonical/query_response.rs @@ -17,16 +17,16 @@ use crate::infer::region_constraints::{Constraint, RegionConstraintData}; use crate::infer::InferCtxtBuilder; use crate::infer::{InferCtxt, InferOk, InferResult}; use crate::mir::interpret::ConstValue; -use rustc_data_structures::indexed_vec::Idx; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc_index::vec::Idx; +use rustc_index::vec::IndexVec; use std::fmt::Debug; use syntax_pos::DUMMY_SP; use crate::traits::query::{Fallible, NoSolution}; use crate::traits::TraitEngine; use crate::traits::{Obligation, ObligationCause, PredicateObligation}; use crate::ty::fold::TypeFoldable; -use crate::ty::subst::{Kind, UnpackedKind}; -use crate::ty::{self, BoundVar, InferConst, Ty, TyCtxt}; +use crate::ty::subst::{GenericArg, GenericArgKind}; +use crate::ty::{self, BoundVar, Ty, TyCtxt}; use crate::util::captures::Captures; impl<'tcx> InferCtxtBuilder<'tcx> { @@ -298,11 +298,14 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> { &v.var_values[BoundVar::new(index)] }); match (original_value.unpack(), result_value.unpack()) { - (UnpackedKind::Lifetime(ty::ReErased), UnpackedKind::Lifetime(ty::ReErased)) => { - // no action needed + ( + GenericArgKind::Lifetime(ty::ReErased), + GenericArgKind::Lifetime(ty::ReErased), + ) => { + // No action needed. } - (UnpackedKind::Lifetime(v_o), UnpackedKind::Lifetime(v_r)) => { + (GenericArgKind::Lifetime(v_o), GenericArgKind::Lifetime(v_r)) => { // To make `v_o = v_r`, we emit `v_o: v_r` and `v_r: v_o`. if v_o != v_r { output_query_region_constraints @@ -314,12 +317,12 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> { } } - (UnpackedKind::Type(v1), UnpackedKind::Type(v2)) => { + (GenericArgKind::Type(v1), GenericArgKind::Type(v2)) => { let ok = self.at(cause, param_env).eq(v1, v2)?; obligations.extend(ok.into_obligations()); } - (UnpackedKind::Const(v1), UnpackedKind::Const(v2)) => { + (GenericArgKind::Const(v1), GenericArgKind::Const(v2)) => { let ok = self.at(cause, param_env).eq(v1, v2)?; obligations.extend(ok.into_obligations()); } @@ -462,16 +465,16 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> { // is directly equal to one of the canonical variables in the // result, then we can type the corresponding value from the // input. See the example above. - let mut opt_values: IndexVec>> = + let mut opt_values: IndexVec>> = IndexVec::from_elem_n(None, query_response.variables.len()); // In terms of our example above, we are iterating over pairs like: // [(?A, Vec), ('static, '?1), (?B, ?0)] for (original_value, result_value) in original_values.var_values.iter().zip(result_values) { match result_value.unpack() { - UnpackedKind::Type(result_value) => { + GenericArgKind::Type(result_value) => { // e.g., here `result_value` might be `?0` in the example above... - if let ty::Bound(debruijn, b) = result_value.sty { + if let ty::Bound(debruijn, b) = result_value.kind { // ...in which case we would set `canonical_vars[0]` to `Some(?U)`. // We only allow a `ty::INNERMOST` index in substitutions. 
@@ -479,7 +482,7 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> { opt_values[b.var] = Some(*original_value); } } - UnpackedKind::Lifetime(result_value) => { + GenericArgKind::Lifetime(result_value) => { // e.g., here `result_value` might be `'?1` in the example above... if let &ty::RegionKind::ReLateBound(debruijn, br) = result_value { // ... in which case we would set `canonical_vars[0]` to `Some('static)`. @@ -489,11 +492,8 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> { opt_values[br.assert_bound_var()] = Some(*original_value); } } - UnpackedKind::Const(result_value) => { - if let ty::Const { - val: ConstValue::Infer(InferConst::Canonical(debrujin, b)), - .. - } = result_value { + GenericArgKind::Const(result_value) => { + if let ty::Const { val: ConstValue::Bound(debrujin, b), .. } = result_value { // ...in which case we would set `canonical_vars[0]` to `Some(const X)`. // We only allow a `ty::INNERMOST` index in substitutions. @@ -553,7 +553,7 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> { // canonical variable; this is taken from // `query_response.var_values` after applying the substitution // `result_subst`. - let substituted_query_response = |index: BoundVar| -> Kind<'tcx> { + let substituted_query_response = |index: BoundVar| -> GenericArg<'tcx> { query_response.substitute_projected(self.tcx, &result_subst, |v| &v.var_values[index]) }; @@ -586,17 +586,17 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> { cause.clone(), param_env, match k1.unpack() { - UnpackedKind::Lifetime(r1) => ty::Predicate::RegionOutlives( + GenericArgKind::Lifetime(r1) => ty::Predicate::RegionOutlives( ty::Binder::bind( ty::OutlivesPredicate(r1, r2) ) ), - UnpackedKind::Type(t1) => ty::Predicate::TypeOutlives( + GenericArgKind::Type(t1) => ty::Predicate::TypeOutlives( ty::Binder::bind( ty::OutlivesPredicate(t1, r2) ) ), - UnpackedKind::Const(..) => { + GenericArgKind::Const(..) => { // Consts cannot outlive one another, so we don't expect to // ecounter this branch. 
span_bug!(cause.span, "unexpected const outlives {:?}", constraint); @@ -613,7 +613,7 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> { cause: &ObligationCause<'tcx>, param_env: ty::ParamEnv<'tcx>, variables1: &OriginalQueryValues<'tcx>, - variables2: impl Fn(BoundVar) -> Kind<'tcx>, + variables2: impl Fn(BoundVar) -> GenericArg<'tcx>, ) -> InferResult<'tcx, ()> { self.commit_if_ok(|_| { let mut obligations = vec![]; @@ -621,21 +621,21 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> { let value2 = variables2(BoundVar::new(index)); match (value1.unpack(), value2.unpack()) { - (UnpackedKind::Type(v1), UnpackedKind::Type(v2)) => { + (GenericArgKind::Type(v1), GenericArgKind::Type(v2)) => { obligations .extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations()); } ( - UnpackedKind::Lifetime(ty::ReErased), - UnpackedKind::Lifetime(ty::ReErased), + GenericArgKind::Lifetime(ty::ReErased), + GenericArgKind::Lifetime(ty::ReErased), ) => { // no action needed } - (UnpackedKind::Lifetime(v1), UnpackedKind::Lifetime(v2)) => { + (GenericArgKind::Lifetime(v1), GenericArgKind::Lifetime(v2)) => { obligations .extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations()); } - (UnpackedKind::Const(v1), UnpackedKind::Const(v2)) => { + (GenericArgKind::Const(v1), GenericArgKind::Const(v2)) => { let ok = self.at(cause, param_env).eq(v1, v2)?; obligations.extend(ok.into_obligations()); } diff --git a/src/librustc/infer/canonical/substitute.rs b/src/librustc/infer/canonical/substitute.rs index 1234b96ab1..4f5bb09c91 100644 --- a/src/librustc/infer/canonical/substitute.rs +++ b/src/librustc/infer/canonical/substitute.rs @@ -8,7 +8,7 @@ use crate::infer::canonical::{Canonical, CanonicalVarValues}; use crate::ty::fold::TypeFoldable; -use crate::ty::subst::UnpackedKind; +use crate::ty::subst::GenericArgKind; use crate::ty::{self, TyCtxt}; impl<'tcx, V> Canonical<'tcx, V> { @@ -58,21 +58,21 @@ where } else { let fld_r = |br: ty::BoundRegion| { match var_values.var_values[br.assert_bound_var()].unpack() { - UnpackedKind::Lifetime(l) => l, + GenericArgKind::Lifetime(l) => l, r => bug!("{:?} is a region but value is {:?}", br, r), } }; let fld_t = |bound_ty: ty::BoundTy| { match var_values.var_values[bound_ty.var].unpack() { - UnpackedKind::Type(ty) => ty, + GenericArgKind::Type(ty) => ty, r => bug!("{:?} is a type but value is {:?}", bound_ty, r), } }; let fld_c = |bound_ct: ty::BoundVar, _| { match var_values.var_values[bound_ct].unpack() { - UnpackedKind::Const(ct) => ct, + GenericArgKind::Const(ct) => ct, c => bug!("{:?} is a const but value is {:?}", bound_ct, c), } }; diff --git a/src/librustc/infer/combine.rs b/src/librustc/infer/combine.rs index 966c581017..51ae4e4949 100644 --- a/src/librustc/infer/combine.rs +++ b/src/librustc/infer/combine.rs @@ -53,7 +53,7 @@ pub struct CombineFields<'infcx, 'tcx> { pub obligations: PredicateObligations<'tcx>, } -#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)] +#[derive(Copy, Clone, Debug)] pub enum RelationDir { SubtypeOf, SupertypeOf, EqTo } @@ -70,7 +70,7 @@ impl<'infcx, 'tcx> InferCtxt<'infcx, 'tcx> { { let a_is_expected = relation.a_is_expected(); - match (&a.sty, &b.sty) { + match (&a.kind, &b.kind) { // Relate integral variables to other types (&ty::Infer(ty::IntVar(a_id)), &ty::Infer(ty::IntVar(b_id))) => { self.int_unification_table @@ -486,7 +486,7 @@ impl TypeRelation<'tcx> for Generalizer<'_, 'tcx> { // any other type variable related to `vid` via // subtyping. This is basically our "occurs check", preventing // us from creating infinitely sized types. 
- match t.sty { + match t.kind { ty::Infer(ty::TyVar(vid)) => { let mut variables = self.infcx.type_variables.borrow_mut(); let vid = variables.root_var(vid); @@ -494,7 +494,7 @@ impl TypeRelation<'tcx> for Generalizer<'_, 'tcx> { if sub_vid == self.for_vid_sub_root { // If sub-roots are equal, then `for_vid` and // `vid` are related via subtyping. - return Err(TypeError::CyclicTy(self.root_ty)); + Err(TypeError::CyclicTy(self.root_ty)) } else { match variables.probe(vid) { TypeVariableValue::Known { value: u } => { @@ -527,7 +527,7 @@ impl TypeRelation<'tcx> for Generalizer<'_, 'tcx> { let u = self.tcx().mk_ty_var(new_var_id); debug!("generalize: replacing original vid={:?} with new={:?}", vid, u); - return Ok(u); + Ok(u) } } } @@ -602,19 +602,26 @@ impl TypeRelation<'tcx> for Generalizer<'_, 'tcx> { ) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>> { assert_eq!(c, c2); // we are abusing TypeRelation here; both LHS and RHS ought to be == - match c { - ty::Const { val: ConstValue::Infer(InferConst::Var(vid)), .. } => { + match c.val { + ConstValue::Infer(InferConst::Var(vid)) => { let mut variable_table = self.infcx.const_unification_table.borrow_mut(); - match variable_table.probe_value(*vid).val.known() { - Some(u) => { - self.relate(&u, &u) + let var_value = variable_table.probe_value(vid); + match var_value.val { + ConstVariableValue::Known { value: u } => self.relate(&u, &u), + ConstVariableValue::Unknown { universe } => { + if self.for_universe.can_name(universe) { + Ok(c) + } else { + let new_var_id = variable_table.new_key(ConstVarValue { + origin: var_value.origin, + val: ConstVariableValue::Unknown { universe: self.for_universe }, + }); + Ok(self.tcx().mk_const_var(new_var_id, c.ty)) + } } - None => Ok(c), } } - _ => { - relate::super_relate_consts(self, c, c) - } + _ => relate::super_relate_consts(self, c, c), } } } diff --git a/src/librustc/infer/equate.rs b/src/librustc/infer/equate.rs index 6065387647..aea58acab5 100644 --- a/src/librustc/infer/equate.rs +++ b/src/librustc/infer/equate.rs @@ -68,7 +68,7 @@ impl TypeRelation<'tcx> for Equate<'combine, 'infcx, 'tcx> { debug!("{}.tys: replacements ({:?}, {:?})", self.tag(), a, b); - match (&a.sty, &b.sty) { + match (&a.kind, &b.kind) { (&ty::Infer(TyVar(a_id)), &ty::Infer(TyVar(b_id))) => { infcx.type_variables.borrow_mut().equate(a_id, b_id); } diff --git a/src/librustc/infer/error_reporting/mod.rs b/src/librustc/infer/error_reporting/mod.rs index 0b6740d7bb..498600f1e9 100644 --- a/src/librustc/infer/error_reporting/mod.rs +++ b/src/librustc/infer/error_reporting/mod.rs @@ -90,7 +90,7 @@ impl<'tcx> TyCtxt<'tcx> { let span = scope.span(self, region_scope_tree); let tag = match self.hir().find(scope.hir_id(region_scope_tree)) { Some(Node::Block(_)) => "block", - Some(Node::Expr(expr)) => match expr.node { + Some(Node::Expr(expr)) => match expr.kind { hir::ExprKind::Call(..) => "call", hir::ExprKind::MethodCall(..) => "method call", hir::ExprKind::Match(.., hir::MatchSource::IfLetDesugar { .. 
}) => "if let", @@ -200,7 +200,7 @@ impl<'tcx> TyCtxt<'tcx> { { sp = param.span; } - (format!("the lifetime {} as defined on", br.name), sp) + (format!("the lifetime `{}` as defined on", br.name), sp) } ty::ReFree(ty::FreeRegion { bound_region: ty::BoundRegion::BrNamed(_, name), @@ -213,7 +213,7 @@ impl<'tcx> TyCtxt<'tcx> { { sp = param.span; } - (format!("the lifetime {} as defined on", name), sp) + (format!("the lifetime `{}` as defined on", name), sp) } ty::ReFree(ref fr) => match fr.bound_region { ty::BrAnon(idx) => ( @@ -221,7 +221,7 @@ impl<'tcx> TyCtxt<'tcx> { self.hir().span(node), ), _ => ( - format!("the lifetime {} as defined on", region), + format!("the lifetime `{}` as defined on", region), cm.def_span(self.hir().span(node)), ), }, @@ -248,7 +248,7 @@ impl<'tcx> TyCtxt<'tcx> { } fn item_scope_tag(item: &hir::Item) -> &'static str { - match item.node { + match item.kind { hir::ItemKind::Impl(..) => "impl", hir::ItemKind::Struct(..) => "struct", hir::ItemKind::Union(..) => "union", @@ -260,14 +260,14 @@ impl<'tcx> TyCtxt<'tcx> { } fn trait_item_scope_tag(item: &hir::TraitItem) -> &'static str { - match item.node { + match item.kind { hir::TraitItemKind::Method(..) => "method body", hir::TraitItemKind::Const(..) | hir::TraitItemKind::Type(..) => "associated item", } } fn impl_item_scope_tag(item: &hir::ImplItem) -> &'static str { - match item.node { + match item.kind { hir::ImplItemKind::Method(..) => "method body", hir::ImplItemKind::Const(..) | hir::ImplItemKind::OpaqueTy(..) @@ -464,7 +464,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { use hir::def_id::CrateNum; use hir::map::DisambiguatedDefPathData; use ty::print::Printer; - use ty::subst::Kind; + use ty::subst::GenericArg; struct AbsolutePathPrinter<'tcx> { tcx: TyCtxt<'tcx>, @@ -542,13 +542,13 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { disambiguated_data: &DisambiguatedDefPathData, ) -> Result { let mut path = print_prefix(self)?; - path.push(disambiguated_data.data.as_interned_str().to_string()); + path.push(disambiguated_data.data.as_symbol().to_string()); Ok(path) } fn path_generic_args( self, print_prefix: impl FnOnce(Self) -> Result, - _args: &[Kind<'tcx>], + _args: &[GenericArg<'tcx>], ) -> Result { print_prefix(self) } @@ -589,7 +589,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { // if they are both "path types", there's a chance of ambiguity // due to different versions of the same crate if let (&ty::Adt(exp_adt, _), &ty::Adt(found_adt, _)) - = (&exp_found.expected.sty, &exp_found.found.sty) + = (&exp_found.expected.kind, &exp_found.found.kind) { report_path_match(err, exp_adt.did, found_adt.did); } @@ -639,7 +639,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { hir::MatchSource::TryDesugar => { if let Some(ty::error::ExpectedFound { expected, .. 
}) = exp_found { let discrim_expr = self.tcx.hir().expect_expr(discrim_hir_id); - let discrim_ty = if let hir::ExprKind::Call(_, args) = &discrim_expr.node { + let discrim_ty = if let hir::ExprKind::Call(_, args) = &discrim_expr.kind { let arg_expr = args.first().expect("try desugaring call w/out arg"); self.in_progress_tables.and_then(|tables| { tables.borrow().expr_ty_opt(arg_expr) @@ -803,7 +803,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { self.highlight_outer(&mut t1_out, &mut t2_out, path, sub, i, &other_ty); return Some(()); } - if let &ty::Adt(def, _) = &ta.sty { + if let &ty::Adt(def, _) = &ta.kind { let path_ = self.tcx.def_path_str(def.did.clone()); if path_ == other_path { self.highlight_outer(&mut t1_out, &mut t2_out, path, sub, i, &other_ty); @@ -867,8 +867,11 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { /// Compares two given types, eliding parts that are the same between them and highlighting /// relevant differences, and return two representation of those types for highlighted printing. fn cmp(&self, t1: Ty<'tcx>, t2: Ty<'tcx>) -> (DiagnosticStyledString, DiagnosticStyledString) { + debug!("cmp(t1={}, t1.kind={:?}, t2={}, t2.kind={:?})", t1, t1.kind, t2, t2.kind); + + // helper functions fn equals<'tcx>(a: Ty<'tcx>, b: Ty<'tcx>) -> bool { - match (&a.sty, &b.sty) { + match (&a.kind, &b.kind) { (a, b) if *a == *b => true, (&ty::Int(_), &ty::Infer(ty::InferTy::IntVar(_))) | (&ty::Infer(ty::InferTy::IntVar(_)), &ty::Int(_)) @@ -902,7 +905,8 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { s.push_normal(ty.to_string()); } - match (&t1.sty, &t2.sty) { + // process starts here + match (&t1.kind, &t2.kind) { (&ty::Adt(def1, sub1), &ty::Adt(def2, sub2)) => { let sub_no_defaults_1 = self.strip_generic_default_params(def1.did, sub1); let sub_no_defaults_2 = self.strip_generic_default_params(def2.did, sub2); @@ -935,6 +939,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { .filter(|(a, b)| a == b) .count(); let len = sub1.len() - common_default_params; + let consts_offset = len - sub1.consts().count(); // Only draw `<...>` if there're lifetime/type arguments. if len > 0 { @@ -981,7 +986,8 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { // ^ elided type as this type argument was the same in both sides let type_arguments = sub1.types().zip(sub2.types()); let regions_len = sub1.regions().count(); - for (i, (ta1, ta2)) in type_arguments.take(len).enumerate() { + let num_display_types = consts_offset - regions_len; + for (i, (ta1, ta2)) in type_arguments.take(num_display_types).enumerate() { let i = i + regions_len; if ta1 == ta2 { values.0.push_normal("_"); @@ -994,6 +1000,21 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { self.push_comma(&mut values.0, &mut values.1, len, i); } + // Do the same for const arguments, if they are equal, do not highlight and + // elide them from the output. + let const_arguments = sub1.consts().zip(sub2.consts()); + for (i, (ca1, ca2)) in const_arguments.enumerate() { + let i = i + consts_offset; + if ca1 == ca2 { + values.0.push_normal("_"); + values.1.push_normal("_"); + } else { + values.0.push_highlighted(ca1.to_string()); + values.1.push_highlighted(ca2.to_string()); + } + self.push_comma(&mut values.0, &mut values.1, len, i); + } + // Close the type argument bracket. // Only draw `<...>` if there're lifetime/type arguments. if len > 0 { @@ -1035,12 +1056,47 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { return values; } - // We couldn't find anything in common, highlight everything. 
- // let x: Bar = y::>(); - ( - DiagnosticStyledString::highlighted(t1.to_string()), - DiagnosticStyledString::highlighted(t2.to_string()), - ) + // We can't find anything in common, highlight relevant part of type path. + // let x: foo::bar::Baz = y:>(); + // foo::bar::Baz + // foo::bar::Bar + // -------- this part of the path is different + + let t1_str = t1.to_string(); + let t2_str = t2.to_string(); + let min_len = t1_str.len().min(t2_str.len()); + + const SEPARATOR: &str = "::"; + let separator_len = SEPARATOR.len(); + let split_idx: usize = + t1_str.split(SEPARATOR) + .zip(t2_str.split(SEPARATOR)) + .take_while(|(mod1_str, mod2_str)| mod1_str == mod2_str) + .map(|(mod_str, _)| mod_str.len() + separator_len) + .sum(); + + debug!("cmp: separator_len={}, split_idx={}, min_len={}", + separator_len, split_idx, min_len + ); + + if split_idx >= min_len { + // paths are identical, highlight everything + ( + DiagnosticStyledString::highlighted(t1_str), + DiagnosticStyledString::highlighted(t2_str) + ) + } else { + let (common, uniq1) = t1_str.split_at(split_idx); + let (_, uniq2) = t2_str.split_at(split_idx); + debug!("cmp: common={}, uniq1={}, uniq2={}", common, uniq1, uniq2); + + values.0.push_normal(common); + values.0.push_highlighted(uniq1); + values.1.push_normal(common); + values.1.push_highlighted(uniq2); + + values + } } } @@ -1103,6 +1159,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { _ => {} } + debug!("note_type_err(diag={:?})", diag); let (expected_found, exp_found, is_simple_error) = match values { None => (None, None, false), Some(values) => { @@ -1129,16 +1186,23 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { let span = cause.span(self.tcx); - diag.span_label(span, terr.to_string()); - if let Some((sp, msg)) = secondary_span { - diag.span_label(sp, msg); - } + // Ignore msg for object safe coercion + // since E0038 message will be printed + match terr { + TypeError::ObjectUnsafeCoercion(_) => {} + _ => { + diag.span_label(span, terr.to_string()); + if let Some((sp, msg)) = secondary_span { + diag.span_label(sp, msg); + } + } + }; if let Some((expected, found)) = expected_found { match (terr, is_simple_error, expected == found) { (&TypeError::Sorts(ref values), false, true) => { let sort_string = | a_type: Ty<'tcx> | - if let ty::Opaque(def_id, _) = a_type.sty { + if let ty::Opaque(def_id, _) = a_type.kind { format!(" (opaque type at {})", self.tcx.sess.source_map() .mk_substr_filename(self.tcx.def_span(def_id))) } else { @@ -1152,7 +1216,14 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { &sort_string(values.found), ); } + (TypeError::ObjectUnsafeCoercion(_), ..) 
=> { + diag.note_unsuccessfull_coercion(found, expected); + } (_, false, _) => { + debug!( + "note_type_err: exp_found={:?}, expected={:?} found={:?}", + exp_found, expected, found + ); if let Some(exp_found) = exp_found { self.suggest_as_ref_where_appropriate(span, &exp_found, diag); } @@ -1179,9 +1250,9 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { exp_found: &ty::error::ExpectedFound>, diag: &mut DiagnosticBuilder<'tcx>, ) { - match (&exp_found.expected.sty, &exp_found.found.sty) { + match (&exp_found.expected.kind, &exp_found.found.kind) { (ty::Adt(exp_def, exp_substs), ty::Ref(_, found_ty, _)) => { - if let ty::Adt(found_def, found_substs) = found_ty.sty { + if let ty::Adt(found_def, found_substs) = found_ty.kind { let path_str = format!("{:?}", exp_def); if exp_def == &found_def { let opt_msg = "you can convert from `&Option` to `Option<&T>` using \ @@ -1203,9 +1274,9 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { { let mut show_suggestion = true; for (exp_ty, found_ty) in exp_substs.types().zip(found_substs.types()) { - match exp_ty.sty { + match exp_ty.kind { ty::Ref(_, exp_ty, _) => { - match (&exp_ty.sty, &found_ty.sty) { + match (&exp_ty.kind, &found_ty.kind) { (_, ty::Param(_)) | (_, ty::Infer(_)) | (ty::Param(_), _) | @@ -1250,6 +1321,10 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { let span = trace.cause.span(self.tcx); let failure_code = trace.cause.as_failure_code(terr); let mut diag = match failure_code { + FailureCode::Error0038(did) => { + let violations = self.tcx.object_safety_violations(did); + self.tcx.report_object_safety_error(span, did, violations) + } FailureCode::Error0317(failure_str) => { struct_span_err!(self.tcx.sess, span, E0317, "{}", failure_str) } @@ -1611,6 +1686,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { } enum FailureCode { + Error0038(DefId), Error0317(&'static str), Error0580(&'static str), Error0308(&'static str), @@ -1649,6 +1725,7 @@ impl<'tcx> ObligationCause<'tcx> { TypeError::IntrinsicCast => { Error0308("cannot coerce intrinsics to function pointers") } + TypeError::ObjectUnsafeCoercion(did) => Error0038(did.clone()), _ => Error0308("mismatched types"), }, } diff --git a/src/librustc/infer/error_reporting/need_type_info.rs b/src/librustc/infer/error_reporting/need_type_info.rs index 7068fe3601..b89731273f 100644 --- a/src/librustc/infer/error_reporting/need_type_info.rs +++ b/src/librustc/infer/error_reporting/need_type_info.rs @@ -44,7 +44,7 @@ impl<'a, 'tcx> FindLocalByTypeVisitor<'a, 'tcx> { Some(ty) => { let ty = self.infcx.resolve_vars_if_possible(&ty); if ty.walk().any(|inner_ty| { - inner_ty == self.target_ty || match (&inner_ty.sty, &self.target_ty.sty) { + inner_ty == self.target_ty || match (&inner_ty.kind, &self.target_ty.kind) { (&Infer(TyVar(a_vid)), &Infer(TyVar(b_vid))) => { self.infcx .type_variables @@ -92,10 +92,10 @@ impl<'a, 'tcx> Visitor<'tcx> for FindLocalByTypeVisitor<'a, 'tcx> { fn visit_expr(&mut self, expr: &'tcx Expr) { if let (ExprKind::Closure(_, _fn_decl, _id, _sp, _), Some(_)) = ( - &expr.node, + &expr.kind, self.node_matches_type(expr.hir_id), ) { - self.found_closure = Some(&expr.node); + self.found_closure = Some(&expr.kind); } intravisit::walk_expr(self, expr); } @@ -114,7 +114,7 @@ fn closure_return_type_suggestion( FunctionRetTy::DefaultReturn(_) => ("-> ", " "), _ => ("", ""), }; - let suggestion = match body.value.node { + let suggestion = match body.value.kind { ExprKind::Block(..) 
=> { vec![(output.span(), format!("{}{}{}", arrow, ret, post))] } @@ -151,7 +151,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { ty: Ty<'tcx>, highlight: Option, ) -> (String, Option) { - if let ty::Infer(ty::TyVar(ty_vid)) = ty.sty { + if let ty::Infer(ty::TyVar(ty_vid)) = ty.kind { let ty_vars = self.type_variables.borrow(); let var_origin = ty_vars.var_origin(ty_vid); if let TypeVariableOriginKind::TypeParameterDefinition(name) = var_origin.kind { @@ -219,8 +219,8 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { }; let ty_msg = match local_visitor.found_ty { - Some(ty::TyS { sty: ty::Closure(def_id, substs), .. }) => { - let fn_sig = substs.closure_sig(*def_id, self.tcx); + Some(ty::TyS { kind: ty::Closure(def_id, substs), .. }) => { + let fn_sig = substs.as_closure().sig(*def_id, self.tcx); let args = closure_args(&fn_sig); let ret = fn_sig.output().skip_binder().to_string(); format!(" for the closure `fn({}) -> {}`", args, ret) @@ -254,8 +254,8 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { ); let suffix = match local_visitor.found_ty { - Some(ty::TyS { sty: ty::Closure(def_id, substs), .. }) => { - let fn_sig = substs.closure_sig(*def_id, self.tcx); + Some(ty::TyS { kind: ty::Closure(def_id, substs), .. }) => { + let fn_sig = substs.as_closure().sig(*def_id, self.tcx); let ret = fn_sig.output().skip_binder().to_string(); if let Some(ExprKind::Closure(_, decl, body_id, ..)) = local_visitor.found_closure { diff --git a/src/librustc/infer/error_reporting/nice_region_error/find_anon_type.rs b/src/librustc/infer/error_reporting/nice_region_error/find_anon_type.rs index 34f3b8a2c7..f4751e591b 100644 --- a/src/librustc/infer/error_reporting/nice_region_error/find_anon_type.rs +++ b/src/librustc/infer/error_reporting/nice_region_error/find_anon_type.rs @@ -31,15 +31,15 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> { if let Some(hir_id) = self.tcx().hir().as_local_hir_id(def_id) { let fndecl = match self.tcx().hir().get(hir_id) { Node::Item(&hir::Item { - node: hir::ItemKind::Fn(ref fndecl, ..), + kind: hir::ItemKind::Fn(ref fndecl, ..), .. }) => &fndecl, Node::TraitItem(&hir::TraitItem { - node: hir::TraitItemKind::Method(ref m, ..), + kind: hir::TraitItemKind::Method(ref m, ..), .. }) | Node::ImplItem(&hir::ImplItem { - node: hir::ImplItemKind::Method(ref m, ..), + kind: hir::ImplItemKind::Method(ref m, ..), .. 
}) => &m.decl, _ => return None, @@ -62,7 +62,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> { &self, arg: &'tcx hir::Ty, br: &ty::BoundRegion, - ) -> Option<(&'tcx hir::Ty)> { + ) -> Option<&'tcx hir::Ty> { let mut nested_visitor = FindNestedTypeVisitor { tcx: self.tcx(), bound_region: *br, @@ -98,7 +98,7 @@ impl Visitor<'tcx> for FindNestedTypeVisitor<'tcx> { } fn visit_ty(&mut self, arg: &'tcx hir::Ty) { - match arg.node { + match arg.kind { hir::TyKind::BareFn(_) => { self.current_index.shift_in(1); intravisit::walk_ty(self, arg); diff --git a/src/librustc/infer/error_reporting/nice_region_error/mod.rs b/src/librustc/infer/error_reporting/nice_region_error/mod.rs index 1edb1c601b..cd003aa8da 100644 --- a/src/librustc/infer/error_reporting/nice_region_error/mod.rs +++ b/src/librustc/infer/error_reporting/nice_region_error/mod.rs @@ -12,6 +12,7 @@ mod named_anon_conflict; mod placeholder_error; mod outlives_closure; mod static_impl_trait; +mod trait_impl_difference; mod util; impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> { @@ -73,6 +74,7 @@ impl<'cx, 'tcx> NiceRegionError<'cx, 'tcx> { .or_else(|| self.try_report_anon_anon_conflict()) .or_else(|| self.try_report_outlives_closure()) .or_else(|| self.try_report_static_impl_trait()) + .or_else(|| self.try_report_impl_not_conforming_to_trait()) } pub fn get_regions(&self) -> (Span, ty::Region<'tcx>, ty::Region<'tcx>) { diff --git a/src/librustc/infer/error_reporting/nice_region_error/named_anon_conflict.rs b/src/librustc/infer/error_reporting/nice_region_error/named_anon_conflict.rs index 604115cfc3..a9a2c15d7d 100644 --- a/src/librustc/infer/error_reporting/nice_region_error/named_anon_conflict.rs +++ b/src/librustc/infer/error_reporting/nice_region_error/named_anon_conflict.rs @@ -87,7 +87,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> { return None; } if let FunctionRetTy::Return(ty) = &fndecl.output { - if let (TyKind::Def(_, _), ty::ReStatic) = (&ty.node, sub) { + if let (TyKind::Def(_, _), ty::ReStatic) = (&ty.kind, sub) { // This is an impl Trait return that evaluates de need of 'static. // We handle this case better in `static_impl_trait`. return None; diff --git a/src/librustc/infer/error_reporting/nice_region_error/outlives_closure.rs b/src/librustc/infer/error_reporting/nice_region_error/outlives_closure.rs index f5a4dac2c2..9231e4f779 100644 --- a/src/librustc/infer/error_reporting/nice_region_error/outlives_closure.rs +++ b/src/librustc/infer/error_reporting/nice_region_error/outlives_closure.rs @@ -50,7 +50,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> { let hir = &self.tcx().hir(); if let Some(hir_id) = hir.as_local_hir_id(free_region.scope) { if let Node::Expr(Expr { - node: Closure(_, _, _, closure_span, None), + kind: Closure(_, _, _, closure_span, None), .. }) = hir.get(hir_id) { let sup_sp = sup_origin.span(); diff --git a/src/librustc/infer/error_reporting/nice_region_error/trait_impl_difference.rs b/src/librustc/infer/error_reporting/nice_region_error/trait_impl_difference.rs new file mode 100644 index 0000000000..0194300c50 --- /dev/null +++ b/src/librustc/infer/error_reporting/nice_region_error/trait_impl_difference.rs @@ -0,0 +1,59 @@ +//! Error Reporting for `impl` items that do not match the obligations from their `trait`. 
+ +use syntax_pos::Span; +use crate::ty::Ty; +use crate::infer::{ValuePairs, Subtype}; +use crate::infer::error_reporting::nice_region_error::NiceRegionError; +use crate::infer::lexical_region_resolve::RegionResolutionError; +use crate::util::common::ErrorReported; +use crate::traits::ObligationCauseCode::CompareImplMethodObligation; + +impl<'a, 'tcx> NiceRegionError<'a, 'tcx> { + /// Print the error message for lifetime errors when the `impl` doesn't conform to the `trait`. + pub(super) fn try_report_impl_not_conforming_to_trait(&self) -> Option { + if let Some(ref error) = self.error { + debug!("try_report_impl_not_conforming_to_trait {:?}", error); + if let RegionResolutionError::SubSupConflict( + _, + var_origin, + sub_origin, + _sub, + sup_origin, + _sup, + ) = error.clone() { + match (&sup_origin, &sub_origin) { + (&Subtype(ref sup_trace), &Subtype(ref sub_trace)) => { + if let ( + ValuePairs::Types(sub_expected_found), + ValuePairs::Types(sup_expected_found), + CompareImplMethodObligation { trait_item_def_id, .. }, + ) = (&sub_trace.values, &sup_trace.values, &sub_trace.cause.code) { + if sup_expected_found == sub_expected_found { + self.emit_err( + var_origin.span(), + sub_expected_found.expected, + sub_expected_found.found, + self.tcx().def_span(*trait_item_def_id), + ); + return Some(ErrorReported); + } + } + } + _ => {} + } + } + } + None + } + + fn emit_err(&self, sp: Span, expected: Ty<'tcx>, found: Ty<'tcx>, impl_sp: Span) { + let mut err = self.tcx().sess.struct_span_err( + sp, + "`impl` item signature doesn't match `trait` item signature", + ); + err.note(&format!("expected `{:?}`\n found `{:?}`", expected, found)); + err.span_label(sp, &format!("found {:?}", found)); + err.span_label(impl_sp, &format!("expected {:?}", expected)); + err.emit(); + } +} diff --git a/src/librustc/infer/error_reporting/nice_region_error/util.rs b/src/librustc/infer/error_reporting/nice_region_error/util.rs index 668c99da00..a2e48cf07c 100644 --- a/src/librustc/infer/error_reporting/nice_region_error/util.rs +++ b/src/librustc/infer/error_reporting/nice_region_error/util.rs @@ -109,7 +109,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> { decl: &hir::FnDecl, ) -> Option { let ret_ty = self.tcx().type_of(scope_def_id); - if let ty::FnDef(_, _) = ret_ty.sty { + if let ty::FnDef(_, _) = ret_ty.kind { let sig = ret_ty.fn_sig(self.tcx()); let late_bound_regions = self.tcx() .collect_referenced_late_bound_regions(&sig.output()); diff --git a/src/librustc/infer/freshen.rs b/src/librustc/infer/freshen.rs index 400a538baa..1841bd9ea6 100644 --- a/src/librustc/infer/freshen.rs +++ b/src/librustc/infer/freshen.rs @@ -153,7 +153,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for TypeFreshener<'a, 'tcx> { let tcx = self.infcx.tcx; - match t.sty { + match t.kind { ty::Infer(ty::TyVar(v)) => { let opt_ty = self.infcx.type_variables.borrow_mut().probe(v).known(); self.freshen_ty( @@ -252,7 +252,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for TypeFreshener<'a, 'tcx> { return ct; } - ConstValue::Infer(ty::InferConst::Canonical(..)) | + ConstValue::Bound(..) 
| ConstValue::Placeholder(_) => { bug!("unexpected const {:?}", ct) } diff --git a/src/librustc/infer/fudge.rs b/src/librustc/infer/fudge.rs index 658a9c1d88..e27766f461 100644 --- a/src/librustc/infer/fudge.rs +++ b/src/librustc/infer/fudge.rs @@ -148,7 +148,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for InferenceFudger<'a, 'tcx> { } fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { - match ty.sty { + match ty.kind { ty::Infer(ty::InferTy::TyVar(vid)) => { if self.type_vars.0.contains(&vid) { // This variable was created during the fudging. diff --git a/src/librustc/infer/lattice.rs b/src/librustc/infer/lattice.rs index 68cbef4407..39701231aa 100644 --- a/src/librustc/infer/lattice.rs +++ b/src/librustc/infer/lattice.rs @@ -61,7 +61,7 @@ where let infcx = this.infcx(); let a = infcx.type_variables.borrow_mut().replace_if_possible(a); let b = infcx.type_variables.borrow_mut().replace_if_possible(b); - match (&a.sty, &b.sty) { + match (&a.kind, &b.kind) { // If one side is known to be a variable and one is not, // create a variable (`v`) to represent the LUB. Make sure to // relate `v` to the non-type-variable first (by passing it diff --git a/src/librustc/infer/lexical_region_resolve/README.md b/src/librustc/infer/lexical_region_resolve/README.md index 7eb4da86ec..c26b5625a9 100644 --- a/src/librustc/infer/lexical_region_resolve/README.md +++ b/src/librustc/infer/lexical_region_resolve/README.md @@ -1,268 +1,7 @@ -# Region inference -> WARNING: This README is obsolete and will be removed soon! For -> more info on how the current borrowck works, see the [rustc guide]. -> -> As of edition 2018, region inference is done using Non-lexical lifetimes, -> which is described in the guide and [this RFC]. +Lexical Region Resolution was removed in https://github.com/rust-lang/rust/pull/64790. -[rustc guide]: https://rust-lang.github.io/rustc-guide/borrow_check/region_inference.html -[this RFC]: https://github.com/rust-lang/rfcs/blob/master/text/2094-nll.md +Rust now uses Non-lexical lifetimes. For more info, please see the [borrowck +chapter][bc] in the rustc-guide. -## Terminology - -Note that we use the terms region and lifetime interchangeably. - -## Introduction - -Region inference uses a somewhat more involved algorithm than type -inference. It is not the most efficient thing ever written though it -seems to work well enough in practice (famous last words). The reason -that we use a different algorithm is because, unlike with types, it is -impractical to hand-annotate with regions (in some cases, there aren't -even the requisite syntactic forms). So we have to get it right, and -it's worth spending more time on a more involved analysis. Moreover, -regions are a simpler case than types: they don't have aggregate -structure, for example. - -## The problem - -Basically our input is a directed graph where nodes can be divided -into two categories: region variables and concrete regions. Each edge -`R -> S` in the graph represents a constraint that the region `R` is a -subregion of the region `S`. - -Region variable nodes can have arbitrary degree. There is one region -variable node per region variable. - -Each concrete region node is associated with some, well, concrete -region: e.g., a free lifetime, or the region for a particular scope. -Note that there may be more than one concrete region node for a -particular region value. Moreover, because of how the graph is built, -we know that all concrete region nodes have either in-degree 1 or -out-degree 1. 
- -Before resolution begins, we build up the constraints in a hashmap -that maps `Constraint` keys to spans. During resolution, we construct -the actual `Graph` structure that we describe here. - -## Computing the values for region variables - -The algorithm is a simple dataflow algorithm. Each region variable -begins as empty. We iterate over the constraints, and for each constraint -we grow the relevant region variable to be as big as it must be to meet all the -constraints. This means the region variables can grow to be `'static` if -necessary. - -## Verification - -After all constraints are fully propoagated, we do a "verification" -step where we walk over the verify bounds and check that they are -satisfied. These bounds represent the "maximal" values that a region -variable can take on, basically. - -## The Region Hierarchy - -### Without closures - -Let's first consider the region hierarchy without thinking about -closures, because they add a lot of complications. The region -hierarchy *basically* mirrors the lexical structure of the code. -There is a region for every piece of 'evaluation' that occurs, meaning -every expression, block, and pattern (patterns are considered to -"execute" by testing the value they are applied to and creating any -relevant bindings). So, for example: - -```rust -fn foo(x: isize, y: isize) { // -+ -// +------------+ // | -// | +-----+ // | -// | +-+ +-+ +-+ // | -// | | | | | | | // | -// v v v v v v v // | - let z = x + y; // | - ... // | -} // -+ - -fn bar() { ... } -``` - -In this example, there is a region for the fn body block as a whole, -and then a subregion for the declaration of the local variable. -Within that, there are sublifetimes for the assignment pattern and -also the expression `x + y`. The expression itself has sublifetimes -for evaluating `x` and `y`. - -#s## Function calls - -Function calls are a bit tricky. I will describe how we handle them -*now* and then a bit about how we can improve them (Issue #6268). - -Consider a function call like `func(expr1, expr2)`, where `func`, -`arg1`, and `arg2` are all arbitrary expressions. Currently, -we construct a region hierarchy like: - - +----------------+ - | | - +--+ +---+ +---+| - v v v v v vv - func(expr1, expr2) - -Here you can see that the call as a whole has a region and the -function plus arguments are subregions of that. As a side-effect of -this, we get a lot of spurious errors around nested calls, in -particular when combined with `&mut` functions. For example, a call -like this one - -```rust -self.foo(self.bar()) -``` - -where both `foo` and `bar` are `&mut self` functions will always yield -an error. - -Here is a more involved example (which is safe) so we can see what's -going on: - -```rust -struct Foo { f: usize, g: usize } -// ... -fn add(p: &mut usize, v: usize) { - *p += v; -} -// ... -fn inc(p: &mut usize) -> usize { - *p += 1; *p -} -fn weird() { - let mut x: Box = box Foo { /* ... */ }; - 'a: add(&mut (*x).f, - 'b: inc(&mut (*x).f)) // (..) -} -``` - -The important part is the line marked `(..)` which contains a call to -`add()`. The first argument is a mutable borrow of the field `f`. The -second argument also borrows the field `f`. Now, in the current borrow -checker, the first borrow is given the lifetime of the call to -`add()`, `'a`. The second borrow is given the lifetime of `'b` of the -call to `inc()`. Because `'b` is considered to be a sublifetime of -`'a`, an error is reported since there are two co-existing mutable -borrows of the same data. 
- -However, if we were to examine the lifetimes a bit more carefully, we -can see that this error is unnecessary. Let's examine the lifetimes -involved with `'a` in detail. We'll break apart all the steps involved -in a call expression: - -```rust -'a: { - 'a_arg1: let a_temp1: ... = add; - 'a_arg2: let a_temp2: &'a mut usize = &'a mut (*x).f; - 'a_arg3: let a_temp3: usize = { - let b_temp1: ... = inc; - let b_temp2: &'b = &'b mut (*x).f; - 'b_call: b_temp1(b_temp2) - }; - 'a_call: a_temp1(a_temp2, a_temp3) // (**) -} -``` - -Here we see that the lifetime `'a` includes a number of substatements. -In particular, there is this lifetime I've called `'a_call` that -corresponds to the *actual execution of the function `add()`*, after -all arguments have been evaluated. There is a corresponding lifetime -`'b_call` for the execution of `inc()`. If we wanted to be precise -about it, the lifetime of the two borrows should be `'a_call` and -`'b_call` respectively, since the references that were created -will not be dereferenced except during the execution itself. - -However, this model by itself is not sound. The reason is that -while the two references that are created will never be used -simultaneously, it is still true that the first reference is -*created* before the second argument is evaluated, and so even though -it will not be *dereferenced* during the evaluation of the second -argument, it can still be *invalidated* by that evaluation. Consider -this similar but unsound example: - -```rust -struct Foo { f: usize, g: usize } -// ... -fn add(p: &mut usize, v: usize) { - *p += v; -} -// ... -fn consume(x: Box) -> usize { - x.f + x.g -} -fn weird() { - let mut x: Box = box Foo { ... }; - 'a: add(&mut (*x).f, consume(x)) // (..) -} -``` - -In this case, the second argument to `add` actually consumes `x`, thus -invalidating the first argument. - -So, for now, we exclude the `call` lifetimes from our model. -Eventually I would like to include them, but we will have to make the -borrow checker handle this situation correctly. In particular, if -there is a reference created whose lifetime does not enclose -the borrow expression, we must issue sufficient restrictions to ensure -that the pointee remains valid. - -### Modeling closures - -Integrating closures properly into the model is a bit of -work-in-progress. In an ideal world, we would model closures as -closely as possible after their desugared equivalents. That is, a -closure type would be modeled as a struct, and the region hierarchy of -different closure bodies would be completely distinct from all other -fns. We are generally moving in that direction but there are -complications in terms of the implementation. - -In practice what we currently do is somewhat different. The basis for -the current approach is the observation that the only time that -regions from distinct fn bodies interact with one another is through -an upvar or the type of a fn parameter (since closures live in the fn -body namespace, they can in fact have fn parameters whose types -include regions from the surrounding fn body). For these cases, there -are separate mechanisms which ensure that the regions that appear in -upvars/parameters outlive the dynamic extent of each call to the -closure: - -1. Types must outlive the region of any expression where they are used. - For a closure type `C` to outlive a region `'r`, that implies that the - types of all its upvars must outlive `'r`. -2. Parameters must outlive the region of any fn that they are passed to. 
- -Therefore, we can -- sort of -- assume that any region from an -enclosing fns is larger than any region from one of its enclosed -fn. And that is precisely what we do: when building the region -hierarchy, each region lives in its own distinct subtree, but if we -are asked to compute the `LUB(r1, r2)` of two regions, and those -regions are in disjoint subtrees, we compare the lexical nesting of -the two regions. - -*Ideas for improving the situation:* (FIXME #3696) The correctness -argument here is subtle and a bit hand-wavy. The ideal, as stated -earlier, would be to model things in such a way that it corresponds -more closely to the desugared code. The best approach for doing this -is a bit unclear: it may in fact be possible to *actually* desugar -before we start, but I don't think so. The main option that I've been -thinking through is imposing a "view shift" as we enter the fn body, -so that regions appearing in the types of fn parameters and upvars are -translated from being regions in the outer fn into free region -parameters, just as they would be if we applied the desugaring. The -challenge here is that type inference may not have fully run, so the -types may not be fully known: we could probably do this translation -lazilly, as type variables are instantiated. We would also have to -apply a kind of inverse translation to the return value. This would be -a good idea anyway, as right now it is possible for free regions -instantiated within the closure to leak into the parent: this -currently leads to type errors, since those regions cannot outlive any -expressions within the parent hierarchy. Much like the current -handling of closures, there are no known cases where this leads to a -type-checking accepting incorrect code (though it sometimes rejects -what might be considered correct code; see rust-lang/rust#22557), but -it still doesn't feel like the right approach. +[bc]: https://rust-lang.github.io/rustc-guide/borrow_check/region_inference.html diff --git a/src/librustc/infer/lexical_region_resolve/mod.rs b/src/librustc/infer/lexical_region_resolve/mod.rs index 6282fde59c..f30f19d415 100644 --- a/src/librustc/infer/lexical_region_resolve/mod.rs +++ b/src/librustc/infer/lexical_region_resolve/mod.rs @@ -19,8 +19,8 @@ use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::graph::implementation::{ Direction, Graph, NodeIndex, INCOMING, OUTGOING, }; -use rustc_data_structures::indexed_vec::{Idx, IndexVec}; -use smallvec::SmallVec; +use rustc_index::bit_set::BitSet; +use rustc_index::vec::{Idx, IndexVec}; use std::fmt; use syntax_pos::Span; @@ -304,8 +304,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { } fn expansion(&self, var_values: &mut LexicalRegionResolutions<'tcx>) { - self.iterate_until_fixed_point("Expansion", |constraint| { - debug!("expansion: constraint={:?}", constraint); + let mut process_constraint = |constraint: &Constraint<'tcx>| { let (a_region, b_vid, b_data, retain) = match *constraint { Constraint::RegSubVar(a_region, b_vid) => { let b_data = var_values.value_mut(b_vid); @@ -331,7 +330,33 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { let changed = self.expand_node(a_region, b_vid, b_data); (changed, retain) - }) + }; + + // Using bitsets to track the remaining elements is faster than using a + // `Vec` by itself (which requires removing elements, which requires + // element shuffling, which is slow). 
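The comment above motivates replacing the old `iterate_until_fixed_point`/`Vec::retain` loop with one that marks dead constraints in bitsets. As a rough, self-contained sketch of that pattern, with a made-up `Constraint` type and a plain `Vec<bool>` standing in for `rustc_index`'s `BitSet<usize>`:

```rust
// Stand-alone sketch of the "live mask" fixed-point loop; `Constraint` is a
// made-up stand-in, and a `Vec<bool>` plays the role of `BitSet<usize>`.
enum Constraint {
    /// A fixed lower bound on a variable; once applied it can never fire again.
    ConstSubVar { lo: u32, var: usize },
    /// `values[sup]` must be at least `values[sub]`; may fire again later.
    VarSubVar { sub: usize, sup: usize },
}

/// Returns `(changed, retain)`, mirroring the shape of `process_constraint`.
fn process(c: &Constraint, values: &mut [u32]) -> (bool, bool) {
    match *c {
        Constraint::ConstSubVar { lo, var } => {
            let changed = values[var] < lo;
            if changed { values[var] = lo; }
            (changed, false)
        }
        Constraint::VarSubVar { sub, sup } => {
            let changed = values[sup] < values[sub];
            if changed { values[sup] = values[sub]; }
            (changed, true)
        }
    }
}

fn main() {
    let constraints = vec![
        Constraint::ConstSubVar { lo: 3, var: 0 },
        Constraint::VarSubVar { sub: 0, sup: 1 },
    ];
    let mut values = vec![0u32; 2];

    // Mark constraints dead instead of removing them from the `Vec`, which
    // would shuffle the remaining elements on every pass.
    let mut live = vec![true; constraints.len()];
    let mut changed = true;
    while changed {
        changed = false;
        for (index, constraint) in constraints.iter().enumerate() {
            if !live[index] {
                continue;
            }
            let (edge_changed, retain) = process(constraint, &mut values);
            changed |= edge_changed;
            if !retain {
                live[index] = false;
            }
        }
    }
    println!("fixed point: {:?}", values); // [3, 3]
}
```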
+ let constraints: Vec<_> = self.data.constraints.keys().collect(); + let mut live_indices: BitSet = BitSet::new_filled(constraints.len()); + let mut killed_indices: BitSet = BitSet::new_empty(constraints.len()); + let mut changed = true; + while changed { + changed = false; + for index in live_indices.iter() { + let constraint = constraints[index]; + let (edge_changed, retain) = process_constraint(constraint); + if edge_changed { + changed = true; + } + if !retain { + let changed = killed_indices.insert(index); + debug_assert!(changed); + } + } + live_indices.subtract(&killed_indices); + + // We could clear `killed_indices` here, but we don't need to and + // it's cheaper not to. + } } // This function is very hot in some workloads. There's a single callsite @@ -360,13 +385,21 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { match *b_data { VarValue::Value(cur_region) => { // Identical scopes can show up quite often, if the fixed point - // iteration converges slowly, skip them + // iteration converges slowly. Skip them. This is purely an + // optimization. if let (ReScope(a_scope), ReScope(cur_scope)) = (a_region, cur_region) { if a_scope == cur_scope { return false; } } + // This is a specialized version of the `lub_concrete_regions` + // check below for a common case, here purely as an + // optimization. + if let ReEmpty = a_region { + return false; + } + let mut lub = self.lub_concrete_regions(a_region, cur_region); if lub == cur_region { return false; @@ -407,8 +440,6 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { /// Returns the smallest region `c` such that `a <= c` and `b <= c`. fn lub_concrete_regions(&self, a: Region<'tcx>, b: Region<'tcx>) -> Region<'tcx> { - let tcx = self.tcx(); - match (a, b) { (&ty::ReClosureBound(..), _) | (_, &ty::ReClosureBound(..)) @@ -468,7 +499,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { // otherwise, we don't know what the free region is, // so we must conservatively say the LUB is static: - tcx.lifetimes.re_static + self.tcx().lifetimes.re_static } (&ReScope(a_id), &ReScope(b_id)) => { @@ -476,7 +507,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { // subtype of the region corresponding to an inner // block. 
let lub = self.region_rels.region_scope_tree.nearest_common_ancestor(a_id, b_id); - tcx.mk_region(ReScope(lub)) + self.tcx().mk_region(ReScope(lub)) } (&ReEarlyBound(_), &ReEarlyBound(_)) @@ -490,7 +521,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { if a == b { a } else { - tcx.lifetimes.re_static + self.tcx().lifetimes.re_static } } } @@ -860,29 +891,6 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { } } - fn iterate_until_fixed_point(&self, tag: &str, mut body: F) - where - F: FnMut(&Constraint<'tcx>) -> (bool, bool), - { - let mut constraints: SmallVec<[_; 16]> = self.data.constraints.keys().collect(); - let mut iteration = 0; - let mut changed = true; - while changed { - changed = false; - iteration += 1; - debug!("---- {} Iteration {}{}", "#", tag, iteration); - constraints.retain(|constraint| { - let (edge_changed, retain) = body(constraint); - if edge_changed { - debug!("updated due to constraint {:?}", constraint); - changed = true; - } - retain - }); - } - debug!("---- {} Complete after {} iteration(s)", tag, iteration); - } - fn bound_is_met( &self, bound: &VerifyBound<'tcx>, diff --git a/src/librustc/infer/mod.rs b/src/librustc/infer/mod.rs index c5712cc994..e385d576b8 100644 --- a/src/librustc/infer/mod.rs +++ b/src/librustc/infer/mod.rs @@ -20,10 +20,10 @@ use crate::traits::{self, ObligationCause, PredicateObligations, TraitEngine}; use crate::ty::error::{ExpectedFound, TypeError, UnconstrainedNumeric}; use crate::ty::fold::{TypeFolder, TypeFoldable}; use crate::ty::relate::RelateResult; -use crate::ty::subst::{Kind, InternalSubsts, SubstsRef}; +use crate::ty::subst::{GenericArg, InternalSubsts, SubstsRef}; use crate::ty::{self, GenericParamDefKind, Ty, TyCtxt, InferConst}; use crate::ty::{FloatVid, IntVid, TyVid, ConstVid}; -use crate::util::nodemap::FxHashMap; +use crate::util::nodemap::{FxHashMap, FxHashSet}; use errors::DiagnosticBuilder; use rustc_data_structures::sync::Lrc; @@ -32,7 +32,7 @@ use std::cell::{Cell, Ref, RefCell, RefMut}; use std::collections::BTreeMap; use std::fmt; use syntax::ast; -use syntax_pos::symbol::InternedString; +use syntax_pos::symbol::Symbol; use syntax_pos::Span; use self::combine::CombineFields; @@ -93,6 +93,8 @@ impl SuppressRegionErrors { /// checks, so we should ignore errors if NLL is (unconditionally) /// enabled. pub fn when_nll_is_enabled(tcx: TyCtxt<'_>) -> Self { + // FIXME(Centril): Once we actually remove `::Migrate` also make + // this always `true` and then proceed to eliminate the dead code. match tcx.borrowck_mode() { // If we're on Migrate mode, report AST region errors BorrowckMode::Migrate => SuppressRegionErrors { suppressed: false }, @@ -153,6 +155,8 @@ pub struct InferCtxt<'a, 'tcx> { /// avoid reporting the same error twice. pub reported_trait_errors: RefCell>>>, + pub reported_closure_mismatch: RefCell)>>, + /// When an error occurs, we want to avoid reporting "derived" /// errors that are due to this original failure. 
Normally, we /// handle this with the `err_count_on_creation` count, which @@ -388,7 +392,7 @@ pub enum RegionVariableOrigin { Coercion(Span), /// Region variables created as the values for early-bound regions - EarlyBoundRegion(Span, InternedString), + EarlyBoundRegion(Span, Symbol), /// Region variables created for bound regions /// in a function or method that is called @@ -403,7 +407,7 @@ pub enum RegionVariableOrigin { NLL(NLLRegionVariableOrigin), } -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +#[derive(Copy, Clone, Debug)] pub enum NLLRegionVariableOrigin { /// During NLL region processing, we create variables for free /// regions that we encounter in the function signature and @@ -414,7 +418,19 @@ pub enum NLLRegionVariableOrigin { /// from a `for<'a> T` binder). Meant to represent "any region". Placeholder(ty::PlaceholderRegion), - Existential, + Existential { + /// If this is true, then this variable was created to represent a lifetime + /// bound in a `for` binder. For example, it might have been created to + /// represent the lifetime `'a` in a type like `for<'a> fn(&'a u32)`. + /// Such variables are created when we are trying to figure out if there + /// is any valid instantiation of `'a` that could fit into some scenario. + /// + /// This is used to inform error reporting: in the case that we are trying to + /// determine whether there is any valid instantiation of a `'a` variable that meets + /// some constraint C, we want to blame the "source" of that `for` type, + /// rather than blaming the source of the constraint C. + from_forall: bool + }, } impl NLLRegionVariableOrigin { @@ -422,7 +438,7 @@ impl NLLRegionVariableOrigin { match self { NLLRegionVariableOrigin::FreeRegion => true, NLLRegionVariableOrigin::Placeholder(..) => true, - NLLRegionVariableOrigin::Existential => false, + NLLRegionVariableOrigin::Existential{ .. } => false, } } @@ -536,6 +552,7 @@ impl<'tcx> InferCtxtBuilder<'tcx> { selection_cache: Default::default(), evaluation_cache: Default::default(), reported_trait_errors: Default::default(), + reported_closure_mismatch: Default::default(), tainted_by_errors_flag: Cell::new(false), err_count_on_creation: tcx.sess.err_count(), in_snapshot: Cell::new(false), @@ -614,7 +631,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { } pub fn type_var_diverges(&'a self, ty: Ty<'_>) -> bool { - match ty.sty { + match ty.kind { ty::Infer(ty::TyVar(vid)) => self.type_variables.borrow().var_diverges(vid), _ => false, } @@ -627,7 +644,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { pub fn type_is_unconstrained_numeric(&'a self, ty: Ty<'_>) -> UnconstrainedNumeric { use crate::ty::error::UnconstrainedNumeric::Neither; use crate::ty::error::UnconstrainedNumeric::{UnconstrainedFloat, UnconstrainedInt}; - match ty.sty { + match ty.kind { ty::Infer(ty::IntVar(vid)) => { if self.int_unification_table .borrow_mut() @@ -797,16 +814,16 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { /// Executes `f` and commit the bindings. pub fn commit_unconditionally(&self, f: F) -> R where - F: FnOnce() -> R, + F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> R, { - debug!("commit()"); + debug!("commit_unconditionally()"); let snapshot = self.start_snapshot(); - let r = f(); + let r = f(&snapshot); self.commit_from(snapshot); r } - /// Executes `f` and commit the bindings if closure `f` returns `Ok(_)`. + /// Execute `f` and commit the bindings if closure `f` returns `Ok(_)`. 
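`commit_unconditionally` above and `commit_if_ok` below share one contract: take a snapshot, run the closure against it, and either keep or roll back the bindings it created. A rough, self-contained sketch of that rollback-on-`Err` behaviour, using a toy undo log rather than the real `InferCtxt`/`CombinedSnapshot` machinery:

```rust
// Toy illustration of the snapshot contract; names and types are made up and
// do not correspond to rustc's inference-context internals.
struct Table {
    values: Vec<i32>,
}

struct Snapshot {
    len: usize, // how many entries existed when the snapshot was taken
}

impl Table {
    fn start_snapshot(&self) -> Snapshot {
        Snapshot { len: self.values.len() }
    }

    fn rollback_to(&mut self, snapshot: Snapshot) {
        self.values.truncate(snapshot.len);
    }

    fn commit(&mut self, _snapshot: Snapshot) {
        // Nothing to undo: simply forget the snapshot.
    }

    /// Run `f`; keep its additions only if it returns `Ok(_)`.
    fn commit_if_ok<T, E, F>(&mut self, f: F) -> Result<T, E>
    where
        F: FnOnce(&mut Self) -> Result<T, E>,
    {
        let snapshot = self.start_snapshot();
        match f(self) {
            Ok(v) => { self.commit(snapshot); Ok(v) }
            Err(e) => { self.rollback_to(snapshot); Err(e) }
        }
    }
}

fn main() {
    let mut table = Table { values: vec![1] };

    let failed: Result<(), &str> = table.commit_if_ok(|t| { t.values.push(2); Err("bail") });
    assert!(failed.is_err());
    assert_eq!(table.values, vec![1]); // the push was rolled back

    let ok: Result<(), &str> = table.commit_if_ok(|t| { t.values.push(3); Ok(()) });
    assert!(ok.is_ok());
    assert_eq!(table.values, vec![1, 3]); // the push was kept
}
```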
pub fn commit_if_ok(&self, f: F) -> Result where F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> Result, @@ -826,19 +843,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { r } - /// Execute `f` in a snapshot, and commit the bindings it creates. - pub fn in_snapshot(&self, f: F) -> T - where - F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> T, - { - debug!("in_snapshot()"); - let snapshot = self.start_snapshot(); - let r = f(&snapshot); - self.commit_from(snapshot); - r - } - - /// Executes `f` then unroll any bindings it creates. + /// Execute `f` then unroll any bindings it creates. pub fn probe(&self, f: F) -> R where F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> R, @@ -1110,7 +1115,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { self.next_region_var_in_universe(RegionVariableOrigin::NLL(origin), universe) } - pub fn var_for_def(&self, span: Span, param: &ty::GenericParamDef) -> Kind<'tcx> { + pub fn var_for_def(&self, span: Span, param: &ty::GenericParamDef) -> GenericArg<'tcx> { match param.kind { GenericParamDefKind::Lifetime => { // Create a region inference variable for the given @@ -1302,6 +1307,14 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { } } + /// Resolve any type variables found in `value` -- but only one + /// level. So, if the variable `?X` is bound to some type + /// `Foo`, then this would return `Foo` (but `?Y` may + /// itself be bound to a type). + /// + /// Useful when you only need to inspect the outermost level of + /// the type and don't care about nested types (or perhaps you + /// will be resolving them as well, e.g. in a loop). pub fn shallow_resolve(&self, value: T) -> T where T: TypeFoldable<'tcx>, @@ -1460,7 +1473,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { // type-checking closure types are in local tables only. if !self.in_progress_tables.is_some() || !ty.has_closure_types() { if !(param_env, ty).has_local_value() { - return ty.is_copy_modulo_regions(self.tcx.global_tcx(), param_env, span); + return ty.is_copy_modulo_regions(self.tcx, param_env, span); } } @@ -1479,9 +1492,9 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { pub fn closure_kind( &self, closure_def_id: DefId, - closure_substs: ty::ClosureSubsts<'tcx>, + closure_substs: SubstsRef<'tcx>, ) -> Option { - let closure_kind_ty = closure_substs.closure_kind_ty(closure_def_id, self.tcx); + let closure_kind_ty = closure_substs.as_closure().kind_ty(closure_def_id, self.tcx); let closure_kind_ty = self.shallow_resolve(closure_kind_ty); closure_kind_ty.to_opt_closure_kind() } @@ -1493,9 +1506,9 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { pub fn closure_sig( &self, def_id: DefId, - substs: ty::ClosureSubsts<'tcx>, + substs: SubstsRef<'tcx>, ) -> ty::PolyFnSig<'tcx> { - let closure_sig_ty = substs.closure_sig_ty(def_id, self.tcx); + let closure_sig_ty = substs.as_closure().sig_ty(def_id, self.tcx); let closure_sig_ty = self.shallow_resolve(closure_sig_ty); closure_sig_ty.fn_sig(self.tcx) } @@ -1562,8 +1575,11 @@ impl<'a, 'tcx> ShallowResolver<'a, 'tcx> { ShallowResolver { infcx } } + /// If `typ` is a type variable of some kind, resolve it one level + /// (but do not resolve types found in the result). If `typ` is + /// not a type variable, just return it unmodified. 
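The new doc comment above spells out what "shallow" means here: follow a variable to its known value once, without resolving variables nested inside that value. A toy illustration of the difference between one-level and full resolution, using a made-up mini type language rather than rustc's `Ty`:

```rust
// Made-up mini "type" language to illustrate shallow vs. deep resolution.
use std::collections::HashMap;

#[derive(Clone, Debug, PartialEq)]
enum Ty {
    Int,
    Var(u32),     // an inference variable ?N
    Vec(Box<Ty>), // Vec<T>
}

/// Follow `?N` bindings at the top level only.
fn shallow_resolve(table: &HashMap<u32, Ty>, mut ty: Ty) -> Ty {
    while let Ty::Var(v) = ty {
        match table.get(&v) {
            Some(known) => ty = known.clone(),
            None => return Ty::Var(v), // still unresolved
        }
    }
    ty
}

/// Resolve variables everywhere, including inside `Vec<...>`.
fn deep_resolve(table: &HashMap<u32, Ty>, ty: Ty) -> Ty {
    match shallow_resolve(table, ty) {
        Ty::Vec(inner) => Ty::Vec(Box::new(deep_resolve(table, *inner))),
        other => other,
    }
}

fn main() {
    let mut table = HashMap::new();
    table.insert(0, Ty::Vec(Box::new(Ty::Var(1)))); // ?0 := Vec<?1>
    table.insert(1, Ty::Int);                       // ?1 := Int

    // Shallow: ?0 becomes Vec<?1>, and ?1 is left alone.
    assert_eq!(shallow_resolve(&table, Ty::Var(0)), Ty::Vec(Box::new(Ty::Var(1))));
    // Deep: ?0 becomes Vec<Int>.
    assert_eq!(deep_resolve(&table, Ty::Var(0)), Ty::Vec(Box::new(Ty::Int)));
}
```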
pub fn shallow_resolve(&mut self, typ: Ty<'tcx>) -> Ty<'tcx> { - match typ.sty { + match typ.kind { ty::Infer(ty::TyVar(v)) => { // Not entirely obvious: if `typ` is a type variable, // it can be resolved to an int/float variable, which @@ -1600,29 +1616,30 @@ impl<'a, 'tcx> ShallowResolver<'a, 'tcx> { // `resolver.shallow_resolve_changed(ty)` is equivalent to // `resolver.shallow_resolve(ty) != ty`, but more efficient. It's always - // inlined, despite being large, because it has a single call site that is - // extremely hot. + // inlined, despite being large, because it has only two call sites that + // are extremely hot. #[inline(always)] pub fn shallow_resolve_changed(&mut self, typ: Ty<'tcx>) -> bool { - match typ.sty { + match typ.kind { ty::Infer(ty::TyVar(v)) => { use self::type_variable::TypeVariableValue; // See the comment in `shallow_resolve()`. - match self.infcx.type_variables.borrow_mut().probe(v) { + match self.infcx.type_variables.borrow_mut().inlined_probe(v) { TypeVariableValue::Known { value: t } => self.fold_ty(t) != typ, TypeVariableValue::Unknown { .. } => false, } } ty::Infer(ty::IntVar(v)) => { - match self.infcx.int_unification_table.borrow_mut().probe_value(v) { + match self.infcx.int_unification_table.borrow_mut().inlined_probe_value(v) { Some(v) => v.to_type(self.infcx.tcx) != typ, None => false, } } ty::Infer(ty::FloatVar(v)) => { + // Not `inlined_probe_value(v)` because this call site is colder. match self.infcx.float_unification_table.borrow_mut().probe_value(v) { Some(v) => v.to_type(self.infcx.tcx) != typ, None => false, diff --git a/src/librustc/infer/nll_relate/mod.rs b/src/librustc/infer/nll_relate/mod.rs index 5d521def65..d6f76e9ee3 100644 --- a/src/librustc/infer/nll_relate/mod.rs +++ b/src/librustc/infer/nll_relate/mod.rs @@ -26,13 +26,14 @@ use crate::traits::DomainGoal; use crate::ty::error::TypeError; use crate::ty::fold::{TypeFoldable, TypeVisitor}; use crate::ty::relate::{self, Relate, RelateResult, TypeRelation}; -use crate::ty::subst::Kind; +use crate::ty::subst::GenericArg; use crate::ty::{self, Ty, TyCtxt, InferConst}; +use crate::infer::{ConstVariableValue, ConstVarValue}; use crate::mir::interpret::ConstValue; use rustc_data_structures::fx::FxHashMap; use std::fmt::Debug; -#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +#[derive(PartialEq)] pub enum NormalizationStrategy { Lazy, Eager, @@ -93,7 +94,7 @@ pub trait TypeRelatingDelegate<'tcx> { /// we will invoke this method to instantiate `'a` with an /// inference variable (though `'b` would be instantiated first, /// as a placeholder). - fn next_existential_region_var(&mut self) -> ty::Region<'tcx>; + fn next_existential_region_var(&mut self, was_placeholder: bool) -> ty::Region<'tcx>; /// Creates a new region variable representing a /// higher-ranked region that is instantiated universally. 
@@ -124,7 +125,7 @@ pub trait TypeRelatingDelegate<'tcx> { #[derive(Clone, Debug)] struct ScopesAndKind<'tcx> { scopes: Vec>, - kind: Kind<'tcx>, + kind: GenericArg<'tcx>, } #[derive(Clone, Debug, Default)] @@ -193,7 +194,7 @@ where let placeholder = ty::PlaceholderRegion { universe, name: br }; delegate.next_placeholder_region(placeholder) } else { - delegate.next_existential_region_var() + delegate.next_existential_region_var(true) } } }; @@ -274,7 +275,7 @@ where use crate::traits::WhereClause; use syntax_pos::DUMMY_SP; - match value_ty.sty { + match value_ty.kind { ty::Projection(other_projection_ty) => { let var = self.infcx.next_ty_var(TypeVariableOrigin { kind: TypeVariableOriginKind::MiscVariable, @@ -324,11 +325,11 @@ where let vid = pair.vid(); let value_ty = pair.value_ty(); - // FIXME -- this logic assumes invariance, but that is wrong. + // FIXME(invariance) -- this logic assumes invariance, but that is wrong. // This only presently applies to chalk integration, as NLL // doesn't permit type variables to appear on both sides (and // doesn't use lazy norm). - match value_ty.sty { + match value_ty.kind { ty::Infer(ty::TyVar(value_vid)) => { // Two type variables: just equate them. self.infcx @@ -548,7 +549,7 @@ where b = self.infcx.shallow_resolve(b); } - match (&a.sty, &b.sty) { + match (&a.kind, &b.kind) { (_, &ty::Infer(ty::TyVar(vid))) => { if D::forbid_inference_vars() { // Forbid inference variables in the RHS. @@ -616,15 +617,21 @@ where fn consts( &mut self, a: &'tcx ty::Const<'tcx>, - b: &'tcx ty::Const<'tcx>, + mut b: &'tcx ty::Const<'tcx>, ) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>> { - if let ty::Const { val: ConstValue::Infer(InferConst::Canonical(_, _)), .. } = a { - // FIXME(const_generics): I'm unsure how this branch should actually be handled, - // so this is probably not correct. - self.infcx.super_combine_consts(self, a, b) - } else { - debug!("consts(a={:?}, b={:?}, variance={:?})", a, b, self.ambient_variance); - relate::super_relate_consts(self, a, b) + let a = self.infcx.shallow_resolve(a); + + if !D::forbid_inference_vars() { + b = self.infcx.shallow_resolve(b); + } + + match b.val { + ConstValue::Infer(InferConst::Var(_)) if D::forbid_inference_vars() => { + // Forbid inference variables in the RHS. + bug!("unexpected inference var {:?}", b) + } + // FIXME(invariance): see the related FIXME above. + _ => self.infcx.super_combine_consts(self, a, b) } } @@ -878,7 +885,7 @@ where debug!("TypeGeneralizer::tys(a={:?})", a); - match a.sty { + match a.kind { ty::Infer(ty::TyVar(_)) | ty::Infer(ty::IntVar(_)) | ty::Infer(ty::FloatVar(_)) if D::forbid_inference_vars() => { @@ -991,15 +998,28 @@ where a: &'tcx ty::Const<'tcx>, _: &'tcx ty::Const<'tcx>, ) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>> { - debug!("TypeGeneralizer::consts(a={:?})", a); - - if let ty::Const { val: ConstValue::Infer(InferConst::Canonical(_, _)), .. 
} = a { - bug!( - "unexpected inference variable encountered in NLL generalization: {:?}", - a - ); - } else { - relate::super_relate_consts(self, a, a) + match a.val { + ConstValue::Infer(InferConst::Var(_)) if D::forbid_inference_vars() => { + bug!( + "unexpected inference variable encountered in NLL generalization: {:?}", + a + ); + } + ConstValue::Infer(InferConst::Var(vid)) => { + let mut variable_table = self.infcx.const_unification_table.borrow_mut(); + let var_value = variable_table.probe_value(vid); + match var_value.val.known() { + Some(u) => self.relate(&u, &u), + None => { + let new_var_id = variable_table.new_key(ConstVarValue { + origin: var_value.origin, + val: ConstVariableValue::Unknown { universe: self.universe }, + }); + Ok(self.tcx().mk_const_var(new_var_id, a.ty)) + } + } + } + _ => relate::super_relate_consts(self, a, a), } } diff --git a/src/librustc/infer/opaque_types/mod.rs b/src/librustc/infer/opaque_types/mod.rs index c9fd3392a9..bd19a002fe 100644 --- a/src/librustc/infer/opaque_types/mod.rs +++ b/src/librustc/infer/opaque_types/mod.rs @@ -7,7 +7,7 @@ use crate::middle::region; use crate::mir::interpret::ConstValue; use crate::traits::{self, PredicateObligation}; use crate::ty::fold::{BottomUpFolder, TypeFoldable, TypeFolder, TypeVisitor}; -use crate::ty::subst::{InternalSubsts, Kind, SubstsRef, UnpackedKind}; +use crate::ty::subst::{InternalSubsts, GenericArg, SubstsRef, GenericArgKind}; use crate::ty::{self, GenericParamDefKind, Ty, TyCtxt}; use crate::util::nodemap::DefIdMap; use errors::DiagnosticBuilder; @@ -561,16 +561,14 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { def_id, instantiated_ty ); - let gcx = self.tcx.global_tcx(); - // Use substs to build up a reverse map from regions to their // identity mappings. This is necessary because of `impl // Trait` lifetimes are computed by replacing existing // lifetimes with 'static and remapping only those used in the // `impl Trait` return type, resulting in the parameters // shifting. - let id_substs = InternalSubsts::identity_for_item(gcx, def_id); - let map: FxHashMap, Kind<'tcx>> = opaque_defn + let id_substs = InternalSubsts::identity_for_item(self.tcx, def_id); + let map: FxHashMap, GenericArg<'tcx>> = opaque_defn .substs .iter() .enumerate() @@ -720,27 +718,27 @@ where return false; // keep visiting } - match ty.sty { + match ty.kind { ty::Closure(def_id, ref substs) => { // Skip lifetime parameters of the enclosing item(s) - for upvar_ty in substs.upvar_tys(def_id, self.tcx) { + for upvar_ty in substs.as_closure().upvar_tys(def_id, self.tcx) { upvar_ty.visit_with(self); } - substs.closure_sig_ty(def_id, self.tcx).visit_with(self); + substs.as_closure().sig_ty(def_id, self.tcx).visit_with(self); } ty::Generator(def_id, ref substs, _) => { // Skip lifetime parameters of the enclosing item(s) // Also skip the witness type, because that has no free regions. 
- for upvar_ty in substs.upvar_tys(def_id, self.tcx) { + for upvar_ty in substs.as_generator().upvar_tys(def_id, self.tcx) { upvar_ty.visit_with(self); } - substs.return_ty(def_id, self.tcx).visit_with(self); - substs.yield_ty(def_id, self.tcx).visit_with(self); + substs.as_generator().return_ty(def_id, self.tcx).visit_with(self); + substs.as_generator().yield_ty(def_id, self.tcx).visit_with(self); } _ => { ty.super_visit_with(self); @@ -759,7 +757,7 @@ struct ReverseMapper<'tcx> { tainted_by_errors: bool, opaque_type_def_id: DefId, - map: FxHashMap, Kind<'tcx>>, + map: FxHashMap, GenericArg<'tcx>>, map_missing_regions_to_empty: bool, /// initially `Some`, set to `None` once error has been reported @@ -774,7 +772,7 @@ impl ReverseMapper<'tcx> { tcx: TyCtxt<'tcx>, tainted_by_errors: bool, opaque_type_def_id: DefId, - map: FxHashMap, Kind<'tcx>>, + map: FxHashMap, GenericArg<'tcx>>, hidden_ty: Ty<'tcx>, span: Span, ) -> Self { @@ -789,7 +787,10 @@ impl ReverseMapper<'tcx> { } } - fn fold_kind_mapping_missing_regions_to_empty(&mut self, kind: Kind<'tcx>) -> Kind<'tcx> { + fn fold_kind_mapping_missing_regions_to_empty( + &mut self, + kind: GenericArg<'tcx>, + ) -> GenericArg<'tcx> { assert!(!self.map_missing_regions_to_empty); self.map_missing_regions_to_empty = true; let kind = kind.fold_with(self); @@ -797,7 +798,7 @@ impl ReverseMapper<'tcx> { kind } - fn fold_kind_normally(&mut self, kind: Kind<'tcx>) -> Kind<'tcx> { + fn fold_kind_normally(&mut self, kind: GenericArg<'tcx>) -> GenericArg<'tcx> { assert!(!self.map_missing_regions_to_empty); kind.fold_with(self) } @@ -822,7 +823,7 @@ impl TypeFolder<'tcx> for ReverseMapper<'tcx> { let generics = self.tcx().generics_of(self.opaque_type_def_id); match self.map.get(&r.into()).map(|k| k.unpack()) { - Some(UnpackedKind::Lifetime(r1)) => r1, + Some(GenericArgKind::Lifetime(r1)) => r1, Some(u) => panic!("region mapped to unexpected kind: {:?}", u), None if generics.parent.is_some() => { if !self.map_missing_regions_to_empty && !self.tainted_by_errors { @@ -851,13 +852,13 @@ impl TypeFolder<'tcx> for ReverseMapper<'tcx> { ) .emit(); - self.tcx().global_tcx().mk_region(ty::ReStatic) + self.tcx().mk_region(ty::ReStatic) }, } } fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { - match ty.sty { + match ty.kind { ty::Closure(def_id, substs) => { // I am a horrible monster and I pray for death. When // we encounter a closure here, it is always a closure @@ -885,7 +886,7 @@ impl TypeFolder<'tcx> for ReverseMapper<'tcx> { let generics = self.tcx.generics_of(def_id); let substs = - self.tcx.mk_substs(substs.substs.iter().enumerate().map(|(index, &kind)| { + self.tcx.mk_substs(substs.iter().enumerate().map(|(index, &kind)| { if index < generics.parent_count { // Accommodate missing regions in the parent kinds... self.fold_kind_mapping_missing_regions_to_empty(kind) @@ -895,13 +896,13 @@ impl TypeFolder<'tcx> for ReverseMapper<'tcx> { } })); - self.tcx.mk_closure(def_id, ty::ClosureSubsts { substs }) + self.tcx.mk_closure(def_id, substs) } ty::Generator(def_id, substs, movability) => { let generics = self.tcx.generics_of(def_id); let substs = - self.tcx.mk_substs(substs.substs.iter().enumerate().map(|(index, &kind)| { + self.tcx.mk_substs(substs.iter().enumerate().map(|(index, &kind)| { if index < generics.parent_count { // Accommodate missing regions in the parent kinds... 
self.fold_kind_mapping_missing_regions_to_empty(kind) @@ -911,7 +912,7 @@ impl TypeFolder<'tcx> for ReverseMapper<'tcx> { } })); - self.tcx.mk_generator(def_id, ty::GeneratorSubsts { substs }, movability) + self.tcx.mk_generator(def_id, substs, movability) } ty::Param(..) => { @@ -919,7 +920,7 @@ impl TypeFolder<'tcx> for ReverseMapper<'tcx> { match self.map.get(&ty.into()).map(|k| k.unpack()) { // Found it in the substitution list; replace with the parameter from the // opaque type. - Some(UnpackedKind::Type(t1)) => t1, + Some(GenericArgKind::Type(t1)) => t1, Some(u) => panic!("type mapped to unexpected kind: {:?}", u), None => { self.tcx.sess @@ -949,7 +950,7 @@ impl TypeFolder<'tcx> for ReverseMapper<'tcx> { match self.map.get(&ct.into()).map(|k| k.unpack()) { // Found it in the substitution list, replace with the parameter from the // opaque type. - Some(UnpackedKind::Const(c1)) => c1, + Some(GenericArgKind::Const(c1)) => c1, Some(u) => panic!("const mapped to unexpected kind: {:?}", u), None => { self.tcx.sess @@ -988,7 +989,9 @@ impl<'a, 'tcx> Instantiator<'a, 'tcx> { value.fold_with(&mut BottomUpFolder { tcx, ty_op: |ty| { - if let ty::Opaque(def_id, substs) = ty.sty { + if ty.references_error() { + return tcx.types.err; + } else if let ty::Opaque(def_id, substs) = ty.kind { // Check that this is `impl Trait` type is // declared by `parent_def_id` -- i.e., one whose // value we are inferring. At present, this is @@ -1031,7 +1034,7 @@ impl<'a, 'tcx> Instantiator<'a, 'tcx> { .local_def_id(opaque_parent_hir_id) }; let (in_definition_scope, origin) = match tcx.hir().find(opaque_hir_id) { - Some(Node::Item(item)) => match item.node { + Some(Node::Item(item)) => match item.kind { // Anonymous `impl Trait` hir::ItemKind::OpaqueTy(hir::OpaqueTy { impl_trait_fn: Some(parent), @@ -1055,7 +1058,7 @@ impl<'a, 'tcx> Instantiator<'a, 'tcx> { (def_scope_default(), hir::OpaqueTyOrigin::TypeAlias) } }, - Some(Node::ImplItem(item)) => match item.node { + Some(Node::ImplItem(item)) => match item.kind { hir::ImplItemKind::OpaqueTy(_) => ( may_define_opaque_type( tcx, @@ -1155,6 +1158,15 @@ impl<'a, 'tcx> Instantiator<'a, 'tcx> { ); debug!("instantiate_opaque_types: ty_var={:?}", ty_var); + for predicate in &bounds.predicates { + if let ty::Predicate::Projection(projection) = &predicate { + if projection.skip_binder().ty.references_error() { + // No point on adding these obligations since there's a type error involved. + return ty_var; + } + } + } + self.obligations.reserve(bounds.predicates.len()); for predicate in bounds.predicates { // Change the predicate to refer to the type variable, @@ -1201,7 +1213,7 @@ pub fn may_define_opaque_type( let mut hir_id = tcx.hir().as_local_hir_id(def_id).unwrap(); // Named opaque types can be defined by any siblings or children of siblings. - let scope = tcx.hir().get_defining_scope(opaque_hir_id).expect("could not get defining scope"); + let scope = tcx.hir().get_defining_scope(opaque_hir_id); // We walk up the node tree until we hit the root or the scope of the opaque type. 
while hir_id != scope && hir_id != hir::CRATE_HIR_ID { hir_id = tcx.hir().get_parent_item(hir_id); diff --git a/src/librustc/infer/outlives/obligations.rs b/src/librustc/infer/outlives/obligations.rs index e1470e4ef0..f780618877 100644 --- a/src/librustc/infer/outlives/obligations.rs +++ b/src/librustc/infer/outlives/obligations.rs @@ -67,7 +67,7 @@ use crate::hir; use crate::traits::ObligationCause; use crate::ty::outlives::Component; use crate::ty::{self, Region, Ty, TyCtxt, TypeFoldable}; -use crate::ty::subst::UnpackedKind; +use crate::ty::subst::GenericArgKind; impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> { /// Registers that the given region obligation must be resolved @@ -403,7 +403,7 @@ where // 'a` in the environment but `trait Foo<'b> { type Item: 'b // }` in the trait definition. approx_env_bounds.retain(|bound| { - match bound.0.sty { + match bound.0.kind { ty::Projection(projection_ty) => { self.verify_bound.projection_declared_bounds_from_trait(projection_ty) .all(|r| r != bound.1) @@ -433,13 +433,13 @@ where for k in projection_ty.substs { match k.unpack() { - UnpackedKind::Lifetime(lt) => { + GenericArgKind::Lifetime(lt) => { self.delegate.push_sub_region_constraint(origin.clone(), region, lt); } - UnpackedKind::Type(ty) => { + GenericArgKind::Type(ty) => { self.type_must_outlive(origin.clone(), ty, region); } - UnpackedKind::Const(_) => { + GenericArgKind::Const(_) => { // Const parameters don't impose constraints. } } diff --git a/src/librustc/infer/outlives/verify.rs b/src/librustc/infer/outlives/verify.rs index f23e52fcfe..3110b027c5 100644 --- a/src/librustc/infer/outlives/verify.rs +++ b/src/librustc/infer/outlives/verify.rs @@ -44,7 +44,7 @@ impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> { } fn type_bound(&self, ty: Ty<'tcx>) -> VerifyBound<'tcx> { - match ty.sty { + match ty.kind { ty::Param(p) => self.param_bound(p), ty::Projection(data) => self.projection_bound(data), _ => self.recursive_type_bound(ty), @@ -87,7 +87,7 @@ impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> { let projection_ty = GenericKind::Projection(projection_ty).to_ty(self.tcx); let erased_projection_ty = self.tcx.erase_regions(&projection_ty); self.declared_generic_bounds_from_env_with_compare_fn(|ty| { - if let ty::Projection(..) = ty.sty { + if let ty::Projection(..) = ty.kind { let erased_ty = self.tcx.erase_regions(&ty); erased_ty == erased_projection_ty } else { diff --git a/src/librustc/infer/region_constraints/leak_check.rs b/src/librustc/infer/region_constraints/leak_check.rs index 0c83bbc1e5..3d06942568 100644 --- a/src/librustc/infer/region_constraints/leak_check.rs +++ b/src/librustc/infer/region_constraints/leak_check.rs @@ -14,9 +14,11 @@ impl<'tcx> RegionConstraintCollector<'tcx> { /// retain the older (arguably incorrect) behavior of the /// compiler. /// - /// NB. The use of snapshot here is mostly an efficiency thing -- - /// we could search *all* region constraints, but that'd be a - /// bigger set and the data structures are not setup for that. If + /// NB. Although `_snapshot` isn't used, it's passed in to prove + /// that we are in a snapshot, which guarantees that we can just + /// search the "undo log" for edges. This is mostly an efficiency + /// thing -- we could search *all* region constraints, but that'd be + /// a bigger set and the data structures are not setup for that. If /// we wind up keeping some form of this check long term, it would /// probably be better to remove the snapshot parameter and to /// refactor the constraint set. 
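The expanded note on `leak_check` above describes a small but useful pattern: an otherwise-unused `_snapshot` parameter acts as evidence that the caller currently holds a snapshot, which is what makes searching only the undo log legitimate. A stripped-down sketch of that "witness parameter" idea, with made-up types unrelated to `RegionConstraintCollector`:

```rust
// Made-up types to illustrate passing a snapshot purely as a "proof" parameter.
struct Snapshot {
    undo_log_start: usize,
}

struct Collector {
    undo_log: Vec<String>, // records constraint edges as they are added
}

impl Collector {
    fn start_snapshot(&self) -> Snapshot {
        Snapshot { undo_log_start: self.undo_log.len() }
    }

    fn add_edge(&mut self, edge: &str) {
        self.undo_log.push(edge.to_string());
    }

    /// Only meaningful while a snapshot is live: the `_snapshot` argument is
    /// never read, it simply proves that the undo log covers the edges we care
    /// about, so we can search it instead of the full constraint set.
    fn leak_check(&self, _snapshot: &Snapshot) -> Result<(), String> {
        for edge in &self.undo_log {
            if edge.contains("placeholder") {
                return Err(format!("leaked: {}", edge));
            }
        }
        Ok(())
    }
}

fn main() {
    let mut collector = Collector { undo_log: Vec::new() };
    let snapshot = collector.start_snapshot();
    collector.add_edge("'a -> placeholder('b)");
    println!("{:?}", collector.leak_check(&snapshot));
}
```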
diff --git a/src/librustc/infer/region_constraints/mod.rs b/src/librustc/infer/region_constraints/mod.rs index 21904edb30..8c6a7c9a37 100644 --- a/src/librustc/infer/region_constraints/mod.rs +++ b/src/librustc/infer/region_constraints/mod.rs @@ -7,7 +7,7 @@ use super::unify_key; use super::{MiscVariable, RegionVariableOrigin, SubregionOrigin}; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; -use rustc_data_structures::indexed_vec::IndexVec; +use rustc_index::vec::IndexVec; use rustc_data_structures::sync::Lrc; use rustc_data_structures::unify as ut; use crate::hir::def_id::DefId; @@ -116,7 +116,7 @@ pub struct RegionConstraintData<'tcx> { } /// Represents a constraint that influences the inference process. -#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, PartialOrd, Ord)] +#[derive(Clone, Copy, PartialEq, Eq, Debug, PartialOrd, Ord)] pub enum Constraint<'tcx> { /// A region variable is a subregion of another. VarSubVar(RegionVid, RegionVid), diff --git a/src/librustc/infer/resolve.rs b/src/librustc/infer/resolve.rs index 7e553d7666..7c3a338366 100644 --- a/src/librustc/infer/resolve.rs +++ b/src/librustc/infer/resolve.rs @@ -1,7 +1,7 @@ use super::{InferCtxt, FixupError, FixupResult, Span}; use super::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; use crate::mir::interpret::ConstValue; -use crate::ty::{self, Ty, Const, TyCtxt, TypeFoldable, InferConst, TypeFlags}; +use crate::ty::{self, Ty, Const, TyCtxt, TypeFoldable, InferConst}; use crate::ty::fold::{TypeFolder, TypeVisitor}; /////////////////////////////////////////////////////////////////////////// @@ -29,7 +29,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for OpportunisticVarResolver<'a, 'tcx> { } fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> { - if !t.has_infer_types() { + if !t.has_infer_types() && !t.has_infer_consts() { t // micro-optimize -- if there is nothing in this type that this fold affects... } else { let t = self.infcx.shallow_resolve(t); @@ -38,7 +38,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for OpportunisticVarResolver<'a, 'tcx> { } fn fold_const(&mut self, ct: &'tcx Const<'tcx>) -> &'tcx Const<'tcx> { - if !ct.has_type_flags(TypeFlags::HAS_CT_INFER) { + if !ct.has_infer_consts() { ct // micro-optimize -- if there is nothing in this const that this fold affects... } else { let ct = self.infcx.shallow_resolve(ct); @@ -118,7 +118,7 @@ impl<'a, 'tcx> TypeVisitor<'tcx> for UnresolvedTypeFinder<'a, 'tcx> { fn visit_ty(&mut self, t: Ty<'tcx>) -> bool { let t = self.infcx.shallow_resolve(t); if t.has_infer_types() { - if let ty::Infer(infer_ty) = t.sty { + if let ty::Infer(infer_ty) = t.kind { // Since we called `shallow_resolve` above, this must // be an (as yet...) unresolved inference variable. let ty_var_span = @@ -188,7 +188,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for FullTypeResolver<'a, 'tcx> { // defaulted tuples. 
} else { let t = self.infcx.shallow_resolve(t); - match t.sty { + match t.kind { ty::Infer(ty::TyVar(vid)) => { self.err = Some(FixupError::UnresolvedTy(vid)); self.tcx().types.err diff --git a/src/librustc/infer/sub.rs b/src/librustc/infer/sub.rs index 67c97ef5d8..21c847e80f 100644 --- a/src/librustc/infer/sub.rs +++ b/src/librustc/infer/sub.rs @@ -71,7 +71,7 @@ impl TypeRelation<'tcx> for Sub<'combine, 'infcx, 'tcx> { let infcx = self.fields.infcx; let a = infcx.type_variables.borrow_mut().replace_if_possible(a); let b = infcx.type_variables.borrow_mut().replace_if_possible(b); - match (&a.sty, &b.sty) { + match (&a.kind, &b.kind) { (&ty::Infer(TyVar(a_vid)), &ty::Infer(TyVar(b_vid))) => { // Shouldn't have any LBR here, so we can safely put // this under a binder below without fear of accidental diff --git a/src/librustc/infer/type_variable.rs b/src/librustc/infer/type_variable.rs index e30e86998a..f79a30c7ae 100644 --- a/src/librustc/infer/type_variable.rs +++ b/src/librustc/infer/type_variable.rs @@ -1,4 +1,4 @@ -use syntax::symbol::InternedString; +use syntax::symbol::Symbol; use syntax_pos::Span; use crate::ty::{self, Ty, TyVid}; @@ -49,7 +49,7 @@ pub enum TypeVariableOriginKind { MiscVariable, NormalizeProjectionType, TypeInference, - TypeParameterDefinition(InternedString), + TypeParameterDefinition(Symbol), /// One of the upvars or closure kind parameters in a `ClosureSubsts` /// (before it has been determined). @@ -234,14 +234,20 @@ impl<'tcx> TypeVariableTable<'tcx> { /// Retrieves the type to which `vid` has been instantiated, if /// any. pub fn probe(&mut self, vid: ty::TyVid) -> TypeVariableValue<'tcx> { - self.eq_relations.probe_value(vid) + self.inlined_probe(vid) + } + + /// An always-inlined variant of `probe`, for very hot call sites. + #[inline(always)] + pub fn inlined_probe(&mut self, vid: ty::TyVid) -> TypeVariableValue<'tcx> { + self.eq_relations.inlined_probe_value(vid) } /// If `t` is a type-inference variable, and it has been /// instantiated, then return the with which it was /// instantiated. Otherwise, returns `t`. pub fn replace_if_possible(&mut self, t: Ty<'tcx>) -> Ty<'tcx> { - match t.sty { + match t.kind { ty::Infer(ty::TyVar(v)) => { match self.probe(v) { TypeVariableValue::Unknown { .. 
} => t, diff --git a/src/librustc/infer/unify_key.rs b/src/librustc/infer/unify_key.rs index 846611db05..b0b6d971c6 100644 --- a/src/librustc/infer/unify_key.rs +++ b/src/librustc/infer/unify_key.rs @@ -3,7 +3,7 @@ use crate::mir::interpret::ConstValue; use rustc_data_structures::unify::{NoError, EqUnifyValue, UnifyKey, UnifyValue, UnificationTable}; use rustc_data_structures::unify::InPlace; use syntax_pos::{Span, DUMMY_SP}; -use syntax::symbol::InternedString; +use syntax::symbol::Symbol; use std::cmp; use std::marker::PhantomData; @@ -90,7 +90,7 @@ pub struct ConstVariableOrigin { pub enum ConstVariableOriginKind { MiscVariable, ConstInference, - ConstParameterDefinition(InternedString), + ConstParameterDefinition(Symbol), SubstitutionPlaceholder, } diff --git a/src/librustc/lib.rs b/src/librustc/lib.rs index 7a01ae6b6d..996f5b1241 100644 --- a/src/librustc/lib.rs +++ b/src/librustc/lib.rs @@ -35,15 +35,15 @@ #![feature(const_transmute)] #![feature(core_intrinsics)] #![feature(drain_filter)] -#![feature(inner_deref)] #![cfg_attr(windows, feature(libc))] #![feature(never_type)] #![feature(exhaustive_patterns)] #![feature(overlapping_marker_traits)] #![feature(extern_types)] #![feature(nll)] -#![feature(non_exhaustive)] +#![cfg_attr(bootstrap, feature(non_exhaustive))] #![feature(optin_builtin_traits)] +#![feature(option_expect_none)] #![feature(range_is_empty)] #![feature(slice_patterns)] #![feature(specialization)] @@ -57,9 +57,8 @@ #![feature(test)] #![feature(in_band_lifetimes)] #![feature(crate_visibility_modifier)] -#![feature(proc_macro_hygiene)] +#![cfg_attr(bootstrap, feature(proc_macro_hygiene))] #![feature(log_syntax)] -#![feature(mem_take)] #![feature(associated_type_bounds)] #![feature(rustc_attrs)] @@ -100,19 +99,14 @@ pub mod infer; pub mod lint; pub mod middle { - pub mod borrowck; pub mod expr_use_visitor; pub mod cstore; - pub mod dead; pub mod dependency_format; pub mod diagnostic_items; - pub mod entry; pub mod exported_symbols; pub mod free_region; - pub mod intrinsicck; pub mod lib_features; pub mod lang_items; - pub mod liveness; pub mod mem_categorization; pub mod privacy; pub mod reachable; diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs index 5906a6388a..5c871bb6b6 100644 --- a/src/librustc/lint/builtin.rs +++ b/src/librustc/lint/builtin.rs @@ -4,11 +4,12 @@ //! compiler code, rather than using their own custom pass. Those //! lints are all available in `rustc_lint::builtin`. -use crate::lint::{LintPass, LateLintPass, LintArray}; +use crate::lint::{LintPass, LateLintPass, LintArray, FutureIncompatibleInfo}; use crate::middle::stability; use crate::session::Session; use errors::{Applicability, DiagnosticBuilder, pluralise}; use syntax::ast; +use syntax::edition::Edition; use syntax::source_map::Span; use syntax::symbol::Symbol; @@ -21,7 +22,8 @@ declare_lint! { declare_lint! { pub CONST_ERR, Deny, - "constant evaluation detected erroneous expression" + "constant evaluation detected erroneous expression", + report_in_external_macro } declare_lint! { @@ -66,11 +68,17 @@ declare_lint! { "detect unused, unexported items" } +declare_lint! { + pub UNUSED_ATTRIBUTES, + Warn, + "detects attributes that were not used by the compiler" +} + declare_lint! { pub UNREACHABLE_CODE, Warn, "detects unreachable code paths", - report_in_external_macro: true + report_in_external_macro } declare_lint! { @@ -79,6 +87,12 @@ declare_lint! { "detects unreachable patterns" } +declare_lint! 
{ + pub OVERLAPPING_PATTERNS, + Warn, + "detects overlapping patterns" +} + declare_lint! { pub UNUSED_MACROS, Warn, @@ -124,7 +138,11 @@ declare_lint! { declare_lint! { pub PRIVATE_IN_PUBLIC, Warn, - "detect private items in public interfaces not caught by the old implementation" + "detect private items in public interfaces not caught by the old implementation", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #34537 ", + edition: None, + }; } declare_lint! { @@ -136,13 +154,21 @@ declare_lint! { declare_lint! { pub PUB_USE_OF_PRIVATE_EXTERN_CRATE, Deny, - "detect public re-exports of private extern crates" + "detect public re-exports of private extern crates", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #34537 ", + edition: None, + }; } declare_lint! { pub INVALID_TYPE_PARAM_DEFAULT, Deny, - "type parameter default erroneously allowed in invalid location" + "type parameter default erroneously allowed in invalid location", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #36887 ", + edition: None, + }; } declare_lint! { @@ -154,63 +180,99 @@ declare_lint! { declare_lint! { pub SAFE_EXTERN_STATICS, Deny, - "safe access to extern statics was erroneously allowed" + "safe access to extern statics was erroneously allowed", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #36247 ", + edition: None, + }; } declare_lint! { pub SAFE_PACKED_BORROWS, Warn, - "safe borrows of fields of packed structs were was erroneously allowed" + "safe borrows of fields of packed structs were was erroneously allowed", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #46043 ", + edition: None, + }; } declare_lint! { pub PATTERNS_IN_FNS_WITHOUT_BODY, Warn, - "patterns in functions without body were erroneously allowed" + "patterns in functions without body were erroneously allowed", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #35203 ", + edition: None, + }; } declare_lint! { pub LEGACY_DIRECTORY_OWNERSHIP, Deny, "non-inline, non-`#[path]` modules (e.g., `mod foo;`) were erroneously allowed in some files \ - not named `mod.rs`" + not named `mod.rs`", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #37872 ", + edition: None, + }; } declare_lint! { pub LEGACY_CONSTRUCTOR_VISIBILITY, Deny, - "detects use of struct constructors that would be invisible with new visibility rules" + "detects use of struct constructors that would be invisible with new visibility rules", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #39207 ", + edition: None, + }; } declare_lint! { pub MISSING_FRAGMENT_SPECIFIER, Deny, - "detects missing fragment specifiers in unused `macro_rules!` patterns" + "detects missing fragment specifiers in unused `macro_rules!` patterns", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #40107 ", + edition: None, + }; } declare_lint! { pub PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES, Deny, - "detects parenthesized generic parameters in type and module names" + "detects parenthesized generic parameters in type and module names", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #42238 ", + edition: None, + }; } declare_lint! 
{ pub LATE_BOUND_LIFETIME_ARGUMENTS, Warn, - "detects generic lifetime arguments in path segments with late bound lifetime parameters" + "detects generic lifetime arguments in path segments with late bound lifetime parameters", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #42868 ", + edition: None, + }; } declare_lint! { pub ORDER_DEPENDENT_TRAIT_OBJECTS, Deny, - "trait-object types were treated as different depending on marker-trait order" + "trait-object types were treated as different depending on marker-trait order", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #56484 ", + edition: None, + }; } declare_lint! { pub DEPRECATED, Warn, "detects use of deprecated items", - report_in_external_macro: true + report_in_external_macro } declare_lint! { @@ -246,7 +308,11 @@ declare_lint! { declare_lint! { pub TYVAR_BEHIND_RAW_POINTER, Warn, - "raw pointer to an inference variable" + "raw pointer to an inference variable", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #46906 ", + edition: Some(Edition::Edition2018), + }; } declare_lint! { @@ -265,19 +331,33 @@ declare_lint! { pub ABSOLUTE_PATHS_NOT_STARTING_WITH_CRATE, Allow, "fully qualified paths that start with a module name \ - instead of `crate`, `self`, or an extern crate name" + instead of `crate`, `self`, or an extern crate name", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #53130 ", + edition: Some(Edition::Edition2018), + }; } declare_lint! { pub ILLEGAL_FLOATING_POINT_LITERAL_PATTERN, Warn, - "floating-point literals cannot be used in patterns" + "floating-point literals cannot be used in patterns", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #41620 ", + edition: None, + }; } declare_lint! { pub UNSTABLE_NAME_COLLISIONS, Warn, - "detects name collision with an existing but unstable method" + "detects name collision with an existing but unstable method", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #48919 ", + edition: None, + // Note: this item represents future incompatibility of all unstable functions in the + // standard library, and thus should never be removed or changed to an error. + }; } declare_lint! { @@ -295,7 +375,11 @@ declare_lint! { declare_lint! { pub DUPLICATE_MACRO_EXPORTS, Deny, - "detects duplicate macro exports" + "detects duplicate macro exports", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #35896 ", + edition: Some(Edition::Edition2018), + }; } declare_lint! { @@ -319,13 +403,21 @@ declare_lint! { declare_lint! { pub WHERE_CLAUSES_OBJECT_SAFETY, Warn, - "checks the object safety of where clauses" + "checks the object safety of where clauses", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #51443 ", + edition: None, + }; } declare_lint! { pub PROC_MACRO_DERIVE_RESOLUTION_FALLBACK, Warn, - "detects proc macro derives using inaccessible names from parent modules" + "detects proc macro derives using inaccessible names from parent modules", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #50504 ", + edition: None, + }; } declare_lint! { @@ -339,7 +431,11 @@ declare_lint! { pub MACRO_EXPANDED_MACRO_EXPORTS_ACCESSED_BY_ABSOLUTE_PATHS, Deny, "macro-expanded `macro_export` macros from the current crate \ - cannot be referred to by absolute paths" + cannot be referred to by absolute paths", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #52234 ", + edition: None, + }; } declare_lint! 
{ @@ -352,7 +448,11 @@ declare_lint! { pub INDIRECT_STRUCTURAL_MATCH, // defaulting to allow until rust-lang/rust#62614 is fixed. Allow, - "pattern with const indirectly referencing non-`#[structural_match]` type" + "pattern with const indirectly referencing non-`#[structural_match]` type", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #62411 ", + edition: None, + }; } /// Some lints that are buffered from `libsyntax`. See `syntax::early_buffered_lints`. @@ -360,7 +460,11 @@ pub mod parser { declare_lint! { pub ILL_FORMED_ATTRIBUTE_INPUT, Warn, - "ill-formed attribute inputs that were previously accepted and used in practice" + "ill-formed attribute inputs that were previously accepted and used in practice", + @future_incompatible = super::FutureIncompatibleInfo { + reference: "issue #57571 ", + edition: None, + }; } declare_lint! { @@ -368,37 +472,59 @@ pub mod parser { Allow, "possible meta-variable misuse at macro definition" } + + declare_lint! { + pub INCOMPLETE_INCLUDE, + Deny, + "trailing content in included file" + } } declare_lint! { pub DEPRECATED_IN_FUTURE, Allow, "detects use of items that will be deprecated in a future version", - report_in_external_macro: true + report_in_external_macro } declare_lint! { pub AMBIGUOUS_ASSOCIATED_ITEMS, Deny, - "ambiguous associated items" + "ambiguous associated items", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #57644 ", + edition: None, + }; } declare_lint! { pub NESTED_IMPL_TRAIT, Warn, - "nested occurrence of `impl Trait` type" + "nested occurrence of `impl Trait` type", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #59014 ", + edition: None, + }; } declare_lint! { pub MUTABLE_BORROW_RESERVATION_CONFLICT, Warn, - "reservation of a two-phased borrow conflicts with other shared borrows" + "reservation of a two-phased borrow conflicts with other shared borrows", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #59159 ", + edition: None, + }; } declare_lint! { pub SOFT_UNSTABLE, Deny, - "a feature gate that doesn't break dependent crates" + "a feature gate that doesn't break dependent crates", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #64266 ", + edition: None, + }; } declare_lint_pass! { @@ -416,6 +542,7 @@ declare_lint_pass! 
diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs
index c658120b95..eef1cee8db 100644
--- a/src/librustc/lint/context.rs
+++ b/src/librustc/lint/context.rs
@@ -22,23 +22,21 @@
 use crate::hir::intravisit as hir_visit;
 use crate::hir::intravisit::Visitor;
 use crate::hir::map::{definitions::DisambiguatedDefPathData, DefPathData};
 use crate::lint::{EarlyLintPass, LateLintPass, EarlyLintPassObject, LateLintPassObject};
-use crate::lint::{LintArray, Level, Lint, LintId, LintPass, LintBuffer};
+use crate::lint::{Level, Lint, LintId, LintPass, LintBuffer, FutureIncompatibleInfo};
 use crate::lint::builtin::BuiltinLintDiagnostics;
 use crate::lint::levels::{LintLevelSets, LintLevelsBuilder};
 use crate::middle::privacy::AccessLevels;
-use crate::session::{config, early_error, Session};
-use crate::ty::{self, print::Printer, subst::Kind, TyCtxt, Ty};
+use crate::session::Session;
+use crate::ty::{self, print::Printer, subst::GenericArg, TyCtxt, Ty};
 use crate::ty::layout::{LayoutError, LayoutOf, TyLayout};
 use crate::util::nodemap::FxHashMap;
 use crate::util::common::time;
 use errors::DiagnosticBuilder;
 use std::slice;
-use std::default::Default as StdDefault;
-use rustc_data_structures::sync::{ReadGuard, Lock, ParallelIterator, join, par_iter};
+use rustc_data_structures::sync::{self, ParallelIterator, join, par_iter};
 use rustc_serialize::{Decoder, Decodable, Encoder, Encodable};
 use syntax::ast;
-use syntax::edition;
 use syntax::util::lev_distance::find_best_match_for_name;
 use syntax::visit as ast_visit;
 use syntax_pos::{MultiSpan, Span, symbol::Symbol};
@@ -50,24 +48,25 @@ use syntax_pos::{MultiSpan, Span, symbol::Symbol};
 pub struct LintStore {
     /// Registered lints. The bool is true if the lint was
     /// added by a plugin.
-    lints: Vec<(&'static Lint, bool)>,
+    lints: Vec<&'static Lint>,
 
-    /// Trait objects for each lint pass.
-    /// This is only `None` while performing a lint pass.
-    pre_expansion_passes: Option<Vec<EarlyLintPassObject>>,
-    early_passes: Option<Vec<EarlyLintPassObject>>,
-    late_passes: Lock<Option<Vec<LateLintPassObject>>>,
-    late_module_passes: Vec<LateLintPassObject>,
+    /// Constructor functions for each variety of lint pass.
+    ///
+    /// These should only be called once, but since we want to avoid locks or
+    /// interior mutability, we don't enforce this (and lints should, in theory,
+    /// be compatible with being constructed more than once, though not
+    /// necessarily in a sane manner. This is safe though.)
+    pre_expansion_passes: Vec<Box<dyn Fn() -> EarlyLintPassObject + sync::Send + sync::Sync>>,
+    early_passes: Vec<Box<dyn Fn() -> EarlyLintPassObject + sync::Send + sync::Sync>>,
+    late_passes: Vec<Box<dyn Fn() -> LateLintPassObject + sync::Send + sync::Sync>>,
+    /// This is unique in that we construct them per-module, so not once.
+    late_module_passes: Vec<Box<dyn Fn() -> LateLintPassObject + sync::Send + sync::Sync>>,
 
     /// Lints indexed by name.
     by_name: FxHashMap<String, TargetLint>,
 
     /// Map of registered lint groups to what lints they expand to.
     lint_groups: FxHashMap<&'static str, LintGroup>,
-
-    /// Extra info for future incompatibility lints, describing the
-    /// issue or RFC that caused the incompatibility.
-    future_incompatible: FxHashMap<LintId, FutureIncompatibleInfo>,
 }
 
 /// Lints that are buffered up early on in the `Session` before the
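With `LintStore` now holding constructor closures instead of pre-built pass objects, the registration methods shown further down accept a `Fn() -> ...PassObject` closure, and fresh pass instances can be constructed on demand (late module passes once per module). A rough usage sketch, assuming a hypothetical `ExamplePass` type implementing `EarlyLintPass` (illustrative only, not part of the patch):

    fn register_example(store: &mut LintStore) {
        // The store keeps the closure; the pass itself is only built when a
        // lint run needs it, so registration stays cheap and lock-free.
        store.register_early_pass(|| Box::new(ExamplePass));
    }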
@@ -81,18 +80,6 @@ pub struct BufferedEarlyLint {
     pub diagnostic: BuiltinLintDiagnostics,
 }
 
-/// Extra information for a future incompatibility lint. See the call
-/// to `register_future_incompatible` in `librustc_lint/lib.rs` for
-/// guidelines.
-pub struct FutureIncompatibleInfo {
-    pub id: LintId,
-    /// e.g., a URL for an issue/PR/RFC or error code
-    pub reference: &'static str,
-    /// If this is an edition fixing lint, the edition in which
-    /// this lint becomes obsolete
-    pub edition: Option<Edition>,
-}
-
 /// The target of the `by_name` map, which accounts for renaming/deprecation.
 enum TargetLint {
     /// A direct lint target
@@ -142,17 +129,16 @@ impl LintStore {
     pub fn new() -> LintStore {
         LintStore {
             lints: vec![],
-            pre_expansion_passes: Some(vec![]),
-            early_passes: Some(vec![]),
-            late_passes: Lock::new(Some(vec![])),
+            pre_expansion_passes: vec![],
+            early_passes: vec![],
+            late_passes: vec![],
             late_module_passes: vec![],
             by_name: Default::default(),
-            future_incompatible: Default::default(),
             lint_groups: Default::default(),
         }
     }
 
-    pub fn get_lints<'t>(&'t self) -> &'t [(&'static Lint, bool)] {
+    pub fn get_lints<'t>(&'t self) -> &'t [&'static Lint] {
         &self.lints
     }
 
@@ -168,101 +154,66 @@ impl LintStore {
             .collect()
     }
 
-    pub fn register_early_pass(&mut self,
-                               sess: Option<&Session>,
-                               from_plugin: bool,
-                               register_only: bool,
-                               pass: EarlyLintPassObject) {
-        self.push_pass(sess, from_plugin, &pass);
-        if !register_only {
-            self.early_passes.as_mut().unwrap().push(pass);
-        }
+    pub fn register_early_pass(
+        &mut self,
+        pass: impl Fn() -> EarlyLintPassObject + 'static + sync::Send + sync::Sync
+    ) {
+        self.early_passes.push(Box::new(pass));
     }
 
     pub fn register_pre_expansion_pass(
         &mut self,
-        sess: Option<&Session>,
-        from_plugin: bool,
-        register_only: bool,
-        pass: EarlyLintPassObject,
+        pass: impl Fn() -> EarlyLintPassObject + 'static + sync::Send + sync::Sync,
     ) {
-        self.push_pass(sess, from_plugin, &pass);
-        if !register_only {
-            self.pre_expansion_passes.as_mut().unwrap().push(pass);
-        }
+        self.pre_expansion_passes.push(Box::new(pass));
     }
 
-    pub fn register_late_pass(&mut self,
-                              sess: Option<&Session>,
-                              from_plugin: bool,
-                              register_only: bool,
-                              per_module: bool,
-                              pass: LateLintPassObject) {
-        self.push_pass(sess, from_plugin, &pass);
-        if !register_only {
-            if per_module {
-                self.late_module_passes.push(pass);
-            } else {
-                self.late_passes.lock().as_mut().unwrap().push(pass);
-            }
-        }
+    pub fn register_late_pass(
+        &mut self,
+        pass: impl Fn() -> LateLintPassObject + 'static + sync::Send + sync::Sync,
+    ) {
+        self.late_passes.push(Box::new(pass));
+    }
+
+    pub fn register_late_mod_pass(
+        &mut self,
+        pass: impl Fn() -> LateLintPassObject + 'static + sync::Send + sync::Sync,
+    ) {
+        self.late_module_passes.push(Box::new(pass));
     }
 
     // Helper method for register_early/late_pass
-    fn push_pass(&mut self,
-                 sess: Option<&Session>,
-                 from_plugin: bool,
-                 pass: &Box